OLD | NEW |
---|---|
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/objects.h" | 5 #include "src/objects.h" |
6 | 6 |
7 #include <cmath> | 7 #include <cmath> |
8 #include <iomanip> | 8 #include <iomanip> |
9 #include <sstream> | 9 #include <sstream> |
10 | 10 |
(...skipping 11039 matching lines...) | |
11050 isolate->builtins()->builtin(Builtins::kCompileOptimizedConcurrent)); | 11050 isolate->builtins()->builtin(Builtins::kCompileOptimizedConcurrent)); |
11051 // No write barrier required, since the builtin is part of the root set. | 11051 // No write barrier required, since the builtin is part of the root set. |
11052 } | 11052 } |
11053 | 11053 |
11054 | 11054 |
11055 void SharedFunctionInfo::AddSharedCodeToOptimizedCodeMap( | 11055 void SharedFunctionInfo::AddSharedCodeToOptimizedCodeMap( |
11056 Handle<SharedFunctionInfo> shared, Handle<Code> code) { | 11056 Handle<SharedFunctionInfo> shared, Handle<Code> code) { |
11057 Isolate* isolate = shared->GetIsolate(); | 11057 Isolate* isolate = shared->GetIsolate(); |
11058 if (isolate->serializer_enabled()) return; | 11058 if (isolate->serializer_enabled()) return; |
11059 DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION); | 11059 DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION); |
11060 Handle<Object> value(shared->optimized_code_map(), isolate); | 11060 // Empty code maps are unsupported. |
11061 if (value->IsSmi()) return; // Empty code maps are unsupported. | 11061 if (shared->OptimizedCodeMapIsCleared()) return; |
11062 Handle<FixedArray> code_map = Handle<FixedArray>::cast(value); | 11062 shared->optimized_code_map()->set(kSharedCodeIndex, *code); |
11063 code_map->set(kSharedCodeIndex, *code); | |
11064 } | 11063 } |
11065 | 11064 |
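Note on this hunk: the raw value->IsSmi() sentinel test is replaced by the new OptimizedCodeMapIsCleared() predicate, and optimized_code_map() is now used as if it returned a FixedArray* directly. The predicate's definition is not part of the hunks shown here; a minimal sketch of what it presumably looks like (its exact body and location, likely objects-inl.h, are assumptions inferred from the ClearOptimizedCodeMap change further down):

// Sketch only, not shown in this CL's hunks: the cleared state is
// presumably represented by a dedicated heap root rather than
// Smi::FromInt(0), which lets the accessor stay typed as FixedArray*.
bool SharedFunctionInfo::OptimizedCodeMapIsCleared() const {
  return optimized_code_map() == GetHeap()->cleared_optimized_code_map();
}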
11066 | 11065 |
11067 void SharedFunctionInfo::AddToOptimizedCodeMap( | 11066 void SharedFunctionInfo::AddToOptimizedCodeMap( |
11068 Handle<SharedFunctionInfo> shared, Handle<Context> native_context, | 11067 Handle<SharedFunctionInfo> shared, Handle<Context> native_context, |
11069 Handle<HeapObject> code, Handle<LiteralsArray> literals, | 11068 Handle<HeapObject> code, Handle<LiteralsArray> literals, |
11070 BailoutId osr_ast_id) { | 11069 BailoutId osr_ast_id) { |
11071 Isolate* isolate = shared->GetIsolate(); | 11070 Isolate* isolate = shared->GetIsolate(); |
11072 if (isolate->serializer_enabled()) return; | 11071 if (isolate->serializer_enabled()) return; |
11073 DCHECK(*code == isolate->heap()->undefined_value() || | 11072 DCHECK(*code == isolate->heap()->undefined_value() || |
11074 !shared->SearchOptimizedCodeMap(*native_context, osr_ast_id).code); | 11073 !shared->SearchOptimizedCodeMap(*native_context, osr_ast_id).code); |
11075 DCHECK(*code == isolate->heap()->undefined_value() || | 11074 DCHECK(*code == isolate->heap()->undefined_value() || |
11076 Code::cast(*code)->kind() == Code::OPTIMIZED_FUNCTION); | 11075 Code::cast(*code)->kind() == Code::OPTIMIZED_FUNCTION); |
11077 DCHECK(native_context->IsNativeContext()); | 11076 DCHECK(native_context->IsNativeContext()); |
11078 STATIC_ASSERT(kEntryLength == 4); | 11077 STATIC_ASSERT(kEntryLength == 4); |
11079 Handle<FixedArray> new_code_map; | 11078 Handle<FixedArray> new_code_map; |
11080 Handle<Object> value(shared->optimized_code_map(), isolate); | |
11081 int entry; | 11079 int entry; |
11082 if (value->IsSmi()) { | 11080 if (shared->OptimizedCodeMapIsCleared()) { |
11083 // No optimized code map. | |
11084 DCHECK_EQ(0, Smi::cast(*value)->value()); | |
11085 new_code_map = isolate->factory()->NewFixedArray(kInitialLength, TENURED); | 11081 new_code_map = isolate->factory()->NewFixedArray(kInitialLength, TENURED); |
11086 entry = kEntriesStart; | 11082 entry = kEntriesStart; |
11087 } else { | 11083 } else { |
11088 Handle<FixedArray> old_code_map = Handle<FixedArray>::cast(value); | 11084 Handle<FixedArray> old_code_map(shared->optimized_code_map(), isolate); |
11089 entry = shared->SearchOptimizedCodeMapEntry(*native_context, osr_ast_id); | 11085 entry = shared->SearchOptimizedCodeMapEntry(*native_context, osr_ast_id); |
11090 if (entry > kSharedCodeIndex) { | 11086 if (entry > kSharedCodeIndex) { |
11091 // Found an existing context-specific entry, it must not contain any code. | 11087 // Found an existing context-specific entry, it must not contain any code. |
11092 DCHECK_EQ(isolate->heap()->undefined_value(), | 11088 DCHECK_EQ(isolate->heap()->undefined_value(), |
11093 old_code_map->get(entry + kCachedCodeOffset)); | 11089 old_code_map->get(entry + kCachedCodeOffset)); |
11094 // Just set the code and literals to the entry. | 11090 // Just set the code and literals to the entry. |
11095 old_code_map->set(entry + kCachedCodeOffset, *code); | 11091 old_code_map->set(entry + kCachedCodeOffset, *code); |
11096 old_code_map->set(entry + kLiteralsOffset, *literals); | 11092 old_code_map->set(entry + kLiteralsOffset, *literals); |
11097 return; | 11093 return; |
11098 } | 11094 } |
11099 | 11095 |
11100 // Copy old optimized code map and append one new entry. | 11096 // Copy old optimized code map and append one new entry. |
11101 new_code_map = isolate->factory()->CopyFixedArrayAndGrow( | 11097 new_code_map = isolate->factory()->CopyFixedArrayAndGrow( |
11102 old_code_map, kEntryLength, TENURED); | 11098 old_code_map, kEntryLength, TENURED); |
11103 // TODO(mstarzinger): Temporary workaround. The allocation above might have | 11099 // TODO(mstarzinger): Temporary workaround. The allocation above might have |
11104 // flushed the optimized code map and the copy we created is full of holes. | 11100 // flushed the optimized code map and the copy we created is full of holes. |
11105 // For now we just give up on adding the entry and pretend it got flushed. | 11101 // For now we just give up on adding the entry and pretend it got flushed. |
11106 if (shared->optimized_code_map()->IsSmi()) return; | 11102 if (shared->OptimizedCodeMapIsCleared()) return; |
11107 entry = old_code_map->length(); | 11103 entry = old_code_map->length(); |
11108 } | 11104 } |
11109 new_code_map->set(entry + kContextOffset, *native_context); | 11105 new_code_map->set(entry + kContextOffset, *native_context); |
11110 new_code_map->set(entry + kCachedCodeOffset, *code); | 11106 new_code_map->set(entry + kCachedCodeOffset, *code); |
11111 new_code_map->set(entry + kLiteralsOffset, *literals); | 11107 new_code_map->set(entry + kLiteralsOffset, *literals); |
11112 new_code_map->set(entry + kOsrAstIdOffset, Smi::FromInt(osr_ast_id.ToInt())); | 11108 new_code_map->set(entry + kOsrAstIdOffset, Smi::FromInt(osr_ast_id.ToInt())); |
11113 | 11109 |
11114 #ifdef DEBUG | 11110 #ifdef DEBUG |
11115 for (int i = kEntriesStart; i < new_code_map->length(); i += kEntryLength) { | 11111 for (int i = kEntriesStart; i < new_code_map->length(); i += kEntryLength) { |
11116 DCHECK(new_code_map->get(i + kContextOffset)->IsNativeContext()); | 11112 DCHECK(new_code_map->get(i + kContextOffset)->IsNativeContext()); |
11117 Object* code = new_code_map->get(i + kCachedCodeOffset); | 11113 Object* code = new_code_map->get(i + kCachedCodeOffset); |
11118 if (code != isolate->heap()->undefined_value()) { | 11114 if (code != isolate->heap()->undefined_value()) { |
11119 DCHECK(code->IsCode()); | 11115 DCHECK(code->IsCode()); |
11120 DCHECK(Code::cast(code)->kind() == Code::OPTIMIZED_FUNCTION); | 11116 DCHECK(Code::cast(code)->kind() == Code::OPTIMIZED_FUNCTION); |
11121 } | 11117 } |
11122 DCHECK(new_code_map->get(i + kLiteralsOffset)->IsFixedArray()); | 11118 DCHECK(new_code_map->get(i + kLiteralsOffset)->IsFixedArray()); |
11123 DCHECK(new_code_map->get(i + kOsrAstIdOffset)->IsSmi()); | 11119 DCHECK(new_code_map->get(i + kOsrAstIdOffset)->IsSmi()); |
11124 } | 11120 } |
11125 #endif | 11121 #endif |
11126 | 11122 |
11127 // Zap any old optimized code map. | 11123 // Zap any old optimized code map. |
11128 if (!shared->optimized_code_map()->IsSmi()) { | 11124 if (!shared->OptimizedCodeMapIsCleared()) { |
11129 FixedArray* old_code_map = FixedArray::cast(shared->optimized_code_map()); | 11125 FixedArray* old_code_map = shared->optimized_code_map(); |
11130 old_code_map->FillWithHoles(0, old_code_map->length()); | 11126 old_code_map->FillWithHoles(0, old_code_map->length()); |
11131 } | 11127 } |
11132 | 11128 |
11133 shared->set_optimized_code_map(*new_code_map); | 11129 shared->set_optimized_code_map(*new_code_map); |
11134 } | 11130 } |
11135 | 11131 |
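For readers of the two hunks above: the layout constants (kSharedCodeIndex, kEntriesStart, kEntryLength and the per-entry offsets) are declared in objects.h, not in this file. A sketch of the presumed layout, with values inferred from the diff (the STATIC_ASSERT pins kEntryLength to 4; the remaining values are assumptions):

// Presumed layout of the optimized code map (a FixedArray):
//   [kSharedCodeIndex]                  context-independent code, or undefined
//   [kEntriesStart + n * kEntryLength]  per-context entries of the form
//                                       {context, code, literals, osr ast id}
static const int kSharedCodeIndex = 0;
static const int kEntriesStart = 1;
static const int kContextOffset = 0;
static const int kCachedCodeOffset = 1;
static const int kLiteralsOffset = 2;
static const int kOsrAstIdOffset = 3;
static const int kEntryLength = 4;
static const int kInitialLength = kEntriesStart + kEntryLength;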
11136 | 11132 |
11137 void SharedFunctionInfo::ClearOptimizedCodeMap() { | 11133 void SharedFunctionInfo::ClearOptimizedCodeMap() { |
11138 // Zap any old optimized code map. | 11134 // Zap any old optimized code map. |
11139 if (!optimized_code_map()->IsSmi()) { | 11135 FixedArray* cleared_map = GetHeap()->cleared_optimized_code_map(); |
11140 FixedArray* old_code_map = FixedArray::cast(optimized_code_map()); | 11136 if (optimized_code_map() != cleared_map) { |
Michael Starzinger (2015/11/13 12:14:22):
suggestion: Not sure this optimization is worth it
mvstanton (2015/11/17 20:32:12):
Done.
11137 FixedArray* old_code_map = optimized_code_map(); | |
11141 old_code_map->FillWithHoles(0, old_code_map->length()); | 11138 old_code_map->FillWithHoles(0, old_code_map->length()); |
11142 } | 11139 } |
11143 | 11140 |
11144 set_optimized_code_map(Smi::FromInt(0)); | 11141 set_optimized_code_map(cleared_map, SKIP_WRITE_BARRIER); |
11145 } | 11142 } |
11146 | 11143 |
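Design note on ClearOptimizedCodeMap above: storing the cleared sentinel with SKIP_WRITE_BARRIER is safe for the same reason given for the builtin store near the top of this diff, namely that the stored object is part of the root set. The sentinel's registration is not shown in this file; it presumably amounts to a heap root-list entry along these lines (an assumption, following heap.h's V(type, name, CamelName) convention):

// Assumed entry in heap.h's strong root list; this is what would make the
// SKIP_WRITE_BARRIER store above safe.
V(FixedArray, cleared_optimized_code_map, ClearedOptimizedCodeMap)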
11147 | 11144 |
11148 void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code, | 11145 void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code, |
11149 const char* reason) { | 11146 const char* reason) { |
11150 DisallowHeapAllocation no_gc; | 11147 DisallowHeapAllocation no_gc; |
11151 if (optimized_code_map()->IsSmi()) return; | 11148 if (OptimizedCodeMapIsCleared()) return; |
11152 | 11149 |
11153 Heap* heap = GetHeap(); | 11150 Heap* heap = GetHeap(); |
11154 FixedArray* code_map = FixedArray::cast(optimized_code_map()); | 11151 FixedArray* code_map = optimized_code_map(); |
11155 int dst = kEntriesStart; | 11152 int dst = kEntriesStart; |
11156 int length = code_map->length(); | 11153 int length = code_map->length(); |
11157 for (int src = kEntriesStart; src < length; src += kEntryLength) { | 11154 for (int src = kEntriesStart; src < length; src += kEntryLength) { |
11158 DCHECK(code_map->get(src)->IsNativeContext()); | 11155 DCHECK(code_map->get(src)->IsNativeContext()); |
11159 if (code_map->get(src + kCachedCodeOffset) == optimized_code) { | 11156 if (code_map->get(src + kCachedCodeOffset) == optimized_code) { |
11160 BailoutId osr(Smi::cast(code_map->get(src + kOsrAstIdOffset))->value()); | 11157 BailoutId osr(Smi::cast(code_map->get(src + kOsrAstIdOffset))->value()); |
11161 if (FLAG_trace_opt) { | 11158 if (FLAG_trace_opt) { |
11162 PrintF("[evicting entry from optimizing code map (%s) for ", reason); | 11159 PrintF("[evicting entry from optimizing code map (%s) for ", reason); |
11163 ShortPrint(); | 11160 ShortPrint(); |
11164 if (osr.IsNone()) { | 11161 if (osr.IsNone()) { |
(...skipping 38 matching lines...) | |
11203 length - dst); | 11200 length - dst); |
11204 if (code_map->length() == kEntriesStart && | 11201 if (code_map->length() == kEntriesStart && |
11205 code_map->get(kSharedCodeIndex)->IsUndefined()) { | 11202 code_map->get(kSharedCodeIndex)->IsUndefined()) { |
11206 ClearOptimizedCodeMap(); | 11203 ClearOptimizedCodeMap(); |
11207 } | 11204 } |
11208 } | 11205 } |
11209 } | 11206 } |
11210 | 11207 |
11211 | 11208 |
11212 void SharedFunctionInfo::TrimOptimizedCodeMap(int shrink_by) { | 11209 void SharedFunctionInfo::TrimOptimizedCodeMap(int shrink_by) { |
11213 FixedArray* code_map = FixedArray::cast(optimized_code_map()); | 11210 FixedArray* code_map = optimized_code_map(); |
11214 DCHECK(shrink_by % kEntryLength == 0); | 11211 DCHECK(shrink_by % kEntryLength == 0); |
11215 DCHECK(shrink_by <= code_map->length() - kEntriesStart); | 11212 DCHECK(shrink_by <= code_map->length() - kEntriesStart); |
11216 // Always trim even when array is cleared because of heap verifier. | 11213 // Always trim even when array is cleared because of heap verifier. |
11217 GetHeap()->RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>(code_map, | 11214 GetHeap()->RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>(code_map, |
11218 shrink_by); | 11215 shrink_by); |
11219 if (code_map->length() == kEntriesStart && | 11216 if (code_map->length() == kEntriesStart && |
11220 code_map->get(kSharedCodeIndex)->IsUndefined()) { | 11217 code_map->get(kSharedCodeIndex)->IsUndefined()) { |
11221 ClearOptimizedCodeMap(); | 11218 ClearOptimizedCodeMap(); |
11222 } | 11219 } |
11223 } | 11220 } |
(...skipping 1121 matching lines...) | |
12345 set_opt_count(0); | 12342 set_opt_count(0); |
12346 set_deopt_count(0); | 12343 set_deopt_count(0); |
12347 } | 12344 } |
12348 } | 12345 } |
12349 | 12346 |
12350 | 12347 |
12351 int SharedFunctionInfo::SearchOptimizedCodeMapEntry(Context* native_context, | 12348 int SharedFunctionInfo::SearchOptimizedCodeMapEntry(Context* native_context, |
12352 BailoutId osr_ast_id) { | 12349 BailoutId osr_ast_id) { |
12353 DisallowHeapAllocation no_gc; | 12350 DisallowHeapAllocation no_gc; |
12354 DCHECK(native_context->IsNativeContext()); | 12351 DCHECK(native_context->IsNativeContext()); |
12355 Object* value = optimized_code_map(); | 12352 if (!OptimizedCodeMapIsCleared()) { |
12356 if (!value->IsSmi()) { | 12353 FixedArray* optimized_code_map = this->optimized_code_map(); |
12357 FixedArray* optimized_code_map = FixedArray::cast(value); | |
12358 int length = optimized_code_map->length(); | 12354 int length = optimized_code_map->length(); |
12359 Smi* osr_ast_id_smi = Smi::FromInt(osr_ast_id.ToInt()); | 12355 Smi* osr_ast_id_smi = Smi::FromInt(osr_ast_id.ToInt()); |
12360 for (int i = kEntriesStart; i < length; i += kEntryLength) { | 12356 for (int i = kEntriesStart; i < length; i += kEntryLength) { |
12361 if (optimized_code_map->get(i + kContextOffset) == native_context && | 12357 if (optimized_code_map->get(i + kContextOffset) == native_context && |
12362 optimized_code_map->get(i + kOsrAstIdOffset) == osr_ast_id_smi) { | 12358 optimized_code_map->get(i + kOsrAstIdOffset) == osr_ast_id_smi) { |
12363 return i; | 12359 return i; |
12364 } | 12360 } |
12365 } | 12361 } |
12366 Object* shared_code = optimized_code_map->get(kSharedCodeIndex); | 12362 Object* shared_code = optimized_code_map->get(kSharedCodeIndex); |
12367 if (shared_code->IsCode() && osr_ast_id.IsNone()) { | 12363 if (shared_code->IsCode() && osr_ast_id.IsNone()) { |
12368 return kSharedCodeIndex; | 12364 return kSharedCodeIndex; |
12369 } | 12365 } |
12370 } | 12366 } |
12371 return -1; | 12367 return -1; |
12372 } | 12368 } |
12373 | 12369 |
12374 | 12370 |
12375 CodeAndLiterals SharedFunctionInfo::SearchOptimizedCodeMap( | 12371 CodeAndLiterals SharedFunctionInfo::SearchOptimizedCodeMap( |
12376 Context* native_context, BailoutId osr_ast_id) { | 12372 Context* native_context, BailoutId osr_ast_id) { |
12377 CodeAndLiterals result = {nullptr, nullptr}; | 12373 CodeAndLiterals result = {nullptr, nullptr}; |
12378 int entry = SearchOptimizedCodeMapEntry(native_context, osr_ast_id); | 12374 int entry = SearchOptimizedCodeMapEntry(native_context, osr_ast_id); |
12379 if (entry != kNotFound) { | 12375 if (entry != kNotFound) { |
12380 FixedArray* code_map = FixedArray::cast(optimized_code_map()); | 12376 FixedArray* code_map = optimized_code_map(); |
12381 if (entry == kSharedCodeIndex) { | 12377 if (entry == kSharedCodeIndex) { |
12382 result = {Code::cast(code_map->get(kSharedCodeIndex)), nullptr}; | 12378 result = {Code::cast(code_map->get(kSharedCodeIndex)), nullptr}; |
12383 | 12379 |
12384 } else { | 12380 } else { |
12385 DCHECK_LE(entry + kEntryLength, code_map->length()); | 12381 DCHECK_LE(entry + kEntryLength, code_map->length()); |
12386 Object* code = code_map->get(entry + kCachedCodeOffset); | 12382 Object* code = code_map->get(entry + kCachedCodeOffset); |
12387 result = {code->IsUndefined() ? nullptr : Code::cast(code), | 12383 result = {code->IsUndefined() ? nullptr : Code::cast(code), |
12388 LiteralsArray::cast(code_map->get(entry + kLiteralsOffset))}; | 12384 LiteralsArray::cast(code_map->get(entry + kLiteralsOffset))}; |
12389 } | 12385 } |
12390 } | 12386 } |
12391 if (FLAG_trace_opt && !optimized_code_map()->IsSmi() && | 12387 if (FLAG_trace_opt && !OptimizedCodeMapIsCleared() && |
12392 result.code == nullptr) { | 12388 result.code == nullptr) { |
12393 PrintF("[didn't find optimized code in optimized code map for "); | 12389 PrintF("[didn't find optimized code in optimized code map for "); |
12394 ShortPrint(); | 12390 ShortPrint(); |
12395 PrintF("]\n"); | 12391 PrintF("]\n"); |
12396 } | 12392 } |
12397 return result; | 12393 return result; |
12398 } | 12394 } |
12399 | 12395 |
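As a usage note for the lookup path above: a typical caller checks the map for the current native context before requesting a fresh optimization. The sketch below is hypothetical and not part of this CL; the surrounding function and variable names are illustrative only.

// Hypothetical caller sketch; 'shared' is a SharedFunctionInfo* and
// 'function' a JSFunction*, both illustrative.
CodeAndLiterals cached = shared->SearchOptimizedCodeMap(
    function->context()->native_context(), BailoutId::None());
if (cached.code != nullptr) {
  // Cache hit: install the cached optimized code instead of starting a new
  // optimization job. Note cached.literals is nullptr when the hit came
  // from the shared (context-independent) slot.
  function->ReplaceCode(cached.code);
}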
12400 | 12396 |
12401 #define DECLARE_TAG(ignore1, name, ignore2) name, | 12397 #define DECLARE_TAG(ignore1, name, ignore2) name, |
(...skipping 5508 matching lines...) | |
17910 if (cell->value() != *new_value) { | 17906 if (cell->value() != *new_value) { |
17911 cell->set_value(*new_value); | 17907 cell->set_value(*new_value); |
17912 Isolate* isolate = cell->GetIsolate(); | 17908 Isolate* isolate = cell->GetIsolate(); |
17913 cell->dependent_code()->DeoptimizeDependentCodeGroup( | 17909 cell->dependent_code()->DeoptimizeDependentCodeGroup( |
17914 isolate, DependentCode::kPropertyCellChangedGroup); | 17910 isolate, DependentCode::kPropertyCellChangedGroup); |
17915 } | 17911 } |
17916 } | 17912 } |
17917 | 17913 |
17918 } // namespace internal | 17914 } // namespace internal |
17919 } // namespace v8 | 17915 } // namespace v8 |