OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/objects.h" | 5 #include "src/objects.h" |
6 | 6 |
7 #include <cmath> | 7 #include <cmath> |
8 #include <iomanip> | 8 #include <iomanip> |
9 #include <sstream> | 9 #include <sstream> |
10 | 10 |
(...skipping 11244 matching lines...)
11255 isolate->builtins()->builtin(Builtins::kCompileOptimizedConcurrent)); | 11255 isolate->builtins()->builtin(Builtins::kCompileOptimizedConcurrent)); |
11256 // No write barrier required, since the builtin is part of the root set. | 11256 // No write barrier required, since the builtin is part of the root set. |
11257 } | 11257 } |
11258 | 11258 |
11259 | 11259 |
11260 void SharedFunctionInfo::AddSharedCodeToOptimizedCodeMap( | 11260 void SharedFunctionInfo::AddSharedCodeToOptimizedCodeMap( |
11261 Handle<SharedFunctionInfo> shared, Handle<Code> code) { | 11261 Handle<SharedFunctionInfo> shared, Handle<Code> code) { |
11262 Isolate* isolate = shared->GetIsolate(); | 11262 Isolate* isolate = shared->GetIsolate(); |
11263 if (isolate->serializer_enabled()) return; | 11263 if (isolate->serializer_enabled()) return; |
11264 DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION); | 11264 DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION); |
11265 Handle<Object> value(shared->optimized_code_map(), isolate); | 11265 // Empty code maps are unsupported. |
11266 if (value->IsSmi()) return; // Empty code maps are unsupported. | 11266 if (shared->OptimizedCodeMapIsCleared()) return; |
11267 Handle<FixedArray> code_map = Handle<FixedArray>::cast(value); | 11267 shared->optimized_code_map()->set(kSharedCodeIndex, *code); |
11268 code_map->set(kSharedCodeIndex, *code); | |
11269 } | 11268 } |
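
Note: throughout this change the raw optimized_code_map()->IsSmi() test is replaced by the new OptimizedCodeMapIsCleared() predicate, whose definition is not part of the hunks shown here. A minimal sketch of what it presumably looks like, assuming the accessor lives in objects-inl.h and compares against the cleared_optimized_code_map heap sentinel used further down:

    // Sketch only; the real definition is outside this diff. The "cleared"
    // state is now a canonical empty map owned by the heap rather than
    // Smi::FromInt(0), so a pointer comparison suffices and
    // optimized_code_map() can always be treated as a FixedArray*.
    bool SharedFunctionInfo::OptimizedCodeMapIsCleared() const {
      return optimized_code_map() == GetHeap()->cleared_optimized_code_map();
    }
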
11270 | 11269 |
11271 | 11270 |
11272 void SharedFunctionInfo::AddToOptimizedCodeMap( | 11271 void SharedFunctionInfo::AddToOptimizedCodeMap( |
11273 Handle<SharedFunctionInfo> shared, Handle<Context> native_context, | 11272 Handle<SharedFunctionInfo> shared, Handle<Context> native_context, |
11274 Handle<HeapObject> code, Handle<LiteralsArray> literals, | 11273 Handle<HeapObject> code, Handle<LiteralsArray> literals, |
11275 BailoutId osr_ast_id) { | 11274 BailoutId osr_ast_id) { |
11276 Isolate* isolate = shared->GetIsolate(); | 11275 Isolate* isolate = shared->GetIsolate(); |
11277 if (isolate->serializer_enabled()) return; | 11276 if (isolate->serializer_enabled()) return; |
11278 DCHECK(*code == isolate->heap()->undefined_value() || | 11277 DCHECK(*code == isolate->heap()->undefined_value() || |
11279 !shared->SearchOptimizedCodeMap(*native_context, osr_ast_id).code); | 11278 !shared->SearchOptimizedCodeMap(*native_context, osr_ast_id).code); |
11280 DCHECK(*code == isolate->heap()->undefined_value() || | 11279 DCHECK(*code == isolate->heap()->undefined_value() || |
11281 Code::cast(*code)->kind() == Code::OPTIMIZED_FUNCTION); | 11280 Code::cast(*code)->kind() == Code::OPTIMIZED_FUNCTION); |
11282 DCHECK(native_context->IsNativeContext()); | 11281 DCHECK(native_context->IsNativeContext()); |
11283 STATIC_ASSERT(kEntryLength == 4); | 11282 STATIC_ASSERT(kEntryLength == 4); |
11284 Handle<FixedArray> new_code_map; | 11283 Handle<FixedArray> new_code_map; |
11285 Handle<Object> value(shared->optimized_code_map(), isolate); | |
11286 int entry; | 11284 int entry; |
11287 if (value->IsSmi()) { | 11285 if (shared->OptimizedCodeMapIsCleared()) { |
11288 // No optimized code map. | |
11289 DCHECK_EQ(0, Smi::cast(*value)->value()); | |
11290 new_code_map = isolate->factory()->NewFixedArray(kInitialLength, TENURED); | 11286 new_code_map = isolate->factory()->NewFixedArray(kInitialLength, TENURED); |
11291 entry = kEntriesStart; | 11287 entry = kEntriesStart; |
11292 } else { | 11288 } else { |
11293 Handle<FixedArray> old_code_map = Handle<FixedArray>::cast(value); | 11289 Handle<FixedArray> old_code_map(shared->optimized_code_map(), isolate); |
11294 entry = shared->SearchOptimizedCodeMapEntry(*native_context, osr_ast_id); | 11290 entry = shared->SearchOptimizedCodeMapEntry(*native_context, osr_ast_id); |
11295 if (entry > kSharedCodeIndex) { | 11291 if (entry > kSharedCodeIndex) { |
11296 // Found an existing context-specific entry, it must not contain any code. | 11292 // Found an existing context-specific entry, it must not contain any code. |
11297 DCHECK_EQ(isolate->heap()->undefined_value(), | 11293 DCHECK_EQ(isolate->heap()->undefined_value(), |
11298 old_code_map->get(entry + kCachedCodeOffset)); | 11294 old_code_map->get(entry + kCachedCodeOffset)); |
11299 // Just set the code and literals to the entry. | 11295 // Just set the code and literals to the entry. |
11300 old_code_map->set(entry + kCachedCodeOffset, *code); | 11296 old_code_map->set(entry + kCachedCodeOffset, *code); |
11301 old_code_map->set(entry + kLiteralsOffset, *literals); | 11297 old_code_map->set(entry + kLiteralsOffset, *literals); |
11302 return; | 11298 return; |
11303 } | 11299 } |
11304 | 11300 |
11305 // Copy old optimized code map and append one new entry. | 11301 // Copy old optimized code map and append one new entry. |
11306 new_code_map = isolate->factory()->CopyFixedArrayAndGrow( | 11302 new_code_map = isolate->factory()->CopyFixedArrayAndGrow( |
11307 old_code_map, kEntryLength, TENURED); | 11303 old_code_map, kEntryLength, TENURED); |
11308 // TODO(mstarzinger): Temporary workaround. The allocation above might have | 11304 // TODO(mstarzinger): Temporary workaround. The allocation above might have |
11309 // flushed the optimized code map and the copy we created is full of holes. | 11305 // flushed the optimized code map and the copy we created is full of holes. |
11310 // For now we just give up on adding the entry and pretend it got flushed. | 11306 // For now we just give up on adding the entry and pretend it got flushed. |
11311 if (shared->optimized_code_map()->IsSmi()) return; | 11307 if (shared->OptimizedCodeMapIsCleared()) return; |
11312 entry = old_code_map->length(); | 11308 entry = old_code_map->length(); |
11313 } | 11309 } |
11314 new_code_map->set(entry + kContextOffset, *native_context); | 11310 new_code_map->set(entry + kContextOffset, *native_context); |
11315 new_code_map->set(entry + kCachedCodeOffset, *code); | 11311 new_code_map->set(entry + kCachedCodeOffset, *code); |
11316 new_code_map->set(entry + kLiteralsOffset, *literals); | 11312 new_code_map->set(entry + kLiteralsOffset, *literals); |
11317 new_code_map->set(entry + kOsrAstIdOffset, Smi::FromInt(osr_ast_id.ToInt())); | 11313 new_code_map->set(entry + kOsrAstIdOffset, Smi::FromInt(osr_ast_id.ToInt())); |
11318 | 11314 |
11319 #ifdef DEBUG | 11315 #ifdef DEBUG |
11320 for (int i = kEntriesStart; i < new_code_map->length(); i += kEntryLength) { | 11316 for (int i = kEntriesStart; i < new_code_map->length(); i += kEntryLength) { |
11321 DCHECK(new_code_map->get(i + kContextOffset)->IsNativeContext()); | 11317 DCHECK(new_code_map->get(i + kContextOffset)->IsNativeContext()); |
11322 Object* code = new_code_map->get(i + kCachedCodeOffset); | 11318 Object* code = new_code_map->get(i + kCachedCodeOffset); |
11323 if (code != isolate->heap()->undefined_value()) { | 11319 if (code != isolate->heap()->undefined_value()) { |
11324 DCHECK(code->IsCode()); | 11320 DCHECK(code->IsCode()); |
11325 DCHECK(Code::cast(code)->kind() == Code::OPTIMIZED_FUNCTION); | 11321 DCHECK(Code::cast(code)->kind() == Code::OPTIMIZED_FUNCTION); |
11326 } | 11322 } |
11327 DCHECK(new_code_map->get(i + kLiteralsOffset)->IsFixedArray()); | 11323 DCHECK(new_code_map->get(i + kLiteralsOffset)->IsFixedArray()); |
11328 DCHECK(new_code_map->get(i + kOsrAstIdOffset)->IsSmi()); | 11324 DCHECK(new_code_map->get(i + kOsrAstIdOffset)->IsSmi()); |
11329 } | 11325 } |
11330 #endif | 11326 #endif |
11331 | 11327 |
11332 // Zap any old optimized code map. | 11328 // Zap any old optimized code map. |
11333 if (!shared->optimized_code_map()->IsSmi()) { | 11329 if (!shared->OptimizedCodeMapIsCleared()) { |
11334 FixedArray* old_code_map = FixedArray::cast(shared->optimized_code_map()); | 11330 FixedArray* old_code_map = shared->optimized_code_map(); |
11335 old_code_map->FillWithHoles(0, old_code_map->length()); | 11331 old_code_map->FillWithHoles(0, old_code_map->length()); |
11336 } | 11332 } |
11337 | 11333 |
11338 shared->set_optimized_code_map(*new_code_map); | 11334 shared->set_optimized_code_map(*new_code_map); |
11339 } | 11335 } |
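
The layout that AddToOptimizedCodeMap populates is defined by constants in objects.h that are not shown in this diff. An illustrative sketch of those constants, with numeric values inferred from the code above rather than quoted from the header:

    // Assumed layout of the optimized code map FixedArray (values inferred,
    // not copied from objects.h): one leading slot for context-independent
    // code, followed by 4-slot per-context entries.
    static const int kSharedCodeIndex  = 0;  // context-independent Code or undefined
    static const int kEntriesStart     = 1;  // first per-context entry
    static const int kContextOffset    = 0;  // entry slot: native context
    static const int kCachedCodeOffset = 1;  // entry slot: optimized Code or undefined
    static const int kLiteralsOffset   = 2;  // entry slot: LiteralsArray
    static const int kOsrAstIdOffset   = 3;  // entry slot: OSR ast id as a Smi
    static const int kEntryLength      = 4;  // matches the STATIC_ASSERT above
    static const int kInitialLength    = kEntriesStart + kEntryLength;
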
11340 | 11336 |
11341 | 11337 |
11342 void SharedFunctionInfo::ClearOptimizedCodeMap() { | 11338 void SharedFunctionInfo::ClearOptimizedCodeMap() { |
11343 // Zap any old optimized code map. | 11339 // Zap any old optimized code map. |
11344 if (!optimized_code_map()->IsSmi()) { | 11340 if (!OptimizedCodeMapIsCleared()) { |
11345 FixedArray* old_code_map = FixedArray::cast(optimized_code_map()); | 11341 FixedArray* old_code_map = optimized_code_map(); |
11346 old_code_map->FillWithHoles(0, old_code_map->length()); | 11342 old_code_map->FillWithHoles(0, old_code_map->length()); |
11347 } | 11343 } |
11348 | 11344 |
11349 set_optimized_code_map(Smi::FromInt(0)); | 11345 FixedArray* cleared_map = GetHeap()->cleared_optimized_code_map(); |
| 11346 set_optimized_code_map(cleared_map, SKIP_WRITE_BARRIER); |
11350 } | 11347 } |
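
The cleared state is now a canonical FixedArray owned by the heap, which is why the store above may use SKIP_WRITE_BARRIER: like the builtin stored near the top of this hunk, the sentinel is part of the root set. A rough sketch of how such a root could be created during heap setup (the setter name and allocation site are assumptions; the real root initialization is outside this diff):

    // Hypothetical root setup, not taken from this CL: allocate one immutable
    // "cleared" map shared by every SharedFunctionInfo, sized to hold only the
    // header slot(s) before kEntriesStart.
    Handle<FixedArray> cleared_map = isolate->factory()->NewFixedArray(
        SharedFunctionInfo::kEntriesStart, TENURED);
    heap->set_cleared_optimized_code_map(*cleared_map);  // assumed ROOT_LIST setter
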
11351 | 11348 |
11352 | 11349 |
11353 void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code, | 11350 void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code, |
11354 const char* reason) { | 11351 const char* reason) { |
11355 DisallowHeapAllocation no_gc; | 11352 DisallowHeapAllocation no_gc; |
11356 if (optimized_code_map()->IsSmi()) return; | 11353 if (OptimizedCodeMapIsCleared()) return; |
11357 | 11354 |
11358 Heap* heap = GetHeap(); | 11355 Heap* heap = GetHeap(); |
11359 FixedArray* code_map = FixedArray::cast(optimized_code_map()); | 11356 FixedArray* code_map = optimized_code_map(); |
11360 int dst = kEntriesStart; | 11357 int dst = kEntriesStart; |
11361 int length = code_map->length(); | 11358 int length = code_map->length(); |
11362 for (int src = kEntriesStart; src < length; src += kEntryLength) { | 11359 for (int src = kEntriesStart; src < length; src += kEntryLength) { |
11363 DCHECK(code_map->get(src)->IsNativeContext()); | 11360 DCHECK(code_map->get(src)->IsNativeContext()); |
11364 if (code_map->get(src + kCachedCodeOffset) == optimized_code) { | 11361 if (code_map->get(src + kCachedCodeOffset) == optimized_code) { |
11365 BailoutId osr(Smi::cast(code_map->get(src + kOsrAstIdOffset))->value()); | 11362 BailoutId osr(Smi::cast(code_map->get(src + kOsrAstIdOffset))->value()); |
11366 if (FLAG_trace_opt) { | 11363 if (FLAG_trace_opt) { |
11367 PrintF("[evicting entry from optimizing code map (%s) for ", reason); | 11364 PrintF("[evicting entry from optimizing code map (%s) for ", reason); |
11368 ShortPrint(); | 11365 ShortPrint(); |
11369 if (osr.IsNone()) { | 11366 if (osr.IsNone()) { |
(...skipping 38 matching lines...)
11408 length - dst); | 11405 length - dst); |
11409 if (code_map->length() == kEntriesStart && | 11406 if (code_map->length() == kEntriesStart && |
11410 code_map->get(kSharedCodeIndex)->IsUndefined()) { | 11407 code_map->get(kSharedCodeIndex)->IsUndefined()) { |
11411 ClearOptimizedCodeMap(); | 11408 ClearOptimizedCodeMap(); |
11412 } | 11409 } |
11413 } | 11410 } |
11414 } | 11411 } |
11415 | 11412 |
11416 | 11413 |
11417 void SharedFunctionInfo::TrimOptimizedCodeMap(int shrink_by) { | 11414 void SharedFunctionInfo::TrimOptimizedCodeMap(int shrink_by) { |
11418 FixedArray* code_map = FixedArray::cast(optimized_code_map()); | 11415 FixedArray* code_map = optimized_code_map(); |
11419 DCHECK(shrink_by % kEntryLength == 0); | 11416 DCHECK(shrink_by % kEntryLength == 0); |
11420 DCHECK(shrink_by <= code_map->length() - kEntriesStart); | 11417 DCHECK(shrink_by <= code_map->length() - kEntriesStart); |
11421 // Always trim even when array is cleared because of heap verifier. | 11418 // Always trim even when array is cleared because of heap verifier. |
11422 GetHeap()->RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>(code_map, | 11419 GetHeap()->RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>(code_map, |
11423 shrink_by); | 11420 shrink_by); |
11424 if (code_map->length() == kEntriesStart && | 11421 if (code_map->length() == kEntriesStart && |
11425 code_map->get(kSharedCodeIndex)->IsUndefined()) { | 11422 code_map->get(kSharedCodeIndex)->IsUndefined()) { |
11426 ClearOptimizedCodeMap(); | 11423 ClearOptimizedCodeMap(); |
11427 } | 11424 } |
11428 } | 11425 } |
(...skipping 1121 matching lines...)
12550 set_opt_count(0); | 12547 set_opt_count(0); |
12551 set_deopt_count(0); | 12548 set_deopt_count(0); |
12552 } | 12549 } |
12553 } | 12550 } |
12554 | 12551 |
12555 | 12552 |
12556 int SharedFunctionInfo::SearchOptimizedCodeMapEntry(Context* native_context, | 12553 int SharedFunctionInfo::SearchOptimizedCodeMapEntry(Context* native_context, |
12557 BailoutId osr_ast_id) { | 12554 BailoutId osr_ast_id) { |
12558 DisallowHeapAllocation no_gc; | 12555 DisallowHeapAllocation no_gc; |
12559 DCHECK(native_context->IsNativeContext()); | 12556 DCHECK(native_context->IsNativeContext()); |
12560 Object* value = optimized_code_map(); | 12557 if (!OptimizedCodeMapIsCleared()) { |
12561 if (!value->IsSmi()) { | 12558 FixedArray* optimized_code_map = this->optimized_code_map(); |
12562 FixedArray* optimized_code_map = FixedArray::cast(value); | |
12563 int length = optimized_code_map->length(); | 12559 int length = optimized_code_map->length(); |
12564 Smi* osr_ast_id_smi = Smi::FromInt(osr_ast_id.ToInt()); | 12560 Smi* osr_ast_id_smi = Smi::FromInt(osr_ast_id.ToInt()); |
12565 for (int i = kEntriesStart; i < length; i += kEntryLength) { | 12561 for (int i = kEntriesStart; i < length; i += kEntryLength) { |
12566 if (optimized_code_map->get(i + kContextOffset) == native_context && | 12562 if (optimized_code_map->get(i + kContextOffset) == native_context && |
12567 optimized_code_map->get(i + kOsrAstIdOffset) == osr_ast_id_smi) { | 12563 optimized_code_map->get(i + kOsrAstIdOffset) == osr_ast_id_smi) { |
12568 return i; | 12564 return i; |
12569 } | 12565 } |
12570 } | 12566 } |
12571 Object* shared_code = optimized_code_map->get(kSharedCodeIndex); | 12567 Object* shared_code = optimized_code_map->get(kSharedCodeIndex); |
12572 if (shared_code->IsCode() && osr_ast_id.IsNone()) { | 12568 if (shared_code->IsCode() && osr_ast_id.IsNone()) { |
12573 return kSharedCodeIndex; | 12569 return kSharedCodeIndex; |
12574 } | 12570 } |
12575 } | 12571 } |
12576 return -1; | 12572 return -1; |
12577 } | 12573 } |
12578 | 12574 |
12579 | 12575 |
12580 CodeAndLiterals SharedFunctionInfo::SearchOptimizedCodeMap( | 12576 CodeAndLiterals SharedFunctionInfo::SearchOptimizedCodeMap( |
12581 Context* native_context, BailoutId osr_ast_id) { | 12577 Context* native_context, BailoutId osr_ast_id) { |
12582 CodeAndLiterals result = {nullptr, nullptr}; | 12578 CodeAndLiterals result = {nullptr, nullptr}; |
12583 int entry = SearchOptimizedCodeMapEntry(native_context, osr_ast_id); | 12579 int entry = SearchOptimizedCodeMapEntry(native_context, osr_ast_id); |
12584 if (entry != kNotFound) { | 12580 if (entry != kNotFound) { |
12585 FixedArray* code_map = FixedArray::cast(optimized_code_map()); | 12581 FixedArray* code_map = optimized_code_map(); |
12586 if (entry == kSharedCodeIndex) { | 12582 if (entry == kSharedCodeIndex) { |
12587 result = {Code::cast(code_map->get(kSharedCodeIndex)), nullptr}; | 12583 result = {Code::cast(code_map->get(kSharedCodeIndex)), nullptr}; |
12588 | 12584 |
12589 } else { | 12585 } else { |
12590 DCHECK_LE(entry + kEntryLength, code_map->length()); | 12586 DCHECK_LE(entry + kEntryLength, code_map->length()); |
12591 Object* code = code_map->get(entry + kCachedCodeOffset); | 12587 Object* code = code_map->get(entry + kCachedCodeOffset); |
12592 result = {code->IsUndefined() ? nullptr : Code::cast(code), | 12588 result = {code->IsUndefined() ? nullptr : Code::cast(code), |
12593 LiteralsArray::cast(code_map->get(entry + kLiteralsOffset))}; | 12589 LiteralsArray::cast(code_map->get(entry + kLiteralsOffset))}; |
12594 } | 12590 } |
12595 } | 12591 } |
12596 if (FLAG_trace_opt && !optimized_code_map()->IsSmi() && | 12592 if (FLAG_trace_opt && !OptimizedCodeMapIsCleared() && |
12597 result.code == nullptr) { | 12593 result.code == nullptr) { |
12598 PrintF("[didn't find optimized code in optimized code map for "); | 12594 PrintF("[didn't find optimized code in optimized code map for "); |
12599 ShortPrint(); | 12595 ShortPrint(); |
12600 PrintF("]\n"); | 12596 PrintF("]\n"); |
12601 } | 12597 } |
12602 return result; | 12598 return result; |
12603 } | 12599 } |
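
For context, a caller-side sketch of how the returned CodeAndLiterals pair is consumed (the call site is illustrative, based on the DCHECK in AddToOptimizedCodeMap above, not a concrete site from this CL):

    // Look up cached optimized code for a given native context; a null 'code'
    // field means the map has no usable entry for this context / OSR id.
    CodeAndLiterals cached =
        shared->SearchOptimizedCodeMap(*native_context, BailoutId::None());
    if (cached.code != nullptr) {
      // Reuse cached.code; cached.literals is non-null only when the hit was a
      // context-specific entry rather than the shared kSharedCodeIndex slot.
    }
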
12604 | 12600 |
12605 | 12601 |
12606 #define DECLARE_TAG(ignore1, name, ignore2) name, | 12602 #define DECLARE_TAG(ignore1, name, ignore2) name, |
(...skipping 5516 matching lines...)
18123 if (cell->value() != *new_value) { | 18119 if (cell->value() != *new_value) { |
18124 cell->set_value(*new_value); | 18120 cell->set_value(*new_value); |
18125 Isolate* isolate = cell->GetIsolate(); | 18121 Isolate* isolate = cell->GetIsolate(); |
18126 cell->dependent_code()->DeoptimizeDependentCodeGroup( | 18122 cell->dependent_code()->DeoptimizeDependentCodeGroup( |
18127 isolate, DependentCode::kPropertyCellChangedGroup); | 18123 isolate, DependentCode::kPropertyCellChangedGroup); |
18128 } | 18124 } |
18129 } | 18125 } |
18130 | 18126 |
18131 } // namespace internal | 18127 } // namespace internal |
18132 } // namespace v8 | 18128 } // namespace v8 |