| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/objects.h" | 5 #include "src/objects.h" |
| 6 | 6 |
| 7 #include <cmath> | 7 #include <cmath> |
| 8 #include <iomanip> | 8 #include <iomanip> |
| 9 #include <sstream> | 9 #include <sstream> |
| 10 | 10 |
| (...skipping 11923 matching lines...) |
| 11934 } | 11934 } |
| 11935 | 11935 |
| 11936 | 11936 |
| 11937 void SharedFunctionInfo::AddSharedCodeToOptimizedCodeMap( | 11937 void SharedFunctionInfo::AddSharedCodeToOptimizedCodeMap( |
| 11938 Handle<SharedFunctionInfo> shared, Handle<Code> code) { | 11938 Handle<SharedFunctionInfo> shared, Handle<Code> code) { |
| 11939 Isolate* isolate = shared->GetIsolate(); | 11939 Isolate* isolate = shared->GetIsolate(); |
| 11940 if (isolate->serializer_enabled()) return; | 11940 if (isolate->serializer_enabled()) return; |
| 11941 DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION); | 11941 DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION); |
| 11942 // Empty code maps are unsupported. | 11942 // Empty code maps are unsupported. |
| 11943 if (shared->OptimizedCodeMapIsCleared()) return; | 11943 if (shared->OptimizedCodeMapIsCleared()) return; |
| 11944 shared->optimized_code_map()->set(kSharedCodeIndex, *code); | 11944 Handle<WeakCell> cell = isolate->factory()->NewWeakCell(code); |
| 11945 shared->optimized_code_map()->set(kSharedCodeIndex, *cell); |
| 11945 } | 11946 } |
| 11946 | 11947 |
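The hunk above swaps the raw `Code` pointer stored at `kSharedCodeIndex` for a `WeakCell`, so the cache no longer keeps optimized code alive across GCs. Below is a minimal standalone sketch of the same non-owning-cache idea, with `std::weak_ptr` standing in for V8's `WeakCell`; the `Code` struct and all names are illustrative, not V8's:

```cpp
#include <iostream>
#include <memory>

struct Code { int id; };  // illustrative stand-in for v8::internal::Code

int main() {
  std::weak_ptr<Code> cache_slot;  // plays the role of the WeakCell slot
  {
    auto code = std::make_shared<Code>(Code{42});
    cache_slot = code;  // wrap-then-store, as NewWeakCell(code) does above
    if (auto hit = cache_slot.lock()) std::cout << "hit " << hit->id << "\n";
  }  // the only strong reference dies here: the "GC" of this sketch
  // The slot now reads as cleared, like WeakCell::cleared() after a collection.
  std::cout << (cache_slot.expired() ? "cleared" : "live") << "\n";
}
```

The point of the pattern: every read of the cache must be re-validated, because the collector may clear the cell at any time between the write and the read.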
| 11947 | 11948 |
| 11948 void SharedFunctionInfo::AddToOptimizedCodeMap( | 11949 void SharedFunctionInfo::AddToOptimizedCodeMap( |
| 11949 Handle<SharedFunctionInfo> shared, Handle<Context> native_context, | 11950 Handle<SharedFunctionInfo> shared, Handle<Context> native_context, |
| 11950 Handle<HeapObject> code, Handle<LiteralsArray> literals, | 11951 Handle<HeapObject> code, Handle<LiteralsArray> literals, |
| 11951 BailoutId osr_ast_id) { | 11952 BailoutId osr_ast_id) { |
| 11952 Isolate* isolate = shared->GetIsolate(); | 11953 Isolate* isolate = shared->GetIsolate(); |
| 11953 if (isolate->serializer_enabled()) return; | 11954 if (isolate->serializer_enabled()) return; |
| 11954 DCHECK(*code == isolate->heap()->undefined_value() || | 11955 DCHECK(*code == isolate->heap()->undefined_value() || |
| 11955 !shared->SearchOptimizedCodeMap(*native_context, osr_ast_id).code); | 11956 !shared->SearchOptimizedCodeMap(*native_context, osr_ast_id).code); |
| 11956 DCHECK(*code == isolate->heap()->undefined_value() || | 11957 DCHECK(*code == isolate->heap()->undefined_value() || |
| 11957 Code::cast(*code)->kind() == Code::OPTIMIZED_FUNCTION); | 11958 Code::cast(*code)->kind() == Code::OPTIMIZED_FUNCTION); |
| 11958 DCHECK(native_context->IsNativeContext()); | 11959 DCHECK(native_context->IsNativeContext()); |
| 11959 STATIC_ASSERT(kEntryLength == 4); | 11960 STATIC_ASSERT(kEntryLength == 4); |
| 11960 Handle<FixedArray> new_code_map; | 11961 Handle<FixedArray> new_code_map; |
| 11961 int entry; | 11962 int entry; |
| 11963 |
| 11962 if (shared->OptimizedCodeMapIsCleared()) { | 11964 if (shared->OptimizedCodeMapIsCleared()) { |
| 11963 new_code_map = isolate->factory()->NewFixedArray(kInitialLength, TENURED); | 11965 new_code_map = isolate->factory()->NewFixedArray(kInitialLength, TENURED); |
| 11966 new_code_map->set(kSharedCodeIndex, *isolate->factory()->empty_weak_cell(), |
| 11967 SKIP_WRITE_BARRIER); |
| 11964 entry = kEntriesStart; | 11968 entry = kEntriesStart; |
| 11965 } else { | 11969 } else { |
| 11966 Handle<FixedArray> old_code_map(shared->optimized_code_map(), isolate); | 11970 Handle<FixedArray> old_code_map(shared->optimized_code_map(), isolate); |
| 11967 entry = shared->SearchOptimizedCodeMapEntry(*native_context, osr_ast_id); | 11971 entry = shared->SearchOptimizedCodeMapEntry(*native_context, osr_ast_id); |
| 11968 if (entry > kSharedCodeIndex) { | 11972 if (entry > kSharedCodeIndex) { |
| 11969 // Found an existing context-specific entry; it must not contain any code. | 11973 // Found an existing context-specific entry; it must not contain any code. |
| 11970 DCHECK_EQ(isolate->heap()->undefined_value(), | 11974 DCHECK(WeakCell::cast(old_code_map->get(entry + kCachedCodeOffset)) |
| 11971 old_code_map->get(entry + kCachedCodeOffset)); | 11975 ->cleared()); |
| 11972 // Just set the code and literals in the entry. | 11976 // Just set the code and literals in the entry. |
| 11973 old_code_map->set(entry + kCachedCodeOffset, *code); | 11977 Handle<WeakCell> code_cell = code->IsUndefined() |
| 11974 old_code_map->set(entry + kLiteralsOffset, *literals); | 11978 ? isolate->factory()->empty_weak_cell() |
| 11979 : isolate->factory()->NewWeakCell(code); |
| 11980 Handle<WeakCell> literals_cell = |
| 11981 isolate->factory()->NewWeakCell(literals); |
| 11982 old_code_map->set(entry + kCachedCodeOffset, *code_cell); |
| 11983 old_code_map->set(entry + kLiteralsOffset, *literals_cell); |
| 11975 return; | 11984 return; |
| 11976 } | 11985 } |
| 11977 | 11986 |
| 11978 // Copy old optimized code map and append one new entry. | 11987 // Can we reuse an entry? |
| 11979 new_code_map = isolate->factory()->CopyFixedArrayAndGrow( | 11988 DCHECK(entry < kEntriesStart); |
| 11980 old_code_map, kEntryLength, TENURED); | 11989 int length = old_code_map->length(); |
| 11981 // TODO(mstarzinger): Temporary workaround. The allocation above might have | 11990 for (int i = kEntriesStart; i < length; i += kEntryLength) { |
| 11982 // flushed the optimized code map and the copy we created is full of holes. | 11991 if (WeakCell::cast(old_code_map->get(i + kContextOffset))->cleared()) { |
| 11983 // For now we just give up on adding the entry and pretend it got flushed. | 11992 new_code_map = old_code_map; |
| 11984 if (shared->OptimizedCodeMapIsCleared()) return; | 11993 entry = i; |
| 11985 entry = old_code_map->length(); | 11994 break; |
| 11995 } |
| 11996 } |
| 11997 |
| 11998 if (entry < kEntriesStart) { |
| 11999 // Copy old optimized code map and append one new entry. |
| 12000 new_code_map = isolate->factory()->CopyFixedArrayAndGrow( |
| 12001 old_code_map, kEntryLength, TENURED); |
| 12002 // TODO(mstarzinger): Temporary workaround. The allocation above might |
| 12003 // have flushed the optimized code map and the copy we created is full of |
| 12004 // holes. For now we just give up on adding the entry and pretend it got |
| 12005 // flushed. |
| 12006 if (shared->OptimizedCodeMapIsCleared()) return; |
| 12007 entry = old_code_map->length(); |
| 12008 } |
| 11986 } | 12009 } |
| 11987 new_code_map->set(entry + kContextOffset, *native_context); | 12010 |
| 11988 new_code_map->set(entry + kCachedCodeOffset, *code); | 12011 Handle<WeakCell> code_cell = code->IsUndefined() |
| 11989 new_code_map->set(entry + kLiteralsOffset, *literals); | 12012 ? isolate->factory()->empty_weak_cell() |
| 12013 : isolate->factory()->NewWeakCell(code); |
| 12014 Handle<WeakCell> literals_cell = isolate->factory()->NewWeakCell(literals); |
| 12015 WeakCell* context_cell = native_context->self_weak_cell(); |
| 12016 |
| 12017 new_code_map->set(entry + kContextOffset, context_cell); |
| 12018 new_code_map->set(entry + kCachedCodeOffset, *code_cell); |
| 12019 new_code_map->set(entry + kLiteralsOffset, *literals_cell); |
| 11990 new_code_map->set(entry + kOsrAstIdOffset, Smi::FromInt(osr_ast_id.ToInt())); | 12020 new_code_map->set(entry + kOsrAstIdOffset, Smi::FromInt(osr_ast_id.ToInt())); |
| 11991 | 12021 |
| 11992 #ifdef DEBUG | 12022 #ifdef DEBUG |
| 11993 for (int i = kEntriesStart; i < new_code_map->length(); i += kEntryLength) { | 12023 for (int i = kEntriesStart; i < new_code_map->length(); i += kEntryLength) { |
| 11994 DCHECK(new_code_map->get(i + kContextOffset)->IsNativeContext()); | 12024 WeakCell* cell = WeakCell::cast(new_code_map->get(i + kContextOffset)); |
| 11995 Object* code = new_code_map->get(i + kCachedCodeOffset); | 12025 DCHECK(cell->cleared() || cell->value()->IsNativeContext()); |
| 11996 if (code != isolate->heap()->undefined_value()) { | 12026 cell = WeakCell::cast(new_code_map->get(i + kCachedCodeOffset)); |
| 11997 DCHECK(code->IsCode()); | 12027 DCHECK(cell->cleared() || |
| 11998 DCHECK(Code::cast(code)->kind() == Code::OPTIMIZED_FUNCTION); | 12028 (cell->value()->IsCode() && |
| 11999 } | 12029 Code::cast(cell->value())->kind() == Code::OPTIMIZED_FUNCTION)); |
| 12000 DCHECK(new_code_map->get(i + kLiteralsOffset)->IsFixedArray()); | 12030 cell = WeakCell::cast(new_code_map->get(i + kLiteralsOffset)); |
| 12031 DCHECK(cell->cleared() || cell->value()->IsFixedArray()); |
| 12001 DCHECK(new_code_map->get(i + kOsrAstIdOffset)->IsSmi()); | 12032 DCHECK(new_code_map->get(i + kOsrAstIdOffset)->IsSmi()); |
| 12002 } | 12033 } |
| 12003 #endif | 12034 #endif |
| 12004 | 12035 |
| 12005 // Zap any old optimized code map. | 12036 FixedArray* old_code_map = shared->optimized_code_map(); |
| 12006 if (!shared->OptimizedCodeMapIsCleared()) { | 12037 if (old_code_map != *new_code_map) { |
| 12007 FixedArray* old_code_map = shared->optimized_code_map(); | 12038 shared->set_optimized_code_map(*new_code_map); |
| 12008 old_code_map->FillWithHoles(0, old_code_map->length()); | |
| 12009 } | 12039 } |
| 12010 | |
| 12011 shared->set_optimized_code_map(*new_code_map); | |
| 12012 } | 12040 } |
| 12013 | 12041 |
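Two things change in `AddToOptimizedCodeMap` above: every stored value is now wrapped in a `WeakCell` (with `empty_weak_cell` for undefined code), and before growing the map the code scans for an entry whose context cell the GC has already cleared and reuses it in place. A sketch of that reuse-or-append logic, under the same assumptions as before (`std::weak_ptr` in place of `WeakCell`; the types and helper names are made up for illustration):

```cpp
#include <memory>
#include <vector>

struct Context {};  // illustrative stand-in, not a V8 type
struct Entry { std::weak_ptr<Context> context; int osr_ast_id; };

// Index of an entry whose context has been collected, or -1 to append.
int FindReusableEntry(const std::vector<Entry>& map) {
  for (size_t i = 0; i < map.size(); ++i)
    if (map[i].context.expired()) return static_cast<int>(i);
  return -1;
}

void Add(std::vector<Entry>& map, const std::shared_ptr<Context>& ctx,
         int osr_ast_id) {
  Entry entry{ctx, osr_ast_id};
  int slot = FindReusableEntry(map);
  if (slot >= 0) {
    map[slot] = entry;     // overwrite a dead slot: no allocation, no growth
  } else {
    map.push_back(entry);  // otherwise grow, as CopyFixedArrayAndGrow does
  }
}
```

Reusing cleared slots keeps the map from growing monotonically as contexts die, which the raw-pointer version could not detect.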
| 12014 | 12042 |
| 12015 void SharedFunctionInfo::ClearOptimizedCodeMap() { | 12043 void SharedFunctionInfo::ClearOptimizedCodeMap() { |
| 12016 // Zap any old optimized code map. | |
| 12017 if (!OptimizedCodeMapIsCleared()) { | |
| 12018 FixedArray* old_code_map = optimized_code_map(); | |
| 12019 old_code_map->FillWithHoles(0, old_code_map->length()); | |
| 12020 } | |
| 12021 | |
| 12022 FixedArray* cleared_map = GetHeap()->cleared_optimized_code_map(); | 12044 FixedArray* cleared_map = GetHeap()->cleared_optimized_code_map(); |
| 12023 set_optimized_code_map(cleared_map, SKIP_WRITE_BARRIER); | 12045 set_optimized_code_map(cleared_map, SKIP_WRITE_BARRIER); |
| 12024 } | 12046 } |
| 12025 | 12047 |
| 12026 | 12048 |
| 12027 void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code, | 12049 void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code, |
| 12028 const char* reason) { | 12050 const char* reason) { |
| 12029 DisallowHeapAllocation no_gc; | 12051 DisallowHeapAllocation no_gc; |
| 12030 if (OptimizedCodeMapIsCleared()) return; | 12052 if (OptimizedCodeMapIsCleared()) return; |
| 12031 | 12053 |
| 12032 Heap* heap = GetHeap(); | 12054 Heap* heap = GetHeap(); |
| 12033 FixedArray* code_map = optimized_code_map(); | 12055 FixedArray* code_map = optimized_code_map(); |
| 12034 int dst = kEntriesStart; | 12056 int dst = kEntriesStart; |
| 12035 int length = code_map->length(); | 12057 int length = code_map->length(); |
| 12036 for (int src = kEntriesStart; src < length; src += kEntryLength) { | 12058 for (int src = kEntriesStart; src < length; src += kEntryLength) { |
| 12037 DCHECK(code_map->get(src)->IsNativeContext()); | 12059 DCHECK(WeakCell::cast(code_map->get(src))->cleared() || |
| 12038 if (code_map->get(src + kCachedCodeOffset) == optimized_code) { | 12060 WeakCell::cast(code_map->get(src))->value()->IsNativeContext()); |
| 12061 if (WeakCell::cast(code_map->get(src + kCachedCodeOffset))->value() == |
| 12062 optimized_code) { |
| 12039 BailoutId osr(Smi::cast(code_map->get(src + kOsrAstIdOffset))->value()); | 12063 BailoutId osr(Smi::cast(code_map->get(src + kOsrAstIdOffset))->value()); |
| 12040 if (FLAG_trace_opt) { | 12064 if (FLAG_trace_opt) { |
| 12041 PrintF("[evicting entry from optimizing code map (%s) for ", reason); | 12065 PrintF("[evicting entry from optimizing code map (%s) for ", reason); |
| 12042 ShortPrint(); | 12066 ShortPrint(); |
| 12043 if (osr.IsNone()) { | 12067 if (osr.IsNone()) { |
| 12044 PrintF("]\n"); | 12068 PrintF("]\n"); |
| 12045 } else { | 12069 } else { |
| 12046 PrintF(" (osr ast id %d)]\n", osr.ToInt()); | 12070 PrintF(" (osr ast id %d)]\n", osr.ToInt()); |
| 12047 } | 12071 } |
| 12048 } | 12072 } |
| 12049 if (!osr.IsNone()) { | 12073 if (!osr.IsNone()) { |
| 12050 // Evict the src entry by not copying it to the dst entry. | 12074 // Evict the src entry by not copying it to the dst entry. |
| 12051 continue; | 12075 continue; |
| 12052 } | 12076 } |
| 12053 // For a non-OSR entry, just clear the code so that the literals | 12077 // For a non-OSR entry, just clear the code so that the literals |
| 12054 // can still be shared. | 12078 // can still be shared. |
| 12055 code_map->set_undefined(src + kCachedCodeOffset); | 12079 code_map->set(src + kCachedCodeOffset, heap->empty_weak_cell(), |
| 12080 SKIP_WRITE_BARRIER); |
| 12056 } | 12081 } |
| 12057 | 12082 |
| 12058 // Keep the src entry by copying it to the dst entry. | 12083 // Keep the src entry by copying it to the dst entry. |
| 12059 if (dst != src) { | 12084 if (dst != src) { |
| 12060 code_map->set(dst + kContextOffset, code_map->get(src + kContextOffset)); | 12085 code_map->set(dst + kContextOffset, code_map->get(src + kContextOffset)); |
| 12061 code_map->set(dst + kCachedCodeOffset, | 12086 code_map->set(dst + kCachedCodeOffset, |
| 12062 code_map->get(src + kCachedCodeOffset)); | 12087 code_map->get(src + kCachedCodeOffset)); |
| 12063 code_map->set(dst + kLiteralsOffset, | 12088 code_map->set(dst + kLiteralsOffset, |
| 12064 code_map->get(src + kLiteralsOffset)); | 12089 code_map->get(src + kLiteralsOffset)); |
| 12065 code_map->set(dst + kOsrAstIdOffset, | 12090 code_map->set(dst + kOsrAstIdOffset, |
| 12066 code_map->get(src + kOsrAstIdOffset)); | 12091 code_map->get(src + kOsrAstIdOffset)); |
| 12067 } | 12092 } |
| 12068 dst += kEntryLength; | 12093 dst += kEntryLength; |
| 12069 } | 12094 } |
| 12070 if (code_map->get(kSharedCodeIndex) == optimized_code) { | 12095 if (WeakCell::cast(code_map->get(kSharedCodeIndex))->value() == |
| 12096 optimized_code) { |
| 12071 // Evict context-independent code as well. | 12097 // Evict context-independent code as well. |
| 12072 code_map->set_undefined(kSharedCodeIndex); | 12098 code_map->set(kSharedCodeIndex, heap->empty_weak_cell(), |
| 12099 SKIP_WRITE_BARRIER); |
| 12073 if (FLAG_trace_opt) { | 12100 if (FLAG_trace_opt) { |
| 12074 PrintF("[evicting entry from optimizing code map (%s) for ", reason); | 12101 PrintF("[evicting entry from optimizing code map (%s) for ", reason); |
| 12075 ShortPrint(); | 12102 ShortPrint(); |
| 12076 PrintF(" (context-independent code)]\n"); | 12103 PrintF(" (context-independent code)]\n"); |
| 12077 } | 12104 } |
| 12078 } | 12105 } |
| 12079 if (dst != length) { | 12106 if (dst != length) { |
| 12080 // Always trim, even when the array is cleared, to satisfy the heap verifier. | 12107 // Always trim, even when the array is cleared, to satisfy the heap verifier. |
| 12081 heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(code_map, | 12108 heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(code_map, |
| 12082 length - dst); | 12109 length - dst); |
| 12083 if (code_map->length() == kEntriesStart && | 12110 if (code_map->length() == kEntriesStart && |
| 12084 code_map->get(kSharedCodeIndex)->IsUndefined()) { | 12111 WeakCell::cast(code_map->get(kSharedCodeIndex))->cleared()) { |
| 12085 ClearOptimizedCodeMap(); | 12112 ClearOptimizedCodeMap(); |
| 12086 } | 12113 } |
| 12087 } | 12114 } |
| 12088 } | 12115 } |
| 12089 | 12116 |
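The eviction loop above is a classic two-index, in-place compaction: `src` visits every entry, `dst` tracks where surviving entries are copied down, and the array is right-trimmed to `dst` afterwards. A compact sketch of that pattern in isolation (plain `int` entries instead of map rows; `Compact` is an illustrative name, not a V8 API):

```cpp
#include <vector>

// Removes entries equal to `evict` in place and returns the new length,
// mirroring RightTrimFixedArray(code_map, length - dst) in the hunk above.
int Compact(std::vector<int>& entries, int evict) {
  int dst = 0;
  for (int src = 0; src < static_cast<int>(entries.size()); ++src) {
    if (entries[src] == evict) continue;          // evict: skip the copy
    if (dst != src) entries[dst] = entries[src];  // keep: copy down
    ++dst;
  }
  entries.resize(dst);  // "right-trim" the tail
  return dst;
}
```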
| 12090 | 12117 |
| 12091 void SharedFunctionInfo::TrimOptimizedCodeMap(int shrink_by) { | 12118 void SharedFunctionInfo::TrimOptimizedCodeMap(int shrink_by) { |
| 12092 FixedArray* code_map = optimized_code_map(); | 12119 FixedArray* code_map = optimized_code_map(); |
| 12093 DCHECK(shrink_by % kEntryLength == 0); | 12120 DCHECK(shrink_by % kEntryLength == 0); |
| 12094 DCHECK(shrink_by <= code_map->length() - kEntriesStart); | 12121 DCHECK(shrink_by <= code_map->length() - kEntriesStart); |
| 12095 // Always trim, even when the array is cleared, to satisfy the heap verifier. | 12122 // Always trim, even when the array is cleared, to satisfy the heap verifier. |
| 12096 GetHeap()->RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>(code_map, | 12123 GetHeap()->RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>(code_map, |
| 12097 shrink_by); | 12124 shrink_by); |
| 12098 if (code_map->length() == kEntriesStart && | 12125 if (code_map->length() == kEntriesStart && |
| 12099 code_map->get(kSharedCodeIndex)->IsUndefined()) { | 12126 WeakCell::cast(code_map->get(kSharedCodeIndex))->cleared()) { |
| 12100 ClearOptimizedCodeMap(); | 12127 ClearOptimizedCodeMap(); |
| 12101 } | 12128 } |
| 12102 } | 12129 } |
| 12103 | 12130 |
| 12104 | 12131 |
| 12105 static void GetMinInobjectSlack(Map* map, void* data) { | 12132 static void GetMinInobjectSlack(Map* map, void* data) { |
| 12106 int slack = map->unused_property_fields(); | 12133 int slack = map->unused_property_fields(); |
| 12107 if (*reinterpret_cast<int*>(data) > slack) { | 12134 if (*reinterpret_cast<int*>(data) > slack) { |
| 12108 *reinterpret_cast<int*>(data) = slack; | 12135 *reinterpret_cast<int*>(data) = slack; |
| 12109 } | 12136 } |
| (...skipping 1220 matching lines...) |
| 13330 | 13357 |
| 13331 int SharedFunctionInfo::SearchOptimizedCodeMapEntry(Context* native_context, | 13358 int SharedFunctionInfo::SearchOptimizedCodeMapEntry(Context* native_context, |
| 13332 BailoutId osr_ast_id) { | 13359 BailoutId osr_ast_id) { |
| 13333 DisallowHeapAllocation no_gc; | 13360 DisallowHeapAllocation no_gc; |
| 13334 DCHECK(native_context->IsNativeContext()); | 13361 DCHECK(native_context->IsNativeContext()); |
| 13335 if (!OptimizedCodeMapIsCleared()) { | 13362 if (!OptimizedCodeMapIsCleared()) { |
| 13336 FixedArray* optimized_code_map = this->optimized_code_map(); | 13363 FixedArray* optimized_code_map = this->optimized_code_map(); |
| 13337 int length = optimized_code_map->length(); | 13364 int length = optimized_code_map->length(); |
| 13338 Smi* osr_ast_id_smi = Smi::FromInt(osr_ast_id.ToInt()); | 13365 Smi* osr_ast_id_smi = Smi::FromInt(osr_ast_id.ToInt()); |
| 13339 for (int i = kEntriesStart; i < length; i += kEntryLength) { | 13366 for (int i = kEntriesStart; i < length; i += kEntryLength) { |
| 13340 if (optimized_code_map->get(i + kContextOffset) == native_context && | 13367 if (WeakCell::cast(optimized_code_map->get(i + kContextOffset)) |
| 13368 ->value() == native_context && |
| 13341 optimized_code_map->get(i + kOsrAstIdOffset) == osr_ast_id_smi) { | 13369 optimized_code_map->get(i + kOsrAstIdOffset) == osr_ast_id_smi) { |
| 13342 return i; | 13370 return i; |
| 13343 } | 13371 } |
| 13344 } | 13372 } |
| 13345 Object* shared_code = optimized_code_map->get(kSharedCodeIndex); | 13373 Object* shared_code = |
| 13374 WeakCell::cast(optimized_code_map->get(kSharedCodeIndex))->value(); |
| 13346 if (shared_code->IsCode() && osr_ast_id.IsNone()) { | 13375 if (shared_code->IsCode() && osr_ast_id.IsNone()) { |
| 13347 return kSharedCodeIndex; | 13376 return kSharedCodeIndex; |
| 13348 } | 13377 } |
| 13349 } | 13378 } |
| 13350 return -1; | 13379 return -1; |
| 13351 } | 13380 } |
| 13352 | 13381 |
| 13353 | 13382 |
| 13354 CodeAndLiterals SharedFunctionInfo::SearchOptimizedCodeMap( | 13383 CodeAndLiterals SharedFunctionInfo::SearchOptimizedCodeMap( |
| 13355 Context* native_context, BailoutId osr_ast_id) { | 13384 Context* native_context, BailoutId osr_ast_id) { |
| 13356 CodeAndLiterals result = {nullptr, nullptr}; | 13385 CodeAndLiterals result = {nullptr, nullptr}; |
| 13357 int entry = SearchOptimizedCodeMapEntry(native_context, osr_ast_id); | 13386 int entry = SearchOptimizedCodeMapEntry(native_context, osr_ast_id); |
| 13358 if (entry != kNotFound) { | 13387 if (entry != kNotFound) { |
| 13359 FixedArray* code_map = optimized_code_map(); | 13388 FixedArray* code_map = optimized_code_map(); |
| 13360 if (entry == kSharedCodeIndex) { | 13389 if (entry == kSharedCodeIndex) { |
| 13361 result = {Code::cast(code_map->get(kSharedCodeIndex)), nullptr}; | 13390 // We know the weak cell isn't cleared because we made sure of it in |
| 13362 | 13391 // SearchOptimizedCodeMapEntry and performed no allocations since that |
| 13392 // call. |
| 13393 result = { |
| 13394 Code::cast(WeakCell::cast(code_map->get(kSharedCodeIndex))->value()), |
| 13395 nullptr}; |
| 13363 } else { | 13396 } else { |
| 13364 DCHECK_LE(entry + kEntryLength, code_map->length()); | 13397 DCHECK_LE(entry + kEntryLength, code_map->length()); |
| 13365 Object* code = code_map->get(entry + kCachedCodeOffset); | 13398 WeakCell* cell = WeakCell::cast(code_map->get(entry + kCachedCodeOffset)); |
| 13366 result = {code->IsUndefined() ? nullptr : Code::cast(code), | 13399 WeakCell* literals_cell = |
| 13367 LiteralsArray::cast(code_map->get(entry + kLiteralsOffset))}; | 13400 WeakCell::cast(code_map->get(entry + kLiteralsOffset)); |
| 13401 |
| 13402 result = {cell->cleared() ? nullptr : Code::cast(cell->value()), |
| 13403 literals_cell->cleared() |
| 13404 ? nullptr |
| 13405 : LiteralsArray::cast(literals_cell->value())}; |
| 13368 } | 13406 } |
| 13369 } | 13407 } |
| 13370 if (FLAG_trace_opt && !OptimizedCodeMapIsCleared() && | 13408 if (FLAG_trace_opt && !OptimizedCodeMapIsCleared() && |
| 13371 result.code == nullptr) { | 13409 result.code == nullptr) { |
| 13372 PrintF("[didn't find optimized code in optimized code map for "); | 13410 PrintF("[didn't find optimized code in optimized code map for "); |
| 13373 ShortPrint(); | 13411 ShortPrint(); |
| 13374 PrintF("]\n"); | 13412 PrintF("]\n"); |
| 13375 } | 13413 } |
| 13376 return result; | 13414 return result; |
| 13377 } | 13415 } |
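After the change, both search paths above must load values through a weak reference and treat a cleared cell as a miss, which is why `SearchOptimizedCodeMap` can now return a null code or null literals for an entry that still exists. A sketch of that cleared-tolerant lookup, again with `std::weak_ptr` standing in for `WeakCell` and illustrative type names:

```cpp
#include <memory>
#include <vector>

struct Context {};  // illustrative stand-ins, not V8 types
struct Code {};
struct Entry {
  std::weak_ptr<Context> context;
  std::weak_ptr<Code> code;
  int osr_ast_id;
};

// Cached code for (ctx, osr_ast_id), or nullptr if absent or collected.
std::shared_ptr<Code> Search(const std::vector<Entry>& map,
                             const Context* ctx, int osr_ast_id) {
  for (const Entry& e : map) {
    if (e.context.lock().get() == ctx && e.osr_ast_id == osr_ast_id) {
      return e.code.lock();  // may be null: a cleared cell reads as a miss
    }
  }
  return nullptr;
}
```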
| (...skipping 5756 matching lines...) |
| 19134 if (cell->value() != *new_value) { | 19172 if (cell->value() != *new_value) { |
| 19135 cell->set_value(*new_value); | 19173 cell->set_value(*new_value); |
| 19136 Isolate* isolate = cell->GetIsolate(); | 19174 Isolate* isolate = cell->GetIsolate(); |
| 19137 cell->dependent_code()->DeoptimizeDependentCodeGroup( | 19175 cell->dependent_code()->DeoptimizeDependentCodeGroup( |
| 19138 isolate, DependentCode::kPropertyCellChangedGroup); | 19176 isolate, DependentCode::kPropertyCellChangedGroup); |
| 19139 } | 19177 } |
| 19140 } | 19178 } |
| 19141 | 19179 |
| 19142 } // namespace internal | 19180 } // namespace internal |
| 19143 } // namespace v8 | 19181 } // namespace v8 |