Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/objects.h" | 5 #include "src/objects.h" |
| 6 | 6 |
| 7 #include <cmath> | 7 #include <cmath> |
| 8 #include <iomanip> | 8 #include <iomanip> |
| 9 #include <sstream> | 9 #include <sstream> |
| 10 | 10 |
| (...skipping 11891 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 11902 } | 11902 } |
| 11903 | 11903 |
| 11904 | 11904 |
| 11905 void SharedFunctionInfo::AddSharedCodeToOptimizedCodeMap( | 11905 void SharedFunctionInfo::AddSharedCodeToOptimizedCodeMap( |
| 11906 Handle<SharedFunctionInfo> shared, Handle<Code> code) { | 11906 Handle<SharedFunctionInfo> shared, Handle<Code> code) { |
| 11907 Isolate* isolate = shared->GetIsolate(); | 11907 Isolate* isolate = shared->GetIsolate(); |
| 11908 if (isolate->serializer_enabled()) return; | 11908 if (isolate->serializer_enabled()) return; |
| 11909 DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION); | 11909 DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION); |
| 11910 // Empty code maps are unsupported. | 11910 // Empty code maps are unsupported. |
| 11911 if (shared->OptimizedCodeMapIsCleared()) return; | 11911 if (shared->OptimizedCodeMapIsCleared()) return; |
| 11912 shared->optimized_code_map()->set(kSharedCodeIndex, *code); | 11912 Handle<WeakCell> cell = isolate->factory()->NewWeakCell(code); |
| 11913 shared->optimized_code_map()->set(kSharedCodeIndex, *cell); | |
| 11913 } | 11914 } |
| 11914 | 11915 |
| 11915 | 11916 |
| 11916 void SharedFunctionInfo::AddToOptimizedCodeMap( | 11917 void SharedFunctionInfo::AddToOptimizedCodeMap( |
| 11917 Handle<SharedFunctionInfo> shared, Handle<Context> native_context, | 11918 Handle<SharedFunctionInfo> shared, Handle<Context> native_context, |
| 11918 Handle<HeapObject> code, Handle<LiteralsArray> literals, | 11919 Handle<HeapObject> code, Handle<LiteralsArray> literals, |
| 11919 BailoutId osr_ast_id) { | 11920 BailoutId osr_ast_id) { |
| 11920 Isolate* isolate = shared->GetIsolate(); | 11921 Isolate* isolate = shared->GetIsolate(); |
| 11921 if (isolate->serializer_enabled()) return; | 11922 if (isolate->serializer_enabled()) return; |
| 11922 DCHECK(*code == isolate->heap()->undefined_value() || | 11923 DCHECK(*code == isolate->heap()->undefined_value() || |
| 11923 !shared->SearchOptimizedCodeMap(*native_context, osr_ast_id).code); | 11924 !shared->SearchOptimizedCodeMap(*native_context, osr_ast_id).code); |
| 11924 DCHECK(*code == isolate->heap()->undefined_value() || | 11925 DCHECK(*code == isolate->heap()->undefined_value() || |
| 11925 Code::cast(*code)->kind() == Code::OPTIMIZED_FUNCTION); | 11926 Code::cast(*code)->kind() == Code::OPTIMIZED_FUNCTION); |
| 11926 DCHECK(native_context->IsNativeContext()); | 11927 DCHECK(native_context->IsNativeContext()); |
| 11927 STATIC_ASSERT(kEntryLength == 4); | 11928 STATIC_ASSERT(kEntryLength == 4); |
| 11928 Handle<FixedArray> new_code_map; | 11929 Handle<FixedArray> new_code_map; |
| 11929 int entry; | 11930 int entry; |
| 11931 | |
| 11930 if (shared->OptimizedCodeMapIsCleared()) { | 11932 if (shared->OptimizedCodeMapIsCleared()) { |
| 11931 new_code_map = isolate->factory()->NewFixedArray(kInitialLength, TENURED); | 11933 new_code_map = isolate->factory()->NewFixedArray(kInitialLength, TENURED); |
| 11934 new_code_map->set(kSharedCodeIndex, *isolate->factory()->empty_weak_cell(), | |
| 11935 SKIP_WRITE_BARRIER); | |
| 11932 entry = kEntriesStart; | 11936 entry = kEntriesStart; |
| 11933 } else { | 11937 } else { |
| 11934 Handle<FixedArray> old_code_map(shared->optimized_code_map(), isolate); | 11938 Handle<FixedArray> old_code_map(shared->optimized_code_map(), isolate); |
| 11935 entry = shared->SearchOptimizedCodeMapEntry(*native_context, osr_ast_id); | 11939 entry = shared->SearchOptimizedCodeMapEntry(*native_context, osr_ast_id); |
| 11936 if (entry > kSharedCodeIndex) { | 11940 if (entry > kSharedCodeIndex) { |
| 11937 // Found an existing context-specific entry, it must not contain any code. | 11941 // Found an existing context-specific entry, it must not contain any code. |
| 11938 DCHECK_EQ(isolate->heap()->undefined_value(), | 11942 DCHECK(WeakCell::cast(old_code_map->get(entry + kCachedCodeOffset)) |
| 11939 old_code_map->get(entry + kCachedCodeOffset)); | 11943 ->cleared()); |
| 11940 // Just set the code and literals to the entry. | 11944 // Just set the code and literals to the entry. |
| 11941 old_code_map->set(entry + kCachedCodeOffset, *code); | 11945 Handle<WeakCell> code_cell = code->IsUndefined() |
| 11942 old_code_map->set(entry + kLiteralsOffset, *literals); | 11946 ? isolate->factory()->empty_weak_cell() |
| 11947 : isolate->factory()->NewWeakCell(code); | |
| 11948 Handle<WeakCell> literals_cell = | |
| 11949 isolate->factory()->NewWeakCell(literals); | |
| 11950 old_code_map->set(entry + kCachedCodeOffset, *code_cell); | |
| 11951 old_code_map->set(entry + kLiteralsOffset, *literals_cell); | |
| 11943 return; | 11952 return; |
| 11944 } | 11953 } |
| 11945 | 11954 |
| 11946 // Copy old optimized code map and append one new entry. | 11955 // Can we reuse an entry? |
| 11947 new_code_map = isolate->factory()->CopyFixedArrayAndGrow( | 11956 DCHECK(entry < kEntriesStart); |
| 11948 old_code_map, kEntryLength, TENURED); | 11957 int length = old_code_map->length(); |
| 11949 // TODO(mstarzinger): Temporary workaround. The allocation above might have | 11958 for (int i = kEntriesStart; i < length; i += kEntryLength) { |
| 11950 // flushed the optimized code map and the copy we created is full of holes. | 11959 if (WeakCell::cast(old_code_map->get(i + kContextOffset))->cleared()) { |
| 11951 // For now we just give up on adding the entry and pretend it got flushed. | 11960 new_code_map = old_code_map; |
| 11952 if (shared->OptimizedCodeMapIsCleared()) return; | 11961 entry = i; |
| 11953 entry = old_code_map->length(); | 11962 break; |
| 11963 } | |
| 11964 } | |
| 11965 | |
| 11966 if (entry < kEntriesStart) { | |
| 11967 // Copy old optimized code map and append one new entry. | |
| 11968 new_code_map = isolate->factory()->CopyFixedArrayAndGrow( | |
| 11969 old_code_map, kEntryLength, TENURED); | |
| 11970 // TODO(mstarzinger): Temporary workaround. The allocation above might | |
| 11971 // have flushed the optimized code map and the copy we created is full of | |
| 11972 // holes. For now we just give up on adding the entry and pretend it got | |
| 11973 // flushed. | |
| 11974 if (shared->OptimizedCodeMapIsCleared()) return; | |
| 11975 entry = old_code_map->length(); | |
| 11976 } | |
| 11954 } | 11977 } |
| 11955 new_code_map->set(entry + kContextOffset, *native_context); | 11978 |
| 11956 new_code_map->set(entry + kCachedCodeOffset, *code); | 11979 Handle<WeakCell> code_cell = code->IsUndefined() |
| 11957 new_code_map->set(entry + kLiteralsOffset, *literals); | 11980 ? isolate->factory()->empty_weak_cell() |
| 11981 : isolate->factory()->NewWeakCell(code); | |
| 11982 Handle<WeakCell> literals_cell = isolate->factory()->NewWeakCell(literals); | |
| 11983 WeakCell* context_cell = native_context->self_weak_cell(); | |
| 11984 | |
| 11985 new_code_map->set(entry + kContextOffset, context_cell); | |
| 11986 new_code_map->set(entry + kCachedCodeOffset, *code_cell); | |
| 11987 new_code_map->set(entry + kLiteralsOffset, *literals_cell); | |
| 11958 new_code_map->set(entry + kOsrAstIdOffset, Smi::FromInt(osr_ast_id.ToInt())); | 11988 new_code_map->set(entry + kOsrAstIdOffset, Smi::FromInt(osr_ast_id.ToInt())); |
| 11959 | 11989 |
| 11960 #ifdef DEBUG | 11990 #ifdef DEBUG |
| 11961 for (int i = kEntriesStart; i < new_code_map->length(); i += kEntryLength) { | 11991 for (int i = kEntriesStart; i < new_code_map->length(); i += kEntryLength) { |
| 11962 DCHECK(new_code_map->get(i + kContextOffset)->IsNativeContext()); | 11992 WeakCell* cell = WeakCell::cast(new_code_map->get(i + kContextOffset)); |
| 11963 Object* code = new_code_map->get(i + kCachedCodeOffset); | 11993 DCHECK(cell->cleared() || cell->value()->IsNativeContext()); |
| 11964 if (code != isolate->heap()->undefined_value()) { | 11994 cell = WeakCell::cast(new_code_map->get(i + kCachedCodeOffset)); |
| 11965 DCHECK(code->IsCode()); | 11995 DCHECK(cell->cleared() || |
| 11966 DCHECK(Code::cast(code)->kind() == Code::OPTIMIZED_FUNCTION); | 11996 (cell->value()->IsCode() && |
| 11967 } | 11997 Code::cast(cell->value())->kind() == Code::OPTIMIZED_FUNCTION)); |
| 11968 DCHECK(new_code_map->get(i + kLiteralsOffset)->IsFixedArray()); | 11998 cell = WeakCell::cast(new_code_map->get(i + kLiteralsOffset)); |
| 11999 DCHECK(cell->cleared() || cell->value()->IsFixedArray()); | |
| 11969 DCHECK(new_code_map->get(i + kOsrAstIdOffset)->IsSmi()); | 12000 DCHECK(new_code_map->get(i + kOsrAstIdOffset)->IsSmi()); |
| 11970 } | 12001 } |
| 11971 #endif | 12002 #endif |
| 11972 | 12003 |
| 11973 // Zap any old optimized code map. | 12004 // Zap any old optimized code map. |
| 11974 if (!shared->OptimizedCodeMapIsCleared()) { | 12005 FixedArray* old_code_map = shared->optimized_code_map(); |
| 11975 FixedArray* old_code_map = shared->optimized_code_map(); | 12006 if (old_code_map != *new_code_map) { |
|
ulan
2015/12/08 08:47:06
Since we are using weak cells now, there is no need […comment truncated in export]
mvstanton
2015/12/08 16:40:29
Good point! Removed that here and in the function […comment truncated in export]
| |
| 11976 old_code_map->FillWithHoles(0, old_code_map->length()); | 12007 if (!shared->OptimizedCodeMapIsCleared()) { |
| 12008 old_code_map->FillWithHoles(0, old_code_map->length()); | |
| 12009 } | |
| 12010 shared->set_optimized_code_map(*new_code_map); | |
| 11977 } | 12011 } |
| 11978 | |
| 11979 shared->set_optimized_code_map(*new_code_map); | |
| 11980 } | 12012 } |
| 11981 | 12013 |
| 11982 | 12014 |
| 11983 void SharedFunctionInfo::ClearOptimizedCodeMap() { | 12015 void SharedFunctionInfo::ClearOptimizedCodeMap() { |
| 11984 // Zap any old optimized code map. | 12016 // Zap any old optimized code map. |
| 11985 if (!OptimizedCodeMapIsCleared()) { | 12017 if (!OptimizedCodeMapIsCleared()) { |
| 11986 FixedArray* old_code_map = optimized_code_map(); | 12018 FixedArray* old_code_map = optimized_code_map(); |
| 11987 old_code_map->FillWithHoles(0, old_code_map->length()); | 12019 old_code_map->FillWithHoles(0, old_code_map->length()); |
| 11988 } | 12020 } |
| 11989 | 12021 |
| 11990 FixedArray* cleared_map = GetHeap()->cleared_optimized_code_map(); | 12022 FixedArray* cleared_map = GetHeap()->cleared_optimized_code_map(); |
| 11991 set_optimized_code_map(cleared_map, SKIP_WRITE_BARRIER); | 12023 set_optimized_code_map(cleared_map, SKIP_WRITE_BARRIER); |
| 11992 } | 12024 } |
| 11993 | 12025 |
| 11994 | 12026 |
| 11995 void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code, | 12027 void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code, |
| 11996 const char* reason) { | 12028 const char* reason) { |
| 11997 DisallowHeapAllocation no_gc; | 12029 DisallowHeapAllocation no_gc; |
| 11998 if (OptimizedCodeMapIsCleared()) return; | 12030 if (OptimizedCodeMapIsCleared()) return; |
| 11999 | 12031 |
| 12000 Heap* heap = GetHeap(); | 12032 Heap* heap = GetHeap(); |
| 12001 FixedArray* code_map = optimized_code_map(); | 12033 FixedArray* code_map = optimized_code_map(); |
| 12002 int dst = kEntriesStart; | 12034 int dst = kEntriesStart; |
| 12003 int length = code_map->length(); | 12035 int length = code_map->length(); |
| 12004 for (int src = kEntriesStart; src < length; src += kEntryLength) { | 12036 for (int src = kEntriesStart; src < length; src += kEntryLength) { |
| 12005 DCHECK(code_map->get(src)->IsNativeContext()); | 12037 DCHECK(WeakCell::cast(code_map->get(src))->cleared() || |
| 12006 if (code_map->get(src + kCachedCodeOffset) == optimized_code) { | 12038 WeakCell::cast(code_map->get(src))->value()->IsNativeContext()); |
| 12039 if (WeakCell::cast(code_map->get(src + kCachedCodeOffset))->value() == | |
| 12040 optimized_code) { | |
| 12007 BailoutId osr(Smi::cast(code_map->get(src + kOsrAstIdOffset))->value()); | 12041 BailoutId osr(Smi::cast(code_map->get(src + kOsrAstIdOffset))->value()); |
| 12008 if (FLAG_trace_opt) { | 12042 if (FLAG_trace_opt) { |
| 12009 PrintF("[evicting entry from optimizing code map (%s) for ", reason); | 12043 PrintF("[evicting entry from optimizing code map (%s) for ", reason); |
| 12010 ShortPrint(); | 12044 ShortPrint(); |
| 12011 if (osr.IsNone()) { | 12045 if (osr.IsNone()) { |
| 12012 PrintF("]\n"); | 12046 PrintF("]\n"); |
| 12013 } else { | 12047 } else { |
| 12014 PrintF(" (osr ast id %d)]\n", osr.ToInt()); | 12048 PrintF(" (osr ast id %d)]\n", osr.ToInt()); |
| 12015 } | 12049 } |
| 12016 } | 12050 } |
| 12017 if (!osr.IsNone()) { | 12051 if (!osr.IsNone()) { |
| 12018 // Evict the src entry by not copying it to the dst entry. | 12052 // Evict the src entry by not copying it to the dst entry. |
| 12019 continue; | 12053 continue; |
| 12020 } | 12054 } |
| 12021 // In case of non-OSR entry just clear the code in order to proceed | 12055 // In case of non-OSR entry just clear the code in order to proceed |
| 12022 // sharing literals. | 12056 // sharing literals. |
| 12023 code_map->set_undefined(src + kCachedCodeOffset); | 12057 code_map->set(src + kCachedCodeOffset, heap->empty_weak_cell(), |
| 12058 SKIP_WRITE_BARRIER); | |
| 12024 } | 12059 } |
| 12025 | 12060 |
| 12026 // Keep the src entry by copying it to the dst entry. | 12061 // Keep the src entry by copying it to the dst entry. |
| 12027 if (dst != src) { | 12062 if (dst != src) { |
| 12028 code_map->set(dst + kContextOffset, code_map->get(src + kContextOffset)); | 12063 code_map->set(dst + kContextOffset, code_map->get(src + kContextOffset)); |
| 12029 code_map->set(dst + kCachedCodeOffset, | 12064 code_map->set(dst + kCachedCodeOffset, |
| 12030 code_map->get(src + kCachedCodeOffset)); | 12065 code_map->get(src + kCachedCodeOffset)); |
| 12031 code_map->set(dst + kLiteralsOffset, | 12066 code_map->set(dst + kLiteralsOffset, |
| 12032 code_map->get(src + kLiteralsOffset)); | 12067 code_map->get(src + kLiteralsOffset)); |
| 12033 code_map->set(dst + kOsrAstIdOffset, | 12068 code_map->set(dst + kOsrAstIdOffset, |
| 12034 code_map->get(src + kOsrAstIdOffset)); | 12069 code_map->get(src + kOsrAstIdOffset)); |
| 12035 } | 12070 } |
| 12036 dst += kEntryLength; | 12071 dst += kEntryLength; |
| 12037 } | 12072 } |
| 12038 if (code_map->get(kSharedCodeIndex) == optimized_code) { | 12073 if (WeakCell::cast(code_map->get(kSharedCodeIndex))->value() == |
| 12074 optimized_code) { | |
| 12039 // Evict context-independent code as well. | 12075 // Evict context-independent code as well. |
| 12040 code_map->set_undefined(kSharedCodeIndex); | 12076 code_map->set(kSharedCodeIndex, heap->empty_weak_cell(), |
| 12077 SKIP_WRITE_BARRIER); | |
| 12041 if (FLAG_trace_opt) { | 12078 if (FLAG_trace_opt) { |
| 12042 PrintF("[evicting entry from optimizing code map (%s) for ", reason); | 12079 PrintF("[evicting entry from optimizing code map (%s) for ", reason); |
| 12043 ShortPrint(); | 12080 ShortPrint(); |
| 12044 PrintF(" (context-independent code)]\n"); | 12081 PrintF(" (context-independent code)]\n"); |
| 12045 } | 12082 } |
| 12046 } | 12083 } |
| 12047 if (dst != length) { | 12084 if (dst != length) { |
| 12048 // Always trim even when array is cleared because of heap verifier. | 12085 // Always trim even when array is cleared because of heap verifier. |
| 12049 heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(code_map, | 12086 heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(code_map, |
| 12050 length - dst); | 12087 length - dst); |
| 12051 if (code_map->length() == kEntriesStart && | 12088 if (code_map->length() == kEntriesStart && |
| 12052 code_map->get(kSharedCodeIndex)->IsUndefined()) { | 12089 WeakCell::cast(code_map->get(kSharedCodeIndex))->cleared()) { |
| 12053 ClearOptimizedCodeMap(); | 12090 ClearOptimizedCodeMap(); |
| 12054 } | 12091 } |
| 12055 } | 12092 } |
| 12056 } | 12093 } |
| 12057 | 12094 |
| 12058 | 12095 |
| 12059 void SharedFunctionInfo::TrimOptimizedCodeMap(int shrink_by) { | 12096 void SharedFunctionInfo::TrimOptimizedCodeMap(int shrink_by) { |
| 12060 FixedArray* code_map = optimized_code_map(); | 12097 FixedArray* code_map = optimized_code_map(); |
| 12061 DCHECK(shrink_by % kEntryLength == 0); | 12098 DCHECK(shrink_by % kEntryLength == 0); |
| 12062 DCHECK(shrink_by <= code_map->length() - kEntriesStart); | 12099 DCHECK(shrink_by <= code_map->length() - kEntriesStart); |
| 12063 // Always trim even when array is cleared because of heap verifier. | 12100 // Always trim even when array is cleared because of heap verifier. |
| 12064 GetHeap()->RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>(code_map, | 12101 GetHeap()->RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>(code_map, |
| 12065 shrink_by); | 12102 shrink_by); |
| 12066 if (code_map->length() == kEntriesStart && | 12103 if (code_map->length() == kEntriesStart && |
| 12067 code_map->get(kSharedCodeIndex)->IsUndefined()) { | 12104 WeakCell::cast(code_map->get(kSharedCodeIndex))->cleared()) { |
| 12068 ClearOptimizedCodeMap(); | 12105 ClearOptimizedCodeMap(); |
| 12069 } | 12106 } |
| 12070 } | 12107 } |
| 12071 | 12108 |
| 12072 | 12109 |
| 12073 static void GetMinInobjectSlack(Map* map, void* data) { | 12110 static void GetMinInobjectSlack(Map* map, void* data) { |
| 12074 int slack = map->unused_property_fields(); | 12111 int slack = map->unused_property_fields(); |
| 12075 if (*reinterpret_cast<int*>(data) > slack) { | 12112 if (*reinterpret_cast<int*>(data) > slack) { |
| 12076 *reinterpret_cast<int*>(data) = slack; | 12113 *reinterpret_cast<int*>(data) = slack; |
| 12077 } | 12114 } |
| (...skipping 1212 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 13290 | 13327 |
| 13291 int SharedFunctionInfo::SearchOptimizedCodeMapEntry(Context* native_context, | 13328 int SharedFunctionInfo::SearchOptimizedCodeMapEntry(Context* native_context, |
| 13292 BailoutId osr_ast_id) { | 13329 BailoutId osr_ast_id) { |
| 13293 DisallowHeapAllocation no_gc; | 13330 DisallowHeapAllocation no_gc; |
| 13294 DCHECK(native_context->IsNativeContext()); | 13331 DCHECK(native_context->IsNativeContext()); |
| 13295 if (!OptimizedCodeMapIsCleared()) { | 13332 if (!OptimizedCodeMapIsCleared()) { |
| 13296 FixedArray* optimized_code_map = this->optimized_code_map(); | 13333 FixedArray* optimized_code_map = this->optimized_code_map(); |
| 13297 int length = optimized_code_map->length(); | 13334 int length = optimized_code_map->length(); |
| 13298 Smi* osr_ast_id_smi = Smi::FromInt(osr_ast_id.ToInt()); | 13335 Smi* osr_ast_id_smi = Smi::FromInt(osr_ast_id.ToInt()); |
| 13299 for (int i = kEntriesStart; i < length; i += kEntryLength) { | 13336 for (int i = kEntriesStart; i < length; i += kEntryLength) { |
| 13300 if (optimized_code_map->get(i + kContextOffset) == native_context && | 13337 if (WeakCell::cast(optimized_code_map->get(i + kContextOffset)) |
| 13338 ->value() == native_context && | |
| 13301 optimized_code_map->get(i + kOsrAstIdOffset) == osr_ast_id_smi) { | 13339 optimized_code_map->get(i + kOsrAstIdOffset) == osr_ast_id_smi) { |
| 13302 return i; | 13340 return i; |
| 13303 } | 13341 } |
| 13304 } | 13342 } |
| 13305 Object* shared_code = optimized_code_map->get(kSharedCodeIndex); | 13343 Object* shared_code = |
| 13344 WeakCell::cast(optimized_code_map->get(kSharedCodeIndex))->value(); | |
| 13306 if (shared_code->IsCode() && osr_ast_id.IsNone()) { | 13345 if (shared_code->IsCode() && osr_ast_id.IsNone()) { |
| 13307 return kSharedCodeIndex; | 13346 return kSharedCodeIndex; |
| 13308 } | 13347 } |
| 13309 } | 13348 } |
| 13310 return -1; | 13349 return -1; |
| 13311 } | 13350 } |
| 13312 | 13351 |
| 13313 | 13352 |
| 13314 CodeAndLiterals SharedFunctionInfo::SearchOptimizedCodeMap( | 13353 CodeAndLiterals SharedFunctionInfo::SearchOptimizedCodeMap( |
| 13315 Context* native_context, BailoutId osr_ast_id) { | 13354 Context* native_context, BailoutId osr_ast_id) { |
| 13316 CodeAndLiterals result = {nullptr, nullptr}; | 13355 CodeAndLiterals result = {nullptr, nullptr}; |
| 13317 int entry = SearchOptimizedCodeMapEntry(native_context, osr_ast_id); | 13356 int entry = SearchOptimizedCodeMapEntry(native_context, osr_ast_id); |
| 13318 if (entry != kNotFound) { | 13357 if (entry != kNotFound) { |
| 13319 FixedArray* code_map = optimized_code_map(); | 13358 FixedArray* code_map = optimized_code_map(); |
| 13320 if (entry == kSharedCodeIndex) { | 13359 if (entry == kSharedCodeIndex) { |
| 13321 result = {Code::cast(code_map->get(kSharedCodeIndex)), nullptr}; | 13360 // We know the weak cell isn't cleared because we made sure of it in |
| 13322 | 13361 // SearchOptimizedCodeMapEntry and performed no allocations since that |
| 13362 // call. | |
| 13363 result = { | |
| 13364 Code::cast(WeakCell::cast(code_map->get(kSharedCodeIndex))->value()), | |
| 13365 nullptr}; | |
| 13323 } else { | 13366 } else { |
| 13324 DCHECK_LE(entry + kEntryLength, code_map->length()); | 13367 DCHECK_LE(entry + kEntryLength, code_map->length()); |
| 13325 Object* code = code_map->get(entry + kCachedCodeOffset); | 13368 WeakCell* cell = WeakCell::cast(code_map->get(entry + kCachedCodeOffset)); |
| 13326 result = {code->IsUndefined() ? nullptr : Code::cast(code), | 13369 WeakCell* literals_cell = |
| 13327 LiteralsArray::cast(code_map->get(entry + kLiteralsOffset))}; | 13370 WeakCell::cast(code_map->get(entry + kLiteralsOffset)); |
| 13371 | |
| 13372 result = {cell->cleared() ? nullptr : Code::cast(cell->value()), | |
| 13373 literals_cell->cleared() | |
| 13374 ? nullptr | |
| 13375 : LiteralsArray::cast(literals_cell->value())}; | |
| 13328 } | 13376 } |
| 13329 } | 13377 } |
| 13330 if (FLAG_trace_opt && !OptimizedCodeMapIsCleared() && | 13378 if (FLAG_trace_opt && !OptimizedCodeMapIsCleared() && |
| 13331 result.code == nullptr) { | 13379 result.code == nullptr) { |
| 13332 PrintF("[didn't find optimized code in optimized code map for "); | 13380 PrintF("[didn't find optimized code in optimized code map for "); |
| 13333 ShortPrint(); | 13381 ShortPrint(); |
| 13334 PrintF("]\n"); | 13382 PrintF("]\n"); |
| 13335 } | 13383 } |
| 13336 return result; | 13384 return result; |
| 13337 } | 13385 } |
| (...skipping 5756 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 19094 if (cell->value() != *new_value) { | 19142 if (cell->value() != *new_value) { |
| 19095 cell->set_value(*new_value); | 19143 cell->set_value(*new_value); |
| 19096 Isolate* isolate = cell->GetIsolate(); | 19144 Isolate* isolate = cell->GetIsolate(); |
| 19097 cell->dependent_code()->DeoptimizeDependentCodeGroup( | 19145 cell->dependent_code()->DeoptimizeDependentCodeGroup( |
| 19098 isolate, DependentCode::kPropertyCellChangedGroup); | 19146 isolate, DependentCode::kPropertyCellChangedGroup); |
| 19099 } | 19147 } |
| 19100 } | 19148 } |
| 19101 | 19149 |
| 19102 } // namespace internal | 19150 } // namespace internal |
| 19103 } // namespace v8 | 19151 } // namespace v8 |
| OLD | NEW |