OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/objects.h" | 5 #include "src/objects.h" |
6 | 6 |
7 #include <cmath> | 7 #include <cmath> |
8 #include <iomanip> | 8 #include <iomanip> |
9 #include <sstream> | 9 #include <sstream> |
10 | 10 |
(...skipping 11818 matching lines...)
11829 } | 11829 } |
11830 | 11830 |
11831 | 11831 |
11832 void SharedFunctionInfo::AddSharedCodeToOptimizedCodeMap( | 11832 void SharedFunctionInfo::AddSharedCodeToOptimizedCodeMap( |
11833 Handle<SharedFunctionInfo> shared, Handle<Code> code) { | 11833 Handle<SharedFunctionInfo> shared, Handle<Code> code) { |
11834 Isolate* isolate = shared->GetIsolate(); | 11834 Isolate* isolate = shared->GetIsolate(); |
11835 if (isolate->serializer_enabled()) return; | 11835 if (isolate->serializer_enabled()) return; |
11836 DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION); | 11836 DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION); |
11837 // Empty code maps are unsupported. | 11837 // Empty code maps are unsupported. |
11838 if (shared->OptimizedCodeMapIsCleared()) return; | 11838 if (shared->OptimizedCodeMapIsCleared()) return; |
11839 shared->optimized_code_map()->set(kSharedCodeIndex, *code); | 11839 Handle<WeakCell> cell = isolate->factory()->NewWeakCell(code); |
| 11840 shared->optimized_code_map()->set(kSharedCodeIndex, *cell); |
11840 } | 11841 } |
11841 | 11842 |
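Conceptual note on the hunk above: the shared slot no longer holds a raw Code pointer but a WeakCell wrapping it, so the optimized code map does not keep optimized code alive across GC and every reader has to check whether the cell was cleared. As a rough standalone analogy only (std::weak_ptr is not V8's WeakCell, and the Code struct below is invented for the sketch), the cleared-cell check looks like this:

    #include <iostream>
    #include <memory>

    // Stand-in for an optimized Code object (invented for this sketch).
    struct Code { int id; };

    int main() {
      // Strong owner, playing the role of the live code object on the heap.
      auto code = std::make_shared<Code>(Code{42});

      // The map slot holds only a weak reference, like the WeakCell above.
      std::weak_ptr<Code> slot = code;

      // Lookup while the code is alive: the cell is not cleared.
      if (auto strong = slot.lock()) {
        std::cout << "found cached code " << strong->id << "\n";
      }

      // "GC" reclaims the code; the weak slot does not keep it alive.
      code.reset();

      // Lookup after collection: the cell reads as cleared.
      if (slot.expired()) {
        std::cout << "cell cleared, fall back to unoptimized code\n";
      }
      return 0;
    }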
11842 | 11843 |
11843 void SharedFunctionInfo::AddToOptimizedCodeMap( | 11844 void SharedFunctionInfo::AddToOptimizedCodeMap( |
11844 Handle<SharedFunctionInfo> shared, Handle<Context> native_context, | 11845 Handle<SharedFunctionInfo> shared, Handle<Context> native_context, |
11845 Handle<HeapObject> code, Handle<LiteralsArray> literals, | 11846 Handle<HeapObject> code, Handle<LiteralsArray> literals, |
11846 BailoutId osr_ast_id) { | 11847 BailoutId osr_ast_id) { |
11847 Isolate* isolate = shared->GetIsolate(); | 11848 Isolate* isolate = shared->GetIsolate(); |
11848 if (isolate->serializer_enabled()) return; | 11849 if (isolate->serializer_enabled()) return; |
11849 DCHECK(*code == isolate->heap()->undefined_value() || | 11850 DCHECK(*code == isolate->heap()->undefined_value() || |
11850 !shared->SearchOptimizedCodeMap(*native_context, osr_ast_id).code); | 11851 !shared->SearchOptimizedCodeMap(*native_context, osr_ast_id).code); |
11851 DCHECK(*code == isolate->heap()->undefined_value() || | 11852 DCHECK(*code == isolate->heap()->undefined_value() || |
11852 Code::cast(*code)->kind() == Code::OPTIMIZED_FUNCTION); | 11853 Code::cast(*code)->kind() == Code::OPTIMIZED_FUNCTION); |
11853 DCHECK(native_context->IsNativeContext()); | 11854 DCHECK(native_context->IsNativeContext()); |
11854 STATIC_ASSERT(kEntryLength == 4); | 11855 STATIC_ASSERT(kEntryLength == 4); |
11855 Handle<FixedArray> new_code_map; | 11856 Handle<FixedArray> new_code_map; |
11856 int entry; | 11857 int entry; |
| 11858 |
11857 if (shared->OptimizedCodeMapIsCleared()) { | 11859 if (shared->OptimizedCodeMapIsCleared()) { |
11858 new_code_map = isolate->factory()->NewFixedArray(kInitialLength, TENURED); | 11860 new_code_map = isolate->factory()->NewFixedArray(kInitialLength, TENURED); |
| 11861 new_code_map->set(kSharedCodeIndex, *isolate->factory()->empty_weak_cell(), |
| 11862 SKIP_WRITE_BARRIER); |
11859 entry = kEntriesStart; | 11863 entry = kEntriesStart; |
11860 } else { | 11864 } else { |
11861 Handle<FixedArray> old_code_map(shared->optimized_code_map(), isolate); | 11865 Handle<FixedArray> old_code_map(shared->optimized_code_map(), isolate); |
11862 entry = shared->SearchOptimizedCodeMapEntry(*native_context, osr_ast_id); | 11866 entry = shared->SearchOptimizedCodeMapEntry(*native_context, osr_ast_id); |
11863 if (entry > kSharedCodeIndex) { | 11867 if (entry > kSharedCodeIndex) { |
11864 // Found an existing context-specific entry, it must not contain any code. | 11868 // Found an existing context-specific entry, it must not contain any code. |
11865 DCHECK_EQ(isolate->heap()->undefined_value(), | 11869 DCHECK(WeakCell::cast(old_code_map->get(entry + kCachedCodeOffset)) |
11866 old_code_map->get(entry + kCachedCodeOffset)); | 11870 ->cleared()); |
11867 // Just set the code and literals to the entry. | 11871 // Just set the code and literals to the entry. |
11868 old_code_map->set(entry + kCachedCodeOffset, *code); | 11872 Handle<WeakCell> code_cell = code->IsUndefined() |
11869 old_code_map->set(entry + kLiteralsOffset, *literals); | 11873 ? isolate->factory()->empty_weak_cell() |
| 11874 : isolate->factory()->NewWeakCell(code); |
| 11875 Handle<WeakCell> literals_cell = |
| 11876 isolate->factory()->NewWeakCell(literals); |
| 11877 old_code_map->set(entry + kCachedCodeOffset, *code_cell); |
| 11878 old_code_map->set(entry + kLiteralsOffset, *literals_cell); |
11870 return; | 11879 return; |
11871 } | 11880 } |
11872 | 11881 |
11873 // Copy old optimized code map and append one new entry. | 11882 // Can we reuse an entry? |
11874 new_code_map = isolate->factory()->CopyFixedArrayAndGrow( | 11883 DCHECK(entry < kEntriesStart); |
11875 old_code_map, kEntryLength, TENURED); | 11884 int length = old_code_map->length(); |
11876 // TODO(mstarzinger): Temporary workaround. The allocation above might have | 11885 for (int i = kEntriesStart; i < length; i += kEntryLength) { |
11877 // flushed the optimized code map and the copy we created is full of holes. | 11886 if (WeakCell::cast(old_code_map->get(i + kContextOffset))->cleared()) { |
11878 // For now we just give up on adding the entry and pretend it got flushed. | 11887 entry = i; |
11879 if (shared->OptimizedCodeMapIsCleared()) return; | 11888 break; |
11880 entry = old_code_map->length(); | 11889 } |
| 11890 } |
| 11891 |
| 11892 if (entry < kEntriesStart) { |
| 11893 // Copy old optimized code map and append one new entry. |
| 11894 new_code_map = isolate->factory()->CopyFixedArrayAndGrow( |
| 11895 old_code_map, kEntryLength, TENURED); |
| 11896 // TODO(mstarzinger): Temporary workaround. The allocation above might |
| 11897 // have flushed the optimized code map and the copy we created is full of |
| 11898 // holes. For now we just give up on adding the entry and pretend it got |
| 11899 // flushed. |
| 11900 if (shared->OptimizedCodeMapIsCleared()) return; |
| 11901 entry = old_code_map->length(); |
| 11902 } |
11881 } | 11903 } |
11882 new_code_map->set(entry + kContextOffset, *native_context); | 11904 |
11883 new_code_map->set(entry + kCachedCodeOffset, *code); | 11905 Handle<WeakCell> code_cell = code->IsUndefined() |
11884 new_code_map->set(entry + kLiteralsOffset, *literals); | 11906 ? isolate->factory()->empty_weak_cell() |
| 11907 : isolate->factory()->NewWeakCell(code); |
| 11908 Handle<WeakCell> literals_cell = isolate->factory()->NewWeakCell(literals); |
| 11909 WeakCell* context_cell = native_context->self_weak_cell(); |
| 11910 |
| 11911 new_code_map->set(entry + kContextOffset, context_cell); |
| 11912 new_code_map->set(entry + kCachedCodeOffset, *code_cell); |
| 11913 new_code_map->set(entry + kLiteralsOffset, *literals_cell); |
11885 new_code_map->set(entry + kOsrAstIdOffset, Smi::FromInt(osr_ast_id.ToInt())); | 11914 new_code_map->set(entry + kOsrAstIdOffset, Smi::FromInt(osr_ast_id.ToInt())); |
11886 | 11915 |
11887 #ifdef DEBUG | 11916 #ifdef DEBUG |
11888 for (int i = kEntriesStart; i < new_code_map->length(); i += kEntryLength) { | 11917 for (int i = kEntriesStart; i < new_code_map->length(); i += kEntryLength) { |
11889 DCHECK(new_code_map->get(i + kContextOffset)->IsNativeContext()); | 11918 WeakCell* cell = WeakCell::cast(new_code_map->get(i + kContextOffset)); |
11890 Object* code = new_code_map->get(i + kCachedCodeOffset); | 11919 DCHECK(cell->cleared() || cell->value()->IsNativeContext()); |
11891 if (code != isolate->heap()->undefined_value()) { | 11920 cell = WeakCell::cast(new_code_map->get(i + kCachedCodeOffset)); |
11892 DCHECK(code->IsCode()); | 11921 DCHECK(cell->cleared() || |
11893 DCHECK(Code::cast(code)->kind() == Code::OPTIMIZED_FUNCTION); | 11922 (cell->value()->IsCode() && |
11894 } | 11923 Code::cast(cell->value())->kind() == Code::OPTIMIZED_FUNCTION)); |
11895 DCHECK(new_code_map->get(i + kLiteralsOffset)->IsFixedArray()); | 11924 cell = WeakCell::cast(new_code_map->get(i + kLiteralsOffset)); |
| 11925 DCHECK(cell->cleared() || cell->value()->IsFixedArray()); |
11896 DCHECK(new_code_map->get(i + kOsrAstIdOffset)->IsSmi()); | 11926 DCHECK(new_code_map->get(i + kOsrAstIdOffset)->IsSmi()); |
11897 } | 11927 } |
11898 #endif | 11928 #endif |
11899 | 11929 |
11900 // Zap any old optimized code map. | 11930 // Zap any old optimized code map. |
11901 if (!shared->OptimizedCodeMapIsCleared()) { | 11931 if (!shared->OptimizedCodeMapIsCleared()) { |
11902 FixedArray* old_code_map = shared->optimized_code_map(); | 11932 FixedArray* old_code_map = shared->optimized_code_map(); |
11903 old_code_map->FillWithHoles(0, old_code_map->length()); | 11933 old_code_map->FillWithHoles(0, old_code_map->length()); |
11904 } | 11934 } |
11905 | 11935 |
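For orientation, the code map manipulated above is a flat FixedArray: one leading slot for context-independent code, then fixed-width records of four slots each (context cell, code cell, literals cell, OSR ast id), which is why every lookup and scan strides by kEntryLength. A minimal sketch of that stride-based layout, with invented constant names and a plain std::vector standing in for the FixedArray:

    #include <cstdint>
    #include <iostream>
    #include <vector>

    // Invented stand-ins for the slot layout of the optimized code map.
    constexpr int kSharedSlot   = 0;  // context-independent code lives here
    constexpr int kEntriesStart = 1;  // records start after the shared slot
    constexpr int kOsrAstId     = 3;  // last offset within a record
    constexpr int kEntryLen     = 4;  // context, code, literals, osr ast id

    int main() {
      // One header slot plus two records, each kEntryLen slots wide.
      std::vector<std::intptr_t> map(kEntriesStart + 2 * kEntryLen, 0);
      map[kEntriesStart + kOsrAstId] = -1;              // record 0: no OSR
      map[kEntriesStart + kEntryLen + kOsrAstId] = 17;  // record 1: OSR id 17

      // Scans walk the records in strides of kEntryLen, mirroring the
      // kEntriesStart / kEntryLength loops in the diff above.
      for (std::size_t i = kEntriesStart; i < map.size(); i += kEntryLen) {
        std::cout << "record at slot " << i
                  << ", osr ast id " << map[i + kOsrAstId] << "\n";
      }
      (void)kSharedSlot;  // header slot unused in this tiny example
      return 0;
    }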
(...skipping 16 matching lines...)
11922 void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code, | 11952 void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code, |
11923 const char* reason) { | 11953 const char* reason) { |
11924 DisallowHeapAllocation no_gc; | 11954 DisallowHeapAllocation no_gc; |
11925 if (OptimizedCodeMapIsCleared()) return; | 11955 if (OptimizedCodeMapIsCleared()) return; |
11926 | 11956 |
11927 Heap* heap = GetHeap(); | 11957 Heap* heap = GetHeap(); |
11928 FixedArray* code_map = optimized_code_map(); | 11958 FixedArray* code_map = optimized_code_map(); |
11929 int dst = kEntriesStart; | 11959 int dst = kEntriesStart; |
11930 int length = code_map->length(); | 11960 int length = code_map->length(); |
11931 for (int src = kEntriesStart; src < length; src += kEntryLength) { | 11961 for (int src = kEntriesStart; src < length; src += kEntryLength) { |
11932 DCHECK(code_map->get(src)->IsNativeContext()); | 11962 DCHECK(WeakCell::cast(code_map->get(src))->cleared() || |
11933 if (code_map->get(src + kCachedCodeOffset) == optimized_code) { | 11963 WeakCell::cast(code_map->get(src))->value()->IsNativeContext()); |
| 11964 if (WeakCell::cast(code_map->get(src + kCachedCodeOffset))->value() == |
| 11965 optimized_code) { |
11934 BailoutId osr(Smi::cast(code_map->get(src + kOsrAstIdOffset))->value()); | 11966 BailoutId osr(Smi::cast(code_map->get(src + kOsrAstIdOffset))->value()); |
11935 if (FLAG_trace_opt) { | 11967 if (FLAG_trace_opt) { |
11936 PrintF("[evicting entry from optimizing code map (%s) for ", reason); | 11968 PrintF("[evicting entry from optimizing code map (%s) for ", reason); |
11937 ShortPrint(); | 11969 ShortPrint(); |
11938 if (osr.IsNone()) { | 11970 if (osr.IsNone()) { |
11939 PrintF("]\n"); | 11971 PrintF("]\n"); |
11940 } else { | 11972 } else { |
11941 PrintF(" (osr ast id %d)]\n", osr.ToInt()); | 11973 PrintF(" (osr ast id %d)]\n", osr.ToInt()); |
11942 } | 11974 } |
11943 } | 11975 } |
11944 if (!osr.IsNone()) { | 11976 if (!osr.IsNone()) { |
11945 // Evict the src entry by not copying it to the dst entry. | 11977 // Evict the src entry by not copying it to the dst entry. |
11946 continue; | 11978 continue; |
11947 } | 11979 } |
11948 // In case of non-OSR entry just clear the code in order to proceed | 11980 // In case of non-OSR entry just clear the code in order to proceed |
11949 // sharing literals. | 11981 // sharing literals. |
11950 code_map->set_undefined(src + kCachedCodeOffset); | 11982 code_map->set(src + kCachedCodeOffset, heap->empty_weak_cell(), |
| 11983 SKIP_WRITE_BARRIER); |
11951 } | 11984 } |
11952 | 11985 |
11953 // Keep the src entry by copying it to the dst entry. | 11986 // Keep the src entry by copying it to the dst entry. |
11954 if (dst != src) { | 11987 if (dst != src) { |
11955 code_map->set(dst + kContextOffset, code_map->get(src + kContextOffset)); | 11988 code_map->set(dst + kContextOffset, code_map->get(src + kContextOffset)); |
11956 code_map->set(dst + kCachedCodeOffset, | 11989 code_map->set(dst + kCachedCodeOffset, |
11957 code_map->get(src + kCachedCodeOffset)); | 11990 code_map->get(src + kCachedCodeOffset)); |
11958 code_map->set(dst + kLiteralsOffset, | 11991 code_map->set(dst + kLiteralsOffset, |
11959 code_map->get(src + kLiteralsOffset)); | 11992 code_map->get(src + kLiteralsOffset)); |
11960 code_map->set(dst + kOsrAstIdOffset, | 11993 code_map->set(dst + kOsrAstIdOffset, |
11961 code_map->get(src + kOsrAstIdOffset)); | 11994 code_map->get(src + kOsrAstIdOffset)); |
11962 } | 11995 } |
11963 dst += kEntryLength; | 11996 dst += kEntryLength; |
11964 } | 11997 } |
11965 if (code_map->get(kSharedCodeIndex) == optimized_code) { | 11998 if (WeakCell::cast(code_map->get(kSharedCodeIndex))->value() == |
| 11999 optimized_code) { |
11966 // Evict context-independent code as well. | 12000 // Evict context-independent code as well. |
11967 code_map->set_undefined(kSharedCodeIndex); | 12001 code_map->set(kSharedCodeIndex, heap->empty_weak_cell(), |
| 12002 SKIP_WRITE_BARRIER); |
11968 if (FLAG_trace_opt) { | 12003 if (FLAG_trace_opt) { |
11969 PrintF("[evicting entry from optimizing code map (%s) for ", reason); | 12004 PrintF("[evicting entry from optimizing code map (%s) for ", reason); |
11970 ShortPrint(); | 12005 ShortPrint(); |
11971 PrintF(" (context-independent code)]\n"); | 12006 PrintF(" (context-independent code)]\n"); |
11972 } | 12007 } |
11973 } | 12008 } |
11974 if (dst != length) { | 12009 if (dst != length) { |
11975 // Always trim even when array is cleared because of heap verifier. | 12010 // Always trim even when array is cleared because of heap verifier. |
11976 heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(code_map, | 12011 heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(code_map, |
11977 length - dst); | 12012 length - dst); |
11978 if (code_map->length() == kEntriesStart && | 12013 if (code_map->length() == kEntriesStart && |
11979 code_map->get(kSharedCodeIndex)->IsUndefined()) { | 12014 WeakCell::cast(code_map->get(kSharedCodeIndex))->cleared()) { |
11980 ClearOptimizedCodeMap(); | 12015 ClearOptimizedCodeMap(); |
11981 } | 12016 } |
11982 } | 12017 } |
11983 } | 12018 } |
11984 | 12019 |
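The rewritten eviction loop keeps the usual compact-in-place shape: a src cursor walks the records, surviving records are copied down to a dst cursor, and the unused tail is trimmed afterwards. A small self-contained illustration of the same pattern, with plain ints instead of heap objects and vector::erase standing in for RightTrimFixedArray:

    #include <cstddef>
    #include <iostream>
    #include <vector>

    int main() {
      constexpr int kEntryLen = 2;                   // toy record: {key, payload}
      std::vector<int> map = {1, 10, 2, 20, 3, 30};  // three records
      const int evicted_key = 2;                     // pretend this entry died

      std::size_t dst = 0;
      for (std::size_t src = 0; src < map.size(); src += kEntryLen) {
        if (map[src] == evicted_key) continue;       // evict: skip the copy
        if (dst != src) {                            // keep: copy the record down
          for (int k = 0; k < kEntryLen; ++k) map[dst + k] = map[src + k];
        }
        dst += kEntryLen;
      }
      map.erase(map.begin() + dst, map.end());       // trim the unused tail

      for (std::size_t i = 0; i < map.size(); i += kEntryLen) {
        std::cout << "{" << map[i] << ", " << map[i + 1] << "}\n";
      }
      return 0;
    }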
11985 | 12020 |
11986 void SharedFunctionInfo::TrimOptimizedCodeMap(int shrink_by) { | 12021 void SharedFunctionInfo::TrimOptimizedCodeMap(int shrink_by) { |
11987 FixedArray* code_map = optimized_code_map(); | 12022 FixedArray* code_map = optimized_code_map(); |
11988 DCHECK(shrink_by % kEntryLength == 0); | 12023 DCHECK(shrink_by % kEntryLength == 0); |
11989 DCHECK(shrink_by <= code_map->length() - kEntriesStart); | 12024 DCHECK(shrink_by <= code_map->length() - kEntriesStart); |
11990 // Always trim even when array is cleared because of heap verifier. | 12025 // Always trim even when array is cleared because of heap verifier. |
11991 GetHeap()->RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>(code_map, | 12026 GetHeap()->RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>(code_map, |
11992 shrink_by); | 12027 shrink_by); |
11993 if (code_map->length() == kEntriesStart && | 12028 if (code_map->length() == kEntriesStart && |
11994 code_map->get(kSharedCodeIndex)->IsUndefined()) { | 12029 WeakCell::cast(code_map->get(kSharedCodeIndex))->cleared()) { |
11995 ClearOptimizedCodeMap(); | 12030 ClearOptimizedCodeMap(); |
11996 } | 12031 } |
11997 } | 12032 } |
11998 | 12033 |
11999 | 12034 |
12000 static void GetMinInobjectSlack(Map* map, void* data) { | 12035 static void GetMinInobjectSlack(Map* map, void* data) { |
12001 int slack = map->unused_property_fields(); | 12036 int slack = map->unused_property_fields(); |
12002 if (*reinterpret_cast<int*>(data) > slack) { | 12037 if (*reinterpret_cast<int*>(data) > slack) { |
12003 *reinterpret_cast<int*>(data) = slack; | 12038 *reinterpret_cast<int*>(data) = slack; |
12004 } | 12039 } |
(...skipping 1224 matching lines...)
13229 | 13264 |
13230 int SharedFunctionInfo::SearchOptimizedCodeMapEntry(Context* native_context, | 13265 int SharedFunctionInfo::SearchOptimizedCodeMapEntry(Context* native_context, |
13231 BailoutId osr_ast_id) { | 13266 BailoutId osr_ast_id) { |
13232 DisallowHeapAllocation no_gc; | 13267 DisallowHeapAllocation no_gc; |
13233 DCHECK(native_context->IsNativeContext()); | 13268 DCHECK(native_context->IsNativeContext()); |
13234 if (!OptimizedCodeMapIsCleared()) { | 13269 if (!OptimizedCodeMapIsCleared()) { |
13235 FixedArray* optimized_code_map = this->optimized_code_map(); | 13270 FixedArray* optimized_code_map = this->optimized_code_map(); |
13236 int length = optimized_code_map->length(); | 13271 int length = optimized_code_map->length(); |
13237 Smi* osr_ast_id_smi = Smi::FromInt(osr_ast_id.ToInt()); | 13272 Smi* osr_ast_id_smi = Smi::FromInt(osr_ast_id.ToInt()); |
13238 for (int i = kEntriesStart; i < length; i += kEntryLength) { | 13273 for (int i = kEntriesStart; i < length; i += kEntryLength) { |
13239 if (optimized_code_map->get(i + kContextOffset) == native_context && | 13274 if (WeakCell::cast(optimized_code_map->get(i + kContextOffset)) |
| 13275 ->value() == native_context && |
13240 optimized_code_map->get(i + kOsrAstIdOffset) == osr_ast_id_smi) { | 13276 optimized_code_map->get(i + kOsrAstIdOffset) == osr_ast_id_smi) { |
13241 return i; | 13277 return i; |
13242 } | 13278 } |
13243 } | 13279 } |
13244 Object* shared_code = optimized_code_map->get(kSharedCodeIndex); | 13280 Object* shared_code = |
| 13281 WeakCell::cast(optimized_code_map->get(kSharedCodeIndex))->value(); |
13245 if (shared_code->IsCode() && osr_ast_id.IsNone()) { | 13282 if (shared_code->IsCode() && osr_ast_id.IsNone()) { |
13246 return kSharedCodeIndex; | 13283 return kSharedCodeIndex; |
13247 } | 13284 } |
13248 } | 13285 } |
13249 return -1; | 13286 return -1; |
13250 } | 13287 } |
13251 | 13288 |
13252 | 13289 |
13253 CodeAndLiterals SharedFunctionInfo::SearchOptimizedCodeMap( | 13290 CodeAndLiterals SharedFunctionInfo::SearchOptimizedCodeMap( |
13254 Context* native_context, BailoutId osr_ast_id) { | 13291 Context* native_context, BailoutId osr_ast_id) { |
13255 CodeAndLiterals result = {nullptr, nullptr}; | 13292 CodeAndLiterals result = {nullptr, nullptr}; |
13256 int entry = SearchOptimizedCodeMapEntry(native_context, osr_ast_id); | 13293 int entry = SearchOptimizedCodeMapEntry(native_context, osr_ast_id); |
13257 if (entry != kNotFound) { | 13294 if (entry != kNotFound) { |
13258 FixedArray* code_map = optimized_code_map(); | 13295 FixedArray* code_map = optimized_code_map(); |
13259 if (entry == kSharedCodeIndex) { | 13296 if (entry == kSharedCodeIndex) { |
13260 result = {Code::cast(code_map->get(kSharedCodeIndex)), nullptr}; | 13297 // We know the weak cell isn't cleared because we made sure of it in |
13261 | 13298 // SearchOptimizedCodeMapEntry and performed no allocations since that |
| 13299 // call. |
| 13300 result = { |
| 13301 Code::cast(WeakCell::cast(code_map->get(kSharedCodeIndex))->value()), |
| 13302 nullptr}; |
13262 } else { | 13303 } else { |
13263 DCHECK_LE(entry + kEntryLength, code_map->length()); | 13304 DCHECK_LE(entry + kEntryLength, code_map->length()); |
13264 Object* code = code_map->get(entry + kCachedCodeOffset); | 13305 WeakCell* cell = WeakCell::cast(code_map->get(entry + kCachedCodeOffset)); |
13265 result = {code->IsUndefined() ? nullptr : Code::cast(code), | 13306 WeakCell* literals_cell = |
13266 LiteralsArray::cast(code_map->get(entry + kLiteralsOffset))}; | 13307 WeakCell::cast(code_map->get(entry + kLiteralsOffset)); |
| 13308 |
| 13309 result = {cell->cleared() ? nullptr : Code::cast(cell->value()), |
| 13310 literals_cell->cleared() |
| 13311 ? nullptr |
| 13312 : LiteralsArray::cast(literals_cell->value())}; |
13267 } | 13313 } |
13268 } | 13314 } |
13269 if (FLAG_trace_opt && !OptimizedCodeMapIsCleared() && | 13315 if (FLAG_trace_opt && !OptimizedCodeMapIsCleared() && |
13270 result.code == nullptr) { | 13316 result.code == nullptr) { |
13271 PrintF("[didn't find optimized code in optimized code map for "); | 13317 PrintF("[didn't find optimized code in optimized code map for "); |
13272 ShortPrint(); | 13318 ShortPrint(); |
13273 PrintF("]\n"); | 13319 PrintF("]\n"); |
13274 } | 13320 } |
13275 return result; | 13321 return result; |
13276 } | 13322 } |
(...skipping 5790 matching lines...)
19067 if (cell->value() != *new_value) { | 19113 if (cell->value() != *new_value) { |
19068 cell->set_value(*new_value); | 19114 cell->set_value(*new_value); |
19069 Isolate* isolate = cell->GetIsolate(); | 19115 Isolate* isolate = cell->GetIsolate(); |
19070 cell->dependent_code()->DeoptimizeDependentCodeGroup( | 19116 cell->dependent_code()->DeoptimizeDependentCodeGroup( |
19071 isolate, DependentCode::kPropertyCellChangedGroup); | 19117 isolate, DependentCode::kPropertyCellChangedGroup); |
19072 } | 19118 } |
19073 } | 19119 } |
19074 | 19120 |
19075 } // namespace internal | 19121 } // namespace internal |
19076 } // namespace v8 | 19122 } // namespace v8 |