| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/objects.h" | 5 #include "src/objects.h" |
| 6 | 6 |
| 7 #include <cmath> | 7 #include <cmath> |
| 8 #include <iomanip> | 8 #include <iomanip> |
| 9 #include <sstream> | 9 #include <sstream> |
| 10 | 10 |
| (...skipping 11062 matching lines...) |
| 11073 old_code_map->get(entry + kCachedCodeOffset)); | 11073 old_code_map->get(entry + kCachedCodeOffset)); |
| 11074 // Just set the code and literals to the entry. | 11074 // Just set the code and literals to the entry. |
| 11075 old_code_map->set(entry + kCachedCodeOffset, *code); | 11075 old_code_map->set(entry + kCachedCodeOffset, *code); |
| 11076 old_code_map->set(entry + kLiteralsOffset, *literals); | 11076 old_code_map->set(entry + kLiteralsOffset, *literals); |
| 11077 return; | 11077 return; |
| 11078 } | 11078 } |
| 11079 | 11079 |
| 11080 // Copy old optimized code map and append one new entry. | 11080 // Copy old optimized code map and append one new entry. |
| 11081 new_code_map = isolate->factory()->CopyFixedArrayAndGrow( | 11081 new_code_map = isolate->factory()->CopyFixedArrayAndGrow( |
| 11082 old_code_map, kEntryLength, TENURED); | 11082 old_code_map, kEntryLength, TENURED); |
| | 11083 // TODO(mstarzinger): Temporary workaround. The allocation above might have |
| | 11084 // flushed the optimized code map and the copy we created is full of holes. |
| | 11085 // For now we just give up on adding the entry and pretend it got flushed. |
| | 11086 if (shared->optimized_code_map()->IsSmi()) return; |
| 11083 int old_length = old_code_map->length(); | 11087 int old_length = old_code_map->length(); |
| 11084 // Zap the old map to avoid any stale entries. Note that this is required | 11088 // Zap the old map to avoid any stale entries. Note that this is required |
| 11085 // for correctness because entries are being treated weakly by the GC. | 11089 // for correctness because entries are being treated weakly by the GC. |
| 11086 MemsetPointer(old_code_map->data_start(), isolate->heap()->the_hole_value(), | 11090 MemsetPointer(old_code_map->data_start(), isolate->heap()->the_hole_value(), |
| 11087 old_length); | 11091 old_length); |
| 11088 entry = old_length; | 11092 entry = old_length; |
| 11089 } | 11093 } |
| 11090 new_code_map->set(entry + kContextOffset, *native_context); | 11094 new_code_map->set(entry + kContextOffset, *native_context); |
| 11091 new_code_map->set(entry + kCachedCodeOffset, *code); | 11095 new_code_map->set(entry + kCachedCodeOffset, *code); |
| 11092 new_code_map->set(entry + kLiteralsOffset, *literals); | 11096 new_code_map->set(entry + kLiteralsOffset, *literals); |
| 11093 new_code_map->set(entry + kOsrAstIdOffset, Smi::FromInt(osr_ast_id.ToInt())); | 11097 new_code_map->set(entry + kOsrAstIdOffset, Smi::FromInt(osr_ast_id.ToInt())); |
| 11094 | 11098 |
| 11095 #ifdef DEBUG | 11099 #ifdef DEBUG |
| 11096 for (int i = kEntriesStart; i < new_code_map->length(); i += kEntryLength) { | 11100 for (int i = kEntriesStart; i < new_code_map->length(); i += kEntryLength) { |
| 11097 DCHECK(new_code_map->get(i + kContextOffset)->IsNativeContext()); | 11101 DCHECK(new_code_map->get(i + kContextOffset)->IsNativeContext()); |
| 11098 Object* code = new_code_map->get(i + kCachedCodeOffset); | 11102 Object* code = new_code_map->get(i + kCachedCodeOffset); |
| 11099 if (code != isolate->heap()->undefined_value()) { | 11103 if (code != isolate->heap()->undefined_value()) { |
| 11100 DCHECK(code->IsCode()); | 11104 DCHECK(code->IsCode()); |
| 11101 DCHECK(Code::cast(code)->kind() == Code::OPTIMIZED_FUNCTION); | 11105 DCHECK(Code::cast(code)->kind() == Code::OPTIMIZED_FUNCTION); |
| 11102 } | 11106 } |
| 11103 DCHECK(new_code_map->get(i + kLiteralsOffset)->IsFixedArray()); | 11107 DCHECK(new_code_map->get(i + kLiteralsOffset)->IsFixedArray()); |
| 11104 DCHECK(new_code_map->get(i + kOsrAstIdOffset)->IsSmi()); | 11108 DCHECK(new_code_map->get(i + kOsrAstIdOffset)->IsSmi()); |
| 11105 } | 11109 } |
| 11106 #endif | 11110 #endif |
| | 11111 |
| | 11112 if (Heap::ShouldZapGarbage()) { |
| | 11113 // Zap any old optimized code map for heap-verifier. |
| | 11114 if (!shared->optimized_code_map()->IsSmi()) { |
| | 11115 FixedArray* old_code_map = FixedArray::cast(shared->optimized_code_map()); |
| | 11116 old_code_map->FillWithHoles(0, old_code_map->length()); |
| | 11117 } |
| | 11118 } |
| | 11119 |
| 11107 shared->set_optimized_code_map(*new_code_map); | 11120 shared->set_optimized_code_map(*new_code_map); |
| 11108 } | 11121 } |
| 11109 | 11122 |
| 11110 | 11123 |
| 11111 void SharedFunctionInfo::ClearOptimizedCodeMap() { | 11124 void SharedFunctionInfo::ClearOptimizedCodeMap() { |
| 11112 FixedArray* code_map = FixedArray::cast(optimized_code_map()); | 11125 if (Heap::ShouldZapGarbage()) { |
| 11113 | 11126 // Zap any old optimized code map for heap-verifier. |
| 11114 // If the next map link slot is already used then the function was | 11127 if (!optimized_code_map()->IsSmi()) { |
| 11115 // enqueued with code flushing and we remove it now. | 11128 FixedArray* old_code_map = FixedArray::cast(optimized_code_map()); |
| 11116 if (!code_map->get(kNextMapIndex)->IsUndefined()) { | 11129 old_code_map->FillWithHoles(0, old_code_map->length()); |
| 11117 CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher(); | 11130 } |
| 11118 flusher->EvictOptimizedCodeMap(this); | |
| 11119 } | 11131 } |
| 11120 | 11132 |
| 11121 DCHECK(code_map->get(kNextMapIndex)->IsUndefined()); | |
| 11122 set_optimized_code_map(Smi::FromInt(0)); | 11133 set_optimized_code_map(Smi::FromInt(0)); |
| 11123 } | 11134 } |
| 11124 | 11135 |
| 11125 | 11136 |
| 11126 void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code, | 11137 void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code, |
| 11127 const char* reason) { | 11138 const char* reason) { |
| 11128 DisallowHeapAllocation no_gc; | 11139 DisallowHeapAllocation no_gc; |
| 11129 if (optimized_code_map()->IsSmi()) return; | 11140 if (optimized_code_map()->IsSmi()) return; |
| 11130 | 11141 |
| 11131 Heap* heap = GetHeap(); | 11142 Heap* heap = GetHeap(); |
| (...skipping 6750 matching lines...) |
| 17882 if (cell->value() != *new_value) { | 17893 if (cell->value() != *new_value) { |
| 17883 cell->set_value(*new_value); | 17894 cell->set_value(*new_value); |
| 17884 Isolate* isolate = cell->GetIsolate(); | 17895 Isolate* isolate = cell->GetIsolate(); |
| 17885 cell->dependent_code()->DeoptimizeDependentCodeGroup( | 17896 cell->dependent_code()->DeoptimizeDependentCodeGroup( |
| 17886 isolate, DependentCode::kPropertyCellChangedGroup); | 17897 isolate, DependentCode::kPropertyCellChangedGroup); |
| 17887 } | 17898 } |
| 17888 } | 17899 } |
| 17889 | 17900 |
| 17890 } // namespace internal | 17901 } // namespace internal |
| 17891 } // namespace v8 | 17902 } // namespace v8 |
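
The hunks above all manipulate the flat optimized code map hanging off a SharedFunctionInfo: a FixedArray in which each entry occupies kEntryLength consecutive slots (native context, cached optimized code, literals, OSR AST id), and an abandoned map is zapped with the hole because the GC treats its entries weakly. The sketch below is a self-contained model of that layout and of the append-and-zap step, not V8 code: std::vector stands in for FixedArray, a plain machine word for a tagged slot, the leading header slot (kNextMapIndex / kEntriesStart) is omitted, and ZapCodeMap / AddEntry are hypothetical helpers mirroring MemsetPointer(..., the_hole_value, ...) and the tail of AddToOptimizedCodeMap.

```cpp
#include <cstdint>
#include <vector>

// One machine word per slot; the real code map is a FixedArray of tagged
// pointers, and "the hole" is a dedicated sentinel object in the V8 heap.
using Slot = std::intptr_t;
const Slot kTheHole = -1;  // stand-in for isolate->heap()->the_hole_value()

// Slot layout of one optimized-code-map entry, mirroring the constants used
// in the diff: context, cached code, literals, OSR AST id; four slots each.
enum EntrySlots {
  kContextOffset = 0,
  kCachedCodeOffset = 1,
  kLiteralsOffset = 2,
  kOsrAstIdOffset = 3,
  kEntryLength = 4
};

// Overwrite every slot with the hole. The patch does this both to the
// abandoned old map (required for correctness, since the GC treats the
// entries weakly) and, under Heap::ShouldZapGarbage(), to the map being
// replaced so the heap verifier never sees stale entries.
void ZapCodeMap(std::vector<Slot>* map) {
  for (Slot& slot : *map) slot = kTheHole;
}

// Append one entry the way AddToOptimizedCodeMap does when no existing entry
// matches: grow a copy of the old map by one entry, zap the old map, and fill
// in the four slots of the new entry starting at the old length.
std::vector<Slot> AddEntry(std::vector<Slot>* old_map, Slot context, Slot code,
                           Slot literals, int osr_ast_id) {
  std::vector<Slot> new_map(*old_map);            // CopyFixedArrayAndGrow
  new_map.resize(new_map.size() + kEntryLength);
  ZapCodeMap(old_map);                            // MemsetPointer(the_hole)
  size_t entry = old_map->size();                 // entry starts at old length
  new_map[entry + kContextOffset] = context;
  new_map[entry + kCachedCodeOffset] = code;
  new_map[entry + kLiteralsOffset] = literals;
  new_map[entry + kOsrAstIdOffset] = osr_ast_id;  // stored as a Smi in V8
  return new_map;
}
```

Separating the grow-copy from the zap also makes the new bail-out after CopyFixedArrayAndGrow easier to see: as the TODO in the hunk notes, that allocation can trigger a GC that flushes the optimized code map, leaving the freshly grown copy full of holes, so the patch simply gives up on adding the entry in that case.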