| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/heap/heap.h" | 5 #include "src/heap/heap.h" |
| 6 | 6 |
| 7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
| 8 #include "src/api.h" | 8 #include "src/api.h" |
| 9 #include "src/ast/scopeinfo.h" | 9 #include "src/ast/scopeinfo.h" |
| 10 #include "src/base/bits.h" | 10 #include "src/base/bits.h" |
| (...skipping 927 matching lines...) |
| 938 // the new space, then there may be uninitialized memory behind the top | 938 // the new space, then there may be uninitialized memory behind the top |
| 939 // pointer of the new space page. We store a filler object there to | 939 // pointer of the new space page. We store a filler object there to |
| 940 // identify the unused space. | 940 // identify the unused space. |
| 941 Address from_top = new_space_.top(); | 941 Address from_top = new_space_.top(); |
| 942 // Check that from_top is inside its page (i.e., not at the end). | 942 // Check that from_top is inside its page (i.e., not at the end). |
| 943 Address space_end = new_space_.ToSpaceEnd(); | 943 Address space_end = new_space_.ToSpaceEnd(); |
| 944 if (from_top < space_end) { | 944 if (from_top < space_end) { |
| 945 Page* page = Page::FromAddress(from_top); | 945 Page* page = Page::FromAddress(from_top); |
| 946 if (page->Contains(from_top)) { | 946 if (page->Contains(from_top)) { |
| 947 int remaining_in_page = static_cast<int>(page->area_end() - from_top); | 947 int remaining_in_page = static_cast<int>(page->area_end() - from_top); |
| 948 CreateFillerObjectAt(from_top, remaining_in_page); | 948 CreateFillerObjectAt(from_top, remaining_in_page, |
| 949 ClearRecordedSlots::kNo); |
| 949 } | 950 } |
| 950 } | 951 } |
| 951 } | 952 } |
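[Annotation, not part of the diff] This hunk seals the unused tail of the to-space page behind top with a filler so heap iteration never walks into uninitialized memory. It is the first of many call sites updated for the new third argument; ClearRecordedSlots::kNo is appropriate here on the assumption that memory past top never held objects, so no recorded slots can point into that range:

    // Updated call shape (from the diff); kNo because the tail past `top`
    // never contained object fields, hence carries no recorded slots.
    CreateFillerObjectAt(from_top, remaining_in_page, ClearRecordedSlots::kNo);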
| 952 | 953 |
| 953 | 954 |
| 954 bool Heap::CollectGarbage(GarbageCollector collector, const char* gc_reason, | 955 bool Heap::CollectGarbage(GarbageCollector collector, const char* gc_reason, |
| 955 const char* collector_reason, | 956 const char* collector_reason, |
| 956 const v8::GCCallbackFlags gc_callback_flags) { | 957 const v8::GCCallbackFlags gc_callback_flags) { |
| 957 // The VM is in the GC state until exiting this function. | 958 // The VM is in the GC state until exiting this function. |
| 958 VMState<GC> state(isolate_); | 959 VMState<GC> state(isolate_); |
| (...skipping 200 matching lines...) |
| 1159 if (space == NEW_SPACE) { | 1160 if (space == NEW_SPACE) { |
| 1160 allocation = new_space()->AllocateRawUnaligned(size); | 1161 allocation = new_space()->AllocateRawUnaligned(size); |
| 1161 } else { | 1162 } else { |
| 1162 allocation = paged_space(space)->AllocateRawUnaligned(size); | 1163 allocation = paged_space(space)->AllocateRawUnaligned(size); |
| 1163 } | 1164 } |
| 1164 HeapObject* free_space = nullptr; | 1165 HeapObject* free_space = nullptr; |
| 1165 if (allocation.To(&free_space)) { | 1166 if (allocation.To(&free_space)) { |
| 1166 // Mark with a free list node, in case we have a GC before | 1167 // Mark with a free list node, in case we have a GC before |
| 1167 // deserializing. | 1168 // deserializing. |
| 1168 Address free_space_address = free_space->address(); | 1169 Address free_space_address = free_space->address(); |
| 1169 CreateFillerObjectAt(free_space_address, size); | 1170 CreateFillerObjectAt(free_space_address, size, |
| 1171 ClearRecordedSlots::kNo); |
| 1170 DCHECK(space < Serializer::kNumberOfPreallocatedSpaces); | 1172 DCHECK(space < Serializer::kNumberOfPreallocatedSpaces); |
| 1171 chunk.start = free_space_address; | 1173 chunk.start = free_space_address; |
| 1172 chunk.end = free_space_address + size; | 1174 chunk.end = free_space_address + size; |
| 1173 } else { | 1175 } else { |
| 1174 perform_gc = true; | 1176 perform_gc = true; |
| 1175 break; | 1177 break; |
| 1176 } | 1178 } |
| 1177 } | 1179 } |
| 1178 } | 1180 } |
| 1179 if (perform_gc) { | 1181 if (perform_gc) { |
| (...skipping 812 matching lines...) |
| 1992 return kDoubleSize - kPointerSize; // No fill if double is always aligned. | 1994 return kDoubleSize - kPointerSize; // No fill if double is always aligned. |
| 1993 if (alignment == kSimd128Unaligned) { | 1995 if (alignment == kSimd128Unaligned) { |
| 1994 return (kSimd128Size - (static_cast<int>(offset) + kPointerSize)) & | 1996 return (kSimd128Size - (static_cast<int>(offset) + kPointerSize)) & |
| 1995 kSimd128AlignmentMask; | 1997 kSimd128AlignmentMask; |
| 1996 } | 1998 } |
| 1997 return 0; | 1999 return 0; |
| 1998 } | 2000 } |
| 1999 | 2001 |
| 2000 | 2002 |
| 2001 HeapObject* Heap::PrecedeWithFiller(HeapObject* object, int filler_size) { | 2003 HeapObject* Heap::PrecedeWithFiller(HeapObject* object, int filler_size) { |
| 2002 CreateFillerObjectAt(object->address(), filler_size); | 2004 CreateFillerObjectAt(object->address(), filler_size, ClearRecordedSlots::kNo); |
| 2003 return HeapObject::FromAddress(object->address() + filler_size); | 2005 return HeapObject::FromAddress(object->address() + filler_size); |
| 2004 } | 2006 } |
| 2005 | 2007 |
| 2006 | 2008 |
| 2007 HeapObject* Heap::AlignWithFiller(HeapObject* object, int object_size, | 2009 HeapObject* Heap::AlignWithFiller(HeapObject* object, int object_size, |
| 2008 int allocation_size, | 2010 int allocation_size, |
| 2009 AllocationAlignment alignment) { | 2011 AllocationAlignment alignment) { |
| 2010 int filler_size = allocation_size - object_size; | 2012 int filler_size = allocation_size - object_size; |
| 2011 DCHECK(filler_size > 0); | 2013 DCHECK(filler_size > 0); |
| 2012 int pre_filler = GetFillToAlign(object->address(), alignment); | 2014 int pre_filler = GetFillToAlign(object->address(), alignment); |
| 2013 if (pre_filler) { | 2015 if (pre_filler) { |
| 2014 object = PrecedeWithFiller(object, pre_filler); | 2016 object = PrecedeWithFiller(object, pre_filler); |
| 2015 filler_size -= pre_filler; | 2017 filler_size -= pre_filler; |
| 2016 } | 2018 } |
| 2017 if (filler_size) | 2019 if (filler_size) |
| 2018 CreateFillerObjectAt(object->address() + object_size, filler_size); | 2020 CreateFillerObjectAt(object->address() + object_size, filler_size, |
| 2021 ClearRecordedSlots::kNo); |
| 2019 return object; | 2022 return object; |
| 2020 } | 2023 } |
| 2021 | 2024 |
| 2022 | 2025 |
| 2023 HeapObject* Heap::DoubleAlignForDeserialization(HeapObject* object, int size) { | 2026 HeapObject* Heap::DoubleAlignForDeserialization(HeapObject* object, int size) { |
| 2024 return AlignWithFiller(object, size - kPointerSize, size, kDoubleAligned); | 2027 return AlignWithFiller(object, size - kPointerSize, size, kDoubleAligned); |
| 2025 } | 2028 } |
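[Annotation, not part of the diff] The helpers above carve one raw allocation into pre-filler + object + tail filler; both fillers now pass ClearRecordedSlots::kNo, which is sound for just-allocated memory. A standalone sketch of the double-alignment arithmetic, under the assumption of a 32-bit layout (kPointerSize == 4, 8-byte double alignment); the kDoubleAligned branch of GetFillToAlign it mirrors lies outside the visible hunk:

    #include <cstdint>

    constexpr int kPointerSize = 4;              // assumption: 32-bit build
    constexpr uintptr_t kDoubleAlignmentMask = 7;

    // One word of pre-filler turns a 4-mod-8 address into an 8-aligned one.
    // DoubleAlignForDeserialization reserves size = object_size + 4, so the
    // spare word ends up either in front of the object or behind it, and in
    // both cases is written as a one-word filler with ClearRecordedSlots::kNo.
    int FillToDoubleAlign(uintptr_t address) {
      return (address & kDoubleAlignmentMask) != 0 ? kPointerSize : 0;
    }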
| 2026 | 2029 |
| 2027 | 2030 |
| 2028 void Heap::RegisterNewArrayBuffer(JSArrayBuffer* buffer) { | 2031 void Heap::RegisterNewArrayBuffer(JSArrayBuffer* buffer) { |
| (...skipping 97 matching lines...) |
| 2126 HeapObject* obj = nullptr; | 2129 HeapObject* obj = nullptr; |
| 2127 { | 2130 { |
| 2128 AllocationAlignment align = double_align ? kDoubleAligned : kWordAligned; | 2131 AllocationAlignment align = double_align ? kDoubleAligned : kWordAligned; |
| 2129 AllocationResult allocation = AllocateRaw(size, space, align); | 2132 AllocationResult allocation = AllocateRaw(size, space, align); |
| 2130 if (!allocation.To(&obj)) return allocation; | 2133 if (!allocation.To(&obj)) return allocation; |
| 2131 } | 2134 } |
| 2132 #ifdef DEBUG | 2135 #ifdef DEBUG |
| 2133 MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); | 2136 MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); |
| 2134 DCHECK(chunk->owner()->identity() == space); | 2137 DCHECK(chunk->owner()->identity() == space); |
| 2135 #endif | 2138 #endif |
| 2136 CreateFillerObjectAt(obj->address(), size); | 2139 CreateFillerObjectAt(obj->address(), size, ClearRecordedSlots::kNo); |
| 2137 return obj; | 2140 return obj; |
| 2138 } | 2141 } |
| 2139 | 2142 |
| 2140 | 2143 |
| 2141 const Heap::StringTypeTable Heap::string_type_table[] = { | 2144 const Heap::StringTypeTable Heap::string_type_table[] = { |
| 2142 #define STRING_TYPE_ELEMENT(type, size, name, camel_name) \ | 2145 #define STRING_TYPE_ELEMENT(type, size, name, camel_name) \ |
| 2143 { type, size, k##camel_name##MapRootIndex } \ | 2146 { type, size, k##camel_name##MapRootIndex } \ |
| 2144 , | 2147 , |
| 2145 STRING_TYPE_LIST(STRING_TYPE_ELEMENT) | 2148 STRING_TYPE_LIST(STRING_TYPE_ELEMENT) |
| 2146 #undef STRING_TYPE_ELEMENT | 2149 #undef STRING_TYPE_ELEMENT |
| (...skipping 888 matching lines...) |
| 3035 instance->set_parameter_count(parameter_count); | 3038 instance->set_parameter_count(parameter_count); |
| 3036 instance->set_interrupt_budget(interpreter::Interpreter::InterruptBudget()); | 3039 instance->set_interrupt_budget(interpreter::Interpreter::InterruptBudget()); |
| 3037 instance->set_constant_pool(constant_pool); | 3040 instance->set_constant_pool(constant_pool); |
| 3038 instance->set_handler_table(empty_fixed_array()); | 3041 instance->set_handler_table(empty_fixed_array()); |
| 3039 instance->set_source_position_table(empty_byte_array()); | 3042 instance->set_source_position_table(empty_byte_array()); |
| 3040 CopyBytes(instance->GetFirstBytecodeAddress(), raw_bytecodes, length); | 3043 CopyBytes(instance->GetFirstBytecodeAddress(), raw_bytecodes, length); |
| 3041 | 3044 |
| 3042 return result; | 3045 return result; |
| 3043 } | 3046 } |
| 3044 | 3047 |
| 3045 | 3048 void Heap::CreateFillerObjectAt(Address addr, int size, |
| 3046 void Heap::CreateFillerObjectAt(Address addr, int size) { | 3049 ClearRecordedSlots mode) { |
| 3047 if (size == 0) return; | 3050 if (size == 0) return; |
| 3048 HeapObject* filler = HeapObject::FromAddress(addr); | 3051 HeapObject* filler = HeapObject::FromAddress(addr); |
| 3049 if (size == kPointerSize) { | 3052 if (size == kPointerSize) { |
| 3050 filler->set_map_no_write_barrier( | 3053 filler->set_map_no_write_barrier( |
| 3051 reinterpret_cast<Map*>(root(kOnePointerFillerMapRootIndex))); | 3054 reinterpret_cast<Map*>(root(kOnePointerFillerMapRootIndex))); |
| 3052 } else if (size == 2 * kPointerSize) { | 3055 } else if (size == 2 * kPointerSize) { |
| 3053 filler->set_map_no_write_barrier( | 3056 filler->set_map_no_write_barrier( |
| 3054 reinterpret_cast<Map*>(root(kTwoPointerFillerMapRootIndex))); | 3057 reinterpret_cast<Map*>(root(kTwoPointerFillerMapRootIndex))); |
| 3055 } else { | 3058 } else { |
| 3056 DCHECK_GT(size, 2 * kPointerSize); | 3059 DCHECK_GT(size, 2 * kPointerSize); |
| 3057 filler->set_map_no_write_barrier( | 3060 filler->set_map_no_write_barrier( |
| 3058 reinterpret_cast<Map*>(root(kFreeSpaceMapRootIndex))); | 3061 reinterpret_cast<Map*>(root(kFreeSpaceMapRootIndex))); |
| 3059 FreeSpace::cast(filler)->nobarrier_set_size(size); | 3062 FreeSpace::cast(filler)->nobarrier_set_size(size); |
| 3060 } | 3063 } |
| 3064 if (mode == ClearRecordedSlots::kYes) { |
| 3065 ClearRecordedSlotRange(addr, addr + size); |
| 3066 } |
| 3061 // At this point, we may be deserializing the heap from a snapshot, and | 3067 // At this point, we may be deserializing the heap from a snapshot, and |
| 3062 // none of the maps have been created yet and are NULL. | 3068 // none of the maps have been created yet and are NULL. |
| 3063 DCHECK((filler->map() == NULL && !deserialization_complete_) || | 3069 DCHECK((filler->map() == NULL && !deserialization_complete_) || |
| 3064 filler->map()->IsMap()); | 3070 filler->map()->IsMap()); |
| 3065 } | 3071 } |
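[Annotation, not part of the diff] This hunk is the core of the CL: CreateFillerObjectAt grows a ClearRecordedSlots mode parameter, and on kYes it clears any OLD_TO_NEW entries recorded in the range it is about to deaden (via ClearRecordedSlotRange, reworked near new line 5582 below). The enum's declaration lives in the src/heap/heap.h hunk, which this section does not show; judging by the qualified call sites it is presumably a scoped enum:

    // Inferred from the `ClearRecordedSlots::kNo` / `::kYes` call sites; the
    // exact declaration in src/heap/heap.h is an assumption, not shown here.
    enum class ClearRecordedSlots { kNo, kYes };

The pattern across the CL: fillers written over freshly allocated memory pass kNo, while fillers written over memory that previously held object fields (the array-trimming paths below) pass kYes.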
| 3066 | 3072 |
| 3067 | 3073 |
| 3068 bool Heap::CanMoveObjectStart(HeapObject* object) { | 3074 bool Heap::CanMoveObjectStart(HeapObject* object) { |
| 3069 if (!FLAG_move_object_start) return false; | 3075 if (!FLAG_move_object_start) return false; |
| 3070 | 3076 |
| (...skipping 53 matching lines...) |
| 3124 | 3130 |
| 3125 const int len = object->length(); | 3131 const int len = object->length(); |
| 3126 DCHECK(elements_to_trim <= len); | 3132 DCHECK(elements_to_trim <= len); |
| 3127 | 3133 |
| 3128 // Calculate location of new array start. | 3134 // Calculate location of new array start. |
| 3129 Address new_start = object->address() + bytes_to_trim; | 3135 Address new_start = object->address() + bytes_to_trim; |
| 3130 | 3136 |
| 3131 // Technically in new space this write might be omitted (except for | 3137 // Technically in new space this write might be omitted (except for |
| 3132 // debug mode which iterates through the heap), but to play safer | 3138 // debug mode which iterates through the heap), but to play safer |
| 3133 // we still do it. | 3139 // we still do it. |
| 3134 CreateFillerObjectAt(object->address(), bytes_to_trim); | 3140 CreateFillerObjectAt(object->address(), bytes_to_trim, |
| 3141 ClearRecordedSlots::kYes); |
| 3135 | 3142 |
| 3136 // Initialize header of the trimmed array. Since left trimming is only | 3143 // Initialize header of the trimmed array. Since left trimming is only |
| 3137 // performed on pages which are not concurrently swept creating a filler | 3144 // performed on pages which are not concurrently swept creating a filler |
| 3138 // object does not require synchronization. | 3145 // object does not require synchronization. |
| 3139 DCHECK(CanMoveObjectStart(object)); | 3146 DCHECK(CanMoveObjectStart(object)); |
| 3140 Object** former_start = HeapObject::RawField(object, 0); | 3147 Object** former_start = HeapObject::RawField(object, 0); |
| 3141 int new_start_index = elements_to_trim * (element_size / kPointerSize); | 3148 int new_start_index = elements_to_trim * (element_size / kPointerSize); |
| 3142 former_start[new_start_index] = map; | 3149 former_start[new_start_index] = map; |
| 3143 former_start[new_start_index + 1] = Smi::FromInt(len - elements_to_trim); | 3150 former_start[new_start_index + 1] = Smi::FromInt(len - elements_to_trim); |
| 3144 FixedArrayBase* new_object = | 3151 FixedArrayBase* new_object = |
| 3145 FixedArrayBase::cast(HeapObject::FromAddress(new_start)); | 3152 FixedArrayBase::cast(HeapObject::FromAddress(new_start)); |
| 3146 | 3153 |
| 3147 // Maintain consistency of live bytes during incremental marking | 3154 // Maintain consistency of live bytes during incremental marking |
| 3148 Marking::TransferMark(this, object->address(), new_start); | 3155 Marking::TransferMark(this, object->address(), new_start); |
| 3149 if (mark_compact_collector()->sweeping_in_progress()) { | |
| 3150 // Array trimming during sweeping can add invalid slots in free list. | |
| 3151 ClearRecordedSlotRange(object, former_start, | |
| 3152 HeapObject::RawField(new_object, 0)); | |
| 3153 } | |
| 3154 AdjustLiveBytes(new_object, -bytes_to_trim, Heap::CONCURRENT_TO_SWEEPER); | 3156 AdjustLiveBytes(new_object, -bytes_to_trim, Heap::CONCURRENT_TO_SWEEPER); |
| 3155 | 3157 |
| 3156 // Notify the heap profiler of change in object layout. | 3158 // Notify the heap profiler of change in object layout. |
| 3157 OnMoveEvent(new_object, object, new_object->Size()); | 3159 OnMoveEvent(new_object, object, new_object->Size()); |
| 3158 return new_object; | 3160 return new_object; |
| 3159 } | 3161 } |
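[Annotation, not part of the diff] Before this CL, LeftTrimFixedArray cleared recorded slots only while concurrent sweeping was in progress (the removed old lines 3149-3153). Reading the new code together with CreateFillerObjectAt above, the kYes mode makes the clear unconditional for non-new-space pages, removing the dependence on sweeper state:

    // Old, sweeping-gated form (removed lines, copied from the diff):
    //   if (mark_compact_collector()->sweeping_in_progress()) {
    //     ClearRecordedSlotRange(object, former_start,
    //                            HeapObject::RawField(new_object, 0));
    //   }
    // New form: the slot clear rides along with the filler write.
    CreateFillerObjectAt(object->address(), bytes_to_trim,
                         ClearRecordedSlots::kYes);

RightTrimFixedArray below receives the same transformation (removed old lines 3214-3218).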
| 3160 | 3162 |
| 3161 | 3163 |
| 3162 // Force instantiation of templatized method. | 3164 // Force instantiation of templatized method. |
| 3163 template void Heap::RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>( | 3165 template void Heap::RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>( |
| (...skipping 39 matching lines...) |
| 3203 Address old_end = object->address() + object->Size(); | 3205 Address old_end = object->address() + object->Size(); |
| 3204 Address new_end = old_end - bytes_to_trim; | 3206 Address new_end = old_end - bytes_to_trim; |
| 3205 | 3207 |
| 3206 // Technically in new space this write might be omitted (except for | 3208 // Technically in new space this write might be omitted (except for |
| 3207 // debug mode which iterates through the heap), but to play safer | 3209 // debug mode which iterates through the heap), but to play safer |
| 3208 // we still do it. | 3210 // we still do it. |
| 3209 // We do not create a filler for objects in large object space. | 3211 // We do not create a filler for objects in large object space. |
| 3210 // TODO(hpayer): We should shrink the large object page if the size | 3212 // TODO(hpayer): We should shrink the large object page if the size |
| 3211 // of the object changed significantly. | 3213 // of the object changed significantly. |
| 3212 if (!lo_space()->Contains(object)) { | 3214 if (!lo_space()->Contains(object)) { |
| 3213 CreateFillerObjectAt(new_end, bytes_to_trim); | 3215 CreateFillerObjectAt(new_end, bytes_to_trim, ClearRecordedSlots::kYes); |
| 3214 if (mark_compact_collector()->sweeping_in_progress()) { | |
| 3215 // Array trimming during sweeping can add invalid slots in free list. | |
| 3216 ClearRecordedSlotRange(object, reinterpret_cast<Object**>(new_end), | |
| 3217 reinterpret_cast<Object**>(old_end)); | |
| 3218 } | |
| 3219 } | 3216 } |
| 3220 | 3217 |
| 3221 // Initialize header of the trimmed array. We are storing the new length | 3218 // Initialize header of the trimmed array. We are storing the new length |
| 3222 // using release store after creating a filler for the left-over space to | 3219 // using release store after creating a filler for the left-over space to |
| 3223 // avoid races with the sweeper thread. | 3220 // avoid races with the sweeper thread. |
| 3224 object->synchronized_set_length(len - elements_to_trim); | 3221 object->synchronized_set_length(len - elements_to_trim); |
| 3225 | 3222 |
| 3226 // Maintain consistency of live bytes during incremental marking | 3223 // Maintain consistency of live bytes during incremental marking |
| 3227 AdjustLiveBytes(object, -bytes_to_trim, mode); | 3224 AdjustLiveBytes(object, -bytes_to_trim, mode); |
| 3228 | 3225 |
| (...skipping 83 matching lines...) |
| 3312 | 3309 |
| 3313 if (immovable) { | 3310 if (immovable) { |
| 3314 Address address = result->address(); | 3311 Address address = result->address(); |
| 3315 // Code objects which should stay at a fixed address are allocated either | 3312 // Code objects which should stay at a fixed address are allocated either |
| 3316 // in the first page of code space (objects on the first page of each space | 3313 // in the first page of code space (objects on the first page of each space |
| 3317 // are never moved) or in large object space. | 3314 // are never moved) or in large object space. |
| 3318 if (!code_space_->FirstPage()->Contains(address) && | 3315 if (!code_space_->FirstPage()->Contains(address) && |
| 3319 MemoryChunk::FromAddress(address)->owner()->identity() != LO_SPACE) { | 3316 MemoryChunk::FromAddress(address)->owner()->identity() != LO_SPACE) { |
| 3320 // Discard the first code allocation, which was on a page where it could | 3317 // Discard the first code allocation, which was on a page where it could |
| 3321 // be moved. | 3318 // be moved. |
| 3322 CreateFillerObjectAt(result->address(), object_size); | 3319 CreateFillerObjectAt(result->address(), object_size, |
| 3320 ClearRecordedSlots::kNo); |
| 3323 allocation = lo_space_->AllocateRaw(object_size, EXECUTABLE); | 3321 allocation = lo_space_->AllocateRaw(object_size, EXECUTABLE); |
| 3324 if (!allocation.To(&result)) return allocation; | 3322 if (!allocation.To(&result)) return allocation; |
| 3325 OnAllocationEvent(result, object_size); | 3323 OnAllocationEvent(result, object_size); |
| 3326 } | 3324 } |
| 3327 } | 3325 } |
| 3328 | 3326 |
| 3329 result->set_map_no_write_barrier(code_map()); | 3327 result->set_map_no_write_barrier(code_map()); |
| 3330 Code* code = Code::cast(result); | 3328 Code* code = Code::cast(result); |
| 3331 DCHECK(IsAligned(bit_cast<intptr_t>(code->address()), kCodeAlignment)); | 3329 DCHECK(IsAligned(bit_cast<intptr_t>(code->address()), kCodeAlignment)); |
| 3332 DCHECK(isolate_->code_range() == NULL || !isolate_->code_range()->valid() || | 3330 DCHECK(isolate_->code_range() == NULL || !isolate_->code_range()->valid() || |
| (...skipping 2241 matching lines...) |
| 5574 void Heap::ClearRecordedSlot(HeapObject* object, Object** slot) { | 5572 void Heap::ClearRecordedSlot(HeapObject* object, Object** slot) { |
| 5575 if (!InNewSpace(object)) { | 5573 if (!InNewSpace(object)) { |
| 5576 store_buffer()->MoveEntriesToRememberedSet(); | 5574 store_buffer()->MoveEntriesToRememberedSet(); |
| 5577 Address slot_addr = reinterpret_cast<Address>(slot); | 5575 Address slot_addr = reinterpret_cast<Address>(slot); |
| 5578 Page* page = Page::FromAddress(slot_addr); | 5576 Page* page = Page::FromAddress(slot_addr); |
| 5579 DCHECK_EQ(page->owner()->identity(), OLD_SPACE); | 5577 DCHECK_EQ(page->owner()->identity(), OLD_SPACE); |
| 5580 RememberedSet<OLD_TO_NEW>::Remove(page, slot_addr); | 5578 RememberedSet<OLD_TO_NEW>::Remove(page, slot_addr); |
| 5581 } | 5579 } |
| 5582 } | 5580 } |
| 5583 | 5581 |
| 5584 void Heap::ClearRecordedSlotRange(HeapObject* object, Object** start, | 5582 void Heap::ClearRecordedSlotRange(Address start, Address end) { |
| 5585 Object** end) { | 5583 Page* page = Page::FromAddress(start); |
| 5586 if (!InNewSpace(object)) { | 5584 if (!page->InNewSpace()) { |
| 5587 store_buffer()->MoveEntriesToRememberedSet(); | 5585 store_buffer()->MoveEntriesToRememberedSet(); |
| 5588 Address start_addr = reinterpret_cast<Address>(start); | |
| 5589 Address end_addr = reinterpret_cast<Address>(end); | |
| 5590 Page* page = Page::FromAddress(start_addr); | |
| 5591 DCHECK_EQ(page->owner()->identity(), OLD_SPACE); | 5586 DCHECK_EQ(page->owner()->identity(), OLD_SPACE); |
| 5592 RememberedSet<OLD_TO_NEW>::RemoveRange(page, start_addr, end_addr); | 5587 RememberedSet<OLD_TO_NEW>::RemoveRange(page, start, end); |
| 5593 } | 5588 } |
| 5594 } | 5589 } |
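[Annotation, not part of the diff] ClearRecordedSlotRange is reworked to take plain start/end addresses, so CreateFillerObjectAt can call it without a HeapObject* anchor or Object** casts; the in-new-space check correspondingly moves from the object to the page containing start. Call-site shape after the change, as used in the filler path above:

    // From the CreateFillerObjectAt hunk (new line 3065): a half-open byte
    // range [addr, addr + size), not slot pointers.
    ClearRecordedSlotRange(addr, addr + size);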
| 5595 | 5590 |
| 5596 Space* AllSpaces::next() { | 5591 Space* AllSpaces::next() { |
| 5597 switch (counter_++) { | 5592 switch (counter_++) { |
| 5598 case NEW_SPACE: | 5593 case NEW_SPACE: |
| 5599 return heap_->new_space(); | 5594 return heap_->new_space(); |
| 5600 case OLD_SPACE: | 5595 case OLD_SPACE: |
| 5601 return heap_->old_space(); | 5596 return heap_->old_space(); |
| 5602 case CODE_SPACE: | 5597 case CODE_SPACE: |
| (...skipping 722 matching lines...) |
| 6325 } | 6320 } |
| 6326 | 6321 |
| 6327 | 6322 |
| 6328 // static | 6323 // static |
| 6329 int Heap::GetStaticVisitorIdForMap(Map* map) { | 6324 int Heap::GetStaticVisitorIdForMap(Map* map) { |
| 6330 return StaticVisitorBase::GetVisitorId(map); | 6325 return StaticVisitorBase::GetVisitorId(map); |
| 6331 } | 6326 } |
| 6332 | 6327 |
| 6333 } // namespace internal | 6328 } // namespace internal |
| 6334 } // namespace v8 | 6329 } // namespace v8 |