| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/heap/heap.h" | 5 #include "src/heap/heap.h" |
| 6 | 6 |
| 7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
| 8 #include "src/api.h" | 8 #include "src/api.h" |
| 9 #include "src/ast/scopeinfo.h" | 9 #include "src/ast/scopeinfo.h" |
| 10 #include "src/base/bits.h" | 10 #include "src/base/bits.h" |
| (...skipping 927 matching lines...) |
| 938 // the new space, then there may be uninitialized memory behind the top | 938 // the new space, then there may be uninitialized memory behind the top |
| 939 // pointer of the new space page. We store a filler object there to | 939 // pointer of the new space page. We store a filler object there to |
| 940 // identify the unused space. | 940 // identify the unused space. |
| 941 Address from_top = new_space_.top(); | 941 Address from_top = new_space_.top(); |
| 942 // Check that from_top is inside its page (i.e., not at the end). | 942 // Check that from_top is inside its page (i.e., not at the end). |
| 943 Address space_end = new_space_.ToSpaceEnd(); | 943 Address space_end = new_space_.ToSpaceEnd(); |
| 944 if (from_top < space_end) { | 944 if (from_top < space_end) { |
| 945 Page* page = Page::FromAddress(from_top); | 945 Page* page = Page::FromAddress(from_top); |
| 946 if (page->Contains(from_top)) { | 946 if (page->Contains(from_top)) { |
| 947 int remaining_in_page = static_cast<int>(page->area_end() - from_top); | 947 int remaining_in_page = static_cast<int>(page->area_end() - from_top); |
| 948 CreateFillerObjectAt(from_top, remaining_in_page); | 948 CreateFillerObjectAt(from_top, remaining_in_page, kNoRecordedSlots); |
| 949 } | 949 } |
| 950 } | 950 } |
| 951 } | 951 } |
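The hunk above keeps the to-space page iterable by stamping a filler over the unused tail behind the allocation top. A minimal sketch of the same size computation, with all addresses hypothetical:

```cpp
#include <cassert>
#include <cstdint>

int main() {
  const uintptr_t area_end = 0x20000;  // stands in for page->area_end()
  const uintptr_t from_top = 0x1ffe0;  // stands in for new_space_.top()

  // Same computation as in the hunk above: the uninitialized tail of the
  // page, which the filler object must cover so that heap iteration never
  // reads garbage as an object header.
  int remaining_in_page = static_cast<int>(area_end - from_top);
  assert(remaining_in_page == 0x20);

  // Nothing was ever initialized above top, so no recorded slots can exist
  // there; that is why this call site passes kNoRecordedSlots.
  return 0;
}
```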
| 952 | 952 |
| 953 | 953 |
| 954 bool Heap::CollectGarbage(GarbageCollector collector, const char* gc_reason, | 954 bool Heap::CollectGarbage(GarbageCollector collector, const char* gc_reason, |
| 955 const char* collector_reason, | 955 const char* collector_reason, |
| 956 const v8::GCCallbackFlags gc_callback_flags) { | 956 const v8::GCCallbackFlags gc_callback_flags) { |
| 957 // The VM is in the GC state until exiting this function. | 957 // The VM is in the GC state until exiting this function. |
| 958 VMState<GC> state(isolate_); | 958 VMState<GC> state(isolate_); |
| (...skipping 200 matching lines...) |
| 1159 if (space == NEW_SPACE) { | 1159 if (space == NEW_SPACE) { |
| 1160 allocation = new_space()->AllocateRawUnaligned(size); | 1160 allocation = new_space()->AllocateRawUnaligned(size); |
| 1161 } else { | 1161 } else { |
| 1162 allocation = paged_space(space)->AllocateRawUnaligned(size); | 1162 allocation = paged_space(space)->AllocateRawUnaligned(size); |
| 1163 } | 1163 } |
| 1164 HeapObject* free_space = nullptr; | 1164 HeapObject* free_space = nullptr; |
| 1165 if (allocation.To(&free_space)) { | 1165 if (allocation.To(&free_space)) { |
| 1166 // Mark with a free list node, in case we have a GC before | 1166 // Mark with a free list node, in case we have a GC before |
| 1167 // deserializing. | 1167 // deserializing. |
| 1168 Address free_space_address = free_space->address(); | 1168 Address free_space_address = free_space->address(); |
| 1169 CreateFillerObjectAt(free_space_address, size); | 1169 CreateFillerObjectAt(free_space_address, size, kNoRecordedSlots); |
| 1170 DCHECK(space < Serializer::kNumberOfPreallocatedSpaces); | 1170 DCHECK(space < Serializer::kNumberOfPreallocatedSpaces); |
| 1171 chunk.start = free_space_address; | 1171 chunk.start = free_space_address; |
| 1172 chunk.end = free_space_address + size; | 1172 chunk.end = free_space_address + size; |
| 1173 } else { | 1173 } else { |
| 1174 perform_gc = true; | 1174 perform_gc = true; |
| 1175 break; | 1175 break; |
| 1176 } | 1176 } |
| 1177 } | 1177 } |
| 1178 } | 1178 } |
| 1179 if (perform_gc) { | 1179 if (perform_gc) { |
| (...skipping 812 matching lines...) |
| 1992 return kDoubleSize - kPointerSize; // No fill if double is always aligned. | 1992 return kDoubleSize - kPointerSize; // No fill if double is always aligned. |
| 1993 if (alignment == kSimd128Unaligned) { | 1993 if (alignment == kSimd128Unaligned) { |
| 1994 return (kSimd128Size - (static_cast<int>(offset) + kPointerSize)) & | 1994 return (kSimd128Size - (static_cast<int>(offset) + kPointerSize)) & |
| 1995 kSimd128AlignmentMask; | 1995 kSimd128AlignmentMask; |
| 1996 } | 1996 } |
| 1997 return 0; | 1997 return 0; |
| 1998 } | 1998 } |
| 1999 | 1999 |
| 2000 | 2000 |
| 2001 HeapObject* Heap::PrecedeWithFiller(HeapObject* object, int filler_size) { | 2001 HeapObject* Heap::PrecedeWithFiller(HeapObject* object, int filler_size) { |
| 2002 CreateFillerObjectAt(object->address(), filler_size); | 2002 CreateFillerObjectAt(object->address(), filler_size, kNoRecordedSlots); |
| 2003 return HeapObject::FromAddress(object->address() + filler_size); | 2003 return HeapObject::FromAddress(object->address() + filler_size); |
| 2004 } | 2004 } |
| 2005 | 2005 |
| 2006 | 2006 |
| 2007 HeapObject* Heap::AlignWithFiller(HeapObject* object, int object_size, | 2007 HeapObject* Heap::AlignWithFiller(HeapObject* object, int object_size, |
| 2008 int allocation_size, | 2008 int allocation_size, |
| 2009 AllocationAlignment alignment) { | 2009 AllocationAlignment alignment) { |
| 2010 int filler_size = allocation_size - object_size; | 2010 int filler_size = allocation_size - object_size; |
| 2011 DCHECK(filler_size > 0); | 2011 DCHECK(filler_size > 0); |
| 2012 int pre_filler = GetFillToAlign(object->address(), alignment); | 2012 int pre_filler = GetFillToAlign(object->address(), alignment); |
| 2013 if (pre_filler) { | 2013 if (pre_filler) { |
| 2014 object = PrecedeWithFiller(object, pre_filler); | 2014 object = PrecedeWithFiller(object, pre_filler); |
| 2015 filler_size -= pre_filler; | 2015 filler_size -= pre_filler; |
| 2016 } | 2016 } |
| 2017 if (filler_size) | 2017 if (filler_size) |
| 2018 CreateFillerObjectAt(object->address() + object_size, filler_size); | 2018 CreateFillerObjectAt(object->address() + object_size, filler_size, |
| 2019 kNoRecordedSlots); |
| 2019 return object; | 2020 return object; |
| 2020 } | 2021 } |
| 2021 | 2022 |
| 2022 | 2023 |
| 2023 HeapObject* Heap::DoubleAlignForDeserialization(HeapObject* object, int size) { | 2024 HeapObject* Heap::DoubleAlignForDeserialization(HeapObject* object, int size) { |
| 2024 return AlignWithFiller(object, size - kPointerSize, size, kDoubleAligned); | 2025 return AlignWithFiller(object, size - kPointerSize, size, kDoubleAligned); |
| 2025 } | 2026 } |
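The kSimd128Unaligned branch a few lines up computes how much pre-fill makes the field after the one-word map land on a 16-byte boundary. A worked check of that mask arithmetic, assuming a 64-bit target where kPointerSize is 8 and kSimd128Size is 16:

```cpp
#include <cassert>
#include <cstdint>

int main() {
  const uintptr_t kPointerSize = 8;  // assumed 64-bit layout
  const uintptr_t kSimd128Size = 16;
  const uintptr_t kSimd128AlignmentMask = kSimd128Size - 1;

  // Hypothetical allocation offset: offset + kPointerSize is 8 mod 16,
  // so 8 bytes of pre-fill are required.
  uintptr_t offset = 0x1000;
  int fill = static_cast<int>(
      (kSimd128Size - (offset + kPointerSize)) & kSimd128AlignmentMask);
  assert(fill == 8);

  // After the fill, the payload following the map word is 16-byte aligned.
  assert((offset + fill + kPointerSize) % kSimd128Size == 0);
  return 0;
}
```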
| 2026 | 2027 |
| 2027 | 2028 |
| 2028 void Heap::RegisterNewArrayBuffer(JSArrayBuffer* buffer) { | 2029 void Heap::RegisterNewArrayBuffer(JSArrayBuffer* buffer) { |
| (...skipping 97 matching lines...) |
| 2126 HeapObject* obj = nullptr; | 2127 HeapObject* obj = nullptr; |
| 2127 { | 2128 { |
| 2128 AllocationAlignment align = double_align ? kDoubleAligned : kWordAligned; | 2129 AllocationAlignment align = double_align ? kDoubleAligned : kWordAligned; |
| 2129 AllocationResult allocation = AllocateRaw(size, space, align); | 2130 AllocationResult allocation = AllocateRaw(size, space, align); |
| 2130 if (!allocation.To(&obj)) return allocation; | 2131 if (!allocation.To(&obj)) return allocation; |
| 2131 } | 2132 } |
| 2132 #ifdef DEBUG | 2133 #ifdef DEBUG |
| 2133 MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); | 2134 MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); |
| 2134 DCHECK(chunk->owner()->identity() == space); | 2135 DCHECK(chunk->owner()->identity() == space); |
| 2135 #endif | 2136 #endif |
| 2136 CreateFillerObjectAt(obj->address(), size); | 2137 CreateFillerObjectAt(obj->address(), size, kNoRecordedSlots); |
| 2137 return obj; | 2138 return obj; |
| 2138 } | 2139 } |
| 2139 | 2140 |
| 2140 | 2141 |
| 2141 const Heap::StringTypeTable Heap::string_type_table[] = { | 2142 const Heap::StringTypeTable Heap::string_type_table[] = { |
| 2142 #define STRING_TYPE_ELEMENT(type, size, name, camel_name) \ | 2143 #define STRING_TYPE_ELEMENT(type, size, name, camel_name) \ |
| 2143 { type, size, k##camel_name##MapRootIndex } \ | 2144 { type, size, k##camel_name##MapRootIndex } \ |
| 2144 , | 2145 , |
| 2145 STRING_TYPE_LIST(STRING_TYPE_ELEMENT) | 2146 STRING_TYPE_LIST(STRING_TYPE_ELEMENT) |
| 2146 #undef STRING_TYPE_ELEMENT | 2147 #undef STRING_TYPE_ELEMENT |
| (...skipping 888 matching lines...) |
| 3035 instance->set_parameter_count(parameter_count); | 3036 instance->set_parameter_count(parameter_count); |
| 3036 instance->set_interrupt_budget(interpreter::Interpreter::InterruptBudget()); | 3037 instance->set_interrupt_budget(interpreter::Interpreter::InterruptBudget()); |
| 3037 instance->set_constant_pool(constant_pool); | 3038 instance->set_constant_pool(constant_pool); |
| 3038 instance->set_handler_table(empty_fixed_array()); | 3039 instance->set_handler_table(empty_fixed_array()); |
| 3039 instance->set_source_position_table(empty_byte_array()); | 3040 instance->set_source_position_table(empty_byte_array()); |
| 3040 CopyBytes(instance->GetFirstBytecodeAddress(), raw_bytecodes, length); | 3041 CopyBytes(instance->GetFirstBytecodeAddress(), raw_bytecodes, length); |
| 3041 | 3042 |
| 3042 return result; | 3043 return result; |
| 3043 } | 3044 } |
| 3044 | 3045 |
| 3045 | 3046 void Heap::CreateFillerObjectAt(Address addr, int size, |
| 3046 void Heap::CreateFillerObjectAt(Address addr, int size) { | 3047 RecordedSlotsMode mode) { |
| 3047 if (size == 0) return; | 3048 if (size == 0) return; |
| 3048 HeapObject* filler = HeapObject::FromAddress(addr); | 3049 HeapObject* filler = HeapObject::FromAddress(addr); |
| 3049 if (size == kPointerSize) { | 3050 if (size == kPointerSize) { |
| 3050 filler->set_map_no_write_barrier( | 3051 filler->set_map_no_write_barrier( |
| 3051 reinterpret_cast<Map*>(root(kOnePointerFillerMapRootIndex))); | 3052 reinterpret_cast<Map*>(root(kOnePointerFillerMapRootIndex))); |
| 3052 } else if (size == 2 * kPointerSize) { | 3053 } else if (size == 2 * kPointerSize) { |
| 3053 filler->set_map_no_write_barrier( | 3054 filler->set_map_no_write_barrier( |
| 3054 reinterpret_cast<Map*>(root(kTwoPointerFillerMapRootIndex))); | 3055 reinterpret_cast<Map*>(root(kTwoPointerFillerMapRootIndex))); |
| 3055 } else { | 3056 } else { |
| 3056 DCHECK_GT(size, 2 * kPointerSize); | 3057 DCHECK_GT(size, 2 * kPointerSize); |
| 3057 filler->set_map_no_write_barrier( | 3058 filler->set_map_no_write_barrier( |
| 3058 reinterpret_cast<Map*>(root(kFreeSpaceMapRootIndex))); | 3059 reinterpret_cast<Map*>(root(kFreeSpaceMapRootIndex))); |
| 3059 FreeSpace::cast(filler)->nobarrier_set_size(size); | 3060 FreeSpace::cast(filler)->nobarrier_set_size(size); |
| 3060 } | 3061 } |
| 3062 if (mode == kClearRecordedSlots) { |
| 3063 ClearRecordedSlotRange(addr, addr + size); |
| 3064 } |
| 3061 // At this point, we may be deserializing the heap from a snapshot, and | 3065 // At this point, we may be deserializing the heap from a snapshot, and |
| 3062 // none of the maps have been created yet, so they are NULL. | 3066 // none of the maps have been created yet, so they are NULL. |
| 3063 DCHECK((filler->map() == NULL && !deserialization_complete_) || | 3067 DCHECK((filler->map() == NULL && !deserialization_complete_) || |
| 3064 filler->map()->IsMap()); | 3068 filler->map()->IsMap()); |
| 3065 } | 3069 } |
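This is the core of the patch: CreateFillerObjectAt now takes a RecordedSlotsMode and clears the remembered set itself when asked, instead of relying on callers to do it. A hedged caller-side sketch; ShrinkTail is a hypothetical helper, and it assumes the enumerators are members of Heap, as the new signature suggests:

```cpp
// Fragment against V8's internal headers; illustration only.
#include "src/heap/heap.h"

namespace v8 {
namespace internal {

// Freeing the tail of a live old-space object: the freed bytes may still
// have OLD_TO_NEW slots recorded for them, so the filler must also clear
// that range from the remembered set. Passing kNoRecordedSlots here would
// leave the scavenger visiting stale slots inside filler memory.
void ShrinkTail(Heap* heap, HeapObject* object, int new_size) {
  Address free_start = object->address() + new_size;
  int free_size = object->Size() - new_size;
  heap->CreateFillerObjectAt(free_start, free_size, Heap::kClearRecordedSlots);
}

}  // namespace internal
}  // namespace v8
```

Conversely, memory that never held an initialized object (a discarded fresh allocation, or the space above new-space top) cannot have recorded slots, which is why those call sites pass kNoRecordedSlots and skip the remembered-set walk.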
| 3066 | 3070 |
| 3067 | 3071 |
| 3068 bool Heap::CanMoveObjectStart(HeapObject* object) { | 3072 bool Heap::CanMoveObjectStart(HeapObject* object) { |
| 3069 if (!FLAG_move_object_start) return false; | 3073 if (!FLAG_move_object_start) return false; |
| 3070 | 3074 |
| (...skipping 53 matching lines...) |
| 3124 | 3128 |
| 3125 const int len = object->length(); | 3129 const int len = object->length(); |
| 3126 DCHECK(elements_to_trim <= len); | 3130 DCHECK(elements_to_trim <= len); |
| 3127 | 3131 |
| 3128 // Calculate location of new array start. | 3132 // Calculate location of new array start. |
| 3129 Address new_start = object->address() + bytes_to_trim; | 3133 Address new_start = object->address() + bytes_to_trim; |
| 3130 | 3134 |
| 3131 // Technically in new space this write might be omitted (except for | 3135 // Technically in new space this write might be omitted (except for |
| 3132 // debug mode which iterates through the heap), but to play it safe | 3136 // debug mode which iterates through the heap), but to play it safe |
| 3133 // we still do it. | 3137 // we still do it. |
| 3134 CreateFillerObjectAt(object->address(), bytes_to_trim); | 3138 CreateFillerObjectAt(object->address(), bytes_to_trim, kClearRecordedSlots); |
| 3135 | 3139 |
| 3136 // Initialize header of the trimmed array. Since left trimming is only | 3140 // Initialize header of the trimmed array. Since left trimming is only |
| 3137 // performed on pages which are not concurrently swept, creating a filler | 3141 // performed on pages which are not concurrently swept, creating a filler |
| 3138 // object does not require synchronization. | 3142 // object does not require synchronization. |
| 3139 DCHECK(CanMoveObjectStart(object)); | 3143 DCHECK(CanMoveObjectStart(object)); |
| 3140 Object** former_start = HeapObject::RawField(object, 0); | 3144 Object** former_start = HeapObject::RawField(object, 0); |
| 3141 int new_start_index = elements_to_trim * (element_size / kPointerSize); | 3145 int new_start_index = elements_to_trim * (element_size / kPointerSize); |
| 3142 former_start[new_start_index] = map; | 3146 former_start[new_start_index] = map; |
| 3143 former_start[new_start_index + 1] = Smi::FromInt(len - elements_to_trim); | 3147 former_start[new_start_index + 1] = Smi::FromInt(len - elements_to_trim); |
| 3144 FixedArrayBase* new_object = | 3148 FixedArrayBase* new_object = |
| 3145 FixedArrayBase::cast(HeapObject::FromAddress(new_start)); | 3149 FixedArrayBase::cast(HeapObject::FromAddress(new_start)); |
| 3146 | 3150 |
| 3147 // Maintain consistency of live bytes during incremental marking | 3151 // Maintain consistency of live bytes during incremental marking |
| 3148 Marking::TransferMark(this, object->address(), new_start); | 3152 Marking::TransferMark(this, object->address(), new_start); |
| 3149 if (mark_compact_collector()->sweeping_in_progress()) { | |
| 3150 // Array trimming during sweeping can add invalid slots in free list. | |
| 3151 ClearRecordedSlotRange(object, former_start, | |
| 3152 HeapObject::RawField(new_object, 0)); | |
| 3153 } | |
| 3154 AdjustLiveBytes(new_object, -bytes_to_trim, Heap::CONCURRENT_TO_SWEEPER); | 3153 AdjustLiveBytes(new_object, -bytes_to_trim, Heap::CONCURRENT_TO_SWEEPER); |
| 3155 | 3154 |
| 3156 // Notify the heap profiler of change in object layout. | 3155 // Notify the heap profiler of change in object layout. |
| 3157 OnMoveEvent(new_object, object, new_object->Size()); | 3156 OnMoveEvent(new_object, object, new_object->Size()); |
| 3158 return new_object; | 3157 return new_object; |
| 3159 } | 3158 } |
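Two things are easy to misread in this hunk. First, the OLD column's explicit ClearRecordedSlotRange call is gone because the filler written with kClearRecordedSlots now clears the range unconditionally, not only while sweeping is in progress. Second, the index arithmetic; a worked example under an assumed 64-bit layout with pointer-sized elements:

```cpp
#include <cassert>
#include <cstdint>

int main() {
  const int kPointerSize = 8;             // assumed 64-bit target
  const int element_size = kPointerSize;  // FixedArray of tagged values
  const int elements_to_trim = 2;
  const uintptr_t old_start = 0x10000;    // hypothetical object address

  // Mirrors the hunk above.
  int bytes_to_trim = elements_to_trim * element_size;
  uintptr_t new_start = old_start + bytes_to_trim;
  int new_start_index = elements_to_trim * (element_size / kPointerSize);

  assert(bytes_to_trim == 16);
  assert(new_start == 0x10010);
  // former_start[new_start_index] receives the map and the next word the
  // shrunken length, i.e. exactly the first two words at new_start.
  assert(new_start_index == 2);
  return 0;
}
```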
| 3160 | 3159 |
| 3161 | 3160 |
| 3162 // Force instantiation of templatized method. | 3161 // Force instantiation of templatized method. |
| 3163 template void Heap::RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>( | 3162 template void Heap::RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>( |
| (...skipping 39 matching lines...) |
| 3203 Address old_end = object->address() + object->Size(); | 3202 Address old_end = object->address() + object->Size(); |
| 3204 Address new_end = old_end - bytes_to_trim; | 3203 Address new_end = old_end - bytes_to_trim; |
| 3205 | 3204 |
| 3206 // Technically in new space this write might be omitted (except for | 3205 // Technically in new space this write might be omitted (except for |
| 3207 // debug mode which iterates through the heap), but to play it safe | 3206 // debug mode which iterates through the heap), but to play it safe |
| 3208 // we still do it. | 3207 // we still do it. |
| 3209 // We do not create a filler for objects in large object space. | 3208 // We do not create a filler for objects in large object space. |
| 3210 // TODO(hpayer): We should shrink the large object page if the size | 3209 // TODO(hpayer): We should shrink the large object page if the size |
| 3211 // of the object changed significantly. | 3210 // of the object changed significantly. |
| 3212 if (!lo_space()->Contains(object)) { | 3211 if (!lo_space()->Contains(object)) { |
| 3213 CreateFillerObjectAt(new_end, bytes_to_trim); | 3212 CreateFillerObjectAt(new_end, bytes_to_trim, kClearRecordedSlots); |
| 3214 if (mark_compact_collector()->sweeping_in_progress()) { | |
| 3215 // Array trimming during sweeping can add invalid slots in free list. | |
| 3216 ClearRecordedSlotRange(object, reinterpret_cast<Object**>(new_end), | |
| 3217 reinterpret_cast<Object**>(old_end)); | |
| 3218 } | |
| 3219 } | 3213 } |
| 3220 | 3214 |
| 3221 // Initialize header of the trimmed array. We are storing the new length | 3215 // Initialize header of the trimmed array. We are storing the new length |
| 3222 // using release store after creating a filler for the left-over space to | 3216 // using release store after creating a filler for the left-over space to |
| 3223 // avoid races with the sweeper thread. | 3217 // avoid races with the sweeper thread. |
| 3224 object->synchronized_set_length(len - elements_to_trim); | 3218 object->synchronized_set_length(len - elements_to_trim); |
| 3225 | 3219 |
| 3226 // Maintain consistency of live bytes during incremental marking | 3220 // Maintain consistency of live bytes during incremental marking |
| 3227 AdjustLiveBytes(object, -bytes_to_trim, mode); | 3221 AdjustLiveBytes(object, -bytes_to_trim, mode); |
| 3228 | 3222 |
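The comment about the release store deserves unpacking: the filler is written with plain stores, and must be visible before a concurrent sweeper observes the shrunken length. A toy model of that ordering, with invented names; the real synchronization lives in synchronized_set_length:

```cpp
#include <atomic>
#include <cassert>

std::atomic<int> length{8};  // stands in for the array's length field
int tail_word = 0;           // stands in for the freed tail of the array

void TrimmerThread() {
  tail_word = 0xf1;                            // write the filler first...
  length.store(6, std::memory_order_release);  // ...then publish the length
}

void SweeperThread() {
  // A sweeper that acquire-loads the new length is guaranteed to also see
  // the filler, so it never scans the freed tail as live payload.
  if (length.load(std::memory_order_acquire) == 6) {
    assert(tail_word == 0xf1);
  }
}

int main() {
  TrimmerThread();
  SweeperThread();  // in V8 these race on two threads; sequential here
  return 0;
}
```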
| (...skipping 83 matching lines...) |
| 3312 | 3306 |
| 3313 if (immovable) { | 3307 if (immovable) { |
| 3314 Address address = result->address(); | 3308 Address address = result->address(); |
| 3315 // Code objects which should stay at a fixed address are allocated either | 3309 // Code objects which should stay at a fixed address are allocated either |
| 3316 // in the first page of code space (objects on the first page of each space | 3310 // in the first page of code space (objects on the first page of each space |
| 3317 // are never moved) or in large object space. | 3311 // are never moved) or in large object space. |
| 3318 if (!code_space_->FirstPage()->Contains(address) && | 3312 if (!code_space_->FirstPage()->Contains(address) && |
| 3319 MemoryChunk::FromAddress(address)->owner()->identity() != LO_SPACE) { | 3313 MemoryChunk::FromAddress(address)->owner()->identity() != LO_SPACE) { |
| 3320 // Discard the first code allocation, which was on a page where it could | 3314 // Discard the first code allocation, which was on a page where it could |
| 3321 // be moved. | 3315 // be moved. |
| 3322 CreateFillerObjectAt(result->address(), object_size); | 3316 CreateFillerObjectAt(result->address(), object_size, kNoRecordedSlots); |
| 3323 allocation = lo_space_->AllocateRaw(object_size, EXECUTABLE); | 3317 allocation = lo_space_->AllocateRaw(object_size, EXECUTABLE); |
| 3324 if (!allocation.To(&result)) return allocation; | 3318 if (!allocation.To(&result)) return allocation; |
| 3325 OnAllocationEvent(result, object_size); | 3319 OnAllocationEvent(result, object_size); |
| 3326 } | 3320 } |
| 3327 } | 3321 } |
| 3328 | 3322 |
| 3329 result->set_map_no_write_barrier(code_map()); | 3323 result->set_map_no_write_barrier(code_map()); |
| 3330 Code* code = Code::cast(result); | 3324 Code* code = Code::cast(result); |
| 3331 DCHECK(IsAligned(bit_cast<intptr_t>(code->address()), kCodeAlignment)); | 3325 DCHECK(IsAligned(bit_cast<intptr_t>(code->address()), kCodeAlignment)); |
| 3332 DCHECK(isolate_->code_range() == NULL || !isolate_->code_range()->valid() || | 3326 DCHECK(isolate_->code_range() == NULL || !isolate_->code_range()->valid() || |
| (...skipping 2241 matching lines...) |
| 5574 void Heap::ClearRecordedSlot(HeapObject* object, Object** slot) { | 5568 void Heap::ClearRecordedSlot(HeapObject* object, Object** slot) { |
| 5575 if (!InNewSpace(object)) { | 5569 if (!InNewSpace(object)) { |
| 5576 store_buffer()->MoveEntriesToRememberedSet(); | 5570 store_buffer()->MoveEntriesToRememberedSet(); |
| 5577 Address slot_addr = reinterpret_cast<Address>(slot); | 5571 Address slot_addr = reinterpret_cast<Address>(slot); |
| 5578 Page* page = Page::FromAddress(slot_addr); | 5572 Page* page = Page::FromAddress(slot_addr); |
| 5579 DCHECK_EQ(page->owner()->identity(), OLD_SPACE); | 5573 DCHECK_EQ(page->owner()->identity(), OLD_SPACE); |
| 5580 RememberedSet<OLD_TO_NEW>::Remove(page, slot_addr); | 5574 RememberedSet<OLD_TO_NEW>::Remove(page, slot_addr); |
| 5581 } | 5575 } |
| 5582 } | 5576 } |
| 5583 | 5577 |
| 5584 void Heap::ClearRecordedSlotRange(HeapObject* object, Object** start, | 5578 void Heap::ClearRecordedSlotRange(Address start, Address end) { |
| 5585 Object** end) { | 5579 Page* page = Page::FromAddress(start); |
| 5586 if (!InNewSpace(object)) { | 5580 if (!page->InNewSpace()) { |
| 5587 store_buffer()->MoveEntriesToRememberedSet(); | 5581 store_buffer()->MoveEntriesToRememberedSet(); |
| 5588 Address start_addr = reinterpret_cast<Address>(start); | |
| 5589 Address end_addr = reinterpret_cast<Address>(end); | |
| 5590 Page* page = Page::FromAddress(start_addr); | |
| 5591 DCHECK_EQ(page->owner()->identity(), OLD_SPACE); | 5582 DCHECK_EQ(page->owner()->identity(), OLD_SPACE); |
| 5592 RememberedSet<OLD_TO_NEW>::RemoveRange(page, start_addr, end_addr); | 5583 RememberedSet<OLD_TO_NEW>::RemoveRange(page, start, end); |
| 5593 } | 5584 } |
| 5594 } | 5585 } |
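ClearRecordedSlotRange now takes a bare address range, which is what lets CreateFillerObjectAt call it without knowing anything about the object being freed. A toy model of the underlying operation; ToyRememberedSet is invented and only mirrors the semantics of RememberedSet<OLD_TO_NEW>::RemoveRange:

```cpp
#include <cassert>
#include <cstdint>
#include <set>

using Address = uintptr_t;

// Toy stand-in: a remembered set records old-to-new pointer slots by
// address. Turning a region into filler must drop every slot recorded
// inside it, or a later scavenge would scan filler memory for pointers.
struct ToyRememberedSet {
  std::set<Address> slots;

  void Insert(Address slot) { slots.insert(slot); }

  // Removes all recorded slots in [start, end).
  void RemoveRange(Address start, Address end) {
    slots.erase(slots.lower_bound(start), slots.lower_bound(end));
  }
};

int main() {
  ToyRememberedSet rs;
  rs.Insert(0x1000);
  rs.Insert(0x1008);
  rs.Insert(0x2000);
  rs.RemoveRange(0x1000, 0x1010);  // the range that became a filler
  assert(rs.slots.size() == 1 && *rs.slots.begin() == 0x2000);
  return 0;
}
```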
| 5595 | 5586 |
| 5596 Space* AllSpaces::next() { | 5587 Space* AllSpaces::next() { |
| 5597 switch (counter_++) { | 5588 switch (counter_++) { |
| 5598 case NEW_SPACE: | 5589 case NEW_SPACE: |
| 5599 return heap_->new_space(); | 5590 return heap_->new_space(); |
| 5600 case OLD_SPACE: | 5591 case OLD_SPACE: |
| 5601 return heap_->old_space(); | 5592 return heap_->old_space(); |
| 5602 case CODE_SPACE: | 5593 case CODE_SPACE: |
| (...skipping 722 matching lines...) |
| 6325 } | 6316 } |
| 6326 | 6317 |
| 6327 | 6318 |
| 6328 // static | 6319 // static |
| 6329 int Heap::GetStaticVisitorIdForMap(Map* map) { | 6320 int Heap::GetStaticVisitorIdForMap(Map* map) { |
| 6330 return StaticVisitorBase::GetVisitorId(map); | 6321 return StaticVisitorBase::GetVisitorId(map); |
| 6331 } | 6322 } |
| 6332 | 6323 |
| 6333 } // namespace internal | 6324 } // namespace internal |
| 6334 } // namespace v8 | 6325 } // namespace v8 |