Chromium Code Reviews

Unified diff: src/heap/heap.cc

Issue 2638803002: [heap] Remove markbits getter from raw address (Closed)
Patch Set: Fix LeftTrim (created 3 years, 11 months ago)

 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/heap/heap.h"

 #include "src/accessors.h"
 #include "src/api.h"
 #include "src/ast/context-slot-cache.h"
 #include "src/base/bits.h"
(...skipping 3092 matching lines...)
   instance->set_osr_loop_nesting_level(0);
   instance->set_bytecode_age(BytecodeArray::kNoAgeBytecodeAge);
   instance->set_constant_pool(constant_pool);
   instance->set_handler_table(empty_fixed_array());
   instance->set_source_position_table(empty_byte_array());
   CopyBytes(instance->GetFirstBytecodeAddress(), raw_bytecodes, length);

   return result;
 }

-void Heap::CreateFillerObjectAt(Address addr, int size,
-                                 ClearRecordedSlots mode) {
-  if (size == 0) return;
+HeapObject* Heap::CreateFillerObjectAt(Address addr, int size,
+                                        ClearRecordedSlots mode) {
+  if (size == 0) return nullptr;
   HeapObject* filler = HeapObject::FromAddress(addr);
   if (size == kPointerSize) {
     filler->set_map_no_write_barrier(
         reinterpret_cast<Map*>(root(kOnePointerFillerMapRootIndex)));
   } else if (size == 2 * kPointerSize) {
     filler->set_map_no_write_barrier(
         reinterpret_cast<Map*>(root(kTwoPointerFillerMapRootIndex)));
   } else {
     DCHECK_GT(size, 2 * kPointerSize);
     filler->set_map_no_write_barrier(
         reinterpret_cast<Map*>(root(kFreeSpaceMapRootIndex)));
     FreeSpace::cast(filler)->nobarrier_set_size(size);
   }
   if (mode == ClearRecordedSlots::kYes) {
     ClearRecordedSlotRange(addr, addr + size);
   }

   // At this point, we may be deserializing the heap from a snapshot, and
   // none of the maps have been created yet and are NULL.
   DCHECK((filler->map() == NULL && !deserialization_complete_) ||
          filler->map()->IsMap());
+  return filler;
 }


 bool Heap::CanMoveObjectStart(HeapObject* object) {
   if (!FLAG_move_object_start) return false;

   // Sampling heap profiler may have a reference to the object.
   if (isolate()->heap_profiler()->is_sampling_allocations()) return false;

   Address address = object->address();

   if (lo_space()->Contains(object)) return false;

   // We can move the object start if the page was already swept.
   return Page::FromAddress(address)->SweepingDone();
 }

 void Heap::AdjustLiveBytes(HeapObject* object, int by) {
   // As long as the inspected object is black and we are currently not iterating
   // the heap using HeapIterator, we can update the live byte count. We cannot
   // update while using HeapIterator because the iterator is temporarily
   // marking the whole object graph, without updating live bytes.
   if (lo_space()->Contains(object)) {
     lo_space()->AdjustLiveBytes(by);
   } else if (!in_heap_iterator() &&
              !mark_compact_collector()->sweeping_in_progress() &&
-             Marking::IsBlack(ObjectMarking::MarkBitFrom(object->address()))) {
+             Marking::IsBlack(ObjectMarking::MarkBitFrom(object))) {
     DCHECK(MemoryChunk::FromAddress(object->address())->SweepingDone());
     MemoryChunk::IncrementLiveBytes(object, by);
   }
 }


 FixedArrayBase* Heap::LeftTrimFixedArray(FixedArrayBase* object,
                                          int elements_to_trim) {
   CHECK_NOT_NULL(object);
   DCHECK(!object->IsFixedTypedArrayBase());
(...skipping 15 matching lines...)
   const int len = object->length();
   DCHECK(elements_to_trim <= len);

   // Calculate location of new array start.
   Address old_start = object->address();
   Address new_start = old_start + bytes_to_trim;

   // Transfer the mark bits to their new location if the object is not within
   // a black area.
   if (!incremental_marking()->black_allocation() ||
-      !Marking::IsBlack(ObjectMarking::MarkBitFrom(new_start))) {
-    IncrementalMarking::TransferMark(this, old_start, new_start);
+      !Marking::IsBlack(
+          ObjectMarking::MarkBitFrom(HeapObject::FromAddress(new_start)))) {
+    IncrementalMarking::TransferMark(this, object,
+                                     HeapObject::FromAddress(new_start));
   }

   // Technically in new space this write might be omitted (except for
   // debug mode which iterates through the heap), but to play safer
   // we still do it.
   CreateFillerObjectAt(old_start, bytes_to_trim, ClearRecordedSlots::kYes);

   // Clear the mark bits of the black area that belongs now to the filler.
   // This is an optimization. The sweeper will release black fillers anyway.
   if (incremental_marking()->black_allocation() &&
-      Marking::IsBlackOrGrey(ObjectMarking::MarkBitFrom(old_start))) {
+      Marking::IsBlackOrGrey(ObjectMarking::MarkBitFrom(object))) {
     Page* page = Page::FromAddress(old_start);
     page->markbits()->ClearRange(
         page->AddressToMarkbitIndex(old_start),
         page->AddressToMarkbitIndex(old_start + bytes_to_trim));
   }

   // Initialize header of the trimmed array. Since left trimming is only
   // performed on pages which are not concurrently swept creating a filler
   // object does not require synchronization.
   DCHECK(CanMoveObjectStart(object));
(...skipping 54 matching lines...)
   Address old_end = object->address() + object->Size();
   Address new_end = old_end - bytes_to_trim;

   // Technically in new space this write might be omitted (except for
   // debug mode which iterates through the heap), but to play safer
   // we still do it.
   // We do not create a filler for objects in large object space.
   // TODO(hpayer): We should shrink the large object page if the size
   // of the object changed significantly.
   if (!lo_space()->Contains(object)) {
-    CreateFillerObjectAt(new_end, bytes_to_trim, ClearRecordedSlots::kYes);
+    HeapObject* filler =
+        CreateFillerObjectAt(new_end, bytes_to_trim, ClearRecordedSlots::kYes);
+    DCHECK_NOT_NULL(filler);
     // Clear the mark bits of the black area that belongs now to the filler.
     // This is an optimization. The sweeper will release black fillers anyway.
     if (incremental_marking()->black_allocation() &&
-        Marking::IsBlackOrGrey(ObjectMarking::MarkBitFrom(new_end))) {
+        Marking::IsBlackOrGrey(ObjectMarking::MarkBitFrom(filler))) {
       Page* page = Page::FromAddress(new_end);
       page->markbits()->ClearRange(
           page->AddressToMarkbitIndex(new_end),
           page->AddressToMarkbitIndex(new_end + bytes_to_trim));
     }
   }

   // Initialize header of the trimmed array. We are storing the new length
   // using release store after creating a filler for the left-over space to
   // avoid races with the sweeper thread.
(...skipping 3261 matching lines...)
 }


 // static
 int Heap::GetStaticVisitorIdForMap(Map* map) {
   return StaticVisitorBase::GetVisitorId(map);
 }

 }  // namespace internal
 }  // namespace v8
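
Note on the pattern this patch moves callers toward: the mark-bit accessor now takes a HeapObject* instead of a raw Address, so a bit can only be asked about a real object start, which is also why CreateFillerObjectAt now returns the filler it creates. The snippet below is an illustrative, self-contained sketch only, not V8 code; Page, HeapObject, kPageSize, and the one-bit-per-word layout are simplified stand-ins for the real classes.

// Illustrative sketch only, not V8 code: models a mark-bit lookup keyed on an
// object pointer rather than on an arbitrary raw address.
#include <bitset>
#include <cstddef>
#include <cstdint>
#include <iostream>

namespace sketch {

// Assumed toy constants; the real page size and bitmap layout differ.
constexpr std::uintptr_t kPageSize = std::uintptr_t{1} << 19;
constexpr std::uintptr_t kPointerSize = sizeof(void*);

struct HeapObject {
  std::uintptr_t address() const {
    return reinterpret_cast<std::uintptr_t>(this);
  }
};

// One mark bit per word of the (toy) page.
struct Page {
  std::bitset<kPageSize / kPointerSize> markbits;

  std::size_t AddressToMarkbitIndex(std::uintptr_t addr) const {
    return static_cast<std::size_t>((addr & (kPageSize - 1)) / kPointerSize);
  }

  // Typed entry points: callers hand over an object, not an address, so a
  // bit can only ever be set or queried for a real object start.
  void Mark(const HeapObject* object) {
    markbits.set(AddressToMarkbitIndex(object->address()));
  }
  bool IsMarked(const HeapObject* object) const {
    return markbits.test(AddressToMarkbitIndex(object->address()));
  }
};

}  // namespace sketch

int main() {
  sketch::Page page;
  // Stands in for the filler object that CreateFillerObjectAt now returns.
  sketch::HeapObject filler;
  page.Mark(&filler);
  std::cout << std::boolalpha << page.IsMarked(&filler) << "\n";  // true
}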