Chromium Code Reviews

Unified Diff: src/heap/store-buffer.cc

Issue 1406133003: [heap] fix crash during the scavenge of ArrayBuffer (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: store-buffer: move IteratePointersToFromSpace (created 5 years, 2 months ago)
 // Copyright 2011 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/heap/store-buffer.h"

 #include <algorithm>

 #include "src/counters.h"
 #include "src/heap/incremental-marking.h"
+#include "src/heap/mark-compact-inl.h"
 #include "src/heap/store-buffer-inl.h"
 #include "src/isolate.h"
 #include "src/objects-inl.h"
 #include "src/v8.h"

 namespace v8 {
 namespace internal {

 StoreBuffer::StoreBuffer(Heap* heap)
     : heap_(heap),
(...skipping 517 matching lines...)
         }
       }
     }
     if (callback_ != NULL) {
       (*callback_)(heap_, NULL, kStoreBufferScanningPageEvent);
     }
   }
 }


+void StoreBuffer::IterateAndMarkPointersToFromSpace(
+    HeapObject* object, Address start, Address end, bool record_slots,
+    ObjectSlotCallback slot_callback) {
+  Address slot_address = start;
+
+  while (slot_address < end) {
+    Object** slot = reinterpret_cast<Object**>(slot_address);
+    Object* target = *slot;
+    // If the store buffer becomes overfull we mark pages as being exempt from
+    // the store buffer. These pages are scanned to find pointers that point
+    // to the new space. In that case we may hit newly promoted objects and
+    // fix the pointers before the promotion queue gets to them. Thus the 'if'.
+    if (target->IsHeapObject()) {
+      if (heap_->InFromSpace(target)) {
+        slot_callback(reinterpret_cast<HeapObject**>(slot),
+                      HeapObject::cast(target));
+        Object* new_target = *slot;
+        if (heap_->InNewSpace(new_target)) {
+          SLOW_DCHECK(heap_->InToSpace(new_target));
+          SLOW_DCHECK(new_target->IsHeapObject());
+          EnterDirectlyIntoStoreBuffer(reinterpret_cast<Address>(slot));
+        }
+        SLOW_DCHECK(!MarkCompactCollector::IsOnEvacuationCandidate(new_target));
+      } else if (record_slots &&
+                 MarkCompactCollector::IsOnEvacuationCandidate(target)) {
+        heap_->mark_compact_collector()->RecordSlot(object, slot, target);
+      }
+    }
+    slot_address += kPointerSize;
+  }
+}

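[Note] The loop in IterateAndMarkPointersToFromSpace is a plain word-by-word slot walk. Below is a minimal standalone sketch of the pattern, not the V8 API: the heap predicates and the remembered-set hook are passed in as parameters, since the real ones live on Heap and StoreBuffer. The key detail mirrored from the code above is that the callback may move the pointee, so the slot is read again afterwards and re-recorded if it still points into new space.

    #include <cstdint>
    #include <functional>

    using Slot = void**;

    void ScanRegion(uintptr_t start, uintptr_t end,
                    const std::function<bool(void*)>& in_from_space,
                    const std::function<bool(void*)>& in_new_space,
                    const std::function<void(Slot)>& remember_slot,
                    const std::function<void(Slot)>& slot_callback) {
      // Visit one pointer-width word at a time, as the loop above does.
      for (uintptr_t addr = start; addr < end; addr += sizeof(void*)) {
        Slot slot = reinterpret_cast<Slot>(addr);
        if (in_from_space(*slot)) {
          slot_callback(slot);  // may rewrite *slot to the object's new home
          if (in_new_space(*slot)) {
            remember_slot(slot);  // still in new space: re-record the slot
          }
        }
      }
    }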
+void StoreBuffer::IteratePointersToFromSpace(HeapObject* target, int size,
+                                             ObjectSlotCallback slot_callback) {
+  Address obj_address = target->address();
+
+  // We do not collect slots on new space objects during mutation, thus we
+  // have to scan for pointers to evacuation candidates when we promote
+  // objects. But we should not record any slots in non-black objects: a
+  // grey object's slots would be rescanned anyway, and a white object might
+  // not survive until the end of the collection, so recording its slots
+  // would violate the marking invariant.
+  bool record_slots = false;
+  if (heap_->incremental_marking()->IsCompacting()) {
+    MarkBit mark_bit = Marking::MarkBitFrom(target);
+    record_slots = Marking::IsBlack(mark_bit);
+  }
+
+  // Do not scavenge the raw contents of a JSArrayBuffer.
+  switch (target->ContentType()) {
+    case HeapObjectContents::kTaggedValues: {
+      IterateAndMarkPointersToFromSpace(target, obj_address, obj_address + size,
+                                        record_slots, slot_callback);
+      break;
+    }
+    case HeapObjectContents::kMixedValues: {
+      if (target->IsFixedTypedArrayBase()) {
+        IterateAndMarkPointersToFromSpace(
+            target, obj_address + FixedTypedArrayBase::kBasePointerOffset,
+            obj_address + FixedTypedArrayBase::kHeaderSize, record_slots,
+            slot_callback);
+      } else if (target->IsBytecodeArray()) {
+        IterateAndMarkPointersToFromSpace(
+            target, obj_address + BytecodeArray::kConstantPoolOffset,
+            obj_address + BytecodeArray::kHeaderSize, record_slots,
+            slot_callback);
+      } else if (target->IsJSArrayBuffer()) {
+        // Scan the tagged header fields up to and including the byte length,
+        // skip the raw backing store words, then scan any tagged fields that
+        // follow the fixed part of the object.
+        IterateAndMarkPointersToFromSpace(
+            target, obj_address,
+            obj_address + JSArrayBuffer::kByteLengthOffset + kPointerSize,
+            record_slots, slot_callback);
+        IterateAndMarkPointersToFromSpace(
+            target, obj_address + JSArrayBuffer::kSize, obj_address + size,
+            record_slots, slot_callback);
+#if V8_DOUBLE_FIELDS_UNBOXING
+      } else if (FLAG_unbox_double_fields) {
+        LayoutDescriptorHelper helper(target->map());
+        DCHECK(!helper.all_fields_tagged());
+
+        for (int offset = 0; offset < size;) {
+          int end_of_region_offset;
+          if (helper.IsTagged(offset, size, &end_of_region_offset)) {
+            IterateAndMarkPointersToFromSpace(
+                target, obj_address + offset,
+                obj_address + end_of_region_offset, record_slots,
+                slot_callback);
+          }
+          offset = end_of_region_offset;
+        }
+#endif
+      }
+      break;
+    }
+    case HeapObjectContents::kRawValues: {
+      break;
+    }
+  }
+}

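[Note] The JSArrayBuffer branch above is the heart of this fix: the scavenger scans the tagged header fields up to and including the byte length, and any tagged fields past the fixed part, but skips the raw backing_store pointer and bit-field words between them, so a raw pointer can no longer be misread as a heap object. A hedged sketch of the range split, with placeholder offsets (the real values are JSArrayBuffer::kByteLengthOffset and JSArrayBuffer::kSize from the V8 headers):

    #include <cstdint>

    constexpr int kPtrSize = sizeof(void*);
    constexpr int kByteLengthOffset = 3 * kPtrSize;  // placeholder, not V8's layout
    constexpr int kFixedSize = 6 * kPtrSize;         // placeholder, not V8's layout

    // Scan the two tagged regions of an array buffer, skipping the raw
    // words (backing store pointer, bit fields) that sit between them.
    template <typename ScanFn>
    void ScanArrayBuffer(uintptr_t obj, int object_size, ScanFn scan) {
      scan(obj, obj + kByteLengthOffset + kPtrSize);  // header incl. byte length
      // [kByteLengthOffset + kPtrSize, kFixedSize) holds raw data: skipped.
      scan(obj + kFixedSize, obj + object_size);      // tagged tail, if any
    }

Usage would be ScanArrayBuffer(object_address, object_size, scan_fn) with any callable taking a (from, to) address pair.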
 void StoreBuffer::Compact() {
   Address* top = reinterpret_cast<Address*>(heap_->store_buffer_top());

   if (top == start_) return;

   // There's no check of the limit in the loop below so we check here for
   // the worst case (compaction doesn't eliminate any pointers).
   DCHECK(top <= limit_);
   heap_->set_store_buffer_top(reinterpret_cast<Smi*>(start_));
   EnsureSpace(top - start_);
(...skipping 83 matching lines...)
       DCHECK(start_of_current_page_ != store_buffer_->Top());
       store_buffer_->SetTop(start_of_current_page_);
     }
   } else {
     UNREACHABLE();
   }
 }

 }  // namespace internal
 }  // namespace v8
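[Note] The comment in Compact() records an easy-to-miss ordering constraint: EnsureSpace runs before the loop because the loop itself performs no capacity check, so room must exist for the worst case in which compaction eliminates no pointer at all. A rough standalone sketch of that reserve-then-dedup pattern (container choice and names are mine, not V8's):

    #include <unordered_set>
    #include <vector>

    void CompactSketch(std::vector<void*>* out, void** start, void** top) {
      // Reserve for the worst case up front: no duplicate gets eliminated.
      out->reserve(out->size() + static_cast<size_t>(top - start));
      std::unordered_set<void*> seen;
      for (void** p = start; p < top; ++p) {
        if (seen.insert(*p).second) out->push_back(*p);  // keep first occurrence
      }
    }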