Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(244)

Side by Side Diff: src/heap/mark-compact.cc

Issue 640303006: Weak Cells (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 6 years, 2 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/v8.h" 5 #include "src/v8.h"
6 6
7 #include "src/base/atomicops.h" 7 #include "src/base/atomicops.h"
8 #include "src/base/bits.h" 8 #include "src/base/bits.h"
9 #include "src/code-stubs.h" 9 #include "src/code-stubs.h"
10 #include "src/compilation-cache.h" 10 #include "src/compilation-cache.h"
(...skipping 280 matching lines...) Expand 10 before | Expand all | Expand 10 after
291 void MarkCompactCollector::CollectGarbage() { 291 void MarkCompactCollector::CollectGarbage() {
292 // Make sure that Prepare() has been called. The individual steps below will 292 // Make sure that Prepare() has been called. The individual steps below will
293 // update the state as they proceed. 293 // update the state as they proceed.
294 DCHECK(state_ == PREPARE_GC); 294 DCHECK(state_ == PREPARE_GC);
295 295
296 MarkLiveObjects(); 296 MarkLiveObjects();
297 DCHECK(heap_->incremental_marking()->IsStopped()); 297 DCHECK(heap_->incremental_marking()->IsStopped());
298 298
299 if (FLAG_collect_maps) ClearNonLiveReferences(); 299 if (FLAG_collect_maps) ClearNonLiveReferences();
300 300
301 ProcessAndClearWeakCells();
302
301 ClearWeakCollections(); 303 ClearWeakCollections();
302 304
305 heap_->set_encountered_weak_cells(heap_->undefined_value());
306
303 #ifdef VERIFY_HEAP 307 #ifdef VERIFY_HEAP
304 if (FLAG_verify_heap) { 308 if (FLAG_verify_heap) {
305 VerifyMarking(heap_); 309 VerifyMarking(heap_);
306 } 310 }
307 #endif 311 #endif
308 312
309 SweepSpaces(); 313 SweepSpaces();
310 314
311 #ifdef VERIFY_HEAP 315 #ifdef VERIFY_HEAP
312 if (heap()->weak_embedded_objects_verification_enabled()) { 316 if (heap()->weak_embedded_objects_verification_enabled()) {
(...skipping 502 matching lines...) Expand 10 before | Expand all | Expand 10 after
815 if (sweeping_in_progress()) { 819 if (sweeping_in_progress()) {
816 // Instead of waiting we could also abort the sweeper threads here. 820 // Instead of waiting we could also abort the sweeper threads here.
817 EnsureSweepingCompleted(); 821 EnsureSweepingCompleted();
818 } 822 }
819 823
820 // Clear marking bits if incremental marking is aborted. 824 // Clear marking bits if incremental marking is aborted.
821 if (was_marked_incrementally_ && abort_incremental_marking_) { 825 if (was_marked_incrementally_ && abort_incremental_marking_) {
822 heap()->incremental_marking()->Abort(); 826 heap()->incremental_marking()->Abort();
823 ClearMarkbits(); 827 ClearMarkbits();
824 AbortWeakCollections(); 828 AbortWeakCollections();
829 AbortWeakCells();
825 AbortCompaction(); 830 AbortCompaction();
826 was_marked_incrementally_ = false; 831 was_marked_incrementally_ = false;
827 } 832 }
828 833
829 // Don't start compaction if we are in the middle of incremental 834 // Don't start compaction if we are in the middle of incremental
830 // marking cycle. We did not collect any slots. 835 // marking cycle. We did not collect any slots.
831 if (!FLAG_never_compact && !was_marked_incrementally_) { 836 if (!FLAG_never_compact && !was_marked_incrementally_) {
832 StartCompaction(NON_INCREMENTAL_COMPACTION); 837 StartCompaction(NON_INCREMENTAL_COMPACTION);
833 } 838 }
834 839
(...skipping 1885 matching lines...) Expand 10 before | Expand all | Expand 10 after
2720 while (weak_collection_obj != Smi::FromInt(0)) { 2725 while (weak_collection_obj != Smi::FromInt(0)) {
2721 JSWeakCollection* weak_collection = 2726 JSWeakCollection* weak_collection =
2722 reinterpret_cast<JSWeakCollection*>(weak_collection_obj); 2727 reinterpret_cast<JSWeakCollection*>(weak_collection_obj);
2723 weak_collection_obj = weak_collection->next(); 2728 weak_collection_obj = weak_collection->next();
2724 weak_collection->set_next(heap()->undefined_value()); 2729 weak_collection->set_next(heap()->undefined_value());
2725 } 2730 }
2726 heap()->set_encountered_weak_collections(Smi::FromInt(0)); 2731 heap()->set_encountered_weak_collections(Smi::FromInt(0));
2727 } 2732 }
2728 2733
2729 2734
2735 void MarkCompactCollector::ProcessAndClearWeakCells() {
2736 HeapObject* undefined = heap()->undefined_value();
2737 Object* weak_cell_obj = heap()->encountered_weak_cells();
2738 while (weak_cell_obj != undefined) {
2739 WeakCell* weak_cell = reinterpret_cast<WeakCell*>(weak_cell_obj);
2740 HeapObject* value = weak_cell->value();
2741 if (!MarkCompactCollector::IsMarked(value)) {
2742 weak_cell->update_value_from_gc(undefined);
2743 } else {
2744 Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
2745 heap()->mark_compact_collector()->RecordSlot(slot, slot, value);
2746 }
2747 weak_cell_obj = weak_cell->next();
2748 weak_cell->set_next(undefined);
Erik Corry 2014/10/13 15:56:17 No write barrier needed.
ulan 2014/10/14 10:17:22 Done.
2749 }
2750 heap()->set_encountered_weak_cells(undefined);
2751 }
2752
2753
2754 void MarkCompactCollector::AbortWeakCells() {
2755 Object* undefined = heap()->undefined_value();
2756 Object* weak_cell_obj = heap()->encountered_weak_cells();
2757 while (weak_cell_obj != undefined) {
2758 WeakCell* weak_cell = reinterpret_cast<WeakCell*>(weak_cell_obj);
2759 weak_cell_obj = weak_cell->next();
2760 weak_cell->set_next(undefined);
Erik Corry 2014/10/13 15:56:17 No write barrier needed
ulan 2014/10/14 10:17:22 Done.
2761 }
2762 heap()->set_encountered_weak_cells(undefined);
2763 }
2764
2765
2730 void MarkCompactCollector::RecordMigratedSlot(Object* value, Address slot) { 2766 void MarkCompactCollector::RecordMigratedSlot(Object* value, Address slot) {
2731 if (heap_->InNewSpace(value)) { 2767 if (heap_->InNewSpace(value)) {
2732 heap_->store_buffer()->Mark(slot); 2768 heap_->store_buffer()->Mark(slot);
2733 } else if (value->IsHeapObject() && IsOnEvacuationCandidate(value)) { 2769 } else if (value->IsHeapObject() && IsOnEvacuationCandidate(value)) {
2734 SlotsBuffer::AddTo(&slots_buffer_allocator_, &migration_slots_buffer_, 2770 SlotsBuffer::AddTo(&slots_buffer_allocator_, &migration_slots_buffer_,
2735 reinterpret_cast<Object**>(slot), 2771 reinterpret_cast<Object**>(slot),
2736 SlotsBuffer::IGNORE_OVERFLOW); 2772 SlotsBuffer::IGNORE_OVERFLOW);
2737 } 2773 }
2738 } 2774 }
2739 2775
2740 2776
2741 // We scavenge new space simultaneously with sweeping. This is done in two 2777 // We scavenge new space simultaneously with sweeping. This is done in two
Erik Corry 2014/10/13 15:56:17 scavange -> scavenge
ulan 2014/10/14 10:17:22 Done.
2742 // passes. 2778 // passes.
2743 // 2779 //
2744 // The first pass migrates all alive objects from one semispace to another or 2780 // The first pass migrates all alive objects from one semispace to another or
2745 // promotes them to old space. Forwarding address is written directly into 2781 // promotes them to old space. Forwarding address is written directly into
2746 // first word of object without any encoding. If object is dead we write 2782 // first word of object without any encoding. If object is dead we write
2747 // NULL as a forwarding address. 2783 // NULL as a forwarding address.
2748 // 2784 //
2749 // The second pass updates pointers to new space in all spaces. It is possible 2785 // The second pass updates pointers to new space in all spaces. It is possible
2750 // to encounter pointers to dead new space objects during traversal of pointers 2786 // to encounter pointers to dead new space objects during traversal of pointers
2751 // to new space. We should clear them to avoid encountering them during next 2787 // to new space. We should clear them to avoid encountering them during next
(...skipping 1650 matching lines...) Expand 10 before | Expand all | Expand 10 after
4402 SlotsBuffer* buffer = *buffer_address; 4438 SlotsBuffer* buffer = *buffer_address;
4403 while (buffer != NULL) { 4439 while (buffer != NULL) {
4404 SlotsBuffer* next_buffer = buffer->next(); 4440 SlotsBuffer* next_buffer = buffer->next();
4405 DeallocateBuffer(buffer); 4441 DeallocateBuffer(buffer);
4406 buffer = next_buffer; 4442 buffer = next_buffer;
4407 } 4443 }
4408 *buffer_address = NULL; 4444 *buffer_address = NULL;
4409 } 4445 }
4410 } 4446 }
4411 } // namespace v8::internal 4447 } // namespace v8::internal
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698