| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #include "src/base/atomicops.h" | 7 #include "src/base/atomicops.h" |
| 8 #include "src/code-stubs.h" | 8 #include "src/code-stubs.h" |
| 9 #include "src/compilation-cache.h" | 9 #include "src/compilation-cache.h" |
| 10 #include "src/cpu-profiler.h" | 10 #include "src/cpu-profiler.h" |
| (...skipping 950 matching lines...) |
| 961 | 961 |
| 962 if (sweeping_in_progress()) { | 962 if (sweeping_in_progress()) { |
| 963 // Instead of waiting we could also abort the sweeper threads here. | 963 // Instead of waiting we could also abort the sweeper threads here. |
| 964 EnsureSweepingCompleted(); | 964 EnsureSweepingCompleted(); |
| 965 } | 965 } |
| 966 | 966 |
| 967 // Clear marking bits if incremental marking is aborted. | 967 // Clear marking bits if incremental marking is aborted. |
| 968 if (was_marked_incrementally_ && abort_incremental_marking_) { | 968 if (was_marked_incrementally_ && abort_incremental_marking_) { |
| 969 heap()->incremental_marking()->Abort(); | 969 heap()->incremental_marking()->Abort(); |
| 970 ClearMarkbits(); | 970 ClearMarkbits(); |
| 971 AbortWeakCollections(); |
| 971 AbortCompaction(); | 972 AbortCompaction(); |
| 972 was_marked_incrementally_ = false; | 973 was_marked_incrementally_ = false; |
| 973 } | 974 } |
| 974 | 975 |
| 975 // Don't start compaction if we are in the middle of incremental | 976 // Don't start compaction if we are in the middle of incremental |
| 976 // marking cycle. We did not collect any slots. | 977 // marking cycle. We did not collect any slots. |
| 977 if (!FLAG_never_compact && !was_marked_incrementally_) { | 978 if (!FLAG_never_compact && !was_marked_incrementally_) { |
| 978 StartCompaction(NON_INCREMENTAL_COMPACTION); | 979 StartCompaction(NON_INCREMENTAL_COMPACTION); |
| 979 } | 980 } |
| 980 | 981 |
| (...skipping 1812 matching lines...) |
| 2793 } | 2794 } |
| 2794 } | 2795 } |
| 2795 } | 2796 } |
| 2796 weak_collection_obj = weak_collection->next(); | 2797 weak_collection_obj = weak_collection->next(); |
| 2797 weak_collection->set_next(heap()->undefined_value()); | 2798 weak_collection->set_next(heap()->undefined_value()); |
| 2798 } | 2799 } |
| 2799 heap()->set_encountered_weak_collections(Smi::FromInt(0)); | 2800 heap()->set_encountered_weak_collections(Smi::FromInt(0)); |
| 2800 } | 2801 } |
| 2801 | 2802 |
| 2802 | 2803 |
| 2804 void MarkCompactCollector::AbortWeakCollections() { |
| 2805 GCTracer::Scope gc_scope(heap()->tracer(), |
| 2806 GCTracer::Scope::MC_WEAKCOLLECTION_ABORT); |
| 2807 Object* weak_collection_obj = heap()->encountered_weak_collections(); |
| 2808 while (weak_collection_obj != Smi::FromInt(0)) { |
| 2809 JSWeakCollection* weak_collection = |
| 2810 reinterpret_cast<JSWeakCollection*>(weak_collection_obj); |
| 2811 weak_collection_obj = weak_collection->next(); |
| 2812 weak_collection->set_next(heap()->undefined_value()); |
| 2813 } |
| 2814 heap()->set_encountered_weak_collections(Smi::FromInt(0)); |
| 2815 } |
| 2816 |
| 2817 |
| 2803 void MarkCompactCollector::RecordMigratedSlot(Object* value, Address slot) { | 2818 void MarkCompactCollector::RecordMigratedSlot(Object* value, Address slot) { |
| 2804 if (heap_->InNewSpace(value)) { | 2819 if (heap_->InNewSpace(value)) { |
| 2805 heap_->store_buffer()->Mark(slot); | 2820 heap_->store_buffer()->Mark(slot); |
| 2806 } else if (value->IsHeapObject() && IsOnEvacuationCandidate(value)) { | 2821 } else if (value->IsHeapObject() && IsOnEvacuationCandidate(value)) { |
| 2807 SlotsBuffer::AddTo(&slots_buffer_allocator_, | 2822 SlotsBuffer::AddTo(&slots_buffer_allocator_, |
| 2808 &migration_slots_buffer_, | 2823 &migration_slots_buffer_, |
| 2809 reinterpret_cast<Object**>(slot), | 2824 reinterpret_cast<Object**>(slot), |
| 2810 SlotsBuffer::IGNORE_OVERFLOW); | 2825 SlotsBuffer::IGNORE_OVERFLOW); |
| 2811 } | 2826 } |
| 2812 } | 2827 } |
| (...skipping 1808 matching lines...) |
| 4621 while (buffer != NULL) { | 4636 while (buffer != NULL) { |
| 4622 SlotsBuffer* next_buffer = buffer->next(); | 4637 SlotsBuffer* next_buffer = buffer->next(); |
| 4623 DeallocateBuffer(buffer); | 4638 DeallocateBuffer(buffer); |
| 4624 buffer = next_buffer; | 4639 buffer = next_buffer; |
| 4625 } | 4640 } |
| 4626 *buffer_address = NULL; | 4641 *buffer_address = NULL; |
| 4627 } | 4642 } |
| 4628 | 4643 |
| 4629 | 4644 |
| 4630 } } // namespace v8::internal | 4645 } } // namespace v8::internal |
| OLD | NEW |