Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(5)

Side by Side Diff: src/heap/mark-compact.cc

Issue 2371733002: Introduce EmbedderReachableReferenceReporter (Closed)
Patch Set: Fix UsingEmbedderHeapTracer Created 4 years, 2 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « src/heap/mark-compact.h ('k') | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/heap/mark-compact.h" 5 #include "src/heap/mark-compact.h"
6 6
7 #include "src/base/atomicops.h" 7 #include "src/base/atomicops.h"
8 #include "src/base/bits.h" 8 #include "src/base/bits.h"
9 #include "src/base/sys-info.h" 9 #include "src/base/sys-info.h"
10 #include "src/code-stubs.h" 10 #include "src/code-stubs.h"
(...skipping 43 matching lines...) Expand 10 before | Expand all | Expand 10 after
54 #endif 54 #endif
55 marking_parity_(ODD_MARKING_PARITY), 55 marking_parity_(ODD_MARKING_PARITY),
56 was_marked_incrementally_(false), 56 was_marked_incrementally_(false),
57 evacuation_(false), 57 evacuation_(false),
58 compacting_(false), 58 compacting_(false),
59 black_allocation_(false), 59 black_allocation_(false),
60 have_code_to_deoptimize_(false), 60 have_code_to_deoptimize_(false),
61 marking_deque_memory_(NULL), 61 marking_deque_memory_(NULL),
62 marking_deque_memory_committed_(0), 62 marking_deque_memory_committed_(0),
63 code_flusher_(nullptr), 63 code_flusher_(nullptr),
64 embedder_heap_tracer_(nullptr),
65 sweeper_(heap) { 64 sweeper_(heap) {
66 } 65 }
67 66
68 #ifdef VERIFY_HEAP 67 #ifdef VERIFY_HEAP
69 class VerifyMarkingVisitor : public ObjectVisitor { 68 class VerifyMarkingVisitor : public ObjectVisitor {
70 public: 69 public:
71 explicit VerifyMarkingVisitor(Heap* heap) : heap_(heap) {} 70 explicit VerifyMarkingVisitor(Heap* heap) : heap_(heap) {}
72 71
73 void VisitPointers(Object** start, Object** end) override { 72 void VisitPointers(Object** start, Object** end) override {
74 for (Object** current = start; current < end; current++) { 73 for (Object** current = start; current < end; current++) {
(...skipping 727 matching lines...) Expand 10 before | Expand all | Expand 10 after
802 // Clear marking bits if incremental marking is aborted. 801 // Clear marking bits if incremental marking is aborted.
803 if (was_marked_incrementally_ && heap_->ShouldAbortIncrementalMarking()) { 802 if (was_marked_incrementally_ && heap_->ShouldAbortIncrementalMarking()) {
804 heap()->incremental_marking()->Stop(); 803 heap()->incremental_marking()->Stop();
805 heap()->incremental_marking()->AbortBlackAllocation(); 804 heap()->incremental_marking()->AbortBlackAllocation();
806 ClearMarkbits(); 805 ClearMarkbits();
807 AbortWeakCollections(); 806 AbortWeakCollections();
808 AbortWeakCells(); 807 AbortWeakCells();
809 AbortTransitionArrays(); 808 AbortTransitionArrays();
810 AbortCompaction(); 809 AbortCompaction();
811 if (heap_->UsingEmbedderHeapTracer()) { 810 if (heap_->UsingEmbedderHeapTracer()) {
812 heap_->mark_compact_collector()->embedder_heap_tracer()->AbortTracing(); 811 heap_->embedder_heap_tracer()->AbortTracing();
813 } 812 }
814 was_marked_incrementally_ = false; 813 was_marked_incrementally_ = false;
815 } 814 }
816 815
817 if (!was_marked_incrementally_) { 816 if (!was_marked_incrementally_) {
818 if (heap_->UsingEmbedderHeapTracer()) { 817 if (heap_->UsingEmbedderHeapTracer()) {
819 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_WRAPPER_PROLOGUE); 818 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_WRAPPER_PROLOGUE);
820 heap_->mark_compact_collector()->embedder_heap_tracer()->TracePrologue(); 819 heap_->embedder_heap_tracer()->TracePrologue(
820 heap_->embedder_reachable_reference_reporter());
821 } 821 }
822 } 822 }
823 823
824 if (UsingEmbedderHeapTracer()) { 824 if (heap_->UsingEmbedderHeapTracer()) {
825 embedder_heap_tracer()->EnterFinalPause(); 825 heap_->embedder_heap_tracer()->EnterFinalPause();
826 } 826 }
827 827
828 // Don't start compaction if we are in the middle of incremental 828 // Don't start compaction if we are in the middle of incremental
829 // marking cycle. We did not collect any slots. 829 // marking cycle. We did not collect any slots.
830 if (!FLAG_never_compact && !was_marked_incrementally_) { 830 if (!FLAG_never_compact && !was_marked_incrementally_) {
831 StartCompaction(NON_INCREMENTAL_COMPACTION); 831 StartCompaction(NON_INCREMENTAL_COMPACTION);
832 } 832 }
833 833
834 PagedSpaces spaces(heap()); 834 PagedSpaces spaces(heap());
835 for (PagedSpace* space = spaces.next(); space != NULL; 835 for (PagedSpace* space = spaces.next(); space != NULL;
(...skipping 1242 matching lines...) Expand 10 before | Expand all | Expand 10 after
2078 } 2078 }
2079 } 2079 }
2080 2080
2081 // Mark all objects reachable (transitively) from objects on the marking 2081 // Mark all objects reachable (transitively) from objects on the marking
2082 // stack including references only considered in the atomic marking pause. 2082 // stack including references only considered in the atomic marking pause.
2083 void MarkCompactCollector::ProcessEphemeralMarking( 2083 void MarkCompactCollector::ProcessEphemeralMarking(
2084 ObjectVisitor* visitor, bool only_process_harmony_weak_collections) { 2084 ObjectVisitor* visitor, bool only_process_harmony_weak_collections) {
2085 DCHECK(marking_deque_.IsEmpty() && !marking_deque_.overflowed()); 2085 DCHECK(marking_deque_.IsEmpty() && !marking_deque_.overflowed());
2086 bool work_to_do = true; 2086 bool work_to_do = true;
2087 while (work_to_do) { 2087 while (work_to_do) {
2088 if (UsingEmbedderHeapTracer()) { 2088 if (heap_->UsingEmbedderHeapTracer()) {
2089 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_WRAPPER_TRACING); 2089 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_WRAPPER_TRACING);
2090 RegisterWrappersWithEmbedderHeapTracer(); 2090 heap_->RegisterWrappersWithEmbedderHeapTracer();
2091 embedder_heap_tracer()->AdvanceTracing( 2091 heap_->embedder_heap_tracer()->AdvanceTracing(
2092 0, EmbedderHeapTracer::AdvanceTracingActions( 2092 0, EmbedderHeapTracer::AdvanceTracingActions(
2093 EmbedderHeapTracer::ForceCompletionAction::FORCE_COMPLETION)); 2093 EmbedderHeapTracer::ForceCompletionAction::FORCE_COMPLETION));
2094 } 2094 }
2095 if (!only_process_harmony_weak_collections) { 2095 if (!only_process_harmony_weak_collections) {
2096 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_OBJECT_GROUPING); 2096 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_OBJECT_GROUPING);
2097 isolate()->global_handles()->IterateObjectGroups( 2097 isolate()->global_handles()->IterateObjectGroups(
2098 visitor, &IsUnmarkedHeapObjectWithHeap); 2098 visitor, &IsUnmarkedHeapObjectWithHeap);
2099 MarkImplicitRefGroups(&MarkCompactMarkingVisitor::MarkObject); 2099 MarkImplicitRefGroups(&MarkCompactMarkingVisitor::MarkObject);
2100 } 2100 }
2101 ProcessWeakCollections(); 2101 ProcessWeakCollections();
(...skipping 93 matching lines...) Expand 10 before | Expand all | Expand 10 after
2195 void MarkingDeque::Uninitialize(bool aborting) { 2195 void MarkingDeque::Uninitialize(bool aborting) {
2196 if (!aborting) { 2196 if (!aborting) {
2197 DCHECK(IsEmpty()); 2197 DCHECK(IsEmpty());
2198 DCHECK(!overflowed_); 2198 DCHECK(!overflowed_);
2199 } 2199 }
2200 DCHECK(in_use_); 2200 DCHECK(in_use_);
2201 top_ = bottom_ = 0xdecbad; 2201 top_ = bottom_ = 0xdecbad;
2202 in_use_ = false; 2202 in_use_ = false;
2203 } 2203 }
2204 2204
2205 void MarkCompactCollector::SetEmbedderHeapTracer(EmbedderHeapTracer* tracer) {
2206 DCHECK_NOT_NULL(tracer);
2207 CHECK_NULL(embedder_heap_tracer_);
2208 embedder_heap_tracer_ = tracer;
2209 }
2210
2211 bool MarkCompactCollector::RequiresImmediateWrapperProcessing() {
2212 const size_t kTooManyWrappers = 16000;
2213 return wrappers_to_trace_.size() > kTooManyWrappers;
2214 }
2215
2216 void MarkCompactCollector::RegisterWrappersWithEmbedderHeapTracer() {
2217 DCHECK(UsingEmbedderHeapTracer());
2218 if (wrappers_to_trace_.empty()) {
2219 return;
2220 }
2221 embedder_heap_tracer()->RegisterV8References(wrappers_to_trace_);
2222 wrappers_to_trace_.clear();
2223 }
2224
2225 void MarkCompactCollector::TracePossibleWrapper(JSObject* js_object) {
2226 DCHECK(js_object->WasConstructedFromApiFunction());
2227 if (js_object->GetInternalFieldCount() >= 2 &&
2228 js_object->GetInternalField(0) &&
2229 js_object->GetInternalField(0) != heap_->undefined_value() &&
2230 js_object->GetInternalField(1) != heap_->undefined_value()) {
2231 DCHECK(reinterpret_cast<intptr_t>(js_object->GetInternalField(0)) % 2 == 0);
2232 wrappers_to_trace_.push_back(std::pair<void*, void*>(
2233 reinterpret_cast<void*>(js_object->GetInternalField(0)),
2234 reinterpret_cast<void*>(js_object->GetInternalField(1))));
2235 }
2236 }
2237
2238 class MarkCompactCollector::ObjectStatsVisitor 2205 class MarkCompactCollector::ObjectStatsVisitor
2239 : public MarkCompactCollector::HeapObjectVisitor { 2206 : public MarkCompactCollector::HeapObjectVisitor {
2240 public: 2207 public:
2241 ObjectStatsVisitor(Heap* heap, ObjectStats* live_stats, 2208 ObjectStatsVisitor(Heap* heap, ObjectStats* live_stats,
2242 ObjectStats* dead_stats) 2209 ObjectStats* dead_stats)
2243 : live_collector_(heap, live_stats), dead_collector_(heap, dead_stats) { 2210 : live_collector_(heap, live_stats), dead_collector_(heap, dead_stats) {
2244 DCHECK_NOT_NULL(live_stats); 2211 DCHECK_NOT_NULL(live_stats);
2245 DCHECK_NOT_NULL(dead_stats); 2212 DCHECK_NOT_NULL(dead_stats);
2246 // Global objects are roots and thus recorded as live. 2213 // Global objects are roots and thus recorded as live.
2247 live_collector_.CollectGlobalStatistics(); 2214 live_collector_.CollectGlobalStatistics();
(...skipping 121 matching lines...) Expand 10 before | Expand all | Expand 10 after
2369 } 2336 }
2370 2337
2371 // Repeat Harmony weak maps marking to mark unmarked objects reachable from 2338 // Repeat Harmony weak maps marking to mark unmarked objects reachable from
2372 // the weak roots we just marked as pending destruction. 2339 // the weak roots we just marked as pending destruction.
2373 // 2340 //
2374 // We only process harmony collections, as all object groups have been fully 2341 // We only process harmony collections, as all object groups have been fully
 2375 // processed and no weakly reachable node can discover new object groups. 2342 // processed and no weakly reachable node can discover new object groups.
2376 { 2343 {
2377 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_WEAK_CLOSURE_HARMONY); 2344 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_WEAK_CLOSURE_HARMONY);
2378 ProcessEphemeralMarking(&root_visitor, true); 2345 ProcessEphemeralMarking(&root_visitor, true);
2379 if (UsingEmbedderHeapTracer()) { 2346 if (heap_->UsingEmbedderHeapTracer()) {
2380 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_WRAPPER_EPILOGUE); 2347 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_WRAPPER_EPILOGUE);
2381 embedder_heap_tracer()->TraceEpilogue(); 2348 heap()->embedder_heap_tracer()->TraceEpilogue();
2382 } 2349 }
2383 } 2350 }
2384 } 2351 }
2385 2352
2386 if (FLAG_print_cumulative_gc_stat) { 2353 if (FLAG_print_cumulative_gc_stat) {
2387 heap_->tracer()->AddMarkingTime(heap_->MonotonicallyIncreasingTimeInMs() - 2354 heap_->tracer()->AddMarkingTime(heap_->MonotonicallyIncreasingTimeInMs() -
2388 start_time); 2355 start_time);
2389 } 2356 }
2390 } 2357 }
2391 2358
(...skipping 1650 matching lines...) Expand 10 before | Expand all | Expand 10 after
4042 // The target is always in old space, we don't have to record the slot in 4009 // The target is always in old space, we don't have to record the slot in
4043 // the old-to-new remembered set. 4010 // the old-to-new remembered set.
4044 DCHECK(!heap()->InNewSpace(target)); 4011 DCHECK(!heap()->InNewSpace(target));
4045 RecordRelocSlot(host, &rinfo, target); 4012 RecordRelocSlot(host, &rinfo, target);
4046 } 4013 }
4047 } 4014 }
4048 } 4015 }
4049 4016
4050 } // namespace internal 4017 } // namespace internal
4051 } // namespace v8 4018 } // namespace v8
OLDNEW
« no previous file with comments | « src/heap/mark-compact.h ('k') | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698