Chromium Code Reviews

Side by Side Diff: Source/platform/heap/Heap.cpp

Issue 765673004: Oilpan: support eager tracing of objects when marking. (Closed) Base URL: https://chromium.googlesource.com/chromium/blink.git@master
Patch Set: rebased Created 6 years ago
1 /* 1 /*
2 * Copyright (C) 2013 Google Inc. All rights reserved. 2 * Copyright (C) 2013 Google Inc. All rights reserved.
3 * 3 *
4 * Redistribution and use in source and binary forms, with or without 4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions are 5 * modification, are permitted provided that the following conditions are
6 * met: 6 * met:
7 * 7 *
8 * * Redistributions of source code must retain the above copyright 8 * * Redistributions of source code must retain the above copyright
9 * notice, this list of conditions and the following disclaimer. 9 * notice, this list of conditions and the following disclaimer.
10 * * Redistributions in binary form must reproduce the above 10 * * Redistributions in binary form must reproduce the above
(...skipping 564 matching lines...)
575 if (objectFields[i] != 0) 575 if (objectFields[i] != 0)
576 return false; 576 return false;
577 } 577 }
578 return true; 578 return true;
579 } 579 }
580 #endif 580 #endif
581 581
582 template<> 582 template<>
583 void LargeObject<FinalizedHeapObjectHeader>::mark(Visitor* visitor) 583 void LargeObject<FinalizedHeapObjectHeader>::mark(Visitor* visitor)
584 { 584 {
585 if (heapObjectHeader()->hasVTable() && !vTableInitialized(payload())) { 585 FinalizedHeapObjectHeader* header = heapObjectHeader();
586 FinalizedHeapObjectHeader* header = heapObjectHeader(); 586 if (header->hasVTable() && !vTableInitialized(payload())) {
587 visitor->markNoTracing(header); 587 visitor->markNoTracing(header);
588 ASSERT(isUninitializedMemory(header->payload(), header->payloadSize())); 588 ASSERT(isUninitializedMemory(header->payload(), header->payloadSize()));
589 } else { 589 } else {
590 visitor->mark(heapObjectHeader(), heapObjectHeader()->traceCallback()); 590 visitor->mark(header, header->traceCallback());
591 } 591 }
592 } 592 }
593 593
594 template<> 594 template<>
595 void LargeObject<HeapObjectHeader>::mark(Visitor* visitor) 595 void LargeObject<HeapObjectHeader>::mark(Visitor* visitor)
596 { 596 {
597 ASSERT(gcInfo()); 597 ASSERT(gcInfo());
598 if (gcInfo()->hasVTable() && !vTableInitialized(payload())) { 598 if (gcInfo()->hasVTable() && !vTableInitialized(payload())) {
599 HeapObjectHeader* header = heapObjectHeader(); 599 HeapObjectHeader* header = heapObjectHeader();
600 visitor->markNoTracing(header); 600 visitor->markNoTracing(header);
(...skipping 1355 matching lines...)
1956 void Heap::flushHeapDoesNotContainCache() 1956 void Heap::flushHeapDoesNotContainCache()
1957 { 1957 {
1958 s_heapDoesNotContainCache->flush(); 1958 s_heapDoesNotContainCache->flush();
1959 } 1959 }
1960 1960
1961 static void markNoTracingCallback(Visitor* visitor, void* object) 1961 static void markNoTracingCallback(Visitor* visitor, void* object)
1962 { 1962 {
1963 visitor->markNoTracing(object); 1963 visitor->markNoTracing(object);
1964 } 1964 }
1965 1965
1966 enum MarkingMode {
1967 GlobalMarking,
1968 ThreadLocalMarking,
1969 };
1970
1971 template<MarkingMode Mode>
1966 class MarkingVisitor final : public Visitor { 1972 class MarkingVisitor final : public Visitor {
1967 public: 1973 public:
1968 #if ENABLE(GC_PROFILE_MARKING) 1974 #if ENABLE(GC_PROFILE_MARKING)
1969 typedef HashSet<uintptr_t> LiveObjectSet; 1975 typedef HashSet<uintptr_t> LiveObjectSet;
1970 typedef HashMap<String, LiveObjectSet> LiveObjectMap; 1976 typedef HashMap<String, LiveObjectSet> LiveObjectMap;
1971 typedef HashMap<uintptr_t, std::pair<uintptr_t, String> > ObjectGraph; 1977 typedef HashMap<uintptr_t, std::pair<uintptr_t, String> > ObjectGraph;
1972 #endif 1978 #endif
1973 1979
1974 MarkingVisitor(CallbackStack* markingStack) : m_markingStack(markingStack) 1980 explicit MarkingVisitor(CallbackStack* markingStack)
1981 : m_markingStack(markingStack)
1975 { 1982 {
1976 } 1983 }
1977 1984
1978 inline void visitHeader(HeapObjectHeader* header, const void* objectPointer, TraceCallback callback) 1985 inline void visitHeader(HeapObjectHeader* header, const void* objectPointer, TraceCallback callback)
1979 { 1986 {
1980 ASSERT(header); 1987 ASSERT(header);
1981 #if ENABLE(ASSERT)
1982 {
1983 // Check that we are not marking objects that are outside
1984 // the heap by calling Heap::contains. However we cannot
1985 // call Heap::contains when outside a GC and we call mark
1986 // when doing weakness for ephemerons. Hence we only check
1987 // when called within.
1988 ASSERT(!Heap::isInGC() || Heap::containedInHeapOrOrphanedPage(header));
1989 }
1990 #endif
1991 ASSERT(objectPointer); 1988 ASSERT(objectPointer);
1989 // Check that we are not marking objects that are outside
1990 // the heap by calling Heap::contains. However we cannot
1991 // call Heap::contains when outside a GC and we call mark
1992 // when doing weakness for ephemerons. Hence we only check
1993 // when called within.
1994 ASSERT(!Heap::isInGC() || Heap::containedInHeapOrOrphanedPage(header));
1995
1992 if (header->isMarked()) 1996 if (header->isMarked())
1993 return; 1997 return;
1994 header->mark(); 1998 header->mark();
1999
1995 #if ENABLE(GC_PROFILE_MARKING) 2000 #if ENABLE(GC_PROFILE_MARKING)
1996 MutexLocker locker(objectGraphMutex()); 2001 MutexLocker locker(objectGraphMutex());
1997 String className(classOf(objectPointer)); 2002 String className(classOf(objectPointer));
1998 { 2003 {
1999 LiveObjectMap::AddResult result = currentlyLive().add(className, LiveObjectSet()); 2004 LiveObjectMap::AddResult result = currentlyLive().add(className, LiveObjectSet());
2000 result.storedValue->value.add(reinterpret_cast<uintptr_t>(objectPointer)); 2005 result.storedValue->value.add(reinterpret_cast<uintptr_t>(objectPointer));
2001 } 2006 }
2002 ObjectGraph::AddResult result = objectGraph().add(reinterpret_cast<uintptr_t>(objectPointer), std::make_pair(reinterpret_cast<uintptr_t>(m_hostObject), m_hostName)); 2007 ObjectGraph::AddResult result = objectGraph().add(reinterpret_cast<uintptr_t>(objectPointer), std::make_pair(reinterpret_cast<uintptr_t>(m_hostObject), m_hostName));
2003 ASSERT(result.isNewEntry); 2008 ASSERT(result.isNewEntry);
2004 // fprintf(stderr, "%s[%p] -> %s[%p]\n", m_hostName.ascii().data(), m_hostObject, className.ascii().data(), objectPointer); 2009 // fprintf(stderr, "%s[%p] -> %s[%p]\n", m_hostName.ascii().data(), m_hostObject, className.ascii().data(), objectPointer);
2005 #endif 2010 #endif
2011 #if ENABLE(ASSERT)
2012 {
2013 BaseHeapPage* page = pageFromObject(objectPointer);
2014 // If you hit this ASSERT, it means that there is a dangling pointer
2015 // from a live thread heap to a dead thread heap. We must eliminate
2016 // the dangling pointer.
2017 // Release builds don't have the ASSERT, but it is OK because
2018 // release builds will crash upon invoking the trace callback
2019 // as all the entries of the orphaned heaps are zeroed out
2020 // (=> 'objectPointer' will not have a valid vtable.)
2021 ASSERT(!page->orphaned());
2022 }
2023 #endif
haraken 2014/12/02 15:35:10 Can we just write: ASSERT(!pageFromObject(objec
sof 2014/12/02 21:33:21 Done.
2024 if (Mode == ThreadLocalMarking && !needsTracing(objectPointer))
haraken 2014/12/02 15:35:10 Don't we need to check this before doing header->m
sof 2014/12/02 15:46:51 I'm just mirroring what is done now; don't we want
haraken 2014/12/02 15:56:54 Currently we're doing the check when popping an ob
2025 return;
2026
2006 if (callback) 2027 if (callback)
2007 Heap::pushTraceCallback(m_markingStack, const_cast<void*>(objectPointer), callback); 2028 Heap::pushTraceCallback(m_markingStack, const_cast<void*>(objectPointer), callback);
2008 } 2029 }
2009 2030
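The comment thread above asks whether the Mode == ThreadLocalMarking check should happen before the object is marked. As a point of reference only, a minimal sketch of that alternative ordering (not what this patch set does; it reuses the names from this file and assumes the needsTracing() helper defined further down):

    inline void visitHeader(HeapObjectHeader* header, const void* objectPointer, TraceCallback callback)
    {
        ASSERT(header);
        ASSERT(objectPointer);
        // Bail out before touching the mark bit, so a thread-local GC never
        // marks an object that lives on another thread's heap.
        if (Mode == ThreadLocalMarking && !needsTracing(objectPointer))
            return;
        if (header->isMarked())
            return;
        header->mark();
        if (callback)
            Heap::pushTraceCallback(m_markingStack, const_cast<void*>(objectPointer), callback);
    }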
2031 // We need both HeapObjectHeader and FinalizedHeapObjectHeader versions to correctly find the payload.
2010 virtual void mark(HeapObjectHeader* header, TraceCallback callback) override 2032 virtual void mark(HeapObjectHeader* header, TraceCallback callback) override
2011 { 2033 {
2012 // We need both the HeapObjectHeader and FinalizedHeapObjectHeader
2013 // version to correctly find the payload.
2014 visitHeader(header, header->payload(), callback); 2034 visitHeader(header, header->payload(), callback);
2015 } 2035 }
2016 2036
2017 virtual void mark(FinalizedHeapObjectHeader* header, TraceCallback callback) override 2037 virtual void mark(FinalizedHeapObjectHeader* header, TraceCallback callback) override
2018 { 2038 {
2019 // We need both the HeapObjectHeader and FinalizedHeapObjectHeader
2020 // version to correctly find the payload.
2021 visitHeader(header, header->payload(), callback); 2039 visitHeader(header, header->payload(), callback);
2022 } 2040 }
2023 2041
2024 virtual void mark(const void* objectPointer, TraceCallback callback) override 2042 virtual void mark(const void* objectPointer, TraceCallback callback) override
2025 { 2043 {
2026 if (!objectPointer) 2044 if (!objectPointer)
2027 return; 2045 return;
2028 FinalizedHeapObjectHeader* header = FinalizedHeapObjectHeader::fromPayload(objectPointer); 2046 FinalizedHeapObjectHeader* header = FinalizedHeapObjectHeader::fromPayload(objectPointer);
2029 visitHeader(header, header->payload(), callback); 2047 visitHeader(header, header->payload(), callback);
2030 } 2048 }
(...skipping 18 matching lines...)
2049 { 2067 {
2050 return Heap::weakTableRegistered(closure); 2068 return Heap::weakTableRegistered(closure);
2051 } 2069 }
2052 #endif 2070 #endif
2053 2071
2054 virtual bool isMarked(const void* objectPointer) override 2072 virtual bool isMarked(const void* objectPointer) override
2055 { 2073 {
2056 return FinalizedHeapObjectHeader::fromPayload(objectPointer)->isMarked(); 2074 return FinalizedHeapObjectHeader::fromPayload(objectPointer)->isMarked();
2057 } 2075 }
2058 2076
2077 virtual bool ensureMarked(const void* objectPointer) override
2078 {
2079 if (!objectPointer)
2080 return false;
2081 #if ENABLE(ASSERT)
2082 if (isMarked(objectPointer))
2083 return false;
2084
2085 markNoTracing(objectPointer);
2086 #else
2087 // Inline what the above markNoTracing() call expands to,
2088 // so as to make sure that we do get all the benefits.
2089 FinalizedHeapObjectHeader* header =
2090 FinalizedHeapObjectHeader::fromPayload(objectPointer);
2091 if (header->isMarked())
2092 return false;
2093 header->mark();
2094 #endif
2095 if (Mode == ThreadLocalMarking && !needsTracing(objectPointer))
haraken 2014/12/02 15:35:10 Ditto. I guess this check needs to be done before
2096 return false;
2097 return true;
2098 }
2099
2100 #if ENABLE(ASSERT)
2101 #define DEFINE_ENSURE_MARKED_METHOD(Type) \
2102 virtual bool ensureMarked(const Type* objectPointer) override \
2103 { \
2104 if (!objectPointer) \
2105 return false; \
2106 COMPILE_ASSERT(!NeedsAdjustAndMark<Type>::value, CanOnlyUseIsMarkedOnNonAdjustedTypes); \
2107 if (isMarked(objectPointer)) \
2108 return false; \
2109 markNoTracing(objectPointer); \
2110 if (Mode == ThreadLocalMarking && !needsTracing(objectPointer)) \
haraken 2014/12/02 15:35:10 Ditto.
2111 return false; \
2112 return true; \
2113 }
2114 #else
2115 #define DEFINE_ENSURE_MARKED_METHOD(Type) \
2116 virtual bool ensureMarked(const Type* objectPointer) override \
2117 { \
2118 if (!objectPointer) \
2119 return false; \
2120 HeapObjectHeader* header = \
2121 HeapObjectHeader::fromPayload(objectPointer); \
2122 if (header->isMarked()) \
2123 return false; \
2124 header->mark(); \
2125 if (Mode == ThreadLocalMarking && !needsTracing(objectPointer)) \
haraken 2014/12/02 15:35:10 Ditto.
2126 return false; \
2127 return true; \
2128 }
2129 #endif
2130
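For illustration, a hypothetical caller of the new ensureMarked() overloads (not part of this patch): ensureMarked() returns true at most once per object, and under ThreadLocalMarking only for objects on the terminating thread's heap, so eager tracing can be gated on its return value. The traceEagerly helper and the Backing type below are made up for the example.

    // Hypothetical helper, for illustration only.
    template<typename Backing>
    void traceEagerly(blink::Visitor* visitor, const Backing* backing)
    {
        // ensureMarked() marks the object and reports whether the caller
        // should go on to trace it (false if null, already marked, or not
        // part of this thread-local marking pass).
        if (!visitor->ensureMarked(backing))
            return;
        backing->trace(visitor); // assumes Backing exposes the usual trace(Visitor*)
    }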
2059 // This macro defines the necessary visitor methods for typed heaps 2131 // This macro defines the necessary visitor methods for typed heaps
2060 #define DEFINE_VISITOR_METHODS(Type) \ 2132 #define DEFINE_VISITOR_METHODS(Type) \
2061 virtual void mark(const Type* objectPointer, TraceCallback callback) override \ 2133 virtual void mark(const Type* objectPointer, TraceCallback callback) override \
2062 { \ 2134 { \
2063 if (!objectPointer) \ 2135 if (!objectPointer) \
2064 return; \ 2136 return; \
2065 HeapObjectHeader* header = \ 2137 HeapObjectHeader* header = \
2066 HeapObjectHeader::fromPayload(objectPointer); \ 2138 HeapObjectHeader::fromPayload(objectPointer); \
2067 visitHeader(header, header->payload(), callback); \ 2139 visitHeader(header, header->payload(), callback); \
2068 } \ 2140 } \
2069 virtual bool isMarked(const Type* objectPointer) override \ 2141 virtual bool isMarked(const Type* objectPointer) override \
2070 { \ 2142 { \
2071 return HeapObjectHeader::fromPayload(objectPointer)->isMarked(); \ 2143 return HeapObjectHeader::fromPayload(objectPointer)->isMarked(); \
2072 } 2144 } \
2145 DEFINE_ENSURE_MARKED_METHOD(Type)
2073 2146
2074 FOR_EACH_TYPED_HEAP(DEFINE_VISITOR_METHODS) 2147 FOR_EACH_TYPED_HEAP(DEFINE_VISITOR_METHODS)
2075 #undef DEFINE_VISITOR_METHODS 2148 #undef DEFINE_VISITOR_METHODS
2076 2149
2077 #if ENABLE(GC_PROFILE_MARKING) 2150 #if ENABLE(GC_PROFILE_MARKING)
2078 void reportStats() 2151 void reportStats()
2079 { 2152 {
2080 fprintf(stderr, "\n---------- AFTER MARKING -------------------\n"); 2153 fprintf(stderr, "\n---------- AFTER MARKING -------------------\n");
2081 for (LiveObjectMap::iterator it = currentlyLive().begin(), end = currentlyLive().end(); it != end; ++it) { 2154 for (LiveObjectMap::iterator it = currentlyLive().begin(), end = currentlyLive().end(); it != end; ++it) {
2082 fprintf(stderr, "%s %u", it->key.ascii().data(), it->value.size()); 2155 fprintf(stderr, "%s %u", it->key.ascii().data(), it->value.size());
(...skipping 80 matching lines...)
2163 return graph; 2236 return graph;
2164 } 2237 }
2165 2238
2166 static HashSet<uintptr_t>& objectsToFindPath() 2239 static HashSet<uintptr_t>& objectsToFindPath()
2167 { 2240 {
2168 DEFINE_STATIC_LOCAL(HashSet<uintptr_t>, set, ()); 2241 DEFINE_STATIC_LOCAL(HashSet<uintptr_t>, set, ());
2169 return set; 2242 return set;
2170 } 2243 }
2171 #endif 2244 #endif
2172 2245
2246 static inline bool needsTracing(const void* objectPointer)
haraken 2014/12/02 15:35:10 needsTracing => containedInThisThreadHeap ?
haraken 2014/12/02 15:38:35 or terminatingThreadContains()
sof 2014/12/02 21:33:22 objectInTerminatingThreadHeap().
2247 {
2248 BaseHeapPage* page = pageFromObject(objectPointer);
2249 ASSERT(!page->orphaned());
2250 // When doing a thread local GC, the marker checks if
2251 // the object resides in another thread's heap. The
2252 // object should not be traced, if it does.
2253 if (!page->terminating())
haraken 2014/12/02 15:35:10 return page->terminating();
sof 2014/12/02 21:33:22 Done.
2254 return false;
2255
2256 return true;
2257 }
2258
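Following the replies above (rename to objectInTerminatingThreadHeap() and returning page->terminating() directly), the helper presumably ends up roughly like this in a later patch set; a sketch of the suggested follow-up, not the code shown here:

    static inline bool objectInTerminatingThreadHeap(const void* objectPointer)
    {
        BaseHeapPage* page = pageFromObject(objectPointer);
        ASSERT(!page->orphaned());
        // A thread-local GC must not trace objects that live in another
        // thread's heap, so only pages of the terminating thread qualify.
        return page->terminating();
    }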
2173 protected: 2259 protected:
2174 virtual void registerWeakCell(void** cell, WeakPointerCallback callback) override 2260 virtual void registerWeakCell(void** cell, WeakPointerCallback callback) override
2175 { 2261 {
2176 Heap::pushWeakCellPointerCallback(cell, callback); 2262 Heap::pushWeakCellPointerCallback(cell, callback);
2177 } 2263 }
2178 2264
2179 private: 2265 private:
2180 CallbackStack* m_markingStack; 2266 CallbackStack* m_markingStack;
2181 }; 2267 };
2182 2268
2183 void Heap::init() 2269 void Heap::init()
2184 { 2270 {
2185 ThreadState::init(); 2271 ThreadState::init();
2186 s_markingStack = new CallbackStack(); 2272 s_markingStack = new CallbackStack();
2187 s_postMarkingCallbackStack = new CallbackStack(); 2273 s_postMarkingCallbackStack = new CallbackStack();
2188 s_weakCallbackStack = new CallbackStack(); 2274 s_weakCallbackStack = new CallbackStack();
2189 s_ephemeronStack = new CallbackStack(); 2275 s_ephemeronStack = new CallbackStack();
2190 s_heapDoesNotContainCache = new HeapDoesNotContainCache(); 2276 s_heapDoesNotContainCache = new HeapDoesNotContainCache();
2191 s_markingVisitor = new MarkingVisitor(s_markingStack); 2277 s_markingVisitor = new MarkingVisitor<GlobalMarking>(s_markingStack);
2192 s_freePagePool = new FreePagePool(); 2278 s_freePagePool = new FreePagePool();
2193 s_orphanedPagePool = new OrphanedPagePool(); 2279 s_orphanedPagePool = new OrphanedPagePool();
2194 s_allocatedObjectSize = 0; 2280 s_allocatedObjectSize = 0;
2195 s_allocatedSpace = 0; 2281 s_allocatedSpace = 0;
2196 s_markedObjectSize = 0; 2282 s_markedObjectSize = 0;
2197 } 2283 }
2198 2284
2199 void Heap::shutdown() 2285 void Heap::shutdown()
2200 { 2286 {
2201 s_shutdownCalled = true; 2287 s_shutdownCalled = true;
(...skipping 121 matching lines...)
2323 builder.append("\n\t"); 2409 builder.append("\n\t");
2324 builder.append(frameToName.nullableName()); 2410 builder.append(frameToName.nullableName());
2325 --framesToShow; 2411 --framesToShow;
2326 } 2412 }
2327 return builder.toString().replace("blink::", ""); 2413 return builder.toString().replace("blink::", "");
2328 } 2414 }
2329 #endif 2415 #endif
2330 2416
2331 void Heap::pushTraceCallback(CallbackStack* stack, void* object, TraceCallback callback) 2417 void Heap::pushTraceCallback(CallbackStack* stack, void* object, TraceCallback callback)
2332 { 2418 {
2333 #if ENABLE(ASSERT) 2419 ASSERT(Heap::containedInHeapOrOrphanedPage(object));
2334 {
2335 ASSERT(Heap::containedInHeapOrOrphanedPage(object));
2336 }
2337 #endif
2338 CallbackStack::Item* slot = stack->allocateEntry(); 2420 CallbackStack::Item* slot = stack->allocateEntry();
2339 *slot = CallbackStack::Item(object, callback); 2421 *slot = CallbackStack::Item(object, callback);
2340 } 2422 }
2341 2423
2342 template<CallbackInvocationMode Mode>
2343 bool Heap::popAndInvokeTraceCallback(CallbackStack* stack, Visitor* visitor) 2424 bool Heap::popAndInvokeTraceCallback(CallbackStack* stack, Visitor* visitor)
2344 { 2425 {
2345 CallbackStack::Item* item = stack->pop(); 2426 CallbackStack::Item* item = stack->pop();
2346 if (!item) 2427 if (!item)
2347 return false; 2428 return false;
2348 #if ENABLE(ASSERT)
2349 if (Mode == GlobalMarking) {
2350 BaseHeapPage* page = pageFromObject(item->object());
2351 // If you hit this ASSERT, it means that there is a dangling pointer
2352 // from a live thread heap to a dead thread heap. We must eliminate
2353 // the dangling pointer.
2354 // Release builds don't have the ASSERT, but it is OK because
2355 // release builds will crash at the following item->call
2356 // because all the entries of the orphaned heaps are zeroed out and
2357 // thus the item does not have a valid vtable.
2358 ASSERT(!page->orphaned());
2359 }
2360 #endif
2361 if (Mode == ThreadLocalMarking) {
2362 BaseHeapPage* page = pageFromObject(item->object());
2363 ASSERT(!page->orphaned());
2364 // When doing a thread local GC, don't trace an object located in
2365 // a heap of another thread.
2366 if (!page->terminating())
2367 return true;
2368 }
2369 2429
2370 #if ENABLE(GC_PROFILE_MARKING) 2430 #if ENABLE(GC_PROFILE_MARKING)
2371 visitor->setHostInfo(item->object(), classOf(item->object())); 2431 visitor->setHostInfo(item->object(), classOf(item->object()));
2372 #endif 2432 #endif
2373 item->call(visitor); 2433 item->call(visitor);
2374 return true; 2434 return true;
2375 } 2435 }
2376 2436
2377 void Heap::pushPostMarkingCallback(void* object, TraceCallback callback) 2437 void Heap::pushPostMarkingCallback(void* object, TraceCallback callback)
2378 { 2438 {
(...skipping 115 matching lines...)
2494 enterGC(); 2554 enterGC();
2495 preGC(); 2555 preGC();
2496 2556
2497 Heap::resetMarkedObjectSize(); 2557 Heap::resetMarkedObjectSize();
2498 Heap::resetAllocatedObjectSize(); 2558 Heap::resetAllocatedObjectSize();
2499 2559
2500 // 1. trace persistent roots. 2560 // 1. trace persistent roots.
2501 ThreadState::visitPersistentRoots(s_markingVisitor); 2561 ThreadState::visitPersistentRoots(s_markingVisitor);
2502 2562
2503 // 2. trace objects reachable from the persistent roots including ephemerons. 2563 // 2. trace objects reachable from the persistent roots including ephemerons.
2504 processMarkingStack<GlobalMarking>(); 2564 processMarkingStack(s_markingVisitor);
2505 2565
2506 // 3. trace objects reachable from the stack. We do this independent of the 2566 // 3. trace objects reachable from the stack. We do this independent of the
2507 // given stackState since other threads might have a different stack state. 2567 // given stackState since other threads might have a different stack state.
2508 ThreadState::visitStackRoots(s_markingVisitor); 2568 ThreadState::visitStackRoots(s_markingVisitor);
2509 2569
2510 // 4. trace objects reachable from the stack "roots" including ephemerons. 2570 // 4. trace objects reachable from the stack "roots" including ephemerons.
2511 // Only do the processing if we found a pointer to an object on one of the 2571 // Only do the processing if we found a pointer to an object on one of the
2512 // thread stacks. 2572 // thread stacks.
2513 if (lastGCWasConservative()) { 2573 if (lastGCWasConservative())
2514 processMarkingStack<GlobalMarking>(); 2574 processMarkingStack(s_markingVisitor);
2515 }
2516 2575
2517 postMarkingProcessing(); 2576 postMarkingProcessing(s_markingVisitor);
2518 globalWeakProcessing(); 2577 globalWeakProcessing(s_markingVisitor);
2519 2578
2520 // Now we can delete all orphaned pages because there are no dangling 2579 // Now we can delete all orphaned pages because there are no dangling
2521 // pointers to the orphaned pages. (If we have such dangling pointers, 2580 // pointers to the orphaned pages. (If we have such dangling pointers,
2522 // we should have crashed during marking before getting here.) 2581 // we should have crashed during marking before getting here.)
2523 orphanedPagePool()->decommitOrphanedPages(); 2582 orphanedPagePool()->decommitOrphanedPages();
2524 2583
2525 postGC(); 2584 postGC();
2526 leaveGC(); 2585 leaveGC();
2527 2586
2528 #if ENABLE(GC_PROFILE_MARKING) 2587 #if ENABLE(GC_PROFILE_MARKING)
(...skipping 10 matching lines...)
2539 ScriptForbiddenScope::exit(); 2598 ScriptForbiddenScope::exit();
2540 } 2599 }
2541 2600
2542 void Heap::collectGarbageForTerminatingThread(ThreadState* state) 2601 void Heap::collectGarbageForTerminatingThread(ThreadState* state)
2543 { 2602 {
2544 // We explicitly do not enter a safepoint while doing thread specific 2603 // We explicitly do not enter a safepoint while doing thread specific
2545 // garbage collection since we don't want to allow a global GC at the 2604 // garbage collection since we don't want to allow a global GC at the
2546 // same time as a thread local GC. 2605 // same time as a thread local GC.
2547 2606
2548 { 2607 {
2608 MarkingVisitor<ThreadLocalMarking> markingVisitor(s_markingStack);
2549 ThreadState::NoAllocationScope noAllocationScope(state); 2609 ThreadState::NoAllocationScope noAllocationScope(state);
2550 2610
2551 enterGC(); 2611 enterGC();
2552 state->preGC(); 2612 state->preGC();
2553 2613
2554 // 1. trace the thread local persistent roots. For thread local GCs we 2614 // 1. trace the thread local persistent roots. For thread local GCs we
2555 // don't trace the stack (ie. no conservative scanning) since this is 2615 // don't trace the stack (ie. no conservative scanning) since this is
2556 // only called during thread shutdown where there should be no objects 2616 // only called during thread shutdown where there should be no objects
2557 // on the stack. 2617 // on the stack.
2558 // We also assume that orphaned pages have no objects reachable from 2618 // We also assume that orphaned pages have no objects reachable from
2559 // persistent handles on other threads or CrossThreadPersistents. The 2619 // persistent handles on other threads or CrossThreadPersistents. The
2560 // only cases where this could happen is if a subsequent conservative 2620 // only cases where this could happen is if a subsequent conservative
2561 // global GC finds a "pointer" on the stack or due to a programming 2621 // global GC finds a "pointer" on the stack or due to a programming
2562 // error where an object has a dangling cross-thread pointer to an 2622 // error where an object has a dangling cross-thread pointer to an
2563 // object on this heap. 2623 // object on this heap.
2564 state->visitPersistents(s_markingVisitor); 2624 state->visitPersistents(&markingVisitor);
2565 2625
2566 // 2. trace objects reachable from the thread's persistent roots 2626 // 2. trace objects reachable from the thread's persistent roots
2567 // including ephemerons. 2627 // including ephemerons.
2568 processMarkingStack<ThreadLocalMarking>(); 2628 processMarkingStack(&markingVisitor);
2569 2629
2570 postMarkingProcessing(); 2630 postMarkingProcessing(&markingVisitor);
2571 globalWeakProcessing(); 2631 globalWeakProcessing(&markingVisitor);
2572 2632
2573 state->postGC(); 2633 state->postGC();
2574 leaveGC(); 2634 leaveGC();
2575 } 2635 }
2576 state->performPendingSweep(); 2636 state->performPendingSweep();
2577 } 2637 }
2578 2638
2579 template<CallbackInvocationMode Mode> 2639 void Heap::processMarkingStack(Visitor* markingVisitor)
2580 void Heap::processMarkingStack()
2581 { 2640 {
2582 // Ephemeron fixed point loop. 2641 // Ephemeron fixed point loop.
2583 do { 2642 do {
2584 { 2643 {
2585 // Iteratively mark all objects that are reachable from the objects 2644 // Iteratively mark all objects that are reachable from the objects
2586 // currently pushed onto the marking stack. If Mode is ThreadLocalMarking 2645 // currently pushed onto the marking stack.
2587 // don't continue tracing if the trace hits an object on another thread's
2588 // heap.
2589 TRACE_EVENT0("blink_gc", "Heap::processMarkingStackSingleThreaded"); 2646 TRACE_EVENT0("blink_gc", "Heap::processMarkingStackSingleThreaded");
2590 while (popAndInvokeTraceCallback<Mode>(s_markingStack, s_markingVisitor)) { } 2647 while (popAndInvokeTraceCallback(s_markingStack, markingVisitor)) { }
2591 } 2648 }
2592 2649
2593 { 2650 {
2594 // Mark any strong pointers that have now become reachable in ephemeron 2651 // Mark any strong pointers that have now become reachable in ephemeron
2595 // maps. 2652 // maps.
2596 TRACE_EVENT0("blink_gc", "Heap::processEphemeronStack"); 2653 TRACE_EVENT0("blink_gc", "Heap::processEphemeronStack");
2597 s_ephemeronStack->invokeEphemeronCallbacks(s_markingVisitor); 2654 s_ephemeronStack->invokeEphemeronCallbacks(markingVisitor);
2598 } 2655 }
2599 2656
2600 // Rerun loop if ephemeron processing queued more objects for tracing. 2657 // Rerun loop if ephemeron processing queued more objects for tracing.
2601 } while (!s_markingStack->isEmpty()); 2658 } while (!s_markingStack->isEmpty());
2602 } 2659 }
2603 2660
2604 void Heap::postMarkingProcessing() 2661 void Heap::postMarkingProcessing(Visitor* markingVisitor)
2605 { 2662 {
2606 TRACE_EVENT0("blink_gc", "Heap::postMarkingProcessing"); 2663 TRACE_EVENT0("blink_gc", "Heap::postMarkingProcessing");
2607 // Call post-marking callbacks including: 2664 // Call post-marking callbacks including:
2608 // 1. the ephemeronIterationDone callbacks on weak tables to do cleanup 2665 // 1. the ephemeronIterationDone callbacks on weak tables to do cleanup
2609 // (specifically to clear the queued bits for weak hash tables), and 2666 // (specifically to clear the queued bits for weak hash tables), and
2610 // 2. the markNoTracing callbacks on collection backings to mark them 2667 // 2. the markNoTracing callbacks on collection backings to mark them
2611 // if they are only reachable from their front objects. 2668 // if they are only reachable from their front objects.
2612 while (popAndInvokePostMarkingCallback(s_markingVisitor)) { } 2669 while (popAndInvokePostMarkingCallback(markingVisitor)) { }
2613 2670
2614 s_ephemeronStack->clear(); 2671 s_ephemeronStack->clear();
2615 2672
2616 // Post-marking callbacks should not trace any objects and 2673 // Post-marking callbacks should not trace any objects and
2617 // therefore the marking stack should be empty after the 2674 // therefore the marking stack should be empty after the
2618 // post-marking callbacks. 2675 // post-marking callbacks.
2619 ASSERT(s_markingStack->isEmpty()); 2676 ASSERT(s_markingStack->isEmpty());
2620 } 2677 }
2621 2678
2622 void Heap::globalWeakProcessing() 2679 void Heap::globalWeakProcessing(Visitor* markingVisitor)
2623 { 2680 {
2624 TRACE_EVENT0("blink_gc", "Heap::globalWeakProcessing"); 2681 TRACE_EVENT0("blink_gc", "Heap::globalWeakProcessing");
2625 // Call weak callbacks on objects that may now be pointing to dead 2682 // Call weak callbacks on objects that may now be pointing to dead
2626 // objects. 2683 // objects.
2627 while (popAndInvokeWeakPointerCallback(s_markingVisitor)) { } 2684 while (popAndInvokeWeakPointerCallback(markingVisitor)) { }
2628 2685
2629 // It is not permitted to trace pointers of live objects in the weak 2686 // It is not permitted to trace pointers of live objects in the weak
2630 // callback phase, so the marking stack should still be empty here. 2687 // callback phase, so the marking stack should still be empty here.
2631 ASSERT(s_markingStack->isEmpty()); 2688 ASSERT(s_markingStack->isEmpty());
2632 } 2689 }
2633 2690
2634 void Heap::collectAllGarbage() 2691 void Heap::collectAllGarbage()
2635 { 2692 {
2636 // FIXME: oilpan: we should perform a single GC and everything 2693 // FIXME: oilpan: we should perform a single GC and everything
2637 // should die. Unfortunately it is not the case for all objects 2694 // should die. Unfortunately it is not the case for all objects
(...skipping 246 matching lines...)
2884 bool Heap::s_lastGCWasConservative = false; 2941 bool Heap::s_lastGCWasConservative = false;
2885 bool Heap::s_inGC = false; 2942 bool Heap::s_inGC = false;
2886 FreePagePool* Heap::s_freePagePool; 2943 FreePagePool* Heap::s_freePagePool;
2887 OrphanedPagePool* Heap::s_orphanedPagePool; 2944 OrphanedPagePool* Heap::s_orphanedPagePool;
2888 Heap::RegionTree* Heap::s_regionTree = 0; 2945 Heap::RegionTree* Heap::s_regionTree = 0;
2889 size_t Heap::s_allocatedObjectSize = 0; 2946 size_t Heap::s_allocatedObjectSize = 0;
2890 size_t Heap::s_allocatedSpace = 0; 2947 size_t Heap::s_allocatedSpace = 0;
2891 size_t Heap::s_markedObjectSize = 0; 2948 size_t Heap::s_markedObjectSize = 0;
2892 2949
2893 } // namespace blink 2950 } // namespace blink