Chromium Code Reviews

Side by Side Diff: Source/platform/heap/Heap.cpp

Issue 765673004: Oilpan: support eager tracing of objects when marking. (Closed) Base URL: https://chromium.googlesource.com/chromium/blink.git@master
Patch Set: Tidying up | Created 6 years ago
1 /* 1 /*
2 * Copyright (C) 2013 Google Inc. All rights reserved. 2 * Copyright (C) 2013 Google Inc. All rights reserved.
3 * 3 *
4 * Redistribution and use in source and binary forms, with or without 4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions are 5 * modification, are permitted provided that the following conditions are
6 * met: 6 * met:
7 * 7 *
8 * * Redistributions of source code must retain the above copyright 8 * * Redistributions of source code must retain the above copyright
9 * notice, this list of conditions and the following disclaimer. 9 * notice, this list of conditions and the following disclaimer.
10 * * Redistributions in binary form must reproduce the above 10 * * Redistributions in binary form must reproduce the above
(...skipping 564 matching lines...)
575 if (objectFields[i] != 0) 575 if (objectFields[i] != 0)
576 return false; 576 return false;
577 } 577 }
578 return true; 578 return true;
579 } 579 }
580 #endif 580 #endif
581 581
582 template<> 582 template<>
583 void LargeObject<FinalizedHeapObjectHeader>::mark(Visitor* visitor) 583 void LargeObject<FinalizedHeapObjectHeader>::mark(Visitor* visitor)
584 { 584 {
585 if (heapObjectHeader()->hasVTable() && !vTableInitialized(payload())) { 585 FinalizedHeapObjectHeader* header = heapObjectHeader();
586 FinalizedHeapObjectHeader* header = heapObjectHeader(); 586 if (header->hasVTable() && !vTableInitialized(payload())) {
587 visitor->markNoTracing(header); 587 visitor->markNoTracing(header);
588 ASSERT(isUninitializedMemory(header->payload(), header->payloadSize())); 588 ASSERT(isUninitializedMemory(header->payload(), header->payloadSize()));
589 } else { 589 } else {
590 visitor->mark(heapObjectHeader(), heapObjectHeader()->traceCallback()); 590 visitor->mark(header, header->traceCallback());
591 } 591 }
592 } 592 }
593 593
594 template<> 594 template<>
595 void LargeObject<HeapObjectHeader>::mark(Visitor* visitor) 595 void LargeObject<HeapObjectHeader>::mark(Visitor* visitor)
596 { 596 {
597 ASSERT(gcInfo()); 597 ASSERT(gcInfo());
598 if (gcInfo()->hasVTable() && !vTableInitialized(payload())) { 598 if (gcInfo()->hasVTable() && !vTableInitialized(payload())) {
599 HeapObjectHeader* header = heapObjectHeader(); 599 HeapObjectHeader* header = heapObjectHeader();
600 visitor->markNoTracing(header); 600 visitor->markNoTracing(header);
(...skipping 1355 matching lines...)
1956 void Heap::flushHeapDoesNotContainCache() 1956 void Heap::flushHeapDoesNotContainCache()
1957 { 1957 {
1958 s_heapDoesNotContainCache->flush(); 1958 s_heapDoesNotContainCache->flush();
1959 } 1959 }
1960 1960
1961 static void markNoTracingCallback(Visitor* visitor, void* object) 1961 static void markNoTracingCallback(Visitor* visitor, void* object)
1962 { 1962 {
1963 visitor->markNoTracing(object); 1963 visitor->markNoTracing(object);
1964 } 1964 }
1965 1965
1966 enum MarkingMode {
1967 GlobalMarking,
1968 ThreadLocalMarking,
1969 };
1970
1971 template<MarkingMode Mode>
1966 class MarkingVisitor final : public Visitor { 1972 class MarkingVisitor final : public Visitor {
1967 public: 1973 public:
1968 #if ENABLE(GC_PROFILE_MARKING) 1974 #if ENABLE(GC_PROFILE_MARKING)
1969 typedef HashSet<uintptr_t> LiveObjectSet; 1975 typedef HashSet<uintptr_t> LiveObjectSet;
1970 typedef HashMap<String, LiveObjectSet> LiveObjectMap; 1976 typedef HashMap<String, LiveObjectSet> LiveObjectMap;
1971 typedef HashMap<uintptr_t, std::pair<uintptr_t, String> > ObjectGraph; 1977 typedef HashMap<uintptr_t, std::pair<uintptr_t, String> > ObjectGraph;
1972 #endif 1978 #endif
1973 1979
1974 MarkingVisitor(CallbackStack* markingStack) : m_markingStack(markingStack) 1980 explicit MarkingVisitor(CallbackStack* markingStack)
1981 : m_markingStack(markingStack)
1975 { 1982 {
1976 } 1983 }
1977 1984
1978 inline void visitHeader(HeapObjectHeader* header, const void* objectPointer, TraceCallback callback) 1985 inline void visitHeader(HeapObjectHeader* header, const void* objectPointer, TraceCallback callback)
1979 { 1986 {
1980 ASSERT(header); 1987 ASSERT(header);
1981 #if ENABLE(ASSERT)
1982 {
1983 // Check that we are not marking objects that are outside
1984 // the heap by calling Heap::contains. However we cannot
1985 // call Heap::contains when outside a GC and we call mark
1986 // when doing weakness for ephemerons. Hence we only check
1987 // when called within.
1988 ASSERT(!Heap::isInGC() || Heap::containedInHeapOrOrphanedPage(header));
1989 }
1990 #endif
1991 ASSERT(objectPointer); 1988 ASSERT(objectPointer);
1989 // Check that we are not marking objects that are outside
1990 // the heap by calling Heap::contains. However we cannot
1991 // call Heap::contains when outside a GC and we call mark
1992 // when doing weakness for ephemerons. Hence we only check
1993 // when called within.
1994 ASSERT(!Heap::isInGC() || Heap::containedInHeapOrOrphanedPage(header));
1995
1992 if (header->isMarked()) 1996 if (header->isMarked())
1993 return; 1997 return;
1994 header->mark(); 1998 header->mark();
1999
1995 #if ENABLE(GC_PROFILE_MARKING) 2000 #if ENABLE(GC_PROFILE_MARKING)
1996 MutexLocker locker(objectGraphMutex()); 2001 MutexLocker locker(objectGraphMutex());
1997 String className(classOf(objectPointer)); 2002 String className(classOf(objectPointer));
1998 { 2003 {
1999 LiveObjectMap::AddResult result = currentlyLive().add(className, LiveObjectSet()); 2004 LiveObjectMap::AddResult result = currentlyLive().add(className, LiveObjectSet());
2000 result.storedValue->value.add(reinterpret_cast<uintptr_t>(objectPointer)); 2005 result.storedValue->value.add(reinterpret_cast<uintptr_t>(objectPointer));
2001 } 2006 }
2002 ObjectGraph::AddResult result = objectGraph().add(reinterpret_cast<uintptr_t>(objectPointer), std::make_pair(reinterpret_cast<uintptr_t>(m_hostObject), m_hostName)); 2007 ObjectGraph::AddResult result = objectGraph().add(reinterpret_cast<uintptr_t>(objectPointer), std::make_pair(reinterpret_cast<uintptr_t>(m_hostObject), m_hostName));
2003 ASSERT(result.isNewEntry); 2008 ASSERT(result.isNewEntry);
2004 // fprintf(stderr, "%s[%p] -> %s[%p]\n", m_hostName.ascii().data(), m_hostObject, className.ascii().data(), objectPointer); 2009 // fprintf(stderr, "%s[%p] -> %s[%p]\n", m_hostName.ascii().data(), m_hostObject, className.ascii().data(), objectPointer);
2005 #endif 2010 #endif
2011 // If you hit this ASSERT, it means that there is a dangling pointer
2012 // from a live thread heap to a dead thread heap. We must eliminate
2013 // the dangling pointer.
2014 // Release builds don't have the ASSERT, but it is OK because
2015 // release builds will crash upon invoking the trace callback
2016 // as all the entries of the orphaned heaps are zeroed out
2017 // (=> 'objectPointer' will not have a valid vtable.)
2018 ASSERT(!pageFromObject(objectPointer)->orphaned());
2019
2020 if (Mode == ThreadLocalMarking && !objectInTerminatingThreadHeap(objectPointer))
2021 return;
2022
2006 if (callback) 2023 if (callback)
2007 Heap::pushTraceCallback(m_markingStack, const_cast<void*>(objectPointer), callback); 2024 Heap::pushTraceCallback(m_markingStack, const_cast<void*>(objectPointer), callback);
2008 } 2025 }
2009 2026
2027 // We need both HeapObjectHeader and FinalizedHeapObjectHeader versions to correctly find the payload.
2010 virtual void mark(HeapObjectHeader* header, TraceCallback callback) override 2028 virtual void mark(HeapObjectHeader* header, TraceCallback callback) override
2011 { 2029 {
2012 // We need both the HeapObjectHeader and FinalizedHeapObjectHeader
2013 // version to correctly find the payload.
2014 visitHeader(header, header->payload(), callback); 2030 visitHeader(header, header->payload(), callback);
2015 } 2031 }
2016 2032
2017 virtual void mark(FinalizedHeapObjectHeader* header, TraceCallback callback) override 2033 virtual void mark(FinalizedHeapObjectHeader* header, TraceCallback callback) override
2018 { 2034 {
2019 // We need both the HeapObjectHeader and FinalizedHeapObjectHeader
2020 // version to correctly find the payload.
2021 visitHeader(header, header->payload(), callback); 2035 visitHeader(header, header->payload(), callback);
2022 } 2036 }
2023 2037
2024 virtual void mark(const void* objectPointer, TraceCallback callback) override 2038 virtual void mark(const void* objectPointer, TraceCallback callback) override
2025 { 2039 {
2026 if (!objectPointer) 2040 if (!objectPointer)
2027 return; 2041 return;
2028 FinalizedHeapObjectHeader* header = FinalizedHeapObjectHeader::fromPayload(objectPointer); 2042 FinalizedHeapObjectHeader* header = FinalizedHeapObjectHeader::fromPayload(objectPointer);
2029 visitHeader(header, header->payload(), callback); 2043 visitHeader(header, header->payload(), callback);
2030 } 2044 }
(...skipping 18 matching lines...)
2049 { 2063 {
2050 return Heap::weakTableRegistered(closure); 2064 return Heap::weakTableRegistered(closure);
2051 } 2065 }
2052 #endif 2066 #endif
2053 2067
2054 virtual bool isMarked(const void* objectPointer) override 2068 virtual bool isMarked(const void* objectPointer) override
2055 { 2069 {
2056 return FinalizedHeapObjectHeader::fromPayload(objectPointer)->isMarked(); 2070 return FinalizedHeapObjectHeader::fromPayload(objectPointer)->isMarked();
2057 } 2071 }
2058 2072
2073 virtual bool ensureMarked(const void* objectPointer) override
2074 {
2075 if (!objectPointer)
2076 return false;
2077 #if ENABLE(ASSERT)
2078 if (isMarked(objectPointer))
2079 return false;
2080
2081 markNoTracing(objectPointer);
2082 #else
2083 // Inline what the above markNoTracing() call expands to,
2084 // so as to make sure that we do get all the benefits.
2085 FinalizedHeapObjectHeader* header =
2086 FinalizedHeapObjectHeader::fromPayload(objectPointer);
2087 if (header->isMarked())
2088 return false;
2089 header->mark();
2090 #endif
2091 if (Mode == ThreadLocalMarking && !objectInTerminatingThreadHeap(objectPointer))
2092 return false;
2093 return true;
2094 }
2095
2096 #if ENABLE(ASSERT)
2097 #define DEFINE_ENSURE_MARKED_METHOD(Type) \
2098 virtual bool ensureMarked(const Type* objectPointer) override \
2099 { \
2100 if (!objectPointer) \
2101 return false; \
2102 COMPILE_ASSERT(!NeedsAdjustAndMark<Type>::value, CanOnlyUseIsMarkedOnNonAdjustedTypes); \
2103 if (isMarked(objectPointer)) \
2104 return false; \
2105 markNoTracing(objectPointer); \
2106 if (Mode == ThreadLocalMarking && !objectInTerminatingThreadHeap(objectPointer)) \
2107 return false; \
2108 return true; \
2109 }
2110 #else
2111 #define DEFINE_ENSURE_MARKED_METHOD(Type) \
2112 virtual bool ensureMarked(const Type* objectPointer) override \
2113 { \
2114 if (!objectPointer) \
2115 return false; \
2116 HeapObjectHeader* header = \
2117 HeapObjectHeader::fromPayload(objectPointer); \
2118 if (header->isMarked()) \
2119 return false; \
2120 header->mark(); \
2121 if (Mode == ThreadLocalMarking && !objectInTerminatingThreadHeap(objectPointer)) \
2122 return false; \
2123 return true; \
2124 }
2125 #endif
2126
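The ensureMarked() overloads added above return true only when they flip an object from unmarked to marked, and, under ThreadLocalMarking, only when that object lives on the terminating thread's heap. A minimal, hypothetical usage sketch of that contract (not code from this patch; it only assumes the Visitor::ensureMarked() declaration visible in this diff):

    void traceCollectionBackingSketch(Visitor* visitor, const void* backing)
    {
        // ensureMarked() marks the backing without queueing a trace callback
        // and reports whether this call did the marking.
        if (!visitor->ensureMarked(backing))
            return; // Null, already marked, or on another thread's live heap.
        // Newly marked: the caller traces the backing's contents exactly once.
    }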
2059 // This macro defines the necessary visitor methods for typed heaps 2127 // This macro defines the necessary visitor methods for typed heaps
2060 #define DEFINE_VISITOR_METHODS(Type) \ 2128 #define DEFINE_VISITOR_METHODS(Type) \
2061 virtual void mark(const Type* objectPointer, TraceCallback callback) override \ 2129 virtual void mark(const Type* objectPointer, TraceCallback callback) override \
2062 { \ 2130 { \
2063 if (!objectPointer) \ 2131 if (!objectPointer) \
2064 return; \ 2132 return; \
2065 HeapObjectHeader* header = \ 2133 HeapObjectHeader* header = \
2066 HeapObjectHeader::fromPayload(objectPointer); \ 2134 HeapObjectHeader::fromPayload(objectPointer); \
2067 visitHeader(header, header->payload(), callback); \ 2135 visitHeader(header, header->payload(), callback); \
2068 } \ 2136 } \
2069 virtual bool isMarked(const Type* objectPointer) override \ 2137 virtual bool isMarked(const Type* objectPointer) override \
2070 { \ 2138 { \
2071 return HeapObjectHeader::fromPayload(objectPointer)->isMarked(); \ 2139 return HeapObjectHeader::fromPayload(objectPointer)->isMarked(); \
2072 } 2140 } \
2141 DEFINE_ENSURE_MARKED_METHOD(Type)
2073 2142
2074 FOR_EACH_TYPED_HEAP(DEFINE_VISITOR_METHODS) 2143 FOR_EACH_TYPED_HEAP(DEFINE_VISITOR_METHODS)
2075 #undef DEFINE_VISITOR_METHODS 2144 #undef DEFINE_VISITOR_METHODS
2076 2145
2077 #if ENABLE(GC_PROFILE_MARKING) 2146 #if ENABLE(GC_PROFILE_MARKING)
2078 void reportStats() 2147 void reportStats()
2079 { 2148 {
2080 fprintf(stderr, "\n---------- AFTER MARKING -------------------\n"); 2149 fprintf(stderr, "\n---------- AFTER MARKING -------------------\n");
2081 for (LiveObjectMap::iterator it = currentlyLive().begin(), end = currentlyLive().end(); it != end; ++it) { 2150 for (LiveObjectMap::iterator it = currentlyLive().begin(), end = currentlyLive().end(); it != end; ++it) {
2082 fprintf(stderr, "%s %u", it->key.ascii().data(), it->value.size()); 2151 fprintf(stderr, "%s %u", it->key.ascii().data(), it->value.size());
(...skipping 80 matching lines...)
2163 return graph; 2232 return graph;
2164 } 2233 }
2165 2234
2166 static HashSet<uintptr_t>& objectsToFindPath() 2235 static HashSet<uintptr_t>& objectsToFindPath()
2167 { 2236 {
2168 DEFINE_STATIC_LOCAL(HashSet<uintptr_t>, set, ()); 2237 DEFINE_STATIC_LOCAL(HashSet<uintptr_t>, set, ());
2169 return set; 2238 return set;
2170 } 2239 }
2171 #endif 2240 #endif
2172 2241
2242 static inline bool objectInTerminatingThreadHeap(const void* objectPointer)
2243 {
2244 BaseHeapPage* page = pageFromObject(objectPointer);
2245 ASSERT(!page->orphaned());
2246 // When doing a thread local GC, the marker checks if
2247 // the object resides in another thread's heap. The
2248 // object should not be traced, if it does.
2249 return page->terminating();
2250 }
2251
2173 protected: 2252 protected:
2174 virtual void registerWeakCell(void** cell, WeakPointerCallback callback) override 2253 virtual void registerWeakCell(void** cell, WeakPointerCallback callback) override
2175 { 2254 {
2176 Heap::pushWeakCellPointerCallback(cell, callback); 2255 Heap::pushWeakCellPointerCallback(cell, callback);
2177 } 2256 }
2178 2257
2179 private: 2258 private:
2180 CallbackStack* m_markingStack; 2259 CallbackStack* m_markingStack;
2181 }; 2260 };
2182 2261
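The MarkingMode template parameter lets a single visitor implementation serve both global and thread-local GCs: the objectInTerminatingThreadHeap() filter is guarded by a compile-time constant and disappears entirely when Mode is GlobalMarking. A self-contained sketch of that pattern follows; every name in it is hypothetical and only illustrates the idea, it is not Blink's API.

    #include <vector>

    enum SketchMarkingMode { SketchGlobalMarking, SketchThreadLocalMarking };

    struct SketchObject {
        bool marked = false;
        bool onTerminatingThreadHeap = false;
    };

    template<SketchMarkingMode Mode>
    class SketchMarkingVisitor {
    public:
        void mark(SketchObject* object)
        {
            if (!object || object->marked)
                return;
            object->marked = true;
            // A thread-local GC must not trace into another (still live)
            // thread's heap. Mode is a template parameter, so this branch
            // is folded away for SketchGlobalMarking.
            if (Mode == SketchThreadLocalMarking && !object->onTerminatingThreadHeap)
                return;
            m_markingStack.push_back(object);
        }

        std::vector<SketchObject*> m_markingStack;
    };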
2183 void Heap::init() 2262 void Heap::init()
2184 { 2263 {
2185 ThreadState::init(); 2264 ThreadState::init();
2186 s_markingStack = new CallbackStack(); 2265 s_markingStack = new CallbackStack();
2187 s_postMarkingCallbackStack = new CallbackStack(); 2266 s_postMarkingCallbackStack = new CallbackStack();
2188 s_weakCallbackStack = new CallbackStack(); 2267 s_weakCallbackStack = new CallbackStack();
2189 s_ephemeronStack = new CallbackStack(); 2268 s_ephemeronStack = new CallbackStack();
2190 s_heapDoesNotContainCache = new HeapDoesNotContainCache(); 2269 s_heapDoesNotContainCache = new HeapDoesNotContainCache();
2191 s_markingVisitor = new MarkingVisitor(s_markingStack); 2270 s_markingVisitor = new MarkingVisitor<GlobalMarking>(s_markingStack);
2192 s_freePagePool = new FreePagePool(); 2271 s_freePagePool = new FreePagePool();
2193 s_orphanedPagePool = new OrphanedPagePool(); 2272 s_orphanedPagePool = new OrphanedPagePool();
2194 s_allocatedObjectSize = 0; 2273 s_allocatedObjectSize = 0;
2195 s_allocatedSpace = 0; 2274 s_allocatedSpace = 0;
2196 s_markedObjectSize = 0; 2275 s_markedObjectSize = 0;
2197 } 2276 }
2198 2277
2199 void Heap::shutdown() 2278 void Heap::shutdown()
2200 { 2279 {
2201 s_shutdownCalled = true; 2280 s_shutdownCalled = true;
(...skipping 121 matching lines...)
2323 builder.append("\n\t"); 2402 builder.append("\n\t");
2324 builder.append(frameToName.nullableName()); 2403 builder.append(frameToName.nullableName());
2325 --framesToShow; 2404 --framesToShow;
2326 } 2405 }
2327 return builder.toString().replace("blink::", ""); 2406 return builder.toString().replace("blink::", "");
2328 } 2407 }
2329 #endif 2408 #endif
2330 2409
2331 void Heap::pushTraceCallback(CallbackStack* stack, void* object, TraceCallback callback) 2410 void Heap::pushTraceCallback(CallbackStack* stack, void* object, TraceCallback callback)
2332 { 2411 {
2333 #if ENABLE(ASSERT) 2412 ASSERT(Heap::containedInHeapOrOrphanedPage(object));
2334 {
2335 ASSERT(Heap::containedInHeapOrOrphanedPage(object));
2336 }
2337 #endif
2338 CallbackStack::Item* slot = stack->allocateEntry(); 2413 CallbackStack::Item* slot = stack->allocateEntry();
2339 *slot = CallbackStack::Item(object, callback); 2414 *slot = CallbackStack::Item(object, callback);
2340 } 2415 }
2341 2416
2342 template<CallbackInvocationMode Mode>
2343 bool Heap::popAndInvokeTraceCallback(CallbackStack* stack, Visitor* visitor) 2417 bool Heap::popAndInvokeTraceCallback(CallbackStack* stack, Visitor* visitor)
2344 { 2418 {
2345 CallbackStack::Item* item = stack->pop(); 2419 CallbackStack::Item* item = stack->pop();
2346 if (!item) 2420 if (!item)
2347 return false; 2421 return false;
2348 #if ENABLE(ASSERT)
2349 if (Mode == GlobalMarking) {
2350 BaseHeapPage* page = pageFromObject(item->object());
2351 // If you hit this ASSERT, it means that there is a dangling pointer
2352 // from a live thread heap to a dead thread heap. We must eliminate
2353 // the dangling pointer.
2354 // Release builds don't have the ASSERT, but it is OK because
2355 // release builds will crash at the following item->call
2356 // because all the entries of the orphaned heaps are zeroed out and
2357 // thus the item does not have a valid vtable.
2358 ASSERT(!page->orphaned());
2359 }
2360 #endif
2361 if (Mode == ThreadLocalMarking) {
2362 BaseHeapPage* page = pageFromObject(item->object());
2363 ASSERT(!page->orphaned());
2364 // When doing a thread local GC, don't trace an object located in
2365 // a heap of another thread.
2366 if (!page->terminating())
2367 return true;
2368 }
2369 2422
2370 #if ENABLE(GC_PROFILE_MARKING) 2423 #if ENABLE(GC_PROFILE_MARKING)
2371 visitor->setHostInfo(item->object(), classOf(item->object())); 2424 visitor->setHostInfo(item->object(), classOf(item->object()));
2372 #endif 2425 #endif
2373 item->call(visitor); 2426 item->call(visitor);
2374 return true; 2427 return true;
2375 } 2428 }
2376 2429
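pushTraceCallback() and popAndInvokeTraceCallback() treat the marking stack as a worklist of (object, trace callback) pairs: a popped callback traces one object and may push further pairs for anything it reaches. A simplified, self-contained sketch of that protocol, using stand-in types rather than the real CallbackStack API:

    #include <vector>

    class Visitor;
    typedef void (*TraceCallback)(Visitor*, void*);

    struct SketchTraceEntry {
        void* object;
        TraceCallback callback;
    };

    static std::vector<SketchTraceEntry> sketchMarkingStack;

    static void sketchPushTraceCallback(void* object, TraceCallback callback)
    {
        SketchTraceEntry entry = { object, callback };
        sketchMarkingStack.push_back(entry);
    }

    static bool sketchPopAndInvokeTraceCallback(Visitor* visitor)
    {
        if (sketchMarkingStack.empty())
            return false;
        SketchTraceEntry entry = sketchMarkingStack.back();
        sketchMarkingStack.pop_back();
        entry.callback(visitor, entry.object); // May push more entries.
        return true;
    }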
2377 void Heap::pushPostMarkingCallback(void* object, TraceCallback callback) 2430 void Heap::pushPostMarkingCallback(void* object, TraceCallback callback)
2378 { 2431 {
(...skipping 115 matching lines...)
2494 enterGC(); 2547 enterGC();
2495 preGC(); 2548 preGC();
2496 2549
2497 Heap::resetMarkedObjectSize(); 2550 Heap::resetMarkedObjectSize();
2498 Heap::resetAllocatedObjectSize(); 2551 Heap::resetAllocatedObjectSize();
2499 2552
2500 // 1. trace persistent roots. 2553 // 1. trace persistent roots.
2501 ThreadState::visitPersistentRoots(s_markingVisitor); 2554 ThreadState::visitPersistentRoots(s_markingVisitor);
2502 2555
2503 // 2. trace objects reachable from the persistent roots including ephemerons. 2556 // 2. trace objects reachable from the persistent roots including ephemerons.
2504 processMarkingStack<GlobalMarking>(); 2557 processMarkingStack(s_markingVisitor);
2505 2558
2506 // 3. trace objects reachable from the stack. We do this independent of the 2559 // 3. trace objects reachable from the stack. We do this independent of the
2507 // given stackState since other threads might have a different stack state. 2560 // given stackState since other threads might have a different stack state.
2508 ThreadState::visitStackRoots(s_markingVisitor); 2561 ThreadState::visitStackRoots(s_markingVisitor);
2509 2562
2510 // 4. trace objects reachable from the stack "roots" including ephemerons. 2563 // 4. trace objects reachable from the stack "roots" including ephemerons.
2511 // Only do the processing if we found a pointer to an object on one of the 2564 // Only do the processing if we found a pointer to an object on one of the
2512 // thread stacks. 2565 // thread stacks.
2513 if (lastGCWasConservative()) { 2566 if (lastGCWasConservative())
2514 processMarkingStack<GlobalMarking>(); 2567 processMarkingStack(s_markingVisitor);
2515 }
2516 2568
2517 postMarkingProcessing(); 2569 postMarkingProcessing(s_markingVisitor);
2518 globalWeakProcessing(); 2570 globalWeakProcessing(s_markingVisitor);
2519 2571
2520 // Now we can delete all orphaned pages because there are no dangling 2572 // Now we can delete all orphaned pages because there are no dangling
2521 // pointers to the orphaned pages. (If we have such dangling pointers, 2573 // pointers to the orphaned pages. (If we have such dangling pointers,
2522 // we should have crashed during marking before getting here.) 2574 // we should have crashed during marking before getting here.)
2523 orphanedPagePool()->decommitOrphanedPages(); 2575 orphanedPagePool()->decommitOrphanedPages();
2524 2576
2525 postGC(); 2577 postGC();
2526 leaveGC(); 2578 leaveGC();
2527 2579
2528 #if ENABLE(GC_PROFILE_MARKING) 2580 #if ENABLE(GC_PROFILE_MARKING)
(...skipping 10 matching lines...)
2539 ScriptForbiddenScope::exit(); 2591 ScriptForbiddenScope::exit();
2540 } 2592 }
2541 2593
2542 void Heap::collectGarbageForTerminatingThread(ThreadState* state) 2594 void Heap::collectGarbageForTerminatingThread(ThreadState* state)
2543 { 2595 {
2544 // We explicitly do not enter a safepoint while doing thread specific 2596 // We explicitly do not enter a safepoint while doing thread specific
2545 // garbage collection since we don't want to allow a global GC at the 2597 // garbage collection since we don't want to allow a global GC at the
2546 // same time as a thread local GC. 2598 // same time as a thread local GC.
2547 2599
2548 { 2600 {
2601 MarkingVisitor<ThreadLocalMarking> markingVisitor(s_markingStack);
2549 ThreadState::NoAllocationScope noAllocationScope(state); 2602 ThreadState::NoAllocationScope noAllocationScope(state);
2550 2603
2551 enterGC(); 2604 enterGC();
2552 state->preGC(); 2605 state->preGC();
2553 2606
2554 // 1. trace the thread local persistent roots. For thread local GCs we 2607 // 1. trace the thread local persistent roots. For thread local GCs we
2555 // don't trace the stack (ie. no conservative scanning) since this is 2608 // don't trace the stack (ie. no conservative scanning) since this is
2556 // only called during thread shutdown where there should be no objects 2609 // only called during thread shutdown where there should be no objects
2557 // on the stack. 2610 // on the stack.
2558 // We also assume that orphaned pages have no objects reachable from 2611 // We also assume that orphaned pages have no objects reachable from
2559 // persistent handles on other threads or CrossThreadPersistents. The 2612 // persistent handles on other threads or CrossThreadPersistents. The
2560 // only cases where this could happen is if a subsequent conservative 2613 // only cases where this could happen is if a subsequent conservative
2561 // global GC finds a "pointer" on the stack or due to a programming 2614 // global GC finds a "pointer" on the stack or due to a programming
2562 // error where an object has a dangling cross-thread pointer to an 2615 // error where an object has a dangling cross-thread pointer to an
2563 // object on this heap. 2616 // object on this heap.
2564 state->visitPersistents(s_markingVisitor); 2617 state->visitPersistents(&markingVisitor);
2565 2618
2566 // 2. trace objects reachable from the thread's persistent roots 2619 // 2. trace objects reachable from the thread's persistent roots
2567 // including ephemerons. 2620 // including ephemerons.
2568 processMarkingStack<ThreadLocalMarking>(); 2621 processMarkingStack(&markingVisitor);
2569 2622
2570 postMarkingProcessing(); 2623 postMarkingProcessing(&markingVisitor);
2571 globalWeakProcessing(); 2624 globalWeakProcessing(&markingVisitor);
2572 2625
2573 state->postGC(); 2626 state->postGC();
2574 leaveGC(); 2627 leaveGC();
2575 } 2628 }
2576 state->performPendingSweep(); 2629 state->performPendingSweep();
2577 } 2630 }
2578 2631
2579 template<CallbackInvocationMode Mode> 2632 void Heap::processMarkingStack(Visitor* markingVisitor)
2580 void Heap::processMarkingStack()
2581 { 2633 {
2582 // Ephemeron fixed point loop. 2634 // Ephemeron fixed point loop.
2583 do { 2635 do {
2584 { 2636 {
2585 // Iteratively mark all objects that are reachable from the objects 2637 // Iteratively mark all objects that are reachable from the objects
2586 // currently pushed onto the marking stack. If Mode is ThreadLocalMarking 2638 // currently pushed onto the marking stack.
2587 // don't continue tracing if the trace hits an object on another thread's
2588 // heap.
2589 TRACE_EVENT0("blink_gc", "Heap::processMarkingStackSingleThreaded"); 2639 TRACE_EVENT0("blink_gc", "Heap::processMarkingStackSingleThreaded");
2590 while (popAndInvokeTraceCallback<Mode>(s_markingStack, s_markingVisitor)) { } 2640 while (popAndInvokeTraceCallback(s_markingStack, markingVisitor)) { }
2591 } 2641 }
2592 2642
2593 { 2643 {
2594 // Mark any strong pointers that have now become reachable in ephemeron 2644 // Mark any strong pointers that have now become reachable in ephemeron
2595 // maps. 2645 // maps.
2596 TRACE_EVENT0("blink_gc", "Heap::processEphemeronStack"); 2646 TRACE_EVENT0("blink_gc", "Heap::processEphemeronStack");
2597 s_ephemeronStack->invokeEphemeronCallbacks(s_markingVisitor); 2647 s_ephemeronStack->invokeEphemeronCallbacks(markingVisitor);
2598 } 2648 }
2599 2649
2600 // Rerun loop if ephemeron processing queued more objects for tracing. 2650 // Rerun loop if ephemeron processing queued more objects for tracing.
2601 } while (!s_markingStack->isEmpty()); 2651 } while (!s_markingStack->isEmpty());
2602 } 2652 }
2603 2653
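processMarkingStack() is a fixed-point loop: drain the marking stack, run the registered ephemeron (weak table) callbacks, and repeat while the ephemeron pass keeps queueing new objects. Reusing the hypothetical helpers from the callback-stack sketch earlier, the shape of the loop is roughly:

    static void sketchProcessMarkingStack(Visitor* visitor,
                                          const std::vector<SketchTraceEntry>& ephemeronCallbacks)
    {
        do {
            // 1. Trace everything reachable from the objects queued so far.
            while (sketchPopAndInvokeTraceCallback(visitor)) { }
            // 2. Ephemeron callbacks may discover that a key is now marked
            //    and push the corresponding value onto the marking stack.
            for (size_t i = 0; i < ephemeronCallbacks.size(); ++i)
                ephemeronCallbacks[i].callback(visitor, ephemeronCallbacks[i].object);
            // 3. If step 2 queued more work, run another iteration.
        } while (!sketchMarkingStack.empty());
    }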
2604 void Heap::postMarkingProcessing() 2654 void Heap::postMarkingProcessing(Visitor* markingVisitor)
2605 { 2655 {
2606 TRACE_EVENT0("blink_gc", "Heap::postMarkingProcessing"); 2656 TRACE_EVENT0("blink_gc", "Heap::postMarkingProcessing");
2607 // Call post-marking callbacks including: 2657 // Call post-marking callbacks including:
2608 // 1. the ephemeronIterationDone callbacks on weak tables to do cleanup 2658 // 1. the ephemeronIterationDone callbacks on weak tables to do cleanup
2609 // (specifically to clear the queued bits for weak hash tables), and 2659 // (specifically to clear the queued bits for weak hash tables), and
2610 // 2. the markNoTracing callbacks on collection backings to mark them 2660 // 2. the markNoTracing callbacks on collection backings to mark them
2611 // if they are only reachable from their front objects. 2661 // if they are only reachable from their front objects.
2612 while (popAndInvokePostMarkingCallback(s_markingVisitor)) { } 2662 while (popAndInvokePostMarkingCallback(markingVisitor)) { }
2613 2663
2614 s_ephemeronStack->clear(); 2664 s_ephemeronStack->clear();
2615 2665
2616 // Post-marking callbacks should not trace any objects and 2666 // Post-marking callbacks should not trace any objects and
2617 // therefore the marking stack should be empty after the 2667 // therefore the marking stack should be empty after the
2618 // post-marking callbacks. 2668 // post-marking callbacks.
2619 ASSERT(s_markingStack->isEmpty()); 2669 ASSERT(s_markingStack->isEmpty());
2620 } 2670 }
2621 2671
2622 void Heap::globalWeakProcessing() 2672 void Heap::globalWeakProcessing(Visitor* markingVisitor)
2623 { 2673 {
2624 TRACE_EVENT0("blink_gc", "Heap::globalWeakProcessing"); 2674 TRACE_EVENT0("blink_gc", "Heap::globalWeakProcessing");
2625 // Call weak callbacks on objects that may now be pointing to dead 2675 // Call weak callbacks on objects that may now be pointing to dead
2626 // objects. 2676 // objects.
2627 while (popAndInvokeWeakPointerCallback(s_markingVisitor)) { } 2677 while (popAndInvokeWeakPointerCallback(markingVisitor)) { }
2628 2678
2629 // It is not permitted to trace pointers of live objects in the weak 2679 // It is not permitted to trace pointers of live objects in the weak
2630 // callback phase, so the marking stack should still be empty here. 2680 // callback phase, so the marking stack should still be empty here.
2631 ASSERT(s_markingStack->isEmpty()); 2681 ASSERT(s_markingStack->isEmpty());
2632 } 2682 }
2633 2683
2634 void Heap::collectAllGarbage() 2684 void Heap::collectAllGarbage()
2635 { 2685 {
2636 // FIXME: oilpan: we should perform a single GC and everything 2686 // FIXME: oilpan: we should perform a single GC and everything
2637 // should die. Unfortunately it is not the case for all objects 2687 // should die. Unfortunately it is not the case for all objects
(...skipping 246 matching lines...)
2884 bool Heap::s_lastGCWasConservative = false; 2934 bool Heap::s_lastGCWasConservative = false;
2885 bool Heap::s_inGC = false; 2935 bool Heap::s_inGC = false;
2886 FreePagePool* Heap::s_freePagePool; 2936 FreePagePool* Heap::s_freePagePool;
2887 OrphanedPagePool* Heap::s_orphanedPagePool; 2937 OrphanedPagePool* Heap::s_orphanedPagePool;
2888 Heap::RegionTree* Heap::s_regionTree = 0; 2938 Heap::RegionTree* Heap::s_regionTree = 0;
2889 size_t Heap::s_allocatedObjectSize = 0; 2939 size_t Heap::s_allocatedObjectSize = 0;
2890 size_t Heap::s_allocatedSpace = 0; 2940 size_t Heap::s_allocatedSpace = 0;
2891 size_t Heap::s_markedObjectSize = 0; 2941 size_t Heap::s_markedObjectSize = 0;
2892 2942
2893 } // namespace blink 2943 } // namespace blink