Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(161)

Side by Side Diff: Source/platform/heap/Heap.cpp

Issue 765673004: Oilpan: support eager tracing of objects when marking. (Closed) Base URL: https://chromium.googlesource.com/chromium/blink.git@master
Patch Set: For eager tracing, check for cross-heap access Created 6 years ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 /* 1 /*
2 * Copyright (C) 2013 Google Inc. All rights reserved. 2 * Copyright (C) 2013 Google Inc. All rights reserved.
3 * 3 *
4 * Redistribution and use in source and binary forms, with or without 4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions are 5 * modification, are permitted provided that the following conditions are
6 * met: 6 * met:
7 * 7 *
8 * * Redistributions of source code must retain the above copyright 8 * * Redistributions of source code must retain the above copyright
9 * notice, this list of conditions and the following disclaimer. 9 * notice, this list of conditions and the following disclaimer.
10 * * Redistributions in binary form must reproduce the above 10 * * Redistributions in binary form must reproduce the above
(...skipping 563 matching lines...) Expand 10 before | Expand all | Expand 10 after
574 if (objectFields[i] != 0) 574 if (objectFields[i] != 0)
575 return false; 575 return false;
576 } 576 }
577 return true; 577 return true;
578 } 578 }
579 #endif 579 #endif
580 580
581 template<> 581 template<>
582 void LargeObject<FinalizedHeapObjectHeader>::mark(Visitor* visitor) 582 void LargeObject<FinalizedHeapObjectHeader>::mark(Visitor* visitor)
583 { 583 {
584 if (heapObjectHeader()->hasVTable() && !vTableInitialized(payload())) { 584 FinalizedHeapObjectHeader* header = heapObjectHeader();
585 FinalizedHeapObjectHeader* header = heapObjectHeader(); 585 if (header->hasVTable() && !vTableInitialized(payload())) {
586 visitor->markNoTracing(header); 586 visitor->markNoTracing(header);
587 ASSERT(isUninitializedMemory(header->payload(), header->payloadSize())); 587 ASSERT(isUninitializedMemory(header->payload(), header->payloadSize()));
588 } else { 588 } else {
589 visitor->mark(heapObjectHeader(), heapObjectHeader()->traceCallback()); 589 visitor->mark(header, header->traceCallback());
590 } 590 }
591 } 591 }
592 592
593 template<> 593 template<>
594 void LargeObject<HeapObjectHeader>::mark(Visitor* visitor) 594 void LargeObject<HeapObjectHeader>::mark(Visitor* visitor)
595 { 595 {
596 ASSERT(gcInfo()); 596 ASSERT(gcInfo());
597 if (gcInfo()->hasVTable() && !vTableInitialized(payload())) { 597 if (gcInfo()->hasVTable() && !vTableInitialized(payload())) {
598 HeapObjectHeader* header = heapObjectHeader(); 598 HeapObjectHeader* header = heapObjectHeader();
599 visitor->markNoTracing(header); 599 visitor->markNoTracing(header);
(...skipping 901 matching lines...) Expand 10 before | Expand all | Expand 10 after
1501 { 1501 {
1502 setAllocationPoint(0, 0); 1502 setAllocationPoint(0, 0);
1503 clearFreeLists(); 1503 clearFreeLists();
1504 } 1504 }
1505 1505
1506 template<typename Header> 1506 template<typename Header>
1507 void ThreadHeap<Header>::markUnmarkedObjectsDead() 1507 void ThreadHeap<Header>::markUnmarkedObjectsDead()
1508 { 1508 {
1509 ASSERT(Heap::isInGC()); 1509 ASSERT(Heap::isInGC());
1510 ASSERT(isConsistentForSweeping()); 1510 ASSERT(isConsistentForSweeping());
1511 for (HeapPage<Header>* page = m_firstPage; page; page = page->next()) { 1511 for (HeapPage<Header>* page = m_firstPage; page; page = page->next()) {
kouhei (in TOK) 2014/12/02 00:41:58 Nit: Unintended {}?
kouhei (in TOK) 2014/12/02 06:51:02 Sorry, this was unrelated to your CL. Please ignor
sof 2014/12/02 09:52:15 Your bracing preference is identical to mine thoug
1512 page->markUnmarkedObjectsDead(); 1512 page->markUnmarkedObjectsDead();
1513 } 1513 }
1515 1514 for (LargeObject<Header>* largeObject = m_firstLargeObject; largeObject; largeObject = largeObject->next()) { 1514 for (LargeObject<Header>* largeObject = m_firstLargeObject; largeObject; largeObject = largeObject->next()) {
1515 largeObject->markUnmarkedObjectsDead(); 1515 largeObject->markUnmarkedObjectsDead();
1516 } 1516 }
1517 } 1517 }
1518 1518
1519 template<typename Header> 1519 template<typename Header>
1520 void ThreadHeap<Header>::clearFreeLists() 1520 void ThreadHeap<Header>::clearFreeLists()
1521 { 1521 {
(...skipping 440 matching lines...) Expand 10 before | Expand all | Expand 10 after
1962 } 1962 }
1963 1963
1964 class MarkingVisitor final : public Visitor { 1964 class MarkingVisitor final : public Visitor {
1965 public: 1965 public:
1966 #if ENABLE(GC_PROFILE_MARKING) 1966 #if ENABLE(GC_PROFILE_MARKING)
1967 typedef HashSet<uintptr_t> LiveObjectSet; 1967 typedef HashSet<uintptr_t> LiveObjectSet;
1968 typedef HashMap<String, LiveObjectSet> LiveObjectMap; 1968 typedef HashMap<String, LiveObjectSet> LiveObjectMap;
1969 typedef HashMap<uintptr_t, std::pair<uintptr_t, String> > ObjectGraph; 1969 typedef HashMap<uintptr_t, std::pair<uintptr_t, String> > ObjectGraph;
1970 #endif 1970 #endif
1971 1971
1972 MarkingVisitor(CallbackStack* markingStack) : m_markingStack(markingStack) 1972 MarkingVisitor(CallbackStack* markingStack)
haraken 2014/12/02 06:16:04 Add explicit.
sof 2014/12/02 09:52:15 Done.
1973 : m_markingStack(markingStack)
1974 , m_checkIfNeedsTracing(false)
1973 { 1975 {
1974 } 1976 }
1975 1977
1976 inline void visitHeader(HeapObjectHeader* header, const void* objectPointer, TraceCallback callback) 1978 inline void visitHeader(HeapObjectHeader* header, const void* objectPointer, TraceCallback callback)
1977 { 1979 {
1978 ASSERT(header); 1980 ASSERT(header);
1979 #if ENABLE(ASSERT)
1980 {
1981 // Check that we are not marking objects that are outside
1982 // the heap by calling Heap::contains. However we cannot
1983 // call Heap::contains when outside a GC and we call mark
1984 // when doing weakness for ephemerons. Hence we only check
1985 // when called within.
1986 ASSERT(!Heap::isInGC() || Heap::containedInHeapOrOrphanedPage(header));
1987 }
1988 #endif
1989 ASSERT(objectPointer); 1981 ASSERT(objectPointer);
1982 // Check that we are not marking objects that are outside
1983 // the heap by calling Heap::contains. However we cannot
1984 // call Heap::contains when outside a GC and we call mark
1985 // when doing weakness for ephemerons. Hence we only check
1986 // when called within.
1987 ASSERT(!Heap::isInGC() || Heap::containedInHeapOrOrphanedPage(header));
1988
1990 if (header->isMarked()) 1989 if (header->isMarked())
1991 return; 1990 return;
1992 header->mark(); 1991 header->mark();
1992
1993 #if ENABLE(GC_PROFILE_MARKING) 1993 #if ENABLE(GC_PROFILE_MARKING)
1994 MutexLocker locker(objectGraphMutex()); 1994 MutexLocker locker(objectGraphMutex());
1995 String className(classOf(objectPointer)); 1995 String className(classOf(objectPointer));
1996 { 1996 {
1997 LiveObjectMap::AddResult result = currentlyLive().add(className, LiveObjectSet()); 1997 LiveObjectMap::AddResult result = currentlyLive().add(className, LiveObjectSet());
1998 result.storedValue->value.add(reinterpret_cast<uintptr_t>(objectPointer)); 1998 result.storedValue->value.add(reinterpret_cast<uintptr_t>(objectPointer));
1999 } 1999 }
2000 ObjectGraph::AddResult result = objectGraph().add(reinterpret_cast<uintptr_t>(objectPointer), std::make_pair(reinterpret_cast<uintptr_t>(m_hostObject), m_hostName)); 2000 ObjectGraph::AddResult result = objectGraph().add(reinterpret_cast<uintptr_t>(objectPointer), std::make_pair(reinterpret_cast<uintptr_t>(m_hostObject), m_hostName));
2001 ASSERT(result.isNewEntry); 2001 ASSERT(result.isNewEntry);
2002 // fprintf(stderr, "%s[%p] -> %s[%p]\n", m_hostName.ascii().data(), m_hostObject, className.ascii().data(), objectPointer); 2002 // fprintf(stderr, "%s[%p] -> %s[%p]\n", m_hostName.ascii().data(), m_hostObject, className.ascii().data(), objectPointer);
2003 #endif 2003 #endif
2004 if (callback) 2004 #if ENABLE(ASSERT)
2005 {
2006 BaseHeapPage* page = pageFromObject(objectPointer);
2007 // If you hit this ASSERT, it means that there is a dangling pointer
2008 // from a live thread heap to a dead thread heap. We must eliminate
2009 // the dangling pointer.
2010 // Release builds don't have the ASSERT, but it is OK because
2011 // release builds will crash at the following item->call
haraken 2014/12/02 06:16:04 Update this comment. Now we don't have "the follow
2012 // because all the entries of the orphaned heaps are zeroed out and
2013 // thus the item does not have a valid vtable.
2014 ASSERT(!page->orphaned());
2015 }
2016 #endif
2017 if (callback) {
2018 if (UNLIKELY(m_checkIfNeedsTracing && !needsTracing(objectPointer)))
2019 return;
2020
2005 Heap::pushTraceCallback(m_markingStack, const_cast<void*>(objectPointer), callback); 2021 Heap::pushTraceCallback(m_markingStack, const_cast<void*>(objectPointer), callback);
2022 }
2006 } 2023 }
2007 2024
2025 // We need both HeapObjectHeader and FinalizedHeapObjectHeader versions to correctly find the payload.
2008 virtual void mark(HeapObjectHeader* header, TraceCallback callback) override 2026 virtual void mark(HeapObjectHeader* header, TraceCallback callback) override
2009 { 2027 {
2010 // We need both the HeapObjectHeader and FinalizedHeapObjectHeader
2011 // version to correctly find the payload.
2012 visitHeader(header, header->payload(), callback); 2028 visitHeader(header, header->payload(), callback);
2013 } 2029 }
2014 2030
2015 virtual void mark(FinalizedHeapObjectHeader* header, TraceCallback callback) override 2031 virtual void mark(FinalizedHeapObjectHeader* header, TraceCallback callback) override
2016 { 2032 {
2017 // We need both the HeapObjectHeader and FinalizedHeapObjectHeader
2018 // version to correctly find the payload.
2019 visitHeader(header, header->payload(), callback); 2033 visitHeader(header, header->payload(), callback);
2020 } 2034 }
2021 2035
2022 virtual void mark(const void* objectPointer, TraceCallback callback) override 2036 virtual void mark(const void* objectPointer, TraceCallback callback) override
2023 { 2037 {
2024 if (!objectPointer) 2038 if (!objectPointer)
2025 return; 2039 return;
2026 FinalizedHeapObjectHeader* header = FinalizedHeapObjectHeader::fromPayload(objectPointer); 2040 FinalizedHeapObjectHeader* header = FinalizedHeapObjectHeader::fromPayload(objectPointer);
2027 visitHeader(header, header->payload(), callback); 2041 visitHeader(header, header->payload(), callback);
2028 } 2042 }
(...skipping 18 matching lines...) Expand all
2047 { 2061 {
2048 return Heap::weakTableRegistered(closure); 2062 return Heap::weakTableRegistered(closure);
2049 } 2063 }
2050 #endif 2064 #endif
2051 2065
2052 virtual bool isMarked(const void* objectPointer) override 2066 virtual bool isMarked(const void* objectPointer) override
2053 { 2067 {
2054 return FinalizedHeapObjectHeader::fromPayload(objectPointer)->isMarked(); 2068 return FinalizedHeapObjectHeader::fromPayload(objectPointer)->isMarked();
2055 } 2069 }
2056 2070
2071 virtual bool ensureMarked(const void* objectPointer) override
2072 {
2073 if (!objectPointer)
2074 return false;
2075 #if ENABLE(ASSERT)
2076 if (isMarked(objectPointer))
2077 return false;
2078
2079 markNoTracing(objectPointer);
2080 #else
2081 FinalizedHeapObjectHeader* header =
2082 FinalizedHeapObjectHeader::fromPayload(objectPointer);
2083 if (header->isMarked())
2084 return false;
2085 header->mark();
2086
2087 if (UNLIKELY(m_checkIfNeedsTracing && !needsTracing(objectPointer)))
haraken 2014/12/02 06:16:04 Hmm, it's a bit unfortunate we need to have this b
sof 2014/12/02 06:19:27 As I said in the previous comment, I don't see a w
sof 2014/12/02 09:52:15 Had a go at the former; acceptable?
2088 return false;
2089 #endif
2090 return true;
2091 }
2092
2093 #if ENABLE(ASSERT)
2094 #define DEFINE_ENSURE_MARKED_METHOD(Type) \
2095 virtual bool ensureMarked(const Type* objectPointer) override \
2096 { \
2097 if (!objectPointer) \
2098 return false; \
2099 COMPILE_ASSERT(!NeedsAdjustAndMark<Type>::value, CanOnlyUseIsMarkedOnNonAdjustedTypes); \
2100 if (isMarked(objectPointer)) \
2101 return false; \
2102 markNoTracing(objectPointer); \
2103 return true; \
2104 }
2105 #else
2106 #define DEFINE_ENSURE_MARKED_METHOD(Type) \
2107 virtual bool ensureMarked(const Type* objectPointer) override \
2108 { \
2109 if (!objectPointer) \
2110 return false; \
2111 HeapObjectHeader* header = \
2112 HeapObjectHeader::fromPayload(objectPointer); \
2113 if (header->isMarked()) \
2114 return false; \
2115 header->mark(); \
2116 if (UNLIKELY(m_checkIfNeedsTracing && !needsTracing(objectPointer))) \
2117 return false; \
2118 return true; \
2119 }
2120 #endif
2121
2057 // This macro defines the necessary visitor methods for typed heaps 2122 // This macro defines the necessary visitor methods for typed heaps
2058 #define DEFINE_VISITOR_METHODS(Type) \ 2123 #define DEFINE_VISITOR_METHODS(Type) \
2059 virtual void mark(const Type* objectPointer, TraceCallback callback) override \ 2124 virtual void mark(const Type* objectPointer, TraceCallback callback) override \
2060 { \ 2125 { \
2061 if (!objectPointer) \ 2126 if (!objectPointer) \
2062 return; \ 2127 return; \
2063 HeapObjectHeader* header = \ 2128 HeapObjectHeader* header = \
2064 HeapObjectHeader::fromPayload(objectPointer); \ 2129 HeapObjectHeader::fromPayload(objectPointer); \
2065 visitHeader(header, header->payload(), callback); \ 2130 visitHeader(header, header->payload(), callback); \
2066 } \ 2131 } \
2067 virtual bool isMarked(const Type* objectPointer) override \ 2132 virtual bool isMarked(const Type* objectPointer) override \
2068 { \ 2133 { \
2069 return HeapObjectHeader::fromPayload(objectPointer)->isMarked(); \ 2134 return HeapObjectHeader::fromPayload(objectPointer)->isMarked(); \
2070 } 2135 } \
2136 DEFINE_ENSURE_MARKED_METHOD(Type)
2071 2137
2072 FOR_EACH_TYPED_HEAP(DEFINE_VISITOR_METHODS) 2138 FOR_EACH_TYPED_HEAP(DEFINE_VISITOR_METHODS)
2073 #undef DEFINE_VISITOR_METHODS 2139 #undef DEFINE_VISITOR_METHODS
2074 2140
2075 #if ENABLE(GC_PROFILE_MARKING) 2141 #if ENABLE(GC_PROFILE_MARKING)
2076 void reportStats() 2142 void reportStats()
2077 { 2143 {
2078 fprintf(stderr, "\n---------- AFTER MARKING -------------------\n"); 2144 fprintf(stderr, "\n---------- AFTER MARKING -------------------\n");
2079 for (LiveObjectMap::iterator it = currentlyLive().begin(), end = currentlyLive().end(); it != end; ++it) { 2145 for (LiveObjectMap::iterator it = currentlyLive().begin(), end = currentlyLive().end(); it != end; ++it) {
2080 fprintf(stderr, "%s %u", it->key.ascii().data(), it->value.size()); 2146 fprintf(stderr, "%s %u", it->key.ascii().data(), it->value.size());
(...skipping 80 matching lines...) Expand 10 before | Expand all | Expand 10 after
2161 return graph; 2227 return graph;
2162 } 2228 }
2163 2229
2164 static HashSet<uintptr_t>& objectsToFindPath() 2230 static HashSet<uintptr_t>& objectsToFindPath()
2165 { 2231 {
2166 DEFINE_STATIC_LOCAL(HashSet<uintptr_t>, set, ()); 2232 DEFINE_STATIC_LOCAL(HashSet<uintptr_t>, set, ());
2167 return set; 2233 return set;
2168 } 2234 }
2169 #endif 2235 #endif
2170 2236
2237 static inline bool needsTracing(const void* objectPointer)
2238 {
2239 BaseHeapPage* page = pageFromObject(objectPointer);
2240 ASSERT(!page->orphaned());
2241 // When doing a thread local GC, the marker checks if
2242 // the object resides in another thread's heap. The
2243 // object should not be traced, if it does.
2244 if (!page->terminating())
2245 return false;
2246
2247 return true;
2248 }
2249
2171 protected: 2250 protected:
2172 virtual void registerWeakCell(void** cell, WeakPointerCallback callback) override 2251 virtual void registerWeakCell(void** cell, WeakPointerCallback callback) override
2173 { 2252 {
2174 Heap::pushWeakCellPointerCallback(cell, callback); 2253 Heap::pushWeakCellPointerCallback(cell, callback);
2175 } 2254 }
2176 2255
2177 private: 2256 private:
2257 friend class ThreadLocalMarkingScope;
2258
2178 CallbackStack* m_markingStack; 2259 CallbackStack* m_markingStack;
2260 bool m_checkIfNeedsTracing;
2261 };
2262
2263 class ThreadLocalMarkingScope {
2264 STACK_ALLOCATED();
2265 public:
2266 ThreadLocalMarkingScope(MarkingVisitor* visitor)
2267 : m_visitor(visitor)
2268 {
2269 ASSERT(m_visitor);
2270 m_visitor->m_checkIfNeedsTracing = true;
2271 }
2272
2273 ~ThreadLocalMarkingScope()
2274 {
2275 m_visitor->m_checkIfNeedsTracing = false;
2276 }
2277
2278 private:
2279 MarkingVisitor* m_visitor;
2179 }; 2280 };
2180 2281
2181 void Heap::init() 2282 void Heap::init()
2182 { 2283 {
2183 ThreadState::init(); 2284 ThreadState::init();
2184 s_markingStack = new CallbackStack(); 2285 s_markingStack = new CallbackStack();
2185 s_postMarkingCallbackStack = new CallbackStack(); 2286 s_postMarkingCallbackStack = new CallbackStack();
2186 s_weakCallbackStack = new CallbackStack(); 2287 s_weakCallbackStack = new CallbackStack();
2187 s_ephemeronStack = new CallbackStack(); 2288 s_ephemeronStack = new CallbackStack();
2188 s_heapDoesNotContainCache = new HeapDoesNotContainCache(); 2289 s_heapDoesNotContainCache = new HeapDoesNotContainCache();
(...skipping 132 matching lines...) Expand 10 before | Expand all | Expand 10 after
2321 builder.append("\n\t"); 2422 builder.append("\n\t");
2322 builder.append(frameToName.nullableName()); 2423 builder.append(frameToName.nullableName());
2323 --framesToShow; 2424 --framesToShow;
2324 } 2425 }
2325 return builder.toString().replace("blink::", ""); 2426 return builder.toString().replace("blink::", "");
2326 } 2427 }
2327 #endif 2428 #endif
2328 2429
2329 void Heap::pushTraceCallback(CallbackStack* stack, void* object, TraceCallback callback) 2430 void Heap::pushTraceCallback(CallbackStack* stack, void* object, TraceCallback callback)
2330 { 2431 {
2331 #if ENABLE(ASSERT) 2432 ASSERT(Heap::containedInHeapOrOrphanedPage(object));
2332 {
2333 ASSERT(Heap::containedInHeapOrOrphanedPage(object));
2334 }
2335 #endif
2336 CallbackStack::Item* slot = stack->allocateEntry(); 2433 CallbackStack::Item* slot = stack->allocateEntry();
2337 *slot = CallbackStack::Item(object, callback); 2434 *slot = CallbackStack::Item(object, callback);
2338 } 2435 }
2339 2436
2340 template<CallbackInvocationMode Mode>
2341 bool Heap::popAndInvokeTraceCallback(CallbackStack* stack, Visitor* visitor) 2437 bool Heap::popAndInvokeTraceCallback(CallbackStack* stack, Visitor* visitor)
2342 { 2438 {
2343 CallbackStack::Item* item = stack->pop(); 2439 CallbackStack::Item* item = stack->pop();
2344 if (!item) 2440 if (!item)
2345 return false; 2441 return false;
2346 #if ENABLE(ASSERT)
2347 if (Mode == GlobalMarking) {
2348 BaseHeapPage* page = pageFromObject(item->object());
2349 // If you hit this ASSERT, it means that there is a dangling pointer
2350 // from a live thread heap to a dead thread heap. We must eliminate
2351 // the dangling pointer.
2352 // Release builds don't have the ASSERT, but it is OK because
2353 // release builds will crash at the following item->call
2354 // because all the entries of the orphaned heaps are zeroed out and
2355 // thus the item does not have a valid vtable.
2356 ASSERT(!page->orphaned());
2357 }
2358 #endif
2359 if (Mode == ThreadLocalMarking) {
2360 BaseHeapPage* page = pageFromObject(item->object());
2361 ASSERT(!page->orphaned());
2362 // When doing a thread local GC, don't trace an object located in
2363 // a heap of another thread.
2364 if (!page->terminating())
2365 return true;
2366 }
2367 2442
2368 #if ENABLE(GC_PROFILE_MARKING) 2443 #if ENABLE(GC_PROFILE_MARKING)
2369 visitor->setHostInfo(item->object(), classOf(item->object())); 2444 visitor->setHostInfo(item->object(), classOf(item->object()));
2370 #endif 2445 #endif
2371 item->call(visitor); 2446 item->call(visitor);
2372 return true; 2447 return true;
2373 } 2448 }
2374 2449
2375 void Heap::pushPostMarkingCallback(void* object, TraceCallback callback) 2450 void Heap::pushPostMarkingCallback(void* object, TraceCallback callback)
2376 { 2451 {
(...skipping 115 matching lines...) Expand 10 before | Expand all | Expand 10 after
2492 enterGC(); 2567 enterGC();
2493 preGC(); 2568 preGC();
2494 2569
2495 Heap::resetMarkedObjectSize(); 2570 Heap::resetMarkedObjectSize();
2496 Heap::resetAllocatedObjectSize(); 2571 Heap::resetAllocatedObjectSize();
2497 2572
2498 // 1. trace persistent roots. 2573 // 1. trace persistent roots.
2499 ThreadState::visitPersistentRoots(s_markingVisitor); 2574 ThreadState::visitPersistentRoots(s_markingVisitor);
2500 2575
2501 // 2. trace objects reachable from the persistent roots including ephemerons. 2576 // 2. trace objects reachable from the persistent roots including ephemerons.
2502 processMarkingStack<GlobalMarking>(); 2577 processMarkingStack();
2503 2578
2504 // 3. trace objects reachable from the stack. We do this independent of the 2579 // 3. trace objects reachable from the stack. We do this independent of the
2505 // given stackState since other threads might have a different stack state. 2580 // given stackState since other threads might have a different stack state.
2506 ThreadState::visitStackRoots(s_markingVisitor); 2581 ThreadState::visitStackRoots(s_markingVisitor);
2507 2582
2508 // 4. trace objects reachable from the stack "roots" including ephemerons. 2583 // 4. trace objects reachable from the stack "roots" including ephemerons.
2509 // Only do the processing if we found a pointer to an object on one of the 2584 // Only do the processing if we found a pointer to an object on one of the
2510 // thread stacks. 2585 // thread stacks.
2511 if (lastGCWasConservative()) { 2586 if (lastGCWasConservative())
2512 processMarkingStack<GlobalMarking>(); 2587 processMarkingStack();
2513 }
2514 2588
2515 postMarkingProcessing(); 2589 postMarkingProcessing();
2516 globalWeakProcessing(); 2590 globalWeakProcessing();
2517 2591
2518 // Now we can delete all orphaned pages because there are no dangling 2592 // Now we can delete all orphaned pages because there are no dangling
2519 // pointers to the orphaned pages. (If we have such dangling pointers, 2593 // pointers to the orphaned pages. (If we have such dangling pointers,
2520 // we should have crashed during marking before getting here.) 2594 // we should have crashed during marking before getting here.)
2521 orphanedPagePool()->decommitOrphanedPages(); 2595 orphanedPagePool()->decommitOrphanedPages();
2522 2596
2523 postGC(); 2597 postGC();
(...skipping 18 matching lines...) Expand all
2542 // We explicitly do not enter a safepoint while doing thread specific 2616 // We explicitly do not enter a safepoint while doing thread specific
2543 // garbage collection since we don't want to allow a global GC at the 2617 // garbage collection since we don't want to allow a global GC at the
2544 // same time as a thread local GC. 2618 // same time as a thread local GC.
2545 2619
2546 { 2620 {
2547 NoAllocationScope<AnyThread> noAllocationScope; 2621 NoAllocationScope<AnyThread> noAllocationScope;
2548 2622
2549 enterGC(); 2623 enterGC();
2550 state->preGC(); 2624 state->preGC();
2551 2625
2552 // 1. trace the thread local persistent roots. For thread local GCs we 2626 {
2553 // don't trace the stack (ie. no conservative scanning) since this is 2627 ThreadLocalMarkingScope markingScope(static_cast<MarkingVisitor*>(s_ markingVisitor));
2554 // only called during thread shutdown where there should be no objects
2555 // on the stack.
2556 // We also assume that orphaned pages have no objects reachable from
2557 // persistent handles on other threads or CrossThreadPersistents. The
2558 // only cases where this could happen is if a subsequent conservative
2559 // global GC finds a "pointer" on the stack or due to a programming
2560 // error where an object has a dangling cross-thread pointer to an
2561 // object on this heap.
2562 state->visitPersistents(s_markingVisitor);
2563 2628
2564 // 2. trace objects reachable from the thread's persistent roots 2629 // 1. trace the thread local persistent roots. For thread local GCs we
2565 // including ephemerons. 2630 // don't trace the stack (ie. no conservative scanning) since this i s
2566 processMarkingStack<ThreadLocalMarking>(); 2631 // only called during thread shutdown where there should be no objec ts
2632 // on the stack.
2633 // We also assume that orphaned pages have no objects reachable from
2634 // persistent handles on other threads or CrossThreadPersistents. Th e
2635 // only cases where this could happen is if a subsequent conservativ e
2636 // global GC finds a "pointer" on the stack or due to a programming
2637 // error where an object has a dangling cross-thread pointer to an
2638 // object on this heap.
2639 state->visitPersistents(s_markingVisitor);
2640
2641 // 2. trace objects reachable from the thread's persistent roots
2642 // including ephemerons.
2643 processMarkingStack();
2644 }
2567 2645
2568 postMarkingProcessing(); 2646 postMarkingProcessing();
2569 globalWeakProcessing(); 2647 globalWeakProcessing();
2570 2648
2571 state->postGC(); 2649 state->postGC();
2572 leaveGC(); 2650 leaveGC();
2573 } 2651 }
2574 state->performPendingSweep(); 2652 state->performPendingSweep();
2575 } 2653 }
2576 2654
2577 template<CallbackInvocationMode Mode>
2578 void Heap::processMarkingStack() 2655 void Heap::processMarkingStack()
2579 { 2656 {
2580 // Ephemeron fixed point loop. 2657 // Ephemeron fixed point loop.
2581 do { 2658 do {
2582 { 2659 {
2583 // Iteratively mark all objects that are reachable from the objects 2660 // Iteratively mark all objects that are reachable from the objects
2584 // currently pushed onto the marking stack. If Mode is ThreadLocalMarking 2661 // currently pushed onto the marking stack.
2585 // don't continue tracing if the trace hits an object on another thread's
2586 // heap.
2587 TRACE_EVENT0("blink_gc", "Heap::processMarkingStackSingleThreaded"); 2662 TRACE_EVENT0("blink_gc", "Heap::processMarkingStackSingleThreaded");
2588 while (popAndInvokeTraceCallback<Mode>(s_markingStack, s_markingVisitor)) { } 2663 while (popAndInvokeTraceCallback(s_markingStack, s_markingVisitor)) { }
2589 } 2664 }
2590 2665
2591 { 2666 {
2592 // Mark any strong pointers that have now become reachable in ephemeron 2667 // Mark any strong pointers that have now become reachable in ephemeron
2593 // maps. 2668 // maps.
2594 TRACE_EVENT0("blink_gc", "Heap::processEphemeronStack"); 2669 TRACE_EVENT0("blink_gc", "Heap::processEphemeronStack");
2595 s_ephemeronStack->invokeEphemeronCallbacks(s_markingVisitor); 2670 s_ephemeronStack->invokeEphemeronCallbacks(s_markingVisitor);
2596 } 2671 }
2597 2672
2598 // Rerun loop if ephemeron processing queued more objects for tracing. 2673 // Rerun loop if ephemeron processing queued more objects for tracing.
(...skipping 245 matching lines...) Expand 10 before | Expand all | Expand 10 after
2844 bool Heap::s_lastGCWasConservative = false; 2919 bool Heap::s_lastGCWasConservative = false;
2845 bool Heap::s_inGC = false; 2920 bool Heap::s_inGC = false;
2846 FreePagePool* Heap::s_freePagePool; 2921 FreePagePool* Heap::s_freePagePool;
2847 OrphanedPagePool* Heap::s_orphanedPagePool; 2922 OrphanedPagePool* Heap::s_orphanedPagePool;
2848 Heap::RegionTree* Heap::s_regionTree = 0; 2923 Heap::RegionTree* Heap::s_regionTree = 0;
2849 size_t Heap::s_allocatedObjectSize = 0; 2924 size_t Heap::s_allocatedObjectSize = 0;
2850 size_t Heap::s_allocatedSpace = 0; 2925 size_t Heap::s_allocatedSpace = 0;
2851 size_t Heap::s_markedObjectSize = 0; 2926 size_t Heap::s_markedObjectSize = 0;
2852 2927
2853 } // namespace blink 2928 } // namespace blink
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698