| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/heap/mark-compact.h" | 5 #include "src/heap/mark-compact.h" |
| 6 | 6 |
| 7 #include "src/base/atomicops.h" | 7 #include "src/base/atomicops.h" |
| 8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
| 9 #include "src/base/sys-info.h" | 9 #include "src/base/sys-info.h" |
| 10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
| (...skipping 1959 matching lines...) |
| 1970 | 1970 |
| 1971 void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) { | 1971 void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) { |
| 1972 // Mark the heap roots including global variables, stack variables, | 1972 // Mark the heap roots including global variables, stack variables, |
| 1973 // etc., and all objects reachable from them. | 1973 // etc., and all objects reachable from them. |
| 1974 heap()->IterateStrongRoots(visitor, VISIT_ONLY_STRONG); | 1974 heap()->IterateStrongRoots(visitor, VISIT_ONLY_STRONG); |
| 1975 | 1975 |
| 1976 // Handle the string table specially. | 1976 // Handle the string table specially. |
| 1977 MarkStringTable(visitor); | 1977 MarkStringTable(visitor); |
| 1978 | 1978 |
| 1979 // There may be overflowed objects in the heap. Visit them now. | 1979 // There may be overflowed objects in the heap. Visit them now. |
| 1980 while (marking_deque_.overflowed()) { | 1980 while (marking_deque()->overflowed()) { |
| 1981 RefillMarkingDeque(); | 1981 RefillMarkingDeque(); |
| 1982 EmptyMarkingDeque(); | 1982 EmptyMarkingDeque(); |
| 1983 } | 1983 } |
| 1984 } | 1984 } |
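
As a rough illustration of the root-marking step above (invented names, not V8's real classes): the collector hands a visitor to the root iterator, and the visitor greys any unmarked object found in a root slot, deferring the transitive work to the worklist drain. A minimal sketch:

```cpp
#include <vector>

// Illustrative stand-ins for HeapObject and the marking worklist.
struct Obj {
  bool marked = false;
  std::vector<Obj*> fields;  // outgoing references
};

struct RootMarkingVisitorSketch {
  std::vector<Obj*>* worklist;  // unbounded here; V8's deque is bounded

  // Called once per root slot (global handles, stack slots, ...).
  void VisitRootPointer(Obj* o) {
    if (o == nullptr || o->marked) return;
    o->marked = true;        // white -> grey
    worklist->push_back(o);  // body is traced later by the drain loop
  }
};
```

Because V8's deque is bounded, a push can fail and set the overflow flag, which is why MarkRoots ends with the refill/drain loop shown above.
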
| 1985 | 1985 |
| 1986 | 1986 |
| 1987 void MarkCompactCollector::MarkImplicitRefGroups( | 1987 void MarkCompactCollector::MarkImplicitRefGroups( |
| 1988 MarkObjectFunction mark_object) { | 1988 MarkObjectFunction mark_object) { |
| 1989 List<ImplicitRefGroup*>* ref_groups = | 1989 List<ImplicitRefGroup*>* ref_groups = |
| 1990 isolate()->global_handles()->implicit_ref_groups(); | 1990 isolate()->global_handles()->implicit_ref_groups(); |
| (...skipping 22 matching lines...) |
| 2013 } | 2013 } |
| 2014 ref_groups->Rewind(last); | 2014 ref_groups->Rewind(last); |
| 2015 } | 2015 } |
| 2016 | 2016 |
| 2017 | 2017 |
| 2018 // Mark all objects reachable from the objects on the marking stack. | 2018 // Mark all objects reachable from the objects on the marking stack. |
| 2019 // Before: the marking stack contains zero or more heap object pointers. | 2019 // Before: the marking stack contains zero or more heap object pointers. |
| 2020 // After: the marking stack is empty, and all objects reachable from the | 2020 // After: the marking stack is empty, and all objects reachable from the |
| 2021 // marking stack have been marked, or are overflowed in the heap. | 2021 // marking stack have been marked, or are overflowed in the heap. |
| 2022 void MarkCompactCollector::EmptyMarkingDeque() { | 2022 void MarkCompactCollector::EmptyMarkingDeque() { |
| 2023 while (!marking_deque_.IsEmpty()) { | 2023 while (!marking_deque()->IsEmpty()) { |
| 2024 HeapObject* object = marking_deque_.Pop(); | 2024 HeapObject* object = marking_deque()->Pop(); |
| 2025 | 2025 |
| 2026 DCHECK(!object->IsFiller()); | 2026 DCHECK(!object->IsFiller()); |
| 2027 DCHECK(object->IsHeapObject()); | 2027 DCHECK(object->IsHeapObject()); |
| 2028 DCHECK(heap()->Contains(object)); | 2028 DCHECK(heap()->Contains(object)); |
| 2029 DCHECK(!Marking::IsWhite(ObjectMarking::MarkBitFrom(object))); | 2029 DCHECK(!Marking::IsWhite(ObjectMarking::MarkBitFrom(object))); |
| 2030 | 2030 |
| 2031 Map* map = object->map(); | 2031 Map* map = object->map(); |
| 2032 MarkBit map_mark = ObjectMarking::MarkBitFrom(map); | 2032 MarkBit map_mark = ObjectMarking::MarkBitFrom(map); |
| 2033 MarkObject(map, map_mark); | 2033 MarkObject(map, map_mark); |
| 2034 | 2034 |
| 2035 MarkCompactMarkingVisitor::IterateBody(map, object); | 2035 MarkCompactMarkingVisitor::IterateBody(map, object); |
| 2036 } | 2036 } |
| 2037 } | 2037 } |
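
A minimal, self-contained model of the drain loop above (illustrative types only, not V8's API): pop a grey object, make sure its "map" analog is kept alive, then grey its unmarked children.

```cpp
#include <vector>

// Illustrative stand-ins; V8 additionally marks each object's Map and
// dispatches the body visit through MarkCompactMarkingVisitor.
struct Obj {
  bool marked = false;
  Obj* type = nullptr;       // plays the role of the Map pointer
  std::vector<Obj*> fields;  // outgoing references
};

void EmptyWorklist(std::vector<Obj*>* worklist) {
  while (!worklist->empty()) {
    Obj* object = worklist->back();
    worklist->pop_back();

    // The object's type descriptor is itself a heap object and must be kept.
    if (object->type && !object->type->marked) {
      object->type->marked = true;
      worklist->push_back(object->type);
    }

    // Grey every unmarked object referenced from the body.
    for (Obj* child : object->fields) {
      if (child && !child->marked) {
        child->marked = true;
        worklist->push_back(child);
      }
    }
  }
}
```

In V8 the worklist is a bounded ring buffer, so a push can fail; recovering from that is what RefillMarkingDeque below handles.
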
| 2038 | 2038 |
| 2039 | 2039 |
| 2040 // Sweep the heap for overflowed objects, clear their overflow bits, and | 2040 // Sweep the heap for overflowed objects, clear their overflow bits, and |
| 2041 // push them on the marking stack. Stop early if the marking stack fills | 2041 // push them on the marking stack. Stop early if the marking stack fills |
| 2042 // before sweeping completes. If sweeping completes, there are no remaining | 2042 // before sweeping completes. If sweeping completes, there are no remaining |
| 2043 // overflowed objects in the heap so the overflow flag on the marking stack | 2043 // overflowed objects in the heap so the overflow flag on the marking stack |
| 2044 // is cleared. | 2044 // is cleared. |
| 2045 void MarkCompactCollector::RefillMarkingDeque() { | 2045 void MarkCompactCollector::RefillMarkingDeque() { |
| 2046 isolate()->CountUsage(v8::Isolate::UseCounterFeature::kMarkDequeOverflow); | 2046 isolate()->CountUsage(v8::Isolate::UseCounterFeature::kMarkDequeOverflow); |
| 2047 DCHECK(marking_deque_.overflowed()); | 2047 DCHECK(marking_deque()->overflowed()); |
| 2048 | 2048 |
| 2049 DiscoverGreyObjectsInNewSpace(); | 2049 DiscoverGreyObjectsInNewSpace(); |
| 2050 if (marking_deque_.IsFull()) return; | 2050 if (marking_deque()->IsFull()) return; |
| 2051 | 2051 |
| 2052 DiscoverGreyObjectsInSpace(heap()->old_space()); | 2052 DiscoverGreyObjectsInSpace(heap()->old_space()); |
| 2053 if (marking_deque_.IsFull()) return; | 2053 if (marking_deque()->IsFull()) return; |
| 2054 | 2054 |
| 2055 DiscoverGreyObjectsInSpace(heap()->code_space()); | 2055 DiscoverGreyObjectsInSpace(heap()->code_space()); |
| 2056 if (marking_deque_.IsFull()) return; | 2056 if (marking_deque()->IsFull()) return; |
| 2057 | 2057 |
| 2058 DiscoverGreyObjectsInSpace(heap()->map_space()); | 2058 DiscoverGreyObjectsInSpace(heap()->map_space()); |
| 2059 if (marking_deque_.IsFull()) return; | 2059 if (marking_deque()->IsFull()) return; |
| 2060 | 2060 |
| 2061 LargeObjectIterator lo_it(heap()->lo_space()); | 2061 LargeObjectIterator lo_it(heap()->lo_space()); |
| 2062 DiscoverGreyObjectsWithIterator(&lo_it); | 2062 DiscoverGreyObjectsWithIterator(&lo_it); |
| 2063 if (marking_deque_.IsFull()) return; | 2063 if (marking_deque()->IsFull()) return; |
| 2064 | 2064 |
| 2065 marking_deque_.ClearOverflowed(); | 2065 marking_deque()->ClearOverflowed(); |
| 2066 } | 2066 } |
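
The overflow-recovery idea can be sketched as follows (invented types; V8 actually scans per-space mark bitmaps, and this is called only after the worklist has been drained): objects that could not be pushed stay marked but untraced ("grey"), the refill pass rescans each space for them, stops early whenever the worklist fills again, and clears the overflow flag only once every space has been rescanned completely.

```cpp
#include <cstddef>
#include <vector>

// Illustrative model only; V8's deque is a fixed-size ring buffer and its
// colour discipline differs in detail.
enum class Color { kWhite, kGrey, kBlack };

struct Obj {
  Color color = Color::kWhite;
};

struct BoundedWorklist {
  std::vector<Obj*> items;
  size_t capacity = 0;
  bool overflowed = false;

  bool IsFull() const { return items.size() >= capacity; }
  void Push(Obj* o) {
    if (IsFull()) { overflowed = true; return; }  // object stays grey
    items.push_back(o);
  }
};

using Space = std::vector<Obj*>;

// Re-discover grey objects left behind by an earlier overflow.
// Assumes the worklist has just been drained.
void Refill(BoundedWorklist* worklist, const std::vector<Space*>& spaces) {
  for (Space* space : spaces) {
    for (Obj* o : *space) {
      if (o->color != Color::kGrey) continue;
      if (worklist->IsFull()) return;  // stop early; the flag stays set
      worklist->Push(o);
    }
  }
  // Every space was rescanned without filling up again, so no grey object
  // can still be missing from the worklist.
  worklist->overflowed = false;
}
```

ProcessMarkingDeque below then simply alternates EmptyMarkingDeque and RefillMarkingDeque until the overflow flag stays clear.
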
| 2067 | 2067 |
| 2068 | 2068 |
| 2069 // Mark all objects reachable (transitively) from objects on the marking | 2069 // Mark all objects reachable (transitively) from objects on the marking |
| 2070 // stack. Before: the marking stack contains zero or more heap object | 2070 // stack. Before: the marking stack contains zero or more heap object |
| 2071 // pointers. After: the marking stack is empty and there are no overflowed | 2071 // pointers. After: the marking stack is empty and there are no overflowed |
| 2072 // objects in the heap. | 2072 // objects in the heap. |
| 2073 void MarkCompactCollector::ProcessMarkingDeque() { | 2073 void MarkCompactCollector::ProcessMarkingDeque() { |
| 2074 EmptyMarkingDeque(); | 2074 EmptyMarkingDeque(); |
| 2075 while (marking_deque_.overflowed()) { | 2075 while (marking_deque()->overflowed()) { |
| 2076 RefillMarkingDeque(); | 2076 RefillMarkingDeque(); |
| 2077 EmptyMarkingDeque(); | 2077 EmptyMarkingDeque(); |
| 2078 } | 2078 } |
| 2079 } | 2079 } |
| 2080 | 2080 |
| 2081 // Mark all objects reachable (transitively) from objects on the marking | 2081 // Mark all objects reachable (transitively) from objects on the marking |
| 2082 // stack including references only considered in the atomic marking pause. | 2082 // stack including references only considered in the atomic marking pause. |
| 2083 void MarkCompactCollector::ProcessEphemeralMarking( | 2083 void MarkCompactCollector::ProcessEphemeralMarking( |
| 2084 ObjectVisitor* visitor, bool only_process_harmony_weak_collections) { | 2084 ObjectVisitor* visitor, bool only_process_harmony_weak_collections) { |
| 2085 DCHECK(marking_deque_.IsEmpty() && !marking_deque_.overflowed()); | 2085 DCHECK(marking_deque()->IsEmpty() && !marking_deque()->overflowed()); |
| 2086 bool work_to_do = true; | 2086 bool work_to_do = true; |
| 2087 while (work_to_do) { | 2087 while (work_to_do) { |
| 2088 if (heap_->UsingEmbedderHeapTracer()) { | 2088 if (heap_->UsingEmbedderHeapTracer()) { |
| 2089 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_WRAPPER_TRACING); | 2089 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_WRAPPER_TRACING); |
| 2090 heap_->RegisterWrappersWithEmbedderHeapTracer(); | 2090 heap_->RegisterWrappersWithEmbedderHeapTracer(); |
| 2091 heap_->embedder_heap_tracer()->AdvanceTracing( | 2091 heap_->embedder_heap_tracer()->AdvanceTracing( |
| 2092 0, EmbedderHeapTracer::AdvanceTracingActions( | 2092 0, EmbedderHeapTracer::AdvanceTracingActions( |
| 2093 EmbedderHeapTracer::ForceCompletionAction::FORCE_COMPLETION)); | 2093 EmbedderHeapTracer::ForceCompletionAction::FORCE_COMPLETION)); |
| 2094 } | 2094 } |
| 2095 if (!only_process_harmony_weak_collections) { | 2095 if (!only_process_harmony_weak_collections) { |
| 2096 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_OBJECT_GROUPING); | 2096 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_OBJECT_GROUPING); |
| 2097 isolate()->global_handles()->IterateObjectGroups( | 2097 isolate()->global_handles()->IterateObjectGroups( |
| 2098 visitor, &IsUnmarkedHeapObjectWithHeap); | 2098 visitor, &IsUnmarkedHeapObjectWithHeap); |
| 2099 MarkImplicitRefGroups(&MarkCompactMarkingVisitor::MarkObject); | 2099 MarkImplicitRefGroups(&MarkCompactMarkingVisitor::MarkObject); |
| 2100 } | 2100 } |
| 2101 ProcessWeakCollections(); | 2101 ProcessWeakCollections(); |
| 2102 work_to_do = !marking_deque_.IsEmpty(); | 2102 work_to_do = !marking_deque()->IsEmpty(); |
| 2103 ProcessMarkingDeque(); | 2103 ProcessMarkingDeque(); |
| 2104 } | 2104 } |
| 2105 } | 2105 } |
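
The loop above is a fixed-point iteration: processing weak collections (and the embedder/object-group references) can grey new objects, which in turn may make more ephemeron values reachable, so the collector repeats until a pass discovers nothing new. A simplified, self-contained model of the ephemeron part (hypothetical types, not V8's):

```cpp
#include <vector>

// Hypothetical model of ephemeron (WeakMap-style) semantics: the value is
// kept alive only if the key is alive.
struct Obj {
  bool marked = false;
  std::vector<Obj*> fields;
};

struct Ephemeron {
  Obj* key;
  Obj* value;
};

// Greys `o` and everything transitively reachable from it.
void MarkTransitively(Obj* o, std::vector<Obj*>* worklist) {
  if (o == nullptr || o->marked) return;
  o->marked = true;
  worklist->push_back(o);
  while (!worklist->empty()) {
    Obj* current = worklist->back();
    worklist->pop_back();
    for (Obj* child : current->fields) {
      if (child && !child->marked) {
        child->marked = true;
        worklist->push_back(child);
      }
    }
  }
}

// Iterate to a fixed point: marking a value can make another key reachable.
void ProcessEphemerons(const std::vector<Ephemeron>& ephemerons) {
  std::vector<Obj*> worklist;
  bool work_to_do = true;
  while (work_to_do) {
    work_to_do = false;
    for (const Ephemeron& e : ephemerons) {
      if (e.key->marked && !e.value->marked) {
        MarkTransitively(e.value, &worklist);
        work_to_do = true;
      }
    }
  }
}
```

In the real code, the object-group and implicit-ref-group passes are skipped when `only_process_harmony_weak_collections` is set; the ephemeron-style fixed point is the part modeled here.
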
| 2106 | 2106 |
| 2107 void MarkCompactCollector::ProcessTopOptimizedFrame(ObjectVisitor* visitor) { | 2107 void MarkCompactCollector::ProcessTopOptimizedFrame(ObjectVisitor* visitor) { |
| 2108 for (StackFrameIterator it(isolate(), isolate()->thread_local_top()); | 2108 for (StackFrameIterator it(isolate(), isolate()->thread_local_top()); |
| 2109 !it.done(); it.Advance()) { | 2109 !it.done(); it.Advance()) { |
| 2110 if (it.frame()->type() == StackFrame::JAVA_SCRIPT) { | 2110 if (it.frame()->type() == StackFrame::JAVA_SCRIPT) { |
| 2111 return; | 2111 return; |
| 2112 } | 2112 } |
| 2113 if (it.frame()->type() == StackFrame::OPTIMIZED) { | 2113 if (it.frame()->type() == StackFrame::OPTIMIZED) { |
| 2114 Code* code = it.frame()->LookupCode(); | 2114 Code* code = it.frame()->LookupCode(); |
| 2115 if (!code->CanDeoptAt(it.frame()->pc())) { | 2115 if (!code->CanDeoptAt(it.frame()->pc())) { |
| 2116 Code::BodyDescriptor::IterateBody(code, visitor); | 2116 Code::BodyDescriptor::IterateBody(code, visitor); |
| 2117 } | 2117 } |
| 2118 ProcessMarkingDeque(); | 2118 ProcessMarkingDeque(); |
| 2119 return; | 2119 return; |
| 2120 } | 2120 } |
| 2121 } | 2121 } |
| 2122 } | 2122 } |
| 2123 | 2123 |
| 2124 | 2124 |
| 2125 void MarkCompactCollector::EnsureMarkingDequeIsReserved() { | 2125 void MarkCompactCollector::EnsureMarkingDequeIsReserved() { |
| 2126 DCHECK(!marking_deque_.in_use()); | 2126 DCHECK(!marking_deque()->in_use()); |
| 2127 if (marking_deque_memory_ == NULL) { | 2127 if (marking_deque_memory_ == NULL) { |
| 2128 marking_deque_memory_ = new base::VirtualMemory(kMaxMarkingDequeSize); | 2128 marking_deque_memory_ = new base::VirtualMemory(kMaxMarkingDequeSize); |
| 2129 marking_deque_memory_committed_ = 0; | 2129 marking_deque_memory_committed_ = 0; |
| 2130 } | 2130 } |
| 2131 if (marking_deque_memory_ == NULL) { | 2131 if (marking_deque_memory_ == NULL) { |
| 2132 V8::FatalProcessOutOfMemory("EnsureMarkingDequeIsReserved"); | 2132 V8::FatalProcessOutOfMemory("EnsureMarkingDequeIsReserved"); |
| 2133 } | 2133 } |
| 2134 } | 2134 } |
| 2135 | 2135 |
| 2136 | 2136 |
| 2137 void MarkCompactCollector::EnsureMarkingDequeIsCommitted(size_t max_size) { | 2137 void MarkCompactCollector::EnsureMarkingDequeIsCommitted(size_t max_size) { |
| 2138 // If the marking deque is too small, we try to allocate a bigger one. | 2138 // If the marking deque is too small, we try to allocate a bigger one. |
| 2139 // If that fails, make do with a smaller one. | 2139 // If that fails, make do with a smaller one. |
| 2140 CHECK(!marking_deque_.in_use()); | 2140 CHECK(!marking_deque()->in_use()); |
| 2141 for (size_t size = max_size; size >= kMinMarkingDequeSize; size >>= 1) { | 2141 for (size_t size = max_size; size >= kMinMarkingDequeSize; size >>= 1) { |
| 2142 base::VirtualMemory* memory = marking_deque_memory_; | 2142 base::VirtualMemory* memory = marking_deque_memory_; |
| 2143 size_t currently_committed = marking_deque_memory_committed_; | 2143 size_t currently_committed = marking_deque_memory_committed_; |
| 2144 | 2144 |
| 2145 if (currently_committed == size) return; | 2145 if (currently_committed == size) return; |
| 2146 | 2146 |
| 2147 if (currently_committed > size) { | 2147 if (currently_committed > size) { |
| 2148 bool success = marking_deque_memory_->Uncommit( | 2148 bool success = marking_deque_memory_->Uncommit( |
| 2149 reinterpret_cast<Address>(marking_deque_memory_->address()) + size, | 2149 reinterpret_cast<Address>(marking_deque_memory_->address()) + size, |
| 2150 currently_committed - size); | 2150 currently_committed - size); |
| (...skipping 11 matching lines...) |
| 2162 if (success) { | 2162 if (success) { |
| 2163 marking_deque_memory_committed_ = size; | 2163 marking_deque_memory_committed_ = size; |
| 2164 return; | 2164 return; |
| 2165 } | 2165 } |
| 2166 } | 2166 } |
| 2167 V8::FatalProcessOutOfMemory("EnsureMarkingDequeIsCommitted"); | 2167 V8::FatalProcessOutOfMemory("EnsureMarkingDequeIsCommitted"); |
| 2168 } | 2168 } |
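
The commit loop above follows a common fallback pattern: try the requested size and, on failure, retry with half until a minimum is reached, treating it as fatal only if even the minimum cannot be obtained. A self-contained sketch of that shape (plain malloc stands in for committing VirtualMemory pages; all names are illustrative):

```cpp
#include <cstddef>
#include <cstdlib>

// Try to obtain `max_size` bytes of backing store, falling back to
// progressively smaller sizes. Writes the size actually obtained and
// returns nullptr only if even `min_size` fails. Caller owns the buffer.
void* AllocateDequeBacking(std::size_t max_size, std::size_t min_size,
                           std::size_t* committed_size) {
  for (std::size_t size = max_size; size >= min_size; size >>= 1) {
    void* memory = std::malloc(size);  // stand-in for committing pages
    if (memory != nullptr) {
      *committed_size = size;
      return memory;
    }
  }
  *committed_size = 0;
  return nullptr;  // caller treats this as a fatal OOM
}
```

V8's version additionally uncommits pages when shrinking and returns early when the committed size already matches the request; the sketch keeps only the fallback-halving shape.
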
| 2169 | 2169 |
| 2170 | 2170 |
| 2171 void MarkCompactCollector::InitializeMarkingDeque() { | 2171 void MarkCompactCollector::InitializeMarkingDeque() { |
| 2172 DCHECK(!marking_deque_.in_use()); | 2172 DCHECK(!marking_deque()->in_use()); |
| 2173 DCHECK(marking_deque_memory_committed_ > 0); | 2173 DCHECK(marking_deque_memory_committed_ > 0); |
| 2174 Address addr = static_cast<Address>(marking_deque_memory_->address()); | 2174 Address addr = static_cast<Address>(marking_deque_memory_->address()); |
| 2175 size_t size = marking_deque_memory_committed_; | 2175 size_t size = marking_deque_memory_committed_; |
| 2176 if (FLAG_force_marking_deque_overflows) size = 64 * kPointerSize; | 2176 if (FLAG_force_marking_deque_overflows) size = 64 * kPointerSize; |
| 2177 marking_deque_.Initialize(addr, addr + size); | 2177 marking_deque()->Initialize(addr, addr + size); |
| 2178 } | 2178 } |
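
MarkingDeque::Initialize below sizes the deque as a power of two so that wrap-around can be done with a bitmask instead of a modulo. A small, self-contained sketch of that indexing trick (illustrative only, not V8's implementation):

```cpp
#include <cstdint>

// Round a capacity down to a power of two so that `index & mask` wraps.
uint32_t RoundDownToPowerOfTwo(uint32_t n) {
  uint32_t result = 1;
  while (result * 2 <= n && result * 2 != 0) result *= 2;
  return result;
}

struct RingIndices {
  uint32_t mask;  // capacity - 1, where capacity is a power of two
  uint32_t top = 0;
  uint32_t bottom = 0;

  explicit RingIndices(uint32_t capacity_hint)
      : mask(RoundDownToPowerOfTwo(capacity_hint) - 1) {}

  void Advance(uint32_t* index) { *index = (*index + 1) & mask; }
  bool IsEmpty() const { return top == bottom; }
  bool IsFull() const { return ((top + 1) & mask) == bottom; }
};
```

Using a mask instead of `%` keeps push/pop branch-light; the cost is that usable capacity is rounded down to a power of two (and, in this sketch, one slot is sacrificed to distinguish full from empty).
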
| 2179 | 2179 |
| 2180 | 2180 |
| 2181 void MarkingDeque::Initialize(Address low, Address high) { | 2181 void MarkingDeque::Initialize(Address low, Address high) { |
| 2182 DCHECK(!in_use_); | 2182 DCHECK(!in_use_); |
| 2183 HeapObject** obj_low = reinterpret_cast<HeapObject**>(low); | 2183 HeapObject** obj_low = reinterpret_cast<HeapObject**>(low); |
| 2184 HeapObject** obj_high = reinterpret_cast<HeapObject**>(high); | 2184 HeapObject** obj_high = reinterpret_cast<HeapObject**>(high); |
| 2185 array_ = obj_low; | 2185 array_ = obj_low; |
| 2186 mask_ = base::bits::RoundDownToPowerOfTwo32( | 2186 mask_ = base::bits::RoundDownToPowerOfTwo32( |
| 2187 static_cast<uint32_t>(obj_high - obj_low)) - | 2187 static_cast<uint32_t>(obj_high - obj_low)) - |
| (...skipping 75 matching lines...) |
| 2263 PostponeInterruptsScope postpone(isolate()); | 2263 PostponeInterruptsScope postpone(isolate()); |
| 2264 | 2264 |
| 2265 { | 2265 { |
| 2266 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_FINISH_INCREMENTAL); | 2266 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_FINISH_INCREMENTAL); |
| 2267 IncrementalMarking* incremental_marking = heap_->incremental_marking(); | 2267 IncrementalMarking* incremental_marking = heap_->incremental_marking(); |
| 2268 if (was_marked_incrementally_) { | 2268 if (was_marked_incrementally_) { |
| 2269 incremental_marking->Finalize(); | 2269 incremental_marking->Finalize(); |
| 2270 } else { | 2270 } else { |
| 2271 // Abort any pending incremental activities e.g. incremental sweeping. | 2271 // Abort any pending incremental activities e.g. incremental sweeping. |
| 2272 incremental_marking->Stop(); | 2272 incremental_marking->Stop(); |
| 2273 if (marking_deque_.in_use()) { | 2273 if (marking_deque()->in_use()) { |
| 2274 marking_deque_.Uninitialize(true); | 2274 marking_deque()->Uninitialize(true); |
| 2275 } | 2275 } |
| 2276 } | 2276 } |
| 2277 } | 2277 } |
| 2278 | 2278 |
| 2279 #ifdef DEBUG | 2279 #ifdef DEBUG |
| 2280 DCHECK(state_ == PREPARE_GC); | 2280 DCHECK(state_ == PREPARE_GC); |
| 2281 state_ = MARK_LIVE_OBJECTS; | 2281 state_ = MARK_LIVE_OBJECTS; |
| 2282 #endif | 2282 #endif |
| 2283 | 2283 |
| 2284 EnsureMarkingDequeIsCommittedAndInitialize( | 2284 EnsureMarkingDequeIsCommittedAndInitialize( |
| (...skipping 1709 matching lines...) |
| 3994 // The target is always in old space, we don't have to record the slot in | 3994 // The target is always in old space, we don't have to record the slot in |
| 3995 // the old-to-new remembered set. | 3995 // the old-to-new remembered set. |
| 3996 DCHECK(!heap()->InNewSpace(target)); | 3996 DCHECK(!heap()->InNewSpace(target)); |
| 3997 RecordRelocSlot(host, &rinfo, target); | 3997 RecordRelocSlot(host, &rinfo, target); |
| 3998 } | 3998 } |
| 3999 } | 3999 } |
| 4000 } | 4000 } |
| 4001 | 4001 |
| 4002 } // namespace internal | 4002 } // namespace internal |
| 4003 } // namespace v8 | 4003 } // namespace v8 |