Chromium Code Reviews| Index: src/incremental-marking.h |
| diff --git a/src/incremental-marking.h b/src/incremental-marking.h |
| new file mode 100644 |
| index 0000000000000000000000000000000000000000..028591a411ce82255d863a610ec9dcaebcd0dfdf |
| --- /dev/null |
| +++ b/src/incremental-marking.h |
| @@ -0,0 +1,352 @@ |
| +// Copyright 2011 the V8 project authors. All rights reserved. |
|
Erik Corry
2011/02/22 12:27:19
2011
|
| +// Redistribution and use in source and binary forms, with or without |
| +// modification, are permitted provided that the following conditions are |
| +// met: |
| +// |
| +// * Redistributions of source code must retain the above copyright |
| +// notice, this list of conditions and the following disclaimer. |
| +// * Redistributions in binary form must reproduce the above |
| +// copyright notice, this list of conditions and the following |
| +// disclaimer in the documentation and/or other materials provided |
| +// with the distribution. |
| +// * Neither the name of Google Inc. nor the names of its |
| +// contributors may be used to endorse or promote products derived |
| +// from this software without specific prior written permission. |
| +// |
| +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS |
| +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT |
| +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR |
| +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT |
| +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
| +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
| +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
| +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
| +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
| +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
| +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| + |
| +#ifndef V8_INCREMENTAL_MARKING_H_ |
| +#define V8_INCREMENTAL_MARKING_H_ |
| + |
| + |
| +#include "execution.h" |
| +#include "mark-compact.h" |
| +#include "objects.h" |
| + |
| +namespace v8 { |
| +namespace internal { |
| + |
| + |
| +class IncrementalMarking : public AllStatic { |
| + public: |
| + enum State { |
| + STOPPED, |
| + MARKING, |
| + COMPLETE |
| + }; |
| + |
| + static void Start() { |
|
Erik Corry
2011/02/22 12:27:19
I think it would be cleaner to move largish functi
Vyacheslav Egorov (Chromium)
2011/02/23 14:31:46
Done.
|
| + if (FLAG_trace_incremental_marking) { |
| + PrintF("[IncrementalMarking] Start\n"); |
| + } |
| + ASSERT(FLAG_incremental_marking); |
| + ASSERT(state_ == STOPPED); |
| + state_ = MARKING; |
| + |
| + // Initialize marking stack. |
| + marking_stack_.Initialize(Heap::new_space()->FromSpaceLow(), |
| + Heap::new_space()->FromSpaceHigh()); |
| + |
| + // Clear markbits. |
| + Address new_space_top = Heap::new_space()->top(); |
| + Address new_space_bottom = Heap::new_space()->bottom(); |
| + |
| + Marking::ClearRange(new_space_bottom, |
| + static_cast<int>(new_space_top - new_space_bottom)); |
| + |
| + ClearMarkbits(); |
| + |
| +#ifdef DEBUG |
| + VerifyMarkbitsAreClean(); |
| +#endif |
| + |
| + // Mark strong roots grey. |
| + RootMarkingVisitor visitor; |
| + Heap::IterateStrongRoots(&visitor, VISIT_ONLY_STRONG); |
| + |
| + // Ready to start incremental marking. |
| + if (FLAG_trace_incremental_marking) { |
| + PrintF("[IncrementalMarking] Running\n"); |
| + } |
| + } |
| + |
| + static State state() { |
| + ASSERT(state_ == STOPPED || FLAG_incremental_marking); |
| + return state_; |
| + } |
| + |
| + static void Stop() { |
| + state_ = STOPPED; |
|
Erik Corry
2011/02/22 12:27:19
Fits on one line.
Vyacheslav Egorov (Chromium)
2011/02/23 14:31:46
Done.
|
| + } |
| + |
| + static void Hurry() { |
| + if (state() == MARKING) { |
| + if (FLAG_trace_incremental_marking) { |
| + PrintF("[IncrementalMarking] Hurry\n"); |
| + } |
| + // TODO(gc) hurry can mark objects it encounters black as mutator |
| + // was stopped. |
| + while (!marking_stack_.is_empty()) { |
| + HeapObject* obj = marking_stack_.Pop(); |
| + obj->Iterate(&visitor_); |
| + MarkBlack(obj); |
| + } |
| + state_ = COMPLETE; |
| + if (FLAG_trace_incremental_marking) { |
| + PrintF("[IncrementalMarking] Complete (hurry)\n"); |
| + } |
| + } |
| + } |
| + |
| + static void Finalize() { |
| + Hurry(); |
| + state_ = STOPPED; |
| + ASSERT(marking_stack_.is_empty()); |
| + } |
| + |
| + |
| + static void MarkingComplete() { |
| + state_ = COMPLETE; |
| + // We completed marking. |
| + if (FLAG_trace_incremental_marking) { |
| + PrintF("[IncrementalMarking] Complete (normal)\n"); |
| + } |
| + StackGuard::RequestGC(); |
| + } |
| + |
| + |
| + static const intptr_t kAllocatedThreshold = 1024; |
| + static const intptr_t kFactor = 8; |
|
Erik Corry
2011/02/22 12:27:19
Perhaps call this kAllocationMarkingFactor.
Vyacheslav Egorov (Chromium)
2011/02/23 14:31:46
Done.
|
| + |
| + static void Step(intptr_t allocated) { |
| + if (state_ == MARKING) { |
| + allocated_ += allocated; |
| + |
| + if (allocated_ >= kAllocatedThreshold) { |
| + intptr_t bytes_to_process = allocated_ * kFactor; |
| + int count = 0; |
| + double start = 0; |
| + if (FLAG_trace_incremental_marking) { |
| + PrintF("[IncrementalMarking] Marking %d bytes\n", bytes_to_process); |
| + start = OS::TimeCurrentMillis(); |
| + } |
| + while (!marking_stack_.is_empty() && bytes_to_process > 0) { |
| + HeapObject* obj = marking_stack_.Pop(); |
| + ASSERT(IsGrey(obj)); |
| + Map* map = obj->map(); |
| + int size = obj->SizeFromMap(map); |
| + bytes_to_process -= size; |
| + if (IsWhite(map)) WhiteToGrey(map); |
| + obj->IterateBody(map->instance_type(), size, &visitor_); |
| + MarkBlack(obj); |
| + count++; |
| + } |
| + if (FLAG_trace_incremental_marking) { |
| + double end = OS::TimeCurrentMillis(); |
| + PrintF("[IncrementalMarking] %d objects marked, spent %d ms\n", |
| + count, |
| + static_cast<int>(end - start)); |
| + } |
| + allocated_ = 0; |
| + if (marking_stack_.is_empty()) MarkingComplete(); |
| + } |
| + } |
| + } |
| + |
| + static void RecordWrite(HeapObject* obj, Object* value) { |
| + if (state_ != STOPPED && value->IsHeapObject()) { |
| + if (IsBlack(obj) && IsWhite(HeapObject::cast(value))) { |
| + BlackToGrey(obj); |
| + if (state_ == COMPLETE) { |
| + state_ = MARKING; |
| + if (FLAG_trace_incremental_marking) { |
| + PrintF("[IncrementalMarking] Restarting (new grey objects)\n"); |
| + } |
| + } |
| + } |
| + } |
| + } |
| + |
| + static void RecordWriteOf(HeapObject* value) { |
| + if (state_ != STOPPED) { |
| + if (IsWhite(value)) { |
| + WhiteToGrey(value); |
| + if (state_ == COMPLETE) { |
| + state_ = MARKING; |
| + if (FLAG_trace_incremental_marking) { |
| + PrintF("[IncrementalMarking] Restarting (new grey objects)\n"); |
| + } |
| + } |
| + } |
| + } |
| + } |
| + |
| + |
| + static void RecordWrites(HeapObject* obj) { |
| + if (state_ != STOPPED) { |
| + if (IsBlack(obj)) { |
| + BlackToGrey(obj); |
| + if (state_ == COMPLETE) { |
| + state_ = MARKING; |
| + if (FLAG_trace_incremental_marking) { |
| + PrintF("[IncrementalMarking] Restarting (new grey objects)\n"); |
| + } |
| + } |
| + } |
| + } |
| + } |
| + |
| + |
| + // Black markbits: 10 |
| + static inline bool IsBlack(HeapObject* obj) { |
| + return Marking::IsMarked(obj->address()); |
|
Erik Corry
2011/02/22 12:27:19
Assert that the other bit is not marked?
|
| + } |
| + |
| + // White markbits: 00 |
| + static inline bool IsWhite(HeapObject* obj) { |
| + return !Marking::IsMarked(obj->address()) && |
| + !Marking::IsMarked(obj->address() + kPointerSize); |
| + } |
| + |
| + // Grey markbits: 01 |
|
Erik Corry
2011/02/22 12:27:19
Probably should be 11 when we handle overflow.
Vyacheslav Egorov (Chromium)
2011/02/23 14:31:46
Yes. Will investigate when we start handling overf
|
| + static inline bool IsGrey(HeapObject* obj) { |
| + return Marking::IsMarked(obj->address() + kPointerSize); |
|
Erik Corry
2011/02/22 12:27:19
Assert that the other bit is not marked?
|
| + } |
| + |
| + static inline void BlackToGrey(HeapObject* obj) { |
| + ASSERT(IsBlack(obj)); |
| + Marking::ClearMark(obj->address()); |
| + Marking::SetMark(obj->address() + kPointerSize); |
| + ASSERT(IsGrey(obj)); |
| + |
| + marking_stack_.Push(obj); |
| + ASSERT(!marking_stack_.overflowed()); |
| + } |
| + |
| + static inline void WhiteToGrey(HeapObject* obj) { |
| + ASSERT(IsWhite(obj)); |
| + Marking::SetMark(obj->address() + kPointerSize); |
| + ASSERT(IsGrey(obj)); |
| + |
| + marking_stack_.Push(obj); |
| + ASSERT(!marking_stack_.overflowed()); |
| + } |
| + |
| + static inline void MarkBlack(HeapObject* obj) { |
| + Marking::SetMark(obj->address()); |
| + Marking::ClearMark(obj->address() + kPointerSize); |
| + ASSERT(IsBlack(obj)); |
| + } |
| + |
| + static inline const char* ColorStr(HeapObject* obj) { |
| + if (IsBlack(obj)) return "black"; |
| + if (IsWhite(obj)) return "white"; |
| + if (IsGrey(obj)) return "grey"; |
| + return "???"; |
|
Erik Corry
2011/02/22 12:27:19
UNREACHABLE()?
Vyacheslav Egorov (Chromium)
2011/02/23 14:31:46
Done.
|
| + } |
| + |
| + private: |
| + static intptr_t allocated_; |
| + static State state_; |
| + static MarkingStack marking_stack_; |
| + |
| + class MarkingVisitor : public ObjectVisitor { |
| + public: |
| + void VisitPointer(Object** p) { |
| + MarkObjectByPointer(p); |
| + } |
| + |
| + void VisitPointers(Object** start, Object** end) { |
| + for (Object** p = start; p < end; p++) MarkObjectByPointer(p); |
| + } |
| + |
| + private: |
| + // Mark object pointed to by p. |
| + INLINE(static void MarkObjectByPointer(Object** p)) { |
| + if ((*p)->IsHeapObject()) { |
| + HeapObject* object = HeapObject::cast(*p); |
| + if (IsWhite(object)) WhiteToGrey(object); |
| + } |
| + } |
| + }; |
| + |
| + |
| + class RootMarkingVisitor : public ObjectVisitor { |
| + public: |
| + void VisitPointer(Object** p) { |
| + MarkObjectByPointer(p); |
| + } |
| + |
| + void VisitPointers(Object** start, Object** end) { |
| + for (Object** p = start; p < end; p++) MarkObjectByPointer(p); |
| + } |
| + |
| + private: |
| + void MarkObjectByPointer(Object** p) { |
| + if (!(*p)->IsHeapObject()) return; |
| + |
| + HeapObject* object = HeapObject::cast(*p); |
| + if (!IsWhite(object)) return; |
| + |
| + Map* map = object->map(); |
| + |
| + WhiteToGrey(object); |
|
Erik Corry
2011/02/22 12:27:19
This pushes the object onto the stack. When it is
Vyacheslav Egorov (Chromium)
2011/02/23 14:31:46
Done.
|
| + |
| + if (IsWhite(map)) WhiteToGrey(map); |
|
Erik Corry
2011/02/22 12:27:19
Here we also mark the map. Aren't we doing it twi
Vyacheslav Egorov (Chromium)
2011/02/23 14:31:46
Done.
|
| + } |
| + }; |
| + |
| + |
| + static void ClearMarkbits(PagedSpace* space) { |
| + PageIterator it(space, PageIterator::PAGES_IN_USE); |
| + |
| + while (it.has_next()) { |
| + Page* p = it.next(); |
| + p->markbits()->Clear(); |
| + } |
| + } |
| + |
| + |
| + static void ClearMarkbits() { |
| + // We are sweeping code and map spaces precisely so clearing is not required. |
|
Erik Corry
2011/02/22 12:27:19
lint?
Vyacheslav Egorov (Chromium)
2011/02/23 14:31:46
Done.
|
| + ClearMarkbits(Heap::old_pointer_space()); |
| + ClearMarkbits(Heap::old_data_space()); |
| + ClearMarkbits(Heap::cell_space()); |
| + } |
| + |
| + |
| +#ifdef DEBUG |
| + static void VerifyMarkbitsAreClean(PagedSpace* space) { |
| + PageIterator it(space, PageIterator::PAGES_IN_USE); |
| + |
| + while (it.has_next()) { |
| + Page* p = it.next(); |
| + ASSERT(p->markbits()->IsClean()); |
| + } |
| + } |
| + |
| + static void VerifyMarkbitsAreClean() { |
| + VerifyMarkbitsAreClean(Heap::old_pointer_space()); |
| + VerifyMarkbitsAreClean(Heap::old_data_space()); |
| + VerifyMarkbitsAreClean(Heap::code_space()); |
| + VerifyMarkbitsAreClean(Heap::cell_space()); |
| + VerifyMarkbitsAreClean(Heap::map_space()); |
| + } |
| +#endif |
| + |
| + static MarkingVisitor visitor_; |
| +}; |
| + |
| +} } // namespace v8::internal |
| + |
| +#endif // V8_INCREMENTAL_MARKING_H_ |