Chromium Code Reviews

Unified Diff: src/heap/heap.cc

Issue 2862563002: [heap] Pause black allocation during GCs (Closed)
Patch Set: Created 3 years, 7 months ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/heap/heap.h"

 #include "src/accessors.h"
 #include "src/api.h"
 #include "src/assembler-inl.h"
 #include "src/ast/context-slot-cache.h"
(...skipping 1481 matching lines...)

 void Heap::MinorMarkCompact() {
   DCHECK(FLAG_minor_mc);

   SetGCState(MINOR_MARK_COMPACT);
   LOG(isolate_, ResourceEvent("MinorMarkCompact", "begin"));

   TRACE_GC(tracer(), GCTracer::Scope::MC_MINOR_MC);
   AlwaysAllocateScope always_allocate(isolate());
   PauseAllocationObserversScope pause_observers(this);
+  IncrementalMarking::PauseBlackAllocationScope pause_black_allocation(
+      incremental_marking());

   minor_mark_compact_collector()->CollectGarbage();

   LOG(isolate_, ResourceEvent("MinorMarkCompact", "end"));
   SetGCState(NOT_IN_GC);
 }

 void Heap::MarkCompactEpilogue() {
   TRACE_GC(tracer(), GCTracer::Scope::MC_EPILOGUE);
   SetGCState(NOT_IN_GC);
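IncrementalMarking::PauseBlackAllocationScope is declared in src/heap/incremental-marking.h and is not visible in this file's diff. As a point of reference only, here is a minimal, self-contained sketch of the RAII pattern such a scope presumably follows: pause black allocation on construction, restore the previous state on destruction. All names and behaviour below are assumptions for illustration, not the actual V8 implementation.

#include <cassert>

// Toy stand-in for V8's IncrementalMarking; just enough state to show the
// assumed pause/resume behaviour of PauseBlackAllocationScope.
struct IncrementalMarkingToy {
  bool black_allocation = false;
};

// Sketch: black allocation is off while the scope is alive; the previous
// state is restored on destruction, so the scope is a no-op if it was
// already off.
class PauseBlackAllocationScopeSketch {
 public:
  explicit PauseBlackAllocationScopeSketch(IncrementalMarkingToy* marking)
      : marking_(marking), was_on_(marking->black_allocation) {
    marking_->black_allocation = false;  // GC-time allocations are not black
  }
  ~PauseBlackAllocationScopeSketch() {
    marking_->black_allocation = was_on_;  // resume only if it was on before
  }

 private:
  IncrementalMarkingToy* marking_;
  bool was_on_;
};

int main() {
  IncrementalMarkingToy marking;
  marking.black_allocation = true;
  {
    PauseBlackAllocationScopeSketch pause(&marking);
    assert(!marking.black_allocation);  // paused for the GC pause
  }
  assert(marking.black_allocation);  // restored afterwards
  return 0;
}

Restoring the previous state rather than unconditionally re-enabling is what allows the scope to sit unconditionally at the top of both GC pauses.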
(...skipping 158 matching lines...)
   RelocationLock relocation_lock(this);
   // There are soft limits in the allocation code, designed to trigger a mark
   // sweep collection by failing allocations. There is no sense in trying to
   // trigger one during scavenge: scavenges allocation should always succeed.
   AlwaysAllocateScope scope(isolate());

   // Bump-pointer allocations done during scavenge are not real allocations.
   // Pause the inline allocation steps.
   PauseAllocationObserversScope pause_observers(this);

+  IncrementalMarking::PauseBlackAllocationScope pause_black_allocation(
+      incremental_marking());
+
   mark_compact_collector()->sweeper().EnsureNewSpaceCompleted();

   SetGCState(SCAVENGE);

   // Implements Cheney's copying algorithm
   LOG(isolate_, ResourceEvent("scavenge", "begin"));

   // Used for updating survived_since_last_expansion_ at function end.
   size_t survived_watermark = PromotedSpaceSizeOfObjects();

(...skipping 310 matching lines...)
                               ->next_page()
                               ->area_start();
       }
     }

     // Promote and process all the to-be-promoted objects.
     {
       while (!promotion_queue()->is_empty()) {
         HeapObject* target;
         int32_t size;
-        bool was_marked_black;
-        promotion_queue()->remove(&target, &size, &was_marked_black);
+        promotion_queue()->remove(&target, &size);

         // Promoted object might be already partially visited
         // during old space pointer iteration. Thus we search specifically
         // for pointers to from semispace instead of looking for pointers
         // to new space.
         DCHECK(!target->IsMap());

-        IterateAndScavengePromotedObject(target, static_cast<int>(size),
-                                         was_marked_black);
+        IterateAndScavengePromotedObject(target, static_cast<int>(size));
       }
     }

     // Take another spin if there are now unswept objects in new space
     // (there are currently no more unswept promoted objects).
   } while (new_space_front != new_space_->top());

   return new_space_front;
 }

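Dropping was_marked_black from the promotion queue has a counterpart in the PromotionQueue declaration in src/heap/heap.h (also changed by this CL, not shown here). Purely as an illustration of what the simplification buys, a hypothetical before/after of a queue entry; the field names and layout below are assumptions, not the real declaration.

#include <cstdint>

// Hypothetical promotion-queue entry shapes; the real PromotionQueue is
// declared in src/heap/heap.h.
struct HeapObject;  // stand-in forward declaration

struct EntryBefore {
  HeapObject* target;
  int32_t size : 31;
  bool was_marked_black : 1;  // marking state had to ride through the scavenge
};

struct EntryAfter {
  HeapObject* target;
  int32_t size;  // no marking bit: with black allocation paused during the GC,
                 // a promoted object can never already be marked black
};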
(...skipping 2796 matching lines...)
       Code* code = Code::cast(Code::GetObjectFromEntryAddress(code_entry_slot));
       heap_->incremental_marking()->MarkGrey(code);
     }
   }

  private:
   Heap* heap_;
   bool record_slots_;
 };

-void Heap::IterateAndScavengePromotedObject(HeapObject* target, int size,
-                                            bool was_marked_black) {
+void Heap::IterateAndScavengePromotedObject(HeapObject* target, int size) {
   // We are not collecting slots on new space objects during mutation
   // thus we have to scan for pointers to evacuation candidates when we
   // promote objects. But we should not record any slots in non-black
   // objects. Grey object's slots would be rescanned.
   // White object might not survive until the end of collection
   // it would be a violation of the invariant to record it's slots.
   bool record_slots = false;
   if (incremental_marking()->IsCompacting()) {
     record_slots =
         ObjectMarking::IsBlack(target, MarkingState::Internal(target));
   }

   IterateAndScavengePromotedObjectsVisitor visitor(this, record_slots);
   if (target->IsJSFunction()) {
     // JSFunctions reachable through kNextFunctionLinkOffset are weak. Slots for
     // this links are recorded during processing of weak lists.
     JSFunction::BodyDescriptorWeakCode::IterateBody(target, size, &visitor);
   } else {
     target->IterateBody(target->map()->instance_type(), size, &visitor);
   }
-
-  // When black allocations is on, we have to visit not already marked black
-  // objects (in new space) promoted to black pages to keep their references
-  // alive.
-  // TODO(hpayer): Implement a special promotion visitor that incorporates
-  // regular visiting and IteratePromotedObjectPointers.
-  if (!was_marked_black) {
-    if (incremental_marking()->black_allocation()) {
-      incremental_marking()->MarkGrey(target->map());
-      incremental_marking()->IterateBlackObject(target);
-    }
-  }
 }

 void Heap::IterateRoots(RootVisitor* v, VisitMode mode) {
   IterateStrongRoots(v, mode);
   IterateWeakRoots(v, mode);
 }

 void Heap::IterateWeakRoots(RootVisitor* v, VisitMode mode) {
   v->VisitRootPointer(Root::kStringTable, reinterpret_cast<Object**>(
                           &roots_[kStringTableRootIndex]));
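The block deleted at the end of IterateAndScavengePromotedObject was the consumer of was_marked_black: when black allocation was active, an object promoted onto an already-black page had to be re-greyed so its references stayed alive. With black allocation paused for the entire GC (the scopes added earlier in this patch), a freshly promoted object is never black merely because of where it was allocated, so only the compaction-time slot-recording decision remains. A small self-contained model of that remaining rule, with assumed semantics, just to make the invariant explicit (not V8 code):

#include <cassert>

// Toy marking colours, mirroring the tri-colour scheme used by incremental
// marking (white = unvisited, grey = queued, black = fully visited).
enum class Color { kWhite, kGrey, kBlack };

// Assumed rule from the comment above: slots may only be recorded for black
// objects while the collector is compacting; grey objects will be rescanned
// and white objects may not survive the GC at all.
bool ShouldRecordSlots(bool is_compacting, Color target_color) {
  return is_compacting && target_color == Color::kBlack;
}

int main() {
  // With black allocation paused, a freshly promoted object starts out white
  // (or grey if incremental marking already reached it), never black "for
  // free", so there is no longer a was_marked_black case to special-case.
  assert(!ShouldRecordSlots(true, Color::kWhite));
  assert(!ShouldRecordSlots(true, Color::kGrey));
  assert(ShouldRecordSlots(true, Color::kBlack));
  assert(!ShouldRecordSlots(false, Color::kBlack));
  return 0;
}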
(...skipping 1508 matching lines...)
     case LO_SPACE:
       return "LO_SPACE";
     default:
       UNREACHABLE();
   }
   return NULL;
 }

 }  // namespace internal
 }  // namespace v8