Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(1)

Side by Side Diff: src/heap/heap.cc

Issue 1988623002: Ensure black and gray objects are kept around by scavenger (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Fix for black pages Created 4 years, 7 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/heap/heap.h" 5 #include "src/heap/heap.h"
6 6
7 #include "src/accessors.h" 7 #include "src/accessors.h"
8 #include "src/api.h" 8 #include "src/api.h"
9 #include "src/ast/scopeinfo.h" 9 #include "src/ast/scopeinfo.h"
10 #include "src/base/bits.h" 10 #include "src/base/bits.h"
(...skipping 1677 matching lines...) Expand 10 before | Expand all | Expand 10 after
1688 if (collector->is_code_flushing_enabled()) { 1688 if (collector->is_code_flushing_enabled()) {
1689 collector->code_flusher()->IteratePointersToFromSpace(&scavenge_visitor); 1689 collector->code_flusher()->IteratePointersToFromSpace(&scavenge_visitor);
1690 } 1690 }
1691 } 1691 }
1692 1692
1693 { 1693 {
1694 TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_SEMISPACE); 1694 TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_SEMISPACE);
1695 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); 1695 new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
1696 } 1696 }
1697 1697
1698 {
 1699     // Scavenge objects marked black or grey by the MarkCompact collector.
Hannes Payer (out of office) 2016/05/20 07:34:41 Scavenge objects...
Marcel Hlopko 2016/05/20 14:31:41 Done.
1700 TRACE_GC(tracer(),
1701 GCTracer::Scope::SCAVENGER_OBJECTS_MARKED_BY_MARK_COMPACT);
1702 if (incremental_marking()->IsMarking()) {
1703 IterateMarkedForScavenger(&scavenge_visitor);
1704 new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
1705 }
1706 }
1707
1698 if (FLAG_scavenge_reclaim_unmodified_objects) { 1708 if (FLAG_scavenge_reclaim_unmodified_objects) {
1699 isolate()->global_handles()->MarkNewSpaceWeakUnmodifiedObjectsPending( 1709 isolate()->global_handles()->MarkNewSpaceWeakUnmodifiedObjectsPending(
1700 &IsUnscavengedHeapObject); 1710 &IsUnscavengedHeapObject);
1701 1711
1702 isolate()->global_handles()->IterateNewSpaceWeakUnmodifiedRoots( 1712 isolate()->global_handles()->IterateNewSpaceWeakUnmodifiedRoots(
1703 &scavenge_visitor); 1713 &scavenge_visitor);
1704 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); 1714 new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
1705 } else { 1715 } else {
1706 TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_OBJECT_GROUPS); 1716 TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_OBJECT_GROUPS);
1707 while (isolate()->global_handles()->IterateObjectGroups( 1717 while (isolate()->global_handles()->IterateObjectGroups(
(...skipping 1337 matching lines...) Expand 10 before | Expand all | Expand 10 after
3045 reinterpret_cast<Map*>(root(kTwoPointerFillerMapRootIndex))); 3055 reinterpret_cast<Map*>(root(kTwoPointerFillerMapRootIndex)));
3046 } else { 3056 } else {
3047 DCHECK_GT(size, 2 * kPointerSize); 3057 DCHECK_GT(size, 2 * kPointerSize);
3048 filler->set_map_no_write_barrier( 3058 filler->set_map_no_write_barrier(
3049 reinterpret_cast<Map*>(root(kFreeSpaceMapRootIndex))); 3059 reinterpret_cast<Map*>(root(kFreeSpaceMapRootIndex)));
3050 FreeSpace::cast(filler)->nobarrier_set_size(size); 3060 FreeSpace::cast(filler)->nobarrier_set_size(size);
3051 } 3061 }
3052 if (mode == ClearRecordedSlots::kYes) { 3062 if (mode == ClearRecordedSlots::kYes) {
3053 ClearRecordedSlotRange(addr, addr + size); 3063 ClearRecordedSlotRange(addr, addr + size);
3054 } 3064 }
3065
3055 // At this point, we may be deserializing the heap from a snapshot, and 3066 // At this point, we may be deserializing the heap from a snapshot, and
3056 // none of the maps have been created yet and are NULL. 3067 // none of the maps have been created yet and are NULL.
3057 DCHECK((filler->map() == NULL && !deserialization_complete_) || 3068 DCHECK((filler->map() == NULL && !deserialization_complete_) ||
3058 filler->map()->IsMap()); 3069 filler->map()->IsMap());
3059 } 3070 }
3060 3071
3061 3072
3062 bool Heap::CanMoveObjectStart(HeapObject* object) { 3073 bool Heap::CanMoveObjectStart(HeapObject* object) {
3063 if (!FLAG_move_object_start) return false; 3074 if (!FLAG_move_object_start) return false;
3064 3075
(...skipping 1768 matching lines...) Expand 10 before | Expand all | Expand 10 after
4833 // Iterate over the partial snapshot cache unless serializing. 4844 // Iterate over the partial snapshot cache unless serializing.
4834 if (mode != VISIT_ONLY_STRONG_FOR_SERIALIZATION) { 4845 if (mode != VISIT_ONLY_STRONG_FOR_SERIALIZATION) {
4835 SerializerDeserializer::Iterate(isolate_, v); 4846 SerializerDeserializer::Iterate(isolate_, v);
4836 } 4847 }
4837 // We don't do a v->Synchronize call here, because in debug mode that will 4848 // We don't do a v->Synchronize call here, because in debug mode that will
4838 // output a flag to the snapshot. However at this point the serializer and 4849 // output a flag to the snapshot. However at this point the serializer and
4839 // deserializer are deliberately a little unsynchronized (see above) so the 4850 // deserializer are deliberately a little unsynchronized (see above) so the
4840 // checking of the sync flag in the snapshot would fail. 4851 // checking of the sync flag in the snapshot would fail.
4841 } 4852 }
4842 4853
4854 void Heap::IterateMarkedForScavenger(ObjectVisitor* visitor) {
4855 NewSpacePageIterator it(new_space_.from_space());
4856 while (it.has_next()) {
4857 Page* page = it.next();
4858 LiveObjectIterator<kAllLiveObjects> it(page);
4859 Object* object = nullptr;
4860 while ((object = it.Next()) != nullptr) {
4861 if (object->IsHeapObject()) {
4862 DCHECK(IsUnscavengedHeapObject(this, &object));
4863 visitor->VisitPointer(&object);
4864 }
4865 }
4866 }
4867 }
4843 4868
4844 // TODO(1236194): Since the heap size is configurable on the command line 4869 // TODO(1236194): Since the heap size is configurable on the command line
4845 // and through the API, we should gracefully handle the case that the heap 4870 // and through the API, we should gracefully handle the case that the heap
4846 // size is not big enough to fit all the initial objects. 4871 // size is not big enough to fit all the initial objects.
4847 bool Heap::ConfigureHeap(int max_semi_space_size, int max_old_space_size, 4872 bool Heap::ConfigureHeap(int max_semi_space_size, int max_old_space_size,
4848 int max_executable_size, size_t code_range_size) { 4873 int max_executable_size, size_t code_range_size) {
4849 if (HasBeenSetUp()) return false; 4874 if (HasBeenSetUp()) return false;
4850 4875
4851 // Overwrite default configuration. 4876 // Overwrite default configuration.
4852 if (max_semi_space_size > 0) { 4877 if (max_semi_space_size > 0) {
(...skipping 1503 matching lines...) Expand 10 before | Expand all | Expand 10 after
6356 } 6381 }
6357 6382
6358 6383
6359 // static 6384 // static
6360 int Heap::GetStaticVisitorIdForMap(Map* map) { 6385 int Heap::GetStaticVisitorIdForMap(Map* map) {
6361 return StaticVisitorBase::GetVisitorId(map); 6386 return StaticVisitorBase::GetVisitorId(map);
6362 } 6387 }
6363 6388
6364 } // namespace internal 6389 } // namespace internal
6365 } // namespace v8 6390 } // namespace v8
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698