Chromium Code Reviews

Diff: src/incremental-marking.cc

Issue 10919294: Integrate map marking into static marking visitor. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 8 years, 3 months ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 26 matching lines...)
 
 namespace v8 {
 namespace internal {
 
 
 IncrementalMarking::IncrementalMarking(Heap* heap)
     : heap_(heap),
       state_(STOPPED),
       marking_deque_memory_(NULL),
       marking_deque_memory_committed_(false),
-      marker_(this, heap->mark_compact_collector()),
       steps_count_(0),
       steps_took_(0),
       longest_step_(0.0),
       old_generation_space_available_at_start_of_incremental_(0),
       old_generation_space_used_at_start_of_incremental_(0),
       steps_count_since_last_gc_(0),
       steps_took_since_last_gc_(0),
       should_hurry_(false),
       allocation_marking_factor_(0),
       allocated_(0),
(...skipping 161 matching lines...)
   INLINE(static void VisitPointers(Heap* heap, Object** start, Object** end)) {
     for (Object** p = start; p < end; p++) {
       Object* obj = *p;
       if (obj->NonFailureIsHeapObject()) {
         heap->mark_compact_collector()->RecordSlot(start, p, obj);
         MarkObject(heap, obj);
       }
     }
   }
 
+  // Marks the object grey and pushes it on the marking stack.
   INLINE(static void MarkObject(Heap* heap, Object* obj)) {
     HeapObject* heap_object = HeapObject::cast(obj);
     MarkBit mark_bit = Marking::MarkBitFrom(heap_object);
     if (mark_bit.data_only()) {
       if (heap->incremental_marking()->MarkBlackOrKeepGrey(mark_bit)) {
         MemoryChunk::IncrementLiveBytesFromGC(heap_object->address(),
                                               heap_object->Size());
       }
     } else if (Marking::IsWhite(mark_bit)) {
       heap->incremental_marking()->WhiteToGreyAndPush(heap_object, mark_bit);
     }
   }
+
+  // Marks the object black without pushing it on the marking stack.
+  // Returns true if object needed marking and false otherwise.
+  INLINE(static bool MarkObjectWithoutPush(Heap* heap, Object* obj)) {
+    HeapObject* heap_object = HeapObject::cast(obj);
+    MarkBit mark_bit = Marking::MarkBitFrom(heap_object);
+    if (Marking::IsWhite(mark_bit)) {
+      mark_bit.Set();
+      MemoryChunk::IncrementLiveBytesFromGC(heap_object->address(),
+                                            heap_object->Size());
+      return true;
+    }
+    return false;
+  }
 };
 
 
 class IncrementalMarkingRootMarkingVisitor : public ObjectVisitor {
  public:
   IncrementalMarkingRootMarkingVisitor(Heap* heap,
                                        IncrementalMarking* incremental_marking)
       : heap_(heap),
         incremental_marking_(incremental_marking) {
   }
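The new MarkObjectWithoutPush helper above complements MarkObject: MarkObject greys a white object and pushes it on the marking deque so its slots are scanned later, while MarkObjectWithoutPush blackens the object immediately and never enqueues it, so its children are not followed through this path. The standalone sketch below illustrates that tri-colour discipline under simplified assumptions; SimpleObject, ProcessDeque and the other names are hypothetical stand-ins, not V8 types.

#include <cstdio>
#include <deque>
#include <vector>

enum class Color { WHITE, GREY, BLACK };

struct SimpleObject {
  Color color = Color::WHITE;
  std::vector<SimpleObject*> children;  // outgoing pointers
};

// Grey a white object and enqueue it for later scanning
// (the role WhiteToGreyAndPush plays in the visitor above).
void GreyAndPush(SimpleObject* obj, std::deque<SimpleObject*>* deque) {
  if (obj->color == Color::WHITE) {
    obj->color = Color::GREY;
    deque->push_back(obj);
  }
}

// Blacken a white object without enqueuing it
// (the role MarkObjectWithoutPush plays); returns true if it needed marking.
bool MarkWithoutPush(SimpleObject* obj) {
  if (obj->color == Color::WHITE) {
    obj->color = Color::BLACK;
    return true;
  }
  return false;
}

// Drain the worklist: scan each grey object, grey its children, blacken it.
void ProcessDeque(std::deque<SimpleObject*>* deque) {
  while (!deque->empty()) {
    SimpleObject* obj = deque->front();
    deque->pop_front();
    for (SimpleObject* child : obj->children) GreyAndPush(child, deque);
    obj->color = Color::BLACK;
  }
}

int main() {
  SimpleObject root, a, b;
  root.children = {&a};
  a.children = {&b};
  std::deque<SimpleObject*> deque;
  GreyAndPush(&root, &deque);
  ProcessDeque(&deque);
  std::printf("b reached and black: %d\n", static_cast<int>(b.color == Color::BLACK));
  return 0;
}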
(...skipping 383 matching lines...)
     HeapObject* obj = marking_deque_.Pop();
 
     // Explicitly skip one word fillers. Incremental markbit patterns are
     // correct only for objects that occupy at least two words.
     Map* map = obj->map();
     if (map == filler_map) {
       continue;
     } else if (map == native_context_map) {
       // Native contexts have weak fields.
       IncrementalMarkingMarkingVisitor::VisitNativeContext(map, obj);
-    } else if (map->instance_type() == MAP_TYPE) {
-      Map* map = Map::cast(obj);
-      heap_->ClearCacheOnMap(map);
-
-      // When map collection is enabled we have to mark through map's
-      // transitions and back pointers in a special way to make these links
-      // weak. Only maps for subclasses of JSReceiver can have transitions.
-      STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
-      if (FLAG_collect_maps &&
-          map->instance_type() >= FIRST_JS_RECEIVER_TYPE) {
-        marker_.MarkMapContents(map);
-      } else {
-        IncrementalMarkingMarkingVisitor::VisitPointers(
-            heap_,
-            HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
-            HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
-      }
     } else {
       MarkBit map_mark_bit = Marking::MarkBitFrom(map);
       if (Marking::IsWhite(map_mark_bit)) {
         WhiteToGreyAndPush(map, map_mark_bit);
       }
       IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
     }
 
     MarkBit mark_bit = Marking::MarkBitFrom(obj);
     ASSERT(!Marking::IsBlack(mark_bit));
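The branch deleted above is the heart of this change: the MAP_TYPE special case (clearing the map's cache and marking transitions and back pointers weakly via marker_.MarkMapContents) moves out of the deque-draining loop and into the static marking visitor, so maps are dispatched like every other object through IterateBody. The sketch below only illustrates the general shape of such table-based dispatch; kVisitorTable, VisitMapLike and the other names are hypothetical and do not reproduce V8's StaticMarkingVisitor.

#include <array>
#include <cstdio>

enum InstanceType { MAP_TYPE_ID, CONTEXT_TYPE_ID, OTHER_TYPE_ID, NUM_TYPES };

struct FakeObject {
  InstanceType type;
};

using VisitorCallback = void (*)(FakeObject*);

// Per-type body visitors; the map visitor owns the special casing that the
// marking loop above no longer needs to perform inline.
void VisitMapLike(FakeObject*) { std::puts("map: clear cache, weak transition links"); }
void VisitContext(FakeObject*) { std::puts("context: weak fields"); }
void VisitGeneric(FakeObject*) { std::puts("generic body visit"); }

// Built once; the marking loop simply indexes the table by instance type.
const std::array<VisitorCallback, NUM_TYPES> kVisitorTable = {
    VisitMapLike, VisitContext, VisitGeneric};

void IterateBody(FakeObject* obj) { kVisitorTable[obj->type](obj); }

int main() {
  FakeObject a{MAP_TYPE_ID}, b{OTHER_TYPE_ID};
  IterateBody(&a);  // map handling happens here, with no if-chain in the caller
  IterateBody(&b);
  return 0;
}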
(...skipping 144 matching lines...)
       // TODO(gc) switch to static visitor instead of normal visitor.
       if (map == native_context_map) {
         // Native contexts have weak fields.
         Context* ctx = Context::cast(obj);
 
         // We will mark cache black with a separate pass
         // when we finish marking.
         MarkObjectGreyDoNotEnqueue(ctx->normalized_map_cache());
 
         IncrementalMarkingMarkingVisitor::VisitNativeContext(map, ctx);
-      } else if (map->instance_type() == MAP_TYPE) {
-        Map* map = Map::cast(obj);
-        heap_->ClearCacheOnMap(map);
-
-        // When map collection is enabled we have to mark through map's
-        // transitions and back pointers in a special way to make these links
-        // weak. Only maps for subclasses of JSReceiver can have transitions.
-        STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
-        if (FLAG_collect_maps &&
-            map->instance_type() >= FIRST_JS_RECEIVER_TYPE) {
-          marker_.MarkMapContents(map);
-        } else {
-          IncrementalMarkingMarkingVisitor::VisitPointers(
-              heap_,
-              HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
-              HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
-        }
       } else {
         IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
       }
 
       MarkBit obj_mark_bit = Marking::MarkBitFrom(obj);
       SLOW_ASSERT(Marking::IsGrey(obj_mark_bit) ||
                   (obj->IsFiller() && Marking::IsWhite(obj_mark_bit)));
       Marking::MarkBlack(obj_mark_bit);
       MemoryChunk::IncrementLiveBytesFromGC(obj->address(), size);
     }
(...skipping 92 matching lines...)
   allocation_marking_factor_ = kInitialAllocationMarkingFactor;
   bytes_scanned_ = 0;
 }
 
 
 int64_t IncrementalMarking::SpaceLeftInOldSpace() {
   return heap_->MaxOldGenerationSize() - heap_->PromotedSpaceSizeOfObjects();
 }
 
 } }  // namespace v8::internal