OLD | NEW |
---|---|
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 42 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
53 allocated_(0), | 53 allocated_(0), |
54 no_marking_scope_depth_(0) { | 54 no_marking_scope_depth_(0) { |
55 } | 55 } |
56 | 56 |
57 | 57 |
58 void IncrementalMarking::TearDown() { | 58 void IncrementalMarking::TearDown() { |
59 delete marking_deque_memory_; | 59 delete marking_deque_memory_; |
60 } | 60 } |
61 | 61 |
62 | 62 |
63 void IncrementalMarking::RecordWriteSlow(HeapObject* obj, | |
64 Object** slot, | |
65 Object* value) { | |
66 if (BaseRecordWrite(obj, slot, value) && is_compacting_ && slot != NULL) { | |
67 MarkBit obj_bit = Marking::MarkBitFrom(obj); | |
68 if (Marking::IsBlack(obj_bit)) { | |
69 // Object is not going to be rescanned, so we need to record the slot. | |
70 heap_->mark_compact_collector()->RecordSlot( | |
71 HeapObject::RawField(obj, 0), slot, value); | |
72 } | |
73 } | |
74 } | |
75 | |
76 | |
63 void IncrementalMarking::RecordWriteFromCode(HeapObject* obj, | 77 void IncrementalMarking::RecordWriteFromCode(HeapObject* obj, |
64 Object* value, | 78 Object* value, |
65 Isolate* isolate) { | 79 Isolate* isolate) { |
66 ASSERT(obj->IsHeapObject()); | 80 ASSERT(obj->IsHeapObject()); |
67 | 81 |
68 // Fast cases should already be covered by RecordWriteStub. | 82 // Fast cases should already be covered by RecordWriteStub. |
69 ASSERT(value->IsHeapObject()); | 83 ASSERT(value->IsHeapObject()); |
70 ASSERT(!value->IsHeapNumber()); | 84 ASSERT(!value->IsHeapNumber()); |
71 ASSERT(!value->IsString() || | 85 ASSERT(!value->IsString() || |
72 value->IsConsString() || | 86 value->IsConsString() || |
(...skipping 28 matching lines...) Expand all Loading... | |
101 void IncrementalMarking::RecordCodeTargetPatch(Address pc, HeapObject* value) { | 115 void IncrementalMarking::RecordCodeTargetPatch(Address pc, HeapObject* value) { |
102 if (IsMarking()) { | 116 if (IsMarking()) { |
103 Code* host = heap_->isolate()->inner_pointer_to_code_cache()-> | 117 Code* host = heap_->isolate()->inner_pointer_to_code_cache()-> |
104 GcSafeFindCodeForInnerPointer(pc); | 118 GcSafeFindCodeForInnerPointer(pc); |
105 RelocInfo rinfo(pc, RelocInfo::CODE_TARGET, 0, host); | 119 RelocInfo rinfo(pc, RelocInfo::CODE_TARGET, 0, host); |
106 RecordWriteIntoCode(host, &rinfo, value); | 120 RecordWriteIntoCode(host, &rinfo, value); |
107 } | 121 } |
108 } | 122 } |
109 | 123 |
110 | 124 |
111 void IncrementalMarking::RecordWriteOfCodeEntry(JSFunction* host, | 125 void IncrementalMarking::RecordWriteOfCodeEntrySlow(JSFunction* host, |
112 Object** slot, | 126 Object** slot, |
Erik Corry
2011/10/31 19:48:33
Formatting.
| |
113 Code* value) { | 127 Code* value) { |
114 if (BaseRecordWrite(host, slot, value) && is_compacting_) { | 128 if (BaseRecordWrite(host, slot, value) && is_compacting_) { |
115 ASSERT(slot != NULL); | 129 ASSERT(slot != NULL); |
116 heap_->mark_compact_collector()-> | 130 heap_->mark_compact_collector()-> |
117 RecordCodeEntrySlot(reinterpret_cast<Address>(slot), value); | 131 RecordCodeEntrySlot(reinterpret_cast<Address>(slot), value); |
118 } | 132 } |
119 } | 133 } |
120 | 134 |
121 | 135 |
136 void IncrementalMarking::RecordWriteIntoCodeSlow(HeapObject* obj, | |
137 RelocInfo* rinfo, | |
138 Object* value) { | |
139 MarkBit value_bit = Marking::MarkBitFrom(HeapObject::cast(value)); | |
140 if (Marking::IsWhite(value_bit)) { | |
141 MarkBit obj_bit = Marking::MarkBitFrom(obj); | |
142 if (Marking::IsBlack(obj_bit)) { | |
143 BlackToGreyAndUnshift(obj, obj_bit); | |
144 RestartIfNotMarking(); | |
145 } | |
146 // Object is either grey or white; it will be scanned if it survives. | |
Erik Corry
2011/10/31 19:48:33
, or ; or : between "white" and "it"
| |
147 return; | |
148 } | |
149 | |
150 if (is_compacting_) { | |
151 MarkBit obj_bit = Marking::MarkBitFrom(obj); | |
152 if (Marking::IsBlack(obj_bit)) { | |
153 // Object is not going to be rescanned, so we need to record the slot. | |
Erik Corry
2011/10/31 19:48:33
Punctuation before "we"
| |
154 heap_->mark_compact_collector()->RecordRelocSlot(rinfo, | |
155 Code::cast(value)); | |
156 } | |
157 } | |
158 } | |
159 | |
122 | 160 |
123 class IncrementalMarkingMarkingVisitor : public ObjectVisitor { | 161 class IncrementalMarkingMarkingVisitor : public ObjectVisitor { |
124 public: | 162 public: |
125 IncrementalMarkingMarkingVisitor(Heap* heap, | 163 IncrementalMarkingMarkingVisitor(Heap* heap, |
126 IncrementalMarking* incremental_marking) | 164 IncrementalMarking* incremental_marking) |
127 : heap_(heap), | 165 : heap_(heap), |
128 incremental_marking_(incremental_marking) { | 166 incremental_marking_(incremental_marking) { |
129 } | 167 } |
130 | 168 |
131 void VisitEmbeddedPointer(RelocInfo* rinfo) { | 169 void VisitEmbeddedPointer(RelocInfo* rinfo) { |
(...skipping 693 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
825 bytes_rescanned_ = 0; | 863 bytes_rescanned_ = 0; |
826 allocation_marking_factor_ = kInitialAllocationMarkingFactor; | 864 allocation_marking_factor_ = kInitialAllocationMarkingFactor; |
827 } | 865 } |
828 | 866 |
829 | 867 |
830 int64_t IncrementalMarking::SpaceLeftInOldSpace() { | 868 int64_t IncrementalMarking::SpaceLeftInOldSpace() { |
831 return heap_->MaxOldGenerationSize() - heap_->PromotedSpaceSize(); | 869 return heap_->MaxOldGenerationSize() - heap_->PromotedSpaceSize(); |
832 } | 870 } |
833 | 871 |
834 } } // namespace v8::internal | 872 } } // namespace v8::internal |
OLD | NEW |