| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_OBJECTS_VISITING_INL_H_ | 5 #ifndef V8_OBJECTS_VISITING_INL_H_ |
| 6 #define V8_OBJECTS_VISITING_INL_H_ | 6 #define V8_OBJECTS_VISITING_INL_H_ |
| 7 | 7 |
| 8 | 8 |
| 9 namespace v8 { | 9 namespace v8 { |
| 10 namespace internal { | 10 namespace internal { |
| 11 | 11 |
| 12 template<typename StaticVisitor> | 12 template <typename StaticVisitor> |
| 13 void StaticNewSpaceVisitor<StaticVisitor>::Initialize() { | 13 void StaticNewSpaceVisitor<StaticVisitor>::Initialize() { |
| 14 table_.Register(kVisitShortcutCandidate, | 14 table_.Register( |
| 15 &FixedBodyVisitor<StaticVisitor, | 15 kVisitShortcutCandidate, |
| 16 ConsString::BodyDescriptor, | 16 &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit); |
| 17 int>::Visit); | |
| 18 | 17 |
| 19 table_.Register(kVisitConsString, | 18 table_.Register( |
| 20 &FixedBodyVisitor<StaticVisitor, | 19 kVisitConsString, |
| 21 ConsString::BodyDescriptor, | 20 &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit); |
| 22 int>::Visit); | |
| 23 | 21 |
| 24 table_.Register(kVisitSlicedString, | 22 table_.Register(kVisitSlicedString, |
| 25 &FixedBodyVisitor<StaticVisitor, | 23 &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor, |
| 26 SlicedString::BodyDescriptor, | 24 int>::Visit); |
| 27 int>::Visit); | |
| 28 | 25 |
| 29 table_.Register(kVisitSymbol, | 26 table_.Register( |
| 30 &FixedBodyVisitor<StaticVisitor, | 27 kVisitSymbol, |
| 31 Symbol::BodyDescriptor, | 28 &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, int>::Visit); |
| 32 int>::Visit); | |
| 33 | 29 |
| 34 table_.Register(kVisitFixedArray, | 30 table_.Register(kVisitFixedArray, |
| 35 &FlexibleBodyVisitor<StaticVisitor, | 31 &FlexibleBodyVisitor<StaticVisitor, |
| 36 FixedArray::BodyDescriptor, | 32 FixedArray::BodyDescriptor, int>::Visit); |
| 37 int>::Visit); | |
| 38 | 33 |
| 39 table_.Register(kVisitFixedDoubleArray, &VisitFixedDoubleArray); | 34 table_.Register(kVisitFixedDoubleArray, &VisitFixedDoubleArray); |
| 40 table_.Register(kVisitFixedTypedArray, &VisitFixedTypedArray); | 35 table_.Register(kVisitFixedTypedArray, &VisitFixedTypedArray); |
| 41 table_.Register(kVisitFixedFloat64Array, &VisitFixedTypedArray); | 36 table_.Register(kVisitFixedFloat64Array, &VisitFixedTypedArray); |
| 42 | 37 |
| 43 table_.Register(kVisitNativeContext, | 38 table_.Register( |
| 44 &FixedBodyVisitor<StaticVisitor, | 39 kVisitNativeContext, |
| 45 Context::ScavengeBodyDescriptor, | 40 &FixedBodyVisitor<StaticVisitor, Context::ScavengeBodyDescriptor, |
| 46 int>::Visit); | 41 int>::Visit); |
| 47 | 42 |
| 48 table_.Register(kVisitByteArray, &VisitByteArray); | 43 table_.Register(kVisitByteArray, &VisitByteArray); |
| 49 | 44 |
| 50 table_.Register(kVisitSharedFunctionInfo, | 45 table_.Register( |
| 51 &FixedBodyVisitor<StaticVisitor, | 46 kVisitSharedFunctionInfo, |
| 52 SharedFunctionInfo::BodyDescriptor, | 47 &FixedBodyVisitor<StaticVisitor, SharedFunctionInfo::BodyDescriptor, |
| 53 int>::Visit); | 48 int>::Visit); |
| 54 | 49 |
| 55 table_.Register(kVisitSeqOneByteString, &VisitSeqOneByteString); | 50 table_.Register(kVisitSeqOneByteString, &VisitSeqOneByteString); |
| 56 | 51 |
| 57 table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString); | 52 table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString); |
| 58 | 53 |
| 59 table_.Register(kVisitJSFunction, &VisitJSFunction); | 54 table_.Register(kVisitJSFunction, &VisitJSFunction); |
| 60 | 55 |
| 61 table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer); | 56 table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer); |
| 62 | 57 |
| 63 table_.Register(kVisitJSTypedArray, &VisitJSTypedArray); | 58 table_.Register(kVisitJSTypedArray, &VisitJSTypedArray); |
| 64 | 59 |
| 65 table_.Register(kVisitJSDataView, &VisitJSDataView); | 60 table_.Register(kVisitJSDataView, &VisitJSDataView); |
| 66 | 61 |
| 67 table_.Register(kVisitFreeSpace, &VisitFreeSpace); | 62 table_.Register(kVisitFreeSpace, &VisitFreeSpace); |
| 68 | 63 |
| 69 table_.Register(kVisitJSWeakCollection, &JSObjectVisitor::Visit); | 64 table_.Register(kVisitJSWeakCollection, &JSObjectVisitor::Visit); |
| 70 | 65 |
| 71 table_.Register(kVisitJSRegExp, &JSObjectVisitor::Visit); | 66 table_.Register(kVisitJSRegExp, &JSObjectVisitor::Visit); |
| 72 | 67 |
| 73 table_.template RegisterSpecializations<DataObjectVisitor, | 68 table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject, |
| 74 kVisitDataObject, | |
| 75 kVisitDataObjectGeneric>(); | 69 kVisitDataObjectGeneric>(); |
| 76 | 70 |
| 77 table_.template RegisterSpecializations<JSObjectVisitor, | 71 table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject, |
| 78 kVisitJSObject, | |
| 79 kVisitJSObjectGeneric>(); | 72 kVisitJSObjectGeneric>(); |
| 80 table_.template RegisterSpecializations<StructVisitor, | 73 table_.template RegisterSpecializations<StructVisitor, kVisitStruct, |
| 81 kVisitStruct, | |
| 82 kVisitStructGeneric>(); | 74 kVisitStructGeneric>(); |
| 83 } | 75 } |
| 84 | 76 |
| 85 | 77 |
| 86 template<typename StaticVisitor> | 78 template <typename StaticVisitor> |
| 87 int StaticNewSpaceVisitor<StaticVisitor>::VisitJSArrayBuffer( | 79 int StaticNewSpaceVisitor<StaticVisitor>::VisitJSArrayBuffer( |
| 88 Map* map, HeapObject* object) { | 80 Map* map, HeapObject* object) { |
| 89 Heap* heap = map->GetHeap(); | 81 Heap* heap = map->GetHeap(); |
| 90 | 82 |
| 91 STATIC_ASSERT( | 83 STATIC_ASSERT(JSArrayBuffer::kWeakFirstViewOffset == |
| 92 JSArrayBuffer::kWeakFirstViewOffset == | 84 JSArrayBuffer::kWeakNextOffset + kPointerSize); |
| 93 JSArrayBuffer::kWeakNextOffset + kPointerSize); | 85 VisitPointers(heap, HeapObject::RawField( |
| | 86 object, JSArrayBuffer::BodyDescriptor::kStartOffset), |
| | 87 HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset)); |
| 94 VisitPointers( | 88 VisitPointers( |
| 95 heap, | 89 heap, HeapObject::RawField( |
| 96 HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset), | 90 object, JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize), |
| 97 HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset)); | |
| 98 VisitPointers( | |
| 99 heap, | |
| 100 HeapObject::RawField(object, | |
| 101 JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize), | |
| 102 HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields)); | 91 HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields)); |
| 103 return JSArrayBuffer::kSizeWithInternalFields; | 92 return JSArrayBuffer::kSizeWithInternalFields; |
| 104 } | 93 } |
| 105 | 94 |
| 106 | 95 |
| 107 template<typename StaticVisitor> | 96 template <typename StaticVisitor> |
| 108 int StaticNewSpaceVisitor<StaticVisitor>::VisitJSTypedArray( | 97 int StaticNewSpaceVisitor<StaticVisitor>::VisitJSTypedArray( |
| 109 Map* map, HeapObject* object) { | 98 Map* map, HeapObject* object) { |
| 110 VisitPointers( | 99 VisitPointers( |
| 111 map->GetHeap(), | 100 map->GetHeap(), |
| 112 HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset), | 101 HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset), |
| 113 HeapObject::RawField(object, JSTypedArray::kWeakNextOffset)); | 102 HeapObject::RawField(object, JSTypedArray::kWeakNextOffset)); |
| 114 VisitPointers( | 103 VisitPointers( |
| 115 map->GetHeap(), | 104 map->GetHeap(), HeapObject::RawField( |
| 116 HeapObject::RawField(object, | |
| 117 JSTypedArray::kWeakNextOffset + kPointerSize), | |
| 118 HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields)); | 106 HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields)); |
| 119 return JSTypedArray::kSizeWithInternalFields; | 107 return JSTypedArray::kSizeWithInternalFields; |
| 120 } | 108 } |
| 121 | 109 |
| 122 | 110 |
| 123 template<typename StaticVisitor> | 111 template <typename StaticVisitor> |
| 124 int StaticNewSpaceVisitor<StaticVisitor>::VisitJSDataView( | 112 int StaticNewSpaceVisitor<StaticVisitor>::VisitJSDataView(Map* map, |
| 125 Map* map, HeapObject* object) { | 113 HeapObject* object) { |
| 126 VisitPointers( | 114 VisitPointers( |
| 127 map->GetHeap(), | 115 map->GetHeap(), |
| 128 HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset), | 116 HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset), |
| 129 HeapObject::RawField(object, JSDataView::kWeakNextOffset)); | 117 HeapObject::RawField(object, JSDataView::kWeakNextOffset)); |
| 130 VisitPointers( | 118 VisitPointers( |
| 131 map->GetHeap(), | 119 map->GetHeap(), |
| 132 HeapObject::RawField(object, | 120 HeapObject::RawField(object, JSDataView::kWeakNextOffset + kPointerSize), |
| 133 JSDataView::kWeakNextOffset + kPointerSize), | |
| 134 HeapObject::RawField(object, JSDataView::kSizeWithInternalFields)); | 121 HeapObject::RawField(object, JSDataView::kSizeWithInternalFields)); |
| 135 return JSDataView::kSizeWithInternalFields; | 122 return JSDataView::kSizeWithInternalFields; |
| 136 } | 123 } |
| 137 | 124 |
| 138 | 125 |
| 139 template<typename StaticVisitor> | 126 template <typename StaticVisitor> |
| 140 void StaticMarkingVisitor<StaticVisitor>::Initialize() { | 127 void StaticMarkingVisitor<StaticVisitor>::Initialize() { |
| 141 table_.Register(kVisitShortcutCandidate, | 128 table_.Register(kVisitShortcutCandidate, |
| 142 &FixedBodyVisitor<StaticVisitor, | 129 &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, |
| 143 ConsString::BodyDescriptor, | 130 void>::Visit); |
| 144 void>::Visit); | |
| 145 | 131 |
| 146 table_.Register(kVisitConsString, | 132 table_.Register(kVisitConsString, |
| 147 &FixedBodyVisitor<StaticVisitor, | 133 &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, |
| 148 ConsString::BodyDescriptor, | 134 void>::Visit); |
| 149 void>::Visit); | |
| 150 | 135 |
| 151 table_.Register(kVisitSlicedString, | 136 table_.Register(kVisitSlicedString, |
| 152 &FixedBodyVisitor<StaticVisitor, | 137 &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor, |
| 153 SlicedString::BodyDescriptor, | 138 void>::Visit); |
| 154 void>::Visit); | |
| 155 | 139 |
| 156 table_.Register(kVisitSymbol, | 140 table_.Register( |
| 157 &FixedBodyVisitor<StaticVisitor, | 141 kVisitSymbol, |
| 158 Symbol::BodyDescriptor, | 142 &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, void>::Visit); |
| 159 void>::Visit); | |
| 160 | 143 |
| 161 table_.Register(kVisitFixedArray, &FixedArrayVisitor::Visit); | 144 table_.Register(kVisitFixedArray, &FixedArrayVisitor::Visit); |
| 162 | 145 |
| 163 table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit); | 146 table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit); |
| 164 | 147 |
| 165 table_.Register(kVisitFixedTypedArray, &DataObjectVisitor::Visit); | 148 table_.Register(kVisitFixedTypedArray, &DataObjectVisitor::Visit); |
| 166 | 149 |
| 167 table_.Register(kVisitFixedFloat64Array, &DataObjectVisitor::Visit); | 150 table_.Register(kVisitFixedFloat64Array, &DataObjectVisitor::Visit); |
| 168 | 151 |
| 169 table_.Register(kVisitConstantPoolArray, &VisitConstantPoolArray); | 152 table_.Register(kVisitConstantPoolArray, &VisitConstantPoolArray); |
| 170 | 153 |
| 171 table_.Register(kVisitNativeContext, &VisitNativeContext); | 154 table_.Register(kVisitNativeContext, &VisitNativeContext); |
| 172 | 155 |
| 173 table_.Register(kVisitAllocationSite, &VisitAllocationSite); | 156 table_.Register(kVisitAllocationSite, &VisitAllocationSite); |
| 174 | 157 |
| 175 table_.Register(kVisitByteArray, &DataObjectVisitor::Visit); | 158 table_.Register(kVisitByteArray, &DataObjectVisitor::Visit); |
| 176 | 159 |
| 177 table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit); | 160 table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit); |
| 178 | 161 |
| 179 table_.Register(kVisitSeqOneByteString, &DataObjectVisitor::Visit); | 162 table_.Register(kVisitSeqOneByteString, &DataObjectVisitor::Visit); |
| 180 | 163 |
| 181 table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit); | 164 table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit); |
| 182 | 165 |
| 183 table_.Register(kVisitJSWeakCollection, &VisitWeakCollection); | 166 table_.Register(kVisitJSWeakCollection, &VisitWeakCollection); |
| 184 | 167 |
| 185 table_.Register(kVisitOddball, | 168 table_.Register( |
| 186 &FixedBodyVisitor<StaticVisitor, | 169 kVisitOddball, |
| 187 Oddball::BodyDescriptor, | 170 &FixedBodyVisitor<StaticVisitor, Oddball::BodyDescriptor, void>::Visit); |
| 188 void>::Visit); | |
| 189 | 171 |
| 190 table_.Register(kVisitMap, &VisitMap); | 172 table_.Register(kVisitMap, &VisitMap); |
| 191 | 173 |
| 192 table_.Register(kVisitCode, &VisitCode); | 174 table_.Register(kVisitCode, &VisitCode); |
| 193 | 175 |
| 194 table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo); | 176 table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo); |
| 195 | 177 |
| 196 table_.Register(kVisitJSFunction, &VisitJSFunction); | 178 table_.Register(kVisitJSFunction, &VisitJSFunction); |
| 197 | 179 |
| 198 table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer); | 180 table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer); |
| 199 | 181 |
| 200 table_.Register(kVisitJSTypedArray, &VisitJSTypedArray); | 182 table_.Register(kVisitJSTypedArray, &VisitJSTypedArray); |
| 201 | 183 |
| 202 table_.Register(kVisitJSDataView, &VisitJSDataView); | 184 table_.Register(kVisitJSDataView, &VisitJSDataView); |
| 203 | 185 |
| 204 // Registration for kVisitJSRegExp is done by StaticVisitor. | 186 // Registration for kVisitJSRegExp is done by StaticVisitor. |
| 205 | 187 |
| 206 table_.Register(kVisitCell, | 188 table_.Register( |
| 207 &FixedBodyVisitor<StaticVisitor, | 189 kVisitCell, |
| 208 Cell::BodyDescriptor, | 190 &FixedBodyVisitor<StaticVisitor, Cell::BodyDescriptor, void>::Visit); |
| 209 void>::Visit); | |
| 210 | 191 |
| 211 table_.Register(kVisitPropertyCell, &VisitPropertyCell); | 192 table_.Register(kVisitPropertyCell, &VisitPropertyCell); |
| 212 | 193 |
| 213 table_.template RegisterSpecializations<DataObjectVisitor, | 194 table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject, |
| 214 kVisitDataObject, | |
| 215 kVisitDataObjectGeneric>(); | 195 kVisitDataObjectGeneric>(); |
| 216 | 196 |
| 217 table_.template RegisterSpecializations<JSObjectVisitor, | 197 table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject, |
| 218 kVisitJSObject, | |
| 219 kVisitJSObjectGeneric>(); | 198 kVisitJSObjectGeneric>(); |
| 220 | 199 |
| 221 table_.template RegisterSpecializations<StructObjectVisitor, | 200 table_.template RegisterSpecializations<StructObjectVisitor, kVisitStruct, |
| 222 kVisitStruct, | |
| 223 kVisitStructGeneric>(); | 201 kVisitStructGeneric>(); |
| 224 } | 202 } |
| 225 | 203 |
| 226 | 204 |
| 227 template<typename StaticVisitor> | 205 template <typename StaticVisitor> |
| 228 void StaticMarkingVisitor<StaticVisitor>::VisitCodeEntry( | 206 void StaticMarkingVisitor<StaticVisitor>::VisitCodeEntry( |
| 229 Heap* heap, Address entry_address) { | 207 Heap* heap, Address entry_address) { |
| 230 Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address)); | 208 Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address)); |
| 231 heap->mark_compact_collector()->RecordCodeEntrySlot(entry_address, code); | 209 heap->mark_compact_collector()->RecordCodeEntrySlot(entry_address, code); |
| 232 StaticVisitor::MarkObject(heap, code); | 210 StaticVisitor::MarkObject(heap, code); |
| 233 } | 211 } |
| 234 | 212 |
| 235 | 213 |
| 236 template<typename StaticVisitor> | 214 template <typename StaticVisitor> |
| 237 void StaticMarkingVisitor<StaticVisitor>::VisitEmbeddedPointer( | 215 void StaticMarkingVisitor<StaticVisitor>::VisitEmbeddedPointer( |
| 238 Heap* heap, RelocInfo* rinfo) { | 216 Heap* heap, RelocInfo* rinfo) { |
| 239 DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT); | 217 DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT); |
| 240 HeapObject* object = HeapObject::cast(rinfo->target_object()); | 218 HeapObject* object = HeapObject::cast(rinfo->target_object()); |
| 241 heap->mark_compact_collector()->RecordRelocSlot(rinfo, object); | 219 heap->mark_compact_collector()->RecordRelocSlot(rinfo, object); |
| 242 // TODO(ulan): It could be better to record slots only for strongly embedded | 220 // TODO(ulan): It could be better to record slots only for strongly embedded |
| 243 // objects here and record slots for weakly embedded object during clearing | 221 // objects here and record slots for weakly embedded object during clearing |
| 244 // of non-live references in mark-compact. | 222 // of non-live references in mark-compact. |
| 245 if (!rinfo->host()->IsWeakObject(object)) { | 223 if (!rinfo->host()->IsWeakObject(object)) { |
| 246 StaticVisitor::MarkObject(heap, object); | 224 StaticVisitor::MarkObject(heap, object); |
| 247 } | 225 } |
| 248 } | 226 } |
| 249 | 227 |
| 250 | 228 |
| 251 template<typename StaticVisitor> | 229 template <typename StaticVisitor> |
| 252 void StaticMarkingVisitor<StaticVisitor>::VisitCell( | 230 void StaticMarkingVisitor<StaticVisitor>::VisitCell(Heap* heap, |
| 253 Heap* heap, RelocInfo* rinfo) { | 231 RelocInfo* rinfo) { |
| 254 DCHECK(rinfo->rmode() == RelocInfo::CELL); | 232 DCHECK(rinfo->rmode() == RelocInfo::CELL); |
| 255 Cell* cell = rinfo->target_cell(); | 233 Cell* cell = rinfo->target_cell(); |
| 256 // No need to record slots because the cell space is not compacted during GC. | 234 // No need to record slots because the cell space is not compacted during GC. |
| 257 if (!rinfo->host()->IsWeakObject(cell)) { | 235 if (!rinfo->host()->IsWeakObject(cell)) { |
| 258 StaticVisitor::MarkObject(heap, cell); | 236 StaticVisitor::MarkObject(heap, cell); |
| 259 } | 237 } |
| 260 } | 238 } |
| 261 | 239 |
| 262 | 240 |
| 263 template<typename StaticVisitor> | 241 template <typename StaticVisitor> |
| 264 void StaticMarkingVisitor<StaticVisitor>::VisitDebugTarget( | 242 void StaticMarkingVisitor<StaticVisitor>::VisitDebugTarget(Heap* heap, |
| 265 Heap* heap, RelocInfo* rinfo) { | 243 RelocInfo* rinfo) { |
| 266 DCHECK((RelocInfo::IsJSReturn(rinfo->rmode()) && | 244 DCHECK((RelocInfo::IsJSReturn(rinfo->rmode()) && |
| 267 rinfo->IsPatchedReturnSequence()) || | 245 rinfo->IsPatchedReturnSequence()) || |
| 268 (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) && | 246 (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) && |
| 269 rinfo->IsPatchedDebugBreakSlotSequence())); | 247 rinfo->IsPatchedDebugBreakSlotSequence())); |
| 270 Code* target = Code::GetCodeFromTargetAddress(rinfo->call_address()); | 248 Code* target = Code::GetCodeFromTargetAddress(rinfo->call_address()); |
| 271 heap->mark_compact_collector()->RecordRelocSlot(rinfo, target); | 249 heap->mark_compact_collector()->RecordRelocSlot(rinfo, target); |
| 272 StaticVisitor::MarkObject(heap, target); | 250 StaticVisitor::MarkObject(heap, target); |
| 273 } | 251 } |
| 274 | 252 |
| 275 | 253 |
| 276 template<typename StaticVisitor> | 254 template <typename StaticVisitor> |
| 277 void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget( | 255 void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget(Heap* heap, |
| 278 Heap* heap, RelocInfo* rinfo) { | 256 RelocInfo* rinfo) { |
| 279 DCHECK(RelocInfo::IsCodeTarget(rinfo->rmode())); | 257 DCHECK(RelocInfo::IsCodeTarget(rinfo->rmode())); |
| 280 Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address()); | 258 Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address()); |
| 281 // Monomorphic ICs are preserved when possible, but need to be flushed | 259 // Monomorphic ICs are preserved when possible, but need to be flushed |
| 282 // when they might be keeping a Context alive, or when the heap is about | 260 // when they might be keeping a Context alive, or when the heap is about |
| 283 // to be serialized. | 261 // to be serialized. |
| 284 if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub() | 262 if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub() && |
| 285 && (target->ic_state() == MEGAMORPHIC || target->ic_state() == GENERIC || | 263 (target->ic_state() == MEGAMORPHIC || target->ic_state() == GENERIC || |
| 286 target->ic_state() == POLYMORPHIC || heap->flush_monomorphic_ics() || | 264 target->ic_state() == POLYMORPHIC || heap->flush_monomorphic_ics() || |
| 287 heap->isolate()->serializer_enabled() || | 265 heap->isolate()->serializer_enabled() || |
| 288 target->ic_age() != heap->global_ic_age() || | 266 target->ic_age() != heap->global_ic_age() || |
| 289 target->is_invalidated_weak_stub())) { | 267 target->is_invalidated_weak_stub())) { |
| 290 IC::Clear(heap->isolate(), rinfo->pc(), rinfo->host()->constant_pool()); | 268 IC::Clear(heap->isolate(), rinfo->pc(), rinfo->host()->constant_pool()); |
| 291 target = Code::GetCodeFromTargetAddress(rinfo->target_address()); | 269 target = Code::GetCodeFromTargetAddress(rinfo->target_address()); |
| 292 } | 270 } |
| 293 heap->mark_compact_collector()->RecordRelocSlot(rinfo, target); | 271 heap->mark_compact_collector()->RecordRelocSlot(rinfo, target); |
| 294 StaticVisitor::MarkObject(heap, target); | 272 StaticVisitor::MarkObject(heap, target); |
| 295 } | 273 } |
| 296 | 274 |
| 297 | 275 |
| 298 template<typename StaticVisitor> | 276 template <typename StaticVisitor> |
| 299 void StaticMarkingVisitor<StaticVisitor>::VisitCodeAgeSequence( | 277 void StaticMarkingVisitor<StaticVisitor>::VisitCodeAgeSequence( |
| 300 Heap* heap, RelocInfo* rinfo) { | 278 Heap* heap, RelocInfo* rinfo) { |
| 301 DCHECK(RelocInfo::IsCodeAgeSequence(rinfo->rmode())); | 279 DCHECK(RelocInfo::IsCodeAgeSequence(rinfo->rmode())); |
| 302 Code* target = rinfo->code_age_stub(); | 280 Code* target = rinfo->code_age_stub(); |
| 303 DCHECK(target != NULL); | 281 DCHECK(target != NULL); |
| 304 heap->mark_compact_collector()->RecordRelocSlot(rinfo, target); | 282 heap->mark_compact_collector()->RecordRelocSlot(rinfo, target); |
| 305 StaticVisitor::MarkObject(heap, target); | 283 StaticVisitor::MarkObject(heap, target); |
| 306 } | 284 } |
| 307 | 285 |
| 308 | 286 |
| 309 template<typename StaticVisitor> | 287 template <typename StaticVisitor> |
| 310 void StaticMarkingVisitor<StaticVisitor>::VisitNativeContext( | 288 void StaticMarkingVisitor<StaticVisitor>::VisitNativeContext( |
| 311 Map* map, HeapObject* object) { | 289 Map* map, HeapObject* object) { |
| 312 FixedBodyVisitor<StaticVisitor, | 290 FixedBodyVisitor<StaticVisitor, Context::MarkCompactBodyDescriptor, |
| 313 Context::MarkCompactBodyDescriptor, | |
| 314 void>::Visit(map, object); | 291 void>::Visit(map, object); |
| 315 | 292 |
| 316 MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector(); | 293 MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector(); |
| 317 for (int idx = Context::FIRST_WEAK_SLOT; | 294 for (int idx = Context::FIRST_WEAK_SLOT; idx < Context::NATIVE_CONTEXT_SLOTS; |
| 318 idx < Context::NATIVE_CONTEXT_SLOTS; | |
| 319 ++idx) { | 295 ++idx) { |
| 320 Object** slot = Context::cast(object)->RawFieldOfElementAt(idx); | 296 Object** slot = Context::cast(object)->RawFieldOfElementAt(idx); |
| 321 collector->RecordSlot(slot, slot, *slot); | 297 collector->RecordSlot(slot, slot, *slot); |
| 322 } | 298 } |
| 323 } | 299 } |
| 324 | 300 |
| 325 | 301 |
| 326 template<typename StaticVisitor> | 302 template <typename StaticVisitor> |
| 327 void StaticMarkingVisitor<StaticVisitor>::VisitMap( | 303 void StaticMarkingVisitor<StaticVisitor>::VisitMap(Map* map, |
| 328 Map* map, HeapObject* object) { | 304 HeapObject* object) { |
| 329 Heap* heap = map->GetHeap(); | 305 Heap* heap = map->GetHeap(); |
| 330 Map* map_object = Map::cast(object); | 306 Map* map_object = Map::cast(object); |
| 331 | 307 |
| 332 // Clears the cache of ICs related to this map. | 308 // Clears the cache of ICs related to this map. |
| 333 if (FLAG_cleanup_code_caches_at_gc) { | 309 if (FLAG_cleanup_code_caches_at_gc) { |
| 334 map_object->ClearCodeCache(heap); | 310 map_object->ClearCodeCache(heap); |
| 335 } | 311 } |
| 336 | 312 |
| 337 // When map collection is enabled we have to mark through map's transitions | 313 // When map collection is enabled we have to mark through map's transitions |
| 338 // and back pointers in a special way to make these links weak. | 314 // and back pointers in a special way to make these links weak. |
| 339 if (FLAG_collect_maps && map_object->CanTransition()) { | 315 if (FLAG_collect_maps && map_object->CanTransition()) { |
| 340 MarkMapContents(heap, map_object); | 316 MarkMapContents(heap, map_object); |
| 341 } else { | 317 } else { |
| 342 StaticVisitor::VisitPointers(heap, | 318 StaticVisitor::VisitPointers( |
| 343 HeapObject::RawField(object, Map::kPointerFieldsBeginOffset), | 319 heap, HeapObject::RawField(object, Map::kPointerFieldsBeginOffset), |
| 344 HeapObject::RawField(object, Map::kPointerFieldsEndOffset)); | 320 HeapObject::RawField(object, Map::kPointerFieldsEndOffset)); |
| 345 } | 321 } |
| 346 } | 322 } |
| 347 | 323 |
| 348 | 324 |
| 349 template<typename StaticVisitor> | 325 template <typename StaticVisitor> |
| 350 void StaticMarkingVisitor<StaticVisitor>::VisitPropertyCell( | 326 void StaticMarkingVisitor<StaticVisitor>::VisitPropertyCell( |
| 351 Map* map, HeapObject* object) { | 327 Map* map, HeapObject* object) { |
| 352 Heap* heap = map->GetHeap(); | 328 Heap* heap = map->GetHeap(); |
| 353 | 329 |
| 354 Object** slot = | 330 Object** slot = |
| 355 HeapObject::RawField(object, PropertyCell::kDependentCodeOffset); | 331 HeapObject::RawField(object, PropertyCell::kDependentCodeOffset); |
| 356 if (FLAG_collect_maps) { | 332 if (FLAG_collect_maps) { |
| 357 // Mark property cell dependent codes array but do not push it onto marking | 333 // Mark property cell dependent codes array but do not push it onto marking |
| 358 // stack, this will make references from it weak. We will clean dead | 334 // stack, this will make references from it weak. We will clean dead |
| 359 // codes when we iterate over property cells in ClearNonLiveReferences. | 335 // codes when we iterate over property cells in ClearNonLiveReferences. |
| 360 HeapObject* obj = HeapObject::cast(*slot); | 336 HeapObject* obj = HeapObject::cast(*slot); |
| 361 heap->mark_compact_collector()->RecordSlot(slot, slot, obj); | 337 heap->mark_compact_collector()->RecordSlot(slot, slot, obj); |
| 362 StaticVisitor::MarkObjectWithoutPush(heap, obj); | 338 StaticVisitor::MarkObjectWithoutPush(heap, obj); |
| 363 } else { | 339 } else { |
| 364 StaticVisitor::VisitPointer(heap, slot); | 340 StaticVisitor::VisitPointer(heap, slot); |
| 365 } | 341 } |
| 366 | 342 |
| 367 StaticVisitor::VisitPointers(heap, | 343 StaticVisitor::VisitPointers( |
| | 344 heap, |
| 368 HeapObject::RawField(object, PropertyCell::kPointerFieldsBeginOffset), | 345 HeapObject::RawField(object, PropertyCell::kPointerFieldsBeginOffset), |
| 369 HeapObject::RawField(object, PropertyCell::kPointerFieldsEndOffset)); | 346 HeapObject::RawField(object, PropertyCell::kPointerFieldsEndOffset)); |
| 370 } | 347 } |
| 371 | 348 |
| 372 | 349 |
| 373 template<typename StaticVisitor> | 350 template <typename StaticVisitor> |
| 374 void StaticMarkingVisitor<StaticVisitor>::VisitAllocationSite( | 351 void StaticMarkingVisitor<StaticVisitor>::VisitAllocationSite( |
| 375 Map* map, HeapObject* object) { | 352 Map* map, HeapObject* object) { |
| 376 Heap* heap = map->GetHeap(); | 353 Heap* heap = map->GetHeap(); |
| 377 | 354 |
| 378 Object** slot = | 355 Object** slot = |
| 379 HeapObject::RawField(object, AllocationSite::kDependentCodeOffset); | 356 HeapObject::RawField(object, AllocationSite::kDependentCodeOffset); |
| 380 if (FLAG_collect_maps) { | 357 if (FLAG_collect_maps) { |
| 381 // Mark allocation site dependent codes array but do not push it onto | 358 // Mark allocation site dependent codes array but do not push it onto |
| 382 // marking stack, this will make references from it weak. We will clean | 359 // marking stack, this will make references from it weak. We will clean |
| 383 // dead codes when we iterate over allocation sites in | 360 // dead codes when we iterate over allocation sites in |
| 384 // ClearNonLiveReferences. | 361 // ClearNonLiveReferences. |
| 385 HeapObject* obj = HeapObject::cast(*slot); | 362 HeapObject* obj = HeapObject::cast(*slot); |
| 386 heap->mark_compact_collector()->RecordSlot(slot, slot, obj); | 363 heap->mark_compact_collector()->RecordSlot(slot, slot, obj); |
| 387 StaticVisitor::MarkObjectWithoutPush(heap, obj); | 364 StaticVisitor::MarkObjectWithoutPush(heap, obj); |
| 388 } else { | 365 } else { |
| 389 StaticVisitor::VisitPointer(heap, slot); | 366 StaticVisitor::VisitPointer(heap, slot); |
| 390 } | 367 } |
| 391 | 368 |
| 392 StaticVisitor::VisitPointers(heap, | 369 StaticVisitor::VisitPointers( |
| | 370 heap, |
| 393 HeapObject::RawField(object, AllocationSite::kPointerFieldsBeginOffset), | 371 HeapObject::RawField(object, AllocationSite::kPointerFieldsBeginOffset), |
| 394 HeapObject::RawField(object, AllocationSite::kPointerFieldsEndOffset)); | 372 HeapObject::RawField(object, AllocationSite::kPointerFieldsEndOffset)); |
| 395 } | 373 } |
| 396 | 374 |
| 397 | 375 |
| 398 template<typename StaticVisitor> | 376 template <typename StaticVisitor> |
| 399 void StaticMarkingVisitor<StaticVisitor>::VisitWeakCollection( | 377 void StaticMarkingVisitor<StaticVisitor>::VisitWeakCollection( |
| 400 Map* map, HeapObject* object) { | 378 Map* map, HeapObject* object) { |
| 401 Heap* heap = map->GetHeap(); | 379 Heap* heap = map->GetHeap(); |
| 402 JSWeakCollection* weak_collection = | 380 JSWeakCollection* weak_collection = |
| 403 reinterpret_cast<JSWeakCollection*>(object); | 381 reinterpret_cast<JSWeakCollection*>(object); |
| 404 | 382 |
| 405 // Enqueue weak collection in linked list of encountered weak collections. | 383 // Enqueue weak collection in linked list of encountered weak collections. |
| 406 if (weak_collection->next() == heap->undefined_value()) { | 384 if (weak_collection->next() == heap->undefined_value()) { |
| 407 weak_collection->set_next(heap->encountered_weak_collections()); | 385 weak_collection->set_next(heap->encountered_weak_collections()); |
| 408 heap->set_encountered_weak_collections(weak_collection); | 386 heap->set_encountered_weak_collections(weak_collection); |
| 409 } | 387 } |
| 410 | 388 |
| 411 // Skip visiting the backing hash table containing the mappings and the | 389 // Skip visiting the backing hash table containing the mappings and the |
| 412 // pointer to the other enqueued weak collections, both are post-processed. | 390 // pointer to the other enqueued weak collections, both are post-processed. |
| 413 StaticVisitor::VisitPointers(heap, | 391 StaticVisitor::VisitPointers( |
| 414 HeapObject::RawField(object, JSWeakCollection::kPropertiesOffset), | 392 heap, HeapObject::RawField(object, JSWeakCollection::kPropertiesOffset), |
| 415 HeapObject::RawField(object, JSWeakCollection::kTableOffset)); | 393 HeapObject::RawField(object, JSWeakCollection::kTableOffset)); |
| 416 STATIC_ASSERT(JSWeakCollection::kTableOffset + kPointerSize == | 394 STATIC_ASSERT(JSWeakCollection::kTableOffset + kPointerSize == |
| 417 JSWeakCollection::kNextOffset); | 395 JSWeakCollection::kNextOffset); |
| 418 STATIC_ASSERT(JSWeakCollection::kNextOffset + kPointerSize == | 396 STATIC_ASSERT(JSWeakCollection::kNextOffset + kPointerSize == |
| 419 JSWeakCollection::kSize); | 397 JSWeakCollection::kSize); |
| 420 | 398 |
| 421 // Partially initialized weak collection is enqueued, but table is ignored. | 399 // Partially initialized weak collection is enqueued, but table is ignored. |
| 422 if (!weak_collection->table()->IsHashTable()) return; | 400 if (!weak_collection->table()->IsHashTable()) return; |
| 423 | 401 |
| 424 // Mark the backing hash table without pushing it on the marking stack. | 402 // Mark the backing hash table without pushing it on the marking stack. |
| 425 Object** slot = HeapObject::RawField(object, JSWeakCollection::kTableOffset); | 403 Object** slot = HeapObject::RawField(object, JSWeakCollection::kTableOffset); |
| 426 HeapObject* obj = HeapObject::cast(*slot); | 404 HeapObject* obj = HeapObject::cast(*slot); |
| 427 heap->mark_compact_collector()->RecordSlot(slot, slot, obj); | 405 heap->mark_compact_collector()->RecordSlot(slot, slot, obj); |
| 428 StaticVisitor::MarkObjectWithoutPush(heap, obj); | 406 StaticVisitor::MarkObjectWithoutPush(heap, obj); |
| 429 } | 407 } |
| 430 | 408 |
| 431 | 409 |
| 432 template<typename StaticVisitor> | 410 template <typename StaticVisitor> |
| 433 void StaticMarkingVisitor<StaticVisitor>::VisitCode( | 411 void StaticMarkingVisitor<StaticVisitor>::VisitCode(Map* map, |
| 434 Map* map, HeapObject* object) { | 412 HeapObject* object) { |
| 435 Heap* heap = map->GetHeap(); | 413 Heap* heap = map->GetHeap(); |
| 436 Code* code = Code::cast(object); | 414 Code* code = Code::cast(object); |
| 437 if (FLAG_age_code && !heap->isolate()->serializer_enabled()) { | 415 if (FLAG_age_code && !heap->isolate()->serializer_enabled()) { |
| 438 code->MakeOlder(heap->mark_compact_collector()->marking_parity()); | 416 code->MakeOlder(heap->mark_compact_collector()->marking_parity()); |
| 439 } | 417 } |
| 440 code->CodeIterateBody<StaticVisitor>(heap); | 418 code->CodeIterateBody<StaticVisitor>(heap); |
| 441 } | 419 } |
| 442 | 420 |
| 443 | 421 |
| 444 template<typename StaticVisitor> | 422 template <typename StaticVisitor> |
| 445 void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo( | 423 void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo( |
| 446 Map* map, HeapObject* object) { | 424 Map* map, HeapObject* object) { |
| 447 Heap* heap = map->GetHeap(); | 425 Heap* heap = map->GetHeap(); |
| 448 SharedFunctionInfo* shared = SharedFunctionInfo::cast(object); | 426 SharedFunctionInfo* shared = SharedFunctionInfo::cast(object); |
| 449 if (shared->ic_age() != heap->global_ic_age()) { | 427 if (shared->ic_age() != heap->global_ic_age()) { |
| 450 shared->ResetForNewContext(heap->global_ic_age()); | 428 shared->ResetForNewContext(heap->global_ic_age()); |
| 451 } | 429 } |
| 452 if (FLAG_cleanup_code_caches_at_gc) { | 430 if (FLAG_cleanup_code_caches_at_gc) { |
| 453 shared->ClearTypeFeedbackInfo(); | 431 shared->ClearTypeFeedbackInfo(); |
| 454 } | 432 } |
| 455 if (FLAG_cache_optimized_code && | 433 if (FLAG_cache_optimized_code && FLAG_flush_optimized_code_cache && |
| 456 FLAG_flush_optimized_code_cache && | |
| 457 !shared->optimized_code_map()->IsSmi()) { | 434 !shared->optimized_code_map()->IsSmi()) { |
| 458 // Always flush the optimized code map if requested by flag. | 435 // Always flush the optimized code map if requested by flag. |
| 459 shared->ClearOptimizedCodeMap(); | 436 shared->ClearOptimizedCodeMap(); |
| 460 } | 437 } |
| 461 MarkCompactCollector* collector = heap->mark_compact_collector(); | 438 MarkCompactCollector* collector = heap->mark_compact_collector(); |
| 462 if (collector->is_code_flushing_enabled()) { | 439 if (collector->is_code_flushing_enabled()) { |
| 463 if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) { | 440 if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) { |
| 464 // Add the shared function info holding an optimized code map to | 441 // Add the shared function info holding an optimized code map to |
| 465 // the code flusher for processing of code maps after marking. | 442 // the code flusher for processing of code maps after marking. |
| 466 collector->code_flusher()->AddOptimizedCodeMap(shared); | 443 collector->code_flusher()->AddOptimizedCodeMap(shared); |
| (...skipping 18 matching lines...) |
| 485 if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) { | 462 if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) { |
| 486 // Flush optimized code map on major GCs without code flushing, | 463 // Flush optimized code map on major GCs without code flushing, |
| 487 // needed because cached code doesn't contain breakpoints. | 464 // needed because cached code doesn't contain breakpoints. |
| 488 shared->ClearOptimizedCodeMap(); | 465 shared->ClearOptimizedCodeMap(); |
| 489 } | 466 } |
| 490 } | 467 } |
| 491 VisitSharedFunctionInfoStrongCode(heap, object); | 468 VisitSharedFunctionInfoStrongCode(heap, object); |
| 492 } | 469 } |
| 493 | 470 |
| 494 | 471 |
| 495 template<typename StaticVisitor> | 472 template <typename StaticVisitor> |
| 496 void StaticMarkingVisitor<StaticVisitor>::VisitConstantPoolArray( | 473 void StaticMarkingVisitor<StaticVisitor>::VisitConstantPoolArray( |
| 497 Map* map, HeapObject* object) { | 474 Map* map, HeapObject* object) { |
| 498 Heap* heap = map->GetHeap(); | 475 Heap* heap = map->GetHeap(); |
| 499 ConstantPoolArray* array = ConstantPoolArray::cast(object); | 476 ConstantPoolArray* array = ConstantPoolArray::cast(object); |
| 500 ConstantPoolArray::Iterator code_iter(array, ConstantPoolArray::CODE_PTR); | 477 ConstantPoolArray::Iterator code_iter(array, ConstantPoolArray::CODE_PTR); |
| 501 while (!code_iter.is_finished()) { | 478 while (!code_iter.is_finished()) { |
| 502 Address code_entry = reinterpret_cast<Address>( | 479 Address code_entry = reinterpret_cast<Address>( |
| 503 array->RawFieldOfElementAt(code_iter.next_index())); | 480 array->RawFieldOfElementAt(code_iter.next_index())); |
| 504 StaticVisitor::VisitCodeEntry(heap, code_entry); | 481 StaticVisitor::VisitCodeEntry(heap, code_entry); |
| 505 } | 482 } |
| 506 | 483 |
| 507 ConstantPoolArray::Iterator heap_iter(array, ConstantPoolArray::HEAP_PTR); | 484 ConstantPoolArray::Iterator heap_iter(array, ConstantPoolArray::HEAP_PTR); |
| 508 while (!heap_iter.is_finished()) { | 485 while (!heap_iter.is_finished()) { |
| 509 Object** slot = array->RawFieldOfElementAt(heap_iter.next_index()); | 486 Object** slot = array->RawFieldOfElementAt(heap_iter.next_index()); |
| 510 HeapObject* object = HeapObject::cast(*slot); | 487 HeapObject* object = HeapObject::cast(*slot); |
| 511 heap->mark_compact_collector()->RecordSlot(slot, slot, object); | 488 heap->mark_compact_collector()->RecordSlot(slot, slot, object); |
| 512 bool is_weak_object = | 489 bool is_weak_object = |
| 513 (array->get_weak_object_state() == | 490 (array->get_weak_object_state() == |
| 514 ConstantPoolArray::WEAK_OBJECTS_IN_OPTIMIZED_CODE && | 491 ConstantPoolArray::WEAK_OBJECTS_IN_OPTIMIZED_CODE && |
| 515 Code::IsWeakObjectInOptimizedCode(object)) || | 492 Code::IsWeakObjectInOptimizedCode(object)) || |
| 516 (array->get_weak_object_state() == | 493 (array->get_weak_object_state() == |
| 517 ConstantPoolArray::WEAK_OBJECTS_IN_IC && | 494 ConstantPoolArray::WEAK_OBJECTS_IN_IC && |
| 518 Code::IsWeakObjectInIC(object)); | 495 Code::IsWeakObjectInIC(object)); |
| 519 if (!is_weak_object) { | 496 if (!is_weak_object) { |
| 520 StaticVisitor::MarkObject(heap, object); | 497 StaticVisitor::MarkObject(heap, object); |
| 521 } | 498 } |
| 522 } | 499 } |
| 523 } | 500 } |
| 524 | 501 |
| 525 | 502 |
| 526 template<typename StaticVisitor> | 503 template <typename StaticVisitor> |
| 527 void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction( | 504 void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(Map* map, |
| 528 Map* map, HeapObject* object) { | 505 HeapObject* object) { |
| 529 Heap* heap = map->GetHeap(); | 506 Heap* heap = map->GetHeap(); |
| 530 JSFunction* function = JSFunction::cast(object); | 507 JSFunction* function = JSFunction::cast(object); |
| 531 MarkCompactCollector* collector = heap->mark_compact_collector(); | 508 MarkCompactCollector* collector = heap->mark_compact_collector(); |
| 532 if (collector->is_code_flushing_enabled()) { | 509 if (collector->is_code_flushing_enabled()) { |
| 533 if (IsFlushable(heap, function)) { | 510 if (IsFlushable(heap, function)) { |
| 534 // This function's code looks flushable. But we have to postpone | 511 // This function's code looks flushable. But we have to postpone |
| 535 // the decision until we see all functions that point to the same | 512 // the decision until we see all functions that point to the same |
| 536 // SharedFunctionInfo because some of them might be optimized. | 513 // SharedFunctionInfo because some of them might be optimized. |
| 537 // That would also make the non-optimized version of the code | 514 // That would also make the non-optimized version of the code |
| 538 // non-flushable, because it is required for bailing out from | 515 // non-flushable, because it is required for bailing out from |
| (...skipping 15 matching lines...) |
| 554 StaticVisitor::MarkObject(heap, function->shared()->code()); | 531 StaticVisitor::MarkObject(heap, function->shared()->code()); |
| 555 if (function->code()->kind() == Code::OPTIMIZED_FUNCTION) { | 532 if (function->code()->kind() == Code::OPTIMIZED_FUNCTION) { |
| 556 MarkInlinedFunctionsCode(heap, function->code()); | 533 MarkInlinedFunctionsCode(heap, function->code()); |
| 557 } | 534 } |
| 558 } | 535 } |
| 559 } | 536 } |
| 560 VisitJSFunctionStrongCode(heap, object); | 537 VisitJSFunctionStrongCode(heap, object); |
| 561 } | 538 } |
| 562 | 539 |
| 563 | 540 |
| 564 template<typename StaticVisitor> | 541 template <typename StaticVisitor> |
| 565 void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp( | 542 void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(Map* map, |
| 566 Map* map, HeapObject* object) { | 543 HeapObject* object) { |
| 567 int last_property_offset = | 544 int last_property_offset = |
| 568 JSRegExp::kSize + kPointerSize * map->inobject_properties(); | 545 JSRegExp::kSize + kPointerSize * map->inobject_properties(); |
| 569 StaticVisitor::VisitPointers(map->GetHeap(), | 546 StaticVisitor::VisitPointers( |
| 570 HeapObject::RawField(object, JSRegExp::kPropertiesOffset), | 547 map->GetHeap(), HeapObject::RawField(object, JSRegExp::kPropertiesOffset), |
| 571 HeapObject::RawField(object, last_property_offset)); | 548 HeapObject::RawField(object, last_property_offset)); |
| 572 } | 549 } |
| 573 | 550 |
| 574 | 551 |
| 575 template<typename StaticVisitor> | 552 template <typename StaticVisitor> |
| 576 void StaticMarkingVisitor<StaticVisitor>::VisitJSArrayBuffer( | 553 void StaticMarkingVisitor<StaticVisitor>::VisitJSArrayBuffer( |
| 577 Map* map, HeapObject* object) { | 554 Map* map, HeapObject* object) { |
| 578 Heap* heap = map->GetHeap(); | 555 Heap* heap = map->GetHeap(); |
| 579 | 556 |
| 580 STATIC_ASSERT( | 557 STATIC_ASSERT(JSArrayBuffer::kWeakFirstViewOffset == |
| 581 JSArrayBuffer::kWeakFirstViewOffset == | 558 JSArrayBuffer::kWeakNextOffset + kPointerSize); |
| 582 JSArrayBuffer::kWeakNextOffset + kPointerSize); | |
| 583 StaticVisitor::VisitPointers( | 559 StaticVisitor::VisitPointers( |
| 584 heap, | 560 heap, |
| 585 HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset), | 561 HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset), |
| 586 HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset)); | 562 HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset)); |
| 587 StaticVisitor::VisitPointers( | 563 StaticVisitor::VisitPointers( |
| 588 heap, | 564 heap, HeapObject::RawField( |
| 589 HeapObject::RawField(object, | 565 object, JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize), |
| 590 JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize), | |
| 591 HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields)); | 566 HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields)); |
| 592 } | 567 } |
| 593 | 568 |
| 594 | 569 |
| 595 template<typename StaticVisitor> | 570 template <typename StaticVisitor> |
| 596 void StaticMarkingVisitor<StaticVisitor>::VisitJSTypedArray( | 571 void StaticMarkingVisitor<StaticVisitor>::VisitJSTypedArray( |
| 597 Map* map, HeapObject* object) { | 572 Map* map, HeapObject* object) { |
| 598 StaticVisitor::VisitPointers( | 573 StaticVisitor::VisitPointers( |
| 599 map->GetHeap(), | 574 map->GetHeap(), |
| 600 HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset), | 575 HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset), |
| 601 HeapObject::RawField(object, JSTypedArray::kWeakNextOffset)); | 576 HeapObject::RawField(object, JSTypedArray::kWeakNextOffset)); |
| 602 StaticVisitor::VisitPointers( | 577 StaticVisitor::VisitPointers( |
| 603 map->GetHeap(), | 578 map->GetHeap(), HeapObject::RawField( |
| 604 HeapObject::RawField(object, | 579 object, JSTypedArray::kWeakNextOffset + kPointerSize), |
| 605 JSTypedArray::kWeakNextOffset + kPointerSize), | |
| 606 HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields)); | 580 HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields)); |
| 607 } | 581 } |
| 608 | 582 |
| 609 | 583 |
| 610 template<typename StaticVisitor> | 584 template <typename StaticVisitor> |
| 611 void StaticMarkingVisitor<StaticVisitor>::VisitJSDataView( | 585 void StaticMarkingVisitor<StaticVisitor>::VisitJSDataView(Map* map, |
| 612 Map* map, HeapObject* object) { | 586 HeapObject* object) { |
| 613 StaticVisitor::VisitPointers( | 587 StaticVisitor::VisitPointers( |
| 614 map->GetHeap(), | 588 map->GetHeap(), |
| 615 HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset), | 589 HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset), |
| 616 HeapObject::RawField(object, JSDataView::kWeakNextOffset)); | 590 HeapObject::RawField(object, JSDataView::kWeakNextOffset)); |
| 617 StaticVisitor::VisitPointers( | 591 StaticVisitor::VisitPointers( |
| 618 map->GetHeap(), | 592 map->GetHeap(), |
| 619 HeapObject::RawField(object, | 593 HeapObject::RawField(object, JSDataView::kWeakNextOffset + kPointerSize), |
| 620 JSDataView::kWeakNextOffset + kPointerSize), | |
| 621 HeapObject::RawField(object, JSDataView::kSizeWithInternalFields)); | 594 HeapObject::RawField(object, JSDataView::kSizeWithInternalFields)); |
| 622 } | 595 } |
| 623 | 596 |
| 624 | 597 |
| 625 template<typename StaticVisitor> | 598 template <typename StaticVisitor> |
| 626 void StaticMarkingVisitor<StaticVisitor>::MarkMapContents( | 599 void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(Heap* heap, |
| 627 Heap* heap, Map* map) { | 600 Map* map) { |
| 628 // Make sure that the back pointer stored either in the map itself or | 601 // Make sure that the back pointer stored either in the map itself or |
| 629 // inside its transitions array is marked. Skip recording the back | 602 // inside its transitions array is marked. Skip recording the back |
| 630 // pointer slot since map space is not compacted. | 603 // pointer slot since map space is not compacted. |
| 631 StaticVisitor::MarkObject(heap, HeapObject::cast(map->GetBackPointer())); | 604 StaticVisitor::MarkObject(heap, HeapObject::cast(map->GetBackPointer())); |
| 632 | 605 |
| 633 // Treat pointers in the transitions array as weak and also mark that | 606 // Treat pointers in the transitions array as weak and also mark that |
| 634 // array to prevent visiting it later. Skip recording the transition | 607 // array to prevent visiting it later. Skip recording the transition |
| 635 // array slot, since it will be implicitly recorded when the pointer | 608 // array slot, since it will be implicitly recorded when the pointer |
| 636 // fields of this map are visited. | 609 // fields of this map are visited. |
| 637 if (map->HasTransitionArray()) { | 610 if (map->HasTransitionArray()) { |
| 638 TransitionArray* transitions = map->transitions(); | 611 TransitionArray* transitions = map->transitions(); |
| 639 MarkTransitionArray(heap, transitions); | 612 MarkTransitionArray(heap, transitions); |
| 640 } | 613 } |
| 641 | 614 |
| 642 // Since descriptor arrays are potentially shared, ensure that only the | 615 // Since descriptor arrays are potentially shared, ensure that only the |
| 643 // descriptors that belong to this map are marked. The first time a | 616 // descriptors that belong to this map are marked. The first time a |
| 644 // non-empty descriptor array is marked, its header is also visited. The slot | 617 // non-empty descriptor array is marked, its header is also visited. The slot |
| 645 // holding the descriptor array will be implicitly recorded when the pointer | 618 // holding the descriptor array will be implicitly recorded when the pointer |
| 646 // fields of this map are visited. | 619 // fields of this map are visited. |
| 647 DescriptorArray* descriptors = map->instance_descriptors(); | 620 DescriptorArray* descriptors = map->instance_descriptors(); |
| 648 if (StaticVisitor::MarkObjectWithoutPush(heap, descriptors) && | 621 if (StaticVisitor::MarkObjectWithoutPush(heap, descriptors) && |
| 649 descriptors->length() > 0) { | 622 descriptors->length() > 0) { |
| 650 StaticVisitor::VisitPointers(heap, | 623 StaticVisitor::VisitPointers(heap, descriptors->GetFirstElementAddress(), |
| 651 descriptors->GetFirstElementAddress(), | 624 descriptors->GetDescriptorEndSlot(0)); |
| 652 descriptors->GetDescriptorEndSlot(0)); | |
| 653 } | 625 } |
| 654 int start = 0; | 626 int start = 0; |
| 655 int end = map->NumberOfOwnDescriptors(); | 627 int end = map->NumberOfOwnDescriptors(); |
| 656 if (start < end) { | 628 if (start < end) { |
| 657 StaticVisitor::VisitPointers(heap, | 629 StaticVisitor::VisitPointers(heap, |
| 658 descriptors->GetDescriptorStartSlot(start), | 630 descriptors->GetDescriptorStartSlot(start), |
| 659 descriptors->GetDescriptorEndSlot(end)); | 631 descriptors->GetDescriptorEndSlot(end)); |
| 660 } | 632 } |
| 661 | 633 |
| 662 // Mark prototype dependent codes array but do not push it onto marking | 634 // Mark prototype dependent codes array but do not push it onto marking |
| 663 // stack, this will make references from it weak. We will clean dead | 635 // stack, this will make references from it weak. We will clean dead |
| 664 // codes when we iterate over maps in ClearNonLiveTransitions. | 636 // codes when we iterate over maps in ClearNonLiveTransitions. |
| 665 Object** slot = HeapObject::RawField(map, Map::kDependentCodeOffset); | 637 Object** slot = HeapObject::RawField(map, Map::kDependentCodeOffset); |
| 666 HeapObject* obj = HeapObject::cast(*slot); | 638 HeapObject* obj = HeapObject::cast(*slot); |
| 667 heap->mark_compact_collector()->RecordSlot(slot, slot, obj); | 639 heap->mark_compact_collector()->RecordSlot(slot, slot, obj); |
| 668 StaticVisitor::MarkObjectWithoutPush(heap, obj); | 640 StaticVisitor::MarkObjectWithoutPush(heap, obj); |
| 669 | 641 |
| 670 // Mark the pointer fields of the Map. Since the transitions array has | 642 // Mark the pointer fields of the Map. Since the transitions array has |
| 671 // been marked already, it is fine that one of these fields contains a | 643 // been marked already, it is fine that one of these fields contains a |
| 672 // pointer to it. | 644 // pointer to it. |
| 673 StaticVisitor::VisitPointers(heap, | 645 StaticVisitor::VisitPointers( |
| 674 HeapObject::RawField(map, Map::kPointerFieldsBeginOffset), | 646 heap, HeapObject::RawField(map, Map::kPointerFieldsBeginOffset), |
| 675 HeapObject::RawField(map, Map::kPointerFieldsEndOffset)); | 647 HeapObject::RawField(map, Map::kPointerFieldsEndOffset)); |
| 676 } | 648 } |
| 677 | 649 |
| 678 | 650 |
| 679 template<typename StaticVisitor> | 651 template <typename StaticVisitor> |
| 680 void StaticMarkingVisitor<StaticVisitor>::MarkTransitionArray( | 652 void StaticMarkingVisitor<StaticVisitor>::MarkTransitionArray( |
| 681 Heap* heap, TransitionArray* transitions) { | 653 Heap* heap, TransitionArray* transitions) { |
| 682 if (!StaticVisitor::MarkObjectWithoutPush(heap, transitions)) return; | 654 if (!StaticVisitor::MarkObjectWithoutPush(heap, transitions)) return; |
| 683 | 655 |
| 684 // Simple transitions do not have keys nor prototype transitions. | 656 // Simple transitions do not have keys nor prototype transitions. |
| 685 if (transitions->IsSimpleTransition()) return; | 657 if (transitions->IsSimpleTransition()) return; |
| 686 | 658 |
| 687 if (transitions->HasPrototypeTransitions()) { | 659 if (transitions->HasPrototypeTransitions()) { |
| 688 // Mark prototype transitions array but do not push it onto marking | 660 // Mark prototype transitions array but do not push it onto marking |
| 689 // stack, this will make references from it weak. We will clean dead | 661 // stack, this will make references from it weak. We will clean dead |
| 690 // prototype transitions in ClearNonLiveTransitions. | 662 // prototype transitions in ClearNonLiveTransitions. |
| 691 Object** slot = transitions->GetPrototypeTransitionsSlot(); | 663 Object** slot = transitions->GetPrototypeTransitionsSlot(); |
| 692 HeapObject* obj = HeapObject::cast(*slot); | 664 HeapObject* obj = HeapObject::cast(*slot); |
| 693 heap->mark_compact_collector()->RecordSlot(slot, slot, obj); | 665 heap->mark_compact_collector()->RecordSlot(slot, slot, obj); |
| 694 StaticVisitor::MarkObjectWithoutPush(heap, obj); | 666 StaticVisitor::MarkObjectWithoutPush(heap, obj); |
| 695 } | 667 } |
| 696 | 668 |
| 697 for (int i = 0; i < transitions->number_of_transitions(); ++i) { | 669 for (int i = 0; i < transitions->number_of_transitions(); ++i) { |
| 698 StaticVisitor::VisitPointer(heap, transitions->GetKeySlot(i)); | 670 StaticVisitor::VisitPointer(heap, transitions->GetKeySlot(i)); |
| 699 } | 671 } |
| 700 } | 672 } |
| 701 | 673 |
| 702 | 674 |
| 703 template<typename StaticVisitor> | 675 template <typename StaticVisitor> |
| 704 void StaticMarkingVisitor<StaticVisitor>::MarkInlinedFunctionsCode( | 676 void StaticMarkingVisitor<StaticVisitor>::MarkInlinedFunctionsCode(Heap* heap, |
| 705 Heap* heap, Code* code) { | 677 Code* code) { |
| 706 // Skip in absence of inlining. | 678 // Skip in absence of inlining. |
| 707 // TODO(turbofan): Revisit once we support inlining. | 679 // TODO(turbofan): Revisit once we support inlining. |
| 708 if (code->is_turbofanned()) return; | 680 if (code->is_turbofanned()) return; |
| 709 // For optimized functions we should retain both non-optimized version | 681 // For optimized functions we should retain both non-optimized version |
| 710 // of its code and non-optimized version of all inlined functions. | 682 // of its code and non-optimized version of all inlined functions. |
| 711 // This is required to support bailing out from inlined code. | 683 // This is required to support bailing out from inlined code. |
| 712 DeoptimizationInputData* data = | 684 DeoptimizationInputData* data = |
| 713 DeoptimizationInputData::cast(code->deoptimization_data()); | 685 DeoptimizationInputData::cast(code->deoptimization_data()); |
| 714 FixedArray* literals = data->LiteralArray(); | 686 FixedArray* literals = data->LiteralArray(); |
| 715 for (int i = 0, count = data->InlinedFunctionCount()->value(); | 687 for (int i = 0, count = data->InlinedFunctionCount()->value(); i < count; |
| 716 i < count; | |
| 717 i++) { | 688 i++) { |
| 718 JSFunction* inlined = JSFunction::cast(literals->get(i)); | 689 JSFunction* inlined = JSFunction::cast(literals->get(i)); |
| 719 StaticVisitor::MarkObject(heap, inlined->shared()->code()); | 690 StaticVisitor::MarkObject(heap, inlined->shared()->code()); |
| 720 } | 691 } |
| 721 } | 692 } |
| 722 | 693 |
| 723 | 694 |
| 724 inline static bool IsValidNonBuiltinContext(Object* context) { | 695 inline static bool IsValidNonBuiltinContext(Object* context) { |
| 725 return context->IsContext() && | 696 return context->IsContext() && |
| 726 !Context::cast(context)->global_object()->IsJSBuiltinsObject(); | 697 !Context::cast(context)->global_object()->IsJSBuiltinsObject(); |
| 727 } | 698 } |
| 728 | 699 |
| 729 | 700 |
| 730 inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) { | 701 inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) { |
| 731 Object* undefined = heap->undefined_value(); | 702 Object* undefined = heap->undefined_value(); |
| 732 return (info->script() != undefined) && | 703 return (info->script() != undefined) && |
| 733 (reinterpret_cast<Script*>(info->script())->source() != undefined); | 704 (reinterpret_cast<Script*>(info->script())->source() != undefined); |
| 734 } | 705 } |
| 735 | 706 |
| 736 | 707 |
| 737 template<typename StaticVisitor> | 708 template <typename StaticVisitor> |
| 738 bool StaticMarkingVisitor<StaticVisitor>::IsFlushable( | 709 bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(Heap* heap, |
| 739 Heap* heap, JSFunction* function) { | 710 JSFunction* function) { |
| 740 SharedFunctionInfo* shared_info = function->shared(); | 711 SharedFunctionInfo* shared_info = function->shared(); |
| 741 | 712 |
| 742 // Code is either on stack, in compilation cache or referenced | 713 // Code is either on stack, in compilation cache or referenced |
| 743 // by optimized version of function. | 714 // by optimized version of function. |
| 744 MarkBit code_mark = Marking::MarkBitFrom(function->code()); | 715 MarkBit code_mark = Marking::MarkBitFrom(function->code()); |
| 745 if (code_mark.Get()) { | 716 if (code_mark.Get()) { |
| 746 return false; | 717 return false; |
| 747 } | 718 } |
| 748 | 719 |
| 749 // The function must have a valid context and not be a builtin. | 720 // The function must have a valid context and not be a builtin. |
| 750 if (!IsValidNonBuiltinContext(function->context())) { | 721 if (!IsValidNonBuiltinContext(function->context())) { |
| 751 return false; | 722 return false; |
| 752 } | 723 } |
| 753 | 724 |
| 754 // We do not (yet) flush code for optimized functions. | 725 // We do not (yet) flush code for optimized functions. |
| 755 if (function->code() != shared_info->code()) { | 726 if (function->code() != shared_info->code()) { |
| 756 return false; | 727 return false; |
| 757 } | 728 } |
| 758 | 729 |
| 759 // Check age of optimized code. | 730 // Check age of optimized code. |
| 760 if (FLAG_age_code && !function->code()->IsOld()) { | 731 if (FLAG_age_code && !function->code()->IsOld()) { |
| 761 return false; | 732 return false; |
| 762 } | 733 } |
| 763 | 734 |
| 764 return IsFlushable(heap, shared_info); | 735 return IsFlushable(heap, shared_info); |
| 765 } | 736 } |
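Note: the flushability checks above run cheapest-first: live code mark, valid non-builtin context, unoptimized code, then code age, before deferring to the SharedFunctionInfo overload. A hedged usage sketch (the caller shape and the name `candidate` are assumed, not from this diff):

    // How a code-flushing pass might consult this predicate; the actual
    // mechanism (V8's CodeFlusher) is outside this diff. `candidate` is
    // a hypothetical JSFunction* being considered for flushing.
    if (StaticVisitor::IsFlushable(heap, candidate)) {
      // Safe to detach the compiled code and fall back to lazy compilation.
    }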
| 766 | 737 |
| 767 | 738 |
| 768 template<typename StaticVisitor> | 739 template <typename StaticVisitor> |
| 769 bool StaticMarkingVisitor<StaticVisitor>::IsFlushable( | 740 bool StaticMarkingVisitor<StaticVisitor>::IsFlushable( |
| 770 Heap* heap, SharedFunctionInfo* shared_info) { | 741 Heap* heap, SharedFunctionInfo* shared_info) { |
| 771 // Code is either on stack, in compilation cache or referenced | 742 // Code is either on stack, in compilation cache or referenced |
| 772 // by optimized version of function. | 743 // by optimized version of function. |
| 773 MarkBit code_mark = Marking::MarkBitFrom(shared_info->code()); | 744 MarkBit code_mark = Marking::MarkBitFrom(shared_info->code()); |
| 774 if (code_mark.Get()) { | 745 if (code_mark.Get()) { |
| 775 return false; | 746 return false; |
| 776 } | 747 } |
| 777 | 748 |
| 778 // The function must be compiled and have the source code available, | 749 // The function must be compiled and have the source code available, |
| (...skipping 37 matching lines...) |
| 816 | 787 |
| 817 // Check age of code. If code aging is disabled we never flush. | 788 // Check age of code. If code aging is disabled we never flush. |
| 818 if (!FLAG_age_code || !shared_info->code()->IsOld()) { | 789 if (!FLAG_age_code || !shared_info->code()->IsOld()) { |
| 819 return false; | 790 return false; |
| 820 } | 791 } |
| 821 | 792 |
| 822 return true; | 793 return true; |
| 823 } | 794 } |
| 824 | 795 |
| 825 | 796 |
| 826 template<typename StaticVisitor> | 797 template <typename StaticVisitor> |
| 827 void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoStrongCode( | 798 void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoStrongCode( |
| 828 Heap* heap, HeapObject* object) { | 799 Heap* heap, HeapObject* object) { |
| 829 Object** start_slot = | 800 Object** start_slot = HeapObject::RawField( |
| 830 HeapObject::RawField(object, | 801 object, SharedFunctionInfo::BodyDescriptor::kStartOffset); |
| 831 SharedFunctionInfo::BodyDescriptor::kStartOffset); | 802 Object** end_slot = HeapObject::RawField( |
| 832 Object** end_slot = | 803 object, SharedFunctionInfo::BodyDescriptor::kEndOffset); |
| 833 HeapObject::RawField(object, | |
| 834 SharedFunctionInfo::BodyDescriptor::kEndOffset); | |
| 835 StaticVisitor::VisitPointers(heap, start_slot, end_slot); | 804 StaticVisitor::VisitPointers(heap, start_slot, end_slot); |
| 836 } | 805 } |
| 837 | 806 |
| 838 | 807 |
| 839 template<typename StaticVisitor> | 808 template <typename StaticVisitor> |
| 840 void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode( | 809 void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode( |
| 841 Heap* heap, HeapObject* object) { | 810 Heap* heap, HeapObject* object) { |
| 842 Object** name_slot = | 811 Object** name_slot = |
| 843 HeapObject::RawField(object, SharedFunctionInfo::kNameOffset); | 812 HeapObject::RawField(object, SharedFunctionInfo::kNameOffset); |
| 844 StaticVisitor::VisitPointer(heap, name_slot); | 813 StaticVisitor::VisitPointer(heap, name_slot); |
| 845 | 814 |
| 846 // Skip visiting kCodeOffset as it is treated weakly here. | 815 // Skip visiting kCodeOffset as it is treated weakly here. |
| 847 STATIC_ASSERT(SharedFunctionInfo::kNameOffset + kPointerSize == | 816 STATIC_ASSERT(SharedFunctionInfo::kNameOffset + kPointerSize == |
| 848 SharedFunctionInfo::kCodeOffset); | 817 SharedFunctionInfo::kCodeOffset); |
| 849 STATIC_ASSERT(SharedFunctionInfo::kCodeOffset + kPointerSize == | 818 STATIC_ASSERT(SharedFunctionInfo::kCodeOffset + kPointerSize == |
| 850 SharedFunctionInfo::kOptimizedCodeMapOffset); | 819 SharedFunctionInfo::kOptimizedCodeMapOffset); |
| 851 | 820 |
| 852 Object** start_slot = | 821 Object** start_slot = |
| 853 HeapObject::RawField(object, | 822 HeapObject::RawField(object, SharedFunctionInfo::kOptimizedCodeMapOffset); |
| 854 SharedFunctionInfo::kOptimizedCodeMapOffset); | 823 Object** end_slot = HeapObject::RawField( |
| 855 Object** end_slot = | 824 object, SharedFunctionInfo::BodyDescriptor::kEndOffset); |
| 856 HeapObject::RawField(object, | |
| 857 SharedFunctionInfo::BodyDescriptor::kEndOffset); | |
| 858 StaticVisitor::VisitPointers(heap, start_slot, end_slot); | 825 StaticVisitor::VisitPointers(heap, start_slot, end_slot); |
| 859 } | 826 } |
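Note: both weak-code visitors use the same trick: visit the pointer ranges on either side of the weakly-treated slot, and pin the field order with STATIC_ASSERTs so a layout change breaks the build instead of silently visiting (or skipping) the wrong slot. A generic, self-contained sketch of the pattern (names invented for illustration, standard C++ in place of V8's macros):

    #include <cstddef>

    struct Layout {
      void* name;
      void* code;           // treated weakly: deliberately not visited
      void* optimized_map;  // strong visiting resumes here
    };
    static_assert(offsetof(Layout, name) + sizeof(void*) ==
                      offsetof(Layout, code),
                  "name slot must directly precede code slot");
    static_assert(offsetof(Layout, code) + sizeof(void*) ==
                      offsetof(Layout, optimized_map),
                  "code slot must directly precede optimized code map");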
| 860 | 827 |
| 861 | 828 |
| 862 template<typename StaticVisitor> | 829 template <typename StaticVisitor> |
| 863 void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode( | 830 void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode( |
| 864 Heap* heap, HeapObject* object) { | 831 Heap* heap, HeapObject* object) { |
| 865 Object** start_slot = | 832 Object** start_slot = |
| 866 HeapObject::RawField(object, JSFunction::kPropertiesOffset); | 833 HeapObject::RawField(object, JSFunction::kPropertiesOffset); |
| 867 Object** end_slot = | 834 Object** end_slot = |
| 868 HeapObject::RawField(object, JSFunction::kCodeEntryOffset); | 835 HeapObject::RawField(object, JSFunction::kCodeEntryOffset); |
| 869 StaticVisitor::VisitPointers(heap, start_slot, end_slot); | 836 StaticVisitor::VisitPointers(heap, start_slot, end_slot); |
| 870 | 837 |
| 871 VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset); | 838 VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset); |
| 872 STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize == | 839 STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize == |
| 873 JSFunction::kPrototypeOrInitialMapOffset); | 840 JSFunction::kPrototypeOrInitialMapOffset); |
| 874 | 841 |
| 875 start_slot = | 842 start_slot = |
| 876 HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset); | 843 HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset); |
| 877 end_slot = | 844 end_slot = HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset); |
| 878 HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset); | |
| 879 StaticVisitor::VisitPointers(heap, start_slot, end_slot); | 845 StaticVisitor::VisitPointers(heap, start_slot, end_slot); |
| 880 } | 846 } |
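Note: the split at kCodeEntryOffset above is load-bearing. As I understand the layout, that field holds a raw instruction address rather than a tagged Object*, so it cannot be walked by VisitPointers() and gets the dedicated VisitCodeEntry() call instead; the STATIC_ASSERT then guarantees the second strong range resumes exactly one word later.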
| 881 | 847 |
| 882 | 848 |
| 883 template<typename StaticVisitor> | 849 template <typename StaticVisitor> |
| 884 void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode( | 850 void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode( |
| 885 Heap* heap, HeapObject* object) { | 851 Heap* heap, HeapObject* object) { |
| 886 Object** start_slot = | 852 Object** start_slot = |
| 887 HeapObject::RawField(object, JSFunction::kPropertiesOffset); | 853 HeapObject::RawField(object, JSFunction::kPropertiesOffset); |
| 888 Object** end_slot = | 854 Object** end_slot = |
| 889 HeapObject::RawField(object, JSFunction::kCodeEntryOffset); | 855 HeapObject::RawField(object, JSFunction::kCodeEntryOffset); |
| 890 StaticVisitor::VisitPointers(heap, start_slot, end_slot); | 856 StaticVisitor::VisitPointers(heap, start_slot, end_slot); |
| 891 | 857 |
| 892 // Skip visiting kCodeEntryOffset as it is treated weakly here. | 858 // Skip visiting kCodeEntryOffset as it is treated weakly here. |
| 893 STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize == | 859 STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize == |
| 894 JSFunction::kPrototypeOrInitialMapOffset); | 860 JSFunction::kPrototypeOrInitialMapOffset); |
| 895 | 861 |
| 896 start_slot = | 862 start_slot = |
| 897 HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset); | 863 HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset); |
| 898 end_slot = | 864 end_slot = HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset); |
| 899 HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset); | |
| 900 StaticVisitor::VisitPointers(heap, start_slot, end_slot); | 865 StaticVisitor::VisitPointers(heap, start_slot, end_slot); |
| 901 } | 866 } |
| 902 | 867 |
| 903 | 868 |
| 904 void Code::CodeIterateBody(ObjectVisitor* v) { | 869 void Code::CodeIterateBody(ObjectVisitor* v) { |
| 905 int mode_mask = RelocInfo::kCodeTargetMask | | 870 int mode_mask = RelocInfo::kCodeTargetMask | |
| 906 RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) | | 871 RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) | |
| 907 RelocInfo::ModeMask(RelocInfo::CELL) | | 872 RelocInfo::ModeMask(RelocInfo::CELL) | |
| 908 RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) | | 873 RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) | |
| 909 RelocInfo::ModeMask(RelocInfo::JS_RETURN) | | 874 RelocInfo::ModeMask(RelocInfo::JS_RETURN) | |
| (...skipping 10 matching lines...) |
| 920 IteratePointer(v, kConstantPoolOffset); | 885 IteratePointer(v, kConstantPoolOffset); |
| 921 | 886 |
| 922 RelocIterator it(this, mode_mask); | 887 RelocIterator it(this, mode_mask); |
| 923 Isolate* isolate = this->GetIsolate(); | 888 Isolate* isolate = this->GetIsolate(); |
| 924 for (; !it.done(); it.next()) { | 889 for (; !it.done(); it.next()) { |
| 925 it.rinfo()->Visit(isolate, v); | 890 it.rinfo()->Visit(isolate, v); |
| 926 } | 891 } |
| 927 } | 892 } |
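Note: the mode mask is the standard RelocIterator filter: each RelocInfo mode contributes one bit, so OR-ing ModeMask() values makes the iterator stop only at entries of those kinds. A minimal hedged sketch using only APIs visible in this function (`code` stands in for the Code object being iterated):

    // Visit just the embedded objects and cells in `code`.
    int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
               RelocInfo::ModeMask(RelocInfo::CELL);
    for (RelocIterator it(code, mask); !it.done(); it.next()) {
      it.rinfo()->Visit(isolate, v);  // only the two requested kinds
    }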
| 928 | 893 |
| 929 | 894 |
| 930 template<typename StaticVisitor> | 895 template <typename StaticVisitor> |
| 931 void Code::CodeIterateBody(Heap* heap) { | 896 void Code::CodeIterateBody(Heap* heap) { |
| 932 int mode_mask = RelocInfo::kCodeTargetMask | | 897 int mode_mask = RelocInfo::kCodeTargetMask | |
| 933 RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) | | 898 RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) | |
| 934 RelocInfo::ModeMask(RelocInfo::CELL) | | 899 RelocInfo::ModeMask(RelocInfo::CELL) | |
| 935 RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) | | 900 RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) | |
| 936 RelocInfo::ModeMask(RelocInfo::JS_RETURN) | | 901 RelocInfo::ModeMask(RelocInfo::JS_RETURN) | |
| 937 RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) | | 902 RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) | |
| 938 RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY); | 903 RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY); |
| 939 | 904 |
| 940 // There are two places where we iterate code bodies: here and the non- | 905 // There are two places where we iterate code bodies: here and the non- |
| 941 // templated CodeIterateBody (above). They should be kept in sync. | 906 // templated CodeIterateBody (above). They should be kept in sync. |
| 942 StaticVisitor::VisitPointer( | 907 StaticVisitor::VisitPointer( |
| 943 heap, | 908 heap, |
| 944 reinterpret_cast<Object**>(this->address() + kRelocationInfoOffset)); | 909 reinterpret_cast<Object**>(this->address() + kRelocationInfoOffset)); |
| 945 StaticVisitor::VisitPointer( | 910 StaticVisitor::VisitPointer( |
| 946 heap, | 911 heap, reinterpret_cast<Object**>(this->address() + kHandlerTableOffset)); |
| 947 reinterpret_cast<Object**>(this->address() + kHandlerTableOffset)); | |
| 948 StaticVisitor::VisitPointer( | 912 StaticVisitor::VisitPointer( |
| 949 heap, | 913 heap, |
| 950 reinterpret_cast<Object**>(this->address() + kDeoptimizationDataOffset)); | 914 reinterpret_cast<Object**>(this->address() + kDeoptimizationDataOffset)); |
| 951 StaticVisitor::VisitPointer( | 915 StaticVisitor::VisitPointer( |
| 952 heap, | 916 heap, |
| 953 reinterpret_cast<Object**>(this->address() + kTypeFeedbackInfoOffset)); | 917 reinterpret_cast<Object**>(this->address() + kTypeFeedbackInfoOffset)); |
| 954 StaticVisitor::VisitNextCodeLink( | 918 StaticVisitor::VisitNextCodeLink( |
| 955 heap, | 919 heap, reinterpret_cast<Object**>(this->address() + kNextCodeLinkOffset)); |
| 956 reinterpret_cast<Object**>(this->address() + kNextCodeLinkOffset)); | |
| 957 StaticVisitor::VisitPointer( | 920 StaticVisitor::VisitPointer( |
| 958 heap, | 921 heap, reinterpret_cast<Object**>(this->address() + kConstantPoolOffset)); |
| 959 reinterpret_cast<Object**>(this->address() + kConstantPoolOffset)); | |
| 960 | 922 |
| 961 | 923 |
| 962 RelocIterator it(this, mode_mask); | 924 RelocIterator it(this, mode_mask); |
| 963 for (; !it.done(); it.next()) { | 925 for (; !it.done(); it.next()) { |
| 964 it.rinfo()->template Visit<StaticVisitor>(heap); | 926 it.rinfo()->template Visit<StaticVisitor>(heap); |
| 965 } | 927 } |
| 966 } | 928 } |
| 967 | 929 } |
| 968 | 930 } // namespace v8::internal |
| 969 } } // namespace v8::internal | |
| 970 | 931 |
| 971 #endif // V8_OBJECTS_VISITING_INL_H_ | 932 #endif // V8_OBJECTS_VISITING_INL_H_ |