| OLD | NEW |
| (Empty) |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | |
| 2 // Use of this source code is governed by a BSD-style license that can be | |
| 3 // found in the LICENSE file. | |
| 4 | |
| 5 #ifndef V8_OBJECTS_VISITING_H_ | |
| 6 #define V8_OBJECTS_VISITING_H_ | |
| 7 | |
| 8 #include "src/allocation.h" | |
| 9 | |
| 10 // This file provides base classes and auxiliary methods for defining | |
| 11 // static object visitors used during GC. | |
| 12 // Visiting HeapObject body with a normal ObjectVisitor requires performing | |
| 13 // two switches on object's instance type to determine object size and layout | |
| 14 // and one or more virtual method calls on visitor itself. | |
| 15 // Static visitor is different: it provides a dispatch table which contains | |
| 16 // pointers to specialized visit functions. Each map has the visitor_id | |
| 17 // field which contains an index of specialized visitor to use. | |
| 18 | |
| 19 namespace v8 { | |
| 20 namespace internal { | |
| 21 | |
| 22 | |
| 23 // Base class for all static visitors. | |
// Base class for all static visitors. Declares the visitor id enumeration
// and the helpers that map an object's type/size to a visitor id.
class StaticVisitorBase : public AllStatic {
 public:
  // Enumerates every specialized visitor. Each entry expands to a
  // kVisit##id enumerator below and corresponds to one slot in a
  // VisitorDispatchTable.
#define VISITOR_ID_LIST(V)    \
  V(SeqOneByteString)         \
  V(SeqTwoByteString)         \
  V(ShortcutCandidate)        \
  V(ByteArray)                \
  V(FreeSpace)                \
  V(FixedArray)               \
  V(FixedDoubleArray)         \
  V(FixedTypedArray)          \
  V(FixedFloat64Array)        \
  V(ConstantPoolArray)        \
  V(NativeContext)            \
  V(AllocationSite)           \
  V(DataObject2)              \
  V(DataObject3)              \
  V(DataObject4)              \
  V(DataObject5)              \
  V(DataObject6)              \
  V(DataObject7)              \
  V(DataObject8)              \
  V(DataObject9)              \
  V(DataObjectGeneric)        \
  V(JSObject2)                \
  V(JSObject3)                \
  V(JSObject4)                \
  V(JSObject5)                \
  V(JSObject6)                \
  V(JSObject7)                \
  V(JSObject8)                \
  V(JSObject9)                \
  V(JSObjectGeneric)          \
  V(Struct2)                  \
  V(Struct3)                  \
  V(Struct4)                  \
  V(Struct5)                  \
  V(Struct6)                  \
  V(Struct7)                  \
  V(Struct8)                  \
  V(Struct9)                  \
  V(StructGeneric)            \
  V(ConsString)               \
  V(SlicedString)             \
  V(Symbol)                   \
  V(Oddball)                  \
  V(Code)                     \
  V(Map)                      \
  V(Cell)                     \
  V(PropertyCell)             \
  V(SharedFunctionInfo)       \
  V(JSFunction)               \
  V(JSWeakCollection)         \
  V(JSArrayBuffer)            \
  V(JSTypedArray)             \
  V(JSDataView)               \
  V(JSRegExp)

  // For data objects, JS objects and structs along with generic visitor which
  // can visit object of any size we provide visitors specialized by
  // object size in words.
  // Ids of specialized visitors are declared in a linear order (without
  // holes) starting from the id of visitor specialized for 2 words objects
  // (base visitor id) and ending with the id of generic visitor.
  // Method GetVisitorIdForSize depends on this ordering to calculate visitor
  // id of specialized visitor from given instance size, base visitor id and
  // generic visitor's id.
  enum VisitorId {
#define VISITOR_ID_ENUM_DECL(id) kVisit##id,
    VISITOR_ID_LIST(VISITOR_ID_ENUM_DECL)
#undef VISITOR_ID_ENUM_DECL
    kVisitorIdCount,
    // Aliases: the base id of each size-specialized family is the visitor
    // for the smallest (2-word) object.
    kVisitDataObject = kVisitDataObject2,
    kVisitJSObject = kVisitJSObject2,
    kVisitStruct = kVisitStruct2,
    kMinObjectSizeInWords = 2
  };

  // Visitor ID should fit in one byte.
  STATIC_ASSERT(kVisitorIdCount <= 256);

  // Determine which specialized visitor should be used for given instance
  // type and instance size.
  static VisitorId GetVisitorId(int instance_type, int instance_size);

  // Convenience overload that reads the type and size from the map.
  static VisitorId GetVisitorId(Map* map) {
    return GetVisitorId(map->instance_type(), map->instance_size());
  }

  // For visitors that allow specialization by size calculate VisitorId based
  // on size, base visitor id and generic visitor id.
  static VisitorId GetVisitorIdForSize(VisitorId base,
                                       VisitorId generic,
                                       int object_size) {
    DCHECK((base == kVisitDataObject) ||
           (base == kVisitStruct) ||
           (base == kVisitJSObject));
    DCHECK(IsAligned(object_size, kPointerSize));
    DCHECK(kMinObjectSizeInWords * kPointerSize <= object_size);
    DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);

    // Ids within a family are consecutive, so the specialized id is
    // base + (size in words - kMinObjectSizeInWords); sizes past the
    // largest specialization clamp to the generic visitor via Min below.
    const VisitorId specialization = static_cast<VisitorId>(
        base + (object_size >> kPointerSizeLog2) - kMinObjectSizeInWords);

    return Min(specialization, generic);
  }
};
| 131 | |
| 132 | |
| 133 template<typename Callback> | |
| 134 class VisitorDispatchTable { | |
| 135 public: | |
| 136 void CopyFrom(VisitorDispatchTable* other) { | |
| 137 // We are not using memcpy to guarantee that during update | |
| 138 // every element of callbacks_ array will remain correct | |
| 139 // pointer (memcpy might be implemented as a byte copying loop). | |
| 140 for (int i = 0; i < StaticVisitorBase::kVisitorIdCount; i++) { | |
| 141 base::NoBarrier_Store(&callbacks_[i], other->callbacks_[i]); | |
| 142 } | |
| 143 } | |
| 144 | |
| 145 inline Callback GetVisitorById(StaticVisitorBase::VisitorId id) { | |
| 146 return reinterpret_cast<Callback>(callbacks_[id]); | |
| 147 } | |
| 148 | |
| 149 inline Callback GetVisitor(Map* map) { | |
| 150 return reinterpret_cast<Callback>(callbacks_[map->visitor_id()]); | |
| 151 } | |
| 152 | |
| 153 void Register(StaticVisitorBase::VisitorId id, Callback callback) { | |
| 154 DCHECK(id < StaticVisitorBase::kVisitorIdCount); // id is unsigned. | |
| 155 callbacks_[id] = reinterpret_cast<base::AtomicWord>(callback); | |
| 156 } | |
| 157 | |
| 158 template<typename Visitor, | |
| 159 StaticVisitorBase::VisitorId base, | |
| 160 StaticVisitorBase::VisitorId generic, | |
| 161 int object_size_in_words> | |
| 162 void RegisterSpecialization() { | |
| 163 static const int size = object_size_in_words * kPointerSize; | |
| 164 Register(StaticVisitorBase::GetVisitorIdForSize(base, generic, size), | |
| 165 &Visitor::template VisitSpecialized<size>); | |
| 166 } | |
| 167 | |
| 168 | |
| 169 template<typename Visitor, | |
| 170 StaticVisitorBase::VisitorId base, | |
| 171 StaticVisitorBase::VisitorId generic> | |
| 172 void RegisterSpecializations() { | |
| 173 STATIC_ASSERT( | |
| 174 (generic - base + StaticVisitorBase::kMinObjectSizeInWords) == 10); | |
| 175 RegisterSpecialization<Visitor, base, generic, 2>(); | |
| 176 RegisterSpecialization<Visitor, base, generic, 3>(); | |
| 177 RegisterSpecialization<Visitor, base, generic, 4>(); | |
| 178 RegisterSpecialization<Visitor, base, generic, 5>(); | |
| 179 RegisterSpecialization<Visitor, base, generic, 6>(); | |
| 180 RegisterSpecialization<Visitor, base, generic, 7>(); | |
| 181 RegisterSpecialization<Visitor, base, generic, 8>(); | |
| 182 RegisterSpecialization<Visitor, base, generic, 9>(); | |
| 183 Register(generic, &Visitor::Visit); | |
| 184 } | |
| 185 | |
| 186 private: | |
| 187 base::AtomicWord callbacks_[StaticVisitorBase::kVisitorIdCount]; | |
| 188 }; | |
| 189 | |
| 190 | |
| 191 template<typename StaticVisitor> | |
| 192 class BodyVisitorBase : public AllStatic { | |
| 193 public: | |
| 194 INLINE(static void IteratePointers(Heap* heap, | |
| 195 HeapObject* object, | |
| 196 int start_offset, | |
| 197 int end_offset)) { | |
| 198 Object** start_slot = reinterpret_cast<Object**>(object->address() + | |
| 199 start_offset); | |
| 200 Object** end_slot = reinterpret_cast<Object**>(object->address() + | |
| 201 end_offset); | |
| 202 StaticVisitor::VisitPointers(heap, start_slot, end_slot); | |
| 203 } | |
| 204 }; | |
| 205 | |
| 206 | |
| 207 template<typename StaticVisitor, typename BodyDescriptor, typename ReturnType> | |
| 208 class FlexibleBodyVisitor : public BodyVisitorBase<StaticVisitor> { | |
| 209 public: | |
| 210 INLINE(static ReturnType Visit(Map* map, HeapObject* object)) { | |
| 211 int object_size = BodyDescriptor::SizeOf(map, object); | |
| 212 BodyVisitorBase<StaticVisitor>::IteratePointers( | |
| 213 map->GetHeap(), | |
| 214 object, | |
| 215 BodyDescriptor::kStartOffset, | |
| 216 object_size); | |
| 217 return static_cast<ReturnType>(object_size); | |
| 218 } | |
| 219 | |
| 220 template<int object_size> | |
| 221 static inline ReturnType VisitSpecialized(Map* map, HeapObject* object) { | |
| 222 DCHECK(BodyDescriptor::SizeOf(map, object) == object_size); | |
| 223 BodyVisitorBase<StaticVisitor>::IteratePointers( | |
| 224 map->GetHeap(), | |
| 225 object, | |
| 226 BodyDescriptor::kStartOffset, | |
| 227 object_size); | |
| 228 return static_cast<ReturnType>(object_size); | |
| 229 } | |
| 230 }; | |
| 231 | |
| 232 | |
| 233 template<typename StaticVisitor, typename BodyDescriptor, typename ReturnType> | |
| 234 class FixedBodyVisitor : public BodyVisitorBase<StaticVisitor> { | |
| 235 public: | |
| 236 INLINE(static ReturnType Visit(Map* map, HeapObject* object)) { | |
| 237 BodyVisitorBase<StaticVisitor>::IteratePointers( | |
| 238 map->GetHeap(), | |
| 239 object, | |
| 240 BodyDescriptor::kStartOffset, | |
| 241 BodyDescriptor::kEndOffset); | |
| 242 return static_cast<ReturnType>(BodyDescriptor::kSize); | |
| 243 } | |
| 244 }; | |
| 245 | |
| 246 | |
| 247 // Base class for visitors used for a linear new space iteration. | |
| 248 // IterateBody returns size of visited object. | |
| 249 // Certain types of objects (i.e. Code objects) are not handled | |
| 250 // by dispatch table of this visitor because they cannot appear | |
| 251 // in the new space. | |
| 252 // | |
| 253 // This class is intended to be used in the following way: | |
| 254 // | |
| 255 // class SomeVisitor : public StaticNewSpaceVisitor<SomeVisitor> { | |
| 256 // ... | |
| 257 // } | |
| 258 // | |
| 259 // This is an example of Curiously recurring template pattern | |
| 260 // (see http://en.wikipedia.org/wiki/Curiously_recurring_template_pattern). | |
| 261 // We use CRTP to guarantee aggressive compile time optimizations (i.e. | |
| 262 // inlining and specialization of StaticVisitor::VisitPointers methods). | |
template<typename StaticVisitor>
class StaticNewSpaceVisitor : public StaticVisitorBase {
 public:
  // Fills the dispatch table; must run before IterateBody is used.
  // The body is provided outside this header.
  static void Initialize();

  // Dispatches on the map's visitor id and returns the visited object's
  // size in bytes.
  INLINE(static int IterateBody(Map* map, HeapObject* obj)) {
    return table_.GetVisitor(map)(map, obj);
  }

  // Visits every slot in [start, end) via StaticVisitor::VisitPointer.
  INLINE(static void VisitPointers(Heap* heap, Object** start, Object** end)) {
    for (Object** p = start; p < end; p++) StaticVisitor::VisitPointer(heap, p);
  }

 private:
  // Visits a JSFunction's fields in two ranges, skipping the code entry
  // slot that sits between them.
  INLINE(static int VisitJSFunction(Map* map, HeapObject* object)) {
    Heap* heap = map->GetHeap();
    VisitPointers(heap,
                  HeapObject::RawField(object, JSFunction::kPropertiesOffset),
                  HeapObject::RawField(object, JSFunction::kCodeEntryOffset));

    // Don't visit code entry. We are using this visitor only during scavenges.

    VisitPointers(
        heap,
        HeapObject::RawField(object,
                             JSFunction::kCodeEntryOffset + kPointerSize),
        HeapObject::RawField(object,
                             JSFunction::kNonWeakFieldsEndOffset));
    return JSFunction::kSize;
  }

  // The visitors below only report the object's size; they visit no slots.
  INLINE(static int VisitByteArray(Map* map, HeapObject* object)) {
    return reinterpret_cast<ByteArray*>(object)->ByteArraySize();
  }

  INLINE(static int VisitFixedDoubleArray(Map* map, HeapObject* object)) {
    int length = reinterpret_cast<FixedDoubleArray*>(object)->length();
    return FixedDoubleArray::SizeFor(length);
  }

  INLINE(static int VisitFixedTypedArray(Map* map, HeapObject* object)) {
    return reinterpret_cast<FixedTypedArrayBase*>(object)->size();
  }

  // Delegates to the generic JSObject body visitor defined below.
  INLINE(static int VisitJSObject(Map* map, HeapObject* object)) {
    return JSObjectVisitor::Visit(map, object);
  }

  INLINE(static int VisitSeqOneByteString(Map* map, HeapObject* object)) {
    return SeqOneByteString::cast(object)->
        SeqOneByteStringSize(map->instance_type());
  }

  INLINE(static int VisitSeqTwoByteString(Map* map, HeapObject* object)) {
    return SeqTwoByteString::cast(object)->
        SeqTwoByteStringSize(map->instance_type());
  }

  INLINE(static int VisitFreeSpace(Map* map, HeapObject* object)) {
    return FreeSpace::cast(object)->Size();
  }

  // Bodies are provided outside this header.
  INLINE(static int VisitJSArrayBuffer(Map* map, HeapObject* object));
  INLINE(static int VisitJSTypedArray(Map* map, HeapObject* object));
  INLINE(static int VisitJSDataView(Map* map, HeapObject* object));

  // Visitor for objects without pointer fields: it only reports the size.
  class DataObjectVisitor {
   public:
    template<int object_size>
    static inline int VisitSpecialized(Map* map, HeapObject* object) {
      return object_size;
    }

    INLINE(static int Visit(Map* map, HeapObject* object)) {
      return map->instance_size();
    }
  };

  typedef FlexibleBodyVisitor<StaticVisitor,
                              StructBodyDescriptor,
                              int> StructVisitor;

  typedef FlexibleBodyVisitor<StaticVisitor,
                              JSObject::BodyDescriptor,
                              int> JSObjectVisitor;

  // Signature of the entries stored in table_.
  typedef int (*Callback)(Map* map, HeapObject* object);

  static VisitorDispatchTable<Callback> table_;
};
| 353 | |
| 354 | |
// Out-of-class definition of the static dispatch table; one instance exists
// per StaticNewSpaceVisitor instantiation.
template<typename StaticVisitor>
VisitorDispatchTable<typename StaticNewSpaceVisitor<StaticVisitor>::Callback>
    StaticNewSpaceVisitor<StaticVisitor>::table_;
| 358 | |
| 359 | |
| 360 // Base class for visitors used to transitively mark the entire heap. | |
| 361 // IterateBody returns nothing. | |
| 362 // Certain types of objects might not be handled by this base class and | |
| 363 // no visitor function is registered by the generic initialization. A | |
| 364 // specialized visitor function needs to be provided by the inheriting | |
| 365 // class itself for those cases. | |
| 366 // | |
| 367 // This class is intended to be used in the following way: | |
| 368 // | |
| 369 // class SomeVisitor : public StaticMarkingVisitor<SomeVisitor> { | |
| 370 // ... | |
| 371 // } | |
| 372 // | |
| 373 // This is an example of Curiously recurring template pattern. | |
template<typename StaticVisitor>
class StaticMarkingVisitor : public StaticVisitorBase {
 public:
  // Fills the dispatch table; must run before IterateBody is used.
  // The body is provided outside this header.
  static void Initialize();

  // Dispatches on the map's visitor id; unlike the new-space visitor,
  // no size is returned.
  INLINE(static void IterateBody(Map* map, HeapObject* obj)) {
    table_.GetVisitor(map)(map, obj);
  }

  // Per-field and relocation-info visitors; bodies are provided outside
  // this header.
  INLINE(static void VisitPropertyCell(Map* map, HeapObject* object));
  INLINE(static void VisitCodeEntry(Heap* heap, Address entry_address));
  INLINE(static void VisitEmbeddedPointer(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitCell(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitDebugTarget(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitCodeTarget(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitCodeAgeSequence(Heap* heap, RelocInfo* rinfo));
  // Intentionally empty: there is nothing to do for these reloc modes here.
  INLINE(static void VisitExternalReference(RelocInfo* rinfo)) { }
  INLINE(static void VisitRuntimeEntry(RelocInfo* rinfo)) { }
  // Skip the weak next code link in a code object.
  INLINE(static void VisitNextCodeLink(Heap* heap, Object** slot)) { }

  // TODO(mstarzinger): This should be made protected once refactoring is done.
  // Mark non-optimize code for functions inlined into the given optimized
  // code. This will prevent it from being flushed.
  static void MarkInlinedFunctionsCode(Heap* heap, Code* code);

 protected:
  // Type-specific visit functions registered in the dispatch table; bodies
  // are provided outside this header.
  INLINE(static void VisitMap(Map* map, HeapObject* object));
  INLINE(static void VisitCode(Map* map, HeapObject* object));
  INLINE(static void VisitSharedFunctionInfo(Map* map, HeapObject* object));
  INLINE(static void VisitConstantPoolArray(Map* map, HeapObject* object));
  INLINE(static void VisitAllocationSite(Map* map, HeapObject* object));
  INLINE(static void VisitWeakCollection(Map* map, HeapObject* object));
  INLINE(static void VisitJSFunction(Map* map, HeapObject* object));
  INLINE(static void VisitJSRegExp(Map* map, HeapObject* object));
  INLINE(static void VisitJSArrayBuffer(Map* map, HeapObject* object));
  INLINE(static void VisitJSTypedArray(Map* map, HeapObject* object));
  INLINE(static void VisitJSDataView(Map* map, HeapObject* object));
  INLINE(static void VisitNativeContext(Map* map, HeapObject* object));

  // Mark pointers in a Map and its TransitionArray together, possibly
  // treating transitions or back pointers weak.
  static void MarkMapContents(Heap* heap, Map* map);
  static void MarkTransitionArray(Heap* heap, TransitionArray* transitions);

  // Code flushing support.
  INLINE(static bool IsFlushable(Heap* heap, JSFunction* function));
  INLINE(static bool IsFlushable(Heap* heap, SharedFunctionInfo* shared_info));

  // Helpers used by code flushing support that visit pointer fields and treat
  // references to code objects either strongly or weakly.
  static void VisitSharedFunctionInfoStrongCode(Heap* heap, HeapObject* object);
  static void VisitSharedFunctionInfoWeakCode(Heap* heap, HeapObject* object);
  static void VisitJSFunctionStrongCode(Heap* heap, HeapObject* object);
  static void VisitJSFunctionWeakCode(Heap* heap, HeapObject* object);

  // No-op visitor for objects without pointer fields; the marker has
  // nothing to do for them.
  class DataObjectVisitor {
   public:
    template<int size>
    static inline void VisitSpecialized(Map* map, HeapObject* object) {
    }

    INLINE(static void Visit(Map* map, HeapObject* object)) {
    }
  };

  typedef FlexibleBodyVisitor<StaticVisitor,
                              FixedArray::BodyDescriptor,
                              void> FixedArrayVisitor;

  typedef FlexibleBodyVisitor<StaticVisitor,
                              JSObject::BodyDescriptor,
                              void> JSObjectVisitor;

  typedef FlexibleBodyVisitor<StaticVisitor,
                              StructBodyDescriptor,
                              void> StructObjectVisitor;

  // Signature of the entries stored in table_.
  typedef void (*Callback)(Map* map, HeapObject* object);

  static VisitorDispatchTable<Callback> table_;
};
| 456 | |
| 457 | |
// Out-of-class definition of the static dispatch table; one instance exists
// per StaticMarkingVisitor instantiation.
template<typename StaticVisitor>
VisitorDispatchTable<typename StaticMarkingVisitor<StaticVisitor>::Callback>
    StaticMarkingVisitor<StaticVisitor>::table_;
| 461 | |
| 462 | |
| 463 class WeakObjectRetainer; | |
| 464 | |
| 465 | |
// A weak list is a singly linked list where each element has a weak pointer to
| 467 // the next element. Given the head of the list, this function removes dead | |
| 468 // elements from the list and if requested records slots for next-element | |
| 469 // pointers. The template parameter T is a WeakListVisitor that defines how to | |
| 470 // access the next-element pointers. | |
| 471 template <class T> | |
| 472 Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer); | |
| 473 | |
| 474 } } // namespace v8::internal | |
| 475 | |
| 476 #endif // V8_OBJECTS_VISITING_H_ | |
| OLD | NEW |