// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_VISITING_INL_H_
#define V8_OBJECTS_VISITING_INL_H_


namespace v8 {
namespace internal {

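// Fills in the dispatch table used by the new-space (scavenger) visitor.
// Handlers are instantiated with an int return type because each visit
// returns the size of the visited object in bytes.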
template<typename StaticVisitor>
void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
  table_.Register(kVisitShortcutCandidate,
                  &FixedBodyVisitor<StaticVisitor,
                                    ConsString::BodyDescriptor,
                                    int>::Visit);

  table_.Register(kVisitConsString,
                  &FixedBodyVisitor<StaticVisitor,
                                    ConsString::BodyDescriptor,
                                    int>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor,
                                    SlicedString::BodyDescriptor,
                                    int>::Visit);

  table_.Register(kVisitSymbol,
                  &FixedBodyVisitor<StaticVisitor,
                                    Symbol::BodyDescriptor,
                                    int>::Visit);

  table_.Register(kVisitFixedArray,
                  &FlexibleBodyVisitor<StaticVisitor,
                                       FixedArray::BodyDescriptor,
                                       int>::Visit);

  table_.Register(kVisitFixedDoubleArray, &VisitFixedDoubleArray);
  table_.Register(kVisitFixedTypedArray, &VisitFixedTypedArray);
  table_.Register(kVisitFixedFloat64Array, &VisitFixedTypedArray);

  table_.Register(kVisitNativeContext,
                  &FixedBodyVisitor<StaticVisitor,
                                    Context::ScavengeBodyDescriptor,
                                    int>::Visit);

  table_.Register(kVisitByteArray, &VisitByteArray);

  table_.Register(kVisitSharedFunctionInfo,
                  &FixedBodyVisitor<StaticVisitor,
                                    SharedFunctionInfo::BodyDescriptor,
                                    int>::Visit);

  table_.Register(kVisitSeqOneByteString, &VisitSeqOneByteString);

  table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);

  table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);

  table_.Register(kVisitJSDataView, &VisitJSDataView);

  table_.Register(kVisitFreeSpace, &VisitFreeSpace);

  table_.Register(kVisitJSWeakCollection, &JSObjectVisitor::Visit);

  table_.Register(kVisitJSRegExp, &JSObjectVisitor::Visit);

  table_.template RegisterSpecializations<DataObjectVisitor,
                                          kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor,
                                          kVisitJSObject,
                                          kVisitJSObjectGeneric>();
  table_.template RegisterSpecializations<StructVisitor,
                                          kVisitStruct,
                                          kVisitStructGeneric>();
}


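// Visits a JSArrayBuffer in new space, skipping the two weak fields
// (weak-next and weak-first-view).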
template<typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  STATIC_ASSERT(
      JSArrayBuffer::kWeakFirstViewOffset ==
      JSArrayBuffer::kWeakNextOffset + kPointerSize);
  VisitPointers(
      heap,
      HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
  VisitPointers(
      heap,
      HeapObject::RawField(object,
          JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize),
      HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
  return JSArrayBuffer::kSizeWithInternalFields;
}


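// Visits a JSTypedArray in new space, skipping the weak-next field.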
template<typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSTypedArray::kWeakNextOffset));
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object,
          JSTypedArray::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
  return JSTypedArray::kSizeWithInternalFields;
}


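// Visits a JSDataView in new space, skipping the weak-next field.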
template<typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSDataView(
    Map* map, HeapObject* object) {
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset));
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object,
          JSDataView::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
  return JSDataView::kSizeWithInternalFields;
}


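// Fills in the dispatch table used by the marking visitor of the mark-compact
// collector. Handlers return void and may record slots so that pointers can
// be updated during compaction.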
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::Initialize() {
  table_.Register(kVisitShortcutCandidate,
                  &FixedBodyVisitor<StaticVisitor,
                                    ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitConsString,
                  &FixedBodyVisitor<StaticVisitor,
                                    ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor,
                                    SlicedString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitSymbol,
                  &FixedBodyVisitor<StaticVisitor,
                                    Symbol::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitFixedArray, &FixedArrayVisitor::Visit);

  table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFixedTypedArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFixedFloat64Array, &DataObjectVisitor::Visit);

  table_.Register(kVisitConstantPoolArray, &VisitConstantPoolArray);

  table_.Register(kVisitNativeContext, &VisitNativeContext);

  table_.Register(kVisitAllocationSite, &VisitAllocationSite);

  table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqOneByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitJSWeakCollection, &VisitWeakCollection);

  table_.Register(kVisitOddball,
                  &FixedBodyVisitor<StaticVisitor,
                                    Oddball::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitMap, &VisitMap);

  table_.Register(kVisitCode, &VisitCode);

  table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);

  table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);

  table_.Register(kVisitJSDataView, &VisitJSDataView);

  // Registration for kVisitJSRegExp is done by StaticVisitor.

  table_.Register(kVisitCell,
                  &FixedBodyVisitor<StaticVisitor,
                                    Cell::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitPropertyCell, &VisitPropertyCell);

  table_.template RegisterSpecializations<DataObjectVisitor,
                                          kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor,
                                          kVisitJSObject,
                                          kVisitJSObjectGeneric>();

  table_.template RegisterSpecializations<StructObjectVisitor,
                                          kVisitStruct,
                                          kVisitStructGeneric>();
}


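// Marks the Code object referenced from a code entry field and records the
// entry slot so it can be updated if the code object is relocated.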
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeEntry(
    Heap* heap, Address entry_address) {
  Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
  heap->mark_compact_collector()->RecordCodeEntrySlot(entry_address, code);
  StaticVisitor::MarkObject(heap, code);
}


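// Visits an object embedded in a code object's relocation info. The slot is
// always recorded; the object itself is marked only if the host code does not
// hold it weakly.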
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitEmbeddedPointer(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
  HeapObject* object = HeapObject::cast(rinfo->target_object());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, object);
  // TODO(ulan): It could be better to record slots only for strongly embedded
  // objects here and to record slots for weakly embedded objects during the
  // clearing of non-live references in mark-compact.
  if (!rinfo->host()->IsWeakObject(object)) {
    StaticVisitor::MarkObject(heap, object);
  }
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCell(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::CELL);
  Cell* cell = rinfo->target_cell();
  // No need to record slots because the cell space is not compacted during GC.
  if (!rinfo->host()->IsWeakObject(cell)) {
    StaticVisitor::MarkObject(heap, cell);
  }
}


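// Marks the code target of a patched return sequence or debug break slot and
// records the relocation slot for later pointer updating.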
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitDebugTarget(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK((RelocInfo::IsJSReturn(rinfo->rmode()) &&
          rinfo->IsPatchedReturnSequence()) ||
         (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
          rinfo->IsPatchedDebugBreakSlotSequence()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeTarget(rinfo->rmode()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  // Monomorphic ICs are preserved when possible, but need to be flushed
  // when they might be keeping a Context alive, or when the heap is about
  // to be serialized.
  if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub() &&
      (target->ic_state() == MEGAMORPHIC || target->ic_state() == GENERIC ||
       target->ic_state() == POLYMORPHIC || heap->flush_monomorphic_ics() ||
       heap->isolate()->serializer_enabled() ||
       target->ic_age() != heap->global_ic_age() ||
       target->is_invalidated_weak_stub())) {
    IC::Clear(heap->isolate(), rinfo->pc(), rinfo->host()->constant_pool());
    target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  }
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


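// Marks the code-age stub that a code-age sequence points to.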
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeAgeSequence(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
  Code* target = rinfo->code_age_stub();
  DCHECK(target != NULL);
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


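// Visits a native context. The strong part of the body is visited by the
// fixed body visitor; the weak slots are only recorded so they can be
// updated later.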
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitNativeContext(
    Map* map, HeapObject* object) {
  FixedBodyVisitor<StaticVisitor,
                   Context::MarkCompactBodyDescriptor,
                   void>::Visit(map, object);

  MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
  for (int idx = Context::FIRST_WEAK_SLOT;
       idx < Context::NATIVE_CONTEXT_SLOTS;
       ++idx) {
    Object** slot = Context::cast(object)->RawFieldOfElementAt(idx);
    collector->RecordSlot(slot, slot, *slot);
  }
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitMap(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  Map* map_object = Map::cast(object);

  // Clears the cache of ICs related to this map.
  if (FLAG_cleanup_code_caches_at_gc) {
    map_object->ClearCodeCache(heap);
  }

  // When map collection is enabled we have to mark through map's transitions
  // and back pointers in a special way to make these links weak.
  if (FLAG_collect_maps && map_object->CanTransition()) {
    MarkMapContents(heap, map_object);
  } else {
    StaticVisitor::VisitPointers(heap,
        HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
        HeapObject::RawField(object, Map::kPointerFieldsEndOffset));
  }
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitPropertyCell(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  Object** slot =
      HeapObject::RawField(object, PropertyCell::kDependentCodeOffset);
  if (FLAG_collect_maps) {
    // Mark the property cell's dependent code array, but do not push it onto
    // the marking stack; this makes references from it weak. Dead code is
    // cleaned when we iterate over property cells in ClearNonLiveReferences.
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  } else {
    StaticVisitor::VisitPointer(heap, slot);
  }

  StaticVisitor::VisitPointers(heap,
      HeapObject::RawField(object, PropertyCell::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, PropertyCell::kPointerFieldsEndOffset));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitAllocationSite(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  Object** slot =
      HeapObject::RawField(object, AllocationSite::kDependentCodeOffset);
  if (FLAG_collect_maps) {
    // Mark the allocation site's dependent code array, but do not push it
    // onto the marking stack; this makes references from it weak. Dead code
    // is cleaned when we iterate over allocation sites in
    // ClearNonLiveReferences.
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  } else {
    StaticVisitor::VisitPointer(heap, slot);
  }

  StaticVisitor::VisitPointers(heap,
      HeapObject::RawField(object, AllocationSite::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, AllocationSite::kPointerFieldsEndOffset));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitWeakCollection(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSWeakCollection* weak_collection =
      reinterpret_cast<JSWeakCollection*>(object);

  // Enqueue weak collection in linked list of encountered weak collections.
  if (weak_collection->next() == heap->undefined_value()) {
    weak_collection->set_next(heap->encountered_weak_collections());
    heap->set_encountered_weak_collections(weak_collection);
  }

  // Skip visiting the backing hash table containing the mappings and the
  // pointer to the other enqueued weak collections; both are post-processed.
  StaticVisitor::VisitPointers(heap,
      HeapObject::RawField(object, JSWeakCollection::kPropertiesOffset),
      HeapObject::RawField(object, JSWeakCollection::kTableOffset));
  STATIC_ASSERT(JSWeakCollection::kTableOffset + kPointerSize ==
                JSWeakCollection::kNextOffset);
  STATIC_ASSERT(JSWeakCollection::kNextOffset + kPointerSize ==
                JSWeakCollection::kSize);

  // Partially initialized weak collection is enqueued, but table is ignored.
  if (!weak_collection->table()->IsHashTable()) return;

  // Mark the backing hash table without pushing it on the marking stack.
  Object** slot = HeapObject::RawField(object, JSWeakCollection::kTableOffset);
  HeapObject* obj = HeapObject::cast(*slot);
  heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
  StaticVisitor::MarkObjectWithoutPush(heap, obj);
}


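// Visits a Code object: ages it (unless the heap is being serialized) and
// then iterates its body, including the relocation info.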
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCode(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  Code* code = Code::cast(object);
  if (FLAG_age_code && !heap->isolate()->serializer_enabled()) {
    code->MakeOlder(heap->mark_compact_collector()->marking_parity());
  }
  code->CodeIterateBody<StaticVisitor>(heap);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
  if (shared->ic_age() != heap->global_ic_age()) {
    shared->ResetForNewContext(heap->global_ic_age());
  }
  if (FLAG_cleanup_code_caches_at_gc) {
    shared->ClearTypeFeedbackInfo();
  }
  if (FLAG_cache_optimized_code &&
      FLAG_flush_optimized_code_cache &&
      !shared->optimized_code_map()->IsSmi()) {
    // Always flush the optimized code map if requested by flag.
    shared->ClearOptimizedCodeMap();
  }
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
      // Add the shared function info holding an optimized code map to
      // the code flusher for processing of code maps after marking.
      collector->code_flusher()->AddOptimizedCodeMap(shared);
      // Treat all references within the code map weakly by marking the
      // code map itself but not pushing it onto the marking deque.
      FixedArray* code_map = FixedArray::cast(shared->optimized_code_map());
      StaticVisitor::MarkObjectWithoutPush(heap, code_map);
    }
    if (IsFlushable(heap, shared)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(shared);
      // Treat the reference to the code object weakly.
      VisitSharedFunctionInfoWeakCode(heap, object);
      return;
    }
  } else {
    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
      // Flush optimized code map on major GCs without code flushing,
      // needed because cached code doesn't contain breakpoints.
      shared->ClearOptimizedCodeMap();
    }
  }
  VisitSharedFunctionInfoStrongCode(heap, object);
}


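// Visits a ConstantPoolArray. Code-pointer entries are visited as code
// entries. Heap-pointer entries always have their slots recorded, but are
// marked strongly only when the pool does not hold them weakly.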
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitConstantPoolArray(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  ConstantPoolArray* array = ConstantPoolArray::cast(object);
  ConstantPoolArray::Iterator code_iter(array, ConstantPoolArray::CODE_PTR);
  while (!code_iter.is_finished()) {
    Address code_entry = reinterpret_cast<Address>(
        array->RawFieldOfElementAt(code_iter.next_index()));
    StaticVisitor::VisitCodeEntry(heap, code_entry);
  }

  ConstantPoolArray::Iterator heap_iter(array, ConstantPoolArray::HEAP_PTR);
  while (!heap_iter.is_finished()) {
    Object** slot = array->RawFieldOfElementAt(heap_iter.next_index());
    HeapObject* object = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, object);
    bool is_weak_object =
        (array->get_weak_object_state() ==
             ConstantPoolArray::WEAK_OBJECTS_IN_OPTIMIZED_CODE &&
         Code::IsWeakObjectInOptimizedCode(object)) ||
        (array->get_weak_object_state() ==
             ConstantPoolArray::WEAK_OBJECTS_IN_IC &&
         Code::IsWeakObjectInIC(object));
    if (!is_weak_object) {
      StaticVisitor::MarkObject(heap, object);
    }
  }
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSFunction* function = JSFunction::cast(object);
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (IsFlushable(heap, function)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(function);
      // Visit shared function info immediately to avoid double checking
      // of its flushability later. This is just an optimization because
      // the shared function info would eventually be visited.
      SharedFunctionInfo* shared = function->shared();
      if (StaticVisitor::MarkObjectWithoutPush(heap, shared)) {
        StaticVisitor::MarkObject(heap, shared->map());
        VisitSharedFunctionInfoWeakCode(heap, shared);
      }
      // Treat the reference to the code object weakly.
      VisitJSFunctionWeakCode(heap, object);
      return;
    } else {
      // Visit all unoptimized code objects to prevent flushing them.
      StaticVisitor::MarkObject(heap, function->shared()->code());
      if (function->code()->kind() == Code::OPTIMIZED_FUNCTION) {
        MarkInlinedFunctionsCode(heap, function->code());
      }
    }
  }
  VisitJSFunctionStrongCode(heap, object);
}


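// Visits all pointer fields of a JSRegExp, including its in-object
// properties.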
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(
    Map* map, HeapObject* object) {
  int last_property_offset =
      JSRegExp::kSize + kPointerSize * map->inobject_properties();
  StaticVisitor::VisitPointers(map->GetHeap(),
      HeapObject::RawField(object, JSRegExp::kPropertiesOffset),
      HeapObject::RawField(object, last_property_offset));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  STATIC_ASSERT(
      JSArrayBuffer::kWeakFirstViewOffset ==
      JSArrayBuffer::kWeakNextOffset + kPointerSize);
  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object,
          JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize),
      HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSTypedArray::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object,
          JSTypedArray::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSDataView(
    Map* map, HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object,
          JSDataView::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(
    Heap* heap, Map* map) {
  // Make sure that the back pointer stored either in the map itself or
  // inside its transitions array is marked. Skip recording the back
  // pointer slot since map space is not compacted.
  StaticVisitor::MarkObject(heap, HeapObject::cast(map->GetBackPointer()));

  // Treat pointers in the transitions array as weak and also mark that
  // array to prevent visiting it later. Skip recording the transition
  // array slot, since it will be implicitly recorded when the pointer
  // fields of this map are visited.
  if (map->HasTransitionArray()) {
    TransitionArray* transitions = map->transitions();
    MarkTransitionArray(heap, transitions);
  }

  // Since descriptor arrays are potentially shared, ensure that only the
  // descriptors that belong to this map are marked. The first time a
  // non-empty descriptor array is marked, its header is also visited. The slot
  // holding the descriptor array will be implicitly recorded when the pointer
  // fields of this map are visited.
  DescriptorArray* descriptors = map->instance_descriptors();
  if (StaticVisitor::MarkObjectWithoutPush(heap, descriptors) &&
      descriptors->length() > 0) {
    StaticVisitor::VisitPointers(heap,
        descriptors->GetFirstElementAddress(),
        descriptors->GetDescriptorEndSlot(0));
  }
  int start = 0;
  int end = map->NumberOfOwnDescriptors();
  if (start < end) {
    StaticVisitor::VisitPointers(heap,
        descriptors->GetDescriptorStartSlot(start),
        descriptors->GetDescriptorEndSlot(end));
  }

  // Mark the map's dependent code array, but do not push it onto the marking
  // stack; this makes references from it weak. Dead code is cleaned when we
  // iterate over maps in ClearNonLiveTransitions.
  Object** slot = HeapObject::RawField(map, Map::kDependentCodeOffset);
  HeapObject* obj = HeapObject::cast(*slot);
  heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
  StaticVisitor::MarkObjectWithoutPush(heap, obj);

  // Mark the pointer fields of the Map. Since the transitions array has
  // been marked already, it is fine that one of these fields contains a
  // pointer to it.
  StaticVisitor::VisitPointers(heap,
      HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
      HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkTransitionArray(
    Heap* heap, TransitionArray* transitions) {
  if (!StaticVisitor::MarkObjectWithoutPush(heap, transitions)) return;

  // Simple transitions do not have keys nor prototype transitions.
  if (transitions->IsSimpleTransition()) return;

  if (transitions->HasPrototypeTransitions()) {
    // Mark the prototype transitions array, but do not push it onto the
    // marking stack; this makes references from it weak. Dead prototype
    // transitions are cleaned in ClearNonLiveTransitions.
    Object** slot = transitions->GetPrototypeTransitionsSlot();
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  }

  for (int i = 0; i < transitions->number_of_transitions(); ++i) {
    StaticVisitor::VisitPointer(heap, transitions->GetKeySlot(i));
  }
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkInlinedFunctionsCode(
    Heap* heap, Code* code) {
  // Skip in absence of inlining.
  // TODO(turbofan): Revisit once we support inlining.
  if (code->is_turbofanned()) return;
  // For an optimized function we should retain both the non-optimized version
  // of its code and the non-optimized versions of all inlined functions.
  // This is required to support bailing out from inlined code.
  DeoptimizationInputData* data =
      DeoptimizationInputData::cast(code->deoptimization_data());
  FixedArray* literals = data->LiteralArray();
  for (int i = 0, count = data->InlinedFunctionCount()->value();
       i < count;
       i++) {
    JSFunction* inlined = JSFunction::cast(literals->get(i));
    StaticVisitor::MarkObject(heap, inlined->shared()->code());
  }
}


inline static bool IsValidNonBuiltinContext(Object* context) {
  return context->IsContext() &&
      !Context::cast(context)->global_object()->IsJSBuiltinsObject();
}


inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
  Object* undefined = heap->undefined_value();
  return (info->script() != undefined) &&
      (reinterpret_cast<Script*>(info->script())->source() != undefined);
}


template<typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
    Heap* heap, JSFunction* function) {
  SharedFunctionInfo* shared_info = function->shared();

  // The code is either on the stack, in the compilation cache, or referenced
  // by an optimized version of the function.
  MarkBit code_mark = Marking::MarkBitFrom(function->code());
  if (code_mark.Get()) {
    return false;
  }

  // The function must have a valid context and not be a builtin.
  if (!IsValidNonBuiltinContext(function->context())) {
    return false;
  }

  // We do not (yet) flush code for optimized functions.
  if (function->code() != shared_info->code()) {
    return false;
  }

  // Check the age of the code.
  if (FLAG_age_code && !function->code()->IsOld()) {
    return false;
  }

  return IsFlushable(heap, shared_info);
}


template<typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
    Heap* heap, SharedFunctionInfo* shared_info) {
  // The code is either on the stack, in the compilation cache, or referenced
  // by an optimized version of the function.
  MarkBit code_mark = Marking::MarkBitFrom(shared_info->code());
  if (code_mark.Get()) {
    return false;
  }

  // The function must be compiled and have the source code available,
  // to be able to recompile it in case we need the function again.
  if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
    return false;
  }

  // We never flush code for API functions.
  Object* function_data = shared_info->function_data();
  if (function_data->IsFunctionTemplateInfo()) {
    return false;
  }

  // Only flush code for functions.
  if (shared_info->code()->kind() != Code::FUNCTION) {
    return false;
  }

  // Function must be lazy compilable.
  if (!shared_info->allows_lazy_compilation()) {
    return false;
  }

  // We do not (yet?) flush code for generator functions, because we don't know
  // if there are still live activations (generator objects) on the heap.
  if (shared_info->is_generator()) {
    return false;
  }

  // If this is a full script wrapped in a function we do not flush the code.
  if (shared_info->is_toplevel()) {
    return false;
  }

  // If this is a function initialized with %SetCode then the one-to-one
  // relation between SharedFunctionInfo and Code is broken.
  if (shared_info->dont_flush()) {
    return false;
  }

  // Check age of code. If code aging is disabled we never flush.
  if (!FLAG_age_code || !shared_info->code()->IsOld()) {
    return false;
  }

  return true;
}


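// Visits all pointer fields of a SharedFunctionInfo, treating the code and
// the optimized code map as strong references.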
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object,
                           SharedFunctionInfo::BodyDescriptor::kStartOffset);
  Object** end_slot =
      HeapObject::RawField(object,
                           SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode(
    Heap* heap, HeapObject* object) {
  Object** name_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kNameOffset);
  StaticVisitor::VisitPointer(heap, name_slot);

  // Skip visiting kCodeOffset as it is treated weakly here.
  STATIC_ASSERT(SharedFunctionInfo::kNameOffset + kPointerSize ==
                SharedFunctionInfo::kCodeOffset);
  STATIC_ASSERT(SharedFunctionInfo::kCodeOffset + kPointerSize ==
                SharedFunctionInfo::kOptimizedCodeMapOffset);

  Object** start_slot =
      HeapObject::RawField(object,
                           SharedFunctionInfo::kOptimizedCodeMapOffset);
  Object** end_slot =
      HeapObject::RawField(object,
                           SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);

  VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
                JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot =
      HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


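// Visits a JSFunction while skipping the code entry field, leaving the
// reference to the code object weak so that it may be flushed.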
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);

  // Skip visiting kCodeEntryOffset as it is treated weakly here.
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
                JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot =
      HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


void Code::CodeIterateBody(ObjectVisitor* v) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  // There are two places where we iterate code bodies: here and the
  // templated CodeIterateBody (below). They should be kept in sync.
  IteratePointer(v, kRelocationInfoOffset);
  IteratePointer(v, kHandlerTableOffset);
  IteratePointer(v, kDeoptimizationDataOffset);
  IteratePointer(v, kTypeFeedbackInfoOffset);
  IterateNextCodeLink(v, kNextCodeLinkOffset);
  IteratePointer(v, kConstantPoolOffset);

  RelocIterator it(this, mode_mask);
  Isolate* isolate = this->GetIsolate();
  for (; !it.done(); it.next()) {
    it.rinfo()->Visit(isolate, v);
  }
}


template<typename StaticVisitor>
void Code::CodeIterateBody(Heap* heap) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  // There are two places where we iterate code bodies: here and the non-
  // templated CodeIterateBody (above). They should be kept in sync.
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kRelocationInfoOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kHandlerTableOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kDeoptimizationDataOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kTypeFeedbackInfoOffset));
  StaticVisitor::VisitNextCodeLink(
      heap,
      reinterpret_cast<Object**>(this->address() + kNextCodeLinkOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kConstantPoolOffset));

  RelocIterator it(this, mode_mask);
  for (; !it.done(); it.next()) {
    it.rinfo()->template Visit<StaticVisitor>(heap);
  }
}


} }  // namespace v8::internal

#endif  // V8_OBJECTS_VISITING_INL_H_