OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 167 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
178 | 178 |
179 table_.Register(kVisitSymbol, | 179 table_.Register(kVisitSymbol, |
180 &FixedBodyVisitor<StaticVisitor, | 180 &FixedBodyVisitor<StaticVisitor, |
181 Symbol::BodyDescriptor, | 181 Symbol::BodyDescriptor, |
182 void>::Visit); | 182 void>::Visit); |
183 | 183 |
184 table_.Register(kVisitFixedArray, &FixedArrayVisitor::Visit); | 184 table_.Register(kVisitFixedArray, &FixedArrayVisitor::Visit); |
185 | 185 |
186 table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit); | 186 table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit); |
187 | 187 |
188 table_.Register(kVisitConstantPoolArray, &VisitConstantPoolArray); | |
189 | |
188 table_.Register(kVisitNativeContext, &VisitNativeContext); | 190 table_.Register(kVisitNativeContext, &VisitNativeContext); |
189 | 191 |
190 table_.Register(kVisitAllocationSite, | 192 table_.Register(kVisitAllocationSite, |
191 &FixedBodyVisitor<StaticVisitor, | 193 &FixedBodyVisitor<StaticVisitor, |
192 AllocationSite::BodyDescriptor, | 194 AllocationSite::BodyDescriptor, |
193 void>::Visit); | 195 void>::Visit); |
194 | 196 |
195 table_.Register(kVisitByteArray, &DataObjectVisitor::Visit); | 197 table_.Register(kVisitByteArray, &DataObjectVisitor::Visit); |
196 | 198 |
197 table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit); | 199 table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit); |
(...skipping 64 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
262 ASSERT(!rinfo->target_object()->IsConsString()); | 264 ASSERT(!rinfo->target_object()->IsConsString()); |
263 HeapObject* object = HeapObject::cast(rinfo->target_object()); | 265 HeapObject* object = HeapObject::cast(rinfo->target_object()); |
264 if (!FLAG_weak_embedded_maps_in_optimized_code || !FLAG_collect_maps || | 266 if (!FLAG_weak_embedded_maps_in_optimized_code || !FLAG_collect_maps || |
265 rinfo->host()->kind() != Code::OPTIMIZED_FUNCTION || | 267 rinfo->host()->kind() != Code::OPTIMIZED_FUNCTION || |
266 !object->IsMap() || !Map::cast(object)->CanTransition()) { | 268 !object->IsMap() || !Map::cast(object)->CanTransition()) { |
267 heap->mark_compact_collector()->RecordRelocSlot(rinfo, object); | 269 heap->mark_compact_collector()->RecordRelocSlot(rinfo, object); |
268 StaticVisitor::MarkObject(heap, object); | 270 StaticVisitor::MarkObject(heap, object); |
269 } | 271 } |
270 } | 272 } |
271 | 273 |
272 | |
273 template<typename StaticVisitor> | 274 template<typename StaticVisitor> |
274 void StaticMarkingVisitor<StaticVisitor>::VisitCell( | 275 void StaticMarkingVisitor<StaticVisitor>::VisitCell( |
275 Heap* heap, RelocInfo* rinfo) { | 276 Heap* heap, RelocInfo* rinfo) { |
276 ASSERT(rinfo->rmode() == RelocInfo::CELL); | 277 ASSERT(rinfo->rmode() == RelocInfo::CELL); |
277 Cell* cell = rinfo->target_cell(); | 278 Cell* cell = rinfo->target_cell(); |
278 StaticVisitor::MarkObject(heap, cell); | 279 StaticVisitor::MarkObject(heap, cell); |
279 } | 280 } |
280 | 281 |
281 | 282 |
282 template<typename StaticVisitor> | 283 template<typename StaticVisitor> |
(...skipping 161 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
444 if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) { | 445 if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) { |
445 // Flush optimized code map on major GCs without code flushing, | 446 // Flush optimized code map on major GCs without code flushing, |
446 // needed because cached code doesn't contain breakpoints. | 447 // needed because cached code doesn't contain breakpoints. |
447 shared->ClearOptimizedCodeMap(); | 448 shared->ClearOptimizedCodeMap(); |
448 } | 449 } |
449 } | 450 } |
450 VisitSharedFunctionInfoStrongCode(heap, object); | 451 VisitSharedFunctionInfoStrongCode(heap, object); |
451 } | 452 } |
452 | 453 |
453 | 454 |
455 void ConstantPoolArray::ConstantPoolIterateBody(ObjectVisitor* v) { | |
ulan
2013/09/27 12:39:10
Better to move this into objects.cc or objects-inl.h.
rmcilroy
2013/10/01 11:21:52
Moved to objects.cc.
| |
456 int first_ptr_offset = OffsetOfElementAt(first_ptr_index()); | |
457 int last_ptr_offset = | |
458 OffsetOfElementAt(first_ptr_index() + count_of_ptr_entries()); | |
459 v->VisitPointers( | |
460 HeapObject::RawField(this, first_ptr_offset), | |
461 HeapObject::RawField(this, last_ptr_offset)); | |
462 } | |
463 | |
464 | |
465 template<typename StaticVisitor> | |
466 void StaticMarkingVisitor<StaticVisitor>::VisitConstantPoolArray( | |
467 Map* map, HeapObject* object) { | |
468 Heap* heap = map->GetHeap(); | |
469 ConstantPoolArray* constant_pool = ConstantPoolArray::cast(object); | |
470 int first_ptr_offset = constant_pool->OffsetOfElementAt( | |
471 constant_pool->first_ptr_index()); | |
472 int last_ptr_offset = constant_pool->OffsetOfElementAt( | |
473 constant_pool->first_ptr_index() + constant_pool->count_of_ptr_entries()); | |
474 StaticVisitor::VisitPointers( | |
475 heap, | |
476 HeapObject::RawField(object, first_ptr_offset), | |
477 HeapObject::RawField(object, last_ptr_offset)); | |
478 } | |
479 | |
480 | |
454 template<typename StaticVisitor> | 481 template<typename StaticVisitor> |
455 void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction( | 482 void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction( |
456 Map* map, HeapObject* object) { | 483 Map* map, HeapObject* object) { |
457 Heap* heap = map->GetHeap(); | 484 Heap* heap = map->GetHeap(); |
458 JSFunction* function = JSFunction::cast(object); | 485 JSFunction* function = JSFunction::cast(object); |
459 MarkCompactCollector* collector = heap->mark_compact_collector(); | 486 MarkCompactCollector* collector = heap->mark_compact_collector(); |
460 if (collector->is_code_flushing_enabled()) { | 487 if (collector->is_code_flushing_enabled()) { |
461 if (IsFlushable(heap, function)) { | 488 if (IsFlushable(heap, function)) { |
462 // This function's code looks flushable. But we have to postpone | 489 // This function's code looks flushable. But we have to postpone |
463 // the decision until we see all functions that point to the same | 490 // the decision until we see all functions that point to the same |
(...skipping 419 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
883 RelocIterator it(this, mode_mask); | 910 RelocIterator it(this, mode_mask); |
884 for (; !it.done(); it.next()) { | 911 for (; !it.done(); it.next()) { |
885 it.rinfo()->template Visit<StaticVisitor>(heap); | 912 it.rinfo()->template Visit<StaticVisitor>(heap); |
886 } | 913 } |
887 } | 914 } |
888 | 915 |
889 | 916 |
890 } } // namespace v8::internal | 917 } } // namespace v8::internal |
891 | 918 |
892 #endif // V8_OBJECTS_VISITING_INL_H_ | 919 #endif // V8_OBJECTS_VISITING_INL_H_ |
OLD | NEW |