OLD | NEW |
---|---|
1 // Copyright 2006-2008 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 446 matching lines...)
457 } | 457 } |
458 | 458 |
459 static inline void VisitCodeTarget(RelocInfo* rinfo) { | 459 static inline void VisitCodeTarget(RelocInfo* rinfo) { |
460 ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode())); | 460 ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode())); |
461 Code* code = Code::GetCodeFromTargetAddress(rinfo->target_address()); | 461 Code* code = Code::GetCodeFromTargetAddress(rinfo->target_address()); |
462 if (FLAG_cleanup_ics_at_gc && code->is_inline_cache_stub()) { | 462 if (FLAG_cleanup_ics_at_gc && code->is_inline_cache_stub()) { |
463 IC::Clear(rinfo->pc()); | 463 IC::Clear(rinfo->pc()); |
464 // Please note targets for cleared inline caches do not have to be | 464 // Please note targets for cleared inline caches do not have to be |
465 // marked since they are contained in HEAP->non_monomorphic_cache(). | 465 // marked since they are contained in HEAP->non_monomorphic_cache(). |
466 } else { | 466 } else { |
467 HEAP->mark_compact_collector()->MarkObject(code); | 467 code->heap()->mark_compact_collector()->MarkObject(code); |
468 } | 468 } |
469 } | 469 } |
470 | 470 |
471 static void VisitGlobalPropertyCell(RelocInfo* rinfo) { | 471 static void VisitGlobalPropertyCell(RelocInfo* rinfo) { |
472 ASSERT(rinfo->rmode() == RelocInfo::GLOBAL_PROPERTY_CELL); | 472 ASSERT(rinfo->rmode() == RelocInfo::GLOBAL_PROPERTY_CELL); |
473 Object* cell = rinfo->target_cell(); | 473 Object* cell = rinfo->target_cell(); |
474 Object* old_cell = cell; | 474 Object* old_cell = cell; |
475 VisitPointer(HEAP, &cell); | 475 VisitPointer(reinterpret_cast<JSGlobalPropertyCell*>(cell)->heap(), &cell); |
476 if (cell != old_cell) { | 476 if (cell != old_cell) { |
477 rinfo->set_target_cell(reinterpret_cast<JSGlobalPropertyCell*>(cell)); | 477 rinfo->set_target_cell(reinterpret_cast<JSGlobalPropertyCell*>(cell)); |
478 } | 478 } |
479 } | 479 } |
480 | 480 |
481 static inline void VisitDebugTarget(RelocInfo* rinfo) { | 481 static inline void VisitDebugTarget(RelocInfo* rinfo) { |
482 ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) && | 482 ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) && |
483 rinfo->IsPatchedReturnSequence()) || | 483 rinfo->IsPatchedReturnSequence()) || |
484 (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) && | 484 (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) && |
485 rinfo->IsPatchedDebugBreakSlotSequence())); | 485 rinfo->IsPatchedDebugBreakSlotSequence())); |
486 HeapObject* code = Code::GetCodeFromTargetAddress(rinfo->call_address()); | 486 HeapObject* code = Code::GetCodeFromTargetAddress(rinfo->call_address()); |
487 HEAP->mark_compact_collector()->MarkObject(code); | 487 reinterpret_cast<Code*>(code)->heap()->mark_compact_collector()-> |
488 MarkObject(code); | |
488 } | 489 } |
489 | 490 |
490 // Mark object pointed to by p. | 491 // Mark object pointed to by p. |
491 INLINE(static void MarkObjectByPointer(Heap* heap, Object** p)) { | 492 INLINE(static void MarkObjectByPointer(Heap* heap, Object** p)) { |
492 if (!(*p)->IsHeapObject()) return; | 493 if (!(*p)->IsHeapObject()) return; |
493 HeapObject* object = ShortCircuitConsString(p); | 494 HeapObject* object = ShortCircuitConsString(p); |
494 heap->mark_compact_collector()->MarkObject(object); | 495 if (!object->IsMarked()) { |
496 heap->mark_compact_collector()->MarkUnmarkedObject(object); | |
497 } | |
495 } | 498 } |
496 | 499 |
497 | 500 |
498 // Visit an unmarked object. | 501 // Visit an unmarked object. |
499 static inline void VisitUnmarkedObject(HeapObject* obj) { | 502 INLINE(static void VisitUnmarkedObject(MarkCompactCollector* collector, |
503 HeapObject* obj)) { | |
500 #ifdef DEBUG | 504 #ifdef DEBUG |
501 ASSERT(HEAP->Contains(obj)); | 505 ASSERT(obj->map()->heap()->Contains(obj)); |
Vitaly Repeshko 2011/03/31 19:23:16: It's safer to have Isolate::Current() in asserts l
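A minimal sketch of the assert this comment appears to suggest, assuming the Isolate::Current() accessor from the multi-isolate work (the exact form in the landed patch may differ):

    #ifdef DEBUG
      // Resolve the heap through the current isolate rather than through
      // obj->map(): the collector overloads map words during GC, so going
      // through the isolate keeps the debug check independent of the
      // object's (possibly marked) map word.
      ASSERT(Isolate::Current()->heap()->Contains(obj));
      ASSERT(!obj->IsMarked());
    #endif

Isolate::Current() reads the isolate from thread-local storage, so the check also encodes the assumption that marking runs on the isolate's own thread.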
502 ASSERT(!obj->IsMarked()); | 506 ASSERT(!obj->IsMarked()); |
503 #endif | 507 #endif |
504 Map* map = obj->map(); | 508 Map* map = obj->map(); |
505 MarkCompactCollector* collector = map->heap()->mark_compact_collector(); | |
506 collector->SetMark(obj); | 509 collector->SetMark(obj); |
507 // Mark the map pointer and the body. | 510 // Mark the map pointer and the body. |
508 collector->MarkObject(map); | 511 if (!map->IsMarked()) collector->MarkUnmarkedObject(map); |
509 IterateBody(map, obj); | 512 IterateBody(map, obj); |
510 } | 513 } |
511 | 514 |
512 // Visit all unmarked objects pointed to by [start, end). | 515 // Visit all unmarked objects pointed to by [start, end). |
513 // Returns false if the operation fails (lack of stack space). | 516 // Returns false if the operation fails (lack of stack space). |
514 static inline bool VisitUnmarkedObjects(Heap* heap, | 517 static inline bool VisitUnmarkedObjects(Heap* heap, |
515 Object** start, | 518 Object** start, |
516 Object** end) { | 519 Object** end) { |
517 // Return false if we are close to the stack limit. | 520 // Return false if we are close to the stack limit. |
518 StackLimitCheck check(heap->isolate()); | 521 StackLimitCheck check(heap->isolate()); |
519 if (check.HasOverflowed()) return false; | 522 if (check.HasOverflowed()) return false; |
520 | 523 |
524 MarkCompactCollector* collector = heap->mark_compact_collector(); | |
521 // Visit the unmarked objects. | 525 // Visit the unmarked objects. |
522 for (Object** p = start; p < end; p++) { | 526 for (Object** p = start; p < end; p++) { |
523 if (!(*p)->IsHeapObject()) continue; | 527 if (!(*p)->IsHeapObject()) continue; |
524 HeapObject* obj = HeapObject::cast(*p); | 528 HeapObject* obj = HeapObject::cast(*p); |
525 if (obj->IsMarked()) continue; | 529 if (obj->IsMarked()) continue; |
526 VisitUnmarkedObject(obj); | 530 VisitUnmarkedObject(collector, obj); |
527 } | 531 } |
528 return true; | 532 return true; |
529 } | 533 } |
530 | 534 |
531 static inline void VisitExternalReference(Address* p) { } | 535 static inline void VisitExternalReference(Address* p) { } |
532 static inline void VisitRuntimeEntry(RelocInfo* rinfo) { } | 536 static inline void VisitRuntimeEntry(RelocInfo* rinfo) { } |
533 | 537 |
534 private: | 538 private: |
535 class DataObjectVisitor { | 539 class DataObjectVisitor { |
536 public: | 540 public: |
(...skipping 2532 matching lines...)
3069 } | 3073 } |
3070 | 3074 |
3071 | 3075 |
3072 void MarkCompactCollector::Initialize() { | 3076 void MarkCompactCollector::Initialize() { |
3073 StaticPointersToNewGenUpdatingVisitor::Initialize(); | 3077 StaticPointersToNewGenUpdatingVisitor::Initialize(); |
3074 StaticMarkingVisitor::Initialize(); | 3078 StaticMarkingVisitor::Initialize(); |
3075 } | 3079 } |
3076 | 3080 |
3077 | 3081 |
3078 } } // namespace v8::internal | 3082 } } // namespace v8::internal |