| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 472 matching lines...) |
| 483 OS::MemMove(dst, src, static_cast<size_t>(byte_size)); | 483 OS::MemMove(dst, src, static_cast<size_t>(byte_size)); |
| 484 } | 484 } |
| 485 } | 485 } |
| 486 | 486 |
| 487 | 487 |
| 488 void Heap::ScavengePointer(HeapObject** p) { | 488 void Heap::ScavengePointer(HeapObject** p) { |
| 489 ScavengeObject(p, *p); | 489 ScavengeObject(p, *p); |
| 490 } | 490 } |
| 491 | 491 |
| 492 | 492 |
| 493 void Heap::UpdateAllocationSiteFeedback(HeapObject* object) { | 493 void Heap::UpdateAllocationSiteFeedback(HeapObject* object, |
| 494 ScratchpadSlotMode mode) { |
| 494 Heap* heap = object->GetHeap(); | 495 Heap* heap = object->GetHeap(); |
| 495 ASSERT(heap->InFromSpace(object)); | 496 ASSERT(heap->InFromSpace(object)); |
| 496 | 497 |
| 497 if (!FLAG_allocation_site_pretenuring || | 498 if (!FLAG_allocation_site_pretenuring || |
| 498 !AllocationSite::CanTrack(object->map()->instance_type())) return; | 499 !AllocationSite::CanTrack(object->map()->instance_type())) return; |
| 499 | 500 |
| 500 // Check if there is potentially a memento behind the object. If | 501 // Check if there is potentially a memento behind the object. If |
| 501 // the last word of the memento is on another page we return | 502 // the last word of the memento is on another page we return |
| 502 // immediately. Note that we do not have to compare with the current | 503 // immediately. Note that we do not have to compare with the current |
| 503 // top pointer of the from space page, since we always install filler | 504 // top pointer of the from space page, since we always install filler |
| 504 // objects above the top pointer of a from space page when performing | 505 // objects above the top pointer of a from space page when performing |
| 505 // a garbage collection. | 506 // a garbage collection. |
| 506 Address object_address = object->address(); | 507 Address object_address = object->address(); |
| 507 Address memento_address = object_address + object->Size(); | 508 Address memento_address = object_address + object->Size(); |
| 508 Address last_memento_word_address = memento_address + kPointerSize; | 509 Address last_memento_word_address = memento_address + kPointerSize; |
| 509 if (!NewSpacePage::OnSamePage(object_address, | 510 if (!NewSpacePage::OnSamePage(object_address, |
| 510 last_memento_word_address)) { | 511 last_memento_word_address)) { |
| 511 return; | 512 return; |
| 512 } | 513 } |
| 513 | 514 |
| 514 HeapObject* candidate = HeapObject::FromAddress(memento_address); | 515 HeapObject* candidate = HeapObject::FromAddress(memento_address); |
| 515 if (candidate->map() != heap->allocation_memento_map()) return; | 516 if (candidate->map() != heap->allocation_memento_map()) return; |
| 516 | 517 |
| 517 AllocationMemento* memento = AllocationMemento::cast(candidate); | 518 AllocationMemento* memento = AllocationMemento::cast(candidate); |
| 518 if (!memento->IsValid()) return; | 519 if (!memento->IsValid()) return; |
| 519 | 520 |
| 520 if (memento->GetAllocationSite()->IncrementMementoFoundCount()) { | 521 if (memento->GetAllocationSite()->IncrementMementoFoundCount()) { |
| 521 heap->AddAllocationSiteToScratchpad(memento->GetAllocationSite()); | 522 heap->AddAllocationSiteToScratchpad(memento->GetAllocationSite(), mode); |
| 522 } | 523 } |
| 523 } | 524 } |
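To make the memento lookup above easier to follow, here is a minimal standalone sketch of the same control flow: look one object-length past the scavenged object, bail out if the last memento word lies on a different page, check the candidate's marker, and only then record feedback on the allocation site. Every name in it (OnSamePage, MementoStub, AllocationSiteStub, the 1 MB page size, the marker constant) is an illustrative stand-in, not V8's API.

```cpp
#include <cstdint>
#include <cstddef>

namespace sketch {

// Assumed page size for illustration; V8's new-space pages differ.
constexpr std::uintptr_t kPageAlignmentMask = (1u << 20) - 1;

struct AllocationSiteStub {
  int memento_found_count = 0;
  // Returns true the first time feedback is recorded, standing in for the
  // "newly interesting site" signal of IncrementMementoFoundCount().
  bool IncrementMementoFoundCount() { return ++memento_found_count == 1; }
};

struct MementoStub {
  std::uintptr_t marker;          // plays the role of the memento map pointer
  AllocationSiteStub* site;
};

constexpr std::uintptr_t kMementoMarker = 0xDEADBEEF;

inline bool OnSamePage(std::uintptr_t a, std::uintptr_t b) {
  return (a & ~kPageAlignmentMask) == (b & ~kPageAlignmentMask);
}

// object_address/object_size describe the just-scavenged object.
void UpdateFeedback(std::uintptr_t object_address, std::size_t object_size) {
  std::uintptr_t memento_address = object_address + object_size;
  std::uintptr_t last_memento_word = memento_address + sizeof(void*);
  // The memento would straddle a page boundary: give up immediately.
  if (!OnSamePage(object_address, last_memento_word)) return;

  const MementoStub* candidate =
      reinterpret_cast<const MementoStub*>(memento_address);
  if (candidate->marker != kMementoMarker) return;  // no memento behind the object

  if (candidate->site->IncrementMementoFoundCount()) {
    // Here the real code adds the site to the scratchpad, now passing the
    // ScratchpadSlotMode argument introduced by this change.
  }
}

}  // namespace sketch
```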
| 524 | 525 |
| 525 | 526 |
| 526 void Heap::ScavengeObject(HeapObject** p, HeapObject* object) { | 527 void Heap::ScavengeObject(HeapObject** p, HeapObject* object) { |
| 527 ASSERT(object->GetIsolate()->heap()->InFromSpace(object)); | 528 ASSERT(object->GetIsolate()->heap()->InFromSpace(object)); |
| 528 | 529 |
| 529 // We use the first word (where the map pointer usually is) of a heap | 530 // We use the first word (where the map pointer usually is) of a heap |
| 530 // object to record the forwarding pointer. A forwarding pointer can | 531 // object to record the forwarding pointer. A forwarding pointer can |
| 531 // point to an old space, the code space, or the to space of the new | 532 // point to an old space, the code space, or the to space of the new |
| 532 // generation. | 533 // generation. |
| 533 MapWord first_word = object->map_word(); | 534 MapWord first_word = object->map_word(); |
| 534 | 535 |
| 535 // If the first word is a forwarding address, the object has already been | 536 // If the first word is a forwarding address, the object has already been |
| 536 // copied. | 537 // copied. |
| 537 if (first_word.IsForwardingAddress()) { | 538 if (first_word.IsForwardingAddress()) { |
| 538 HeapObject* dest = first_word.ToForwardingAddress(); | 539 HeapObject* dest = first_word.ToForwardingAddress(); |
| 539 ASSERT(object->GetIsolate()->heap()->InFromSpace(*p)); | 540 ASSERT(object->GetIsolate()->heap()->InFromSpace(*p)); |
| 540 *p = dest; | 541 *p = dest; |
| 541 return; | 542 return; |
| 542 } | 543 } |
| 543 | 544 |
| 544 UpdateAllocationSiteFeedback(object); | 545 UpdateAllocationSiteFeedback(object, IGNORE_SCRATCHPAD_SLOT); |
| 545 | 546 |
| 546 // AllocationMementos are unrooted and shouldn't survive a scavenge | 547 // AllocationMementos are unrooted and shouldn't survive a scavenge |
| 547 ASSERT(object->map() != object->GetHeap()->allocation_memento_map()); | 548 ASSERT(object->map() != object->GetHeap()->allocation_memento_map()); |
| 548 // Call the slow part of scavenge object. | 549 // Call the slow part of scavenge object. |
| 549 return ScavengeObjectSlow(p, object); | 550 return ScavengeObjectSlow(p, object); |
| 550 } | 551 } |
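The comment above describes the scavenger's forwarding-word trick: reuse the first (map) word of an already-evacuated object to point at its copy, so a second visit only has to retarget the slot. A hedged sketch of that fast path follows; the low-bit tag encoding and every type name are assumptions for illustration, not V8's MapWord implementation.

```cpp
#include <cstdint>

namespace sketch {

struct Object {
  std::uintptr_t first_word;  // map pointer, or tagged forwarding address
};

// Assumption: object pointers are at least 2-byte aligned, so the low bit is
// free to serve as the "this word is a forwarding address" tag.
constexpr std::uintptr_t kForwardingTag = 0x1;

inline bool IsForwardingAddress(std::uintptr_t word) {
  return (word & kForwardingTag) != 0;
}

inline Object* ToForwardingAddress(std::uintptr_t word) {
  return reinterpret_cast<Object*>(word & ~kForwardingTag);
}

// Stand-in for the slow path: pretend to evacuate the object to "to space".
Object* CopyAndForward(Object* object) {
  return new Object{object->first_word};
}

// 'slot' is the field that referenced 'object' from outside from-space.
void Scavenge(Object** slot) {
  Object* object = *slot;
  std::uintptr_t first_word = object->first_word;
  if (IsForwardingAddress(first_word)) {
    // Already evacuated: just retarget the slot at the copy.
    *slot = ToForwardingAddress(first_word);
    return;
  }
  // Not yet copied: evacuate, install the forwarding word, update the slot.
  Object* copy = CopyAndForward(object);
  object->first_word =
      reinterpret_cast<std::uintptr_t>(copy) | kForwardingTag;
  *slot = copy;
}

}  // namespace sketch
```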
| 551 | 552 |
| 552 | 553 |
| 553 bool Heap::CollectGarbage(AllocationSpace space, | 554 bool Heap::CollectGarbage(AllocationSpace space, |
| 554 const char* gc_reason, | 555 const char* gc_reason, |
| (...skipping 290 matching lines...) |
| 845 #ifdef DEBUG | 846 #ifdef DEBUG |
| 846 Isolate* isolate = Isolate::Current(); | 847 Isolate* isolate = Isolate::Current(); |
| 847 isolate->heap()->disallow_allocation_failure_ = old_state_; | 848 isolate->heap()->disallow_allocation_failure_ = old_state_; |
| 848 #endif | 849 #endif |
| 849 } | 850 } |
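The destructor fragment above restores disallow_allocation_failure_ to whatever value was saved when the scope was entered. Below is a small sketch of that save/restore scope-guard pattern; HeapFlags and DisallowAllocationFailureScope are stand-in names for illustration, not V8's types.

```cpp
struct HeapFlags {
  bool disallow_allocation_failure = false;
};

class DisallowAllocationFailureScope {
 public:
  explicit DisallowAllocationFailureScope(HeapFlags* flags)
      : flags_(flags), old_state_(flags->disallow_allocation_failure) {
    // Force the flag on for the lifetime of this scope.
    flags_->disallow_allocation_failure = true;
  }
  ~DisallowAllocationFailureScope() {
    // Put back whatever was in effect before, as the destructor in the diff
    // restores old_state_.
    flags_->disallow_allocation_failure = old_state_;
  }

 private:
  HeapFlags* flags_;
  bool old_state_;
};
```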
| 850 | 851 |
| 851 | 852 |
| 852 } } // namespace v8::internal | 853 } } // namespace v8::internal |
| 853 | 854 |
| 854 #endif // V8_HEAP_INL_H_ | 855 #endif // V8_HEAP_INL_H_ |