OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 465 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
476 OS::MemMove(dst, src, static_cast<size_t>(byte_size)); | 476 OS::MemMove(dst, src, static_cast<size_t>(byte_size)); |
477 } | 477 } |
478 } | 478 } |
479 | 479 |
480 | 480 |
// Scavenge the object referenced by the slot *p, updating the slot in
// place to point at the object's post-scavenge location.
void Heap::ScavengePointer(HeapObject** p) {
  ScavengeObject(p, *p);
}
484 | 484 |
485 | 485 |
| 486 void Heap::UpdateAllocationSiteFeedback(HeapObject* object) { |
| 487 if (FLAG_allocation_site_pretenuring && object->IsJSObject()) { |
| 488 AllocationMemento* memento = AllocationMemento::FindForJSObject( |
| 489 JSObject::cast(object), true); |
| 490 if (memento != NULL) { |
| 491 ASSERT(memento->IsValid()); |
| 492 memento->GetAllocationSite()->IncrementMementoFoundCount(); |
| 493 } |
| 494 } |
| 495 } |
| 496 |
| 497 |
486 void Heap::ScavengeObject(HeapObject** p, HeapObject* object) { | 498 void Heap::ScavengeObject(HeapObject** p, HeapObject* object) { |
487 ASSERT(object->GetIsolate()->heap()->InFromSpace(object)); | 499 ASSERT(object->GetIsolate()->heap()->InFromSpace(object)); |
488 | 500 |
489 // We use the first word (where the map pointer usually is) of a heap | 501 // We use the first word (where the map pointer usually is) of a heap |
490 // object to record the forwarding pointer. A forwarding pointer can | 502 // object to record the forwarding pointer. A forwarding pointer can |
491 // point to an old space, the code space, or the to space of the new | 503 // point to an old space, the code space, or the to space of the new |
492 // generation. | 504 // generation. |
493 MapWord first_word = object->map_word(); | 505 MapWord first_word = object->map_word(); |
494 | 506 |
495 // If the first word is a forwarding address, the object has already been | 507 // If the first word is a forwarding address, the object has already been |
496 // copied. | 508 // copied. |
497 if (first_word.IsForwardingAddress()) { | 509 if (first_word.IsForwardingAddress()) { |
498 HeapObject* dest = first_word.ToForwardingAddress(); | 510 HeapObject* dest = first_word.ToForwardingAddress(); |
499 ASSERT(object->GetIsolate()->heap()->InFromSpace(*p)); | 511 ASSERT(object->GetIsolate()->heap()->InFromSpace(*p)); |
500 *p = dest; | 512 *p = dest; |
501 return; | 513 return; |
502 } | 514 } |
503 | 515 |
504 if (FLAG_trace_track_allocation_sites && object->IsJSObject()) { | 516 UpdateAllocationSiteFeedback(object); |
505 if (AllocationMemento::FindForJSObject(JSObject::cast(object), true) != | |
506 NULL) { | |
507 object->GetIsolate()->heap()->allocation_mementos_found_++; | |
508 } | |
509 } | |
510 | 517 |
511 // AllocationMementos are unrooted and shouldn't survive a scavenge | 518 // AllocationMementos are unrooted and shouldn't survive a scavenge |
512 ASSERT(object->map() != object->GetHeap()->allocation_memento_map()); | 519 ASSERT(object->map() != object->GetHeap()->allocation_memento_map()); |
513 // Call the slow part of scavenge object. | 520 // Call the slow part of scavenge object. |
514 return ScavengeObjectSlow(p, object); | 521 return ScavengeObjectSlow(p, object); |
515 } | 522 } |
516 | 523 |
517 | 524 |
518 bool Heap::CollectGarbage(AllocationSpace space, const char* gc_reason) { | 525 bool Heap::CollectGarbage(AllocationSpace space, const char* gc_reason) { |
519 const char* collector_reason = NULL; | 526 const char* collector_reason = NULL; |
(...skipping 344 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
864 #ifdef DEBUG | 871 #ifdef DEBUG |
865 Isolate* isolate = Isolate::Current(); | 872 Isolate* isolate = Isolate::Current(); |
866 isolate->heap()->disallow_allocation_failure_ = old_state_; | 873 isolate->heap()->disallow_allocation_failure_ = old_state_; |
867 #endif | 874 #endif |
868 } | 875 } |
869 | 876 |
870 | 877 |
871 } } // namespace v8::internal | 878 } } // namespace v8::internal |
872 | 879 |
873 #endif // V8_HEAP_INL_H_ | 880 #endif // V8_HEAP_INL_H_ |
OLD | NEW |