| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 400 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 411 // Only the latter two contain non-map-word pointers to heap objects. | 411 // Only the latter two contain non-map-word pointers to heap objects. |
| 412 return ((type & kIsIndirectStringMask) == kIsIndirectStringTag) | 412 return ((type & kIsIndirectStringMask) == kIsIndirectStringTag) |
| 413 ? OLD_POINTER_SPACE | 413 ? OLD_POINTER_SPACE |
| 414 : OLD_DATA_SPACE; | 414 : OLD_DATA_SPACE; |
| 415 } else { | 415 } else { |
| 416 return (type <= LAST_DATA_TYPE) ? OLD_DATA_SPACE : OLD_POINTER_SPACE; | 416 return (type <= LAST_DATA_TYPE) ? OLD_DATA_SPACE : OLD_POINTER_SPACE; |
| 417 } | 417 } |
| 418 } | 418 } |
| 419 | 419 |
| 420 | 420 |
| 421 bool Heap::AllowedToBeMigrated(HeapObject* object, AllocationSpace dst) { | 421 bool Heap::AllowedToBeMigrated(HeapObject* obj, AllocationSpace dst) { |
| 422 // Object migration is governed by the following rules: | 422 // Object migration is governed by the following rules: |
| 423 // | 423 // |
| 424 // 1) Objects in new-space can be migrated to one of the old spaces | 424 // 1) Objects in new-space can be migrated to one of the old spaces |
| 425 // that matches their target space or they stay in new-space. | 425 // that matches their target space or they stay in new-space. |
| 426 // 2) Objects in old-space stay in the same space when migrating. | 426 // 2) Objects in old-space stay in the same space when migrating. |
| 427 // 3) Fillers (two or more words) can migrate due to left-trimming of | 427 // 3) Fillers (two or more words) can migrate due to left-trimming of |
| 428 // fixed arrays in new-space, old-data-space and old-pointer-space. | 428 // fixed arrays in new-space, old-data-space and old-pointer-space. |
| 429 // 4) Fillers (one word) can never migrate, they are skipped by | 429 // 4) Fillers (one word) can never migrate, they are skipped by |
| 430 // incremental marking explicitly to prevent invalid pattern. | 430 // incremental marking explicitly to prevent invalid pattern. |
| 431 // 5) Short external strings can end up in old pointer space when a cons |
| 432 // string in old pointer space is made external (String::MakeExternal). |
| 431 // | 433 // |
| 432 // Since this function is used for debugging only, we do not place | 434 // Since this function is used for debugging only, we do not place |
| 433 // asserts here, but check everything explicitly. | 435 // asserts here, but check everything explicitly. |
| 434 if (object->map() == one_pointer_filler_map()) return false; | 436 if (obj->map() == one_pointer_filler_map()) return false; |
| 435 InstanceType type = object->map()->instance_type(); | 437 InstanceType type = obj->map()->instance_type(); |
| 436 MemoryChunk* chunk = MemoryChunk::FromAddress(object->address()); | 438 MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); |
| 437 AllocationSpace src = chunk->owner()->identity(); | 439 AllocationSpace src = chunk->owner()->identity(); |
| 438 switch (src) { | 440 switch (src) { |
| 439 case NEW_SPACE: | 441 case NEW_SPACE: |
| 440 return dst == src || dst == TargetSpaceId(type); | 442 return dst == src || dst == TargetSpaceId(type); |
| 441 case OLD_POINTER_SPACE: | 443 case OLD_POINTER_SPACE: |
| 442 return dst == src && (dst == TargetSpaceId(type) || object->IsFiller()); | 444 return dst == src && |
| 445 (dst == TargetSpaceId(type) || obj->IsFiller() || |
| 446 (obj->IsExternalString() && ExternalString::cast(obj)->is_short())); |
| 443 case OLD_DATA_SPACE: | 447 case OLD_DATA_SPACE: |
| 444 return dst == src && dst == TargetSpaceId(type); | 448 return dst == src && dst == TargetSpaceId(type); |
| 445 case CODE_SPACE: | 449 case CODE_SPACE: |
| 446 return dst == src && type == CODE_TYPE; | 450 return dst == src && type == CODE_TYPE; |
| 447 case MAP_SPACE: | 451 case MAP_SPACE: |
| 448 case CELL_SPACE: | 452 case CELL_SPACE: |
| 449 case PROPERTY_CELL_SPACE: | 453 case PROPERTY_CELL_SPACE: |
| 450 case LO_SPACE: | 454 case LO_SPACE: |
| 451 return false; | 455 return false; |
| 452 } | 456 } |
| (...skipping 28 matching lines...) Expand all Loading... |
| 481 } | 485 } |
| 482 | 486 |
| 483 | 487 |
| 484 void Heap::ScavengePointer(HeapObject** p) { | 488 void Heap::ScavengePointer(HeapObject** p) { |
| 485 ScavengeObject(p, *p); | 489 ScavengeObject(p, *p); |
| 486 } | 490 } |
| 487 | 491 |
| 488 | 492 |
| 489 void Heap::UpdateAllocationSiteFeedback(HeapObject* object) { | 493 void Heap::UpdateAllocationSiteFeedback(HeapObject* object) { |
| 490 Heap* heap = object->GetHeap(); | 494 Heap* heap = object->GetHeap(); |
| 491 ASSERT(heap->InNewSpace(object)); | 495 ASSERT(heap->InFromSpace(object)); |
| 492 | 496 |
| 493 if (!FLAG_allocation_site_pretenuring || | 497 if (!FLAG_allocation_site_pretenuring || |
| 494 !AllocationSite::CanTrack(object->map()->instance_type())) return; | 498 !AllocationSite::CanTrack(object->map()->instance_type())) return; |
| 495 | 499 |
| 496 // Either object is the last object in the from space, or there is another | 500 // Check if there is potentially a memento behind the object. If |
| 497 // object of at least word size (the header map word) following it, so | 501 // the last word of the memento is on another page we return |
| 498 // suffices to compare ptr and top here. | 502 // immediately. Note that we do not have to compare with the current |
| 499 Address ptr = object->address() + object->Size(); | 503 // top pointer of the from space page, since we always install filler |
| 500 Address top = heap->new_space()->FromSpacePageHigh(); | 504 // objects above the top pointer of a from space page when performing |
| 501 ASSERT(ptr == top || ptr + HeapObject::kHeaderSize <= top); | 505 // a garbage collection. |
| 502 if (ptr == top) return; | 506 Address object_address = object->address(); |
| 507 Address memento_address = object_address + object->Size(); |
| 508 Address last_memento_word_address = memento_address + kPointerSize; |
| 509 if (!NewSpacePage::OnSamePage(object_address, |
| 510 last_memento_word_address)) { |
| 511 return; |
| 512 } |
| 503 | 513 |
| 504 HeapObject* candidate = HeapObject::FromAddress(ptr); | 514 HeapObject* candidate = HeapObject::FromAddress(memento_address); |
| 505 if (candidate->map() != heap->allocation_memento_map()) return; | 515 if (candidate->map() != heap->allocation_memento_map()) return; |
| 506 | 516 |
| 507 AllocationMemento* memento = AllocationMemento::cast(candidate); | 517 AllocationMemento* memento = AllocationMemento::cast(candidate); |
| 508 if (!memento->IsValid()) return; | 518 if (!memento->IsValid()) return; |
| 509 | 519 |
| 510 if (memento->GetAllocationSite()->IncrementMementoFoundCount() && | 520 if (memento->GetAllocationSite()->IncrementMementoFoundCount()) { |
| 511 heap->allocation_sites_scratchpad_length < | 521 heap->AddAllocationSiteToScratchpad(memento->GetAllocationSite()); |
| 512 kAllocationSiteScratchpadSize) { | |
| 513 heap->allocation_sites_scratchpad[ | |
| 514 heap->allocation_sites_scratchpad_length++] = | |
| 515 memento->GetAllocationSite(); | |
| 516 } | 522 } |
| 517 } | 523 } |
| 518 | 524 |
| 519 | 525 |
| 520 void Heap::ScavengeObject(HeapObject** p, HeapObject* object) { | 526 void Heap::ScavengeObject(HeapObject** p, HeapObject* object) { |
| 521 ASSERT(object->GetIsolate()->heap()->InFromSpace(object)); | 527 ASSERT(object->GetIsolate()->heap()->InFromSpace(object)); |
| 522 | 528 |
| 523 // We use the first word (where the map pointer usually is) of a heap | 529 // We use the first word (where the map pointer usually is) of a heap |
| 524 // object to record the forwarding pointer. A forwarding pointer can | 530 // object to record the forwarding pointer. A forwarding pointer can |
| 525 // point to an old space, the code space, or the to space of the new | 531 // point to an old space, the code space, or the to space of the new |
| (...skipping 16 matching lines...) Expand all Loading... |
| 542 // Call the slow part of scavenge object. | 548 // Call the slow part of scavenge object. |
| 543 return ScavengeObjectSlow(p, object); | 549 return ScavengeObjectSlow(p, object); |
| 544 } | 550 } |
| 545 | 551 |
| 546 | 552 |
| 547 bool Heap::CollectGarbage(AllocationSpace space, | 553 bool Heap::CollectGarbage(AllocationSpace space, |
| 548 const char* gc_reason, | 554 const char* gc_reason, |
| 549 const v8::GCCallbackFlags callbackFlags) { | 555 const v8::GCCallbackFlags callbackFlags) { |
| 550 const char* collector_reason = NULL; | 556 const char* collector_reason = NULL; |
| 551 GarbageCollector collector = SelectGarbageCollector(space, &collector_reason); | 557 GarbageCollector collector = SelectGarbageCollector(space, &collector_reason); |
| 552 return CollectGarbage( | 558 return CollectGarbage(collector, gc_reason, collector_reason, callbackFlags); |
| 553 space, collector, gc_reason, collector_reason, callbackFlags); | |
| 554 } | 559 } |
| 555 | 560 |
| 556 | 561 |
| 557 MaybeObject* Heap::PrepareForCompare(String* str) { | 562 MaybeObject* Heap::PrepareForCompare(String* str) { |
| 558 // Always flatten small strings and force flattening of long strings | 563 // Always flatten small strings and force flattening of long strings |
| 559 // after we have accumulated a certain amount we failed to flatten. | 564 // after we have accumulated a certain amount we failed to flatten. |
| 560 static const int kMaxAlwaysFlattenLength = 32; | 565 static const int kMaxAlwaysFlattenLength = 32; |
| 561 static const int kFlattenLongThreshold = 16*KB; | 566 static const int kFlattenLongThreshold = 16*KB; |
| 562 | 567 |
| 563 const int length = str->length(); | 568 const int length = str->length(); |
| (...skipping 269 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 833 #ifdef DEBUG | 838 #ifdef DEBUG |
| 834 Isolate* isolate = Isolate::Current(); | 839 Isolate* isolate = Isolate::Current(); |
| 835 isolate->heap()->disallow_allocation_failure_ = old_state_; | 840 isolate->heap()->disallow_allocation_failure_ = old_state_; |
| 836 #endif | 841 #endif |
| 837 } | 842 } |
| 838 | 843 |
| 839 | 844 |
| 840 } } // namespace v8::internal | 845 } } // namespace v8::internal |
| 841 | 846 |
| 842 #endif // V8_HEAP_INL_H_ | 847 #endif // V8_HEAP_INL_H_ |
| OLD | NEW |