| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 51 matching lines...) |
| 62 RelocateQueueHead(); | 62 RelocateQueueHead(); |
| 63 emergency_stack_->Add(Entry(target, size)); | 63 emergency_stack_->Add(Entry(target, size)); |
| 64 return; | 64 return; |
| 65 } | 65 } |
| 66 } | 66 } |
| 67 | 67 |
| 68 *(--rear_) = reinterpret_cast<intptr_t>(target); | 68 *(--rear_) = reinterpret_cast<intptr_t>(target); |
| 69 *(--rear_) = size; | 69 *(--rear_) = size; |
| 70 // Assert no overflow into live objects. | 70 // Assert no overflow into live objects. |
| 71 #ifdef DEBUG | 71 #ifdef DEBUG |
| 72 SemiSpace::AssertValidRange(HEAP->new_space()->top(), | 72 SemiSpace::AssertValidRange(target->GetIsolate()->heap()->new_space()->top(), |
| 73 reinterpret_cast<Address>(rear_)); | 73 reinterpret_cast<Address>(rear_)); |
| 74 #endif | 74 #endif |
| 75 } | 75 } |
| 76 | 76 |
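PromotionQueue::insert above pushes two machine words per entry (the object pointer, then its size) and grows downward from rear_; the DEBUG range assert is what checks that this downward growth never runs into live objects at the new-space top. A toy, self-contained sketch of such a downward-growing two-word queue (hypothetical names and bounds handling, no emergency-stack fallback):

    #include <assert.h>
    #include <stdint.h>

    // Toy model: entries are (pointer, size) pairs, inserted at rear_ and
    // removed FIFO at front_; both cursors walk downward in memory.
    class ToyPromotionQueue {
     public:
      ToyPromotionQueue(intptr_t* lowest, intptr_t* start)
          : limit_(lowest), front_(start), rear_(start) {}

      bool is_empty() const { return front_ == rear_; }

      void Insert(void* target, intptr_t size) {
        assert(rear_ - 2 >= limit_);  // the "no overflow into live objects" check
        *(--rear_) = reinterpret_cast<intptr_t>(target);
        *(--rear_) = size;
      }

      void Remove(void** target, intptr_t* size) {
        assert(!is_empty());
        *target = reinterpret_cast<void*>(*(--front_));  // oldest entry first
        *size = *(--front_);
      }

     private:
      intptr_t* limit_;  // lowest address entries may occupy
      intptr_t* front_;  // removal cursor, chases rear_ downward
      intptr_t* rear_;   // insertion cursor, grows downward
    };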
| 77 | 77 |
| 78 void PromotionQueue::ActivateGuardIfOnTheSamePage() { | 78 void PromotionQueue::ActivateGuardIfOnTheSamePage() { |
| 79 guard_ = guard_ || | 79 guard_ = guard_ || |
| 80 heap_->new_space()->active_space()->current_page()->address() == | 80 heap_->new_space()->active_space()->current_page()->address() == |
| 81 GetHeadPage()->address(); | 81 GetHeadPage()->address(); |
| 82 } | 82 } |
| (...skipping 418 matching lines...) |
| 501 } | 501 } |
| 502 } | 502 } |
| 503 | 503 |
| 504 | 504 |
| 505 void Heap::ScavengePointer(HeapObject** p) { | 505 void Heap::ScavengePointer(HeapObject** p) { |
| 506 ScavengeObject(p, *p); | 506 ScavengeObject(p, *p); |
| 507 } | 507 } |
| 508 | 508 |
| 509 | 509 |
| 510 void Heap::ScavengeObject(HeapObject** p, HeapObject* object) { | 510 void Heap::ScavengeObject(HeapObject** p, HeapObject* object) { |
| 511 ASSERT(HEAP->InFromSpace(object)); | 511 ASSERT(object->GetIsolate()->heap()->InFromSpace(object)); |
| 512 | 512 |
| 513 // We use the first word (where the map pointer usually is) of a heap | 513 // We use the first word (where the map pointer usually is) of a heap |
| 514 // object to record the forwarding pointer. A forwarding pointer can | 514 // object to record the forwarding pointer. A forwarding pointer can |
| 515 // point to an old space, the code space, or the to space of the new | 515 // point to an old space, the code space, or the to space of the new |
| 516 // generation. | 516 // generation. |
| 517 MapWord first_word = object->map_word(); | 517 MapWord first_word = object->map_word(); |
| 518 | 518 |
| 519 // If the first word is a forwarding address, the object has already been | 519 // If the first word is a forwarding address, the object has already been |
| 520 // copied. | 520 // copied. |
| 521 if (first_word.IsForwardingAddress()) { | 521 if (first_word.IsForwardingAddress()) { |
| 522 HeapObject* dest = first_word.ToForwardingAddress(); | 522 HeapObject* dest = first_word.ToForwardingAddress(); |
| 523 ASSERT(HEAP->InFromSpace(*p)); | 523 ASSERT(object->GetIsolate()->heap()->InFromSpace(*p)); |
| 524 *p = dest; | 524 *p = dest; |
| 525 return; | 525 return; |
| 526 } | 526 } |
| 527 | 527 |
| | 528 // TODO(hpayer): temporary debugging code for issue 284577. |
| | 529 CHECK(object->map() != object->GetHeap()->allocation_memento_map()); |
| 528 // Call the slow part of scavenge object. | 530 // Call the slow part of scavenge object. |
| 529 return ScavengeObjectSlow(p, object); | 531 return ScavengeObjectSlow(p, object); |
| 530 } | 532 } |
| 531 | 533 |
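The comment in ScavengeObject describes the usual copying-collector trick: once an object has been evacuated, its map-word slot is reused to hold the forwarding address, so later visits only need to read that slot and patch the pointer. A minimal, self-contained sketch of the idea, using a made-up low-bit tag and a FakeObject type rather than V8's actual MapWord encoding:

    #include <stdint.h>

    struct FakeObject {
      uintptr_t map_word;  // either a map pointer or a tagged forwarding address
    };

    const uintptr_t kForwardingTag = 1;  // hypothetical tag, not V8's scheme

    inline bool IsForwarded(const FakeObject* obj) {
      return (obj->map_word & kForwardingTag) != 0;
    }

    inline FakeObject* ForwardingAddress(const FakeObject* obj) {
      return reinterpret_cast<FakeObject*>(obj->map_word & ~kForwardingTag);
    }

    inline void SetForwardingAddress(FakeObject* obj, FakeObject* dest) {
      obj->map_word = reinterpret_cast<uintptr_t>(dest) | kForwardingTag;
    }

    // Scavenge one slot: if the referenced object was already copied, just
    // follow the forwarding address; otherwise a slow path would copy it to
    // to-space, install the forwarding address, and update the slot.
    inline void ScavengeSlot(FakeObject** slot) {
      FakeObject* obj = *slot;
      if (IsForwarded(obj)) {
        *slot = ForwardingAddress(obj);
      }
    }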
| 532 | 534 |
| 533 MaybeObject* Heap::AllocateEmptyJSArrayWithAllocationSite( | 535 MaybeObject* Heap::AllocateEmptyJSArrayWithAllocationSite( |
| 534 ElementsKind elements_kind, | 536 ElementsKind elements_kind, |
| 535 Handle<AllocationSite> allocation_site) { | 537 Handle<AllocationSite> allocation_site) { |
| 536 return AllocateJSArrayAndStorageWithAllocationSite(elements_kind, 0, 0, | 538 return AllocateJSArrayAndStorageWithAllocationSite(elements_kind, 0, 0, |
| 537 allocation_site, DONT_INITIALIZE_ARRAY_ELEMENTS); | 539 allocation_site, DONT_INITIALIZE_ARRAY_ELEMENTS); |
| (...skipping 68 matching lines...) |
| 606 } | 608 } |
| 607 | 609 |
| 608 | 610 |
| 609 Isolate* Heap::isolate() { | 611 Isolate* Heap::isolate() { |
| 610 return reinterpret_cast<Isolate*>(reinterpret_cast<intptr_t>(this) - | 612 return reinterpret_cast<Isolate*>(reinterpret_cast<intptr_t>(this) - |
| 611 reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(4)->heap()) + 4); | 613 reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(4)->heap()) + 4); |
| 612 } | 614 } |
| 613 | 615 |
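Heap::isolate() just above recovers the enclosing Isolate from the Heap's own address by subtracting the byte offset of the heap member inside Isolate; the offset is obtained by pretending an Isolate lives at address 4 and asking where its heap() accessor would point, an offsetof substitute for a member that is only reachable through an accessor. A small sketch of the underlying container_of idea with hypothetical Outer/Inner types and plain offsetof:

    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    struct Inner { int payload; };

    struct Outer {
      int before;
      Inner inner;
    };

    // Recover the enclosing Outer from a pointer to its Inner member by
    // subtracting the member's offset, as Heap::isolate() does for Isolate.
    Outer* OuterFromInner(Inner* in) {
      return reinterpret_cast<Outer*>(
          reinterpret_cast<intptr_t>(in) -
          static_cast<intptr_t>(offsetof(Outer, inner)));
    }

    int main() {
      Outer o = { 7, { 42 } };
      printf("%d\n", OuterFromInner(&o.inner)->before);  // prints 7
      return 0;
    }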
| 614 | 616 |
| 615 #ifdef DEBUG | 617 #ifdef DEBUG |
| 616 #define GC_GREEDY_CHECK() \ | 618 #define GC_GREEDY_CHECK(ISOLATE) \ |
| 617 if (FLAG_gc_greedy) HEAP->GarbageCollectionGreedyCheck() | 619 if (FLAG_gc_greedy) (ISOLATE)->heap()->GarbageCollectionGreedyCheck() |
| 618 #else | 620 #else |
| 619 #define GC_GREEDY_CHECK() { } | 621 #define GC_GREEDY_CHECK(ISOLATE) { } |
| 620 #endif | 622 #endif |
| 621 | 623 |
| 622 // Calls the FUNCTION_CALL function and retries it up to three times | 624 // Calls the FUNCTION_CALL function and retries it up to three times |
| 623 // to guarantee that any allocations performed during the call will | 625 // to guarantee that any allocations performed during the call will |
| 624 // succeed if there's enough memory. | 626 // succeed if there's enough memory. |
| 625 | 627 |
| 626 // Warning: Do not use the identifiers __object__, __maybe_object__ or | 628 // Warning: Do not use the identifiers __object__, __maybe_object__ or |
| 627 // __scope__ in a call to this macro. | 629 // __scope__ in a call to this macro. |
| 628 | 630 |
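The macro defined below tries FUNCTION_CALL once; on a retry-after-GC failure it collects the failing space and tries again, and after a second failure it collects all available garbage and makes a final attempt inside an AlwaysAllocateScope. A schematic call site is sketched here; it is not compilable on its own, AllocateSomething() is a stand-in for any Heap routine returning MaybeObject*, and the success branch reads the macro's __object__ binding to hand the result back out:

    // Hypothetical usage sketch, for illustration only.
    static Object* AllocateWithRetry(Isolate* isolate) {
      CALL_AND_RETRY(isolate,
                     isolate->heap()->AllocateSomething(),
                     return __object__,   // success: the unwrapped object
                     return NULL,         // failed, but not retryable
                     return NULL);        // out of memory
    }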
| 629 #define CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY, OOM)\ | 631 #define CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY, OOM)\ |
| 630 do { \ | 632 do { \ |
| 631 GC_GREEDY_CHECK(); \ | 633 GC_GREEDY_CHECK(ISOLATE); \ |
| 632 MaybeObject* __maybe_object__ = FUNCTION_CALL; \ | 634 MaybeObject* __maybe_object__ = FUNCTION_CALL; \ |
| 633 Object* __object__ = NULL; \ | 635 Object* __object__ = NULL; \ |
| 634 if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE; \ | 636 if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE; \ |
| 635 if (__maybe_object__->IsOutOfMemory()) { \ | 637 if (__maybe_object__->IsOutOfMemory()) { \ |
| 636 OOM; \ | 638 OOM; \ |
| 637 } \ | 639 } \ |
| 638 if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY; \ | 640 if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY; \ |
| 639 ISOLATE->heap()->CollectGarbage(Failure::cast(__maybe_object__)-> \ | 641 (ISOLATE)->heap()->CollectGarbage(Failure::cast(__maybe_object__)-> \ |
| 640 allocation_space(), \ | 642 allocation_space(), \ |
| 641 "allocation failure"); \ | 643 "allocation failure"); \ |
| 642 __maybe_object__ = FUNCTION_CALL; \ | 644 __maybe_object__ = FUNCTION_CALL; \ |
| 643 if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE; \ | 645 if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE; \ |
| 644 if (__maybe_object__->IsOutOfMemory()) { \ | 646 if (__maybe_object__->IsOutOfMemory()) { \ |
| 645 OOM; \ | 647 OOM; \ |
| 646 } \ | 648 } \ |
| 647 if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY; \ | 649 if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY; \ |
| 648 ISOLATE->counters()->gc_last_resort_from_handles()->Increment(); \ | 650 (ISOLATE)->counters()->gc_last_resort_from_handles()->Increment(); \ |
| 649 ISOLATE->heap()->CollectAllAvailableGarbage("last resort gc"); \ | 651 (ISOLATE)->heap()->CollectAllAvailableGarbage("last resort gc"); \ |
| 650 { \ | 652 { \ |
| 651 AlwaysAllocateScope __scope__; \ | 653 AlwaysAllocateScope __scope__; \ |
| 652 __maybe_object__ = FUNCTION_CALL; \ | 654 __maybe_object__ = FUNCTION_CALL; \ |
| 653 } \ | 655 } \ |
| 654 if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE; \ | 656 if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE; \ |
| 655 if (__maybe_object__->IsOutOfMemory()) { \ | 657 if (__maybe_object__->IsOutOfMemory()) { \ |
| 656 OOM; \ | 658 OOM; \ |
| 657 } \ | 659 } \ |
| 658 if (__maybe_object__->IsRetryAfterGC()) { \ | 660 if (__maybe_object__->IsRetryAfterGC()) { \ |
| 659 /* TODO(1181417): Fix this. */ \ | 661 /* TODO(1181417): Fix this. */ \ |
| (...skipping 52 matching lines...) |
| 712 } | 714 } |
| 713 | 715 |
| 714 | 716 |
| 715 // Verify() is inline to avoid ifdef-s around its calls in release | 717 // Verify() is inline to avoid ifdef-s around its calls in release |
| 716 // mode. | 718 // mode. |
| 717 void ExternalStringTable::Verify() { | 719 void ExternalStringTable::Verify() { |
| 718 #ifdef DEBUG | 720 #ifdef DEBUG |
| 719 for (int i = 0; i < new_space_strings_.length(); ++i) { | 721 for (int i = 0; i < new_space_strings_.length(); ++i) { |
| 720 Object* obj = Object::cast(new_space_strings_[i]); | 722 Object* obj = Object::cast(new_space_strings_[i]); |
| 721 ASSERT(heap_->InNewSpace(obj)); | 723 ASSERT(heap_->InNewSpace(obj)); |
| 722 ASSERT(obj != HEAP->the_hole_value()); | 724 ASSERT(obj != heap_->the_hole_value()); |
| 723 } | 725 } |
| 724 for (int i = 0; i < old_space_strings_.length(); ++i) { | 726 for (int i = 0; i < old_space_strings_.length(); ++i) { |
| 725 Object* obj = Object::cast(old_space_strings_[i]); | 727 Object* obj = Object::cast(old_space_strings_[i]); |
| 726 ASSERT(!heap_->InNewSpace(obj)); | 728 ASSERT(!heap_->InNewSpace(obj)); |
| 727 ASSERT(obj != HEAP->the_hole_value()); | 729 ASSERT(obj != heap_->the_hole_value()); |
| 728 } | 730 } |
| 729 #endif | 731 #endif |
| 730 } | 732 } |
| 731 | 733 |
| 732 | 734 |
| 733 void ExternalStringTable::AddOldString(String* string) { | 735 void ExternalStringTable::AddOldString(String* string) { |
| 734 ASSERT(string->IsExternalString()); | 736 ASSERT(string->IsExternalString()); |
| 735 ASSERT(!heap_->InNewSpace(string)); | 737 ASSERT(!heap_->InNewSpace(string)); |
| 736 old_space_strings_.Add(string); | 738 old_space_strings_.Add(string); |
| 737 } | 739 } |
| (...skipping 86 matching lines...) |
| 824 elements_[hash].output = heap_number; | 826 elements_[hash].output = heap_number; |
| 825 return heap_number; | 827 return heap_number; |
| 826 } | 828 } |
| 827 | 829 |
| 828 | 830 |
| 829 AlwaysAllocateScope::AlwaysAllocateScope() { | 831 AlwaysAllocateScope::AlwaysAllocateScope() { |
| 830 // We shouldn't hit any nested scopes, because that requires | 832 // We shouldn't hit any nested scopes, because that requires |
| 831 // non-handle code to call handle code. The code still works but | 833 // non-handle code to call handle code. The code still works but |
| 832 // performance will degrade, so we want to catch this situation | 834 // performance will degrade, so we want to catch this situation |
| 833 // in debug mode. | 835 // in debug mode. |
| 834 ASSERT(HEAP->always_allocate_scope_depth_ == 0); | 836 Isolate* isolate = Isolate::Current(); |
| 835 HEAP->always_allocate_scope_depth_++; | 837 ASSERT(isolate->heap()->always_allocate_scope_depth_ == 0); |
| | 838 isolate->heap()->always_allocate_scope_depth_++; |
| 836 } | 839 } |
| 837 | 840 |
| 838 | 841 |
| 839 AlwaysAllocateScope::~AlwaysAllocateScope() { | 842 AlwaysAllocateScope::~AlwaysAllocateScope() { |
| 840 HEAP->always_allocate_scope_depth_--; | 843 Isolate* isolate = Isolate::Current(); |
| 841 ASSERT(HEAP->always_allocate_scope_depth_ == 0); | 844 isolate->heap()->always_allocate_scope_depth_--; |
| | 845 ASSERT(isolate->heap()->always_allocate_scope_depth_ == 0); |
| 842 } | 846 } |
| 843 | 847 |
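AlwaysAllocateScope above is the usual debug-checked RAII guard: bump a depth counter on entry, drop it on exit, and assert in debug builds that such scopes never nest. A generic, self-contained sketch of the pattern (hypothetical ScopedDepthGuard name, plain assert instead of V8's ASSERT):

    #include <assert.h>

    // RAII guard over an externally owned depth counter; the assertions catch
    // accidental nesting, mirroring the AlwaysAllocateScope checks above.
    class ScopedDepthGuard {
     public:
      explicit ScopedDepthGuard(int* depth) : depth_(depth) {
        assert(*depth_ == 0);
        ++*depth_;
      }
      ~ScopedDepthGuard() {
        --*depth_;
        assert(*depth_ == 0);
      }
     private:
      int* depth_;
    };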
| 844 | 848 |
| 845 #ifdef VERIFY_HEAP | 849 #ifdef VERIFY_HEAP |
| 846 NoWeakEmbeddedMapsVerificationScope::NoWeakEmbeddedMapsVerificationScope() { | 850 NoWeakEmbeddedMapsVerificationScope::NoWeakEmbeddedMapsVerificationScope() { |
| 847 HEAP->no_weak_embedded_maps_verification_scope_depth_++; | 851 Isolate* isolate = Isolate::Current(); |
| | 852 isolate->heap()->no_weak_embedded_maps_verification_scope_depth_++; |
| 848 } | 853 } |
| 849 | 854 |
| 850 | 855 |
| 851 NoWeakEmbeddedMapsVerificationScope::~NoWeakEmbeddedMapsVerificationScope() { | 856 NoWeakEmbeddedMapsVerificationScope::~NoWeakEmbeddedMapsVerificationScope() { |
| 852 HEAP->no_weak_embedded_maps_verification_scope_depth_--; | 857 Isolate* isolate = Isolate::Current(); |
| | 858 isolate->heap()->no_weak_embedded_maps_verification_scope_depth_--; |
| 853 } | 859 } |
| 854 #endif | 860 #endif |
| 855 | 861 |
| 856 | 862 |
| 857 void VerifyPointersVisitor::VisitPointers(Object** start, Object** end) { | 863 void VerifyPointersVisitor::VisitPointers(Object** start, Object** end) { |
| 858 for (Object** current = start; current < end; current++) { | 864 for (Object** current = start; current < end; current++) { |
| 859 if ((*current)->IsHeapObject()) { | 865 if ((*current)->IsHeapObject()) { |
| 860 HeapObject* object = HeapObject::cast(*current); | 866 HeapObject* object = HeapObject::cast(*current); |
| 861 CHECK(HEAP->Contains(object)); | 867 CHECK(object->GetIsolate()->heap()->Contains(object)); |
| 862 CHECK(object->map()->IsMap()); | 868 CHECK(object->map()->IsMap()); |
| 863 } | 869 } |
| 864 } | 870 } |
| 865 } | 871 } |
| 866 | 872 |
| 867 | 873 |
| 868 double GCTracer::SizeOfHeapObjects() { | 874 double GCTracer::SizeOfHeapObjects() { |
| 869 return (static_cast<double>(HEAP->SizeOfObjects())) / MB; | 875 return (static_cast<double>(heap_->SizeOfObjects())) / MB; |
| 870 } | 876 } |
| 871 | 877 |
| 872 | 878 |
| 873 DisallowAllocationFailure::DisallowAllocationFailure() { | 879 DisallowAllocationFailure::DisallowAllocationFailure() { |
| 874 #ifdef DEBUG | 880 #ifdef DEBUG |
| 875 old_state_ = HEAP->disallow_allocation_failure_; | 881 Isolate* isolate = Isolate::Current(); |
| 876 HEAP->disallow_allocation_failure_ = true; | 882 old_state_ = isolate->heap()->disallow_allocation_failure_; |
| | 883 isolate->heap()->disallow_allocation_failure_ = true; |
| 877 #endif | 884 #endif |
| 878 } | 885 } |
| 879 | 886 |
| 880 | 887 |
| 881 DisallowAllocationFailure::~DisallowAllocationFailure() { | 888 DisallowAllocationFailure::~DisallowAllocationFailure() { |
| 882 #ifdef DEBUG | 889 #ifdef DEBUG |
| 883 HEAP->disallow_allocation_failure_ = old_state_; | 890 Isolate* isolate = Isolate::Current(); |
| | 891 isolate->heap()->disallow_allocation_failure_ = old_state_; |
| 884 #endif | 892 #endif |
| 885 } | 893 } |
| 886 | 894 |
| 887 | 895 |
| 888 } } // namespace v8::internal | 896 } } // namespace v8::internal |
| 889 | 897 |
| 890 #endif // V8_HEAP_INL_H_ | 898 #endif // V8_HEAP_INL_H_ |