| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 51 matching lines...) |
| 62 RelocateQueueHead(); | 62 RelocateQueueHead(); |
| 63 emergency_stack_->Add(Entry(target, size)); | 63 emergency_stack_->Add(Entry(target, size)); |
| 64 return; | 64 return; |
| 65 } | 65 } |
| 66 } | 66 } |
| 67 | 67 |
| 68 *(--rear_) = reinterpret_cast<intptr_t>(target); | 68 *(--rear_) = reinterpret_cast<intptr_t>(target); |
| 69 *(--rear_) = size; | 69 *(--rear_) = size; |
| 70 // Assert no overflow into live objects. | 70 // Assert no overflow into live objects. |
| 71 #ifdef DEBUG | 71 #ifdef DEBUG |
| 72 SemiSpace::AssertValidRange(HEAP->new_space()->top(), | 72 SemiSpace::AssertValidRange(target->GetIsolate()->heap()->new_space()->top(), |
| 73 reinterpret_cast<Address>(rear_)); | 73 reinterpret_cast<Address>(rear_)); |
| 74 #endif | 74 #endif |
| 75 } | 75 } |
| 76 | 76 |
| 77 | 77 |
| 78 void PromotionQueue::ActivateGuardIfOnTheSamePage() { | 78 void PromotionQueue::ActivateGuardIfOnTheSamePage() { |
| 79 guard_ = guard_ || | 79 guard_ = guard_ || |
| 80 heap_->new_space()->active_space()->current_page()->address() == | 80 heap_->new_space()->active_space()->current_page()->address() == |
| 81 GetHeadPage()->address(); | 81 GetHeadPage()->address(); |
| 82 } | 82 } |
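Editor's note: a minimal standalone sketch (not part of the patch; every type name is invented) of the pattern this change applies throughout heap-inl.h — rather than reaching the heap through the process-global HEAP macro, the heap is derived from something that already knows its isolate, such as the object being operated on or a stored heap_ field.

```cpp
// Standalone illustration of the "no global HEAP" pattern; every type here
// is a simplified stand-in, not the real V8 class of the same name.
#include <cassert>

struct Heap;

struct Isolate {
  explicit Isolate(Heap* heap) : heap_(heap) {}
  Heap* heap() { return heap_; }
 private:
  Heap* heap_;
};

struct Heap {
  bool Contains(const void* address) const {
    return address != nullptr;  // placeholder for a real address-range check
  }
};

struct HeapObject {
  // In this sketch each object carries an explicit back pointer; real V8
  // recovers the isolate from the object's page/address instead.
  explicit HeapObject(Isolate* isolate) : isolate_(isolate) {}
  Isolate* GetIsolate() const { return isolate_; }
 private:
  Isolate* isolate_;
};

// Before the patch this would have asserted against HEAP->Contains(...);
// after it, the heap is reached through the object itself.
void AssertInOwningHeap(HeapObject* target) {
  assert(target->GetIsolate()->heap()->Contains(target));
}
```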
| (...skipping 418 matching lines...) |
| 501 } | 501 } |
| 502 } | 502 } |
| 503 | 503 |
| 504 | 504 |
| 505 void Heap::ScavengePointer(HeapObject** p) { | 505 void Heap::ScavengePointer(HeapObject** p) { |
| 506 ScavengeObject(p, *p); | 506 ScavengeObject(p, *p); |
| 507 } | 507 } |
| 508 | 508 |
| 509 | 509 |
| 510 void Heap::ScavengeObject(HeapObject** p, HeapObject* object) { | 510 void Heap::ScavengeObject(HeapObject** p, HeapObject* object) { |
| 511 ASSERT(HEAP->InFromSpace(object)); | 511 ASSERT(object->GetIsolate()->heap()->InFromSpace(object)); |
| 512 | 512 |
| 513 // We use the first word (where the map pointer usually is) of a heap | 513 // We use the first word (where the map pointer usually is) of a heap |
| 514 // object to record the forwarding pointer. A forwarding pointer can | 514 // object to record the forwarding pointer. A forwarding pointer can |
| 515 // point to an old space, the code space, or the to space of the new | 515 // point to an old space, the code space, or the to space of the new |
| 516 // generation. | 516 // generation. |
| 517 MapWord first_word = object->map_word(); | 517 MapWord first_word = object->map_word(); |
| 518 | 518 |
| 519 // If the first word is a forwarding address, the object has already been | 519 // If the first word is a forwarding address, the object has already been |
| 520 // copied. | 520 // copied. |
| 521 if (first_word.IsForwardingAddress()) { | 521 if (first_word.IsForwardingAddress()) { |
| 522 HeapObject* dest = first_word.ToForwardingAddress(); | 522 HeapObject* dest = first_word.ToForwardingAddress(); |
| 523 ASSERT(HEAP->InFromSpace(*p)); | 523 ASSERT(object->GetIsolate()->heap()->InFromSpace(*p)); |
| 524 *p = dest; | 524 *p = dest; |
| 525 return; | 525 return; |
| 526 } | 526 } |
| 527 | 527 |
| 528 // Call the slow part of scavenge object. | 528 // Call the slow part of scavenge object. |
| 529 return ScavengeObjectSlow(p, object); | 529 return ScavengeObjectSlow(p, object); |
| 530 } | 530 } |
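Editor's note: the comment above describes how a scavenged object's first word (normally the map pointer) doubles as a forwarding pointer once the object has been copied. A hedged, self-contained sketch of that idea, using an invented tagging scheme rather than V8's actual MapWord encoding:

```cpp
// Simplified forwarding-word scheme; the tag bit and types are invented for
// this sketch and do not match V8's real MapWord layout.
#include <cstdint>

struct SimpleObject {
  uintptr_t map_word;  // either a map pointer or a tagged forwarding address
};

constexpr uintptr_t kForwardingTag = 0x1;

inline bool IsForwardingAddress(uintptr_t word) {
  return (word & kForwardingTag) != 0;
}

inline SimpleObject* ToForwardingAddress(uintptr_t word) {
  return reinterpret_cast<SimpleObject*>(word & ~kForwardingTag);
}

inline void SetForwardingAddress(SimpleObject* from, SimpleObject* to) {
  from->map_word = reinterpret_cast<uintptr_t>(to) | kForwardingTag;
}

// Fast path of a scavenge visitor: if the object was already evacuated, just
// follow the forwarding word; otherwise a slow path would copy the object and
// install a forwarding word for later visitors.
inline void UpdateSlot(SimpleObject** slot) {
  SimpleObject* object = *slot;
  if (IsForwardingAddress(object->map_word)) {
    *slot = ToForwardingAddress(object->map_word);
  }
}
```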
| 531 | 531 |
| 532 | 532 |
| 533 MaybeObject* Heap::AllocateEmptyJSArrayWithAllocationSite( | 533 MaybeObject* Heap::AllocateEmptyJSArrayWithAllocationSite( |
| (...skipping 72 matching lines...) |
| 606 } | 606 } |
| 607 | 607 |
| 608 | 608 |
| 609 Isolate* Heap::isolate() { | 609 Isolate* Heap::isolate() { |
| 610 return reinterpret_cast<Isolate*>(reinterpret_cast<intptr_t>(this) - | 610 return reinterpret_cast<Isolate*>(reinterpret_cast<intptr_t>(this) - |
| 611 reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(4)->heap()) + 4); | 611 reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(4)->heap()) + 4); |
| 612 } | 612 } |
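Editor's note: Heap::isolate() above recovers the owning Isolate from the Heap's own address by subtracting the offset of the heap member inside Isolate. The literal 4 stands in for a non-null dummy base address: calling heap() on a fake Isolate at address 4 yields 4 plus the member offset, and subtracting 4 again leaves just the offset. A small worked example of the same container-from-member arithmetic, with made-up types and offsetof() used for clarity (assuming a standard-layout container):

```cpp
// Recovering a container's address from an embedded member's address.
// DemoHeap/DemoIsolate are invented for this sketch.
#include <cassert>
#include <cstddef>
#include <cstdint>

struct DemoIsolate;

struct DemoHeap {
  int scratch = 0;
  DemoIsolate* isolate();  // defined below, once DemoIsolate is complete
};

struct DemoIsolate {
  long some_earlier_field = 0;  // anything that precedes the embedded heap
  DemoHeap heap_;               // the heap is embedded by value
};

DemoIsolate* DemoHeap::isolate() {
  // Same arithmetic as Heap::isolate(): this minus the member's offset.
  return reinterpret_cast<DemoIsolate*>(
      reinterpret_cast<uintptr_t>(this) - offsetof(DemoIsolate, heap_));
}

int main() {
  DemoIsolate isolate;
  assert(isolate.heap_.isolate() == &isolate);  // round trip recovers the owner
  return 0;
}
```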
| 613 | 613 |
| 614 | 614 |
| 615 #ifdef DEBUG | 615 #ifdef DEBUG |
| 616 #define GC_GREEDY_CHECK() \ | 616 #define GC_GREEDY_CHECK(ISOLATE) \ |
| 617 if (FLAG_gc_greedy) HEAP->GarbageCollectionGreedyCheck() | 617 if (FLAG_gc_greedy) (ISOLATE)->heap()->GarbageCollectionGreedyCheck() |
| 618 #else | 618 #else |
| 619 #define GC_GREEDY_CHECK() { } | 619 #define GC_GREEDY_CHECK(ISOLATE) { } |
| 620 #endif | 620 #endif |
| 621 | 621 |
| 622 // Calls the FUNCTION_CALL function and retries it up to three times | 622 // Calls the FUNCTION_CALL function and retries it up to three times |
| 623 // to guarantee that any allocations performed during the call will | 623 // to guarantee that any allocations performed during the call will |
| 624 // succeed if there's enough memory. | 624 // succeed if there's enough memory. |
| 625 | 625 |
| 626 // Warning: Do not use the identifiers __object__, __maybe_object__ or | 626 // Warning: Do not use the identifiers __object__, __maybe_object__ or |
| 627 // __scope__ in a call to this macro. | 627 // __scope__ in a call to this macro. |
| 628 | 628 |
| 629 #define CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY, OOM)\ | 629 #define CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY, OOM)\ |
| 630 do { \ | 630 do { \ |
| 631 GC_GREEDY_CHECK(); \ | 631 GC_GREEDY_CHECK(ISOLATE); \ |
| 632 MaybeObject* __maybe_object__ = FUNCTION_CALL; \ | 632 MaybeObject* __maybe_object__ = FUNCTION_CALL; \ |
| 633 Object* __object__ = NULL; \ | 633 Object* __object__ = NULL; \ |
| 634 if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE; \ | 634 if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE; \ |
| 635 if (__maybe_object__->IsOutOfMemory()) { \ | 635 if (__maybe_object__->IsOutOfMemory()) { \ |
| 636 OOM; \ | 636 OOM; \ |
| 637 } \ | 637 } \ |
| 638 if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY; \ | 638 if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY; \ |
| 639 ISOLATE->heap()->CollectGarbage(Failure::cast(__maybe_object__)-> \ | 639 (ISOLATE)->heap()->CollectGarbage(Failure::cast(__maybe_object__)-> \ |
| 640 allocation_space(), \ | 640 allocation_space(), \ |
| 641 "allocation failure"); \ | 641 "allocation failure"); \ |
| 642 __maybe_object__ = FUNCTION_CALL; \ | 642 __maybe_object__ = FUNCTION_CALL; \ |
| 643 if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE; \ | 643 if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE; \ |
| 644 if (__maybe_object__->IsOutOfMemory()) { \ | 644 if (__maybe_object__->IsOutOfMemory()) { \ |
| 645 OOM; \ | 645 OOM; \ |
| 646 } \ | 646 } \ |
| 647 if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY; \ | 647 if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY; \ |
| 648 ISOLATE->counters()->gc_last_resort_from_handles()->Increment(); \ | 648 (ISOLATE)->counters()->gc_last_resort_from_handles()->Increment(); \ |
| 649 ISOLATE->heap()->CollectAllAvailableGarbage("last resort gc"); \ | 649 (ISOLATE)->heap()->CollectAllAvailableGarbage("last resort gc"); \ |
| 650 { \ | 650 { \ |
| 651 AlwaysAllocateScope __scope__; \ | 651 AlwaysAllocateScope __scope__; \ |
| 652 __maybe_object__ = FUNCTION_CALL; \ | 652 __maybe_object__ = FUNCTION_CALL; \ |
| 653 } \ | 653 } \ |
| 654 if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE; \ | 654 if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE; \ |
| 655 if (__maybe_object__->IsOutOfMemory()) { \ | 655 if (__maybe_object__->IsOutOfMemory()) { \ |
| 656 OOM; \ | 656 OOM; \ |
| 657 } \ | 657 } \ |
| 658 if (__maybe_object__->IsRetryAfterGC()) { \ | 658 if (__maybe_object__->IsRetryAfterGC()) { \ |
| 659 /* TODO(1181417): Fix this. */ \ | 659 /* TODO(1181417): Fix this. */ \ |
| (...skipping 52 matching lines...) |
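Editor's note: the CALL_AND_RETRY macro above is easier to follow as straight-line code. A hedged sketch of its control flow as an ordinary function, with invented names; it mirrors only the retry policy (try, GC and retry, last-resort GC and retry), not the macro's exact failure objects or the AlwaysAllocateScope it opens for the final attempt:

```cpp
// Shape of the allocate/GC/retry policy encoded by CALL_AND_RETRY, written
// as a function. AllocStatus and the callback names are invented.
#include <optional>

enum class AllocStatus { kOk, kRetryAfterGC, kFailed };

struct AllocAttempt {
  AllocStatus status;
  void* object;  // valid only when status == kOk
};

template <typename AllocFn, typename GcFn, typename LastResortGcFn>
std::optional<void*> AllocateWithRetry(AllocFn try_allocate,
                                       GcFn collect_garbage,
                                       LastResortGcFn collect_all_garbage) {
  AllocAttempt attempt = try_allocate();
  if (attempt.status == AllocStatus::kOk) return attempt.object;
  if (attempt.status != AllocStatus::kRetryAfterGC) return std::nullopt;

  collect_garbage();               // normal GC, then one retry
  attempt = try_allocate();
  if (attempt.status == AllocStatus::kOk) return attempt.object;
  if (attempt.status != AllocStatus::kRetryAfterGC) return std::nullopt;

  collect_all_garbage();           // last resort: collect everything available;
                                   // the macro also opens AlwaysAllocateScope here
  attempt = try_allocate();
  if (attempt.status == AllocStatus::kOk) return attempt.object;
  return std::nullopt;             // still failing: the macro reports fatal OOM
}
```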
| 712 } | 712 } |
| 713 | 713 |
| 714 | 714 |
| 715 // Verify() is inline to avoid ifdef-s around its calls in release | 715 // Verify() is inline to avoid ifdef-s around its calls in release |
| 716 // mode. | 716 // mode. |
| 717 void ExternalStringTable::Verify() { | 717 void ExternalStringTable::Verify() { |
| 718 #ifdef DEBUG | 718 #ifdef DEBUG |
| 719 for (int i = 0; i < new_space_strings_.length(); ++i) { | 719 for (int i = 0; i < new_space_strings_.length(); ++i) { |
| 720 Object* obj = Object::cast(new_space_strings_[i]); | 720 Object* obj = Object::cast(new_space_strings_[i]); |
| 721 ASSERT(heap_->InNewSpace(obj)); | 721 ASSERT(heap_->InNewSpace(obj)); |
| 722 ASSERT(obj != HEAP->the_hole_value()); | 722 ASSERT(obj != heap_->the_hole_value()); |
| 723 } | 723 } |
| 724 for (int i = 0; i < old_space_strings_.length(); ++i) { | 724 for (int i = 0; i < old_space_strings_.length(); ++i) { |
| 725 Object* obj = Object::cast(old_space_strings_[i]); | 725 Object* obj = Object::cast(old_space_strings_[i]); |
| 726 ASSERT(!heap_->InNewSpace(obj)); | 726 ASSERT(!heap_->InNewSpace(obj)); |
| 727 ASSERT(obj != HEAP->the_hole_value()); | 727 ASSERT(obj != heap_->the_hole_value()); |
| 728 } | 728 } |
| 729 #endif | 729 #endif |
| 730 } | 730 } |
| 731 | 731 |
| 732 | 732 |
| 733 void ExternalStringTable::AddOldString(String* string) { | 733 void ExternalStringTable::AddOldString(String* string) { |
| 734 ASSERT(string->IsExternalString()); | 734 ASSERT(string->IsExternalString()); |
| 735 ASSERT(!heap_->InNewSpace(string)); | 735 ASSERT(!heap_->InNewSpace(string)); |
| 736 old_space_strings_.Add(string); | 736 old_space_strings_.Add(string); |
| 737 } | 737 } |
| (...skipping 86 matching lines...) |
| 824 elements_[hash].output = heap_number; | 824 elements_[hash].output = heap_number; |
| 825 return heap_number; | 825 return heap_number; |
| 826 } | 826 } |
| 827 | 827 |
| 828 | 828 |
| 829 AlwaysAllocateScope::AlwaysAllocateScope() { | 829 AlwaysAllocateScope::AlwaysAllocateScope() { |
| 830 // We shouldn't hit any nested scopes, because that requires | 830 // We shouldn't hit any nested scopes, because that requires |
| 831 // non-handle code to call handle code. The code still works but | 831 // non-handle code to call handle code. The code still works but |
| 832 // performance will degrade, so we want to catch this situation | 832 // performance will degrade, so we want to catch this situation |
| 833 // in debug mode. | 833 // in debug mode. |
| 834 ASSERT(HEAP->always_allocate_scope_depth_ == 0); | 834 Isolate* isolate = Isolate::Current(); |
| 835 HEAP->always_allocate_scope_depth_++; | 835 ASSERT(isolate->heap()->always_allocate_scope_depth_ == 0); |
| | 836 isolate->heap()->always_allocate_scope_depth_++; |
| 836 } | 837 } |
| 837 | 838 |
| 838 | 839 |
| 839 AlwaysAllocateScope::~AlwaysAllocateScope() { | 840 AlwaysAllocateScope::~AlwaysAllocateScope() { |
| 840 HEAP->always_allocate_scope_depth_--; | 841 Isolate* isolate = Isolate::Current(); |
| 841 ASSERT(HEAP->always_allocate_scope_depth_ == 0); | 842 isolate->heap()->always_allocate_scope_depth_--; |
| | 843 ASSERT(isolate->heap()->always_allocate_scope_depth_ == 0); |
| 842 } | 844 } |
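Editor's note: the scope classes in this file (AlwaysAllocateScope, NoWeakEmbeddedMapsVerificationScope, DisallowAllocationFailure) all take the same RAII shape after the patch: resolve the current isolate in the constructor, bump or save a per-heap field, and undo it in the destructor. A compact sketch of that shape with invented names; unlike the patch, it caches the heap pointer instead of calling Isolate::Current() again in the destructor, which is a small variation and not V8's code:

```cpp
// RAII depth-counter scope over a per-isolate heap field; all names invented.
#include <cassert>

struct DemoHeapState {
  int always_allocate_scope_depth = 0;
};

struct DemoIsolateState {
  static DemoIsolateState* Current() {   // stand-in for Isolate::Current()
    static DemoIsolateState instance;
    return &instance;
  }
  DemoHeapState* heap() { return &heap_; }
 private:
  DemoHeapState heap_;
};

class DemoAlwaysAllocateScope {
 public:
  DemoAlwaysAllocateScope() : heap_(DemoIsolateState::Current()->heap()) {
    // Nested scopes are unexpected (see the comment in the patch above).
    assert(heap_->always_allocate_scope_depth == 0);
    heap_->always_allocate_scope_depth++;
  }
  ~DemoAlwaysAllocateScope() {
    heap_->always_allocate_scope_depth--;
    assert(heap_->always_allocate_scope_depth == 0);
  }
 private:
  DemoHeapState* heap_;  // cached in the constructor
};
```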
| 843 | 845 |
| 844 | 846 |
| 845 #ifdef VERIFY_HEAP | 847 #ifdef VERIFY_HEAP |
| 846 NoWeakEmbeddedMapsVerificationScope::NoWeakEmbeddedMapsVerificationScope() { | 848 NoWeakEmbeddedMapsVerificationScope::NoWeakEmbeddedMapsVerificationScope() { |
| 847 HEAP->no_weak_embedded_maps_verification_scope_depth_++; | 849 Isolate* isolate = Isolate::Current(); |
| | 850 isolate->heap()->no_weak_embedded_maps_verification_scope_depth_++; |
| 848 } | 851 } |
| 849 | 852 |
| 850 | 853 |
| 851 NoWeakEmbeddedMapsVerificationScope::~NoWeakEmbeddedMapsVerificationScope() { | 854 NoWeakEmbeddedMapsVerificationScope::~NoWeakEmbeddedMapsVerificationScope() { |
| 852 HEAP->no_weak_embedded_maps_verification_scope_depth_--; | 855 Isolate* isolate = Isolate::Current(); |
| | 856 isolate->heap()->no_weak_embedded_maps_verification_scope_depth_--; |
| 853 } | 857 } |
| 854 #endif | 858 #endif |
| 855 | 859 |
| 856 | 860 |
| 857 void VerifyPointersVisitor::VisitPointers(Object** start, Object** end) { | 861 void VerifyPointersVisitor::VisitPointers(Object** start, Object** end) { |
| 858 for (Object** current = start; current < end; current++) { | 862 for (Object** current = start; current < end; current++) { |
| 859 if ((*current)->IsHeapObject()) { | 863 if ((*current)->IsHeapObject()) { |
| 860 HeapObject* object = HeapObject::cast(*current); | 864 HeapObject* object = HeapObject::cast(*current); |
| 861 CHECK(HEAP->Contains(object)); | 865 CHECK(object->GetIsolate()->heap()->Contains(object)); |
| 862 CHECK(object->map()->IsMap()); | 866 CHECK(object->map()->IsMap()); |
| 863 } | 867 } |
| 864 } | 868 } |
| 865 } | 869 } |
| 866 | 870 |
| 867 | 871 |
| 868 double GCTracer::SizeOfHeapObjects() { | 872 double GCTracer::SizeOfHeapObjects() { |
| 869 return (static_cast<double>(HEAP->SizeOfObjects())) / MB; | 873 return (static_cast<double>(heap_->SizeOfObjects())) / MB; |
| 870 } | 874 } |
| 871 | 875 |
| 872 | 876 |
| 873 DisallowAllocationFailure::DisallowAllocationFailure() { | 877 DisallowAllocationFailure::DisallowAllocationFailure() { |
| 874 #ifdef DEBUG | 878 #ifdef DEBUG |
| 875 old_state_ = HEAP->disallow_allocation_failure_; | 879 Isolate* isolate = Isolate::Current(); |
| 876 HEAP->disallow_allocation_failure_ = true; | 880 old_state_ = isolate->heap()->disallow_allocation_failure_; |
| | 881 isolate->heap()->disallow_allocation_failure_ = true; |
| 877 #endif | 882 #endif |
| 878 } | 883 } |
| 879 | 884 |
| 880 | 885 |
| 881 DisallowAllocationFailure::~DisallowAllocationFailure() { | 886 DisallowAllocationFailure::~DisallowAllocationFailure() { |
| 882 #ifdef DEBUG | 887 #ifdef DEBUG |
| 883 HEAP->disallow_allocation_failure_ = old_state_; | 888 Isolate* isolate = Isolate::Current(); |
| | 889 isolate->heap()->disallow_allocation_failure_ = old_state_; |
| 884 #endif | 890 #endif |
| 885 } | 891 } |
| 886 | 892 |
| 887 | 893 |
| 888 } } // namespace v8::internal | 894 } } // namespace v8::internal |
| 889 | 895 |
| 890 #endif // V8_HEAP_INL_H_ | 896 #endif // V8_HEAP_INL_H_ |