| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 205 matching lines...) |
| 216 | 216 |
| 217 MaybeObject* Heap::AllocateRaw(int size_in_bytes, | 217 MaybeObject* Heap::AllocateRaw(int size_in_bytes, |
| 218 AllocationSpace space, | 218 AllocationSpace space, |
| 219 AllocationSpace retry_space) { | 219 AllocationSpace retry_space) { |
| 220 ASSERT(AllowHandleAllocation::IsAllowed()); | 220 ASSERT(AllowHandleAllocation::IsAllowed()); |
| 221 ASSERT(AllowHeapAllocation::IsAllowed()); | 221 ASSERT(AllowHeapAllocation::IsAllowed()); |
| 222 ASSERT(gc_state_ == NOT_IN_GC); | 222 ASSERT(gc_state_ == NOT_IN_GC); |
| 223 HeapProfiler* profiler = isolate_->heap_profiler(); | 223 HeapProfiler* profiler = isolate_->heap_profiler(); |
| 224 #ifdef DEBUG | 224 #ifdef DEBUG |
| 225 if (FLAG_gc_interval >= 0 && | 225 if (FLAG_gc_interval >= 0 && |
| 226 !disallow_allocation_failure_ && | 226 AllowAllocationFailure::IsAllowed(isolate_) && |
| 227 Heap::allocation_timeout_-- <= 0) { | 227 Heap::allocation_timeout_-- <= 0) { |
| 228 return Failure::RetryAfterGC(space); | 228 return Failure::RetryAfterGC(space); |
| 229 } | 229 } |
| 230 isolate_->counters()->objs_since_last_full()->Increment(); | 230 isolate_->counters()->objs_since_last_full()->Increment(); |
| 231 isolate_->counters()->objs_since_last_young()->Increment(); | 231 isolate_->counters()->objs_since_last_young()->Increment(); |
| 232 #endif | 232 #endif |
| 233 | 233 |
| 234 HeapObject* object; | 234 HeapObject* object; |
| 235 MaybeObject* result; | 235 MaybeObject* result; |
| 236 if (NEW_SPACE == space) { | 236 if (NEW_SPACE == space) { |
| (...skipping 419 matching lines...) |
| 656 "allocation failure"); \ | 656 "allocation failure"); \ |
| 657 __maybe_object__ = FUNCTION_CALL; \ | 657 __maybe_object__ = FUNCTION_CALL; \ |
| 658 if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE; \ | 658 if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE; \ |
| 659 if (__maybe_object__->IsOutOfMemory()) { \ | 659 if (__maybe_object__->IsOutOfMemory()) { \ |
| 660 OOM; \ | 660 OOM; \ |
| 661 } \ | 661 } \ |
| 662 if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY; \ | 662 if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY; \ |
| 663 (ISOLATE)->counters()->gc_last_resort_from_handles()->Increment(); \ | 663 (ISOLATE)->counters()->gc_last_resort_from_handles()->Increment(); \ |
| 664 (ISOLATE)->heap()->CollectAllAvailableGarbage("last resort gc"); \ | 664 (ISOLATE)->heap()->CollectAllAvailableGarbage("last resort gc"); \ |
| 665 { \ | 665 { \ |
| 666 AlwaysAllocateScope __scope__; \ | 666 AlwaysAllocateScope __scope__(ISOLATE); \ |
| 667 __maybe_object__ = FUNCTION_CALL; \ | 667 __maybe_object__ = FUNCTION_CALL; \ |
| 668 } \ | 668 } \ |
| 669 if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE; \ | 669 if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE; \ |
| 670 if (__maybe_object__->IsOutOfMemory()) { \ | 670 if (__maybe_object__->IsOutOfMemory()) { \ |
| 671 OOM; \ | 671 OOM; \ |
| 672 } \ | 672 } \ |
| 673 if (__maybe_object__->IsRetryAfterGC()) { \ | 673 if (__maybe_object__->IsRetryAfterGC()) { \ |
| 674 /* TODO(1181417): Fix this. */ \ | 674 /* TODO(1181417): Fix this. */ \ |
| 675 v8::internal::Heap::FatalProcessOutOfMemory("CALL_AND_RETRY_LAST", true);\ | 675 v8::internal::Heap::FatalProcessOutOfMemory("CALL_AND_RETRY_LAST", true);\ |
| 676 } \ | 676 } \ |
| (...skipping 94 matching lines...) |
| 771 return condition ? true_value() : false_value(); | 771 return condition ? true_value() : false_value(); |
| 772 } | 772 } |
| 773 | 773 |
| 774 | 774 |
| 775 void Heap::CompletelyClearInstanceofCache() { | 775 void Heap::CompletelyClearInstanceofCache() { |
| 776 set_instanceof_cache_map(the_hole_value()); | 776 set_instanceof_cache_map(the_hole_value()); |
| 777 set_instanceof_cache_function(the_hole_value()); | 777 set_instanceof_cache_function(the_hole_value()); |
| 778 } | 778 } |
| 779 | 779 |
| 780 | 780 |
| 781 AlwaysAllocateScope::AlwaysAllocateScope() { | 781 AlwaysAllocateScope::AlwaysAllocateScope(Isolate* isolate) |
| | 782 : heap_(isolate->heap()), daf_(isolate) { |
| 782 // We shouldn't hit any nested scopes, because that requires | 783 // We shouldn't hit any nested scopes, because that requires |
| 783 // non-handle code to call handle code. The code still works but | 784 // non-handle code to call handle code. The code still works but |
| 784 // performance will degrade, so we want to catch this situation | 785 // performance will degrade, so we want to catch this situation |
| 785 // in debug mode. | 786 // in debug mode. |
| 786 Isolate* isolate = Isolate::Current(); | 787 ASSERT(heap_->always_allocate_scope_depth_ == 0); |
| 787 ASSERT(isolate->heap()->always_allocate_scope_depth_ == 0); | 788 heap_->always_allocate_scope_depth_++; |
| 788 isolate->heap()->always_allocate_scope_depth_++; | |
| 789 } | 789 } |
| 790 | 790 |
| 791 | 791 |
| 792 AlwaysAllocateScope::~AlwaysAllocateScope() { | 792 AlwaysAllocateScope::~AlwaysAllocateScope() { |
| 793 Isolate* isolate = Isolate::Current(); | 793 heap_->always_allocate_scope_depth_--; |
| 794 isolate->heap()->always_allocate_scope_depth_--; | 794 ASSERT(heap_->always_allocate_scope_depth_ == 0); |
| 795 ASSERT(isolate->heap()->always_allocate_scope_depth_ == 0); | |
| 796 } | 795 } |
| 797 | 796 |
| 798 | 797 |
| 799 #ifdef VERIFY_HEAP | 798 #ifdef VERIFY_HEAP |
| 800 NoWeakObjectVerificationScope::NoWeakObjectVerificationScope() { | 799 NoWeakObjectVerificationScope::NoWeakObjectVerificationScope() { |
| 801 Isolate* isolate = Isolate::Current(); | 800 Isolate* isolate = Isolate::Current(); |
| 802 isolate->heap()->no_weak_object_verification_scope_depth_++; | 801 isolate->heap()->no_weak_object_verification_scope_depth_++; |
| 803 } | 802 } |
| 804 | 803 |
| 805 | 804 |
| (...skipping 20 matching lines...) |
| 826 CHECK((*current)->IsSmi()); | 825 CHECK((*current)->IsSmi()); |
| 827 } | 826 } |
| 828 } | 827 } |
| 829 | 828 |
| 830 | 829 |
| 831 double GCTracer::SizeOfHeapObjects() { | 830 double GCTracer::SizeOfHeapObjects() { |
| 832 return (static_cast<double>(heap_->SizeOfObjects())) / MB; | 831 return (static_cast<double>(heap_->SizeOfObjects())) / MB; |
| 833 } | 832 } |
| 834 | 833 |
| 835 | 834 |
| 836 DisallowAllocationFailure::DisallowAllocationFailure() { | |
| 837 #ifdef DEBUG | |
| 838 Isolate* isolate = Isolate::Current(); | |
| 839 old_state_ = isolate->heap()->disallow_allocation_failure_; | |
| 840 isolate->heap()->disallow_allocation_failure_ = true; | |
| 841 #endif | |
| 842 } | |
| 843 | |
| 844 | |
| 845 DisallowAllocationFailure::~DisallowAllocationFailure() { | |
| 846 #ifdef DEBUG | |
| 847 Isolate* isolate = Isolate::Current(); | |
| 848 isolate->heap()->disallow_allocation_failure_ = old_state_; | |
| 849 #endif | |
| 850 } | |
| 851 | |
| 852 | |
| 853 } } // namespace v8::internal | 835 } } // namespace v8::internal |
| 854 | 836 |
| 855 #endif // V8_HEAP_INL_H_ | 837 #endif // V8_HEAP_INL_H_ |
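For context, here is the shape of this refactor reduced to a standalone sketch. The `Heap` and `Isolate` structs below are illustrative stand-ins, not V8's real types, and the sketch omits the `daf_` member the patched constructor also initializes. The point it demonstrates: the RAII scope that used to locate its heap through the thread-local `Isolate::Current()` lookup in both its constructor and destructor now receives the `Isolate*` explicitly from the caller and caches the `Heap*`, which is what lets the `CALL_AND_RETRY` macro above write `AlwaysAllocateScope __scope__(ISOLATE)`.

```cpp
#include <cassert>

// Illustrative stand-ins for V8's internal types; not the real classes.
struct Heap {
  int always_allocate_scope_depth_ = 0;
};

struct Isolate {
  Heap heap_;
  Heap* heap() { return &heap_; }
};

// After this patch, the scope takes the Isolate explicitly and caches the
// Heap*, instead of calling the TLS-based Isolate::Current() twice.
class AlwaysAllocateScope {
 public:
  explicit AlwaysAllocateScope(Isolate* isolate) : heap_(isolate->heap()) {
    // Nested scopes would still work but signal a performance problem
    // (non-handle code calling handle code), so debug builds assert the
    // depth is zero on entry, mirroring the ASSERT in the diff.
    assert(heap_->always_allocate_scope_depth_ == 0);
    heap_->always_allocate_scope_depth_++;
  }
  ~AlwaysAllocateScope() {
    heap_->always_allocate_scope_depth_--;
    assert(heap_->always_allocate_scope_depth_ == 0);
  }

 private:
  Heap* heap_;
};

int main() {
  Isolate isolate;
  {
    // The caller now supplies the isolate, making the dependency explicit.
    AlwaysAllocateScope scope(&isolate);
    // ... allocations that must not fail with RetryAfterGC would go here ...
  }
  return 0;
}
```

The same pattern motivates dropping the `Isolate::Current()`-based `DisallowAllocationFailure` constructor and destructor from this header: the debug check in `Heap::AllocateRaw` now goes through `AllowAllocationFailure::IsAllowed(isolate_)`, with the isolate passed explicitly rather than fetched from thread-local storage.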