OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 9768 matching lines...)
9779 } | 9779 } |
9780 | 9780 |
9781 | 9781 |
9782 // Allocate a block of memory in the given space (filled with a filler). | 9782 // Allocate a block of memory in the given space (filled with a filler). |
9783 // Used as a fall-back for generated code when the space is full. | 9783 // Used as a fall-back for generated code when the space is full. |
9784 static MaybeObject* Allocate(Isolate* isolate, | 9784 static MaybeObject* Allocate(Isolate* isolate, |
9785 int size, | 9785 int size, |
9786 bool double_align, | 9786 bool double_align, |
9787 AllocationSpace space) { | 9787 AllocationSpace space) { |
9788 Heap* heap = isolate->heap(); | 9788 Heap* heap = isolate->heap(); |
9789 if (double_align) size += kPointerSize; | |
9790 RUNTIME_ASSERT(IsAligned(size, kPointerSize)); | 9789 RUNTIME_ASSERT(IsAligned(size, kPointerSize)); |
9791 RUNTIME_ASSERT(size > 0); | 9790 RUNTIME_ASSERT(size > 0); |
9792 RUNTIME_ASSERT(size <= heap->MaxRegularSpaceAllocationSize()); | 9791 RUNTIME_ASSERT(size <= heap->MaxRegularSpaceAllocationSize()); |
9793 HeapObject* allocation; | 9792 HeapObject* allocation; |
9794 { MaybeObject* maybe_allocation = heap->AllocateRaw(size, space, space); | 9793 { MaybeObject* maybe_allocation = heap->AllocateRaw(size, space, space); |
9795 if (!maybe_allocation->To(&allocation)) return maybe_allocation; | 9794 if (!maybe_allocation->To(&allocation)) return maybe_allocation; |
9796 } | 9795 } |
9797 #ifdef DEBUG | 9796 #ifdef DEBUG |
9798 MemoryChunk* chunk = MemoryChunk::FromAddress(allocation->address()); | 9797 MemoryChunk* chunk = MemoryChunk::FromAddress(allocation->address()); |
9799 ASSERT(chunk->owner()->identity() == space); | 9798 ASSERT(chunk->owner()->identity() == space); |
9800 #endif | 9799 #endif |
9801 if (double_align) { | |
9802 allocation = heap->EnsureDoubleAligned(allocation, size); | |
9803 } | |
9804 heap->CreateFillerObjectAt(allocation->address(), size); | 9800 heap->CreateFillerObjectAt(allocation->address(), size); |
9805 return allocation; | 9801 return allocation; |
9806 } | 9802 } |
9807 | 9803 |
9808 | 9804 |
9809 RUNTIME_FUNCTION(MaybeObject*, Runtime_AllocateInNewSpace) { | 9805 RUNTIME_FUNCTION(MaybeObject*, Runtime_AllocateInNewSpace) { |
9810 SealHandleScope shs(isolate); | 9806 SealHandleScope shs(isolate); |
9811 ASSERT(args.length() == 1); | 9807 ASSERT(args.length() == 1); |
9812 CONVERT_SMI_ARG_CHECKED(size, 0); | 9808 CONVERT_SMI_ARG_CHECKED(size, 0); |
9813 return Allocate(isolate, size, false, NEW_SPACE); | 9809 return Allocate(isolate, size, false, NEW_SPACE); |
(...skipping 5089 matching lines...)
14903 // Handle last resort GC and make sure to allow future allocations | 14899 // Handle last resort GC and make sure to allow future allocations |
14904 // to grow the heap without causing GCs (if possible). | 14900 // to grow the heap without causing GCs (if possible). |
14905 isolate->counters()->gc_last_resort_from_js()->Increment(); | 14901 isolate->counters()->gc_last_resort_from_js()->Increment(); |
14906 isolate->heap()->CollectAllGarbage(Heap::kNoGCFlags, | 14902 isolate->heap()->CollectAllGarbage(Heap::kNoGCFlags, |
14907 "Runtime::PerformGC"); | 14903 "Runtime::PerformGC"); |
14908 } | 14904 } |
14909 } | 14905 } |
14910 | 14906 |
14911 | 14907 |
14912 } } // namespace v8::internal | 14908 } } // namespace v8::internal |