| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 9747 matching lines...) |
| 9758 | 9758 |
| 9759 // Allocate a block of memory in the given space (filled with a filler). | 9759 // Allocate a block of memory in the given space (filled with a filler). |
| 9760 // Used as a fall-back for generated code when the space is full. | 9760 // Used as a fall-back for generated code when the space is full. |
| 9761 static MaybeObject* Allocate(Isolate* isolate, | 9761 static MaybeObject* Allocate(Isolate* isolate, |
| 9762 int size, | 9762 int size, |
| 9763 bool double_align, | 9763 bool double_align, |
| 9764 AllocationSpace space) { | 9764 AllocationSpace space) { |
| 9765 Heap* heap = isolate->heap(); | 9765 Heap* heap = isolate->heap(); |
| 9766 RUNTIME_ASSERT(IsAligned(size, kPointerSize)); | 9766 RUNTIME_ASSERT(IsAligned(size, kPointerSize)); |
| 9767 RUNTIME_ASSERT(size > 0); | 9767 RUNTIME_ASSERT(size > 0); |
| 9768 RUNTIME_ASSERT(size <= heap->MaxRegularSpaceAllocationSize()); | 9768 RUNTIME_ASSERT(size <= Page::kMaxRegularHeapObjectSize); |
| 9769 HeapObject* allocation; | 9769 HeapObject* allocation; |
| 9770 { MaybeObject* maybe_allocation = heap->AllocateRaw(size, space, space); | 9770 { MaybeObject* maybe_allocation = heap->AllocateRaw(size, space, space); |
| 9771 if (!maybe_allocation->To(&allocation)) return maybe_allocation; | 9771 if (!maybe_allocation->To(&allocation)) return maybe_allocation; |
| 9772 } | 9772 } |
| 9773 #ifdef DEBUG | 9773 #ifdef DEBUG |
| 9774 MemoryChunk* chunk = MemoryChunk::FromAddress(allocation->address()); | 9774 MemoryChunk* chunk = MemoryChunk::FromAddress(allocation->address()); |
| 9775 ASSERT(chunk->owner()->identity() == space); | 9775 ASSERT(chunk->owner()->identity() == space); |
| 9776 #endif | 9776 #endif |
| 9777 heap->CreateFillerObjectAt(allocation->address(), size); | 9777 heap->CreateFillerObjectAt(allocation->address(), size); |
| 9778 return allocation; | 9778 return allocation; |
| (...skipping 5120 matching lines...) |
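
Note on the hunk above: the only functional change replaces the heap->MaxRegularSpaceAllocationSize() call with the Page::kMaxRegularHeapObjectSize constant as the upper bound on fall-back allocations; the alignment and positivity guards are unchanged. A minimal, self-contained sketch of that guard pattern follows (all names and the size value are illustrative stand-ins, not V8's actual API):

    #include <cstddef>
    #include <cstdlib>

    // Illustrative stand-ins; V8's real constants live on Page and the heap.
    constexpr std::size_t kPointerSize = sizeof(void*);
    constexpr std::size_t kMaxRegularHeapObjectSize = 64 * 1024;  // assumed value

    // Sketch of the fall-back's guards: reject zero-sized, misaligned, or
    // oversized requests before attempting the raw allocation.
    void* AllocateFallback(std::size_t size) {
      if (size == 0) return nullptr;                         // size > 0
      if (size % kPointerSize != 0) return nullptr;          // pointer-aligned
      if (size > kMaxRegularHeapObjectSize) return nullptr;  // per-page cap
      return std::malloc(size);  // stands in for heap->AllocateRaw(...)
    }
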
| 14899 // Handle last resort GC and make sure to allow future allocations | 14899 // Handle last resort GC and make sure to allow future allocations |
| 14900 // to grow the heap without causing GCs (if possible). | 14900 // to grow the heap without causing GCs (if possible). |
| 14901 isolate->counters()->gc_last_resort_from_js()->Increment(); | 14901 isolate->counters()->gc_last_resort_from_js()->Increment(); |
| 14902 isolate->heap()->CollectAllGarbage(Heap::kNoGCFlags, | 14902 isolate->heap()->CollectAllGarbage(Heap::kNoGCFlags, |
| 14903 "Runtime::PerformGC"); | 14903 "Runtime::PerformGC"); |
| 14904 } | 14904 } |
| 14905 } | 14905 } |
| 14906 | 14906 |
| 14907 | 14907 |
| 14908 } } // namespace v8::internal | 14908 } } // namespace v8::internal |
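
The last-resort collection in the second hunk is the caller-side complement to the fall-back allocator: when even the fall-back fails, Runtime::PerformGC forces a full collection so that the next allocation attempt can succeed. A hedged, self-contained sketch of that allocate/collect/retry shape (Heap here is a minimal stand-in, not v8::internal::Heap):

    #include <cstddef>
    #include <cstdlib>

    // Minimal stand-in for the heap interface; purely illustrative.
    struct Heap {
      void* TryAllocate(std::size_t size) { return std::malloc(size); }
      void CollectAllGarbage(int /*flags*/, const char* /*reason*/) {}
    };

    // Last-resort pattern: on allocation failure, collect everything once,
    // then retry before reporting out-of-memory to the caller.
    void* AllocateWithLastResort(Heap* heap, std::size_t size) {
      if (void* result = heap->TryAllocate(size)) return result;
      heap->CollectAllGarbage(/*kNoGCFlags=*/0, "Runtime::PerformGC");
      return heap->TryAllocate(size);
    }
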