Index: src/heap.cc
diff --git a/src/heap.cc b/src/heap.cc
index 26a005a2e92fad75c28ed582131a6b033f3e05a1..7bf1e53eb3aaa35b5fbfc4cf3a8c3fe5a31196f2 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -67,29 +67,14 @@ namespace internal {
 
 Heap::Heap()
     : isolate_(NULL),
+      code_range_size_(kIs64BitArch ? 512 * MB : 0),
 // semispace_size_ should be a power of 2 and old_generation_size_ should be
 // a multiple of Page::kPageSize.
-#if V8_TARGET_ARCH_X64
-#define LUMP_OF_MEMORY (2 * MB)
-      code_range_size_(512*MB),
-#else
-#define LUMP_OF_MEMORY MB
-      code_range_size_(0),
-#endif
-#if defined(ANDROID) || V8_TARGET_ARCH_MIPS
-      reserved_semispace_size_(4 * Max(LUMP_OF_MEMORY, Page::kPageSize)),
-      max_semispace_size_(4 * Max(LUMP_OF_MEMORY, Page::kPageSize)),
+      reserved_semispace_size_(8 * (kPointerSize / 4) * MB),
+      max_semispace_size_(8 * (kPointerSize / 4) * MB),
       initial_semispace_size_(Page::kPageSize),
-      max_old_generation_size_(192*MB),
-      max_executable_size_(max_old_generation_size_),
-#else
-      reserved_semispace_size_(8 * Max(LUMP_OF_MEMORY, Page::kPageSize)),
-      max_semispace_size_(8 * Max(LUMP_OF_MEMORY, Page::kPageSize)),
-      initial_semispace_size_(Page::kPageSize),
-      max_old_generation_size_(700ul * LUMP_OF_MEMORY),
-      max_executable_size_(256l * LUMP_OF_MEMORY),
-#endif
-
+      max_old_generation_size_(700ul * (kPointerSize / 4) * MB),
+      max_executable_size_(256ul * (kPointerSize / 4) * MB),
 // Variables set based on semispace_size_ and old_generation_size_ in
 // ConfigureHeap (survived_since_last_expansion_, external_allocation_limit_)
 // Will be 4 * reserved_semispace_size_ to ensure that young
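
The new initializers above replace three per-architecture #ifdef branches with arithmetic on the pointer width: kPointerSize is 4 on 32-bit targets and 8 on 64-bit targets, so (kPointerSize / 4) scales each limit by 1 or 2. Note that the ANDROID/MIPS special case (half-size semispaces and a 192 MB old generation) is dropped entirely. A quick standalone sanity check of the resulting limits (not part of the patch; kPointerSize here merely mirrors V8's constant):

    #include <cstdio>

    const int kPointerSize = sizeof(void*);   // 4 on 32-bit, 8 on 64-bit targets

    int main() {
      const int scale = kPointerSize / 4;     // 1 on 32-bit, 2 on 64-bit
      std::printf("reserved/max semispace: %4d MB\n",   8 * scale);
      std::printf("max old generation:     %4d MB\n", 700 * scale);
      std::printf("max executable:         %4d MB\n", 256 * scale);
      // kPointerSize == 8 is a stand-in for kIs64BitArch from the patch.
      std::printf("code range:             %4d MB\n", kPointerSize == 8 ? 512 : 0);
      return 0;
    }

On 64-bit this prints 16, 1400, 512 and 512 MB; on 32-bit, 8, 700, 256 and 0 MB, matching the old non-Android defaults.
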
@@ -170,6 +155,9 @@ Heap::Heap()
   max_semispace_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE;
 #endif
 
+  // Ensure old_generation_size_ is a multiple of kPageSize.
+  ASSERT(MB >= Page::kPageSize);
+
   intptr_t max_virtual = OS::MaxVirtualMemory();
 
   if (max_virtual > 0) {
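
The ASSERT added here is the only enforcement of the constraint stated in the constructor's comment: every limit above is a whole number of MB, and since MB and Page::kPageSize are both powers of two, MB >= Page::kPageSize implies that Page::kPageSize divides MB, which makes each MB-denominated limit a page-size multiple. Spelled out as extra checks (illustrative only, not in the patch; IsPowerOf2 is assumed from V8's utils):

    ASSERT(IsPowerOf2(Page::kPageSize));    // a page size is a power of two
    ASSERT(MB % Page::kPageSize == 0);      // follows from MB >= Page::kPageSize
    ASSERT(max_old_generation_size_ % Page::kPageSize == 0);
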
@@ -608,6 +596,11 @@ void Heap::CollectAllAvailableGarbage(const char* gc_reason) {
   // Note: as weak callbacks can execute arbitrary code, we cannot
   // hope that eventually there will be no weak callbacks invocations.
   // Therefore stop recollecting after several attempts.
+  if (FLAG_concurrent_recompilation) {
+    // The optimizing compiler may be unnecessarily holding on to memory.
+    DisallowHeapAllocation no_recursive_gc;
+    isolate()->optimizing_compiler_thread()->Flush();
+  }
   mark_compact_collector()->SetFlags(kMakeHeapIterableMask |
                                      kReduceMemoryFootprintMask);
   isolate_->compilation_cache()->Clear();
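
Flushing the queue of pending recompilation jobs before the last-resort collections releases whatever those jobs were keeping alive. The DisallowHeapAllocation scope is V8's assertion-scope idiom: while no_recursive_gc is live, any attempt to allocate on the heap fails a debug check, so Flush() provably cannot trigger a nested GC here. A minimal sketch of the idiom (hypothetical; V8's real implementation is per-thread and compiled out in release builds):

    // Hypothetical sketch of an allocation assertion scope.
    class NoAllocationScope {
     public:
      NoAllocationScope()  { ++depth_; }
      ~NoAllocationScope() { --depth_; }
      static bool IsAllowed() { return depth_ == 0; }
     private:
      static int depth_;
    };
    int NoAllocationScope::depth_ = 0;
    // The allocator would then check: ASSERT(NoAllocationScope::IsAllowed());
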
@@ -5340,25 +5333,10 @@ MaybeObject* Heap::AllocateEmptyExternalArray(ExternalArrayType array_type) {
 }
 
 
-MaybeObject* Heap::AllocateRawFixedArray(int length) {
-  if (length < 0 || length > FixedArray::kMaxLength) {
-    return Failure::OutOfMemoryException(0xd);
-  }
-  ASSERT(length > 0);
-  // Use the general function if we're forced to always allocate.
-  if (always_allocate()) return AllocateFixedArray(length, TENURED);
-  // Allocate the raw data for a fixed array.
-  int size = FixedArray::SizeFor(length);
-  return size <= Page::kMaxNonCodeHeapObjectSize
-      ? new_space_.AllocateRaw(size)
-      : lo_space_->AllocateRaw(size, NOT_EXECUTABLE);
-}
-
-
 MaybeObject* Heap::CopyFixedArrayWithMap(FixedArray* src, Map* map) {
   int len = src->length();
   Object* obj;
-  { MaybeObject* maybe_obj = AllocateRawFixedArray(len);
+  { MaybeObject* maybe_obj = AllocateRawFixedArray(len, NOT_TENURED);
     if (!maybe_obj->ToObject(&obj)) return maybe_obj;
   }
   if (InNewSpace(obj)) {
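
Dropping the single-argument overload folds its special cases (the always_allocate() escape hatch and the new-space versus large-object-space split) into the surviving AllocateRawFixedArray(int, PretenureFlag), and callers now spell out the tenuring decision; NOT_TENURED preserves the removed overload's default new-space behavior, as in the rewritten call above. Presumably the matching declaration also leaves src/heap.h in the same change (the header is outside this diff, so this is a reconstruction):

    // Presumed removal from src/heap.h (not shown in this diff):
    // MUST_USE_RESULT MaybeObject* AllocateRawFixedArray(int length);
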
@@ -5409,22 +5387,20 @@ MaybeObject* Heap::AllocateRawFixedArray(int length, PretenureFlag pretenure) {
 }
 
 
-MUST_USE_RESULT static MaybeObject* AllocateFixedArrayWithFiller(
-    Heap* heap,
-    int length,
-    PretenureFlag pretenure,
-    Object* filler) {
+MaybeObject* Heap::AllocateFixedArrayWithFiller(int length,
+                                                PretenureFlag pretenure,
+                                                Object* filler) {
   ASSERT(length >= 0);
-  ASSERT(heap->empty_fixed_array()->IsFixedArray());
-  if (length == 0) return heap->empty_fixed_array();
+  ASSERT(empty_fixed_array()->IsFixedArray());
+  if (length == 0) return empty_fixed_array();
 
-  ASSERT(!heap->InNewSpace(filler));
+  ASSERT(!InNewSpace(filler));
   Object* result;
-  { MaybeObject* maybe_result = heap->AllocateRawFixedArray(length, pretenure);
+  { MaybeObject* maybe_result = AllocateRawFixedArray(length, pretenure);
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
 
-  HeapObject::cast(result)->set_map_no_write_barrier(heap->fixed_array_map());
+  HeapObject::cast(result)->set_map_no_write_barrier(fixed_array_map());
   FixedArray* array = FixedArray::cast(result);
   array->set_length(length);
   MemsetPointer(array->data_start(), filler, length);
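
Turning the file-static helper into a Heap member removes the explicit Heap* parameter and all of the heap-> forwarding in the body. Since a member function must be declared in its class, src/heap.h presumably gains a declaration along these lines (hypothetical spelling; the header is outside this diff):

    // Presumed addition to class Heap in src/heap.h (not shown in this diff):
    MUST_USE_RESULT MaybeObject* AllocateFixedArrayWithFiller(
        int length, PretenureFlag pretenure, Object* filler);
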
@@ -5433,19 +5409,13 @@ MUST_USE_RESULT static MaybeObject* AllocateFixedArrayWithFiller(
 
 
 MaybeObject* Heap::AllocateFixedArray(int length, PretenureFlag pretenure) {
-  return AllocateFixedArrayWithFiller(this,
-                                      length,
-                                      pretenure,
-                                      undefined_value());
+  return AllocateFixedArrayWithFiller(length, pretenure, undefined_value());
 }
 
 
 MaybeObject* Heap::AllocateFixedArrayWithHoles(int length,
                                                PretenureFlag pretenure) {
-  return AllocateFixedArrayWithFiller(this,
-                                      length,
-                                      pretenure,
-                                      the_hole_value());
+  return AllocateFixedArrayWithFiller(length, pretenure, the_hole_value());
 }
 
 
@@ -5453,7 +5423,7 @@ MaybeObject* Heap::AllocateUninitializedFixedArray(int length) {
   if (length == 0) return empty_fixed_array();
 
   Object* obj;
-  { MaybeObject* maybe_obj = AllocateRawFixedArray(length);
+  { MaybeObject* maybe_obj = AllocateRawFixedArray(length, NOT_TENURED);
     if (!maybe_obj->ToObject(&obj)) return maybe_obj;
   }
 