Index: src/heap.cc |
diff --git a/src/heap.cc b/src/heap.cc |
index 53088e289f4979a49e610a1d7c0ee1200a8736e2..bbe0b82d5da1d2e30afe375b9cdda8f3224679e6 100644 |
--- a/src/heap.cc |
+++ b/src/heap.cc |
@@ -1896,6 +1896,7 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor, |
STATIC_ASSERT((FixedDoubleArray::kHeaderSize & kDoubleAlignmentMask) == 0); |
+STATIC_ASSERT((ConstantPoolArray::kHeaderSize & kDoubleAlignmentMask) == 0); |
INLINE(static HeapObject* EnsureDoubleAligned(Heap* heap, |
@@ -2593,6 +2594,12 @@ bool Heap::CreateInitialMaps() { |
set_fixed_double_array_map(Map::cast(obj)); |
{ MaybeObject* maybe_obj = |
+ AllocateMap(CONSTANT_POOL_ARRAY_TYPE, kVariableSizeSentinel); |
+ if (!maybe_obj->ToObject(&obj)) return false; |
+ } |
+ set_constant_pool_array_map(Map::cast(obj)); |
+ |
+ { MaybeObject* maybe_obj = |
AllocateMap(BYTE_ARRAY_TYPE, kVariableSizeSentinel); |
if (!maybe_obj->ToObject(&obj)) return false; |
} |
@@ -5505,6 +5512,26 @@ MaybeObject* Heap::CopyFixedDoubleArrayWithMap(FixedDoubleArray* src, |
} |
+MaybeObject* Heap::CopyConstantPoolArrayWithMap(ConstantPoolArray* src, |
+ Map* map) { |
+ int len = src->length(); |
+ int first_int32_index = src->first_int32_index(); |
+ Object* obj; |
+ { MaybeObject* maybe_obj = |
+ AllocateRawConstantPoolArray(len, first_int32_index, NOT_TENURED); |
+ if (!maybe_obj->ToObject(&obj)) return maybe_obj; |
+ } |
+ HeapObject* dst = HeapObject::cast(obj); |
+ dst->set_map_no_write_barrier(map); |
+ CopyBlock( |
+ dst->address() + ConstantPoolArray::kLengthOffset, |
+ src->address() + ConstantPoolArray::kLengthOffset, |
+ ConstantPoolArray::SizeFor(len, first_int32_index) |
+ - ConstantPoolArray::kLengthOffset); |
+ return obj; |
+} |
+ |
+ |
MaybeObject* Heap::AllocateFixedArray(int length) { |
ASSERT(length >= 0); |
if (length == 0) return empty_fixed_array(); |
@@ -5676,6 +5703,34 @@ MaybeObject* Heap::AllocateRawFixedDoubleArray(int length, |
} |
+MaybeObject* Heap::AllocateRawConstantPoolArray(int length, |
+ int first_int32_index, |
+ PretenureFlag pretenure) { |
+ if (length < 0) { |
[review thread — interleaved into the patch by extraction]
ulan (2013/08/09 08:52:54):
    ASSERT(length > 0), instead of this check?
rmcilroy (2013/08/16 10:27:37):
    I was following AllocateRawFixedArray, which does the same check. [reply truncated in source]
|
+ return Failure::OutOfMemoryException(0xf); |
+ } |
+ int size = ConstantPoolArray::SizeFor(length, first_int32_index); |
+ AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; |
[review thread — interleaved into the patch by extraction]
ulan (2013/08/09 08:52:54):
    It's probably better to pretenure, because code objects are long-lived. [comment truncated in source]
rmcilroy (2013/08/16 10:27:37):
    Done.
|
+ AllocationSpace retry_space = OLD_DATA_SPACE; |
+ |
+#ifndef V8_HOST_ARCH_64_BIT |
+ size += kPointerSize; |
+#endif |
+ |
+ if (size > Page::kMaxNonCodeHeapObjectSize) { |
+ // Allocate in large object space, retry space will be ignored. |
+ space = LO_SPACE; |
+ } |
+ |
+ HeapObject* object; |
+ { MaybeObject* maybe_object = AllocateRaw(size, space, retry_space); |
+ if (!maybe_object->To<HeapObject>(&object)) return maybe_object; |
+ } |
+ |
+ return EnsureDoubleAligned(this, object, size); |
+} |
+ |
+ |
MaybeObject* Heap::AllocateHashTable(int length, PretenureFlag pretenure) { |
Object* result; |
{ MaybeObject* maybe_result = AllocateFixedArray(length, pretenure); |