Index: src/arm/code-stubs-arm.cc
diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc
index a670d5ce701647919a4d47c6d69f9e243020a6f2..3bcc57699c81c09cd7942f163393b41bad58b1d5 100644
--- a/src/arm/code-stubs-arm.cc
+++ b/src/arm/code-stubs-arm.cc
@@ -344,36 +344,35 @@ static void GenerateFastCloneShallowArrayCommon(
     MacroAssembler* masm,
     int length,
     FastCloneShallowArrayStub::Mode mode,
-    AllocationSiteInfoMode allocation_site_info_mode,
     Label* fail) {
   // Registers on entry:
   //
   // r3: boilerplate literal array.
-  ASSERT(mode != FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS);
+  ASSERT(!FastCloneShallowArrayStub::IsCloneAnyElementsMode(mode));
+  bool tracking_on = FastCloneShallowArrayStub::TrackAllocationSiteInfo(mode);
   // All sizes here are multiples of kPointerSize.
   int elements_size = 0;
   if (length > 0) {
-    elements_size = mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
+    elements_size = FastCloneShallowArrayStub::IsCloneDoubleElementsMode(mode)
         ? FixedDoubleArray::SizeFor(length)
         : FixedArray::SizeFor(length);
   }
+
   int size = JSArray::kSize;
   int allocation_info_start = size;
-  if (allocation_site_info_mode == TRACK_ALLOCATION_SITE_INFO) {
-    size += AllocationSiteInfo::kSize;
-  }
-  size += elements_size;
+  size += tracking_on ? AllocationSiteInfo::kSize + elements_size
danno 2013/01/11 16:14:40
nit: "? AllocationSiteInfo::kSize + elements_size"
+                        : elements_size;
   // Allocate both the JS array and the elements array in one big
   // allocation. This avoids multiple limit checks.
   AllocationFlags flags = TAG_OBJECT;
-  if (mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS) {
+  if (FastCloneShallowArrayStub::IsCloneDoubleElementsMode(mode)) {
     flags = static_cast<AllocationFlags>(DOUBLE_ALIGNMENT | flags);
   }
   __ AllocateInNewSpace(size, r0, r1, r2, fail, flags);
-  if (allocation_site_info_mode == TRACK_ALLOCATION_SITE_INFO) {
+  if (tracking_on) {
     __ mov(r2, Operand(Handle<Map>(masm->isolate()->heap()->
         allocation_site_info_map())));
     __ str(r2, FieldMemOperand(r0, allocation_info_start));
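
The IsClone*Mode and TrackAllocationSiteInfo predicates that replace the old
allocation_site_info_mode parameter are not defined in this file; they belong
to FastCloneShallowArrayStub in code-stubs.h, which this excerpt does not
show. A minimal sketch of what they presumably look like, assuming the Mode
enum gained a *_WITH_ALLOCATION_SITE_INFO counterpart for each clone mode
(the variant names below appear at call sites in this diff, but the helper
bodies are an assumption, not the patch's actual declarations):

  // Sketch only: helper shapes inferred from the call sites in this diff,
  // not the real code-stubs.h declarations.
  static bool IsCloneDoubleElementsMode(Mode mode) {
    return mode == CLONE_DOUBLE_ELEMENTS ||
           mode == CLONE_DOUBLE_ELEMENTS_WITH_ALLOCATION_SITE_INFO;
  }

  static bool TrackAllocationSiteInfo(Mode mode) {
    // True for every *_WITH_ALLOCATION_SITE_INFO variant.
    return mode == CLONE_ELEMENTS_WITH_ALLOCATION_SITE_INFO ||
           mode == CLONE_DOUBLE_ELEMENTS_WITH_ALLOCATION_SITE_INFO ||
           mode == COPY_ON_WRITE_ELEMENTS_WITH_ALLOCATION_SITE_INFO ||
           mode == CLONE_ANY_ELEMENTS_WITH_ALLOCATION_SITE_INFO;
  }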
@@ -392,7 +391,7 @@ static void GenerateFastCloneShallowArrayCommon(
   // Get hold of the elements array of the boilerplate and setup the
   // elements pointer in the resulting object.
   __ ldr(r3, FieldMemOperand(r3, JSArray::kElementsOffset));
-  if (allocation_site_info_mode == TRACK_ALLOCATION_SITE_INFO) {
+  if (tracking_on) {
     __ add(r2, r0, Operand(JSArray::kSize + AllocationSiteInfo::kSize));
   } else {
     __ add(r2, r0, Operand(JSArray::kSize));
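
Both hunks above rely on the same single-allocation layout: the JSArray
header first, the optional AllocationSiteInfo block immediately after it (at
allocation_info_start == JSArray::kSize), and the elements store last. That
is why the elements pointer computed into r2 must skip AllocationSiteInfo::kSize
extra bytes when tracking is on. Sketched as a diagram, with the offsets
reconstructed from the size arithmetic in the first hunk rather than stated
in the patch itself:

  // r0 -> +--------------------------+  offset 0
  //       | JSArray header           |  JSArray::kSize bytes
  //       +--------------------------+  allocation_info_start
  //       | AllocationSiteInfo       |  present only when tracking_on
  //       +--------------------------+
  //       | FixedArray /             |  elements_size bytes
  //       | FixedDoubleArray         |
  //       +--------------------------+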
@@ -423,23 +422,16 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
   __ b(eq, &slow_case);
   FastCloneShallowArrayStub::Mode mode = mode_;
-  AllocationSiteInfoMode allocation_site_info_mode =
-      DONT_TRACK_ALLOCATION_SITE_INFO;
-  if (mode == CLONE_ANY_ELEMENTS_WITH_ALLOCATION_SITE_INFO) {
-    mode = CLONE_ANY_ELEMENTS;
-    allocation_site_info_mode = TRACK_ALLOCATION_SITE_INFO;
-  }
-
-  if (mode == CLONE_ANY_ELEMENTS) {
+  if (FastCloneShallowArrayStub::IsCloneAnyElementsMode(mode)) {
     Label double_elements, check_fast_elements;
     __ ldr(r0, FieldMemOperand(r3, JSArray::kElementsOffset));
     __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
     __ CompareRoot(r0, Heap::kFixedCOWArrayMapRootIndex);
     __ b(ne, &check_fast_elements);
-    GenerateFastCloneShallowArrayCommon(masm, 0,
-                                        COPY_ON_WRITE_ELEMENTS,
-                                        allocation_site_info_mode,
-                                        &slow_case);
+    bool tracking_on = FastCloneShallowArrayStub::TrackAllocationSiteInfo(mode);
+    mode = tracking_on ? COPY_ON_WRITE_ELEMENTS_WITH_ALLOCATION_SITE_INFO
danno 2013/01/11 16:14:40
nit: ? on next line
+                       : COPY_ON_WRITE_ELEMENTS;
+    GenerateFastCloneShallowArrayCommon(masm, 0, mode, &slow_case);
     // Return and remove the on-stack parameters.
     __ add(sp, sp, Operand(3 * kPointerSize));
     __ Ret();
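
The widen-the-mode pattern above (mode = tracking_on ? X_WITH_ALLOCATION_SITE_INFO
: X) repeats for all three element kinds in this function. A hypothetical
helper, not part of this patch, shows how the repetition could collapse (enum
names match the diff; the function itself is an illustration only):

  // Hypothetical convenience helper; not in the patch.
  static FastCloneShallowArrayStub::Mode WithAllocationSiteInfo(
      FastCloneShallowArrayStub::Mode mode, bool tracking_on) {
    if (!tracking_on) return mode;
    switch (mode) {
      case CLONE_ELEMENTS:
        return CLONE_ELEMENTS_WITH_ALLOCATION_SITE_INFO;
      case CLONE_DOUBLE_ELEMENTS:
        return CLONE_DOUBLE_ELEMENTS_WITH_ALLOCATION_SITE_INFO;
      case COPY_ON_WRITE_ELEMENTS:
        return COPY_ON_WRITE_ELEMENTS_WITH_ALLOCATION_SITE_INFO;
      default:
        return mode;  // already a tracking variant or CLONE_ANY_ELEMENTS
    }
  }

With it, each assignment would read, for example,
mode = WithAllocationSiteInfo(COPY_ON_WRITE_ELEMENTS, tracking_on);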
@@ -447,30 +439,30 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
     __ bind(&check_fast_elements);
     __ CompareRoot(r0, Heap::kFixedArrayMapRootIndex);
     __ b(ne, &double_elements);
-    GenerateFastCloneShallowArrayCommon(masm, length_,
-                                        CLONE_ELEMENTS,
-                                        allocation_site_info_mode,
-                                        &slow_case);
+    mode = tracking_on ? CLONE_ELEMENTS_WITH_ALLOCATION_SITE_INFO
danno 2013/01/11 16:14:40
nit: ? on next line
+                       : CLONE_ELEMENTS;
+    GenerateFastCloneShallowArrayCommon(masm, length_, mode, &slow_case);
     // Return and remove the on-stack parameters.
     __ add(sp, sp, Operand(3 * kPointerSize));
     __ Ret();
     __ bind(&double_elements);
-    mode = CLONE_DOUBLE_ELEMENTS;
+    mode = tracking_on ? CLONE_DOUBLE_ELEMENTS_WITH_ALLOCATION_SITE_INFO
danno 2013/01/11 16:14:40
nit: ? on next line
+                       : CLONE_DOUBLE_ELEMENTS;
     // Fall through to generate the code to handle double elements.
   }
   if (FLAG_debug_code) {
     const char* message;
     Heap::RootListIndex expected_map_index;
-    if (mode == CLONE_ELEMENTS) {
+    if (FastCloneShallowArrayStub::IsCloneElementsMode(mode)) {
       message = "Expected (writable) fixed array";
       expected_map_index = Heap::kFixedArrayMapRootIndex;
-    } else if (mode == CLONE_DOUBLE_ELEMENTS) {
+    } else if (FastCloneShallowArrayStub::IsCloneDoubleElementsMode(mode)) {
       message = "Expected (writable) fixed double array";
       expected_map_index = Heap::kFixedDoubleArrayMapRootIndex;
     } else {
-      ASSERT(mode == COPY_ON_WRITE_ELEMENTS);
+      ASSERT(FastCloneShallowArrayStub::IsCopyOnWriteElementsMode(mode));
       message = "Expected copy-on-write fixed array";
       expected_map_index = Heap::kFixedCOWArrayMapRootIndex;
     }
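
Because the debug check now dispatches on the IsClone*Mode family predicates
rather than on exact enum values, each expected boilerplate map covers both a
plain mode and its tracking variant. The selection restated as a free
function for clarity (illustrative only; it mirrors the branch above and is
not code from the patch):

  // Illustration of the expected-map selection above; not in the patch.
  static Heap::RootListIndex ExpectedBoilerplateMapIndex(
      FastCloneShallowArrayStub::Mode mode) {
    if (FastCloneShallowArrayStub::IsCloneElementsMode(mode)) {
      return Heap::kFixedArrayMapRootIndex;        // writable fixed array
    }
    if (FastCloneShallowArrayStub::IsCloneDoubleElementsMode(mode)) {
      return Heap::kFixedDoubleArrayMapRootIndex;  // fixed double array
    }
    return Heap::kFixedCOWArrayMapRootIndex;       // copy-on-write array
  }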
@@ -482,8 +474,7 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
     __ pop(r3);
   }
-  GenerateFastCloneShallowArrayCommon(masm, length_, mode,
-                                      allocation_site_info_mode, &slow_case);
+  GenerateFastCloneShallowArrayCommon(masm, length_, mode, &slow_case);
   // Return and remove the on-stack parameters.
   __ add(sp, sp, Operand(3 * kPointerSize));