Index: src/mips/code-stubs-mips.cc
diff --git a/src/mips/code-stubs-mips.cc b/src/mips/code-stubs-mips.cc
index 9f0d535416c6c8c3c04b2c3bc63ed94f067b690a..e920abf82c9d5d696f59870277276ffd598594d9 100644
--- a/src/mips/code-stubs-mips.cc
+++ b/src/mips/code-stubs-mips.cc
@@ -343,6 +343,7 @@ static void GenerateFastCloneShallowArrayCommon(
     MacroAssembler* masm,
     int length,
     FastCloneShallowArrayStub::Mode mode,
+    AllocationSiteInfoMode allocation_site_info_mode,
     Label* fail) {
   // Registers on entry:
   // a3: boilerplate literal array.
@@ -355,7 +356,12 @@ static void GenerateFastCloneShallowArrayCommon(
         ? FixedDoubleArray::SizeFor(length)
         : FixedArray::SizeFor(length);
   }
-  int size = JSArray::kSize + elements_size;
+  int size = JSArray::kSize;
+  int allocation_info_start = size;
+  if (allocation_site_info_mode == TRACK_ALLOCATION_SITE_INFO) {
+    size += AllocationSiteInfo::kSize;
+  }
+  size += elements_size;

   // Allocate both the JS array and the elements array in one big
   // allocation. This avoids multiple limit checks.
@@ -366,6 +372,13 @@ static void GenerateFastCloneShallowArrayCommon(
                         fail,
                         TAG_OBJECT);

+  if (allocation_site_info_mode == TRACK_ALLOCATION_SITE_INFO) {
+    __ li(a2, Operand(Handle<Map>(masm->isolate()->heap()->
+                                  allocation_site_info_map())));
+    __ sw(a2, FieldMemOperand(v0, allocation_info_start));
+    __ sw(a3, FieldMemOperand(v0, allocation_info_start + kPointerSize));
+  }
+
   // Copy the JS array part.
   for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
     if ((i != JSArray::kElementsOffset) || (length == 0)) {
@@ -378,7 +391,11 @@ static void GenerateFastCloneShallowArrayCommon(
     // Get hold of the elements array of the boilerplate and setup the
     // elements pointer in the resulting object.
     __ lw(a3, FieldMemOperand(a3, JSArray::kElementsOffset));
-    __ Addu(a2, v0, Operand(JSArray::kSize));
+    if (allocation_site_info_mode == TRACK_ALLOCATION_SITE_INFO) {
+      __ Addu(a2, v0, Operand(JSArray::kSize + AllocationSiteInfo::kSize));
+    } else {
+      __ Addu(a2, v0, Operand(JSArray::kSize));
+    }
     __ sw(a2, FieldMemOperand(v0, JSArray::kElementsOffset));

     // Copy the elements array.
@@ -407,6 +424,12 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
   __ Branch(&slow_case, eq, a3, Operand(t1));

   FastCloneShallowArrayStub::Mode mode = mode_;
+  AllocationSiteInfoMode allocation_site_info_mode =
+      DONT_TRACK_ALLOCATION_SITE_INFO;
+  if (mode == CLONE_ANY_ELEMENTS_WITH_ALLOCATION_SITE_INFO) {
+    mode = CLONE_ANY_ELEMENTS;
+    allocation_site_info_mode = TRACK_ALLOCATION_SITE_INFO;
+  }
   if (mode == CLONE_ANY_ELEMENTS) {
     Label double_elements, check_fast_elements;
     __ lw(v0, FieldMemOperand(a3, JSArray::kElementsOffset));
@@ -414,7 +437,9 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
     __ LoadRoot(t1, Heap::kFixedCOWArrayMapRootIndex);
     __ Branch(&check_fast_elements, ne, v0, Operand(t1));
     GenerateFastCloneShallowArrayCommon(masm, 0,
-                                        COPY_ON_WRITE_ELEMENTS, &slow_case);
+                                        COPY_ON_WRITE_ELEMENTS,
+                                        allocation_site_info_mode,
+                                        &slow_case);

     // Return and remove the on-stack parameters.
     __ DropAndRet(3);
@@ -422,7 +447,9 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
     __ LoadRoot(t1, Heap::kFixedArrayMapRootIndex);
     __ Branch(&double_elements, ne, v0, Operand(t1));
     GenerateFastCloneShallowArrayCommon(masm, length_,
-                                        CLONE_ELEMENTS, &slow_case);
+                                        CLONE_ELEMENTS,
+                                        allocation_site_info_mode,
+                                        &slow_case);

     // Return and remove the on-stack parameters.
     __ DropAndRet(3);
@@ -453,7 +480,8 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
     __ pop(a3);
   }

-  GenerateFastCloneShallowArrayCommon(masm, length_, mode, &slow_case);
+  GenerateFastCloneShallowArrayCommon(masm, length_, mode,
+                                      allocation_site_info_mode, &slow_case);

   // Return and remove the on-stack parameters.
   __ DropAndRet(3);
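
For reference: when tracking is enabled, the patch carves out an AllocationSiteInfo cell between the cloned JSArray header and its elements within the single new-space allocation, writes the allocation_site_info_map word and the boilerplate pointer (a3) into that cell, and bumps the elements offset by AllocationSiteInfo::kSize. The standalone C++ sketch below mirrors that offset arithmetic only; the constants are placeholders standing in for JSArray::kSize, AllocationSiteInfo::kSize, and FixedArray::SizeFor(length), not the real V8 values.

#include <cstdio>

int main() {
  // Placeholder sizes; in V8 these come from JSArray::kSize,
  // AllocationSiteInfo::kSize and FixedArray::SizeFor(length).
  const int kPointerSize = 4;                             // MIPS32 word size
  const int kJSArraySize = 4 * kPointerSize;              // stand-in for JSArray::kSize
  const int kAllocationSiteInfoSize = 2 * kPointerSize;   // map word + payload word
  const int elements_size = 8 * kPointerSize;             // stand-in for FixedArray::SizeFor(length)
  const bool track_allocation_site_info = true;

  // Same arithmetic as the patched GenerateFastCloneShallowArrayCommon.
  int size = kJSArraySize;
  int allocation_info_start = size;                       // cell sits right after the JSArray
  if (track_allocation_site_info) size += kAllocationSiteInfoSize;
  size += elements_size;                                  // elements follow the cell

  int elements_start =
      kJSArraySize + (track_allocation_site_info ? kAllocationSiteInfoSize : 0);

  printf("total allocation:      %d bytes\n", size);
  printf("AllocationSiteInfo at: offset %d\n", allocation_info_start);
  printf("elements array at:     offset %d\n", elements_start);
  return 0;
}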