Index: src/x64/lithium-codegen-x64.cc
diff --git a/src/x64/lithium-codegen-x64.cc b/src/x64/lithium-codegen-x64.cc
index d64d4ff28c621a7088b2f9b5ad9cd7bdc0dbf682..643e364d9ba2a83280cf944eca4b2209608124fa 100644
--- a/src/x64/lithium-codegen-x64.cc
+++ b/src/x64/lithium-codegen-x64.cc
@@ -4940,10 +4940,18 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
   } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
     CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);
   } else {
-    FastCloneShallowArrayStub::Mode mode =
+    // TODO(mvstanton): I'm doing more work than necessary here by running
+    // CLONE_ANY_ELEMENTS instead of the more specific stub, but I'm doing it
+    // just because I want to track allocation info. Alternative approach: quit
+    // baking allocation tracking info into this field, and instead just have
+    // it on all the time?
+    FastCloneShallowArrayStub::Mode mode = FastCloneShallowArrayStub::
+        CLONE_ANY_ELEMENTS_WITH_ALLOCATION_SITE_INFO;
+    /*
         boilerplate_elements_kind == FAST_DOUBLE_ELEMENTS
             ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
             : FastCloneShallowArrayStub::CLONE_ELEMENTS;
+    */
     FastCloneShallowArrayStub stub(mode, length);
     CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
   }
@@ -4953,7 +4961,8 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
 void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
                             Register result,
                             Register source,
-                            int* offset) {
+                            int* offset,
+                            bool create_allocation_site_info) {
   ASSERT(!source.is(rcx));
   ASSERT(!result.is(rcx));

@@ -4967,8 +4976,14 @@ void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
   int object_offset = *offset;
   int object_size = object->map()->instance_size();
   int elements_offset = *offset + object_size;
+  if (create_allocation_site_info) {
+    elements_offset += AllocationSiteInfo::kSize;
+  }
   int elements_size = has_elements ? elements->Size() : 0;
   *offset += object_size + elements_size;
+  if (create_allocation_site_info) {
+    *offset += AllocationSiteInfo::kSize;
+  }

   // Copy object header.
   ASSERT(object->properties()->length() == 0);
@@ -5002,6 +5017,13 @@ void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
     }
   }

+  // Build Allocation Site Info if desired
+  if (create_allocation_site_info) {
+    __ LoadRoot(kScratchRegister, Heap::kAllocationSiteInfoMapRootIndex);
+    __ movq(FieldOperand(result, object_size), kScratchRegister);
+    __ movq(FieldOperand(result, object_size + kPointerSize), source);
+  }
+
   if (has_elements) {
     // Copy elements backing store header.
     __ LoadHeapObject(source, elements);
@@ -5082,7 +5104,8 @@ void LCodeGen::DoFastLiteral(LFastLiteral* instr) {
   __ bind(&allocated);
   int offset = 0;
   __ LoadHeapObject(rbx, instr->hydrogen()->boilerplate());
-  EmitDeepCopy(instr->hydrogen()->boilerplate(), rax, rbx, &offset);
+  EmitDeepCopy(instr->hydrogen()->boilerplate(), rax, rbx, &offset,
+               instr->hydrogen()->create_allocation_site_info());
   ASSERT_EQ(size, offset);
 }

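Note: as a reading aid only, here is a minimal standalone sketch (not V8 code) of the offset bookkeeping the patch adds to EmitDeepCopy. When allocation site info is requested, the info block is laid out between the cloned object and its elements backing store, so both the elements offset and the running total grow by AllocationSiteInfo::kSize; the patch then writes the info block's map at object_size and the boilerplate (source) pointer one word after it. The sizes and helper names below are illustrative assumptions, not values taken from V8.

#include <cstdio>

// Assumed sizes, for illustration only; the real values come from V8's
// kPointerSize and AllocationSiteInfo::kSize.
static const int kPointerSizeSketch = 8;
static const int kAllocationSiteInfoSizeSketch = 2 * kPointerSizeSketch;

struct CloneLayout {
  int elements_offset;  // where the elements backing store starts
  int total_size;       // how far *offset advances for this clone
};

// Mirrors the arithmetic in the patched EmitDeepCopy: when the info block is
// requested, it is accounted for in both elements_offset and the total.
static CloneLayout ComputeCloneLayout(int object_size, int elements_size,
                                      bool create_allocation_site_info) {
  CloneLayout layout;
  layout.elements_offset = object_size;
  layout.total_size = object_size + elements_size;
  if (create_allocation_site_info) {
    layout.elements_offset += kAllocationSiteInfoSizeSketch;
    layout.total_size += kAllocationSiteInfoSizeSketch;
  }
  return layout;
}

int main() {
  // Example: a 32-byte object with a 48-byte elements store and tracking on.
  CloneLayout layout = ComputeCloneLayout(32, 48, true);
  std::printf("elements at +%d, total %d bytes\n",
              layout.elements_offset, layout.total_size);
  return 0;
}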