Index: src/x64/code-stubs-x64.cc
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index 8d6c044201da58f4d83d3ffb4f82ca2eab96e5f7..1efcc6599996564931fb2d054876f610d1d1003c 100644
--- a/src/x64/code-stubs-x64.cc
+++ b/src/x64/code-stubs-x64.cc
@@ -316,36 +316,34 @@ static void GenerateFastCloneShallowArrayCommon(
     MacroAssembler* masm,
     int length,
     FastCloneShallowArrayStub::Mode mode,
-    AllocationSiteInfoMode allocation_site_info_mode,
     Label* fail) {
   // Registers on entry:
   //
   // rcx: boilerplate literal array.
   ASSERT(mode != FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS);
+  bool tracking_on = FastCloneShallowArrayStub::TrackAllocationSiteInfo(mode);
 
   // All sizes here are multiples of kPointerSize.
   int elements_size = 0;
   if (length > 0) {
-    elements_size = mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
+    elements_size = FastCloneShallowArrayStub::IsCloneDoubleElementsMode(mode)
         ? FixedDoubleArray::SizeFor(length)
         : FixedArray::SizeFor(length);
   }
   int size = JSArray::kSize;
   int allocation_info_start = size;
-  if (allocation_site_info_mode == TRACK_ALLOCATION_SITE_INFO) {
-    size += AllocationSiteInfo::kSize;
-  }
-  size += elements_size;
+  size += tracking_on ? AllocationSiteInfo::kSize + elements_size
+                      : elements_size;
 
   // Allocate both the JS array and the elements array in one big
   // allocation. This avoids multiple limit checks.
   AllocationFlags flags = TAG_OBJECT;
-  if (mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS) {
+  if (FastCloneShallowArrayStub::IsCloneDoubleElementsMode(mode)) {
     flags = static_cast<AllocationFlags>(DOUBLE_ALIGNMENT | flags);
   }
   __ AllocateInNewSpace(size, rax, rbx, rdx, fail, flags);
 
-  if (allocation_site_info_mode == TRACK_ALLOCATION_SITE_INFO) {
+  if (tracking_on) {
     __ LoadRoot(kScratchRegister, Heap::kAllocationSiteInfoMapRootIndex);
     __ movq(FieldOperand(rax, allocation_info_start), kScratchRegister);
     __ movq(FieldOperand(rax, allocation_info_start + kPointerSize), rcx);
@@ -363,7 +361,7 @@ static void GenerateFastCloneShallowArrayCommon(
   // Get hold of the elements array of the boilerplate and setup the
   // elements pointer in the resulting object.
   __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset));
-  if (allocation_site_info_mode == TRACK_ALLOCATION_SITE_INFO) {
+  if (tracking_on) {
     __ lea(rdx, Operand(rax, JSArray::kSize + AllocationSiteInfo::kSize));
   } else {
     __ lea(rdx, Operand(rax, JSArray::kSize));
@@ -371,13 +369,13 @@ static void GenerateFastCloneShallowArrayCommon(
   __ movq(FieldOperand(rax, JSArray::kElementsOffset), rdx);
 
   // Copy the elements array.
-  if (mode == FastCloneShallowArrayStub::CLONE_ELEMENTS) {
+  if (FastCloneShallowArrayStub::IsCloneElementsMode(mode)) {
     for (int i = 0; i < elements_size; i += kPointerSize) {
       __ movq(rbx, FieldOperand(rcx, i));
       __ movq(FieldOperand(rdx, i), rbx);
     }
   } else {
-    ASSERT(mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS);
+    ASSERT(FastCloneShallowArrayStub::IsCloneDoubleElementsMode(mode));
    int i;
     for (i = 0; i < FixedDoubleArray::kHeaderSize; i += kPointerSize) {
       __ movq(rbx, FieldOperand(rcx, i));
@@ -414,51 +412,44 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
   FastCloneShallowArrayStub::Mode mode = mode_;
   // rcx is boilerplate object.
   Factory* factory = masm->isolate()->factory();
-  AllocationSiteInfoMode allocation_site_info_mode =
-      DONT_TRACK_ALLOCATION_SITE_INFO;
-  if (mode == CLONE_ANY_ELEMENTS_WITH_ALLOCATION_SITE_INFO) {
-    mode = CLONE_ANY_ELEMENTS;
-    allocation_site_info_mode = TRACK_ALLOCATION_SITE_INFO;
-  }
-
-  if (mode == CLONE_ANY_ELEMENTS) {
+  if (FastCloneShallowArrayStub::IsCloneAnyElementsMode(mode)) {
     Label double_elements, check_fast_elements;
     __ movq(rbx, FieldOperand(rcx, JSArray::kElementsOffset));
     __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
            factory->fixed_cow_array_map());
     __ j(not_equal, &check_fast_elements);
-    GenerateFastCloneShallowArrayCommon(masm, 0,
-                                        COPY_ON_WRITE_ELEMENTS,
-                                        allocation_site_info_mode,
-                                        &slow_case);
+    bool tracking_on = FastCloneShallowArrayStub::TrackAllocationSiteInfo(mode);
+    mode = tracking_on ? COPY_ON_WRITE_ELEMENTS_WITH_ALLOCATION_SITE_INFO
+                       : COPY_ON_WRITE_ELEMENTS;
+    GenerateFastCloneShallowArrayCommon(masm, 0, mode, &slow_case);
     __ ret(3 * kPointerSize);
 
     __ bind(&check_fast_elements);
     __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
            factory->fixed_array_map());
     __ j(not_equal, &double_elements);
-    GenerateFastCloneShallowArrayCommon(masm, length_,
-                                        CLONE_ELEMENTS,
-                                        allocation_site_info_mode,
-                                        &slow_case);
+    mode = tracking_on ? CLONE_ELEMENTS_WITH_ALLOCATION_SITE_INFO
+                       : CLONE_ELEMENTS;
+    GenerateFastCloneShallowArrayCommon(masm, length_, mode, &slow_case);
     __ ret(3 * kPointerSize);
 
     __ bind(&double_elements);
-    mode = CLONE_DOUBLE_ELEMENTS;
+    mode = tracking_on ? CLONE_DOUBLE_ELEMENTS_WITH_ALLOCATION_SITE_INFO
+                       : CLONE_DOUBLE_ELEMENTS;
     // Fall through to generate the code to handle double elements.
   }
 
   if (FLAG_debug_code) {
     const char* message;
     Heap::RootListIndex expected_map_index;
-    if (mode == CLONE_ELEMENTS) {
+    if (FastCloneShallowArrayStub::IsCloneElementsMode(mode)) {
       message = "Expected (writable) fixed array";
       expected_map_index = Heap::kFixedArrayMapRootIndex;
-    } else if (mode == CLONE_DOUBLE_ELEMENTS) {
+    } else if (FastCloneShallowArrayStub::IsCloneDoubleElementsMode(mode)) {
       message = "Expected (writable) fixed double array";
       expected_map_index = Heap::kFixedDoubleArrayMapRootIndex;
     } else {
-      ASSERT(mode == COPY_ON_WRITE_ELEMENTS);
+      ASSERT(FastCloneShallowArrayStub::IsCopyOnWriteElementsMode(mode));
       message = "Expected copy-on-write fixed array";
       expected_map_index = Heap::kFixedCOWArrayMapRootIndex;
     }
@@ -470,8 +461,7 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
     __ pop(rcx);
   }
 
-  GenerateFastCloneShallowArrayCommon(masm, length_, mode,
-                                      allocation_site_info_mode, &slow_case);
+  GenerateFastCloneShallowArrayCommon(masm, length_, mode, &slow_case);
   __ ret(3 * kPointerSize);
 
   __ bind(&slow_case);
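
Note (not part of the hunks above): this file now relies on helpers and Mode values that live in the stub's header, which this patch excerpt does not show: TrackAllocationSiteInfo(), IsCloneElementsMode(), IsCloneDoubleElementsMode(), IsCopyOnWriteElementsMode(), IsCloneAnyElementsMode(), and the *_WITH_ALLOCATION_SITE_INFO mode values. The sketch below is a minimal, hypothetical rendering of those declarations; the enum ordering and the predicate bodies are assumptions, not copied from the CL. It only illustrates the idea that each base mode gains an allocation-site-tracking twin and that the predicates fold the two variants back together.

// Hypothetical sketch of the header-side declarations assumed by this patch.
// Not the actual code-stubs.h contents; names are taken from the hunks above,
// the bodies and enum order are guesses.
class FastCloneShallowArrayStub {
 public:
  enum Mode {
    CLONE_ELEMENTS,
    CLONE_ELEMENTS_WITH_ALLOCATION_SITE_INFO,
    CLONE_DOUBLE_ELEMENTS,
    CLONE_DOUBLE_ELEMENTS_WITH_ALLOCATION_SITE_INFO,
    COPY_ON_WRITE_ELEMENTS,
    COPY_ON_WRITE_ELEMENTS_WITH_ALLOCATION_SITE_INFO,
    CLONE_ANY_ELEMENTS,
    CLONE_ANY_ELEMENTS_WITH_ALLOCATION_SITE_INFO
  };

  // True for any *_WITH_ALLOCATION_SITE_INFO mode.
  static bool TrackAllocationSiteInfo(Mode mode) {
    return mode == CLONE_ELEMENTS_WITH_ALLOCATION_SITE_INFO ||
           mode == CLONE_DOUBLE_ELEMENTS_WITH_ALLOCATION_SITE_INFO ||
           mode == COPY_ON_WRITE_ELEMENTS_WITH_ALLOCATION_SITE_INFO ||
           mode == CLONE_ANY_ELEMENTS_WITH_ALLOCATION_SITE_INFO;
  }

  // Each predicate accepts the base mode and its tracking twin.
  static bool IsCloneElementsMode(Mode mode) {
    return mode == CLONE_ELEMENTS ||
           mode == CLONE_ELEMENTS_WITH_ALLOCATION_SITE_INFO;
  }
  static bool IsCloneDoubleElementsMode(Mode mode) {
    return mode == CLONE_DOUBLE_ELEMENTS ||
           mode == CLONE_DOUBLE_ELEMENTS_WITH_ALLOCATION_SITE_INFO;
  }
  static bool IsCopyOnWriteElementsMode(Mode mode) {
    return mode == COPY_ON_WRITE_ELEMENTS ||
           mode == COPY_ON_WRITE_ELEMENTS_WITH_ALLOCATION_SITE_INFO;
  }
  static bool IsCloneAnyElementsMode(Mode mode) {
    return mode == CLONE_ANY_ELEMENTS ||
           mode == CLONE_ANY_ELEMENTS_WITH_ALLOCATION_SITE_INFO;
  }
};

Encoding the tracking flag in Mode this way is what lets GenerateFastCloneShallowArrayCommon drop its separate AllocationSiteInfoMode parameter while each call site in the hunks above still selects the tracking variant explicitly.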
|