Index: src/x64/code-stubs-x64.cc
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index fffd37f519db9c9dcb19fb44d5640de8e39093e9..ad39981317e3371a487db764996c11999b658862 100644
--- a/src/x64/code-stubs-x64.cc
+++ b/src/x64/code-stubs-x64.cc
@@ -39,6 +39,18 @@ namespace v8 {
 namespace internal {
 
 
+void FastCloneShallowArrayStub::InitializeInterfaceDescriptor(
+    Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  static Register registers[] = { rax, rbx, rcx };
+  descriptor->register_param_count_ = 3;
+  descriptor->register_params_ = registers;
+  descriptor->stack_parameter_count_ = NULL;
+  descriptor->deoptimization_handler_ =
+      Runtime::FunctionForId(Runtime::kCreateArrayLiteralShallow)->entry;
+}
+
+
 void FastCloneShallowObjectStub::InitializeInterfaceDescriptor(
     Isolate* isolate,
     CodeStubInterfaceDescriptor* descriptor) {
@@ -386,165 +398,6 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
 }
 
 
-static void GenerateFastCloneShallowArrayCommon(
-    MacroAssembler* masm,
-    int length,
-    FastCloneShallowArrayStub::Mode mode,
-    AllocationSiteMode allocation_site_mode,
-    Label* fail) {
-  // Registers on entry:
-  //
-  // rcx: boilerplate literal array.
-  ASSERT(mode != FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS);
-
-  // All sizes here are multiples of kPointerSize.
-  int elements_size = 0;
-  if (length > 0) {
-    elements_size = mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
-        ? FixedDoubleArray::SizeFor(length)
-        : FixedArray::SizeFor(length);
-  }
-  int size = JSArray::kSize;
-  int allocation_info_start = size;
-  if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
-    size += AllocationSiteInfo::kSize;
-  }
-  size += elements_size;
-
-  // Allocate both the JS array and the elements array in one big
-  // allocation. This avoids multiple limit checks.
-  AllocationFlags flags = TAG_OBJECT;
-  if (mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS) {
-    flags = static_cast<AllocationFlags>(DOUBLE_ALIGNMENT | flags);
-  }
-  __ Allocate(size, rax, rbx, rdx, fail, flags);
-
-  if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
-    __ LoadRoot(kScratchRegister, Heap::kAllocationSiteInfoMapRootIndex);
-    __ movq(FieldOperand(rax, allocation_info_start), kScratchRegister);
-    __ movq(FieldOperand(rax, allocation_info_start + kPointerSize), rcx);
-  }
-
-  // Copy the JS array part.
-  for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
-    if ((i != JSArray::kElementsOffset) || (length == 0)) {
-      __ movq(rbx, FieldOperand(rcx, i));
-      __ movq(FieldOperand(rax, i), rbx);
-    }
-  }
-
-  if (length > 0) {
-    // Get hold of the elements array of the boilerplate and setup the
-    // elements pointer in the resulting object.
-    __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset));
-    if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
-      __ lea(rdx, Operand(rax, JSArray::kSize + AllocationSiteInfo::kSize));
-    } else {
-      __ lea(rdx, Operand(rax, JSArray::kSize));
-    }
-    __ movq(FieldOperand(rax, JSArray::kElementsOffset), rdx);
-
-    // Copy the elements array.
-    if (mode == FastCloneShallowArrayStub::CLONE_ELEMENTS) {
-      for (int i = 0; i < elements_size; i += kPointerSize) {
-        __ movq(rbx, FieldOperand(rcx, i));
-        __ movq(FieldOperand(rdx, i), rbx);
-      }
-    } else {
-      ASSERT(mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS);
-      int i;
-      for (i = 0; i < FixedDoubleArray::kHeaderSize; i += kPointerSize) {
-        __ movq(rbx, FieldOperand(rcx, i));
-        __ movq(FieldOperand(rdx, i), rbx);
-      }
-      while (i < elements_size) {
-        __ movsd(xmm0, FieldOperand(rcx, i));
-        __ movsd(FieldOperand(rdx, i), xmm0);
-        i += kDoubleSize;
-      }
-      ASSERT(i == elements_size);
-    }
-  }
-}
-
-void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
-  // Stack layout on entry:
-  //
-  // [rsp + kPointerSize]: constant elements.
-  // [rsp + (2 * kPointerSize)]: literal index.
-  // [rsp + (3 * kPointerSize)]: literals array.
-
-  // Load boilerplate object into rcx and check if we need to create a
-  // boilerplate.
-  __ movq(rcx, Operand(rsp, 3 * kPointerSize));
-  __ movq(rax, Operand(rsp, 2 * kPointerSize));
-  SmiIndex index = masm->SmiToIndex(rax, rax, kPointerSizeLog2);
-  __ movq(rcx,
-          FieldOperand(rcx, index.reg, index.scale, FixedArray::kHeaderSize));
-  __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex);
-  Label slow_case;
-  __ j(equal, &slow_case);
-
-  FastCloneShallowArrayStub::Mode mode = mode_;
-  // rcx is boilerplate object.
-  Factory* factory = masm->isolate()->factory();
-  if (mode == CLONE_ANY_ELEMENTS) {
-    Label double_elements, check_fast_elements;
-    __ movq(rbx, FieldOperand(rcx, JSArray::kElementsOffset));
-    __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
-           factory->fixed_cow_array_map());
-    __ j(not_equal, &check_fast_elements);
-    GenerateFastCloneShallowArrayCommon(masm, 0, COPY_ON_WRITE_ELEMENTS,
-                                        allocation_site_mode_,
-                                        &slow_case);
-    __ ret(3 * kPointerSize);
-
-    __ bind(&check_fast_elements);
-    __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
-           factory->fixed_array_map());
-    __ j(not_equal, &double_elements);
-    GenerateFastCloneShallowArrayCommon(masm, length_, CLONE_ELEMENTS,
-                                        allocation_site_mode_,
-                                        &slow_case);
-    __ ret(3 * kPointerSize);
-
-    __ bind(&double_elements);
-    mode = CLONE_DOUBLE_ELEMENTS;
-    // Fall through to generate the code to handle double elements.
-  }
-
-  if (FLAG_debug_code) {
-    const char* message;
-    Heap::RootListIndex expected_map_index;
-    if (mode == CLONE_ELEMENTS) {
-      message = "Expected (writable) fixed array";
-      expected_map_index = Heap::kFixedArrayMapRootIndex;
-    } else if (mode == CLONE_DOUBLE_ELEMENTS) {
-      message = "Expected (writable) fixed double array";
-      expected_map_index = Heap::kFixedDoubleArrayMapRootIndex;
-    } else {
-      ASSERT(mode == COPY_ON_WRITE_ELEMENTS);
-      message = "Expected copy-on-write fixed array";
-      expected_map_index = Heap::kFixedCOWArrayMapRootIndex;
-    }
-    __ push(rcx);
-    __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset));
-    __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
-                   expected_map_index);
-    __ Assert(equal, message);
-    __ pop(rcx);
-  }
-
-  GenerateFastCloneShallowArrayCommon(masm, length_, mode,
-                                      allocation_site_mode_,
-                                      &slow_case);
-  __ ret(3 * kPointerSize);
-
-  __ bind(&slow_case);
-  __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);
-}
-
-
 // The stub expects its argument on the stack and returns its result in tos_:
 // zero for false, and a non-zero value for true.
 void ToBooleanStub::Generate(MacroAssembler* masm) {
@@ -4154,6 +4007,7 @@ bool CEntryStub::IsPregenerated() {
 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
   CEntryStub::GenerateAheadOfTime(isolate);
   StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
+  StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
   // It is important that the store buffer overflow stubs are generated first.
   RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate);
 }
@@ -6775,8 +6629,7 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
 
 
 void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
-  ASSERT(!Serializer::enabled());
-  CEntryStub ces(1, kSaveFPRegs);
+  CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
   __ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
   int parameter_count_offset =
       StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;