Index: src/x64/code-stubs-x64.cc
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index 551a71690e7a45400ec4d078ef7fcf4e89e39f0b..5411e42bfc65177924ef0cac24c9af774758ef7e 100644
--- a/src/x64/code-stubs-x64.cc
+++ b/src/x64/code-stubs-x64.cc
@@ -317,7 +317,7 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
  __ IncrementCounter(counters->fast_new_closure_total(), 1);

  // Get the function info from the stack.
- __ movq(rdx, Operand(rsp, 1 * kPointerSize));
+ __ movq(rdx, StackOperandForArgument(1 * kPointerSize));

  int map_index = Context::FunctionMapIndex(language_mode_, is_generator_);
|
@@ -425,7 +425,7 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
  __ ret(1 * kPointerSize);

  __ bind(&restore);
- __ movq(rdx, Operand(rsp, 1 * kPointerSize));
+ __ movq(rdx, StackOperandForArgument(1 * kPointerSize));
  __ jmp(&install_unoptimized);

  // Create a new closure through the slower runtime call.
|
@@ -448,7 +448,7 @@ void FastNewContextStub::Generate(MacroAssembler* masm) {
  rax, rbx, rcx, &gc, TAG_OBJECT);

  // Get the function from the stack.
- __ movq(rcx, Operand(rsp, 1 * kPointerSize));
+ __ movq(rcx, StackOperandForArgument(1 * kPointerSize));

  // Set up the object header.
  __ LoadRoot(kScratchRegister, Heap::kFunctionContextMapRootIndex);
|
@@ -494,10 +494,10 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
  rax, rbx, rcx, &gc, TAG_OBJECT);

  // Get the function from the stack.
- __ movq(rcx, Operand(rsp, 1 * kPointerSize));
+ __ movq(rcx, StackOperandForArgument(1 * kPointerSize));

  // Get the serialized scope info from the stack.
- __ movq(rbx, Operand(rsp, 2 * kPointerSize));
+ __ movq(rbx, StackOperandForArgument(2 * kPointerSize));

  // Set up the object header.
  __ LoadRoot(kScratchRegister, Heap::kBlockContextMapRootIndex);
|
@@ -1276,7 +1276,7 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
  if (tagged) {
  Label input_not_smi, loaded;
  // Test that rax is a number.
- __ movq(rax, Operand(rsp, kPointerSize));
+ __ movq(rax, StackOperandForArgument(1 * kPointerSize));
  __ JumpIfNotSmi(rax, &input_not_smi, Label::kNear);
  // Input is a smi. Untag and load it onto the FPU stack.
  // Then load the bits of the double into rbx.
|
@@ -1809,8 +1809,8 @@ void MathPowStub::Generate(MacroAssembler* masm) {
  // The exponent and base are supplied as arguments on the stack.
  // This can only happen if the stub is called from non-optimized code.
  // Load input parameters from stack.
- __ movq(base, Operand(rsp, 2 * kPointerSize));
- __ movq(exponent, Operand(rsp, 1 * kPointerSize));
+ __ movq(base, StackOperandForArgument(2 * kPointerSize));
+ __ movq(exponent, StackOperandForArgument(1 * kPointerSize));
  __ JumpIfSmi(base, &base_is_smi, Label::kNear);
  __ CompareRoot(FieldOperand(base, HeapObject::kMapOffset),
  Heap::kHeapNumberMapRootIndex);
|
@@ -2243,7 +2243,7 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {

  Factory* factory = masm->isolate()->factory();

- __ SmiToInteger64(rbx, Operand(rsp, 1 * kPointerSize));
+ __ SmiToInteger64(rbx, StackOperandForArgument(1 * kPointerSize));
  // rbx = parameter count (untagged)

  // Check if the calling frame is an arguments adaptor frame.
|
@@ -2265,7 +2265,7 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
  ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ lea(rdx, Operand(rdx, rcx, times_pointer_size,
  StandardFrameConstants::kCallerSPOffset));
- __ movq(Operand(rsp, 2 * kPointerSize), rdx);
+ __ movq(StackOperandForArgument(2 * kPointerSize), rdx);

  // rbx = parameter count (untagged)
  // rcx = argument count (untagged)
|
@@ -2326,7 +2326,7 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {

  // Set up the callee in-object property.
  STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1);
- __ movq(rdx, Operand(rsp, 3 * kPointerSize));
+ __ movq(rdx, StackOperandForArgument(3 * kPointerSize));
  __ movq(FieldOperand(rax, JSObject::kHeaderSize +
  Heap::kArgumentsCalleeIndex * kPointerSize),
  rdx);
|
@@ -2377,7 +2377,7 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
  // Load tagged parameter count into r9.
  __ Integer32ToSmi(r9, rbx);
  __ Move(r8, Smi::FromInt(Context::MIN_CONTEXT_SLOTS));
- __ addq(r8, Operand(rsp, 1 * kPointerSize));
+ __ addq(r8, StackOperandForArgument(1 * kPointerSize));
  __ subq(r8, r9);
  __ Move(r11, factory->the_hole_value());
  __ movq(rdx, rdi);
|
@@ -2416,7 +2416,7 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {

  Label arguments_loop, arguments_test;
  __ movq(r8, rbx);
- __ movq(rdx, Operand(rsp, 2 * kPointerSize));
+ __ movq(rdx, StackOperandForArgument(2 * kPointerSize));
  // Untag rcx for the loop below.
  __ SmiToInteger64(rcx, rcx);
  __ lea(kScratchRegister, Operand(r8, times_pointer_size, 0));
|
@@ -2443,7 +2443,8 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
  // rcx = argument count (untagged)
  __ bind(&runtime);
  __ Integer32ToSmi(rcx, rcx);
- __ movq(Operand(rsp, 1 * kPointerSize), rcx); // Patch argument count.
+ // Patch argument count.
+ __ movq(StackOperandForArgument(1 * kPointerSize), rcx);
  __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
  }

|
@@ -2463,11 +2464,11 @@ void ArgumentsAccessStub::GenerateNewNonStrictSlow(MacroAssembler* masm) {

  // Patch the arguments.length and the parameters pointer.
  __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
- __ movq(Operand(rsp, 1 * kPointerSize), rcx);
+ __ movq(StackOperandForArgument(1 * kPointerSize), rcx);
  __ SmiToInteger64(rcx, rcx);
  __ lea(rdx, Operand(rdx, rcx, times_pointer_size,
  StandardFrameConstants::kCallerSPOffset));
- __ movq(Operand(rsp, 2 * kPointerSize), rdx);
+ __ movq(StackOperandForArgument(2 * kPointerSize), rdx);

  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
|
@@ -2488,18 +2489,18 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
  __ j(equal, &adaptor_frame);

  // Get the length from the frame.
- __ movq(rcx, Operand(rsp, 1 * kPointerSize));
+ __ movq(rcx, StackOperandForArgument(1 * kPointerSize));
  __ SmiToInteger64(rcx, rcx);
  __ jmp(&try_allocate);

  // Patch the arguments.length and the parameters pointer.
  __ bind(&adaptor_frame);
  __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
- __ movq(Operand(rsp, 1 * kPointerSize), rcx);
+ __ movq(StackOperandForArgument(1 * kPointerSize), rcx);
  __ SmiToInteger64(rcx, rcx);
  __ lea(rdx, Operand(rdx, rcx, times_pointer_size,
  StandardFrameConstants::kCallerSPOffset));
- __ movq(Operand(rsp, 2 * kPointerSize), rdx);
+ __ movq(StackOperandForArgument(2 * kPointerSize), rdx);

  // Try the new space allocation. Start out with computing the size of
  // the arguments object and the elements array.
|
@@ -2529,7 +2530,7 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {

  // Get the length (smi tagged) and set that as an in-object property too.
  STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
- __ movq(rcx, Operand(rsp, 1 * kPointerSize));
+ __ movq(rcx, StackOperandForArgument(1 * kPointerSize));
  __ movq(FieldOperand(rax, JSObject::kHeaderSize +
  Heap::kArgumentsLengthIndex * kPointerSize),
  rcx);
|
@@ -2540,7 +2541,7 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
  __ j(zero, &done);

  // Get the parameters pointer from the stack.
- __ movq(rdx, Operand(rsp, 2 * kPointerSize));
+ __ movq(rdx, StackOperandForArgument(2 * kPointerSize));

  // Set up the elements pointer in the allocated arguments object and
  // initialize the header in the elements fixed array.
|
@@ -3023,7 +3024,7 @@ void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
  const int kMaxInlineLength = 100;
  Label slowcase;
  Label done;
- __ movq(r8, Operand(rsp, kPointerSize * 3));
+ __ movq(r8, StackOperandForArgument(3 * kPointerSize));
  __ JumpIfNotSmi(r8, &slowcase);
  __ SmiToInteger32(rbx, r8);
  __ cmpl(rbx, Immediate(kMaxInlineLength));
|
@@ -3061,11 +3062,11 @@ void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
  __ movq(FieldOperand(rax, JSObject::kElementsOffset), rcx);

  // Set input, index and length fields from arguments.
- __ movq(r8, Operand(rsp, kPointerSize * 1));
+ __ movq(r8, StackOperandForArgument(1 * kPointerSize));
  __ movq(FieldOperand(rax, JSRegExpResult::kInputOffset), r8);
- __ movq(r8, Operand(rsp, kPointerSize * 2));
+ __ movq(r8, StackOperandForArgument(2 * kPointerSize));
  __ movq(FieldOperand(rax, JSRegExpResult::kIndexOffset), r8);
- __ movq(r8, Operand(rsp, kPointerSize * 3));
+ __ movq(r8, StackOperandForArgument(3 * kPointerSize));
  __ movq(FieldOperand(rax, JSArray::kLengthOffset), r8);

  // Fill out the elements FixedArray.
|
@@ -3196,7 +3197,7 @@ void NumberToStringStub::GenerateConvertHashCodeToIndex(MacroAssembler* masm,
  void NumberToStringStub::Generate(MacroAssembler* masm) {
  Label runtime;

- __ movq(rbx, Operand(rsp, kPointerSize));
+ __ movq(rbx, StackOperandForArgument(1 * kPointerSize));

  // Generate code to lookup number in the number string cache.
  GenerateLookupNumberStringCache(masm, rbx, rax, r8, r9, &runtime);
|
@@ -3616,14 +3617,14 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
  Label call;
  // Get the receiver from the stack.
  // +1 ~ return address
- __ movq(rax, Operand(rsp, (argc_ + 1) * kPointerSize));
+ __ movq(rax, StackOperandForArgument((argc_ + 1) * kPointerSize));
  // Call as function is indicated with the hole.
  __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
  __ j(not_equal, &call, Label::kNear);
  // Patch the receiver on the stack with the global receiver object.
  __ movq(rcx, GlobalObjectOperand());
  __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalReceiverOffset));
- __ movq(Operand(rsp, (argc_ + 1) * kPointerSize), rcx);
+ __ movq(StackOperandForArgument((argc_ + 1) * kPointerSize), rcx);
  __ bind(&call);
  }

|
@@ -3685,7 +3686,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
  // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
  // of the original receiver from the call site).
  __ bind(&non_function);
- __ movq(Operand(rsp, (argc_ + 1) * kPointerSize), rdi);
+ __ movq(StackOperandForArgument((argc_ + 1) * kPointerSize), rdi);
  __ Set(rax, argc_);
  __ Set(rbx, 0);
  __ SetCallKind(rcx, CALL_AS_METHOD);
|
@@ -4235,7 +4236,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
  __ j(above, &slow);

  // Get the prototype of the function.
- __ movq(rdx, Operand(rsp, 1 * kPointerSize + extra_stack_space));
+ __ movq(rdx, StackOperandForArgument(1 * kPointerSize + extra_stack_space));
  // rdx is function, rax is map.

  // If there is a call site cache don't look in the global cache, but do the
|
| @@ -4271,7 +4272,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
|
| } else {
|
| // Get return address and delta to inlined map check.
|
| __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize));
|
| - __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize));
|
| + __ subq(kScratchRegister, StackOperandForArgument(1 * kPointerSize));
|
| if (FLAG_debug_code) {
|
| __ movl(rdi, Immediate(kWordBeforeMapCheckValue));
|
| __ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi);
|
@@ -4312,7 +4313,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
  ASSERT(true_offset >= 0 && true_offset < 0x100);
  __ movl(rax, Immediate(true_offset));
  __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize));
- __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize));
+ __ subq(kScratchRegister, StackOperandForArgument(1 * kPointerSize));
  __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax);
  if (FLAG_debug_code) {
  __ movl(rax, Immediate(kWordBeforeResultValue));
|
@@ -4335,7 +4336,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
  ASSERT(false_offset >= 0 && false_offset < 0x100);
  __ movl(rax, Immediate(false_offset));
  __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize));
- __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize));
+ __ subq(kScratchRegister, StackOperandForArgument(1 * kPointerSize));
  __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax);
  if (FLAG_debug_code) {
  __ movl(rax, Immediate(kWordBeforeResultValue));
|
@@ -4501,8 +4502,8 @@ void StringAddStub::Generate(MacroAssembler* masm) {
  Builtins::JavaScript builtin_id = Builtins::ADD;

  // Load the two arguments.
- __ movq(rax, Operand(rsp, 2 * kPointerSize)); // First argument (left).
- __ movq(rdx, Operand(rsp, 1 * kPointerSize)); // Second argument (right).
+ __ movq(rax, StackOperandForArgument(2 * kPointerSize)); // First (left).
+ __ movq(rdx, StackOperandForArgument(1 * kPointerSize)); // Second (right).

  // Make sure that both arguments are strings if not known in advance.
  // Otherwise, at least one of the arguments is definitely a string,
|
@@ -5509,8 +5510,8 @@ void StringCompareStub::Generate(MacroAssembler* masm) {
  // rsp[8] : right string
  // rsp[16] : left string

- __ movq(rdx, Operand(rsp, 2 * kPointerSize)); // left
- __ movq(rax, Operand(rsp, 1 * kPointerSize)); // right
+ __ movq(rdx, StackOperandForArgument(2 * kPointerSize)); // left
+ __ movq(rax, StackOperandForArgument(1 * kPointerSize)); // right

  // Check for identity.
  Label not_same;
|
@@ -6025,7 +6026,7 @@ void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
  // (their names are the null value).
  for (int i = kInlinedProbes; i < kTotalProbes; i++) {
  // Compute the masked index: (hash + i + i * i) & mask.
- __ movq(scratch, Operand(rsp, 2 * kPointerSize));
+ __ movq(scratch, StackOperandForArgument(2 * kPointerSize));
  if (i > 0) {
  __ addl(scratch, Immediate(NameDictionary::GetProbeOffset(i)));
  }
|
@@ -6045,7 +6046,7 @@ void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
  __ j(equal, &not_in_dictionary);

  // Stop if found the property.
- __ cmpq(scratch, Operand(rsp, 3 * kPointerSize));
+ __ cmpq(scratch, StackOperandForArgument(3 * kPointerSize));
  __ j(equal, &in_dictionary);

  if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) {
|
@@ -6397,8 +6398,8 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
  Label fast_elements;

  // Get array literal index, array literal and its map.
- __ movq(rdx, Operand(rsp, 1 * kPointerSize));
- __ movq(rbx, Operand(rsp, 2 * kPointerSize));
+ __ movq(rdx, StackOperandForArgument(1 * kPointerSize));
+ __ movq(rbx, StackOperandForArgument(2 * kPointerSize));
  __ movq(rdi, FieldOperand(rbx, JSObject::kMapOffset));

  __ CheckFastElements(rdi, &double_elements);
|
@@ -6567,7 +6568,7 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm) {
  __ j(not_zero, &normal_sequence);

  // look at the first argument
- __ movq(rcx, Operand(rsp, kPointerSize));
+ __ movq(rcx, StackOperandForArgument(1 * kPointerSize));
  __ testq(rcx, rcx);
  __ j(zero, &normal_sequence);

|
@@ -6746,7 +6747,7 @@ void InternalArrayConstructorStub::GenerateCase(
  if (IsFastPackedElementsKind(kind)) {
  // We might need to create a holey array
  // look at the first argument
- __ movq(rcx, Operand(rsp, kPointerSize));
+ __ movq(rcx, StackOperandForArgument(1 * kPointerSize));
  __ testq(rcx, rcx);
  __ j(zero, &normal_sequence);
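
Note: the definition of StackOperandForArgument is not part of these hunks. For reference, a minimal sketch of what such a file-local helper could look like on stock x64, assuming it does nothing more than centralize rsp-relative argument addressing; the name is the patch's, but the body below is a guess:

// Hypothetical sketch (not from this patch): a single choke point for
// rsp-relative argument access. Callers pass the same byte displacement
// they previously passed to Operand(rsp, ...), i.e. the offset past the
// return-address slot.
static inline Operand StackOperandForArgument(int offset) {
  // On stock x64 the return address occupies exactly one pointer-sized
  // slot, so the displacement needs no adjustment. A port where the
  // return-address slot size differs from kPointerSize could apply its
  // correction here, in one place, instead of in every stub.
  return Operand(rsp, offset);
}

Whatever the actual body, funneling every stack-argument access through one helper means a future change to the stack layout touches this function rather than every call site patched above.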