Index: src/x64/code-stubs-x64.cc
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index 551a71690e7a45400ec4d078ef7fcf4e89e39f0b..6499c73e667964a88508462d36c19de09d71f205 100644
--- a/src/x64/code-stubs-x64.cc
+++ b/src/x64/code-stubs-x64.cc
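This CL replaces raw rsp-relative argument operands with named helpers. The helper definitions themselves are not part of this diff; the sketch below is only what they presumably expand to, inferred from the one-for-one substitutions in the hunks that follow (both bodies are assumptions, not the patch's actual code):

// Hypothetical definitions, inferred from this patch's substitutions.
// rsp[0] holds the return address, so the argument pushed last
// (reversed index 0) sits one word above it.
Operand StackOperandForReversedArgument(int reversed_index) {
  return Operand(rsp, (1 + reversed_index) * kPointerSize);
}

// The receiver is pushed before the argc arguments, so it sits
// argc + 1 words above the return address.
Operand StackOperandForReceiver(int argc) {
  return Operand(rsp, (argc + 1) * kPointerSize);
}

Reading the hunks below, the mapping is mechanical: Operand(rsp, n * kPointerSize) becomes StackOperandForReversedArgument(n - 1), and Operand(rsp, (argc_ + 1) * kPointerSize) becomes StackOperandForReceiver(argc_).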
@@ -317,7 +317,7 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
__ IncrementCounter(counters->fast_new_closure_total(), 1);
// Get the function info from the stack.
- __ movq(rdx, Operand(rsp, 1 * kPointerSize));
+ __ movq(rdx, StackOperandForReversedArgument(0));
int map_index = Context::FunctionMapIndex(language_mode_, is_generator_);
@@ -425,7 +425,7 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
__ ret(1 * kPointerSize);
__ bind(&restore);
- __ movq(rdx, Operand(rsp, 1 * kPointerSize));
+ __ movq(rdx, StackOperandForReversedArgument(0));
__ jmp(&install_unoptimized);
// Create a new closure through the slower runtime call.
@@ -448,7 +448,7 @@ void FastNewContextStub::Generate(MacroAssembler* masm) {
rax, rbx, rcx, &gc, TAG_OBJECT);
// Get the function from the stack.
- __ movq(rcx, Operand(rsp, 1 * kPointerSize));
+ __ movq(rcx, StackOperandForReversedArgument(0));
// Set up the object header.
__ LoadRoot(kScratchRegister, Heap::kFunctionContextMapRootIndex);
@@ -494,10 +494,10 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
rax, rbx, rcx, &gc, TAG_OBJECT);
// Get the function from the stack.
- __ movq(rcx, Operand(rsp, 1 * kPointerSize));
+ __ movq(rcx, StackOperandForReversedArgument(0));
// Get the serialized scope info from the stack.
- __ movq(rbx, Operand(rsp, 2 * kPointerSize));
+ __ movq(rbx, StackOperandForReversedArgument(1));
// Set up the object header.
__ LoadRoot(kScratchRegister, Heap::kBlockContextMapRootIndex);
@@ -1276,7 +1276,7 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
if (tagged) {
Label input_not_smi, loaded;
// Test that rax is a number.
- __ movq(rax, Operand(rsp, kPointerSize));
+ __ movq(rax, StackOperandForReversedArgument(0));
__ JumpIfNotSmi(rax, &input_not_smi, Label::kNear);
// Input is a smi. Untag and load it onto the FPU stack.
// Then load the bits of the double into rbx.
@@ -1809,8 +1809,8 @@ void MathPowStub::Generate(MacroAssembler* masm) {
// The exponent and base are supplied as arguments on the stack.
// This can only happen if the stub is called from non-optimized code.
// Load input parameters from stack.
- __ movq(base, Operand(rsp, 2 * kPointerSize));
- __ movq(exponent, Operand(rsp, 1 * kPointerSize));
+ __ movq(base, StackOperandForReversedArgument(1));
+ __ movq(exponent, StackOperandForReversedArgument(0));
__ JumpIfSmi(base, &base_is_smi, Label::kNear);
__ CompareRoot(FieldOperand(base, HeapObject::kMapOffset),
Heap::kHeapNumberMapRootIndex);
@@ -2243,7 +2243,7 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
Factory* factory = masm->isolate()->factory();
- __ SmiToInteger64(rbx, Operand(rsp, 1 * kPointerSize));
+ __ SmiToInteger64(rbx, StackOperandForReversedArgument(0));
// rbx = parameter count (untagged)
// Check if the calling frame is an arguments adaptor frame.
@@ -2265,7 +2265,7 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
ArgumentsAdaptorFrameConstants::kLengthOffset));
__ lea(rdx, Operand(rdx, rcx, times_pointer_size,
StandardFrameConstants::kCallerSPOffset));
- __ movq(Operand(rsp, 2 * kPointerSize), rdx);
+ __ movq(StackOperandForReversedArgument(1), rdx);
// rbx = parameter count (untagged)
// rcx = argument count (untagged)
@@ -2326,7 +2326,7 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
// Set up the callee in-object property.
STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1);
- __ movq(rdx, Operand(rsp, 3 * kPointerSize));
+ __ movq(rdx, StackOperandForReversedArgument(2));
__ movq(FieldOperand(rax, JSObject::kHeaderSize +
Heap::kArgumentsCalleeIndex * kPointerSize),
rdx);
@@ -2377,7 +2377,7 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
// Load tagged parameter count into r9.
__ Integer32ToSmi(r9, rbx);
__ Move(r8, Smi::FromInt(Context::MIN_CONTEXT_SLOTS));
- __ addq(r8, Operand(rsp, 1 * kPointerSize));
+ __ addq(r8, StackOperandForReversedArgument(0));
__ subq(r8, r9);
__ Move(r11, factory->the_hole_value());
__ movq(rdx, rdi);
@@ -2416,7 +2416,7 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
Label arguments_loop, arguments_test;
__ movq(r8, rbx);
- __ movq(rdx, Operand(rsp, 2 * kPointerSize));
+ __ movq(rdx, StackOperandForReversedArgument(1));
// Untag rcx for the loop below.
__ SmiToInteger64(rcx, rcx);
__ lea(kScratchRegister, Operand(r8, times_pointer_size, 0));
@@ -2443,7 +2443,7 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
// rcx = argument count (untagged)
__ bind(&runtime);
__ Integer32ToSmi(rcx, rcx);
- __ movq(Operand(rsp, 1 * kPointerSize), rcx); // Patch argument count.
+ __ movq(StackOperandForReversedArgument(0), rcx); // Patch argument count.
__ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
}
@@ -2463,11 +2463,11 @@ void ArgumentsAccessStub::GenerateNewNonStrictSlow(MacroAssembler* masm) {
// Patch the arguments.length and the parameters pointer.
__ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
- __ movq(Operand(rsp, 1 * kPointerSize), rcx);
+ __ movq(StackOperandForReversedArgument(0), rcx);
__ SmiToInteger64(rcx, rcx);
__ lea(rdx, Operand(rdx, rcx, times_pointer_size,
StandardFrameConstants::kCallerSPOffset));
- __ movq(Operand(rsp, 2 * kPointerSize), rdx);
+ __ movq(StackOperandForReversedArgument(1), rdx);
__ bind(&runtime);
__ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
@@ -2488,18 +2488,18 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
__ j(equal, &adaptor_frame);
// Get the length from the frame.
- __ movq(rcx, Operand(rsp, 1 * kPointerSize));
+ __ movq(rcx, StackOperandForReversedArgument(0));
__ SmiToInteger64(rcx, rcx);
__ jmp(&try_allocate);
// Patch the arguments.length and the parameters pointer.
__ bind(&adaptor_frame);
__ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
- __ movq(Operand(rsp, 1 * kPointerSize), rcx);
+ __ movq(StackOperandForReversedArgument(0), rcx);
__ SmiToInteger64(rcx, rcx);
__ lea(rdx, Operand(rdx, rcx, times_pointer_size,
StandardFrameConstants::kCallerSPOffset));
- __ movq(Operand(rsp, 2 * kPointerSize), rdx);
+ __ movq(StackOperandForReversedArgument(1), rdx);
// Try the new space allocation. Start out with computing the size of
// the arguments object and the elements array.
@@ -2529,7 +2529,7 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
// Get the length (smi tagged) and set that as an in-object property too.
STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
- __ movq(rcx, Operand(rsp, 1 * kPointerSize));
+ __ movq(rcx, StackOperandForReversedArgument(0));
__ movq(FieldOperand(rax, JSObject::kHeaderSize +
Heap::kArgumentsLengthIndex * kPointerSize),
rcx);
@@ -2540,7 +2540,7 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
__ j(zero, &done);
// Get the parameters pointer from the stack.
- __ movq(rdx, Operand(rsp, 2 * kPointerSize));
+ __ movq(rdx, StackOperandForReversedArgument(1));
// Set up the elements pointer in the allocated arguments object and
// initialize the header in the elements fixed array.
@@ -3023,7 +3023,7 @@ void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
const int kMaxInlineLength = 100;
Label slowcase;
Label done;
- __ movq(r8, Operand(rsp, kPointerSize * 3));
+ __ movq(r8, StackOperandForReversedArgument(2));
__ JumpIfNotSmi(r8, &slowcase);
__ SmiToInteger32(rbx, r8);
__ cmpl(rbx, Immediate(kMaxInlineLength));
@@ -3061,11 +3061,11 @@ void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
__ movq(FieldOperand(rax, JSObject::kElementsOffset), rcx);
// Set input, index and length fields from arguments.
- __ movq(r8, Operand(rsp, kPointerSize * 1));
+ __ movq(r8, StackOperandForReversedArgument(0));
__ movq(FieldOperand(rax, JSRegExpResult::kInputOffset), r8);
- __ movq(r8, Operand(rsp, kPointerSize * 2));
+ __ movq(r8, StackOperandForReversedArgument(1));
__ movq(FieldOperand(rax, JSRegExpResult::kIndexOffset), r8);
- __ movq(r8, Operand(rsp, kPointerSize * 3));
+ __ movq(r8, StackOperandForReversedArgument(2));
__ movq(FieldOperand(rax, JSArray::kLengthOffset), r8);
// Fill out the elements FixedArray.
@@ -3196,7 +3196,7 @@ void NumberToStringStub::GenerateConvertHashCodeToIndex(MacroAssembler* masm,
void NumberToStringStub::Generate(MacroAssembler* masm) {
Label runtime;
- __ movq(rbx, Operand(rsp, kPointerSize));
+ __ movq(rbx, StackOperandForReversedArgument(0));
// Generate code to lookup number in the number string cache.
GenerateLookupNumberStringCache(masm, rbx, rax, r8, r9, &runtime);
@@ -3615,15 +3615,14 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
if (ReceiverMightBeImplicit()) {
Label call;
// Get the receiver from the stack.
- // +1 ~ return address
- __ movq(rax, Operand(rsp, (argc_ + 1) * kPointerSize));
+ __ movq(rax, StackOperandForReceiver(argc_));
// Call as function is indicated with the hole.
__ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
__ j(not_equal, &call, Label::kNear);
// Patch the receiver on the stack with the global receiver object.
__ movq(rcx, GlobalObjectOperand());
__ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalReceiverOffset));
- __ movq(Operand(rsp, (argc_ + 1) * kPointerSize), rcx);
+ __ movq(StackOperandForReceiver(argc_), rcx);
__ bind(&call);
}
@@ -3685,7 +3684,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
// CALL_NON_FUNCTION expects the non-function callee as receiver (instead
// of the original receiver from the call site).
__ bind(&non_function);
- __ movq(Operand(rsp, (argc_ + 1) * kPointerSize), rdi);
+ __ movq(StackOperandForReceiver(argc_), rdi);
__ Set(rax, argc_);
__ Set(rbx, 0);
__ SetCallKind(rcx, CALL_AS_METHOD);
@@ -4221,11 +4220,12 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
// Only the inline check flag is supported on X64.
ASSERT(flags_ == kNoFlags || HasCallSiteInlineCheck());
int extra_stack_space = HasCallSiteInlineCheck() ? kPointerSize : 0;
+ int extra_argument_offset = extra_stack_space / kPointerSize;
danno
2013/08/02 08:48:00
Just make this:
int extra_argument_offset = HasCallSiteInlineCheck() ? 1 : 0;
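Applied to the hunk above, danno's suggestion would presumably read as follows (a sketch of the follow-up, not part of the uploaded patch; this assumes extra_stack_space is still needed elsewhere in the stub, since the diff does not remove it):

  int extra_stack_space = HasCallSiteInlineCheck() ? kPointerSize : 0;
- int extra_argument_offset = extra_stack_space / kPointerSize;
+ int extra_argument_offset = HasCallSiteInlineCheck() ? 1 : 0;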
// Get the object - go slow case if it's a smi.
Label slow;
- __ movq(rax, Operand(rsp, 2 * kPointerSize + extra_stack_space));
+ __ movq(rax, StackOperandForReversedArgument(1 + extra_argument_offset));
__ JumpIfSmi(rax, &slow);
// Check that the left hand is a JS object. Leave its map in rax.
@@ -4235,7 +4235,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
__ j(above, &slow);
// Get the prototype of the function.
- __ movq(rdx, Operand(rsp, 1 * kPointerSize + extra_stack_space));
+ __ movq(rdx, StackOperandForReversedArgument(extra_argument_offset));
// rdx is function, rax is map.
// If there is a call site cache don't look in the global cache, but do the
@@ -4271,7 +4271,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
} else {
// Get return address and delta to inlined map check.
__ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize));
- __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize));
+ __ subq(kScratchRegister, StackOperandForReversedArgument(0));
if (FLAG_debug_code) {
__ movl(rdi, Immediate(kWordBeforeMapCheckValue));
__ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi);
@@ -4312,7 +4312,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
ASSERT(true_offset >= 0 && true_offset < 0x100);
__ movl(rax, Immediate(true_offset));
__ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize));
- __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize));
+ __ subq(kScratchRegister, StackOperandForReversedArgument(0));
__ movb(Operand(kScratchRegister, kOffsetToResultValue), rax);
if (FLAG_debug_code) {
__ movl(rax, Immediate(kWordBeforeResultValue));
@@ -4335,7 +4335,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
ASSERT(false_offset >= 0 && false_offset < 0x100);
__ movl(rax, Immediate(false_offset));
__ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize));
- __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize));
+ __ subq(kScratchRegister, StackOperandForReversedArgument(0));
__ movb(Operand(kScratchRegister, kOffsetToResultValue), rax);
if (FLAG_debug_code) {
__ movl(rax, Immediate(kWordBeforeResultValue));
@@ -4501,8 +4501,8 @@ void StringAddStub::Generate(MacroAssembler* masm) {
Builtins::JavaScript builtin_id = Builtins::ADD;
// Load the two arguments.
- __ movq(rax, Operand(rsp, 2 * kPointerSize)); // First argument (left).
- __ movq(rdx, Operand(rsp, 1 * kPointerSize)); // Second argument (right).
+ __ movq(rax, StackOperandForReversedArgument(1)); // First argument (left).
+ __ movq(rdx, StackOperandForReversedArgument(0)); // Second argument (right).
// Make sure that both arguments are strings if not known in advance.
// Otherwise, at least one of the arguments is definitely a string,
@@ -5509,8 +5509,8 @@ void StringCompareStub::Generate(MacroAssembler* masm) {
// rsp[8] : right string
// rsp[16] : left string
- __ movq(rdx, Operand(rsp, 2 * kPointerSize)); // left
- __ movq(rax, Operand(rsp, 1 * kPointerSize)); // right
+ __ movq(rdx, StackOperandForReversedArgument(1)); // left
+ __ movq(rax, StackOperandForReversedArgument(0)); // right
// Check for identity.
Label not_same;
@@ -6025,7 +6025,7 @@ void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
// (their names are the null value).
for (int i = kInlinedProbes; i < kTotalProbes; i++) {
// Compute the masked index: (hash + i + i * i) & mask.
- __ movq(scratch, Operand(rsp, 2 * kPointerSize));
+ __ movq(scratch, StackOperandForReversedArgument(1));
if (i > 0) {
__ addl(scratch, Immediate(NameDictionary::GetProbeOffset(i)));
}
@@ -6045,7 +6045,7 @@ void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
__ j(equal, &not_in_dictionary);
// Stop if found the property.
- __ cmpq(scratch, Operand(rsp, 3 * kPointerSize));
+ __ cmpq(scratch, StackOperandForReversedArgument(2));
__ j(equal, &in_dictionary);
if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) {
@@ -6397,8 +6397,8 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
Label fast_elements;
// Get array literal index, array literal and its map.
- __ movq(rdx, Operand(rsp, 1 * kPointerSize));
- __ movq(rbx, Operand(rsp, 2 * kPointerSize));
+ __ movq(rdx, StackOperandForReversedArgument(0));
+ __ movq(rbx, StackOperandForReversedArgument(1));
__ movq(rdi, FieldOperand(rbx, JSObject::kMapOffset));
__ CheckFastElements(rdi, &double_elements);
@@ -6567,7 +6567,7 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm) {
__ j(not_zero, &normal_sequence);
// look at the first argument
- __ movq(rcx, Operand(rsp, kPointerSize));
+ __ movq(rcx, StackOperandForReversedArgument(0));
__ testq(rcx, rcx);
__ j(zero, &normal_sequence);
@@ -6746,7 +6746,7 @@ void InternalArrayConstructorStub::GenerateCase(
if (IsFastPackedElementsKind(kind)) {
// We might need to create a holey array
// look at the first argument
- __ movq(rcx, Operand(rsp, kPointerSize));
+ __ movq(rcx, StackOperandForReversedArgument(0));
__ testq(rcx, rcx);
__ j(zero, &normal_sequence);
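For reference, under the hypothetical helper definitions sketched at the top, every substitution in this patch is offset-for-offset equivalent to the line it replaces. Taking the MathPowStub hunk as a worked example (register names as in the hunk; the equivalences in the comments are inferred, not taken from the CL):

// exponent was pushed last, so it is reversed argument 0, one word
// above the return address; base was pushed just before it.
__ movq(base, StackOperandForReversedArgument(1));      // == Operand(rsp, 2 * kPointerSize)
__ movq(exponent, StackOperandForReversedArgument(0));  // == Operand(rsp, 1 * kPointerSize)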