Chromium Code Reviews

Unified Diff: src/x64/code-stubs-x64.cc

Issue 21123008: Introduce StackArgumentsAccessor class for X64 (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Addressed nits (created 7 years, 4 months ago)
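
The patch replaces hand-written Operand(rsp, n * kPointerSize) stack accesses throughout the X64 code stubs with a StackArgumentsAccessor that is told the base register, how many explicit arguments are on the stack, whether a receiver slot is present, and an optional extra displacement, and that then hands out the right Operand for each argument index. The real declaration lives in src/x64/macro-assembler-x64.h; the sketch below only reconstructs the interface implied by the call sites in this diff, so the member names, the default receiver mode, and the exact offset arithmetic are assumptions rather than the actual V8 code.

// Sketch only: reconstructed from the call sites in this patch, not the real
// declaration in src/x64/macro-assembler-x64.h.
enum StackArgumentsAccessorReceiverMode {
  ARGUMENTS_CONTAIN_RECEIVER,      // a receiver slot sits above the explicit arguments
  ARGUMENTS_DONT_CONTAIN_RECEIVER  // only the explicit arguments are on the stack
};

class StackArgumentsAccessor {
 public:
  // argument_count is the number of explicit arguments on the stack.
  // extra_displacement_to_last_argument covers call sites where an extra word
  // (besides the return address) sits between rsp and the arguments, as in
  // NameDictionaryLookupStub::Generate below.
  StackArgumentsAccessor(Register base_reg,
                         int argument_count,
                         StackArgumentsAccessorReceiverMode receiver_mode =
                             ARGUMENTS_CONTAIN_RECEIVER,
                         int extra_displacement_to_last_argument = 0)
      : base_reg_(base_reg),
        argument_count_(argument_count),
        receiver_mode_(receiver_mode),
        extra_displacement_(extra_displacement_to_last_argument) {}

  // Index 0 addresses the slot farthest from the stack pointer (the first
  // value pushed); one kPointerSize accounts for the return address that sits
  // between rsp and the last argument.
  Operand GetArgumentOperand(int index) const {
    ASSERT(index >= 0);
    int receiver = (receiver_mode_ == ARGUMENTS_CONTAIN_RECEIVER) ? 1 : 0;
    int displacement_to_last_argument = kPointerSize + extra_displacement_;
    return Operand(base_reg_,
                   displacement_to_last_argument +
                       (argument_count_ + receiver - 1 - index) * kPointerSize);
  }

  // When the arguments contain a receiver it is the first value pushed and is
  // therefore addressed as argument 0.
  Operand GetReceiverOperand() const {
    ASSERT(receiver_mode_ == ARGUMENTS_CONTAIN_RECEIVER);
    return GetArgumentOperand(0);
  }

 private:
  Register base_reg_;
  int argument_count_;
  StackArgumentsAccessorReceiverMode receiver_mode_;
  int extra_displacement_;
};
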
Index: src/x64/code-stubs-x64.cc
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index 551a71690e7a45400ec4d078ef7fcf4e89e39f0b..79e865e309f1727ac7ab53854a3a6173d1d2a0e7 100644
--- a/src/x64/code-stubs-x64.cc
+++ b/src/x64/code-stubs-x64.cc
@@ -317,7 +317,8 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
__ IncrementCounter(counters->fast_new_closure_total(), 1);
// Get the function info from the stack.
- __ movq(rdx, Operand(rsp, 1 * kPointerSize));
+ StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
+ __ movq(rdx, args.GetArgumentOperand(0));
int map_index = Context::FunctionMapIndex(language_mode_, is_generator_);
@@ -425,7 +426,7 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
__ ret(1 * kPointerSize);
__ bind(&restore);
- __ movq(rdx, Operand(rsp, 1 * kPointerSize));
+ __ movq(rdx, args.GetArgumentOperand(0));
__ jmp(&install_unoptimized);
// Create a new closure through the slower runtime call.
@@ -448,7 +449,8 @@ void FastNewContextStub::Generate(MacroAssembler* masm) {
rax, rbx, rcx, &gc, TAG_OBJECT);
// Get the function from the stack.
- __ movq(rcx, Operand(rsp, 1 * kPointerSize));
+ StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
+ __ movq(rcx, args.GetArgumentOperand(0));
// Set up the object header.
__ LoadRoot(kScratchRegister, Heap::kFunctionContextMapRootIndex);
@@ -494,10 +496,10 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
rax, rbx, rcx, &gc, TAG_OBJECT);
// Get the function from the stack.
- __ movq(rcx, Operand(rsp, 1 * kPointerSize));
-
+ StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
+ __ movq(rcx, args.GetArgumentOperand(1));
// Get the serialized scope info from the stack.
- __ movq(rbx, Operand(rsp, 2 * kPointerSize));
+ __ movq(rbx, args.GetArgumentOperand(0));
// Set up the object header.
__ LoadRoot(kScratchRegister, Heap::kBlockContextMapRootIndex);
@@ -1276,7 +1278,8 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
if (tagged) {
Label input_not_smi, loaded;
// Test that rax is a number.
- __ movq(rax, Operand(rsp, kPointerSize));
+ StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
+ __ movq(rax, args.GetArgumentOperand(0));
__ JumpIfNotSmi(rax, &input_not_smi, Label::kNear);
// Input is a smi. Untag and load it onto the FPU stack.
// Then load the bits of the double into rbx.
@@ -1809,8 +1812,9 @@ void MathPowStub::Generate(MacroAssembler* masm) {
// The exponent and base are supplied as arguments on the stack.
// This can only happen if the stub is called from non-optimized code.
// Load input parameters from stack.
- __ movq(base, Operand(rsp, 2 * kPointerSize));
- __ movq(exponent, Operand(rsp, 1 * kPointerSize));
+ StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
+ __ movq(base, args.GetArgumentOperand(0));
+ __ movq(exponent, args.GetArgumentOperand(1));
__ JumpIfSmi(base, &base_is_smi, Label::kNear);
__ CompareRoot(FieldOperand(base, HeapObject::kMapOffset),
Heap::kHeapNumberMapRootIndex);
@@ -2243,7 +2247,8 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
Factory* factory = masm->isolate()->factory();
- __ SmiToInteger64(rbx, Operand(rsp, 1 * kPointerSize));
+ StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER);
+ __ SmiToInteger64(rbx, args.GetArgumentOperand(2));
// rbx = parameter count (untagged)
// Check if the calling frame is an arguments adaptor frame.
@@ -2265,7 +2270,7 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
ArgumentsAdaptorFrameConstants::kLengthOffset));
__ lea(rdx, Operand(rdx, rcx, times_pointer_size,
StandardFrameConstants::kCallerSPOffset));
- __ movq(Operand(rsp, 2 * kPointerSize), rdx);
+ __ movq(args.GetArgumentOperand(1), rdx);
// rbx = parameter count (untagged)
// rcx = argument count (untagged)
@@ -2326,7 +2331,7 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
// Set up the callee in-object property.
STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1);
- __ movq(rdx, Operand(rsp, 3 * kPointerSize));
+ __ movq(rdx, args.GetArgumentOperand(0));
__ movq(FieldOperand(rax, JSObject::kHeaderSize +
Heap::kArgumentsCalleeIndex * kPointerSize),
rdx);
@@ -2377,7 +2382,7 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
// Load tagged parameter count into r9.
__ Integer32ToSmi(r9, rbx);
__ Move(r8, Smi::FromInt(Context::MIN_CONTEXT_SLOTS));
- __ addq(r8, Operand(rsp, 1 * kPointerSize));
+ __ addq(r8, args.GetArgumentOperand(2));
__ subq(r8, r9);
__ Move(r11, factory->the_hole_value());
__ movq(rdx, rdi);
@@ -2416,7 +2421,7 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
Label arguments_loop, arguments_test;
__ movq(r8, rbx);
- __ movq(rdx, Operand(rsp, 2 * kPointerSize));
+ __ movq(rdx, args.GetArgumentOperand(1));
// Untag rcx for the loop below.
__ SmiToInteger64(rcx, rcx);
__ lea(kScratchRegister, Operand(r8, times_pointer_size, 0));
@@ -2443,7 +2448,7 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
// rcx = argument count (untagged)
__ bind(&runtime);
__ Integer32ToSmi(rcx, rcx);
- __ movq(Operand(rsp, 1 * kPointerSize), rcx); // Patch argument count.
+ __ movq(args.GetArgumentOperand(2), rcx); // Patch argument count.
__ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
}
@@ -2462,12 +2467,13 @@ void ArgumentsAccessStub::GenerateNewNonStrictSlow(MacroAssembler* masm) {
__ j(not_equal, &runtime);
// Patch the arguments.length and the parameters pointer.
+ StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER);
__ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
- __ movq(Operand(rsp, 1 * kPointerSize), rcx);
+ __ movq(args.GetArgumentOperand(2), rcx);
__ SmiToInteger64(rcx, rcx);
__ lea(rdx, Operand(rdx, rcx, times_pointer_size,
StandardFrameConstants::kCallerSPOffset));
- __ movq(Operand(rsp, 2 * kPointerSize), rdx);
+ __ movq(args.GetArgumentOperand(1), rdx);
__ bind(&runtime);
__ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
@@ -2488,18 +2494,19 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
__ j(equal, &adaptor_frame);
// Get the length from the frame.
- __ movq(rcx, Operand(rsp, 1 * kPointerSize));
+ StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER);
+ __ movq(rcx, args.GetArgumentOperand(2));
__ SmiToInteger64(rcx, rcx);
__ jmp(&try_allocate);
// Patch the arguments.length and the parameters pointer.
__ bind(&adaptor_frame);
__ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
- __ movq(Operand(rsp, 1 * kPointerSize), rcx);
+ __ movq(args.GetArgumentOperand(2), rcx);
__ SmiToInteger64(rcx, rcx);
__ lea(rdx, Operand(rdx, rcx, times_pointer_size,
StandardFrameConstants::kCallerSPOffset));
- __ movq(Operand(rsp, 2 * kPointerSize), rdx);
+ __ movq(args.GetArgumentOperand(1), rdx);
// Try the new space allocation. Start out with computing the size of
// the arguments object and the elements array.
@@ -2529,7 +2536,7 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
// Get the length (smi tagged) and set that as an in-object property too.
STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
- __ movq(rcx, Operand(rsp, 1 * kPointerSize));
+ __ movq(rcx, args.GetArgumentOperand(2));
__ movq(FieldOperand(rax, JSObject::kHeaderSize +
Heap::kArgumentsLengthIndex * kPointerSize),
rcx);
@@ -2540,7 +2547,7 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
__ j(zero, &done);
// Get the parameters pointer from the stack.
- __ movq(rdx, Operand(rsp, 2 * kPointerSize));
+ __ movq(rdx, args.GetArgumentOperand(1));
// Set up the elements pointer in the allocated arguments object and
// initialize the header in the elements fixed array.
@@ -3023,7 +3030,8 @@ void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
const int kMaxInlineLength = 100;
Label slowcase;
Label done;
- __ movq(r8, Operand(rsp, kPointerSize * 3));
+ StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER);
+ __ movq(r8, args.GetArgumentOperand(0));
__ JumpIfNotSmi(r8, &slowcase);
__ SmiToInteger32(rbx, r8);
__ cmpl(rbx, Immediate(kMaxInlineLength));
@@ -3061,11 +3069,11 @@ void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
__ movq(FieldOperand(rax, JSObject::kElementsOffset), rcx);
// Set input, index and length fields from arguments.
- __ movq(r8, Operand(rsp, kPointerSize * 1));
+ __ movq(r8, args.GetArgumentOperand(2));
__ movq(FieldOperand(rax, JSRegExpResult::kInputOffset), r8);
- __ movq(r8, Operand(rsp, kPointerSize * 2));
+ __ movq(r8, args.GetArgumentOperand(1));
__ movq(FieldOperand(rax, JSRegExpResult::kIndexOffset), r8);
- __ movq(r8, Operand(rsp, kPointerSize * 3));
+ __ movq(r8, args.GetArgumentOperand(0));
__ movq(FieldOperand(rax, JSArray::kLengthOffset), r8);
// Fill out the elements FixedArray.
@@ -3196,7 +3204,8 @@ void NumberToStringStub::GenerateConvertHashCodeToIndex(MacroAssembler* masm,
void NumberToStringStub::Generate(MacroAssembler* masm) {
Label runtime;
- __ movq(rbx, Operand(rsp, kPointerSize));
+ StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
+ __ movq(rbx, args.GetArgumentOperand(0));
// Generate code to lookup number in the number string cache.
GenerateLookupNumberStringCache(masm, rbx, rax, r8, r9, &runtime);
@@ -3608,6 +3617,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
// rdi : the function to call
Isolate* isolate = masm->isolate();
Label slow, non_function;
+ StackArgumentsAccessor args(rsp, argc_);
// The receiver might implicitly be the global object. This is
// indicated by passing the hole as the receiver to the call
@@ -3615,15 +3625,14 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
if (ReceiverMightBeImplicit()) {
Label call;
// Get the receiver from the stack.
- // +1 ~ return address
- __ movq(rax, Operand(rsp, (argc_ + 1) * kPointerSize));
+ __ movq(rax, args.GetReceiverOperand());
// Call as function is indicated with the hole.
__ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
__ j(not_equal, &call, Label::kNear);
// Patch the receiver on the stack with the global receiver object.
__ movq(rcx, GlobalObjectOperand());
__ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalReceiverOffset));
- __ movq(Operand(rsp, (argc_ + 1) * kPointerSize), rcx);
+ __ movq(args.GetReceiverOperand(), rcx);
__ bind(&call);
}
@@ -3685,7 +3694,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
// CALL_NON_FUNCTION expects the non-function callee as receiver (instead
// of the original receiver from the call site).
__ bind(&non_function);
- __ movq(Operand(rsp, (argc_ + 1) * kPointerSize), rdi);
+ __ movq(args.GetReceiverOperand(), rdi);
__ Set(rax, argc_);
__ Set(rbx, 0);
__ SetCallKind(rcx, CALL_AS_METHOD);
@@ -4220,12 +4229,13 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
static const unsigned int kWordBeforeResultValue = 0x458B4909;
// Only the inline check flag is supported on X64.
ASSERT(flags_ == kNoFlags || HasCallSiteInlineCheck());
- int extra_stack_space = HasCallSiteInlineCheck() ? kPointerSize : 0;
+ int extra_argument_offset = HasCallSiteInlineCheck() ? 1 : 0;
// Get the object - go slow case if it's a smi.
Label slow;
-
- __ movq(rax, Operand(rsp, 2 * kPointerSize + extra_stack_space));
+ StackArgumentsAccessor args(rsp, 2 + extra_argument_offset,
+ ARGUMENTS_DONT_CONTAIN_RECEIVER);
+ __ movq(rax, args.GetArgumentOperand(0));
__ JumpIfSmi(rax, &slow);
// Check that the left hand is a JS object. Leave its map in rax.
@@ -4235,7 +4245,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
__ j(above, &slow);
// Get the prototype of the function.
- __ movq(rdx, Operand(rsp, 1 * kPointerSize + extra_stack_space));
+ __ movq(rdx, args.GetArgumentOperand(1));
// rdx is function, rax is map.
// If there is a call site cache don't look in the global cache, but do the
@@ -4270,8 +4280,8 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
__ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex);
} else {
// Get return address and delta to inlined map check.
- __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize));
- __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize));
+ __ movq(kScratchRegister, StackOperandForReturnAddress(0));
+ __ subq(kScratchRegister, args.GetArgumentOperand(2));
if (FLAG_debug_code) {
__ movl(rdi, Immediate(kWordBeforeMapCheckValue));
__ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi);
@@ -4311,8 +4321,8 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
// Assert it is a 1-byte signed value.
ASSERT(true_offset >= 0 && true_offset < 0x100);
__ movl(rax, Immediate(true_offset));
- __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize));
- __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize));
+ __ movq(kScratchRegister, StackOperandForReturnAddress(0));
+ __ subq(kScratchRegister, args.GetArgumentOperand(2));
__ movb(Operand(kScratchRegister, kOffsetToResultValue), rax);
if (FLAG_debug_code) {
__ movl(rax, Immediate(kWordBeforeResultValue));
@@ -4321,7 +4331,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
}
__ Set(rax, 0);
}
- __ ret(2 * kPointerSize + extra_stack_space);
+ __ ret((2 + extra_argument_offset) * kPointerSize);
__ bind(&is_not_instance);
if (!HasCallSiteInlineCheck()) {
@@ -4334,8 +4344,8 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
// Assert it is a 1-byte signed value.
ASSERT(false_offset >= 0 && false_offset < 0x100);
__ movl(rax, Immediate(false_offset));
- __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize));
- __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize));
+ __ movq(kScratchRegister, StackOperandForReturnAddress(0));
+ __ subq(kScratchRegister, args.GetArgumentOperand(2));
__ movb(Operand(kScratchRegister, kOffsetToResultValue), rax);
if (FLAG_debug_code) {
__ movl(rax, Immediate(kWordBeforeResultValue));
@@ -4343,7 +4353,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
__ Assert(equal, "InstanceofStub unexpected call site cache (mov)");
}
}
- __ ret(2 * kPointerSize + extra_stack_space);
+ __ ret((2 + extra_argument_offset) * kPointerSize);
// Slow-case: Go through the JavaScript implementation.
__ bind(&slow);
@@ -4501,8 +4511,9 @@ void StringAddStub::Generate(MacroAssembler* masm) {
Builtins::JavaScript builtin_id = Builtins::ADD;
// Load the two arguments.
- __ movq(rax, Operand(rsp, 2 * kPointerSize)); // First argument (left).
- __ movq(rdx, Operand(rsp, 1 * kPointerSize)); // Second argument (right).
+ StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
+ __ movq(rax, args.GetArgumentOperand(0)); // First argument (left).
+ __ movq(rdx, args.GetArgumentOperand(1)); // Second argument (right).
// Make sure that both arguments are strings if not known in advance.
// Otherwise, at least one of the arguments is definitely a string,
@@ -5509,8 +5520,9 @@ void StringCompareStub::Generate(MacroAssembler* masm) {
// rsp[8] : right string
// rsp[16] : left string
- __ movq(rdx, Operand(rsp, 2 * kPointerSize)); // left
- __ movq(rax, Operand(rsp, 1 * kPointerSize)); // right
+ StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
+ __ movq(rdx, args.GetArgumentOperand(0)); // left
+ __ movq(rax, args.GetArgumentOperand(1)); // right
// Check for identity.
Label not_same;
@@ -6023,9 +6035,11 @@ void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
// undefined value), it guarantees the hash table doesn't contain the
// property. It's true even if some slots represent deleted properties
// (their names are the null value).
+ StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER,
+ kPointerSize);
for (int i = kInlinedProbes; i < kTotalProbes; i++) {
// Compute the masked index: (hash + i + i * i) & mask.
- __ movq(scratch, Operand(rsp, 2 * kPointerSize));
+ __ movq(scratch, args.GetArgumentOperand(1));
if (i > 0) {
__ addl(scratch, Immediate(NameDictionary::GetProbeOffset(i)));
}
@@ -6045,7 +6059,7 @@ void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
__ j(equal, &not_in_dictionary);
// Stop if found the property.
- __ cmpq(scratch, Operand(rsp, 3 * kPointerSize));
+ __ cmpq(scratch, args.GetArgumentOperand(0));
__ j(equal, &in_dictionary);
if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) {
@@ -6397,8 +6411,9 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
Label fast_elements;
// Get array literal index, array literal and its map.
- __ movq(rdx, Operand(rsp, 1 * kPointerSize));
- __ movq(rbx, Operand(rsp, 2 * kPointerSize));
+ StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
+ __ movq(rdx, args.GetArgumentOperand(1));
+ __ movq(rbx, args.GetArgumentOperand(0));
__ movq(rdi, FieldOperand(rbx, JSObject::kMapOffset));
__ CheckFastElements(rdi, &double_elements);
@@ -6567,7 +6582,8 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm) {
__ j(not_zero, &normal_sequence);
// look at the first argument
- __ movq(rcx, Operand(rsp, kPointerSize));
+ StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
+ __ movq(rcx, args.GetArgumentOperand(0));
__ testq(rcx, rcx);
__ j(zero, &normal_sequence);
@@ -6746,7 +6762,8 @@ void InternalArrayConstructorStub::GenerateCase(
if (IsFastPackedElementsKind(kind)) {
// We might need to create a holey array
// look at the first argument
- __ movq(rcx, Operand(rsp, kPointerSize));
+ StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
+ __ movq(rcx, args.GetArgumentOperand(0));
__ testq(rcx, rcx);
__ j(zero, &normal_sequence);
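
As a sanity check on the indexing convention, the MathPowStub hunk above can be expanded by hand. Using the offset formula assumed in the sketch at the top (one kPointerSize for the return address, argument 0 being the slot farthest from rsp), the new accessor calls reduce to exactly the operands they replace:

// MathPowStub: two explicit arguments, no receiver, base register rsp.
// Offsets follow the formula assumed above:
//   kPointerSize + (count + receiver - 1 - index) * kPointerSize
StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
// index 0: kPointerSize + (2 + 0 - 1 - 0) * kPointerSize == 2 * kPointerSize
__ movq(base, args.GetArgumentOperand(0));      // was Operand(rsp, 2 * kPointerSize)
// index 1: kPointerSize + (2 + 0 - 1 - 1) * kPointerSize == 1 * kPointerSize
__ movq(exponent, args.GetArgumentOperand(1));  // was Operand(rsp, 1 * kPointerSize)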