Chromium Code Reviews

Unified Diff: src/x64/code-stubs-x64.cc

Issue 156663002: Use StackArgumentsAccessor to access receiver on stack, use kPCOnStackSize to … (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 6 years, 10 months ago
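
Reviewer note: this CL replaces hand-written stack-offset arithmetic with two abstractions from the x64 port, StackArgumentsAccessor (for reaching arguments and the receiver) and kPCOnStackSize (the size of the return-address slot that a call instruction pushes). The sketch below illustrates the layout assumption being removed; it is a simplified illustration using the classic-x64 constant values, not the actual V8 implementation.

  // Sketch only: the stub's view of the stack on entry (higher addresses
  // toward the top of this diagram).
  //
  //   rsp + kPCOnStackSize + argc * kPointerSize   <- receiver
  //   ...                                          <- arguments
  //   rsp + kPCOnStackSize                         <- last argument
  //   rsp + 0                                      <- return address (PC)
  //
  constexpr int kPointerSize = 8;                // size of a tagged pointer
  constexpr int kRegisterSize = 8;               // GP registers are 64-bit
  constexpr int kPCOnStackSize = kRegisterSize;  // a call pushes a full word

  // The old code reached the receiver with (argc + 1) * kPointerSize, which
  // silently assumes the PC slot is pointer-sized.  Keeping the two terms
  // apart stays correct if pointer size and register size ever diverge (x32):
  constexpr int ReceiverOffset(int argc) {
    return kPCOnStackSize + argc * kPointerSize;
  }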
     1  // Copyright 2013 the V8 project authors. All rights reserved.
     2  // Redistribution and use in source and binary forms, with or without
     3  // modification, are permitted provided that the following conditions are
     4  // met:
     5  //
     6  //     * Redistributions of source code must retain the above copyright
     7  //       notice, this list of conditions and the following disclaimer.
     8  //     * Redistributions in binary form must reproduce the above
     9  //       copyright notice, this list of conditions and the following
    10  //       disclaimer in the documentation and/or other materials provided
(...skipping 2266 matching lines...)
  2277      __ j(not_equal, &cont);
  2278
  2279      // Do not transform the receiver for natives.
  2280      // SharedFunctionInfo is already loaded into rcx.
  2281      __ testb(FieldOperand(rcx, SharedFunctionInfo::kNativeByteOffset),
  2282               Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
  2283      __ j(not_equal, &cont);
  2284    }
  2285
  2286    // Load the receiver from the stack.
- 2287    __ movp(rax, Operand(rsp, (argc_ + 1) * kPointerSize));
+ 2287    __ movp(rax, args.GetReceiverOperand());
  2288
  2289    if (NeedsChecks()) {
  2290      __ JumpIfSmi(rax, &wrap);
  2291
  2292      __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
  2293      __ j(below, &wrap);
  2294    } else {
  2295      __ jmp(&wrap);
  2296    }
  2297
(...skipping 40 matching lines...)
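
The checks above decide whether the receiver must be boxed before a sloppy-mode method call: a Smi, or any object whose instance type is below FIRST_SPEC_OBJECT_TYPE, takes the &wrap path. A C++-style sketch of the same decision (the helper name and signature are hypothetical; only the constants and branch conditions come from the code above):

  #include <cstdint>

  // Mirrors JumpIfSmi(rax, &wrap) followed by
  // CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx); j(below, &wrap).
  bool ReceiverNeedsWrapping(uint64_t tagged_receiver, int instance_type,
                             int first_spec_object_type) {
    const uint64_t kSmiTagMask = 1;  // V8 Smis carry a 0 low tag bit
    if ((tagged_receiver & kSmiTagMask) == 0) return true;  // Smi receiver
    return instance_type < first_spec_object_type;  // primitive-like object
  }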
  2338
  2339    if (CallAsMethod()) {
  2340      __ bind(&wrap);
  2341      // Wrap the receiver and patch it back onto the stack.
  2342      { FrameScope frame_scope(masm, StackFrame::INTERNAL);
  2343        __ push(rdi);
  2344        __ push(rax);
  2345        __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  2346        __ pop(rdi);
  2347      }
- 2348      __ movp(Operand(rsp, (argc_ + 1) * kPointerSize), rax);
+ 2348      __ movp(args.GetReceiverOperand(), rax);
  2349      __ jmp(&cont);
  2350    }
  2351  }
  2352
  2353
  2354  void CallConstructStub::Generate(MacroAssembler* masm) {
  2355    // rax : number of arguments
  2356    // rbx : cache cell for call target
  2357    // rdi : constructor function
  2358    Label slow, non_function_call;
(...skipping 550 matching lines...)
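
Both receiver accesses in this stub, the load at 2287 and the store-back at 2348, now go through the same accessor, so the layout arithmetic lives in one place. A minimal sketch of what such an accessor computes, under the same assumed constants as the note at the top (this is an illustration, not V8's StackArgumentsAccessor):

  // Illustrative accessor: index 0 is the receiver, deepest on the stack;
  // higher indices sit closer to rsp.  Offsets are relative to rsp.
  class ArgsAccessorSketch {
   public:
    explicit ArgsAccessorSketch(int argc) : argc_(argc) {}
    int GetArgumentOffset(int index) const {
      // The PC slot is skipped explicitly instead of via a "+ 1" slot.
      return kPCOnStackSize + (argc_ - index) * kPointerSize;
    }
    int GetReceiverOffset() const { return GetArgumentOffset(0); }
   private:
    static const int kPointerSize = 8;
    static const int kPCOnStackSize = 8;
    int argc_;
  };

With argc_ == 2, GetReceiverOffset() yields 8 + 2 * 8 == 24, the same as the old (argc_ + 1) * kPointerSize on classic x64, which is why the substitution is behavior-preserving there.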
  2909
  2910    // Register mapping:
  2911    //   rax is object map.
  2912    //   rdx is function.
  2913    //   rbx is function prototype.
  2914    if (!HasCallSiteInlineCheck()) {
  2915      __ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
  2916      __ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex);
  2917    } else {
  2918      // Get return address and delta to inlined map check.
- 2919      __ movp(kScratchRegister, StackOperandForReturnAddress(0));
+ 2919      __ movq(kScratchRegister, StackOperandForReturnAddress(0));
  2920      __ subq(kScratchRegister, args.GetArgumentOperand(2));
  2921      if (FLAG_debug_code) {
  2922        __ movl(rdi, Immediate(kWordBeforeMapCheckValue));
  2923        __ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi);
  2924        __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCheck);
  2925      }
  2926      __ movp(kScratchRegister,
  2927              Operand(kScratchRegister, kOffsetToMapCheckValue));
  2928      __ movp(Operand(kScratchRegister, 0), rax);
  2929    }
(...skipping 20 matching lines...)
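
The movp -> movq change here (and at the matching sites 2960 and 2983 below) is the second half of the CL: movp moves a pointer-sized value, while the return address pushed by a call instruction always occupies a full 64-bit stack slot, so it must be read with movq. A hedged sketch of the failure the distinction avoids (the memcpy stand-in for a partial load is illustrative):

  #include <cstdint>
  #include <cstring>

  // A movp-style load copies kPointerSize bytes; the saved PC slot always
  // holds 8.  With a 4-byte pointer size (an x32-style build), a movp-style
  // load would return only the low half of the PC before the subq that
  // computes the delta to the inlined map check.
  uint64_t LoadReturnAddress(const uint8_t* slot, int pointer_size) {
    uint64_t pc = 0;
    std::memcpy(&pc, slot, pointer_size);  // 4 -> truncated, 8 -> full PC
    return pc;
  }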
  2950      // Store bitwise zero in the cache. This is a Smi in GC terms.
  2951      STATIC_ASSERT(kSmiTag == 0);
  2952      __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
  2953    } else {
  2954      // Store offset of true in the root array at the inline check site.
  2955      int true_offset = 0x100 +
  2956          (Heap::kTrueValueRootIndex << kPointerSizeLog2) - kRootRegisterBias;
  2957      // Assert it is a 1-byte signed value.
  2958      ASSERT(true_offset >= 0 && true_offset < 0x100);
  2959      __ movl(rax, Immediate(true_offset));
- 2960      __ movp(kScratchRegister, StackOperandForReturnAddress(0));
+ 2960      __ movq(kScratchRegister, StackOperandForReturnAddress(0));
  2961      __ subq(kScratchRegister, args.GetArgumentOperand(2));
  2962      __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax);
  2963      if (FLAG_debug_code) {
  2964        __ movl(rax, Immediate(kWordBeforeResultValue));
  2965        __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
  2966        __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
  2967      }
  2968      __ Set(rax, 0);
  2969    }
  2970    __ ret((2 + extra_argument_offset) * kPointerSize);
  2971
  2972    __ bind(&is_not_instance);
  2973    if (!HasCallSiteInlineCheck()) {
  2974      // We have to store a non-zero value in the cache.
  2975      __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex);
  2976    } else {
  2977      // Store offset of false in the root array at the inline check site.
  2978      int false_offset = 0x100 +
  2979          (Heap::kFalseValueRootIndex << kPointerSizeLog2) - kRootRegisterBias;
  2980      // Assert it is a 1-byte signed value.
  2981      ASSERT(false_offset >= 0 && false_offset < 0x100);
  2982      __ movl(rax, Immediate(false_offset));
- 2983      __ movp(kScratchRegister, StackOperandForReturnAddress(0));
+ 2983      __ movq(kScratchRegister, StackOperandForReturnAddress(0));
  2984      __ subq(kScratchRegister, args.GetArgumentOperand(2));
  2985      __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax);
  2986      if (FLAG_debug_code) {
  2987        __ movl(rax, Immediate(kWordBeforeResultValue));
  2988        __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
  2989        __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
  2990      }
  2991    }
  2992    __ ret((2 + extra_argument_offset) * kPointerSize);
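
A side note on the patching logic above: true_offset and false_offset are 1-byte displacements, relative to the biased root register, that movb writes into the inline check site. A worked example with an assumed root index (kRootRegisterBias == 128 and kPointerSizeLog2 == 3 are the x64 values; the index 7 is illustrative, the real one lives in heap.h):

  #include <cassert>

  int PatchableRootOffset(int root_index) {
    const int kPointerSizeLog2 = 3;     // x64
    const int kRootRegisterBias = 128;  // x64
    int offset = 0x100 + (root_index << kPointerSizeLog2) - kRootRegisterBias;
    assert(offset >= 0 && offset < 0x100);  // must fit in one byte
    return offset;
  }

  // With root_index == 7: (7 << 3) - 128 == -72, and 0x100 + (-72) == 184
  // (0xB8).  The low byte of 184 has the same two's-complement pattern as
  // the signed displacement -72, so the movb patch stores the right value
  // while the ASSERT can use an unsigned range check.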
  2993
(...skipping 2262 matching lines...)
  5256  #endif
  5257    Register api_function_address = r8;
  5258    Register scratch = rax;
  5259
  5260    // v8::Arguments::values_ and handler for name.
  5261    const int kStackSpace = PropertyCallbackArguments::kArgsLength + 1;
  5262
  5263    // Allocate v8::AccessorInfo in non-GCed stack space.
  5264    const int kArgStackSpace = 1;
  5265
- 5266    __ lea(name_arg, Operand(rsp, 1 * kPointerSize));
+ 5266    __ lea(name_arg, Operand(rsp, kPCOnStackSize));
  5267
  5268    __ PrepareCallApiFunction(kArgStackSpace);
  5269    __ lea(scratch, Operand(name_arg, 1 * kPointerSize));
  5270
  5271    // v8::PropertyAccessorInfo::args_.
  5272    __ movp(StackSpaceOperand(0), scratch);
  5273
  5274    // The context register (rsi) has been saved in PrepareCallApiFunction and
  5275    // could be used to pass arguments.
  5276    __ lea(accessor_info_arg, StackSpaceOperand(0));
(...skipping 17 matching lines...)
  5294        return_value_operand,
  5295        NULL);
  5296  }
  5297
  5298
  5299  #undef __
  5300
  5301  } }  // namespace v8::internal
  5302
  5303  #endif  // V8_TARGET_ARCH_X64
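
Finally, the hunk at 5266 applies the same idea to the API getter stub: name_arg previously skipped the return address with a hard-coded 1 * kPointerSize; kPCOnStackSize names what is actually being skipped. Under the constants assumed throughout these notes, the two forms coincide today only because the sizes happen to match:

  constexpr int kPointerSize = 8;    // classic x64
  constexpr int kPCOnStackSize = 8;  // slot pushed by a call instruction
  static_assert(1 * kPointerSize == kPCOnStackSize,
                "equal on classic x64, but only by coincidence of sizes");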
