Chromium Code Reviews

Side by Side Diff: src/x64/code-stubs-x64.cc

Issue 199903002: Introduce Push and Pop macro instructions for x64 (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Rebased with bleeding_edge Created 6 years, 9 months ago
1 // Copyright 2013 the V8 project authors. All rights reserved. 1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 472 matching lines...)
483 483
484 CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate); 484 CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate);
485 int param_count = descriptor->register_param_count_; 485 int param_count = descriptor->register_param_count_;
486 { 486 {
487 // Call the runtime system in a fresh internal frame. 487 // Call the runtime system in a fresh internal frame.
488 FrameScope scope(masm, StackFrame::INTERNAL); 488 FrameScope scope(masm, StackFrame::INTERNAL);
489 ASSERT(descriptor->register_param_count_ == 0 || 489 ASSERT(descriptor->register_param_count_ == 0 ||
490 rax.is(descriptor->register_params_[param_count - 1])); 490 rax.is(descriptor->register_params_[param_count - 1]));
491 // Push arguments 491 // Push arguments
492 for (int i = 0; i < param_count; ++i) { 492 for (int i = 0; i < param_count; ++i) {
493 __ push(descriptor->register_params_[i]); 493 __ Push(descriptor->register_params_[i]);
494 } 494 }
495 ExternalReference miss = descriptor->miss_handler(); 495 ExternalReference miss = descriptor->miss_handler();
496 __ CallExternalReference(miss, descriptor->register_param_count_); 496 __ CallExternalReference(miss, descriptor->register_param_count_);
497 } 497 }
498 498
499 __ Ret(); 499 __ Ret();
500 } 500 }
501 501
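The substitution above (push → Push) is the point of this CL: lowercase push/pop emit the raw hardware instructions, while the new Push/Pop macro instructions operate on pointer-sized values, so the same stub code stays correct even if a tagged pointer is narrower than a machine register (as on the planned x32 port). A minimal standalone sketch of that distinction, with illustrative names rather than V8's actual implementation:

#include <cstdint>
#include <cstdio>

// Illustrative only: kRegisterSize is what a hardware push/pop moves;
// kPointerSize is the size of a tagged value, which an x32-style ABI
// would shrink to 4 while the register stays 8 bytes wide.
constexpr int kRegisterSize = 8;
constexpr int kPointerSize = sizeof(void*);

struct MacroAssemblerSketch {
  long long stack_bytes = 0;  // stand-in for the rsp traffic we would emit

  // Raw register-width push: frame setup and callee-saved registers,
  // where the hardware word size is what matters (pushq in this diff).
  void pushq() { stack_bytes += kRegisterSize; }

  // Pointer-width Push: tagged values and arguments, so call sites stay
  // correct whether a pointer is 8 bytes (x64) or 4 (x32).
  void Push() { stack_bytes += kPointerSize; }
};

int main() {
  MacroAssemblerSketch masm;
  masm.pushq();  // e.g. saving rbp
  masm.Push();   // e.g. pushing a tagged argument
  std::printf("stack grew by %lld bytes\n", masm.stack_bytes);
}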
502 502
503 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { 503 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
(...skipping 30 matching lines...)
534 void DoubleToIStub::Generate(MacroAssembler* masm) { 534 void DoubleToIStub::Generate(MacroAssembler* masm) {
535 Register input_reg = this->source(); 535 Register input_reg = this->source();
536 Register final_result_reg = this->destination(); 536 Register final_result_reg = this->destination();
537 ASSERT(is_truncating()); 537 ASSERT(is_truncating());
538 538
539 Label check_negative, process_64_bits, done; 539 Label check_negative, process_64_bits, done;
540 540
541 int double_offset = offset(); 541 int double_offset = offset();
542 542
543 // Account for return address and saved regs if input is rsp. 543 // Account for return address and saved regs if input is rsp.
544 if (input_reg.is(rsp)) double_offset += 3 * kPointerSize; 544 if (input_reg.is(rsp)) double_offset += 3 * kRegisterSize;
545 545
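The kPointerSize → kRegisterSize change above is the same concern from the other side: when input_reg is rsp, the double must be addressed past three stack slots (the return address plus scratch1 and save_reg, which are saved with pushq below), and those slots are each a full register wide regardless of pointer size. The arithmetic, with the slot count read off the surrounding code:

// pushq always moves a full register, even if kPointerSize were 4 (x32).
constexpr int kRegisterSize = 8;
constexpr int kSlotsAboveInput = 3;  // return address + scratch1 + save_reg
constexpr int kInputOffsetAdjustment = kSlotsAboveInput * kRegisterSize;  // 24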
546 MemOperand mantissa_operand(MemOperand(input_reg, double_offset)); 546 MemOperand mantissa_operand(MemOperand(input_reg, double_offset));
547 MemOperand exponent_operand(MemOperand(input_reg, 547 MemOperand exponent_operand(MemOperand(input_reg,
548 double_offset + kDoubleSize / 2)); 548 double_offset + kDoubleSize / 2));
549 549
550 Register scratch1; 550 Register scratch1;
551 Register scratch_candidates[3] = { rbx, rdx, rdi }; 551 Register scratch_candidates[3] = { rbx, rdx, rdi };
552 for (int i = 0; i < 3; i++) { 552 for (int i = 0; i < 3; i++) {
553 scratch1 = scratch_candidates[i]; 553 scratch1 = scratch_candidates[i];
554 if (!final_result_reg.is(scratch1) && !input_reg.is(scratch1)) break; 554 if (!final_result_reg.is(scratch1) && !input_reg.is(scratch1)) break;
555 } 555 }
556 556
557 // Since we must use rcx for shifts below, use some other register (rax) 557 // Since we must use rcx for shifts below, use some other register (rax)
558 // to calculate the result if rcx is the requested return register. 558 // to calculate the result if rcx is the requested return register.
559 Register result_reg = final_result_reg.is(rcx) ? rax : final_result_reg; 559 Register result_reg = final_result_reg.is(rcx) ? rax : final_result_reg;
560 // Save rcx if it isn't the return register and therefore volatile, or if it 560 // Save rcx if it isn't the return register and therefore volatile, or if it
561 // is the return register, then save the temp register we use in its stead 561 // is the return register, then save the temp register we use in its stead
562 // for the result. 562 // for the result.
563 Register save_reg = final_result_reg.is(rcx) ? rax : rcx; 563 Register save_reg = final_result_reg.is(rcx) ? rax : rcx;
564 __ push(scratch1); 564 __ pushq(scratch1);
565 __ push(save_reg); 565 __ pushq(save_reg);
566 566
567 bool stash_exponent_copy = !input_reg.is(rsp); 567 bool stash_exponent_copy = !input_reg.is(rsp);
568 __ movl(scratch1, mantissa_operand); 568 __ movl(scratch1, mantissa_operand);
569 __ movsd(xmm0, mantissa_operand); 569 __ movsd(xmm0, mantissa_operand);
570 __ movl(rcx, exponent_operand); 570 __ movl(rcx, exponent_operand);
571 if (stash_exponent_copy) __ push(rcx); 571 if (stash_exponent_copy) __ pushq(rcx);
572 572
573 __ andl(rcx, Immediate(HeapNumber::kExponentMask)); 573 __ andl(rcx, Immediate(HeapNumber::kExponentMask));
574 __ shrl(rcx, Immediate(HeapNumber::kExponentShift)); 574 __ shrl(rcx, Immediate(HeapNumber::kExponentShift));
575 __ leal(result_reg, MemOperand(rcx, -HeapNumber::kExponentBias)); 575 __ leal(result_reg, MemOperand(rcx, -HeapNumber::kExponentBias));
576 __ cmpl(result_reg, Immediate(HeapNumber::kMantissaBits)); 576 __ cmpl(result_reg, Immediate(HeapNumber::kMantissaBits));
577 __ j(below, &process_64_bits); 577 __ j(below, &process_64_bits);
578 578
579 // Result is entirely in lower 32-bits of mantissa 579 // Result is entirely in lower 32-bits of mantissa
580 int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize; 580 int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
581 __ subl(rcx, Immediate(delta)); 581 __ subl(rcx, Immediate(delta));
(...skipping 20 matching lines...)
602 602
603 // Restore registers 603 // Restore registers
604 __ bind(&done); 604 __ bind(&done);
605 if (stash_exponent_copy) { 605 if (stash_exponent_copy) {
606 __ addq(rsp, Immediate(kDoubleSize)); 606 __ addq(rsp, Immediate(kDoubleSize));
607 } 607 }
608 if (!final_result_reg.is(result_reg)) { 608 if (!final_result_reg.is(result_reg)) {
609 ASSERT(final_result_reg.is(rcx)); 609 ASSERT(final_result_reg.is(rcx));
610 __ movl(final_result_reg, result_reg); 610 __ movl(final_result_reg, result_reg);
611 } 611 }
612 __ pop(save_reg); 612 __ popq(save_reg);
613 __ pop(scratch1); 613 __ popq(scratch1);
614 __ ret(0); 614 __ ret(0);
615 } 615 }
616 616
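For reference, the exponent/mantissa manipulation above is a truncating double → int32 conversion performed directly on the IEEE-754 bit pattern. A standalone C++ rendering of the same idea — standard IEEE-754 constants written out by hand, not V8's exact control flow:

#include <cstdint>
#include <cstring>

// Truncate with JS ToInt32-style modulo-2^32 semantics, as the stub does.
int32_t TruncateDoubleToInt32(double input) {
  uint64_t bits;
  std::memcpy(&bits, &input, sizeof bits);
  int exponent = (int)((bits >> 52) & 0x7FF) - 1023;              // unbias
  if (exponent < 0) return 0;                                     // |x| < 1
  uint64_t mantissa = (bits & ((1ULL << 52) - 1)) | (1ULL << 52); // implicit 1
  uint32_t magnitude;
  if (exponent <= 52) {
    magnitude = (uint32_t)(mantissa >> (52 - exponent));  // small: shift right
  } else if (exponent < 84) {
    magnitude = (uint32_t)(mantissa << (exponent - 52));  // low 32 bits survive
  } else {
    magnitude = 0;  // shifted entirely past bit 31 (also Inf/NaN)
  }
  bool negative = (bits >> 63) != 0;
  return (int32_t)(negative ? 0u - magnitude : magnitude);
}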
617 617
618 void FloatingPointHelper::LoadSSE2UnknownOperands(MacroAssembler* masm, 618 void FloatingPointHelper::LoadSSE2UnknownOperands(MacroAssembler* masm,
619 Label* not_numbers) { 619 Label* not_numbers) {
620 Label load_smi_rdx, load_nonsmi_rax, load_smi_rax, load_float_rax, done; 620 Label load_smi_rdx, load_nonsmi_rax, load_smi_rax, load_float_rax, done;
621 // Load operand in rdx into xmm0, or branch to not_numbers. 621 // Load operand in rdx into xmm0, or branch to not_numbers.
622 __ LoadRoot(rcx, Heap::kHeapNumberMapRootIndex); 622 __ LoadRoot(rcx, Heap::kHeapNumberMapRootIndex);
623 __ JumpIfSmi(rdx, &load_smi_rdx); 623 __ JumpIfSmi(rdx, &load_smi_rdx);
(...skipping 352 matching lines...)
976 __ SmiToInteger32(rcx, rcx); 976 __ SmiToInteger32(rcx, rcx);
977 StackArgumentsAccessor adaptor_args(rbx, rcx, 977 StackArgumentsAccessor adaptor_args(rbx, rcx,
978 ARGUMENTS_DONT_CONTAIN_RECEIVER); 978 ARGUMENTS_DONT_CONTAIN_RECEIVER);
979 __ movp(rax, adaptor_args.GetArgumentOperand(0)); 979 __ movp(rax, adaptor_args.GetArgumentOperand(0));
980 __ Ret(); 980 __ Ret();
981 981
982 // Slow-case: Handle non-smi or out-of-bounds access to arguments 982 // Slow-case: Handle non-smi or out-of-bounds access to arguments
983 // by calling the runtime system. 983 // by calling the runtime system.
984 __ bind(&slow); 984 __ bind(&slow);
985 __ PopReturnAddressTo(rbx); 985 __ PopReturnAddressTo(rbx);
986 __ push(rdx); 986 __ Push(rdx);
987 __ PushReturnAddressFrom(rbx); 987 __ PushReturnAddressFrom(rbx);
988 __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1); 988 __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
989 } 989 }
990 990
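The PopReturnAddressTo / Push / PushReturnAddressFrom sequence above is an idiom that recurs throughout this file: a tail-called runtime function expects its arguments below the return address, so the stub lifts the return address into a scratch register, pushes the arguments, and puts the return address back on top. A tiny simulation of the idiom (not V8 code; back() plays the role of rsp[0]):

#include <cstdint>
#include <vector>

void InsertArgUnderReturnAddress(std::vector<uint64_t>* stack, uint64_t arg) {
  uint64_t return_address = stack->back();  // PopReturnAddressTo(rbx)
  stack->pop_back();
  stack->push_back(arg);                    // Push(rdx)
  stack->push_back(return_address);         // PushReturnAddressFrom(rbx)
}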
991 991
992 void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) { 992 void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
993 // Stack layout: 993 // Stack layout:
994 // rsp[0] : return address 994 // rsp[0] : return address
995 // rsp[8] : number of parameters (tagged) 995 // rsp[8] : number of parameters (tagged)
996 // rsp[16] : receiver displacement 996 // rsp[16] : receiver displacement
(...skipping 1060 matching lines...)
2057 __ Set(rax, EQUAL); 2057 __ Set(rax, EQUAL);
2058 __ bind(&return_unequal); 2058 __ bind(&return_unequal);
2059 // Return non-equal by returning the non-zero object pointer in rax, 2059 // Return non-equal by returning the non-zero object pointer in rax,
2060 // or return equal if we fell through to here. 2060 // or return equal if we fell through to here.
2061 __ ret(0); 2061 __ ret(0);
2062 __ bind(&not_both_objects); 2062 __ bind(&not_both_objects);
2063 } 2063 }
2064 2064
2065 // Push arguments below the return address to prepare jump to builtin. 2065 // Push arguments below the return address to prepare jump to builtin.
2066 __ PopReturnAddressTo(rcx); 2066 __ PopReturnAddressTo(rcx);
2067 __ push(rdx); 2067 __ Push(rdx);
2068 __ push(rax); 2068 __ Push(rax);
2069 2069
2070 // Figure out which native to call and setup the arguments. 2070 // Figure out which native to call and setup the arguments.
2071 Builtins::JavaScript builtin; 2071 Builtins::JavaScript builtin;
2072 if (cc == equal) { 2072 if (cc == equal) {
2073 builtin = strict() ? Builtins::STRICT_EQUALS : Builtins::EQUALS; 2073 builtin = strict() ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
2074 } else { 2074 } else {
2075 builtin = Builtins::COMPARE; 2075 builtin = Builtins::COMPARE;
2076 __ Push(Smi::FromInt(NegativeComparisonResult(cc))); 2076 __ Push(Smi::FromInt(NegativeComparisonResult(cc)));
2077 } 2077 }
2078 2078
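For the non-equality case above, the pushed Smi tells the COMPARE builtin what to answer when the operands are unordered (a NaN is involved), so that <, <=, > and >= all come out false on NaN. A self-contained sketch of that helper's logic — the names mirror V8's, but this body is an assumption, not a quote:

enum Condition { less, less_equal, greater, greater_equal };
enum ComparisonResult { LESS = -1, GREATER = 1 };

// For a ">"/">=" comparison the unordered answer must be LESS so the test
// fails; for "<"/"<=" it must be GREATER, for the same reason.
ComparisonResult NegativeComparisonResult(Condition cc) {
  return (cc == greater || cc == greater_equal) ? LESS : GREATER;
}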
(...skipping 68 matching lines...)
2147 __ cmpq(rdi, rcx); 2147 __ cmpq(rdi, rcx);
2148 __ j(not_equal, &not_array_function); 2148 __ j(not_equal, &not_array_function);
2149 2149
2150 // The target function is the Array constructor; 2150 // The target function is the Array constructor;
2151 // Create an AllocationSite if we don't already have it, store it in the slot. 2151 // Create an AllocationSite if we don't already have it, store it in the slot.
2152 { 2152 {
2153 FrameScope scope(masm, StackFrame::INTERNAL); 2153 FrameScope scope(masm, StackFrame::INTERNAL);
2154 2154
2155 // Arguments register must be smi-tagged to call out. 2155 // Arguments register must be smi-tagged to call out.
2156 __ Integer32ToSmi(rax, rax); 2156 __ Integer32ToSmi(rax, rax);
2157 __ push(rax); 2157 __ Push(rax);
2158 __ push(rdi); 2158 __ Push(rdi);
2159 __ Integer32ToSmi(rdx, rdx); 2159 __ Integer32ToSmi(rdx, rdx);
2160 __ push(rdx); 2160 __ Push(rdx);
2161 __ push(rbx); 2161 __ Push(rbx);
2162 2162
2163 CreateAllocationSiteStub create_stub; 2163 CreateAllocationSiteStub create_stub;
2164 __ CallStub(&create_stub); 2164 __ CallStub(&create_stub);
2165 2165
2166 __ pop(rbx); 2166 __ Pop(rbx);
2167 __ pop(rdx); 2167 __ Pop(rdx);
2168 __ pop(rdi); 2168 __ Pop(rdi);
2169 __ pop(rax); 2169 __ Pop(rax);
2170 __ SmiToInteger32(rax, rax); 2170 __ SmiToInteger32(rax, rax);
2171 } 2171 }
2172 __ jmp(&done_no_smi_convert); 2172 __ jmp(&done_no_smi_convert);
2173 2173
2174 __ bind(&not_array_function); 2174 __ bind(&not_array_function);
2175 __ movp(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize), 2175 __ movp(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize),
2176 rdi); 2176 rdi);
2177 2177
2178 // We won't need rdx or rbx anymore, just save rdi 2178 // We won't need rdx or rbx anymore, just save rdi
2179 __ push(rdi); 2179 __ Push(rdi);
2180 __ push(rbx); 2180 __ Push(rbx);
2181 __ push(rdx); 2181 __ Push(rdx);
2182 __ RecordWriteArray(rbx, rdi, rdx, kDontSaveFPRegs, 2182 __ RecordWriteArray(rbx, rdi, rdx, kDontSaveFPRegs,
2183 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); 2183 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
2184 __ pop(rdx); 2184 __ Pop(rdx);
2185 __ pop(rbx); 2185 __ Pop(rbx);
2186 __ pop(rdi); 2186 __ Pop(rdi);
2187 2187
2188 __ bind(&done); 2188 __ bind(&done);
2189 __ Integer32ToSmi(rdx, rdx); 2189 __ Integer32ToSmi(rdx, rdx);
2190 2190
2191 __ bind(&done_no_smi_convert); 2191 __ bind(&done_no_smi_convert);
2192 } 2192 }
2193 2193
2194 2194
2195 void CallFunctionStub::Generate(MacroAssembler* masm) { 2195 void CallFunctionStub::Generate(MacroAssembler* masm) {
2196 // rbx : feedback vector 2196 // rbx : feedback vector
(...skipping 62 matching lines...)
2259 __ SmiToInteger32(rdx, rdx); 2259 __ SmiToInteger32(rdx, rdx);
2260 __ Move(FieldOperand(rbx, rdx, times_pointer_size, 2260 __ Move(FieldOperand(rbx, rdx, times_pointer_size,
2261 FixedArray::kHeaderSize), 2261 FixedArray::kHeaderSize),
2262 TypeFeedbackInfo::MegamorphicSentinel(isolate)); 2262 TypeFeedbackInfo::MegamorphicSentinel(isolate));
2263 __ Integer32ToSmi(rdx, rdx); 2263 __ Integer32ToSmi(rdx, rdx);
2264 } 2264 }
2265 // Check for function proxy. 2265 // Check for function proxy.
2266 __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE); 2266 __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE);
2267 __ j(not_equal, &non_function); 2267 __ j(not_equal, &non_function);
2268 __ PopReturnAddressTo(rcx); 2268 __ PopReturnAddressTo(rcx);
2269 __ push(rdi); // put proxy as additional argument under return address 2269 __ Push(rdi); // put proxy as additional argument under return address
2270 __ PushReturnAddressFrom(rcx); 2270 __ PushReturnAddressFrom(rcx);
2271 __ Set(rax, argc_ + 1); 2271 __ Set(rax, argc_ + 1);
2272 __ Set(rbx, 0); 2272 __ Set(rbx, 0);
2273 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY); 2273 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
2274 { 2274 {
2275 Handle<Code> adaptor = 2275 Handle<Code> adaptor =
2276 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); 2276 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
2277 __ jmp(adaptor, RelocInfo::CODE_TARGET); 2277 __ jmp(adaptor, RelocInfo::CODE_TARGET);
2278 } 2278 }
2279 2279
2280 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead 2280 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
2281 // of the original receiver from the call site). 2281 // of the original receiver from the call site).
2282 __ bind(&non_function); 2282 __ bind(&non_function);
2283 __ movp(args.GetReceiverOperand(), rdi); 2283 __ movp(args.GetReceiverOperand(), rdi);
2284 __ Set(rax, argc_); 2284 __ Set(rax, argc_);
2285 __ Set(rbx, 0); 2285 __ Set(rbx, 0);
2286 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION); 2286 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
2287 Handle<Code> adaptor = 2287 Handle<Code> adaptor =
2288 isolate->builtins()->ArgumentsAdaptorTrampoline(); 2288 isolate->builtins()->ArgumentsAdaptorTrampoline();
2289 __ Jump(adaptor, RelocInfo::CODE_TARGET); 2289 __ Jump(adaptor, RelocInfo::CODE_TARGET);
2290 } 2290 }
2291 2291
2292 if (CallAsMethod()) { 2292 if (CallAsMethod()) {
2293 __ bind(&wrap); 2293 __ bind(&wrap);
2294 // Wrap the receiver and patch it back onto the stack. 2294 // Wrap the receiver and patch it back onto the stack.
2295 { FrameScope frame_scope(masm, StackFrame::INTERNAL); 2295 { FrameScope frame_scope(masm, StackFrame::INTERNAL);
2296 __ push(rdi); 2296 __ Push(rdi);
2297 __ push(rax); 2297 __ Push(rax);
2298 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); 2298 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
2299 __ pop(rdi); 2299 __ Pop(rdi);
2300 } 2300 }
2301 __ movp(args.GetReceiverOperand(), rax); 2301 __ movp(args.GetReceiverOperand(), rax);
2302 __ jmp(&cont); 2302 __ jmp(&cont);
2303 } 2303 }
2304 } 2304 }
2305 2305
2306 2306
2307 void CallConstructStub::Generate(MacroAssembler* masm) { 2307 void CallConstructStub::Generate(MacroAssembler* masm) {
2308 // rax : number of arguments 2308 // rax : number of arguments
2309 // rbx : feedback vector 2309 // rbx : feedback vector
(...skipping 316 matching lines...)
2626 2626
2627 void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { 2627 void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
2628 Label invoke, handler_entry, exit; 2628 Label invoke, handler_entry, exit;
2629 Label not_outermost_js, not_outermost_js_2; 2629 Label not_outermost_js, not_outermost_js_2;
2630 2630
2631 ProfileEntryHookStub::MaybeCallEntryHook(masm); 2631 ProfileEntryHookStub::MaybeCallEntryHook(masm);
2632 2632
2633 { // NOLINT. Scope block confuses linter. 2633 { // NOLINT. Scope block confuses linter.
2634 MacroAssembler::NoRootArrayScope uninitialized_root_register(masm); 2634 MacroAssembler::NoRootArrayScope uninitialized_root_register(masm);
2635 // Set up frame. 2635 // Set up frame.
2636 __ push(rbp); 2636 __ pushq(rbp);
2637 __ movp(rbp, rsp); 2637 __ movp(rbp, rsp);
2638 2638
2639 // Push the stack frame type marker twice. 2639 // Push the stack frame type marker twice.
2640 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY; 2640 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
2641 // Scratch register is neither callee-saved nor an argument register on any 2641 // Scratch register is neither callee-saved nor an argument register on any
2642 // platform. It's free to use at this point. 2642 // platform. It's free to use at this point.
2643 // Cannot use smi-register for loading yet. 2643 // Cannot use smi-register for loading yet.
2644 __ Move(kScratchRegister, Smi::FromInt(marker), Assembler::RelocInfoNone()); 2644 __ Move(kScratchRegister, Smi::FromInt(marker), Assembler::RelocInfoNone());
2645 __ push(kScratchRegister); // context slot 2645 __ Push(kScratchRegister); // context slot
2646 __ push(kScratchRegister); // function slot 2646 __ Push(kScratchRegister); // function slot
2647 // Save callee-saved registers (X64/Win64 calling conventions). 2647 // Save callee-saved registers (X64/X32/Win64 calling conventions).
2648 __ push(r12); 2648 __ pushq(r12);
2649 __ push(r13); 2649 __ pushq(r13);
2650 __ push(r14); 2650 __ pushq(r14);
2651 __ push(r15); 2651 __ pushq(r15);
2652 #ifdef _WIN64 2652 #ifdef _WIN64
2653 __ push(rdi); // Only callee save in Win64 ABI, argument in AMD64 ABI. 2653 __ pushq(rdi); // Only callee save in Win64 ABI, argument in AMD64 ABI.
2654 __ push(rsi); // Only callee save in Win64 ABI, argument in AMD64 ABI. 2654 __ pushq(rsi); // Only callee save in Win64 ABI, argument in AMD64 ABI.
2655 #endif 2655 #endif
2656 __ push(rbx); 2656 __ pushq(rbx);
2657 2657
2658 #ifdef _WIN64 2658 #ifdef _WIN64
2659 // On Win64 XMM6-XMM15 are callee-save 2659 // On Win64 XMM6-XMM15 are callee-save
2660 __ subq(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize)); 2660 __ subq(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize));
2661 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0), xmm6); 2661 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0), xmm6);
2662 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1), xmm7); 2662 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1), xmm7);
2663 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2), xmm8); 2663 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2), xmm8);
2664 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3), xmm9); 2664 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3), xmm9);
2665 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4), xmm10); 2665 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4), xmm10);
2666 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5), xmm11); 2666 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5), xmm11);
2667 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6), xmm12); 2667 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6), xmm12);
2668 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7), xmm13); 2668 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7), xmm13);
2669 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8), xmm14); 2669 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8), xmm14);
2670 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9), xmm15); 2670 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9), xmm15);
2671 #endif 2671 #endif
2672 2672
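The Win64 block above spills the ten callee-saved XMM registers (xmm6–xmm15) into a stack block sized by EntryFrameConstants. Plausible values for those constants, stated as an assumption rather than quoted from V8's headers:

constexpr int kXMMRegisterSize = 16;          // bytes per XMM register
constexpr int kCalleeSavedXMMRegisters = 10;  // xmm6..xmm15 on Win64
constexpr int kXMMRegistersBlockSize =
    kXMMRegisterSize * kCalleeSavedXMMRegisters;  // 160 bytes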
2673 // Set up the roots and smi constant registers. 2673 // Set up the roots and smi constant registers.
2674 // Needs to be done before any further smi loads. 2674 // Needs to be done before any further smi loads.
2675 __ InitializeSmiConstantRegister(); 2675 __ InitializeSmiConstantRegister();
2676 __ InitializeRootRegister(); 2676 __ InitializeRootRegister();
2677 } 2677 }
2678 2678
2679 Isolate* isolate = masm->isolate(); 2679 Isolate* isolate = masm->isolate();
2680 2680
2681 // Save copies of the top frame descriptor on the stack. 2681 // Save copies of the top frame descriptor on the stack.
2682 ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate); 2682 ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate);
2683 { 2683 {
2684 Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp); 2684 Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
2685 __ push(c_entry_fp_operand); 2685 __ Push(c_entry_fp_operand);
2686 } 2686 }
2687 2687
2688 // If this is the outermost JS call, set js_entry_sp value. 2688 // If this is the outermost JS call, set js_entry_sp value.
2689 ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate); 2689 ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate);
2690 __ Load(rax, js_entry_sp); 2690 __ Load(rax, js_entry_sp);
2691 __ testq(rax, rax); 2691 __ testq(rax, rax);
2692 __ j(not_zero, &not_outermost_js); 2692 __ j(not_zero, &not_outermost_js);
2693 __ Push(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)); 2693 __ Push(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME));
2694 __ movp(rax, rbp); 2694 __ movp(rax, rbp);
2695 __ Store(js_entry_sp, rax); 2695 __ Store(js_entry_sp, rax);
(...skipping 19 matching lines...)
2715 // Invoke: Link this frame into the handler chain. There's only one 2715 // Invoke: Link this frame into the handler chain. There's only one
2716 // handler block in this code object, so its index is 0. 2716 // handler block in this code object, so its index is 0.
2717 __ bind(&invoke); 2717 __ bind(&invoke);
2718 __ PushTryHandler(StackHandler::JS_ENTRY, 0); 2718 __ PushTryHandler(StackHandler::JS_ENTRY, 0);
2719 2719
2720 // Clear any pending exceptions. 2720 // Clear any pending exceptions.
2721 __ LoadRoot(rax, Heap::kTheHoleValueRootIndex); 2721 __ LoadRoot(rax, Heap::kTheHoleValueRootIndex);
2722 __ Store(pending_exception, rax); 2722 __ Store(pending_exception, rax);
2723 2723
2724 // Fake a receiver (NULL). 2724 // Fake a receiver (NULL).
2725 __ push(Immediate(0)); // receiver 2725 __ Push(Immediate(0)); // receiver
2726 2726
2727 // Invoke the function by calling through JS entry trampoline builtin and 2727 // Invoke the function by calling through JS entry trampoline builtin and
2728 // pop the faked function when we return. We load the address from an 2728 // pop the faked function when we return. We load the address from an
2729 // external reference instead of inlining the call target address directly 2729 // external reference instead of inlining the call target address directly
2730 // in the code, because the builtin stubs may not have been generated yet 2730 // in the code, because the builtin stubs may not have been generated yet
2731 // at the time this code is generated. 2731 // at the time this code is generated.
2732 if (is_construct) { 2732 if (is_construct) {
2733 ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline, 2733 ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
2734 isolate); 2734 isolate);
2735 __ Load(rax, construct_entry); 2735 __ Load(rax, construct_entry);
2736 } else { 2736 } else {
2737 ExternalReference entry(Builtins::kJSEntryTrampoline, isolate); 2737 ExternalReference entry(Builtins::kJSEntryTrampoline, isolate);
2738 __ Load(rax, entry); 2738 __ Load(rax, entry);
2739 } 2739 }
2740 __ lea(kScratchRegister, FieldOperand(rax, Code::kHeaderSize)); 2740 __ lea(kScratchRegister, FieldOperand(rax, Code::kHeaderSize));
2741 __ call(kScratchRegister); 2741 __ call(kScratchRegister);
2742 2742
2743 // Unlink this frame from the handler chain. 2743 // Unlink this frame from the handler chain.
2744 __ PopTryHandler(); 2744 __ PopTryHandler();
2745 2745
2746 __ bind(&exit); 2746 __ bind(&exit);
2747 // Check if the current stack frame is marked as the outermost JS frame. 2747 // Check if the current stack frame is marked as the outermost JS frame.
2748 __ pop(rbx); 2748 __ Pop(rbx);
2749 __ Cmp(rbx, Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)); 2749 __ Cmp(rbx, Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME));
2750 __ j(not_equal, &not_outermost_js_2); 2750 __ j(not_equal, &not_outermost_js_2);
2751 __ Move(kScratchRegister, js_entry_sp); 2751 __ Move(kScratchRegister, js_entry_sp);
2752 __ movp(Operand(kScratchRegister, 0), Immediate(0)); 2752 __ movp(Operand(kScratchRegister, 0), Immediate(0));
2753 __ bind(&not_outermost_js_2); 2753 __ bind(&not_outermost_js_2);
2754 2754
2755 // Restore the top frame descriptor from the stack. 2755 // Restore the top frame descriptor from the stack.
2756 { Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp); 2756 { Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
2757 __ pop(c_entry_fp_operand); 2757 __ Pop(c_entry_fp_operand);
2758 } 2758 }
2759 2759
2760 // Restore callee-saved registers (X64 conventions). 2760 // Restore callee-saved registers (X64 conventions).
2761 #ifdef _WIN64 2761 #ifdef _WIN64
2762 // On Win64 XMM6-XMM15 are callee-save 2762 // On Win64 XMM6-XMM15 are callee-save
2763 __ movdqu(xmm6, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0)); 2763 __ movdqu(xmm6, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0));
2764 __ movdqu(xmm7, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1)); 2764 __ movdqu(xmm7, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1));
2765 __ movdqu(xmm8, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2)); 2765 __ movdqu(xmm8, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2));
2766 __ movdqu(xmm9, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3)); 2766 __ movdqu(xmm9, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3));
2767 __ movdqu(xmm10, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4)); 2767 __ movdqu(xmm10, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4));
2768 __ movdqu(xmm11, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5)); 2768 __ movdqu(xmm11, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5));
2769 __ movdqu(xmm12, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6)); 2769 __ movdqu(xmm12, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6));
2770 __ movdqu(xmm13, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7)); 2770 __ movdqu(xmm13, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7));
2771 __ movdqu(xmm14, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8)); 2771 __ movdqu(xmm14, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8));
2772 __ movdqu(xmm15, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9)); 2772 __ movdqu(xmm15, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9));
2773 __ addq(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize)); 2773 __ addq(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize));
2774 #endif 2774 #endif
2775 2775
2776 __ pop(rbx); 2776 __ popq(rbx);
2777 #ifdef _WIN64 2777 #ifdef _WIN64
2778 // Callee-saved in Win64 ABI, arguments/volatile in AMD64 ABI. 2778 // Callee-saved in Win64 ABI, arguments/volatile in AMD64 ABI.
2779 __ pop(rsi); 2779 __ popq(rsi);
2780 __ pop(rdi); 2780 __ popq(rdi);
2781 #endif 2781 #endif
2782 __ pop(r15); 2782 __ popq(r15);
2783 __ pop(r14); 2783 __ popq(r14);
2784 __ pop(r13); 2784 __ popq(r13);
2785 __ pop(r12); 2785 __ popq(r12);
2786 __ addq(rsp, Immediate(2 * kPointerSize)); // remove markers 2786 __ addq(rsp, Immediate(2 * kPointerSize)); // remove markers
2787 2787
2788 // Restore frame pointer and return. 2788 // Restore frame pointer and return.
2789 __ pop(rbp); 2789 __ popq(rbp);
2790 __ ret(0); 2790 __ ret(0);
2791 } 2791 }
2792 2792
2793 2793
2794 void InstanceofStub::Generate(MacroAssembler* masm) { 2794 void InstanceofStub::Generate(MacroAssembler* masm) {
2795 // Implements "value instanceof function" operator. 2795 // Implements "value instanceof function" operator.
2796 // Expected input state with no inline cache: 2796 // Expected input state with no inline cache:
2797 // rsp[0] : return address 2797 // rsp[0] : return address
2798 // rsp[8] : function pointer 2798 // rsp[8] : function pointer
2799 // rsp[16] : value 2799 // rsp[16] : value
(...skipping 144 matching lines...)
2944 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov); 2944 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
2945 } 2945 }
2946 } 2946 }
2947 __ ret((2 + extra_argument_offset) * kPointerSize); 2947 __ ret((2 + extra_argument_offset) * kPointerSize);
2948 2948
2949 // Slow-case: Go through the JavaScript implementation. 2949 // Slow-case: Go through the JavaScript implementation.
2950 __ bind(&slow); 2950 __ bind(&slow);
2951 if (HasCallSiteInlineCheck()) { 2951 if (HasCallSiteInlineCheck()) {
2952 // Remove extra value from the stack. 2952 // Remove extra value from the stack.
2953 __ PopReturnAddressTo(rcx); 2953 __ PopReturnAddressTo(rcx);
2954 __ pop(rax); 2954 __ Pop(rax);
2955 __ PushReturnAddressFrom(rcx); 2955 __ PushReturnAddressFrom(rcx);
2956 } 2956 }
2957 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION); 2957 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
2958 } 2958 }
2959 2959
2960 2960
2961 // Passing arguments in registers is not supported. 2961 // Passing arguments in registers is not supported.
2962 Register InstanceofStub::left() { return no_reg; } 2962 Register InstanceofStub::left() { return no_reg; }
2963 2963
2964 2964
(...skipping 44 matching lines...)
3009 3009
3010 Factory* factory = masm->isolate()->factory(); 3010 Factory* factory = masm->isolate()->factory();
3011 // Index is not a smi. 3011 // Index is not a smi.
3012 __ bind(&index_not_smi_); 3012 __ bind(&index_not_smi_);
3013 // If index is a heap number, try converting it to an integer. 3013 // If index is a heap number, try converting it to an integer.
3014 __ CheckMap(index_, 3014 __ CheckMap(index_,
3015 factory->heap_number_map(), 3015 factory->heap_number_map(),
3016 index_not_number_, 3016 index_not_number_,
3017 DONT_DO_SMI_CHECK); 3017 DONT_DO_SMI_CHECK);
3018 call_helper.BeforeCall(masm); 3018 call_helper.BeforeCall(masm);
3019 __ push(object_); 3019 __ Push(object_);
3020 __ push(index_); // Consumed by runtime conversion function. 3020 __ Push(index_); // Consumed by runtime conversion function.
3021 if (index_flags_ == STRING_INDEX_IS_NUMBER) { 3021 if (index_flags_ == STRING_INDEX_IS_NUMBER) {
3022 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1); 3022 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
3023 } else { 3023 } else {
3024 ASSERT(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX); 3024 ASSERT(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
3025 // NumberToSmi discards numbers that are not exact integers. 3025 // NumberToSmi discards numbers that are not exact integers.
3026 __ CallRuntime(Runtime::kNumberToSmi, 1); 3026 __ CallRuntime(Runtime::kNumberToSmi, 1);
3027 } 3027 }
3028 if (!index_.is(rax)) { 3028 if (!index_.is(rax)) {
3029 // Save the conversion result before the pop instructions below 3029 // Save the conversion result before the pop instructions below
3030 // have a chance to overwrite it. 3030 // have a chance to overwrite it.
3031 __ movp(index_, rax); 3031 __ movp(index_, rax);
3032 } 3032 }
3033 __ pop(object_); 3033 __ Pop(object_);
3034 // Reload the instance type. 3034 // Reload the instance type.
3035 __ movp(result_, FieldOperand(object_, HeapObject::kMapOffset)); 3035 __ movp(result_, FieldOperand(object_, HeapObject::kMapOffset));
3036 __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset)); 3036 __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
3037 call_helper.AfterCall(masm); 3037 call_helper.AfterCall(masm);
3038 // If index is still not a smi, it must be out of range. 3038 // If index is still not a smi, it must be out of range.
3039 __ JumpIfNotSmi(index_, index_out_of_range_); 3039 __ JumpIfNotSmi(index_, index_out_of_range_);
3040 // Otherwise, return to the fast path. 3040 // Otherwise, return to the fast path.
3041 __ jmp(&got_smi_index_); 3041 __ jmp(&got_smi_index_);
3042 3042
3043 // Call runtime. We get here when the receiver is a string and the 3043 // Call runtime. We get here when the receiver is a string and the
3044 // index is a number, but the code for getting the actual character 3044 // index is a number, but the code for getting the actual character
3045 // is too complex (e.g., when the string needs to be flattened). 3045 // is too complex (e.g., when the string needs to be flattened).
3046 __ bind(&call_runtime_); 3046 __ bind(&call_runtime_);
3047 call_helper.BeforeCall(masm); 3047 call_helper.BeforeCall(masm);
3048 __ push(object_); 3048 __ Push(object_);
3049 __ Integer32ToSmi(index_, index_); 3049 __ Integer32ToSmi(index_, index_);
3050 __ push(index_); 3050 __ Push(index_);
3051 __ CallRuntime(Runtime::kStringCharCodeAt, 2); 3051 __ CallRuntime(Runtime::kStringCharCodeAt, 2);
3052 if (!result_.is(rax)) { 3052 if (!result_.is(rax)) {
3053 __ movp(result_, rax); 3053 __ movp(result_, rax);
3054 } 3054 }
3055 call_helper.AfterCall(masm); 3055 call_helper.AfterCall(masm);
3056 __ jmp(&exit_); 3056 __ jmp(&exit_);
3057 3057
3058 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase); 3058 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
3059 } 3059 }
3060 3060
(...skipping 17 matching lines...)
3078 } 3078 }
3079 3079
3080 3080
3081 void StringCharFromCodeGenerator::GenerateSlow( 3081 void StringCharFromCodeGenerator::GenerateSlow(
3082 MacroAssembler* masm, 3082 MacroAssembler* masm,
3083 const RuntimeCallHelper& call_helper) { 3083 const RuntimeCallHelper& call_helper) {
3084 __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase); 3084 __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);
3085 3085
3086 __ bind(&slow_case_); 3086 __ bind(&slow_case_);
3087 call_helper.BeforeCall(masm); 3087 call_helper.BeforeCall(masm);
3088 __ push(code_); 3088 __ Push(code_);
3089 __ CallRuntime(Runtime::kCharFromCode, 1); 3089 __ CallRuntime(Runtime::kCharFromCode, 1);
3090 if (!result_.is(rax)) { 3090 if (!result_.is(rax)) {
3091 __ movp(result_, rax); 3091 __ movp(result_, rax);
3092 } 3092 }
3093 call_helper.AfterCall(masm); 3093 call_helper.AfterCall(masm);
3094 __ jmp(&exit_); 3094 __ jmp(&exit_);
3095 3095
3096 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase); 3096 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
3097 } 3097 }
3098 3098
(...skipping 1003 matching lines...)
4102 StringCompareStub::GenerateFlatAsciiStringEquals( 4102 StringCompareStub::GenerateFlatAsciiStringEquals(
4103 masm, left, right, tmp1, tmp2); 4103 masm, left, right, tmp1, tmp2);
4104 } else { 4104 } else {
4105 StringCompareStub::GenerateCompareFlatAsciiStrings( 4105 StringCompareStub::GenerateCompareFlatAsciiStrings(
4106 masm, left, right, tmp1, tmp2, tmp3, kScratchRegister); 4106 masm, left, right, tmp1, tmp2, tmp3, kScratchRegister);
4107 } 4107 }
4108 4108
4109 // Handle more complex cases in runtime. 4109 // Handle more complex cases in runtime.
4110 __ bind(&runtime); 4110 __ bind(&runtime);
4111 __ PopReturnAddressTo(tmp1); 4111 __ PopReturnAddressTo(tmp1);
4112 __ push(left); 4112 __ Push(left);
4113 __ push(right); 4113 __ Push(right);
4114 __ PushReturnAddressFrom(tmp1); 4114 __ PushReturnAddressFrom(tmp1);
4115 if (equality) { 4115 if (equality) {
4116 __ TailCallRuntime(Runtime::kStringEquals, 2, 1); 4116 __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
4117 } else { 4117 } else {
4118 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); 4118 __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
4119 } 4119 }
4120 4120
4121 __ bind(&miss); 4121 __ bind(&miss);
4122 GenerateMiss(masm); 4122 GenerateMiss(masm);
4123 } 4123 }
(...skipping 39 matching lines...)
4163 } 4163 }
4164 4164
4165 4165
4166 void ICCompareStub::GenerateMiss(MacroAssembler* masm) { 4166 void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
4167 { 4167 {
4168 // Call the runtime system in a fresh internal frame. 4168 // Call the runtime system in a fresh internal frame.
4169 ExternalReference miss = 4169 ExternalReference miss =
4170 ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate()); 4170 ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate());
4171 4171
4172 FrameScope scope(masm, StackFrame::INTERNAL); 4172 FrameScope scope(masm, StackFrame::INTERNAL);
4173 __ push(rdx); 4173 __ Push(rdx);
4174 __ push(rax); 4174 __ Push(rax);
4175 __ push(rdx); 4175 __ Push(rdx);
4176 __ push(rax); 4176 __ Push(rax);
4177 __ Push(Smi::FromInt(op_)); 4177 __ Push(Smi::FromInt(op_));
4178 __ CallExternalReference(miss, 3); 4178 __ CallExternalReference(miss, 3);
4179 4179
4180 // Compute the entry point of the rewritten stub. 4180 // Compute the entry point of the rewritten stub.
4181 __ lea(rdi, FieldOperand(rax, Code::kHeaderSize)); 4181 __ lea(rdi, FieldOperand(rax, Code::kHeaderSize));
4182 __ pop(rax); 4182 __ Pop(rax);
4183 __ pop(rdx); 4183 __ Pop(rdx);
4184 } 4184 }
4185 4185
4186 // Do a tail call to the rewritten stub. 4186 // Do a tail call to the rewritten stub.
4187 __ jmp(rdi); 4187 __ jmp(rdi);
4188 } 4188 }
4189 4189
4190 4190
4191 void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm, 4191 void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
4192 Label* miss, 4192 Label* miss,
4193 Label* done, 4193 Label* done,
(...skipping 41 matching lines...)
4235 4235
4236 // Check if the entry name is not a unique name. 4236 // Check if the entry name is not a unique name.
4237 __ movp(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset)); 4237 __ movp(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
4238 __ JumpIfNotUniqueName(FieldOperand(entity_name, Map::kInstanceTypeOffset), 4238 __ JumpIfNotUniqueName(FieldOperand(entity_name, Map::kInstanceTypeOffset),
4239 miss); 4239 miss);
4240 __ bind(&good); 4240 __ bind(&good);
4241 } 4241 }
4242 4242
4243 NameDictionaryLookupStub stub(properties, r0, r0, NEGATIVE_LOOKUP); 4243 NameDictionaryLookupStub stub(properties, r0, r0, NEGATIVE_LOOKUP);
4244 __ Push(Handle<Object>(name)); 4244 __ Push(Handle<Object>(name));
4245 __ push(Immediate(name->Hash())); 4245 __ Push(Immediate(name->Hash()));
4246 __ CallStub(&stub); 4246 __ CallStub(&stub);
4247 __ testq(r0, r0); 4247 __ testq(r0, r0);
4248 __ j(not_zero, miss); 4248 __ j(not_zero, miss);
4249 __ jmp(done); 4249 __ jmp(done);
4250 } 4250 }
4251 4251
4252 4252
4253 // Probe the name dictionary in the |elements| register. Jump to the 4253 // Probe the name dictionary in the |elements| register. Jump to the
4254 // |done| label if a property with the given name is found, leaving the 4254 // |done| label if a property with the given name is found, leaving the
4255 // index into the dictionary in |r1|. Jump to the |miss| label 4255 // index into the dictionary in |r1|. Jump to the |miss| label
(...skipping 28 matching lines...)
4284 ASSERT(NameDictionary::kEntrySize == 3); 4284 ASSERT(NameDictionary::kEntrySize == 3);
4285 __ lea(r1, Operand(r1, r1, times_2, 0)); // r1 = r1 * 3 4285 __ lea(r1, Operand(r1, r1, times_2, 0)); // r1 = r1 * 3
4286 4286
4287 // Check if the key is identical to the name. 4287 // Check if the key is identical to the name.
4288 __ cmpq(name, Operand(elements, r1, times_pointer_size, 4288 __ cmpq(name, Operand(elements, r1, times_pointer_size,
4289 kElementsStartOffset - kHeapObjectTag)); 4289 kElementsStartOffset - kHeapObjectTag));
4290 __ j(equal, done); 4290 __ j(equal, done);
4291 } 4291 }
4292 4292
4293 NameDictionaryLookupStub stub(elements, r0, r1, POSITIVE_LOOKUP); 4293 NameDictionaryLookupStub stub(elements, r0, r1, POSITIVE_LOOKUP);
4294 __ push(name); 4294 __ Push(name);
4295 __ movl(r0, FieldOperand(name, Name::kHashFieldOffset)); 4295 __ movl(r0, FieldOperand(name, Name::kHashFieldOffset));
4296 __ shrl(r0, Immediate(Name::kHashShift)); 4296 __ shrl(r0, Immediate(Name::kHashShift));
4297 __ push(r0); 4297 __ Push(r0);
4298 __ CallStub(&stub); 4298 __ CallStub(&stub);
4299 4299
4300 __ testq(r0, r0); 4300 __ testq(r0, r0);
4301 __ j(zero, miss); 4301 __ j(zero, miss);
4302 __ jmp(done); 4302 __ jmp(done);
4303 } 4303 }
4304 4304
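Both lookup helpers above probe with the quadratic sequence named in the comments, (hash + i + i*i) & mask, over a power-of-two capacity, then scale by kEntrySize == 3 — the lea(r1, Operand(r1, r1, times_2, 0)) is exactly r1 *= 3. The same computation as a plain C++ sketch:

#include <cstdint>

// Index of the first element of the probed entry, in array-element units.
uint32_t ProbeIndex(uint32_t hash, uint32_t probe, uint32_t capacity) {
  uint32_t mask = capacity - 1;                            // capacity is 2^n
  uint32_t entry = (hash + probe + probe * probe) & mask;  // quadratic probe
  return entry * 3;                                        // kEntrySize == 3
}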
4305 4305
4306 void NameDictionaryLookupStub::Generate(MacroAssembler* masm) { 4306 void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
4307 // This stub overrides SometimesSetsUpAFrame() to return false. That means 4307 // This stub overrides SometimesSetsUpAFrame() to return false. That means
4308 // we cannot call anything that could cause a GC from this stub. 4308 // we cannot call anything that could cause a GC from this stub.
4309 // Stack frame on entry: 4309 // Stack frame on entry:
4310 // rsp[0 * kPointerSize] : return address. 4310 // rsp[0 * kPointerSize] : return address.
4311 // rsp[1 * kPointerSize] : key's hash. 4311 // rsp[1 * kPointerSize] : key's hash.
4312 // rsp[2 * kPointerSize] : key. 4312 // rsp[2 * kPointerSize] : key.
4313 // Registers: 4313 // Registers:
4314 // dictionary_: NameDictionary to probe. 4314 // dictionary_: NameDictionary to probe.
4315 // result_: used as scratch. 4315 // result_: used as scratch.
4316 // index_: will hold an index of entry if lookup is successful. 4316 // index_: will hold an index of entry if lookup is successful.
4317 // might alias with result_. 4317 // might alias with result_.
4318 // Returns: 4318 // Returns:
4319 // result_ is zero if lookup failed, non zero otherwise. 4319 // result_ is zero if lookup failed, non zero otherwise.
4320 4320
4321 Label in_dictionary, maybe_in_dictionary, not_in_dictionary; 4321 Label in_dictionary, maybe_in_dictionary, not_in_dictionary;
4322 4322
4323 Register scratch = result_; 4323 Register scratch = result_;
4324 4324
4325 __ SmiToInteger32(scratch, FieldOperand(dictionary_, kCapacityOffset)); 4325 __ SmiToInteger32(scratch, FieldOperand(dictionary_, kCapacityOffset));
4326 __ decl(scratch); 4326 __ decl(scratch);
4327 __ push(scratch); 4327 __ Push(scratch);
4328 4328
4329 // If names of slots in range from 1 to kProbes - 1 for the hash value are 4329 // If names of slots in range from 1 to kProbes - 1 for the hash value are
4330 // not equal to the name and kProbes-th slot is not used (its name is the 4330 // not equal to the name and kProbes-th slot is not used (its name is the
4331 // undefined value), it guarantees the hash table doesn't contain the 4331 // undefined value), it guarantees the hash table doesn't contain the
4332 // property. It's true even if some slots represent deleted properties 4332 // property. It's true even if some slots represent deleted properties
4333 // (their names are the null value). 4333 // (their names are the null value).
4334 StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER, 4334 StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER,
4335 kPointerSize); 4335 kPointerSize);
4336 for (int i = kInlinedProbes; i < kTotalProbes; i++) { 4336 for (int i = kInlinedProbes; i < kTotalProbes; i++) {
4337 // Compute the masked index: (hash + i + i * i) & mask. 4337 // Compute the masked index: (hash + i + i * i) & mask.
(...skipping 228 matching lines...)
4566 regs_.scratch1(), // Scratch. 4566 regs_.scratch1(), // Scratch.
4567 MemoryChunk::kSkipEvacuationSlotsRecordingMask, 4567 MemoryChunk::kSkipEvacuationSlotsRecordingMask,
4568 zero, 4568 zero,
4569 &need_incremental); 4569 &need_incremental);
4570 4570
4571 __ bind(&ensure_not_white); 4571 __ bind(&ensure_not_white);
4572 } 4572 }
4573 4573
4574 // We need an extra register for this, so we push the object register 4574 // We need an extra register for this, so we push the object register
4575 // temporarily. 4575 // temporarily.
4576 __ push(regs_.object()); 4576 __ Push(regs_.object());
4577 __ EnsureNotWhite(regs_.scratch0(), // The value. 4577 __ EnsureNotWhite(regs_.scratch0(), // The value.
4578 regs_.scratch1(), // Scratch. 4578 regs_.scratch1(), // Scratch.
4579 regs_.object(), // Scratch. 4579 regs_.object(), // Scratch.
4580 &need_incremental_pop_object, 4580 &need_incremental_pop_object,
4581 Label::kNear); 4581 Label::kNear);
4582 __ pop(regs_.object()); 4582 __ Pop(regs_.object());
4583 4583
4584 regs_.Restore(masm); 4584 regs_.Restore(masm);
4585 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) { 4585 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
4586 __ RememberedSetHelper(object_, 4586 __ RememberedSetHelper(object_,
4587 address_, 4587 address_,
4588 value_, 4588 value_,
4589 save_fp_regs_mode_, 4589 save_fp_regs_mode_,
4590 MacroAssembler::kReturnAtEnd); 4590 MacroAssembler::kReturnAtEnd);
4591 } else { 4591 } else {
4592 __ ret(0); 4592 __ ret(0);
4593 } 4593 }
4594 4594
4595 __ bind(&need_incremental_pop_object); 4595 __ bind(&need_incremental_pop_object);
4596 __ pop(regs_.object()); 4596 __ Pop(regs_.object());
4597 4597
4598 __ bind(&need_incremental); 4598 __ bind(&need_incremental);
4599 4599
4600 // Fall through when we need to inform the incremental marker. 4600 // Fall through when we need to inform the incremental marker.
4601 } 4601 }
4602 4602
4603 4603
4604 void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) { 4604 void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
4605 // ----------- S t a t e ------------- 4605 // ----------- S t a t e -------------
4606 // -- rax : element value to store 4606 // -- rax : element value to store
(...skipping 20 matching lines...)
4627 4627
4628 // FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS 4628 // FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS
4629 __ JumpIfSmi(rax, &smi_element); 4629 __ JumpIfSmi(rax, &smi_element);
4630 __ CheckFastSmiElements(rdi, &fast_elements); 4630 __ CheckFastSmiElements(rdi, &fast_elements);
4631 4631
4632 // Storing into the array literal requires an elements transition. Call into 4632 // Storing into the array literal requires an elements transition. Call into
4633 // the runtime. 4633 // the runtime.
4634 4634
4635 __ bind(&slow_elements); 4635 __ bind(&slow_elements);
4636 __ PopReturnAddressTo(rdi); 4636 __ PopReturnAddressTo(rdi);
4637 __ push(rbx); 4637 __ Push(rbx);
4638 __ push(rcx); 4638 __ Push(rcx);
4639 __ push(rax); 4639 __ Push(rax);
4640 __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); 4640 __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
4641 __ push(FieldOperand(rbx, JSFunction::kLiteralsOffset)); 4641 __ Push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
4642 __ push(rdx); 4642 __ Push(rdx);
4643 __ PushReturnAddressFrom(rdi); 4643 __ PushReturnAddressFrom(rdi);
4644 __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1); 4644 __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);
4645 4645
4646 // Array literal has ElementsKind of FAST_*_ELEMENTS and value is an object. 4646 // Array literal has ElementsKind of FAST_*_ELEMENTS and value is an object.
4647 __ bind(&fast_elements); 4647 __ bind(&fast_elements);
4648 __ SmiToInteger32(kScratchRegister, rcx); 4648 __ SmiToInteger32(kScratchRegister, rcx);
4649 __ movp(rbx, FieldOperand(rbx, JSObject::kElementsOffset)); 4649 __ movp(rbx, FieldOperand(rbx, JSObject::kElementsOffset));
4650 __ lea(rcx, FieldOperand(rbx, kScratchRegister, times_pointer_size, 4650 __ lea(rcx, FieldOperand(rbx, kScratchRegister, times_pointer_size,
4651 FixedArrayBase::kHeaderSize)); 4651 FixedArrayBase::kHeaderSize));
4652 __ movp(Operand(rcx, 0), rax); 4652 __ movp(Operand(rcx, 0), rax);
(...skipping 48 matching lines...)
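The fast-elements path above computes the slot address with FieldOperand: V8 heap pointers carry a tag in the low bit, so a field access subtracts the tag and adds the field offset, and elements additionally scale the index by the pointer size. A sketch of that arithmetic, with the header size assumed to be two words (map + length) on x64:

#include <cstdint>

constexpr intptr_t kHeapObjectTag = 1;          // tagged heap pointers
constexpr intptr_t kPointerSize = 8;            // x64
constexpr intptr_t kFixedArrayHeaderSize = 16;  // assumed: map + length

intptr_t ElementAddress(intptr_t tagged_elements, intptr_t index) {
  return tagged_elements - kHeapObjectTag + kFixedArrayHeaderSize +
         index * kPointerSize;
}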
4701 ProfileEntryHookStub stub; 4701 ProfileEntryHookStub stub;
4702 masm->CallStub(&stub); 4702 masm->CallStub(&stub);
4703 } 4703 }
4704 } 4704 }
4705 4705
4706 4706
4707 void ProfileEntryHookStub::Generate(MacroAssembler* masm) { 4707 void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
4708 // This stub can be called from essentially anywhere, so it needs to save 4708 // This stub can be called from essentially anywhere, so it needs to save
4709 // all volatile and callee-save registers. 4709 // all volatile and callee-save registers.
4710 const size_t kNumSavedRegisters = 2; 4710 const size_t kNumSavedRegisters = 2;
4711 __ push(arg_reg_1); 4711 __ pushq(arg_reg_1);
4712 __ push(arg_reg_2); 4712 __ pushq(arg_reg_2);
4713 4713
4714 // Calculate the original stack pointer and store it in the second arg. 4714 // Calculate the original stack pointer and store it in the second arg.
4715 __ lea(arg_reg_2, 4715 __ lea(arg_reg_2,
4716 Operand(rsp, kNumSavedRegisters * kRegisterSize + kPCOnStackSize)); 4716 Operand(rsp, kNumSavedRegisters * kRegisterSize + kPCOnStackSize));
4717 4717
4718 // Calculate the function address to the first arg. 4718 // Calculate the function address to the first arg.
4719 __ movp(arg_reg_1, Operand(rsp, kNumSavedRegisters * kRegisterSize)); 4719 __ movp(arg_reg_1, Operand(rsp, kNumSavedRegisters * kRegisterSize));
4720 __ subq(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength)); 4720 __ subq(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength));
4721 4721
4722 // Save the remainder of the volatile registers. 4722 // Save the remainder of the volatile registers.
4723 masm->PushCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2); 4723 masm->PushCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2);
4724 4724
4725 // Call the entry hook function. 4725 // Call the entry hook function.
4726 __ Move(rax, FUNCTION_ADDR(masm->isolate()->function_entry_hook()), 4726 __ Move(rax, FUNCTION_ADDR(masm->isolate()->function_entry_hook()),
4727 Assembler::RelocInfoNone()); 4727 Assembler::RelocInfoNone());
4728 4728
4729 AllowExternalCallThatCantCauseGC scope(masm); 4729 AllowExternalCallThatCantCauseGC scope(masm);
4730 4730
4731 const int kArgumentCount = 2; 4731 const int kArgumentCount = 2;
4732 __ PrepareCallCFunction(kArgumentCount); 4732 __ PrepareCallCFunction(kArgumentCount);
4733 __ CallCFunction(rax, kArgumentCount); 4733 __ CallCFunction(rax, kArgumentCount);
4734 4734
4735 // Restore volatile regs. 4735 // Restore volatile regs.
4736 masm->PopCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2); 4736 masm->PopCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2);
4737 __ pop(arg_reg_2); 4737 __ popq(arg_reg_2);
4738 __ pop(arg_reg_1); 4738 __ popq(arg_reg_1);
4739 4739
4740 __ Ret(); 4740 __ Ret();
4741 } 4741 }
4742 4742
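The stub above hands the C-level hook two values: the entered function's code address (arg_reg_1, backed up over the short call that reached the hook) and the stack slot holding the return address (arg_reg_2). V8's public API declares a callback type of this shape; the typedef below is believed to match v8.h of this era, while the hook body is purely illustrative:

#include <cstdint>
#include <cstdio>

typedef void (*FunctionEntryHook)(uintptr_t function,
                                  uintptr_t return_addr_location);

// Example hook: log each entered code object and its return-address slot.
void LogEntryHook(uintptr_t function, uintptr_t return_addr_location) {
  std::printf("entered %p (return slot %p)\n",
              (void*)function, (void*)return_addr_location);
}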
4743 4743
4744 template<class T> 4744 template<class T>
4745 static void CreateArrayDispatch(MacroAssembler* masm, 4745 static void CreateArrayDispatch(MacroAssembler* masm,
4746 AllocationSiteOverrideMode mode) { 4746 AllocationSiteOverrideMode mode) {
4747 if (mode == DISABLE_ALLOCATION_SITES) { 4747 if (mode == DISABLE_ALLOCATION_SITES) {
4748 T stub(GetInitialFastElementsKind(), mode); 4748 T stub(GetInitialFastElementsKind(), mode);
(...skipping 360 matching lines...)
5109 STATIC_ASSERT(FCA::kDataIndex == 4); 5109 STATIC_ASSERT(FCA::kDataIndex == 4);
5110 STATIC_ASSERT(FCA::kReturnValueOffset == 3); 5110 STATIC_ASSERT(FCA::kReturnValueOffset == 3);
5111 STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2); 5111 STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
5112 STATIC_ASSERT(FCA::kIsolateIndex == 1); 5112 STATIC_ASSERT(FCA::kIsolateIndex == 1);
5113 STATIC_ASSERT(FCA::kHolderIndex == 0); 5113 STATIC_ASSERT(FCA::kHolderIndex == 0);
5114 STATIC_ASSERT(FCA::kArgsLength == 7); 5114 STATIC_ASSERT(FCA::kArgsLength == 7);
5115 5115
5116 __ PopReturnAddressTo(return_address); 5116 __ PopReturnAddressTo(return_address);
5117 5117
5118 // context save 5118 // context save
5119 __ push(context); 5119 __ Push(context);
5120 // load context from callee 5120 // load context from callee
5121 __ movp(context, FieldOperand(callee, JSFunction::kContextOffset)); 5121 __ movp(context, FieldOperand(callee, JSFunction::kContextOffset));
5122 5122
5123 // callee 5123 // callee
5124 __ push(callee); 5124 __ Push(callee);
5125 5125
5126 // call data 5126 // call data
5127 __ push(call_data); 5127 __ Push(call_data);
5128 Register scratch = call_data; 5128 Register scratch = call_data;
5129 if (!call_data_undefined) { 5129 if (!call_data_undefined) {
5130 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex); 5130 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
5131 } 5131 }
5132 // return value 5132 // return value
5133 __ push(scratch); 5133 __ Push(scratch);
5134 // return value default 5134 // return value default
5135 __ push(scratch); 5135 __ Push(scratch);
5136 // isolate 5136 // isolate
5137 __ Move(scratch, 5137 __ Move(scratch,
5138 ExternalReference::isolate_address(masm->isolate())); 5138 ExternalReference::isolate_address(masm->isolate()));
5139 __ push(scratch); 5139 __ Push(scratch);
5140 // holder 5140 // holder
5141 __ push(holder); 5141 __ Push(holder);
5142 5142
5143 __ movp(scratch, rsp); 5143 __ movp(scratch, rsp);
5144 // Push return address back on stack. 5144 // Push return address back on stack.
5145 __ PushReturnAddressFrom(return_address); 5145 __ PushReturnAddressFrom(return_address);
5146 5146
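The seven pushes above build the implicit-args block of a function callback in reverse index order, so holder ends up at index 0 (the lowest address, where scratch now points). The indices match the STATIC_ASSERTs at the top of this chunk; the two entries not asserted there are inferred from kArgsLength == 7 and marked as assumptions:

// Indices into the implicit-args block, lowest stack address first.
enum FunctionCallbackArgumentsIndex {
  kHolderIndex = 0,
  kIsolateIndex = 1,
  kReturnValueDefaultValueIndex = 2,
  kReturnValueIndex = 3,  // kReturnValueOffset in the asserts above
  kDataIndex = 4,
  kCalleeIndex = 5,       // assumed from kArgsLength == 7
  kContextSaveIndex = 6,  // assumed from kArgsLength == 7
  kArgsLength = 7
};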
5147 // Allocate the v8::Arguments structure in the arguments' space since 5147 // Allocate the v8::Arguments structure in the arguments' space since
5148 // it's not controlled by GC. 5148 // it's not controlled by GC.
5149 const int kApiStackSpace = 4; 5149 const int kApiStackSpace = 4;
5150 5150
5151 __ PrepareCallApiFunction(kApiStackSpace); 5151 __ PrepareCallApiFunction(kApiStackSpace);
(...skipping 99 matching lines...)
5251 return_value_operand, 5251 return_value_operand,
5252 NULL); 5252 NULL);
5253 } 5253 }
5254 5254
5255 5255
5256 #undef __ 5256 #undef __
5257 5257
5258 } } // namespace v8::internal 5258 } } // namespace v8::internal
5259 5259
5260 #endif // V8_TARGET_ARCH_X64 5260 #endif // V8_TARGET_ARCH_X64