Chromium Code Reviews

Side by Side Diff: src/x64/builtins-x64.cc

Issue 199903002: Introduce Push and Pop macro instructions for x64 (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Rebased with bleeding_edge Created 6 years, 9 months ago
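Note on the change: this patch set systematically replaces raw `__ push(...)`/`__ pop(...)` emissions with the `__ Push(...)`/`__ Pop(...)` macro instructions, while the remaining raw stack operations on the frame pointer and return address are spelled explicitly as `pushq`/`popq`. The sketch below is one plausible reading of that split, not the actual V8 MacroAssembler: every name apart from Push/Pop/pushq/popq (the Register stand-in, the string-based emitter) is invented for illustration. On stock x64 the macro can simply forward to the quadword instruction; the value of the indirection is that a port where a pointer does not fill a whole 64-bit stack slot could change the macro in one place instead of touching every builtin.

// Minimal, self-contained sketch (illustration only, not V8 code).
#include <cstdio>
#include <string>
#include <vector>

namespace sketch {

using Register = std::string;  // stand-in for V8's Register type

class MacroAssembler {
 public:
  // Raw quadword stack operations: always a full 64-bit slot.
  void pushq(const Register& src) { Emit("pushq " + src); }
  void popq(const Register& dst)  { Emit("popq " + dst); }

  // Macro instructions for pointer-sized (tagged) values. On plain x64 a
  // pointer fills a whole slot, so they just forward to pushq/popq; a port
  // with a smaller pointer size could emit something different here.
  void Push(const Register& src) { pushq(src); }
  void Pop(const Register& dst)  { popq(dst); }

  void Print() const {
    for (const std::string& line : code_) std::printf("  %s\n", line.c_str());
  }

 private:
  void Emit(std::string text) { code_.push_back(std::move(text)); }
  std::vector<std::string> code_;
};

}  // namespace sketch

int main() {
  sketch::MacroAssembler masm;
  masm.pushq("rbp");  // frame pointer: stays a raw 64-bit push
  masm.Push("rdi");   // tagged value: goes through the macro instruction
  masm.Pop("rdi");
  masm.popq("rbp");
  masm.Print();
  return 0;
}

This mirrors the pattern visible in the diff below: JSFunction, receiver, and argument values move to Push/Pop, while EnterArgumentsAdaptorFrame and the code-age prologue keep pushq(rbp)/popq(rbp) for the frame pointer.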
OLD | NEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 43 matching lines...)
54 // -- ... 54 // -- ...
55 // -- rsp[8 * argc] : first argument (argc == rax) 55 // -- rsp[8 * argc] : first argument (argc == rax)
56 // -- rsp[8 * (argc + 1)] : receiver 56 // -- rsp[8 * (argc + 1)] : receiver
57 // ----------------------------------- 57 // -----------------------------------
58 58
59 // Insert extra arguments. 59 // Insert extra arguments.
60 int num_extra_args = 0; 60 int num_extra_args = 0;
61 if (extra_args == NEEDS_CALLED_FUNCTION) { 61 if (extra_args == NEEDS_CALLED_FUNCTION) {
62 num_extra_args = 1; 62 num_extra_args = 1;
63 __ PopReturnAddressTo(kScratchRegister); 63 __ PopReturnAddressTo(kScratchRegister);
64 __ push(rdi); 64 __ Push(rdi);
65 __ PushReturnAddressFrom(kScratchRegister); 65 __ PushReturnAddressFrom(kScratchRegister);
66 } else { 66 } else {
67 ASSERT(extra_args == NO_EXTRA_ARGUMENTS); 67 ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
68 } 68 }
69 69
70 // JumpToExternalReference expects rax to contain the number of arguments 70 // JumpToExternalReference expects rax to contain the number of arguments
71 // including the receiver and the extra arguments. 71 // including the receiver and the extra arguments.
72 __ addq(rax, Immediate(num_extra_args + 1)); 72 __ addq(rax, Immediate(num_extra_args + 1));
73 __ JumpToExternalReference(ExternalReference(id, masm->isolate()), 1); 73 __ JumpToExternalReference(ExternalReference(id, masm->isolate()), 1);
74 } 74 }
75 75
76 76
77 static void CallRuntimePassFunction( 77 static void CallRuntimePassFunction(
78 MacroAssembler* masm, Runtime::FunctionId function_id) { 78 MacroAssembler* masm, Runtime::FunctionId function_id) {
79 FrameScope scope(masm, StackFrame::INTERNAL); 79 FrameScope scope(masm, StackFrame::INTERNAL);
80 // Push a copy of the function onto the stack. 80 // Push a copy of the function onto the stack.
81 __ push(rdi); 81 __ Push(rdi);
82 // Function is also the parameter to the runtime call. 82 // Function is also the parameter to the runtime call.
83 __ push(rdi); 83 __ Push(rdi);
84 84
85 __ CallRuntime(function_id, 1); 85 __ CallRuntime(function_id, 1);
86 // Restore receiver. 86 // Restore receiver.
87 __ pop(rdi); 87 __ Pop(rdi);
88 } 88 }
89 89
90 90
91 static void GenerateTailCallToSharedCode(MacroAssembler* masm) { 91 static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
92 __ movp(kScratchRegister, 92 __ movp(kScratchRegister,
93 FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); 93 FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
94 __ movp(kScratchRegister, 94 __ movp(kScratchRegister,
95 FieldOperand(kScratchRegister, SharedFunctionInfo::kCodeOffset)); 95 FieldOperand(kScratchRegister, SharedFunctionInfo::kCodeOffset));
96 __ lea(kScratchRegister, FieldOperand(kScratchRegister, Code::kHeaderSize)); 96 __ lea(kScratchRegister, FieldOperand(kScratchRegister, Code::kHeaderSize));
97 __ jmp(kScratchRegister); 97 __ jmp(kScratchRegister);
(...skipping 34 matching lines...)
132 132
133 // Should never count constructions for api objects. 133 // Should never count constructions for api objects.
134 ASSERT(!is_api_function || !count_constructions); 134 ASSERT(!is_api_function || !count_constructions);
135 135
136 // Enter a construct frame. 136 // Enter a construct frame.
137 { 137 {
138 FrameScope scope(masm, StackFrame::CONSTRUCT); 138 FrameScope scope(masm, StackFrame::CONSTRUCT);
139 139
140 // Store a smi-tagged arguments count on the stack. 140 // Store a smi-tagged arguments count on the stack.
141 __ Integer32ToSmi(rax, rax); 141 __ Integer32ToSmi(rax, rax);
142 __ push(rax); 142 __ Push(rax);
143 143
144 // Push the function to invoke on the stack. 144 // Push the function to invoke on the stack.
145 __ push(rdi); 145 __ Push(rdi);
146 146
147 // Try to allocate the object without transitioning into C code. If any of 147 // Try to allocate the object without transitioning into C code. If any of
148 // the preconditions is not met, the code bails out to the runtime call. 148 // the preconditions is not met, the code bails out to the runtime call.
149 Label rt_call, allocated; 149 Label rt_call, allocated;
150 if (FLAG_inline_new) { 150 if (FLAG_inline_new) {
151 Label undo_allocation; 151 Label undo_allocation;
152 152
153 #ifdef ENABLE_DEBUGGER_SUPPORT 153 #ifdef ENABLE_DEBUGGER_SUPPORT
154 ExternalReference debug_step_in_fp = 154 ExternalReference debug_step_in_fp =
155 ExternalReference::debug_step_in_fp_address(masm->isolate()); 155 ExternalReference::debug_step_in_fp_address(masm->isolate());
(...skipping 23 matching lines...)
179 __ j(equal, &rt_call); 179 __ j(equal, &rt_call);
180 180
181 if (count_constructions) { 181 if (count_constructions) {
182 Label allocate; 182 Label allocate;
183 // Decrease generous allocation count. 183 // Decrease generous allocation count.
184 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); 184 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
185 __ decb(FieldOperand(rcx, 185 __ decb(FieldOperand(rcx,
186 SharedFunctionInfo::kConstructionCountOffset)); 186 SharedFunctionInfo::kConstructionCountOffset));
187 __ j(not_zero, &allocate); 187 __ j(not_zero, &allocate);
188 188
189 __ push(rax); 189 __ Push(rax);
190 __ push(rdi); 190 __ Push(rdi);
191 191
192 __ push(rdi); // constructor 192 __ Push(rdi); // constructor
193 // The call will replace the stub, so the countdown is only done once. 193 // The call will replace the stub, so the countdown is only done once.
194 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1); 194 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
195 195
196 __ pop(rdi); 196 __ Pop(rdi);
197 __ pop(rax); 197 __ Pop(rax);
198 198
199 __ bind(&allocate); 199 __ bind(&allocate);
200 } 200 }
201 201
202 // Now allocate the JSObject on the heap. 202 // Now allocate the JSObject on the heap.
203 __ movzxbq(rdi, FieldOperand(rax, Map::kInstanceSizeOffset)); 203 __ movzxbq(rdi, FieldOperand(rax, Map::kInstanceSizeOffset));
204 __ shl(rdi, Immediate(kPointerSizeLog2)); 204 __ shl(rdi, Immediate(kPointerSizeLog2));
205 // rdi: size of new object 205 // rdi: size of new object
206 __ Allocate(rdi, 206 __ Allocate(rdi,
207 rbx, 207 rbx,
(...skipping 116 matching lines...)
324 // rbx: JSObject (previous new top) 324 // rbx: JSObject (previous new top)
325 __ bind(&undo_allocation); 325 __ bind(&undo_allocation);
326 __ UndoAllocationInNewSpace(rbx); 326 __ UndoAllocationInNewSpace(rbx);
327 } 327 }
328 328
329 // Allocate the new receiver object using the runtime call. 329 // Allocate the new receiver object using the runtime call.
330 // rdi: function (constructor) 330 // rdi: function (constructor)
331 __ bind(&rt_call); 331 __ bind(&rt_call);
332 // Must restore rdi (constructor) before calling runtime. 332 // Must restore rdi (constructor) before calling runtime.
333 __ movp(rdi, Operand(rsp, 0)); 333 __ movp(rdi, Operand(rsp, 0));
334 __ push(rdi); 334 __ Push(rdi);
335 __ CallRuntime(Runtime::kNewObject, 1); 335 __ CallRuntime(Runtime::kNewObject, 1);
336 __ movp(rbx, rax); // store result in rbx 336 __ movp(rbx, rax); // store result in rbx
337 337
338 // New object allocated. 338 // New object allocated.
339 // rbx: newly allocated object 339 // rbx: newly allocated object
340 __ bind(&allocated); 340 __ bind(&allocated);
341 // Retrieve the function from the stack. 341 // Retrieve the function from the stack.
342 __ pop(rdi); 342 __ Pop(rdi);
343 343
344 // Retrieve smi-tagged arguments count from the stack. 344 // Retrieve smi-tagged arguments count from the stack.
345 __ movp(rax, Operand(rsp, 0)); 345 __ movp(rax, Operand(rsp, 0));
346 __ SmiToInteger32(rax, rax); 346 __ SmiToInteger32(rax, rax);
347 347
348 // Push the allocated receiver to the stack. We need two copies 348 // Push the allocated receiver to the stack. We need two copies
349 // because we may have to return the original one and the calling 349 // because we may have to return the original one and the calling
350 // conventions dictate that the called function pops the receiver. 350 // conventions dictate that the called function pops the receiver.
351 __ push(rbx); 351 __ Push(rbx);
352 __ push(rbx); 352 __ Push(rbx);
353 353
354 // Set up pointer to last argument. 354 // Set up pointer to last argument.
355 __ lea(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset)); 355 __ lea(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));
356 356
357 // Copy arguments and receiver to the expression stack. 357 // Copy arguments and receiver to the expression stack.
358 Label loop, entry; 358 Label loop, entry;
359 __ movp(rcx, rax); 359 __ movp(rcx, rax);
360 __ jmp(&entry); 360 __ jmp(&entry);
361 __ bind(&loop); 361 __ bind(&loop);
362 __ push(Operand(rbx, rcx, times_pointer_size, 0)); 362 __ Push(Operand(rbx, rcx, times_pointer_size, 0));
363 __ bind(&entry); 363 __ bind(&entry);
364 __ decq(rcx); 364 __ decq(rcx);
365 __ j(greater_equal, &loop); 365 __ j(greater_equal, &loop);
366 366
367 // Call the function. 367 // Call the function.
368 if (is_api_function) { 368 if (is_api_function) {
369 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); 369 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
370 Handle<Code> code = 370 Handle<Code> code =
371 masm->isolate()->builtins()->HandleApiCallConstruct(); 371 masm->isolate()->builtins()->HandleApiCallConstruct();
372 __ Call(code, RelocInfo::CODE_TARGET); 372 __ Call(code, RelocInfo::CODE_TARGET);
(...skipping 90 matching lines...)
463 463
464 // Clear the context before we push it when entering the internal frame. 464 // Clear the context before we push it when entering the internal frame.
465 __ Set(rsi, 0); 465 __ Set(rsi, 0);
466 // Enter an internal frame. 466 // Enter an internal frame.
467 FrameScope scope(masm, StackFrame::INTERNAL); 467 FrameScope scope(masm, StackFrame::INTERNAL);
468 468
469 // Load the function context into rsi. 469 // Load the function context into rsi.
470 __ movp(rsi, FieldOperand(rdx, JSFunction::kContextOffset)); 470 __ movp(rsi, FieldOperand(rdx, JSFunction::kContextOffset));
471 471
472 // Push the function and the receiver onto the stack. 472 // Push the function and the receiver onto the stack.
473 __ push(rdx); 473 __ Push(rdx);
474 __ push(r8); 474 __ Push(r8);
475 475
476 // Load the number of arguments and setup pointer to the arguments. 476 // Load the number of arguments and setup pointer to the arguments.
477 __ movp(rax, r9); 477 __ movp(rax, r9);
478 // Load the previous frame pointer to access C argument on stack 478 // Load the previous frame pointer to access C argument on stack
479 __ movp(kScratchRegister, Operand(rbp, 0)); 479 __ movp(kScratchRegister, Operand(rbp, 0));
480 __ movp(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset)); 480 __ movp(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
481 // Load the function pointer into rdi. 481 // Load the function pointer into rdi.
482 __ movp(rdi, rdx); 482 __ movp(rdi, rdx);
483 #else // _WIN64 483 #else // _WIN64
484 // GCC parameters in: 484 // GCC parameters in:
485 // rdi : entry (ignored) 485 // rdi : entry (ignored)
486 // rsi : function 486 // rsi : function
487 // rdx : receiver 487 // rdx : receiver
488 // rcx : argc 488 // rcx : argc
489 // r8 : argv 489 // r8 : argv
490 490
491 __ movp(rdi, rsi); 491 __ movp(rdi, rsi);
492 // rdi : function 492 // rdi : function
493 493
494 // Clear the context before we push it when entering the internal frame. 494 // Clear the context before we push it when entering the internal frame.
495 __ Set(rsi, 0); 495 __ Set(rsi, 0);
496 // Enter an internal frame. 496 // Enter an internal frame.
497 FrameScope scope(masm, StackFrame::INTERNAL); 497 FrameScope scope(masm, StackFrame::INTERNAL);
498 498
499 // Push the function and receiver and setup the context. 499 // Push the function and receiver and setup the context.
500 __ push(rdi); 500 __ Push(rdi);
501 __ push(rdx); 501 __ Push(rdx);
502 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); 502 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
503 503
504 // Load the number of arguments and setup pointer to the arguments. 504 // Load the number of arguments and setup pointer to the arguments.
505 __ movp(rax, rcx); 505 __ movp(rax, rcx);
506 __ movp(rbx, r8); 506 __ movp(rbx, r8);
507 #endif // _WIN64 507 #endif // _WIN64
508 508
509 // Current stack contents: 509 // Current stack contents:
510 // [rsp + 2 * kPointerSize ... ] : Internal frame 510 // [rsp + 2 * kPointerSize ... ] : Internal frame
511 // [rsp + kPointerSize] : function 511 // [rsp + kPointerSize] : function
512 // [rsp] : receiver 512 // [rsp] : receiver
513 // Current register contents: 513 // Current register contents:
514 // rax : argc 514 // rax : argc
515 // rbx : argv 515 // rbx : argv
516 // rsi : context 516 // rsi : context
517 // rdi : function 517 // rdi : function
518 518
519 // Copy arguments to the stack in a loop. 519 // Copy arguments to the stack in a loop.
520 // Register rbx points to array of pointers to handle locations. 520 // Register rbx points to array of pointers to handle locations.
521 // Push the values of these handles. 521 // Push the values of these handles.
522 Label loop, entry; 522 Label loop, entry;
523 __ Set(rcx, 0); // Set loop variable to 0. 523 __ Set(rcx, 0); // Set loop variable to 0.
524 __ jmp(&entry); 524 __ jmp(&entry);
525 __ bind(&loop); 525 __ bind(&loop);
526 __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0)); 526 __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
527 __ push(Operand(kScratchRegister, 0)); // dereference handle 527 __ Push(Operand(kScratchRegister, 0)); // dereference handle
528 __ addq(rcx, Immediate(1)); 528 __ addq(rcx, Immediate(1));
529 __ bind(&entry); 529 __ bind(&entry);
530 __ cmpq(rcx, rax); 530 __ cmpq(rcx, rax);
531 __ j(not_equal, &loop); 531 __ j(not_equal, &loop);
532 532
533 // Invoke the code. 533 // Invoke the code.
534 if (is_construct) { 534 if (is_construct) {
535 // No type feedback cell is available 535 // No type feedback cell is available
536 Handle<Object> megamorphic_sentinel = 536 Handle<Object> megamorphic_sentinel =
537 TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()); 537 TypeFeedbackInfo::MegamorphicSentinel(masm->isolate());
(...skipping 29 matching lines...)
567 void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) { 567 void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) {
568 CallRuntimePassFunction(masm, Runtime::kCompileUnoptimized); 568 CallRuntimePassFunction(masm, Runtime::kCompileUnoptimized);
569 GenerateTailCallToReturnedCode(masm); 569 GenerateTailCallToReturnedCode(masm);
570 } 570 }
571 571
572 572
573 static void CallCompileOptimized(MacroAssembler* masm, 573 static void CallCompileOptimized(MacroAssembler* masm,
574 bool concurrent) { 574 bool concurrent) {
575 FrameScope scope(masm, StackFrame::INTERNAL); 575 FrameScope scope(masm, StackFrame::INTERNAL);
576 // Push a copy of the function onto the stack. 576 // Push a copy of the function onto the stack.
577 __ push(rdi); 577 __ Push(rdi);
578 // Function is also the parameter to the runtime call. 578 // Function is also the parameter to the runtime call.
579 __ push(rdi); 579 __ Push(rdi);
580 // Whether to compile in a background thread. 580 // Whether to compile in a background thread.
581 __ Push(masm->isolate()->factory()->ToBoolean(concurrent)); 581 __ Push(masm->isolate()->factory()->ToBoolean(concurrent));
582 582
583 __ CallRuntime(Runtime::kCompileOptimized, 2); 583 __ CallRuntime(Runtime::kCompileOptimized, 2);
584 // Restore receiver. 584 // Restore receiver.
585 __ pop(rdi); 585 __ Pop(rdi);
586 } 586 }
587 587
588 588
589 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) { 589 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
590 CallCompileOptimized(masm, false); 590 CallCompileOptimized(masm, false);
591 GenerateTailCallToReturnedCode(masm); 591 GenerateTailCallToReturnedCode(masm);
592 } 592 }
593 593
594 594
595 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) { 595 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
(...skipping 52 matching lines...)
648 FrameScope scope(masm, StackFrame::MANUAL); 648 FrameScope scope(masm, StackFrame::MANUAL);
649 __ PrepareCallCFunction(2); 649 __ PrepareCallCFunction(2);
650 __ CallCFunction( 650 __ CallCFunction(
651 ExternalReference::get_mark_code_as_executed_function(masm->isolate()), 651 ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
652 2); 652 2);
653 } 653 }
654 __ Popad(); 654 __ Popad();
655 655
656 // Perform prologue operations usually performed by the young code stub. 656 // Perform prologue operations usually performed by the young code stub.
657 __ PopReturnAddressTo(kScratchRegister); 657 __ PopReturnAddressTo(kScratchRegister);
658 __ push(rbp); // Caller's frame pointer. 658 __ pushq(rbp); // Caller's frame pointer.
659 __ movp(rbp, rsp); 659 __ movp(rbp, rsp);
660 __ push(rsi); // Callee's context. 660 __ Push(rsi); // Callee's context.
661 __ push(rdi); // Callee's JS Function. 661 __ Push(rdi); // Callee's JS Function.
662 __ PushReturnAddressFrom(kScratchRegister); 662 __ PushReturnAddressFrom(kScratchRegister);
663 663
664 // Jump to point after the code-age stub. 664 // Jump to point after the code-age stub.
665 __ ret(0); 665 __ ret(0);
666 } 666 }
667 667
668 668
669 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) { 669 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
670 GenerateMakeCodeYoungAgainCommon(masm); 670 GenerateMakeCodeYoungAgainCommon(masm);
671 } 671 }
672 672
673 673
674 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm, 674 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
675 SaveFPRegsMode save_doubles) { 675 SaveFPRegsMode save_doubles) {
676 // Enter an internal frame. 676 // Enter an internal frame.
677 { 677 {
678 FrameScope scope(masm, StackFrame::INTERNAL); 678 FrameScope scope(masm, StackFrame::INTERNAL);
679 679
680 // Preserve registers across notification, this is important for compiled 680 // Preserve registers across notification, this is important for compiled
681 // stubs that tail call the runtime on deopts passing their parameters in 681 // stubs that tail call the runtime on deopts passing their parameters in
682 // registers. 682 // registers.
683 __ Pushad(); 683 __ Pushad();
684 __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles); 684 __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
685 __ Popad(); 685 __ Popad();
686 // Tear down internal frame. 686 // Tear down internal frame.
687 } 687 }
688 688
689 __ pop(MemOperand(rsp, 0)); // Ignore state offset 689 __ Pop(MemOperand(rsp, 0)); // Ignore state offset
690 __ ret(0); // Return to IC Miss stub, continuation still on stack. 690 __ ret(0); // Return to IC Miss stub, continuation still on stack.
691 } 691 }
692 692
693 693
694 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) { 694 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
695 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs); 695 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
696 } 696 }
697 697
698 698
699 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) { 699 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
(...skipping 110 matching lines...)
810 810
811 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); 811 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
812 __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx); 812 __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx);
813 __ j(above_equal, &shift_arguments); 813 __ j(above_equal, &shift_arguments);
814 814
815 __ bind(&convert_to_object); 815 __ bind(&convert_to_object);
816 { 816 {
817 // Enter an internal frame in order to preserve argument count. 817 // Enter an internal frame in order to preserve argument count.
818 FrameScope scope(masm, StackFrame::INTERNAL); 818 FrameScope scope(masm, StackFrame::INTERNAL);
819 __ Integer32ToSmi(rax, rax); 819 __ Integer32ToSmi(rax, rax);
820 __ push(rax); 820 __ Push(rax);
821 821
822 __ push(rbx); 822 __ Push(rbx);
823 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); 823 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
824 __ movp(rbx, rax); 824 __ movp(rbx, rax);
825 __ Set(rdx, 0); // indicate regular JS_FUNCTION 825 __ Set(rdx, 0); // indicate regular JS_FUNCTION
826 826
827 __ pop(rax); 827 __ Pop(rax);
828 __ SmiToInteger32(rax, rax); 828 __ SmiToInteger32(rax, rax);
829 } 829 }
830 830
831 // Restore the function to rdi. 831 // Restore the function to rdi.
832 __ movp(rdi, args.GetReceiverOperand()); 832 __ movp(rdi, args.GetReceiverOperand());
833 __ jmp(&patch_receiver, Label::kNear); 833 __ jmp(&patch_receiver, Label::kNear);
834 834
835 __ bind(&use_global_receiver); 835 __ bind(&use_global_receiver);
836 __ movp(rbx, 836 __ movp(rbx,
837 Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); 837 Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
(...skipping 23 matching lines...)
861 // (overwriting the original receiver). Adjust argument count to make 861 // (overwriting the original receiver). Adjust argument count to make
862 // the original first argument the new receiver. 862 // the original first argument the new receiver.
863 __ bind(&shift_arguments); 863 __ bind(&shift_arguments);
864 { Label loop; 864 { Label loop;
865 __ movp(rcx, rax); 865 __ movp(rcx, rax);
866 __ bind(&loop); 866 __ bind(&loop);
867 __ movp(rbx, Operand(rsp, rcx, times_pointer_size, 0)); 867 __ movp(rbx, Operand(rsp, rcx, times_pointer_size, 0));
868 __ movp(Operand(rsp, rcx, times_pointer_size, 1 * kPointerSize), rbx); 868 __ movp(Operand(rsp, rcx, times_pointer_size, 1 * kPointerSize), rbx);
869 __ decq(rcx); 869 __ decq(rcx);
870 __ j(not_sign, &loop); // While non-negative (to copy return address). 870 __ j(not_sign, &loop); // While non-negative (to copy return address).
871 __ pop(rbx); // Discard copy of return address. 871 __ popq(rbx); // Discard copy of return address.
872 __ decq(rax); // One fewer argument (first argument is new receiver). 872 __ decq(rax); // One fewer argument (first argument is new receiver).
873 } 873 }
874 874
875 // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin, 875 // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
876 // or a function proxy via CALL_FUNCTION_PROXY. 876 // or a function proxy via CALL_FUNCTION_PROXY.
877 { Label function, non_proxy; 877 { Label function, non_proxy;
878 __ testq(rdx, rdx); 878 __ testq(rdx, rdx);
879 __ j(zero, &function); 879 __ j(zero, &function);
880 __ Set(rbx, 0); 880 __ Set(rbx, 0);
881 __ cmpq(rdx, Immediate(1)); 881 __ cmpq(rdx, Immediate(1));
882 __ j(not_equal, &non_proxy); 882 __ j(not_equal, &non_proxy);
883 883
884 __ PopReturnAddressTo(rdx); 884 __ PopReturnAddressTo(rdx);
885 __ push(rdi); // re-add proxy object as additional argument 885 __ Push(rdi); // re-add proxy object as additional argument
886 __ PushReturnAddressFrom(rdx); 886 __ PushReturnAddressFrom(rdx);
887 __ incq(rax); 887 __ incq(rax);
888 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY); 888 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
889 __ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), 889 __ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
890 RelocInfo::CODE_TARGET); 890 RelocInfo::CODE_TARGET);
891 891
892 __ bind(&non_proxy); 892 __ bind(&non_proxy);
893 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION); 893 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
894 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), 894 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
895 RelocInfo::CODE_TARGET); 895 RelocInfo::CODE_TARGET);
(...skipping 29 matching lines...)
925 // Stack frame: 925 // Stack frame:
926 // rbp : Old base pointer 926 // rbp : Old base pointer
927 // rbp[8] : return address 927 // rbp[8] : return address
928 // rbp[16] : function arguments 928 // rbp[16] : function arguments
929 // rbp[24] : receiver 929 // rbp[24] : receiver
930 // rbp[32] : function 930 // rbp[32] : function
931 static const int kArgumentsOffset = kFPOnStackSize + kPCOnStackSize; 931 static const int kArgumentsOffset = kFPOnStackSize + kPCOnStackSize;
932 static const int kReceiverOffset = kArgumentsOffset + kPointerSize; 932 static const int kReceiverOffset = kArgumentsOffset + kPointerSize;
933 static const int kFunctionOffset = kReceiverOffset + kPointerSize; 933 static const int kFunctionOffset = kReceiverOffset + kPointerSize;
934 934
935 __ push(Operand(rbp, kFunctionOffset)); 935 __ Push(Operand(rbp, kFunctionOffset));
936 __ push(Operand(rbp, kArgumentsOffset)); 936 __ Push(Operand(rbp, kArgumentsOffset));
937 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION); 937 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
938 938
939 // Check the stack for overflow. We are not trying to catch 939 // Check the stack for overflow. We are not trying to catch
940 // interruptions (e.g. debug break and preemption) here, so the "real stack 940 // interruptions (e.g. debug break and preemption) here, so the "real stack
941 // limit" is checked. 941 // limit" is checked.
942 Label okay; 942 Label okay;
943 __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex); 943 __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
944 __ movp(rcx, rsp); 944 __ movp(rcx, rsp);
945 // Make rcx the space we have left. The stack might already be overflowed 945 // Make rcx the space we have left. The stack might already be overflowed
946 // here which will cause rcx to become negative. 946 // here which will cause rcx to become negative.
947 __ subq(rcx, kScratchRegister); 947 __ subq(rcx, kScratchRegister);
948 // Make rdx the space we need for the array when it is unrolled onto the 948 // Make rdx the space we need for the array when it is unrolled onto the
949 // stack. 949 // stack.
950 __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2); 950 __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2);
951 // Check if the arguments will overflow the stack. 951 // Check if the arguments will overflow the stack.
952 __ cmpq(rcx, rdx); 952 __ cmpq(rcx, rdx);
953 __ j(greater, &okay); // Signed comparison. 953 __ j(greater, &okay); // Signed comparison.
954 954
955 // Out of stack space. 955 // Out of stack space.
956 __ push(Operand(rbp, kFunctionOffset)); 956 __ Push(Operand(rbp, kFunctionOffset));
957 __ push(rax); 957 __ Push(rax);
958 __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION); 958 __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
959 __ bind(&okay); 959 __ bind(&okay);
960 // End of stack check. 960 // End of stack check.
961 961
962 // Push current index and limit. 962 // Push current index and limit.
963 const int kLimitOffset = 963 const int kLimitOffset =
964 StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize; 964 StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize;
965 const int kIndexOffset = kLimitOffset - 1 * kPointerSize; 965 const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
966 __ push(rax); // limit 966 __ Push(rax); // limit
967 __ push(Immediate(0)); // index 967 __ Push(Immediate(0)); // index
968 968
969 // Get the receiver. 969 // Get the receiver.
970 __ movp(rbx, Operand(rbp, kReceiverOffset)); 970 __ movp(rbx, Operand(rbp, kReceiverOffset));
971 971
972 // Check that the function is a JS function (otherwise it must be a proxy). 972 // Check that the function is a JS function (otherwise it must be a proxy).
973 Label push_receiver; 973 Label push_receiver;
974 __ movp(rdi, Operand(rbp, kFunctionOffset)); 974 __ movp(rdi, Operand(rbp, kFunctionOffset));
975 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); 975 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
976 __ j(not_equal, &push_receiver); 976 __ j(not_equal, &push_receiver);
977 977
(...skipping 20 matching lines...)
998 __ j(equal, &use_global_receiver); 998 __ j(equal, &use_global_receiver);
999 999
1000 // If given receiver is already a JavaScript object then there's no 1000 // If given receiver is already a JavaScript object then there's no
1001 // reason for converting it. 1001 // reason for converting it.
1002 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); 1002 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
1003 __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx); 1003 __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx);
1004 __ j(above_equal, &push_receiver); 1004 __ j(above_equal, &push_receiver);
1005 1005
1006 // Convert the receiver to an object. 1006 // Convert the receiver to an object.
1007 __ bind(&call_to_object); 1007 __ bind(&call_to_object);
1008 __ push(rbx); 1008 __ Push(rbx);
1009 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); 1009 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1010 __ movp(rbx, rax); 1010 __ movp(rbx, rax);
1011 __ jmp(&push_receiver, Label::kNear); 1011 __ jmp(&push_receiver, Label::kNear);
1012 1012
1013 __ bind(&use_global_receiver); 1013 __ bind(&use_global_receiver);
1014 __ movp(rbx, 1014 __ movp(rbx,
1015 Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); 1015 Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
1016 __ movp(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset)); 1016 __ movp(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
1017 1017
1018 // Push the receiver. 1018 // Push the receiver.
1019 __ bind(&push_receiver); 1019 __ bind(&push_receiver);
1020 __ push(rbx); 1020 __ Push(rbx);
1021 1021
1022 // Copy all arguments from the array to the stack. 1022 // Copy all arguments from the array to the stack.
1023 Label entry, loop; 1023 Label entry, loop;
1024 __ movp(rax, Operand(rbp, kIndexOffset)); 1024 __ movp(rax, Operand(rbp, kIndexOffset));
1025 __ jmp(&entry); 1025 __ jmp(&entry);
1026 __ bind(&loop); 1026 __ bind(&loop);
1027 __ movp(rdx, Operand(rbp, kArgumentsOffset)); // load arguments 1027 __ movp(rdx, Operand(rbp, kArgumentsOffset)); // load arguments
1028 1028
1029 // Use inline caching to speed up access to arguments. 1029 // Use inline caching to speed up access to arguments.
1030 Handle<Code> ic = 1030 Handle<Code> ic =
1031 masm->isolate()->builtins()->KeyedLoadIC_Initialize(); 1031 masm->isolate()->builtins()->KeyedLoadIC_Initialize();
1032 __ Call(ic, RelocInfo::CODE_TARGET); 1032 __ Call(ic, RelocInfo::CODE_TARGET);
1033 // It is important that we do not have a test instruction after the 1033 // It is important that we do not have a test instruction after the
1034 // call. A test instruction after the call is used to indicate that 1034 // call. A test instruction after the call is used to indicate that
1035 // we have generated an inline version of the keyed load. In this 1035 // we have generated an inline version of the keyed load. In this
1036 // case, we know that we are not generating a test instruction next. 1036 // case, we know that we are not generating a test instruction next.
1037 1037
1038 // Push the nth argument. 1038 // Push the nth argument.
1039 __ push(rax); 1039 __ Push(rax);
1040 1040
1041 // Update the index on the stack and in register rax. 1041 // Update the index on the stack and in register rax.
1042 __ movp(rax, Operand(rbp, kIndexOffset)); 1042 __ movp(rax, Operand(rbp, kIndexOffset));
1043 __ SmiAddConstant(rax, rax, Smi::FromInt(1)); 1043 __ SmiAddConstant(rax, rax, Smi::FromInt(1));
1044 __ movp(Operand(rbp, kIndexOffset), rax); 1044 __ movp(Operand(rbp, kIndexOffset), rax);
1045 1045
1046 __ bind(&entry); 1046 __ bind(&entry);
1047 __ cmpq(rax, Operand(rbp, kLimitOffset)); 1047 __ cmpq(rax, Operand(rbp, kLimitOffset));
1048 __ j(not_equal, &loop); 1048 __ j(not_equal, &loop);
1049 1049
1050 // Call the function. 1050 // Call the function.
1051 Label call_proxy; 1051 Label call_proxy;
1052 ParameterCount actual(rax); 1052 ParameterCount actual(rax);
1053 __ SmiToInteger32(rax, rax); 1053 __ SmiToInteger32(rax, rax);
1054 __ movp(rdi, Operand(rbp, kFunctionOffset)); 1054 __ movp(rdi, Operand(rbp, kFunctionOffset));
1055 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); 1055 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
1056 __ j(not_equal, &call_proxy); 1056 __ j(not_equal, &call_proxy);
1057 __ InvokeFunction(rdi, actual, CALL_FUNCTION, NullCallWrapper()); 1057 __ InvokeFunction(rdi, actual, CALL_FUNCTION, NullCallWrapper());
1058 1058
1059 frame_scope.GenerateLeaveFrame(); 1059 frame_scope.GenerateLeaveFrame();
1060 __ ret(3 * kPointerSize); // remove this, receiver, and arguments 1060 __ ret(3 * kPointerSize); // remove this, receiver, and arguments
1061 1061
1062 // Call the function proxy. 1062 // Call the function proxy.
1063 __ bind(&call_proxy); 1063 __ bind(&call_proxy);
1064 __ push(rdi); // add function proxy as last argument 1064 __ Push(rdi); // add function proxy as last argument
1065 __ incq(rax); 1065 __ incq(rax);
1066 __ Set(rbx, 0); 1066 __ Set(rbx, 0);
1067 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY); 1067 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
1068 __ call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), 1068 __ call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1069 RelocInfo::CODE_TARGET); 1069 RelocInfo::CODE_TARGET);
1070 1070
1071 // Leave internal frame. 1071 // Leave internal frame.
1072 } 1072 }
1073 __ ret(3 * kPointerSize); // remove this, receiver, and arguments 1073 __ ret(3 * kPointerSize); // remove this, receiver, and arguments
1074 } 1074 }
(...skipping 150 matching lines...)
1225 __ j(NegateCondition(is_string), &convert_argument); 1225 __ j(NegateCondition(is_string), &convert_argument);
1226 __ movp(rbx, rax); 1226 __ movp(rbx, rax);
1227 __ IncrementCounter(counters->string_ctor_string_value(), 1); 1227 __ IncrementCounter(counters->string_ctor_string_value(), 1);
1228 __ jmp(&argument_is_string); 1228 __ jmp(&argument_is_string);
1229 1229
1230 // Invoke the conversion builtin and put the result into rbx. 1230 // Invoke the conversion builtin and put the result into rbx.
1231 __ bind(&convert_argument); 1231 __ bind(&convert_argument);
1232 __ IncrementCounter(counters->string_ctor_conversions(), 1); 1232 __ IncrementCounter(counters->string_ctor_conversions(), 1);
1233 { 1233 {
1234 FrameScope scope(masm, StackFrame::INTERNAL); 1234 FrameScope scope(masm, StackFrame::INTERNAL);
1235 __ push(rdi); // Preserve the function. 1235 __ Push(rdi); // Preserve the function.
1236 __ push(rax); 1236 __ Push(rax);
1237 __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION); 1237 __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
1238 __ pop(rdi); 1238 __ Pop(rdi);
1239 } 1239 }
1240 __ movp(rbx, rax); 1240 __ movp(rbx, rax);
1241 __ jmp(&argument_is_string); 1241 __ jmp(&argument_is_string);
1242 1242
1243 // Load the empty string into rbx, remove the receiver from the 1243 // Load the empty string into rbx, remove the receiver from the
1244 // stack, and jump back to the case where the argument is a string. 1244 // stack, and jump back to the case where the argument is a string.
1245 __ bind(&no_arguments); 1245 __ bind(&no_arguments);
1246 __ LoadRoot(rbx, Heap::kempty_stringRootIndex); 1246 __ LoadRoot(rbx, Heap::kempty_stringRootIndex);
1247 __ PopReturnAddressTo(rcx); 1247 __ PopReturnAddressTo(rcx);
1248 __ lea(rsp, Operand(rsp, kPointerSize)); 1248 __ lea(rsp, Operand(rsp, kPointerSize));
1249 __ PushReturnAddressFrom(rcx); 1249 __ PushReturnAddressFrom(rcx);
1250 __ jmp(&argument_is_string); 1250 __ jmp(&argument_is_string);
1251 1251
1252 // At this point the argument is already a string. Call runtime to 1252 // At this point the argument is already a string. Call runtime to
1253 // create a string wrapper. 1253 // create a string wrapper.
1254 __ bind(&gc_required); 1254 __ bind(&gc_required);
1255 __ IncrementCounter(counters->string_ctor_gc_required(), 1); 1255 __ IncrementCounter(counters->string_ctor_gc_required(), 1);
1256 { 1256 {
1257 FrameScope scope(masm, StackFrame::INTERNAL); 1257 FrameScope scope(masm, StackFrame::INTERNAL);
1258 __ push(rbx); 1258 __ Push(rbx);
1259 __ CallRuntime(Runtime::kNewStringWrapper, 1); 1259 __ CallRuntime(Runtime::kNewStringWrapper, 1);
1260 } 1260 }
1261 __ ret(0); 1261 __ ret(0);
1262 } 1262 }
1263 1263
1264 1264
1265 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) { 1265 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
1266 __ push(rbp); 1266 __ pushq(rbp);
1267 __ movp(rbp, rsp); 1267 __ movp(rbp, rsp);
1268 1268
1269 // Store the arguments adaptor context sentinel. 1269 // Store the arguments adaptor context sentinel.
1270 __ Push(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); 1270 __ Push(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
1271 1271
1272 // Push the function on the stack. 1272 // Push the function on the stack.
1273 __ push(rdi); 1273 __ Push(rdi);
1274 1274
1275 // Preserve the number of arguments on the stack. Must preserve rax, 1275 // Preserve the number of arguments on the stack. Must preserve rax,
1276 // rbx and rcx because these registers are used when copying the 1276 // rbx and rcx because these registers are used when copying the
1277 // arguments and the receiver. 1277 // arguments and the receiver.
1278 __ Integer32ToSmi(r8, rax); 1278 __ Integer32ToSmi(r8, rax);
1279 __ push(r8); 1279 __ Push(r8);
1280 } 1280 }
1281 1281
1282 1282
1283 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) { 1283 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1284 // Retrieve the number of arguments from the stack. Number is a Smi. 1284 // Retrieve the number of arguments from the stack. Number is a Smi.
1285 __ movp(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset)); 1285 __ movp(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));
1286 1286
1287 // Leave the frame. 1287 // Leave the frame.
1288 __ movp(rsp, rbp); 1288 __ movp(rsp, rbp);
1289 __ pop(rbp); 1289 __ popq(rbp);
1290 1290
1291 // Remove caller arguments from the stack. 1291 // Remove caller arguments from the stack.
1292 __ PopReturnAddressTo(rcx); 1292 __ PopReturnAddressTo(rcx);
1293 SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2); 1293 SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
1294 __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize)); 1294 __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
1295 __ PushReturnAddressFrom(rcx); 1295 __ PushReturnAddressFrom(rcx);
1296 } 1296 }
1297 1297
1298 1298
1299 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { 1299 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
(...skipping 19 matching lines...)
1319 EnterArgumentsAdaptorFrame(masm); 1319 EnterArgumentsAdaptorFrame(masm);
1320 1320
1321 // Copy receiver and all expected arguments. 1321 // Copy receiver and all expected arguments.
1322 const int offset = StandardFrameConstants::kCallerSPOffset; 1322 const int offset = StandardFrameConstants::kCallerSPOffset;
1323 __ lea(rax, Operand(rbp, rax, times_pointer_size, offset)); 1323 __ lea(rax, Operand(rbp, rax, times_pointer_size, offset));
1324 __ Set(r8, -1); // account for receiver 1324 __ Set(r8, -1); // account for receiver
1325 1325
1326 Label copy; 1326 Label copy;
1327 __ bind(&copy); 1327 __ bind(&copy);
1328 __ incq(r8); 1328 __ incq(r8);
1329 __ push(Operand(rax, 0)); 1329 __ Push(Operand(rax, 0));
1330 __ subq(rax, Immediate(kPointerSize)); 1330 __ subq(rax, Immediate(kPointerSize));
1331 __ cmpq(r8, rbx); 1331 __ cmpq(r8, rbx);
1332 __ j(less, &copy); 1332 __ j(less, &copy);
1333 __ jmp(&invoke); 1333 __ jmp(&invoke);
1334 } 1334 }
1335 1335
1336 { // Too few parameters: Actual < expected. 1336 { // Too few parameters: Actual < expected.
1337 __ bind(&too_few); 1337 __ bind(&too_few);
1338 EnterArgumentsAdaptorFrame(masm); 1338 EnterArgumentsAdaptorFrame(masm);
1339 1339
1340 // Copy receiver and all actual arguments. 1340 // Copy receiver and all actual arguments.
1341 const int offset = StandardFrameConstants::kCallerSPOffset; 1341 const int offset = StandardFrameConstants::kCallerSPOffset;
1342 __ lea(rdi, Operand(rbp, rax, times_pointer_size, offset)); 1342 __ lea(rdi, Operand(rbp, rax, times_pointer_size, offset));
1343 __ Set(r8, -1); // account for receiver 1343 __ Set(r8, -1); // account for receiver
1344 1344
1345 Label copy; 1345 Label copy;
1346 __ bind(&copy); 1346 __ bind(&copy);
1347 __ incq(r8); 1347 __ incq(r8);
1348 __ push(Operand(rdi, 0)); 1348 __ Push(Operand(rdi, 0));
1349 __ subq(rdi, Immediate(kPointerSize)); 1349 __ subq(rdi, Immediate(kPointerSize));
1350 __ cmpq(r8, rax); 1350 __ cmpq(r8, rax);
1351 __ j(less, &copy); 1351 __ j(less, &copy);
1352 1352
1353 // Fill remaining expected arguments with undefined values. 1353 // Fill remaining expected arguments with undefined values.
1354 Label fill; 1354 Label fill;
1355 __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex); 1355 __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
1356 __ bind(&fill); 1356 __ bind(&fill);
1357 __ incq(r8); 1357 __ incq(r8);
1358 __ push(kScratchRegister); 1358 __ Push(kScratchRegister);
1359 __ cmpq(r8, rbx); 1359 __ cmpq(r8, rbx);
1360 __ j(less, &fill); 1360 __ j(less, &fill);
1361 1361
1362 // Restore function pointer. 1362 // Restore function pointer.
1363 __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); 1363 __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1364 } 1364 }
1365 1365
1366 // Call the entry point. 1366 // Call the entry point.
1367 __ bind(&invoke); 1367 __ bind(&invoke);
1368 __ call(rdx); 1368 __ call(rdx);
(...skipping 12 matching lines...)
1381 __ jmp(rdx); 1381 __ jmp(rdx);
1382 } 1382 }
1383 1383
1384 1384
1385 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { 1385 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
1386 // Lookup the function in the JavaScript frame. 1386 // Lookup the function in the JavaScript frame.
1387 __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); 1387 __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1388 { 1388 {
1389 FrameScope scope(masm, StackFrame::INTERNAL); 1389 FrameScope scope(masm, StackFrame::INTERNAL);
1390 // Pass function as argument. 1390 // Pass function as argument.
1391 __ push(rax); 1391 __ Push(rax);
1392 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1); 1392 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
1393 } 1393 }
1394 1394
1395 Label skip; 1395 Label skip;
1396 // If the code object is null, just return to the unoptimized code. 1396 // If the code object is null, just return to the unoptimized code.
1397 __ cmpq(rax, Immediate(0)); 1397 __ cmpq(rax, Immediate(0));
1398 __ j(not_equal, &skip, Label::kNear); 1398 __ j(not_equal, &skip, Label::kNear);
1399 __ ret(0); 1399 __ ret(0);
1400 1400
1401 __ bind(&skip); 1401 __ bind(&skip);
(...skipping 31 matching lines...)
1433 __ bind(&ok); 1433 __ bind(&ok);
1434 __ ret(0); 1434 __ ret(0);
1435 } 1435 }
1436 1436
1437 1437
1438 #undef __ 1438 #undef __
1439 1439
1440 } } // namespace v8::internal 1440 } } // namespace v8::internal
1441 1441
1442 #endif // V8_TARGET_ARCH_X64 1442 #endif // V8_TARGET_ARCH_X64