| OLD | NEW |
| 1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 543 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 554 __ LeaveInternalFrame(); | 554 __ LeaveInternalFrame(); |
| 555 | 555 |
| 556 // Do a tail-call of the compiled function. | 556 // Do a tail-call of the compiled function. |
| 557 __ lea(rcx, FieldOperand(rax, Code::kHeaderSize)); | 557 __ lea(rcx, FieldOperand(rax, Code::kHeaderSize)); |
| 558 __ jmp(rcx); | 558 __ jmp(rcx); |
| 559 } | 559 } |
| 560 | 560 |
| 561 | 561 |
| 562 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, | 562 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, |
| 563 Deoptimizer::BailoutType type) { | 563 Deoptimizer::BailoutType type) { |
| 564 __ int3(); | 564 // Enter an internal frame. |
| 565 __ EnterInternalFrame(); |
| 566 |
| 567 // Pass the deoptimization type to the runtime system. |
| 568 __ Push(Smi::FromInt(static_cast<int>(type))); |
| 569 |
| 570 __ CallRuntime(Runtime::kNotifyDeoptimized, 1); |
| 571 // Tear down temporary frame. |
| 572 __ LeaveInternalFrame(); |
| 573 |
| 574 // Get the full codegen state from the stack and untag it. |
| 575 __ SmiToInteger32(rcx, Operand(rsp, 1 * kPointerSize)); |
| 576 |
| 577 // Switch on the state. |
| 578 NearLabel not_no_registers, not_tos_rax; |
| 579 __ cmpq(rcx, Immediate(FullCodeGenerator::NO_REGISTERS)); |
| 580 __ j(not_equal, &not_no_registers); |
| 581 __ ret(1 * kPointerSize); // Remove state. |
| 582 |
| 583 __ bind(&not_no_registers); |
| 584 __ movq(rax, Operand(rsp, 2 * kPointerSize)); |
| 585 __ cmpq(rcx, Immediate(FullCodeGenerator::TOS_REG)); |
| 586 __ j(not_equal, &not_tos_rax); |
| 587 __ ret(2 * kPointerSize); // Remove state, rax. |
| 588 |
| 589 __ bind(&not_tos_rax); |
| 590 __ Abort("no cases left"); |
| 565 } | 591 } |
| 566 | 592 |
| 567 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) { | 593 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) { |
| 568 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER); | 594 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER); |
| 569 } | 595 } |
| 570 | 596 |
| 571 | 597 |
| 572 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) { | 598 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) { |
| 573 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER); | 599 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY); |
| 574 } | 600 } |
| 575 | 601 |
| 576 | 602 |
| 577 void Builtins::Generate_NotifyOSR(MacroAssembler* masm) { | 603 void Builtins::Generate_NotifyOSR(MacroAssembler* masm) { |
| 578 __ int3(); | 604 // For now, we are relying on the fact that Runtime::NotifyOSR |
| 605 // doesn't do any garbage collection which allows us to save/restore |
| 606 // the registers without worrying about which of them contain |
| 607 // pointers. This seems a bit fragile. |
| 608 __ Pushad(); |
| 609 __ EnterInternalFrame(); |
| 610 __ CallRuntime(Runtime::kNotifyOSR, 0); |
| 611 __ LeaveInternalFrame(); |
| 612 __ Popad(); |
| 613 __ ret(0); |
| 579 } | 614 } |
| 580 | 615 |
| 581 | 616 |
| 582 void Builtins::Generate_FunctionCall(MacroAssembler* masm) { | 617 void Builtins::Generate_FunctionCall(MacroAssembler* masm) { |
| 583 // Stack Layout: | 618 // Stack Layout: |
| 584 // rsp[0]: Return address | 619 // rsp[0]: Return address |
| 585 // rsp[1]: Argument n | 620 // rsp[1]: Argument n |
| 586 // rsp[2]: Argument n-1 | 621 // rsp[2]: Argument n-1 |
| 587 // ... | 622 // ... |
| 588 // rsp[n]: Argument 1 | 623 // rsp[n]: Argument 1 |
| (...skipping 20 matching lines...) Expand all Loading... |
| 609 __ JumpIfSmi(rdi, &non_function); | 644 __ JumpIfSmi(rdi, &non_function); |
| 610 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); | 645 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); |
| 611 __ j(not_equal, &non_function); | 646 __ j(not_equal, &non_function); |
| 612 | 647 |
| 613 // 3a. Patch the first argument if necessary when calling a function. | 648 // 3a. Patch the first argument if necessary when calling a function. |
| 614 Label shift_arguments; | 649 Label shift_arguments; |
| 615 { Label convert_to_object, use_global_receiver, patch_receiver; | 650 { Label convert_to_object, use_global_receiver, patch_receiver; |
| 616 // Change context eagerly in case we need the global receiver. | 651 // Change context eagerly in case we need the global receiver. |
| 617 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); | 652 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); |
| 618 | 653 |
| 654 // Do not transform the receiver for strict mode functions. |
| 655 __ movq(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); |
| 656 __ testb(FieldOperand(rbx, SharedFunctionInfo::kStrictModeByteOffset), |
| 657 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte)); |
| 658 __ j(not_equal, &shift_arguments); |
| 659 |
| 660 // Compute the receiver in non-strict mode. |
| 619 __ movq(rbx, Operand(rsp, rax, times_pointer_size, 0)); | 661 __ movq(rbx, Operand(rsp, rax, times_pointer_size, 0)); |
| 620 __ JumpIfSmi(rbx, &convert_to_object); | 662 __ JumpIfSmi(rbx, &convert_to_object); |
| 621 | 663 |
| 622 __ CompareRoot(rbx, Heap::kNullValueRootIndex); | 664 __ CompareRoot(rbx, Heap::kNullValueRootIndex); |
| 623 __ j(equal, &use_global_receiver); | 665 __ j(equal, &use_global_receiver); |
| 624 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex); | 666 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex); |
| 625 __ j(equal, &use_global_receiver); | 667 __ j(equal, &use_global_receiver); |
| 626 | 668 |
| 627 __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, rcx); | 669 __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, rcx); |
| 628 __ j(below, &convert_to_object); | 670 __ j(below, &convert_to_object); |
| (...skipping 136 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 765 __ push(Immediate(0)); // index | 807 __ push(Immediate(0)); // index |
| 766 | 808 |
| 767 // Change context eagerly to get the right global object if | 809 // Change context eagerly to get the right global object if |
| 768 // necessary. | 810 // necessary. |
| 769 __ movq(rdi, Operand(rbp, kFunctionOffset)); | 811 __ movq(rdi, Operand(rbp, kFunctionOffset)); |
| 770 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); | 812 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); |
| 771 | 813 |
| 772 // Compute the receiver. | 814 // Compute the receiver. |
| 773 Label call_to_object, use_global_receiver, push_receiver; | 815 Label call_to_object, use_global_receiver, push_receiver; |
| 774 __ movq(rbx, Operand(rbp, kReceiverOffset)); | 816 __ movq(rbx, Operand(rbp, kReceiverOffset)); |
| 817 |
| 818 // Do not transform the receiver for strict mode functions. |
| 819 __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); |
| 820 __ testb(FieldOperand(rdx, SharedFunctionInfo::kStrictModeByteOffset), |
| 821 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte)); |
| 822 __ j(not_equal, &push_receiver); |
| 823 |
| 824 // Compute the receiver in non-strict mode. |
| 775 __ JumpIfSmi(rbx, &call_to_object); | 825 __ JumpIfSmi(rbx, &call_to_object); |
| 776 __ CompareRoot(rbx, Heap::kNullValueRootIndex); | 826 __ CompareRoot(rbx, Heap::kNullValueRootIndex); |
| 777 __ j(equal, &use_global_receiver); | 827 __ j(equal, &use_global_receiver); |
| 778 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex); | 828 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex); |
| 779 __ j(equal, &use_global_receiver); | 829 __ j(equal, &use_global_receiver); |
| 780 | 830 |
| 781 // If given receiver is already a JavaScript object then there's no | 831 // If given receiver is already a JavaScript object then there's no |
| 782 // reason for converting it. | 832 // reason for converting it. |
| 783 __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, rcx); | 833 __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, rcx); |
| 784 __ j(below, &call_to_object); | 834 __ j(below, &call_to_object); |
| (...skipping 588 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1373 | 1423 |
| 1374 // ------------------------------------------- | 1424 // ------------------------------------------- |
| 1375 // Dont adapt arguments. | 1425 // Dont adapt arguments. |
| 1376 // ------------------------------------------- | 1426 // ------------------------------------------- |
| 1377 __ bind(&dont_adapt_arguments); | 1427 __ bind(&dont_adapt_arguments); |
| 1378 __ jmp(rdx); | 1428 __ jmp(rdx); |
| 1379 } | 1429 } |
| 1380 | 1430 |
| 1381 | 1431 |
| 1382 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { | 1432 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { |
| 1383 __ int3(); | 1433 // Get the loop depth of the stack guard check. This is recorded in |
| 1434 // a test(rax, depth) instruction right after the call. |
| 1435 Label stack_check; |
| 1436 __ movq(rbx, Operand(rsp, 0)); // return address |
| 1437 __ movzxbq(rbx, Operand(rbx, 1)); // depth |
| 1438 |
| 1439 // Get the loop nesting level at which we allow OSR from the |
| 1440 // unoptimized code and check if we want to do OSR yet. If not we |
| 1441 // should perform a stack guard check so we can get interrupts while |
| 1442 // waiting for on-stack replacement. |
| 1443 __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); |
| 1444 __ movq(rcx, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset)); |
| 1445 __ movq(rcx, FieldOperand(rcx, SharedFunctionInfo::kCodeOffset)); |
| 1446 __ cmpb(rbx, FieldOperand(rcx, Code::kAllowOSRAtLoopNestingLevelOffset)); |
| 1447 __ j(greater, &stack_check); |
| 1448 |
| 1449 // Pass the function to optimize as the argument to the on-stack |
| 1450 // replacement runtime function. |
| 1451 __ EnterInternalFrame(); |
| 1452 __ push(rax); |
| 1453 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1); |
| 1454 __ LeaveInternalFrame(); |
| 1455 |
| 1456 // If the result was -1 it means that we couldn't optimize the |
| 1457 // function. Just return and continue in the unoptimized version. |
| 1458 NearLabel skip; |
| 1459 __ SmiCompare(rax, Smi::FromInt(-1)); |
| 1460 __ j(not_equal, &skip); |
| 1461 __ ret(0); |
| 1462 |
| 1463 // If we decide not to perform on-stack replacement we perform a |
| 1464 // stack guard check to enable interrupts. |
| 1465 __ bind(&stack_check); |
| 1466 NearLabel ok; |
| 1467 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); |
| 1468 __ j(above_equal, &ok); |
| 1469 |
| 1470 StackCheckStub stub; |
| 1471 __ TailCallStub(&stub); |
| 1472 __ Abort("Unreachable code: returned from tail call."); |
| 1473 __ bind(&ok); |
| 1474 __ ret(0); |
| 1475 |
| 1476 __ bind(&skip); |
| 1477 // Untag the AST id and push it on the stack. |
| 1478 __ SmiToInteger32(rax, rax); |
| 1479 __ push(rax); |
| 1480 |
| 1481 // Generate the code for doing the frame-to-frame translation using |
| 1482 // the deoptimizer infrastructure. |
| 1483 Deoptimizer::EntryGenerator generator(masm, Deoptimizer::OSR); |
| 1484 generator.Generate(); |
| 1384 } | 1485 } |
| 1385 | 1486 |
| 1386 | 1487 |
| 1387 #undef __ | 1488 #undef __ |
| 1388 | 1489 |
| 1389 } } // namespace v8::internal | 1490 } } // namespace v8::internal |
| 1390 | 1491 |
| 1391 #endif // V8_TARGET_ARCH_X64 | 1492 #endif // V8_TARGET_ARCH_X64 |
| OLD | NEW |