| OLD | NEW |
| 1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| 11 // with the distribution. | 11 // with the distribution. |
| (...skipping 577 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 589 __ bind(&not_tos_rax); | 589 __ bind(&not_tos_rax); |
| 590 __ Abort("no cases left"); | 590 __ Abort("no cases left"); |
| 591 } | 591 } |
| 592 | 592 |
| 593 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) { | 593 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) { |
| 594 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER); | 594 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER); |
| 595 } | 595 } |
| 596 | 596 |
| 597 | 597 |
| 598 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) { | 598 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) { |
| 599 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER); | 599 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY); |
| 600 } | 600 } |
| 601 | 601 |
| 602 | 602 |
| 603 void Builtins::Generate_NotifyOSR(MacroAssembler* masm) { | 603 void Builtins::Generate_NotifyOSR(MacroAssembler* masm) { |
| 604 __ int3(); | 604 // For now, we are relying on the fact that Runtime::NotifyOSR |
| 605 // doesn't do any garbage collection which allows us to save/restore |
| 606 // the registers without worrying about which of them contain |
| 607 // pointers. This seems a bit fragile. |
| 608 __ Pushad(); |
| 609 __ EnterInternalFrame(); |
| 610 __ CallRuntime(Runtime::kNotifyOSR, 0); |
| 611 __ LeaveInternalFrame(); |
| 612 __ Popad(); |
| 613 __ ret(0); |
| 605 } | 614 } |
| 606 | 615 |
| 607 | 616 |
| 608 void Builtins::Generate_FunctionCall(MacroAssembler* masm) { | 617 void Builtins::Generate_FunctionCall(MacroAssembler* masm) { |
| 609 // Stack Layout: | 618 // Stack Layout: |
| 610 // rsp[0]: Return address | 619 // rsp[0]: Return address |
| 611 // rsp[1]: Argument n | 620 // rsp[1]: Argument n |
| 612 // rsp[2]: Argument n-1 | 621 // rsp[2]: Argument n-1 |
| 613 // ... | 622 // ... |
| 614 // rsp[n]: Argument 1 | 623 // rsp[n]: Argument 1 |
| (...skipping 20 matching lines...) Expand all Loading... |
| 635 __ JumpIfSmi(rdi, &non_function); | 644 __ JumpIfSmi(rdi, &non_function); |
| 636 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); | 645 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); |
| 637 __ j(not_equal, &non_function); | 646 __ j(not_equal, &non_function); |
| 638 | 647 |
| 639 // 3a. Patch the first argument if necessary when calling a function. | 648 // 3a. Patch the first argument if necessary when calling a function. |
| 640 Label shift_arguments; | 649 Label shift_arguments; |
| 641 { Label convert_to_object, use_global_receiver, patch_receiver; | 650 { Label convert_to_object, use_global_receiver, patch_receiver; |
| 642 // Change context eagerly in case we need the global receiver. | 651 // Change context eagerly in case we need the global receiver. |
| 643 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); | 652 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); |
| 644 | 653 |
| 654 // Do not transform the receiver for strict mode functions. |
| 655 __ movq(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); |
| 656 __ testb(FieldOperand(rbx, SharedFunctionInfo::kStrictModeByteOffset), |
| 657 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte)); |
| 658 __ j(not_equal, &shift_arguments); |
| 659 |
| 660 // Compute the receiver in non-strict mode. |
| 645 __ movq(rbx, Operand(rsp, rax, times_pointer_size, 0)); | 661 __ movq(rbx, Operand(rsp, rax, times_pointer_size, 0)); |
| 646 __ JumpIfSmi(rbx, &convert_to_object); | 662 __ JumpIfSmi(rbx, &convert_to_object); |
| 647 | 663 |
| 648 __ CompareRoot(rbx, Heap::kNullValueRootIndex); | 664 __ CompareRoot(rbx, Heap::kNullValueRootIndex); |
| 649 __ j(equal, &use_global_receiver); | 665 __ j(equal, &use_global_receiver); |
| 650 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex); | 666 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex); |
| 651 __ j(equal, &use_global_receiver); | 667 __ j(equal, &use_global_receiver); |
| 652 | 668 |
| 653 __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, rcx); | 669 __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, rcx); |
| 654 __ j(below, &convert_to_object); | 670 __ j(below, &convert_to_object); |
| (...skipping 136 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 791 __ push(Immediate(0)); // index | 807 __ push(Immediate(0)); // index |
| 792 | 808 |
| 793 // Change context eagerly to get the right global object if | 809 // Change context eagerly to get the right global object if |
| 794 // necessary. | 810 // necessary. |
| 795 __ movq(rdi, Operand(rbp, kFunctionOffset)); | 811 __ movq(rdi, Operand(rbp, kFunctionOffset)); |
| 796 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); | 812 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); |
| 797 | 813 |
| 798 // Compute the receiver. | 814 // Compute the receiver. |
| 799 Label call_to_object, use_global_receiver, push_receiver; | 815 Label call_to_object, use_global_receiver, push_receiver; |
| 800 __ movq(rbx, Operand(rbp, kReceiverOffset)); | 816 __ movq(rbx, Operand(rbp, kReceiverOffset)); |
| 817 |
| 818 // Do not transform the receiver for strict mode functions. |
| 819 __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); |
| 820 __ testb(FieldOperand(rdx, SharedFunctionInfo::kStrictModeByteOffset), |
| 821 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte)); |
| 822 __ j(not_equal, &push_receiver); |
| 823 |
| 824 // Compute the receiver in non-strict mode. |
| 801 __ JumpIfSmi(rbx, &call_to_object); | 825 __ JumpIfSmi(rbx, &call_to_object); |
| 802 __ CompareRoot(rbx, Heap::kNullValueRootIndex); | 826 __ CompareRoot(rbx, Heap::kNullValueRootIndex); |
| 803 __ j(equal, &use_global_receiver); | 827 __ j(equal, &use_global_receiver); |
| 804 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex); | 828 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex); |
| 805 __ j(equal, &use_global_receiver); | 829 __ j(equal, &use_global_receiver); |
| 806 | 830 |
| 807 // If given receiver is already a JavaScript object then there's no | 831 // If given receiver is already a JavaScript object then there's no |
| 808 // reason for converting it. | 832 // reason for converting it. |
| 809 __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, rcx); | 833 __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, rcx); |
| 810 __ j(below, &call_to_object); | 834 __ j(below, &call_to_object); |
| (...skipping 591 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1402 | 1426 |
| 1403 // ------------------------------------------- | 1427 // ------------------------------------------- |
| 1404 // Dont adapt arguments. | 1428 // Dont adapt arguments. |
| 1405 // ------------------------------------------- | 1429 // ------------------------------------------- |
| 1406 __ bind(&dont_adapt_arguments); | 1430 __ bind(&dont_adapt_arguments); |
| 1407 __ jmp(rdx); | 1431 __ jmp(rdx); |
| 1408 } | 1432 } |
| 1409 | 1433 |
| 1410 | 1434 |
| 1411 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { | 1435 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { |
| 1412 __ int3(); | 1436 // Get the loop depth of the stack guard check. This is recorded in |
| 1437 // a test(rax, depth) instruction right after the call. |
| 1438 Label stack_check; |
| 1439 __ movq(rbx, Operand(rsp, 0)); // return address |
| 1440 __ movzxbq(rbx, Operand(rbx, 1)); // depth |
| 1441 |
| 1442 // Get the loop nesting level at which we allow OSR from the |
| 1443 // unoptimized code and check if we want to do OSR yet. If not we |
| 1444 // should perform a stack guard check so we can get interrupts while |
| 1445 // waiting for on-stack replacement. |
| 1446 __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); |
| 1447 __ movq(rcx, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset)); |
| 1448 __ movq(rcx, FieldOperand(rcx, SharedFunctionInfo::kCodeOffset)); |
| 1449 __ cmpb(rbx, FieldOperand(rcx, Code::kAllowOSRAtLoopNestingLevelOffset)); |
| 1450 __ j(greater, &stack_check); |
| 1451 |
| 1452 // Pass the function to optimize as the argument to the on-stack |
| 1453 // replacement runtime function. |
| 1454 __ EnterInternalFrame(); |
| 1455 __ push(rax); |
| 1456 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1); |
| 1457 __ LeaveInternalFrame(); |
| 1458 |
| 1459 // If the result was -1 it means that we couldn't optimize the |
| 1460 // function. Just return and continue in the unoptimized version. |
| 1461 NearLabel skip; |
| 1462 __ SmiCompare(rax, Smi::FromInt(-1)); |
| 1463 __ j(not_equal, &skip); |
| 1464 __ ret(0); |
| 1465 |
| 1466 // If we decide not to perform on-stack replacement we perform a |
| 1467 // stack guard check to enable interrupts. |
| 1468 __ bind(&stack_check); |
| 1469 NearLabel ok; |
| 1470 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); |
| 1471 __ j(above_equal, &ok); |
| 1472 |
| 1473 StackCheckStub stub; |
| 1474 __ TailCallStub(&stub); |
| 1475 __ Abort("Unreachable code: returned from tail call."); |
| 1476 __ bind(&ok); |
| 1477 __ ret(0); |
| 1478 |
| 1479 __ bind(&skip); |
| 1480 // Untag the AST id and push it on the stack. |
| 1481 __ SmiToInteger32(rax, rax); |
| 1482 __ push(rax); |
| 1483 |
| 1484 // Generate the code for doing the frame-to-frame translation using |
| 1485 // the deoptimizer infrastructure. |
| 1486 Deoptimizer::EntryGenerator generator(masm, Deoptimizer::OSR); |
| 1487 generator.Generate(); |
| 1413 } | 1488 } |
| 1414 | 1489 |
| 1415 | 1490 |
| 1416 #undef __ | 1491 #undef __ |
| 1417 | 1492 |
| 1418 } } // namespace v8::internal | 1493 } } // namespace v8::internal |
| 1419 | 1494 |
| 1420 #endif // V8_TARGET_ARCH_X64 | 1495 #endif // V8_TARGET_ARCH_X64 |
| OLD | NEW |