OLD | NEW |
1 // Copyright 2009 the V8 project authors. All rights reserved. | 1 // Copyright 2009 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 265 matching lines...)
276 ProcessDeclarations(scope_->declarations()); | 276 ProcessDeclarations(scope_->declarations()); |
277 // Bail out if a stack-overflow exception occurred when processing | 277 // Bail out if a stack-overflow exception occurred when processing |
278 // declarations. | 278 // declarations. |
279 if (HasStackOverflow()) return; | 279 if (HasStackOverflow()) return; |
280 } | 280 } |
281 | 281 |
282 if (FLAG_trace) { | 282 if (FLAG_trace) { |
283 frame_->CallRuntime(Runtime::kTraceEnter, 0); | 283 frame_->CallRuntime(Runtime::kTraceEnter, 0); |
284 // Ignore the return value. | 284 // Ignore the return value. |
285 } | 285 } |
286 // CheckStack(); | 286 CheckStack(); |
287 | 287 |
288 // Compile the body of the function in a vanilla state. Don't | 288 // Compile the body of the function in a vanilla state. Don't |
289 // bother compiling all the code if the scope has an illegal | 289 // bother compiling all the code if the scope has an illegal |
290 // redeclaration. | 290 // redeclaration. |
291 if (!scope_->HasIllegalRedeclaration()) { | 291 if (!scope_->HasIllegalRedeclaration()) { |
292 Comment cmnt(masm_, "[ function body"); | 292 Comment cmnt(masm_, "[ function body"); |
293 #ifdef DEBUG | 293 #ifdef DEBUG |
294 bool is_builtin = Bootstrapper::IsActive(); | 294 bool is_builtin = Bootstrapper::IsActive(); |
295 bool should_trace = | 295 bool should_trace = |
296 is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls; | 296 is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls; |
(...skipping 105 matching lines...)
402 && (allocator()->count(r9) == (frame()->is_used(r9) ? 1 : 0)) | 402 && (allocator()->count(r9) == (frame()->is_used(r9) ? 1 : 0)) |
403 && (allocator()->count(r11) == (frame()->is_used(r11) ? 1 : 0)) | 403 && (allocator()->count(r11) == (frame()->is_used(r11) ? 1 : 0)) |
404 && (allocator()->count(r14) == (frame()->is_used(r14) ? 1 : 0)) | 404 && (allocator()->count(r14) == (frame()->is_used(r14) ? 1 : 0)) |
405 && (allocator()->count(r15) == (frame()->is_used(r15) ? 1 : 0)) | 405 && (allocator()->count(r15) == (frame()->is_used(r15) ? 1 : 0)) |
406 && (allocator()->count(r13) == (frame()->is_used(r13) ? 1 : 0)) | 406 && (allocator()->count(r13) == (frame()->is_used(r13) ? 1 : 0)) |
407 && (allocator()->count(r12) == (frame()->is_used(r12) ? 1 : 0)); | 407 && (allocator()->count(r12) == (frame()->is_used(r12) ? 1 : 0)); |
408 } | 408 } |
409 #endif | 409 #endif |
410 | 410 |
411 | 411 |
| 412 class DeferredStackCheck: public DeferredCode { |
| 413 public: |
| 414 DeferredStackCheck() { |
| 415 set_comment("[ DeferredStackCheck"); |
| 416 } |
| 417 |
| 418 virtual void Generate(); |
| 419 }; |
| 420 |
| 421 |
| 422 void DeferredStackCheck::Generate() { |
| 423 StackCheckStub stub; |
| 424 __ CallStub(&stub); |
| 425 } |
| 426 |
| 427 |
| 428 void CodeGenerator::CheckStack() { |
| 429 if (FLAG_check_stack) { |
| 430 DeferredStackCheck* deferred = new DeferredStackCheck; |
| 431 ExternalReference stack_guard_limit = |
| 432 ExternalReference::address_of_stack_guard_limit(); |
| 433 __ movq(kScratchRegister, stack_guard_limit); |
| 434 __ cmpq(rsp, Operand(kScratchRegister, 0)); |
| 435 deferred->Branch(below); |
| 436 deferred->BindExit(); |
| 437 } |
| 438 } |
| 439 |
| 440 |
412 void CodeGenerator::VisitStatements(ZoneList<Statement*>* statements) { | 441 void CodeGenerator::VisitStatements(ZoneList<Statement*>* statements) { |
413 ASSERT(!in_spilled_code()); | 442 ASSERT(!in_spilled_code()); |
414 for (int i = 0; has_valid_frame() && i < statements->length(); i++) { | 443 for (int i = 0; has_valid_frame() && i < statements->length(); i++) { |
415 Visit(statements->at(i)); | 444 Visit(statements->at(i)); |
416 } | 445 } |
417 } | 446 } |
418 | 447 |
419 | 448 |
420 void CodeGenerator::VisitBlock(Block* node) { | 449 void CodeGenerator::VisitBlock(Block* node) { |
421 ASSERT(!in_spilled_code()); | 450 ASSERT(!in_spilled_code()); |
(...skipping 229 matching lines...)
651 } | 680 } |
652 | 681 |
653 void CodeGenerator::VisitWithExitStatement(WithExitStatement* a) { | 682 void CodeGenerator::VisitWithExitStatement(WithExitStatement* a) { |
654 UNIMPLEMENTED(); | 683 UNIMPLEMENTED(); |
655 } | 684 } |
656 | 685 |
657 void CodeGenerator::VisitSwitchStatement(SwitchStatement* a) { | 686 void CodeGenerator::VisitSwitchStatement(SwitchStatement* a) { |
658 UNIMPLEMENTED(); | 687 UNIMPLEMENTED(); |
659 } | 688 } |
660 | 689 |
661 void CodeGenerator::VisitLoopStatement(LoopStatement* a) { | 690 |
662 UNIMPLEMENTED(); | 691 void CodeGenerator::VisitLoopStatement(LoopStatement* node) { |
| 692 ASSERT(!in_spilled_code()); |
| 693 Comment cmnt(masm_, "[ LoopStatement"); |
| 694 CodeForStatementPosition(node); |
| 695 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY); |
| 696 |
| 697 // Simple condition analysis. ALWAYS_TRUE and ALWAYS_FALSE represent a |
| 698 // known result for the test expression, with no side effects. |
| 699 enum { ALWAYS_TRUE, ALWAYS_FALSE, DONT_KNOW } info = DONT_KNOW; |
| 700 if (node->cond() == NULL) { |
| 701 ASSERT(node->type() == LoopStatement::FOR_LOOP); |
| 702 info = ALWAYS_TRUE; |
| 703 } else { |
| 704 Literal* lit = node->cond()->AsLiteral(); |
| 705 if (lit != NULL) { |
| 706 if (lit->IsTrue()) { |
| 707 info = ALWAYS_TRUE; |
| 708 } else if (lit->IsFalse()) { |
| 709 info = ALWAYS_FALSE; |
| 710 } |
| 711 } |
| 712 } |
| 713 |
| 714 switch (node->type()) { |
| 715 case LoopStatement::DO_LOOP: { |
| 716 JumpTarget body(JumpTarget::BIDIRECTIONAL); |
| 717 IncrementLoopNesting(); |
| 718 |
| 719 // Label the top of the loop for the backward jump if necessary. |
| 720 if (info == ALWAYS_TRUE) { |
| 721 // Use the continue target. |
| 722 node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL); |
| 723 node->continue_target()->Bind(); |
| 724 } else if (info == ALWAYS_FALSE) { |
| 725 // No need to label it. |
| 726 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY); |
| 727 } else { |
| 728 // Continue is the test, so use the backward body target. |
| 729 ASSERT(info == DONT_KNOW); |
| 730 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY); |
| 731 body.Bind(); |
| 732 } |
| 733 |
| 734 CheckStack(); // TODO(1222600): ignore if body contains calls. |
| 735 Visit(node->body()); |
| 736 |
| 737 // Compile the test. |
| 738 if (info == ALWAYS_TRUE) { |
| 739 // If control flow can fall off the end of the body, jump back |
| 740 // to the top and bind the break target at the exit. |
| 741 if (has_valid_frame()) { |
| 742 node->continue_target()->Jump(); |
| 743 } |
| 744 if (node->break_target()->is_linked()) { |
| 745 node->break_target()->Bind(); |
| 746 } |
| 747 |
| 748 } else if (info == ALWAYS_FALSE) { |
| 749 // We may have had continues or breaks in the body. |
| 750 if (node->continue_target()->is_linked()) { |
| 751 node->continue_target()->Bind(); |
| 752 } |
| 753 if (node->break_target()->is_linked()) { |
| 754 node->break_target()->Bind(); |
| 755 } |
| 756 |
| 757 } else { |
| 758 ASSERT(info == DONT_KNOW); |
| 759 // We have to compile the test expression if it can be reached by |
| 760 // control flow falling out of the body or via continue. |
| 761 if (node->continue_target()->is_linked()) { |
| 762 node->continue_target()->Bind(); |
| 763 } |
| 764 if (has_valid_frame()) { |
| 765 ControlDestination dest(&body, node->break_target(), false); |
| 766 LoadCondition(node->cond(), NOT_INSIDE_TYPEOF, &dest, true); |
| 767 } |
| 768 if (node->break_target()->is_linked()) { |
| 769 node->break_target()->Bind(); |
| 770 } |
| 771 } |
| 772 break; |
| 773 } |
| 774 |
| 775 case LoopStatement::WHILE_LOOP: { |
| 776 // Do not duplicate conditions that may have function literal |
| 777 // subexpressions. This can cause us to compile the function |
| 778 // literal twice. |
| 779 bool test_at_bottom = !node->may_have_function_literal(); |
| 780 |
| 781 IncrementLoopNesting(); |
| 782 |
| 783 // If the condition is always false and has no side effects, we |
| 784 // do not need to compile anything. |
| 785 if (info == ALWAYS_FALSE) break; |
| 786 |
| 787 JumpTarget body; |
| 788 if (test_at_bottom) { |
| 789 body.set_direction(JumpTarget::BIDIRECTIONAL); |
| 790 } |
| 791 |
| 792 // Based on the condition analysis, compile the test as necessary. |
| 793 if (info == ALWAYS_TRUE) { |
| 794 // We will not compile the test expression. Label the top of |
| 795 // the loop with the continue target. |
| 796 node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL); |
| 797 node->continue_target()->Bind(); |
| 798 } else { |
| 799 ASSERT(info == DONT_KNOW); // ALWAYS_FALSE cannot reach here. |
| 800 if (test_at_bottom) { |
| 801 // Continue is the test at the bottom, no need to label the |
| 802 // test at the top. The body is a backward target. |
| 803 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY); |
| 804 } else { |
| 805 // Label the test at the top as the continue target. The |
| 806 // body is a forward-only target. |
| 807 node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL); |
| 808 node->continue_target()->Bind(); |
| 809 } |
| 810 // Compile the test with the body as the true target and |
| 811 // preferred fall-through and with the break target as the |
| 812 // false target. |
| 813 ControlDestination dest(&body, node->break_target(), true); |
| 814 LoadCondition(node->cond(), NOT_INSIDE_TYPEOF, &dest, true); |
| 815 |
| 816 if (dest.false_was_fall_through()) { |
| 817 // If we got the break target as fall-through, the test may |
| 818 // have been unconditionally false (if there are no jumps to |
| 819 // the body). |
| 820 if (!body.is_linked()) break; |
| 821 |
| 822 // Otherwise, jump around the body on the fall through and |
| 823 // then bind the body target. |
| 824 node->break_target()->Unuse(); |
| 825 node->break_target()->Jump(); |
| 826 body.Bind(); |
| 827 } |
| 828 } |
| 829 |
| 830 CheckStack(); // TODO(1222600): ignore if body contains calls. |
| 831 Visit(node->body()); |
| 832 |
| 833 // Based on the condition analysis, compile the backward jump as |
| 834 // necessary. |
| 835 if (info == ALWAYS_TRUE) { |
| 836 // The loop body has been labeled with the continue target. |
| 837 if (has_valid_frame()) { |
| 838 node->continue_target()->Jump(); |
| 839 } |
| 840 } else { |
| 841 ASSERT(info == DONT_KNOW); // ALWAYS_FALSE cannot reach here. |
| 842 if (test_at_bottom) { |
| 843 // If we have chosen to recompile the test at the bottom, |
| 844 // then it is the continue target. |
| 845 if (node->continue_target()->is_linked()) { |
| 846 node->continue_target()->Bind(); |
| 847 } |
| 848 if (has_valid_frame()) { |
| 849 // The break target is the fall-through (body is a backward |
| 850 // jump from here and thus an invalid fall-through). |
| 851 ControlDestination dest(&body, node->break_target(), false); |
| 852 LoadCondition(node->cond(), NOT_INSIDE_TYPEOF, &dest, true); |
| 853 } |
| 854 } else { |
| 855 // If we have chosen not to recompile the test at the |
| 856 // bottom, jump back to the one at the top. |
| 857 if (has_valid_frame()) { |
| 858 node->continue_target()->Jump(); |
| 859 } |
| 860 } |
| 861 } |
| 862 |
| 863 // The break target may be already bound (by the condition), or |
| 864 // there may not be a valid frame. Bind it only if needed. |
| 865 if (node->break_target()->is_linked()) { |
| 866 node->break_target()->Bind(); |
| 867 } |
| 868 break; |
| 869 } |
| 870 |
| 871 case LoopStatement::FOR_LOOP: { |
| 872 // Do not duplicate conditions that may have function literal |
| 873 // subexpressions. This can cause us to compile the function |
| 874 // literal twice. |
| 875 bool test_at_bottom = !node->may_have_function_literal(); |
| 876 |
| 877 // Compile the init expression if present. |
| 878 if (node->init() != NULL) { |
| 879 Visit(node->init()); |
| 880 } |
| 881 |
| 882 IncrementLoopNesting(); |
| 883 |
| 884 // If the condition is always false and has no side effects, we |
| 885 // do not need to compile anything else. |
| 886 if (info == ALWAYS_FALSE) break; |
| 887 |
| 888 // Target for backward edge if no test at the bottom, otherwise |
| 889 // unused. |
| 890 JumpTarget loop(JumpTarget::BIDIRECTIONAL); |
| 891 |
| 892 // Target for backward edge if there is a test at the bottom, |
| 893 // otherwise used as target for test at the top. |
| 894 JumpTarget body; |
| 895 if (test_at_bottom) { |
| 896 body.set_direction(JumpTarget::BIDIRECTIONAL); |
| 897 } |
| 898 |
| 899 // Based on the condition analysis, compile the test as necessary. |
| 900 if (info == ALWAYS_TRUE) { |
| 901 // We will not compile the test expression. Label the top of |
| 902 // the loop. |
| 903 if (node->next() == NULL) { |
| 904 // Use the continue target if there is no update expression. |
| 905 node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL); |
| 906 node->continue_target()->Bind(); |
| 907 } else { |
| 908 // Otherwise use the backward loop target. |
| 909 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY); |
| 910 loop.Bind(); |
| 911 } |
| 912 } else { |
| 913 ASSERT(info == DONT_KNOW); |
| 914 if (test_at_bottom) { |
| 915 // Continue is either the update expression or the test at |
| 916 // the bottom, no need to label the test at the top. |
| 917 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY); |
| 918 } else if (node->next() == NULL) { |
| 919 // We are not recompiling the test at the bottom and there |
| 920 // is no update expression. |
| 921 node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL); |
| 922 node->continue_target()->Bind(); |
| 923 } else { |
| 924 // We are not recompiling the test at the bottom and there |
| 925 // is an update expression. |
| 926 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY); |
| 927 loop.Bind(); |
| 928 } |
| 929 |
| 930 // Compile the test with the body as the true target and |
| 931 // preferred fall-through and with the break target as the |
| 932 // false target. |
| 933 ControlDestination dest(&body, node->break_target(), true); |
| 934 LoadCondition(node->cond(), NOT_INSIDE_TYPEOF, &dest, true); |
| 935 |
| 936 if (dest.false_was_fall_through()) { |
| 937 // If we got the break target as fall-through, the test may |
| 938 // have been unconditionally false (if there are no jumps to |
| 939 // the body). |
| 940 if (!body.is_linked()) break; |
| 941 |
| 942 // Otherwise, jump around the body on the fall through and |
| 943 // then bind the body target. |
| 944 node->break_target()->Unuse(); |
| 945 node->break_target()->Jump(); |
| 946 body.Bind(); |
| 947 } |
| 948 } |
| 949 |
| 950 CheckStack(); // TODO(1222600): ignore if body contains calls. |
| 951 Visit(node->body()); |
| 952 |
| 953 // If there is an update expression, compile it if necessary. |
| 954 if (node->next() != NULL) { |
| 955 if (node->continue_target()->is_linked()) { |
| 956 node->continue_target()->Bind(); |
| 957 } |
| 958 |
| 959 // Control can reach the update by falling out of the body or |
| 960 // by a continue. |
| 961 if (has_valid_frame()) { |
| 962 // Record the source position of the statement: this code comes |
| 963 // after the code for the body, but it belongs to the loop |
| 964 // statement itself and not to the body. |
| 965 CodeForStatementPosition(node); |
| 966 Visit(node->next()); |
| 967 } |
| 968 } |
| 969 |
| 970 // Based on the condition analysis, compile the backward jump as |
| 971 // necessary. |
| 972 if (info == ALWAYS_TRUE) { |
| 973 if (has_valid_frame()) { |
| 974 if (node->next() == NULL) { |
| 975 node->continue_target()->Jump(); |
| 976 } else { |
| 977 loop.Jump(); |
| 978 } |
| 979 } |
| 980 } else { |
| 981 ASSERT(info == DONT_KNOW); // ALWAYS_FALSE cannot reach here. |
| 982 if (test_at_bottom) { |
| 983 if (node->continue_target()->is_linked()) { |
| 984 // We can have dangling jumps to the continue target if |
| 985 // there was no update expression. |
| 986 node->continue_target()->Bind(); |
| 987 } |
| 988 // Control can reach the test at the bottom by falling out |
| 989 // of the body, by a continue in the body, or from the |
| 990 // update expression. |
| 991 if (has_valid_frame()) { |
| 992 // The break target is the fall-through (body is a |
| 993 // backward jump from here). |
| 994 ControlDestination dest(&body, node->break_target(), false); |
| 995 LoadCondition(node->cond(), NOT_INSIDE_TYPEOF, &dest, true); |
| 996 } |
| 997 } else { |
| 998 // Otherwise, jump back to the test at the top. |
| 999 if (has_valid_frame()) { |
| 1000 if (node->next() == NULL) { |
| 1001 node->continue_target()->Jump(); |
| 1002 } else { |
| 1003 loop.Jump(); |
| 1004 } |
| 1005 } |
| 1006 } |
| 1007 } |
| 1008 |
| 1009 // The break target may be already bound (by the condition), or |
| 1010 // there may not be a valid frame. Bind it only if needed. |
| 1011 if (node->break_target()->is_linked()) { |
| 1012 node->break_target()->Bind(); |
| 1013 } |
| 1014 break; |
| 1015 } |
| 1016 } |
| 1017 |
| 1018 DecrementLoopNesting(); |
| 1019 node->continue_target()->Unuse(); |
| 1020 node->break_target()->Unuse(); |
663 } | 1021 } |
664 | 1022 |
| 1023 |
665 void CodeGenerator::VisitForInStatement(ForInStatement* a) { | 1024 void CodeGenerator::VisitForInStatement(ForInStatement* a) { |
666 UNIMPLEMENTED(); | 1025 UNIMPLEMENTED(); |
667 } | 1026 } |
668 | 1027 |
669 void CodeGenerator::VisitTryCatch(TryCatch* a) { | 1028 void CodeGenerator::VisitTryCatch(TryCatch* a) { |
670 UNIMPLEMENTED(); | 1029 UNIMPLEMENTED(); |
671 } | 1030 } |
672 | 1031 |
673 void CodeGenerator::VisitTryFinally(TryFinally* a) { | 1032 void CodeGenerator::VisitTryFinally(TryFinally* a) { |
674 UNIMPLEMENTED(); | 1033 UNIMPLEMENTED(); |
(...skipping 589 matching lines...)
1264 // ---------------------------------- | 1623 // ---------------------------------- |
1265 // JavaScript example: 'with (obj) foo(1, 2, 3)' // foo is in obj | 1624 // JavaScript example: 'with (obj) foo(1, 2, 3)' // foo is in obj |
1266 // ---------------------------------- | 1625 // ---------------------------------- |
1267 | 1626 |
1268 // Load the function from the context. Sync the frame so we can | 1627 // Load the function from the context. Sync the frame so we can |
1269 // push the arguments directly into place. | 1628 // push the arguments directly into place. |
1270 frame_->SyncRange(0, frame_->element_count() - 1); | 1629 frame_->SyncRange(0, frame_->element_count() - 1); |
1271 frame_->EmitPush(esi); | 1630 frame_->EmitPush(esi); |
1272 frame_->EmitPush(Immediate(var->name())); | 1631 frame_->EmitPush(Immediate(var->name())); |
1273 frame_->CallRuntime(Runtime::kLoadContextSlot, 2); | 1632 frame_->CallRuntime(Runtime::kLoadContextSlot, 2); |
1274 // The runtime call returns a pair of values in eax and edx. The | 1633 // The runtime call returns a pair of values in rax and rdx. The |
1275 // looked-up function is in eax and the receiver is in edx. These | 1634 // looked-up function is in rax and the receiver is in rdx. These |
1276 // register references are not ref counted here. We spill them | 1635 // register references are not ref counted here. We spill them |
1277 // eagerly since they are arguments to an inevitable call (and are | 1636 // eagerly since they are arguments to an inevitable call (and are |
1278 // not sharable by the arguments). | 1637 // not sharable by the arguments). |
1279 ASSERT(!allocator()->is_used(eax)); | 1638 ASSERT(!allocator()->is_used(rax)); |
1280 frame_->EmitPush(eax); | 1639 frame_->EmitPush(rax); |
1281 | 1640 |
1282 // Load the receiver. | 1641 // Load the receiver. |
1283 ASSERT(!allocator()->is_used(edx)); | 1642 ASSERT(!allocator()->is_used(rdx)); |
1284 frame_->EmitPush(edx); | 1643 frame_->EmitPush(rdx); |
1285 | 1644 |
1286 // Call the function. | 1645 // Call the function. |
1287 CallWithArguments(args, node->position()); | 1646 CallWithArguments(args, node->position()); |
1288 */ | 1647 */ |
1289 } else if (property != NULL) { | 1648 } else if (property != NULL) { |
1290 // Check if the key is a literal string. | 1649 // Check if the key is a literal string. |
1291 Literal* literal = property->key()->AsLiteral(); | 1650 Literal* literal = property->key()->AsLiteral(); |
1292 | 1651 |
1293 if (literal != NULL && literal->handle()->IsSymbol()) { | 1652 if (literal != NULL && literal->handle()->IsSymbol()) { |
1294 // ------------------------------------------------------------------ | 1653 // ------------------------------------------------------------------ |
(...skipping 1481 matching lines...)
2776 // Code pattern for loading a floating point value and converting it | 3135 // Code pattern for loading a floating point value and converting it |
2777 // to a 32 bit integer. Input value must be either a smi or a heap number | 3136 // to a 32 bit integer. Input value must be either a smi or a heap number |
2778 // object. | 3137 // object. |
2779 // Returns operands as 32-bit sign-extended integers in general purpose | 3138 // Returns operands as 32-bit sign-extended integers in general purpose |
2780 // registers. | 3139 // registers. |
2781 static void LoadInt32Operand(MacroAssembler* masm, | 3140 static void LoadInt32Operand(MacroAssembler* masm, |
2782 const Operand& src, | 3141 const Operand& src, |
2783 Register dst); | 3142 Register dst); |
2784 | 3143 |
2785 // Test if operands are smi or number objects (fp). Requirements: | 3144 // Test if operands are smi or number objects (fp). Requirements: |
2786 // operand_1 in eax, operand_2 in edx; falls through on float | 3145 // operand_1 in rax, operand_2 in rdx; falls through on float |
2787 // operands, jumps to the non_float label otherwise. | 3146 // operands, jumps to the non_float label otherwise. |
2788 static void CheckFloatOperands(MacroAssembler* masm, | 3147 static void CheckFloatOperands(MacroAssembler* masm, |
2789 Label* non_float); | 3148 Label* non_float); |
2790 // Allocate a heap number in new space with undefined value. | 3149 // Allocate a heap number in new space with undefined value. |
2791 // Returns tagged pointer in result, or jumps to need_gc if new space is full. | 3150 // Returns tagged pointer in result, or jumps to need_gc if new space is full. |
2792 static void AllocateHeapNumber(MacroAssembler* masm, | 3151 static void AllocateHeapNumber(MacroAssembler* masm, |
2793 Label* need_gc, | 3152 Label* need_gc, |
2794 Register scratch, | 3153 Register scratch, |
2795 Register result); | 3154 Register result); |
2796 }; | 3155 }; |
(...skipping 204 matching lines...)
3001 Register receiver_; | 3360 Register receiver_; |
3002 Handle<String> name_; | 3361 Handle<String> name_; |
3003 }; | 3362 }; |
3004 | 3363 |
3005 | 3364 |
3006 void DeferredReferenceGetNamedValue::Generate() { | 3365 void DeferredReferenceGetNamedValue::Generate() { |
3007 __ push(receiver_); | 3366 __ push(receiver_); |
3008 __ Move(rcx, name_); | 3367 __ Move(rcx, name_); |
3009 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize)); | 3368 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize)); |
3010 __ Call(ic, RelocInfo::CODE_TARGET); | 3369 __ Call(ic, RelocInfo::CODE_TARGET); |
3011 // The call must be followed by a test eax instruction to indicate | 3370 // The call must be followed by a test rax instruction to indicate |
3012 // that the inobject property case was inlined. | 3371 // that the inobject property case was inlined. |
3013 // | 3372 // |
3014 // Store the delta to the map check instruction here in the test | 3373 // Store the delta to the map check instruction here in the test |
3015 // instruction. Use masm_-> instead of the __ macro since the | 3374 // instruction. Use masm_-> instead of the __ macro since the |
3016 // latter can't return a value. | 3375 // latter can't return a value. |
3017 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site()); | 3376 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site()); |
3018 // Here we use masm_-> instead of the __ macro because this is the | 3377 // Here we use masm_-> instead of the __ macro because this is the |
3019 // instruction that gets patched and coverage code gets in the way. | 3378 // instruction that gets patched and coverage code gets in the way. |
3020 masm_->testq(rax, Immediate(-delta_to_patch_site)); | 3379 masm_->testq(rax, Immediate(-delta_to_patch_site)); |
3021 __ IncrementCounter(&Counters::named_load_inline_miss, 1); | 3380 __ IncrementCounter(&Counters::named_load_inline_miss, 1); |
(...skipping 962 matching lines...)
3984 __ push(rcx); | 4343 __ push(rcx); |
3985 | 4344 |
3986 // Inlined floating point compare. | 4345 // Inlined floating point compare. |
3987 // Call builtin if operands are not floating point or smi. | 4346 // Call builtin if operands are not floating point or smi. |
3988 Label check_for_symbols; | 4347 Label check_for_symbols; |
3989 // TODO(X64): Implement floating point comparisons; | 4348 // TODO(X64): Implement floating point comparisons; |
3990 __ int3(); | 4349 __ int3(); |
3991 | 4350 |
3992 // TODO(1243847): Use cmov below once CpuFeatures are properly hooked up. | 4351 // TODO(1243847): Use cmov below once CpuFeatures are properly hooked up. |
3993 Label below_lbl, above_lbl; | 4352 Label below_lbl, above_lbl; |
3994 // use edx, eax to convert unsigned to signed comparison | 4353 // use rdx, rax to convert unsigned to signed comparison |
3995 __ j(below, &below_lbl); | 4354 __ j(below, &below_lbl); |
3996 __ j(above, &above_lbl); | 4355 __ j(above, &above_lbl); |
3997 | 4356 |
3998 __ xor_(rax, rax); // equal | 4357 __ xor_(rax, rax); // equal |
3999 __ ret(2 * kPointerSize); | 4358 __ ret(2 * kPointerSize); |
4000 | 4359 |
4001 __ bind(&below_lbl); | 4360 __ bind(&below_lbl); |
4002 __ movq(rax, Immediate(-1)); | 4361 __ movq(rax, Immediate(-1)); |
4003 __ ret(2 * kPointerSize); | 4362 __ ret(2 * kPointerSize); |
4004 | 4363 |
4005 __ bind(&above_lbl); | 4364 __ bind(&above_lbl); |
4006 __ movq(rax, Immediate(1)); | 4365 __ movq(rax, Immediate(1)); |
4007 __ ret(2 * kPointerSize); // eax, edx were pushed | 4366 __ ret(2 * kPointerSize); // rax, rdx were pushed |
4008 | 4367 |
4009 // Fast negative check for symbol-to-symbol equality. | 4368 // Fast negative check for symbol-to-symbol equality. |
4010 __ bind(&check_for_symbols); | 4369 __ bind(&check_for_symbols); |
4011 if (cc_ == equal) { | 4370 if (cc_ == equal) { |
4012 BranchIfNonSymbol(masm, &call_builtin, rax); | 4371 BranchIfNonSymbol(masm, &call_builtin, rax); |
4013 BranchIfNonSymbol(masm, &call_builtin, rdx); | 4372 BranchIfNonSymbol(masm, &call_builtin, rdx); |
4014 | 4373 |
4015 // We've already checked for object identity, so if both operands | 4374 // We've already checked for object identity, so if both operands |
4016 // are symbols they aren't equal. Register eax already holds a | 4375 // are symbols they aren't equal. Register rax already holds a |
4017 // non-zero value, which indicates not equal, so just return. | 4376 // non-zero value, which indicates not equal, so just return. |
4018 __ ret(2 * kPointerSize); | 4377 __ ret(2 * kPointerSize); |
4019 } | 4378 } |
4020 | 4379 |
4021 __ bind(&call_builtin); | 4380 __ bind(&call_builtin); |
4022 // must swap argument order | 4381 // must swap argument order |
4023 __ pop(rcx); | 4382 __ pop(rcx); |
4024 __ pop(rdx); | 4383 __ pop(rdx); |
4025 __ pop(rax); | 4384 __ pop(rax); |
4026 __ push(rdx); | 4385 __ push(rdx); |
(...skipping 31 matching lines...)
4058 __ j(zero, label); | 4417 __ j(zero, label); |
4059 __ movq(kScratchRegister, FieldOperand(object, HeapObject::kMapOffset)); | 4418 __ movq(kScratchRegister, FieldOperand(object, HeapObject::kMapOffset)); |
4060 __ movzxbq(kScratchRegister, | 4419 __ movzxbq(kScratchRegister, |
4061 FieldOperand(kScratchRegister, Map::kInstanceTypeOffset)); | 4420 FieldOperand(kScratchRegister, Map::kInstanceTypeOffset)); |
4062 __ and_(kScratchRegister, Immediate(kIsSymbolMask | kIsNotStringMask)); | 4421 __ and_(kScratchRegister, Immediate(kIsSymbolMask | kIsNotStringMask)); |
4063 __ cmpb(kScratchRegister, Immediate(kSymbolTag | kStringTag)); | 4422 __ cmpb(kScratchRegister, Immediate(kSymbolTag | kStringTag)); |
4064 __ j(not_equal, label); | 4423 __ j(not_equal, label); |
4065 } | 4424 } |
4066 | 4425 |
4067 | 4426 |
4068 void StackCheckStub::Generate(MacroAssembler* masm) { | |
4069 } | |
4070 | |
4071 | |
4072 class CallFunctionStub: public CodeStub { | 4427 class CallFunctionStub: public CodeStub { |
4073 public: | 4428 public: |
4074 CallFunctionStub(int argc, InLoopFlag in_loop) | 4429 CallFunctionStub(int argc, InLoopFlag in_loop) |
4075 : argc_(argc), in_loop_(in_loop) { } | 4430 : argc_(argc), in_loop_(in_loop) { } |
4076 | 4431 |
4077 void Generate(MacroAssembler* masm); | 4432 void Generate(MacroAssembler* masm); |
4078 | 4433 |
4079 private: | 4434 private: |
4080 int argc_; | 4435 int argc_; |
4081 InLoopFlag in_loop_; | 4436 InLoopFlag in_loop_; |
(...skipping 515 matching lines...)
4597 __ pop(rbp); | 4952 __ pop(rbp); |
4598 __ ret(0); | 4953 __ ret(0); |
4599 } | 4954 } |
4600 | 4955 |
4601 | 4956 |
4602 // ----------------------------------------------------------------------------- | 4957 // ----------------------------------------------------------------------------- |
4603 // Implementation of stubs. | 4958 // Implementation of stubs. |
4604 | 4959 |
4605 // Stub classes have a public member named masm, not masm_. | 4960 // Stub classes have a public member named masm, not masm_. |
4606 | 4961 |
| 4962 void StackCheckStub::Generate(MacroAssembler* masm) { |
| 4963 // Because builtins always remove the receiver from the stack, we |
| 4964 // have to fake one to avoid underflowing the stack. The receiver |
| 4965 // must be inserted below the return address on the stack so we |
| 4966 // temporarily store that in a register. |
| 4967 __ pop(rax); |
| 4968 __ push(Immediate(Smi::FromInt(0))); |
| 4969 __ push(rax); |
| 4970 |
| 4971 // Do tail-call to runtime routine. |
| 4972 __ TailCallRuntime(ExternalReference(Runtime::kStackGuard), 1); |
| 4973 } |
| 4974 |
4607 | 4975 |
4608 void FloatingPointHelper::AllocateHeapNumber(MacroAssembler* masm, | 4976 void FloatingPointHelper::AllocateHeapNumber(MacroAssembler* masm, |
4609 Label* need_gc, | 4977 Label* need_gc, |
4610 Register scratch, | 4978 Register scratch, |
4611 Register result) { | 4979 Register result) { |
4612 ExternalReference allocation_top = | 4980 ExternalReference allocation_top = |
4613 ExternalReference::new_space_allocation_top_address(); | 4981 ExternalReference::new_space_allocation_top_address(); |
4614 ExternalReference allocation_limit = | 4982 ExternalReference allocation_limit = |
4615 ExternalReference::new_space_allocation_limit_address(); | 4983 ExternalReference::new_space_allocation_limit_address(); |
4616 __ movq(scratch, allocation_top); // scratch: address of allocation top. | 4984 __ movq(scratch, allocation_top); // scratch: address of allocation top. |
(...skipping 518 matching lines...)
5135 break; | 5503 break; |
5136 default: | 5504 default: |
5137 UNREACHABLE(); | 5505 UNREACHABLE(); |
5138 } | 5506 } |
5139 } | 5507 } |
5140 | 5508 |
5141 | 5509 |
5142 #undef __ | 5510 #undef __ |
5143 | 5511 |
5144 } } // namespace v8::internal | 5512 } } // namespace v8::internal |
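The core of this change is the re-enabled inline stack check: CheckStack() emits a cmpq of rsp against the stack guard limit and branches, when below, to a DeferredStackCheck whose out-of-line code calls StackCheckStub, which in turn tail-calls Runtime::kStackGuard. The sketch below is a minimal, hypothetical plain-C++ analogue of that fast-path/slow-path shape, not V8 code; the limit constant and the function names are invented for the illustration.

// Hypothetical sketch of the inline-check / deferred-handler pattern.
#include <cstdint>
#include <cstdio>

static const uintptr_t kStackGuardLimit = 0x1000;  // assumed limit, demo only

// Out-of-line handler, analogous to DeferredStackCheck::Generate()
// calling StackCheckStub, which tail-calls Runtime::kStackGuard.
static void HandleStackGuard() {
  std::printf("stack guard hit: run interrupt / overflow handling\n");
}

// Inline fast path, analogous to cmpq(rsp, limit) + deferred->Branch(below).
static void CheckStack(uintptr_t stack_pointer) {
  if (stack_pointer < kStackGuardLimit) {  // "below": unsigned comparison
    HandleStackGuard();                    // jump to the deferred code
  }                                        // BindExit(): normal fall-through
}

int main() {
  CheckStack(0x8000);  // common case: not below the limit, falls through
  CheckStack(0x0800);  // below the limit: the deferred path runs
  return 0;
}

Keeping only the compare and branch inline keeps the common path short; the stub and the runtime call live in shared, out-of-line code emitted once per deferred check.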