| OLD | NEW |
| 1 // Copyright 2009 the V8 project authors. All rights reserved. | 1 // Copyright 2009 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 36 matching lines...) |
| 47 for (int i = 0; i < RegisterAllocator::kNumRegisters; i++) { | 47 for (int i = 0; i < RegisterAllocator::kNumRegisters; i++) { |
| 48 int action = registers_[i]; | 48 int action = registers_[i]; |
| 49 if (action == kPush) { | 49 if (action == kPush) { |
| 50 __ push(RegisterAllocator::ToRegister(i)); | 50 __ push(RegisterAllocator::ToRegister(i)); |
| 51 } else if (action != kIgnore && (action & kSyncedFlag) == 0) { | 51 } else if (action != kIgnore && (action & kSyncedFlag) == 0) { |
| 52 __ movq(Operand(rbp, action), RegisterAllocator::ToRegister(i)); | 52 __ movq(Operand(rbp, action), RegisterAllocator::ToRegister(i)); |
| 53 } | 53 } |
| 54 } | 54 } |
| 55 } | 55 } |
| 56 | 56 |
| | 57 |
| 57 void DeferredCode::RestoreRegisters() { | 58 void DeferredCode::RestoreRegisters() { |
| 58 // Restore registers in reverse order due to the stack. | 59 // Restore registers in reverse order due to the stack. |
| 59 for (int i = RegisterAllocator::kNumRegisters - 1; i >= 0; i--) { | 60 for (int i = RegisterAllocator::kNumRegisters - 1; i >= 0; i--) { |
| 60 int action = registers_[i]; | 61 int action = registers_[i]; |
| 61 if (action == kPush) { | 62 if (action == kPush) { |
| 62 __ pop(RegisterAllocator::ToRegister(i)); | 63 __ pop(RegisterAllocator::ToRegister(i)); |
| 63 } else if (action != kIgnore) { | 64 } else if (action != kIgnore) { |
| 64 action &= ~kSyncedFlag; | 65 action &= ~kSyncedFlag; |
| 65 __ movq(RegisterAllocator::ToRegister(i), Operand(rbp, action)); | 66 __ movq(RegisterAllocator::ToRegister(i), Operand(rbp, action)); |
| 66 } | 67 } |
| (...skipping 163 matching lines...) |
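For readers skimming the hunk above: SaveRegisters/RestoreRegisters drive a per-register action table. Below is a minimal standalone model of that protocol; the sentinel values and container types are assumptions (the real kPush/kIgnore/kSyncedFlag constants live in DeferredCode and are not shown in this diff), but it shows why the restore loop must walk the registers in reverse: pushes and pops pair up LIFO.

```cpp
#include <stack>
#include <vector>

// Assumed sentinels; the real values are DeferredCode internals.
enum : int { kIgnore = -1, kPush = -2 };
constexpr int kSyncedFlag = 1 << 30;
// Any other action value is an rbp-relative frame slot (possibly with
// kSyncedFlag or'ed in when memory is already up to date).

void Save(const std::vector<int>& actions, const long* regs,
          std::stack<long>* machine_stack, long* frame) {
  for (size_t i = 0; i < actions.size(); i++) {
    int action = actions[i];
    if (action == kPush) {
      machine_stack->push(regs[i]);            // save on the machine stack
    } else if (action != kIgnore && (action & kSyncedFlag) == 0) {
      frame[action] = regs[i];                 // spill to its frame slot
    }
  }
}

void Restore(const std::vector<int>& actions, long* regs,
             std::stack<long>* machine_stack, const long* frame) {
  // Reverse order: the last register pushed must be the first popped.
  for (int i = static_cast<int>(actions.size()) - 1; i >= 0; i--) {
    int action = actions[i];
    if (action == kPush) {
      regs[i] = machine_stack->top();
      machine_stack->pop();
    } else if (action != kIgnore) {
      regs[i] = frame[action & ~kSyncedFlag];  // reload, synced or not
    }
  }
}
```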
| 230 // object. | 231 // object. |
| 231 // Returns operands as 32-bit sign-extended integers in general purpose | 232 // Returns operands as 32-bit sign-extended integers in general purpose |
| 232 // registers. | 233 // registers. |
| 233 static void LoadInt32Operand(MacroAssembler* masm, | 234 static void LoadInt32Operand(MacroAssembler* masm, |
| 234 const Operand& src, | 235 const Operand& src, |
| 235 Register dst); | 236 Register dst); |
| 236 | 237 |
| 237 // Test if operands are smi or number objects (fp). Requirements: | 238 // Test if operands are smi or number objects (fp). Requirements: |
| 238 // operand_1 in rax, operand_2 in rdx; falls through on float or smi | 239 // operand_1 in rax, operand_2 in rdx; falls through on float or smi |
| 239 // operands, jumps to the non_float label otherwise. | 240 // operands, jumps to the non_float label otherwise. |
| 240 static void CheckFloatOperands(MacroAssembler* masm, | 241 static void CheckNumberOperands(MacroAssembler* masm, |
| 241 Label* non_float); | 242 Label* non_float); |
| 242 | 243 |
| 243 // Allocate a heap number in new space with undefined value. | 244 // Allocate a heap number in new space with undefined value. |
| 244 // Returns tagged pointer in result, or jumps to need_gc if new space is full. | 245 // Returns tagged pointer in result, or jumps to need_gc if new space is full. |
| 245 static void AllocateHeapNumber(MacroAssembler* masm, | 246 static void AllocateHeapNumber(MacroAssembler* masm, |
| 246 Label* need_gc, | 247 Label* need_gc, |
| 247 Register scratch, | 248 Register scratch, |
| 248 Register result); | 249 Register result); |
| 249 }; | 250 }; |
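The rename from CheckFloatOperands to CheckNumberOperands matches the comment above it: the helper accepts smis as well as heap numbers. A hypothetical call site, for orientation only (the enclosing helper class and the stub code around it are elided from this diff, so every label name here is an assumption):

```cpp
// Illustrative sketch, not code from this CL.
Label non_number;   // taken when rax/rdx hold something other than smi/number
Label need_gc;      // taken when new space cannot fit a fresh heap number

// Falls through when both operands are smis or heap numbers.
CheckNumberOperands(masm, &non_number);
// Box a double result: rax receives the tagged heap number on success.
AllocateHeapNumber(masm, &need_gc, /*scratch=*/rcx, /*result=*/rax);
```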
| 250 | 251 |
| 251 | 252 |
| (...skipping 19 matching lines...) |
| 271 | 272 |
| 272 void CodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) { | 273 void CodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) { |
| 273 // Call the runtime to declare the globals. The inevitable call | 274 // Call the runtime to declare the globals. The inevitable call |
| 274 // will sync frame elements to memory anyway, so we do it eagerly to | 275 // will sync frame elements to memory anyway, so we do it eagerly to |
| 275 // allow us to push the arguments directly into place. | 276 // allow us to push the arguments directly into place. |
| 276 frame_->SyncRange(0, frame_->element_count() - 1); | 277 frame_->SyncRange(0, frame_->element_count() - 1); |
| 277 | 278 |
| 278 __ movq(kScratchRegister, pairs, RelocInfo::EMBEDDED_OBJECT); | 279 __ movq(kScratchRegister, pairs, RelocInfo::EMBEDDED_OBJECT); |
| 279 frame_->EmitPush(kScratchRegister); | 280 frame_->EmitPush(kScratchRegister); |
| 280 frame_->EmitPush(rsi); // The context is the second argument. | 281 frame_->EmitPush(rsi); // The context is the second argument. |
| 281 frame_->EmitPush(Immediate(Smi::FromInt(is_eval() ? 1 : 0))); | 282 frame_->EmitPush(Smi::FromInt(is_eval() ? 1 : 0)); |
| 282 Result ignored = frame_->CallRuntime(Runtime::kDeclareGlobals, 3); | 283 Result ignored = frame_->CallRuntime(Runtime::kDeclareGlobals, 3); |
| 283 // Return value is ignored. | 284 // Return value is ignored. |
| 284 } | 285 } |
| 285 | 286 |
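This hunk introduces the pattern that repeats throughout the CL: `frame_->EmitPush(Immediate(Smi::FromInt(...)))` becomes `frame_->EmitPush(Smi::FromInt(...))`. A sketch of why a dedicated Smi overload is needed, under the assumption that x64 smis are full 64-bit tagged values that can exceed a sign-extended 32-bit immediate (the real overload is defined in the virtual frame, outside this diff):

```cpp
// Hedged sketch of a Smi-aware push; Move(Register, Smi*) is used
// elsewhere in this CL, so it is assumed to exist.
void VirtualFrame::EmitPush(Smi* value) {
  // push(Immediate(value)) would truncate a 64-bit tagged constant, so
  // materialize it in the scratch register first.
  __ Move(kScratchRegister, value);
  __ push(kScratchRegister);
  // ...followed by the usual virtual-frame bookkeeping for the new slot.
}
```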
| 286 | 287 |
| 287 void CodeGenerator::GenCode(FunctionLiteral* function) { | 288 void CodeGenerator::GenCode(FunctionLiteral* function) { |
| 288 // Record the position for debugging purposes. | 289 // Record the position for debugging purposes. |
| 289 CodeForFunctionPosition(function); | 290 CodeForFunctionPosition(function); |
| 290 ZoneList<Statement*>* body = function->body(); | 291 ZoneList<Statement*>* body = function->body(); |
| 291 | 292 |
| (...skipping 468 matching lines...) |
| 760 __ movq(rdi, Operand(rsp, 2 * kPointerSize)); | 761 __ movq(rdi, Operand(rsp, 2 * kPointerSize)); |
| 761 Condition is_smi = masm_->CheckSmi(rdi); | 762 Condition is_smi = masm_->CheckSmi(rdi); |
| 762 build_args.Branch(is_smi); | 763 build_args.Branch(is_smi); |
| 763 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); | 764 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); |
| 764 build_args.Branch(not_equal); | 765 build_args.Branch(not_equal); |
| 765 | 766 |
| 766 // Copy the arguments to this function possibly from the | 767 // Copy the arguments to this function possibly from the |
| 767 // adaptor frame below it. | 768 // adaptor frame below it. |
| 768 Label invoke, adapted; | 769 Label invoke, adapted; |
| 769 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); | 770 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); |
| 770 __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset)); | 771 __ SmiCompare(Operand(rdx, StandardFrameConstants::kContextOffset), |
| 771 __ cmpq(rcx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | 772 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); |
| 772 __ j(equal, &adapted); | 773 __ j(equal, &adapted); |
| 773 | 774 |
| 774 // No arguments adaptor frame. Copy fixed number of arguments. | 775 // No arguments adaptor frame. Copy fixed number of arguments. |
| 775 __ movq(rax, Immediate(scope_->num_parameters())); | 776 __ movq(rax, Immediate(scope_->num_parameters())); |
| 776 for (int i = 0; i < scope_->num_parameters(); i++) { | 777 for (int i = 0; i < scope_->num_parameters(); i++) { |
| 777 __ push(frame_->ParameterAt(i)); | 778 __ push(frame_->ParameterAt(i)); |
| 778 } | 779 } |
| 779 __ jmp(&invoke); | 780 __ jmp(&invoke); |
| 780 | 781 |
| 781 // Arguments adaptor frame present. Copy arguments from there, but | 782 // Arguments adaptor frame present. Copy arguments from there, but |
| 782 // avoid copying too many arguments to avoid stack overflows. | 783 // avoid copying too many arguments to avoid stack overflows. |
| 783 __ bind(&adapted); | 784 __ bind(&adapted); |
| 784 static const uint32_t kArgumentsLimit = 1 * KB; | 785 static const uint32_t kArgumentsLimit = 1 * KB; |
| 785 __ movq(rax, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset)); | 786 __ movq(rax, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
| 786 __ SmiToInteger32(rax, rax); | 787 __ SmiToInteger32(rax, rax); |
| 787 __ movq(rcx, rax); | 788 __ movq(rcx, rax); |
| 788 __ cmpq(rax, Immediate(kArgumentsLimit)); | 789 __ cmpq(rax, Immediate(kArgumentsLimit)); |
| 789 build_args.Branch(above); | 790 build_args.Branch(above); |
| 790 | 791 |
| 791 // Loop through the arguments pushing them onto the execution | 792 // Loop through the arguments pushing them onto the execution |
| 792 // stack. We don't inform the virtual frame of the push, so we don't | 793 // stack. We don't inform the virtual frame of the push, so we don't |
| 793 // have to worry about getting rid of the elements from the virtual | 794 // have to worry about getting rid of the elements from the virtual |
| 794 // frame. | 795 // frame. |
| 795 Label loop; | 796 Label loop; |
| 796 __ bind(&loop); | |
| 797 __ testl(rcx, rcx); | 797 __ testl(rcx, rcx); |
| 798 __ j(zero, &invoke); | 798 __ j(zero, &invoke); |
| | 799 __ bind(&loop); |
| 799 __ push(Operand(rdx, rcx, times_pointer_size, 1 * kPointerSize)); | 800 __ push(Operand(rdx, rcx, times_pointer_size, 1 * kPointerSize)); |
| 800 __ decl(rcx); | 801 __ decl(rcx); |
| 801 __ jmp(&loop); | 802 __ j(not_zero, &loop); |
| 802 | 803 |
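The loop rewrite above is a classic rotation: the zero test moves out of the loop, `bind(&loop)` moves below it, and the unconditional `jmp` back becomes a `j(not_zero)` keyed off the flags that `decl` already set, saving one jump per iteration. Its control-flow equivalent:

```cpp
// Illustrative C++-level equivalent of the rewritten argument-copy loop.
// count mirrors rcx; arg(i) stands for the 1-based operand off rdx.
if (count != 0) {          // testl(rcx, rcx); j(zero, &invoke)
  do {                     // bind(&loop)
    Push(arg(count));      // push(Operand(rdx, rcx, times_pointer_size, ...))
    count--;               // decl(rcx) -- sets the zero flag
  } while (count != 0);    // j(not_zero, &loop) -- was an unconditional jmp
}
```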
| 803 // Invoke the function. The virtual frame knows about the receiver | 804 // Invoke the function. The virtual frame knows about the receiver |
| 804 // so make sure to forget that explicitly. | 805 // so make sure to forget that explicitly. |
| 805 __ bind(&invoke); | 806 __ bind(&invoke); |
| 806 ParameterCount actual(rax); | 807 ParameterCount actual(rax); |
| 807 __ InvokeFunction(rdi, actual, CALL_FUNCTION); | 808 __ InvokeFunction(rdi, actual, CALL_FUNCTION); |
| 808 frame_->Forget(1); | 809 frame_->Forget(1); |
| 809 Result result = allocator()->Allocate(rax); | 810 Result result = allocator()->Allocate(rax); |
| 810 frame_->SetElementAt(0, &result); | 811 frame_->SetElementAt(0, &result); |
| 811 done.Jump(); | 812 done.Jump(); |
| (...skipping 114 matching lines...) |
| 926 ASSERT(var->is_dynamic()); | 927 ASSERT(var->is_dynamic()); |
| 927 // For now, just do a runtime call. Sync the virtual frame eagerly | 928 // For now, just do a runtime call. Sync the virtual frame eagerly |
| 928 // so we can simply push the arguments into place. | 929 // so we can simply push the arguments into place. |
| 929 frame_->SyncRange(0, frame_->element_count() - 1); | 930 frame_->SyncRange(0, frame_->element_count() - 1); |
| 930 frame_->EmitPush(rsi); | 931 frame_->EmitPush(rsi); |
| 931 __ movq(kScratchRegister, var->name(), RelocInfo::EMBEDDED_OBJECT); | 932 __ movq(kScratchRegister, var->name(), RelocInfo::EMBEDDED_OBJECT); |
| 932 frame_->EmitPush(kScratchRegister); | 933 frame_->EmitPush(kScratchRegister); |
| 933 // Declaration nodes are always introduced in one of two modes. | 934 // Declaration nodes are always introduced in one of two modes. |
| 934 ASSERT(node->mode() == Variable::VAR || node->mode() == Variable::CONST); | 935 ASSERT(node->mode() == Variable::VAR || node->mode() == Variable::CONST); |
| 935 PropertyAttributes attr = node->mode() == Variable::VAR ? NONE : READ_ONLY; | 936 PropertyAttributes attr = node->mode() == Variable::VAR ? NONE : READ_ONLY; |
| 936 frame_->EmitPush(Immediate(Smi::FromInt(attr))); | 937 frame_->EmitPush(Smi::FromInt(attr)); |
| 937 // Push initial value, if any. | 938 // Push initial value, if any. |
| 938 // Note: For variables we must not push an initial value (such as | 939 // Note: For variables we must not push an initial value (such as |
| 939 // 'undefined') because we may have a (legal) redeclaration and we | 940 // 'undefined') because we may have a (legal) redeclaration and we |
| 940 // must not destroy the current value. | 941 // must not destroy the current value. |
| 941 if (node->mode() == Variable::CONST) { | 942 if (node->mode() == Variable::CONST) { |
| 942 frame_->EmitPush(Heap::kTheHoleValueRootIndex); | 943 frame_->EmitPush(Heap::kTheHoleValueRootIndex); |
| 943 } else if (node->fun() != NULL) { | 944 } else if (node->fun() != NULL) { |
| 944 Load(node->fun()); | 945 Load(node->fun()); |
| 945 } else { | 946 } else { |
| 946 frame_->EmitPush(Immediate(Smi::FromInt(0))); // no initial value! | 947 frame_->EmitPush(Smi::FromInt(0)); // no initial value! |
| 947 } | 948 } |
| 948 Result ignored = frame_->CallRuntime(Runtime::kDeclareContextSlot, 4); | 949 Result ignored = frame_->CallRuntime(Runtime::kDeclareContextSlot, 4); |
| 949 // Ignore the return value (declarations are statements). | 950 // Ignore the return value (declarations are statements). |
| 950 return; | 951 return; |
| 951 } | 952 } |
| 952 | 953 |
| 953 ASSERT(!var->is_global()); | 954 ASSERT(!var->is_global()); |
| 954 | 955 |
| 955 // If we have a function or a constant, we need to initialize the variable. | 956 // If we have a function or a constant, we need to initialize the variable. |
| 956 Expression* val = NULL; | 957 Expression* val = NULL; |
| (...skipping 736 matching lines...) |
| 1693 // Get the bridge array held in the enumeration index field. | 1694 // Get the bridge array held in the enumeration index field. |
| 1694 __ movq(rcx, FieldOperand(rcx, DescriptorArray::kEnumerationIndexOffset)); | 1695 __ movq(rcx, FieldOperand(rcx, DescriptorArray::kEnumerationIndexOffset)); |
| 1695 // Get the cache from the bridge array. | 1696 // Get the cache from the bridge array. |
| 1696 __ movq(rdx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset)); | 1697 __ movq(rdx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset)); |
| 1697 | 1698 |
| 1698 frame_->EmitPush(rax); // <- slot 3 | 1699 frame_->EmitPush(rax); // <- slot 3 |
| 1699 frame_->EmitPush(rdx); // <- slot 2 | 1700 frame_->EmitPush(rdx); // <- slot 2 |
| 1700 __ movl(rax, FieldOperand(rdx, FixedArray::kLengthOffset)); | 1701 __ movl(rax, FieldOperand(rdx, FixedArray::kLengthOffset)); |
| 1701 __ Integer32ToSmi(rax, rax); | 1702 __ Integer32ToSmi(rax, rax); |
| 1702 frame_->EmitPush(rax); // <- slot 1 | 1703 frame_->EmitPush(rax); // <- slot 1 |
| 1703 frame_->EmitPush(Immediate(Smi::FromInt(0))); // <- slot 0 | 1704 frame_->EmitPush(Smi::FromInt(0)); // <- slot 0 |
| 1704 entry.Jump(); | 1705 entry.Jump(); |
| 1705 | 1706 |
| 1706 fixed_array.Bind(); | 1707 fixed_array.Bind(); |
| 1707 // rax: fixed array (result from call to Runtime::kGetPropertyNamesFast) | 1708 // rax: fixed array (result from call to Runtime::kGetPropertyNamesFast) |
| 1708 frame_->EmitPush(Immediate(Smi::FromInt(0))); // <- slot 3 | 1709 frame_->EmitPush(Smi::FromInt(0)); // <- slot 3 |
| 1709 frame_->EmitPush(rax); // <- slot 2 | 1710 frame_->EmitPush(rax); // <- slot 2 |
| 1710 | 1711 |
| 1711 // Push the length of the array and the initial index onto the stack. | 1712 // Push the length of the array and the initial index onto the stack. |
| 1712 __ movl(rax, FieldOperand(rax, FixedArray::kLengthOffset)); | 1713 __ movl(rax, FieldOperand(rax, FixedArray::kLengthOffset)); |
| 1713 __ Integer32ToSmi(rax, rax); | 1714 __ Integer32ToSmi(rax, rax); |
| 1714 frame_->EmitPush(rax); // <- slot 1 | 1715 frame_->EmitPush(rax); // <- slot 1 |
| 1715 frame_->EmitPush(Immediate(Smi::FromInt(0))); // <- slot 0 | 1716 frame_->EmitPush(Smi::FromInt(0)); // <- slot 0 |
| 1716 | 1717 |
| 1717 // Condition. | 1718 // Condition. |
| 1718 entry.Bind(); | 1719 entry.Bind(); |
| 1719 // Grab the current frame's height for the break and continue | 1720 // Grab the current frame's height for the break and continue |
| 1720 // targets only after all the state is pushed on the frame. | 1721 // targets only after all the state is pushed on the frame. |
| 1721 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY); | 1722 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY); |
| 1722 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY); | 1723 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY); |
| 1723 | 1724 |
| 1724 __ movq(rax, frame_->ElementAt(0)); // load the current count | 1725 __ movq(rax, frame_->ElementAt(0)); // load the current count |
| 1725 __ cmpl(rax, frame_->ElementAt(1)); // compare to the array length | 1726 __ SmiCompare(frame_->ElementAt(1), rax); // compare to the array length |
| 1726 node->break_target()->Branch(above_equal); | 1727 node->break_target()->Branch(below_equal); |
| 1727 | 1728 |
| 1728 // Get the i'th entry of the array. | 1729 // Get the i'th entry of the array. |
| 1729 __ movq(rdx, frame_->ElementAt(2)); | 1730 __ movq(rdx, frame_->ElementAt(2)); |
| 1730 SmiIndex index = masm_->SmiToIndex(rbx, rax, kPointerSizeLog2); | 1731 SmiIndex index = masm_->SmiToIndex(rbx, rax, kPointerSizeLog2); |
| 1731 __ movq(rbx, | 1732 __ movq(rbx, |
| 1732 FieldOperand(rdx, index.reg, index.scale, FixedArray::kHeaderSize)); | 1733 FieldOperand(rdx, index.reg, index.scale, FixedArray::kHeaderSize)); |
| 1733 | 1734 |
| 1734 // Get the expected map from the stack or a zero map in the | 1735 // Get the expected map from the stack or a zero map in the |
| 1735 // permanent slow case. rax: current iteration count, rbx: i'th entry | 1736 // permanent slow case. rax: current iteration count, rbx: i'th entry |
| 1736 // of the enum cache. | 1737 // of the enum cache. |
| (...skipping 52 matching lines...) |
| 1789 | 1790 |
| 1790 // Body. | 1791 // Body. |
| 1791 CheckStack(); // TODO(1222600): ignore if body contains calls. | 1792 CheckStack(); // TODO(1222600): ignore if body contains calls. |
| 1792 VisitAndSpill(node->body()); | 1793 VisitAndSpill(node->body()); |
| 1793 | 1794 |
| 1794 // Next. Reestablish a spilled frame in case we are coming here via | 1795 // Next. Reestablish a spilled frame in case we are coming here via |
| 1795 // a continue in the body. | 1796 // a continue in the body. |
| 1796 node->continue_target()->Bind(); | 1797 node->continue_target()->Bind(); |
| 1797 frame_->SpillAll(); | 1798 frame_->SpillAll(); |
| 1798 frame_->EmitPop(rax); | 1799 frame_->EmitPop(rax); |
| 1799 __ addq(rax, Immediate(Smi::FromInt(1))); | 1800 __ SmiAddConstant(rax, rax, Smi::FromInt(1)); |
| 1800 frame_->EmitPush(rax); | 1801 frame_->EmitPush(rax); |
| 1801 entry.Jump(); | 1802 entry.Jump(); |
| 1802 | 1803 |
| 1803 // Cleanup. No need to spill because VirtualFrame::Drop is safe for | 1804 // Cleanup. No need to spill because VirtualFrame::Drop is safe for |
| 1804 // any frame. | 1805 // any frame. |
| 1805 node->break_target()->Bind(); | 1806 node->break_target()->Bind(); |
| 1806 frame_->Drop(5); | 1807 frame_->Drop(5); |
| 1807 | 1808 |
| 1808 // Exit. | 1809 // Exit. |
| 1809 exit.Bind(); | 1810 exit.Bind(); |
| (...skipping 152 matching lines...) |
| 1962 // break/continue from within the try block. | 1963 // break/continue from within the try block. |
| 1963 enum { FALLING, THROWING, JUMPING }; | 1964 enum { FALLING, THROWING, JUMPING }; |
| 1964 | 1965 |
| 1965 JumpTarget try_block; | 1966 JumpTarget try_block; |
| 1966 JumpTarget finally_block; | 1967 JumpTarget finally_block; |
| 1967 | 1968 |
| 1968 try_block.Call(); | 1969 try_block.Call(); |
| 1969 | 1970 |
| 1970 frame_->EmitPush(rax); | 1971 frame_->EmitPush(rax); |
| 1971 // In case of thrown exceptions, this is where we continue. | 1972 // In case of thrown exceptions, this is where we continue. |
| 1972 __ movq(rcx, Immediate(Smi::FromInt(THROWING))); | 1973 __ Move(rcx, Smi::FromInt(THROWING)); |
| 1973 finally_block.Jump(); | 1974 finally_block.Jump(); |
| 1974 | 1975 |
| 1975 // --- Try block --- | 1976 // --- Try block --- |
| 1976 try_block.Bind(); | 1977 try_block.Bind(); |
| 1977 | 1978 |
| 1978 frame_->PushTryHandler(TRY_FINALLY_HANDLER); | 1979 frame_->PushTryHandler(TRY_FINALLY_HANDLER); |
| 1979 int handler_height = frame_->height(); | 1980 int handler_height = frame_->height(); |
| 1980 | 1981 |
| 1981 // Shadow the jump targets for all escapes from the try block, including | 1982 // Shadow the jump targets for all escapes from the try block, including |
| 1982 // returns. During shadowing, the original target is hidden as the | 1983 // returns. During shadowing, the original target is hidden as the |
| (...skipping 38 matching lines...) |
| 2021 if (has_valid_frame()) { | 2022 if (has_valid_frame()) { |
| 2022 // The next handler address is on top of the frame. | 2023 // The next handler address is on top of the frame. |
| 2023 ASSERT(StackHandlerConstants::kNextOffset == 0); | 2024 ASSERT(StackHandlerConstants::kNextOffset == 0); |
| 2024 __ movq(kScratchRegister, handler_address); | 2025 __ movq(kScratchRegister, handler_address); |
| 2025 frame_->EmitPop(Operand(kScratchRegister, 0)); | 2026 frame_->EmitPop(Operand(kScratchRegister, 0)); |
| 2026 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1); | 2027 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1); |
| 2027 | 2028 |
| 2028 // Fake a top of stack value (unneeded when FALLING) and set the | 2029 // Fake a top of stack value (unneeded when FALLING) and set the |
| 2029 // state in rcx, then jump around the unlink blocks if any. | 2030 // state in rcx, then jump around the unlink blocks if any. |
| 2030 frame_->EmitPush(Heap::kUndefinedValueRootIndex); | 2031 frame_->EmitPush(Heap::kUndefinedValueRootIndex); |
| 2031 __ movq(rcx, Immediate(Smi::FromInt(FALLING))); | 2032 __ Move(rcx, Smi::FromInt(FALLING)); |
| 2032 if (nof_unlinks > 0) { | 2033 if (nof_unlinks > 0) { |
| 2033 finally_block.Jump(); | 2034 finally_block.Jump(); |
| 2034 } | 2035 } |
| 2035 } | 2036 } |
| 2036 | 2037 |
| 2037 // Generate code to unlink and set the state for the (formerly) | 2038 // Generate code to unlink and set the state for the (formerly) |
| 2038 // shadowing targets that have been jumped to. | 2039 // shadowing targets that have been jumped to. |
| 2039 for (int i = 0; i < shadows.length(); i++) { | 2040 for (int i = 0; i < shadows.length(); i++) { |
| 2040 if (shadows[i]->is_linked()) { | 2041 if (shadows[i]->is_linked()) { |
| 2041 // If we have come from the shadowed return, the return value is | 2042 // If we have come from the shadowed return, the return value is |
| (...skipping 25 matching lines...) |
| 2067 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1); | 2068 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1); |
| 2068 | 2069 |
| 2069 if (i == kReturnShadowIndex) { | 2070 if (i == kReturnShadowIndex) { |
| 2070 // If this target shadowed the function return, materialize | 2071 // If this target shadowed the function return, materialize |
| 2071 // the return value on the stack. | 2072 // the return value on the stack. |
| 2072 frame_->EmitPush(rax); | 2073 frame_->EmitPush(rax); |
| 2073 } else { | 2074 } else { |
| 2074 // Fake TOS for targets that shadowed breaks and continues. | 2075 // Fake TOS for targets that shadowed breaks and continues. |
| 2075 frame_->EmitPush(Heap::kUndefinedValueRootIndex); | 2076 frame_->EmitPush(Heap::kUndefinedValueRootIndex); |
| 2076 } | 2077 } |
| 2077 __ movq(rcx, Immediate(Smi::FromInt(JUMPING + i))); | 2078 __ Move(rcx, Smi::FromInt(JUMPING + i)); |
| 2078 if (--nof_unlinks > 0) { | 2079 if (--nof_unlinks > 0) { |
| 2079 // If this is not the last unlink block, jump around the next. | 2080 // If this is not the last unlink block, jump around the next. |
| 2080 finally_block.Jump(); | 2081 finally_block.Jump(); |
| 2081 } | 2082 } |
| 2082 } | 2083 } |
| 2083 } | 2084 } |
| 2084 | 2085 |
| 2085 // --- Finally block --- | 2086 // --- Finally block --- |
| 2086 finally_block.Bind(); | 2087 finally_block.Bind(); |
| 2087 | 2088 |
| (...skipping 10 matching lines...) |
| 2098 // Restore state and return value or faked TOS. | 2099 // Restore state and return value or faked TOS. |
| 2099 frame_->EmitPop(rcx); | 2100 frame_->EmitPop(rcx); |
| 2100 frame_->EmitPop(rax); | 2101 frame_->EmitPop(rax); |
| 2101 } | 2102 } |
| 2102 | 2103 |
| 2103 // Generate code to jump to the right destination for all used | 2104 // Generate code to jump to the right destination for all used |
| 2104 // formerly shadowing targets. Deallocate each shadow target. | 2105 // formerly shadowing targets. Deallocate each shadow target. |
| 2105 for (int i = 0; i < shadows.length(); i++) { | 2106 for (int i = 0; i < shadows.length(); i++) { |
| 2106 if (has_valid_frame() && shadows[i]->is_bound()) { | 2107 if (has_valid_frame() && shadows[i]->is_bound()) { |
| 2107 BreakTarget* original = shadows[i]->other_target(); | 2108 BreakTarget* original = shadows[i]->other_target(); |
| 2108 __ cmpq(rcx, Immediate(Smi::FromInt(JUMPING + i))); | 2109 __ SmiCompare(rcx, Smi::FromInt(JUMPING + i)); |
| 2109 if (i == kReturnShadowIndex) { | 2110 if (i == kReturnShadowIndex) { |
| 2110 // The return value is (already) in rax. | 2111 // The return value is (already) in rax. |
| 2111 Result return_value = allocator_->Allocate(rax); | 2112 Result return_value = allocator_->Allocate(rax); |
| 2112 ASSERT(return_value.is_valid()); | 2113 ASSERT(return_value.is_valid()); |
| 2113 if (function_return_is_shadowed_) { | 2114 if (function_return_is_shadowed_) { |
| 2114 original->Branch(equal, &return_value); | 2115 original->Branch(equal, &return_value); |
| 2115 } else { | 2116 } else { |
| 2116 // Branch around the preparation for return which may emit | 2117 // Branch around the preparation for return which may emit |
| 2117 // code. | 2118 // code. |
| 2118 JumpTarget skip; | 2119 JumpTarget skip; |
| 2119 skip.Branch(not_equal); | 2120 skip.Branch(not_equal); |
| 2120 frame_->PrepareForReturn(); | 2121 frame_->PrepareForReturn(); |
| 2121 original->Jump(&return_value); | 2122 original->Jump(&return_value); |
| 2122 skip.Bind(); | 2123 skip.Bind(); |
| 2123 } | 2124 } |
| 2124 } else { | 2125 } else { |
| 2125 original->Branch(equal); | 2126 original->Branch(equal); |
| 2126 } | 2127 } |
| 2127 } | 2128 } |
| 2128 } | 2129 } |
| 2129 | 2130 |
| 2130 if (has_valid_frame()) { | 2131 if (has_valid_frame()) { |
| 2131 // Check if we need to rethrow the exception. | 2132 // Check if we need to rethrow the exception. |
| 2132 JumpTarget exit; | 2133 JumpTarget exit; |
| 2133 __ cmpq(rcx, Immediate(Smi::FromInt(THROWING))); | 2134 __ SmiCompare(rcx, Smi::FromInt(THROWING)); |
| 2134 exit.Branch(not_equal); | 2135 exit.Branch(not_equal); |
| 2135 | 2136 |
| 2136 // Rethrow exception. | 2137 // Rethrow exception. |
| 2137 frame_->EmitPush(rax); // undo pop from above | 2138 frame_->EmitPush(rax); // undo pop from above |
| 2138 frame_->CallRuntime(Runtime::kReThrow, 1); | 2139 frame_->CallRuntime(Runtime::kReThrow, 1); |
| 2139 | 2140 |
| 2140 // Done. | 2141 // Done. |
| 2141 exit.Bind(); | 2142 exit.Bind(); |
| 2142 } | 2143 } |
| 2143 } | 2144 } |
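All the state moves and compares in this function switch to `Move`/`SmiCompare`, but the dispatch they implement is unchanged. For orientation, the finally epilogue behaves like the following pseudocode (the helper names are illustrative, not APIs from the CL):

```cpp
// State is a smi kept in rcx across the finally block.
switch (state) {
  case FALLING:                     // normal fall-through: keep executing
    break;
  case THROWING:                    // re-raise the saved exception in rax
    ReThrow(value);                 // Runtime::kReThrow in the real code
    break;
  default:                          // JUMPING + i: resume the i'th formerly
    ResumeShadowedTarget(state - JUMPING);  // shadowed break/continue/return
    break;
}
```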
| (...skipping 127 matching lines...) |
| 2271 RegExpLiteral* node_; | 2272 RegExpLiteral* node_; |
| 2272 }; | 2273 }; |
| 2273 | 2274 |
| 2274 | 2275 |
| 2275 void DeferredRegExpLiteral::Generate() { | 2276 void DeferredRegExpLiteral::Generate() { |
| 2276 // Since the entry is undefined we call the runtime system to | 2277 // Since the entry is undefined we call the runtime system to |
| 2277 // compute the literal. | 2278 // compute the literal. |
| 2278 // Literal array (0). | 2279 // Literal array (0). |
| 2279 __ push(literals_); | 2280 __ push(literals_); |
| 2280 // Literal index (1). | 2281 // Literal index (1). |
| 2281 __ push(Immediate(Smi::FromInt(node_->literal_index()))); | 2282 __ Push(Smi::FromInt(node_->literal_index())); |
| 2282 // RegExp pattern (2). | 2283 // RegExp pattern (2). |
| 2283 __ Push(node_->pattern()); | 2284 __ Push(node_->pattern()); |
| 2284 // RegExp flags (3). | 2285 // RegExp flags (3). |
| 2285 __ Push(node_->flags()); | 2286 __ Push(node_->flags()); |
| 2286 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4); | 2287 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4); |
| 2287 if (!boilerplate_.is(rax)) __ movq(boilerplate_, rax); | 2288 if (!boilerplate_.is(rax)) __ movq(boilerplate_, rax); |
| 2288 } | 2289 } |
| 2289 | 2290 |
| 2290 | 2291 |
| 2291 void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) { | 2292 void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) { |
| (...skipping 52 matching lines...) |
| 2344 ObjectLiteral* node_; | 2345 ObjectLiteral* node_; |
| 2345 }; | 2346 }; |
| 2346 | 2347 |
| 2347 | 2348 |
| 2348 void DeferredObjectLiteral::Generate() { | 2349 void DeferredObjectLiteral::Generate() { |
| 2349 // Since the entry is undefined we call the runtime system to | 2350 // Since the entry is undefined we call the runtime system to |
| 2350 // compute the literal. | 2351 // compute the literal. |
| 2351 // Literal array (0). | 2352 // Literal array (0). |
| 2352 __ push(literals_); | 2353 __ push(literals_); |
| 2353 // Literal index (1). | 2354 // Literal index (1). |
| 2354 __ push(Immediate(Smi::FromInt(node_->literal_index()))); | 2355 __ Push(Smi::FromInt(node_->literal_index())); |
| 2355 // Constant properties (2). | 2356 // Constant properties (2). |
| 2356 __ Push(node_->constant_properties()); | 2357 __ Push(node_->constant_properties()); |
| 2357 __ CallRuntime(Runtime::kCreateObjectLiteralBoilerplate, 3); | 2358 __ CallRuntime(Runtime::kCreateObjectLiteralBoilerplate, 3); |
| 2358 if (!boilerplate_.is(rax)) __ movq(boilerplate_, rax); | 2359 if (!boilerplate_.is(rax)) __ movq(boilerplate_, rax); |
| 2359 } | 2360 } |
| 2360 | 2361 |
| 2361 | 2362 |
| 2362 void CodeGenerator::VisitObjectLiteral(ObjectLiteral* node) { | 2363 void CodeGenerator::VisitObjectLiteral(ObjectLiteral* node) { |
| 2363 Comment cmnt(masm_, "[ ObjectLiteral"); | 2364 Comment cmnt(masm_, "[ ObjectLiteral"); |
| 2364 | 2365 |
| (...skipping 112 matching lines...) |
| 2477 ArrayLiteral* node_; | 2478 ArrayLiteral* node_; |
| 2478 }; | 2479 }; |
| 2479 | 2480 |
| 2480 | 2481 |
| 2481 void DeferredArrayLiteral::Generate() { | 2482 void DeferredArrayLiteral::Generate() { |
| 2482 // Since the entry is undefined we call the runtime system to | 2483 // Since the entry is undefined we call the runtime system to |
| 2483 // compute the literal. | 2484 // compute the literal. |
| 2484 // Literal array (0). | 2485 // Literal array (0). |
| 2485 __ push(literals_); | 2486 __ push(literals_); |
| 2486 // Literal index (1). | 2487 // Literal index (1). |
| 2487 __ push(Immediate(Smi::FromInt(node_->literal_index()))); | 2488 __ Push(Smi::FromInt(node_->literal_index())); |
| 2488 // Constant properties (2). | 2489 // Constant properties (2). |
| 2489 __ Push(node_->literals()); | 2490 __ Push(node_->literals()); |
| 2490 __ CallRuntime(Runtime::kCreateArrayLiteralBoilerplate, 3); | 2491 __ CallRuntime(Runtime::kCreateArrayLiteralBoilerplate, 3); |
| 2491 if (!boilerplate_.is(rax)) __ movq(boilerplate_, rax); | 2492 if (!boilerplate_.is(rax)) __ movq(boilerplate_, rax); |
| 2492 } | 2493 } |
| 2493 | 2494 |
| 2494 | 2495 |
| 2495 void CodeGenerator::VisitArrayLiteral(ArrayLiteral* node) { | 2496 void CodeGenerator::VisitArrayLiteral(ArrayLiteral* node) { |
| 2496 Comment cmnt(masm_, "[ ArrayLiteral"); | 2497 Comment cmnt(masm_, "[ ArrayLiteral"); |
| 2497 | 2498 |
| (...skipping 646 matching lines...) |
| 3144 private: | 3145 private: |
| 3145 Register dst_; | 3146 Register dst_; |
| 3146 bool is_increment_; | 3147 bool is_increment_; |
| 3147 }; | 3148 }; |
| 3148 | 3149 |
| 3149 | 3150 |
| 3150 void DeferredPrefixCountOperation::Generate() { | 3151 void DeferredPrefixCountOperation::Generate() { |
| 3151 __ push(dst_); | 3152 __ push(dst_); |
| 3152 __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION); | 3153 __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION); |
| 3153 __ push(rax); | 3154 __ push(rax); |
| 3154 __ push(Immediate(Smi::FromInt(1))); | 3155 __ Push(Smi::FromInt(1)); |
| 3155 if (is_increment_) { | 3156 if (is_increment_) { |
| 3156 __ CallRuntime(Runtime::kNumberAdd, 2); | 3157 __ CallRuntime(Runtime::kNumberAdd, 2); |
| 3157 } else { | 3158 } else { |
| 3158 __ CallRuntime(Runtime::kNumberSub, 2); | 3159 __ CallRuntime(Runtime::kNumberSub, 2); |
| 3159 } | 3160 } |
| 3160 if (!dst_.is(rax)) __ movq(dst_, rax); | 3161 if (!dst_.is(rax)) __ movq(dst_, rax); |
| 3161 } | 3162 } |
| 3162 | 3163 |
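At the language level, the deferred (non-smi) count path above computes the following; `ToNumber`, `NumberAdd`, and `NumberSub` stand in for the builtin and runtime calls the generated code makes, so this is a paraphrase rather than code from the CL:

```cpp
// Prefix form (++x / --x): convert, then add or subtract one.
Object* number = ToNumber(dst);                           // Builtins::TO_NUMBER
dst = is_increment ? NumberAdd(number, Smi::FromInt(1))   // Runtime::kNumberAdd
                   : NumberSub(number, Smi::FromInt(1));  // Runtime::kNumberSub
// The postfix variant (x++ / x--) additionally keeps `number` around as
// the expression's old value.
```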
| 3163 | 3164 |
| 3164 // The value in dst was optimistically incremented or decremented. The | 3165 // The value in dst was optimistically incremented or decremented. The |
| (...skipping 19 matching lines...) |
| 3184 | 3185 |
| 3185 void DeferredPostfixCountOperation::Generate() { | 3186 void DeferredPostfixCountOperation::Generate() { |
| 3186 __ push(dst_); | 3187 __ push(dst_); |
| 3187 __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION); | 3188 __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION); |
| 3188 | 3189 |
| 3189 // Save the result of ToNumber to use as the old value. | 3190 // Save the result of ToNumber to use as the old value. |
| 3190 __ push(rax); | 3191 __ push(rax); |
| 3191 | 3192 |
| 3192 // Call the runtime for the addition or subtraction. | 3193 // Call the runtime for the addition or subtraction. |
| 3193 __ push(rax); | 3194 __ push(rax); |
| 3194 __ push(Immediate(Smi::FromInt(1))); | 3195 __ Push(Smi::FromInt(1)); |
| 3195 if (is_increment_) { | 3196 if (is_increment_) { |
| 3196 __ CallRuntime(Runtime::kNumberAdd, 2); | 3197 __ CallRuntime(Runtime::kNumberAdd, 2); |
| 3197 } else { | 3198 } else { |
| 3198 __ CallRuntime(Runtime::kNumberSub, 2); | 3199 __ CallRuntime(Runtime::kNumberSub, 2); |
| 3199 } | 3200 } |
| 3200 if (!dst_.is(rax)) __ movq(dst_, rax); | 3201 if (!dst_.is(rax)) __ movq(dst_, rax); |
| 3201 __ pop(old_); | 3202 __ pop(old_); |
| 3202 } | 3203 } |
| 3203 | 3204 |
| 3204 | 3205 |
| (...skipping 37 matching lines...) |
| 3242 DeferredCode* deferred = NULL; | 3243 DeferredCode* deferred = NULL; |
| 3243 if (is_postfix) { | 3244 if (is_postfix) { |
| 3244 deferred = new DeferredPostfixCountOperation(new_value.reg(), | 3245 deferred = new DeferredPostfixCountOperation(new_value.reg(), |
| 3245 old_value.reg(), | 3246 old_value.reg(), |
| 3246 is_increment); | 3247 is_increment); |
| 3247 } else { | 3248 } else { |
| 3248 deferred = new DeferredPrefixCountOperation(new_value.reg(), | 3249 deferred = new DeferredPrefixCountOperation(new_value.reg(), |
| 3249 is_increment); | 3250 is_increment); |
| 3250 } | 3251 } |
| 3251 | 3252 |
| 3252 __ movq(kScratchRegister, new_value.reg()); | 3253 __ JumpIfNotSmi(new_value.reg(), deferred->entry_label()); |
| 3253 if (is_increment) { | 3254 if (is_increment) { |
| 3254 __ addl(kScratchRegister, Immediate(Smi::FromInt(1))); | 3255 __ SmiAddConstant(kScratchRegister, |
| | 5256 new_value.reg(), |
| | 5257 Smi::FromInt(1), |
| | 5258 deferred->entry_label()); |
| 3255 } else { | 3259 } else { |
| 3256 __ subl(kScratchRegister, Immediate(Smi::FromInt(1))); | 3260 __ SmiSubConstant(kScratchRegister, |
| | 5261 new_value.reg(), |
| | 5262 Smi::FromInt(1), |
| | 5263 deferred->entry_label()); |
| 3257 } | 3264 } |
| 3258 // Smi test. | |
| 3259 deferred->Branch(overflow); | |
| 3260 __ JumpIfNotSmi(kScratchRegister, deferred->entry_label()); | |
| 3261 __ movq(new_value.reg(), kScratchRegister); | 3265 __ movq(new_value.reg(), kScratchRegister); |
| 3262 deferred->BindExit(); | 3266 deferred->BindExit(); |
| 3263 | 3267 |
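The inline fast path is reordered: the smi check now comes first, and the optimistic 32-bit `addl`/`subl` plus after-the-fact overflow test is replaced by checked macros that take the bailout label themselves. A plausible expansion of the checked `SmiAddConstant` variant, purely as an assumption (the real macro lives in the x64 macro assembler, outside this diff):

```cpp
// Assumed shape of SmiAddConstant(dst, src, constant, on_failure).
void MacroAssembler::SmiAddConstant(Register dst, Register src,
                                    Smi* constant, Label* on_failure) {
  Move(kScratchRegister, constant);  // 64-bit tagged constant, not a 32-bit imm
  addq(kScratchRegister, src);       // full-width add keeps tag arithmetic exact
  j(overflow, on_failure);           // bail to the deferred/runtime path
  movq(dst, kScratchRegister);       // src is still intact if we bailed out
}
```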
| 3264 // Postfix: store the old value in the allocated slot under the | 3268 // Postfix: store the old value in the allocated slot under the |
| 3265 // reference. | 3269 // reference. |
| 3266 if (is_postfix) frame_->SetElementAt(target.size(), &old_value); | 3270 if (is_postfix) frame_->SetElementAt(target.size(), &old_value); |
| 3267 | 3271 |
| 3268 frame_->Push(&new_value); | 3272 frame_->Push(&new_value); |
| 3269 // Non-constant: update the reference. | 3273 // Non-constant: update the reference. |
| 3270 if (!is_const) target.SetValue(NOT_CONST_INIT); | 3274 if (!is_const) target.SetValue(NOT_CONST_INIT); |
| (...skipping 356 matching lines...) |
| 3627 | 3631 |
| 3628 void CodeGenerator::GenerateIsConstructCall(ZoneList<Expression*>* args) { | 3632 void CodeGenerator::GenerateIsConstructCall(ZoneList<Expression*>* args) { |
| 3629 ASSERT(args->length() == 0); | 3633 ASSERT(args->length() == 0); |
| 3630 | 3634 |
| 3631 // Get the frame pointer for the calling frame. | 3635 // Get the frame pointer for the calling frame. |
| 3632 Result fp = allocator()->Allocate(); | 3636 Result fp = allocator()->Allocate(); |
| 3633 __ movq(fp.reg(), Operand(rbp, StandardFrameConstants::kCallerFPOffset)); | 3637 __ movq(fp.reg(), Operand(rbp, StandardFrameConstants::kCallerFPOffset)); |
| 3634 | 3638 |
| 3635 // Skip the arguments adaptor frame if it exists. | 3639 // Skip the arguments adaptor frame if it exists. |
| 3636 Label check_frame_marker; | 3640 Label check_frame_marker; |
| 3637 __ cmpq(Operand(fp.reg(), StandardFrameConstants::kContextOffset), | 3641 __ SmiCompare(Operand(fp.reg(), StandardFrameConstants::kContextOffset), |
| 3638 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | 3642 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); |
| 3639 __ j(not_equal, &check_frame_marker); | 3643 __ j(not_equal, &check_frame_marker); |
| 3640 __ movq(fp.reg(), Operand(fp.reg(), StandardFrameConstants::kCallerFPOffset)); | 3644 __ movq(fp.reg(), Operand(fp.reg(), StandardFrameConstants::kCallerFPOffset)); |
| 3641 | 3645 |
| 3642 // Check the marker in the calling frame. | 3646 // Check the marker in the calling frame. |
| 3643 __ bind(&check_frame_marker); | 3647 __ bind(&check_frame_marker); |
| 3644 __ cmpq(Operand(fp.reg(), StandardFrameConstants::kMarkerOffset), | 3648 __ SmiCompare(Operand(fp.reg(), StandardFrameConstants::kMarkerOffset), |
| 3645 Immediate(Smi::FromInt(StackFrame::CONSTRUCT))); | 3649 Smi::FromInt(StackFrame::CONSTRUCT)); |
| 3646 fp.Unuse(); | 3650 fp.Unuse(); |
| 3647 destination()->Split(equal); | 3651 destination()->Split(equal); |
| 3648 } | 3652 } |
| 3649 | 3653 |
| 3650 | 3654 |
| 3651 void CodeGenerator::GenerateArgumentsLength(ZoneList<Expression*>* args) { | 3655 void CodeGenerator::GenerateArgumentsLength(ZoneList<Expression*>* args) { |
| 3652 ASSERT(args->length() == 0); | 3656 ASSERT(args->length() == 0); |
| 3653 // ArgumentsAccessStub takes the parameter count as an input argument | 3657 // ArgumentsAccessStub takes the parameter count as an input argument |
| 3654 // in register rax. Create a constant result for it. | 3658 // in register rax. Create a constant result for it. |
| 3655 Result count(Handle<Smi>(Smi::FromInt(scope_->num_parameters()))); | 3659 Result count(Handle<Smi>(Smi::FromInt(scope_->num_parameters()))); |
| (...skipping 215 matching lines...) |
| 3871 __ cmpq(right.reg(), left.reg()); | 3875 __ cmpq(right.reg(), left.reg()); |
| 3872 right.Unuse(); | 3876 right.Unuse(); |
| 3873 left.Unuse(); | 3877 left.Unuse(); |
| 3874 destination()->Split(equal); | 3878 destination()->Split(equal); |
| 3875 } | 3879 } |
| 3876 | 3880 |
| 3877 | 3881 |
| 3878 void CodeGenerator::GenerateGetFramePointer(ZoneList<Expression*>* args) { | 3882 void CodeGenerator::GenerateGetFramePointer(ZoneList<Expression*>* args) { |
| 3879 ASSERT(args->length() == 0); | 3883 ASSERT(args->length() == 0); |
| 3880 // RBP value is aligned, so it should be tagged as a smi (without necessarily | 3884 // RBP value is aligned, so it should be tagged as a smi (without necessarily |
| 3881 // being padded as a smi). | 3885 // being padded as a smi, so it should not be treated as a smi). |
| 3882 ASSERT(kSmiTag == 0 && kSmiTagSize == 1); | 3886 ASSERT(kSmiTag == 0 && kSmiTagSize == 1); |
| 3883 Result rbp_as_smi = allocator_->Allocate(); | 3887 Result rbp_as_smi = allocator_->Allocate(); |
| 3884 ASSERT(rbp_as_smi.is_valid()); | 3888 ASSERT(rbp_as_smi.is_valid()); |
| 3885 __ movq(rbp_as_smi.reg(), rbp); | 3889 __ movq(rbp_as_smi.reg(), rbp); |
| 3886 frame_->Push(&rbp_as_smi); | 3890 frame_->Push(&rbp_as_smi); |
| 3887 } | 3891 } |
| 3888 | 3892 |
| 3889 | 3893 |
| 3890 void CodeGenerator::GenerateRandomPositiveSmi(ZoneList<Expression*>* args) { | 3894 void CodeGenerator::GenerateRandomPositiveSmi(ZoneList<Expression*>* args) { |
| 3891 ASSERT(args->length() == 0); | 3895 ASSERT(args->length() == 0); |
| (...skipping 363 matching lines...) |
| 4255 | 4259 |
| 4256 // 'true' => true. | 4260 // 'true' => true. |
| 4257 __ CompareRoot(value.reg(), Heap::kTrueValueRootIndex); | 4261 __ CompareRoot(value.reg(), Heap::kTrueValueRootIndex); |
| 4258 dest->true_target()->Branch(equal); | 4262 dest->true_target()->Branch(equal); |
| 4259 | 4263 |
| 4260 // 'undefined' => false. | 4264 // 'undefined' => false. |
| 4261 __ CompareRoot(value.reg(), Heap::kUndefinedValueRootIndex); | 4265 __ CompareRoot(value.reg(), Heap::kUndefinedValueRootIndex); |
| 4262 dest->false_target()->Branch(equal); | 4266 dest->false_target()->Branch(equal); |
| 4263 | 4267 |
| 4264 // Smi => false iff zero. | 4268 // Smi => false iff zero. |
| 4265 Condition equals = masm_->CheckSmiEqualsConstant(value.reg(), 0); | 4269 __ SmiCompare(value.reg(), Smi::FromInt(0)); |
| 4266 dest->false_target()->Branch(equals); | 4270 dest->false_target()->Branch(equal); |
| 4267 Condition is_smi = masm_->CheckSmi(value.reg()); | 4271 Condition is_smi = masm_->CheckSmi(value.reg()); |
| 4268 dest->true_target()->Branch(is_smi); | 4272 dest->true_target()->Branch(is_smi); |
| 4269 | 4273 |
| 4270 // Call the stub for all other cases. | 4274 // Call the stub for all other cases. |
| 4271 frame_->Push(&value); // Undo the Pop() from above. | 4275 frame_->Push(&value); // Undo the Pop() from above. |
| 4272 ToBooleanStub stub; | 4276 ToBooleanStub stub; |
| 4273 Result temp = frame_->CallStub(&stub, 1); | 4277 Result temp = frame_->CallStub(&stub, 1); |
| 4274 // Convert the result to a condition code. | 4278 // Convert the result to a condition code. |
| 4275 __ testq(temp.reg(), temp.reg()); | 4279 __ testq(temp.reg(), temp.reg()); |
| 4276 temp.Unuse(); | 4280 temp.Unuse(); |
| (...skipping 661 matching lines...) |
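The smi handling in ToBoolean above changes from a specialized `CheckSmiEqualsConstant` to a plain `SmiCompare` against the tagged zero; since 'true' and 'undefined' were already ruled out by root compares, a full-width equality test is safe even before the tag check (no heap pointer can equal a tagged zero). The overall branch order, as an illustrative function (types and helpers assumed):

```cpp
// Sketch of the inlined truthiness test; ToBooleanSlow stands for the
// ToBooleanStub fallback and is not a real V8 entry point.
bool ToBooleanInline(Value v) {
  if (v == kTrueValue)      return true;    // 'true' => true
  if (v == kUndefinedValue) return false;   // 'undefined' => false
  if (v == kSmiZero)        return false;   // smi => false iff zero
  if (IsSmi(v))             return true;    // every other smi is truthy
  return ToBooleanSlow(v);                  // heap objects: stub call
}
```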
| 4938 __ testq(result.reg(), result.reg()); | 4942 __ testq(result.reg(), result.reg()); |
| 4939 result.Unuse(); | 4943 result.Unuse(); |
| 4940 dest->true_target()->Branch(cc); | 4944 dest->true_target()->Branch(cc); |
| 4941 dest->false_target()->Jump(); | 4945 dest->false_target()->Jump(); |
| 4942 | 4946 |
| 4943 is_smi.Bind(); | 4947 is_smi.Bind(); |
| 4944 left_side = Result(left_reg); | 4948 left_side = Result(left_reg); |
| 4945 right_side = Result(right_val); | 4949 right_side = Result(right_val); |
| 4946 // Test smi equality and comparison by signed int comparison. | 4950 // Test smi equality and comparison by signed int comparison. |
| 4948 // Both sides are smis, so we can use an Immediate. | 4952 // Both sides are smis, so we can compare them directly. |
| 4948 __ cmpl(left_side.reg(), Immediate(Smi::cast(*right_side.handle()))); | 4952 __ SmiCompare(left_side.reg(), Smi::cast(*right_side.handle())); |
| 4949 left_side.Unuse(); | 4953 left_side.Unuse(); |
| 4950 right_side.Unuse(); | 4954 right_side.Unuse(); |
| 4951 dest->Split(cc); | 4955 dest->Split(cc); |
| 4952 } | 4956 } |
| 4953 } else if (cc == equal && | 4957 } else if (cc == equal && |
| 4954 (left_side_constant_null || right_side_constant_null)) { | 4958 (left_side_constant_null || right_side_constant_null)) { |
| 4955 // To make null checks efficient, we check if either the left side or | 4959 // To make null checks efficient, we check if either the left side or |
| 4956 // the right side is the constant 'null'. | 4960 // the right side is the constant 'null'. |
| 4957 // If so, we optimize the code by inlining a null check instead of | 4961 // If so, we optimize the code by inlining a null check instead of |
| 4958 // calling the (very) general runtime routine for checking equality. | 4962 // calling the (very) general runtime routine for checking equality. |
| (...skipping 12 matching lines...) Expand all Loading... |
| 4971 __ CompareRoot(operand.reg(), Heap::kUndefinedValueRootIndex); | 4975 __ CompareRoot(operand.reg(), Heap::kUndefinedValueRootIndex); |
| 4972 dest->true_target()->Branch(equal); | 4976 dest->true_target()->Branch(equal); |
| 4973 Condition is_smi = masm_->CheckSmi(operand.reg()); | 4977 Condition is_smi = masm_->CheckSmi(operand.reg()); |
| 4974 dest->false_target()->Branch(is_smi); | 4978 dest->false_target()->Branch(is_smi); |
| 4975 | 4979 |
| 4976 // It can be an undetectable object. | 4980 // It can be an undetectable object. |
| 4977 // Use a scratch register in preference to spilling operand.reg(). | 4981 // Use a scratch register in preference to spilling operand.reg(). |
| 4978 Result temp = allocator()->Allocate(); | 4982 Result temp = allocator()->Allocate(); |
| 4979 ASSERT(temp.is_valid()); | 4983 ASSERT(temp.is_valid()); |
| 4980 __ movq(temp.reg(), | 4984 __ movq(temp.reg(), |
| 4981 FieldOperand(operand.reg(), HeapObject::kMapOffset)); | 4985 FieldOperand(operand.reg(), HeapObject::kMapOffset)); |
| 4982 __ testb(FieldOperand(temp.reg(), Map::kBitFieldOffset), | 4986 __ testb(FieldOperand(temp.reg(), Map::kBitFieldOffset), |
| 4983 Immediate(1 << Map::kIsUndetectable)); | 4987 Immediate(1 << Map::kIsUndetectable)); |
| 4984 temp.Unuse(); | 4988 temp.Unuse(); |
| 4985 operand.Unuse(); | 4989 operand.Unuse(); |
| 4986 dest->Split(not_zero); | 4990 dest->Split(not_zero); |
| 4987 } | 4991 } |
| 4988 } else { // Neither side is a constant Smi or null. | 4992 } else { // Neither side is a constant Smi or null. |
| 4989 // If either side is a non-smi constant, skip the smi check. | 4993 // If either side is a non-smi constant, skip the smi check. |
| 4990 bool known_non_smi = | 4994 bool known_non_smi = |
| 4991 (left_side.is_constant() && !left_side.handle()->IsSmi()) || | 4995 (left_side.is_constant() && !left_side.handle()->IsSmi()) || |
| 4992 (right_side.is_constant() && !right_side.handle()->IsSmi()); | 4996 (right_side.is_constant() && !right_side.handle()->IsSmi()); |
| 4993 left_side.ToRegister(); | 4997 left_side.ToRegister(); |
| 4994 right_side.ToRegister(); | 4998 right_side.ToRegister(); |
| 4995 | 4999 |
| 4996 if (known_non_smi) { | 5000 if (known_non_smi) { |
| 4997 // When non-smi, call out to the compare stub. | 5001 // When non-smi, call out to the compare stub. |
| 4998 CompareStub stub(cc, strict); | 5002 CompareStub stub(cc, strict); |
| 4999 Result answer = frame_->CallStub(&stub, &left_side, &right_side); | 5003 Result answer = frame_->CallStub(&stub, &left_side, &right_side); |
| 5000 // The result is a Smi, which is negative, zero, or positive. | 5004 // The result is a Smi, which is negative, zero, or positive. |
| 5001 __ testl(answer.reg(), answer.reg()); // Both zero and sign flag right. | 5005 __ SmiTest(answer.reg()); // Sets both zero and sign flags. |
| 5002 answer.Unuse(); | 5006 answer.Unuse(); |
| 5003 dest->Split(cc); | 5007 dest->Split(cc); |
| 5004 } else { | 5008 } else { |
| 5005 // Here we split control flow to the stub call and inlined cases | 5009 // Here we split control flow to the stub call and inlined cases |
| 5006 // before finally splitting it to the control destination. We use | 5010 // before finally splitting it to the control destination. We use |
| 5007 // a jump target and branching to duplicate the virtual frame at | 5011 // a jump target and branching to duplicate the virtual frame at |
| 5008 // the first split. We manually handle the off-frame references | 5012 // the first split. We manually handle the off-frame references |
| 5009 // by reconstituting them on the non-fall-through path. | 5013 // by reconstituting them on the non-fall-through path. |
| 5010 JumpTarget is_smi; | 5014 JumpTarget is_smi; |
| 5011 Register left_reg = left_side.reg(); | 5015 Register left_reg = left_side.reg(); |
| 5012 Register right_reg = right_side.reg(); | 5016 Register right_reg = right_side.reg(); |
| 5013 | 5017 |
| 5014 Condition both_smi = masm_->CheckBothSmi(left_reg, right_reg); | 5018 Condition both_smi = masm_->CheckBothSmi(left_reg, right_reg); |
| 5015 is_smi.Branch(both_smi); | 5019 is_smi.Branch(both_smi); |
| 5016 // When non-smi, call out to the compare stub. | 5020 // When non-smi, call out to the compare stub. |
| 5017 CompareStub stub(cc, strict); | 5021 CompareStub stub(cc, strict); |
| 5018 Result answer = frame_->CallStub(&stub, &left_side, &right_side); | 5022 Result answer = frame_->CallStub(&stub, &left_side, &right_side); |
| 5019 __ testl(answer.reg(), answer.reg()); // Sets both zero and sign flags. | 5023 __ SmiTest(answer.reg()); // Sets both zero and sign flags. |
| 5020 answer.Unuse(); | 5024 answer.Unuse(); |
| 5021 dest->true_target()->Branch(cc); | 5025 dest->true_target()->Branch(cc); |
| 5022 dest->false_target()->Jump(); | 5026 dest->false_target()->Jump(); |
| 5023 | 5027 |
| 5024 is_smi.Bind(); | 5028 is_smi.Bind(); |
| 5025 left_side = Result(left_reg); | 5029 left_side = Result(left_reg); |
| 5026 right_side = Result(right_reg); | 5030 right_side = Result(right_reg); |
| 5027 __ cmpl(left_side.reg(), right_side.reg()); | 5031 __ SmiCompare(left_side.reg(), right_side.reg()); |
| 5028 right_side.Unuse(); | 5032 right_side.Unuse(); |
| 5029 left_side.Unuse(); | 5033 left_side.Unuse(); |
| 5030 dest->Split(cc); | 5034 dest->Split(cc); |
| 5031 } | 5035 } |
| 5032 } | 5036 } |
| 5033 } | 5037 } |
| 5034 | 5038 |
| 5035 | 5039 |
| 5036 class DeferredInlineBinaryOperation: public DeferredCode { | 5040 class DeferredInlineBinaryOperation: public DeferredCode { |
| 5037 public: | 5041 public: |
| (...skipping 176 matching lines...) |
| 5214 masm_->testl(rax, Immediate(-delta_to_patch_site)); | 5218 masm_->testl(rax, Immediate(-delta_to_patch_site)); |
| 5215 __ IncrementCounter(&Counters::named_load_inline_miss, 1); | 5219 __ IncrementCounter(&Counters::named_load_inline_miss, 1); |
| 5216 | 5220 |
| 5217 if (!dst_.is(rax)) __ movq(dst_, rax); | 5221 if (!dst_.is(rax)) __ movq(dst_, rax); |
| 5218 __ pop(receiver_); | 5222 __ pop(receiver_); |
| 5219 } | 5223 } |
| 5220 | 5224 |
| 5221 | 5225 |
| 5222 void DeferredInlineSmiAdd::Generate() { | 5226 void DeferredInlineSmiAdd::Generate() { |
| 5223 __ push(dst_); | 5227 __ push(dst_); |
| 5224 __ push(Immediate(value_)); | 5228 __ Push(value_); |
| 5225 GenericBinaryOpStub igostub(Token::ADD, overwrite_mode_, SMI_CODE_INLINED); | 5229 GenericBinaryOpStub igostub(Token::ADD, overwrite_mode_, SMI_CODE_INLINED); |
| 5226 __ CallStub(&igostub); | 5230 __ CallStub(&igostub); |
| 5227 if (!dst_.is(rax)) __ movq(dst_, rax); | 5231 if (!dst_.is(rax)) __ movq(dst_, rax); |
| 5228 } | 5232 } |
| 5229 | 5233 |
| 5230 | 5234 |
| 5231 void DeferredInlineSmiAddReversed::Generate() { | 5235 void DeferredInlineSmiAddReversed::Generate() { |
| 5232 __ push(Immediate(value_)); // Note: sign extended. | 5236 __ Push(value_); |
| 5233 __ push(dst_); | 5237 __ push(dst_); |
| 5234 GenericBinaryOpStub igostub(Token::ADD, overwrite_mode_, SMI_CODE_INLINED); | 5238 GenericBinaryOpStub igostub(Token::ADD, overwrite_mode_, SMI_CODE_INLINED); |
| 5235 __ CallStub(&igostub); | 5239 __ CallStub(&igostub); |
| 5236 if (!dst_.is(rax)) __ movq(dst_, rax); | 5240 if (!dst_.is(rax)) __ movq(dst_, rax); |
| 5237 } | 5241 } |
| 5238 | 5242 |
| 5239 | 5243 |
| 5240 void DeferredInlineSmiSub::Generate() { | 5244 void DeferredInlineSmiSub::Generate() { |
| 5241 __ push(dst_); | 5245 __ push(dst_); |
| 5242 __ push(Immediate(value_)); // Note: sign extended. | 5246 __ Push(value_); |
| 5243 GenericBinaryOpStub igostub(Token::SUB, overwrite_mode_, SMI_CODE_INLINED); | 5247 GenericBinaryOpStub igostub(Token::SUB, overwrite_mode_, SMI_CODE_INLINED); |
| 5244 __ CallStub(&igostub); | 5248 __ CallStub(&igostub); |
| 5245 if (!dst_.is(rax)) __ movq(dst_, rax); | 5249 if (!dst_.is(rax)) __ movq(dst_, rax); |
| 5246 } | 5250 } |
| 5247 | 5251 |
| 5248 | 5252 |
| 5249 void DeferredInlineSmiOperation::Generate() { | 5253 void DeferredInlineSmiOperation::Generate() { |
| 5250 __ push(src_); | 5254 __ push(src_); |
| 5251 __ push(Immediate(value_)); // Note: sign extended. | 5255 __ Push(value_); |
| 5252 // For mod we don't generate all the Smi code inline. | 5256 // For mod we don't generate all the Smi code inline. |
| 5253 GenericBinaryOpStub stub( | 5257 GenericBinaryOpStub stub( |
| 5254 op_, | 5258 op_, |
| 5255 overwrite_mode_, | 5259 overwrite_mode_, |
| 5256 (op_ == Token::MOD) ? SMI_CODE_IN_STUB : SMI_CODE_INLINED); | 5260 (op_ == Token::MOD) ? SMI_CODE_IN_STUB : SMI_CODE_INLINED); |
| 5257 __ CallStub(&stub); | 5261 __ CallStub(&stub); |
| 5258 if (!dst_.is(rax)) __ movq(dst_, rax); | 5262 if (!dst_.is(rax)) __ movq(dst_, rax); |
| 5259 } | 5263 } |
| 5260 | 5264 |
| 5261 | 5265 |
| (...skipping 37 matching lines...) |
| 5299 smi_value, | 5303 smi_value, |
| 5300 overwrite_mode); | 5304 overwrite_mode); |
| 5301 } else { | 5305 } else { |
| 5302 deferred = new DeferredInlineSmiAdd(operand->reg(), | 5306 deferred = new DeferredInlineSmiAdd(operand->reg(), |
| 5303 smi_value, | 5307 smi_value, |
| 5304 overwrite_mode); | 5308 overwrite_mode); |
| 5305 } | 5309 } |
| 5306 __ JumpIfNotSmi(operand->reg(), deferred->entry_label()); | 5310 __ JumpIfNotSmi(operand->reg(), deferred->entry_label()); |
| 5307 __ SmiAddConstant(operand->reg(), | 5311 __ SmiAddConstant(operand->reg(), |
| 5308 operand->reg(), | 5312 operand->reg(), |
| 5309 int_value, | 5313 smi_value, |
| 5310 deferred->entry_label()); | 5314 deferred->entry_label()); |
| 5311 deferred->BindExit(); | 5315 deferred->BindExit(); |
| 5312 frame_->Push(operand); | 5316 frame_->Push(operand); |
| 5313 break; | 5317 break; |
| 5314 } | 5318 } |
| 5315 | 5319 |
| 5316 case Token::SUB: { | 5320 case Token::SUB: { |
| 5317 if (reversed) { | 5321 if (reversed) { |
| 5318 Result constant_operand(value); | 5322 Result constant_operand(value); |
| 5319 LikelySmiBinaryOperation(op, &constant_operand, operand, | 5323 LikelySmiBinaryOperation(op, &constant_operand, operand, |
| 5320 overwrite_mode); | 5324 overwrite_mode); |
| 5321 } else { | 5325 } else { |
| 5322 operand->ToRegister(); | 5326 operand->ToRegister(); |
| 5323 frame_->Spill(operand->reg()); | 5327 frame_->Spill(operand->reg()); |
| 5324 DeferredCode* deferred = new DeferredInlineSmiSub(operand->reg(), | 5328 DeferredCode* deferred = new DeferredInlineSmiSub(operand->reg(), |
| 5325 smi_value, | 5329 smi_value, |
| 5326 overwrite_mode); | 5330 overwrite_mode); |
| 5327 __ JumpIfNotSmi(operand->reg(), deferred->entry_label()); | 5331 __ JumpIfNotSmi(operand->reg(), deferred->entry_label()); |
| 5328 // A smi currently fits in a 32-bit Immediate. | 5332 // A smi currently fits in a 32-bit Immediate. |
| 5329 __ SmiSubConstant(operand->reg(), | 5333 __ SmiSubConstant(operand->reg(), |
| 5330 operand->reg(), | 5334 operand->reg(), |
| 5331 int_value, | 5335 smi_value, |
| 5332 deferred->entry_label()); | 5336 deferred->entry_label()); |
| 5333 deferred->BindExit(); | 5337 deferred->BindExit(); |
| 5334 frame_->Push(operand); | 5338 frame_->Push(operand); |
| 5335 } | 5339 } |
| 5336 break; | 5340 break; |
| 5337 } | 5341 } |
| 5338 | 5342 |
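
The SUB case above shows the deferred-code pattern in full: JumpIfNotSmi routes non-smi operands to the deferred stub call, SmiSubConstant bails out to the same label on overflow, and BindExit is where the slow path rejoins. Roughly, as a standalone sketch (the tag test and the overflow builtin stand in for the generated code, under the same encoding assumption as above):

    #include <cstdint>
    #include <optional>

    // nullopt models a jump to deferred->entry_label(); the caller then
    // falls back to the generic stub.
    std::optional<int64_t> SmiSubFast(int64_t a, int64_t b) {
      if (static_cast<uint32_t>(a | b) != 0) return std::nullopt;  // not smis
      int64_t result;
      if (__builtin_sub_overflow(a, b, &result)) return std::nullopt;
      return result;  // difference of two tagged smis is a tagged smi
    }
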
| 5339 case Token::SAR: | 5343 case Token::SAR: |
| 5340 if (reversed) { | 5344 if (reversed) { |
| 5341 Result constant_operand(value); | 5345 Result constant_operand(value); |
| (...skipping 33 matching lines...) |
| 5375 Result answer = allocator()->Allocate(); | 5379 Result answer = allocator()->Allocate(); |
| 5376 ASSERT(answer.is_valid()); | 5380 ASSERT(answer.is_valid()); |
| 5377 DeferredInlineSmiOperation* deferred = | 5381 DeferredInlineSmiOperation* deferred = |
| 5378 new DeferredInlineSmiOperation(op, | 5382 new DeferredInlineSmiOperation(op, |
| 5379 answer.reg(), | 5383 answer.reg(), |
| 5380 operand->reg(), | 5384 operand->reg(), |
| 5381 smi_value, | 5385 smi_value, |
| 5382 overwrite_mode); | 5386 overwrite_mode); |
| 5383 __ JumpIfNotSmi(operand->reg(), deferred->entry_label()); | 5387 __ JumpIfNotSmi(operand->reg(), deferred->entry_label()); |
| 5384 __ SmiShiftLogicalRightConstant(answer.reg(), | 5388 __ SmiShiftLogicalRightConstant(answer.reg(), |
| 5385 operand->reg(), | 5389 operand->reg(), |
| 5386 shift_value, | 5390 shift_value, |
| 5387 deferred->entry_label()); | 5391 deferred->entry_label()); |
| 5388 deferred->BindExit(); | 5392 deferred->BindExit(); |
| 5389 operand->Unuse(); | 5393 operand->Unuse(); |
| 5390 frame_->Push(&answer); | 5394 frame_->Push(&answer); |
| 5391 } | 5395 } |
| 5392 break; | 5396 break; |
| 5393 | 5397 |
| 5394 case Token::SHL: | 5398 case Token::SHL: |
| 5395 if (reversed) { | 5399 if (reversed) { |
| 5396 Result constant_operand(value); | 5400 Result constant_operand(value); |
| 5397 LikelySmiBinaryOperation(op, &constant_operand, operand, | 5401 LikelySmiBinaryOperation(op, &constant_operand, operand, |
| (...skipping 48 matching lines...) |
| 5446 // Swap left and right overwrite modes. 0->0, 1->2, 2->1. | 5450 // Swap left and right overwrite modes. 0->0, 1->2, 2->1. |
| 5447 overwrite_mode = static_cast<OverwriteMode>((2 * overwrite_mode) % 3); | 5451 overwrite_mode = static_cast<OverwriteMode>((2 * overwrite_mode) % 3); |
| 5448 } | 5452 } |
| 5449 DeferredCode* deferred = new DeferredInlineSmiOperation(op, | 5453 DeferredCode* deferred = new DeferredInlineSmiOperation(op, |
| 5450 operand->reg(), | 5454 operand->reg(), |
| 5451 operand->reg(), | 5455 operand->reg(), |
| 5452 smi_value, | 5456 smi_value, |
| 5453 overwrite_mode); | 5457 overwrite_mode); |
| 5454 __ JumpIfNotSmi(operand->reg(), deferred->entry_label()); | 5458 __ JumpIfNotSmi(operand->reg(), deferred->entry_label()); |
| 5455 if (op == Token::BIT_AND) { | 5459 if (op == Token::BIT_AND) { |
| 5456 __ SmiAndConstant(operand->reg(), operand->reg(), int_value); | 5460 __ SmiAndConstant(operand->reg(), operand->reg(), smi_value); |
| 5457 } else if (op == Token::BIT_XOR) { | 5461 } else if (op == Token::BIT_XOR) { |
| 5458 if (int_value != 0) { | 5462 if (int_value != 0) { |
| 5459 __ SmiXorConstant(operand->reg(), operand->reg(), int_value); | 5463 __ SmiXorConstant(operand->reg(), operand->reg(), smi_value); |
| 5460 } | 5464 } |
| 5461 } else { | 5465 } else { |
| 5462 ASSERT(op == Token::BIT_OR); | 5466 ASSERT(op == Token::BIT_OR); |
| 5463 if (int_value != 0) { | 5467 if (int_value != 0) { |
| 5464 __ SmiOrConstant(operand->reg(), operand->reg(), int_value); | 5468 __ SmiOrConstant(operand->reg(), operand->reg(), smi_value); |
| 5465 } | 5469 } |
| 5466 } | 5470 } |
| 5467 deferred->BindExit(); | 5471 deferred->BindExit(); |
| 5468 frame_->Push(operand); | 5472 frame_->Push(operand); |
| 5469 break; | 5473 break; |
| 5470 } | 5474 } |
| 5471 | 5475 |
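
Two details in the bitwise-constant hunk: BIT_XOR and BIT_OR with zero are skipped entirely (they are identities) while BIT_AND always runs, and a reversed constant operand is handled by swapping the overwrite mode rather than the operands, since these operators are commutative. The arithmetic swap is easy to verify (assuming NO_OVERWRITE=0, OVERWRITE_LEFT=1, OVERWRITE_RIGHT=2, as the comment's mapping implies):

    #include <cassert>

    int main() {
      // (2 * m) % 3 maps 0->0, 1->2, 2->1: left and right overwrite
      // modes trade places, NO_OVERWRITE stays put.
      assert((2 * 0) % 3 == 0);
      assert((2 * 1) % 3 == 2);
      assert((2 * 2) % 3 == 1);
    }
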
| 5472 // Generate inline code for mod of powers of 2 and negative powers of 2. | 5476 // Generate inline code for mod of powers of 2 and negative powers of 2. |
| 5473 case Token::MOD: | 5477 case Token::MOD: |
| 5474 if (!reversed && | 5478 if (!reversed && |
| 5475 int_value != 0 && | 5479 int_value != 0 && |
| 5476 (IsPowerOf2(int_value) || IsPowerOf2(-int_value))) { | 5480 (IsPowerOf2(int_value) || IsPowerOf2(-int_value))) { |
| 5477 operand->ToRegister(); | 5481 operand->ToRegister(); |
| 5478 frame_->Spill(operand->reg()); | 5482 frame_->Spill(operand->reg()); |
| 5479 DeferredCode* deferred = new DeferredInlineSmiOperation(op, | 5483 DeferredCode* deferred = |
| 5480 operand->reg(), | 5484 new DeferredInlineSmiOperation(op, |
| 5481 operand->reg(), | 5485 operand->reg(), |
| 5482 smi_value, | 5486 operand->reg(), |
| 5483 overwrite_mode); | 5487 smi_value, |
| 5488 overwrite_mode); |
| 5484 // Check for negative or non-Smi left hand side. | 5489 // Check for negative or non-Smi left hand side. |
| 5485 __ JumpIfNotPositiveSmi(operand->reg(), deferred->entry_label()); | 5490 __ JumpIfNotPositiveSmi(operand->reg(), deferred->entry_label()); |
| 5486 if (int_value < 0) int_value = -int_value; | 5491 if (int_value < 0) int_value = -int_value; |
| 5487 if (int_value == 1) { | 5492 if (int_value == 1) { |
| 5488 __ movl(operand->reg(), Immediate(Smi::FromInt(0))); | 5493 __ Move(operand->reg(), Smi::FromInt(0)); |
| 5489 } else { | 5494 } else { |
| 5490 __ SmiAndConstant(operand->reg(), operand->reg(), int_value - 1); | 5495 __ SmiAndConstant(operand->reg(), |
| 5496 operand->reg(), |
| 5497 Smi::FromInt(int_value - 1)); |
| 5491 } | 5498 } |
| 5492 deferred->BindExit(); | 5499 deferred->BindExit(); |
| 5493 frame_->Push(operand); | 5500 frame_->Push(operand); |
| 5494 break; // This break only applies if we generated code for MOD. | 5501 break; // This break only applies if we generated code for MOD. |
| 5495 } | 5502 } |
| 5496 // Fall through if we did not find a power of 2 on the right hand side! | 5503 // Fall through if we did not find a power of 2 on the right hand side! |
| 5497 // The next case must be the default. | 5504 // The next case must be the default. |
| 5498 | 5505 |
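
The inline MOD path rests on the identity x % d == x & (d - 1) for non-negative x and power-of-two d, and on the divisor's sign being irrelevant once the dividend is known non-negative, which is why the code takes |int_value| and sends negative or non-smi left operands to the deferred case. A standalone check of both facts:

    #include <cassert>

    int main() {
      for (int x = 0; x < 1024; x++) {
        assert(x % 8 == (x & 7));   // power-of-2 modulus is a mask
        assert(x % -8 == (x & 7));  // divisor sign is moot for x >= 0
      }
    }
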
| 5499 default: { | 5506 default: { |
| 5500 Result constant_operand(value); | 5507 Result constant_operand(value); |
| (...skipping 577 matching lines...) |
| 6078 receiver.reg()); | 6085 receiver.reg()); |
| 6079 | 6086 |
| 6080 // Check that the value is a smi if it is not a constant. | 6087 // Check that the value is a smi if it is not a constant. |
| 6081 // We can skip the write barrier for smis and constants. | 6088 // We can skip the write barrier for smis and constants. |
| 6082 if (!value_is_constant) { | 6089 if (!value_is_constant) { |
| 6083 __ JumpIfNotSmi(value.reg(), deferred->entry_label()); | 6090 __ JumpIfNotSmi(value.reg(), deferred->entry_label()); |
| 6084 } | 6091 } |
| 6085 | 6092 |
| 6086 // Check that the key is a non-negative smi. | 6093 // Check that the key is a non-negative smi. |
| 6087 __ JumpIfNotPositiveSmi(key.reg(), deferred->entry_label()); | 6094 __ JumpIfNotPositiveSmi(key.reg(), deferred->entry_label()); |
| 6088 // Ensure that the smi is zero-extended. This is not guaranteed. | |
| 6089 __ movl(key.reg(), key.reg()); | |
| 6090 | 6095 |
| 6091 // Check that the receiver is not a smi. | 6096 // Check that the receiver is not a smi. |
| 6092 __ JumpIfSmi(receiver.reg(), deferred->entry_label()); | 6097 __ JumpIfSmi(receiver.reg(), deferred->entry_label()); |
| 6093 | 6098 |
| 6094 // Check that the receiver is a JSArray. | 6099 // Check that the receiver is a JSArray. |
| 6095 __ CmpObjectType(receiver.reg(), JS_ARRAY_TYPE, kScratchRegister); | 6100 __ CmpObjectType(receiver.reg(), JS_ARRAY_TYPE, kScratchRegister); |
| 6096 deferred->Branch(not_equal); | 6101 deferred->Branch(not_equal); |
| 6097 | 6102 |
| 6098 // Check that the key is within bounds. Both the key and the | 6103 // Check that the key is within bounds. Both the key and the |
| 6099 // length of the JSArray are smis, so compare only low 32 bits. | 6104 // length of the JSArray are smis. |
| 6100 __ cmpl(key.reg(), | 6105 __ SmiCompare(FieldOperand(receiver.reg(), JSArray::kLengthOffset), |
| 6101 FieldOperand(receiver.reg(), JSArray::kLengthOffset)); | 6106 key.reg()); |
| 6102 deferred->Branch(greater_equal); | 6107 deferred->Branch(less_equal); |
| 6103 | 6108 |
| 6104 // Get the elements array from the receiver and check that it | 6109 // Get the elements array from the receiver and check that it |
| 6105 // is a flat array (not a dictionary). | 6110 // is a flat array (not a dictionary). |
| 6106 __ movq(tmp.reg(), | 6111 __ movq(tmp.reg(), |
| 6107 FieldOperand(receiver.reg(), JSObject::kElementsOffset)); | 6112 FieldOperand(receiver.reg(), JSObject::kElementsOffset)); |
| 6108 // Bind the deferred code patch site to be able to locate the | 6113 // Bind the deferred code patch site to be able to locate the |
| 6109 // fixed array map comparison. When debugging, we patch this | 6114 // fixed array map comparison. When debugging, we patch this |
| 6110 // comparison to always fail so that we will hit the IC call | 6115 // comparison to always fail so that we will hit the IC call |
| 6111 // in the deferred code which will allow the debugger to | 6116 // in the deferred code which will allow the debugger to |
| 6112 // break for fast case stores. | 6117 // break for fast case stores. |
| (...skipping 183 matching lines...) |
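
In the keyed-store hunk above, the bounds check moves from cmpl on the low 32 bits to SmiCompare on the whole word, and the explicit zero-extending movl of the key is dropped. Both follow from the assumed encoding: with the payload in the upper half, the low halves of any two smis are equal, so a 32-bit compare can no longer order them. Note also that the compare operands are swapped relative to the old code, so the out-of-bounds branch flips from greater_equal to less_equal. Illustrative check, reusing the hypothetical encoding from earlier:

    #include <cassert>
    #include <cstdint>

    int main() {
      auto smi = [](int32_t v) { return static_cast<int64_t>(v) << 32; };
      // A 32-bit compare of two smis sees only tag bits: 0 == 0 ...
      assert(static_cast<int32_t>(smi(5)) == static_cast<int32_t>(smi(9)));
      // ... while a full-width compare (what SmiCompare presumably does)
      // still orders correctly, since shifting preserves order.
      assert(smi(5) < smi(9));
    }
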
| 6296 return true; | 6301 return true; |
| 6297 } | 6302 } |
| 6298 | 6303 |
| 6299 | 6304 |
| 6300 // End of CodeGenerator implementation. | 6305 // End of CodeGenerator implementation. |
| 6301 | 6306 |
| 6302 void UnarySubStub::Generate(MacroAssembler* masm) { | 6307 void UnarySubStub::Generate(MacroAssembler* masm) { |
| 6303 Label slow; | 6308 Label slow; |
| 6304 Label done; | 6309 Label done; |
| 6305 Label try_float; | 6310 Label try_float; |
| 6306 Label special; | |
| 6307 // Check whether the value is a smi. | 6311 // Check whether the value is a smi. |
| 6308 __ JumpIfNotSmi(rax, &try_float); | 6312 __ JumpIfNotSmi(rax, &try_float); |
| 6309 | 6313 |
| 6310 // Enter runtime system if the value of the smi is zero | 6314 // Enter runtime system if the value of the smi is zero |
| 6311 // to make sure that we switch between 0 and -0. | 6315 // to make sure that we switch between 0 and -0. |
| 6312 // Also enter it if the value of the smi is Smi::kMinValue | 6316 // Also enter it if the value of the smi is Smi::kMinValue. |
| 6313 __ testl(rax, Immediate(0x7FFFFFFE)); | 6317 __ SmiNeg(rax, rax, &done); |
| 6314 __ j(zero, &special); | |
| 6315 __ negl(rax); | |
| 6316 __ jmp(&done); | |
| 6317 | 6318 |
| 6318 __ bind(&special); | 6319 // Either zero or Smi::kMinValue, neither of which becomes a smi when negated. |
| 6319 // Either zero or -0x4000000, neither of which become a smi when negated. | 6320 __ SmiCompare(rax, Smi::FromInt(0)); |
| 6320 __ testl(rax, rax); | 6321 __ j(not_equal, &slow); |
| 6321 __ j(not_zero, &slow); | |
| 6322 __ Move(rax, Factory::minus_zero_value()); | 6322 __ Move(rax, Factory::minus_zero_value()); |
| 6323 __ jmp(&done); | 6323 __ jmp(&done); |
| 6324 | 6324 |
| 6325 // Enter runtime system. | 6325 // Enter runtime system. |
| 6326 __ bind(&slow); | 6326 __ bind(&slow); |
| 6327 __ pop(rcx); // pop return address | 6327 __ pop(rcx); // pop return address |
| 6328 __ push(rax); | 6328 __ push(rax); |
| 6329 __ push(rcx); // push return address | 6329 __ push(rcx); // push return address |
| 6330 __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION); | 6330 __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION); |
| 6331 __ jmp(&done); | 6331 __ jmp(&done); |
| (...skipping 131 matching lines...) |
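
UnarySubStub's fast path now folds the special cases into SmiNeg: the macro presumably jumps to &done only when negation yields a valid smi, and falls through for the two inputs that cannot be negated in smi space, zero (whose negation is the heap number -0) and Smi::kMinValue (whose negation overflows the range). A sketch of the predicate, with kMinValue as a placeholder for the real constant:

    #include <cstdint>
    #include <optional>

    // nullopt models falling through to the zero/kMinValue handling
    // below the SmiNeg macro.
    std::optional<int32_t> SmiNegFast(int32_t v, int32_t kMinValue) {
      if (v == 0) return std::nullopt;          // -0 is not a smi
      if (v == kMinValue) return std::nullopt;  // -kMinValue overflows
      return -v;
    }
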
| 6463 // Push arguments below the return address to prepare jump to builtin. | 6463 // Push arguments below the return address to prepare jump to builtin. |
| 6464 __ pop(rcx); | 6464 __ pop(rcx); |
| 6465 __ push(rax); | 6465 __ push(rax); |
| 6466 __ push(rdx); | 6466 __ push(rdx); |
| 6467 __ push(rcx); | 6467 __ push(rcx); |
| 6468 | 6468 |
| 6469 // Inlined floating point compare. | 6469 // Inlined floating point compare. |
| 6470 // Call builtin if operands are not floating point or smi. | 6470 // Call builtin if operands are not floating point or smi. |
| 6471 Label check_for_symbols; | 6471 Label check_for_symbols; |
| 6472 // Push arguments on stack, for helper functions. | 6472 // Push arguments on stack, for helper functions. |
| 6473 FloatingPointHelper::CheckFloatOperands(masm, &check_for_symbols); | 6473 FloatingPointHelper::CheckNumberOperands(masm, &check_for_symbols); |
| 6474 FloatingPointHelper::LoadFloatOperands(masm, rax, rdx); | 6474 FloatingPointHelper::LoadFloatOperands(masm, rax, rdx); |
| 6475 __ FCmp(); | 6475 __ FCmp(); |
| 6476 | 6476 |
| 6477 // Jump to builtin for NaN. | 6477 // Jump to builtin for NaN. |
| 6478 __ j(parity_even, &call_builtin); | 6478 __ j(parity_even, &call_builtin); |
| 6479 | 6479 |
| 6480 // TODO(1243847): Use cmov below once CpuFeatures are properly hooked up. | 6480 // TODO(1243847): Use cmov below once CpuFeatures are properly hooked up. |
| 6481 Label below_lbl, above_lbl; | 6481 Label below_lbl, above_lbl; |
| 6482 // use rdx, rax to convert unsigned to signed comparison | 6482 // use rdx, rax to convert unsigned to signed comparison |
| 6483 __ j(below, &below_lbl); | 6483 __ j(below, &below_lbl); |
| (...skipping 36 matching lines...) |
| 6520 builtin = strict_ ? Builtins::STRICT_EQUALS : Builtins::EQUALS; | 6520 builtin = strict_ ? Builtins::STRICT_EQUALS : Builtins::EQUALS; |
| 6521 } else { | 6521 } else { |
| 6522 builtin = Builtins::COMPARE; | 6522 builtin = Builtins::COMPARE; |
| 6523 int ncr; // NaN compare result | 6523 int ncr; // NaN compare result |
| 6524 if (cc_ == less || cc_ == less_equal) { | 6524 if (cc_ == less || cc_ == less_equal) { |
| 6525 ncr = GREATER; | 6525 ncr = GREATER; |
| 6526 } else { | 6526 } else { |
| 6527 ASSERT(cc_ == greater || cc_ == greater_equal); // remaining cases | 6527 ASSERT(cc_ == greater || cc_ == greater_equal); // remaining cases |
| 6528 ncr = LESS; | 6528 ncr = LESS; |
| 6529 } | 6529 } |
| 6530 __ push(Immediate(Smi::FromInt(ncr))); | 6530 __ Push(Smi::FromInt(ncr)); |
| 6531 } | 6531 } |
| 6532 | 6532 |
| 6533 // Restore return address on the stack. | 6533 // Restore return address on the stack. |
| 6534 __ push(rcx); | 6534 __ push(rcx); |
| 6535 | 6535 |
| 6536 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) | 6536 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) |
| 6537 // tagged as a small integer. | 6537 // tagged as a small integer. |
| 6538 __ InvokeBuiltin(builtin, JUMP_FUNCTION); | 6538 __ InvokeBuiltin(builtin, JUMP_FUNCTION); |
| 6539 } | 6539 } |
| 6540 | 6540 |
| (...skipping 78 matching lines...) |
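
The pushed "ncr" (NaN compare result) bakes IEEE semantics into the generic COMPARE builtin: every relational comparison involving NaN must come out false, so the builtin is told to report GREATER when the caller will test for less/less_equal, and LESS for the mirrored cases. A compact way to see why this works, given that the builtin returns -1, 0, or 1 as documented above:

    #include <cassert>

    enum CompareResult { LESS = -1, EQUAL = 0, GREATER = 1 };

    int main() {
      // With a NaN operand the builtin returns the pushed ncr value.
      CompareResult ncr_for_less = GREATER;   // cc_ is less/less_equal
      CompareResult ncr_for_greater = LESS;   // cc_ is greater/greater_equal
      assert(!(ncr_for_less < 0));            // "NaN < x" evaluates false
      assert(!(ncr_for_greater > 0));         // "NaN > x" evaluates false
    }
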
| 6619 __ j(equal, &is_not_instance); | 6619 __ j(equal, &is_not_instance); |
| 6620 __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset)); | 6620 __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset)); |
| 6621 __ movq(rcx, FieldOperand(rcx, Map::kPrototypeOffset)); | 6621 __ movq(rcx, FieldOperand(rcx, Map::kPrototypeOffset)); |
| 6622 __ jmp(&loop); | 6622 __ jmp(&loop); |
| 6623 | 6623 |
| 6624 __ bind(&is_instance); | 6624 __ bind(&is_instance); |
| 6625 __ xor_(rax, rax); | 6625 __ xor_(rax, rax); |
| 6626 __ ret(2 * kPointerSize); | 6626 __ ret(2 * kPointerSize); |
| 6627 | 6627 |
| 6628 __ bind(&is_not_instance); | 6628 __ bind(&is_not_instance); |
| 6629 __ movq(rax, Immediate(Smi::FromInt(1))); | 6629 __ Move(rax, Smi::FromInt(1)); |
| 6630 __ ret(2 * kPointerSize); | 6630 __ ret(2 * kPointerSize); |
| 6631 | 6631 |
| 6632 // Slow-case: Go through the JavaScript implementation. | 6632 // Slow-case: Go through the JavaScript implementation. |
| 6633 __ bind(&slow); | 6633 __ bind(&slow); |
| 6634 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION); | 6634 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION); |
| 6635 } | 6635 } |
| 6636 | 6636 |
| 6637 | 6637 |
| 6638 void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) { | 6638 void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) { |
| 6639 // The displacement is used for skipping the return address and the | 6639 // The displacement is used for skipping the return address and the |
| 6640 // frame pointer on the stack. It is the offset of the last | 6640 // frame pointer on the stack. It is the offset of the last |
| 6641 // parameter (if any) relative to the frame pointer. | 6641 // parameter (if any) relative to the frame pointer. |
| 6642 static const int kDisplacement = 2 * kPointerSize; | 6642 static const int kDisplacement = 2 * kPointerSize; |
| 6643 | 6643 |
| 6644 // Check if the calling frame is an arguments adaptor frame. | 6644 // Check if the calling frame is an arguments adaptor frame. |
| 6645 Label runtime; | 6645 Label runtime; |
| 6646 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); | 6646 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); |
| 6647 __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset)); | 6647 __ SmiCompare(Operand(rdx, StandardFrameConstants::kContextOffset), |
| 6648 __ cmpq(rcx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | 6648 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); |
| 6649 __ j(not_equal, &runtime); | 6649 __ j(not_equal, &runtime); |
| 6650 // Value in rcx is Smi encoded. | 6650 // Value in rcx is Smi encoded. |
| 6651 | 6651 |
| 6652 // Patch the arguments.length and the parameters pointer. | 6652 // Patch the arguments.length and the parameters pointer. |
| 6653 __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset)); | 6653 __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
| 6654 __ movq(Operand(rsp, 1 * kPointerSize), rcx); | 6654 __ movq(Operand(rsp, 1 * kPointerSize), rcx); |
| 6655 SmiIndex index = masm->SmiToIndex(rcx, rcx, kPointerSizeLog2); | 6655 SmiIndex index = masm->SmiToIndex(rcx, rcx, kPointerSizeLog2); |
| 6656 __ lea(rdx, Operand(rdx, index.reg, index.scale, kDisplacement)); | 6656 __ lea(rdx, Operand(rdx, index.reg, index.scale, kDisplacement)); |
| 6657 __ movq(Operand(rsp, 2 * kPointerSize), rdx); | 6657 __ movq(Operand(rsp, 2 * kPointerSize), rdx); |
| 6658 | 6658 |
| (...skipping 12 matching lines...) |
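
All three ArgumentsAccessStub generators open with the same frame walk: load the caller's frame pointer, then match the caller frame's context slot against the smi ARGUMENTS_ADAPTOR marker, since adaptor frames store a frame-type marker where ordinary frames store a context. The switch from cmpq-with-Immediate to SmiCompare is again the 64-bit smi constant problem. A structural sketch with placeholder fields (the real offsets come from StandardFrameConstants):

    #include <cstdint>

    struct Frame {
      Frame* caller_fp;      // saved frame pointer (rbp chain)
      int64_t context_slot;  // a context, or a smi frame-type marker
    };

    // Models the movq + SmiCompare + j(equal, &adaptor) sequence.
    bool CalledThroughAdaptor(const Frame* fp, int64_t adaptor_marker) {
      return fp->caller_fp->context_slot == adaptor_marker;
    }
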
| 6671 // to the frame pointer. | 6671 // to the frame pointer. |
| 6672 static const int kDisplacement = 1 * kPointerSize; | 6672 static const int kDisplacement = 1 * kPointerSize; |
| 6673 | 6673 |
| 6674 // Check that the key is a smi. | 6674 // Check that the key is a smi. |
| 6675 Label slow; | 6675 Label slow; |
| 6676 __ JumpIfNotSmi(rdx, &slow); | 6676 __ JumpIfNotSmi(rdx, &slow); |
| 6677 | 6677 |
| 6678 // Check if the calling frame is an arguments adaptor frame. | 6678 // Check if the calling frame is an arguments adaptor frame. |
| 6679 Label adaptor; | 6679 Label adaptor; |
| 6680 __ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); | 6680 __ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); |
| 6681 __ movq(rcx, Operand(rbx, StandardFrameConstants::kContextOffset)); | 6681 __ SmiCompare(Operand(rbx, StandardFrameConstants::kContextOffset), |
| 6682 __ cmpq(rcx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | 6682 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); |
| 6683 __ j(equal, &adaptor); | 6683 __ j(equal, &adaptor); |
| 6684 | 6684 |
| 6685 // Check index against formal parameters count limit passed in | 6685 // Check index against formal parameters count limit passed in |
| 6686 // through register rax. Use unsigned comparison to get negative | 6686 // through register rax. Use unsigned comparison to get negative |
| 6687 // check for free. | 6687 // check for free. |
| 6688 __ cmpq(rdx, rax); | 6688 __ cmpq(rdx, rax); |
| 6689 __ j(above_equal, &slow); | 6689 __ j(above_equal, &slow); |
| 6690 | 6690 |
| 6691 // Read the argument from the stack and return it. | 6691 // Read the argument from the stack and return it. |
| 6692 SmiIndex index = masm->SmiToIndex(rax, rax, kPointerSizeLog2); | 6692 SmiIndex index = masm->SmiToIndex(rax, rax, kPointerSizeLog2); |
| (...skipping 26 matching lines...) |
| 6719 Runtime::Function* f = | 6719 Runtime::Function* f = |
| 6720 Runtime::FunctionForId(Runtime::kGetArgumentsProperty); | 6720 Runtime::FunctionForId(Runtime::kGetArgumentsProperty); |
| 6721 __ TailCallRuntime(ExternalReference(f), 1, f->result_size); | 6721 __ TailCallRuntime(ExternalReference(f), 1, f->result_size); |
| 6722 } | 6722 } |
| 6723 | 6723 |
| 6724 | 6724 |
| 6725 void ArgumentsAccessStub::GenerateReadLength(MacroAssembler* masm) { | 6725 void ArgumentsAccessStub::GenerateReadLength(MacroAssembler* masm) { |
| 6726 // Check if the calling frame is an arguments adaptor frame. | 6726 // Check if the calling frame is an arguments adaptor frame. |
| 6727 Label adaptor; | 6727 Label adaptor; |
| 6728 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); | 6728 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); |
| 6729 __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset)); | 6729 __ SmiCompare(Operand(rdx, StandardFrameConstants::kContextOffset), |
| 6730 __ cmpq(rcx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | 6730 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); |
| 6731 __ j(equal, &adaptor); | 6731 __ j(equal, &adaptor); |
| 6732 | 6732 |
| 6733 // Nothing to do: The formal number of parameters has already been | 6733 // Nothing to do: The formal number of parameters has already been |
| 6734 // passed in register rax by the calling function. Just return it. | 6734 // passed in register rax by the calling function. Just return it. |
| 6735 __ ret(0); | 6735 __ ret(0); |
| 6736 | 6736 |
| 6737 // Arguments adaptor case: Read the arguments length from the | 6737 // Arguments adaptor case: Read the arguments length from the |
| 6738 // adaptor frame and return it. | 6738 // adaptor frame and return it. |
| 6739 __ bind(&adaptor); | 6739 __ bind(&adaptor); |
| 6740 __ movq(rax, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset)); | 6740 __ movq(rax, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
| (...skipping 321 matching lines...) |
| 7062 #ifdef ENABLE_LOGGING_AND_PROFILING | 7062 #ifdef ENABLE_LOGGING_AND_PROFILING |
| 7063 Label not_outermost_js, not_outermost_js_2; | 7063 Label not_outermost_js, not_outermost_js_2; |
| 7064 #endif | 7064 #endif |
| 7065 | 7065 |
| 7066 // Setup frame. | 7066 // Setup frame. |
| 7067 __ push(rbp); | 7067 __ push(rbp); |
| 7068 __ movq(rbp, rsp); | 7068 __ movq(rbp, rsp); |
| 7069 | 7069 |
| 7070 // Push the stack frame type marker twice. | 7070 // Push the stack frame type marker twice. |
| 7071 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY; | 7071 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY; |
| 7072 __ push(Immediate(Smi::FromInt(marker))); // context slot | 7072 __ Push(Smi::FromInt(marker)); // context slot |
| 7073 __ push(Immediate(Smi::FromInt(marker))); // function slot | 7073 __ Push(Smi::FromInt(marker)); // function slot |
| 7074 // Save callee-saved registers (X64 calling conventions). | 7074 // Save callee-saved registers (X64 calling conventions). |
| 7075 __ push(r12); | 7075 __ push(r12); |
| 7076 __ push(r13); | 7076 __ push(r13); |
| 7077 __ push(r14); | 7077 __ push(r14); |
| 7078 __ push(r15); | 7078 __ push(r15); |
| 7079 __ push(rdi); | 7079 __ push(rdi); |
| 7080 __ push(rsi); | 7080 __ push(rsi); |
| 7081 __ push(rbx); | 7081 __ push(rbx); |
| 7082 // TODO(X64): Push XMM6-XMM15 (low 64 bits) as well, or make them | 7082 // TODO(X64): Push XMM6-XMM15 (low 64 bits) as well, or make them |
| 7083 // callee-save in JS code as well. | 7083 // callee-save in JS code as well. |
| (...skipping 91 matching lines...) |
| 7175 // Implementation of stubs. | 7175 // Implementation of stubs. |
| 7176 | 7176 |
| 7177 // Stub classes have public member named masm, not masm_. | 7177 // Stub classes have public member named masm, not masm_. |
| 7178 | 7178 |
| 7179 void StackCheckStub::Generate(MacroAssembler* masm) { | 7179 void StackCheckStub::Generate(MacroAssembler* masm) { |
| 7180 // Because builtins always remove the receiver from the stack, we | 7180 // Because builtins always remove the receiver from the stack, we |
| 7181 // have to fake one to avoid underflowing the stack. The receiver | 7181 // have to fake one to avoid underflowing the stack. The receiver |
| 7182 // must be inserted below the return address on the stack so we | 7182 // must be inserted below the return address on the stack so we |
| 7183 // temporarily store that in a register. | 7183 // temporarily store that in a register. |
| 7184 __ pop(rax); | 7184 __ pop(rax); |
| 7185 __ push(Immediate(Smi::FromInt(0))); | 7185 __ Push(Smi::FromInt(0)); |
| 7186 __ push(rax); | 7186 __ push(rax); |
| 7187 | 7187 |
| 7188 // Do tail-call to runtime routine. | 7188 // Do tail-call to runtime routine. |
| 7189 Runtime::Function* f = Runtime::FunctionForId(Runtime::kStackGuard); | 7189 Runtime::Function* f = Runtime::FunctionForId(Runtime::kStackGuard); |
| 7190 __ TailCallRuntime(ExternalReference(f), 1, f->result_size); | 7190 __ TailCallRuntime(ExternalReference(f), 1, f->result_size); |
| 7191 } | 7191 } |
| 7192 | 7192 |
| 7193 | 7193 |
| 7194 void FloatingPointHelper::AllocateHeapNumber(MacroAssembler* masm, | 7194 void FloatingPointHelper::AllocateHeapNumber(MacroAssembler* masm, |
| 7195 Label* need_gc, | 7195 Label* need_gc, |
| (...skipping 118 matching lines...) |
| 7314 __ bind(&load_smi_rhs); | 7314 __ bind(&load_smi_rhs); |
| 7315 __ SmiToInteger64(kScratchRegister, rhs); | 7315 __ SmiToInteger64(kScratchRegister, rhs); |
| 7316 __ push(kScratchRegister); | 7316 __ push(kScratchRegister); |
| 7317 __ fild_d(Operand(rsp, 0)); | 7317 __ fild_d(Operand(rsp, 0)); |
| 7318 __ pop(kScratchRegister); | 7318 __ pop(kScratchRegister); |
| 7319 | 7319 |
| 7320 __ bind(&done); | 7320 __ bind(&done); |
| 7321 } | 7321 } |
| 7322 | 7322 |
| 7323 | 7323 |
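
The smi arm of LoadFloatOperands untags to a full 64-bit integer and lets the x87 unit do the widening: fild_d loads a 64-bit signed integer from memory onto the FPU stack, which is why the value takes a round trip through a stack slot. Scalar equivalent (the shift-by-32 untagging is this sketch's assumption):

    #include <cstdint>

    double SmiToDouble(int64_t smi) {
      int64_t untagged = smi >> 32;          // SmiToInteger64
      return static_cast<double>(untagged);  // what fild_d computes
    }
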
| 7324 void FloatingPointHelper::CheckFloatOperands(MacroAssembler* masm, | 7324 void FloatingPointHelper::CheckNumberOperands(MacroAssembler* masm, |
| 7325 Label* non_float) { | 7325 Label* non_float) { |
| 7326 Label test_other, done; | 7326 Label test_other, done; |
| 7327 // Test if both operands are numbers (heap_numbers or smis). | 7327 // Test if both operands are numbers (heap_numbers or smis). |
| 7328 // If not, jump to label non_float. | 7328 // If not, jump to label non_float. |
| 7329 __ JumpIfSmi(rdx, &test_other); // argument in rdx is OK | 7329 __ JumpIfSmi(rdx, &test_other); // argument in rdx is OK |
| 7330 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset), Factory::heap_number_map()); | 7330 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset), Factory::heap_number_map()); |
| 7331 __ j(not_equal, non_float); // The argument in rdx is not a number. | 7331 __ j(not_equal, non_float); // The argument in rdx is not a number. |
| 7332 | 7332 |
| 7333 __ bind(&test_other); | 7333 __ bind(&test_other); |
| 7334 __ JumpIfSmi(rax, &done); // argument in rax is OK | 7334 __ JumpIfSmi(rax, &done); // argument in rax is OK |
| 7335 __ Cmp(FieldOperand(rax, HeapObject::kMapOffset), Factory::heap_number_map()); | 7335 __ Cmp(FieldOperand(rax, HeapObject::kMapOffset), Factory::heap_number_map()); |
| (...skipping 60 matching lines...) |
| 7396 break; | 7396 break; |
| 7397 | 7397 |
| 7398 case Token::BIT_XOR: | 7398 case Token::BIT_XOR: |
| 7399 __ SmiXor(rax, rax, rbx); | 7399 __ SmiXor(rax, rax, rbx); |
| 7400 break; | 7400 break; |
| 7401 | 7401 |
| 7402 case Token::SHL: | 7402 case Token::SHL: |
| 7403 case Token::SHR: | 7403 case Token::SHR: |
| 7404 case Token::SAR: | 7404 case Token::SAR: |
| 7405 // Move the second operand into register ecx. | 7405 // Move the second operand into register rcx. |
| 7406 __ movl(rcx, rbx); | 7406 __ movq(rcx, rbx); |
| 7407 // Perform the operation. | 7407 // Perform the operation. |
| 7408 switch (op_) { | 7408 switch (op_) { |
| 7409 case Token::SAR: | 7409 case Token::SAR: |
| 7410 __ SmiShiftArithmeticRight(rax, rax, rbx); | 7410 __ SmiShiftArithmeticRight(rax, rax, rcx); |
| 7411 break; | 7411 break; |
| 7412 case Token::SHR: | 7412 case Token::SHR: |
| 7413 __ SmiShiftLogicalRight(rax, rax, rbx, slow); | 7413 __ SmiShiftLogicalRight(rax, rax, rcx, slow); |
| 7414 break; | 7414 break; |
| 7415 case Token::SHL: | 7415 case Token::SHL: |
| 7416 __ SmiShiftLeft(rax, rax, rbx, slow); | 7416 __ SmiShiftLeft(rax, rax, rcx, slow); |
| 7417 break; | 7417 break; |
| 7418 default: | 7418 default: |
| 7419 UNREACHABLE(); | 7419 UNREACHABLE(); |
| 7420 } | 7420 } |
| 7421 break; | 7421 break; |
| 7422 | 7422 |
| 7423 default: | 7423 default: |
| 7424 UNREACHABLE(); | 7424 UNREACHABLE(); |
| 7425 break; | 7425 break; |
| 7426 } | 7426 } |
| (...skipping 20 matching lines...) |
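
In the shift cases of GenerateSmiCode, the count now travels through rcx rather than rbx, matching the x86-64 rule that variable shifts take their count in cl. The asymmetry among the macros, where SHR and SHL get a slow-path label but SAR does not, follows from the result ranges: an arithmetic right shift of a smi payload is always a smi, while a logical right shift can turn a negative payload into a large unsigned value (and a left shift can overflow). Scalar illustration:

    #include <cassert>
    #include <cstdint>

    int main() {
      int32_t x = -8;
      // SAR keeps the sign and can only shrink the magnitude, so it
      // never leaves the smi payload range.
      assert((x >> 1) == -4);
      // SHR shifts in zero bits, so a negative payload becomes a large
      // unsigned value; that is the case needing the slow path.
      assert((static_cast<uint32_t>(x) >> 1) == 0x7FFFFFFCu);
    }
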
| 7447 __ movq(rdx, Operand(rsp, 2 * kPointerSize)); // get x | 7447 __ movq(rdx, Operand(rsp, 2 * kPointerSize)); // get x |
| 7448 | 7448 |
| 7449 // Floating point case. | 7449 // Floating point case. |
| 7450 switch (op_) { | 7450 switch (op_) { |
| 7451 case Token::ADD: | 7451 case Token::ADD: |
| 7452 case Token::SUB: | 7452 case Token::SUB: |
| 7453 case Token::MUL: | 7453 case Token::MUL: |
| 7454 case Token::DIV: { | 7454 case Token::DIV: { |
| 7455 // rax: y | 7455 // rax: y |
| 7456 // rdx: x | 7456 // rdx: x |
| 7457 FloatingPointHelper::CheckFloatOperands(masm, &call_runtime); | 7457 FloatingPointHelper::CheckNumberOperands(masm, &call_runtime); |
| 7458 // Fast-case: Both operands are numbers. | 7458 // Fast-case: Both operands are numbers. |
| 7459 // Allocate a heap number, if needed. | 7459 // Allocate a heap number, if needed. |
| 7460 Label skip_allocation; | 7460 Label skip_allocation; |
| 7461 switch (mode_) { | 7461 switch (mode_) { |
| 7462 case OVERWRITE_LEFT: | 7462 case OVERWRITE_LEFT: |
| 7463 __ movq(rax, rdx); | 7463 __ movq(rax, rdx); |
| 7464 // Fall through! | 7464 // Fall through! |
| 7465 case OVERWRITE_RIGHT: | 7465 case OVERWRITE_RIGHT: |
| 7466 // If the argument in rax is already an object, we skip the | 7466 // If the argument in rax is already an object, we skip the |
| 7467 // allocation of a heap number. | 7467 // allocation of a heap number. |
| (...skipping 24 matching lines...) |
| 7492 case Token::MOD: { | 7492 case Token::MOD: { |
| 7493 // For MOD we go directly to runtime in the non-smi case. | 7493 // For MOD we go directly to runtime in the non-smi case. |
| 7494 break; | 7494 break; |
| 7495 } | 7495 } |
| 7496 case Token::BIT_OR: | 7496 case Token::BIT_OR: |
| 7497 case Token::BIT_AND: | 7497 case Token::BIT_AND: |
| 7498 case Token::BIT_XOR: | 7498 case Token::BIT_XOR: |
| 7499 case Token::SAR: | 7499 case Token::SAR: |
| 7500 case Token::SHL: | 7500 case Token::SHL: |
| 7501 case Token::SHR: { | 7501 case Token::SHR: { |
| 7502 FloatingPointHelper::CheckFloatOperands(masm, &call_runtime); | 7502 FloatingPointHelper::CheckNumberOperands(masm, &call_runtime); |
| 7503 // TODO(X64): Don't convert a Smi to float and then back to int32 | 7503 // TODO(X64): Don't convert a Smi to float and then back to int32 |
| 7504 // afterwards. | 7504 // afterwards. |
| 7505 FloatingPointHelper::LoadFloatOperands(masm); | 7505 FloatingPointHelper::LoadFloatOperands(masm); |
| 7506 | 7506 |
| 7507 Label skip_allocation, non_smi_result, operand_conversion_failure; | 7507 Label skip_allocation, non_smi_result, operand_conversion_failure; |
| 7508 | 7508 |
| 7509 // Reserve space for converted numbers. | 7509 // Reserve space for converted numbers. |
| 7510 __ subq(rsp, Immediate(2 * kPointerSize)); | 7510 __ subq(rsp, Immediate(2 * kPointerSize)); |
| 7511 | 7511 |
| 7512 if (use_sse3_) { | 7512 if (use_sse3_) { |
| (...skipping 33 matching lines...) |
| 7546 __ and_(rax, Immediate(0x4400)); | 7546 __ and_(rax, Immediate(0x4400)); |
| 7547 __ cmpl(rax, Immediate(0x4000)); | 7547 __ cmpl(rax, Immediate(0x4000)); |
| 7548 __ j(not_zero, &operand_conversion_failure); | 7548 __ j(not_zero, &operand_conversion_failure); |
| 7549 } | 7549 } |
| 7550 } | 7550 } |
| 7551 | 7551 |
| 7552 // Get int32 operands and perform bitop. | 7552 // Get int32 operands and perform bitop. |
| 7553 __ pop(rcx); | 7553 __ pop(rcx); |
| 7554 __ pop(rax); | 7554 __ pop(rax); |
| 7555 switch (op_) { | 7555 switch (op_) { |
| 7556 case Token::BIT_OR: __ or_(rax, rcx); break; | 7556 case Token::BIT_OR: __ orl(rax, rcx); break; |
| 7557 case Token::BIT_AND: __ and_(rax, rcx); break; | 7557 case Token::BIT_AND: __ andl(rax, rcx); break; |
| 7558 case Token::BIT_XOR: __ xor_(rax, rcx); break; | 7558 case Token::BIT_XOR: __ xorl(rax, rcx); break; |
| 7559 case Token::SAR: __ sarl(rax); break; | 7559 case Token::SAR: __ sarl(rax); break; |
| 7560 case Token::SHL: __ shll(rax); break; | 7560 case Token::SHL: __ shll(rax); break; |
| 7561 case Token::SHR: __ shrl(rax); break; | 7561 case Token::SHR: __ shrl(rax); break; |
| 7562 default: UNREACHABLE(); | 7562 default: UNREACHABLE(); |
| 7563 } | 7563 } |
| 7564 if (op_ == Token::SHR) { | 7564 if (op_ == Token::SHR) { |
| 7565 // Check if result is non-negative and fits in a smi. | 7565 // Check that the result is non-negative. A negative result can only |
| 7566 __ testl(rax, Immediate(0xc0000000)); | 7566 // come from a shift by zero, which also leaves the sign flag stale. |
| 7567 __ j(not_zero, &non_smi_result); | 7567 __ testl(rax, rax); |
| 7568 } else { | |
| 7569 // Check if result fits in a smi. | |
| 7570 __ cmpl(rax, Immediate(0xc0000000)); | |
| 7571 __ j(negative, &non_smi_result); | 7568 __ j(negative, &non_smi_result); |
| 7572 } | 7569 } |
| 7573 // Tag smi result and return. | 7570 __ JumpIfNotValidSmiValue(rax, &non_smi_result); |
| 7571 // Tag smi result, if possible, and return. |
| 7574 __ Integer32ToSmi(rax, rax); | 7572 __ Integer32ToSmi(rax, rax); |
| 7575 __ ret(2 * kPointerSize); | 7573 __ ret(2 * kPointerSize); |
| 7576 | 7574 |
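
The rewritten tail above replaces the hard-coded 0xc0000000 bit tests with JumpIfNotValidSmiValue, so the code stays correct whatever the smi payload width is; only SHR keeps an extra sign test, its result being conceptually unsigned. A sketch of the range predicate the macro presumably implements:

    #include <cassert>
    #include <cstdint>

    // A 32-bit result fits if it lies in the signed range of the payload.
    static bool FitsInSmi(int32_t value, int payload_bits) {
      int64_t min = -(int64_t{1} << (payload_bits - 1));
      int64_t max = (int64_t{1} << (payload_bits - 1)) - 1;
      return value >= min && value <= max;
    }

    int main() {
      // With 31 payload bits this rejects exactly what the old
      // 0xc0000000 tests caught; with 32 bits every int32 passes.
      assert(!FitsInSmi(0x40000000, 31));
      assert(FitsInSmi(0x3FFFFFFF, 31));
      assert(FitsInSmi(-1, 32));
    }
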
| 7577 // All ops except SHR return a signed int32 that we load in a HeapNumber. | 7575 // All ops except SHR return a signed int32 that we load in a HeapNumber. |
| 7578 if (op_ != Token::SHR) { | 7576 if (op_ != Token::SHR && non_smi_result.is_linked()) { |
| 7579 __ bind(&non_smi_result); | 7577 __ bind(&non_smi_result); |
| 7580 // Allocate a heap number if needed. | 7578 // Allocate a heap number if needed. |
| 7581 __ movsxlq(rbx, rax); // rbx: sign extended 32-bit result | 7579 __ movsxlq(rbx, rax); // rbx: sign extended 32-bit result |
| 7582 switch (mode_) { | 7580 switch (mode_) { |
| 7583 case OVERWRITE_LEFT: | 7581 case OVERWRITE_LEFT: |
| 7584 case OVERWRITE_RIGHT: | 7582 case OVERWRITE_RIGHT: |
| 7585 // If the operand was an object, we skip the | 7583 // If the operand was an object, we skip the |
| 7586 // allocation of a heap number. | 7584 // allocation of a heap number. |
| 7587 __ movq(rax, Operand(rsp, mode_ == OVERWRITE_RIGHT ? | 7585 __ movq(rax, Operand(rsp, mode_ == OVERWRITE_RIGHT ? |
| 7588 1 * kPointerSize : 2 * kPointerSize)); | 7586 1 * kPointerSize : 2 * kPointerSize)); |
| (...skipping 86 matching lines...) |
| 7675 int CompareStub::MinorKey() { | 7673 int CompareStub::MinorKey() { |
| 7676 // Encode the two parameters in a unique 16 bit value. | 7674 // Encode the two parameters in a unique 16 bit value. |
| 7677 ASSERT(static_cast<unsigned>(cc_) < (1 << 15)); | 7675 ASSERT(static_cast<unsigned>(cc_) < (1 << 15)); |
| 7678 return (static_cast<unsigned>(cc_) << 1) | (strict_ ? 1 : 0); | 7676 return (static_cast<unsigned>(cc_) << 1) | (strict_ ? 1 : 0); |
| 7679 } | 7677 } |
| 7680 | 7678 |
| 7681 | 7679 |
| 7682 #undef __ | 7680 #undef __ |
| 7683 | 7681 |
| 7684 } } // namespace v8::internal | 7682 } } // namespace v8::internal |