OLD | NEW |
1 // Copyright 2009 the V8 project authors. All rights reserved. | 1 // Copyright 2009 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1710 matching lines...)
1721 // targets only after all the state is pushed on the frame. | 1721 // targets only after all the state is pushed on the frame. |
1722 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY); | 1722 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY); |
1723 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY); | 1723 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY); |
1724 | 1724 |
1725 __ movq(rax, frame_->ElementAt(0)); // load the current count | 1725 __ movq(rax, frame_->ElementAt(0)); // load the current count |
1726 __ cmpl(rax, frame_->ElementAt(1)); // compare to the array length | 1726 __ cmpl(rax, frame_->ElementAt(1)); // compare to the array length |
1727 node->break_target()->Branch(above_equal); | 1727 node->break_target()->Branch(above_equal); |
1728 | 1728 |
1729 // Get the i'th entry of the array. | 1729 // Get the i'th entry of the array. |
1730 __ movq(rdx, frame_->ElementAt(2)); | 1730 __ movq(rdx, frame_->ElementAt(2)); |
1731 // TODO(smi): Find a way to abstract indexing by a smi value. | 1731 SmiIndex index = masm_->SmiToIndex(rbx, rax, kPointerSizeLog2); |
1732 ASSERT(kSmiTagSize == 1 && kSmiTag == 0); | 1732 __ movq(rbx, |
1733 // Multiplier is times_4 since rax is already a Smi. | 1733 FieldOperand(rdx, index.reg, index.scale, FixedArray::kHeaderSize)); |
1734 __ movq(rbx, FieldOperand(rdx, rax, times_4, FixedArray::kHeaderSize)); | |
1735 | 1734 |
1736 // Get the expected map from the stack or a zero map in the | 1735 // Get the expected map from the stack or a zero map in the |
1737 // permanent slow case. rax: current iteration count; rbx: i'th entry | 1736 // permanent slow case. rax: current iteration count; rbx: i'th entry |
1738 // of the enum cache. | 1737 // of the enum cache. |
1739 __ movq(rdx, frame_->ElementAt(3)); | 1738 __ movq(rdx, frame_->ElementAt(3)); |
1740 // Check if the expected map still matches that of the enumerable. | 1739 // Check if the expected map still matches that of the enumerable. |
1741 // If not, we have to filter the key. | 1740 // If not, we have to filter the key. |
1742 // rax: current iteration count | 1741 // rax: current iteration count |
1743 // rbx: i'th entry of the enum cache | 1742 // rbx: i'th entry of the enum cache |
1744 // rdx: expected map value | 1743 // rdx: expected map value |
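The hand-coded indexing being removed in this hunk works because a smi is the integer value shifted left by one, so scaling the smi by 4 yields value * 8, the element size. Below is a small standalone sketch of that arithmetic, which the SmiToIndex helper presumably encapsulates; the constants mirror the hunk above, everything else is illustrative.

#include <cassert>
#include <cstdint>

// Standalone illustration of the indexing arithmetic in the hunk above.
// Assumption (matching the ASSERT the patch removes): kSmiTag == 0 and
// kSmiTagSize == 1, i.e. a smi is the integer value shifted left by one.
int main() {
  const int kSmiTagSize = 1;
  const int kPointerSizeLog2 = 3;      // 8-byte array elements on x64.

  int64_t value = 5;                   // untagged iteration count
  int64_t smi = value << kSmiTagSize;  // tagged smi, as held in rax

  // Old code: scale the smi by times_4, since value * 8 == (value * 2) * 4.
  int64_t offset_old = smi * 4;

  // New code: SmiToIndex(rbx, rax, kPointerSizeLog2) supplies a register and
  // scale producing the same offset (inferred from the call site above).
  int64_t offset_new = smi << (kPointerSizeLog2 - kSmiTagSize);

  assert(offset_old == offset_new);
  assert(offset_new == value * 8);
  return 0;
}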
(...skipping 2134 matching lines...)
3879 left.ToRegister(); | 3878 left.ToRegister(); |
3880 __ cmpq(right.reg(), left.reg()); | 3879 __ cmpq(right.reg(), left.reg()); |
3881 right.Unuse(); | 3880 right.Unuse(); |
3882 left.Unuse(); | 3881 left.Unuse(); |
3883 destination()->Split(equal); | 3882 destination()->Split(equal); |
3884 } | 3883 } |
3885 | 3884 |
3886 | 3885 |
3887 void CodeGenerator::GenerateGetFramePointer(ZoneList<Expression*>* args) { | 3886 void CodeGenerator::GenerateGetFramePointer(ZoneList<Expression*>* args) { |
3888 ASSERT(args->length() == 0); | 3887 ASSERT(args->length() == 0); |
3889 ASSERT(kSmiTag == 0); // RBP value is aligned, so it should look like Smi. | 3888 // RBP value is aligned, so it should be tagged as a smi (without necessarily |
| 3889 // being padded as a smi). |
| 3890 ASSERT(kSmiTag == 0 && kSmiTagSize == 1); |
3890 Result rbp_as_smi = allocator_->Allocate(); | 3891 Result rbp_as_smi = allocator_->Allocate(); |
3891 ASSERT(rbp_as_smi.is_valid()); | 3892 ASSERT(rbp_as_smi.is_valid()); |
3892 __ movq(rbp_as_smi.reg(), rbp); | 3893 __ movq(rbp_as_smi.reg(), rbp); |
3893 frame_->Push(&rbp_as_smi); | 3894 frame_->Push(&rbp_as_smi); |
3894 } | 3895 } |
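The reworded comment relies on alignment: any 8-byte-aligned pointer has a zero low bit, so it passes the smi tag check even though it was never tagged. A minimal standalone check of that claim, assuming the encoding the ASSERT documents (kSmiTag == 0, kSmiTagSize == 1):

#include <cassert>
#include <cstdint>

// Standalone check of the claim in the new comment: an 8-byte-aligned
// pointer value passes the smi tag test without ever being tagged.
// Assumption: the smi tag is the single low bit and its value is 0.
int main() {
  const uint64_t kSmiTagMask = 1;            // kSmiTagSize == 1
  const uint64_t kSmiTag = 0;

  uint64_t fake_rbp = 0x7fff5fbff8d0;        // any 8-byte-aligned address
  assert(fake_rbp % 8 == 0);

  // The tag check only looks at the low bit; alignment guarantees it is
  // zero, so the raw frame pointer "looks like" a smi.
  assert((fake_rbp & kSmiTagMask) == kSmiTag);
  return 0;
}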
3895 | 3896 |
3896 | 3897 |
3897 void CodeGenerator::GenerateRandomPositiveSmi(ZoneList<Expression*>* args) { | 3898 void CodeGenerator::GenerateRandomPositiveSmi(ZoneList<Expression*>* args) { |
3898 ASSERT(args->length() == 0); | 3899 ASSERT(args->length() == 0); |
3899 frame_->SpillAll(); | 3900 frame_->SpillAll(); |
(...skipping 2220 matching lines...)
6120 __ bind(deferred->patch_site()); | 6121 __ bind(deferred->patch_site()); |
6121 // Avoid using __ to ensure the distance from patch_site | 6122 // Avoid using __ to ensure the distance from patch_site |
6122 // to the map address is always the same. | 6123 // to the map address is always the same. |
6123 masm->movq(kScratchRegister, Factory::fixed_array_map(), | 6124 masm->movq(kScratchRegister, Factory::fixed_array_map(), |
6124 RelocInfo::EMBEDDED_OBJECT); | 6125 RelocInfo::EMBEDDED_OBJECT); |
6125 __ cmpq(FieldOperand(tmp.reg(), HeapObject::kMapOffset), | 6126 __ cmpq(FieldOperand(tmp.reg(), HeapObject::kMapOffset), |
6126 kScratchRegister); | 6127 kScratchRegister); |
6127 deferred->Branch(not_equal); | 6128 deferred->Branch(not_equal); |
6128 | 6129 |
6129 // Store the value. | 6130 // Store the value. |
6130 ASSERT_EQ(1, kSmiTagSize); | 6131 SmiIndex index = |
6131 ASSERT_EQ(0, kSmiTag); | 6132 masm->SmiToIndex(kScratchRegister, key.reg(), kPointerSizeLog2); |
6132 // TODO(lrn) Find way to abstract indexing by smi. | 6133 __ movq(Operand(tmp.reg(), |
6133 __ movq(Operand(tmp.reg(), | 6134 index.reg, |
6134 key.reg(), | 6135 index.scale, |
6135 times_half_pointer_size, | |
6136 FixedArray::kHeaderSize - kHeapObjectTag), | 6136 FixedArray::kHeaderSize - kHeapObjectTag), |
6137 value.reg()); | 6137 value.reg()); |
6138 __ IncrementCounter(&Counters::keyed_store_inline, 1); | 6138 __ IncrementCounter(&Counters::keyed_store_inline, 1); |
6139 | 6139 |
6140 deferred->BindExit(); | 6140 deferred->BindExit(); |
6141 | 6141 |
6142 cgen_->frame()->Push(&receiver); | 6142 cgen_->frame()->Push(&receiver); |
6143 cgen_->frame()->Push(&key); | 6143 cgen_->frame()->Push(&key); |
6144 cgen_->frame()->Push(&value); | 6144 cgen_->frame()->Push(&value); |
6145 } else { | 6145 } else { |
(...skipping 507 matching lines...)
6653 Label runtime; | 6653 Label runtime; |
6654 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); | 6654 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); |
6655 __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset)); | 6655 __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset)); |
6656 __ cmpq(rcx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | 6656 __ cmpq(rcx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); |
6657 __ j(not_equal, &runtime); | 6657 __ j(not_equal, &runtime); |
6658 // Value in rcx is Smi encoded. | 6658 // Value in rcx is Smi encoded. |
6659 | 6659 |
6660 // Patch the arguments.length and the parameters pointer. | 6660 // Patch the arguments.length and the parameters pointer. |
6661 __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset)); | 6661 __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
6662 __ movq(Operand(rsp, 1 * kPointerSize), rcx); | 6662 __ movq(Operand(rsp, 1 * kPointerSize), rcx); |
6663 __ lea(rdx, Operand(rdx, rcx, times_4, kDisplacement)); | 6663 SmiIndex index = masm->SmiToIndex(rcx, rcx, kPointerSizeLog2); |
| 6664 __ lea(rdx, Operand(rdx, index.reg, index.scale, kDisplacement)); |
6664 __ movq(Operand(rsp, 2 * kPointerSize), rdx); | 6665 __ movq(Operand(rsp, 2 * kPointerSize), rdx); |
6665 | 6666 |
6666 // Do the runtime call to allocate the arguments object. | 6667 // Do the runtime call to allocate the arguments object. |
6667 __ bind(&runtime); | 6668 __ bind(&runtime); |
6668 Runtime::Function* f = Runtime::FunctionForId(Runtime::kNewArgumentsFast); | 6669 Runtime::Function* f = Runtime::FunctionForId(Runtime::kNewArgumentsFast); |
6669 __ TailCallRuntime(ExternalReference(f), 3, f->result_size); | 6670 __ TailCallRuntime(ExternalReference(f), 3, f->result_size); |
6670 } | 6671 } |
6671 | 6672 |
6672 | 6673 |
6673 void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) { | 6674 void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) { |
(...skipping 15 matching lines...)
6689 __ cmpq(rcx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | 6690 __ cmpq(rcx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); |
6690 __ j(equal, &adaptor); | 6691 __ j(equal, &adaptor); |
6691 | 6692 |
6692 // Check index against formal parameters count limit passed in | 6693 // Check index against formal parameters count limit passed in |
6693 // through register rax. Use unsigned comparison to get negative | 6694 // through register rax. Use unsigned comparison to get negative |
6694 // check for free. | 6695 // check for free. |
6695 __ cmpq(rdx, rax); | 6696 __ cmpq(rdx, rax); |
6696 __ j(above_equal, &slow); | 6697 __ j(above_equal, &slow); |
6697 | 6698 |
6698 // Read the argument from the stack and return it. | 6699 // Read the argument from the stack and return it. |
6699 // Shifting code depends on SmiEncoding being equivalent to left shift: | 6700 SmiIndex index = masm->SmiToIndex(rax, rax, kPointerSizeLog2); |
6700 // we multiply by four to get pointer alignment. | 6701 __ lea(rbx, Operand(rbp, index.reg, index.scale, 0)); |
6701 // TODO(smi): Find a way to abstract indexing by a smi. | 6702 index = masm->SmiToNegativeIndex(rdx, rdx, kPointerSizeLog2); |
6702 ASSERT(kSmiTagSize == 1 && kSmiTag == 0); | 6703 __ movq(rax, Operand(rbx, index.reg, index.scale, kDisplacement)); |
6703 __ lea(rbx, Operand(rbp, rax, times_4, 0)); | |
6704 __ neg(rdx); // TODO(smi): Abstract negative indexing too. | |
6705 __ movq(rax, Operand(rbx, rdx, times_4, kDisplacement)); | |
6706 __ Ret(); | 6704 __ Ret(); |
6707 | 6705 |
6708 // Arguments adaptor case: Check index against actual arguments | 6706 // Arguments adaptor case: Check index against actual arguments |
6709 // limit found in the arguments adaptor frame. Use unsigned | 6707 // limit found in the arguments adaptor frame. Use unsigned |
6710 // comparison to get negative check for free. | 6708 // comparison to get negative check for free. |
6711 __ bind(&adaptor); | 6709 __ bind(&adaptor); |
6712 __ movq(rcx, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset)); | 6710 __ movq(rcx, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
6713 __ cmpq(rdx, rcx); | 6711 __ cmpq(rdx, rcx); |
6714 __ j(above_equal, &slow); | 6712 __ j(above_equal, &slow); |
6715 | 6713 |
6716 // Read the argument from the stack and return it. | 6714 // Read the argument from the stack and return it. |
6717 // Shifting code depends on SmiEncoding being equivalent to left shift: | 6715 index = masm->SmiToIndex(rbx, rcx, kPointerSizeLog2); |
6718 // we multiply by four to get pointer alignment. | 6716 __ lea(rbx, Operand(rbx, index.reg, index.scale, 0)); |
6719 // TODO(smi): Find a way to abstract indexing by a smi. | 6717 index = masm->SmiToNegativeIndex(rdx, rdx, kPointerSizeLog2); |
6720 ASSERT(kSmiTagSize == 1 && kSmiTag == 0); | 6718 __ movq(rax, Operand(rbx, index.reg, index.scale, kDisplacement)); |
6721 __ lea(rbx, Operand(rbx, rcx, times_4, 0)); | |
6722 __ neg(rdx); | |
6723 __ movq(rax, Operand(rbx, rdx, times_4, kDisplacement)); | |
6724 __ Ret(); | 6719 __ Ret(); |
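Both argument-reading paths now pair a positive scaled index (the parameter or argument count) with a negative scaled index (the key). A standalone sketch of the resulting address computation; the helper semantics are inferred from the lea/neg/movq sequence they replace, and the kDisplacement value used here is only a stand-in for the constant defined earlier in this file.

#include <cassert>
#include <cstdint>

// Standalone sketch of the address arithmetic in GenerateReadElement after
// the patch.  Helper semantics are inferred from the replaced code;
// kDisplacement below is an illustrative value only.
int main() {
  const int kSmiTagSize = 1;
  const int kPointerSizeLog2 = 3;
  const int64_t kDisplacement = 16;          // stand-in value

  int64_t frame_pointer = 0x1000;
  int64_t count = 4, key = 1;                // untagged values
  int64_t smi_count = count << kSmiTagSize;  // parameter/argument count
  int64_t smi_key = key << kSmiTagSize;      // index being read

  // lea rbx, [frame_pointer + SmiToIndex(count)]: base above the arguments.
  int64_t rbx =
      frame_pointer + (smi_count << (kPointerSizeLog2 - kSmiTagSize));

  // movq rax, [rbx + SmiToNegativeIndex(key) + kDisplacement]: walk back
  // down by the key, exactly like the old neg + times_4 sequence.
  int64_t neg_index = -(smi_key << (kPointerSizeLog2 - kSmiTagSize));
  int64_t slot = rbx + neg_index + kDisplacement;

  assert(rbx == frame_pointer + count * 8);
  assert(slot == frame_pointer + (count - key) * 8 + kDisplacement);
  return 0;
}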
6725 | 6720 |
6726 // Slow-case: Handle non-smi or out-of-bounds access to arguments | 6721 // Slow-case: Handle non-smi or out-of-bounds access to arguments |
6727 // by calling the runtime system. | 6722 // by calling the runtime system. |
6728 __ bind(&slow); | 6723 __ bind(&slow); |
6729 __ pop(rbx); // Return address. | 6724 __ pop(rbx); // Return address. |
6730 __ push(rdx); | 6725 __ push(rdx); |
6731 __ push(rbx); | 6726 __ push(rbx); |
6732 Runtime::Function* f = | 6727 Runtime::Function* f = |
6733 Runtime::FunctionForId(Runtime::kGetArgumentsProperty); | 6728 Runtime::FunctionForId(Runtime::kGetArgumentsProperty); |
(...skipping 954 matching lines...)
7688 int CompareStub::MinorKey() { | 7683 int CompareStub::MinorKey() { |
7689 // Encode the two parameters in a unique 16 bit value. | 7684 // Encode the two parameters in a unique 16 bit value. |
7690 ASSERT(static_cast<unsigned>(cc_) < (1 << 15)); | 7685 ASSERT(static_cast<unsigned>(cc_) < (1 << 15)); |
7691 return (static_cast<unsigned>(cc_) << 1) | (strict_ ? 1 : 0); | 7686 return (static_cast<unsigned>(cc_) << 1) | (strict_ ? 1 : 0); |
7692 } | 7687 } |
7693 | 7688 |
7694 | 7689 |
7695 #undef __ | 7690 #undef __ |
7696 | 7691 |
7697 } } // namespace v8::internal | 7692 } } // namespace v8::internal |