OLD | NEW |
1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 3545 matching lines...)
3556 | 3556 |
3557 // Stack layout: | 3557 // Stack layout: |
3558 // [tos] : value | 3558 // [tos] : value |
3559 // [tos+1] : key | 3559 // [tos+1] : key |
3560 // [tos+2] : receiver | 3560 // [tos+2] : receiver |
3561 // [tos+3] : receiver if at the end of an initialization block | 3561 // [tos+3] : receiver if at the end of an initialization block |
3562 | 3562 |
3563 // Perform the assignment. It is safe to ignore constants here. | 3563 // Perform the assignment. It is safe to ignore constants here. |
3564 ASSERT(node->op() != Token::INIT_CONST); | 3564 ASSERT(node->op() != Token::INIT_CONST); |
3565 CodeForSourcePosition(node->position()); | 3565 CodeForSourcePosition(node->position()); |
3566 frame_->PopToR0(); | |
3567 EmitKeyedStore(prop->key()->type()); | 3566 EmitKeyedStore(prop->key()->type()); |
3568 frame_->Drop(2); // Key and receiver are left on the stack. | |
3569 frame_->EmitPush(r0); | 3567 frame_->EmitPush(r0); |
3570 | 3568 |
3571 // Stack layout: | 3569 // Stack layout: |
3572 // [tos] : result | 3570 // [tos] : result |
3573 // [tos+1] : receiver if at the end of an initialization block | 3571 // [tos+1] : receiver if at the end of an initialization block |
3574 | 3572 |
3575 // Change to fast case at the end of an initialization block. | 3573 // Change to fast case at the end of an initialization block. |
3576 if (node->ends_initialization_block()) { | 3574 if (node->ends_initialization_block()) { |
3577 // The argument to the runtime call is the extra copy of the receiver, | 3575 // The argument to the runtime call is the extra copy of the receiver, |
3578 // which is below the value of the assignment. Swap the receiver and | 3576 // which is below the value of the assignment. Swap the receiver and |
(...skipping 1933 matching lines...)
5512 // Block the constant pool for one more instruction after leaving this | 5510 // Block the constant pool for one more instruction after leaving this |
5513 // constant pool block scope to include the branch instruction ending the | 5511 // constant pool block scope to include the branch instruction ending the |
5514 // deferred code. | 5512 // deferred code. |
5515 __ BlockConstPoolFor(1); | 5513 __ BlockConstPoolFor(1); |
5516 } | 5514 } |
5517 } | 5515 } |
5518 | 5516 |
5519 | 5517 |
5520 class DeferredReferenceSetKeyedValue: public DeferredCode { | 5518 class DeferredReferenceSetKeyedValue: public DeferredCode { |
5521 public: | 5519 public: |
5522 DeferredReferenceSetKeyedValue() { | 5520 DeferredReferenceSetKeyedValue(Register value, |
| 5521 Register key, |
| 5522 Register receiver) |
| 5523 : value_(value), key_(key), receiver_(receiver) { |
5523 set_comment("[ DeferredReferenceSetKeyedValue"); | 5524 set_comment("[ DeferredReferenceSetKeyedValue"); |
5524 } | 5525 } |
5525 | 5526 |
5526 virtual void Generate(); | 5527 virtual void Generate(); |
| 5528 |
| 5529 private: |
| 5530 Register value_; |
| 5531 Register key_; |
| 5532 Register receiver_; |
5527 }; | 5533 }; |
5528 | 5534 |
5529 | 5535 |
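The class above now carries the registers in which the inlined fast path left the value, the key and the receiver; Generate() below moves them into the keyed store IC's fixed calling convention (value in r0, key in r1, receiver in r2) before calling the stub. A minimal sketch of that shuffle in plain C++ rather than macro-assembler code; the enum and helper are illustrative only, not V8 API:

#include <cassert>
#include <utility>

enum Register { r0, r1, r2, ip };

struct Operands {
  Register value;
  Register key;
  Register receiver;
};

// Rearranges the operands into the IC's convention.  Generate() assumes
// value and key arrived in r0 and r1 (in either order) and the receiver
// in r2, so if the value landed in r1 a single swap through a temp
// register (as in __ Swap(r0, r1, ip)) fixes value and key at once.
Operands ShuffleForKeyedStoreIC(Operands in) {
  assert(in.receiver == r2);       // the receiver is always popped into r2
  if (in.value == r1) {
    std::swap(in.value, in.key);   // models the r0/r1 swap via ip
  }
  assert(in.value == r0 && in.key == r1);
  return in;
}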
5530 void DeferredReferenceSetKeyedValue::Generate() { | 5536 void DeferredReferenceSetKeyedValue::Generate() { |
5531 Register scratch1 = VirtualFrame::scratch0(); | 5537 Register scratch1 = VirtualFrame::scratch0(); |
5532 Register scratch2 = VirtualFrame::scratch1(); | 5538 Register scratch2 = VirtualFrame::scratch1(); |
5533 __ DecrementCounter(&Counters::keyed_store_inline, 1, scratch1, scratch2); | 5539 __ DecrementCounter(&Counters::keyed_store_inline, 1, scratch1, scratch2); |
5534 __ IncrementCounter( | 5540 __ IncrementCounter( |
5535 &Counters::keyed_store_inline_miss, 1, scratch1, scratch2); | 5541 &Counters::keyed_store_inline_miss, 1, scratch1, scratch2); |
5536 | 5542 |
| 5543 // Ensure value in r0, key in r1 and receiver in r2 to match keyed store ic |
| 5544 // calling convention. |
| 5545 if (value_.is(r1)) { |
| 5546 __ Swap(r0, r1, ip); |
| 5547 } |
| 5548 ASSERT(receiver_.is(r2)); |
| 5549 |
5537 // The rest of the instructions in the deferred code must be together. | 5550 // The rest of the instructions in the deferred code must be together. |
5538 { Assembler::BlockConstPoolScope block_const_pool(masm_); | 5551 { Assembler::BlockConstPoolScope block_const_pool(masm_); |
5539 // Call keyed load IC. It has receiver amd key on the stack and the value to | 5552 // Call keyed store IC. It has the arguments value, key and receiver in r0, |
5540 // store in r0. | 5553 // r1 and r2. |
5541 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize)); | 5554 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize)); |
5542 __ Call(ic, RelocInfo::CODE_TARGET); | 5555 __ Call(ic, RelocInfo::CODE_TARGET); |
5543 // The call must be followed by a nop instruction to indicate that the | 5556 // The call must be followed by a nop instruction to indicate that the |
5544 // keyed store has been inlined. | 5557 // keyed store has been inlined. |
5545 __ nop(PROPERTY_ACCESS_INLINED); | 5558 __ nop(PROPERTY_ACCESS_INLINED); |
5546 | 5559 |
5547 // Block the constant pool for one more instruction after leaving this | 5560 // Block the constant pool for one more instruction after leaving this |
5548 // constant pool block scope to include the branch instruction ending the | 5561 // constant pool block scope to include the branch instruction ending the |
5549 // deferred code. | 5562 // deferred code. |
5550 __ BlockConstPoolFor(1); | 5563 __ BlockConstPoolFor(1); |
(...skipping 139 matching lines...)
5690 scratch1, | 5703 scratch1, |
5691 Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 5704 Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
5692 __ ldr(scratch1, | 5705 __ ldr(scratch1, |
5693 MemOperand(scratch1, key, LSL, | 5706 MemOperand(scratch1, key, LSL, |
5694 kPointerSizeLog2 - (kSmiTagSize + kSmiShiftSize))); | 5707 kPointerSizeLog2 - (kSmiTagSize + kSmiShiftSize))); |
5695 __ cmp(scratch1, scratch2); | 5708 __ cmp(scratch1, scratch2); |
5696 deferred->Branch(eq); | 5709 deferred->Branch(eq); |
5697 | 5710 |
5698 __ mov(r0, scratch1); | 5711 __ mov(r0, scratch1); |
5699 // Make sure that the expected number of instructions are generated. | 5712 // Make sure that the expected number of instructions are generated. |
5700 ASSERT_EQ(kInlinedKeyedLoadInstructionsAfterPatchSize, | 5713 ASSERT_EQ(kInlinedKeyedLoadInstructionsAfterPatch, |
5701 masm_->InstructionsGeneratedSince(&check_inlined_codesize)); | 5714 masm_->InstructionsGeneratedSince(&check_inlined_codesize)); |
5702 } | 5715 } |
5703 | 5716 |
5704 deferred->BindExit(); | 5717 deferred->BindExit(); |
5705 } | 5718 } |
5706 } | 5719 } |
5707 | 5720 |
5708 | 5721 |
5709 void CodeGenerator::EmitKeyedStore(StaticType* key_type) { | 5722 void CodeGenerator::EmitKeyedStore(StaticType* key_type) { |
5710 VirtualFrame::SpilledScope scope(frame_); | |
5711 // Generate inlined version of the keyed store if the code is in a loop | 5723 // Generate inlined version of the keyed store if the code is in a loop |
5712 // and the key is likely to be a smi. | 5724 // and the key is likely to be a smi. |
5713 if (loop_nesting() > 0 && key_type->IsLikelySmi()) { | 5725 if (loop_nesting() > 0 && key_type->IsLikelySmi()) { |
5714 // Inline the keyed store. | 5726 // Inline the keyed store. |
5715 Comment cmnt(masm_, "[ Inlined store to keyed property"); | 5727 Comment cmnt(masm_, "[ Inlined store to keyed property"); |
5716 | 5728 |
5717 DeferredReferenceSetKeyedValue* deferred = | 5729 Register scratch1 = VirtualFrame::scratch0(); |
5718 new DeferredReferenceSetKeyedValue(); | 5730 Register scratch2 = VirtualFrame::scratch1(); |
| 5731 Register scratch3 = r3; |
5719 | 5732 |
5720 // Counter will be decremented in the deferred code. Placed here to avoid | 5733 // Counter will be decremented in the deferred code. Placed here to avoid |
5721 // having it in the instruction stream below where patching will occur. | 5734 // having it in the instruction stream below where patching will occur. |
5722 __ IncrementCounter(&Counters::keyed_store_inline, 1, | 5735 __ IncrementCounter(&Counters::keyed_store_inline, 1, |
5723 frame_->scratch0(), frame_->scratch1()); | 5736 scratch1, scratch2); |
| 5737 |
| 5738 // Load the value, key and receiver from the stack. |
| 5739 Register value = frame_->PopToRegister(); |
| 5740 Register key = frame_->PopToRegister(value); |
| 5741 Register receiver = r2; |
| 5742 frame_->EmitPop(receiver); |
| 5743 VirtualFrame::SpilledScope spilled(frame_); |
| 5744 |
| 5745 // The deferred code expects value, key and receiver in registers. |
| 5746 DeferredReferenceSetKeyedValue* deferred = |
| 5747 new DeferredReferenceSetKeyedValue(value, key, receiver); |
5724 | 5748 |
5725 // Check that the value is a smi. As this inlined code does not set the | 5749 // Check that the value is a smi. As this inlined code does not set the |
5726 // write barrier it is only possible to store smi values. | 5750 // write barrier it is only possible to store smi values. |
5727 __ tst(r0, Operand(kSmiTagMask)); | 5751 __ tst(value, Operand(kSmiTagMask)); |
5728 deferred->Branch(ne); | 5752 deferred->Branch(ne); |
5729 | 5753 |
5730 // Load the key and receiver from the stack. | |
5731 __ ldr(r1, MemOperand(sp, 0)); | |
5732 __ ldr(r2, MemOperand(sp, kPointerSize)); | |
5733 | |
5734 // Check that the key is a smi. | 5754 // Check that the key is a smi. |
5735 __ tst(r1, Operand(kSmiTagMask)); | 5755 __ tst(key, Operand(kSmiTagMask)); |
5736 deferred->Branch(ne); | 5756 deferred->Branch(ne); |
5737 | 5757 |
5738 // Check that the receiver is a heap object. | 5758 // Check that the receiver is a heap object. |
5739 __ tst(r2, Operand(kSmiTagMask)); | 5759 __ tst(receiver, Operand(kSmiTagMask)); |
5740 deferred->Branch(eq); | 5760 deferred->Branch(eq); |
5741 | 5761 |
5742 // Check that the receiver is a JSArray. | 5762 // Check that the receiver is a JSArray. |
5743 __ CompareObjectType(r2, r3, r3, JS_ARRAY_TYPE); | 5763 __ CompareObjectType(receiver, scratch1, scratch1, JS_ARRAY_TYPE); |
5744 deferred->Branch(ne); | 5764 deferred->Branch(ne); |
5745 | 5765 |
5746 // Check that the key is within bounds. Both the key and the length of | 5766 // Check that the key is within bounds. Both the key and the length of |
5747 // the JSArray are smis. Use unsigned comparison to handle negative keys. | 5767 // the JSArray are smis. Use unsigned comparison to handle negative keys. |
5748 __ ldr(r3, FieldMemOperand(r2, JSArray::kLengthOffset)); | 5768 __ ldr(scratch1, FieldMemOperand(receiver, JSArray::kLengthOffset)); |
5749 __ cmp(r3, r1); | 5769 __ cmp(scratch1, key); |
5750 deferred->Branch(ls); // Unsigned less equal. | 5770 deferred->Branch(ls); // Unsigned less equal. |
5751 | 5771 |
5752 // The following instructions are the part of the inlined store keyed | 5772 // The following instructions are the part of the inlined store keyed |
5753 // property code which can be patched. Therefore the exact number of | 5773 // property code which can be patched. Therefore the exact number of |
5754 // instructions generated need to be fixed, so the constant pool is blocked | 5774 // instructions generated need to be fixed, so the constant pool is blocked |
5755 // while generating this code. | 5775 // while generating this code. |
5756 #ifdef DEBUG | |
5757 int kInlinedKeyedStoreInstructions = 7; | |
5758 Label check_inlined_codesize; | |
5759 masm_->bind(&check_inlined_codesize); | |
5760 #endif | |
5761 { Assembler::BlockConstPoolScope block_const_pool(masm_); | 5776 { Assembler::BlockConstPoolScope block_const_pool(masm_); |
5762 // Get the elements array from the receiver and check that it | 5777 // Get the elements array from the receiver and check that it |
5763 // is not a dictionary. | 5778 // is not a dictionary. |
5764 __ ldr(r3, FieldMemOperand(r2, JSObject::kElementsOffset)); | 5779 __ ldr(scratch1, FieldMemOperand(receiver, JSObject::kElementsOffset)); |
5765 __ ldr(r4, FieldMemOperand(r3, JSObject::kMapOffset)); | 5780 __ ldr(scratch2, FieldMemOperand(scratch1, JSObject::kMapOffset)); |
5766 // Read the fixed array map from the constant pool (not from the root | 5781 // Read the fixed array map from the constant pool (not from the root |
5767 // array) so that the value can be patched. When debugging, we patch this | 5782 // array) so that the value can be patched. When debugging, we patch this |
5768 // comparison to always fail so that we will hit the IC call in the | 5783 // comparison to always fail so that we will hit the IC call in the |
5769 // deferred code which will allow the debugger to break for fast case | 5784 // deferred code which will allow the debugger to break for fast case |
5770 // stores. | 5785 // stores. |
5771 __ mov(r5, Operand(Factory::fixed_array_map())); | 5786 #ifdef DEBUG |
5772 __ cmp(r4, r5); | 5787 Label check_inlined_codesize; |
| 5788 masm_->bind(&check_inlined_codesize); |
| 5789 #endif |
| 5790 __ mov(scratch3, Operand(Factory::fixed_array_map())); |
| 5791 __ cmp(scratch2, scratch3); |
5773 deferred->Branch(ne); | 5792 deferred->Branch(ne); |
5774 | 5793 |
5775 // Store the value. | 5794 // Store the value. |
5776 __ add(r3, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 5795 __ add(scratch1, scratch1, |
5777 __ str(r0, MemOperand(r3, r1, LSL, | 5796 Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
5778 kPointerSizeLog2 - (kSmiTagSize + kSmiShiftSize))); | 5797 __ str(value, |
| 5798 MemOperand(scratch1, key, LSL, |
| 5799 kPointerSizeLog2 - (kSmiTagSize + kSmiShiftSize))); |
5779 | 5800 |
5780 // Make sure that the expected number of instructions are generated. | 5801 // Make sure that the expected number of instructions are generated. |
5781 ASSERT_EQ(kInlinedKeyedStoreInstructions, | 5802 ASSERT_EQ(kInlinedKeyedStoreInstructionsAfterPatch, |
5782 masm_->InstructionsGeneratedSince(&check_inlined_codesize)); | 5803 masm_->InstructionsGeneratedSince(&check_inlined_codesize)); |
5783 } | 5804 } |
5784 | 5805 |
5785 deferred->BindExit(); | 5806 deferred->BindExit(); |
5786 } else { | 5807 } else { |
5787 frame()->CallKeyedStoreIC(); | 5808 frame()->CallKeyedStoreIC(); |
5788 } | 5809 } |
5789 } | 5810 } |
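Taken together, the inlined fast path in EmitKeyedStore is a short chain of guards followed by a raw element store, with every failure routed to the deferred IC call. A rough C++ rendering of that logic under the usual 32-bit assumptions (kPointerSizeLog2 == 2, kSmiTagSize == 1, kSmiShiftSize == 0, so a smi-tagged key is index * 2 and one extra left shift turns it into the byte offset index * 4); the struct layouts and helper here are illustrative, not V8's real object model:

#include <cstdint>

const int32_t kSmiTagMask    = 1;   // smis have the low bit clear
const int kSmiTagSize        = 1;
const int kSmiShiftSize      = 0;
const int kPointerSizeLog2   = 2;   // 32-bit pointers

struct FixedArray {
  const void* map;
  int32_t elements[16];  // backing store; fixed size only for the sketch
};
struct JSArray {
  int32_t length_smi;    // array length, stored as a smi
  FixedArray* elements;
};

// Returns false whenever the slow (deferred IC) path must take over.
bool TryInlineKeyedStore(int32_t value_smi, int32_t key_smi,
                         JSArray* receiver, const void* fixed_array_map) {
  if (value_smi & kSmiTagMask) return false;  // only smis: no write barrier
  if (key_smi & kSmiTagMask) return false;    // key must be a smi too
  // (Receiver smi and JS_ARRAY_TYPE checks elided; the real code verifies
  //  them with tst/CompareObjectType before getting this far.)
  if (static_cast<uint32_t>(receiver->length_smi) <=
      static_cast<uint32_t>(key_smi)) {
    return false;  // unsigned compare also catches negative keys
  }
  FixedArray* elements = receiver->elements;
  if (elements->map != fixed_array_map) return false;  // dictionary elements
  // A smi key already equals index * 2, so shifting left by
  // kPointerSizeLog2 - (kSmiTagSize + kSmiShiftSize) = 2 - 1 = 1 more bit
  // yields the byte offset index * 4 into the elements array.
  int32_t byte_offset =
      key_smi << (kPointerSizeLog2 - (kSmiTagSize + kSmiShiftSize));
  *reinterpret_cast<int32_t*>(
      reinterpret_cast<char*>(elements->elements) + byte_offset) = value_smi;
  return true;
}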
5790 | 5811 |
5791 | 5812 |
(...skipping 95 matching lines...)
5887 | 5908 |
5888 case NAMED: { | 5909 case NAMED: { |
5889 Comment cmnt(masm, "[ Store to named Property"); | 5910 Comment cmnt(masm, "[ Store to named Property"); |
5890 cgen_->EmitNamedStore(GetName(), false); | 5911 cgen_->EmitNamedStore(GetName(), false); |
5891 frame->EmitPush(r0); | 5912 frame->EmitPush(r0); |
5892 set_unloaded(); | 5913 set_unloaded(); |
5893 break; | 5914 break; |
5894 } | 5915 } |
5895 | 5916 |
5896 case KEYED: { | 5917 case KEYED: { |
5897 VirtualFrame::SpilledScope scope(frame); | |
5898 Comment cmnt(masm, "[ Store to keyed Property"); | 5918 Comment cmnt(masm, "[ Store to keyed Property"); |
5899 Property* property = expression_->AsProperty(); | 5919 Property* property = expression_->AsProperty(); |
5900 ASSERT(property != NULL); | 5920 ASSERT(property != NULL); |
5901 cgen_->CodeForSourcePosition(property->position()); | 5921 cgen_->CodeForSourcePosition(property->position()); |
5902 | |
5903 frame->EmitPop(r0); // Value. | |
5904 cgen_->EmitKeyedStore(property->key()->type()); | 5922 cgen_->EmitKeyedStore(property->key()->type()); |
5905 frame->EmitPush(r0); | 5923 frame->EmitPush(r0); |
5906 cgen_->UnloadReference(this); | 5924 set_unloaded(); |
5907 break; | 5925 break; |
5908 } | 5926 } |
5909 | 5927 |
5910 default: | 5928 default: |
5911 UNREACHABLE(); | 5929 UNREACHABLE(); |
5912 } | 5930 } |
5913 } | 5931 } |
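In the KEYED case of Reference::SetValue the value, key and receiver are simply left on the virtual frame; EmitKeyedStore pops them itself, and the virtual frame can often satisfy those pops straight from registers without materialising the values on the machine stack. A toy model of that idea, purely illustrative and not V8's actual VirtualFrame API:

#include <optional>
#include <vector>

enum Register { r0, r1, r2, r3 };

// Each frame element remembers where its value currently lives; a real
// frame also tracks constants and spilled stack slots.
struct Element {
  std::optional<Register> reg;
};

class TinyVirtualFrame {
 public:
  // "Pushing" just records the element; no machine-stack traffic yet.
  void Push(Register r) { elements_.push_back(Element{r}); }

  // Popping hands back the register the value already lives in, so the
  // common case needs no load.  (A spilled element would need a reload;
  // that path is not modelled here.)
  Register PopToRegister() {
    Element top = elements_.back();
    elements_.pop_back();
    return top.reg.value_or(r0);  // placeholder for "allocate and reload"
  }

 private:
  std::vector<Element> elements_;
};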
5914 | 5932 |
5915 | 5933 |
5916 void FastNewClosureStub::Generate(MacroAssembler* masm) { | 5934 void FastNewClosureStub::Generate(MacroAssembler* masm) { |
(...skipping 4100 matching lines...)
10017 __ bind(&string_add_runtime); | 10035 __ bind(&string_add_runtime); |
10018 __ TailCallRuntime(Runtime::kStringAdd, 2, 1); | 10036 __ TailCallRuntime(Runtime::kStringAdd, 2, 1); |
10019 } | 10037 } |
10020 | 10038 |
10021 | 10039 |
10022 #undef __ | 10040 #undef __ |
10023 | 10041 |
10024 } } // namespace v8::internal | 10042 } } // namespace v8::internal |
10025 | 10043 |
10026 #endif // V8_TARGET_ARCH_ARM | 10044 #endif // V8_TARGET_ARCH_ARM |