OLD | NEW |
---|---|
1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 5560 matching lines...)
5571 // Inline the keyed load. | 5571 // Inline the keyed load. |
5572 Comment cmnt(masm_, "[ Inlined load from keyed property"); | 5572 Comment cmnt(masm_, "[ Inlined load from keyed property"); |
5573 | 5573 |
5574 // Counter will be decremented in the deferred code. Placed here to avoid | 5574 // Counter will be decremented in the deferred code. Placed here to avoid |
5575 // having it in the instruction stream below where patching will occur. | 5575 // having it in the instruction stream below where patching will occur. |
5576 __ IncrementCounter(&Counters::keyed_load_inline, 1, | 5576 __ IncrementCounter(&Counters::keyed_load_inline, 1, |
5577 frame_->scratch0(), frame_->scratch1()); | 5577 frame_->scratch0(), frame_->scratch1()); |
5578 | 5578 |
5579 // Load the key and receiver from the stack to r0 and r1. | 5579 // Load the key and receiver from the stack to r0 and r1. |
5580 frame_->PopToR1R0(); | 5580 frame_->PopToR1R0(); |
5581 Register receiver = r0; | 5581 Register key = r0; |
5582 Register key = r1; | 5582 Register receiver = r1; |
5583 VirtualFrame::SpilledScope spilled(frame_); | 5583 VirtualFrame::SpilledScope spilled(frame_); |
5584 | 5584 |
5585 // The deferred code expects key and receiver in r0 and r1. | 5585 // The deferred code expects key and receiver in r0 and r1. |
5586 DeferredReferenceGetKeyedValue* deferred = | 5586 DeferredReferenceGetKeyedValue* deferred = |
5587 new DeferredReferenceGetKeyedValue(); | 5587 new DeferredReferenceGetKeyedValue(); |
5588 | 5588 |
5589 // Check that the receiver is a heap object. | 5589 // Check that the receiver is a heap object. |
5590 __ tst(receiver, Operand(kSmiTagMask)); | 5590 __ tst(receiver, Operand(kSmiTagMask)); |
5591 deferred->Branch(eq); | 5591 deferred->Branch(eq); |
5592 | 5592 |
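Context for the smi check above: in V8's pointer-tagging scheme, small integers (smis) have tag bit 0 clear and heap-object pointers have it set, so tst receiver, Operand(kSmiTagMask) sets the Z flag exactly when the receiver is a smi, and Branch(eq) bails out to the deferred code. A minimal standalone sketch of that scheme, with simplified 32-bit constants mirroring V8's:

    #include <cassert>
    #include <cstdint>

    const int kSmiTagSize = 1;
    const intptr_t kSmiTagMask = (1 << kSmiTagSize) - 1;  // low bit only

    // What `tst receiver, kSmiTagMask` followed by Branch(eq) computes.
    bool IsSmi(intptr_t tagged) { return (tagged & kSmiTagMask) == 0; }

    int main() {
      intptr_t smi = 42 << kSmiTagSize;  // tagged smi 42: low bit clear
      intptr_t heap_object = 0x1001;     // fake tagged pointer: low bit set
      assert(IsSmi(smi));
      assert(!IsSmi(heap_object));
      return 0;
    }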
5593 // The following instructions are the part of the inlined load keyed | 5593 // The following instructions are the part of the inlined load keyed |
5594 // property code which can be patched. Therefore the exact number of | 5594 // property code which can be patched. Therefore the exact number of |
5595 // instructions generated need to be fixed, so the constant pool is blocked | 5595 // instructions generated need to be fixed, so the constant pool is blocked |
5596 // while generating this code. | 5596 // while generating this code. |
5597 #ifdef DEBUG | 5597 #ifdef DEBUG |
5598 int kInlinedKeyedLoadInstructions = 19; | 5598 int kInlinedKeyedLoadInstructions = 20; |
5599 Label check_inlined_codesize; | 5599 Label check_inlined_codesize; |
5600 masm_->bind(&check_inlined_codesize); | 5600 masm_->bind(&check_inlined_codesize); |
5601 #endif | 5601 #endif |
5602 { Assembler::BlockConstPoolScope block_const_pool(masm_); | 5602 { Assembler::BlockConstPoolScope block_const_pool(masm_); |
5603 Register scratch1 = VirtualFrame::scratch0(); | 5603 Register scratch1 = VirtualFrame::scratch0(); |
5604 Register scratch2 = VirtualFrame::scratch1(); | 5604 Register scratch2 = VirtualFrame::scratch1(); |
5605 // Check the map. The null map used below is patched by the inline cache | 5605 // Check the map. The null map used below is patched by the inline cache |
5606 // code. | 5606 // code. |
5607 __ ldr(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset)); | 5607 __ ldr(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
5608 __ mov(scratch2, Operand(Factory::null_value())); | 5608 __ mov(scratch2, Operand(Factory::null_value())); |
(...skipping 16 matching lines...)
5625 // negative keys. | 5625 // negative keys. |
5626 __ ldr(scratch2, FieldMemOperand(scratch1, FixedArray::kLengthOffset)); | 5626 __ ldr(scratch2, FieldMemOperand(scratch1, FixedArray::kLengthOffset)); |
5627 __ cmp(scratch2, Operand(key, ASR, kSmiTagSize)); | 5627 __ cmp(scratch2, Operand(key, ASR, kSmiTagSize)); |
5628 deferred->Branch(ls); // Unsigned less equal. | 5628 deferred->Branch(ls); // Unsigned less equal. |
5629 | 5629 |
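The ls branch above does double duty: scratch2 holds the fixed-array length, the operand is the key shifted down to an untagged integer, and ls (unsigned lower-or-same) is taken when length <= key as unsigned values. That rejects out-of-range positive keys, and also negative keys, which reinterpret as very large unsigned values, as the code comment notes. A standalone sketch of the trick (hypothetical helper name):

    #include <cassert>
    #include <cstdint>

    // One unsigned compare rejects both key >= length and key < 0,
    // because a negative key cast to unsigned becomes a huge value.
    bool InBounds(int32_t key, uint32_t length) {
      return static_cast<uint32_t>(key) < length;  // in range iff NOT ls
    }

    int main() {
      assert(InBounds(0, 4));
      assert(InBounds(3, 4));
      assert(!InBounds(4, 4));   // length <= key: deferred Branch(ls) taken
      assert(!InBounds(-1, 4));  // wraps to 0xFFFFFFFF, caught by same test
      return 0;
    }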
5630 // Load and check that the result is not the hole (key is a smi). | 5630 // Load and check that the result is not the hole (key is a smi). |
5631 __ LoadRoot(scratch2, Heap::kTheHoleValueRootIndex); | 5631 __ LoadRoot(scratch2, Heap::kTheHoleValueRootIndex); |
5632 __ add(scratch1, | 5632 __ add(scratch1, |
5633 scratch1, | 5633 scratch1, |
5634 Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 5634 Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
5635 __ ldr(r0, | 5635 __ ldr(ip, |
Erik Corry 2010/05/10 10:16:48
Ip is a register that is used behind our backs by
Søren Thygesen Gjesse 2010/05/10 10:45:26
Done.
5636 MemOperand(scratch1, key, LSL, | 5636 MemOperand(scratch1, key, LSL, |
5637 kPointerSizeLog2 - (kSmiTagSize + kSmiShiftSize))); | 5637 kPointerSizeLog2 - (kSmiTagSize + kSmiShiftSize))); |
5638 __ cmp(r0, scratch2); | 5638 __ cmp(ip, scratch2); |
5639 // This is the only branch to deferred where r0 and r1 do not contain the | |
5640 // receiver and key. We can't just load undefined here because we have to | |
5641 // check the prototype. | |
5642 deferred->Branch(eq); | 5639 deferred->Branch(eq); |
5643 | 5640 |
5641 __ mov(r0, ip); | |
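Context for the exchange above: the revised code loads the element into ip rather than r0, so r0 and r1 still hold key and receiver on every branch to the deferred code, including the hole check; only after that check passes is the result committed to r0. That is why the old comment excusing a branch with the wrong register contents could be deleted. Restating the new sequence with descriptive comments (ip is, as the truncated reviewer comment appears to say, a scratch register the assembler may itself use behind the scenes, so it is only safe across a short stretch like this):

    __ ldr(ip, MemOperand(scratch1, key, LSL,
               kPointerSizeLog2 - (kSmiTagSize + kSmiShiftSize)));  // element -> ip
    __ cmp(ip, scratch2);  // hole check; r0 (key) and r1 (receiver) untouched
    deferred->Branch(eq);  // deferred code sees its expected registers
    __ mov(r0, ip);        // commit the loaded value as the result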
5644 // Make sure that the expected number of instructions are generated. | 5642 // Make sure that the expected number of instructions are generated. |
5645 ASSERT_EQ(kInlinedKeyedLoadInstructions, | 5643 ASSERT_EQ(kInlinedKeyedLoadInstructions, |
5646 masm_->InstructionsGeneratedSince(&check_inlined_codesize)); | 5644 masm_->InstructionsGeneratedSince(&check_inlined_codesize)); |
5647 } | 5645 } |
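Why the instruction count matters: the inline cache patching code rewrites specific instructions in this sequence in place (for example the null map placeholder loaded above), locating them by fixed offsets, so the sequence must be exactly kInlinedKeyedLoadInstructions long; a constant pool emitted in the middle would shift every offset, hence the BlockConstPoolScope. A self-contained sketch of the counting idiom, using a hypothetical mini-assembler (V8's real ARM assembler exposes the same shape via pc_offset() and kInstrSize):

    #include <cassert>
    #include <cstdint>
    #include <vector>

    // Hypothetical fixed-width assembler: 4-byte instructions, as on ARM.
    class MiniAssembler {
     public:
      static const int kInstrSize = 4;
      void Emit(uint32_t instr) { buffer_.push_back(instr); }
      int pc_offset() const {
        return static_cast<int>(buffer_.size()) * kInstrSize;
      }
      int InstructionsGeneratedSince(int label_pc) const {
        return (pc_offset() - label_pc) / kInstrSize;
      }
     private:
      std::vector<uint32_t> buffer_;
    };

    int main() {
      MiniAssembler masm;
      int check = masm.pc_offset();  // role of check_inlined_codesize
      for (int i = 0; i < 20; i++) masm.Emit(0xE1A00000u);  // placeholders
      // Mirrors ASSERT_EQ(kInlinedKeyedLoadInstructions,
      //     masm_->InstructionsGeneratedSince(&check_inlined_codesize)).
      assert(masm.InstructionsGeneratedSince(check) == 20);
      return 0;
    }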
5648 | 5646 |
5649 deferred->BindExit(); | 5647 deferred->BindExit(); |
5650 } | 5648 } |
5651 } | 5649 } |
5652 | 5650 |
5653 | 5651 |
(...skipping 4306 matching lines...)
9960 | 9958 |
9961 // Just jump to runtime to add the two strings. | 9959 // Just jump to runtime to add the two strings. |
9962 __ bind(&string_add_runtime); | 9960 __ bind(&string_add_runtime); |
9963 __ TailCallRuntime(Runtime::kStringAdd, 2, 1); | 9961 __ TailCallRuntime(Runtime::kStringAdd, 2, 1); |
9964 } | 9962 } |
9965 | 9963 |
9966 | 9964 |
9967 #undef __ | 9965 #undef __ |
9968 | 9966 |
9969 } } // namespace v8::internal | 9967 } } // namespace v8::internal |