OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 5774 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
5785 | 5785 |
5786 | 5786 |
5787 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) { | 5787 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) { |
5788 Register object = ToRegister(instr->value()); | 5788 Register object = ToRegister(instr->value()); |
5789 Register map = ToRegister(instr->map()); | 5789 Register map = ToRegister(instr->map()); |
5790 __ lw(scratch0(), FieldMemOperand(object, HeapObject::kMapOffset)); | 5790 __ lw(scratch0(), FieldMemOperand(object, HeapObject::kMapOffset)); |
5791 DeoptimizeIf(ne, instr->environment(), map, Operand(scratch0())); | 5791 DeoptimizeIf(ne, instr->environment(), map, Operand(scratch0())); |
5792 } | 5792 } |
5793 | 5793 |
5794 | 5794 |
5795 void LCodeGen::DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr, | |
5796 Register result, | |
5797 Register object, | |
5798 Register index) { | |
5799 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); | |
5800 __ Push(object); | |
5801 __ Push(index); | |
Paul Lind
2014/03/31 22:39:59
You should combine the two Pushes, to save an instruction.
| |
5802 __ mov(cp, zero_reg); | |
5803 __ CallRuntimeSaveDoubles(Runtime::kLoadMutableDouble); | |
5804 RecordSafepointWithRegisters( | |
5805 instr->pointer_map(), 2, Safepoint::kNoLazyDeopt); | |
5806 __ StoreToSafepointRegisterSlot(v0, result); | |
5807 } | |
5808 | |
5809 | |
5795 void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) { | 5810 void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) { |
5811 class DeferredLoadMutableDouble V8_FINAL : public LDeferredCode { | |
5812 public: | |
5813 DeferredLoadMutableDouble(LCodeGen* codegen, | |
5814 LLoadFieldByIndex* instr, | |
5815 Register result, | |
5816 Register object, | |
5817 Register index) | |
5818 : LDeferredCode(codegen), | |
5819 instr_(instr), | |
5820 result_(result), | |
5821 object_(object), | |
5822 index_(index) { | |
5823 } | |
5824 virtual void Generate() V8_OVERRIDE { | |
5825 codegen()->DoDeferredLoadMutableDouble(instr_, result_, object_, index_); | |
5826 } | |
5827 virtual LInstruction* instr() V8_OVERRIDE { return instr_; } | |
5828 private: | |
5829 LLoadFieldByIndex* instr_; | |
5830 Register result_; | |
5831 Register object_; | |
5832 Register index_; | |
5833 }; | |
5834 | |
5796 Register object = ToRegister(instr->object()); | 5835 Register object = ToRegister(instr->object()); |
5797 Register index = ToRegister(instr->index()); | 5836 Register index = ToRegister(instr->index()); |
5798 Register result = ToRegister(instr->result()); | 5837 Register result = ToRegister(instr->result()); |
5799 Register scratch = scratch0(); | 5838 Register scratch = scratch0(); |
5800 | 5839 |
5840 DeferredLoadMutableDouble* deferred; | |
5841 deferred = new(zone()) DeferredLoadMutableDouble( | |
5842 this, instr, result, object, index); | |
5843 | |
5801 Label out_of_object, done; | 5844 Label out_of_object, done; |
5845 | |
5846 __ And(scratch, index, Operand(Smi::FromInt(1))); | |
5847 __ Branch(deferred->entry(), ne, scratch, Operand(zero_reg)); | |
5848 __ sra(index, index, 1); | |
5849 | |
5802 __ Branch(USE_DELAY_SLOT, &out_of_object, lt, index, Operand(zero_reg)); | 5850 __ Branch(USE_DELAY_SLOT, &out_of_object, lt, index, Operand(zero_reg)); |
5803 __ sll(scratch, index, kPointerSizeLog2 - kSmiTagSize); // In delay slot. | 5851 __ sll(scratch, index, kPointerSizeLog2 - kSmiTagSize); // In delay slot. |
5804 | 5852 |
5805 STATIC_ASSERT(kPointerSizeLog2 > kSmiTagSize); | 5853 STATIC_ASSERT(kPointerSizeLog2 > kSmiTagSize); |
5806 __ Addu(scratch, object, scratch); | 5854 __ Addu(scratch, object, scratch); |
5807 __ lw(result, FieldMemOperand(scratch, JSObject::kHeaderSize)); | 5855 __ lw(result, FieldMemOperand(scratch, JSObject::kHeaderSize)); |
5808 | 5856 |
5809 __ Branch(&done); | 5857 __ Branch(&done); |
5810 | 5858 |
5811 __ bind(&out_of_object); | 5859 __ bind(&out_of_object); |
5812 __ lw(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); | 5860 __ lw(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); |
5813 // Index is equal to negated out of object property index plus 1. | 5861 // Index is equal to negated out of object property index plus 1. |
5814 __ Subu(scratch, result, scratch); | 5862 __ Subu(scratch, result, scratch); |
5815 __ lw(result, FieldMemOperand(scratch, | 5863 __ lw(result, FieldMemOperand(scratch, |
5816 FixedArray::kHeaderSize - kPointerSize)); | 5864 FixedArray::kHeaderSize - kPointerSize)); |
5865 __ bind(deferred->exit()); | |
5817 __ bind(&done); | 5866 __ bind(&done); |
5818 } | 5867 } |
5819 | 5868 |
5820 | 5869 |
5821 #undef __ | 5870 #undef __ |
5822 | 5871 |
5823 } } // namespace v8::internal | 5872 } } // namespace v8::internal |
OLD | NEW |