OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 5698 matching lines...) | |
5709 | 5709 |
5710 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) { | 5710 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) { |
5711 Register object = ToRegister(instr->value()); | 5711 Register object = ToRegister(instr->value()); |
5712 Register map = ToRegister(instr->map()); | 5712 Register map = ToRegister(instr->map()); |
5713 __ ldr(scratch0(), FieldMemOperand(object, HeapObject::kMapOffset)); | 5713 __ ldr(scratch0(), FieldMemOperand(object, HeapObject::kMapOffset)); |
5714 __ cmp(map, scratch0()); | 5714 __ cmp(map, scratch0()); |
5715 DeoptimizeIf(ne, instr->environment()); | 5715 DeoptimizeIf(ne, instr->environment()); |
5716 } | 5716 } |
5717 | 5717 |
5718 | 5718 |
5719 void LCodeGen::DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr, | |
5720 Register result, | |
5721 Register object, | |
5722 Register index) { | |
5723 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); | |
5724 __ Push(object); | |
5725 __ Push(index); | |
5726 __ mov(cp, Operand::Zero()); | |
5727 __ CallRuntimeSaveDoubles(Runtime::kLoadMutableDouble); | |
5728 RecordSafepointWithRegisters( | |
5729 instr->pointer_map(), 1, Safepoint::kNoLazyDeopt); | |
5730 __ StoreToSafepointRegisterSlot(r0, result); | |
5731 } | |
5732 | |
5733 | |
5719 void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) { | 5734 void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) { |
5735 class DeferredLoadMutableDouble V8_FINAL : public LDeferredCode { | |
5736 public: | |
5737 DeferredLoadMutableDouble(LCodeGen* codegen, | |
5738 LLoadFieldByIndex* instr, | |
5739 Register result, | |
5740 Register object, | |
5741 Register index) | |
5742 : LDeferredCode(codegen), | |
5743 instr_(instr), | |
5744 result_(result), | |
5745 object_(object), | |
5746 index_(index) { | |
5747 } | |
5748 virtual void Generate() V8_OVERRIDE { | |
5749 codegen()->DoDeferredLoadMutableDouble(instr_, result_, object_, index_); | |
5750 } | |
5751 virtual LInstruction* instr() V8_OVERRIDE { return instr_; } | |
5752 private: | |
5753 LLoadFieldByIndex* instr_; | |
5754 Register result_; | |
5755 Register object_; | |
5756 Register index_; | |
5757 }; | |
5758 | |
5720 Register object = ToRegister(instr->object()); | 5759 Register object = ToRegister(instr->object()); |
5721 Register index = ToRegister(instr->index()); | 5760 Register index = ToRegister(instr->index()); |
5722 Register result = ToRegister(instr->result()); | 5761 Register result = ToRegister(instr->result()); |
5723 Register scratch = scratch0(); | 5762 Register scratch = scratch0(); |
5724 | 5763 |
5764 DeferredLoadMutableDouble* deferred; | |
5765 deferred = new(zone()) DeferredLoadMutableDouble( | |
5766 this, instr, result, object, index); | |
5767 | |
5725 Label out_of_object, done; | 5768 Label out_of_object, done; |
5769 | |
5770 __ and_(scratch, index, Operand(Smi::FromInt(1))); | |
5771 __ cmp(scratch, Operand::Zero()); | |
Igor Sheludko
2014/03/31 09:50:16
and + cmp => tst(index, Operand(Smi::FromInt(1)));
Toon Verwaest
2014/03/31 09:56:30
Done.
| |
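A minimal sketch of the reviewer's suggestion: on ARM, tst sets the condition flags from a bitwise AND without writing a destination register, so the and_/cmp pair above collapses into a single instruction (presumably what the "Done." reply refers to; the follow-up patchset is not shown here):

  __ tst(index, Operand(Smi::FromInt(1)));
  __ b(ne, deferred->entry());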
5772 __ b(ne, deferred->entry()); | |
5773 __ mov(index, Operand(index, ASR, 1)); | |
5774 | |
5726 __ cmp(index, Operand::Zero()); | 5775 __ cmp(index, Operand::Zero()); |
5727 __ b(lt, &out_of_object); | 5776 __ b(lt, &out_of_object); |
5728 | 5777 |
5729 __ add(scratch, object, Operand::PointerOffsetFromSmiKey(index)); | 5778 __ add(scratch, object, Operand::PointerOffsetFromSmiKey(index)); |
5730 __ ldr(result, FieldMemOperand(scratch, JSObject::kHeaderSize)); | 5779 __ ldr(result, FieldMemOperand(scratch, JSObject::kHeaderSize)); |
5731 | 5780 |
5732 __ b(&done); | 5781 __ b(&done); |
5733 | 5782 |
5734 __ bind(&out_of_object); | 5783 __ bind(&out_of_object); |
5735 __ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); | 5784 __ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); |
5736 // Index is equal to negated out of object property index plus 1. | 5785 // Index is equal to negated out of object property index plus 1. |
5737 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2); | 5786 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2); |
5738 __ sub(scratch, result, Operand::PointerOffsetFromSmiKey(index)); | 5787 __ sub(scratch, result, Operand::PointerOffsetFromSmiKey(index)); |
5739 __ ldr(result, FieldMemOperand(scratch, | 5788 __ ldr(result, FieldMemOperand(scratch, |
5740 FixedArray::kHeaderSize - kPointerSize)); | 5789 FixedArray::kHeaderSize - kPointerSize)); |
5790 __ bind(deferred->exit()); | |
5741 __ bind(&done); | 5791 __ bind(&done); |
5742 } | 5792 } |
5743 | 5793 |
5744 | 5794 |
5745 #undef __ | 5795 #undef __ |
5746 | 5796 |
5747 } } // namespace v8::internal | 5797 } } // namespace v8::internal |
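As a reader's note, a hypothetical helper (not V8 source) illustrating the index encoding that DoLoadFieldByIndex appears to decode, inferred from the emitted instructions above: the smi payload looks like (field_index << 1) | is_double_bit, where a set low bit marks a mutable HeapNumber field (handled by the deferred runtime call), and after the shift a negative field_index addresses the out-of-object properties array per the "negated out of object property index plus 1" comment.

  // Hypothetical sketch; operates on the untagged smi payload, whereas the
  // assembly above applies the analogous AND/ASR to the tagged smi.
  int DecodeFieldIndex(int payload, bool* is_double, bool* in_object) {
    *is_double = (payload & 1) != 0;   // bit tested via and_/cmp (or tst)
    int field_index = payload >> 1;    // corresponds to the ASR #1 above
    *in_object = field_index >= 0;
    // In-object: field index relative to the object's header. Out-of-object:
    // slot in the properties FixedArray, recovered as -field_index - 1.
    return *in_object ? field_index : -field_index - 1;
  }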