OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 5698 matching lines...)
5709 | 5709 |
5710 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) { | 5710 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) { |
5711 Register object = ToRegister(instr->value()); | 5711 Register object = ToRegister(instr->value()); |
5712 Register map = ToRegister(instr->map()); | 5712 Register map = ToRegister(instr->map()); |
5713 __ ldr(scratch0(), FieldMemOperand(object, HeapObject::kMapOffset)); | 5713 __ ldr(scratch0(), FieldMemOperand(object, HeapObject::kMapOffset)); |
5714 __ cmp(map, scratch0()); | 5714 __ cmp(map, scratch0()); |
5715 DeoptimizeIf(ne, instr->environment()); | 5715 DeoptimizeIf(ne, instr->environment()); |
5716 } | 5716 } |
5717 | 5717 |
5718 | 5718 |
5719 void LCodeGen::DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr, | |
5720 Register result, | |
5721 Register object, | |
5722 Register index) { | |
5723 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); | |
5724 __ Push(object); | |
5725 __ Push(index); | |
5726 __ mov(cp, Operand::Zero()); | |
5727 __ CallRuntimeSaveDoubles(Runtime::kLoadMutableDouble); | |
5728 RecordSafepointWithRegisters( | |
5729 instr->pointer_map(), 1, Safepoint::kNoLazyDeopt); | |
5730 __ StoreToSafepointRegisterSlot(r0, result); | |
5731 } | |
5732 | |
5733 | |
5734 void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) { | 5719 void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) { |
5735 class DeferredLoadMutableDouble V8_FINAL : public LDeferredCode { | |
5736 public: | |
5737 DeferredLoadMutableDouble(LCodeGen* codegen, | |
5738 LLoadFieldByIndex* instr, | |
5739 Register result, | |
5740 Register object, | |
5741 Register index) | |
5742 : LDeferredCode(codegen), | |
5743 instr_(instr), | |
5744 result_(result), | |
5745 object_(object), | |
5746 index_(index) { | |
5747 } | |
5748 virtual void Generate() V8_OVERRIDE { | |
5749 codegen()->DoDeferredLoadMutableDouble(instr_, result_, object_, index_); | |
5750 } | |
5751 virtual LInstruction* instr() V8_OVERRIDE { return instr_; } | |
5752 private: | |
5753 LLoadFieldByIndex* instr_; | |
5754 Register result_; | |
5755 Register object_; | |
5756 Register index_; | |
5757 }; | |
5758 | |
5759 Register object = ToRegister(instr->object()); | 5720 Register object = ToRegister(instr->object()); |
5760 Register index = ToRegister(instr->index()); | 5721 Register index = ToRegister(instr->index()); |
5761 Register result = ToRegister(instr->result()); | 5722 Register result = ToRegister(instr->result()); |
5762 Register scratch = scratch0(); | 5723 Register scratch = scratch0(); |
5763 | 5724 |
5764 DeferredLoadMutableDouble* deferred; | |
5765 deferred = new(zone()) DeferredLoadMutableDouble( | |
5766 this, instr, result, object, index); | |
5767 | |
5768 Label out_of_object, done; | 5725 Label out_of_object, done; |
5769 | |
5770 __ tst(index, Operand(Smi::FromInt(1))); | |
5771 __ b(ne, deferred->entry()); | |
5772 __ mov(index, Operand(index, ASR, 1)); | |
5773 | |
5774 __ cmp(index, Operand::Zero()); | 5726 __ cmp(index, Operand::Zero()); |
5775 __ b(lt, &out_of_object); | 5727 __ b(lt, &out_of_object); |
5776 | 5728 |
5777 __ add(scratch, object, Operand::PointerOffsetFromSmiKey(index)); | 5729 __ add(scratch, object, Operand::PointerOffsetFromSmiKey(index)); |
5778 __ ldr(result, FieldMemOperand(scratch, JSObject::kHeaderSize)); | 5730 __ ldr(result, FieldMemOperand(scratch, JSObject::kHeaderSize)); |
5779 | 5731 |
5780 __ b(&done); | 5732 __ b(&done); |
5781 | 5733 |
5782 __ bind(&out_of_object); | 5734 __ bind(&out_of_object); |
5783 __ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); | 5735 __ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); |
5784 // Index is equal to negated out of object property index plus 1. | 5736 // Index is equal to negated out of object property index plus 1. |
5785 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2); | 5737 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2); |
5786 __ sub(scratch, result, Operand::PointerOffsetFromSmiKey(index)); | 5738 __ sub(scratch, result, Operand::PointerOffsetFromSmiKey(index)); |
5787 __ ldr(result, FieldMemOperand(scratch, | 5739 __ ldr(result, FieldMemOperand(scratch, |
5788 FixedArray::kHeaderSize - kPointerSize)); | 5740 FixedArray::kHeaderSize - kPointerSize)); |
5789 __ bind(deferred->exit()); | |
5790 __ bind(&done); | 5741 __ bind(&done); |
5791 } | 5742 } |
5792 | 5743 |
5793 | 5744 |
5794 #undef __ | 5745 #undef __ |
5795 | 5746 |
5796 } } // namespace v8::internal | 5747 } } // namespace v8::internal |
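
The change above drops the deferred mutable-double path from DoLoadFieldByIndex, leaving only the plain in-object / out-of-object load. For readers who want the index arithmetic spelled out, the following is a minimal, self-contained C++ sketch of the encoding the assembly appears to use: a non-negative index selects an in-object field, a negative index selects a slot in the properties backing store ("negated out of object property index plus 1"), and in the OLD code the low bit of the untagged index additionally flags a mutable-double (heap-number) field handled by the deferred runtime call. The names FieldRef and decode_field_index are invented for illustration only and are not part of V8, which operates on tagged Smi values and raw object pointers.

// Hypothetical sketch of the index decoding in DoLoadFieldByIndex.
#include <cstdio>

struct FieldRef {
  bool out_of_object;   // true: slot lives in the properties backing store
  bool mutable_double;  // OLD code only: low bit flags a boxed double field
  int slot;             // zero-based slot inside the chosen storage
};

// `encoded` models the untagged value of instr->index().  In the OLD code
// the low bit marks a mutable-double field and is shifted away; in the NEW
// code no such bit exists.
static FieldRef decode_field_index(int encoded, bool old_encoding) {
  FieldRef ref{false, false, 0};
  if (old_encoding) {
    ref.mutable_double = (encoded & 1) != 0;  // __ tst(index, Smi::FromInt(1))
    encoded >>= 1;                            // __ mov(index, index ASR 1)
  }
  if (encoded >= 0) {
    ref.out_of_object = false;  // in-object: offset = kHeaderSize + slot * kPointerSize
    ref.slot = encoded;
  } else {
    ref.out_of_object = true;   // properties FixedArray
    ref.slot = -encoded - 1;    // "negated out of object property index plus 1"
  }
  return ref;
}

int main() {
  // In-object field 2 under the OLD encoding, not a mutable double: encoded = 2 << 1.
  FieldRef a = decode_field_index(4, /*old_encoding=*/true);
  std::printf("in-object=%d slot=%d double=%d\n", !a.out_of_object, a.slot, a.mutable_double);

  // Out-of-object property 0 under the NEW encoding: encoded = -1.
  FieldRef b = decode_field_index(-1, /*old_encoding=*/false);
  std::printf("in-object=%d slot=%d double=%d\n", !b.out_of_object, b.slot, b.mutable_double);
  return 0;
}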