OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 5854 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5865 | 5865 |
5866 __ Bind(&global_object); | 5866 __ Bind(&global_object); |
5867 __ Ldr(result, FieldMemOperand(function, JSFunction::kContextOffset)); | 5867 __ Ldr(result, FieldMemOperand(function, JSFunction::kContextOffset)); |
5868 __ Ldr(result, ContextMemOperand(result, Context::GLOBAL_OBJECT_INDEX)); | 5868 __ Ldr(result, ContextMemOperand(result, Context::GLOBAL_OBJECT_INDEX)); |
5869 __ Ldr(result, FieldMemOperand(result, GlobalObject::kGlobalReceiverOffset)); | 5869 __ Ldr(result, FieldMemOperand(result, GlobalObject::kGlobalReceiverOffset)); |
5870 | 5870 |
5871 __ Bind(&done); | 5871 __ Bind(&done); |
5872 } | 5872 } |
5873 | 5873 |
5874 | 5874 |
| 5875 void LCodeGen::DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr, |
| 5876 Register result, |
| 5877 Register object, |
| 5878 Register index) { |
| 5879 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); |
| 5880 __ Push(object); |
| 5881 __ Push(index); |
| 5882 __ Mov(cp, 0); |
| 5883 __ CallRuntimeSaveDoubles(Runtime::kLoadMutableDouble); |
| 5884 RecordSafepointWithRegisters( |
| 5885 instr->pointer_map(), 1, Safepoint::kNoLazyDeopt); |
| 5886 __ StoreToSafepointRegisterSlot(x0, result); |
| 5887 } |
| 5888 |
| 5889 |
// Loads a field from `object` selected by the smi-tagged `index`.
// The encoded index carries two pieces of information:
//   - its lowest payload bit flags "field is a mutable heap double"
//     (handled out of line via DoDeferredLoadMutableDouble);
//   - its sign selects in-object fields (>= 0) vs. the out-of-object
//     properties backing store (< 0).
void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) {
  // Deferred code object that forwards to DoDeferredLoadMutableDouble with
  // the registers captured at generation time.
  class DeferredLoadMutableDouble V8_FINAL : public LDeferredCode {
   public:
    DeferredLoadMutableDouble(LCodeGen* codegen,
                              LLoadFieldByIndex* instr,
                              Register result,
                              Register object,
                              Register index)
        : LDeferredCode(codegen),
          instr_(instr),
          result_(result),
          object_(object),
          index_(index) {
    }
    virtual void Generate() V8_OVERRIDE {
      codegen()->DoDeferredLoadMutableDouble(instr_, result_, object_, index_);
    }
    virtual LInstruction* instr() V8_OVERRIDE { return instr_; }
   private:
    LLoadFieldByIndex* instr_;
    Register result_;
    Register object_;
    Register index_;
  };
  Register object = ToRegister(instr->object());
  Register index = ToRegister(instr->index());
  Register result = ToRegister(instr->result());

  __ AssertSmi(index);

  DeferredLoadMutableDouble* deferred;
  deferred = new(zone()) DeferredLoadMutableDouble(
      this, instr, result, object, index);

  Label out_of_object, done;

  // If the flag bit (the low bit of the smi payload, i.e. the bit pattern of
  // Smi::FromInt(1)) is set, the field holds a mutable double: take the
  // deferred path, which calls the runtime.
  __ TestAndBranchIfAnySet(
      index, reinterpret_cast<uint64_t>(Smi::FromInt(1)), deferred->entry());
  // Strip the flag bit: arithmetic shift right by one halves the payload
  // while keeping the value a valid (sign-preserved) smi.
  __ Mov(index, Operand(index, ASR, 1));

  // Negative index => the field lives in the out-of-object properties array.
  __ Cmp(index, Smi::FromInt(0));
  __ B(lt, &out_of_object);

  // In-object case: scale the untagged index by the pointer size and load
  // relative to the object's header.
  STATIC_ASSERT(kPointerSizeLog2 > kSmiTagSize);
  __ Add(result, object, Operand::UntagSmiAndScale(index, kPointerSizeLog2));
  __ Ldr(result, FieldMemOperand(result, JSObject::kHeaderSize));

  __ B(&done);

  __ Bind(&out_of_object);
  __ Ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
  // Index is equal to negated out of object property index plus 1.
  __ Sub(result, result, Operand::UntagSmiAndScale(index, kPointerSizeLog2));
  __ Ldr(result, FieldMemOperand(result,
                                 FixedArray::kHeaderSize - kPointerSize));
  // The deferred path rejoins here with its runtime result already in
  // `result`.
  __ Bind(deferred->exit());
  __ Bind(&done);
}
5900 | 5948 |
5901 } } // namespace v8::internal | 5949 } } // namespace v8::internal |
OLD | NEW |