OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 5861 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5872 | 5872 |
5873 __ Bind(&global_object); | 5873 __ Bind(&global_object); |
5874 __ Ldr(result, FieldMemOperand(function, JSFunction::kContextOffset)); | 5874 __ Ldr(result, FieldMemOperand(function, JSFunction::kContextOffset)); |
5875 __ Ldr(result, ContextMemOperand(result, Context::GLOBAL_OBJECT_INDEX)); | 5875 __ Ldr(result, ContextMemOperand(result, Context::GLOBAL_OBJECT_INDEX)); |
5876 __ Ldr(result, FieldMemOperand(result, GlobalObject::kGlobalReceiverOffset)); | 5876 __ Ldr(result, FieldMemOperand(result, GlobalObject::kGlobalReceiverOffset)); |
5877 | 5877 |
5878 __ Bind(&done); | 5878 __ Bind(&done); |
5879 } | 5879 } |
5880 | 5880 |
5881 | 5881 |
// Out-of-line (deferred) code for LLoadFieldByIndex when the field holds a
// mutable HeapNumber: calls the runtime to produce a freshly boxed copy of
// the value instead of returning the shared mutable box.
// All registers are preserved via the safepoint-with-registers protocol;
// the runtime result (in x0) is written into |result|'s safepoint slot so
// it is restored into |result| when the scope pops the registers.
void LCodeGen::DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
                                           Register result,
                                           Register object,
                                           Register index) {
  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
  // Runtime arguments: the holder object and the smi-encoded field index.
  __ Push(object);
  __ Push(index);
  // Clear cp — NOTE(review): presumably this runtime call needs no context;
  // confirm against the other platform ports of this function.
  __ Mov(cp, 0);
  __ CallRuntimeSaveDoubles(Runtime::kLoadMutableDouble);
  // Two arguments were pushed above, hence the argument count of 2.
  RecordSafepointWithRegisters(
      instr->pointer_map(), 2, Safepoint::kNoLazyDeopt);
  __ StoreToSafepointRegisterSlot(x0, result);
}
| 5895 |
| 5896 |
// Loads a field of |object| selected by a dynamically computed index.
// The index register holds a smi with an extra flag in its low payload bit
// (tested via Smi::FromInt(1) below): when set, the field is a mutable
// HeapNumber and the load is delegated to deferred runtime code; otherwise
// the flag bit is shifted away and the remaining smi index selects either
// an in-object field (index >= 0) or a slot in the out-of-object properties
// array (index < 0, see the negation comment below).
void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) {
  // Deferred path: boxes a fresh HeapNumber via DoDeferredLoadMutableDouble.
  class DeferredLoadMutableDouble V8_FINAL : public LDeferredCode {
   public:
    DeferredLoadMutableDouble(LCodeGen* codegen,
                              LLoadFieldByIndex* instr,
                              Register result,
                              Register object,
                              Register index)
        : LDeferredCode(codegen),
          instr_(instr),
          result_(result),
          object_(object),
          index_(index) {
    }
    virtual void Generate() V8_OVERRIDE {
      codegen()->DoDeferredLoadMutableDouble(instr_, result_, object_, index_);
    }
    virtual LInstruction* instr() V8_OVERRIDE { return instr_; }
   private:
    LLoadFieldByIndex* instr_;
    Register result_;
    Register object_;
    Register index_;
  };
  Register object = ToRegister(instr->object());
  Register index = ToRegister(instr->index());
  Register result = ToRegister(instr->result());

  __ AssertSmi(index);

  DeferredLoadMutableDouble* deferred;
  deferred = new(zone()) DeferredLoadMutableDouble(
      this, instr, result, object, index);

  Label out_of_object, done;

  // If the mutable-double flag bit is set, take the deferred path;
  // otherwise arithmetic-shift the flag bit out of the smi index.
  __ TestAndBranchIfAnySet(
      index, reinterpret_cast<uint64_t>(Smi::FromInt(1)), deferred->entry());
  __ Mov(index, Operand(index, ASR, 1));

  // Negative index => the field lives in the out-of-object properties array.
  __ Cmp(index, Smi::FromInt(0));
  __ B(lt, &out_of_object);

  // In-object field: result = object + untagged(index) * kPointerSize.
  STATIC_ASSERT(kPointerSizeLog2 > kSmiTagSize);
  __ Add(result, object, Operand::UntagSmiAndScale(index, kPointerSizeLog2));
  __ Ldr(result, FieldMemOperand(result, JSObject::kHeaderSize));

  __ B(&done);

  __ Bind(&out_of_object);
  __ Ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
  // Index is equal to negated out of object property index plus 1.
  __ Sub(result, result, Operand::UntagSmiAndScale(index, kPointerSizeLog2));
  __ Ldr(result, FieldMemOperand(result,
                                 FixedArray::kHeaderSize - kPointerSize));
  // The deferred code rejoins here, skipping the inline loads above.
  __ Bind(deferred->exit());
  __ Bind(&done);
}
5907 | 5955 |
5908 } } // namespace v8::internal | 5956 } } // namespace v8::internal |
OLD | NEW |