| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_ARM64 | 5 #if V8_TARGET_ARCH_ARM64 |
| 6 | 6 |
| 7 #include "src/codegen.h" | 7 #include "src/codegen.h" |
| 8 #include "src/ic/ic.h" | 8 #include "src/ic/ic.h" |
| 9 #include "src/ic/ic-compiler.h" | 9 #include "src/ic/ic-compiler.h" |
| 10 #include "src/ic/stub-cache.h" | 10 #include "src/ic/stub-cache.h" |
| (...skipping 103 matching lines...) |
| 114 JSObject::kPropertiesOffset)); | 114 JSObject::kPropertiesOffset)); |
| 115 GenerateDictionaryLoad(masm, &slow, dictionary, | 115 GenerateDictionaryLoad(masm, &slow, dictionary, |
| 116 LoadDescriptor::NameRegister(), x0, x3, x4); | 116 LoadDescriptor::NameRegister(), x0, x3, x4); |
| 117 __ Ret(); | 117 __ Ret(); |
| 118 | 118 |
| 119 // Dictionary load failed, go slow (but don't miss). | 119 // Dictionary load failed, go slow (but don't miss). |
| 120 __ Bind(&slow); | 120 __ Bind(&slow); |
| 121 GenerateRuntimeGetProperty(masm); | 121 GenerateRuntimeGetProperty(masm); |
| 122 } | 122 } |
| 123 | 123 |
| 124 | |
| 125 void LoadIC::GenerateMiss(MacroAssembler* masm) { | |
| 126 // The return address is in lr. | |
| 127 Isolate* isolate = masm->isolate(); | |
| 128 ASM_LOCATION("LoadIC::GenerateMiss"); | |
| 129 | |
| 130 DCHECK(!AreAliased(x4, x5, LoadWithVectorDescriptor::SlotRegister(), | |
| 131 LoadWithVectorDescriptor::VectorRegister())); | |
| 132 __ IncrementCounter(isolate->counters()->ic_load_miss(), 1, x4, x5); | |
| 133 | |
| 134 // Perform tail call to the entry. | |
| 135 __ Push(LoadWithVectorDescriptor::ReceiverRegister(), | |
| 136 LoadWithVectorDescriptor::NameRegister(), | |
| 137 LoadWithVectorDescriptor::SlotRegister(), | |
| 138 LoadWithVectorDescriptor::VectorRegister()); | |
| 139 __ TailCallRuntime(Runtime::kLoadIC_Miss); | |
| 140 } | |
| 141 | |
| 142 void LoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) { | 124 void LoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) { |
| 143 // The return address is in lr. | 125 // The return address is in lr. |
| 144 __ Push(LoadDescriptor::ReceiverRegister(), LoadDescriptor::NameRegister()); | 126 __ Push(LoadDescriptor::ReceiverRegister(), LoadDescriptor::NameRegister()); |
| 145 | 127 |
| 146 // Do tail-call to runtime routine. | 128 // Do tail-call to runtime routine. |
| 147 __ TailCallRuntime(Runtime::kGetProperty); | 129 __ TailCallRuntime(Runtime::kGetProperty); |
| 148 } | 130 } |
| 149 | 131 |
| 150 | |
| 151 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) { | |
| 152 // The return address is in lr. | |
| 153 Isolate* isolate = masm->isolate(); | |
| 154 | |
| 155 DCHECK(!AreAliased(x10, x11, LoadWithVectorDescriptor::SlotRegister(), | |
| 156 LoadWithVectorDescriptor::VectorRegister())); | |
| 157 __ IncrementCounter(isolate->counters()->ic_keyed_load_miss(), 1, x10, x11); | |
| 158 | |
| 159 __ Push(LoadWithVectorDescriptor::ReceiverRegister(), | |
| 160 LoadWithVectorDescriptor::NameRegister(), | |
| 161 LoadWithVectorDescriptor::SlotRegister(), | |
| 162 LoadWithVectorDescriptor::VectorRegister()); | |
| 163 | |
| 164 // Perform tail call to the entry. | |
| 165 __ TailCallRuntime(Runtime::kKeyedLoadIC_Miss); | |
| 166 } | |
| 167 | |
| 168 void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) { | |
| 169 // The return address is in lr. | |
| 170 __ Push(LoadDescriptor::ReceiverRegister(), LoadDescriptor::NameRegister()); | |
| 171 | |
| 172 // Do tail-call to runtime routine. | |
| 173 __ TailCallRuntime(Runtime::kKeyedGetProperty); | |
| 174 } | |
| 175 | |
| 176 static void StoreIC_PushArgs(MacroAssembler* masm) { | 132 static void StoreIC_PushArgs(MacroAssembler* masm) { |
| 177 __ Push(StoreWithVectorDescriptor::ValueRegister(), | 133 __ Push(StoreWithVectorDescriptor::ValueRegister(), |
| 178 StoreWithVectorDescriptor::SlotRegister(), | 134 StoreWithVectorDescriptor::SlotRegister(), |
| 179 StoreWithVectorDescriptor::VectorRegister(), | 135 StoreWithVectorDescriptor::VectorRegister(), |
| 180 StoreWithVectorDescriptor::ReceiverRegister(), | 136 StoreWithVectorDescriptor::ReceiverRegister(), |
| 181 StoreWithVectorDescriptor::NameRegister()); | 137 StoreWithVectorDescriptor::NameRegister()); |
| 182 } | 138 } |
| 183 | 139 |
| 184 | 140 |
| 185 void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) { | 141 void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) { |
| (...skipping 127 matching lines...) |
| 313 } else { | 269 } else { |
| 314 DCHECK(to_patch->Mask(TestBranchMask) == TBNZ); | 270 DCHECK(to_patch->Mask(TestBranchMask) == TBNZ); |
| 315 // This is JumpIfSmi(smi_reg, branch_imm). | 271 // This is JumpIfSmi(smi_reg, branch_imm). |
| 316 patcher.tbz(smi_reg, 0, branch_imm); | 272 patcher.tbz(smi_reg, 0, branch_imm); |
| 317 } | 273 } |
| 318 } | 274 } |
| 319 } // namespace internal | 275 } // namespace internal |
| 320 } // namespace v8 | 276 } // namespace v8 |
| 321 | 277 |
| 322 #endif // V8_TARGET_ARCH_ARM64 | 278 #endif // V8_TARGET_ARCH_ARM64 |