| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_ARM64 | 5 #if V8_TARGET_ARCH_ARM64 |
| 6 | 6 |
| 7 #include "src/codegen.h" | 7 #include "src/codegen.h" |
| 8 #include "src/ic/ic.h" | 8 #include "src/ic/ic.h" |
| 9 #include "src/ic/ic-compiler.h" | 9 #include "src/ic/ic-compiler.h" |
| 10 #include "src/ic/stub-cache.h" | 10 #include "src/ic/stub-cache.h" |
| (...skipping 146 matching lines...) | |
| 157 // | 157 // |
| 158 // elements - holds the elements of the receiver and its prototypes. Clobbered. | 158 // elements - holds the elements of the receiver and its prototypes. Clobbered. |
| 159 // | 159 // |
| 160 // result - holds the result on exit if the load succeeded. | 160 // result - holds the result on exit if the load succeeded. |
| 161 // Allowed to be the same as 'receiver' or 'key'. | 161 // Allowed to be the same as 'receiver' or 'key'. |
| 162 // Unchanged on bailout so 'receiver' and 'key' can be safely | 162 // Unchanged on bailout so 'receiver' and 'key' can be safely |
| 163 // used by further computation. | 163 // used by further computation. |
| 164 static void GenerateFastArrayLoad(MacroAssembler* masm, Register receiver, | 164 static void GenerateFastArrayLoad(MacroAssembler* masm, Register receiver, |
| 165 Register key, Register elements, | 165 Register key, Register elements, |
| 166 Register scratch1, Register scratch2, | 166 Register scratch1, Register scratch2, |
| 167 Register result, Label* slow, | 167 Register result, Label* slow) { |
| 168 LanguageMode language_mode) { | |
| 169 DCHECK(!AreAliased(receiver, key, elements, scratch1, scratch2)); | 168 DCHECK(!AreAliased(receiver, key, elements, scratch1, scratch2)); |
| 170 | 169 |
| 171 Label check_prototypes, check_next_prototype; | 170 Label check_prototypes, check_next_prototype; |
| 172 Label done, in_bounds, absent; | 171 Label done, in_bounds, absent; |
| 173 | 172 |
| 174 // Check for fast array. | 173 // Check for fast array. |
| 175 __ Ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset)); | 174 __ Ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset)); |
| 176 __ AssertFastElements(elements); | 175 __ AssertFastElements(elements); |
| 177 | 176 |
| 178 // Check that the key (index) is within bounds. | 177 // Check that the key (index) is within bounds. |
| (...skipping 17 matching lines...) | |
| 196 // scratch2: map of current prototype | 195 // scratch2: map of current prototype |
| 197 __ CompareInstanceType(scratch2, scratch1, JS_OBJECT_TYPE); | 196 __ CompareInstanceType(scratch2, scratch1, JS_OBJECT_TYPE); |
| 198 __ B(lo, slow); | 197 __ B(lo, slow); |
| 199 __ Ldrb(scratch1, FieldMemOperand(scratch2, Map::kBitFieldOffset)); | 198 __ Ldrb(scratch1, FieldMemOperand(scratch2, Map::kBitFieldOffset)); |
| 200 __ Tbnz(scratch1, Map::kIsAccessCheckNeeded, slow); | 199 __ Tbnz(scratch1, Map::kIsAccessCheckNeeded, slow); |
| 201 __ Tbnz(scratch1, Map::kHasIndexedInterceptor, slow); | 200 __ Tbnz(scratch1, Map::kHasIndexedInterceptor, slow); |
| 202 __ JumpIfNotRoot(elements, Heap::kEmptyFixedArrayRootIndex, slow); | 201 __ JumpIfNotRoot(elements, Heap::kEmptyFixedArrayRootIndex, slow); |
| 203 __ B(&check_next_prototype); | 202 __ B(&check_next_prototype); |
| 204 | 203 |
| 205 __ Bind(&absent); | 204 __ Bind(&absent); |
| 206 if (is_strong(language_mode)) { | 205 __ LoadRoot(result, Heap::kUndefinedValueRootIndex); |
| 207 // Strong mode accesses must throw in this case, so call the runtime. | 206 __ B(&done); |
| 208 __ B(slow); | |
| 209 } else { | |
| 210 __ LoadRoot(result, Heap::kUndefinedValueRootIndex); | |
| 211 __ B(&done); | |
| 212 } | |
| 213 | 207 |
| 214 __ Bind(&in_bounds); | 208 __ Bind(&in_bounds); |
| 215 // Fast case: Do the load. | 209 // Fast case: Do the load. |
| 216 __ Add(scratch1, elements, FixedArray::kHeaderSize - kHeapObjectTag); | 210 __ Add(scratch1, elements, FixedArray::kHeaderSize - kHeapObjectTag); |
| 217 __ SmiUntag(scratch2, key); | 211 __ SmiUntag(scratch2, key); |
| 218 __ Ldr(scratch2, MemOperand(scratch1, scratch2, LSL, kPointerSizeLog2)); | 212 __ Ldr(scratch2, MemOperand(scratch1, scratch2, LSL, kPointerSizeLog2)); |
| 219 | 213 |
| 220 // In case the loaded value is the_hole we have to check the prototype chain. | 214 // In case the loaded value is the_hole we have to check the prototype chain. |
| 221 __ JumpIfRoot(scratch2, Heap::kTheHoleValueRootIndex, &check_prototypes); | 215 __ JumpIfRoot(scratch2, Heap::kTheHoleValueRootIndex, &check_prototypes); |
| 222 | 216 |
| (...skipping 30 matching lines...) | |
| 253 // Is the string internalized? We know it's a string, so a single bit test is | 247 // Is the string internalized? We know it's a string, so a single bit test is |
| 254 // enough. | 248 // enough. |
| 255 __ Ldrb(hash_scratch, FieldMemOperand(map_scratch, Map::kInstanceTypeOffset)); | 249 __ Ldrb(hash_scratch, FieldMemOperand(map_scratch, Map::kInstanceTypeOffset)); |
| 256 STATIC_ASSERT(kInternalizedTag == 0); | 250 STATIC_ASSERT(kInternalizedTag == 0); |
| 257 __ TestAndBranchIfAnySet(hash_scratch, kIsNotInternalizedMask, not_unique); | 251 __ TestAndBranchIfAnySet(hash_scratch, kIsNotInternalizedMask, not_unique); |
| 258 | 252 |
| 259 __ Bind(&unique); | 253 __ Bind(&unique); |
| 260 // Fall through if the key is a unique name. | 254 // Fall through if the key is a unique name. |
| 261 } | 255 } |
| 262 | 256 |
| 263 | 257 void LoadIC::GenerateNormal(MacroAssembler* masm) { |
| 264 void LoadIC::GenerateNormal(MacroAssembler* masm, LanguageMode language_mode) { | |
| 265 Register dictionary = x0; | 258 Register dictionary = x0; |
| 266 DCHECK(!dictionary.is(LoadDescriptor::ReceiverRegister())); | 259 DCHECK(!dictionary.is(LoadDescriptor::ReceiverRegister())); |
| 267 DCHECK(!dictionary.is(LoadDescriptor::NameRegister())); | 260 DCHECK(!dictionary.is(LoadDescriptor::NameRegister())); |
| 268 Label slow; | 261 Label slow; |
| 269 | 262 |
| 270 __ Ldr(dictionary, FieldMemOperand(LoadDescriptor::ReceiverRegister(), | 263 __ Ldr(dictionary, FieldMemOperand(LoadDescriptor::ReceiverRegister(), |
| 271 JSObject::kPropertiesOffset)); | 264 JSObject::kPropertiesOffset)); |
| 272 GenerateDictionaryLoad(masm, &slow, dictionary, | 265 GenerateDictionaryLoad(masm, &slow, dictionary, |
| 273 LoadDescriptor::NameRegister(), x0, x3, x4); | 266 LoadDescriptor::NameRegister(), x0, x3, x4); |
| 274 __ Ret(); | 267 __ Ret(); |
| 275 | 268 |
| 276 // Dictionary load failed, go slow (but don't miss). | 269 // Dictionary load failed, go slow (but don't miss). |
| 277 __ Bind(&slow); | 270 __ Bind(&slow); |
| 278 GenerateRuntimeGetProperty(masm, language_mode); | 271 GenerateRuntimeGetProperty(masm); |
| 279 } | 272 } |
| 280 | 273 |
| 281 | 274 |
| 282 void LoadIC::GenerateMiss(MacroAssembler* masm) { | 275 void LoadIC::GenerateMiss(MacroAssembler* masm) { |
| 283 // The return address is in lr. | 276 // The return address is in lr. |
| 284 Isolate* isolate = masm->isolate(); | 277 Isolate* isolate = masm->isolate(); |
| 285 ASM_LOCATION("LoadIC::GenerateMiss"); | 278 ASM_LOCATION("LoadIC::GenerateMiss"); |
| 286 | 279 |
| 287 DCHECK(!AreAliased(x4, x5, LoadWithVectorDescriptor::SlotRegister(), | 280 DCHECK(!AreAliased(x4, x5, LoadWithVectorDescriptor::SlotRegister(), |
| 288 LoadWithVectorDescriptor::VectorRegister())); | 281 LoadWithVectorDescriptor::VectorRegister())); |
| 289 __ IncrementCounter(isolate->counters()->ic_load_miss(), 1, x4, x5); | 282 __ IncrementCounter(isolate->counters()->ic_load_miss(), 1, x4, x5); |
| 290 | 283 |
| 291 // Perform tail call to the entry. | 284 // Perform tail call to the entry. |
| 292 __ Push(LoadWithVectorDescriptor::ReceiverRegister(), | 285 __ Push(LoadWithVectorDescriptor::ReceiverRegister(), |
| 293 LoadWithVectorDescriptor::NameRegister(), | 286 LoadWithVectorDescriptor::NameRegister(), |
| 294 LoadWithVectorDescriptor::SlotRegister(), | 287 LoadWithVectorDescriptor::SlotRegister(), |
| 295 LoadWithVectorDescriptor::VectorRegister()); | 288 LoadWithVectorDescriptor::VectorRegister()); |
| 296 __ TailCallRuntime(Runtime::kLoadIC_Miss); | 289 __ TailCallRuntime(Runtime::kLoadIC_Miss); |
| 297 } | 290 } |
| 298 | 291 |
| 299 | 292 void LoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) { |
| 300 void LoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm, | |
| 301 LanguageMode language_mode) { | |
| 302 // The return address is in lr. | 293 // The return address is in lr. |
| 303 __ Push(LoadDescriptor::ReceiverRegister(), LoadDescriptor::NameRegister()); | 294 __ Push(LoadDescriptor::ReceiverRegister(), LoadDescriptor::NameRegister()); |
| 304 | 295 |
| 305 // Do tail-call to runtime routine. | 296 // Do tail-call to runtime routine. |
| 306 __ TailCallRuntime(is_strong(language_mode) ? Runtime::kGetPropertyStrong | 297 __ TailCallRuntime(Runtime::kGetProperty); |
| 307 : Runtime::kGetProperty); | |
| 308 } | 298 } |
| 309 | 299 |
| 310 | 300 |
| 311 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) { | 301 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) { |
| 312 // The return address is in lr. | 302 // The return address is in lr. |
| 313 Isolate* isolate = masm->isolate(); | 303 Isolate* isolate = masm->isolate(); |
| 314 | 304 |
| 315 DCHECK(!AreAliased(x10, x11, LoadWithVectorDescriptor::SlotRegister(), | 305 DCHECK(!AreAliased(x10, x11, LoadWithVectorDescriptor::SlotRegister(), |
| 316 LoadWithVectorDescriptor::VectorRegister())); | 306 LoadWithVectorDescriptor::VectorRegister())); |
| 317 __ IncrementCounter(isolate->counters()->ic_keyed_load_miss(), 1, x10, x11); | 307 __ IncrementCounter(isolate->counters()->ic_keyed_load_miss(), 1, x10, x11); |
| 318 | 308 |
| 319 __ Push(LoadWithVectorDescriptor::ReceiverRegister(), | 309 __ Push(LoadWithVectorDescriptor::ReceiverRegister(), |
| 320 LoadWithVectorDescriptor::NameRegister(), | 310 LoadWithVectorDescriptor::NameRegister(), |
| 321 LoadWithVectorDescriptor::SlotRegister(), | 311 LoadWithVectorDescriptor::SlotRegister(), |
| 322 LoadWithVectorDescriptor::VectorRegister()); | 312 LoadWithVectorDescriptor::VectorRegister()); |
| 323 | 313 |
| 324 // Perform tail call to the entry. | 314 // Perform tail call to the entry. |
| 325 __ TailCallRuntime(Runtime::kKeyedLoadIC_Miss); | 315 __ TailCallRuntime(Runtime::kKeyedLoadIC_Miss); |
| 326 } | 316 } |
| 327 | 317 |
| 328 | 318 void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) { |
| 329 void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm, | |
| 330 LanguageMode language_mode) { | |
| 331 // The return address is in lr. | 319 // The return address is in lr. |
| 332 __ Push(LoadDescriptor::ReceiverRegister(), LoadDescriptor::NameRegister()); | 320 __ Push(LoadDescriptor::ReceiverRegister(), LoadDescriptor::NameRegister()); |
| 333 | 321 |
| 334 // Do tail-call to runtime routine. | 322 // Do tail-call to runtime routine. |
| 335 __ TailCallRuntime(is_strong(language_mode) ? Runtime::kKeyedGetPropertyStrong | 323 __ TailCallRuntime(Runtime::kKeyedGetProperty); |
| 336 : Runtime::kKeyedGetProperty); | |
| 337 } | 324 } |
| 338 | 325 |
| 339 | |
| 340 static void GenerateKeyedLoadWithSmiKey(MacroAssembler* masm, Register key, | 326 static void GenerateKeyedLoadWithSmiKey(MacroAssembler* masm, Register key, |
| 341 Register receiver, Register scratch1, | 327 Register receiver, Register scratch1, |
| 342 Register scratch2, Register scratch3, | 328 Register scratch2, Register scratch3, |
| 343 Register scratch4, Register scratch5, | 329 Register scratch4, Register scratch5, |
| 344 Label* slow, | 330 Label* slow) { |
| 345 LanguageMode language_mode) { | |
| 346 DCHECK(!AreAliased(key, receiver, scratch1, scratch2, scratch3, scratch4, | 331 DCHECK(!AreAliased(key, receiver, scratch1, scratch2, scratch3, scratch4, |
| 347 scratch5)); | 332 scratch5)); |
| 348 | 333 |
| 349 Isolate* isolate = masm->isolate(); | 334 Isolate* isolate = masm->isolate(); |
| 350 Label check_number_dictionary; | 335 Label check_number_dictionary; |
| 351 // If we can load the value, it should be returned in x0. | 336 // If we can load the value, it should be returned in x0. |
| 352 Register result = x0; | 337 Register result = x0; |
| 353 | 338 |
| 354 GenerateKeyedLoadReceiverCheck(masm, receiver, scratch1, scratch2, | 339 GenerateKeyedLoadReceiverCheck(masm, receiver, scratch1, scratch2, |
| 355 Map::kHasIndexedInterceptor, slow); | 340 Map::kHasIndexedInterceptor, slow); |
| 356 | 341 |
| 357 // Check the receiver's map to see if it has fast elements. | 342 // Check the receiver's map to see if it has fast elements. |
| 358 __ CheckFastElements(scratch1, scratch2, &check_number_dictionary); | 343 __ CheckFastElements(scratch1, scratch2, &check_number_dictionary); |
| 359 | 344 |
| 360 GenerateFastArrayLoad(masm, receiver, key, scratch3, scratch2, scratch1, | 345 GenerateFastArrayLoad(masm, receiver, key, scratch3, scratch2, scratch1, |
| 361 result, slow, language_mode); | 346 result, slow); |
| 362 __ IncrementCounter(isolate->counters()->ic_keyed_load_generic_smi(), 1, | 347 __ IncrementCounter(isolate->counters()->ic_keyed_load_generic_smi(), 1, |
| 363 scratch1, scratch2); | 348 scratch1, scratch2); |
| 364 __ Ret(); | 349 __ Ret(); |
| 365 | 350 |
| 366 __ Bind(&check_number_dictionary); | 351 __ Bind(&check_number_dictionary); |
| 367 __ Ldr(scratch3, FieldMemOperand(receiver, JSObject::kElementsOffset)); | 352 __ Ldr(scratch3, FieldMemOperand(receiver, JSObject::kElementsOffset)); |
| 368 __ Ldr(scratch2, FieldMemOperand(scratch3, JSObject::kMapOffset)); | 353 __ Ldr(scratch2, FieldMemOperand(scratch3, JSObject::kMapOffset)); |
| 369 | 354 |
| 370 // Check whether we have a number dictionary. | 355 // Check whether we have a number dictionary. |
| 371 __ JumpIfNotRoot(scratch2, Heap::kHashTableMapRootIndex, slow); | 356 __ JumpIfNotRoot(scratch2, Heap::kHashTableMapRootIndex, slow); |
| (...skipping 50 matching lines...) | |
| 422 __ Ldr(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset)); | 407 __ Ldr(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
| 423 __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset)); | 408 __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset)); |
| 424 GenerateGlobalInstanceTypeCheck(masm, scratch1, slow); | 409 GenerateGlobalInstanceTypeCheck(masm, scratch1, slow); |
| 425 // Load the property. | 410 // Load the property. |
| 426 GenerateDictionaryLoad(masm, slow, scratch2, key, result, scratch1, scratch3); | 411 GenerateDictionaryLoad(masm, slow, scratch2, key, result, scratch1, scratch3); |
| 427 __ IncrementCounter(isolate->counters()->ic_keyed_load_generic_symbol(), 1, | 412 __ IncrementCounter(isolate->counters()->ic_keyed_load_generic_symbol(), 1, |
| 428 scratch1, scratch2); | 413 scratch1, scratch2); |
| 429 __ Ret(); | 414 __ Ret(); |
| 430 } | 415 } |
| 431 | 416 |
| 432 | 417 void KeyedLoadIC::GenerateMegamorphic(MacroAssembler* masm) { |
| 433 void KeyedLoadIC::GenerateMegamorphic(MacroAssembler* masm, | |
| 434 LanguageMode language_mode) { | |
| 435 // The return address is in lr. | 418 // The return address is in lr. |
| 436 Label slow, check_name, index_smi, index_name; | 419 Label slow, check_name, index_smi, index_name; |
| 437 | 420 |
| 438 Register key = LoadDescriptor::NameRegister(); | 421 Register key = LoadDescriptor::NameRegister(); |
| 439 Register receiver = LoadDescriptor::ReceiverRegister(); | 422 Register receiver = LoadDescriptor::ReceiverRegister(); |
| 440 DCHECK(key.is(x2)); | 423 DCHECK(key.is(x2)); |
| 441 DCHECK(receiver.is(x1)); | 424 DCHECK(receiver.is(x1)); |
| 442 | 425 |
| 443 __ JumpIfNotSmi(key, &check_name); | 426 __ JumpIfNotSmi(key, &check_name); |
| 444 __ Bind(&index_smi); | 427 __ Bind(&index_smi); |
| 445 // Now the key is known to be a smi. This place is also jumped to from below | 428 // Now the key is known to be a smi. This place is also jumped to from below |
| 446 // where a numeric string is converted to a smi. | 429 // where a numeric string is converted to a smi. |
| 447 GenerateKeyedLoadWithSmiKey(masm, key, receiver, x7, x3, x4, x5, x6, &slow, | 430 GenerateKeyedLoadWithSmiKey(masm, key, receiver, x7, x3, x4, x5, x6, &slow); |
| 448 language_mode); | |
| 449 | 431 |
| 450 // Slow case. | 432 // Slow case. |
| 451 __ Bind(&slow); | 433 __ Bind(&slow); |
| 452 __ IncrementCounter(masm->isolate()->counters()->ic_keyed_load_generic_slow(), | 434 __ IncrementCounter(masm->isolate()->counters()->ic_keyed_load_generic_slow(), |
| 453 1, x4, x3); | 435 1, x4, x3); |
| 454 GenerateRuntimeGetProperty(masm, language_mode); | 436 GenerateRuntimeGetProperty(masm); |
| 455 | 437 |
| 456 __ Bind(&check_name); | 438 __ Bind(&check_name); |
| 457 GenerateKeyNameCheck(masm, key, x0, x3, &index_name, &slow); | 439 GenerateKeyNameCheck(masm, key, x0, x3, &index_name, &slow); |
| 458 | 440 |
| 459 GenerateKeyedLoadWithNameKey(masm, key, receiver, x4, x5, x6, x7, x3, &slow); | 441 GenerateKeyedLoadWithNameKey(masm, key, receiver, x4, x5, x6, x7, x3, &slow); |
| 460 | 442 |
| 461 __ Bind(&index_name); | 443 __ Bind(&index_name); |
| 462 __ IndexFromHash(x3, key); | 444 __ IndexFromHash(x3, key); |
| 463 // Now jump to the place where smi keys are handled. | 445 // Now jump to the place where smi keys are handled. |
| 464 __ B(&index_smi); | 446 __ B(&index_smi); |
| (...skipping 411 matching lines...) | |
| 876 } else { | 858 } else { |
| 877 DCHECK(to_patch->Mask(TestBranchMask) == TBNZ); | 859 DCHECK(to_patch->Mask(TestBranchMask) == TBNZ); |
| 878 // This is JumpIfSmi(smi_reg, branch_imm). | 860 // This is JumpIfSmi(smi_reg, branch_imm). |
| 879 patcher.tbz(smi_reg, 0, branch_imm); | 861 patcher.tbz(smi_reg, 0, branch_imm); |
| 880 } | 862 } |
| 881 } | 863 } |
| 882 } // namespace internal | 864 } // namespace internal |
| 883 } // namespace v8 | 865 } // namespace v8 |
| 884 | 866 |
| 885 #endif // V8_TARGET_ARCH_ARM64 | 867 #endif // V8_TARGET_ARCH_ARM64 |
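
For orientation, here is a rough, self-contained C++ model of the lookup semantics that GenerateFastArrayLoad emits (pieced together from the NEW column above). This is an illustrative sketch only, not V8 code: `Value`, `FakeObject`, and `LoadResult` are hypothetical stand-ins, and the out-of-bounds handling is simplified since that part of the stub sits in the elided lines. What it highlights is the post-change `absent` behaviour: with strong mode gone, running off the end of the prototype chain always yields undefined instead of deferring to the runtime.

```cpp
#include <cstddef>
#include <vector>

struct Value {
  bool is_hole = false;  // models Heap::kTheHoleValue
  int payload = 0;
};

struct FakeObject {
  std::vector<Value> elements;            // fast elements backing store
  const FakeObject* prototype = nullptr;  // null terminates the chain
  bool access_check_needed = false;       // stands in for Map::kIsAccessCheckNeeded
  bool indexed_interceptor = false;       // stands in for Map::kHasIndexedInterceptor
};

enum class LoadResult { kFound, kUndefined, kSlowPath };

LoadResult FastArrayLoad(const FakeObject& receiver, std::size_t key, int* out) {
  // Simplification: out-of-bounds keys are punted straight to the slow path;
  // the real stub also walks the prototype chain for them.
  if (key >= receiver.elements.size()) return LoadResult::kSlowPath;

  const Value& v = receiver.elements[key];
  if (!v.is_hole) {  // "in_bounds": fast case, do the load
    *out = v.payload;
    return LoadResult::kFound;
  }

  // "check_prototypes" / "check_next_prototype": a hole means the element may
  // live on a prototype, or not exist at all.
  for (const FakeObject* p = receiver.prototype; ; p = p->prototype) {
    if (p == nullptr) return LoadResult::kUndefined;  // "absent": always undefined now
    if (p->access_check_needed || p->indexed_interceptor || !p->elements.empty())
      return LoadResult::kSlowPath;  // anything non-trivial bails to the runtime
  }
}
```

The `kSlowPath` results mirror how the generated stub defers anything non-trivial (access checks, indexed interceptors, prototypes with their own elements) to the generic runtime path via the `slow` label.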