| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #if V8_TARGET_ARCH_ARM | 7 #if V8_TARGET_ARCH_ARM |
| 8 | 8 |
| 9 #include "src/codegen.h" | 9 #include "src/codegen.h" |
| 10 #include "src/ic/ic.h" | 10 #include "src/ic/ic.h" |
| (...skipping 447 matching lines...) | |
| 458 | 458 |
| 459 void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) { | 459 void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) { |
| 460 // The return address is in lr. | 460 // The return address is in lr. |
| 461 | 461 |
| 462 __ Push(LoadDescriptor::ReceiverRegister(), LoadDescriptor::NameRegister()); | 462 __ Push(LoadDescriptor::ReceiverRegister(), LoadDescriptor::NameRegister()); |
| 463 | 463 |
| 464 __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1); | 464 __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1); |
| 465 } | 465 } |
| 466 | 466 |
| 467 | 467 |
| 468 void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) { | 468 void KeyedLoadIC::GenerateMegamorphic(MacroAssembler* masm) { |
| 469 // The return address is in lr. | 469 // The return address is in lr. |
| 470 Label slow, check_name, index_smi, index_name, property_array_property; | 470 Label slow, check_name, index_smi, index_name, property_array_property; |
| 471 Label probe_dictionary, check_number_dictionary; | 471 Label probe_dictionary, check_number_dictionary; |
| 472 | 472 |
| 473 Register key = LoadDescriptor::NameRegister(); | 473 Register key = LoadDescriptor::NameRegister(); |
| 474 Register receiver = LoadDescriptor::ReceiverRegister(); | 474 Register receiver = LoadDescriptor::ReceiverRegister(); |
| 475 DCHECK(key.is(r2)); | 475 DCHECK(key.is(r2)); |
| 476 DCHECK(receiver.is(r1)); | 476 DCHECK(receiver.is(r1)); |
| 477 | 477 |
| 478 Isolate* isolate = masm->isolate(); | 478 Isolate* isolate = masm->isolate(); |
| (...skipping 33 matching lines...) | |
| 512 __ IncrementCounter(isolate->counters()->keyed_load_generic_slow(), 1, r4, | 512 __ IncrementCounter(isolate->counters()->keyed_load_generic_slow(), 1, r4, |
| 513 r3); | 513 r3); |
| 514 GenerateRuntimeGetProperty(masm); | 514 GenerateRuntimeGetProperty(masm); |
| 515 | 515 |
| 516 __ bind(&check_name); | 516 __ bind(&check_name); |
| 517 GenerateKeyNameCheck(masm, key, r0, r3, &index_name, &slow); | 517 GenerateKeyNameCheck(masm, key, r0, r3, &index_name, &slow); |
| 518 | 518 |
| 519 GenerateKeyedLoadReceiverCheck(masm, receiver, r0, r3, | 519 GenerateKeyedLoadReceiverCheck(masm, receiver, r0, r3, |
| 520 Map::kHasNamedInterceptor, &slow); | 520 Map::kHasNamedInterceptor, &slow); |
| 521 | 521 |
| 522 // If the receiver is a fast-case object, check the keyed lookup | 522 // If the receiver is a fast-case object, check the stub cache. Otherwise |
| 523 // cache. Otherwise probe the dictionary. | 523 // probe the dictionary. |
| 524 __ ldr(r3, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); | 524 __ ldr(r3, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); |
| 525 __ ldr(r4, FieldMemOperand(r3, HeapObject::kMapOffset)); | 525 __ ldr(r4, FieldMemOperand(r3, HeapObject::kMapOffset)); |
| 526 __ LoadRoot(ip, Heap::kHashTableMapRootIndex); | 526 __ LoadRoot(ip, Heap::kHashTableMapRootIndex); |
| 527 __ cmp(r4, ip); | 527 __ cmp(r4, ip); |
| 528 __ b(eq, &probe_dictionary); | 528 __ b(eq, &probe_dictionary); |
| 529 | 529 |
| 530 // Load the map of the receiver, compute the keyed lookup cache hash | 530 Code::Flags flags = Code::RemoveTypeAndHolderFromFlags( |
| 531 // based on 32 bits of the map pointer and the name hash. | 531 Code::ComputeHandlerFlags(Code::LOAD_IC)); |
| 532 __ ldr(r0, FieldMemOperand(receiver, HeapObject::kMapOffset)); | 532 masm->isolate()->stub_cache()->GenerateProbe( |
| 533 __ mov(r3, Operand(r0, ASR, KeyedLookupCache::kMapHashShift)); | 533 masm, Code::LOAD_IC, flags, false, receiver, key, r3, r4, r5, r6); |
| 534 __ ldr(r4, FieldMemOperand(key, Name::kHashFieldOffset)); | 534 // Cache miss. |
| 535 __ eor(r3, r3, Operand(r4, ASR, Name::kHashShift)); | 535 GenerateMiss(masm); |
| 536 int mask = KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask; | |
| 537 __ And(r3, r3, Operand(mask)); | |
| 538 | |
| 539 // Load the key (consisting of map and unique name) from the cache and | |
| 540 // check for match. | |
| 541 Label load_in_object_property; | |
| 542 static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket; | |
| 543 Label hit_on_nth_entry[kEntriesPerBucket]; | |
| 544 ExternalReference cache_keys = | |
| 545 ExternalReference::keyed_lookup_cache_keys(isolate); | |
| 546 | |
| 547 __ mov(r4, Operand(cache_keys)); | |
| 548 __ add(r4, r4, Operand(r3, LSL, kPointerSizeLog2 + 1)); | |
| 549 | |
| 550 for (int i = 0; i < kEntriesPerBucket - 1; i++) { | |
| 551 Label try_next_entry; | |
| 552 // Load map and move r4 to next entry. | |
| 553 __ ldr(r5, MemOperand(r4, kPointerSize * 2, PostIndex)); | |
| 554 __ cmp(r0, r5); | |
| 555 __ b(ne, &try_next_entry); | |
| 556 __ ldr(r5, MemOperand(r4, -kPointerSize)); // Load name | |
| 557 __ cmp(key, r5); | |
| 558 __ b(eq, &hit_on_nth_entry[i]); | |
| 559 __ bind(&try_next_entry); | |
| 560 } | |
| 561 | |
| 562 // Last entry: Load map and move r4 to name. | |
| 563 __ ldr(r5, MemOperand(r4, kPointerSize, PostIndex)); | |
| 564 __ cmp(r0, r5); | |
| 565 __ b(ne, &slow); | |
| 566 __ ldr(r5, MemOperand(r4)); | |
| 567 __ cmp(key, r5); | |
| 568 __ b(ne, &slow); | |
| 569 | |
| 570 // Get field offset. | |
| 571 // r0 : receiver's map | |
| 572 // r3 : lookup cache index | |
| 573 ExternalReference cache_field_offsets = | |
| 574 ExternalReference::keyed_lookup_cache_field_offsets(isolate); | |
| 575 | |
| 576 // Hit on nth entry. | |
| 577 for (int i = kEntriesPerBucket - 1; i >= 0; i--) { | |
| 578 __ bind(&hit_on_nth_entry[i]); | |
| 579 __ mov(r4, Operand(cache_field_offsets)); | |
| 580 if (i != 0) { | |
| 581 __ add(r3, r3, Operand(i)); | |
| 582 } | |
| 583 __ ldr(r5, MemOperand(r4, r3, LSL, kPointerSizeLog2)); | |
| 584 __ ldrb(r6, FieldMemOperand(r0, Map::kInObjectPropertiesOffset)); | |
| 585 __ sub(r5, r5, r6, SetCC); | |
| 586 __ b(ge, &property_array_property); | |
| 587 if (i != 0) { | |
| 588 __ jmp(&load_in_object_property); | |
| 589 } | |
| 590 } | |
| 591 | |
| 592 // Load in-object property. | |
| 593 __ bind(&load_in_object_property); | |
| 594 __ ldrb(r6, FieldMemOperand(r0, Map::kInstanceSizeOffset)); | |
| 595 __ add(r6, r6, r5); // Index from start of object. | |
| 596 __ sub(receiver, receiver, Operand(kHeapObjectTag)); // Remove the heap tag. | |
| 597 __ ldr(r0, MemOperand(receiver, r6, LSL, kPointerSizeLog2)); | |
| 598 __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(), 1, | |
| 599 r4, r3); | |
| 600 __ Ret(); | |
| 601 | |
| 602 // Load property array property. | |
| 603 __ bind(&property_array_property); | |
| 604 __ ldr(receiver, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); | |
| 605 __ add(receiver, receiver, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | |
| 606 __ ldr(r0, MemOperand(receiver, r5, LSL, kPointerSizeLog2)); | |
| 607 __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(), 1, | |
| 608 r4, r3); | |
| 609 __ Ret(); | |
| 610 | 536 |
| 611 // Do a quick inline probe of the receiver's dictionary, if it | 537 // Do a quick inline probe of the receiver's dictionary, if it |
| 612 // exists. | 538 // exists. |
| 613 __ bind(&probe_dictionary); | 539 __ bind(&probe_dictionary); |
| 614 // r3: elements | 540 // r3: elements |
| 615 __ ldr(r0, FieldMemOperand(receiver, HeapObject::kMapOffset)); | 541 __ ldr(r0, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
| 616 __ ldrb(r0, FieldMemOperand(r0, Map::kInstanceTypeOffset)); | 542 __ ldrb(r0, FieldMemOperand(r0, Map::kInstanceTypeOffset)); |
| 617 GenerateGlobalInstanceTypeCheck(masm, r0, &slow); | 543 GenerateGlobalInstanceTypeCheck(masm, r0, &slow); |
| 618 // Load the property to r0. | 544 // Load the property to r0. |
| 619 GenerateDictionaryLoad(masm, &slow, r3, key, r0, r5, r4); | 545 GenerateDictionaryLoad(masm, &slow, r3, key, r0, r5, r4); |
| (...skipping 420 matching lines...) | |
| 1040 patcher.EmitCondition(ne); | 966 patcher.EmitCondition(ne); |
| 1041 } else { | 967 } else { |
| 1042 DCHECK(Assembler::GetCondition(branch_instr) == ne); | 968 DCHECK(Assembler::GetCondition(branch_instr) == ne); |
| 1043 patcher.EmitCondition(eq); | 969 patcher.EmitCondition(eq); |
| 1044 } | 970 } |
| 1045 } | 971 } |
| 1046 } | 972 } |
| 1047 } // namespace v8::internal | 973 } // namespace v8::internal |
| 1048 | 974 |
| 1049 #endif // V8_TARGET_ARCH_ARM | 975 #endif // V8_TARGET_ARCH_ARM |
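
Note on the change above: the OLD path probed V8's per-isolate keyed lookup cache, hashing the receiver's map pointer together with the name's hash field to pick a bucket, scanning that bucket's (map, name) entries, and loading the cached field offset on a hit; the NEW path replaces all of that with a single stub cache probe (GenerateProbe) and falls through to GenerateMiss on a cache miss. The C++ sketch below only illustrates the bucketed-cache idea that the removed assembly implemented; every name and constant in it is an assumption for illustration, not V8's actual KeyedLookupCache API.

#include <cstdint>

// Simplified stand-in for the removed keyed lookup cache: a small table of
// (map, name) -> field offset entries, probed with a hash mixed from the map
// pointer and the name's hash (the "eor" in the old assembly). Constants and
// names here are illustrative only.
struct KeyedLookupCacheSketch {
  static const int kEntriesPerBucket = 2;   // assumed bucket size
  static const int kBuckets = 64;           // assumed capacity
  static const int kMask = kBuckets - 1;

  struct Entry {
    const void* map;     // receiver map identity
    const void* name;    // unique property name
    int field_offset;    // cached property slot
  };
  Entry entries[kBuckets * kEntriesPerBucket] = {};

  static int Bucket(const void* map, uint32_t name_hash) {
    uintptr_t m = reinterpret_cast<uintptr_t>(map);
    // Mix map bits with the name hash, then mask down to the table size.
    return static_cast<int>((m >> 3) ^ (name_hash >> 2)) & kMask;
  }

  // Returns the cached offset, or -1 on a miss (the assembly's jump to &slow).
  int Lookup(const void* map, const void* name, uint32_t name_hash) const {
    const Entry* b = &entries[Bucket(map, name_hash) * kEntriesPerBucket];
    for (int i = 0; i < kEntriesPerBucket; ++i) {
      if (b[i].map == map && b[i].name == name) return b[i].field_offset;
    }
    return -1;
  }
};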