OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_ARM64 | 5 #if V8_TARGET_ARCH_ARM64 |
6 | 6 |
7 #include "src/codegen.h" | 7 #include "src/codegen.h" |
8 #include "src/ic/ic.h" | 8 #include "src/ic/ic.h" |
9 #include "src/ic/ic-compiler.h" | 9 #include "src/ic/ic-compiler.h" |
10 #include "src/ic/stub-cache.h" | 10 #include "src/ic/stub-cache.h" |
(...skipping 452 matching lines...) |
463 GenerateKeyedLoadWithNameKey(masm, key, receiver, x4, x5, x6, x7, x3, &slow); | 463 GenerateKeyedLoadWithNameKey(masm, key, receiver, x4, x5, x6, x7, x3, &slow); |
464 | 464 |
465 __ Bind(&index_name); | 465 __ Bind(&index_name); |
466 __ IndexFromHash(x3, key); | 466 __ IndexFromHash(x3, key); |
467 // Now jump to the place where smi keys are handled. | 467 // Now jump to the place where smi keys are handled. |
468 __ B(&index_smi); | 468 __ B(&index_smi); |
469 } | 469 } |
470 | 470 |
471 | 471 |
472 static void StoreIC_PushArgs(MacroAssembler* masm) { | 472 static void StoreIC_PushArgs(MacroAssembler* masm) { |
473 if (FLAG_vector_stores) { | 473 __ Push(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister(), |
474 __ Push(StoreDescriptor::ReceiverRegister(), | 474 StoreDescriptor::ValueRegister(), |
475 StoreDescriptor::NameRegister(), StoreDescriptor::ValueRegister(), | 475 VectorStoreICDescriptor::SlotRegister(), |
476 VectorStoreICDescriptor::SlotRegister(), | 476 VectorStoreICDescriptor::VectorRegister()); |
477 VectorStoreICDescriptor::VectorRegister()); | |
478 } else { | |
479 __ Push(StoreDescriptor::ReceiverRegister(), | |
480 StoreDescriptor::NameRegister(), StoreDescriptor::ValueRegister()); | |
481 } | |
482 } | 477 } |
483 | 478 |
484 | 479 |
485 void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) { | 480 void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) { |
486 ASM_LOCATION("KeyedStoreIC::GenerateMiss"); | 481 ASM_LOCATION("KeyedStoreIC::GenerateMiss"); |
487 StoreIC_PushArgs(masm); | 482 StoreIC_PushArgs(masm); |
488 | 483 |
489 int args = FLAG_vector_stores ? 5 : 3; | 484 __ TailCallRuntime(Runtime::kKeyedStoreIC_Miss, 5, 1); |
490 __ TailCallRuntime(Runtime::kKeyedStoreIC_Miss, args, 1); | |
491 } | 485 } |
492 | 486 |
493 | 487 |
494 static void KeyedStoreGenerateMegamorphicHelper( | 488 static void KeyedStoreGenerateMegamorphicHelper( |
495 MacroAssembler* masm, Label* fast_object, Label* fast_double, Label* slow, | 489 MacroAssembler* masm, Label* fast_object, Label* fast_double, Label* slow, |
496 KeyedStoreCheckMap check_map, KeyedStoreIncrementLength increment_length, | 490 KeyedStoreCheckMap check_map, KeyedStoreIncrementLength increment_length, |
497 Register value, Register key, Register receiver, Register receiver_map, | 491 Register value, Register key, Register receiver, Register receiver_map, |
498 Register elements_map, Register elements) { | 492 Register elements_map, Register elements) { |
499 DCHECK(!AreAliased(value, key, receiver, receiver_map, elements_map, elements, | 493 DCHECK(!AreAliased(value, key, receiver, receiver_map, elements_map, elements, |
500 x10, x11)); | 494 x10, x11)); |
(...skipping 182 matching lines...) |
683 // x1: key | 677 // x1: key |
684 // x2: receiver | 678 // x2: receiver |
685 PropertyICCompiler::GenerateRuntimeSetProperty(masm, language_mode); | 679 PropertyICCompiler::GenerateRuntimeSetProperty(masm, language_mode); |
686 // Never returns to here. | 680 // Never returns to here. |
687 | 681 |
688 __ bind(&maybe_name_key); | 682 __ bind(&maybe_name_key); |
689 __ Ldr(x10, FieldMemOperand(key, HeapObject::kMapOffset)); | 683 __ Ldr(x10, FieldMemOperand(key, HeapObject::kMapOffset)); |
690 __ Ldrb(x10, FieldMemOperand(x10, Map::kInstanceTypeOffset)); | 684 __ Ldrb(x10, FieldMemOperand(x10, Map::kInstanceTypeOffset)); |
691 __ JumpIfNotUniqueNameInstanceType(x10, &slow); | 685 __ JumpIfNotUniqueNameInstanceType(x10, &slow); |
692 | 686 |
693 if (FLAG_vector_stores) { | 687 // The handlers in the stub cache expect a vector and slot. Since we won't |
694 // The handlers in the stub cache expect a vector and slot. Since we won't | 688 // change the IC from any downstream misses, a dummy vector can be used. |
695 // change the IC from any downstream misses, a dummy vector can be used. | 689 Register vector = VectorStoreICDescriptor::VectorRegister(); |
696 Register vector = VectorStoreICDescriptor::VectorRegister(); | 690 Register slot = VectorStoreICDescriptor::SlotRegister(); |
697 Register slot = VectorStoreICDescriptor::SlotRegister(); | 691 DCHECK(!AreAliased(vector, slot, x5, x6, x7, x8)); |
698 DCHECK(!AreAliased(vector, slot, x5, x6, x7, x8)); | 692 Handle<TypeFeedbackVector> dummy_vector = |
699 Handle<TypeFeedbackVector> dummy_vector = | 693 TypeFeedbackVector::DummyVector(masm->isolate()); |
700 TypeFeedbackVector::DummyVector(masm->isolate()); | 694 int slot_index = dummy_vector->GetIndex( |
701 int slot_index = dummy_vector->GetIndex( | 695 FeedbackVectorSlot(TypeFeedbackVector::kDummyKeyedStoreICSlot)); |
702 FeedbackVectorSlot(TypeFeedbackVector::kDummyKeyedStoreICSlot)); | 696 __ LoadRoot(vector, Heap::kDummyVectorRootIndex); |
703 __ LoadRoot(vector, Heap::kDummyVectorRootIndex); | 697 __ Mov(slot, Operand(Smi::FromInt(slot_index))); |
704 __ Mov(slot, Operand(Smi::FromInt(slot_index))); | |
705 } | |
706 | 698 |
707 Code::Flags flags = Code::RemoveTypeAndHolderFromFlags( | 699 Code::Flags flags = Code::RemoveTypeAndHolderFromFlags( |
708 Code::ComputeHandlerFlags(Code::STORE_IC)); | 700 Code::ComputeHandlerFlags(Code::STORE_IC)); |
709 masm->isolate()->stub_cache()->GenerateProbe(masm, Code::STORE_IC, flags, | 701 masm->isolate()->stub_cache()->GenerateProbe(masm, Code::STORE_IC, flags, |
710 receiver, key, x5, x6, x7, x8); | 702 receiver, key, x5, x6, x7, x8); |
711 // Cache miss. | 703 // Cache miss. |
712 __ B(&miss); | 704 __ B(&miss); |
713 | 705 |
714 __ Bind(&extra); | 706 __ Bind(&extra); |
715 // Extra capacity case: Check if there is extra capacity to | 707 // Extra capacity case: Check if there is extra capacity to |
(...skipping 55 matching lines...) |
771 | 763 |
772 // Cache miss: Jump to runtime. | 764 // Cache miss: Jump to runtime. |
773 GenerateMiss(masm); | 765 GenerateMiss(masm); |
774 } | 766 } |
775 | 767 |
776 | 768 |
777 void StoreIC::GenerateMiss(MacroAssembler* masm) { | 769 void StoreIC::GenerateMiss(MacroAssembler* masm) { |
778 StoreIC_PushArgs(masm); | 770 StoreIC_PushArgs(masm); |
779 | 771 |
780 // Tail call to the entry. | 772 // Tail call to the entry. |
781 int args = FLAG_vector_stores ? 5 : 3; | 773 __ TailCallRuntime(Runtime::kStoreIC_Miss, 5, 1); |
782 __ TailCallRuntime(Runtime::kStoreIC_Miss, args, 1); | |
783 } | 774 } |
784 | 775 |
785 | 776 |
786 void StoreIC::GenerateNormal(MacroAssembler* masm) { | 777 void StoreIC::GenerateNormal(MacroAssembler* masm) { |
787 Label miss; | 778 Label miss; |
788 Register value = StoreDescriptor::ValueRegister(); | 779 Register value = StoreDescriptor::ValueRegister(); |
789 Register receiver = StoreDescriptor::ReceiverRegister(); | 780 Register receiver = StoreDescriptor::ReceiverRegister(); |
790 Register name = StoreDescriptor::NameRegister(); | 781 Register name = StoreDescriptor::NameRegister(); |
791 Register dictionary = x5; | 782 Register dictionary = x5; |
792 DCHECK(!AreAliased(value, receiver, name, | 783 DCHECK(!AreAliased(value, receiver, name, |
(...skipping 96 matching lines...) |
889 } else { | 880 } else { |
890 DCHECK(to_patch->Mask(TestBranchMask) == TBNZ); | 881 DCHECK(to_patch->Mask(TestBranchMask) == TBNZ); |
891 // This is JumpIfSmi(smi_reg, branch_imm). | 882 // This is JumpIfSmi(smi_reg, branch_imm). |
892 patcher.tbz(smi_reg, 0, branch_imm); | 883 patcher.tbz(smi_reg, 0, branch_imm); |
893 } | 884 } |
894 } | 885 } |
895 } // namespace internal | 886 } // namespace internal |
896 } // namespace v8 | 887 } // namespace v8 |
897 | 888 |
898 #endif // V8_TARGET_ARCH_ARM64 | 889 #endif // V8_TARGET_ARCH_ARM64 |