OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_ARM64 | 5 #if V8_TARGET_ARCH_ARM64 |
6 | 6 |
7 #include "src/codegen.h" | 7 #include "src/codegen.h" |
8 #include "src/ic/ic.h" | 8 #include "src/ic/ic.h" |
9 #include "src/ic/ic-compiler.h" | 9 #include "src/ic/ic-compiler.h" |
10 #include "src/ic/stub-cache.h" | 10 #include "src/ic/stub-cache.h" |
(...skipping 678 matching lines...)
689 __ bind(&maybe_name_key); | 689 __ bind(&maybe_name_key); |
690 __ Ldr(x10, FieldMemOperand(key, HeapObject::kMapOffset)); | 690 __ Ldr(x10, FieldMemOperand(key, HeapObject::kMapOffset)); |
691 __ Ldrb(x10, FieldMemOperand(x10, Map::kInstanceTypeOffset)); | 691 __ Ldrb(x10, FieldMemOperand(x10, Map::kInstanceTypeOffset)); |
692 __ JumpIfNotUniqueNameInstanceType(x10, &slow); | 692 __ JumpIfNotUniqueNameInstanceType(x10, &slow); |
693 | 693 |
694 if (FLAG_vector_stores) { | 694 if (FLAG_vector_stores) { |
695 // The handlers in the stub cache expect a vector and slot. Since we won't | 695 // The handlers in the stub cache expect a vector and slot. Since we won't |
696 // change the IC from any downstream misses, a dummy vector can be used. | 696 // change the IC from any downstream misses, a dummy vector can be used. |
697 Register vector = VectorStoreICDescriptor::VectorRegister(); | 697 Register vector = VectorStoreICDescriptor::VectorRegister(); |
698 Register slot = VectorStoreICDescriptor::SlotRegister(); | 698 Register slot = VectorStoreICDescriptor::SlotRegister(); |
699 DCHECK(!AreAliased(vector, slot, x5, x6, x7, x8)); | 699 DCHECK(!AreAliased(vector, slot, x3, x4, x5, x6)); |
700 Handle<TypeFeedbackVector> dummy_vector = | 700 Handle<TypeFeedbackVector> dummy_vector = |
701 TypeFeedbackVector::DummyVector(masm->isolate()); | 701 TypeFeedbackVector::DummyVector(masm->isolate()); |
702 int slot_index = dummy_vector->GetIndex( | 702 int slot_index = dummy_vector->GetIndex( |
703 FeedbackVectorICSlot(TypeFeedbackVector::kDummyKeyedStoreICSlot)); | 703 FeedbackVectorICSlot(TypeFeedbackVector::kDummyKeyedStoreICSlot)); |
704 __ LoadRoot(vector, Heap::kDummyVectorRootIndex); | 704 __ LoadRoot(vector, Heap::kDummyVectorRootIndex); |
705 __ Mov(slot, Operand(Smi::FromInt(slot_index))); | 705 __ Mov(slot, Operand(Smi::FromInt(slot_index))); |
706 } | 706 } |
707 | 707 |
708 Code::Flags flags = Code::RemoveTypeAndHolderFromFlags( | 708 Code::Flags flags = Code::RemoveTypeAndHolderFromFlags( |
709 Code::ComputeHandlerFlags(Code::STORE_IC)); | 709 Code::ComputeHandlerFlags(Code::STORE_IC)); |
710 masm->isolate()->stub_cache()->GenerateProbe(masm, Code::STORE_IC, flags, | 710 masm->isolate()->stub_cache()->GenerateProbe(masm, Code::STORE_IC, flags, |
711 receiver, key, x5, x6, x7, x8); | 711 receiver, key, x3, x4, x5, x6); |
712 // Cache miss. | 712 // Cache miss. |
713 __ B(&miss); | 713 __ B(&miss); |
714 | 714 |
715 __ Bind(&extra); | 715 __ Bind(&extra); |
716 // Extra capacity case: Check if there is extra capacity to | 716 // Extra capacity case: Check if there is extra capacity to |
717 // perform the store and update the length. Used for adding one | 717 // perform the store and update the length. Used for adding one |
718 // element to the array by writing to array[array.length]. | 718 // element to the array by writing to array[array.length]. |
719 | 719 |
720 // Check for room in the elements backing store. | 720 // Check for room in the elements backing store. |
721 // Both the key and the length of FixedArray are smis. | 721 // Both the key and the length of FixedArray are smis. |
(...skipping 60 matching lines...)
782 int args = FLAG_vector_stores ? 5 : 3; | 782 int args = FLAG_vector_stores ? 5 : 3; |
783 __ TailCallRuntime(Runtime::kStoreIC_Miss, args, 1); | 783 __ TailCallRuntime(Runtime::kStoreIC_Miss, args, 1); |
784 } | 784 } |
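Reviewer note, not part of the patch: with --vector-stores the miss stub also forwards the feedback slot and vector (so the runtime can update the type feedback on a miss) in addition to the usual value/receiver/name triple, which is presumably why the tail call above passes 5 arguments instead of 3. A minimal sketch of that intent, with the helper name invented purely for illustration:

    // Hypothetical helper, for illustration only. The base three arguments are
    // value, receiver and name; the extra two in the vector-stores
    // configuration are the feedback slot and vector.
    static int StoreICMissArgumentCount() {
      return FLAG_vector_stores ? 5 : 3;
    }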
785 | 785 |
786 | 786 |
787 void StoreIC::GenerateNormal(MacroAssembler* masm) { | 787 void StoreIC::GenerateNormal(MacroAssembler* masm) { |
788 Label miss; | 788 Label miss; |
789 Register value = StoreDescriptor::ValueRegister(); | 789 Register value = StoreDescriptor::ValueRegister(); |
790 Register receiver = StoreDescriptor::ReceiverRegister(); | 790 Register receiver = StoreDescriptor::ReceiverRegister(); |
791 Register name = StoreDescriptor::NameRegister(); | 791 Register name = StoreDescriptor::NameRegister(); |
792 Register vector = VectorStoreICDescriptor::VectorRegister(); | 792 Register dictionary = x3; |
793 Register slot = VectorStoreICDescriptor::SlotRegister(); | 793 DCHECK(!AreAliased(value, receiver, name, x3, x4, x5)); |
794 Register dictionary = x5; | |
795 DCHECK(!AreAliased(value, receiver, name, slot, vector, x5, x6, x7)); | |
796 | 794 |
797 __ Ldr(dictionary, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); | 795 __ Ldr(dictionary, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); |
798 | 796 |
799 GenerateDictionaryStore(masm, &miss, dictionary, name, value, x6, x7); | 797 GenerateDictionaryStore(masm, &miss, dictionary, name, value, x4, x5); |
800 Counters* counters = masm->isolate()->counters(); | 798 Counters* counters = masm->isolate()->counters(); |
801 __ IncrementCounter(counters->store_normal_hit(), 1, x6, x7); | 799 __ IncrementCounter(counters->store_normal_hit(), 1, x4, x5); |
802 __ Ret(); | 800 __ Ret(); |
803 | 801 |
804 // Cache miss: Jump to runtime. | 802 // Cache miss: Jump to runtime. |
805 __ Bind(&miss); | 803 __ Bind(&miss); |
806 __ IncrementCounter(counters->store_normal_miss(), 1, x6, x7); | 804 __ IncrementCounter(counters->store_normal_miss(), 1, x4, x5); |
807 GenerateMiss(masm); | 805 GenerateMiss(masm); |
808 } | 806 } |
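Reviewer note, not part of the patch: the NEW side of GenerateNormal no longer loads the vector and slot registers here, so the dictionary and scratch registers can move down to x3..x5; the only invariant they must keep is not aliasing the StoreDescriptor calling-convention registers. A compressed sketch of that invariant, reusing only names visible in the hunk above:

    // Illustration of the aliasing invariant relied on by the NEW code above.
    Register value = StoreDescriptor::ValueRegister();        // convention
    Register receiver = StoreDescriptor::ReceiverRegister();  // convention
    Register name = StoreDescriptor::NameRegister();          // convention
    Register dictionary = x3;                                  // scratch, as in the patch
    // x4 and x5 are the remaining scratches; none may alias the convention
    // registers, or the dictionary store would clobber its own inputs.
    DCHECK(!AreAliased(value, receiver, name, dictionary, x4, x5));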
809 | 807 |
810 | 808 |
811 Condition CompareIC::ComputeCondition(Token::Value op) { | 809 Condition CompareIC::ComputeCondition(Token::Value op) { |
812 switch (op) { | 810 switch (op) { |
813 case Token::EQ_STRICT: | 811 case Token::EQ_STRICT: |
814 case Token::EQ: | 812 case Token::EQ: |
815 return eq; | 813 return eq; |
816 case Token::LT: | 814 case Token::LT: |
(...skipping 73 matching lines...)
890 } else { | 888 } else { |
891 DCHECK(to_patch->Mask(TestBranchMask) == TBNZ); | 889 DCHECK(to_patch->Mask(TestBranchMask) == TBNZ); |
892 // This is JumpIfSmi(smi_reg, branch_imm). | 890 // This is JumpIfSmi(smi_reg, branch_imm). |
893 patcher.tbz(smi_reg, 0, branch_imm); | 891 patcher.tbz(smi_reg, 0, branch_imm); |
894 } | 892 } |
895 } | 893 } |
896 } // namespace internal | 894 } // namespace internal |
897 } // namespace v8 | 895 } // namespace v8 |
898 | 896 |
899 #endif // V8_TARGET_ARCH_ARM64 | 897 #endif // V8_TARGET_ARCH_ARM64 |