OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_ARM64 | 5 #if V8_TARGET_ARCH_ARM64 |
6 | 6 |
7 #include "src/codegen.h" | 7 #include "src/codegen.h" |
8 #include "src/ic/ic.h" | 8 #include "src/ic/ic.h" |
9 #include "src/ic/ic-compiler.h" | 9 #include "src/ic/ic-compiler.h" |
10 #include "src/ic/stub-cache.h" | 10 #include "src/ic/stub-cache.h" |
(...skipping 399 matching lines...)
410 Handle<TypeFeedbackVector> dummy_vector = | 410 Handle<TypeFeedbackVector> dummy_vector = |
411 TypeFeedbackVector::DummyVector(masm->isolate()); | 411 TypeFeedbackVector::DummyVector(masm->isolate()); |
412 int slot_index = dummy_vector->GetIndex( | 412 int slot_index = dummy_vector->GetIndex( |
413 FeedbackVectorICSlot(TypeFeedbackVector::kDummyKeyedLoadICSlot)); | 413 FeedbackVectorICSlot(TypeFeedbackVector::kDummyKeyedLoadICSlot)); |
414 __ LoadRoot(vector, Heap::kDummyVectorRootIndex); | 414 __ LoadRoot(vector, Heap::kDummyVectorRootIndex); |
415 __ Mov(slot, Operand(Smi::FromInt(slot_index))); | 415 __ Mov(slot, Operand(Smi::FromInt(slot_index))); |
416 | 416 |
417 Code::Flags flags = Code::RemoveTypeAndHolderFromFlags( | 417 Code::Flags flags = Code::RemoveTypeAndHolderFromFlags( |
418 Code::ComputeHandlerFlags(Code::LOAD_IC)); | 418 Code::ComputeHandlerFlags(Code::LOAD_IC)); |
419 masm->isolate()->stub_cache()->GenerateProbe(masm, Code::KEYED_LOAD_IC, flags, | 419 masm->isolate()->stub_cache()->GenerateProbe(masm, Code::KEYED_LOAD_IC, flags, |
420 false, receiver, key, scratch1, | 420 receiver, key, scratch1, |
421 scratch2, scratch3, scratch4); | 421 scratch2, scratch3, scratch4); |
422 // Cache miss. | 422 // Cache miss. |
423 KeyedLoadIC::GenerateMiss(masm); | 423 KeyedLoadIC::GenerateMiss(masm); |
424 | 424 |
425 // Do a quick inline probe of the receiver's dictionary, if it exists. | 425 // Do a quick inline probe of the receiver's dictionary, if it exists. |
426 __ Bind(&probe_dictionary); | 426 __ Bind(&probe_dictionary); |
427 __ Ldr(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset)); | 427 __ Ldr(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
428 __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset)); | 428 __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset)); |
429 GenerateGlobalInstanceTypeCheck(masm, scratch1, slow); | 429 GenerateGlobalInstanceTypeCheck(masm, scratch1, slow); |
430 // Load the property. | 430 // Load the property. |
(...skipping 269 matching lines...)
700 Handle<TypeFeedbackVector> dummy_vector = | 700 Handle<TypeFeedbackVector> dummy_vector = |
701 TypeFeedbackVector::DummyVector(masm->isolate()); | 701 TypeFeedbackVector::DummyVector(masm->isolate()); |
702 int slot_index = dummy_vector->GetIndex( | 702 int slot_index = dummy_vector->GetIndex( |
703 FeedbackVectorICSlot(TypeFeedbackVector::kDummyKeyedStoreICSlot)); | 703 FeedbackVectorICSlot(TypeFeedbackVector::kDummyKeyedStoreICSlot)); |
704 __ LoadRoot(vector, Heap::kDummyVectorRootIndex); | 704 __ LoadRoot(vector, Heap::kDummyVectorRootIndex); |
705 __ Mov(slot, Operand(Smi::FromInt(slot_index))); | 705 __ Mov(slot, Operand(Smi::FromInt(slot_index))); |
706 } | 706 } |
707 | 707 |
708 Code::Flags flags = Code::RemoveTypeAndHolderFromFlags( | 708 Code::Flags flags = Code::RemoveTypeAndHolderFromFlags( |
709 Code::ComputeHandlerFlags(Code::STORE_IC)); | 709 Code::ComputeHandlerFlags(Code::STORE_IC)); |
710 masm->isolate()->stub_cache()->GenerateProbe( | 710 masm->isolate()->stub_cache()->GenerateProbe(masm, Code::STORE_IC, flags, |
711 masm, Code::STORE_IC, flags, false, receiver, key, x3, x4, x5, x6); | 711 receiver, key, x3, x4, x5, x6); |
712 // Cache miss. | 712 // Cache miss. |
713 __ B(&miss); | 713 __ B(&miss); |
714 | 714 |
715 __ Bind(&extra); | 715 __ Bind(&extra); |
716 // Extra capacity case: Check if there is extra capacity to | 716 // Extra capacity case: Check if there is extra capacity to |
717 // perform the store and update the length. Used for adding one | 717 // perform the store and update the length. Used for adding one |
718 // element to the array by writing to array[array.length]. | 718 // element to the array by writing to array[array.length]. |
719 | 719 |
720 // Check for room in the elements backing store. | 720 // Check for room in the elements backing store. |
721 // Both the key and the length of FixedArray are smis. | 721 // Both the key and the length of FixedArray are smis. |
(...skipping 38 matching lines...)
760 | 760 |
761 void StoreIC::GenerateMegamorphic(MacroAssembler* masm) { | 761 void StoreIC::GenerateMegamorphic(MacroAssembler* masm) { |
762 Register receiver = StoreDescriptor::ReceiverRegister(); | 762 Register receiver = StoreDescriptor::ReceiverRegister(); |
763 Register name = StoreDescriptor::NameRegister(); | 763 Register name = StoreDescriptor::NameRegister(); |
764 DCHECK(!AreAliased(receiver, name, StoreDescriptor::ValueRegister(), x3, x4, | 764 DCHECK(!AreAliased(receiver, name, StoreDescriptor::ValueRegister(), x3, x4, |
765 x5, x6)); | 765 x5, x6)); |
766 | 766 |
767 // Probe the stub cache. | 767 // Probe the stub cache. |
768 Code::Flags flags = Code::RemoveTypeAndHolderFromFlags( | 768 Code::Flags flags = Code::RemoveTypeAndHolderFromFlags( |
769 Code::ComputeHandlerFlags(Code::STORE_IC)); | 769 Code::ComputeHandlerFlags(Code::STORE_IC)); |
770 masm->isolate()->stub_cache()->GenerateProbe( | 770 masm->isolate()->stub_cache()->GenerateProbe(masm, Code::STORE_IC, flags, |
771 masm, Code::STORE_IC, flags, false, receiver, name, x3, x4, x5, x6); | 771 receiver, name, x3, x4, x5, x6); |
772 | 772 |
773 // Cache miss: Jump to runtime. | 773 // Cache miss: Jump to runtime. |
774 GenerateMiss(masm); | 774 GenerateMiss(masm); |
775 } | 775 } |
776 | 776 |
777 | 777 |
778 void StoreIC::GenerateMiss(MacroAssembler* masm) { | 778 void StoreIC::GenerateMiss(MacroAssembler* masm) { |
779 StoreIC_PushArgs(masm); | 779 StoreIC_PushArgs(masm); |
780 | 780 |
781 // Tail call to the entry. | 781 // Tail call to the entry. |
(...skipping 106 matching lines...)
888 } else { | 888 } else { |
889 DCHECK(to_patch->Mask(TestBranchMask) == TBNZ); | 889 DCHECK(to_patch->Mask(TestBranchMask) == TBNZ); |
890 // This is JumpIfSmi(smi_reg, branch_imm). | 890 // This is JumpIfSmi(smi_reg, branch_imm). |
891 patcher.tbz(smi_reg, 0, branch_imm); | 891 patcher.tbz(smi_reg, 0, branch_imm); |
892 } | 892 } |
893 } | 893 } |
894 } // namespace internal | 894 } // namespace internal |
895 } // namespace v8 | 895 } // namespace v8 |
896 | 896 |
897 #endif // V8_TARGET_ARCH_ARM64 | 897 #endif // V8_TARGET_ARCH_ARM64 |