| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #if V8_TARGET_ARCH_ARM64 | 7 #if V8_TARGET_ARCH_ARM64 |
| 8 | 8 |
| 9 #include "src/codegen.h" | 9 #include "src/codegen.h" |
| 10 #include "src/ic/ic.h" | 10 #include "src/ic/ic.h" |
| (...skipping 379 matching lines...) |
| 390 __ Ldr(scratch3, FieldMemOperand(scratch2, HeapObject::kMapOffset)); | 390 __ Ldr(scratch3, FieldMemOperand(scratch2, HeapObject::kMapOffset)); |
| 391 __ JumpIfRoot(scratch3, Heap::kHashTableMapRootIndex, &probe_dictionary); | 391 __ JumpIfRoot(scratch3, Heap::kHashTableMapRootIndex, &probe_dictionary); |
| 392 | 392 |
| 393 // The handlers in the stub cache expect a vector and slot. Since we won't | 393 // The handlers in the stub cache expect a vector and slot. Since we won't |
| 394 // change the IC from any downstream misses, a dummy vector can be used. | 394 // change the IC from any downstream misses, a dummy vector can be used. |
| 395 Register vector = LoadWithVectorDescriptor::VectorRegister(); | 395 Register vector = LoadWithVectorDescriptor::VectorRegister(); |
| 396 Register slot = LoadWithVectorDescriptor::SlotRegister(); | 396 Register slot = LoadWithVectorDescriptor::SlotRegister(); |
| 397 DCHECK(!AreAliased(vector, slot, scratch1, scratch2, scratch3, scratch4)); | 397 DCHECK(!AreAliased(vector, slot, scratch1, scratch2, scratch3, scratch4)); |
| 398 Handle<TypeFeedbackVector> dummy_vector = Handle<TypeFeedbackVector>::cast( | 398 Handle<TypeFeedbackVector> dummy_vector = Handle<TypeFeedbackVector>::cast( |
| 399 masm->isolate()->factory()->keyed_load_dummy_vector()); | 399 masm->isolate()->factory()->keyed_load_dummy_vector()); |
| 400 int int_slot = dummy_vector->GetIndex(FeedbackVectorICSlot(0)); | 400 int slot_index = dummy_vector->GetIndex(FeedbackVectorICSlot(0)); |
| 401 __ LoadRoot(vector, Heap::kKeyedLoadDummyVectorRootIndex); | 401 __ LoadRoot(vector, Heap::kKeyedLoadDummyVectorRootIndex); |
| 402 __ Mov(slot, Operand(Smi::FromInt(int_slot))); | 402 __ Mov(slot, Operand(Smi::FromInt(slot_index))); |
| 403 | 403 |
| 404 Code::Flags flags = Code::RemoveTypeAndHolderFromFlags( | 404 Code::Flags flags = Code::RemoveTypeAndHolderFromFlags( |
| 405 Code::ComputeHandlerFlags(Code::LOAD_IC)); | 405 Code::ComputeHandlerFlags(Code::LOAD_IC)); |
| 406 masm->isolate()->stub_cache()->GenerateProbe(masm, Code::KEYED_LOAD_IC, flags, | 406 masm->isolate()->stub_cache()->GenerateProbe(masm, Code::KEYED_LOAD_IC, flags, |
| 407 false, receiver, key, scratch1, | 407 false, receiver, key, scratch1, |
| 408 scratch2, scratch3, scratch4); | 408 scratch2, scratch3, scratch4); |
| 409 // Cache miss. | 409 // Cache miss. |
| 410 KeyedLoadIC::GenerateMiss(masm); | 410 KeyedLoadIC::GenerateMiss(masm); |
| 411 | 411 |
| 412 // Do a quick inline probe of the receiver's dictionary, if it exists. | 412 // Do a quick inline probe of the receiver's dictionary, if it exists. |
| (...skipping 243 matching lines...) |
| 656 // x0: value | 656 // x0: value |
| 657 // x1: key | 657 // x1: key |
| 658 // x2: receiver | 658 // x2: receiver |
| 659 PropertyICCompiler::GenerateRuntimeSetProperty(masm, language_mode); | 659 PropertyICCompiler::GenerateRuntimeSetProperty(masm, language_mode); |
| 660 // Never returns to here. | 660 // Never returns to here. |
| 661 | 661 |
| 662 __ bind(&maybe_name_key); | 662 __ bind(&maybe_name_key); |
| 663 __ Ldr(x10, FieldMemOperand(key, HeapObject::kMapOffset)); | 663 __ Ldr(x10, FieldMemOperand(key, HeapObject::kMapOffset)); |
| 664 __ Ldrb(x10, FieldMemOperand(x10, Map::kInstanceTypeOffset)); | 664 __ Ldrb(x10, FieldMemOperand(x10, Map::kInstanceTypeOffset)); |
| 665 __ JumpIfNotUniqueNameInstanceType(x10, &slow); | 665 __ JumpIfNotUniqueNameInstanceType(x10, &slow); |
| | 666 |
| | 667 if (FLAG_vector_stores) { |
| | 668 // The handlers in the stub cache expect a vector and slot. Since we won't |
| | 669 // change the IC from any downstream misses, a dummy vector can be used. |
| | 670 Register vector = VectorStoreICDescriptor::VectorRegister(); |
| | 671 Register slot = VectorStoreICDescriptor::SlotRegister(); |
| | 672 DCHECK(!AreAliased(vector, slot, x3, x4, x5, x6)); |
| | 673 Handle<TypeFeedbackVector> dummy_vector = Handle<TypeFeedbackVector>::cast( |
| | 674 masm->isolate()->factory()->keyed_store_dummy_vector()); |
| | 675 int slot_index = dummy_vector->GetIndex(FeedbackVectorICSlot(0)); |
| | 676 __ LoadRoot(vector, Heap::kKeyedStoreDummyVectorRootIndex); |
| | 677 __ Mov(slot, Operand(Smi::FromInt(slot_index))); |
| | 678 } |
| | 679 |
| 666 Code::Flags flags = Code::RemoveTypeAndHolderFromFlags( | 680 Code::Flags flags = Code::RemoveTypeAndHolderFromFlags( |
| 667 Code::ComputeHandlerFlags(Code::STORE_IC)); | 681 Code::ComputeHandlerFlags(Code::STORE_IC)); |
| 668 masm->isolate()->stub_cache()->GenerateProbe( | 682 masm->isolate()->stub_cache()->GenerateProbe( |
| 669 masm, Code::STORE_IC, flags, false, receiver, key, x3, x4, x5, x6); | 683 masm, Code::STORE_IC, flags, false, receiver, key, x3, x4, x5, x6); |
| 670 // Cache miss. | 684 // Cache miss. |
| 671 __ B(&miss); | 685 __ B(&miss); |
| 672 | 686 |
| 673 __ Bind(&extra); | 687 __ Bind(&extra); |
| 674 // Extra capacity case: Check if there is extra capacity to | 688 // Extra capacity case: Check if there is extra capacity to |
| 675 // perform the store and update the length. Used for adding one | 689 // perform the store and update the length. Used for adding one |
| (...skipping 172 matching lines...) |
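Note on the block added above (new lines 667-678): it repeats the dummy vector/slot setup already used in the keyed load path (new lines 398-402). A minimal sketch of how the two paths could share that setup, assuming a hypothetical helper EmitDummyVectorAndSlot that is not part of this patch and using only the calls already visible in this diff:

// Sketch only, not part of the patch: shared emission of the dummy
// vector/slot pair used before a megamorphic stub cache probe.
static void EmitDummyVectorAndSlot(MacroAssembler* masm, Register vector,
                                   Register slot,
                                   Handle<TypeFeedbackVector> dummy_vector,
                                   Heap::RootListIndex dummy_vector_root) {
  // Slot 0 of the dummy vector is sufficient: downstream misses never write
  // back through it, so the IC state cannot change.
  int slot_index = dummy_vector->GetIndex(FeedbackVectorICSlot(0));
  __ LoadRoot(vector, dummy_vector_root);
  __ Mov(slot, Operand(Smi::FromInt(slot_index)));
}

The store path would then call it with Heap::kKeyedStoreDummyVectorRootIndex and the load path with Heap::kKeyedLoadDummyVectorRootIndex.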
| 848 } else { | 862 } else { |
| 849 DCHECK(to_patch->Mask(TestBranchMask) == TBNZ); | 863 DCHECK(to_patch->Mask(TestBranchMask) == TBNZ); |
| 850 // This is JumpIfSmi(smi_reg, branch_imm). | 864 // This is JumpIfSmi(smi_reg, branch_imm). |
| 851 patcher.tbz(smi_reg, 0, branch_imm); | 865 patcher.tbz(smi_reg, 0, branch_imm); |
| 852 } | 866 } |
| 853 } | 867 } |
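For context on the tail shown above: only the TBNZ-to-TBZ half of the inlined smi-code patching is visible in this hunk. A sketch of the full toggle follows, where the TBZ branch is inferred rather than quoted from the skipped lines:

// The inline smi check is a test-branch on bit 0 of smi_reg, so patching
// flips it between JumpIfNotSmi and JumpIfSmi. The TBZ case below is an
// assumption (those lines are skipped in this diff).
if (to_patch->Mask(TestBranchMask) == TBZ) {
  // This is JumpIfNotSmi(smi_reg, branch_imm).
  patcher.tbnz(smi_reg, 0, branch_imm);
} else {
  DCHECK(to_patch->Mask(TestBranchMask) == TBNZ);
  // This is JumpIfSmi(smi_reg, branch_imm).
  patcher.tbz(smi_reg, 0, branch_imm);
}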
| 854 } // namespace internal | 868 } // namespace internal |
| 855 } // namespace v8 | 869 } // namespace v8 |
| 856 | 870 |
| 857 #endif // V8_TARGET_ARCH_ARM64 | 871 #endif // V8_TARGET_ARCH_ARM64 |