| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 | 5 |
| 6 #include "src/v8.h" | 6 #include "src/v8.h" |
| 7 | 7 |
| 8 #if V8_TARGET_ARCH_MIPS64 | 8 #if V8_TARGET_ARCH_MIPS64 |
| 9 | 9 |
| 10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
| (...skipping 415 matching lines...) |
| 426 __ ld(a3, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); | 426 __ ld(a3, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); |
| 427 __ ld(a4, FieldMemOperand(a3, HeapObject::kMapOffset)); | 427 __ ld(a4, FieldMemOperand(a3, HeapObject::kMapOffset)); |
| 428 __ LoadRoot(at, Heap::kHashTableMapRootIndex); | 428 __ LoadRoot(at, Heap::kHashTableMapRootIndex); |
| 429 __ Branch(&probe_dictionary, eq, a4, Operand(at)); | 429 __ Branch(&probe_dictionary, eq, a4, Operand(at)); |
| 430 | 430 |
| 431 // The handlers in the stub cache expect a vector and slot. Since we won't | 431 // The handlers in the stub cache expect a vector and slot. Since we won't |
| 432 // change the IC from any downstream misses, a dummy vector can be used. | 432 // change the IC from any downstream misses, a dummy vector can be used. |
| 433 Register vector = LoadWithVectorDescriptor::VectorRegister(); | 433 Register vector = LoadWithVectorDescriptor::VectorRegister(); |
| 434 Register slot = LoadWithVectorDescriptor::SlotRegister(); | 434 Register slot = LoadWithVectorDescriptor::SlotRegister(); |
| 435 DCHECK(!AreAliased(vector, slot, a4, a5, a6, t1)); | 435 DCHECK(!AreAliased(vector, slot, a4, a5, a6, t1)); |
| 436 Handle<TypeFeedbackVector> dummy_vector = Handle<TypeFeedbackVector>::cast( | 436 Handle<TypeFeedbackVector> dummy_vector = |
| 437 masm->isolate()->factory()->keyed_load_dummy_vector()); | 437 TypeFeedbackVector::DummyVector(masm->isolate()); |
| 438 int slot_index = dummy_vector->GetIndex(FeedbackVectorICSlot(0)); | 438 int slot_index = dummy_vector->GetIndex( |
| 439 __ LoadRoot(vector, Heap::kKeyedLoadDummyVectorRootIndex); | 439 FeedbackVectorICSlot(TypeFeedbackVector::kDummyKeyedLoadICSlot)); |
| 440 __ LoadRoot(vector, Heap::kDummyVectorRootIndex); |
| 440 __ li(slot, Operand(Smi::FromInt(slot_index))); | 441 __ li(slot, Operand(Smi::FromInt(slot_index))); |
| 441 | 442 |
| 442 Code::Flags flags = Code::RemoveTypeAndHolderFromFlags( | 443 Code::Flags flags = Code::RemoveTypeAndHolderFromFlags( |
| 443 Code::ComputeHandlerFlags(Code::LOAD_IC)); | 444 Code::ComputeHandlerFlags(Code::LOAD_IC)); |
| 444 masm->isolate()->stub_cache()->GenerateProbe( | 445 masm->isolate()->stub_cache()->GenerateProbe( |
| 445 masm, Code::LOAD_IC, flags, false, receiver, key, a4, a5, a6, t1); | 446 masm, Code::LOAD_IC, flags, false, receiver, key, a4, a5, a6, t1); |
| 446 // Cache miss. | 447 // Cache miss. |
| 447 GenerateMiss(masm); | 448 GenerateMiss(masm); |
| 448 | 449 |
| 449 // Do a quick inline probe of the receiver's dictionary, if it | 450 // Do a quick inline probe of the receiver's dictionary, if it |
| (...skipping 225 matching lines...) |
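The keyed-load fast path above now takes its dummy feedback state from the shared TypeFeedbackVector::DummyVector() helper and the generic Heap::kDummyVectorRootIndex root, replacing the dedicated keyed_load_dummy_vector() factory handle, the hard-coded FeedbackVectorICSlot(0), and the kKeyedLoadDummyVectorRootIndex root. A consolidated sketch of the new sequence, stitched together from the NEW column (not a verbatim excerpt; the enclosing function and register setup are elided):

  Register vector = LoadWithVectorDescriptor::VectorRegister();
  Register slot = LoadWithVectorDescriptor::SlotRegister();
  DCHECK(!AreAliased(vector, slot, a4, a5, a6, t1));
  Handle<TypeFeedbackVector> dummy_vector =
      TypeFeedbackVector::DummyVector(masm->isolate());
  int slot_index = dummy_vector->GetIndex(
      FeedbackVectorICSlot(TypeFeedbackVector::kDummyKeyedLoadICSlot));
  __ LoadRoot(vector, Heap::kDummyVectorRootIndex);  // One shared dummy vector root.
  __ li(slot, Operand(Smi::FromInt(slot_index)));    // Slot index passed as a Smi.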
| 675 __ ld(a4, FieldMemOperand(key, HeapObject::kMapOffset)); | 676 __ ld(a4, FieldMemOperand(key, HeapObject::kMapOffset)); |
| 676 __ lb(a4, FieldMemOperand(a4, Map::kInstanceTypeOffset)); | 677 __ lb(a4, FieldMemOperand(a4, Map::kInstanceTypeOffset)); |
| 677 __ JumpIfNotUniqueNameInstanceType(a4, &slow); | 678 __ JumpIfNotUniqueNameInstanceType(a4, &slow); |
| 678 | 679 |
| 679 if (FLAG_vector_stores) { | 680 if (FLAG_vector_stores) { |
| 680 // The handlers in the stub cache expect a vector and slot. Since we won't | 681 // The handlers in the stub cache expect a vector and slot. Since we won't |
| 681 // change the IC from any downstream misses, a dummy vector can be used. | 682 // change the IC from any downstream misses, a dummy vector can be used. |
| 682 Register vector = LoadWithVectorDescriptor::VectorRegister(); | 683 Register vector = LoadWithVectorDescriptor::VectorRegister(); |
| 683 Register slot = LoadWithVectorDescriptor::SlotRegister(); | 684 Register slot = LoadWithVectorDescriptor::SlotRegister(); |
| 684 DCHECK(!AreAliased(vector, slot, a3, a4, a5, a6)); | 685 DCHECK(!AreAliased(vector, slot, a3, a4, a5, a6)); |
| 685 Handle<TypeFeedbackVector> dummy_vector = Handle<TypeFeedbackVector>::cast( | 686 Handle<TypeFeedbackVector> dummy_vector = |
| 686 masm->isolate()->factory()->keyed_store_dummy_vector()); | 687 TypeFeedbackVector::DummyVector(masm->isolate()); |
| 687 int slot_index = dummy_vector->GetIndex(FeedbackVectorICSlot(0)); | 688 int slot_index = dummy_vector->GetIndex( |
| 688 __ LoadRoot(vector, Heap::kKeyedStoreDummyVectorRootIndex); | 689 FeedbackVectorICSlot(TypeFeedbackVector::kDummyKeyedStoreICSlot)); |
| 690 __ LoadRoot(vector, Heap::kDummyVectorRootIndex); |
| 689 __ li(slot, Operand(Smi::FromInt(slot_index))); | 691 __ li(slot, Operand(Smi::FromInt(slot_index))); |
| 690 } | 692 } |
| 691 | 693 |
| 692 Code::Flags flags = Code::RemoveTypeAndHolderFromFlags( | 694 Code::Flags flags = Code::RemoveTypeAndHolderFromFlags( |
| 693 Code::ComputeHandlerFlags(Code::STORE_IC)); | 695 Code::ComputeHandlerFlags(Code::STORE_IC)); |
| 694 masm->isolate()->stub_cache()->GenerateProbe( | 696 masm->isolate()->stub_cache()->GenerateProbe( |
| 695 masm, Code::STORE_IC, flags, false, receiver, key, a3, a4, a5, a6); | 697 masm, Code::STORE_IC, flags, false, receiver, key, a3, a4, a5, a6); |
| 696 // Cache miss. | 698 // Cache miss. |
| 697 __ Branch(&miss); | 699 __ Branch(&miss); |
| 698 | 700 |
| (...skipping 202 matching lines...) |
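The keyed-store path above gets the same treatment, differing only in the dummy slot constant and the registers checked for aliasing; under FLAG_vector_stores it picks the store slot out of the same shared dummy vector (again a sketch assembled from the NEW column, not a verbatim excerpt):

  int slot_index = dummy_vector->GetIndex(
      FeedbackVectorICSlot(TypeFeedbackVector::kDummyKeyedStoreICSlot));
  __ LoadRoot(vector, Heap::kDummyVectorRootIndex);
  __ li(slot, Operand(Smi::FromInt(slot_index)));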
| 901 patcher.ChangeBranchCondition(ne); | 903 patcher.ChangeBranchCondition(ne); |
| 902 } else { | 904 } else { |
| 903 DCHECK(Assembler::IsBne(branch_instr)); | 905 DCHECK(Assembler::IsBne(branch_instr)); |
| 904 patcher.ChangeBranchCondition(eq); | 906 patcher.ChangeBranchCondition(eq); |
| 905 } | 907 } |
| 906 } | 908 } |
| 907 } // namespace internal | 909 } // namespace internal |
| 908 } // namespace v8 | 910 } // namespace v8 |
| 909 | 911 |
| 910 #endif // V8_TARGET_ARCH_MIPS64 | 912 #endif // V8_TARGET_ARCH_MIPS64 |