Chromium Code Reviews

Unified Diff: src/mips64/code-stubs-mips64.cc

Issue 1144063002: Cleanup interface descriptors to reflect that vectors are part of loads. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Fixes for test failures. Created 5 years, 7 months ago
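Note (not part of the patch): the hunks below rename VectorLoadICDescriptor to LoadWithVectorDescriptor and rename VectorRawLoadStub / VectorRawKeyedLoadStub to LoadICStub / KeyedLoadICStub. As a minimal orientation sketch, the descriptor accessors touched by this change look as follows on MIPS64; the register assignments are taken from the comments in LoadICStub::GenerateImpl in this diff, and the fragment is illustrative only, not additional patch content:

  // Illustrative sketch only; register assignments follow the MIPS64
  // comments in LoadICStub::GenerateImpl in the diff below.
  Register receiver = LoadWithVectorDescriptor::ReceiverRegister();  // a1
  Register name = LoadWithVectorDescriptor::NameRegister();          // a2
  Register vector = LoadWithVectorDescriptor::VectorRegister();      // a3
  Register slot = LoadWithVectorDescriptor::SlotRegister();          // a0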
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
 #include "src/v8.h"
 
 #if V8_TARGET_ARCH_MIPS64
 
 #include "src/bootstrapper.h"
 #include "src/code-stubs.h"
(...skipping 1368 matching lines...)
 
 void LoadIndexedStringStub::Generate(MacroAssembler* masm) {
   // Return address is in ra.
   Label miss;
 
   Register receiver = LoadDescriptor::ReceiverRegister();
   Register index = LoadDescriptor::NameRegister();
   Register scratch = a5;
   Register result = v0;
   DCHECK(!scratch.is(receiver) && !scratch.is(index));
-  DCHECK(!scratch.is(VectorLoadICDescriptor::VectorRegister()));
+  DCHECK(!scratch.is(LoadWithVectorDescriptor::VectorRegister()));
 
   StringCharAtGenerator char_at_generator(receiver, index, scratch, result,
                                           &miss,  // When not a string.
                                           &miss,  // When not a number.
                                           &miss,  // When index out of range.
                                           STRING_INDEX_IS_ARRAY_INDEX,
                                           RECEIVER_IS_STRING);
   char_at_generator.GenerateFast(masm);
   __ Ret();
 
(...skipping 203 matching lines...)
     __ DropAndRet(HasArgsInRegisters() ? 0 : 2);
   }
 }
 
 
 void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
   Label miss;
   Register receiver = LoadDescriptor::ReceiverRegister();
   // Ensure that the vector and slot registers won't be clobbered before
   // calling the miss handler.
-  DCHECK(!AreAliased(a4, a5, VectorLoadICDescriptor::VectorRegister(),
-                     VectorLoadICDescriptor::SlotRegister()));
+  DCHECK(!AreAliased(a4, a5, LoadWithVectorDescriptor::VectorRegister(),
+                     LoadWithVectorDescriptor::SlotRegister()));
 
   NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(masm, receiver, a4,
                                                           a5, &miss);
   __ bind(&miss);
   PropertyAccessCompiler::TailCallBuiltin(
       masm, PropertyAccessCompiler::MissBuiltin(Code::LOAD_IC));
 }
 
 
 void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
(...skipping 1498 matching lines...)
   __ bind(&index_not_smi_);
   // If index is a heap number, try converting it to an integer.
   __ CheckMap(index_,
               result_,
               Heap::kHeapNumberMapRootIndex,
               index_not_number_,
               DONT_DO_SMI_CHECK);
   call_helper.BeforeCall(masm);
   // Consumed by runtime conversion function:
   if (embed_mode == PART_OF_IC_HANDLER) {
-    __ Push(VectorLoadICDescriptor::VectorRegister(),
-            VectorLoadICDescriptor::SlotRegister(), object_, index_);
+    __ Push(LoadWithVectorDescriptor::VectorRegister(),
+            LoadWithVectorDescriptor::SlotRegister(), object_, index_);
   } else {
     __ Push(object_, index_);
   }
   if (index_flags_ == STRING_INDEX_IS_NUMBER) {
     __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
   } else {
     DCHECK(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
     // NumberToSmi discards numbers that are not exact integers.
     __ CallRuntime(Runtime::kNumberToSmi, 1);
   }
 
   // Save the conversion result before the pop instructions below
   // have a chance to overwrite it.
 
   __ Move(index_, v0);
   if (embed_mode == PART_OF_IC_HANDLER) {
-    __ Pop(VectorLoadICDescriptor::VectorRegister(),
-           VectorLoadICDescriptor::SlotRegister(), object_);
+    __ Pop(LoadWithVectorDescriptor::VectorRegister(),
+           LoadWithVectorDescriptor::SlotRegister(), object_);
   } else {
     __ pop(object_);
   }
   // Reload the instance type.
   __ ld(result_, FieldMemOperand(object_, HeapObject::kMapOffset));
   __ lbu(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset));
   call_helper.AfterCall(masm);
   // If index is still not a smi, it must be out of range.
   __ JumpIfNotSmi(index_, index_out_of_range_);
   // Otherwise, return to the fast path.
 
(...skipping 1456 matching lines...)
     __ Daddu(a1, a1, Operand(1));
   }
   masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
   __ dsll(a1, a1, kPointerSizeLog2);
   __ Ret(USE_DELAY_SLOT);
   __ Daddu(sp, sp, a1);
 }
 
 
 void LoadICTrampolineStub::Generate(MacroAssembler* masm) {
-  EmitLoadTypeFeedbackVector(masm, VectorLoadICDescriptor::VectorRegister());
-  VectorRawLoadStub stub(isolate(), state());
+  EmitLoadTypeFeedbackVector(masm, LoadWithVectorDescriptor::VectorRegister());
+  LoadICStub stub(isolate(), state());
   stub.GenerateForTrampoline(masm);
 }
 
 
 void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) {
-  EmitLoadTypeFeedbackVector(masm, VectorLoadICDescriptor::VectorRegister());
-  VectorRawKeyedLoadStub stub(isolate());
+  EmitLoadTypeFeedbackVector(masm, LoadWithVectorDescriptor::VectorRegister());
+  KeyedLoadICStub stub(isolate());
   stub.GenerateForTrampoline(masm);
 }
 
 
 void CallICTrampolineStub::Generate(MacroAssembler* masm) {
   EmitLoadTypeFeedbackVector(masm, a2);
   CallICStub stub(isolate(), state());
   __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
 }
 
 
 void CallIC_ArrayTrampolineStub::Generate(MacroAssembler* masm) {
   EmitLoadTypeFeedbackVector(masm, a2);
   CallIC_ArrayStub stub(isolate(), state());
   __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
 }
 
 
-void VectorRawLoadStub::Generate(MacroAssembler* masm) {
-  GenerateImpl(masm, false);
-}
+void LoadICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); }
 
 
-void VectorRawLoadStub::GenerateForTrampoline(MacroAssembler* masm) {
+void LoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
   GenerateImpl(masm, true);
 }
 
 
 static void HandleArrayCases(MacroAssembler* masm, Register receiver,
                              Register key, Register vector, Register slot,
                              Register feedback, Register receiver_map,
                              Register scratch1, Register scratch2,
                              bool is_polymorphic, Label* miss) {
   // feedback initially contains the feedback array
(...skipping 72 matching lines...)
   Register handler = feedback;
   __ SmiScale(handler, slot, kPointerSizeLog2);
   __ Daddu(handler, vector, Operand(handler));
   __ ld(handler,
         FieldMemOperand(handler, FixedArray::kHeaderSize + kPointerSize));
   __ Daddu(t9, handler, Code::kHeaderSize - kHeapObjectTag);
   __ Jump(t9);
 }
 
 
-void VectorRawLoadStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
-  Register receiver = VectorLoadICDescriptor::ReceiverRegister();  // a1
-  Register name = VectorLoadICDescriptor::NameRegister();  // a2
-  Register vector = VectorLoadICDescriptor::VectorRegister();  // a3
-  Register slot = VectorLoadICDescriptor::SlotRegister();  // a0
+void LoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
+  Register receiver = LoadWithVectorDescriptor::ReceiverRegister();  // a1
+  Register name = LoadWithVectorDescriptor::NameRegister();  // a2
+  Register vector = LoadWithVectorDescriptor::VectorRegister();  // a3
+  Register slot = LoadWithVectorDescriptor::SlotRegister();  // a0
   Register feedback = a4;
   Register receiver_map = a5;
   Register scratch1 = a6;
 
   __ SmiScale(feedback, slot, kPointerSizeLog2);
   __ Daddu(feedback, vector, Operand(feedback));
   __ ld(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize));
 
   // Try to quickly handle the monomorphic case without knowing for sure
   // if we have a weak cell in feedback. We do know it's safe to look
(...skipping 22 matching lines...)
 
   __ bind(&miss);
   LoadIC::GenerateMiss(masm);
 
   __ bind(&load_smi_map);
   __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
   __ Branch(&compare_map);
 }
 
 
-void VectorRawKeyedLoadStub::Generate(MacroAssembler* masm) {
+void KeyedLoadICStub::Generate(MacroAssembler* masm) {
   GenerateImpl(masm, false);
 }
 
 
-void VectorRawKeyedLoadStub::GenerateForTrampoline(MacroAssembler* masm) {
+void KeyedLoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
   GenerateImpl(masm, true);
 }
 
 
-void VectorRawKeyedLoadStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
-  Register receiver = VectorLoadICDescriptor::ReceiverRegister();  // a1
-  Register key = VectorLoadICDescriptor::NameRegister();  // a2
-  Register vector = VectorLoadICDescriptor::VectorRegister();  // a3
-  Register slot = VectorLoadICDescriptor::SlotRegister();  // a0
+void KeyedLoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
+  Register receiver = LoadWithVectorDescriptor::ReceiverRegister();  // a1
+  Register key = LoadWithVectorDescriptor::NameRegister();  // a2
+  Register vector = LoadWithVectorDescriptor::VectorRegister();  // a3
+  Register slot = LoadWithVectorDescriptor::SlotRegister();  // a0
   Register feedback = a4;
   Register receiver_map = a5;
   Register scratch1 = a6;
 
   __ SmiScale(feedback, slot, kPointerSizeLog2);
   __ Daddu(feedback, vector, Operand(feedback));
   __ ld(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize));
 
   // Try to quickly handle the monomorphic case without knowing for sure
   // if we have a weak cell in feedback. We do know it's safe to look
(...skipping 730 matching lines...)
       kStackUnwindSpace, kInvalidStackOffset,
       MemOperand(fp, 6 * kPointerSize), NULL);
 }
 
 
 #undef __
 
 } }  // namespace v8::internal
 
 #endif  // V8_TARGET_ARCH_MIPS64
