Chromium Code Reviews

Diff: src/ppc/code-stubs-ppc.cc

Issue 1145223003: PPC: Cleanup interface descriptors to reflect that vectors are part of loads. (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 7 months ago
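
Note: the patch is a mechanical rename — VectorLoadICDescriptor becomes LoadWithVectorDescriptor, and VectorRawLoadStub / VectorRawKeyedLoadStub become LoadICStub / KeyedLoadICStub — making explicit that a load IC now takes its type-feedback vector and slot alongside receiver and name. Below is a minimal stand-alone sketch of that four-register layout, using the PPC assignments noted in the comments of LoadICStub::GenerateImpl in this diff (receiver r4, name r5, vector r6, slot r3). The types are hypothetical illustrations, not V8's actual InterfaceDescriptor machinery.

// Hypothetical illustration only -- not V8's InterfaceDescriptor API.
#include <cstdio>

// PPC general-purpose registers referenced in the diff below.
enum class PPCReg : int { r3 = 3, r4 = 4, r5 = 5, r6 = 6 };

// What "vectors are part of loads" means in practice: the calling
// convention for a load IC names four registers, not two.
struct LoadWithVectorLayout {
  PPCReg receiver = PPCReg::r4;  // object being loaded from
  PPCReg name = PPCReg::r5;      // property name or element key
  PPCReg slot = PPCReg::r3;      // feedback slot index (as a Smi)
  PPCReg vector = PPCReg::r6;    // type feedback vector
};

int main() {
  LoadWithVectorLayout layout;
  std::printf("receiver=r%d name=r%d slot=r%d vector=r%d\n",
              static_cast<int>(layout.receiver), static_cast<int>(layout.name),
              static_cast<int>(layout.slot), static_cast<int>(layout.vector));
  return 0;
}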
 // Copyright 2014 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/v8.h"

 #if V8_TARGET_ARCH_PPC

 #include "src/base/bits.h"
 #include "src/bootstrapper.h"
(...skipping 1556 matching lines...)
     __ Ret(HasArgsInRegisters() ? 0 : 2);
   }
 }


 void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
   Label miss;
   Register receiver = LoadDescriptor::ReceiverRegister();
   // Ensure that the vector and slot registers won't be clobbered before
   // calling the miss handler.
-  DCHECK(!AreAliased(r7, r8, VectorLoadICDescriptor::VectorRegister(),
-                     VectorLoadICDescriptor::SlotRegister()));
+  DCHECK(!AreAliased(r7, r8, LoadWithVectorDescriptor::VectorRegister(),
+                     LoadWithVectorDescriptor::SlotRegister()));

   NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(masm, receiver, r7,
                                                           r8, &miss);
   __ bind(&miss);
   PropertyAccessCompiler::TailCallBuiltin(
       masm, PropertyAccessCompiler::MissBuiltin(Code::LOAD_IC));
 }


 void LoadIndexedStringStub::Generate(MacroAssembler* masm) {
   // Return address is in lr.
   Label miss;

   Register receiver = LoadDescriptor::ReceiverRegister();
   Register index = LoadDescriptor::NameRegister();
   Register scratch = r8;
   Register result = r3;
   DCHECK(!scratch.is(receiver) && !scratch.is(index));
-  DCHECK(!scratch.is(VectorLoadICDescriptor::VectorRegister()) &&
-         result.is(VectorLoadICDescriptor::SlotRegister()));
+  DCHECK(!scratch.is(LoadWithVectorDescriptor::VectorRegister()) &&
+         result.is(LoadWithVectorDescriptor::SlotRegister()));

   // StringCharAtGenerator doesn't use the result register until it's passed
   // the different miss possibilities. If it did, we would have a conflict
   // when FLAG_vector_ics is true.
   StringCharAtGenerator char_at_generator(receiver, index, scratch, result,
                                           &miss,  // When not a string.
                                           &miss,  // When not a number.
                                           &miss,  // When index out of range.
                                           STRING_INDEX_IS_ARRAY_INDEX,
                                           RECEIVER_IS_STRING);
(...skipping 1508 matching lines...)
     const RuntimeCallHelper& call_helper) {
   __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);

   // Index is not a smi.
   __ bind(&index_not_smi_);
   // If index is a heap number, try converting it to an integer.
   __ CheckMap(index_, result_, Heap::kHeapNumberMapRootIndex, index_not_number_,
               DONT_DO_SMI_CHECK);
   call_helper.BeforeCall(masm);
   if (embed_mode == PART_OF_IC_HANDLER) {
-    __ Push(VectorLoadICDescriptor::VectorRegister(),
-            VectorLoadICDescriptor::SlotRegister(), object_, index_);
+    __ Push(LoadWithVectorDescriptor::VectorRegister(),
+            LoadWithVectorDescriptor::SlotRegister(), object_, index_);
   } else {
     // index_ is consumed by runtime conversion function.
     __ Push(object_, index_);
   }
   if (index_flags_ == STRING_INDEX_IS_NUMBER) {
     __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
   } else {
     DCHECK(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
     // NumberToSmi discards numbers that are not exact integers.
     __ CallRuntime(Runtime::kNumberToSmi, 1);
   }
   // Save the conversion result before the pop instructions below
   // have a chance to overwrite it.
   __ Move(index_, r3);
   if (embed_mode == PART_OF_IC_HANDLER) {
-    __ Pop(VectorLoadICDescriptor::VectorRegister(),
-           VectorLoadICDescriptor::SlotRegister(), object_);
+    __ Pop(LoadWithVectorDescriptor::VectorRegister(),
+           LoadWithVectorDescriptor::SlotRegister(), object_);
   } else {
     __ pop(object_);
   }
   // Reload the instance type.
   __ LoadP(result_, FieldMemOperand(object_, HeapObject::kMapOffset));
   __ lbz(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset));
   call_helper.AfterCall(masm);
   // If index is still not a smi, it must be out of range.
   __ JumpIfNotSmi(index_, index_out_of_range_);
   // Otherwise, return to the fast path.
(...skipping 1431 matching lines...)
     __ addi(r4, r4, Operand(1));
   }
   masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
   __ slwi(r4, r4, Operand(kPointerSizeLog2));
   __ add(sp, sp, r4);
   __ Ret();
 }


 void LoadICTrampolineStub::Generate(MacroAssembler* masm) {
-  EmitLoadTypeFeedbackVector(masm, VectorLoadICDescriptor::VectorRegister());
-  VectorRawLoadStub stub(isolate(), state());
+  EmitLoadTypeFeedbackVector(masm, LoadWithVectorDescriptor::VectorRegister());
+  LoadICStub stub(isolate(), state());
   stub.GenerateForTrampoline(masm);
 }


 void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) {
-  EmitLoadTypeFeedbackVector(masm, VectorLoadICDescriptor::VectorRegister());
-  VectorRawKeyedLoadStub stub(isolate());
+  EmitLoadTypeFeedbackVector(masm, LoadWithVectorDescriptor::VectorRegister());
+  KeyedLoadICStub stub(isolate());
   stub.GenerateForTrampoline(masm);
 }


 void CallICTrampolineStub::Generate(MacroAssembler* masm) {
   EmitLoadTypeFeedbackVector(masm, r5);
   CallICStub stub(isolate(), state());
   __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
 }


 void CallIC_ArrayTrampolineStub::Generate(MacroAssembler* masm) {
   EmitLoadTypeFeedbackVector(masm, r5);
   CallIC_ArrayStub stub(isolate(), state());
   __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
 }

-void VectorRawLoadStub::Generate(MacroAssembler* masm) {
-  GenerateImpl(masm, false);
-}
+void LoadICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); }


-void VectorRawLoadStub::GenerateForTrampoline(MacroAssembler* masm) {
+void LoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
   GenerateImpl(masm, true);
 }


 static void HandleArrayCases(MacroAssembler* masm, Register receiver,
                              Register key, Register vector, Register slot,
                              Register feedback, Register receiver_map,
                              Register scratch1, Register scratch2,
                              bool is_polymorphic, Label* miss) {
   // feedback initially contains the feedback array
(...skipping 79 matching lines...)
   Register handler = feedback;
   __ SmiToPtrArrayOffset(r0, slot);
   __ add(handler, vector, r0);
   __ LoadP(handler,
            FieldMemOperand(handler, FixedArray::kHeaderSize + kPointerSize));
   __ addi(ip, handler, Operand(Code::kHeaderSize - kHeapObjectTag));
   __ Jump(ip);
 }


-void VectorRawLoadStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
-  Register receiver = VectorLoadICDescriptor::ReceiverRegister();  // r4
-  Register name = VectorLoadICDescriptor::NameRegister();          // r5
-  Register vector = VectorLoadICDescriptor::VectorRegister();      // r6
-  Register slot = VectorLoadICDescriptor::SlotRegister();          // r3
+void LoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
+  Register receiver = LoadWithVectorDescriptor::ReceiverRegister();  // r4
+  Register name = LoadWithVectorDescriptor::NameRegister();          // r5
+  Register vector = LoadWithVectorDescriptor::VectorRegister();      // r6
+  Register slot = LoadWithVectorDescriptor::SlotRegister();          // r3
   Register feedback = r7;
   Register receiver_map = r8;
   Register scratch1 = r9;

   __ SmiToPtrArrayOffset(r0, slot);
   __ add(feedback, vector, r0);
   __ LoadP(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize));

   // Try to quickly handle the monomorphic case without knowing for sure
   // if we have a weak cell in feedback. We do know it's safe to look
(...skipping 22 matching lines...)

   __ bind(&miss);
   LoadIC::GenerateMiss(masm);

   __ bind(&load_smi_map);
   __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
   __ b(&compare_map);
 }


-void VectorRawKeyedLoadStub::Generate(MacroAssembler* masm) {
+void KeyedLoadICStub::Generate(MacroAssembler* masm) {
   GenerateImpl(masm, false);
 }


-void VectorRawKeyedLoadStub::GenerateForTrampoline(MacroAssembler* masm) {
+void KeyedLoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
   GenerateImpl(masm, true);
 }


-void VectorRawKeyedLoadStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
-  Register receiver = VectorLoadICDescriptor::ReceiverRegister();  // r4
-  Register key = VectorLoadICDescriptor::NameRegister();           // r5
-  Register vector = VectorLoadICDescriptor::VectorRegister();      // r6
-  Register slot = VectorLoadICDescriptor::SlotRegister();          // r3
+void KeyedLoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
+  Register receiver = LoadWithVectorDescriptor::ReceiverRegister();  // r4
+  Register key = LoadWithVectorDescriptor::NameRegister();           // r5
+  Register vector = LoadWithVectorDescriptor::VectorRegister();      // r6
+  Register slot = LoadWithVectorDescriptor::SlotRegister();          // r3
   Register feedback = r7;
   Register receiver_map = r8;
   Register scratch1 = r9;

   __ SmiToPtrArrayOffset(r0, slot);
   __ add(feedback, vector, r0);
   __ LoadP(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize));

   // Try to quickly handle the monomorphic case without knowing for sure
   // if we have a weak cell in feedback. We do know it's safe to look
(...skipping 804 matching lines...)
                               kStackUnwindSpace, NULL,
                               MemOperand(fp, 6 * kPointerSize), NULL);
 }


 #undef __
 }
 }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_PPC
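
Note on the trampoline stubs in the diff above (LoadICTrampolineStub, KeyedLoadICTrampolineStub, CallICTrampolineStub): each one first loads the type feedback vector into the register the descriptor expects and then delegates to the corresponding vector-aware stub via GenerateForTrampoline. Below is a stand-alone sketch of that load-then-delegate shape, with hypothetical stand-ins for EmitLoadTypeFeedbackVector and the full stub; it is an illustration only, not V8 code.

// Hypothetical sketch of the trampoline pattern shown above, not V8 code.
#include <cstdio>
#include <functional>

struct FeedbackVector { int length; };

// Stand-in for the full stub, which assumes the vector is already in place.
void FullLoadICStub(const FeedbackVector& vector, int slot) {
  std::printf("full IC: slot %d of %d\n", slot, vector.length);
}

// Stand-in for the trampoline: fetch the vector, then fall through.
void LoadICTrampoline(const std::function<FeedbackVector()>& emit_load_vector,
                      int slot) {
  FeedbackVector vector = emit_load_vector();  // EmitLoadTypeFeedbackVector analogue
  FullLoadICStub(vector, slot);                // GenerateForTrampoline analogue
}

int main() {
  LoadICTrampoline([] { return FeedbackVector{8}; }, 2);
  return 0;
}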