OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_PPC | 5 #if V8_TARGET_ARCH_PPC |
6 | 6 |
7 #include "src/base/bits.h" | 7 #include "src/base/bits.h" |
8 #include "src/bootstrapper.h" | 8 #include "src/bootstrapper.h" |
9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
(...skipping 2675 matching lines...)
2686 __ LoadP(r7, FieldMemOperand(r7, SharedFunctionInfo::kConstructStubOffset)); | 2686 __ LoadP(r7, FieldMemOperand(r7, SharedFunctionInfo::kConstructStubOffset)); |
2687 __ addi(ip, r7, Operand(Code::kHeaderSize - kHeapObjectTag)); | 2687 __ addi(ip, r7, Operand(Code::kHeaderSize - kHeapObjectTag)); |
2688 __ JumpToJSEntry(ip); | 2688 __ JumpToJSEntry(ip); |
2689 | 2689 |
2690 __ bind(&non_function); | 2690 __ bind(&non_function); |
2691 __ mr(r6, r4); | 2691 __ mr(r6, r4); |
2692 __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); | 2692 __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); |
2693 } | 2693 } |
2694 | 2694 |
2695 | 2695 |
2696 static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) { | |
2697 __ LoadP(vector, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | |
2698 __ LoadP(vector, | |
2699 FieldMemOperand(vector, JSFunction::kSharedFunctionInfoOffset)); | |
2700 __ LoadP(vector, | |
2701 FieldMemOperand(vector, SharedFunctionInfo::kFeedbackVectorOffset)); | |
2702 } | |
2703 | |
2704 | |
2705 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) { | 2696 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) { |
2706 // r4 - function | 2697 // r4 - function |
2707 // r6 - slot id | 2698 // r6 - slot id |
2708 // r5 - vector | 2699 // r5 - vector |
2709 // r7 - allocation site (loaded from vector[slot]) | 2700 // r7 - allocation site (loaded from vector[slot]) |
2710 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r8); | 2701 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r8); |
2711 __ cmp(r4, r8); | 2702 __ cmp(r4, r8); |
2712 __ bne(miss); | 2703 __ bne(miss); |
2713 | 2704 |
2714 __ mov(r3, Operand(arg_count())); | 2705 __ mov(r3, Operand(arg_count())); |
(...skipping 1755 matching lines...)
4470 __ addi(r4, r4, Operand(1)); | 4461 __ addi(r4, r4, Operand(1)); |
4471 } | 4462 } |
4472 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); | 4463 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); |
4473 __ slwi(r4, r4, Operand(kPointerSizeLog2)); | 4464 __ slwi(r4, r4, Operand(kPointerSizeLog2)); |
4474 __ add(sp, sp, r4); | 4465 __ add(sp, sp, r4); |
4475 __ Ret(); | 4466 __ Ret(); |
4476 } | 4467 } |
4477 | 4468 |
4478 | 4469 |
4479 void LoadICTrampolineStub::Generate(MacroAssembler* masm) { | 4470 void LoadICTrampolineStub::Generate(MacroAssembler* masm) { |
4480 EmitLoadTypeFeedbackVector(masm, LoadWithVectorDescriptor::VectorRegister()); | 4471 __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister()); |
4481 LoadICStub stub(isolate(), state()); | 4472 LoadICStub stub(isolate(), state()); |
4482 stub.GenerateForTrampoline(masm); | 4473 stub.GenerateForTrampoline(masm); |
4483 } | 4474 } |
4484 | 4475 |
4485 | 4476 |
4486 void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) { | 4477 void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) { |
4487 EmitLoadTypeFeedbackVector(masm, LoadWithVectorDescriptor::VectorRegister()); | 4478 __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister()); |
4488 KeyedLoadICStub stub(isolate(), state()); | 4479 KeyedLoadICStub stub(isolate(), state()); |
4489 stub.GenerateForTrampoline(masm); | 4480 stub.GenerateForTrampoline(masm); |
4490 } | 4481 } |
4491 | 4482 |
4492 | 4483 |
4493 void CallICTrampolineStub::Generate(MacroAssembler* masm) { | 4484 void CallICTrampolineStub::Generate(MacroAssembler* masm) { |
4494 EmitLoadTypeFeedbackVector(masm, r5); | 4485 __ EmitLoadTypeFeedbackVector(r5); |
4495 CallICStub stub(isolate(), state()); | 4486 CallICStub stub(isolate(), state()); |
4496 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); | 4487 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); |
4497 } | 4488 } |
4498 | 4489 |
4499 | 4490 |
4500 void LoadICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); } | 4491 void LoadICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); } |
4501 | 4492 |
4502 | 4493 |
4503 void LoadICStub::GenerateForTrampoline(MacroAssembler* masm) { | 4494 void LoadICStub::GenerateForTrampoline(MacroAssembler* masm) { |
4504 GenerateImpl(masm, true); | 4495 GenerateImpl(masm, true); |
(...skipping 205 matching lines...)
4710 __ bind(&miss); | 4701 __ bind(&miss); |
4711 KeyedLoadIC::GenerateMiss(masm); | 4702 KeyedLoadIC::GenerateMiss(masm); |
4712 | 4703 |
4713 __ bind(&load_smi_map); | 4704 __ bind(&load_smi_map); |
4714 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); | 4705 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); |
4715 __ b(&compare_map); | 4706 __ b(&compare_map); |
4716 } | 4707 } |
4717 | 4708 |
4718 | 4709 |
4719 void VectorStoreICTrampolineStub::Generate(MacroAssembler* masm) { | 4710 void VectorStoreICTrampolineStub::Generate(MacroAssembler* masm) { |
4720 EmitLoadTypeFeedbackVector(masm, VectorStoreICDescriptor::VectorRegister()); | 4711 __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister()); |
4721 VectorStoreICStub stub(isolate(), state()); | 4712 VectorStoreICStub stub(isolate(), state()); |
4722 stub.GenerateForTrampoline(masm); | 4713 stub.GenerateForTrampoline(masm); |
4723 } | 4714 } |
4724 | 4715 |
4725 | 4716 |
4726 void VectorKeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) { | 4717 void VectorKeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) { |
4727 EmitLoadTypeFeedbackVector(masm, VectorStoreICDescriptor::VectorRegister()); | 4718 __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister()); |
4728 VectorKeyedStoreICStub stub(isolate(), state()); | 4719 VectorKeyedStoreICStub stub(isolate(), state()); |
4729 stub.GenerateForTrampoline(masm); | 4720 stub.GenerateForTrampoline(masm); |
4730 } | 4721 } |
4731 | 4722 |
4732 | 4723 |
4733 void VectorStoreICStub::Generate(MacroAssembler* masm) { | 4724 void VectorStoreICStub::Generate(MacroAssembler* masm) { |
4734 GenerateImpl(masm, false); | 4725 GenerateImpl(masm, false); |
4735 } | 4726 } |
4736 | 4727 |
4737 | 4728 |
(...skipping 1110 matching lines...)
5848 kStackUnwindSpace, NULL, | 5839 kStackUnwindSpace, NULL, |
5849 MemOperand(fp, 6 * kPointerSize), NULL); | 5840 MemOperand(fp, 6 * kPointerSize), NULL); |
5850 } | 5841 } |
5851 | 5842 |
5852 | 5843 |
5853 #undef __ | 5844 #undef __ |
5854 } // namespace internal | 5845 } // namespace internal |
5855 } // namespace v8 | 5846 } // namespace v8 |
5856 | 5847 |
5857 #endif // V8_TARGET_ARCH_PPC | 5848 #endif // V8_TARGET_ARCH_PPC |
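
For context, a minimal sketch (not the actual V8 sources) of the refactoring pattern this diff applies: the file-local static helper EmitLoadTypeFeedbackVector(MacroAssembler*, Register) is deleted, and call sites now use `__ EmitLoadTypeFeedbackVector(vector)`, i.e. a method on MacroAssembler reached through the `__` macro (which expands to `masm->` via ACCESS_MASM). The Register type, the printf body, and the CallICTrampolineStubGenerate function below are illustrative placeholders only, assumed for the sake of a self-contained example.

#include <cstdio>

// Illustrative stand-ins only; the real Register and MacroAssembler types
// live in the V8 sources.
struct Register { const char* name; };

struct MacroAssembler {
  // New home of the helper: a MacroAssembler method, so any stub
  // (LoadIC, KeyedLoadIC, CallIC, VectorStoreIC, ...) can reuse it
  // instead of each architecture file keeping a static copy.
  void EmitLoadTypeFeedbackVector(Register vector) {
    // The real method emits three LoadP instructions:
    //   vector <- [fp + JavaScriptFrameConstants::kFunctionOffset]
    //   vector <- [vector + JSFunction::kSharedFunctionInfoOffset]
    //   vector <- [vector + SharedFunctionInfo::kFeedbackVectorOffset]
    std::printf("emit feedback-vector load into %s\n", vector.name);
  }
};

// In code-stubs-ppc.cc the __ macro expands (through ACCESS_MASM) to masm->,
// which is why the new call sites read `__ EmitLoadTypeFeedbackVector(r5)`.
#define __ masm->

void CallICTrampolineStubGenerate(MacroAssembler* masm) {
  Register r5{"r5"};
  __ EmitLoadTypeFeedbackVector(r5);  // was: EmitLoadTypeFeedbackVector(masm, r5);
}

#undef __

int main() {
  MacroAssembler assembler;
  CallICTrampolineStubGenerate(&assembler);
}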