Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(70)

Side by Side Diff: src/arm/lithium-codegen-arm.cc

Issue 356393003: [Arm]: Enable use of extended out-of-line constant pool for Arm. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Formatted with git cl format Created 6 years, 5 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/arm/lithium-codegen-arm.h ('k') | src/arm/macro-assembler-arm.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/v8.h" 5 #include "src/v8.h"
6 6
7 #include "src/arm/lithium-codegen-arm.h" 7 #include "src/arm/lithium-codegen-arm.h"
8 #include "src/arm/lithium-gap-resolver-arm.h" 8 #include "src/arm/lithium-gap-resolver-arm.h"
9 #include "src/code-stubs.h" 9 #include "src/code-stubs.h"
10 #include "src/hydrogen-osr.h" 10 #include "src/hydrogen-osr.h"
(...skipping 2746 matching lines...) Expand 10 before | Expand all | Expand 10 after
2757 } 2757 }
2758 2758
2759 2759
2760 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { 2760 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
2761 class DeferredInstanceOfKnownGlobal V8_FINAL : public LDeferredCode { 2761 class DeferredInstanceOfKnownGlobal V8_FINAL : public LDeferredCode {
2762 public: 2762 public:
2763 DeferredInstanceOfKnownGlobal(LCodeGen* codegen, 2763 DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
2764 LInstanceOfKnownGlobal* instr) 2764 LInstanceOfKnownGlobal* instr)
2765 : LDeferredCode(codegen), instr_(instr) { } 2765 : LDeferredCode(codegen), instr_(instr) { }
2766 virtual void Generate() V8_OVERRIDE { 2766 virtual void Generate() V8_OVERRIDE {
2767 codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_); 2767 codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_,
2768 &load_bool_);
2768 } 2769 }
2769 virtual LInstruction* instr() V8_OVERRIDE { return instr_; } 2770 virtual LInstruction* instr() V8_OVERRIDE { return instr_; }
2770 Label* map_check() { return &map_check_; } 2771 Label* map_check() { return &map_check_; }
2772 Label* load_bool() { return &load_bool_; }
2773
2771 private: 2774 private:
2772 LInstanceOfKnownGlobal* instr_; 2775 LInstanceOfKnownGlobal* instr_;
2773 Label map_check_; 2776 Label map_check_;
2777 Label load_bool_;
2774 }; 2778 };
2775 2779
2776 DeferredInstanceOfKnownGlobal* deferred; 2780 DeferredInstanceOfKnownGlobal* deferred;
2777 deferred = new(zone()) DeferredInstanceOfKnownGlobal(this, instr); 2781 deferred = new(zone()) DeferredInstanceOfKnownGlobal(this, instr);
2778 2782
2779 Label done, false_result; 2783 Label done, false_result;
2780 Register object = ToRegister(instr->value()); 2784 Register object = ToRegister(instr->value());
2781 Register temp = ToRegister(instr->temp()); 2785 Register temp = ToRegister(instr->temp());
2782 Register result = ToRegister(instr->result()); 2786 Register result = ToRegister(instr->result());
2783 2787
2784 // A Smi is not instance of anything. 2788 // A Smi is not instance of anything.
2785 __ JumpIfSmi(object, &false_result); 2789 __ JumpIfSmi(object, &false_result);
2786 2790
2787 // This is the inlined call site instanceof cache. The two occurrences of the 2791 // This is the inlined call site instanceof cache. The two occurrences of the
2788 // hole value will be patched to the last map/result pair generated by the 2792 // hole value will be patched to the last map/result pair generated by the
2789 // instanceof stub. 2793 // instanceof stub.
2790 Label cache_miss; 2794 Label cache_miss;
2791 Register map = temp; 2795 Register map = temp;
2792 __ ldr(map, FieldMemOperand(object, HeapObject::kMapOffset)); 2796 __ ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
2793 { 2797 {
2794 // Block constant pool emission to ensure the positions of instructions are 2798 // Block constant pool emission to ensure the positions of instructions are
2795 // as expected by the patcher. See InstanceofStub::Generate(). 2799 // as expected by the patcher. See InstanceofStub::Generate().
2796 Assembler::BlockConstPoolScope block_const_pool(masm()); 2800 Assembler::BlockConstPoolScope block_const_pool(masm());
2797 __ bind(deferred->map_check()); // Label for calculating code patching. 2801 __ bind(deferred->map_check()); // Label for calculating code patching.
2798 // We use Factory::the_hole_value() on purpose instead of loading from the 2802 // We use Factory::the_hole_value() on purpose instead of loading from the
2799 // root array to force relocation to be able to later patch with 2803 // root array to force relocation to be able to later patch with
2800 // the cached map. 2804 // the cached map.
2801 PredictableCodeSizeScope predictable(masm_, 5 * Assembler::kInstrSize);
2802 Handle<Cell> cell = factory()->NewCell(factory()->the_hole_value()); 2805 Handle<Cell> cell = factory()->NewCell(factory()->the_hole_value());
2803 __ mov(ip, Operand(Handle<Object>(cell))); 2806 __ mov(ip, Operand(Handle<Object>(cell)));
2804 __ ldr(ip, FieldMemOperand(ip, PropertyCell::kValueOffset)); 2807 __ ldr(ip, FieldMemOperand(ip, PropertyCell::kValueOffset));
2805 __ cmp(map, Operand(ip)); 2808 __ cmp(map, Operand(ip));
2806 __ b(ne, &cache_miss); 2809 __ b(ne, &cache_miss);
2810 __ bind(deferred->load_bool()); // Label for calculating code patching.
2807 // We use Factory::the_hole_value() on purpose instead of loading from the 2811 // We use Factory::the_hole_value() on purpose instead of loading from the
2808 // root array to force relocation to be able to later patch 2812 // root array to force relocation to be able to later patch
2809 // with true or false. 2813 // with true or false.
2810 __ mov(result, Operand(factory()->the_hole_value())); 2814 __ mov(result, Operand(factory()->the_hole_value()));
2811 } 2815 }
2812 __ b(&done); 2816 __ b(&done);
2813 2817
2814 // The inlined call site cache did not match. Check null and string before 2818 // The inlined call site cache did not match. Check null and string before
2815 // calling the deferred code. 2819 // calling the deferred code.
2816 __ bind(&cache_miss); 2820 __ bind(&cache_miss);
(...skipping 13 matching lines...) Expand all
2830 __ LoadRoot(result, Heap::kFalseValueRootIndex); 2834 __ LoadRoot(result, Heap::kFalseValueRootIndex);
2831 2835
2832 // Here result has either true or false. Deferred code also produces true or 2836 // Here result has either true or false. Deferred code also produces true or
2833 // false object. 2837 // false object.
2834 __ bind(deferred->exit()); 2838 __ bind(deferred->exit());
2835 __ bind(&done); 2839 __ bind(&done);
2836 } 2840 }
2837 2841
2838 2842
2839 void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, 2843 void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
2840 Label* map_check) { 2844 Label* map_check,
2845 Label* bool_load) {
2841 InstanceofStub::Flags flags = InstanceofStub::kNoFlags; 2846 InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
2842 flags = static_cast<InstanceofStub::Flags>( 2847 flags = static_cast<InstanceofStub::Flags>(
2843 flags | InstanceofStub::kArgsInRegisters); 2848 flags | InstanceofStub::kArgsInRegisters);
2844 flags = static_cast<InstanceofStub::Flags>( 2849 flags = static_cast<InstanceofStub::Flags>(
2845 flags | InstanceofStub::kCallSiteInlineCheck); 2850 flags | InstanceofStub::kCallSiteInlineCheck);
2846 flags = static_cast<InstanceofStub::Flags>( 2851 flags = static_cast<InstanceofStub::Flags>(
2847 flags | InstanceofStub::kReturnTrueFalseObject); 2852 flags | InstanceofStub::kReturnTrueFalseObject);
2848 InstanceofStub stub(isolate(), flags); 2853 InstanceofStub stub(isolate(), flags);
2849 2854
2850 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); 2855 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
2851 LoadContextFromDeferred(instr->context()); 2856 LoadContextFromDeferred(instr->context());
2852 2857
2853 __ Move(InstanceofStub::right(), instr->function()); 2858 __ Move(InstanceofStub::right(), instr->function());
2854 static const int kAdditionalDelta = 4; 2859
2860 int call_size = CallCodeSize(stub.GetCode(), RelocInfo::CODE_TARGET);
2861 int additional_delta = (call_size / Assembler::kInstrSize) + 4;
2855 // Make sure that code size is predictable, since we use specific constant 2862 // Make sure that code size is predictable, since we use specific constant
2856 // offsets in the code to find embedded values. 2863 // offsets in the code to find embedded values.
2857 PredictableCodeSizeScope predictable(masm_, 5 * Assembler::kInstrSize); 2864 PredictableCodeSizeScope predictable(
2858 int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta; 2865 masm_, (additional_delta + 1) * Assembler::kInstrSize);
2859 Label before_push_delta; 2866 // Make sure we don't emit any additional entries in the constant pool before
2860 __ bind(&before_push_delta); 2867 // the call to ensure that the CallCodeSize() calculated the correct number of
2861 __ BlockConstPoolFor(kAdditionalDelta); 2868 // instructions for the constant pool load.
2862 // r5 is used to communicate the offset to the location of the map check. 2869 {
2863 __ mov(r5, Operand(delta * kPointerSize)); 2870 ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
2864 // The mov above can generate one or two instructions. The delta was computed 2871 int map_check_delta =
2865 // for two instructions, so we need to pad here in case of one instruction. 2872 masm_->InstructionsGeneratedSince(map_check) + additional_delta;
2866 if (masm_->InstructionsGeneratedSince(&before_push_delta) != 2) { 2873 int bool_load_delta =
2867 ASSERT_EQ(1, masm_->InstructionsGeneratedSince(&before_push_delta)); 2874 masm_->InstructionsGeneratedSince(bool_load) + additional_delta;
2868 __ nop(); 2875 Label before_push_delta;
2876 __ bind(&before_push_delta);
2877 __ BlockConstPoolFor(additional_delta);
2878 // r5 is used to communicate the offset to the location of the map check.
2879 __ mov(r5, Operand(map_check_delta * kPointerSize));
2880 // r6 is used to communicate the offset to the location of the bool load.
2881 __ mov(r6, Operand(bool_load_delta * kPointerSize));
2882 // The mov above can generate one or two instructions. The delta was
2883 // computed for two instructions, so we need to pad here in case of one
2884 // instruction.
2885 while (masm_->InstructionsGeneratedSince(&before_push_delta) != 4) {
2886 __ nop();
2887 }
2869 } 2888 }
2870 CallCodeGeneric(stub.GetCode(), 2889 CallCodeGeneric(stub.GetCode(),
2871 RelocInfo::CODE_TARGET, 2890 RelocInfo::CODE_TARGET,
2872 instr, 2891 instr,
2873 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); 2892 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
2874 LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment(); 2893 LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment();
2875 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); 2894 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
2876 // Put the result value (r0) into the result register slot and 2895 // Put the result value (r0) into the result register slot and
2877 // restore all registers. 2896 // restore all registers.
2878 __ StoreToSafepointRegisterSlot(r0, ToRegister(instr->result())); 2897 __ StoreToSafepointRegisterSlot(r0, ToRegister(instr->result()));
(...skipping 1042 matching lines...) Expand 10 before | Expand all | Expand 10 after
3921 Handle<Code> code = Handle<Code>::cast(ToHandle(target)); 3940 Handle<Code> code = Handle<Code>::cast(ToHandle(target));
3922 generator.BeforeCall(__ CallSize(code, RelocInfo::CODE_TARGET)); 3941 generator.BeforeCall(__ CallSize(code, RelocInfo::CODE_TARGET));
3923 PlatformCallInterfaceDescriptor* call_descriptor = 3942 PlatformCallInterfaceDescriptor* call_descriptor =
3924 instr->descriptor()->platform_specific_descriptor(); 3943 instr->descriptor()->platform_specific_descriptor();
3925 __ Call(code, RelocInfo::CODE_TARGET, TypeFeedbackId::None(), al, 3944 __ Call(code, RelocInfo::CODE_TARGET, TypeFeedbackId::None(), al,
3926 call_descriptor->storage_mode()); 3945 call_descriptor->storage_mode());
3927 } else { 3946 } else {
3928 ASSERT(instr->target()->IsRegister()); 3947 ASSERT(instr->target()->IsRegister());
3929 Register target = ToRegister(instr->target()); 3948 Register target = ToRegister(instr->target());
3930 generator.BeforeCall(__ CallSize(target)); 3949 generator.BeforeCall(__ CallSize(target));
3931 __ add(target, target, Operand(Code::kHeaderSize - kHeapObjectTag)); 3950 // Make sure we don't emit any additional entries in the constant pool
3951 // before the call to ensure that the CallCodeSize() calculated the correct
3952 // number of instructions for the constant pool load.
3953 {
3954 ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
3955 __ add(target, target, Operand(Code::kHeaderSize - kHeapObjectTag));
3956 }
3932 __ Call(target); 3957 __ Call(target);
3933 } 3958 }
3934 generator.AfterCall(); 3959 generator.AfterCall();
3935 } 3960 }
3936 3961
3937 3962
3938 void LCodeGen::DoCallJSFunction(LCallJSFunction* instr) { 3963 void LCodeGen::DoCallJSFunction(LCallJSFunction* instr) {
3939 ASSERT(ToRegister(instr->function()).is(r1)); 3964 ASSERT(ToRegister(instr->function()).is(r1));
3940 ASSERT(ToRegister(instr->result()).is(r0)); 3965 ASSERT(ToRegister(instr->result()).is(r0));
3941 3966
(...skipping 1882 matching lines...) Expand 10 before | Expand all | Expand 10 after
5824 __ Push(scope_info); 5849 __ Push(scope_info);
5825 __ push(ToRegister(instr->function())); 5850 __ push(ToRegister(instr->function()));
5826 CallRuntime(Runtime::kPushBlockContext, 2, instr); 5851 CallRuntime(Runtime::kPushBlockContext, 2, instr);
5827 RecordSafepoint(Safepoint::kNoLazyDeopt); 5852 RecordSafepoint(Safepoint::kNoLazyDeopt);
5828 } 5853 }
5829 5854
5830 5855
5831 #undef __ 5856 #undef __
5832 5857
5833 } } // namespace v8::internal 5858 } } // namespace v8::internal
OLDNEW
« no previous file with comments | « src/arm/lithium-codegen-arm.h ('k') | src/arm/macro-assembler-arm.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698