Chromium Code Reviews

Unified Diff: src/arm/lithium-codegen-arm.cc

Issue 356393003: [Arm]: Enable use of extended out-of-line constant pool for Arm. (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Fix issue with inline-constant pool. Created 6 years, 5 months ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/v8.h"

 #include "src/arm/lithium-codegen-arm.h"
 #include "src/arm/lithium-gap-resolver-arm.h"
 #include "src/code-stubs.h"
 #include "src/hydrogen-osr.h"
(...skipping 2746 matching lines...)
 }


 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
   class DeferredInstanceOfKnownGlobal V8_FINAL : public LDeferredCode {
    public:
     DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
                                   LInstanceOfKnownGlobal* instr)
         : LDeferredCode(codegen), instr_(instr) { }
     virtual void Generate() V8_OVERRIDE {
-      codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_);
+      codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_,
+                                                 &load_bool_);
     }
     virtual LInstruction* instr() V8_OVERRIDE { return instr_; }
     Label* map_check() { return &map_check_; }
+    Label* load_bool() { return &load_bool_; }
    private:
     LInstanceOfKnownGlobal* instr_;
     Label map_check_;
+    Label load_bool_;
   };

   DeferredInstanceOfKnownGlobal* deferred;
   deferred = new(zone()) DeferredInstanceOfKnownGlobal(this, instr);

   Label done, false_result;
   Register object = ToRegister(instr->value());
   Register temp = ToRegister(instr->temp());
   Register result = ToRegister(instr->result());

   // A Smi is not an instance of anything.
   __ JumpIfSmi(object, &false_result);

   // This is the inlined call site instanceof cache. The two occurrences of
   // the hole value will be patched to the last map/result pair generated by
   // the instanceof stub.
   Label cache_miss;
   Register map = temp;
   __ ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
   {
     // Block constant pool emission to ensure the positions of instructions
     // are as expected by the patcher. See InstanceofStub::Generate().
     Assembler::BlockConstPoolScope block_const_pool(masm());
     __ bind(deferred->map_check());  // Label for calculating code patching.
     // We use Factory::the_hole_value() on purpose instead of loading from the
     // root array; this forces relocation, so the site can later be patched
     // with the cached map.
-    PredictableCodeSizeScope predictable(masm_, 5 * Assembler::kInstrSize);
     Handle<Cell> cell = factory()->NewCell(factory()->the_hole_value());
     __ mov(ip, Operand(Handle<Object>(cell)));
     __ ldr(ip, FieldMemOperand(ip, PropertyCell::kValueOffset));
     __ cmp(map, Operand(ip));
     __ b(ne, &cache_miss);
+    __ bind(deferred->load_bool());  // Label for calculating code patching.
     // We use Factory::the_hole_value() on purpose instead of loading from the
     // root array; this forces relocation, so the site can later be patched
     // with true or false.
     __ mov(result, Operand(factory()->the_hole_value()));
   }
   __ b(&done);

   // The inlined call site cache did not match. Check null and string before
   // calling the deferred code.
   __ bind(&cache_miss);
(...skipping 13 matching lines...)
   __ LoadRoot(result, Heap::kFalseValueRootIndex);

   // At this point, result holds either true or false. The deferred code also
   // produces a true or false object.
   __ bind(deferred->exit());
   __ bind(&done);
 }


 void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
-                                               Label* map_check) {
+                                               Label* map_check,
+                                               Label* bool_load) {
   InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
   flags = static_cast<InstanceofStub::Flags>(
       flags | InstanceofStub::kArgsInRegisters);
   flags = static_cast<InstanceofStub::Flags>(
       flags | InstanceofStub::kCallSiteInlineCheck);
   flags = static_cast<InstanceofStub::Flags>(
       flags | InstanceofStub::kReturnTrueFalseObject);
   InstanceofStub stub(isolate(), flags);

   PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
   LoadContextFromDeferred(instr->context());

   __ Move(InstanceofStub::right(), instr->function());
-  static const int kAdditionalDelta = 4;
+
+  int call_size = CallCodeSize(stub.GetCode(), RelocInfo::CODE_TARGET);
+  int additional_delta = (call_size / Assembler::kInstrSize) + 4;
   // Make sure that code size is predictable, since we use specific constant
   // offsets in the code to find embedded values.
-  PredictableCodeSizeScope predictable(masm_, 5 * Assembler::kInstrSize);
-  int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta;
+  PredictableCodeSizeScope predictable(
+      masm_, (additional_delta + 1) * Assembler::kInstrSize);
+  int map_check_delta =
+      masm_->InstructionsGeneratedSince(map_check) + additional_delta;
+  int bool_load_delta =
+      masm_->InstructionsGeneratedSince(bool_load) + additional_delta;
   Label before_push_delta;
   __ bind(&before_push_delta);
-  __ BlockConstPoolFor(kAdditionalDelta);
+  __ BlockConstPoolFor(additional_delta);
   // r5 is used to communicate the offset to the location of the map check.
-  __ mov(r5, Operand(delta * kPointerSize));
+  __ mov(r5, Operand(map_check_delta * kPointerSize));
+  // r6 is used to communicate the offset to the location of the bool load.
+  __ mov(r6, Operand(bool_load_delta * kPointerSize));
-  // The mov above can generate one or two instructions. The delta was
-  // computed for two instructions, so we need to pad here in case of one
-  // instruction.
-  if (masm_->InstructionsGeneratedSince(&before_push_delta) != 2) {
-    ASSERT_EQ(1, masm_->InstructionsGeneratedSince(&before_push_delta));
+  // Each mov above can generate one or two instructions, so pad with nops
+  // until exactly four instructions have been generated.
+  while (masm_->InstructionsGeneratedSince(&before_push_delta) != 4) {
     __ nop();
   }
   CallCodeGeneric(stub.GetCode(),
                   RelocInfo::CODE_TARGET,
                   instr,
                   RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
   LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment();
   safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
   // Put the result value (r0) into the result register slot and
   // restore all registers.
(...skipping 2947 matching lines...)
   __ Push(scope_info);
   __ push(ToRegister(instr->function()));
   CallRuntime(Runtime::kPushBlockContext, 2, instr);
   RecordSafepoint(Safepoint::kNoLazyDeopt);
 }


 #undef __

 } }  // namespace v8::internal
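
The heart of this patch is the offset arithmetic in DoDeferredInstanceOfKnownGlobal: r5 and r6 must end up holding the distance, in bytes, from the stub call's return address back to the patchable map-check and bool-load sites. Below is a minimal standalone sketch of that arithmetic using plain integers; the helper names and the concrete counts are hypothetical, and only the formulas mirror the diff above.

// deltas.cc -- standalone model of the offset arithmetic; not V8 code.
#include <cassert>
#include <cstdio>

const int kInstrSize = 4;    // every ARM instruction is 4 bytes
const int kPointerSize = 4;  // pointer size on 32-bit ARM

// Mirrors: additional_delta = (call_size / Assembler::kInstrSize) + 4.
// call_size is the byte size of the call sequence that follows; the extra
// four instructions are the two delta movs at their two-instruction worst
// case.
int AdditionalDelta(int call_size_in_bytes) {
  return call_size_in_bytes / kInstrSize + 4;
}

// Mirrors: __ mov(rN, Operand(delta * kPointerSize)). The register receives
// a byte offset from the return address back to the patch site.
int ByteOffset(int instrs_since_label, int additional_delta) {
  return (instrs_since_label + additional_delta) * kPointerSize;
}

int main() {
  int call_size = 3 * kInstrSize;                     // hypothetical call size
  int additional_delta = AdditionalDelta(call_size);  // 3 + 4 = 7

  int since_map_check = 20;  // hypothetical InstructionsGeneratedSince values
  int since_bool_load = 15;

  std::printf("r5 (map check) = %d bytes\n",
              ByteOffset(since_map_check, additional_delta));  // 108
  std::printf("r6 (bool load) = %d bytes\n",
              ByteOffset(since_bool_load, additional_delta));  // 88
  assert(ByteOffset(since_map_check, additional_delta) == 108);
  return 0;
}

Note that on 32-bit ARM kPointerSize equals kInstrSize, which is why multiplying an instruction count by kPointerSize yields a byte offset, and why call_size / Assembler::kInstrSize converts the call sequence's byte size back into an instruction count.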
OLDNEW
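
The while-nop padding exists because a mov of a 32-bit immediate on ARM can expand to either one or two instructions, and the deltas above were computed assuming the two-instruction worst case for both movs. Here is a minimal sketch of that pattern, a variable-length emitter padded to a fixed length so downstream offsets stay constant; the encodings and the immediate-range check are simplified placeholders, not the real ARM encoder.

// padding.cc -- standalone model of the nop padding; not V8 code.
#include <cstdint>
#include <vector>

// Crude stand-in for "encodable as an ARM modified immediate": treat small
// values as fitting in a single mov. The real encoder accepts more forms.
bool FitsInOneInstruction(std::uint32_t imm) { return imm < 256; }

// Appends one or two placeholder instruction words for "mov rN, #imm".
void EmitMovImm(std::vector<std::uint32_t>* code, std::uint32_t imm) {
  code->push_back(0xE3A00000u);    // mov  (placeholder encoding)
  if (!FitsInOneInstruction(imm)) {
    code->push_back(0xE3400000u);  // movt (placeholder encoding)
  }
}

// Mirrors the patched loop: after emitting both delta movs, pad with nops
// until exactly four instructions exist, so the patcher can rely on fixed
// distances between the labels and the call.
void EmitDeltas(std::vector<std::uint32_t>* code,
                std::uint32_t d1, std::uint32_t d2) {
  auto before = code->size();
  EmitMovImm(code, d1);            // mov r5, #d1  (1 or 2 instructions)
  EmitMovImm(code, d2);            // mov r6, #d2  (1 or 2 instructions)
  while (code->size() - before != 4) {
    code->push_back(0xE320F000u);  // nop
  }
}

int main() {
  std::vector<std::uint32_t> code;
  EmitDeltas(&code, 108, 88);  // both movs fit in one word -> two nops added
  return code.size() == 4 ? 0 : 1;
}

This is also why the old single-mov version padded to two instructions with an ASSERT, while the new two-mov version pads to four with a loop.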
