OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/arm64/frames-arm64.h" | 5 #include "src/arm64/frames-arm64.h" |
6 #include "src/arm64/lithium-codegen-arm64.h" | 6 #include "src/arm64/lithium-codegen-arm64.h" |
7 #include "src/arm64/lithium-gap-resolver-arm64.h" | 7 #include "src/arm64/lithium-gap-resolver-arm64.h" |
8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
9 #include "src/code-factory.h" | 9 #include "src/code-factory.h" |
10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
(...skipping 2989 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3000 if (instr->offset()->IsConstantOperand()) { | 3000 if (instr->offset()->IsConstantOperand()) { |
3001 __ Add(result, base, ToOperand32(instr->offset())); | 3001 __ Add(result, base, ToOperand32(instr->offset())); |
3002 } else { | 3002 } else { |
3003 __ Add(result, base, Operand(ToRegister32(instr->offset()), SXTW)); | 3003 __ Add(result, base, Operand(ToRegister32(instr->offset()), SXTW)); |
3004 } | 3004 } |
3005 } | 3005 } |
3006 | 3006 |
3007 | 3007 |
3008 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { | 3008 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { |
3009 DCHECK(ToRegister(instr->context()).is(cp)); | 3009 DCHECK(ToRegister(instr->context()).is(cp)); |
3010 // Assert that the arguments are in the registers expected by InstanceofStub. | 3010 DCHECK(ToRegister(instr->left()).is(InstanceOfDescriptor::LeftRegister())); |
3011 DCHECK(ToRegister(instr->left()).Is(InstanceofStub::left())); | 3011 DCHECK(ToRegister(instr->right()).is(InstanceOfDescriptor::RightRegister())); |
3012 DCHECK(ToRegister(instr->right()).Is(InstanceofStub::right())); | 3012 DCHECK(ToRegister(instr->result()).is(x0)); |
3013 | 3013 InstanceOfStub stub(isolate()); |
3014 InstanceofStub stub(isolate(), InstanceofStub::kArgsInRegisters); | |
3015 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 3014 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
3016 | |
3017 // InstanceofStub returns a result in x0: | |
3018 // 0 => not an instance | |
3019 // smi 1 => instance. | |
3020 __ Cmp(x0, 0); | |
3021 __ LoadTrueFalseRoots(x0, x1); | |
3022 __ Csel(x0, x0, x1, eq); | |
3023 } | 3015 } |
3024 | 3016 |
3025 | 3017 |
3026 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { | 3018 void LCodeGen::DoHasInPrototypeChainAndBranch( |
3027 class DeferredInstanceOfKnownGlobal: public LDeferredCode { | 3019 LHasInPrototypeChainAndBranch* instr) { |
3028 public: | 3020 Register const object = ToRegister(instr->object()); |
3029 DeferredInstanceOfKnownGlobal(LCodeGen* codegen, | 3021 Register const object_map = ToRegister(instr->scratch()); |
3030 LInstanceOfKnownGlobal* instr) | 3022 Register const object_prototype = object_map; |
3031 : LDeferredCode(codegen), instr_(instr) { } | 3023 Register const prototype = ToRegister(instr->prototype()); |
3032 virtual void Generate() { | |
3033 codegen()->DoDeferredInstanceOfKnownGlobal(instr_); | |
3034 } | |
3035 virtual LInstruction* instr() { return instr_; } | |
3036 private: | |
3037 LInstanceOfKnownGlobal* instr_; | |
3038 }; | |
3039 | 3024 |
3040 DeferredInstanceOfKnownGlobal* deferred = | 3025 // The {object} must be a spec object. It's sufficient to know that {object} |
3041 new(zone()) DeferredInstanceOfKnownGlobal(this, instr); | 3026 // is not a smi, since all other non-spec objects have {null} prototypes and |
| 3027 // will be ruled out below. |
| 3028 if (instr->hydrogen()->ObjectNeedsSmiCheck()) { |
| 3029 __ JumpIfSmi(object, instr->FalseLabel(chunk_)); |
| 3030 } |
3042 | 3031 |
3043 Label map_check, return_false, cache_miss, done; | 3032 // Loop through the {object}s prototype chain looking for the {prototype}. |
3044 Register object = ToRegister(instr->value()); | 3033 __ Ldr(object_map, FieldMemOperand(object, HeapObject::kMapOffset)); |
3045 Register result = ToRegister(instr->result()); | 3034 Label loop; |
3046 // x4 is expected in the associated deferred code and stub. | 3035 __ Bind(&loop); |
3047 Register map_check_site = x4; | 3036 __ Ldr(object_prototype, FieldMemOperand(object_map, Map::kPrototypeOffset)); |
3048 Register map = x5; | 3037 __ Cmp(object_prototype, prototype); |
3049 | 3038 __ B(eq, instr->TrueLabel(chunk_)); |
3050 // This instruction is marked as call. We can clobber any register. | 3039 __ CompareRoot(object_prototype, Heap::kNullValueRootIndex); |
3051 DCHECK(instr->IsMarkedAsCall()); | 3040 __ B(eq, instr->FalseLabel(chunk_)); |
3052 | 3041 __ Ldr(object_map, FieldMemOperand(object_prototype, HeapObject::kMapOffset)); |
3053 // We must take into account that object is in x11. | 3042 __ B(&loop); |
3054 DCHECK(object.Is(x11)); | |
3055 Register scratch = x10; | |
3056 | |
3057 // A Smi is not instance of anything. | |
3058 __ JumpIfSmi(object, &return_false); | |
3059 | |
3060 // This is the inlined call site instanceof cache. The two occurrences of the | |
3061 // hole value will be patched to the last map/result pair generated by the | |
3062 // instanceof stub. | |
3063 __ Ldr(map, FieldMemOperand(object, HeapObject::kMapOffset)); | |
3064 { | |
3065 // Below we use Factory::the_hole_value() on purpose instead of loading from | |
3066 // the root array to force relocation and later be able to patch with a | |
3067 // custom value. | |
3068 InstructionAccurateScope scope(masm(), 5); | |
3069 __ bind(&map_check); | |
3070 // Will be patched with the cached map. | |
3071 Handle<Cell> cell = factory()->NewCell(factory()->the_hole_value()); | |
3072 __ ldr(scratch, Immediate(cell)); | |
3073 __ ldr(scratch, FieldMemOperand(scratch, Cell::kValueOffset)); | |
3074 __ cmp(map, scratch); | |
3075 __ b(&cache_miss, ne); | |
3076 // The address of this instruction is computed relative to the map check | |
3077 // above, so check the size of the code generated. | |
3078 DCHECK(masm()->InstructionsGeneratedSince(&map_check) == 4); | |
3079 // Will be patched with the cached result. | |
3080 __ ldr(result, Immediate(factory()->the_hole_value())); | |
3081 } | |
3082 __ B(&done); | |
3083 | |
3084 // The inlined call site cache did not match. | |
3085 // Check null and string before calling the deferred code. | |
3086 __ Bind(&cache_miss); | |
3087 // Compute the address of the map check. It must not be clobbered until the | |
3088 // InstanceOfStub has used it. | |
3089 __ Adr(map_check_site, &map_check); | |
3090 // Null is not instance of anything. | |
3091 __ JumpIfRoot(object, Heap::kNullValueRootIndex, &return_false); | |
3092 | |
3093 // String values are not instances of anything. | |
3094 // Return false if the object is a string. Otherwise, jump to the deferred | |
3095 // code. | |
3096 // Note that we can't jump directly to deferred code from | |
3097 // IsObjectJSStringType, because it uses tbz for the jump and the deferred | |
3098 // code can be out of range. | |
3099 __ IsObjectJSStringType(object, scratch, NULL, &return_false); | |
3100 __ B(deferred->entry()); | |
3101 | |
3102 __ Bind(&return_false); | |
3103 __ LoadRoot(result, Heap::kFalseValueRootIndex); | |
3104 | |
3105 // Here result is either true or false. | |
3106 __ Bind(deferred->exit()); | |
3107 __ Bind(&done); | |
3108 } | 3043 } |
3109 | 3044 |
3110 | 3045 |
3111 void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { | |
3112 Register result = ToRegister(instr->result()); | |
3113 DCHECK(result.Is(x0)); // InstanceofStub returns its result in x0. | |
3114 InstanceofStub::Flags flags = InstanceofStub::kNoFlags; | |
3115 flags = static_cast<InstanceofStub::Flags>( | |
3116 flags | InstanceofStub::kArgsInRegisters); | |
3117 flags = static_cast<InstanceofStub::Flags>( | |
3118 flags | InstanceofStub::kReturnTrueFalseObject); | |
3119 flags = static_cast<InstanceofStub::Flags>( | |
3120 flags | InstanceofStub::kCallSiteInlineCheck); | |
3121 | |
3122 PushSafepointRegistersScope scope(this); | |
3123 LoadContextFromDeferred(instr->context()); | |
3124 | |
3125 // Prepare InstanceofStub arguments. | |
3126 DCHECK(ToRegister(instr->value()).Is(InstanceofStub::left())); | |
3127 __ LoadObject(InstanceofStub::right(), instr->function()); | |
3128 | |
3129 InstanceofStub stub(isolate(), flags); | |
3130 CallCodeGeneric(stub.GetCode(), | |
3131 RelocInfo::CODE_TARGET, | |
3132 instr, | |
3133 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); | |
3134 LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment(); | |
3135 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | |
3136 | |
3137 // Put the result value into the result register slot. | |
3138 __ StoreToSafepointRegisterSlot(result, result); | |
3139 } | |
3140 | |
3141 | |
3142 void LCodeGen::DoInstructionGap(LInstructionGap* instr) { | 3046 void LCodeGen::DoInstructionGap(LInstructionGap* instr) { |
3143 DoGap(instr); | 3047 DoGap(instr); |
3144 } | 3048 } |
3145 | 3049 |
3146 | 3050 |
3147 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) { | 3051 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) { |
3148 Register value = ToRegister32(instr->value()); | 3052 Register value = ToRegister32(instr->value()); |
3149 DoubleRegister result = ToDoubleRegister(instr->result()); | 3053 DoubleRegister result = ToDoubleRegister(instr->result()); |
3150 __ Scvtf(result, value); | 3054 __ Scvtf(result, value); |
3151 } | 3055 } |
(...skipping 3014 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
6166 Handle<ScopeInfo> scope_info = instr->scope_info(); | 6070 Handle<ScopeInfo> scope_info = instr->scope_info(); |
6167 __ Push(scope_info); | 6071 __ Push(scope_info); |
6168 __ Push(ToRegister(instr->function())); | 6072 __ Push(ToRegister(instr->function())); |
6169 CallRuntime(Runtime::kPushBlockContext, 2, instr); | 6073 CallRuntime(Runtime::kPushBlockContext, 2, instr); |
6170 RecordSafepoint(Safepoint::kNoLazyDeopt); | 6074 RecordSafepoint(Safepoint::kNoLazyDeopt); |
6171 } | 6075 } |
6172 | 6076 |
6173 | 6077 |
6174 } // namespace internal | 6078 } // namespace internal |
6175 } // namespace v8 | 6079 } // namespace v8 |
OLD | NEW |