| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_X87 | 5 #if V8_TARGET_ARCH_X87 |
| 6 | 6 |
| 7 #include "src/base/bits.h" | 7 #include "src/base/bits.h" |
| 8 #include "src/code-factory.h" | 8 #include "src/code-factory.h" |
| 9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
| 10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
| (...skipping 2312 matching lines...) |
| 2323 __ j(NegateCondition(cc), chunk_->GetAssemblyLabel(right_block)); | 2323 __ j(NegateCondition(cc), chunk_->GetAssemblyLabel(right_block)); |
| 2324 } else if (right_block == next_block) { | 2324 } else if (right_block == next_block) { |
| 2325 __ j(cc, chunk_->GetAssemblyLabel(left_block)); | 2325 __ j(cc, chunk_->GetAssemblyLabel(left_block)); |
| 2326 } else { | 2326 } else { |
| 2327 __ j(cc, chunk_->GetAssemblyLabel(left_block)); | 2327 __ j(cc, chunk_->GetAssemblyLabel(left_block)); |
| 2328 __ jmp(chunk_->GetAssemblyLabel(right_block)); | 2328 __ jmp(chunk_->GetAssemblyLabel(right_block)); |
| 2329 } | 2329 } |
| 2330 } | 2330 } |
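For context, the EmitBranch tail above implements fall-through elimination: when one successor block is the textually next block, a single conditional jump suffices, and the condition is negated when the true target is the fall-through. Summarized as a reading aid (not part of the CL):

```cpp
// next_block is the block emitted immediately after this instruction:
//   left_block  == next_block:  j(!cc, right)            // true case falls through
//   right_block == next_block:  j(cc, left)              // false case falls through
//   otherwise:                  j(cc, left); jmp(right)  // no fall-through, two jumps
```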
| 2331 | 2331 |
| 2332 | 2332 |
| | 2333 template <class InstrType> |
| | 2334 void LCodeGen::EmitTrueBranch(InstrType instr, Condition cc) { |
| | 2335 int true_block = instr->TrueDestination(chunk_); |
| | 2336 if (cc == no_condition) { |
| | 2337 __ jmp(chunk_->GetAssemblyLabel(true_block)); |
| | 2338 } else { |
| | 2339 __ j(cc, chunk_->GetAssemblyLabel(true_block)); |
| | 2340 } |
| | 2341 } |
| | 2342 |
| | 2343 |
| 2333 template<class InstrType> | 2344 template<class InstrType> |
| 2334 void LCodeGen::EmitFalseBranch(InstrType instr, Condition cc) { | 2345 void LCodeGen::EmitFalseBranch(InstrType instr, Condition cc) { |
| 2335 int false_block = instr->FalseDestination(chunk_); | 2346 int false_block = instr->FalseDestination(chunk_); |
| 2336 if (cc == no_condition) { | 2347 if (cc == no_condition) { |
| 2337 __ jmp(chunk_->GetAssemblyLabel(false_block)); | 2348 __ jmp(chunk_->GetAssemblyLabel(false_block)); |
| 2338 } else { | 2349 } else { |
| 2339 __ j(cc, chunk_->GetAssemblyLabel(false_block)); | 2350 __ j(cc, chunk_->GetAssemblyLabel(false_block)); |
| 2340 } | 2351 } |
| 2341 } | 2352 } |
| 2342 | 2353 |
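The new EmitTrueBranch added here is the mirror image of the existing EmitFalseBranch: it jumps to the instruction's true destination when the condition holds (unconditionally for no_condition) and otherwise falls through. A minimal usage sketch, with placeholder operands that are not part of the CL:

```cpp
// Sketch: a chained test built from the helper pair. Each EmitTrueBranch
// peels off a matching case and execution falls through to the next test;
// EmitFalseBranch terminates the chain. This is exactly the shape that
// DoHasInPrototypeChainAndBranch uses further down in this file.
__ cmp(candidate, expected);       // hypothetical operands
EmitTrueBranch(instr, equal);      // match: jump to the true block
__ cmp(candidate, sentinel);       // fall through: keep testing
EmitFalseBranch(instr, equal);     // exhausted: jump to the false block
```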
| (...skipping 545 matching lines...) |
| 2888 | 2899 |
| 2889 | 2900 |
| 2890 void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) { | 2901 void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) { |
| 2891 Register reg = ToRegister(instr->value()); | 2902 Register reg = ToRegister(instr->value()); |
| 2892 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map()); | 2903 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map()); |
| 2893 EmitBranch(instr, equal); | 2904 EmitBranch(instr, equal); |
| 2894 } | 2905 } |
| 2895 | 2906 |
| 2896 | 2907 |
| 2897 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { | 2908 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { |
| 2898 // Object and function are in fixed registers defined by the stub. | |
| 2899 DCHECK(ToRegister(instr->context()).is(esi)); | 2909 DCHECK(ToRegister(instr->context()).is(esi)); |
| 2900 InstanceofStub stub(isolate(), InstanceofStub::kArgsInRegisters); | 2910 DCHECK(ToRegister(instr->left()).is(InstanceOfDescriptor::LeftRegister())); |
| | 2911 DCHECK(ToRegister(instr->right()).is(InstanceOfDescriptor::RightRegister())); |
| | 2912 DCHECK(ToRegister(instr->result()).is(eax)); |
| | 2913 InstanceOfStub stub(isolate()); |
| 2901 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 2914 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
| 2902 | |
| 2903 Label true_value, done; | |
| 2904 __ test(eax, Operand(eax)); | |
| 2905 __ j(zero, &true_value, Label::kNear); | |
| 2906 __ mov(ToRegister(instr->result()), factory()->false_value()); | |
| 2907 __ jmp(&done, Label::kNear); | |
| 2908 __ bind(&true_value); | |
| 2909 __ mov(ToRegister(instr->result()), factory()->true_value()); | |
| 2910 __ bind(&done); | |
| 2911 } | 2915 } |
| 2912 | 2916 |
| 2913 | 2917 |
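The rewritten DoInstanceOf is shorter because the stub contract changed. With the old InstanceofStub, zero in eax meant "is an instance", so the caller had to test the flag and materialize a boolean by hand; the new InstanceOfStub takes its operands in the registers named by InstanceOfDescriptor and leaves the true_value/false_value heap object directly in eax. The two contracts, side by side (comments only; the old sequence is quoted from the deleted lines above):

```cpp
// Old contract (InstanceofStub): eax == 0 <=> instance; caller materializes:
//   __ test(eax, Operand(eax));
//   __ j(zero, &true_value, Label::kNear);
//   __ mov(result, factory()->false_value());
//   ... bind(&true_value); mov(result, factory()->true_value());
// New contract (InstanceOfStub): eax already holds factory()->true_value()
// or factory()->false_value(), so a single CallCode is all that is needed.
```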
| 2914 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { | 2918 void LCodeGen::DoHasInPrototypeChainAndBranch( |
| 2915 class DeferredInstanceOfKnownGlobal final : public LDeferredCode { | 2919 LHasInPrototypeChainAndBranch* instr) { |
| 2916 public: | 2920 Register const object = ToRegister(instr->object()); |
| 2917 DeferredInstanceOfKnownGlobal(LCodeGen* codegen, | 2921 Register const object_map = ToRegister(instr->scratch()); |
| 2918 LInstanceOfKnownGlobal* instr, | 2922 Register const object_prototype = object_map; |
| 2919 const X87Stack& x87_stack) | 2923 Register const prototype = ToRegister(instr->prototype()); |
| 2920 : LDeferredCode(codegen, x87_stack), instr_(instr) { } | |
| 2921 void Generate() override { | |
| 2922 codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_); | |
| 2923 } | |
| 2924 LInstruction* instr() override { return instr_; } | |
| 2925 Label* map_check() { return &map_check_; } | |
| 2926 private: | |
| 2927 LInstanceOfKnownGlobal* instr_; | |
| 2928 Label map_check_; | |
| 2929 }; | |
| 2930 | 2924 |
| 2931 DeferredInstanceOfKnownGlobal* deferred; | 2925 // The {object} must be a spec object. It's sufficient to know that {object} |
| 2932 deferred = new(zone()) DeferredInstanceOfKnownGlobal(this, instr, x87_stack_); | 2926 // is not a smi, since all other non-spec objects have {null} prototypes and |
| | 2927 // will be ruled out below. |
| | 2928 if (instr->hydrogen()->ObjectNeedsSmiCheck()) { |
| | 2929 __ test(object, Immediate(kSmiTagMask)); |
| | 2930 EmitFalseBranch(instr, zero); |
| | 2931 } |
| 2933 | 2932 |
| 2934 Label done, false_result; | 2933 // Loop through the {object}'s prototype chain looking for the {prototype}. |
| 2935 Register object = ToRegister(instr->value()); | 2934 __ mov(object_map, FieldOperand(object, HeapObject::kMapOffset)); |
| 2936 Register temp = ToRegister(instr->temp()); | 2935 Label loop; |
| 2937 | 2936 __ bind(&loop); |
| 2938 // A Smi is not an instance of anything. | 2937 __ mov(object_prototype, FieldOperand(object_map, Map::kPrototypeOffset)); |
| 2939 __ JumpIfSmi(object, &false_result, Label::kNear); | 2938 __ cmp(object_prototype, prototype); |
| 2940 | 2939 EmitTrueBranch(instr, equal); |
| 2941 // This is the inlined call site instanceof cache. The two occurences of the | 2940 __ cmp(object_prototype, factory()->null_value()); |
| 2942 // hole value will be patched to the last map/result pair generated by the | 2941 EmitFalseBranch(instr, equal); |
| 2943 // instanceof stub. | 2942 __ mov(object_map, FieldOperand(object_prototype, HeapObject::kMapOffset)); |
| 2944 Label cache_miss; | 2943 __ jmp(&loop); |
| 2945 Register map = ToRegister(instr->temp()); | |
| 2946 __ mov(map, FieldOperand(object, HeapObject::kMapOffset)); | |
| 2947 __ bind(deferred->map_check()); // Label for calculating code patching. | |
| 2948 Handle<Cell> cache_cell = factory()->NewCell(factory()->the_hole_value()); | |
| 2949 __ cmp(map, Operand::ForCell(cache_cell)); // Patched to cached map. | |
| 2950 __ j(not_equal, &cache_miss, Label::kNear); | |
| 2951 __ mov(eax, factory()->the_hole_value()); // Patched to either true or false. | |
| 2952 __ jmp(&done, Label::kNear); | |
| 2953 | |
| 2954 // The inlined call site cache did not match. Check for null and string | |
| 2955 // before calling the deferred code. | |
| 2956 __ bind(&cache_miss); | |
| 2957 // Null is not an instance of anything. | |
| 2958 __ cmp(object, factory()->null_value()); | |
| 2959 __ j(equal, &false_result, Label::kNear); | |
| 2960 | |
| 2961 // String values are not instances of anything. | |
| 2962 Condition is_string = masm_->IsObjectStringType(object, temp, temp); | |
| 2963 __ j(is_string, &false_result, Label::kNear); | |
| 2964 | |
| 2965 // Go to the deferred code. | |
| 2966 __ jmp(deferred->entry()); | |
| 2967 | |
| 2968 __ bind(&false_result); | |
| 2969 __ mov(ToRegister(instr->result()), factory()->false_value()); | |
| 2970 | |
| 2971 // Here result has either true or false. Deferred code also produces true or | |
| 2972 // false object. | |
| 2973 __ bind(deferred->exit()); | |
| 2974 __ bind(&done); | |
| 2975 } | 2944 } |
| 2976 | 2945 |
| 2977 | 2946 |
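DoHasInPrototypeChainAndBranch replaces the deferred map-check machinery deleted below: instead of patching an inline cache at the call site, it walks the prototype chain at run time. As a reading aid, a plain C++ sketch of what the emitted loop computes; the Object/Map helpers here are hypothetical stand-ins chosen to mirror the instruction sequence above, not real V8 API:

```cpp
// Semantic sketch of the generated code; each line notes the matching emit.
bool HasInPrototypeChain(Object* object, Object* prototype) {
  if (object->IsSmi()) return false;    // smi check, EmitFalseBranch(zero)
  Map* map = object->map();             // mov object_map, [object + kMapOffset]
  for (;;) {
    Object* p = map->prototype();       // mov object_prototype, [object_map + kPrototypeOffset]
    if (p == prototype) return true;    // cmp prototype; EmitTrueBranch(equal)
    if (p->IsNull()) return false;      // cmp null_value; EmitFalseBranch(equal)
    map = HeapObject::cast(p)->map();   // mov object_map, [p + kMapOffset]
  }                                     // jmp &loop
}
```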
| 2978 void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, | |
| 2979 Label* map_check) { | |
| 2980 PushSafepointRegistersScope scope(this); | |
| 2981 | |
| 2982 InstanceofStub::Flags flags = InstanceofStub::kNoFlags; | |
| 2983 flags = static_cast<InstanceofStub::Flags>( | |
| 2984 flags | InstanceofStub::kArgsInRegisters); | |
| 2985 flags = static_cast<InstanceofStub::Flags>( | |
| 2986 flags | InstanceofStub::kCallSiteInlineCheck); | |
| 2987 flags = static_cast<InstanceofStub::Flags>( | |
| 2988 flags | InstanceofStub::kReturnTrueFalseObject); | |
| 2989 InstanceofStub stub(isolate(), flags); | |
| 2990 | |
| 2991 // Get the temp register reserved by the instruction. This needs to be a | |
| 2992 // register which is pushed last by PushSafepointRegisters as top of the | |
| 2993 // stack is used to pass the offset to the location of the map check to | |
| 2994 // the stub. | |
| 2995 Register temp = ToRegister(instr->temp()); | |
| 2996 DCHECK(MacroAssembler::SafepointRegisterStackIndex(temp) == 0); | |
| 2997 __ LoadHeapObject(InstanceofStub::right(), instr->function()); | |
| 2998 static const int kAdditionalDelta = 13; | |
| 2999 int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta; | |
| 3000 __ mov(temp, Immediate(delta)); | |
| 3001 __ StoreToSafepointRegisterSlot(temp, temp); | |
| 3002 CallCodeGeneric(stub.GetCode(), | |
| 3003 RelocInfo::CODE_TARGET, | |
| 3004 instr, | |
| 3005 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); | |
| 3006 // Get the deoptimization index of the LLazyBailout-environment that | |
| 3007 // corresponds to this instruction. | |
| 3008 LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment(); | |
| 3009 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | |
| 3010 | |
| 3011 // Put the result value into the eax slot and restore all registers. | |
| 3012 __ StoreToSafepointRegisterSlot(eax, eax); | |
| 3013 } | |
| 3014 | |
| 3015 | |
| 3016 void LCodeGen::DoCmpT(LCmpT* instr) { | 2947 void LCodeGen::DoCmpT(LCmpT* instr) { |
| 3017 Token::Value op = instr->op(); | 2948 Token::Value op = instr->op(); |
| 3018 | 2949 |
| 3019 Handle<Code> ic = | 2950 Handle<Code> ic = |
| 3020 CodeFactory::CompareIC(isolate(), op, instr->strength()).code(); | 2951 CodeFactory::CompareIC(isolate(), op, instr->strength()).code(); |
| 3021 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 2952 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
| 3022 | 2953 |
| 3023 Condition condition = ComputeCompareCondition(op); | 2954 Condition condition = ComputeCompareCondition(op); |
| 3024 Label true_value, done; | 2955 Label true_value, done; |
| 3025 __ test(eax, Operand(eax)); | 2956 __ test(eax, Operand(eax)); |
| (...skipping 3440 matching lines...) |
| 6466 RecordSafepoint(Safepoint::kNoLazyDeopt); | 6397 RecordSafepoint(Safepoint::kNoLazyDeopt); |
| 6467 } | 6398 } |
| 6468 | 6399 |
| 6469 | 6400 |
| 6470 #undef __ | 6401 #undef __ |
| 6471 | 6402 |
| 6472 } // namespace internal | 6403 } // namespace internal |
| 6473 } // namespace v8 | 6404 } // namespace v8 |
| 6474 | 6405 |
| 6475 #endif // V8_TARGET_ARCH_X87 | 6406 #endif // V8_TARGET_ARCH_X87 |