| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 185 matching lines...) |
| 196 if (info()->saves_caller_doubles()) { | 196 if (info()->saves_caller_doubles()) { |
| 197 SaveCallerDoubles(); | 197 SaveCallerDoubles(); |
| 198 } | 198 } |
| 199 | 199 |
| 200 // Possibly allocate a local context. | 200 // Possibly allocate a local context. |
| 201 int heap_slots = info()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; | 201 int heap_slots = info()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; |
| 202 if (heap_slots > 0) { | 202 if (heap_slots > 0) { |
| 203 Comment(";;; Allocate local context"); | 203 Comment(";;; Allocate local context"); |
| 204 // Argument to NewContext is the function, which is in a1. | 204 // Argument to NewContext is the function, which is in a1. |
| 205 if (heap_slots <= FastNewContextStub::kMaximumSlots) { | 205 if (heap_slots <= FastNewContextStub::kMaximumSlots) { |
| 206 FastNewContextStub stub(heap_slots); | 206 FastNewContextStub stub(isolate(), heap_slots); |
| 207 __ CallStub(&stub); | 207 __ CallStub(&stub); |
| 208 } else { | 208 } else { |
| 209 __ push(a1); | 209 __ push(a1); |
| 210 __ CallRuntime(Runtime::kHiddenNewFunctionContext, 1); | 210 __ CallRuntime(Runtime::kHiddenNewFunctionContext, 1); |
| 211 } | 211 } |
| 212 RecordSafepoint(Safepoint::kNoLazyDeopt); | 212 RecordSafepoint(Safepoint::kNoLazyDeopt); |
| 213 // Context is returned in v0. It replaces the context passed to us. | 213 // Context is returned in v0. It replaces the context passed to us. |
| 214 // It's saved in the stack and kept live in cp. | 214 // It's saved in the stack and kept live in cp. |
| 215 __ mov(cp, v0); | 215 __ mov(cp, v0); |
| 216 __ sw(v0, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 216 __ sw(v0, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| (...skipping 822 matching lines...) |
| 1039 void LCodeGen::DoParameter(LParameter* instr) { | 1039 void LCodeGen::DoParameter(LParameter* instr) { |
| 1040 // Nothing to do. | 1040 // Nothing to do. |
| 1041 } | 1041 } |
| 1042 | 1042 |
| 1043 | 1043 |
| 1044 void LCodeGen::DoCallStub(LCallStub* instr) { | 1044 void LCodeGen::DoCallStub(LCallStub* instr) { |
| 1045 ASSERT(ToRegister(instr->context()).is(cp)); | 1045 ASSERT(ToRegister(instr->context()).is(cp)); |
| 1046 ASSERT(ToRegister(instr->result()).is(v0)); | 1046 ASSERT(ToRegister(instr->result()).is(v0)); |
| 1047 switch (instr->hydrogen()->major_key()) { | 1047 switch (instr->hydrogen()->major_key()) { |
| 1048 case CodeStub::RegExpExec: { | 1048 case CodeStub::RegExpExec: { |
| 1049 RegExpExecStub stub; | 1049 RegExpExecStub stub(isolate()); |
| 1050 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 1050 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 1051 break; | 1051 break; |
| 1052 } | 1052 } |
| 1053 case CodeStub::SubString: { | 1053 case CodeStub::SubString: { |
| 1054 SubStringStub stub; | 1054 SubStringStub stub(isolate()); |
| 1055 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 1055 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 1056 break; | 1056 break; |
| 1057 } | 1057 } |
| 1058 case CodeStub::StringCompare: { | 1058 case CodeStub::StringCompare: { |
| 1059 StringCompareStub stub; | 1059 StringCompareStub stub(isolate()); |
| 1060 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 1060 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 1061 break; | 1061 break; |
| 1062 } | 1062 } |
| 1063 default: | 1063 default: |
| 1064 UNREACHABLE(); | 1064 UNREACHABLE(); |
| 1065 } | 1065 } |
| 1066 } | 1066 } |
| 1067 | 1067 |
| 1068 | 1068 |
| 1069 void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) { | 1069 void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) { |
| (...skipping 934 matching lines...) |
| 2004 } | 2004 } |
| 2005 } | 2005 } |
| 2006 | 2006 |
| 2007 | 2007 |
| 2008 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { | 2008 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { |
| 2009 ASSERT(ToRegister(instr->context()).is(cp)); | 2009 ASSERT(ToRegister(instr->context()).is(cp)); |
| 2010 ASSERT(ToRegister(instr->left()).is(a1)); | 2010 ASSERT(ToRegister(instr->left()).is(a1)); |
| 2011 ASSERT(ToRegister(instr->right()).is(a0)); | 2011 ASSERT(ToRegister(instr->right()).is(a0)); |
| 2012 ASSERT(ToRegister(instr->result()).is(v0)); | 2012 ASSERT(ToRegister(instr->result()).is(v0)); |
| 2013 | 2013 |
| 2014 BinaryOpICStub stub(instr->op(), NO_OVERWRITE); | 2014 BinaryOpICStub stub(isolate(), instr->op(), NO_OVERWRITE); |
| 2015 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 2015 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 2016 // Other architectures use a nop here to signal that there is no inlined | 2016 // Other architectures use a nop here to signal that there is no inlined |
| 2017 // patchable code. MIPS does not need the nop, since our marker | 2017 // patchable code. MIPS does not need the nop, since our marker |
| 2018 // instruction (andi zero_reg) will never be used in normal code. | 2018 // instruction (andi zero_reg) will never be used in normal code. |
| 2019 } | 2019 } |
| 2020 | 2020 |
| 2021 | 2021 |
| 2022 template<class InstrType> | 2022 template<class InstrType> |
| 2023 void LCodeGen::EmitBranch(InstrType instr, | 2023 void LCodeGen::EmitBranch(InstrType instr, |
| 2024 Condition condition, | 2024 Condition condition, |
| (...skipping 621 matching lines...) |
| 2646 | 2646 |
| 2647 | 2647 |
| 2648 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { | 2648 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { |
| 2649 ASSERT(ToRegister(instr->context()).is(cp)); | 2649 ASSERT(ToRegister(instr->context()).is(cp)); |
| 2650 Label true_label, done; | 2650 Label true_label, done; |
| 2651 ASSERT(ToRegister(instr->left()).is(a0)); // Object is in a0. | 2651 ASSERT(ToRegister(instr->left()).is(a0)); // Object is in a0. |
| 2652 ASSERT(ToRegister(instr->right()).is(a1)); // Function is in a1. | 2652 ASSERT(ToRegister(instr->right()).is(a1)); // Function is in a1. |
| 2653 Register result = ToRegister(instr->result()); | 2653 Register result = ToRegister(instr->result()); |
| 2654 ASSERT(result.is(v0)); | 2654 ASSERT(result.is(v0)); |
| 2655 | 2655 |
| 2656 InstanceofStub stub(InstanceofStub::kArgsInRegisters); | 2656 InstanceofStub stub(isolate(), InstanceofStub::kArgsInRegisters); |
| 2657 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 2657 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 2658 | 2658 |
| 2659 __ Branch(&true_label, eq, result, Operand(zero_reg)); | 2659 __ Branch(&true_label, eq, result, Operand(zero_reg)); |
| 2660 __ li(result, Operand(factory()->false_value())); | 2660 __ li(result, Operand(factory()->false_value())); |
| 2661 __ Branch(&done); | 2661 __ Branch(&done); |
| 2662 __ bind(&true_label); | 2662 __ bind(&true_label); |
| 2663 __ li(result, Operand(factory()->true_value())); | 2663 __ li(result, Operand(factory()->true_value())); |
| 2664 __ bind(&done); | 2664 __ bind(&done); |
| 2665 } | 2665 } |
| 2666 | 2666 |
| (...skipping 80 matching lines...) |
| 2747 Register result = ToRegister(instr->result()); | 2747 Register result = ToRegister(instr->result()); |
| 2748 ASSERT(result.is(v0)); | 2748 ASSERT(result.is(v0)); |
| 2749 | 2749 |
| 2750 InstanceofStub::Flags flags = InstanceofStub::kNoFlags; | 2750 InstanceofStub::Flags flags = InstanceofStub::kNoFlags; |
| 2751 flags = static_cast<InstanceofStub::Flags>( | 2751 flags = static_cast<InstanceofStub::Flags>( |
| 2752 flags | InstanceofStub::kArgsInRegisters); | 2752 flags | InstanceofStub::kArgsInRegisters); |
| 2753 flags = static_cast<InstanceofStub::Flags>( | 2753 flags = static_cast<InstanceofStub::Flags>( |
| 2754 flags | InstanceofStub::kCallSiteInlineCheck); | 2754 flags | InstanceofStub::kCallSiteInlineCheck); |
| 2755 flags = static_cast<InstanceofStub::Flags>( | 2755 flags = static_cast<InstanceofStub::Flags>( |
| 2756 flags | InstanceofStub::kReturnTrueFalseObject); | 2756 flags | InstanceofStub::kReturnTrueFalseObject); |
| 2757 InstanceofStub stub(flags); | 2757 InstanceofStub stub(isolate(), flags); |
| 2758 | 2758 |
| 2759 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); | 2759 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); |
| 2760 LoadContextFromDeferred(instr->context()); | 2760 LoadContextFromDeferred(instr->context()); |
| 2761 | 2761 |
| 2762 // Get the temp register reserved by the instruction. This needs to be t0, | 2762 // Get the temp register reserved by the instruction. This needs to be t0, |
| 2763 // as its slot in the pushed safepoint registers is used to communicate | 2763 // as its slot in the pushed safepoint registers is used to communicate |
| 2764 // the offset to the location of the map check. | 2764 // the offset to the location of the map check. |
| 2765 Register temp = ToRegister(instr->temp()); | 2765 Register temp = ToRegister(instr->temp()); |
| 2766 ASSERT(temp.is(t0)); | 2766 ASSERT(temp.is(t0)); |
| 2767 __ li(InstanceofStub::right(), instr->function()); | 2767 __ li(InstanceofStub::right(), instr->function()); |
| (...skipping 1073 matching lines...) |
| 3841 // Having marked this as a call, we can use any registers. | 3841 // Having marked this as a call, we can use any registers. |
| 3842 // Just make sure that the input/output registers are the expected ones. | 3842 // Just make sure that the input/output registers are the expected ones. |
| 3843 ASSERT(!instr->right()->IsDoubleRegister() || | 3843 ASSERT(!instr->right()->IsDoubleRegister() || |
| 3844 ToDoubleRegister(instr->right()).is(f4)); | 3844 ToDoubleRegister(instr->right()).is(f4)); |
| 3845 ASSERT(!instr->right()->IsRegister() || | 3845 ASSERT(!instr->right()->IsRegister() || |
| 3846 ToRegister(instr->right()).is(a2)); | 3846 ToRegister(instr->right()).is(a2)); |
| 3847 ASSERT(ToDoubleRegister(instr->left()).is(f2)); | 3847 ASSERT(ToDoubleRegister(instr->left()).is(f2)); |
| 3848 ASSERT(ToDoubleRegister(instr->result()).is(f0)); | 3848 ASSERT(ToDoubleRegister(instr->result()).is(f0)); |
| 3849 | 3849 |
| 3850 if (exponent_type.IsSmi()) { | 3850 if (exponent_type.IsSmi()) { |
| 3851 MathPowStub stub(MathPowStub::TAGGED); | 3851 MathPowStub stub(isolate(), MathPowStub::TAGGED); |
| 3852 __ CallStub(&stub); | 3852 __ CallStub(&stub); |
| 3853 } else if (exponent_type.IsTagged()) { | 3853 } else if (exponent_type.IsTagged()) { |
| 3854 Label no_deopt; | 3854 Label no_deopt; |
| 3855 __ JumpIfSmi(a2, &no_deopt); | 3855 __ JumpIfSmi(a2, &no_deopt); |
| 3856 __ lw(t3, FieldMemOperand(a2, HeapObject::kMapOffset)); | 3856 __ lw(t3, FieldMemOperand(a2, HeapObject::kMapOffset)); |
| 3857 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); | 3857 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); |
| 3858 DeoptimizeIf(ne, instr->environment(), t3, Operand(at)); | 3858 DeoptimizeIf(ne, instr->environment(), t3, Operand(at)); |
| 3859 __ bind(&no_deopt); | 3859 __ bind(&no_deopt); |
| 3860 MathPowStub stub(MathPowStub::TAGGED); | 3860 MathPowStub stub(isolate(), MathPowStub::TAGGED); |
| 3861 __ CallStub(&stub); | 3861 __ CallStub(&stub); |
| 3862 } else if (exponent_type.IsInteger32()) { | 3862 } else if (exponent_type.IsInteger32()) { |
| 3863 MathPowStub stub(MathPowStub::INTEGER); | 3863 MathPowStub stub(isolate(), MathPowStub::INTEGER); |
| 3864 __ CallStub(&stub); | 3864 __ CallStub(&stub); |
| 3865 } else { | 3865 } else { |
| 3866 ASSERT(exponent_type.IsDouble()); | 3866 ASSERT(exponent_type.IsDouble()); |
| 3867 MathPowStub stub(MathPowStub::DOUBLE); | 3867 MathPowStub stub(isolate(), MathPowStub::DOUBLE); |
| 3868 __ CallStub(&stub); | 3868 __ CallStub(&stub); |
| 3869 } | 3869 } |
| 3870 } | 3870 } |
| 3871 | 3871 |
| 3872 | 3872 |
| 3873 void LCodeGen::DoMathExp(LMathExp* instr) { | 3873 void LCodeGen::DoMathExp(LMathExp* instr) { |
| 3874 DoubleRegister input = ToDoubleRegister(instr->value()); | 3874 DoubleRegister input = ToDoubleRegister(instr->value()); |
| 3875 DoubleRegister result = ToDoubleRegister(instr->result()); | 3875 DoubleRegister result = ToDoubleRegister(instr->result()); |
| 3876 DoubleRegister double_scratch1 = ToDoubleRegister(instr->double_temp()); | 3876 DoubleRegister double_scratch1 = ToDoubleRegister(instr->double_temp()); |
| 3877 DoubleRegister double_scratch2 = double_scratch0(); | 3877 DoubleRegister double_scratch2 = double_scratch0(); |
| (...skipping 83 matching lines...) |
| 3961 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT); | 3961 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT); |
| 3962 } | 3962 } |
| 3963 | 3963 |
| 3964 | 3964 |
| 3965 void LCodeGen::DoCallFunction(LCallFunction* instr) { | 3965 void LCodeGen::DoCallFunction(LCallFunction* instr) { |
| 3966 ASSERT(ToRegister(instr->context()).is(cp)); | 3966 ASSERT(ToRegister(instr->context()).is(cp)); |
| 3967 ASSERT(ToRegister(instr->function()).is(a1)); | 3967 ASSERT(ToRegister(instr->function()).is(a1)); |
| 3968 ASSERT(ToRegister(instr->result()).is(v0)); | 3968 ASSERT(ToRegister(instr->result()).is(v0)); |
| 3969 | 3969 |
| 3970 int arity = instr->arity(); | 3970 int arity = instr->arity(); |
| 3971 CallFunctionStub stub(arity, instr->hydrogen()->function_flags()); | 3971 CallFunctionStub stub(isolate(), arity, instr->hydrogen()->function_flags()); |
| 3972 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 3972 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 3973 } | 3973 } |
| 3974 | 3974 |
| 3975 | 3975 |
| 3976 void LCodeGen::DoCallNew(LCallNew* instr) { | 3976 void LCodeGen::DoCallNew(LCallNew* instr) { |
| 3977 ASSERT(ToRegister(instr->context()).is(cp)); | 3977 ASSERT(ToRegister(instr->context()).is(cp)); |
| 3978 ASSERT(ToRegister(instr->constructor()).is(a1)); | 3978 ASSERT(ToRegister(instr->constructor()).is(a1)); |
| 3979 ASSERT(ToRegister(instr->result()).is(v0)); | 3979 ASSERT(ToRegister(instr->result()).is(v0)); |
| 3980 | 3980 |
| 3981 __ li(a0, Operand(instr->arity())); | 3981 __ li(a0, Operand(instr->arity())); |
| 3982 // No cell in a2 for construct type feedback in optimized code | 3982 // No cell in a2 for construct type feedback in optimized code |
| 3983 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); | 3983 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); |
| 3984 CallConstructStub stub(NO_CALL_FUNCTION_FLAGS); | 3984 CallConstructStub stub(isolate(), NO_CALL_FUNCTION_FLAGS); |
| 3985 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); | 3985 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); |
| 3986 } | 3986 } |
| 3987 | 3987 |
| 3988 | 3988 |
| 3989 void LCodeGen::DoCallNewArray(LCallNewArray* instr) { | 3989 void LCodeGen::DoCallNewArray(LCallNewArray* instr) { |
| 3990 ASSERT(ToRegister(instr->context()).is(cp)); | 3990 ASSERT(ToRegister(instr->context()).is(cp)); |
| 3991 ASSERT(ToRegister(instr->constructor()).is(a1)); | 3991 ASSERT(ToRegister(instr->constructor()).is(a1)); |
| 3992 ASSERT(ToRegister(instr->result()).is(v0)); | 3992 ASSERT(ToRegister(instr->result()).is(v0)); |
| 3993 | 3993 |
| 3994 __ li(a0, Operand(instr->arity())); | 3994 __ li(a0, Operand(instr->arity())); |
| 3995 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); | 3995 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); |
| 3996 ElementsKind kind = instr->hydrogen()->elements_kind(); | 3996 ElementsKind kind = instr->hydrogen()->elements_kind(); |
| 3997 AllocationSiteOverrideMode override_mode = | 3997 AllocationSiteOverrideMode override_mode = |
| 3998 (AllocationSite::GetMode(kind) == TRACK_ALLOCATION_SITE) | 3998 (AllocationSite::GetMode(kind) == TRACK_ALLOCATION_SITE) |
| 3999 ? DISABLE_ALLOCATION_SITES | 3999 ? DISABLE_ALLOCATION_SITES |
| 4000 : DONT_OVERRIDE; | 4000 : DONT_OVERRIDE; |
| 4001 | 4001 |
| 4002 if (instr->arity() == 0) { | 4002 if (instr->arity() == 0) { |
| 4003 ArrayNoArgumentConstructorStub stub(kind, override_mode); | 4003 ArrayNoArgumentConstructorStub stub(isolate(), kind, override_mode); |
| 4004 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); | 4004 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); |
| 4005 } else if (instr->arity() == 1) { | 4005 } else if (instr->arity() == 1) { |
| 4006 Label done; | 4006 Label done; |
| 4007 if (IsFastPackedElementsKind(kind)) { | 4007 if (IsFastPackedElementsKind(kind)) { |
| 4008 Label packed_case; | 4008 Label packed_case; |
| 4009 // We might need a change here, | 4009 // We might need a change here, |
| 4010 // look at the first argument. | 4010 // look at the first argument. |
| 4011 __ lw(t1, MemOperand(sp, 0)); | 4011 __ lw(t1, MemOperand(sp, 0)); |
| 4012 __ Branch(&packed_case, eq, t1, Operand(zero_reg)); | 4012 __ Branch(&packed_case, eq, t1, Operand(zero_reg)); |
| 4013 | 4013 |
| 4014 ElementsKind holey_kind = GetHoleyElementsKind(kind); | 4014 ElementsKind holey_kind = GetHoleyElementsKind(kind); |
| 4015 ArraySingleArgumentConstructorStub stub(holey_kind, override_mode); | 4015 ArraySingleArgumentConstructorStub stub(isolate(), |
| | 4016 holey_kind, |
| | 4017 override_mode); |
| 4016 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); | 4018 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); |
| 4017 __ jmp(&done); | 4019 __ jmp(&done); |
| 4018 __ bind(&packed_case); | 4020 __ bind(&packed_case); |
| 4019 } | 4021 } |
| 4020 | 4022 |
| 4021 ArraySingleArgumentConstructorStub stub(kind, override_mode); | 4023 ArraySingleArgumentConstructorStub stub(isolate(), kind, override_mode); |
| 4022 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); | 4024 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); |
| 4023 __ bind(&done); | 4025 __ bind(&done); |
| 4024 } else { | 4026 } else { |
| 4025 ArrayNArgumentsConstructorStub stub(kind, override_mode); | 4027 ArrayNArgumentsConstructorStub stub(isolate(), kind, override_mode); |
| 4026 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); | 4028 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); |
| 4027 } | 4029 } |
| 4028 } | 4030 } |
| 4029 | 4031 |
| 4030 | 4032 |
| 4031 void LCodeGen::DoCallRuntime(LCallRuntime* instr) { | 4033 void LCodeGen::DoCallRuntime(LCallRuntime* instr) { |
| 4032 CallRuntime(instr->function(), instr->arity(), instr); | 4034 CallRuntime(instr->function(), instr->arity(), instr); |
| 4033 } | 4035 } |
| 4034 | 4036 |
| 4035 | 4037 |
| (...skipping 387 matching lines...) |
| 4423 // Write barrier. | 4425 // Write barrier. |
| 4424 __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg, | 4426 __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg, |
| 4425 scratch, GetRAState(), kDontSaveFPRegs); | 4427 scratch, GetRAState(), kDontSaveFPRegs); |
| 4426 } else { | 4428 } else { |
| 4427 ASSERT(object_reg.is(a0)); | 4429 ASSERT(object_reg.is(a0)); |
| 4428 ASSERT(ToRegister(instr->context()).is(cp)); | 4430 ASSERT(ToRegister(instr->context()).is(cp)); |
| 4429 PushSafepointRegistersScope scope( | 4431 PushSafepointRegistersScope scope( |
| 4430 this, Safepoint::kWithRegistersAndDoubles); | 4432 this, Safepoint::kWithRegistersAndDoubles); |
| 4431 __ li(a1, Operand(to_map)); | 4433 __ li(a1, Operand(to_map)); |
| 4432 bool is_js_array = from_map->instance_type() == JS_ARRAY_TYPE; | 4434 bool is_js_array = from_map->instance_type() == JS_ARRAY_TYPE; |
| 4433 TransitionElementsKindStub stub(from_kind, to_kind, is_js_array); | 4435 TransitionElementsKindStub stub(isolate(), from_kind, to_kind, is_js_array); |
| 4434 __ CallStub(&stub); | 4436 __ CallStub(&stub); |
| 4435 RecordSafepointWithRegistersAndDoubles( | 4437 RecordSafepointWithRegistersAndDoubles( |
| 4436 instr->pointer_map(), 0, Safepoint::kLazyDeopt); | 4438 instr->pointer_map(), 0, Safepoint::kLazyDeopt); |
| 4437 } | 4439 } |
| 4438 __ bind(¬_applicable); | 4440 __ bind(¬_applicable); |
| 4439 } | 4441 } |
| 4440 | 4442 |
| 4441 | 4443 |
| 4442 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) { | 4444 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) { |
| 4443 Register object = ToRegister(instr->object()); | 4445 Register object = ToRegister(instr->object()); |
| 4444 Register temp = ToRegister(instr->temp()); | 4446 Register temp = ToRegister(instr->temp()); |
| 4445 Label no_memento_found; | 4447 Label no_memento_found; |
| 4446 __ TestJSArrayForAllocationMemento(object, temp, &no_memento_found, | 4448 __ TestJSArrayForAllocationMemento(object, temp, &no_memento_found, |
| 4447 ne, &no_memento_found); | 4449 ne, &no_memento_found); |
| 4448 DeoptimizeIf(al, instr->environment()); | 4450 DeoptimizeIf(al, instr->environment()); |
| 4449 __ bind(&no_memento_found); | 4451 __ bind(&no_memento_found); |
| 4450 } | 4452 } |
| 4451 | 4453 |
| 4452 | 4454 |
| 4453 void LCodeGen::DoStringAdd(LStringAdd* instr) { | 4455 void LCodeGen::DoStringAdd(LStringAdd* instr) { |
| 4454 ASSERT(ToRegister(instr->context()).is(cp)); | 4456 ASSERT(ToRegister(instr->context()).is(cp)); |
| 4455 ASSERT(ToRegister(instr->left()).is(a1)); | 4457 ASSERT(ToRegister(instr->left()).is(a1)); |
| 4456 ASSERT(ToRegister(instr->right()).is(a0)); | 4458 ASSERT(ToRegister(instr->right()).is(a0)); |
| 4457 StringAddStub stub(instr->hydrogen()->flags(), | 4459 StringAddStub stub(isolate(), |
| | 4460 instr->hydrogen()->flags(), |
| 4458 instr->hydrogen()->pretenure_flag()); | 4461 instr->hydrogen()->pretenure_flag()); |
| 4459 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 4462 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 4460 } | 4463 } |
| 4461 | 4464 |
| 4462 | 4465 |
| 4463 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) { | 4466 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) { |
| 4464 class DeferredStringCharCodeAt V8_FINAL : public LDeferredCode { | 4467 class DeferredStringCharCodeAt V8_FINAL : public LDeferredCode { |
| 4465 public: | 4468 public: |
| 4466 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr) | 4469 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr) |
| 4467 : LDeferredCode(codegen), instr_(instr) { } | 4470 : LDeferredCode(codegen), instr_(instr) { } |
| (...skipping 977 matching lines...) |
| 5445 } | 5448 } |
| 5446 } | 5449 } |
| 5447 | 5450 |
| 5448 | 5451 |
| 5449 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { | 5452 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { |
| 5450 ASSERT(ToRegister(instr->context()).is(cp)); | 5453 ASSERT(ToRegister(instr->context()).is(cp)); |
| 5451 // Use the fast case closure allocation code that allocates in new | 5454 // Use the fast case closure allocation code that allocates in new |
| 5452 // space for nested functions that don't need literals cloning. | 5455 // space for nested functions that don't need literals cloning. |
| 5453 bool pretenure = instr->hydrogen()->pretenure(); | 5456 bool pretenure = instr->hydrogen()->pretenure(); |
| 5454 if (!pretenure && instr->hydrogen()->has_no_literals()) { | 5457 if (!pretenure && instr->hydrogen()->has_no_literals()) { |
| 5455 FastNewClosureStub stub(instr->hydrogen()->strict_mode(), | 5458 FastNewClosureStub stub(isolate(), |
| | 5459 instr->hydrogen()->strict_mode(), |
| 5456 instr->hydrogen()->is_generator()); | 5460 instr->hydrogen()->is_generator()); |
| 5457 __ li(a2, Operand(instr->hydrogen()->shared_info())); | 5461 __ li(a2, Operand(instr->hydrogen()->shared_info())); |
| 5458 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 5462 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 5459 } else { | 5463 } else { |
| 5460 __ li(a2, Operand(instr->hydrogen()->shared_info())); | 5464 __ li(a2, Operand(instr->hydrogen()->shared_info())); |
| 5461 __ li(a1, Operand(pretenure ? factory()->true_value() | 5465 __ li(a1, Operand(pretenure ? factory()->true_value() |
| 5462 : factory()->false_value())); | 5466 : factory()->false_value())); |
| 5463 __ Push(cp, a2, a1); | 5467 __ Push(cp, a2, a1); |
| 5464 CallRuntime(Runtime::kHiddenNewClosure, 3, instr); | 5468 CallRuntime(Runtime::kHiddenNewClosure, 3, instr); |
| 5465 } | 5469 } |
| (...skipping 427 matching lines...) |
| 5893 __ lw(result, FieldMemOperand(scratch, | 5897 __ lw(result, FieldMemOperand(scratch, |
| 5894 FixedArray::kHeaderSize - kPointerSize)); | 5898 FixedArray::kHeaderSize - kPointerSize)); |
| 5895 __ bind(deferred->exit()); | 5899 __ bind(deferred->exit()); |
| 5896 __ bind(&done); | 5900 __ bind(&done); |
| 5897 } | 5901 } |
| 5898 | 5902 |
| 5899 | 5903 |
| 5900 #undef __ | 5904 #undef __ |
| 5901 | 5905 |
| 5902 } } // namespace v8::internal | 5906 } } // namespace v8::internal |
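
Taken as a whole, the substantive change in this diff is mechanical: every CodeStub constructor now receives the owning isolate as an explicit first argument (FastNewContextStub, RegExpExecStub, BinaryOpICStub, InstanceofStub, MathPowStub, the Array*ConstructorStub family, TransitionElementsKindStub, StringAddStub, FastNewClosureStub, and so on), while call sites such as CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr) are otherwise unchanged. The sketch below is a minimal, hypothetical C++ illustration of that dependency-threading pattern; the Isolate and RegExpExecStub types here are stand-ins, not V8's real classes.

// Illustrative sketch only: hypothetical stand-in types, not V8's CodeStub API.
#include <cstdio>

struct Isolate {        // stand-in for v8::internal::Isolate
  const char* name;
};

class RegExpExecStub {  // stand-in for a code stub class
 public:
  // After the change, the owning isolate is an explicit constructor argument,
  // so the dependency is visible at every call site instead of being looked
  // up implicitly inside the stub.
  explicit RegExpExecStub(Isolate* isolate) : isolate_(isolate) {}

  void Generate() const {
    std::printf("generating stub for isolate '%s'\n", isolate_->name);
  }

 private:
  Isolate* isolate_;
};

int main() {
  Isolate main_isolate{"main"};
  RegExpExecStub stub(&main_isolate);  // mirrors `RegExpExecStub stub(isolate());`
  stub.Generate();
  return 0;
}

The repeated isolate() first arguments in the NEW column are the V8-side application of this pattern.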