| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 189 matching lines...) |
| 200 if (info()->saves_caller_doubles()) { | 200 if (info()->saves_caller_doubles()) { |
| 201 SaveCallerDoubles(); | 201 SaveCallerDoubles(); |
| 202 } | 202 } |
| 203 | 203 |
| 204 // Possibly allocate a local context. | 204 // Possibly allocate a local context. |
| 205 int heap_slots = info()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; | 205 int heap_slots = info()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; |
| 206 if (heap_slots > 0) { | 206 if (heap_slots > 0) { |
| 207 Comment(";;; Allocate local context"); | 207 Comment(";;; Allocate local context"); |
| 208 // Argument to NewContext is the function, which is in r1. | 208 // Argument to NewContext is the function, which is in r1. |
| 209 if (heap_slots <= FastNewContextStub::kMaximumSlots) { | 209 if (heap_slots <= FastNewContextStub::kMaximumSlots) { |
| 210 FastNewContextStub stub(heap_slots); | 210 FastNewContextStub stub(isolate(), heap_slots); |
| 211 __ CallStub(&stub); | 211 __ CallStub(&stub); |
| 212 } else { | 212 } else { |
| 213 __ push(r1); | 213 __ push(r1); |
| 214 __ CallRuntime(Runtime::kHiddenNewFunctionContext, 1); | 214 __ CallRuntime(Runtime::kHiddenNewFunctionContext, 1); |
| 215 } | 215 } |
| 216 RecordSafepoint(Safepoint::kNoLazyDeopt); | 216 RecordSafepoint(Safepoint::kNoLazyDeopt); |
| 217 // Context is returned in both r0 and cp. It replaces the context | 217 // Context is returned in both r0 and cp. It replaces the context |
| 218 // passed to us. It's saved in the stack and kept live in cp. | 218 // passed to us. It's saved in the stack and kept live in cp. |
| 219 __ mov(cp, r0); | 219 __ mov(cp, r0); |
| 220 __ str(r0, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 220 __ str(r0, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| (...skipping 878 matching lines...) |
| 1099 void LCodeGen::DoParameter(LParameter* instr) { | 1099 void LCodeGen::DoParameter(LParameter* instr) { |
| 1100 // Nothing to do. | 1100 // Nothing to do. |
| 1101 } | 1101 } |
| 1102 | 1102 |
| 1103 | 1103 |
| 1104 void LCodeGen::DoCallStub(LCallStub* instr) { | 1104 void LCodeGen::DoCallStub(LCallStub* instr) { |
| 1105 ASSERT(ToRegister(instr->context()).is(cp)); | 1105 ASSERT(ToRegister(instr->context()).is(cp)); |
| 1106 ASSERT(ToRegister(instr->result()).is(r0)); | 1106 ASSERT(ToRegister(instr->result()).is(r0)); |
| 1107 switch (instr->hydrogen()->major_key()) { | 1107 switch (instr->hydrogen()->major_key()) { |
| 1108 case CodeStub::RegExpExec: { | 1108 case CodeStub::RegExpExec: { |
| 1109 RegExpExecStub stub; | 1109 RegExpExecStub stub(isolate()); |
| 1110 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 1110 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 1111 break; | 1111 break; |
| 1112 } | 1112 } |
| 1113 case CodeStub::SubString: { | 1113 case CodeStub::SubString: { |
| 1114 SubStringStub stub; | 1114 SubStringStub stub(isolate()); |
| 1115 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 1115 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 1116 break; | 1116 break; |
| 1117 } | 1117 } |
| 1118 case CodeStub::StringCompare: { | 1118 case CodeStub::StringCompare: { |
| 1119 StringCompareStub stub; | 1119 StringCompareStub stub(isolate()); |
| 1120 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 1120 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 1121 break; | 1121 break; |
| 1122 } | 1122 } |
| 1123 default: | 1123 default: |
| 1124 UNREACHABLE(); | 1124 UNREACHABLE(); |
| 1125 } | 1125 } |
| 1126 } | 1126 } |
| 1127 | 1127 |
| 1128 | 1128 |
| 1129 void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) { | 1129 void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) { |
| (...skipping 1022 matching lines...) |
| 2152 } | 2152 } |
| 2153 } | 2153 } |
| 2154 | 2154 |
| 2155 | 2155 |
| 2156 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { | 2156 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { |
| 2157 ASSERT(ToRegister(instr->context()).is(cp)); | 2157 ASSERT(ToRegister(instr->context()).is(cp)); |
| 2158 ASSERT(ToRegister(instr->left()).is(r1)); | 2158 ASSERT(ToRegister(instr->left()).is(r1)); |
| 2159 ASSERT(ToRegister(instr->right()).is(r0)); | 2159 ASSERT(ToRegister(instr->right()).is(r0)); |
| 2160 ASSERT(ToRegister(instr->result()).is(r0)); | 2160 ASSERT(ToRegister(instr->result()).is(r0)); |
| 2161 | 2161 |
| 2162 BinaryOpICStub stub(instr->op(), NO_OVERWRITE); | 2162 BinaryOpICStub stub(isolate(), instr->op(), NO_OVERWRITE); |
| 2163 // Block literal pool emission to ensure nop indicating no inlined smi code | 2163 // Block literal pool emission to ensure nop indicating no inlined smi code |
| 2164 // is in the correct position. | 2164 // is in the correct position. |
| 2165 Assembler::BlockConstPoolScope block_const_pool(masm()); | 2165 Assembler::BlockConstPoolScope block_const_pool(masm()); |
| 2166 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 2166 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 2167 } | 2167 } |
| 2168 | 2168 |
| 2169 | 2169 |
| 2170 template<class InstrType> | 2170 template<class InstrType> |
| 2171 void LCodeGen::EmitBranch(InstrType instr, Condition condition) { | 2171 void LCodeGen::EmitBranch(InstrType instr, Condition condition) { |
| 2172 int left_block = instr->TrueDestination(chunk_); | 2172 int left_block = instr->TrueDestination(chunk_); |
| (...skipping 574 matching lines...) |
| 2747 __ cmp(temp, Operand(instr->map())); | 2747 __ cmp(temp, Operand(instr->map())); |
| 2748 EmitBranch(instr, eq); | 2748 EmitBranch(instr, eq); |
| 2749 } | 2749 } |
| 2750 | 2750 |
| 2751 | 2751 |
| 2752 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { | 2752 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { |
| 2753 ASSERT(ToRegister(instr->context()).is(cp)); | 2753 ASSERT(ToRegister(instr->context()).is(cp)); |
| 2754 ASSERT(ToRegister(instr->left()).is(r0)); // Object is in r0. | 2754 ASSERT(ToRegister(instr->left()).is(r0)); // Object is in r0. |
| 2755 ASSERT(ToRegister(instr->right()).is(r1)); // Function is in r1. | 2755 ASSERT(ToRegister(instr->right()).is(r1)); // Function is in r1. |
| 2756 | 2756 |
| 2757 InstanceofStub stub(InstanceofStub::kArgsInRegisters); | 2757 InstanceofStub stub(isolate(), InstanceofStub::kArgsInRegisters); |
| 2758 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 2758 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 2759 | 2759 |
| 2760 __ cmp(r0, Operand::Zero()); | 2760 __ cmp(r0, Operand::Zero()); |
| 2761 __ mov(r0, Operand(factory()->false_value()), LeaveCC, ne); | 2761 __ mov(r0, Operand(factory()->false_value()), LeaveCC, ne); |
| 2762 __ mov(r0, Operand(factory()->true_value()), LeaveCC, eq); | 2762 __ mov(r0, Operand(factory()->true_value()), LeaveCC, eq); |
| 2763 } | 2763 } |
| 2764 | 2764 |
| 2765 | 2765 |
| 2766 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { | 2766 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { |
| 2767 class DeferredInstanceOfKnownGlobal V8_FINAL : public LDeferredCode { | 2767 class DeferredInstanceOfKnownGlobal V8_FINAL : public LDeferredCode { |
| (...skipping 76 matching lines...) |
| 2844 | 2844 |
| 2845 void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, | 2845 void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, |
| 2846 Label* map_check) { | 2846 Label* map_check) { |
| 2847 InstanceofStub::Flags flags = InstanceofStub::kNoFlags; | 2847 InstanceofStub::Flags flags = InstanceofStub::kNoFlags; |
| 2848 flags = static_cast<InstanceofStub::Flags>( | 2848 flags = static_cast<InstanceofStub::Flags>( |
| 2849 flags | InstanceofStub::kArgsInRegisters); | 2849 flags | InstanceofStub::kArgsInRegisters); |
| 2850 flags = static_cast<InstanceofStub::Flags>( | 2850 flags = static_cast<InstanceofStub::Flags>( |
| 2851 flags | InstanceofStub::kCallSiteInlineCheck); | 2851 flags | InstanceofStub::kCallSiteInlineCheck); |
| 2852 flags = static_cast<InstanceofStub::Flags>( | 2852 flags = static_cast<InstanceofStub::Flags>( |
| 2853 flags | InstanceofStub::kReturnTrueFalseObject); | 2853 flags | InstanceofStub::kReturnTrueFalseObject); |
| 2854 InstanceofStub stub(flags); | 2854 InstanceofStub stub(isolate(), flags); |
| 2855 | 2855 |
| 2856 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); | 2856 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); |
| 2857 LoadContextFromDeferred(instr->context()); | 2857 LoadContextFromDeferred(instr->context()); |
| 2858 | 2858 |
| 2859 __ Move(InstanceofStub::right(), instr->function()); | 2859 __ Move(InstanceofStub::right(), instr->function()); |
| 2860 static const int kAdditionalDelta = 4; | 2860 static const int kAdditionalDelta = 4; |
| 2861 // Make sure that code size is predictable, since we use specific constant | 2861 // Make sure that code size is predictable, since we use specific constant |
| 2862 // offsets in the code to find embedded values. | 2862 // offsets in the code to find embedded values. |
| 2863 PredictableCodeSizeScope predictable(masm_, 5 * Assembler::kInstrSize); | 2863 PredictableCodeSizeScope predictable(masm_, 5 * Assembler::kInstrSize); |
| 2864 int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta; | 2864 int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta; |
| (...skipping 998 matching lines...) |
| 3863 // Having marked this as a call, we can use any registers. | 3863 // Having marked this as a call, we can use any registers. |
| 3864 // Just make sure that the input/output registers are the expected ones. | 3864 // Just make sure that the input/output registers are the expected ones. |
| 3865 ASSERT(!instr->right()->IsDoubleRegister() || | 3865 ASSERT(!instr->right()->IsDoubleRegister() || |
| 3866 ToDoubleRegister(instr->right()).is(d1)); | 3866 ToDoubleRegister(instr->right()).is(d1)); |
| 3867 ASSERT(!instr->right()->IsRegister() || | 3867 ASSERT(!instr->right()->IsRegister() || |
| 3868 ToRegister(instr->right()).is(r2)); | 3868 ToRegister(instr->right()).is(r2)); |
| 3869 ASSERT(ToDoubleRegister(instr->left()).is(d0)); | 3869 ASSERT(ToDoubleRegister(instr->left()).is(d0)); |
| 3870 ASSERT(ToDoubleRegister(instr->result()).is(d2)); | 3870 ASSERT(ToDoubleRegister(instr->result()).is(d2)); |
| 3871 | 3871 |
| 3872 if (exponent_type.IsSmi()) { | 3872 if (exponent_type.IsSmi()) { |
| 3873 MathPowStub stub(MathPowStub::TAGGED); | 3873 MathPowStub stub(isolate(), MathPowStub::TAGGED); |
| 3874 __ CallStub(&stub); | 3874 __ CallStub(&stub); |
| 3875 } else if (exponent_type.IsTagged()) { | 3875 } else if (exponent_type.IsTagged()) { |
| 3876 Label no_deopt; | 3876 Label no_deopt; |
| 3877 __ JumpIfSmi(r2, &no_deopt); | 3877 __ JumpIfSmi(r2, &no_deopt); |
| 3878 __ ldr(r6, FieldMemOperand(r2, HeapObject::kMapOffset)); | 3878 __ ldr(r6, FieldMemOperand(r2, HeapObject::kMapOffset)); |
| 3879 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex); | 3879 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex); |
| 3880 __ cmp(r6, Operand(ip)); | 3880 __ cmp(r6, Operand(ip)); |
| 3881 DeoptimizeIf(ne, instr->environment()); | 3881 DeoptimizeIf(ne, instr->environment()); |
| 3882 __ bind(&no_deopt); | 3882 __ bind(&no_deopt); |
| 3883 MathPowStub stub(MathPowStub::TAGGED); | 3883 MathPowStub stub(isolate(), MathPowStub::TAGGED); |
| 3884 __ CallStub(&stub); | 3884 __ CallStub(&stub); |
| 3885 } else if (exponent_type.IsInteger32()) { | 3885 } else if (exponent_type.IsInteger32()) { |
| 3886 MathPowStub stub(MathPowStub::INTEGER); | 3886 MathPowStub stub(isolate(), MathPowStub::INTEGER); |
| 3887 __ CallStub(&stub); | 3887 __ CallStub(&stub); |
| 3888 } else { | 3888 } else { |
| 3889 ASSERT(exponent_type.IsDouble()); | 3889 ASSERT(exponent_type.IsDouble()); |
| 3890 MathPowStub stub(MathPowStub::DOUBLE); | 3890 MathPowStub stub(isolate(), MathPowStub::DOUBLE); |
| 3891 __ CallStub(&stub); | 3891 __ CallStub(&stub); |
| 3892 } | 3892 } |
| 3893 } | 3893 } |
| 3894 | 3894 |
| 3895 | 3895 |
| 3896 void LCodeGen::DoMathExp(LMathExp* instr) { | 3896 void LCodeGen::DoMathExp(LMathExp* instr) { |
| 3897 DwVfpRegister input = ToDoubleRegister(instr->value()); | 3897 DwVfpRegister input = ToDoubleRegister(instr->value()); |
| 3898 DwVfpRegister result = ToDoubleRegister(instr->result()); | 3898 DwVfpRegister result = ToDoubleRegister(instr->result()); |
| 3899 DwVfpRegister double_scratch1 = ToDoubleRegister(instr->double_temp()); | 3899 DwVfpRegister double_scratch1 = ToDoubleRegister(instr->double_temp()); |
| 3900 DwVfpRegister double_scratch2 = double_scratch0(); | 3900 DwVfpRegister double_scratch2 = double_scratch0(); |
| (...skipping 86 matching lines...) |
| 3987 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT); | 3987 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT); |
| 3988 } | 3988 } |
| 3989 | 3989 |
| 3990 | 3990 |
| 3991 void LCodeGen::DoCallFunction(LCallFunction* instr) { | 3991 void LCodeGen::DoCallFunction(LCallFunction* instr) { |
| 3992 ASSERT(ToRegister(instr->context()).is(cp)); | 3992 ASSERT(ToRegister(instr->context()).is(cp)); |
| 3993 ASSERT(ToRegister(instr->function()).is(r1)); | 3993 ASSERT(ToRegister(instr->function()).is(r1)); |
| 3994 ASSERT(ToRegister(instr->result()).is(r0)); | 3994 ASSERT(ToRegister(instr->result()).is(r0)); |
| 3995 | 3995 |
| 3996 int arity = instr->arity(); | 3996 int arity = instr->arity(); |
| 3997 CallFunctionStub stub(arity, instr->hydrogen()->function_flags()); | 3997 CallFunctionStub stub(isolate(), arity, instr->hydrogen()->function_flags()); |
| 3998 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 3998 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 3999 } | 3999 } |
| 4000 | 4000 |
| 4001 | 4001 |
| 4002 void LCodeGen::DoCallNew(LCallNew* instr) { | 4002 void LCodeGen::DoCallNew(LCallNew* instr) { |
| 4003 ASSERT(ToRegister(instr->context()).is(cp)); | 4003 ASSERT(ToRegister(instr->context()).is(cp)); |
| 4004 ASSERT(ToRegister(instr->constructor()).is(r1)); | 4004 ASSERT(ToRegister(instr->constructor()).is(r1)); |
| 4005 ASSERT(ToRegister(instr->result()).is(r0)); | 4005 ASSERT(ToRegister(instr->result()).is(r0)); |
| 4006 | 4006 |
| 4007 __ mov(r0, Operand(instr->arity())); | 4007 __ mov(r0, Operand(instr->arity())); |
| 4008 // No cell in r2 for construct type feedback in optimized code | 4008 // No cell in r2 for construct type feedback in optimized code |
| 4009 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); | 4009 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); |
| 4010 CallConstructStub stub(NO_CALL_FUNCTION_FLAGS); | 4010 CallConstructStub stub(isolate(), NO_CALL_FUNCTION_FLAGS); |
| 4011 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); | 4011 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); |
| 4012 } | 4012 } |
| 4013 | 4013 |
| 4014 | 4014 |
| 4015 void LCodeGen::DoCallNewArray(LCallNewArray* instr) { | 4015 void LCodeGen::DoCallNewArray(LCallNewArray* instr) { |
| 4016 ASSERT(ToRegister(instr->context()).is(cp)); | 4016 ASSERT(ToRegister(instr->context()).is(cp)); |
| 4017 ASSERT(ToRegister(instr->constructor()).is(r1)); | 4017 ASSERT(ToRegister(instr->constructor()).is(r1)); |
| 4018 ASSERT(ToRegister(instr->result()).is(r0)); | 4018 ASSERT(ToRegister(instr->result()).is(r0)); |
| 4019 | 4019 |
| 4020 __ mov(r0, Operand(instr->arity())); | 4020 __ mov(r0, Operand(instr->arity())); |
| 4021 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); | 4021 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); |
| 4022 ElementsKind kind = instr->hydrogen()->elements_kind(); | 4022 ElementsKind kind = instr->hydrogen()->elements_kind(); |
| 4023 AllocationSiteOverrideMode override_mode = | 4023 AllocationSiteOverrideMode override_mode = |
| 4024 (AllocationSite::GetMode(kind) == TRACK_ALLOCATION_SITE) | 4024 (AllocationSite::GetMode(kind) == TRACK_ALLOCATION_SITE) |
| 4025 ? DISABLE_ALLOCATION_SITES | 4025 ? DISABLE_ALLOCATION_SITES |
| 4026 : DONT_OVERRIDE; | 4026 : DONT_OVERRIDE; |
| 4027 | 4027 |
| 4028 if (instr->arity() == 0) { | 4028 if (instr->arity() == 0) { |
| 4029 ArrayNoArgumentConstructorStub stub(kind, override_mode); | 4029 ArrayNoArgumentConstructorStub stub(isolate(), kind, override_mode); |
| 4030 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); | 4030 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); |
| 4031 } else if (instr->arity() == 1) { | 4031 } else if (instr->arity() == 1) { |
| 4032 Label done; | 4032 Label done; |
| 4033 if (IsFastPackedElementsKind(kind)) { | 4033 if (IsFastPackedElementsKind(kind)) { |
| 4034 Label packed_case; | 4034 Label packed_case; |
| 4035 // We might need a change here | 4035 // We might need a change here |
| 4036 // look at the first argument | 4036 // look at the first argument |
| 4037 __ ldr(r5, MemOperand(sp, 0)); | 4037 __ ldr(r5, MemOperand(sp, 0)); |
| 4038 __ cmp(r5, Operand::Zero()); | 4038 __ cmp(r5, Operand::Zero()); |
| 4039 __ b(eq, &packed_case); | 4039 __ b(eq, &packed_case); |
| 4040 | 4040 |
| 4041 ElementsKind holey_kind = GetHoleyElementsKind(kind); | 4041 ElementsKind holey_kind = GetHoleyElementsKind(kind); |
| 4042 ArraySingleArgumentConstructorStub stub(holey_kind, override_mode); | 4042 ArraySingleArgumentConstructorStub stub(isolate(), |
| 4043 holey_kind, |
| 4044 override_mode); |
| 4043 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); | 4045 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); |
| 4044 __ jmp(&done); | 4046 __ jmp(&done); |
| 4045 __ bind(&packed_case); | 4047 __ bind(&packed_case); |
| 4046 } | 4048 } |
| 4047 | 4049 |
| 4048 ArraySingleArgumentConstructorStub stub(kind, override_mode); | 4050 ArraySingleArgumentConstructorStub stub(isolate(), kind, override_mode); |
| 4049 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); | 4051 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); |
| 4050 __ bind(&done); | 4052 __ bind(&done); |
| 4051 } else { | 4053 } else { |
| 4052 ArrayNArgumentsConstructorStub stub(kind, override_mode); | 4054 ArrayNArgumentsConstructorStub stub(isolate(), kind, override_mode); |
| 4053 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); | 4055 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); |
| 4054 } | 4056 } |
| 4055 } | 4057 } |
| 4056 | 4058 |
| 4057 | 4059 |
| 4058 void LCodeGen::DoCallRuntime(LCallRuntime* instr) { | 4060 void LCodeGen::DoCallRuntime(LCallRuntime* instr) { |
| 4059 CallRuntime(instr->function(), instr->arity(), instr); | 4061 CallRuntime(instr->function(), instr->arity(), instr); |
| 4060 } | 4062 } |
| 4061 | 4063 |
| 4062 | 4064 |
| (...skipping 376 matching lines...) |
| 4439 // Write barrier. | 4441 // Write barrier. |
| 4440 __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg, | 4442 __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg, |
| 4441 scratch, GetLinkRegisterState(), kDontSaveFPRegs); | 4443 scratch, GetLinkRegisterState(), kDontSaveFPRegs); |
| 4442 } else { | 4444 } else { |
| 4443 ASSERT(ToRegister(instr->context()).is(cp)); | 4445 ASSERT(ToRegister(instr->context()).is(cp)); |
| 4444 ASSERT(object_reg.is(r0)); | 4446 ASSERT(object_reg.is(r0)); |
| 4445 PushSafepointRegistersScope scope( | 4447 PushSafepointRegistersScope scope( |
| 4446 this, Safepoint::kWithRegistersAndDoubles); | 4448 this, Safepoint::kWithRegistersAndDoubles); |
| 4447 __ Move(r1, to_map); | 4449 __ Move(r1, to_map); |
| 4448 bool is_js_array = from_map->instance_type() == JS_ARRAY_TYPE; | 4450 bool is_js_array = from_map->instance_type() == JS_ARRAY_TYPE; |
| 4449 TransitionElementsKindStub stub(from_kind, to_kind, is_js_array); | 4451 TransitionElementsKindStub stub(isolate(), from_kind, to_kind, is_js_array); |
| 4450 __ CallStub(&stub); | 4452 __ CallStub(&stub); |
| 4451 RecordSafepointWithRegistersAndDoubles( | 4453 RecordSafepointWithRegistersAndDoubles( |
| 4452 instr->pointer_map(), 0, Safepoint::kLazyDeopt); | 4454 instr->pointer_map(), 0, Safepoint::kLazyDeopt); |
| 4453 } | 4455 } |
| 4454 __ bind(¬_applicable); | 4456 __ bind(¬_applicable); |
| 4455 } | 4457 } |
| 4456 | 4458 |
| 4457 | 4459 |
| 4458 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) { | 4460 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) { |
| 4459 Register object = ToRegister(instr->object()); | 4461 Register object = ToRegister(instr->object()); |
| 4460 Register temp = ToRegister(instr->temp()); | 4462 Register temp = ToRegister(instr->temp()); |
| 4461 Label no_memento_found; | 4463 Label no_memento_found; |
| 4462 __ TestJSArrayForAllocationMemento(object, temp, &no_memento_found); | 4464 __ TestJSArrayForAllocationMemento(object, temp, &no_memento_found); |
| 4463 DeoptimizeIf(eq, instr->environment()); | 4465 DeoptimizeIf(eq, instr->environment()); |
| 4464 __ bind(&no_memento_found); | 4466 __ bind(&no_memento_found); |
| 4465 } | 4467 } |
| 4466 | 4468 |
| 4467 | 4469 |
| 4468 void LCodeGen::DoStringAdd(LStringAdd* instr) { | 4470 void LCodeGen::DoStringAdd(LStringAdd* instr) { |
| 4469 ASSERT(ToRegister(instr->context()).is(cp)); | 4471 ASSERT(ToRegister(instr->context()).is(cp)); |
| 4470 ASSERT(ToRegister(instr->left()).is(r1)); | 4472 ASSERT(ToRegister(instr->left()).is(r1)); |
| 4471 ASSERT(ToRegister(instr->right()).is(r0)); | 4473 ASSERT(ToRegister(instr->right()).is(r0)); |
| 4472 StringAddStub stub(instr->hydrogen()->flags(), | 4474 StringAddStub stub(isolate(), |
| 4475 instr->hydrogen()->flags(), |
| 4473 instr->hydrogen()->pretenure_flag()); | 4476 instr->hydrogen()->pretenure_flag()); |
| 4474 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 4477 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 4475 } | 4478 } |
| 4476 | 4479 |
| 4477 | 4480 |
| 4478 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) { | 4481 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) { |
| 4479 class DeferredStringCharCodeAt V8_FINAL : public LDeferredCode { | 4482 class DeferredStringCharCodeAt V8_FINAL : public LDeferredCode { |
| 4480 public: | 4483 public: |
| 4481 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr) | 4484 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr) |
| 4482 : LDeferredCode(codegen), instr_(instr) { } | 4485 : LDeferredCode(codegen), instr_(instr) { } |
| (...skipping 957 matching lines...) |
| 5440 __ CopyFields(r0, r1, double_scratch0(), size / kPointerSize); | 5443 __ CopyFields(r0, r1, double_scratch0(), size / kPointerSize); |
| 5441 } | 5444 } |
| 5442 | 5445 |
| 5443 | 5446 |
| 5444 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { | 5447 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { |
| 5445 ASSERT(ToRegister(instr->context()).is(cp)); | 5448 ASSERT(ToRegister(instr->context()).is(cp)); |
| 5446 // Use the fast case closure allocation code that allocates in new | 5449 // Use the fast case closure allocation code that allocates in new |
| 5447 // space for nested functions that don't need literals cloning. | 5450 // space for nested functions that don't need literals cloning. |
| 5448 bool pretenure = instr->hydrogen()->pretenure(); | 5451 bool pretenure = instr->hydrogen()->pretenure(); |
| 5449 if (!pretenure && instr->hydrogen()->has_no_literals()) { | 5452 if (!pretenure && instr->hydrogen()->has_no_literals()) { |
| 5450 FastNewClosureStub stub(instr->hydrogen()->strict_mode(), | 5453 FastNewClosureStub stub(isolate(), |
| 5454 instr->hydrogen()->strict_mode(), |
| 5451 instr->hydrogen()->is_generator()); | 5455 instr->hydrogen()->is_generator()); |
| 5452 __ mov(r2, Operand(instr->hydrogen()->shared_info())); | 5456 __ mov(r2, Operand(instr->hydrogen()->shared_info())); |
| 5453 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 5457 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 5454 } else { | 5458 } else { |
| 5455 __ mov(r2, Operand(instr->hydrogen()->shared_info())); | 5459 __ mov(r2, Operand(instr->hydrogen()->shared_info())); |
| 5456 __ mov(r1, Operand(pretenure ? factory()->true_value() | 5460 __ mov(r1, Operand(pretenure ? factory()->true_value() |
| 5457 : factory()->false_value())); | 5461 : factory()->false_value())); |
| 5458 __ Push(cp, r2, r1); | 5462 __ Push(cp, r2, r1); |
| 5459 CallRuntime(Runtime::kHiddenNewClosure, 3, instr); | 5463 CallRuntime(Runtime::kHiddenNewClosure, 3, instr); |
| 5460 } | 5464 } |
| (...skipping 394 matching lines...) |
| 5855 __ ldr(result, FieldMemOperand(scratch, | 5859 __ ldr(result, FieldMemOperand(scratch, |
| 5856 FixedArray::kHeaderSize - kPointerSize)); | 5860 FixedArray::kHeaderSize - kPointerSize)); |
| 5857 __ bind(deferred->exit()); | 5861 __ bind(deferred->exit()); |
| 5858 __ bind(&done); | 5862 __ bind(&done); |
| 5859 } | 5863 } |
| 5860 | 5864 |
| 5861 | 5865 |
| 5862 #undef __ | 5866 #undef __ |
| 5863 | 5867 |
| 5864 } } // namespace v8::internal | 5868 } } // namespace v8::internal |
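Every hunk in this CL applies the same mechanical change: code stub constructors now receive `isolate()` as their first argument at the call site (the existing `stub.GetCode(isolate())` calls are left untouched in this file). The standalone sketch below models that pattern outside of V8, using invented `OldStyleStub`/`NewStyleStub` types; it is not the real `CodeStub` API, just an illustration of threading the per-isolate context through the constructor instead of through each later call.

```cpp
// Standalone sketch (not V8 code) of the refactoring pattern in this CL:
// a per-process context object ("Isolate") is captured once at stub
// construction time instead of being supplied on every subsequent call.
#include <cstdio>

struct Isolate {
  const char* name;
};

// Old shape: the stub is context-free and the isolate is passed to GetCode().
struct OldStyleStub {
  void GetCode(Isolate* isolate) const {
    std::printf("old: generating code for %s\n", isolate->name);
  }
};

// New shape: the isolate is stored by the constructor, so later calls
// no longer need it threaded through explicitly.
struct NewStyleStub {
  explicit NewStyleStub(Isolate* isolate) : isolate_(isolate) {}
  void GetCode() const {
    std::printf("new: generating code for %s\n", isolate_->name);
  }

 private:
  Isolate* isolate_;
};

int main() {
  Isolate isolate{"main-isolate"};

  OldStyleStub old_stub;
  old_stub.GetCode(&isolate);       // isolate supplied per call

  NewStyleStub new_stub(&isolate);  // isolate supplied once, at construction
  new_stub.GetCode();
  return 0;
}
```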