OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1052 matching lines...)
1063 void LCodeGen::DoParameter(LParameter* instr) { | 1063 void LCodeGen::DoParameter(LParameter* instr) { |
1064 // Nothing to do. | 1064 // Nothing to do. |
1065 } | 1065 } |
1066 | 1066 |
1067 | 1067 |
1068 void LCodeGen::DoCallStub(LCallStub* instr) { | 1068 void LCodeGen::DoCallStub(LCallStub* instr) { |
1069 ASSERT(ToRegister(instr->result()).is(r0)); | 1069 ASSERT(ToRegister(instr->result()).is(r0)); |
1070 switch (instr->hydrogen()->major_key()) { | 1070 switch (instr->hydrogen()->major_key()) { |
1071 case CodeStub::RegExpConstructResult: { | 1071 case CodeStub::RegExpConstructResult: { |
1072 RegExpConstructResultStub stub; | 1072 RegExpConstructResultStub stub; |
1073 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 1073 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
1074 break; | 1074 break; |
1075 } | 1075 } |
1076 case CodeStub::RegExpExec: { | 1076 case CodeStub::RegExpExec: { |
1077 RegExpExecStub stub; | 1077 RegExpExecStub stub; |
1078 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 1078 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
1079 break; | 1079 break; |
1080 } | 1080 } |
1081 case CodeStub::SubString: { | 1081 case CodeStub::SubString: { |
1082 SubStringStub stub; | 1082 SubStringStub stub; |
1083 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 1083 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
1084 break; | 1084 break; |
1085 } | 1085 } |
1086 case CodeStub::NumberToString: { | 1086 case CodeStub::NumberToString: { |
1087 NumberToStringStub stub; | 1087 NumberToStringStub stub; |
1088 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 1088 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
1089 break; | 1089 break; |
1090 } | 1090 } |
1091 case CodeStub::StringAdd: { | 1091 case CodeStub::StringAdd: { |
1092 StringAddStub stub(NO_STRING_ADD_FLAGS); | 1092 StringAddStub stub(NO_STRING_ADD_FLAGS); |
1093 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 1093 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
1094 break; | 1094 break; |
1095 } | 1095 } |
1096 case CodeStub::StringCompare: { | 1096 case CodeStub::StringCompare: { |
1097 StringCompareStub stub; | 1097 StringCompareStub stub; |
1098 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 1098 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
1099 break; | 1099 break; |
1100 } | 1100 } |
1101 case CodeStub::TranscendentalCache: { | 1101 case CodeStub::TranscendentalCache: { |
1102 __ ldr(r0, MemOperand(sp, 0)); | 1102 __ ldr(r0, MemOperand(sp, 0)); |
1103 TranscendentalCacheStub stub(instr->transcendental_type(), | 1103 TranscendentalCacheStub stub(instr->transcendental_type(), |
1104 TranscendentalCacheStub::TAGGED); | 1104 TranscendentalCacheStub::TAGGED); |
1105 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 1105 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
1106 break; | 1106 break; |
1107 } | 1107 } |
1108 default: | 1108 default: |
1109 UNREACHABLE(); | 1109 UNREACHABLE(); |
1110 } | 1110 } |
1111 } | 1111 } |
1112 | 1112 |
1113 | 1113 |
1114 void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) { | 1114 void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) { |
1115 // Nothing to do. | 1115 // Nothing to do. |
(...skipping 1001 matching lines...)
2117 | 2117 |
2118 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { | 2118 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { |
2119 ASSERT(ToRegister(instr->left()).is(r1)); | 2119 ASSERT(ToRegister(instr->left()).is(r1)); |
2120 ASSERT(ToRegister(instr->right()).is(r0)); | 2120 ASSERT(ToRegister(instr->right()).is(r0)); |
2121 ASSERT(ToRegister(instr->result()).is(r0)); | 2121 ASSERT(ToRegister(instr->result()).is(r0)); |
2122 | 2122 |
2123 BinaryOpStub stub(instr->op(), NO_OVERWRITE); | 2123 BinaryOpStub stub(instr->op(), NO_OVERWRITE); |
2124 // Block literal pool emission to ensure nop indicating no inlined smi code | 2124 // Block literal pool emission to ensure nop indicating no inlined smi code |
2125 // is in the correct position. | 2125 // is in the correct position. |
2126 Assembler::BlockConstPoolScope block_const_pool(masm()); | 2126 Assembler::BlockConstPoolScope block_const_pool(masm()); |
2127 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 2127 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
2128 __ nop(); // Signals no inlined code. | 2128 __ nop(); // Signals no inlined code. |
2129 } | 2129 } |
2130 | 2130 |
2131 | 2131 |
2132 int LCodeGen::GetNextEmittedBlock(int block) { | 2132 int LCodeGen::GetNextEmittedBlock(int block) { |
2133 for (int i = block + 1; i < graph()->blocks()->length(); ++i) { | 2133 for (int i = block + 1; i < graph()->blocks()->length(); ++i) { |
2134 LLabel* label = chunk_->GetLabel(i); | 2134 LLabel* label = chunk_->GetLabel(i); |
2135 if (!label->HasReplacement()) return i; | 2135 if (!label->HasReplacement()) return i; |
2136 } | 2136 } |
2137 return -1; | 2137 return -1; |
(...skipping 393 matching lines...)
2531 return kNoCondition; | 2531 return kNoCondition; |
2532 } | 2532 } |
2533 } | 2533 } |
2534 | 2534 |
2535 | 2535 |
2536 void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) { | 2536 void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) { |
2537 Token::Value op = instr->op(); | 2537 Token::Value op = instr->op(); |
2538 int true_block = chunk_->LookupDestination(instr->true_block_id()); | 2538 int true_block = chunk_->LookupDestination(instr->true_block_id()); |
2539 int false_block = chunk_->LookupDestination(instr->false_block_id()); | 2539 int false_block = chunk_->LookupDestination(instr->false_block_id()); |
2540 | 2540 |
2541 Handle<Code> ic = CompareIC::GetUninitialized(op); | 2541 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op); |
2542 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 2542 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
2543 // This instruction also signals no smi code inlined. | 2543 // This instruction also signals no smi code inlined. |
2544 __ cmp(r0, Operand::Zero()); | 2544 __ cmp(r0, Operand::Zero()); |
2545 | 2545 |
2546 Condition condition = ComputeCompareCondition(op); | 2546 Condition condition = ComputeCompareCondition(op); |
2547 | 2547 |
2548 EmitBranch(true_block, false_block, condition); | 2548 EmitBranch(true_block, false_block, condition); |
2549 } | 2549 } |
2550 | 2550 |
2551 | 2551 |
(...skipping 154 matching lines...)
2706 __ cmp(temp, Operand(instr->map())); | 2706 __ cmp(temp, Operand(instr->map())); |
2707 EmitBranch(true_block, false_block, eq); | 2707 EmitBranch(true_block, false_block, eq); |
2708 } | 2708 } |
2709 | 2709 |
2710 | 2710 |
2711 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { | 2711 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { |
2712 ASSERT(ToRegister(instr->left()).is(r0)); // Object is in r0. | 2712 ASSERT(ToRegister(instr->left()).is(r0)); // Object is in r0. |
2713 ASSERT(ToRegister(instr->right()).is(r1)); // Function is in r1. | 2713 ASSERT(ToRegister(instr->right()).is(r1)); // Function is in r1. |
2714 | 2714 |
2715 InstanceofStub stub(InstanceofStub::kArgsInRegisters); | 2715 InstanceofStub stub(InstanceofStub::kArgsInRegisters); |
2716 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 2716 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
2717 | 2717 |
2718 __ cmp(r0, Operand::Zero()); | 2718 __ cmp(r0, Operand::Zero()); |
2719 __ mov(r0, Operand(factory()->false_value()), LeaveCC, ne); | 2719 __ mov(r0, Operand(factory()->false_value()), LeaveCC, ne); |
2720 __ mov(r0, Operand(factory()->true_value()), LeaveCC, eq); | 2720 __ mov(r0, Operand(factory()->true_value()), LeaveCC, eq); |
2721 } | 2721 } |
2722 | 2722 |
2723 | 2723 |
2724 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { | 2724 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { |
2725 class DeferredInstanceOfKnownGlobal: public LDeferredCode { | 2725 class DeferredInstanceOfKnownGlobal: public LDeferredCode { |
2726 public: | 2726 public: |
(...skipping 108 matching lines...)
2835 __ bind(&before_push_delta); | 2835 __ bind(&before_push_delta); |
2836 __ BlockConstPoolFor(kAdditionalDelta); | 2836 __ BlockConstPoolFor(kAdditionalDelta); |
2837 __ mov(temp, Operand(delta * kPointerSize)); | 2837 __ mov(temp, Operand(delta * kPointerSize)); |
2838 // The mov above can generate one or two instructions. The delta was computed | 2838 // The mov above can generate one or two instructions. The delta was computed |
2839 // for two instructions, so we need to pad here in case of one instruction. | 2839 // for two instructions, so we need to pad here in case of one instruction. |
2840 if (masm_->InstructionsGeneratedSince(&before_push_delta) != 2) { | 2840 if (masm_->InstructionsGeneratedSince(&before_push_delta) != 2) { |
2841 ASSERT_EQ(1, masm_->InstructionsGeneratedSince(&before_push_delta)); | 2841 ASSERT_EQ(1, masm_->InstructionsGeneratedSince(&before_push_delta)); |
2842 __ nop(); | 2842 __ nop(); |
2843 } | 2843 } |
2844 __ StoreToSafepointRegisterSlot(temp, temp); | 2844 __ StoreToSafepointRegisterSlot(temp, temp); |
2845 CallCodeGeneric(stub.GetCode(), | 2845 CallCodeGeneric(stub.GetCode(isolate()), |
2846 RelocInfo::CODE_TARGET, | 2846 RelocInfo::CODE_TARGET, |
2847 instr, | 2847 instr, |
2848 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); | 2848 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); |
2849 LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment(); | 2849 LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment(); |
2850 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | 2850 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
2851 // Put the result value into the result register slot and | 2851 // Put the result value into the result register slot and |
2852 // restore all registers. | 2852 // restore all registers. |
2853 __ StoreToSafepointRegisterSlot(result, result); | 2853 __ StoreToSafepointRegisterSlot(result, result); |
2854 } | 2854 } |
2855 | 2855 |
2856 | 2856 |
2857 void LCodeGen::DoInstanceSize(LInstanceSize* instr) { | 2857 void LCodeGen::DoInstanceSize(LInstanceSize* instr) { |
2858 Register object = ToRegister(instr->object()); | 2858 Register object = ToRegister(instr->object()); |
2859 Register result = ToRegister(instr->result()); | 2859 Register result = ToRegister(instr->result()); |
2860 __ ldr(result, FieldMemOperand(object, HeapObject::kMapOffset)); | 2860 __ ldr(result, FieldMemOperand(object, HeapObject::kMapOffset)); |
2861 __ ldrb(result, FieldMemOperand(result, Map::kInstanceSizeOffset)); | 2861 __ ldrb(result, FieldMemOperand(result, Map::kInstanceSizeOffset)); |
2862 } | 2862 } |
2863 | 2863 |
2864 | 2864 |
2865 void LCodeGen::DoCmpT(LCmpT* instr) { | 2865 void LCodeGen::DoCmpT(LCmpT* instr) { |
2866 Token::Value op = instr->op(); | 2866 Token::Value op = instr->op(); |
2867 | 2867 |
2868 Handle<Code> ic = CompareIC::GetUninitialized(op); | 2868 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op); |
2869 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 2869 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
2870 // This instruction also signals no smi code inlined. | 2870 // This instruction also signals no smi code inlined. |
2871 __ cmp(r0, Operand::Zero()); | 2871 __ cmp(r0, Operand::Zero()); |
2872 | 2872 |
2873 Condition condition = ComputeCompareCondition(op); | 2873 Condition condition = ComputeCompareCondition(op); |
2874 __ LoadRoot(ToRegister(instr->result()), | 2874 __ LoadRoot(ToRegister(instr->result()), |
2875 Heap::kTrueValueRootIndex, | 2875 Heap::kTrueValueRootIndex, |
2876 condition); | 2876 condition); |
2877 __ LoadRoot(ToRegister(instr->result()), | 2877 __ LoadRoot(ToRegister(instr->result()), |
2878 Heap::kFalseValueRootIndex, | 2878 Heap::kFalseValueRootIndex, |
(...skipping 1248 matching lines...)
4127 MathExpGenerator::EmitMathExp( | 4127 MathExpGenerator::EmitMathExp( |
4128 masm(), input, result, double_scratch1, double_scratch2, | 4128 masm(), input, result, double_scratch1, double_scratch2, |
4129 temp1, temp2, scratch0()); | 4129 temp1, temp2, scratch0()); |
4130 } | 4130 } |
4131 | 4131 |
4132 | 4132 |
4133 void LCodeGen::DoMathLog(LUnaryMathOperation* instr) { | 4133 void LCodeGen::DoMathLog(LUnaryMathOperation* instr) { |
4134 ASSERT(ToDoubleRegister(instr->result()).is(d2)); | 4134 ASSERT(ToDoubleRegister(instr->result()).is(d2)); |
4135 TranscendentalCacheStub stub(TranscendentalCache::LOG, | 4135 TranscendentalCacheStub stub(TranscendentalCache::LOG, |
4136 TranscendentalCacheStub::UNTAGGED); | 4136 TranscendentalCacheStub::UNTAGGED); |
4137 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 4137 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
4138 } | 4138 } |
4139 | 4139 |
4140 | 4140 |
4141 void LCodeGen::DoMathTan(LUnaryMathOperation* instr) { | 4141 void LCodeGen::DoMathTan(LUnaryMathOperation* instr) { |
4142 ASSERT(ToDoubleRegister(instr->result()).is(d2)); | 4142 ASSERT(ToDoubleRegister(instr->result()).is(d2)); |
4143 TranscendentalCacheStub stub(TranscendentalCache::TAN, | 4143 TranscendentalCacheStub stub(TranscendentalCache::TAN, |
4144 TranscendentalCacheStub::UNTAGGED); | 4144 TranscendentalCacheStub::UNTAGGED); |
4145 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 4145 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
4146 } | 4146 } |
4147 | 4147 |
4148 | 4148 |
4149 void LCodeGen::DoMathCos(LUnaryMathOperation* instr) { | 4149 void LCodeGen::DoMathCos(LUnaryMathOperation* instr) { |
4150 ASSERT(ToDoubleRegister(instr->result()).is(d2)); | 4150 ASSERT(ToDoubleRegister(instr->result()).is(d2)); |
4151 TranscendentalCacheStub stub(TranscendentalCache::COS, | 4151 TranscendentalCacheStub stub(TranscendentalCache::COS, |
4152 TranscendentalCacheStub::UNTAGGED); | 4152 TranscendentalCacheStub::UNTAGGED); |
4153 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 4153 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
4154 } | 4154 } |
4155 | 4155 |
4156 | 4156 |
4157 void LCodeGen::DoMathSin(LUnaryMathOperation* instr) { | 4157 void LCodeGen::DoMathSin(LUnaryMathOperation* instr) { |
4158 ASSERT(ToDoubleRegister(instr->result()).is(d2)); | 4158 ASSERT(ToDoubleRegister(instr->result()).is(d2)); |
4159 TranscendentalCacheStub stub(TranscendentalCache::SIN, | 4159 TranscendentalCacheStub stub(TranscendentalCache::SIN, |
4160 TranscendentalCacheStub::UNTAGGED); | 4160 TranscendentalCacheStub::UNTAGGED); |
4161 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 4161 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
4162 } | 4162 } |
4163 | 4163 |
4164 | 4164 |
4165 void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) { | 4165 void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) { |
4166 switch (instr->op()) { | 4166 switch (instr->op()) { |
4167 case kMathAbs: | 4167 case kMathAbs: |
4168 DoMathAbs(instr); | 4168 DoMathAbs(instr); |
4169 break; | 4169 break; |
4170 case kMathFloor: | 4170 case kMathFloor: |
4171 DoMathFloor(instr); | 4171 DoMathFloor(instr); |
(...skipping 71 matching lines...)
4243 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 4243 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
4244 } | 4244 } |
4245 | 4245 |
4246 | 4246 |
4247 void LCodeGen::DoCallFunction(LCallFunction* instr) { | 4247 void LCodeGen::DoCallFunction(LCallFunction* instr) { |
4248 ASSERT(ToRegister(instr->function()).is(r1)); | 4248 ASSERT(ToRegister(instr->function()).is(r1)); |
4249 ASSERT(ToRegister(instr->result()).is(r0)); | 4249 ASSERT(ToRegister(instr->result()).is(r0)); |
4250 | 4250 |
4251 int arity = instr->arity(); | 4251 int arity = instr->arity(); |
4252 CallFunctionStub stub(arity, NO_CALL_FUNCTION_FLAGS); | 4252 CallFunctionStub stub(arity, NO_CALL_FUNCTION_FLAGS); |
4253 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 4253 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
4254 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 4254 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
4255 } | 4255 } |
4256 | 4256 |
4257 | 4257 |
4258 void LCodeGen::DoCallGlobal(LCallGlobal* instr) { | 4258 void LCodeGen::DoCallGlobal(LCallGlobal* instr) { |
4259 ASSERT(ToRegister(instr->result()).is(r0)); | 4259 ASSERT(ToRegister(instr->result()).is(r0)); |
4260 | 4260 |
4261 int arity = instr->arity(); | 4261 int arity = instr->arity(); |
4262 RelocInfo::Mode mode = RelocInfo::CODE_TARGET_CONTEXT; | 4262 RelocInfo::Mode mode = RelocInfo::CODE_TARGET_CONTEXT; |
4263 Handle<Code> ic = | 4263 Handle<Code> ic = |
(...skipping 13 matching lines...)
4277 R1_UNINITIALIZED); | 4277 R1_UNINITIALIZED); |
4278 } | 4278 } |
4279 | 4279 |
4280 | 4280 |
4281 void LCodeGen::DoCallNew(LCallNew* instr) { | 4281 void LCodeGen::DoCallNew(LCallNew* instr) { |
4282 ASSERT(ToRegister(instr->constructor()).is(r1)); | 4282 ASSERT(ToRegister(instr->constructor()).is(r1)); |
4283 ASSERT(ToRegister(instr->result()).is(r0)); | 4283 ASSERT(ToRegister(instr->result()).is(r0)); |
4284 | 4284 |
4285 CallConstructStub stub(NO_CALL_FUNCTION_FLAGS); | 4285 CallConstructStub stub(NO_CALL_FUNCTION_FLAGS); |
4286 __ mov(r0, Operand(instr->arity())); | 4286 __ mov(r0, Operand(instr->arity())); |
4287 CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); | 4287 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); |
4288 } | 4288 } |
4289 | 4289 |
4290 | 4290 |
4291 void LCodeGen::DoCallRuntime(LCallRuntime* instr) { | 4291 void LCodeGen::DoCallRuntime(LCallRuntime* instr) { |
4292 CallRuntime(instr->function(), instr->arity(), instr); | 4292 CallRuntime(instr->function(), instr->arity(), instr); |
4293 } | 4293 } |
4294 | 4294 |
4295 | 4295 |
4296 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { | 4296 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { |
4297 Register object = ToRegister(instr->object()); | 4297 Register object = ToRegister(instr->object()); |
(...skipping 341 matching lines...)
4639 Register temp = ToRegister(instr->temp()); | 4639 Register temp = ToRegister(instr->temp()); |
4640 __ TestJSArrayForAllocationSiteInfo(object, temp); | 4640 __ TestJSArrayForAllocationSiteInfo(object, temp); |
4641 DeoptimizeIf(eq, instr->environment()); | 4641 DeoptimizeIf(eq, instr->environment()); |
4642 } | 4642 } |
4643 | 4643 |
4644 | 4644 |
4645 void LCodeGen::DoStringAdd(LStringAdd* instr) { | 4645 void LCodeGen::DoStringAdd(LStringAdd* instr) { |
4646 __ push(ToRegister(instr->left())); | 4646 __ push(ToRegister(instr->left())); |
4647 __ push(ToRegister(instr->right())); | 4647 __ push(ToRegister(instr->right())); |
4648 StringAddStub stub(NO_STRING_CHECK_IN_STUB); | 4648 StringAddStub stub(NO_STRING_CHECK_IN_STUB); |
4649 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 4649 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
4650 } | 4650 } |
4651 | 4651 |
4652 | 4652 |
4653 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) { | 4653 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) { |
4654 class DeferredStringCharCodeAt: public LDeferredCode { | 4654 class DeferredStringCharCodeAt: public LDeferredCode { |
4655 public: | 4655 public: |
4656 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr) | 4656 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr) |
4657 : LDeferredCode(codegen), instr_(instr) { } | 4657 : LDeferredCode(codegen), instr_(instr) { } |
4658 virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); } | 4658 virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); } |
4659 virtual LInstruction* instr() { return instr_; } | 4659 virtual LInstruction* instr() { return instr_; } |
(...skipping 1041 matching lines...)
5701 __ mov(r1, Operand(isolate()->factory()->empty_fixed_array())); | 5701 __ mov(r1, Operand(isolate()->factory()->empty_fixed_array())); |
5702 __ Push(r3, r2, r1); | 5702 __ Push(r3, r2, r1); |
5703 | 5703 |
5704 // Pick the right runtime function or stub to call. | 5704 // Pick the right runtime function or stub to call. |
5705 int length = instr->hydrogen()->length(); | 5705 int length = instr->hydrogen()->length(); |
5706 if (instr->hydrogen()->IsCopyOnWrite()) { | 5706 if (instr->hydrogen()->IsCopyOnWrite()) { |
5707 ASSERT(instr->hydrogen()->depth() == 1); | 5707 ASSERT(instr->hydrogen()->depth() == 1); |
5708 FastCloneShallowArrayStub::Mode mode = | 5708 FastCloneShallowArrayStub::Mode mode = |
5709 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS; | 5709 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS; |
5710 FastCloneShallowArrayStub stub(mode, DONT_TRACK_ALLOCATION_SITE, length); | 5710 FastCloneShallowArrayStub stub(mode, DONT_TRACK_ALLOCATION_SITE, length); |
5711 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 5711 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
5712 } else if (instr->hydrogen()->depth() > 1) { | 5712 } else if (instr->hydrogen()->depth() > 1) { |
5713 CallRuntime(Runtime::kCreateArrayLiteral, 3, instr); | 5713 CallRuntime(Runtime::kCreateArrayLiteral, 3, instr); |
5714 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) { | 5714 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) { |
5715 CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr); | 5715 CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr); |
5716 } else { | 5716 } else { |
5717 FastCloneShallowArrayStub::Mode mode = | 5717 FastCloneShallowArrayStub::Mode mode = |
5718 boilerplate_elements_kind == FAST_DOUBLE_ELEMENTS | 5718 boilerplate_elements_kind == FAST_DOUBLE_ELEMENTS |
5719 ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS | 5719 ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS |
5720 : FastCloneShallowArrayStub::CLONE_ELEMENTS; | 5720 : FastCloneShallowArrayStub::CLONE_ELEMENTS; |
5721 FastCloneShallowArrayStub stub(mode, allocation_site_mode, length); | 5721 FastCloneShallowArrayStub stub(mode, allocation_site_mode, length); |
5722 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 5722 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
5723 } | 5723 } |
5724 } | 5724 } |
5725 | 5725 |
5726 | 5726 |
5727 void LCodeGen::EmitDeepCopy(Handle<JSObject> object, | 5727 void LCodeGen::EmitDeepCopy(Handle<JSObject> object, |
5728 Register result, | 5728 Register result, |
5729 Register source, | 5729 Register source, |
5730 int* offset, | 5730 int* offset, |
5731 AllocationSiteMode mode) { | 5731 AllocationSiteMode mode) { |
5732 ASSERT(!source.is(r2)); | 5732 ASSERT(!source.is(r2)); |
(...skipping 173 matching lines...)
5906 int properties_count = constant_properties->length() / 2; | 5906 int properties_count = constant_properties->length() / 2; |
5907 if (instr->hydrogen()->depth() > 1) { | 5907 if (instr->hydrogen()->depth() > 1) { |
5908 __ Push(r3, r2, r1, r0); | 5908 __ Push(r3, r2, r1, r0); |
5909 CallRuntime(Runtime::kCreateObjectLiteral, 4, instr); | 5909 CallRuntime(Runtime::kCreateObjectLiteral, 4, instr); |
5910 } else if (flags != ObjectLiteral::kFastElements || | 5910 } else if (flags != ObjectLiteral::kFastElements || |
5911 properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) { | 5911 properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) { |
5912 __ Push(r3, r2, r1, r0); | 5912 __ Push(r3, r2, r1, r0); |
5913 CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr); | 5913 CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr); |
5914 } else { | 5914 } else { |
5915 FastCloneShallowObjectStub stub(properties_count); | 5915 FastCloneShallowObjectStub stub(properties_count); |
5916 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 5916 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
5917 } | 5917 } |
5918 } | 5918 } |
5919 | 5919 |
5920 | 5920 |
5921 void LCodeGen::DoToFastProperties(LToFastProperties* instr) { | 5921 void LCodeGen::DoToFastProperties(LToFastProperties* instr) { |
5922 ASSERT(ToRegister(instr->value()).is(r0)); | 5922 ASSERT(ToRegister(instr->value()).is(r0)); |
5923 __ push(r0); | 5923 __ push(r0); |
5924 CallRuntime(Runtime::kToFastProperties, 1, instr); | 5924 CallRuntime(Runtime::kToFastProperties, 1, instr); |
5925 } | 5925 } |
5926 | 5926 |
(...skipping 53 matching lines...)
5980 | 5980 |
5981 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { | 5981 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { |
5982 // Use the fast case closure allocation code that allocates in new | 5982 // Use the fast case closure allocation code that allocates in new |
5983 // space for nested functions that don't need literals cloning. | 5983 // space for nested functions that don't need literals cloning. |
5984 Handle<SharedFunctionInfo> shared_info = instr->shared_info(); | 5984 Handle<SharedFunctionInfo> shared_info = instr->shared_info(); |
5985 bool pretenure = instr->hydrogen()->pretenure(); | 5985 bool pretenure = instr->hydrogen()->pretenure(); |
5986 if (!pretenure && shared_info->num_literals() == 0) { | 5986 if (!pretenure && shared_info->num_literals() == 0) { |
5987 FastNewClosureStub stub(shared_info->language_mode()); | 5987 FastNewClosureStub stub(shared_info->language_mode()); |
5988 __ mov(r1, Operand(shared_info)); | 5988 __ mov(r1, Operand(shared_info)); |
5989 __ push(r1); | 5989 __ push(r1); |
5990 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 5990 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
5991 } else { | 5991 } else { |
5992 __ mov(r2, Operand(shared_info)); | 5992 __ mov(r2, Operand(shared_info)); |
5993 __ mov(r1, Operand(pretenure | 5993 __ mov(r1, Operand(pretenure |
5994 ? factory()->true_value() | 5994 ? factory()->true_value() |
5995 : factory()->false_value())); | 5995 : factory()->false_value())); |
5996 __ Push(cp, r2, r1); | 5996 __ Push(cp, r2, r1); |
5997 CallRuntime(Runtime::kNewClosure, 3, instr); | 5997 CallRuntime(Runtime::kNewClosure, 3, instr); |
5998 } | 5998 } |
5999 } | 5999 } |
6000 | 6000 |
(...skipping 217 matching lines...)
6218 // There is no LLazyBailout instruction for stack-checks. We have to | 6218 // There is no LLazyBailout instruction for stack-checks. We have to |
6219 // prepare for lazy deoptimization explicitly here. | 6219 // prepare for lazy deoptimization explicitly here. |
6220 if (instr->hydrogen()->is_function_entry()) { | 6220 if (instr->hydrogen()->is_function_entry()) { |
6221 // Perform stack overflow check. | 6221 // Perform stack overflow check. |
6222 Label done; | 6222 Label done; |
6223 __ LoadRoot(ip, Heap::kStackLimitRootIndex); | 6223 __ LoadRoot(ip, Heap::kStackLimitRootIndex); |
6224 __ cmp(sp, Operand(ip)); | 6224 __ cmp(sp, Operand(ip)); |
6225 __ b(hs, &done); | 6225 __ b(hs, &done); |
6226 StackCheckStub stub; | 6226 StackCheckStub stub; |
6227 PredictableCodeSizeScope predictable(masm_, 2 * Assembler::kInstrSize); | 6227 PredictableCodeSizeScope predictable(masm_, 2 * Assembler::kInstrSize); |
6228 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 6228 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
6229 EnsureSpaceForLazyDeopt(); | 6229 EnsureSpaceForLazyDeopt(); |
6230 __ bind(&done); | 6230 __ bind(&done); |
6231 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | 6231 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); |
6232 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | 6232 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
6233 } else { | 6233 } else { |
6234 ASSERT(instr->hydrogen()->is_backwards_branch()); | 6234 ASSERT(instr->hydrogen()->is_backwards_branch()); |
6235 // Perform stack overflow check if this goto needs it before jumping. | 6235 // Perform stack overflow check if this goto needs it before jumping. |
6236 DeferredStackCheck* deferred_stack_check = | 6236 DeferredStackCheck* deferred_stack_check = |
6237 new(zone()) DeferredStackCheck(this, instr); | 6237 new(zone()) DeferredStackCheck(this, instr); |
6238 __ LoadRoot(ip, Heap::kStackLimitRootIndex); | 6238 __ LoadRoot(ip, Heap::kStackLimitRootIndex); |
(...skipping 117 matching lines...)
6356 __ sub(scratch, result, Operand(index, LSL, kPointerSizeLog2 - kSmiTagSize)); | 6356 __ sub(scratch, result, Operand(index, LSL, kPointerSizeLog2 - kSmiTagSize)); |
6357 __ ldr(result, FieldMemOperand(scratch, | 6357 __ ldr(result, FieldMemOperand(scratch, |
6358 FixedArray::kHeaderSize - kPointerSize)); | 6358 FixedArray::kHeaderSize - kPointerSize)); |
6359 __ bind(&done); | 6359 __ bind(&done); |
6360 } | 6360 } |
6361 | 6361 |
6362 | 6362 |
6363 #undef __ | 6363 #undef __ |
6364 | 6364 |
6365 } } // namespace v8::internal | 6365 } } // namespace v8::internal |