Chromium Code Reviews

Unified Diff: src/mips/lithium-codegen-mips.cc

Issue 12317141: Added Isolate parameter to CodeStub::GetCode(). (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Fixed whitespace. Rebased. Created 7 years, 9 months ago
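
Every hunk in this file follows the same pattern: stub call sites that used to call CodeStub::GetCode() with no arguments now pass LCodeGen's isolate() explicitly, and the CompareIC::GetUninitialized() calls gain a leading isolate() argument in the same way. Below is a minimal before/after sketch of that call-site pattern; the updated GetCode() declaration is shown only as an assumed shape, since the authoritative signature lives in src/code-stubs.h, which is not part of this file's diff.

    // Before this patch: no Isolate argument at the call site.
    RegExpExecStub stub;
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);

    // After this patch: the caller supplies its Isolate explicitly.
    RegExpExecStub stub;
    CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);

    // Assumed shape of the updated declaration in src/code-stubs.h:
    //   Handle<Code> CodeStub::GetCode(Isolate* isolate);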
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 1034 matching lines...)
 void LCodeGen::DoParameter(LParameter* instr) {
   // Nothing to do.
 }
 
 
 void LCodeGen::DoCallStub(LCallStub* instr) {
   ASSERT(ToRegister(instr->result()).is(v0));
   switch (instr->hydrogen()->major_key()) {
     case CodeStub::RegExpConstructResult: {
       RegExpConstructResultStub stub;
-      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+      CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
       break;
     }
     case CodeStub::RegExpExec: {
       RegExpExecStub stub;
-      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+      CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
       break;
     }
     case CodeStub::SubString: {
       SubStringStub stub;
-      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+      CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
       break;
     }
     case CodeStub::NumberToString: {
       NumberToStringStub stub;
-      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+      CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
       break;
     }
     case CodeStub::StringAdd: {
       StringAddStub stub(NO_STRING_ADD_FLAGS);
-      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+      CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
       break;
     }
     case CodeStub::StringCompare: {
       StringCompareStub stub;
-      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+      CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
       break;
     }
     case CodeStub::TranscendentalCache: {
       __ lw(a0, MemOperand(sp, 0));
       TranscendentalCacheStub stub(instr->transcendental_type(),
                                    TranscendentalCacheStub::TAGGED);
-      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+      CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
       break;
     }
     default:
       UNREACHABLE();
   }
 }
 
 
 void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
   // Nothing to do.
(...skipping 642 matching lines...)
   }
 }
 
 
 void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
   ASSERT(ToRegister(instr->left()).is(a1));
   ASSERT(ToRegister(instr->right()).is(a0));
   ASSERT(ToRegister(instr->result()).is(v0));
 
   BinaryOpStub stub(instr->op(), NO_OVERWRITE);
-  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+  CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
   // Other arch use a nop here, to signal that there is no inlined
   // patchable code. Mips does not need the nop, since our marker
   // instruction (andi zero_reg) will never be used in normal code.
 }
 
 
 int LCodeGen::GetNextEmittedBlock(int block) {
   for (int i = block + 1; i < graph()->blocks()->length(); ++i) {
     LLabel* label = chunk_->GetLabel(i);
     if (!label->HasReplacement()) return i;
(...skipping 417 matching lines...)
       return kNoCondition;
   }
 }
 
 
 void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) {
   Token::Value op = instr->op();
   int true_block = chunk_->LookupDestination(instr->true_block_id());
   int false_block = chunk_->LookupDestination(instr->false_block_id());
 
-  Handle<Code> ic = CompareIC::GetUninitialized(op);
+  Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
   CallCode(ic, RelocInfo::CODE_TARGET, instr);
 
   Condition condition = ComputeCompareCondition(op);
 
   EmitBranch(true_block, false_block, condition, v0, Operand(zero_reg));
 }
 
 
 static InstanceType TestType(HHasInstanceTypeAndBranch* instr) {
   InstanceType from = instr->from();
(...skipping 157 matching lines...)
 
 
 void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
   Label true_label, done;
   ASSERT(ToRegister(instr->left()).is(a0));  // Object is in a0.
   ASSERT(ToRegister(instr->right()).is(a1));  // Function is in a1.
   Register result = ToRegister(instr->result());
   ASSERT(result.is(v0));
 
   InstanceofStub stub(InstanceofStub::kArgsInRegisters);
-  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+  CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
 
   __ Branch(&true_label, eq, result, Operand(zero_reg));
   __ li(result, Operand(factory()->false_value()));
   __ Branch(&done);
   __ bind(&true_label);
   __ li(result, Operand(factory()->true_value()));
   __ bind(&done);
 }
 
 
(...skipping 99 matching lines...)
   __ LoadHeapObject(InstanceofStub::right(), instr->function());
   static const int kAdditionalDelta = 7;
   int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta;
   Label before_push_delta;
   __ bind(&before_push_delta);
   {
     Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
     __ li(temp, Operand(delta * kPointerSize), CONSTANT_SIZE);
     __ StoreToSafepointRegisterSlot(temp, temp);
   }
-  CallCodeGeneric(stub.GetCode(),
+  CallCodeGeneric(stub.GetCode(isolate()),
                   RelocInfo::CODE_TARGET,
                   instr,
                   RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
   LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment();
   safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
   // Put the result value into the result register slot and
   // restore all registers.
   __ StoreToSafepointRegisterSlot(result, result);
 }
 
 
 void LCodeGen::DoCmpT(LCmpT* instr) {
   Token::Value op = instr->op();
 
-  Handle<Code> ic = CompareIC::GetUninitialized(op);
+  Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
   CallCode(ic, RelocInfo::CODE_TARGET, instr);
   // On MIPS there is no need for a "no inlined smi code" marker (nop).
 
   Condition condition = ComputeCompareCondition(op);
   // A minor optimization that relies on LoadRoot always emitting one
   // instruction.
   Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm());
   Label done;
   __ Branch(USE_DELAY_SLOT, &done, condition, v0, Operand(zero_reg));
   __ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex);
(...skipping 1277 matching lines...)
   MathExpGenerator::EmitMathExp(
       masm(), input, result, double_scratch1, double_scratch2,
       temp1, temp2, scratch0());
 }
 
 
 void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
   ASSERT(ToDoubleRegister(instr->result()).is(f4));
   TranscendentalCacheStub stub(TranscendentalCache::LOG,
                                TranscendentalCacheStub::UNTAGGED);
-  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+  CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
 }
 
 
 void LCodeGen::DoMathTan(LUnaryMathOperation* instr) {
   ASSERT(ToDoubleRegister(instr->result()).is(f4));
   TranscendentalCacheStub stub(TranscendentalCache::TAN,
                                TranscendentalCacheStub::UNTAGGED);
-  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+  CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
 }
 
 
 void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
   ASSERT(ToDoubleRegister(instr->result()).is(f4));
   TranscendentalCacheStub stub(TranscendentalCache::COS,
                                TranscendentalCacheStub::UNTAGGED);
-  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+  CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
 }
 
 
 void LCodeGen::DoMathSin(LUnaryMathOperation* instr) {
   ASSERT(ToDoubleRegister(instr->result()).is(f4));
   TranscendentalCacheStub stub(TranscendentalCache::SIN,
                                TranscendentalCacheStub::UNTAGGED);
-  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+  CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
 }
 
 
 void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
   switch (instr->op()) {
     case kMathAbs:
       DoMathAbs(instr);
       break;
     case kMathFloor:
       DoMathFloor(instr);
(...skipping 71 matching lines...)
   __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
 }
 
 
 void LCodeGen::DoCallFunction(LCallFunction* instr) {
   ASSERT(ToRegister(instr->function()).is(a1));
   ASSERT(ToRegister(instr->result()).is(v0));
 
   int arity = instr->arity();
   CallFunctionStub stub(arity, NO_CALL_FUNCTION_FLAGS);
-  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+  CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
   __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
 }
 
 
 void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
   ASSERT(ToRegister(instr->result()).is(v0));
 
   int arity = instr->arity();
   RelocInfo::Mode mode = RelocInfo::CODE_TARGET_CONTEXT;
   Handle<Code> ic =
(...skipping 13 matching lines...)
                     A1_UNINITIALIZED);
 }
 
 
 void LCodeGen::DoCallNew(LCallNew* instr) {
   ASSERT(ToRegister(instr->constructor()).is(a1));
   ASSERT(ToRegister(instr->result()).is(v0));
 
   CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
   __ li(a0, Operand(instr->arity()));
-  CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr);
+  CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
 }
 
 
 void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
   CallRuntime(instr->function(), instr->arity(), instr);
 }
 
 
 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
   Register object = ToRegister(instr->object());
(...skipping 352 matching lines...)
   __ TestJSArrayForAllocationSiteInfo(object, temp, ne, &fail);
   DeoptimizeIf(al, instr->environment());
   __ bind(&fail);
 }
 
 
 void LCodeGen::DoStringAdd(LStringAdd* instr) {
   __ push(ToRegister(instr->left()));
   __ push(ToRegister(instr->right()));
   StringAddStub stub(NO_STRING_CHECK_IN_STUB);
-  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+  CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
 }
 
 
 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
   class DeferredStringCharCodeAt: public LDeferredCode {
    public:
     DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr)
         : LDeferredCode(codegen), instr_(instr) { }
     virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); }
     virtual LInstruction* instr() { return instr_; }
(...skipping 1017 matching lines...)
   __ li(a1, Operand(isolate()->factory()->empty_fixed_array()));
   __ Push(a3, a2, a1);
 
   // Pick the right runtime function or stub to call.
   int length = instr->hydrogen()->length();
   if (instr->hydrogen()->IsCopyOnWrite()) {
     ASSERT(instr->hydrogen()->depth() == 1);
     FastCloneShallowArrayStub::Mode mode =
         FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
     FastCloneShallowArrayStub stub(mode, DONT_TRACK_ALLOCATION_SITE, length);
-    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+    CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
   } else if (instr->hydrogen()->depth() > 1) {
     CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
   } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
     CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);
   } else {
     FastCloneShallowArrayStub::Mode mode =
         boilerplate_elements_kind == FAST_DOUBLE_ELEMENTS
             ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
             : FastCloneShallowArrayStub::CLONE_ELEMENTS;
     FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
-    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+    CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
   }
 }
 
 
 void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
                             Register result,
                             Register source,
                             int* offset,
                             AllocationSiteMode mode) {
   ASSERT(!source.is(a2));
(...skipping 173 matching lines...)
 
   // Pick the right runtime function or stub to call.
   int properties_count = constant_properties->length() / 2;
   if (instr->hydrogen()->depth() > 1) {
     CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
   } else if (flags != ObjectLiteral::kFastElements ||
       properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
     CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
   } else {
     FastCloneShallowObjectStub stub(properties_count);
-    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+    CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
   }
 }
 
 
 void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
   ASSERT(ToRegister(instr->value()).is(a0));
   ASSERT(ToRegister(instr->result()).is(v0));
   __ push(a0);
   CallRuntime(Runtime::kToFastProperties, 1, instr);
 }
(...skipping 53 matching lines...)
 
 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
   // Use the fast case closure allocation code that allocates in new
   // space for nested functions that don't need literals cloning.
   Handle<SharedFunctionInfo> shared_info = instr->shared_info();
   bool pretenure = instr->hydrogen()->pretenure();
   if (!pretenure && shared_info->num_literals() == 0) {
     FastNewClosureStub stub(shared_info->language_mode());
     __ li(a1, Operand(shared_info));
     __ push(a1);
-    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+    CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
   } else {
     __ li(a2, Operand(shared_info));
     __ li(a1, Operand(pretenure
                       ? factory()->true_value()
                       : factory()->false_value()));
     __ Push(cp, a2, a1);
     CallRuntime(Runtime::kNewClosure, 3, instr);
   }
 }
 
(...skipping 252 matching lines...)
   ASSERT(instr->HasEnvironment());
   LEnvironment* env = instr->environment();
   // There is no LLazyBailout instruction for stack-checks. We have to
   // prepare for lazy deoptimization explicitly here.
   if (instr->hydrogen()->is_function_entry()) {
     // Perform stack overflow check.
     Label done;
     __ LoadRoot(at, Heap::kStackLimitRootIndex);
     __ Branch(&done, hs, sp, Operand(at));
     StackCheckStub stub;
-    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+    CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
     EnsureSpaceForLazyDeopt();
     __ bind(&done);
     RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
     safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
   } else {
     ASSERT(instr->hydrogen()->is_backwards_branch());
     // Perform stack overflow check if this goto needs it before jumping.
     DeferredStackCheck* deferred_stack_check =
         new(zone()) DeferredStackCheck(this, instr);
     __ LoadRoot(at, Heap::kStackLimitRootIndex);
(...skipping 114 matching lines...)
   __ Subu(scratch, result, scratch);
   __ lw(result, FieldMemOperand(scratch,
                                 FixedArray::kHeaderSize - kPointerSize));
   __ bind(&done);
 }
 
 
 #undef __
 
 } }  // namespace v8::internal
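
For readers wondering where the isolate() used throughout the new call sites comes from: it is LCodeGen's own accessor rather than a fresh lookup at each site. A rough sketch of how that accessor is assumed to be declared in src/mips/lithium-codegen-mips.h of this era follows; the member and field names here are illustrative and are not quoted from the patch.

    // Sketch only (assumed, not part of this diff): LCodeGen holds the
    // CompilationInfo for the function being compiled and derives the Isolate
    // from it, so each stub instantiation above can pass the isolate along
    // without consulting thread-local state.
    class LCodeGen BASE_EMBEDDED {
     public:
      Isolate* isolate() const { return info_->isolate(); }
      Factory* factory() const { return isolate()->factory(); }
      // ...
     private:
      CompilationInfo* info_;
    };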