Chromium Code Reviews

Side by Side Diff: src/mips/code-stubs-mips.cc

Issue 8112008: MIPS: port all relevant commits since the new-gc was landed. (Closed)
Patch Set: rebased on r9598. Created 9 years, 2 months ago
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 172 matching lines...)
183 __ mov(cp, v0); 183 __ mov(cp, v0);
184 __ Pop(); 184 __ Pop();
185 __ Ret(); 185 __ Ret();
186 186
187 // Need to collect. Call into runtime system. 187 // Need to collect. Call into runtime system.
188 __ bind(&gc); 188 __ bind(&gc);
189 __ TailCallRuntime(Runtime::kNewFunctionContext, 1, 1); 189 __ TailCallRuntime(Runtime::kNewFunctionContext, 1, 1);
190 } 190 }
191 191
192 192
193 void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
194 // Stack layout on entry:
195 //
196 // [sp]: function.
197 // [sp + kPointerSize]: serialized scope info.
198
199 // Try to allocate the context in new space.
200 Label gc;
201 int length = slots_ + Context::MIN_CONTEXT_SLOTS;
202 __ AllocateInNewSpace(FixedArray::SizeFor(length),
203 v0, a1, a2, &gc, TAG_OBJECT);
204
205 // Load the function from the stack.
206 __ lw(a3, MemOperand(sp, 0));
207
208 // Load the serialized scope info from the stack.
209 __ lw(a1, MemOperand(sp, 1 * kPointerSize));
210
211 // Set up the object header.
212 __ LoadRoot(a2, Heap::kBlockContextMapRootIndex);
213 __ sw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
214 __ li(a2, Operand(Smi::FromInt(length)));
215 __ sw(a2, FieldMemOperand(v0, FixedArray::kLengthOffset));
216
217 // If this block context is nested in the global context we get a smi
218 // sentinel instead of a function. The block context should get the
219 // canonical empty function of the global context as its closure, which
220 // we still have to look up.
221 Label after_sentinel;
222 __ JumpIfNotSmi(a3, &after_sentinel);
223 if (FLAG_debug_code) {
224 const char* message = "Expected 0 as a Smi sentinel";
225 __ Assert(eq, message, a3, Operand(zero_reg));
226 }
227 __ lw(a3, GlobalObjectOperand());
228 __ lw(a3, FieldMemOperand(a3, GlobalObject::kGlobalContextOffset));
229 __ lw(a3, ContextOperand(a3, Context::CLOSURE_INDEX));
230 __ bind(&after_sentinel);
231
232 // Set up the fixed slots.
233 __ sw(a3, ContextOperand(v0, Context::CLOSURE_INDEX));
234 __ sw(cp, ContextOperand(v0, Context::PREVIOUS_INDEX));
235 __ sw(a1, ContextOperand(v0, Context::EXTENSION_INDEX));
236
237 // Copy the global object from the previous context.
238 __ lw(a1, ContextOperand(cp, Context::GLOBAL_INDEX));
239 __ sw(a1, ContextOperand(v0, Context::GLOBAL_INDEX));
240
241 // Initialize the rest of the slots to the hole value.
242 __ LoadRoot(a1, Heap::kTheHoleValueRootIndex);
243 for (int i = 0; i < slots_; i++) {
244 __ sw(a1, ContextOperand(v0, i + Context::MIN_CONTEXT_SLOTS));
245 }
246
247 // Remove the on-stack arguments and return.
248 __ mov(cp, v0);
249 __ Addu(sp, sp, Operand(2 * kPointerSize));
250 __ Ret();
251
252 // Need to collect. Call into runtime system.
253 __ bind(&gc);
254 __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1);
255 }
256
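A note for reviewers sanity-checking the AllocateInNewSpace request in the new stub above: a block context is laid out as a FixedArray, so the size is the array header plus one pointer per slot. A minimal standalone sketch, assuming a 32-bit target and a two-word array header; the MIN_CONTEXT_SLOTS value is an assumption, not taken from this patch:

// Back-of-the-envelope version of the size computed by
// FixedArray::SizeFor(length) in FastNewBlockContextStub::Generate.
// Assumes MIPS32 (4-byte pointers), a two-word FixedArray header
// (map + length) and MIN_CONTEXT_SLOTS == 4 (the four fixed slots the
// stub stores: closure, previous, extension, global).
const int kPointerSizeSketch = 4;
const int kFixedArrayHeaderSize = 2 * kPointerSizeSketch;  // map + length
const int kMinContextSlots = 4;                            // assumed value

int BlockContextAllocationSize(int slots) {
  int length = slots + kMinContextSlots;  // as in the stub above
  return kFixedArrayHeaderSize + length * kPointerSizeSketch;
}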
257
193 void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) { 258 void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
194 // Stack layout on entry: 259 // Stack layout on entry:
195 // [sp]: constant elements. 260 // [sp]: constant elements.
196 // [sp + kPointerSize]: literal index. 261 // [sp + kPointerSize]: literal index.
197 // [sp + (2 * kPointerSize)]: literals array. 262 // [sp + (2 * kPointerSize)]: literals array.
198 263
199 // All sizes here are multiples of kPointerSize. 264 // All sizes here are multiples of kPointerSize.
200 int elements_size = (length_ > 0) ? FixedArray::SizeFor(length_) : 0; 265 int elements_size = (length_ > 0) ? FixedArray::SizeFor(length_) : 0;
201 int size = JSArray::kSize + elements_size; 266 int size = JSArray::kSize + elements_size;
202 267
(...skipping 676 matching lines...)
879 __ sw(v1, FieldMemOperand(heap_number_result, HeapNumber::kExponentOffset)); 944 __ sw(v1, FieldMemOperand(heap_number_result, HeapNumber::kExponentOffset));
880 __ sw(v0, FieldMemOperand(heap_number_result, HeapNumber::kMantissaOffset)); 945 __ sw(v0, FieldMemOperand(heap_number_result, HeapNumber::kMantissaOffset));
881 } 946 }
882 // Place heap_number_result in v0 and return to the pushed return address. 947 // Place heap_number_result in v0 and return to the pushed return address.
883 __ mov(v0, heap_number_result); 948 __ mov(v0, heap_number_result);
884 __ pop(ra); 949 __ pop(ra);
885 __ Ret(); 950 __ Ret();
886 } 951 }
887 952
888 953
889 bool WriteInt32ToHeapNumberStub::CompilingCallsToThisStubIsGCSafe() { 954 bool WriteInt32ToHeapNumberStub::IsPregenerated() {
890 // These variants are compiled ahead of time. See next method. 955 // These variants are compiled ahead of time. See next method.
891 if (the_int_.is(a1) && 956 if (the_int_.is(a1) &&
892 the_heap_number_.is(v0) && 957 the_heap_number_.is(v0) &&
893 scratch_.is(a2) && 958 scratch_.is(a2) &&
894 sign_.is(a3)) { 959 sign_.is(a3)) {
895 return true; 960 return true;
896 } 961 }
897 if (the_int_.is(a2) && 962 if (the_int_.is(a2) &&
898 the_heap_number_.is(v0) && 963 the_heap_number_.is(v0) &&
899 scratch_.is(a3) && 964 scratch_.is(a3) &&
900 sign_.is(a0)) { 965 sign_.is(a0)) {
901 return true; 966 return true;
902 } 967 }
903 // Other register combinations are generated as and when they are needed, 968 // Other register combinations are generated as and when they are needed,
904 // so it is unsafe to call them from stubs (we can't generate a stub while 969 // so it is unsafe to call them from stubs (we can't generate a stub while
905 // we are generating a stub). 970 // we are generating a stub).
906 return false; 971 return false;
907 } 972 }
908 973
909 974
910 void WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime() { 975 void WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime() {
911 WriteInt32ToHeapNumberStub stub1(a1, v0, a2, a3); 976 WriteInt32ToHeapNumberStub stub1(a1, v0, a2, a3);
912 WriteInt32ToHeapNumberStub stub2(a2, v0, a3, a0); 977 WriteInt32ToHeapNumberStub stub2(a2, v0, a3, a0);
913 Handle<Code> code1 = stub1.GetCode(); 978 stub1.GetCode()->set_is_pregenerated(true);
914 Handle<Code> code2 = stub2.GetCode(); 979 stub2.GetCode()->set_is_pregenerated(true);
915 } 980 }
916 981
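The IsPregenerated / GenerateFixedRegStubsAheadOfTime pair above is the pattern this patch applies to several stubs: compile the known register combinations ahead of time and report true only for those, so that calling the stub can never force nested stub generation at a GC-unsafe point. A minimal sketch of the contract, with hypothetical stand-in names:

// Sketch of the pregeneration contract (names hypothetical; the real
// classes here are WriteInt32ToHeapNumberStub, CEntryStub,
// StoreBufferOverflowStub and RecordWriteStub).
struct CodeSketch {
  bool is_pregenerated = false;
  void set_is_pregenerated(bool value) { is_pregenerated = value; }
};

class StubSketch {
 public:
  explicit StubSketch(bool fixed_registers) : fixed_(fixed_registers) {}

  // True only for combinations compiled ahead of time below; calling such
  // a stub never triggers stub generation while generating a stub.
  bool IsPregenerated() const { return fixed_; }

  CodeSketch* GetCode() { return &code_; }

  static void GenerateFixedRegStubsAheadOfTime() {
    StubSketch stub(true);                      // a fixed combination
    stub.GetCode()->set_is_pregenerated(true);  // as in the diff above
  }

 private:
  bool fixed_;
  CodeSketch code_;
};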
917 982
918 // See comment for class; this does NOT work for int32's that are in Smi range. 983 // See comment for class; this does NOT work for int32's that are in Smi range.
919 void WriteInt32ToHeapNumberStub::Generate(MacroAssembler* masm) { 984 void WriteInt32ToHeapNumberStub::Generate(MacroAssembler* masm) {
920 Label max_negative_int; 985 Label max_negative_int;
921 // the_int_ has the answer, which is a signed int32 but not a Smi. 986 // the_int_ has the answer, which is a signed int32 but not a Smi.
922 // We test for the special value that has a different exponent. 987 // We test for the special value that has a different exponent.
923 STATIC_ASSERT(HeapNumber::kSignMask == 0x80000000u); 988 STATIC_ASSERT(HeapNumber::kSignMask == 0x80000000u);
924 // Test sign, and save for later conditionals. 989 // Test sign, and save for later conditionals.
(...skipping 350 matching lines...)
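Context for the WriteInt32ToHeapNumberStub excerpt cut off above: the special-cased value is kMinInt (0x80000000), the one int32 whose magnitude cannot be obtained by 32-bit negation; its double encoding is simply sign = 1, biased exponent = 31 + 1023 = 1054, zero mantissa. A standalone check in plain C++, illustration only:

// Prints c1e0000000000000: sign 1, exponent 0x41e (31 + 1023), mantissa 0.
#include <cstdint>
#include <cstdio>
#include <cstring>

int main() {
  int32_t n = INT32_MIN;              // 0x80000000; -n overflows back to n
  double d = static_cast<double>(n);  // exactly -2^31
  uint64_t bits;
  std::memcpy(&bits, &d, sizeof(bits));
  std::printf("%016llx\n", static_cast<unsigned long long>(bits));
  return 0;
}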
1275 __ PrepareCallCFunction(0, 2, t4); 1340 __ PrepareCallCFunction(0, 2, t4);
1276 if (!IsMipsSoftFloatABI) { 1341 if (!IsMipsSoftFloatABI) {
1277 // We are not using MIPS FPU instructions, and parameters for the runtime 1342 // We are not using MIPS FPU instructions, and parameters for the runtime
1278 // function call are prepared in a0-a3 registers, but the function we are 1343 // function call are prepared in a0-a3 registers, but the function we are
1279 // calling is compiled with the hard-float flag and expects the hard-float ABI 1344 // calling is compiled with the hard-float flag and expects the hard-float ABI
1280 // (parameters in f12/f14 registers). We need to copy parameters from 1345 // (parameters in f12/f14 registers). We need to copy parameters from
1281 // a0-a3 registers to f12/f14 register pairs. 1346 // a0-a3 registers to f12/f14 register pairs.
1282 __ Move(f12, a0, a1); 1347 __ Move(f12, a0, a1);
1283 __ Move(f14, a2, a3); 1348 __ Move(f14, a2, a3);
1284 } 1349 }
1350
1351 AllowExternalCallThatCantCauseGC scope(masm);
1285 __ CallCFunction(ExternalReference::compare_doubles(masm->isolate()), 1352 __ CallCFunction(ExternalReference::compare_doubles(masm->isolate()),
1286 0, 2); 1353 0, 2);
1287 __ pop(ra); // Because this function returns int, result is in v0. 1354 __ pop(ra); // Because this function returns int, result is in v0.
1288 __ Ret(); 1355 __ Ret();
1289 } else { 1356 } else {
1290 CpuFeatures::Scope scope(FPU); 1357 CpuFeatures::Scope scope(FPU);
1291 Label equal, less_than; 1358 Label equal, less_than;
1292 __ BranchF(&equal, NULL, eq, f12, f14); 1359 __ BranchF(&equal, NULL, eq, f12, f14);
1293 __ BranchF(&less_than, NULL, lt, f12, f14); 1360 __ BranchF(&less_than, NULL, lt, f12, f14);
1294 1361
(...skipping 11 matching lines...)
1306 } 1373 }
1307 } 1374 }
1308 1375
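On the soft-float path above, each double argument arrives as two 32-bit halves in a GPR pair (a0/a1 and a2/a3), and a hard-float callee expects them in f12/f14, hence the two Move() pairs. A host-side illustration of the two-word split; which half lands in which register depends on the target's endianness, a detail the Move macro hides:

// One double occupies two 32-bit words under the O32 soft-float ABI.
#include <cstdint>
#include <cstdio>
#include <cstring>

int main() {
  double d = 1.5;
  uint32_t halves[2];  // these words would travel in a0/a1 (or a2/a3)
  std::memcpy(halves, &d, sizeof(d));
  std::printf("%08x %08x\n", halves[0], halves[1]);
  return 0;
}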
1309 1376
1310 static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm, 1377 static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
1311 Register lhs, 1378 Register lhs,
1312 Register rhs) { 1379 Register rhs) {
1313 // If either operand is a JS object or an oddball value, then they are 1380 // If either operand is a JS object or an oddball value, then they are
1314 // not equal since their pointers are different. 1381 // not equal since their pointers are different.
1315 // There is no test for undetectability in strict equality. 1382 // There is no test for undetectability in strict equality.
1316 STATIC_ASSERT(LAST_TYPE == LAST_CALLABLE_SPEC_OBJECT_TYPE); 1383 STATIC_ASSERT(LAST_TYPE == LAST_SPEC_OBJECT_TYPE);
1317 Label first_non_object; 1384 Label first_non_object;
1318 // Get the type of the first operand into a2 and compare it with 1385 // Get the type of the first operand into a2 and compare it with
1319 // FIRST_SPEC_OBJECT_TYPE. 1386 // FIRST_SPEC_OBJECT_TYPE.
1320 __ GetObjectType(lhs, a2, a2); 1387 __ GetObjectType(lhs, a2, a2);
1321 __ Branch(&first_non_object, less, a2, Operand(FIRST_SPEC_OBJECT_TYPE)); 1388 __ Branch(&first_non_object, less, a2, Operand(FIRST_SPEC_OBJECT_TYPE));
1322 1389
1323 // Return non-zero. 1390 // Return non-zero.
1324 Label return_not_equal; 1391 Label return_not_equal;
1325 __ bind(&return_not_equal); 1392 __ bind(&return_not_equal);
1326 __ li(v0, Operand(1)); 1393 __ li(v0, Operand(1));
(...skipping 845 matching lines...)
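The assertion change above (LAST_CALLABLE_SPEC_OBJECT_TYPE becomes LAST_SPEC_OBJECT_TYPE) preserves the property the single branch relies on: spec-object types sit at the tail of the instance-type enum, so one lower-bound comparison against FIRST_SPEC_OBJECT_TYPE is enough. A sketch of the invariant with hypothetical enum values:

// Why no upper-bound check is needed (values are illustrative only; the
// real guarantee is exactly what the STATIC_ASSERT above verifies).
enum InstanceTypeSketch {
  SOME_NON_OBJECT_TYPE,       // non-object instance types come first
  FIRST_SPEC_OBJECT_TYPE_SK,  // first JS object type
  LAST_SPEC_OBJECT_TYPE_SK,   // last value of the whole enum
  LAST_TYPE_SK = LAST_SPEC_OBJECT_TYPE_SK
};

inline bool IsSpecObject(InstanceTypeSketch type) {
  return type >= FIRST_SPEC_OBJECT_TYPE_SK;  // lower bound alone suffices
}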
2172 } 2239 }
2173 2240
2174 2241
2175 void BinaryOpStub::GenerateTypeTransitionWithSavedArgs( 2242 void BinaryOpStub::GenerateTypeTransitionWithSavedArgs(
2176 MacroAssembler* masm) { 2243 MacroAssembler* masm) {
2177 UNIMPLEMENTED(); 2244 UNIMPLEMENTED();
2178 } 2245 }
2179 2246
2180 2247
2181 void BinaryOpStub::Generate(MacroAssembler* masm) { 2248 void BinaryOpStub::Generate(MacroAssembler* masm) {
2249 // Explicitly allow generation of nested stubs. It is safe here because
2250 // generation code does not use any raw pointers.
2251 AllowStubCallsScope allow_stub_calls(masm, true);
2182 switch (operands_type_) { 2252 switch (operands_type_) {
2183 case BinaryOpIC::UNINITIALIZED: 2253 case BinaryOpIC::UNINITIALIZED:
2184 GenerateTypeTransition(masm); 2254 GenerateTypeTransition(masm);
2185 break; 2255 break;
2186 case BinaryOpIC::SMI: 2256 case BinaryOpIC::SMI:
2187 GenerateSmiStub(masm); 2257 GenerateSmiStub(masm);
2188 break; 2258 break;
2189 case BinaryOpIC::INT32: 2259 case BinaryOpIC::INT32:
2190 GenerateInt32Stub(masm); 2260 GenerateInt32Stub(masm);
2191 break; 2261 break;
(...skipping 784 matching lines...)
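The AllowStubCallsScope added at the top of BinaryOpStub::Generate is an RAII guard: it enables stub calls for the duration of the function and restores the previous setting on every exit path. A minimal self-contained sketch of the shape, with invented stand-in names:

// RAII guard sketch (AssemblerSketch stands in for MacroAssembler).
struct AssemblerSketch {
  bool allow_stub_calls = false;
};

class AllowStubCallsScopeSketch {
 public:
  AllowStubCallsScopeSketch(AssemblerSketch* masm, bool allow)
      : masm_(masm), previous_(masm->allow_stub_calls) {
    masm_->allow_stub_calls = allow;      // in force for this scope only
  }
  ~AllowStubCallsScopeSketch() {
    masm_->allow_stub_calls = previous_;  // restored on any return path
  }

 private:
  AssemblerSketch* masm_;
  bool previous_;
};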
2976 __ mtc1(a2, double_scratch); 3046 __ mtc1(a2, double_scratch);
2977 __ Cvt_d_uw(double_scratch, double_scratch, single_scratch); 3047 __ Cvt_d_uw(double_scratch, double_scratch, single_scratch);
2978 } 3048 }
2979 3049
2980 // Store the result. 3050 // Store the result.
2981 __ mov(v0, heap_number_result); 3051 __ mov(v0, heap_number_result);
2982 __ sdc1(double_scratch, FieldMemOperand(v0, HeapNumber::kValueOffset)); 3052 __ sdc1(double_scratch, FieldMemOperand(v0, HeapNumber::kValueOffset));
2983 __ Ret(); 3053 __ Ret();
2984 } else { 3054 } else {
2985 // Tail call that writes the int32 in a2 to the heap number in v0, using 3055 // Tail call that writes the int32 in a2 to the heap number in v0, using
2986 // a3 and a1 as scratch. v0 is preserved and returned. 3056 // a3 and a0 as scratch. v0 is preserved and returned.
2987 __ mov(a0, t1); 3057 __ mov(a0, t1);
2988 WriteInt32ToHeapNumberStub stub(a2, v0, a3, a1); 3058 WriteInt32ToHeapNumberStub stub(a2, v0, a3, a0);
2989 __ TailCallStub(&stub); 3059 __ TailCallStub(&stub);
2990 } 3060 }
2991 3061
2992 break; 3062 break;
2993 } 3063 }
2994 3064
2995 default: 3065 default:
2996 UNREACHABLE(); 3066 UNREACHABLE();
2997 } 3067 }
2998 3068
(...skipping 536 matching lines...)
3535 } 3605 }
3536 3606
3537 3607
3538 bool CEntryStub::IsPregenerated() { 3608 bool CEntryStub::IsPregenerated() {
3539 return (!save_doubles_ || ISOLATE->fp_stubs_generated()) && 3609 return (!save_doubles_ || ISOLATE->fp_stubs_generated()) &&
3540 result_size_ == 1; 3610 result_size_ == 1;
3541 } 3611 }
3542 3612
3543 3613
3544 void CodeStub::GenerateStubsAheadOfTime() { 3614 void CodeStub::GenerateStubsAheadOfTime() {
3615 CEntryStub::GenerateAheadOfTime();
3545 WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime(); 3616 WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime();
3617 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime();
3618 RecordWriteStub::GenerateFixedRegStubsAheadOfTime();
3546 } 3619 }
3547 3620
3548 3621
3549 void CodeStub::GenerateFPStubs() { 3622 void CodeStub::GenerateFPStubs() {
3550 CEntryStub save_doubles(1, kSaveFPRegs); 3623 CEntryStub save_doubles(1, kSaveFPRegs);
3551 Handle<Code> code = save_doubles.GetCode(); 3624 Handle<Code> code = save_doubles.GetCode();
3625 code->set_is_pregenerated(true);
3626 StoreBufferOverflowStub stub(kSaveFPRegs);
3627 stub.GetCode()->set_is_pregenerated(true);
3552 code->GetIsolate()->set_fp_stubs_generated(true); 3628 code->GetIsolate()->set_fp_stubs_generated(true);
3553 } 3629 }
3554 3630
3555 3631
3632 void CEntryStub::GenerateAheadOfTime() {
3633 CEntryStub stub(1, kDontSaveFPRegs);
3634 Handle<Code> code = stub.GetCode();
3635 code->set_is_pregenerated(true);
3636 }
3637
3638
3556 void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) { 3639 void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
3557 __ Throw(v0); 3640 __ Throw(v0);
3558 } 3641 }
3559 3642
3560 3643
3561 void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm, 3644 void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm,
3562 UncatchableExceptionType type) { 3645 UncatchableExceptionType type) {
3563 __ ThrowUncatchable(type, v0); 3646 __ ThrowUncatchable(type, v0);
3564 } 3647 }
3565 3648
3566 3649
3567 void CEntryStub::GenerateCore(MacroAssembler* masm, 3650 void CEntryStub::GenerateCore(MacroAssembler* masm,
3568 Label* throw_normal_exception, 3651 Label* throw_normal_exception,
3569 Label* throw_termination_exception, 3652 Label* throw_termination_exception,
3570 Label* throw_out_of_memory_exception, 3653 Label* throw_out_of_memory_exception,
3571 bool do_gc, 3654 bool do_gc,
3572 bool always_allocate) { 3655 bool always_allocate) {
3573 // v0: result parameter for PerformGC, if any 3656 // v0: result parameter for PerformGC, if any
3574 // s0: number of arguments including receiver (C callee-saved) 3657 // s0: number of arguments including receiver (C callee-saved)
3575 // s1: pointer to the first argument (C callee-saved) 3658 // s1: pointer to the first argument (C callee-saved)
3576 // s2: pointer to builtin function (C callee-saved) 3659 // s2: pointer to builtin function (C callee-saved)
3577 3660
3661 Isolate* isolate = masm->isolate();
3662
3578 if (do_gc) { 3663 if (do_gc) {
3579 // Move result passed in v0 into a0 to call PerformGC. 3664 // Move result passed in v0 into a0 to call PerformGC.
3580 __ mov(a0, v0); 3665 __ mov(a0, v0);
3581 __ PrepareCallCFunction(1, 0, a1); 3666 __ PrepareCallCFunction(1, 0, a1);
3582 __ CallCFunction( 3667 __ CallCFunction(ExternalReference::perform_gc_function(isolate), 1, 0);
3583 ExternalReference::perform_gc_function(masm->isolate()),
3584 1, 0);
3585 } 3668 }
3586 3669
3587 ExternalReference scope_depth = 3670 ExternalReference scope_depth =
3588 ExternalReference::heap_always_allocate_scope_depth(masm->isolate()); 3671 ExternalReference::heap_always_allocate_scope_depth(isolate);
3589 if (always_allocate) { 3672 if (always_allocate) {
3590 __ li(a0, Operand(scope_depth)); 3673 __ li(a0, Operand(scope_depth));
3591 __ lw(a1, MemOperand(a0)); 3674 __ lw(a1, MemOperand(a0));
3592 __ Addu(a1, a1, Operand(1)); 3675 __ Addu(a1, a1, Operand(1));
3593 __ sw(a1, MemOperand(a0)); 3676 __ sw(a1, MemOperand(a0));
3594 } 3677 }
3595 3678
3596 // Prepare arguments for C routine: a0 = argc, a1 = argv 3679 // Prepare arguments for C routine: a0 = argc, a1 = argv
3597 __ mov(a0, s0); 3680 __ mov(a0, s0);
3598 __ mov(a1, s1); 3681 __ mov(a1, s1);
(...skipping 68 matching lines...)
3667 STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0); 3750 STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0);
3668 __ andi(t0, v0, ((1 << kFailureTypeTagSize) - 1) << kFailureTagSize); 3751 __ andi(t0, v0, ((1 << kFailureTypeTagSize) - 1) << kFailureTagSize);
3669 __ Branch(&retry, eq, t0, Operand(zero_reg)); 3752 __ Branch(&retry, eq, t0, Operand(zero_reg));
3670 3753
3671 // Special handling of out of memory exceptions. 3754 // Special handling of out of memory exceptions.
3672 Failure* out_of_memory = Failure::OutOfMemoryException(); 3755 Failure* out_of_memory = Failure::OutOfMemoryException();
3673 __ Branch(throw_out_of_memory_exception, eq, 3756 __ Branch(throw_out_of_memory_exception, eq,
3674 v0, Operand(reinterpret_cast<int32_t>(out_of_memory))); 3757 v0, Operand(reinterpret_cast<int32_t>(out_of_memory)));
3675 3758
3676 // Retrieve the pending exception and clear the variable. 3759 // Retrieve the pending exception and clear the variable.
3677 __ li(t0, 3760 __ li(a3, Operand(isolate->factory()->the_hole_value()));
3678 Operand(ExternalReference::the_hole_value_location(masm->isolate())));
3679 __ lw(a3, MemOperand(t0));
3680 __ li(t0, Operand(ExternalReference(Isolate::kPendingExceptionAddress, 3761 __ li(t0, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
3681 masm->isolate()))); 3762 isolate)));
3682 __ lw(v0, MemOperand(t0)); 3763 __ lw(v0, MemOperand(t0));
3683 __ sw(a3, MemOperand(t0)); 3764 __ sw(a3, MemOperand(t0));
3684 3765
3685 // Special handling of termination exceptions which are uncatchable 3766 // Special handling of termination exceptions which are uncatchable
3686 // by javascript code. 3767 // by javascript code.
3687 __ Branch(throw_termination_exception, eq, 3768 __ Branch(throw_termination_exception, eq,
3688 v0, Operand(masm->isolate()->factory()->termination_exception())); 3769 v0, Operand(isolate->factory()->termination_exception()));
3689 3770
3690 // Handle normal exception. 3771 // Handle normal exception.
3691 __ jmp(throw_normal_exception); 3772 __ jmp(throw_normal_exception);
3692 3773
3693 __ bind(&retry); 3774 __ bind(&retry);
3694 // Last failure (v0) will be moved to (a0) for parameter when retrying. 3775 // Last failure (v0) will be moved to (a0) for parameter when retrying.
3695 } 3776 }
3696 3777
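The rewrite above drops the indirection through ExternalReference::the_hole_value_location: the hole value is materialized into a3 directly and still acts as the "no pending exception" sentinel that is swapped into the pending-exception slot. The protocol in plain C++, a sketch with invented names:

// Hole-value sentinel protocol for the pending-exception slot.
#include <cstdio>

struct Hole {};
static Hole the_hole;                        // unique "nothing pending" marker
static void* pending_exception = &the_hole;  // the per-isolate slot

void* ReadAndClearPendingException() {
  void* result = pending_exception;  // __ lw(v0, MemOperand(t0))
  pending_exception = &the_hole;     // __ sw(a3, MemOperand(t0))
  return result;                     // == &the_hole means none was pending
}

int main() {
  std::printf("%d\n", ReadAndClearPendingException() == &the_hole);  // 1
  return 0;
}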
3697 3778
3698 void CEntryStub::Generate(MacroAssembler* masm) { 3779 void CEntryStub::Generate(MacroAssembler* masm) {
(...skipping 62 matching lines...)
3761 __ bind(&throw_termination_exception); 3842 __ bind(&throw_termination_exception);
3762 GenerateThrowUncatchable(masm, TERMINATION); 3843 GenerateThrowUncatchable(masm, TERMINATION);
3763 3844
3764 __ bind(&throw_normal_exception); 3845 __ bind(&throw_normal_exception);
3765 GenerateThrowTOS(masm); 3846 GenerateThrowTOS(masm);
3766 } 3847 }
3767 3848
3768 3849
3769 void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { 3850 void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
3770 Label invoke, exit; 3851 Label invoke, exit;
3852 Isolate* isolate = masm->isolate();
3771 3853
3772 // Registers: 3854 // Registers:
3773 // a0: entry address 3855 // a0: entry address
3774 // a1: function 3856 // a1: function
3775 // a2: receiver 3857 // a2: receiver
3776 // a3: argc 3858 // a3: argc
3777 // 3859 //
3778 // Stack: 3860 // Stack:
3779 // 4 args slots 3861 // 4 args slots
3780 // args 3862 // args
(...skipping 17 matching lines...)
3798 } 3880 }
3799 3881
3800 __ lw(s0, MemOperand(sp, offset_to_argv + kCArgsSlotsSize)); 3882 __ lw(s0, MemOperand(sp, offset_to_argv + kCArgsSlotsSize));
3801 3883
3802 // We build an EntryFrame. 3884 // We build an EntryFrame.
3803 __ li(t3, Operand(-1)); // Push a bad frame pointer to fail if it is used. 3885 __ li(t3, Operand(-1)); // Push a bad frame pointer to fail if it is used.
3804 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY; 3886 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
3805 __ li(t2, Operand(Smi::FromInt(marker))); 3887 __ li(t2, Operand(Smi::FromInt(marker)));
3806 __ li(t1, Operand(Smi::FromInt(marker))); 3888 __ li(t1, Operand(Smi::FromInt(marker)));
3807 __ li(t0, Operand(ExternalReference(Isolate::kCEntryFPAddress, 3889 __ li(t0, Operand(ExternalReference(Isolate::kCEntryFPAddress,
3808 masm->isolate()))); 3890 isolate)));
3809 __ lw(t0, MemOperand(t0)); 3891 __ lw(t0, MemOperand(t0));
3810 __ Push(t3, t2, t1, t0); 3892 __ Push(t3, t2, t1, t0);
3811 // Set up frame pointer for the frame to be pushed. 3893 // Set up frame pointer for the frame to be pushed.
3812 __ addiu(fp, sp, -EntryFrameConstants::kCallerFPOffset); 3894 __ addiu(fp, sp, -EntryFrameConstants::kCallerFPOffset);
3813 3895
3814 // Registers: 3896 // Registers:
3815 // a0: entry_address 3897 // a0: entry_address
3816 // a1: function 3898 // a1: function
3817 // a2: receiver_pointer 3899 // a2: receiver_pointer
3818 // a3: argc 3900 // a3: argc
3819 // s0: argv 3901 // s0: argv
3820 // 3902 //
3821 // Stack: 3903 // Stack:
3822 // caller fp | 3904 // caller fp |
3823 // function slot | entry frame 3905 // function slot | entry frame
3824 // context slot | 3906 // context slot |
3825 // bad fp (0xff...f) | 3907 // bad fp (0xff...f) |
3826 // callee saved registers + ra 3908 // callee saved registers + ra
3827 // 4 args slots 3909 // 4 args slots
3828 // args 3910 // args
3829 3911
3830 // If this is the outermost JS call, set js_entry_sp value. 3912 // If this is the outermost JS call, set js_entry_sp value.
3831 Label non_outermost_js; 3913 Label non_outermost_js;
3832 ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, 3914 ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate);
3833 masm->isolate());
3834 __ li(t1, Operand(ExternalReference(js_entry_sp))); 3915 __ li(t1, Operand(ExternalReference(js_entry_sp)));
3835 __ lw(t2, MemOperand(t1)); 3916 __ lw(t2, MemOperand(t1));
3836 __ Branch(&non_outermost_js, ne, t2, Operand(zero_reg)); 3917 __ Branch(&non_outermost_js, ne, t2, Operand(zero_reg));
3837 __ sw(fp, MemOperand(t1)); 3918 __ sw(fp, MemOperand(t1));
3838 __ li(t0, Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME))); 3919 __ li(t0, Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
3839 Label cont; 3920 Label cont;
3840 __ b(&cont); 3921 __ b(&cont);
3841 __ nop(); // Branch delay slot nop. 3922 __ nop(); // Branch delay slot nop.
3842 __ bind(&non_outermost_js); 3923 __ bind(&non_outermost_js);
3843 __ li(t0, Operand(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME))); 3924 __ li(t0, Operand(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));
3844 __ bind(&cont); 3925 __ bind(&cont);
3845 __ push(t0); 3926 __ push(t0);
3846 3927
3847 // Call a faked try-block that does the invoke. 3928 // Call a faked try-block that does the invoke.
3848 __ bal(&invoke); // bal exposes branch delay slot. 3929 __ bal(&invoke); // bal exposes branch delay slot.
3849 __ nop(); // Branch delay slot nop. 3930 __ nop(); // Branch delay slot nop.
3850 3931
3851 // Caught exception: Store result (exception) in the pending 3932 // Caught exception: Store result (exception) in the pending
3852 // exception field in the JSEnv and return a failure sentinel. 3933 // exception field in the JSEnv and return a failure sentinel.
3853 // Coming in here the fp will be invalid because the PushTryHandler below 3934 // Coming in here the fp will be invalid because the PushTryHandler below
3854 // sets it to 0 to signal the existence of the JSEntry frame. 3935 // sets it to 0 to signal the existence of the JSEntry frame.
3855 __ li(t0, Operand(ExternalReference(Isolate::kPendingExceptionAddress, 3936 __ li(t0, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
3856 masm->isolate()))); 3937 isolate)));
3857 __ sw(v0, MemOperand(t0)); // We come back from 'invoke'. result is in v0. 3938 __ sw(v0, MemOperand(t0)); // We come back from 'invoke'. result is in v0.
3858 __ li(v0, Operand(reinterpret_cast<int32_t>(Failure::Exception()))); 3939 __ li(v0, Operand(reinterpret_cast<int32_t>(Failure::Exception())));
3859 __ b(&exit); // b exposes branch delay slot. 3940 __ b(&exit); // b exposes branch delay slot.
3860 __ nop(); // Branch delay slot nop. 3941 __ nop(); // Branch delay slot nop.
3861 3942
3862 // Invoke: Link this frame into the handler chain. 3943 // Invoke: Link this frame into the handler chain.
3863 __ bind(&invoke); 3944 __ bind(&invoke);
3864 __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER); 3945 __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER);
3865 // If an exception not caught by another handler occurs, this handler 3946 // If an exception not caught by another handler occurs, this handler
3866 // returns control to the code after the bal(&invoke) above, which 3947 // returns control to the code after the bal(&invoke) above, which
3867 // restores all kCalleeSaved registers (including cp and fp) to their 3948 // restores all kCalleeSaved registers (including cp and fp) to their
3868 // saved values before returning a failure to C. 3949 // saved values before returning a failure to C.
3869 3950
3870 // Clear any pending exceptions. 3951 // Clear any pending exceptions.
3871 __ li(t0, 3952 __ li(t1, Operand(isolate->factory()->the_hole_value()));
3872 Operand(ExternalReference::the_hole_value_location(masm->isolate())));
3873 __ lw(t1, MemOperand(t0));
3874 __ li(t0, Operand(ExternalReference(Isolate::kPendingExceptionAddress, 3953 __ li(t0, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
3875 masm->isolate()))); 3954 isolate)));
3876 __ sw(t1, MemOperand(t0)); 3955 __ sw(t1, MemOperand(t0));
3877 3956
3878 // Invoke the function by calling through JS entry trampoline builtin. 3957 // Invoke the function by calling through JS entry trampoline builtin.
3879 // Notice that we cannot store a reference to the trampoline code directly in 3958 // Notice that we cannot store a reference to the trampoline code directly in
3880 // this stub, because runtime stubs are not traversed when doing GC. 3959 // this stub, because runtime stubs are not traversed when doing GC.
3881 3960
3882 // Registers: 3961 // Registers:
3883 // a0: entry_address 3962 // a0: entry_address
3884 // a1: function 3963 // a1: function
3885 // a2: receiver_pointer 3964 // a2: receiver_pointer
3886 // a3: argc 3965 // a3: argc
3887 // s0: argv 3966 // s0: argv
3888 // 3967 //
3889 // Stack: 3968 // Stack:
3890 // handler frame 3969 // handler frame
3891 // entry frame 3970 // entry frame
3892 // callee saved registers + ra 3971 // callee saved registers + ra
3893 // 4 args slots 3972 // 4 args slots
3894 // args 3973 // args
3895 3974
3896 if (is_construct) { 3975 if (is_construct) {
3897 ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline, 3976 ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
3898 masm->isolate()); 3977 isolate);
3899 __ li(t0, Operand(construct_entry)); 3978 __ li(t0, Operand(construct_entry));
3900 } else { 3979 } else {
3901 ExternalReference entry(Builtins::kJSEntryTrampoline, masm->isolate()); 3980 ExternalReference entry(Builtins::kJSEntryTrampoline, masm->isolate());
3902 __ li(t0, Operand(entry)); 3981 __ li(t0, Operand(entry));
3903 } 3982 }
3904 __ lw(t9, MemOperand(t0)); // Deref address. 3983 __ lw(t9, MemOperand(t0)); // Deref address.
3905 3984
3906 // Call JSEntryTrampoline. 3985 // Call JSEntryTrampoline.
3907 __ addiu(t9, t9, Code::kHeaderSize - kHeapObjectTag); 3986 __ addiu(t9, t9, Code::kHeaderSize - kHeapObjectTag);
3908 __ Call(t9); 3987 __ Call(t9);
3909 3988
3910 // Unlink this frame from the handler chain. 3989 // Unlink this frame from the handler chain.
3911 __ PopTryHandler(); 3990 __ PopTryHandler();
3912 3991
3913 __ bind(&exit); // v0 holds result 3992 __ bind(&exit); // v0 holds result
3914 // Check if the current stack frame is marked as the outermost JS frame. 3993 // Check if the current stack frame is marked as the outermost JS frame.
3915 Label non_outermost_js_2; 3994 Label non_outermost_js_2;
3916 __ pop(t1); 3995 __ pop(t1);
3917 __ Branch(&non_outermost_js_2, ne, t1, 3996 __ Branch(&non_outermost_js_2, ne, t1,
3918 Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME))); 3997 Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
3919 __ li(t1, Operand(ExternalReference(js_entry_sp))); 3998 __ li(t1, Operand(ExternalReference(js_entry_sp)));
3920 __ sw(zero_reg, MemOperand(t1)); 3999 __ sw(zero_reg, MemOperand(t1));
3921 __ bind(&non_outermost_js_2); 4000 __ bind(&non_outermost_js_2);
3922 4001
3923 // Restore the top frame descriptors from the stack. 4002 // Restore the top frame descriptors from the stack.
3924 __ pop(t1); 4003 __ pop(t1);
3925 __ li(t0, Operand(ExternalReference(Isolate::kCEntryFPAddress, 4004 __ li(t0, Operand(ExternalReference(Isolate::kCEntryFPAddress,
3926 masm->isolate()))); 4005 isolate)));
3927 __ sw(t1, MemOperand(t0)); 4006 __ sw(t1, MemOperand(t0));
3928 4007
3929 // Reset the stack to the callee saved registers. 4008 // Reset the stack to the callee saved registers.
3930 __ addiu(sp, sp, -EntryFrameConstants::kCallerFPOffset); 4009 __ addiu(sp, sp, -EntryFrameConstants::kCallerFPOffset);
3931 4010
3932 if (CpuFeatures::IsSupported(FPU)) { 4011 if (CpuFeatures::IsSupported(FPU)) {
3933 CpuFeatures::Scope scope(FPU); 4012 CpuFeatures::Scope scope(FPU);
3934 // Restore callee-saved fpu registers. 4013 // Restore callee-saved fpu registers.
3935 __ MultiPopFPU(kCalleeSavedFPU); 4014 __ MultiPopFPU(kCalleeSavedFPU);
3936 } 4015 }
(...skipping 597 matching lines...)
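Recapping the js_entry_sp bookkeeping in JSEntryStub above: the slot is set only on the outermost JS entry, a marker Smi records which case applied, and the matching exit clears the slot only when it pops the outermost marker. An illustrative model in plain C++ (names invented; the real state lives behind ExternalReferences):

// Outermost-JS-entry bookkeeping, modeled on the assembly above.
static void* js_entry_sp = nullptr;  // null <=> no JS frames on this thread

enum FrameMarker { INNER_JSENTRY_FRAME, OUTERMOST_JSENTRY_FRAME };

FrameMarker EnterJS(void* fp) {
  if (js_entry_sp == nullptr) {      // first (outermost) entry
    js_entry_sp = fp;                // __ sw(fp, MemOperand(t1))
    return OUTERMOST_JSENTRY_FRAME;  // pushed, re-checked on exit
  }
  return INNER_JSENTRY_FRAME;
}

void ExitJS(FrameMarker marker) {
  if (marker == OUTERMOST_JSENTRY_FRAME) {
    js_entry_sp = nullptr;           // __ sw(zero_reg, MemOperand(t1))
  }
}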
4534 // sp[0]: last_match_info (expected JSArray) 4613 // sp[0]: last_match_info (expected JSArray)
4535 // sp[4]: previous index 4614 // sp[4]: previous index
4536 // sp[8]: subject string 4615 // sp[8]: subject string
4537 // sp[12]: JSRegExp object 4616 // sp[12]: JSRegExp object
4538 4617
4539 static const int kLastMatchInfoOffset = 0 * kPointerSize; 4618 static const int kLastMatchInfoOffset = 0 * kPointerSize;
4540 static const int kPreviousIndexOffset = 1 * kPointerSize; 4619 static const int kPreviousIndexOffset = 1 * kPointerSize;
4541 static const int kSubjectOffset = 2 * kPointerSize; 4620 static const int kSubjectOffset = 2 * kPointerSize;
4542 static const int kJSRegExpOffset = 3 * kPointerSize; 4621 static const int kJSRegExpOffset = 3 * kPointerSize;
4543 4622
4623 Isolate* isolate = masm->isolate();
4624
4544 Label runtime, invoke_regexp; 4625 Label runtime, invoke_regexp;
4545 4626
4546 // Allocation of registers for this function. These are in callee save 4627 // Allocation of registers for this function. These are in callee save
4547 // registers and will be preserved by the call to the native RegExp code, as 4628 // registers and will be preserved by the call to the native RegExp code, as
4548 // this code is called using the normal C calling convention. When calling 4629 // this code is called using the normal C calling convention. When calling
4549 // directly from generated code, the native RegExp code will not do a GC and 4630 // directly from generated code, the native RegExp code will not do a GC and
4550 // therefore the contents of these registers are safe to use after the call. 4631 // therefore the contents of these registers are safe to use after the call.
4551 // MIPS - using s0..s2, since we are not using CEntry Stub. 4632 // MIPS - using s0..s2, since we are not using CEntry Stub.
4552 Register subject = s0; 4633 Register subject = s0;
4553 Register regexp_data = s1; 4634 Register regexp_data = s1;
4554 Register last_match_info_elements = s2; 4635 Register last_match_info_elements = s2;
4555 4636
4556 // Ensure that a RegExp stack is allocated. 4637 // Ensure that a RegExp stack is allocated.
4557 ExternalReference address_of_regexp_stack_memory_address = 4638 ExternalReference address_of_regexp_stack_memory_address =
4558 ExternalReference::address_of_regexp_stack_memory_address( 4639 ExternalReference::address_of_regexp_stack_memory_address(
4559 masm->isolate()); 4640 isolate);
4560 ExternalReference address_of_regexp_stack_memory_size = 4641 ExternalReference address_of_regexp_stack_memory_size =
4561 ExternalReference::address_of_regexp_stack_memory_size(masm->isolate()); 4642 ExternalReference::address_of_regexp_stack_memory_size(isolate);
4562 __ li(a0, Operand(address_of_regexp_stack_memory_size)); 4643 __ li(a0, Operand(address_of_regexp_stack_memory_size));
4563 __ lw(a0, MemOperand(a0, 0)); 4644 __ lw(a0, MemOperand(a0, 0));
4564 __ Branch(&runtime, eq, a0, Operand(zero_reg)); 4645 __ Branch(&runtime, eq, a0, Operand(zero_reg));
4565 4646
4566 // Check that the first argument is a JSRegExp object. 4647 // Check that the first argument is a JSRegExp object.
4567 __ lw(a0, MemOperand(sp, kJSRegExpOffset)); 4648 __ lw(a0, MemOperand(sp, kJSRegExpOffset));
4568 STATIC_ASSERT(kSmiTag == 0); 4649 STATIC_ASSERT(kSmiTag == 0);
4569 __ JumpIfSmi(a0, &runtime); 4650 __ JumpIfSmi(a0, &runtime);
4570 __ GetObjectType(a0, a1, a1); 4651 __ GetObjectType(a0, a1, a1);
4571 __ Branch(&runtime, ne, a1, Operand(JS_REGEXP_TYPE)); 4652 __ Branch(&runtime, ne, a1, Operand(JS_REGEXP_TYPE));
(...skipping 60 matching lines...) Expand 10 before | Expand all | Expand 10 after
4632 // Check that the fourth object is a JSArray object. 4713 // Check that the fourth object is a JSArray object.
4633 __ lw(a0, MemOperand(sp, kLastMatchInfoOffset)); 4714 __ lw(a0, MemOperand(sp, kLastMatchInfoOffset));
4634 __ JumpIfSmi(a0, &runtime); 4715 __ JumpIfSmi(a0, &runtime);
4635 __ GetObjectType(a0, a1, a1); 4716 __ GetObjectType(a0, a1, a1);
4636 __ Branch(&runtime, ne, a1, Operand(JS_ARRAY_TYPE)); 4717 __ Branch(&runtime, ne, a1, Operand(JS_ARRAY_TYPE));
4637 // Check that the JSArray is in fast case. 4718 // Check that the JSArray is in fast case.
4638 __ lw(last_match_info_elements, 4719 __ lw(last_match_info_elements,
4639 FieldMemOperand(a0, JSArray::kElementsOffset)); 4720 FieldMemOperand(a0, JSArray::kElementsOffset));
4640 __ lw(a0, FieldMemOperand(last_match_info_elements, HeapObject::kMapOffset)); 4721 __ lw(a0, FieldMemOperand(last_match_info_elements, HeapObject::kMapOffset));
4641 __ Branch(&runtime, ne, a0, Operand( 4722 __ Branch(&runtime, ne, a0, Operand(
4642 masm->isolate()->factory()->fixed_array_map())); 4723 isolate->factory()->fixed_array_map()));
4643 // Check that the last match info has space for the capture registers and the 4724 // Check that the last match info has space for the capture registers and the
4644 // additional information. 4725 // additional information.
4645 __ lw(a0, 4726 __ lw(a0,
4646 FieldMemOperand(last_match_info_elements, FixedArray::kLengthOffset)); 4727 FieldMemOperand(last_match_info_elements, FixedArray::kLengthOffset));
4647 __ Addu(a2, a2, Operand(RegExpImpl::kLastMatchOverhead)); 4728 __ Addu(a2, a2, Operand(RegExpImpl::kLastMatchOverhead));
4648 __ sra(at, a0, kSmiTagSize); // Untag length for comparison. 4729 __ sra(at, a0, kSmiTagSize); // Untag length for comparison.
4649 __ Branch(&runtime, gt, a2, Operand(at)); 4730 __ Branch(&runtime, gt, a2, Operand(at));
4650 4731
4651 // Reset offset for possibly sliced string. 4732 // Reset offset for possibly sliced string.
4652 __ mov(t0, zero_reg); 4733 __ mov(t0, zero_reg);
(...skipping 70 matching lines...)
4723 // RegExp code to avoid handling changing stack height. 4804 // RegExp code to avoid handling changing stack height.
4724 __ lw(a1, MemOperand(sp, kPreviousIndexOffset)); 4805 __ lw(a1, MemOperand(sp, kPreviousIndexOffset));
4725 __ sra(a1, a1, kSmiTagSize); // Untag the Smi. 4806 __ sra(a1, a1, kSmiTagSize); // Untag the Smi.
4726 4807
4727 // a1: previous index 4808 // a1: previous index
4728 // a3: encoding of subject string (1 if ASCII, 0 if two_byte); 4809 // a3: encoding of subject string (1 if ASCII, 0 if two_byte);
4729 // t9: code 4810 // t9: code
4730 // subject: Subject string 4811 // subject: Subject string
4731 // regexp_data: RegExp data (FixedArray) 4812 // regexp_data: RegExp data (FixedArray)
4732 // All checks done. Now push arguments for native regexp code. 4813 // All checks done. Now push arguments for native regexp code.
4733 __ IncrementCounter(masm->isolate()->counters()->regexp_entry_native(), 4814 __ IncrementCounter(isolate->counters()->regexp_entry_native(),
4734 1, a0, a2); 4815 1, a0, a2);
4735 4816
4736 // Isolates: note we add an additional parameter here (isolate pointer). 4817 // Isolates: note we add an additional parameter here (isolate pointer).
4737 static const int kRegExpExecuteArguments = 8; 4818 static const int kRegExpExecuteArguments = 8;
4738 static const int kParameterRegisters = 4; 4819 static const int kParameterRegisters = 4;
4739 __ EnterExitFrame(false, kRegExpExecuteArguments - kParameterRegisters); 4820 __ EnterExitFrame(false, kRegExpExecuteArguments - kParameterRegisters);
4740 4821
4741 // Stack pointer now points to cell where return address is to be written. 4822 // Stack pointer now points to cell where return address is to be written.
4742 // Arguments are before that on the stack or in registers, meaning we 4823 // Arguments are before that on the stack or in registers, meaning we
4743 // treat the return address as argument 5. Thus every argument after that 4824 // treat the return address as argument 5. Thus every argument after that
(...skipping 19 matching lines...)
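The arithmetic behind kRegExpExecuteArguments = 8 and kParameterRegisters = 4 above: the first four arguments travel in a0-a3, sp[0] is reserved for the return-address cell, and, following the two stores shown below, argument N (N >= 5) lands at sp + (N - 4) * kPointerSize. A hypothetical prototype only, to show the mapping; the real entry point's signature is elided by this diff:

// O32-style placement for an eight-argument direct call (illustrative).
int RegExpExecuteSketch(int arg1,   // register a0
                        int arg2,   // register a1
                        int arg3,   // register a2
                        int arg4,   // register a3
                        int arg5,   // sp + 1 * kPointerSize (offsets vector)
                        int arg6,   // sp + 2 * kPointerSize (stack top)
                        int arg7,   // sp + 3 * kPointerSize (assumed slot)
                        int arg8);  // sp + 4 * kPointerSize (assumed slot;
                                    // the added isolate pointer noted above)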
4763 // Argument 6: Start (high end) of backtracking stack memory area. 4844 // Argument 6: Start (high end) of backtracking stack memory area.
4764 __ li(a0, Operand(address_of_regexp_stack_memory_address)); 4845 __ li(a0, Operand(address_of_regexp_stack_memory_address));
4765 __ lw(a0, MemOperand(a0, 0)); 4846 __ lw(a0, MemOperand(a0, 0));
4766 __ li(a2, Operand(address_of_regexp_stack_memory_size)); 4847 __ li(a2, Operand(address_of_regexp_stack_memory_size));
4767 __ lw(a2, MemOperand(a2, 0)); 4848 __ lw(a2, MemOperand(a2, 0));
4768 __ addu(a0, a0, a2); 4849 __ addu(a0, a0, a2);
4769 __ sw(a0, MemOperand(sp, 2 * kPointerSize)); 4850 __ sw(a0, MemOperand(sp, 2 * kPointerSize));
4770 4851
4771 // Argument 5: static offsets vector buffer. 4852 // Argument 5: static offsets vector buffer.
4772 __ li(a0, Operand( 4853 __ li(a0, Operand(
4773 ExternalReference::address_of_static_offsets_vector(masm->isolate()))); 4854 ExternalReference::address_of_static_offsets_vector(isolate)));
4774 __ sw(a0, MemOperand(sp, 1 * kPointerSize)); 4855 __ sw(a0, MemOperand(sp, 1 * kPointerSize));
4775 4856
4776 // For arguments 4 and 3 get string length, calculate start of string data 4857 // For arguments 4 and 3 get string length, calculate start of string data
4777 // and calculate the shift of the index (0 for ASCII and 1 for two byte). 4858 // and calculate the shift of the index (0 for ASCII and 1 for two byte).
4778 __ Addu(t2, subject, Operand(SeqString::kHeaderSize - kHeapObjectTag)); 4859 __ Addu(t2, subject, Operand(SeqString::kHeaderSize - kHeapObjectTag));
4779 __ Xor(a3, a3, Operand(1)); // 1 for 2-byte str, 0 for 1-byte. 4860 __ Xor(a3, a3, Operand(1)); // 1 for 2-byte str, 0 for 1-byte.
4780 // Load the length from the original subject string from the previous stack 4861 // Load the length from the original subject string from the previous stack
4781 // frame. Therefore we have to use fp, which points exactly to two pointer 4862 // frame. Therefore we have to use fp, which points exactly to two pointer
4782 // sizes below the previous sp. (Because creating a new stack frame pushes 4863 // sizes below the previous sp. (Because creating a new stack frame pushes
4783 // the previous fp onto the stack and moves up sp by 2 * kPointerSize.) 4864 // the previous fp onto the stack and moves up sp by 2 * kPointerSize.)
(...skipping 37 matching lines...)
4821 Label failure; 4902 Label failure;
4822 __ Branch(&failure, eq, 4903 __ Branch(&failure, eq,
4823 v0, Operand(NativeRegExpMacroAssembler::FAILURE)); 4904 v0, Operand(NativeRegExpMacroAssembler::FAILURE));
4824 // If not exception it can only be retry. Handle that in the runtime system. 4905 // If not exception it can only be retry. Handle that in the runtime system.
4825 __ Branch(&runtime, ne, 4906 __ Branch(&runtime, ne,
4826 v0, Operand(NativeRegExpMacroAssembler::EXCEPTION)); 4907 v0, Operand(NativeRegExpMacroAssembler::EXCEPTION));
4827 // Result must now be exception. If there is no pending exception already, a 4908 // Result must now be exception. If there is no pending exception already, a
4828 // stack overflow (on the backtrack stack) was detected in RegExp code, but 4909 // stack overflow (on the backtrack stack) was detected in RegExp code, but
4829 // the exception has not yet been created. Handle that in the runtime system. 4910 // the exception has not yet been created. Handle that in the runtime system.
4830 // TODO(592): Rerunning the RegExp to get the stack overflow exception. 4911 // TODO(592): Rerunning the RegExp to get the stack overflow exception.
4831 __ li(a1, Operand( 4912 __ li(a1, Operand(isolate->factory()->the_hole_value()));
4832 ExternalReference::the_hole_value_location(masm->isolate())));
4833 __ lw(a1, MemOperand(a1, 0));
4834 __ li(a2, Operand(ExternalReference(Isolate::kPendingExceptionAddress, 4913 __ li(a2, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
4835 masm->isolate()))); 4914 isolate)));
4836 __ lw(v0, MemOperand(a2, 0)); 4915 __ lw(v0, MemOperand(a2, 0));
4837 __ Branch(&runtime, eq, v0, Operand(a1)); 4916 __ Branch(&runtime, eq, v0, Operand(a1));
4838 4917
4839 __ sw(a1, MemOperand(a2, 0)); // Clear pending exception. 4918 __ sw(a1, MemOperand(a2, 0)); // Clear pending exception.
4840 4919
4841 // Check if the exception is a termination. If so, throw as uncatchable. 4920 // Check if the exception is a termination. If so, throw as uncatchable.
4842 __ LoadRoot(a0, Heap::kTerminationExceptionRootIndex); 4921 __ LoadRoot(a0, Heap::kTerminationExceptionRootIndex);
4843 Label termination_exception; 4922 Label termination_exception;
4844 __ Branch(&termination_exception, eq, v0, Operand(a0)); 4923 __ Branch(&termination_exception, eq, v0, Operand(a0));
4845 4924
4846 __ Throw(v0); // Expects thrown value in v0. 4925 __ Throw(v0); // Expects thrown value in v0.
4847 4926
4848 __ bind(&termination_exception); 4927 __ bind(&termination_exception);
4849 __ ThrowUncatchable(TERMINATION, v0); // Expects thrown value in v0. 4928 __ ThrowUncatchable(TERMINATION, v0); // Expects thrown value in v0.
4850 4929
4851 __ bind(&failure); 4930 __ bind(&failure);
4852 // For failure and exception return null. 4931 // For failure and exception return null.
4853 __ li(v0, Operand(masm->isolate()->factory()->null_value())); 4932 __ li(v0, Operand(isolate->factory()->null_value()));
4854 __ Addu(sp, sp, Operand(4 * kPointerSize)); 4933 __ Addu(sp, sp, Operand(4 * kPointerSize));
4855 __ Ret(); 4934 __ Ret();
4856 4935
4857 // Process the result from the native regexp code. 4936 // Process the result from the native regexp code.
4858 __ bind(&success); 4937 __ bind(&success);
4859 __ lw(a1, 4938 __ lw(a1,
4860 FieldMemOperand(regexp_data, JSRegExp::kIrregexpCaptureCountOffset)); 4939 FieldMemOperand(regexp_data, JSRegExp::kIrregexpCaptureCountOffset));
4861 // Calculate number of capture registers (number_of_captures + 1) * 2. 4940 // Calculate number of capture registers (number_of_captures + 1) * 2.
4862 STATIC_ASSERT(kSmiTag == 0); 4941 STATIC_ASSERT(kSmiTag == 0);
4863 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1); 4942 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
(...skipping 21 matching lines...)
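The capture-register count computed above is two offset registers (start and end) per capture group plus two for the whole match, i.e. (number_of_captures + 1) * 2. A one-liner to make the arithmetic concrete:

// e.g. /(\d+)-(\d+)/ has 2 capture groups and needs (2 + 1) * 2 = 6
// offset registers: start/end of the match plus start/end of each group.
int CaptureRegisterCount(int number_of_captures) {
  return (number_of_captures + 1) * 2;
}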
4885 RegExpImpl::kLastInputOffset)); 4964 RegExpImpl::kLastInputOffset));
4886 __ RecordWriteField(last_match_info_elements, 4965 __ RecordWriteField(last_match_info_elements,
4887 RegExpImpl::kLastInputOffset, 4966 RegExpImpl::kLastInputOffset,
4888 subject, 4967 subject,
4889 t3, 4968 t3,
4890 kRAHasNotBeenSaved, 4969 kRAHasNotBeenSaved,
4891 kDontSaveFPRegs); 4970 kDontSaveFPRegs);
4892 4971
4893 // Get the static offsets vector filled by the native regexp code. 4972 // Get the static offsets vector filled by the native regexp code.
4894 ExternalReference address_of_static_offsets_vector = 4973 ExternalReference address_of_static_offsets_vector =
4895 ExternalReference::address_of_static_offsets_vector(masm->isolate()); 4974 ExternalReference::address_of_static_offsets_vector(isolate);
4896 __ li(a2, Operand(address_of_static_offsets_vector)); 4975 __ li(a2, Operand(address_of_static_offsets_vector));
4897 4976
4898 // a1: number of capture registers 4977 // a1: number of capture registers
4899 // a2: offsets vector 4978 // a2: offsets vector
4900 Label next_capture, done; 4979 Label next_capture, done;
4901 // Capture register counter starts from number of capture registers and 4980 // Capture register counter starts from number of capture registers and
4902 // counts down until wrapping after zero. 4981 // counts down until wrapping after zero.
4903 __ Addu(a0, 4982 __ Addu(a0,
4904 last_match_info_elements, 4983 last_match_info_elements,
4905 Operand(RegExpImpl::kFirstCaptureOffset - kHeapObjectTag)); 4984 Operand(RegExpImpl::kFirstCaptureOffset - kHeapObjectTag));
(...skipping 104 matching lines...)
5010 5089
5011 __ bind(&done); 5090 __ bind(&done);
5012 __ Addu(sp, sp, Operand(3 * kPointerSize)); 5091 __ Addu(sp, sp, Operand(3 * kPointerSize));
5013 __ Ret(); 5092 __ Ret();
5014 5093
5015 __ bind(&slowcase); 5094 __ bind(&slowcase);
5016 __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1); 5095 __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1);
5017 } 5096 }
5018 5097
5019 5098
5099 void CallFunctionStub::FinishCode(Code* code) {
5100 code->set_has_function_cache(false);
5101 }
5102
5103
5104 void CallFunctionStub::Clear(Heap* heap, Address address) {
5105 UNREACHABLE();
5106 }
5107
5108
5109 Object* CallFunctionStub::GetCachedValue(Address address) {
5110 UNREACHABLE();
5111 return NULL;
5112 }
5113
5114
5020 void CallFunctionStub::Generate(MacroAssembler* masm) { 5115 void CallFunctionStub::Generate(MacroAssembler* masm) {
5021 Label slow, non_function; 5116 Label slow, non_function;
5022 5117
5023 // The receiver might implicitly be the global object. This is 5118 // The receiver might implicitly be the global object. This is
5024 // indicated by passing the hole as the receiver to the call 5119 // indicated by passing the hole as the receiver to the call
5025 // function stub. 5120 // function stub.
5026 if (ReceiverMightBeImplicit()) { 5121 if (ReceiverMightBeImplicit()) {
5027 Label call; 5122 Label call;
5028 // Get the receiver from the stack. 5123 // Get the receiver from the stack.
5029 // function, receiver [, arguments] 5124 // function, receiver [, arguments]
(...skipping 2058 matching lines...)
7088 } 7183 }
7089 7184
7090 7185
7091 struct AheadOfTimeWriteBarrierStubList { 7186 struct AheadOfTimeWriteBarrierStubList {
7092 Register object, value, address; 7187 Register object, value, address;
7093 RememberedSetAction action; 7188 RememberedSetAction action;
7094 }; 7189 };
7095 7190
7096 7191
7097 struct AheadOfTimeWriteBarrierStubList kAheadOfTime[] = { 7192 struct AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
7098 // TODO(1696): Fill this in for MIPS. 7193 // Used in RegExpExecStub.
7194 { s2, s0, t3, EMIT_REMEMBERED_SET },
7195 { s2, a2, t3, EMIT_REMEMBERED_SET },
7196 // Used in CompileArrayPushCall.
7197 // Also used in StoreIC::GenerateNormal via GenerateDictionaryStore.
7198 // Also used in KeyedStoreIC::GenerateGeneric.
7199 { a3, t0, t1, EMIT_REMEMBERED_SET },
7200 // Used in CompileStoreGlobal.
7201 { t0, a1, a2, OMIT_REMEMBERED_SET },
7202 // Used in StoreStubCompiler::CompileStoreField via GenerateStoreField.
7203 { a1, a2, a3, EMIT_REMEMBERED_SET },
7204 { a3, a2, a1, EMIT_REMEMBERED_SET },
7205 // Used in KeyedStoreStubCompiler::CompileStoreField via GenerateStoreField.
7206 { a2, a1, a3, EMIT_REMEMBERED_SET },
7207 { a3, a1, a2, EMIT_REMEMBERED_SET },
7208 // KeyedStoreStubCompiler::GenerateStoreFastElement.
7209 { t0, a2, a3, EMIT_REMEMBERED_SET },
7099 // Null termination. 7210 // Null termination.
7100 { no_reg, no_reg, no_reg, EMIT_REMEMBERED_SET} 7211 { no_reg, no_reg, no_reg, EMIT_REMEMBERED_SET}
7101 }; 7212 };
7102 7213
7103 7214
7104 bool RecordWriteStub::CompilingCallsToThisStubIsGCSafe() { 7215 bool RecordWriteStub::IsPregenerated() {
7105 for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime; 7216 for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
7106 !entry->object.is(no_reg); 7217 !entry->object.is(no_reg);
7107 entry++) { 7218 entry++) {
7108 if (object_.is(entry->object) && 7219 if (object_.is(entry->object) &&
7109 value_.is(entry->value) && 7220 value_.is(entry->value) &&
7110 address_.is(entry->address) && 7221 address_.is(entry->address) &&
7111 remembered_set_action_ == entry->action && 7222 remembered_set_action_ == entry->action &&
7112 save_fp_regs_mode_ == kDontSaveFPRegs) { 7223 save_fp_regs_mode_ == kDontSaveFPRegs) {
7113 return true; 7224 return true;
7114 } 7225 }
7115 } 7226 }
7116 return true; // TODO(1696): Should be false. 7227 return false;
7228 }
7229
7230
7231 bool StoreBufferOverflowStub::IsPregenerated() {
7232 return save_doubles_ == kDontSaveFPRegs || ISOLATE->fp_stubs_generated();
7117 } 7233 }
7118 7234
7119 7235
7120 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime() { 7236 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime() {
7121 StoreBufferOverflowStub stub1(kDontSaveFPRegs); 7237 StoreBufferOverflowStub stub1(kDontSaveFPRegs);
7122 stub1.GetCode(); 7238 stub1.GetCode()->set_is_pregenerated(true);
7123 StoreBufferOverflowStub stub2(kSaveFPRegs);
7124 stub2.GetCode();
7125 } 7239 }
7126 7240
7127 7241
7128 void RecordWriteStub::GenerateFixedRegStubsAheadOfTime() { 7242 void RecordWriteStub::GenerateFixedRegStubsAheadOfTime() {
7129 for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime; 7243 for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
7130 !entry->object.is(no_reg); 7244 !entry->object.is(no_reg);
7131 entry++) { 7245 entry++) {
7132 RecordWriteStub stub(entry->object, 7246 RecordWriteStub stub(entry->object,
7133 entry->value, 7247 entry->value,
7134 entry->address, 7248 entry->address,
7135 entry->action, 7249 entry->action,
7136 kDontSaveFPRegs); 7250 kDontSaveFPRegs);
7137 stub.GetCode(); 7251 stub.GetCode()->set_is_pregenerated(true);
7138 } 7252 }
7139 } 7253 }
7140 7254
7141 7255
7142 // Takes the input in 3 registers: address_, value_ and object_. A pointer to 7256 // Takes the input in 3 registers: address_, value_ and object_. A pointer to
7143 // the value has just been written into the object, now this stub makes sure 7257 // the value has just been written into the object, now this stub makes sure
7144 // we keep the GC informed. The word in the object where the value has been 7258 // we keep the GC informed. The word in the object where the value has been
7145 // written is in the address register. 7259 // written is in the address register.
7146 void RecordWriteStub::Generate(MacroAssembler* masm) { 7260 void RecordWriteStub::Generate(MacroAssembler* masm) {
7147 Label skip_to_incremental_noncompacting; 7261 Label skip_to_incremental_noncompacting;
7148 Label skip_to_incremental_compacting; 7262 Label skip_to_incremental_compacting;
7149 7263
7150 // The first two branch+nop instructions are generated with labels so as to 7264 // The first two branch+nop instructions are generated with labels so as to
7151 // get the offset fixed up correctly by the bind(Label*) call. We patch them 7265 // get the offset fixed up correctly by the bind(Label*) call. We patch them
7152 // back and forth between a "bne zero_reg, zero_reg, ..." (a nop in this 7266 // back and forth between a "bne zero_reg, zero_reg, ..." (a nop in this
7153 // position) and the "beq zero_reg, zero_reg, ..." when we start and stop 7267 // position) and the "beq zero_reg, zero_reg, ..." when we start and stop
7154 // incremental heap marking. 7268 // incremental heap marking.
7155 // See RecordWriteStub::Patch for details. 7269 // See RecordWriteStub::Patch for details.
7156 __ beq(zero_reg, zero_reg, &skip_to_incremental_noncompacting); 7270 __ beq(zero_reg, zero_reg, &skip_to_incremental_noncompacting);
7157 __ nop(); 7271 __ nop();
7158 __ beq(zero_reg, zero_reg, &skip_to_incremental_compacting); 7272 __ beq(zero_reg, zero_reg, &skip_to_incremental_compacting);
7159 __ nop(); 7273 __ nop();
7160 7274
7161 if (remembered_set_action_ == EMIT_REMEMBERED_SET) { 7275 if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
7162 __ RememberedSetHelper( 7276 __ RememberedSetHelper(object_,
7163 address_, value_, save_fp_regs_mode_, MacroAssembler::kReturnAtEnd); 7277 address_,
7278 value_,
7279 save_fp_regs_mode_,
7280 MacroAssembler::kReturnAtEnd);
7164 } 7281 }
7165 __ Ret(); 7282 __ Ret();
7166 7283
7167 __ bind(&skip_to_incremental_noncompacting); 7284 __ bind(&skip_to_incremental_noncompacting);
7168 GenerateIncremental(masm, INCREMENTAL); 7285 GenerateIncremental(masm, INCREMENTAL);
7169 7286
7170 __ bind(&skip_to_incremental_compacting); 7287 __ bind(&skip_to_incremental_compacting);
7171 GenerateIncremental(masm, INCREMENTAL_COMPACTION); 7288 GenerateIncremental(masm, INCREMENTAL_COMPACTION);
7172 7289
7173 // Initial mode of the stub is expected to be STORE_BUFFER_ONLY. 7290 // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
7174 // Will be checked in IncrementalMarking::ActivateGeneratedStub. 7291 // Will be checked in IncrementalMarking::ActivateGeneratedStub.
7175 7292
7176 PatchBranchIntoNop(masm, 0); 7293 PatchBranchIntoNop(masm, 0);
7177 PatchBranchIntoNop(masm, 2 * Assembler::kInstrSize); 7294 PatchBranchIntoNop(masm, 2 * Assembler::kInstrSize);
7178 } 7295 }
7179 7296
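The patching described in RecordWriteStub::Generate above exploits two MIPS idioms: "beq zero_reg, zero_reg" is always taken (an unconditional branch) and "bne zero_reg, zero_reg" is never taken (an effective nop), so toggling between them turns the incremental paths on and off without moving any code. A sketch using the standard MIPS32 encodings (opcode in bits 31:26, BEQ = 0b000100, BNE = 0b000101):

#include <cstdint>

const uint32_t kOpcodeMask = 0x3Fu << 26;
const uint32_t kBeqOpcode = 0x04u << 26;  // always taken: branch is live
const uint32_t kBneOpcode = 0x05u << 26;  // never taken: acts as a nop

// Initial STORE_BUFFER_ONLY state, as set at the end of Generate() above.
uint32_t PatchBranchIntoNopSketch(uint32_t instr) {
  return (instr & ~kOpcodeMask) | kBneOpcode;
}

// Applied when incremental marking starts and the stub is activated.
uint32_t PatchNopIntoBranchSketch(uint32_t instr) {
  return (instr & ~kOpcodeMask) | kBeqOpcode;
}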
7180 7297
7181 void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) { 7298 void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
7182 regs_.Save(masm); 7299 regs_.Save(masm);
7183 7300
7184 if (remembered_set_action_ == EMIT_REMEMBERED_SET) { 7301 if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
7185 Label dont_need_remembered_set; 7302 Label dont_need_remembered_set;
7186 7303
7187 __ lw(regs_.scratch0(), MemOperand(regs_.address(), 0)); 7304 __ lw(regs_.scratch0(), MemOperand(regs_.address(), 0));
7188 __ JumpIfNotInNewSpace(regs_.scratch0(), 7305 __ JumpIfNotInNewSpace(regs_.scratch0(), // Value.
7189 regs_.scratch0(), 7306 regs_.scratch0(),
7190 &dont_need_remembered_set); 7307 &dont_need_remembered_set);
7191 7308
7192 __ CheckPageFlag(regs_.object(), 7309 __ CheckPageFlag(regs_.object(),
7193 regs_.scratch0(), 7310 regs_.scratch0(),
7194 1 << MemoryChunk::SCAN_ON_SCAVENGE, 7311 1 << MemoryChunk::SCAN_ON_SCAVENGE,
7195 ne, 7312 ne,
7196 &dont_need_remembered_set); 7313 &dont_need_remembered_set);
7197 7314
7198 // First notify the incremental marker if necessary, then update the 7315 // First notify the incremental marker if necessary, then update the
7199 // remembered set. 7316 // remembered set.
7200 CheckNeedsToInformIncrementalMarker( 7317 CheckNeedsToInformIncrementalMarker(
7201 masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode); 7318 masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode);
7202 InformIncrementalMarker(masm, mode); 7319 InformIncrementalMarker(masm, mode);
7203 regs_.Restore(masm); 7320 regs_.Restore(masm);
7204 __ RememberedSetHelper( 7321 __ RememberedSetHelper(object_,
7205 address_, value_, save_fp_regs_mode_, MacroAssembler::kReturnAtEnd); 7322 address_,
7323 value_,
7324 save_fp_regs_mode_,
7325 MacroAssembler::kReturnAtEnd);
7206 7326
7207 __ bind(&dont_need_remembered_set); 7327 __ bind(&dont_need_remembered_set);
7208 } 7328 }
7209 7329
7210 CheckNeedsToInformIncrementalMarker( 7330 CheckNeedsToInformIncrementalMarker(
7211 masm, kReturnOnNoNeedToInformIncrementalMarker, mode); 7331 masm, kReturnOnNoNeedToInformIncrementalMarker, mode);
7212 InformIncrementalMarker(masm, mode); 7332 InformIncrementalMarker(masm, mode);
7213 regs_.Restore(masm); 7333 regs_.Restore(masm);
7214 __ Ret(); 7334 __ Ret();
7215 } 7335 }
(...skipping 41 matching lines...)
7257 Label on_black; 7377 Label on_black;
7258 Label need_incremental; 7378 Label need_incremental;
7259 Label need_incremental_pop_scratch; 7379 Label need_incremental_pop_scratch;
7260 7380
7261 // Let's look at the color of the object: If it is not black we don't have 7381 // Let's look at the color of the object: If it is not black we don't have
7262 // to inform the incremental marker. 7382 // to inform the incremental marker.
7263 __ JumpIfBlack(regs_.object(), regs_.scratch0(), regs_.scratch1(), &on_black); 7383 __ JumpIfBlack(regs_.object(), regs_.scratch0(), regs_.scratch1(), &on_black);
7264 7384
7265 regs_.Restore(masm); 7385 regs_.Restore(masm);
7266 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) { 7386 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
7267 __ RememberedSetHelper( 7387 __ RememberedSetHelper(object_,
7268 address_, value_, save_fp_regs_mode_, MacroAssembler::kReturnAtEnd); 7388 address_,
7389 value_,
7390 save_fp_regs_mode_,
7391 MacroAssembler::kReturnAtEnd);
7269 } else { 7392 } else {
7270 __ Ret(); 7393 __ Ret();
7271 } 7394 }
7272 7395
7273 __ bind(&on_black); 7396 __ bind(&on_black);
7274 7397
7275 // Get the value from the slot. 7398 // Get the value from the slot.
7276 __ lw(regs_.scratch0(), MemOperand(regs_.address(), 0)); 7399 __ lw(regs_.scratch0(), MemOperand(regs_.address(), 0));
7277 7400
7278 if (mode == INCREMENTAL_COMPACTION) { 7401 if (mode == INCREMENTAL_COMPACTION) {
(...skipping 19 matching lines...)
7298 __ Push(regs_.object(), regs_.address()); 7421 __ Push(regs_.object(), regs_.address());
7299 __ EnsureNotWhite(regs_.scratch0(), // The value. 7422 __ EnsureNotWhite(regs_.scratch0(), // The value.
7300 regs_.scratch1(), // Scratch. 7423 regs_.scratch1(), // Scratch.
7301 regs_.object(), // Scratch. 7424 regs_.object(), // Scratch.
7302 regs_.address(), // Scratch. 7425 regs_.address(), // Scratch.
7303 &need_incremental_pop_scratch); 7426 &need_incremental_pop_scratch);
7304 __ Pop(regs_.object(), regs_.address()); 7427 __ Pop(regs_.object(), regs_.address());
7305 7428
7306 regs_.Restore(masm); 7429 regs_.Restore(masm);
7307 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) { 7430 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
7308 __ RememberedSetHelper( 7431 __ RememberedSetHelper(object_,
7309 address_, value_, save_fp_regs_mode_, MacroAssembler::kReturnAtEnd); 7432 address_,
7433 value_,
7434 save_fp_regs_mode_,
7435 MacroAssembler::kReturnAtEnd);
7310 } else { 7436 } else {
7311 __ Ret(); 7437 __ Ret();
7312 } 7438 }
7313 7439
7314 __ bind(&need_incremental_pop_scratch); 7440 __ bind(&need_incremental_pop_scratch);
7315 __ Pop(regs_.object(), regs_.address()); 7441 __ Pop(regs_.object(), regs_.address());
7316 7442
7317 __ bind(&need_incremental); 7443 __ bind(&need_incremental);
7318 7444
7319 // Fall through when we need to inform the incremental marker. 7445 // Fall through when we need to inform the incremental marker.
7320 } 7446 }
7321 7447
7322 7448
7323 #undef __ 7449 #undef __
7324 7450
7325 } } // namespace v8::internal 7451 } } // namespace v8::internal
7326 7452
7327 #endif // V8_TARGET_ARCH_MIPS 7453 #endif // V8_TARGET_ARCH_MIPS