| OLD | NEW | 
|     1 // Copyright 2012 the V8 project authors. All rights reserved. |     1 // Copyright 2012 the V8 project authors. All rights reserved. | 
|     2 // Use of this source code is governed by a BSD-style license that can be |     2 // Use of this source code is governed by a BSD-style license that can be | 
|     3 // found in the LICENSE file. |     3 // found in the LICENSE file. | 
|     4  |     4  | 
|     5 #if V8_TARGET_ARCH_MIPS |     5 #if V8_TARGET_ARCH_MIPS | 
|     6  |     6  | 
|     7 #include "src/codegen.h" |     7 #include "src/codegen.h" | 
|     8 #include "src/debug/debug.h" |     8 #include "src/debug/debug.h" | 
|     9 #include "src/deoptimizer.h" |     9 #include "src/deoptimizer.h" | 
|    10 #include "src/full-codegen/full-codegen.h" |    10 #include "src/full-codegen/full-codegen.h" | 
| (...skipping 1127 matching lines...) | 
|  1138   __ bind(&switch_to_different_code_kind); |  1138   __ bind(&switch_to_different_code_kind); | 
|  1139   __ LeaveFrame(StackFrame::JAVA_SCRIPT); |  1139   __ LeaveFrame(StackFrame::JAVA_SCRIPT); | 
|  1140   __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); |  1140   __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); | 
|  1141   __ lw(t0, FieldMemOperand(t0, SharedFunctionInfo::kCodeOffset)); |  1141   __ lw(t0, FieldMemOperand(t0, SharedFunctionInfo::kCodeOffset)); | 
|  1142   __ Addu(t0, t0, Operand(Code::kHeaderSize - kHeapObjectTag)); |  1142   __ Addu(t0, t0, Operand(Code::kHeaderSize - kHeapObjectTag)); | 
|  1143   __ sw(t0, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); |  1143   __ sw(t0, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); | 
|  1144   __ RecordWriteCodeEntryField(a1, t0, t1); |  1144   __ RecordWriteCodeEntryField(a1, t0, t1); | 
|  1145   __ Jump(t0); |  1145   __ Jump(t0); | 
|  1146 } |  1146 } | 
|  1147  |  1147  | 
|  1148 void Builtins::Generate_InterpreterMarkBaselineOnReturn(MacroAssembler* masm) { |  | 
|  1149   // Save the function and context for call to CompileBaseline. |  | 
|  1150   __ lw(a1, MemOperand(fp, StandardFrameConstants::kFunctionOffset)); |  | 
|  1151   __ lw(kContextRegister, |  | 
|  1152         MemOperand(fp, StandardFrameConstants::kContextOffset)); |  | 
|  1153  |  | 
|  1154   // Leave the frame before recompiling for baseline so that we don't count as |  | 
|  1155   // an activation on the stack. |  | 
|  1156   LeaveInterpreterFrame(masm, t0); |  | 
|  1157  |  | 
|  1158   { |  | 
|  1159     FrameScope frame_scope(masm, StackFrame::INTERNAL); |  | 
|  1160     // Push return value. |  | 
|  1161     __ push(v0); |  | 
|  1162  |  | 
|  1163     // Push function as argument and compile for baseline. |  | 
|  1164     __ push(a1); |  | 
|  1165     __ CallRuntime(Runtime::kCompileBaseline); |  | 
|  1166  |  | 
|  1167     // Restore return value. |  | 
|  1168     __ pop(v0); |  | 
|  1169   } |  | 
|  1170   __ Jump(ra); |  | 
|  1171 } |  | 
|  1172  |  | 
|  1173 static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args, |  1148 static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args, | 
|  1174                                         Register scratch1, Register scratch2, |  1149                                         Register scratch1, Register scratch2, | 
|  1175                                         Label* stack_overflow) { |  1150                                         Label* stack_overflow) { | 
|  1176   // Check the stack for overflow. We are not trying to catch |  1151   // Check the stack for overflow. We are not trying to catch | 
|  1177   // interruptions (e.g. debug break and preemption) here, so the "real stack |  1152   // interruptions (e.g. debug break and preemption) here, so the "real stack | 
|  1178   // limit" is checked. |  1153   // limit" is checked. | 
|  1179   __ LoadRoot(scratch1, Heap::kRealStackLimitRootIndex); |  1154   __ LoadRoot(scratch1, Heap::kRealStackLimitRootIndex); | 
|  1180   // Make scratch1 the space we have left. The stack might already be overflowed |  1155   // Make scratch1 the space we have left. The stack might already be overflowed | 
|  1181   // here which will cause scratch1 to become negative. |  1156   // here which will cause scratch1 to become negative. | 
|  1182   __ subu(scratch1, sp, scratch1); |  1157   __ subu(scratch1, sp, scratch1); | 
| (...skipping 1805 matching lines...) | 
|  2988     __ break_(0xCC); |  2963     __ break_(0xCC); | 
|  2989   } |  2964   } | 
|  2990 } |  2965 } | 
|  2991  |  2966  | 
|  2992 #undef __ |  2967 #undef __ | 
|  2993  |  2968  | 
|  2994 }  // namespace internal |  2969 }  // namespace internal | 
|  2995 }  // namespace v8 |  2970 }  // namespace v8 | 
|  2996  |  2971  | 
|  2997 #endif  // V8_TARGET_ARCH_MIPS |  2972 #endif  // V8_TARGET_ARCH_MIPS | 