| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_S390 | 5 #if V8_TARGET_ARCH_S390 |
| 6 | 6 |
| 7 #include "src/codegen.h" | 7 #include "src/codegen.h" |
| 8 #include "src/debug/debug.h" | 8 #include "src/debug/debug.h" |
| 9 #include "src/deoptimizer.h" | 9 #include "src/deoptimizer.h" |
| 10 #include "src/full-codegen/full-codegen.h" | 10 #include "src/full-codegen/full-codegen.h" |
| (...skipping 1046 matching lines...) |
| 1057 Operand(ExternalReference::interpreter_dispatch_table_address( | 1057 Operand(ExternalReference::interpreter_dispatch_table_address( |
| 1058 masm->isolate()))); | 1058 masm->isolate()))); |
| 1059 | 1059 |
| 1060 // Dispatch to the first bytecode handler for the function. | 1060 // Dispatch to the first bytecode handler for the function. |
| 1061 __ LoadlB(r3, MemOperand(kInterpreterBytecodeArrayRegister, | 1061 __ LoadlB(r3, MemOperand(kInterpreterBytecodeArrayRegister, |
| 1062 kInterpreterBytecodeOffsetRegister)); | 1062 kInterpreterBytecodeOffsetRegister)); |
| 1063 __ ShiftLeftP(ip, r3, Operand(kPointerSizeLog2)); | 1063 __ ShiftLeftP(ip, r3, Operand(kPointerSizeLog2)); |
| 1064 __ LoadP(ip, MemOperand(kInterpreterDispatchTableRegister, ip)); | 1064 __ LoadP(ip, MemOperand(kInterpreterDispatchTableRegister, ip)); |
| 1065 __ Call(ip); | 1065 __ Call(ip); |
| 1066 | 1066 |
| 1067 // Even though the first bytecode handler was called, we will never return. | 1067 masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset()); |
| 1068 __ Abort(kUnexpectedReturnFromBytecodeHandler); | 1068 |
| 1069 // The return value is in r2. |
| 1070 |
| 1071 // Get the arguments + receiver count. |
| 1072 __ LoadP(r4, MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp)); |
| 1073 __ LoadlW(r4, FieldMemOperand(r4, BytecodeArray::kParameterSizeOffset)); |
| 1074 |
| 1075 // Leave the frame (also dropping the register file). |
| 1076 __ LeaveFrame(StackFrame::JAVA_SCRIPT); |
| 1077 |
| 1078 __ lay(sp, MemOperand(sp, r4)); |
| 1079 __ Ret(); |
| 1069 | 1080 |
| 1070 // If the bytecode array is no longer present, then the underlying function | 1081 // If the bytecode array is no longer present, then the underlying function |
| 1071 // has been switched to a different kind of code and we heal the closure by | 1082 // has been switched to a different kind of code and we heal the closure by |
| 1072 // switching the code entry field over to the new code object as well. | 1083 // switching the code entry field over to the new code object as well. |
| 1073 __ bind(&bytecode_array_not_present); | 1084 __ bind(&bytecode_array_not_present); |
| 1074 __ LeaveFrame(StackFrame::JAVA_SCRIPT); | 1085 __ LeaveFrame(StackFrame::JAVA_SCRIPT); |
| 1075 __ LoadP(r6, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset)); | 1086 __ LoadP(r6, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset)); |
| 1076 __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kCodeOffset)); | 1087 __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kCodeOffset)); |
| 1077 __ AddP(r6, r6, Operand(Code::kHeaderSize - kHeapObjectTag)); | 1088 __ AddP(r6, r6, Operand(Code::kHeaderSize - kHeapObjectTag)); |
| 1078 __ StoreP(r6, FieldMemOperand(r3, JSFunction::kCodeEntryOffset), r0); | 1089 __ StoreP(r6, FieldMemOperand(r3, JSFunction::kCodeEntryOffset), r0); |
| 1079 __ RecordWriteCodeEntryField(r3, r6, r7); | 1090 __ RecordWriteCodeEntryField(r3, r6, r7); |
| 1080 __ JumpToJSEntry(r6); | 1091 __ JumpToJSEntry(r6); |
| 1081 } | 1092 } |
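Note on the new return path: the SetInterpreterEntryReturnPCOffset(masm->pc_offset()) call records, once, how far into the trampoline the instruction after the Call sits, and Generate_InterpreterEnterBytecodeDispatch below rebuilds a concrete return address from that offset so a returning bytecode handler lands on the frame-teardown sequence added above. A rough C++ sketch of the two ends of that handshake (V8-internal types, includes omitted; instruction_start() is assumed to be the usual untagged start of the code object's instructions):

    // Recording side: one-time bookkeeping while generating the trampoline.
    void RecordEntryReturnPC(MacroAssembler* masm) {
      masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());
    }

    // Consuming side: what the AddP into r14 in
    // Generate_InterpreterEnterBytecodeDispatch effectively computes.
    Address EntryReturnAddress(Isolate* isolate) {
      Handle<Code> trampoline = isolate->builtins()->InterpreterEntryTrampoline();
      int offset = isolate->heap()->interpreter_entry_return_pc_offset()->value();
      // tagged code pointer + Code::kHeaderSize - kHeapObjectTag == instruction_start().
      return trampoline->instruction_start() + offset;
    }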
| 1082 | 1093 |
| 1083 void Builtins::Generate_InterpreterExitTrampoline(MacroAssembler* masm) { | |
| 1084 // The return value is in the accumulator, which is already in r2. | |
| 1085 | |
| 1086 // Leave the frame (also dropping the register file). | |
| 1087 __ LeaveFrame(StackFrame::JAVA_SCRIPT); | |
| 1088 | |
| 1089 // Drop receiver + arguments and return. | |
| 1090 __ LoadlW(r0, FieldMemOperand(kInterpreterBytecodeArrayRegister, | |
| 1091 BytecodeArray::kParameterSizeOffset)); | |
| 1092 __ AddP(sp, sp, r0); | |
| 1093 __ Ret(); | |
| 1094 } | |
| 1095 | |
| 1096 static void Generate_InterpreterPushArgs(MacroAssembler* masm, Register index, | 1094 static void Generate_InterpreterPushArgs(MacroAssembler* masm, Register index, |
| 1097 Register count, Register scratch) { | 1095 Register count, Register scratch) { |
| 1098 Label loop; | 1096 Label loop; |
| 1099 __ AddP(index, index, Operand(kPointerSize)); // Bias up for LoadPU | 1097 __ AddP(index, index, Operand(kPointerSize)); // Bias up for LoadPU |
| 1100 __ LoadRR(r0, count); | 1098 __ LoadRR(r0, count); |
| 1101 __ bind(&loop); | 1099 __ bind(&loop); |
| 1102 __ LoadP(scratch, MemOperand(index, -kPointerSize)); | 1100 __ LoadP(scratch, MemOperand(index, -kPointerSize)); |
| 1103 __ lay(index, MemOperand(index, -kPointerSize)); | 1101 __ lay(index, MemOperand(index, -kPointerSize)); |
| 1104 __ push(scratch); | 1102 __ push(scratch); |
| 1105 __ SubP(r0, Operand(1)); | 1103 __ SubP(r0, Operand(1)); |
| (...skipping 40 matching lines...) |
| 1146 Label skip; | 1144 Label skip; |
| 1147 __ CmpP(r2, Operand::Zero()); | 1145 __ CmpP(r2, Operand::Zero()); |
| 1148 __ beq(&skip); | 1146 __ beq(&skip); |
| 1149 Generate_InterpreterPushArgs(masm, r4, r2, r6); | 1147 Generate_InterpreterPushArgs(masm, r4, r2, r6); |
| 1150 __ bind(&skip); | 1148 __ bind(&skip); |
| 1151 | 1149 |
| 1152 // Call the constructor with r2, r3, and r5 unmodified. | 1150 // Call the constructor with r2, r3, and r5 unmodified. |
| 1153 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); | 1151 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); |
| 1154 } | 1152 } |
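For reference, the push loop emitted by Generate_InterpreterPushArgs copies `count` pointer-sized values, starting at `index` and walking toward lower addresses, onto the downward-growing stack. A standalone illustrative model, not V8 code (the names and the raw-pointer stack are stand-ins):

    #include <cstddef>
    #include <cstdint>

    // Mirrors the emitted loop: bias the source pointer up by one slot, then
    // repeatedly load the slot below it, step down, and push.
    void PushArgs(uintptr_t*& sp, const uintptr_t* index, size_t count) {
      const uintptr_t* src = index + 1;   // "Bias up for LoadPU" (the AddP above)
      for (size_t remaining = count; remaining > 0; --remaining) {
        --src;                            // LoadP at index - kPointerSize, then lay
        *--sp = *src;                     // push(scratch)
      }
    }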
| 1155 | 1153 |
| 1156 static void Generate_EnterBytecodeDispatch(MacroAssembler* masm) { | 1154 void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) { |
| 1155 // Set the return address to the correct point in the interpreter entry |
| 1156 // trampoline. |
| 1157 Smi* interpreter_entry_return_pc_offset( |
| 1158 masm->isolate()->heap()->interpreter_entry_return_pc_offset()); |
| 1159 DCHECK_NE(interpreter_entry_return_pc_offset, Smi::FromInt(0)); |
| 1160 __ Move(r4, masm->isolate()->builtins()->InterpreterEntryTrampoline()); |
| 1161 __ AddP(r14, r4, Operand(interpreter_entry_return_pc_offset->value() + |
| 1162 Code::kHeaderSize - kHeapObjectTag)); |
| 1163 |
| 1157 // Initialize the dispatch table register. | 1164 // Initialize the dispatch table register. |
| 1158 __ mov(kInterpreterDispatchTableRegister, | 1165 __ mov(kInterpreterDispatchTableRegister, |
| 1159 Operand(ExternalReference::interpreter_dispatch_table_address( | 1166 Operand(ExternalReference::interpreter_dispatch_table_address( |
| 1160 masm->isolate()))); | 1167 masm->isolate()))); |
| 1161 | 1168 |
| 1162 // Get the bytecode array pointer from the frame. | 1169 // Get the bytecode array pointer from the frame. |
| 1163 __ LoadP(kInterpreterBytecodeArrayRegister, | 1170 __ LoadP(kInterpreterBytecodeArrayRegister, |
| 1164 MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp)); | 1171 MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp)); |
| 1165 | 1172 |
| 1166 if (FLAG_debug_code) { | 1173 if (FLAG_debug_code) { |
| (...skipping 11 matching lines...) |
| 1178 __ SmiUntag(kInterpreterBytecodeOffsetRegister); | 1185 __ SmiUntag(kInterpreterBytecodeOffsetRegister); |
| 1179 | 1186 |
| 1180 // Dispatch to the target bytecode. | 1187 // Dispatch to the target bytecode. |
| 1181 __ LoadlB(r3, MemOperand(kInterpreterBytecodeArrayRegister, | 1188 __ LoadlB(r3, MemOperand(kInterpreterBytecodeArrayRegister, |
| 1182 kInterpreterBytecodeOffsetRegister)); | 1189 kInterpreterBytecodeOffsetRegister)); |
| 1183 __ ShiftLeftP(ip, r3, Operand(kPointerSizeLog2)); | 1190 __ ShiftLeftP(ip, r3, Operand(kPointerSizeLog2)); |
| 1184 __ LoadP(ip, MemOperand(kInterpreterDispatchTableRegister, ip)); | 1191 __ LoadP(ip, MemOperand(kInterpreterDispatchTableRegister, ip)); |
| 1185 __ Jump(ip); | 1192 __ Jump(ip); |
| 1186 } | 1193 } |
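The last four instructions are the usual table-indexed tail dispatch: read the byte at the current bytecode offset, scale it to a pointer-sized index, load the handler address from the dispatch table, and jump. A standalone sketch of the address computation (illustrative types only):

    #include <cstdint>

    using HandlerEntry = const void*;

    // LoadlB, ShiftLeftP, LoadP: dispatch_table[bytecode] is the handler that
    // the __ Jump(ip) above transfers control to.
    HandlerEntry TargetHandler(const HandlerEntry* dispatch_table,
                               const uint8_t* bytecode_array,
                               intptr_t bytecode_offset) {
      uint8_t bytecode = bytecode_array[bytecode_offset];
      return dispatch_table[bytecode];
    }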
| 1187 | 1194 |
| 1188 static void Generate_InterpreterNotifyDeoptimizedHelper( | |
| 1189 MacroAssembler* masm, Deoptimizer::BailoutType type) { | |
| 1190 // Enter an internal frame. | |
| 1191 { | |
| 1192 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); | |
| 1193 | |
| 1194 // Pass the deoptimization type to the runtime system. | |
| 1195 __ LoadSmiLiteral(r3, Smi::FromInt(static_cast<int>(type))); | |
| 1196 __ Push(r3); | |
| 1197 __ CallRuntime(Runtime::kNotifyDeoptimized); | |
| 1198 // Tear down internal frame. | |
| 1199 } | |
| 1200 | |
| 1201 // Drop state (we don't use these for interpreter deopts) and pop the | |
| 1202 // accumulator value into the accumulator register. | |
| 1203 __ Drop(1); | |
| 1204 __ Pop(kInterpreterAccumulatorRegister); | |
| 1205 | |
| 1206 // Enter the bytecode dispatch. | |
| 1207 Generate_EnterBytecodeDispatch(masm); | |
| 1208 } | |
| 1209 | |
| 1210 void Builtins::Generate_InterpreterNotifyDeoptimized(MacroAssembler* masm) { | |
| 1211 Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::EAGER); | |
| 1212 } | |
| 1213 | |
| 1214 void Builtins::Generate_InterpreterNotifySoftDeoptimized(MacroAssembler* masm) { | |
| 1215 Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::SOFT); | |
| 1216 } | |
| 1217 | |
| 1218 void Builtins::Generate_InterpreterNotifyLazyDeoptimized(MacroAssembler* masm) { | |
| 1219 Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::LAZY); | |
| 1220 } | |
| 1221 | |
| 1222 void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) { | |
| 1223 // Set the address of the interpreter entry trampoline as a return address. | |
| 1224 // This simulates the initial call to bytecode handlers in interpreter entry | |
| 1225 // trampoline. The return will never actually be taken, but our stack walker | |
| 1226 // uses this address to determine whether a frame is interpreted. | |
| 1227 __ mov(r14, | |
| 1228 Operand(masm->isolate()->builtins()->InterpreterEntryTrampoline())); | |
| 1229 | |
| 1230 Generate_EnterBytecodeDispatch(masm); | |
| 1231 } | |
| 1232 | |
| 1233 void Builtins::Generate_CompileLazy(MacroAssembler* masm) { | 1195 void Builtins::Generate_CompileLazy(MacroAssembler* masm) { |
| 1234 // ----------- S t a t e ------------- | 1196 // ----------- S t a t e ------------- |
| 1235 // -- r2 : argument count (preserved for callee) | 1197 // -- r2 : argument count (preserved for callee) |
| 1236 // -- r5 : new target (preserved for callee) | 1198 // -- r5 : new target (preserved for callee) |
| 1237 // -- r3 : target function (preserved for callee) | 1199 // -- r3 : target function (preserved for callee) |
| 1238 // ----------------------------------- | 1200 // ----------------------------------- |
| 1239 // First lookup code, maybe we don't need to compile! | 1201 // First lookup code, maybe we don't need to compile! |
| 1240 Label gotta_call_runtime; | 1202 Label gotta_call_runtime; |
| 1241 Label maybe_call_runtime; | 1203 Label maybe_call_runtime; |
| 1242 Label try_shared; | 1204 Label try_shared; |
| (...skipping 267 matching lines...) |
| 1510 __ LoadSmiLiteral(r2, Smi::FromInt(static_cast<int>(type))); | 1472 __ LoadSmiLiteral(r2, Smi::FromInt(static_cast<int>(type))); |
| 1511 __ push(r2); | 1473 __ push(r2); |
| 1512 __ CallRuntime(Runtime::kNotifyDeoptimized); | 1474 __ CallRuntime(Runtime::kNotifyDeoptimized); |
| 1513 } | 1475 } |
| 1514 | 1476 |
| 1515 // Get the full codegen state from the stack and untag it -> r8. | 1477 // Get the full codegen state from the stack and untag it -> r8. |
| 1516 __ LoadP(r8, MemOperand(sp, 0 * kPointerSize)); | 1478 __ LoadP(r8, MemOperand(sp, 0 * kPointerSize)); |
| 1517 __ SmiUntag(r8); | 1479 __ SmiUntag(r8); |
| 1518 // Switch on the state. | 1480 // Switch on the state. |
| 1519 Label with_tos_register, unknown_state; | 1481 Label with_tos_register, unknown_state; |
| 1520 __ CmpP(r8, Operand(FullCodeGenerator::NO_REGISTERS)); | 1482 __ CmpP(r8, |
| 1483 Operand(static_cast<int>(Deoptimizer::BailoutState::NO_REGISTERS))); |
| 1521 __ bne(&with_tos_register); | 1484 __ bne(&with_tos_register); |
| 1522 __ la(sp, MemOperand(sp, 1 * kPointerSize)); // Remove state. | 1485 __ la(sp, MemOperand(sp, 1 * kPointerSize)); // Remove state. |
| 1523 __ Ret(); | 1486 __ Ret(); |
| 1524 | 1487 |
| 1525 __ bind(&with_tos_register); | 1488 __ bind(&with_tos_register); |
| 1489 DCHECK_EQ(kInterpreterAccumulatorRegister.code(), r2.code()); |
| 1526 __ LoadP(r2, MemOperand(sp, 1 * kPointerSize)); | 1490 __ LoadP(r2, MemOperand(sp, 1 * kPointerSize)); |
| 1527 __ CmpP(r8, Operand(FullCodeGenerator::TOS_REG)); | 1491 __ CmpP(r8, |
| 1492 Operand(static_cast<int>(Deoptimizer::BailoutState::TOS_REGISTER))); |
| 1528 __ bne(&unknown_state); | 1493 __ bne(&unknown_state); |
| 1529 __ la(sp, MemOperand(sp, 2 * kPointerSize)); // Remove state. | 1494 __ la(sp, MemOperand(sp, 2 * kPointerSize)); // Remove state. |
| 1530 __ Ret(); | 1495 __ Ret(); |
| 1531 | 1496 |
| 1532 __ bind(&unknown_state); | 1497 __ bind(&unknown_state); |
| 1533 __ stop("no cases left"); | 1498 __ stop("no cases left"); |
| 1534 } | 1499 } |
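After the runtime call, the helper branches on the (Smi-untagged) bailout state it finds at the top of the stack. A standalone sketch of that decision; the enum values and the pointer model are illustrative, and the generated code works on r8, r2 and sp directly:

    #include <cassert>
    #include <cstdint>

    enum class BailoutState { NO_REGISTERS, TOS_REGISTER };  // illustrative values

    // sp[0]: the untagged state word; sp[1]: the saved top-of-stack value,
    // present only in the TOS_REGISTER case. Returns the adjusted stack pointer.
    uintptr_t* HandleBailoutState(uintptr_t* sp, uintptr_t* accumulator) {
      switch (static_cast<BailoutState>(sp[0])) {
        case BailoutState::NO_REGISTERS:
          return sp + 1;            // drop the state word and return
        case BailoutState::TOS_REGISTER:
          *accumulator = sp[1];     // value goes back into r2, the accumulator
          return sp + 2;            // drop state + value and return
      }
      assert(false && "no cases left");  // mirrors __ stop("no cases left")
      return sp;
    }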
| 1535 | 1500 |
| 1536 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) { | 1501 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) { |
| 1537 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER); | 1502 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER); |
| (...skipping 1286 matching lines...) |
| 2824 __ bkpt(0); | 2789 __ bkpt(0); |
| 2825 } | 2790 } |
| 2826 } | 2791 } |
| 2827 | 2792 |
| 2828 #undef __ | 2793 #undef __ |
| 2829 | 2794 |
| 2830 } // namespace internal | 2795 } // namespace internal |
| 2831 } // namespace v8 | 2796 } // namespace v8 |
| 2832 | 2797 |
| 2833 #endif // V8_TARGET_ARCH_S390 | 2798 #endif // V8_TARGET_ARCH_S390 |