| OLD | NEW |
| 1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 23 matching lines...) Expand all Loading... |
| 34 namespace v8 { | 34 namespace v8 { |
| 35 namespace internal { | 35 namespace internal { |
| 36 | 36 |
| 37 // Forward declaration. | 37 // Forward declaration. |
| 38 class JumpTarget; | 38 class JumpTarget; |
| 39 | 39 |
| 40 // Register at is used for instruction generation. So it is not safe to use it | 40 // Register at is used for instruction generation. So it is not safe to use it |
| 41 // unless we know exactly what we do. | 41 // unless we know exactly what we do. |
| 42 | 42 |
| 43 // Register aliases | 43 // Register aliases |
| 44 // cp is assumed to be a callee saved register. |
| 44 const Register cp = s7; // JavaScript context pointer | 45 const Register cp = s7; // JavaScript context pointer |
| 45 const Register fp = s8_fp; // Alias fp | 46 const Register fp = s8_fp; // Alias fp |
| 46 | 47 |
| 47 enum InvokeJSFlags { | 48 enum InvokeJSFlags { |
| 48 CALL_JS, | 49 CALL_JS, |
| 49 JUMP_JS | 50 JUMP_JS |
| 50 }; | 51 }; |
| 51 | 52 |
| 52 // MacroAssembler implements a collection of frequently used macros. | 53 // MacroAssembler implements a collection of frequently used macros. |
| 53 class MacroAssembler: public Assembler { | 54 class MacroAssembler: public Assembler { |
| (...skipping 41 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 95 | 96 |
| 96 // Emit code to discard a non-negative number of pointer-sized elements | 97 // Emit code to discard a non-negative number of pointer-sized elements |
| 97 // from the stack, clobbering only the sp register. | 98 // from the stack, clobbering only the sp register. |
| 98 void Drop(int count, Condition cond = cc_always); | 99 void Drop(int count, Condition cond = cc_always); |
| 99 | 100 |
| 100 void Call(Label* target); | 101 void Call(Label* target); |
| 101 | 102 |
| 102 // Jump unconditionally to given label. | 103 // Jump unconditionally to given label. |
| 103 // We NEED a nop in the branch delay slot, as it is used by v8, for example | 104 // We NEED a nop in the branch delay slot, as it is used by v8, for example |
| 104 // CodeGenerator::ProcessDeferred(). | 105 // CodeGenerator::ProcessDeferred(). |
| 106 // Currently the branch delay slot is filled by the MacroAssembler. |
| 105 // Use rather b(Label) for code generation. | 107 // Use rather b(Label) for code generation. |
| 106 void jmp(Label* L) { | 108 void jmp(Label* L) { |
| 107 Branch(cc_always, L); | 109 Branch(cc_always, L); |
| 108 nop(); | |
| 109 } | 110 } |
| 110 | 111 |
| 111 // Load an object from the root table. | 112 // Load an object from the root table. |
| 112 void LoadRoot(Register destination, | 113 void LoadRoot(Register destination, |
| 113 Heap::RootListIndex index); | 114 Heap::RootListIndex index); |
| 114 void LoadRoot(Register destination, | 115 void LoadRoot(Register destination, |
| 115 Heap::RootListIndex index, | 116 Heap::RootListIndex index, |
| 116 Condition cond, Register src1, const Operand& src2); | 117 Condition cond, Register src1, const Operand& src2); |
| 117 | 118 |
| 119 // Load an external reference. |
| 120 void LoadExternalReference(Register reg, ExternalReference ext) { |
| 121 li(reg, Operand(ext)); |
| 122 } |
| 123 |
| 118 // Sets the remembered set bit for [address+offset]. | 124 // Sets the remembered set bit for [address+offset]. |
| 119 void RecordWrite(Register object, Register offset, Register scratch); | 125 void RecordWrite(Register object, Register offset, Register scratch); |
| 120 | 126 |
| 121 | 127 |
| 122 // --------------------------------------------------------------------------- | 128 // --------------------------------------------------------------------------- |
| 123 // Instruction macros | 129 // Instruction macros |
| 124 | 130 |
| 125 #define DEFINE_INSTRUCTION(instr) \ | 131 #define DEFINE_INSTRUCTION(instr) \ |
| 126 void instr(Register rd, Register rs, const Operand& rt); \ | 132 void instr(Register rd, Register rs, const Operand& rt); \ |
| 127 void instr(Register rd, Register rs, Register rt) { \ | 133 void instr(Register rd, Register rs, Register rt) { \ |
| (...skipping 56 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 184 void MultiPushReversed(RegList regs); | 190 void MultiPushReversed(RegList regs); |
| 185 void Push(Register src) { | 191 void Push(Register src) { |
| 186 Addu(sp, sp, Operand(-kPointerSize)); | 192 Addu(sp, sp, Operand(-kPointerSize)); |
| 187 sw(src, MemOperand(sp, 0)); | 193 sw(src, MemOperand(sp, 0)); |
| 188 } | 194 } |
| 189 inline void push(Register src) { Push(src); } | 195 inline void push(Register src) { Push(src); } |
| 190 | 196 |
| 191 void Push(Register src, Condition cond, Register tst1, Register tst2) { | 197 void Push(Register src, Condition cond, Register tst1, Register tst2) { |
| 192 // Since we don't have conditional execution we use a Branch. | 198 // Since we don't have conditional execution we use a Branch. |
| 193 Branch(cond, 3, tst1, Operand(tst2)); | 199 Branch(cond, 3, tst1, Operand(tst2)); |
| 194 nop(); | |
| 195 Addu(sp, sp, Operand(-kPointerSize)); | 200 Addu(sp, sp, Operand(-kPointerSize)); |
| 196 sw(src, MemOperand(sp, 0)); | 201 sw(src, MemOperand(sp, 0)); |
| 197 } | 202 } |
| 198 | 203 |
| 199 // Pops multiple values from the stack and load them in the | 204 // Pops multiple values from the stack and load them in the |
| 200 // registers specified in regs. Pop order is the opposite as in MultiPush. | 205 // registers specified in regs. Pop order is the opposite as in MultiPush. |
| 201 void MultiPop(RegList regs); | 206 void MultiPop(RegList regs); |
| 202 void MultiPopReversed(RegList regs); | 207 void MultiPopReversed(RegList regs); |
| 203 void Pop(Register dst) { | 208 void Pop(Register dst) { |
| 204 lw(dst, MemOperand(sp, 0)); | 209 lw(dst, MemOperand(sp, 0)); |
| 205 Addu(sp, sp, Operand(kPointerSize)); | 210 Addu(sp, sp, Operand(kPointerSize)); |
| 206 } | 211 } |
| 207 void Pop() { | 212 void Pop() { |
| 208 Add(sp, sp, Operand(kPointerSize)); | 213 Add(sp, sp, Operand(kPointerSize)); |
| 209 } | 214 } |
| 210 | 215 |
| 211 | 216 |
| 217 // --------------------------------------------------------------------------- |
| 218 // Activation frames |
| 219 |
| 220 void EnterInternalFrame() { EnterFrame(StackFrame::INTERNAL); } |
| 221 void LeaveInternalFrame() { LeaveFrame(StackFrame::INTERNAL); } |
| 222 |
| 223 // Enter specific kind of exit frame; either EXIT or |
| 224 // EXIT_DEBUG. Expects the number of arguments in register a0 and |
| 225 // the builtin function to call in register a1. |
| 226 // On output hold_argc, hold_function, and hold_argv are set up. |
| 227 void EnterExitFrame(ExitFrame::Mode mode, |
| 228 Register hold_argc, |
| 229 Register hold_argv, |
| 230 Register hold_function); |
| 231 |
| 232 // Leave the current exit frame. Expects the return value in v0. |
| 233 void LeaveExitFrame(ExitFrame::Mode mode); |
| 234 |
| 235 // Align the stack by optionally pushing a Smi zero. |
| 236 void AlignStack(int offset); |
| 237 |
| 238 void SetupAlignedCall(Register scratch, int arg_count = 0); |
| 239 void ReturnFromAlignedCall(); |
| 240 |
| 241 |
| 242 // --------------------------------------------------------------------------- |
| 243 // JavaScript invokes |
| 244 |
| 245 // Invoke the JavaScript function code by either calling or jumping. |
| 246 void InvokeCode(Register code, |
| 247 const ParameterCount& expected, |
| 248 const ParameterCount& actual, |
| 249 InvokeFlag flag); |
| 250 |
| 251 void InvokeCode(Handle<Code> code, |
| 252 const ParameterCount& expected, |
| 253 const ParameterCount& actual, |
| 254 RelocInfo::Mode rmode, |
| 255 InvokeFlag flag); |
| 256 |
| 257 // Invoke the JavaScript function in the given register. Changes the |
| 258 // current context to the context in the function before invoking. |
| 259 void InvokeFunction(Register function, |
| 260 const ParameterCount& actual, |
| 261 InvokeFlag flag); |
| 262 |
| 263 |
| 212 #ifdef ENABLE_DEBUGGER_SUPPORT | 264 #ifdef ENABLE_DEBUGGER_SUPPORT |
| 213 // --------------------------------------------------------------------------- | 265 // --------------------------------------------------------------------------- |
| 214 // Debugger Support | 266 // Debugger Support |
| 215 | 267 |
| 216 void SaveRegistersToMemory(RegList regs); | 268 void SaveRegistersToMemory(RegList regs); |
| 217 void RestoreRegistersFromMemory(RegList regs); | 269 void RestoreRegistersFromMemory(RegList regs); |
| 218 void CopyRegistersFromMemoryToStack(Register base, RegList regs); | 270 void CopyRegistersFromMemoryToStack(Register base, RegList regs); |
| 219 void CopyRegistersFromStackToMemory(Register base, | 271 void CopyRegistersFromStackToMemory(Register base, |
| 220 Register scratch, | 272 Register scratch, |
| 221 RegList regs); | 273 RegList regs); |
| 222 void DebugBreak(); | 274 void DebugBreak(); |
| 223 #endif | 275 #endif |
| 224 | 276 |
| 225 | 277 |
| 226 // --------------------------------------------------------------------------- | 278 // --------------------------------------------------------------------------- |
| 227 // Exception handling | 279 // Exception handling |
| 228 | 280 |
| 229 // Push a new try handler and link into try handler chain. | 281 // Push a new try handler and link into try handler chain. |
| 230 // The return address must be passed in register lr. | 282 // The return address must be passed in register ra. |
| 231 // On exit, r0 contains TOS (code slot). | |
| 232 void PushTryHandler(CodeLocation try_location, HandlerType type); | 283 void PushTryHandler(CodeLocation try_location, HandlerType type); |
| 233 | 284 |
| 234 // Unlink the stack handler on top of the stack from the try handler chain. | 285 // Unlink the stack handler on top of the stack from the try handler chain. |
| 235 // Must preserve the result register. | 286 // Must preserve the result register. |
| 236 void PopTryHandler(); | 287 void PopTryHandler(); |
| 237 | 288 |
| 238 | 289 |
| 239 // --------------------------------------------------------------------------- | 290 // --------------------------------------------------------------------------- |
| 240 // Support functions. | 291 // Support functions. |
| 241 | 292 |
| 293 void GetObjectType(Register function, |
| 294 Register map, |
| 295 Register type_reg); |
| 296 |
| 242 inline void BranchOnSmi(Register value, Label* smi_label, | 297 inline void BranchOnSmi(Register value, Label* smi_label, |
| 243 Register scratch = at) { | 298 Register scratch = at) { |
| 244 ASSERT_EQ(0, kSmiTag); | 299 ASSERT_EQ(0, kSmiTag); |
| 245 andi(scratch, value, kSmiTagMask); | 300 andi(scratch, value, kSmiTagMask); |
| 246 Branch(eq, smi_label, scratch, Operand(zero_reg)); | 301 Branch(eq, smi_label, scratch, Operand(zero_reg)); |
| 247 } | 302 } |
| 248 | 303 |
| 249 | 304 |
| 250 inline void BranchOnNotSmi(Register value, Label* not_smi_label, | 305 inline void BranchOnNotSmi(Register value, Label* not_smi_label, |
| 251 Register scratch = at) { | 306 Register scratch = at) { |
| 252 ASSERT_EQ(0, kSmiTag); | 307 ASSERT_EQ(0, kSmiTag); |
| 253 andi(scratch, value, kSmiTagMask); | 308 andi(scratch, value, kSmiTagMask); |
| 254 Branch(ne, not_smi_label, scratch, Operand(zero_reg)); | 309 Branch(ne, not_smi_label, scratch, Operand(zero_reg)); |
| 255 } | 310 } |
| 256 | 311 |
| 312 void CallBuiltin(ExternalReference builtin_entry); |
| 313 void CallBuiltin(Register target); |
| 314 void JumpToBuiltin(ExternalReference builtin_entry); |
| 315 void JumpToBuiltin(Register target); |
| 316 |
| 317 // Generates code for reporting that an illegal operation has |
| 318 // occurred. |
| 319 void IllegalOperation(int num_arguments); |
| 320 |
| 257 | 321 |
| 258 // --------------------------------------------------------------------------- | 322 // --------------------------------------------------------------------------- |
| 259 // Runtime calls | 323 // Runtime calls |
| 260 | 324 |
| 261 // Call a code stub. | 325 // Call a code stub. |
| 262 void CallStub(CodeStub* stub, Condition cond = cc_always, | 326 void CallStub(CodeStub* stub, Condition cond = cc_always, |
| 263 Register r1 = zero_reg, const Operand& r2 = Operand(zero_reg)); | 327 Register r1 = zero_reg, const Operand& r2 = Operand(zero_reg)); |
| 264 void CallJSExitStub(CodeStub* stub); | 328 void CallJSExitStub(CodeStub* stub); |
| 265 | 329 |
| 266 // Return from a code stub after popping its arguments. | 330 // Return from a code stub after popping its arguments. |
| (...skipping 68 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 335 // Print a message to stdout and abort execution. | 399 // Print a message to stdout and abort execution. |
| 336 void Abort(const char* msg); | 400 void Abort(const char* msg); |
| 337 | 401 |
| 338 // Verify restrictions about code generated in stubs. | 402 // Verify restrictions about code generated in stubs. |
| 339 void set_generating_stub(bool value) { generating_stub_ = value; } | 403 void set_generating_stub(bool value) { generating_stub_ = value; } |
| 340 bool generating_stub() { return generating_stub_; } | 404 bool generating_stub() { return generating_stub_; } |
| 341 void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; } | 405 void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; } |
| 342 bool allow_stub_calls() { return allow_stub_calls_; } | 406 bool allow_stub_calls() { return allow_stub_calls_; } |
| 343 | 407 |
| 344 private: | 408 private: |
| 409 List<Unresolved> unresolved_; |
| 410 bool generating_stub_; |
| 411 bool allow_stub_calls_; |
| 412 // This handle will be patched with the code object on installation. |
| 413 Handle<Object> code_object_; |
| 414 |
| 345 void Jump(intptr_t target, RelocInfo::Mode rmode, Condition cond = cc_always, | 415 void Jump(intptr_t target, RelocInfo::Mode rmode, Condition cond = cc_always, |
| 346 Register r1 = zero_reg, const Operand& r2 = Operand(zero_reg)); | 416 Register r1 = zero_reg, const Operand& r2 = Operand(zero_reg)); |
| 347 void Call(intptr_t target, RelocInfo::Mode rmode, Condition cond = cc_always, | 417 void Call(intptr_t target, RelocInfo::Mode rmode, Condition cond = cc_always, |
| 348 Register r1 = zero_reg, const Operand& r2 = Operand(zero_reg)); | 418 Register r1 = zero_reg, const Operand& r2 = Operand(zero_reg)); |
| 349 | 419 |
| 420 // Helper functions for generating invokes. |
| 421 void InvokePrologue(const ParameterCount& expected, |
| 422 const ParameterCount& actual, |
| 423 Handle<Code> code_constant, |
| 424 Register code_reg, |
| 425 Label* done, |
| 426 InvokeFlag flag); |
| 427 |
| 350 // Get the code for the given builtin. Returns if able to resolve | 428 // Get the code for the given builtin. Returns if able to resolve |
| 351 // the function in the 'resolved' flag. | 429 // the function in the 'resolved' flag. |
| 352 Handle<Code> ResolveBuiltin(Builtins::JavaScript id, bool* resolved); | 430 Handle<Code> ResolveBuiltin(Builtins::JavaScript id, bool* resolved); |
| 353 | 431 |
| 354 List<Unresolved> unresolved_; | 432 // Activation support. |
| 355 bool generating_stub_; | 433 // EnterFrame clobbers t0 and t1. |
| 356 bool allow_stub_calls_; | 434 void EnterFrame(StackFrame::Type type); |
| 357 // This handle will be patched with the code object on installation. | 435 void LeaveFrame(StackFrame::Type type); |
| 358 Handle<Object> code_object_; | |
| 359 }; | 436 }; |
| 360 | 437 |
| 361 | 438 |
| 362 // ----------------------------------------------------------------------------- | 439 // ----------------------------------------------------------------------------- |
| 363 // Static helper functions. | 440 // Static helper functions. |
| 364 | 441 |
| 365 // Generate a MemOperand for loading a field from an object. | 442 // Generate a MemOperand for loading a field from an object. |
| 366 static inline MemOperand FieldMemOperand(Register object, int offset) { | 443 static inline MemOperand FieldMemOperand(Register object, int offset) { |
| 367 return MemOperand(object, offset - kHeapObjectTag); | 444 return MemOperand(object, offset - kHeapObjectTag); |
| 368 } | 445 } |
| 369 | 446 |
| 370 | 447 |
| 371 | 448 |
| 372 #ifdef GENERATED_CODE_COVERAGE | 449 #ifdef GENERATED_CODE_COVERAGE |
| 373 #define CODE_COVERAGE_STRINGIFY(x) #x | 450 #define CODE_COVERAGE_STRINGIFY(x) #x |
| 374 #define CODE_COVERAGE_TOSTRING(x) CODE_COVERAGE_STRINGIFY(x) | 451 #define CODE_COVERAGE_TOSTRING(x) CODE_COVERAGE_STRINGIFY(x) |
| 375 #define __FILE_LINE__ __FILE__ ":" CODE_COVERAGE_TOSTRING(__LINE__) | 452 #define __FILE_LINE__ __FILE__ ":" CODE_COVERAGE_TOSTRING(__LINE__) |
| 376 #define ACCESS_MASM(masm) masm->stop(__FILE_LINE__); masm-> | 453 #define ACCESS_MASM(masm) masm->stop(__FILE_LINE__); masm-> |
| 377 #else | 454 #else |
| 378 #define ACCESS_MASM(masm) masm-> | 455 #define ACCESS_MASM(masm) masm-> |
| 379 #endif | 456 #endif |
| 380 | 457 |
| 381 } } // namespace v8::internal | 458 } } // namespace v8::internal |
| 382 | 459 |
| 383 #endif // V8_MIPS_MACRO_ASSEMBLER_MIPS_H_ | 460 #endif // V8_MIPS_MACRO_ASSEMBLER_MIPS_H_ |
| 384 | 461 |
| OLD | NEW |