OLD | NEW |
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" | 5 #include "vm/globals.h" |
6 #if defined(TARGET_ARCH_X64) | 6 #if defined(TARGET_ARCH_X64) |
7 | 7 |
8 #include "vm/assembler.h" | 8 #include "vm/assembler.h" |
9 #include "vm/compiler.h" | 9 #include "vm/compiler.h" |
10 #include "vm/dart_entry.h" | 10 #include "vm/dart_entry.h" |
(...skipping 480 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
491 __ popq(RAX); // Restore result. | 491 __ popq(RAX); // Restore result. |
492 } else if (kind == kLazyDeoptFromThrow) { | 492 } else if (kind == kLazyDeoptFromThrow) { |
493 __ popq(RDX); // Restore stacktrace. | 493 __ popq(RDX); // Restore stacktrace. |
494 __ popq(RAX); // Restore exception. | 494 __ popq(RAX); // Restore exception. |
495 } | 495 } |
496 __ LeaveStubFrame(); | 496 __ LeaveStubFrame(); |
497 | 497 |
498 __ popq(RCX); // Pop return address. | 498 __ popq(RCX); // Pop return address. |
499 __ addq(RSP, RBX); // Remove materialization arguments. | 499 __ addq(RSP, RBX); // Remove materialization arguments. |
500 __ pushq(RCX); // Push return address. | 500 __ pushq(RCX); // Push return address. |
501 __ ret(); | 501 // The caller is responsible for emitting the return instruction. |
502 } | 502 } |
503 | 503 |
504 | 504 |
505 // RAX: result, must be preserved | 505 // RAX: result, must be preserved |
506 void StubCode::GenerateDeoptimizeLazyFromReturnStub(Assembler* assembler) { | 506 void StubCode::GenerateDeoptimizeLazyFromReturnStub(Assembler* assembler) { |
507 // Push zap value instead of CODE_REG for lazy deopt. | 507 // Push zap value instead of CODE_REG for lazy deopt. |
508 __ pushq(Immediate(0xf1f1f1f1)); | 508 __ pushq(Immediate(kZapCodeReg)); |
509 // Return address for "call" to deopt stub. | 509 // Return address for "call" to deopt stub. |
510 __ pushq(Immediate(0xe1e1e1e1)); | 510 __ pushq(Immediate(kZapReturnAddress)); |
511 __ movq(CODE_REG, Address(THR, Thread::lazy_deopt_from_return_stub_offset())); | 511 __ movq(CODE_REG, Address(THR, Thread::lazy_deopt_from_return_stub_offset())); |
512 GenerateDeoptimizationSequence(assembler, kLazyDeoptFromReturn); | 512 GenerateDeoptimizationSequence(assembler, kLazyDeoptFromReturn); |
| 513 __ ret(); |
513 } | 514 } |
514 | 515 |
515 | 516 |
516 // RAX: exception, must be preserved | 517 // RAX: exception, must be preserved |
517 // RDX: stacktrace, must be preserved | 518 // RDX: stacktrace, must be preserved |
518 void StubCode::GenerateDeoptimizeLazyFromThrowStub(Assembler* assembler) { | 519 void StubCode::GenerateDeoptimizeLazyFromThrowStub(Assembler* assembler) { |
519 // Push zap value instead of CODE_REG for lazy deopt. | 520 // Push zap value instead of CODE_REG for lazy deopt. |
520 __ pushq(Immediate(0xf1f1f1f1)); | 521 __ pushq(Immediate(kZapCodeReg)); |
521 // Return address for "call" to deopt stub. | 522 // Return address for "call" to deopt stub. |
522 __ pushq(Immediate(0xe1e1e1e1)); | 523 __ pushq(Immediate(kZapReturnAddress)); |
523 __ movq(CODE_REG, Address(THR, Thread::lazy_deopt_from_throw_stub_offset())); | 524 __ movq(CODE_REG, Address(THR, Thread::lazy_deopt_from_throw_stub_offset())); |
524 GenerateDeoptimizationSequence(assembler, kLazyDeoptFromThrow); | 525 GenerateDeoptimizationSequence(assembler, kLazyDeoptFromThrow); |
| 526 __ ret(); |
525 } | 527 } |
526 | 528 |
527 | 529 |
528 void StubCode::GenerateDeoptimizeStub(Assembler* assembler) { | 530 void StubCode::GenerateDeoptimizeStub(Assembler* assembler) { |
529 GenerateDeoptimizationSequence(assembler, kEagerDeopt); | 531 GenerateDeoptimizationSequence(assembler, kEagerDeopt); |
| 532 __ ret(); |
530 } | 533 } |
531 | 534 |
532 | 535 |
533 static void GenerateDispatcherCode(Assembler* assembler, | 536 static void GenerateDispatcherCode(Assembler* assembler, |
534 Label* call_target_function) { | 537 Label* call_target_function) { |
535 __ Comment("NoSuchMethodDispatch"); | 538 __ Comment("NoSuchMethodDispatch"); |
536 // When lazily generated invocation dispatchers are disabled, the | 539 // When lazily generated invocation dispatchers are disabled, the |
537 // miss-handler may return null. | 540 // miss-handler may return null. |
538 __ CompareObject(RAX, Object::null_object()); | 541 __ CompareObject(RAX, Object::null_object()); |
539 __ j(NOT_EQUAL, call_target_function); | 542 __ j(NOT_EQUAL, call_target_function); |
(...skipping 1344 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1884 | 1887 |
1885 // Load the stacktrace from the current thread. | 1888 // Load the stacktrace from the current thread. |
1886 Address stacktrace_addr(THR, Thread::active_stacktrace_offset()); | 1889 Address stacktrace_addr(THR, Thread::active_stacktrace_offset()); |
1887 __ movq(kStackTraceObjectReg, stacktrace_addr); | 1890 __ movq(kStackTraceObjectReg, stacktrace_addr); |
1888 __ movq(stacktrace_addr, Immediate(0)); | 1891 __ movq(stacktrace_addr, Immediate(0)); |
1889 | 1892 |
1890 __ jmp(CallingConventions::kArg1Reg); // Jump to continuation point. | 1893 __ jmp(CallingConventions::kArg1Reg); // Jump to continuation point. |
1891 } | 1894 } |
1892 | 1895 |
1893 | 1896 |
| 1897 // Deoptimize a frame on the call stack before rewinding. |
| 1898 // The arguments are stored in the Thread object. |
| 1899 // No result. |
| 1900 void StubCode::GenerateDeoptForRewindStub(Assembler* assembler) { |
| 1901 // Push zap value instead of CODE_REG. |
| 1902 __ pushq(Immediate(kZapCodeReg)); |
| 1903 |
| 1904 // Push the deopt pc. |
| 1905 __ pushq(Address(THR, Thread::resume_pc_offset())); |
| 1906 GenerateDeoptimizationSequence(assembler, kEagerDeopt); |
| 1907 |
| 1908 // After we have deoptimized, jump to the correct frame. |
| 1909 __ EnterStubFrame(); |
| 1910 __ CallRuntime(kRewindPostDeoptRuntimeEntry, 0); |
| 1911 __ LeaveStubFrame(); |
| 1912 __ int3(); |
| 1913 } |
| 1914 |
| 1915 |
1894 // Calls to the runtime to optimize the given function. | 1916 // Calls to the runtime to optimize the given function. |
1895 // RDI: function to be reoptimized. | 1917 // RDI: function to be reoptimized. |
1896 // R10: argument descriptor (preserved). | 1918 // R10: argument descriptor (preserved). |
1897 void StubCode::GenerateOptimizeFunctionStub(Assembler* assembler) { | 1919 void StubCode::GenerateOptimizeFunctionStub(Assembler* assembler) { |
1898 __ EnterStubFrame(); | 1920 __ EnterStubFrame(); |
1899 __ pushq(R10); // Preserve args descriptor. | 1921 __ pushq(R10); // Preserve args descriptor. |
1900 __ pushq(Immediate(0)); // Result slot. | 1922 __ pushq(Immediate(0)); // Result slot. |
1901 __ pushq(RDI); // Arg0: function to optimize | 1923 __ pushq(RDI); // Arg0: function to optimize |
1902 __ CallRuntime(kOptimizeInvokedFunctionRuntimeEntry, 1); | 1924 __ CallRuntime(kOptimizeInvokedFunctionRuntimeEntry, 1); |
1903 __ popq(RAX); // Discard argument. | 1925 __ popq(RAX); // Discard argument. |
(...skipping 350 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2254 } | 2276 } |
2255 | 2277 |
2256 | 2278 |
2257 void StubCode::GenerateFrameAwaitingMaterializationStub(Assembler* assembler) { | 2279 void StubCode::GenerateFrameAwaitingMaterializationStub(Assembler* assembler) { |
2258 __ int3(); | 2280 __ int3(); |
2259 } | 2281 } |
2260 | 2282 |
2261 } // namespace dart | 2283 } // namespace dart |
2262 | 2284 |
2263 #endif // defined TARGET_ARCH_X64 | 2285 #endif // defined TARGET_ARCH_X64 |
OLD | NEW |