OLD | NEW |
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" | 5 #include "vm/globals.h" |
6 #if defined(TARGET_ARCH_X64) | 6 #if defined(TARGET_ARCH_X64) |
7 | 7 |
8 #include "vm/assembler.h" | 8 #include "vm/assembler.h" |
9 #include "vm/compiler.h" | 9 #include "vm/compiler.h" |
10 #include "vm/dart_entry.h" | 10 #include "vm/dart_entry.h" |
(...skipping 480 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
491 __ popq(RAX); // Restore result. | 491 __ popq(RAX); // Restore result. |
492 } else if (kind == kLazyDeoptFromThrow) { | 492 } else if (kind == kLazyDeoptFromThrow) { |
493 __ popq(RDX); // Restore stacktrace. | 493 __ popq(RDX); // Restore stacktrace. |
494 __ popq(RAX); // Restore exception. | 494 __ popq(RAX); // Restore exception. |
495 } | 495 } |
496 __ LeaveStubFrame(); | 496 __ LeaveStubFrame(); |
497 | 497 |
498 __ popq(RCX); // Pop return address. | 498 __ popq(RCX); // Pop return address. |
499 __ addq(RSP, RBX); // Remove materialization arguments. | 499 __ addq(RSP, RBX); // Remove materialization arguments. |
500 __ pushq(RCX); // Push return address. | 500 __ pushq(RCX); // Push return address. |
501 // The caller is responsible for emitting the return instruction. | 501 __ ret(); |
502 } | 502 } |
503 | 503 |
504 | 504 |
505 // RAX: result, must be preserved | 505 // RAX: result, must be preserved |
506 void StubCode::GenerateDeoptimizeLazyFromReturnStub(Assembler* assembler) { | 506 void StubCode::GenerateDeoptimizeLazyFromReturnStub(Assembler* assembler) { |
507 // Push zap value instead of CODE_REG for lazy deopt. | 507 // Push zap value instead of CODE_REG for lazy deopt. |
508 __ pushq(Immediate(kZapCodeReg)); | 508 __ pushq(Immediate(0xf1f1f1f1)); |
509 // Return address for "call" to deopt stub. | 509 // Return address for "call" to deopt stub. |
510 __ pushq(Immediate(kZapReturnAddress)); | 510 __ pushq(Immediate(0xe1e1e1e1)); |
511 __ movq(CODE_REG, Address(THR, Thread::lazy_deopt_from_return_stub_offset())); | 511 __ movq(CODE_REG, Address(THR, Thread::lazy_deopt_from_return_stub_offset())); |
512 GenerateDeoptimizationSequence(assembler, kLazyDeoptFromReturn); | 512 GenerateDeoptimizationSequence(assembler, kLazyDeoptFromReturn); |
513 __ ret(); | |
514 } | 513 } |
515 | 514 |
516 | 515 |
517 // RAX: exception, must be preserved | 516 // RAX: exception, must be preserved |
518 // RDX: stacktrace, must be preserved | 517 // RDX: stacktrace, must be preserved |
519 void StubCode::GenerateDeoptimizeLazyFromThrowStub(Assembler* assembler) { | 518 void StubCode::GenerateDeoptimizeLazyFromThrowStub(Assembler* assembler) { |
520 // Push zap value instead of CODE_REG for lazy deopt. | 519 // Push zap value instead of CODE_REG for lazy deopt. |
521 __ pushq(Immediate(kZapCodeReg)); | 520 __ pushq(Immediate(0xf1f1f1f1)); |
522 // Return address for "call" to deopt stub. | 521 // Return address for "call" to deopt stub. |
523 __ pushq(Immediate(kZapReturnAddress)); | 522 __ pushq(Immediate(0xe1e1e1e1)); |
524 __ movq(CODE_REG, Address(THR, Thread::lazy_deopt_from_throw_stub_offset())); | 523 __ movq(CODE_REG, Address(THR, Thread::lazy_deopt_from_throw_stub_offset())); |
525 GenerateDeoptimizationSequence(assembler, kLazyDeoptFromThrow); | 524 GenerateDeoptimizationSequence(assembler, kLazyDeoptFromThrow); |
526 __ ret(); | |
527 } | 525 } |
528 | 526 |
529 | 527 |
530 void StubCode::GenerateDeoptimizeStub(Assembler* assembler) { | 528 void StubCode::GenerateDeoptimizeStub(Assembler* assembler) { |
531 GenerateDeoptimizationSequence(assembler, kEagerDeopt); | 529 GenerateDeoptimizationSequence(assembler, kEagerDeopt); |
532 __ ret(); | |
533 } | 530 } |
534 | 531 |
535 | 532 |
536 static void GenerateDispatcherCode(Assembler* assembler, | 533 static void GenerateDispatcherCode(Assembler* assembler, |
537 Label* call_target_function) { | 534 Label* call_target_function) { |
538 __ Comment("NoSuchMethodDispatch"); | 535 __ Comment("NoSuchMethodDispatch"); |
539 // When lazily generated invocation dispatchers are disabled, the | 536 // When lazily generated invocation dispatchers are disabled, the |
540 // miss-handler may return null. | 537 // miss-handler may return null. |
541 __ CompareObject(RAX, Object::null_object()); | 538 __ CompareObject(RAX, Object::null_object()); |
542 __ j(NOT_EQUAL, call_target_function); | 539 __ j(NOT_EQUAL, call_target_function); |
(...skipping 1344 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1887 | 1884 |
1888 // Load the stacktrace from the current thread. | 1885 // Load the stacktrace from the current thread. |
1889 Address stacktrace_addr(THR, Thread::active_stacktrace_offset()); | 1886 Address stacktrace_addr(THR, Thread::active_stacktrace_offset()); |
1890 __ movq(kStackTraceObjectReg, stacktrace_addr); | 1887 __ movq(kStackTraceObjectReg, stacktrace_addr); |
1891 __ movq(stacktrace_addr, Immediate(0)); | 1888 __ movq(stacktrace_addr, Immediate(0)); |
1892 | 1889 |
1893 __ jmp(CallingConventions::kArg1Reg); // Jump to continuation point. | 1890 __ jmp(CallingConventions::kArg1Reg); // Jump to continuation point. |
1894 } | 1891 } |
1895 | 1892 |
1896 | 1893 |
1897 // Deoptimize a frame on the call stack before rewinding. | |
1898 // The arguments are stored in the Thread object. | |
1899 // No result. | |
1900 void StubCode::GenerateDeoptForRewindStub(Assembler* assembler) { | |
1901 // Push zap value instead of CODE_REG. | |
1902 __ pushq(Immediate(kZapCodeReg)); | |
1903 | |
1904 // Push the deopt pc. | |
1905 __ pushq(Address(THR, Thread::resume_pc_offset())); | |
1906 GenerateDeoptimizationSequence(assembler, kEagerDeopt); | |
1907 | |
1908 // After we have deoptimized, jump to the correct frame. | |
1909 __ EnterStubFrame(); | |
1910 __ CallRuntime(kRewindPostDeoptRuntimeEntry, 0); | |
1911 __ LeaveStubFrame(); | |
1912 __ int3(); | |
1913 } | |
1914 | |
1915 | |
1916 // Calls to the runtime to optimize the given function. | 1894 // Calls to the runtime to optimize the given function. |
1917 // RDI: function to be reoptimized. | 1895 // RDI: function to be reoptimized. |
1918 // R10: argument descriptor (preserved). | 1896 // R10: argument descriptor (preserved). |
1919 void StubCode::GenerateOptimizeFunctionStub(Assembler* assembler) { | 1897 void StubCode::GenerateOptimizeFunctionStub(Assembler* assembler) { |
1920 __ EnterStubFrame(); | 1898 __ EnterStubFrame(); |
1921 __ pushq(R10); // Preserve args descriptor. | 1899 __ pushq(R10); // Preserve args descriptor. |
1922 __ pushq(Immediate(0)); // Result slot. | 1900 __ pushq(Immediate(0)); // Result slot. |
1923 __ pushq(RDI); // Arg0: function to optimize | 1901 __ pushq(RDI); // Arg0: function to optimize |
1924 __ CallRuntime(kOptimizeInvokedFunctionRuntimeEntry, 1); | 1902 __ CallRuntime(kOptimizeInvokedFunctionRuntimeEntry, 1); |
 1924 __ popq(RAX); // Discard argument. | 1903 __ popq(RAX); // Discard argument. |
(...skipping 350 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2276 } | 2254 } |
2277 | 2255 |
2278 | 2256 |
2279 void StubCode::GenerateFrameAwaitingMaterializationStub(Assembler* assembler) { | 2257 void StubCode::GenerateFrameAwaitingMaterializationStub(Assembler* assembler) { |
2280 __ int3(); | 2258 __ int3(); |
2281 } | 2259 } |
2282 | 2260 |
2283 } // namespace dart | 2261 } // namespace dart |
2284 | 2262 |
2285 #endif // defined TARGET_ARCH_X64 | 2263 #endif // defined TARGET_ARCH_X64 |
OLD | NEW |