Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(40)

Side by Side Diff: runtime/vm/stub_code_x64.cc

Issue 2974233002: VM: Re-format to use at most one newline between functions (Closed)
Patch Set: Rebase and merge Created 3 years, 5 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « runtime/vm/stub_code_ia32_test.cc ('k') | runtime/vm/stub_code_x64_test.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
2 // for details. All rights reserved. Use of this source code is governed by a 2 // for details. All rights reserved. Use of this source code is governed by a
3 // BSD-style license that can be found in the LICENSE file. 3 // BSD-style license that can be found in the LICENSE file.
4 4
5 #include "vm/globals.h" 5 #include "vm/globals.h"
6 #if defined(TARGET_ARCH_X64) 6 #if defined(TARGET_ARCH_X64)
7 7
8 #include "vm/assembler.h" 8 #include "vm/assembler.h"
9 #include "vm/compiler.h" 9 #include "vm/compiler.h"
10 #include "vm/dart_entry.h" 10 #include "vm/dart_entry.h"
(...skipping 78 matching lines...) Expand 10 before | Expand all | Expand 10 after
89 // Mark that the thread is executing Dart code. 89 // Mark that the thread is executing Dart code.
90 __ movq(Assembler::VMTagAddress(), Immediate(VMTag::kDartTagId)); 90 __ movq(Assembler::VMTagAddress(), Immediate(VMTag::kDartTagId));
91 91
92 // Reset exit frame information in Isolate structure. 92 // Reset exit frame information in Isolate structure.
93 __ movq(Address(THR, Thread::top_exit_frame_info_offset()), Immediate(0)); 93 __ movq(Address(THR, Thread::top_exit_frame_info_offset()), Immediate(0));
94 94
95 __ LeaveStubFrame(); 95 __ LeaveStubFrame();
96 __ ret(); 96 __ ret();
97 } 97 }
98 98
99
100 // Print the stop message. 99 // Print the stop message.
101 DEFINE_LEAF_RUNTIME_ENTRY(void, PrintStopMessage, 1, const char* message) { 100 DEFINE_LEAF_RUNTIME_ENTRY(void, PrintStopMessage, 1, const char* message) {
102 OS::Print("Stop message: %s\n", message); 101 OS::Print("Stop message: %s\n", message);
103 } 102 }
104 END_LEAF_RUNTIME_ENTRY 103 END_LEAF_RUNTIME_ENTRY
105 104
106
107 // Input parameters: 105 // Input parameters:
108 // RSP : points to return address. 106 // RSP : points to return address.
109 // RDI : stop message (const char*). 107 // RDI : stop message (const char*).
110 // Must preserve all registers. 108 // Must preserve all registers.
111 void StubCode::GeneratePrintStopMessageStub(Assembler* assembler) { 109 void StubCode::GeneratePrintStopMessageStub(Assembler* assembler) {
112 __ EnterCallRuntimeFrame(0); 110 __ EnterCallRuntimeFrame(0);
113 // Call the runtime leaf function. RDI already contains the parameter. 111 // Call the runtime leaf function. RDI already contains the parameter.
114 #if defined(_WIN64) 112 #if defined(_WIN64)
115 __ movq(CallingConventions::kArg1Reg, RDI); 113 __ movq(CallingConventions::kArg1Reg, RDI);
116 #endif 114 #endif
117 __ CallRuntime(kPrintStopMessageRuntimeEntry, 1); 115 __ CallRuntime(kPrintStopMessageRuntimeEntry, 1);
118 __ LeaveCallRuntimeFrame(); 116 __ LeaveCallRuntimeFrame();
119 __ ret(); 117 __ ret();
120 } 118 }
121 119
122
123 // Input parameters: 120 // Input parameters:
124 // RSP : points to return address. 121 // RSP : points to return address.
125 // RSP + 8 : address of return value. 122 // RSP + 8 : address of return value.
126 // RAX : address of first argument in argument array. 123 // RAX : address of first argument in argument array.
127 // RBX : address of the native function to call. 124 // RBX : address of the native function to call.
128 // R10 : argc_tag including number of arguments and function kind. 125 // R10 : argc_tag including number of arguments and function kind.
129 static void GenerateCallNativeWithWrapperStub(Assembler* assembler, 126 static void GenerateCallNativeWithWrapperStub(Assembler* assembler,
130 Address wrapper_address) { 127 Address wrapper_address) {
131 const intptr_t native_args_struct_offset = 0; 128 const intptr_t native_args_struct_offset = 0;
132 const intptr_t thread_offset = 129 const intptr_t thread_offset =
(...skipping 52 matching lines...) Expand 10 before | Expand all | Expand 10 after
185 // Mark that the thread is executing Dart code. 182 // Mark that the thread is executing Dart code.
186 __ movq(Assembler::VMTagAddress(), Immediate(VMTag::kDartTagId)); 183 __ movq(Assembler::VMTagAddress(), Immediate(VMTag::kDartTagId));
187 184
188 // Reset exit frame information in Isolate structure. 185 // Reset exit frame information in Isolate structure.
189 __ movq(Address(THR, Thread::top_exit_frame_info_offset()), Immediate(0)); 186 __ movq(Address(THR, Thread::top_exit_frame_info_offset()), Immediate(0));
190 187
191 __ LeaveStubFrame(); 188 __ LeaveStubFrame();
192 __ ret(); 189 __ ret();
193 } 190 }
194 191
195
196 void StubCode::GenerateCallNoScopeNativeStub(Assembler* assembler) { 192 void StubCode::GenerateCallNoScopeNativeStub(Assembler* assembler) {
197 GenerateCallNativeWithWrapperStub( 193 GenerateCallNativeWithWrapperStub(
198 assembler, 194 assembler,
199 Address(THR, Thread::no_scope_native_wrapper_entry_point_offset())); 195 Address(THR, Thread::no_scope_native_wrapper_entry_point_offset()));
200 } 196 }
201 197
202
203 void StubCode::GenerateCallAutoScopeNativeStub(Assembler* assembler) { 198 void StubCode::GenerateCallAutoScopeNativeStub(Assembler* assembler) {
204 GenerateCallNativeWithWrapperStub( 199 GenerateCallNativeWithWrapperStub(
205 assembler, 200 assembler,
206 Address(THR, Thread::auto_scope_native_wrapper_entry_point_offset())); 201 Address(THR, Thread::auto_scope_native_wrapper_entry_point_offset()));
207 } 202 }
208 203
209
210 // Input parameters: 204 // Input parameters:
211 // RSP : points to return address. 205 // RSP : points to return address.
212 // RSP + 8 : address of return value. 206 // RSP + 8 : address of return value.
213 // RAX : address of first argument in argument array. 207 // RAX : address of first argument in argument array.
214 // RBX : address of the native function to call. 208 // RBX : address of the native function to call.
215 // R10 : argc_tag including number of arguments and function kind. 209 // R10 : argc_tag including number of arguments and function kind.
216 void StubCode::GenerateCallBootstrapNativeStub(Assembler* assembler) { 210 void StubCode::GenerateCallBootstrapNativeStub(Assembler* assembler) {
217 const intptr_t native_args_struct_offset = 0; 211 const intptr_t native_args_struct_offset = 0;
218 const intptr_t thread_offset = 212 const intptr_t thread_offset =
219 NativeArguments::thread_offset() + native_args_struct_offset; 213 NativeArguments::thread_offset() + native_args_struct_offset;
(...skipping 47 matching lines...) Expand 10 before | Expand all | Expand 10 after
267 // Mark that the thread is executing Dart code. 261 // Mark that the thread is executing Dart code.
268 __ movq(Assembler::VMTagAddress(), Immediate(VMTag::kDartTagId)); 262 __ movq(Assembler::VMTagAddress(), Immediate(VMTag::kDartTagId));
269 263
270 // Reset exit frame information in Isolate structure. 264 // Reset exit frame information in Isolate structure.
271 __ movq(Address(THR, Thread::top_exit_frame_info_offset()), Immediate(0)); 265 __ movq(Address(THR, Thread::top_exit_frame_info_offset()), Immediate(0));
272 266
273 __ LeaveStubFrame(); 267 __ LeaveStubFrame();
274 __ ret(); 268 __ ret();
275 } 269 }
276 270
277
278 // Input parameters: 271 // Input parameters:
279 // R10: arguments descriptor array. 272 // R10: arguments descriptor array.
280 void StubCode::GenerateCallStaticFunctionStub(Assembler* assembler) { 273 void StubCode::GenerateCallStaticFunctionStub(Assembler* assembler) {
281 __ EnterStubFrame(); 274 __ EnterStubFrame();
282 __ pushq(R10); // Preserve arguments descriptor array. 275 __ pushq(R10); // Preserve arguments descriptor array.
283 // Setup space on stack for return value. 276 // Setup space on stack for return value.
284 __ pushq(Immediate(0)); 277 __ pushq(Immediate(0));
285 __ CallRuntime(kPatchStaticCallRuntimeEntry, 0); 278 __ CallRuntime(kPatchStaticCallRuntimeEntry, 0);
286 __ popq(CODE_REG); // Get Code object result. 279 __ popq(CODE_REG); // Get Code object result.
287 __ popq(R10); // Restore arguments descriptor array. 280 __ popq(R10); // Restore arguments descriptor array.
288 // Remove the stub frame as we are about to jump to the dart function. 281 // Remove the stub frame as we are about to jump to the dart function.
289 __ LeaveStubFrame(); 282 __ LeaveStubFrame();
290 283
291 __ movq(RBX, FieldAddress(CODE_REG, Code::entry_point_offset())); 284 __ movq(RBX, FieldAddress(CODE_REG, Code::entry_point_offset()));
292 __ jmp(RBX); 285 __ jmp(RBX);
293 } 286 }
294 287
295
296 // Called from a static call only when an invalid code has been entered 288 // Called from a static call only when an invalid code has been entered
297 // (invalid because its function was optimized or deoptimized). 289 // (invalid because its function was optimized or deoptimized).
298 // R10: arguments descriptor array. 290 // R10: arguments descriptor array.
299 void StubCode::GenerateFixCallersTargetStub(Assembler* assembler) { 291 void StubCode::GenerateFixCallersTargetStub(Assembler* assembler) {
300 // Load code pointer to this stub from the thread: 292 // Load code pointer to this stub from the thread:
301 // The one that is passed in, is not correct - it points to the code object 293 // The one that is passed in, is not correct - it points to the code object
302 // that needs to be replaced. 294 // that needs to be replaced.
303 __ movq(CODE_REG, Address(THR, Thread::fix_callers_target_code_offset())); 295 __ movq(CODE_REG, Address(THR, Thread::fix_callers_target_code_offset()));
304 __ EnterStubFrame(); 296 __ EnterStubFrame();
305 __ pushq(R10); // Preserve arguments descriptor array. 297 __ pushq(R10); // Preserve arguments descriptor array.
306 // Setup space on stack for return value. 298 // Setup space on stack for return value.
307 __ pushq(Immediate(0)); 299 __ pushq(Immediate(0));
308 __ CallRuntime(kFixCallersTargetRuntimeEntry, 0); 300 __ CallRuntime(kFixCallersTargetRuntimeEntry, 0);
309 __ popq(CODE_REG); // Get Code object. 301 __ popq(CODE_REG); // Get Code object.
310 __ popq(R10); // Restore arguments descriptor array. 302 __ popq(R10); // Restore arguments descriptor array.
311 __ movq(RAX, FieldAddress(CODE_REG, Code::entry_point_offset())); 303 __ movq(RAX, FieldAddress(CODE_REG, Code::entry_point_offset()));
312 __ LeaveStubFrame(); 304 __ LeaveStubFrame();
313 __ jmp(RAX); 305 __ jmp(RAX);
314 __ int3(); 306 __ int3();
315 } 307 }
316 308
317
318 // Called from object allocate instruction when the allocation stub has been 309 // Called from object allocate instruction when the allocation stub has been
319 // disabled. 310 // disabled.
320 void StubCode::GenerateFixAllocationStubTargetStub(Assembler* assembler) { 311 void StubCode::GenerateFixAllocationStubTargetStub(Assembler* assembler) {
321 // Load code pointer to this stub from the thread: 312 // Load code pointer to this stub from the thread:
322 // The one that is passed in, is not correct - it points to the code object 313 // The one that is passed in, is not correct - it points to the code object
323 // that needs to be replaced. 314 // that needs to be replaced.
324 __ movq(CODE_REG, Address(THR, Thread::fix_allocation_stub_code_offset())); 315 __ movq(CODE_REG, Address(THR, Thread::fix_allocation_stub_code_offset()));
325 __ EnterStubFrame(); 316 __ EnterStubFrame();
326 // Setup space on stack for return value. 317 // Setup space on stack for return value.
327 __ pushq(Immediate(0)); 318 __ pushq(Immediate(0));
328 __ CallRuntime(kFixAllocationStubTargetRuntimeEntry, 0); 319 __ CallRuntime(kFixAllocationStubTargetRuntimeEntry, 0);
329 __ popq(CODE_REG); // Get Code object. 320 __ popq(CODE_REG); // Get Code object.
330 __ movq(RAX, FieldAddress(CODE_REG, Code::entry_point_offset())); 321 __ movq(RAX, FieldAddress(CODE_REG, Code::entry_point_offset()));
331 __ LeaveStubFrame(); 322 __ LeaveStubFrame();
332 __ jmp(RAX); 323 __ jmp(RAX);
333 __ int3(); 324 __ int3();
334 } 325 }
335 326
336
337 // Input parameters: 327 // Input parameters:
338 // R10: smi-tagged argument count, may be zero. 328 // R10: smi-tagged argument count, may be zero.
339 // RBP[kParamEndSlotFromFp + 1]: last argument. 329 // RBP[kParamEndSlotFromFp + 1]: last argument.
340 static void PushArgumentsArray(Assembler* assembler) { 330 static void PushArgumentsArray(Assembler* assembler) {
341 __ LoadObject(R12, Object::null_object()); 331 __ LoadObject(R12, Object::null_object());
342 // Allocate array to store arguments of caller. 332 // Allocate array to store arguments of caller.
343 __ movq(RBX, R12); // Null element type for raw Array. 333 __ movq(RBX, R12); // Null element type for raw Array.
344 __ Call(*StubCode::AllocateArray_entry()); 334 __ Call(*StubCode::AllocateArray_entry());
345 __ SmiUntag(R10); 335 __ SmiUntag(R10);
346 // RAX: newly allocated array. 336 // RAX: newly allocated array.
(...skipping 14 matching lines...) Expand all
361 __ movq(RDI, Address(R12, 0)); 351 __ movq(RDI, Address(R12, 0));
362 // Generational barrier is needed, array is not necessarily in new space. 352 // Generational barrier is needed, array is not necessarily in new space.
363 __ StoreIntoObject(RAX, Address(RBX, 0), RDI); 353 __ StoreIntoObject(RAX, Address(RBX, 0), RDI);
364 __ addq(RBX, Immediate(kWordSize)); 354 __ addq(RBX, Immediate(kWordSize));
365 __ subq(R12, Immediate(kWordSize)); 355 __ subq(R12, Immediate(kWordSize));
366 __ Bind(&loop_condition); 356 __ Bind(&loop_condition);
367 __ decq(R10); 357 __ decq(R10);
368 __ j(POSITIVE, &loop, Assembler::kNearJump); 358 __ j(POSITIVE, &loop, Assembler::kNearJump);
369 } 359 }
370 360
371
372 // Used by eager and lazy deoptimization. Preserve result in RAX if necessary. 361 // Used by eager and lazy deoptimization. Preserve result in RAX if necessary.
373 // This stub translates optimized frame into unoptimized frame. The optimized 362 // This stub translates optimized frame into unoptimized frame. The optimized
374 // frame can contain values in registers and on stack, the unoptimized 363 // frame can contain values in registers and on stack, the unoptimized
375 // frame contains all values on stack. 364 // frame contains all values on stack.
376 // Deoptimization occurs in following steps: 365 // Deoptimization occurs in following steps:
377 // - Push all registers that can contain values. 366 // - Push all registers that can contain values.
378 // - Call C routine to copy the stack and saved registers into temporary buffer. 367 // - Call C routine to copy the stack and saved registers into temporary buffer.
379 // - Adjust caller's frame to correct unoptimized frame size. 368 // - Adjust caller's frame to correct unoptimized frame size.
380 // - Fill the unoptimized frame. 369 // - Fill the unoptimized frame.
381 // - Materialize objects that require allocation (e.g. Double instances). 370 // - Materialize objects that require allocation (e.g. Double instances).
(...skipping 127 matching lines...) Expand 10 before | Expand all | Expand 10 after
509 __ popq(RAX); // Restore exception. 498 __ popq(RAX); // Restore exception.
510 } 499 }
511 __ LeaveStubFrame(); 500 __ LeaveStubFrame();
512 501
513 __ popq(RCX); // Pop return address. 502 __ popq(RCX); // Pop return address.
514 __ addq(RSP, RBX); // Remove materialization arguments. 503 __ addq(RSP, RBX); // Remove materialization arguments.
515 __ pushq(RCX); // Push return address. 504 __ pushq(RCX); // Push return address.
516 // The caller is responsible for emitting the return instruction. 505 // The caller is responsible for emitting the return instruction.
517 } 506 }
518 507
519
520 // RAX: result, must be preserved 508 // RAX: result, must be preserved
521 void StubCode::GenerateDeoptimizeLazyFromReturnStub(Assembler* assembler) { 509 void StubCode::GenerateDeoptimizeLazyFromReturnStub(Assembler* assembler) {
522 // Push zap value instead of CODE_REG for lazy deopt. 510 // Push zap value instead of CODE_REG for lazy deopt.
523 __ pushq(Immediate(kZapCodeReg)); 511 __ pushq(Immediate(kZapCodeReg));
524 // Return address for "call" to deopt stub. 512 // Return address for "call" to deopt stub.
525 __ pushq(Immediate(kZapReturnAddress)); 513 __ pushq(Immediate(kZapReturnAddress));
526 __ movq(CODE_REG, Address(THR, Thread::lazy_deopt_from_return_stub_offset())); 514 __ movq(CODE_REG, Address(THR, Thread::lazy_deopt_from_return_stub_offset()));
527 GenerateDeoptimizationSequence(assembler, kLazyDeoptFromReturn); 515 GenerateDeoptimizationSequence(assembler, kLazyDeoptFromReturn);
528 __ ret(); 516 __ ret();
529 } 517 }
530 518
531
532 // RAX: exception, must be preserved 519 // RAX: exception, must be preserved
533 // RDX: stacktrace, must be preserved 520 // RDX: stacktrace, must be preserved
534 void StubCode::GenerateDeoptimizeLazyFromThrowStub(Assembler* assembler) { 521 void StubCode::GenerateDeoptimizeLazyFromThrowStub(Assembler* assembler) {
535 // Push zap value instead of CODE_REG for lazy deopt. 522 // Push zap value instead of CODE_REG for lazy deopt.
536 __ pushq(Immediate(kZapCodeReg)); 523 __ pushq(Immediate(kZapCodeReg));
537 // Return address for "call" to deopt stub. 524 // Return address for "call" to deopt stub.
538 __ pushq(Immediate(kZapReturnAddress)); 525 __ pushq(Immediate(kZapReturnAddress));
539 __ movq(CODE_REG, Address(THR, Thread::lazy_deopt_from_throw_stub_offset())); 526 __ movq(CODE_REG, Address(THR, Thread::lazy_deopt_from_throw_stub_offset()));
540 GenerateDeoptimizationSequence(assembler, kLazyDeoptFromThrow); 527 GenerateDeoptimizationSequence(assembler, kLazyDeoptFromThrow);
541 __ ret(); 528 __ ret();
542 } 529 }
543 530
544
545 void StubCode::GenerateDeoptimizeStub(Assembler* assembler) { 531 void StubCode::GenerateDeoptimizeStub(Assembler* assembler) {
546 GenerateDeoptimizationSequence(assembler, kEagerDeopt); 532 GenerateDeoptimizationSequence(assembler, kEagerDeopt);
547 __ ret(); 533 __ ret();
548 } 534 }
549 535
550
551 static void GenerateDispatcherCode(Assembler* assembler, 536 static void GenerateDispatcherCode(Assembler* assembler,
552 Label* call_target_function) { 537 Label* call_target_function) {
553 __ Comment("NoSuchMethodDispatch"); 538 __ Comment("NoSuchMethodDispatch");
554 // When lazily generated invocation dispatchers are disabled, the 539 // When lazily generated invocation dispatchers are disabled, the
555 // miss-handler may return null. 540 // miss-handler may return null.
556 __ CompareObject(RAX, Object::null_object()); 541 __ CompareObject(RAX, Object::null_object());
557 __ j(NOT_EQUAL, call_target_function); 542 __ j(NOT_EQUAL, call_target_function);
558 __ EnterStubFrame(); 543 __ EnterStubFrame();
559 // Load the receiver. 544 // Load the receiver.
560 __ movq(RDI, FieldAddress(R10, ArgumentsDescriptor::count_offset())); 545 __ movq(RDI, FieldAddress(R10, ArgumentsDescriptor::count_offset()));
(...skipping 16 matching lines...) Expand all
577 // R10: Smi-tagged arguments array length. 562 // R10: Smi-tagged arguments array length.
578 PushArgumentsArray(assembler); 563 PushArgumentsArray(assembler);
579 const intptr_t kNumArgs = 4; 564 const intptr_t kNumArgs = 4;
580 __ CallRuntime(kInvokeNoSuchMethodDispatcherRuntimeEntry, kNumArgs); 565 __ CallRuntime(kInvokeNoSuchMethodDispatcherRuntimeEntry, kNumArgs);
581 __ Drop(4); 566 __ Drop(4);
582 __ popq(RAX); // Return value. 567 __ popq(RAX); // Return value.
583 __ LeaveStubFrame(); 568 __ LeaveStubFrame();
584 __ ret(); 569 __ ret();
585 } 570 }
586 571
587
588 void StubCode::GenerateMegamorphicMissStub(Assembler* assembler) { 572 void StubCode::GenerateMegamorphicMissStub(Assembler* assembler) {
589 __ EnterStubFrame(); 573 __ EnterStubFrame();
590 // Load the receiver into RAX. The argument count in the arguments 574 // Load the receiver into RAX. The argument count in the arguments
591 // descriptor in R10 is a smi. 575 // descriptor in R10 is a smi.
592 __ movq(RAX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); 576 __ movq(RAX, FieldAddress(R10, ArgumentsDescriptor::count_offset()));
593 // Three words (saved pp, saved fp, stub's pc marker) 577 // Three words (saved pp, saved fp, stub's pc marker)
594 // in the stack above the return address. 578 // in the stack above the return address.
595 __ movq(RAX, 579 __ movq(RAX,
596 Address(RSP, RAX, TIMES_4, kSavedAboveReturnAddress * kWordSize)); 580 Address(RSP, RAX, TIMES_4, kSavedAboveReturnAddress * kWordSize));
597 // Preserve IC data and arguments descriptor. 581 // Preserve IC data and arguments descriptor.
(...skipping 18 matching lines...) Expand all
616 if (!FLAG_lazy_dispatchers) { 600 if (!FLAG_lazy_dispatchers) {
617 Label call_target_function; 601 Label call_target_function;
618 GenerateDispatcherCode(assembler, &call_target_function); 602 GenerateDispatcherCode(assembler, &call_target_function);
619 __ Bind(&call_target_function); 603 __ Bind(&call_target_function);
620 } 604 }
621 __ movq(CODE_REG, FieldAddress(RAX, Function::code_offset())); 605 __ movq(CODE_REG, FieldAddress(RAX, Function::code_offset()));
622 __ movq(RCX, FieldAddress(RAX, Function::entry_point_offset())); 606 __ movq(RCX, FieldAddress(RAX, Function::entry_point_offset()));
623 __ jmp(RCX); 607 __ jmp(RCX);
624 } 608 }
625 609
626
627 // Called for inline allocation of arrays. 610 // Called for inline allocation of arrays.
628 // Input parameters: 611 // Input parameters:
629 // R10 : Array length as Smi. 612 // R10 : Array length as Smi.
630 // RBX : array element type (either NULL or an instantiated type). 613 // RBX : array element type (either NULL or an instantiated type).
631 // NOTE: R10 cannot be clobbered here as the caller relies on it being saved. 614 // NOTE: R10 cannot be clobbered here as the caller relies on it being saved.
632 // The newly allocated object is returned in RAX. 615 // The newly allocated object is returned in RAX.
633 void StubCode::GenerateAllocateArrayStub(Assembler* assembler) { 616 void StubCode::GenerateAllocateArrayStub(Assembler* assembler) {
634 Label slow_case; 617 Label slow_case;
635 // Compute the size to be allocated, it is based on the array length 618 // Compute the size to be allocated, it is based on the array length
636 // and is computed as: 619 // and is computed as:
(...skipping 112 matching lines...) Expand 10 before | Expand all | Expand 10 after
749 __ pushq(R10); // Array length as Smi. 732 __ pushq(R10); // Array length as Smi.
750 __ pushq(RBX); // Element type. 733 __ pushq(RBX); // Element type.
751 __ CallRuntime(kAllocateArrayRuntimeEntry, 2); 734 __ CallRuntime(kAllocateArrayRuntimeEntry, 2);
752 __ popq(RAX); // Pop element type argument. 735 __ popq(RAX); // Pop element type argument.
753 __ popq(R10); // Pop array length argument. 736 __ popq(R10); // Pop array length argument.
754 __ popq(RAX); // Pop return value from return slot. 737 __ popq(RAX); // Pop return value from return slot.
755 __ LeaveStubFrame(); 738 __ LeaveStubFrame();
756 __ ret(); 739 __ ret();
757 } 740 }
758 741
759
760 // Called when invoking Dart code from C++ (VM code). 742 // Called when invoking Dart code from C++ (VM code).
761 // Input parameters: 743 // Input parameters:
762 // RSP : points to return address. 744 // RSP : points to return address.
763 // RDI : target code 745 // RDI : target code
764 // RSI : arguments descriptor array. 746 // RSI : arguments descriptor array.
765 // RDX : arguments array. 747 // RDX : arguments array.
766 // RCX : current thread. 748 // RCX : current thread.
767 void StubCode::GenerateInvokeDartCodeStub(Assembler* assembler) { 749 void StubCode::GenerateInvokeDartCodeStub(Assembler* assembler) {
768 // Save frame pointer coming in. 750 // Save frame pointer coming in.
769 __ EnterFrame(0); 751 __ EnterFrame(0);
(...skipping 111 matching lines...) Expand 10 before | Expand all | Expand 10 after
881 __ PopRegisters(CallingConventions::kCalleeSaveCpuRegisters, 863 __ PopRegisters(CallingConventions::kCalleeSaveCpuRegisters,
882 CallingConventions::kCalleeSaveXmmRegisters); 864 CallingConventions::kCalleeSaveXmmRegisters);
883 __ set_constant_pool_allowed(false); 865 __ set_constant_pool_allowed(false);
884 866
885 // Restore the frame pointer. 867 // Restore the frame pointer.
886 __ LeaveFrame(); 868 __ LeaveFrame();
887 869
888 __ ret(); 870 __ ret();
889 } 871 }
890 872
891
892 // Called for inline allocation of contexts. 873 // Called for inline allocation of contexts.
893 // Input: 874 // Input:
894 // R10: number of context variables. 875 // R10: number of context variables.
895 // Output: 876 // Output:
896 // RAX: new allocated RawContext object. 877 // RAX: new allocated RawContext object.
897 void StubCode::GenerateAllocateContextStub(Assembler* assembler) { 878 void StubCode::GenerateAllocateContextStub(Assembler* assembler) {
898 __ LoadObject(R9, Object::null_object()); 879 __ LoadObject(R9, Object::null_object());
899 if (FLAG_inline_alloc) { 880 if (FLAG_inline_alloc) {
900 Label slow_case; 881 Label slow_case;
901 // First compute the rounded instance size. 882 // First compute the rounded instance size.
(...skipping 109 matching lines...) Expand 10 before | Expand all | Expand 10 after
1011 __ pushq(R10); // Push number of context variables. 992 __ pushq(R10); // Push number of context variables.
1012 __ CallRuntime(kAllocateContextRuntimeEntry, 1); // Allocate context. 993 __ CallRuntime(kAllocateContextRuntimeEntry, 1); // Allocate context.
1013 __ popq(RAX); // Pop number of context variables argument. 994 __ popq(RAX); // Pop number of context variables argument.
1014 __ popq(RAX); // Pop the new context object. 995 __ popq(RAX); // Pop the new context object.
1015 // RAX: new object 996 // RAX: new object
1016 // Restore the frame pointer. 997 // Restore the frame pointer.
1017 __ LeaveStubFrame(); 998 __ LeaveStubFrame();
1018 __ ret(); 999 __ ret();
1019 } 1000 }
1020 1001
1021
1022 // Helper stub to implement Assembler::StoreIntoObject. 1002 // Helper stub to implement Assembler::StoreIntoObject.
1023 // Input parameters: 1003 // Input parameters:
1024 // RDX: Address being stored 1004 // RDX: Address being stored
1025 void StubCode::GenerateUpdateStoreBufferStub(Assembler* assembler) { 1005 void StubCode::GenerateUpdateStoreBufferStub(Assembler* assembler) {
1026 // Save registers being destroyed. 1006 // Save registers being destroyed.
1027 __ pushq(RAX); 1007 __ pushq(RAX);
1028 __ pushq(RCX); 1008 __ pushq(RCX);
1029 1009
1030 Label add_to_buffer; 1010 Label add_to_buffer;
1031 // Check whether this object has already been remembered. Skip adding to the 1011 // Check whether this object has already been remembered. Skip adding to the
(...skipping 44 matching lines...) Expand 10 before | Expand all | Expand 10 after
1076 // Handle overflow: Call the runtime leaf function. 1056 // Handle overflow: Call the runtime leaf function.
1077 __ Bind(&L); 1057 __ Bind(&L);
1078 // Setup frame, push callee-saved registers. 1058 // Setup frame, push callee-saved registers.
1079 __ EnterCallRuntimeFrame(0); 1059 __ EnterCallRuntimeFrame(0);
1080 __ movq(CallingConventions::kArg1Reg, THR); 1060 __ movq(CallingConventions::kArg1Reg, THR);
1081 __ CallRuntime(kStoreBufferBlockProcessRuntimeEntry, 1); 1061 __ CallRuntime(kStoreBufferBlockProcessRuntimeEntry, 1);
1082 __ LeaveCallRuntimeFrame(); 1062 __ LeaveCallRuntimeFrame();
1083 __ ret(); 1063 __ ret();
1084 } 1064 }
1085 1065
1086
1087 // Called for inline allocation of objects. 1066 // Called for inline allocation of objects.
1088 // Input parameters: 1067 // Input parameters:
1089 // RSP + 8 : type arguments object (only if class is parameterized). 1068 // RSP + 8 : type arguments object (only if class is parameterized).
1090 // RSP : points to return address. 1069 // RSP : points to return address.
1091 void StubCode::GenerateAllocationStubForClass(Assembler* assembler, 1070 void StubCode::GenerateAllocationStubForClass(Assembler* assembler,
1092 const Class& cls) { 1071 const Class& cls) {
1093 const intptr_t kObjectTypeArgumentsOffset = 1 * kWordSize; 1072 const intptr_t kObjectTypeArgumentsOffset = 1 * kWordSize;
1094 // The generated code is different if the class is parameterized. 1073 // The generated code is different if the class is parameterized.
1095 const bool is_cls_parameterized = cls.NumTypeArguments() > 0; 1074 const bool is_cls_parameterized = cls.NumTypeArguments() > 0;
1096 ASSERT(!is_cls_parameterized || 1075 ASSERT(!is_cls_parameterized ||
(...skipping 107 matching lines...) Expand 10 before | Expand all | Expand 10 after
1204 __ CallRuntime(kAllocateObjectRuntimeEntry, 2); // Allocate object. 1183 __ CallRuntime(kAllocateObjectRuntimeEntry, 2); // Allocate object.
1205 __ popq(RAX); // Pop argument (type arguments of object). 1184 __ popq(RAX); // Pop argument (type arguments of object).
1206 __ popq(RAX); // Pop argument (class of object). 1185 __ popq(RAX); // Pop argument (class of object).
1207 __ popq(RAX); // Pop result (newly allocated object). 1186 __ popq(RAX); // Pop result (newly allocated object).
1208 // RAX: new object 1187 // RAX: new object
1209 // Restore the frame pointer. 1188 // Restore the frame pointer.
1210 __ LeaveStubFrame(); 1189 __ LeaveStubFrame();
1211 __ ret(); 1190 __ ret();
1212 } 1191 }
1213 1192
1214
1215 // Called for invoking "dynamic noSuchMethod(Invocation invocation)" function 1193 // Called for invoking "dynamic noSuchMethod(Invocation invocation)" function
1216 // from the entry code of a dart function after an error in passed argument 1194 // from the entry code of a dart function after an error in passed argument
1217 // name or number is detected. 1195 // name or number is detected.
1218 // Input parameters: 1196 // Input parameters:
1219 // RSP : points to return address. 1197 // RSP : points to return address.
1220 // RSP + 8 : address of last argument. 1198 // RSP + 8 : address of last argument.
1221 // R10 : arguments descriptor array. 1199 // R10 : arguments descriptor array.
1222 void StubCode::GenerateCallClosureNoSuchMethodStub(Assembler* assembler) { 1200 void StubCode::GenerateCallClosureNoSuchMethodStub(Assembler* assembler) {
1223 __ EnterStubFrame(); 1201 __ EnterStubFrame();
1224 1202
(...skipping 16 matching lines...) Expand all
1241 1219
1242 // R10: Smi-tagged arguments array length. 1220 // R10: Smi-tagged arguments array length.
1243 PushArgumentsArray(assembler); 1221 PushArgumentsArray(assembler);
1244 1222
1245 const intptr_t kNumArgs = 3; 1223 const intptr_t kNumArgs = 3;
1246 __ CallRuntime(kInvokeClosureNoSuchMethodRuntimeEntry, kNumArgs); 1224 __ CallRuntime(kInvokeClosureNoSuchMethodRuntimeEntry, kNumArgs);
1247 // noSuchMethod on closures always throws an error, so it will never return. 1225 // noSuchMethod on closures always throws an error, so it will never return.
1248 __ int3(); 1226 __ int3();
1249 } 1227 }
1250 1228
1251
1252 // Cannot use function object from ICData as it may be the inlined 1229 // Cannot use function object from ICData as it may be the inlined
1253 // function and not the top-scope function. 1230 // function and not the top-scope function.
1254 void StubCode::GenerateOptimizedUsageCounterIncrement(Assembler* assembler) { 1231 void StubCode::GenerateOptimizedUsageCounterIncrement(Assembler* assembler) {
1255 Register ic_reg = RBX; 1232 Register ic_reg = RBX;
1256 Register func_reg = RDI; 1233 Register func_reg = RDI;
1257 if (FLAG_trace_optimized_ic_calls) { 1234 if (FLAG_trace_optimized_ic_calls) {
1258 __ EnterStubFrame(); 1235 __ EnterStubFrame();
1259 __ pushq(func_reg); // Preserve 1236 __ pushq(func_reg); // Preserve
1260 __ pushq(ic_reg); // Preserve. 1237 __ pushq(ic_reg); // Preserve.
1261 __ pushq(ic_reg); // Argument. 1238 __ pushq(ic_reg); // Argument.
1262 __ pushq(func_reg); // Argument. 1239 __ pushq(func_reg); // Argument.
1263 __ CallRuntime(kTraceICCallRuntimeEntry, 2); 1240 __ CallRuntime(kTraceICCallRuntimeEntry, 2);
1264 __ popq(RAX); // Discard argument; 1241 __ popq(RAX); // Discard argument;
1265 __ popq(RAX); // Discard argument; 1242 __ popq(RAX); // Discard argument;
1266 __ popq(ic_reg); // Restore. 1243 __ popq(ic_reg); // Restore.
1267 __ popq(func_reg); // Restore. 1244 __ popq(func_reg); // Restore.
1268 __ LeaveStubFrame(); 1245 __ LeaveStubFrame();
1269 } 1246 }
1270 __ incl(FieldAddress(func_reg, Function::usage_counter_offset())); 1247 __ incl(FieldAddress(func_reg, Function::usage_counter_offset()));
1271 } 1248 }
1272 1249
1273
1274 // Loads function into 'temp_reg', preserves 'ic_reg'. 1250 // Loads function into 'temp_reg', preserves 'ic_reg'.
1275 void StubCode::GenerateUsageCounterIncrement(Assembler* assembler, 1251 void StubCode::GenerateUsageCounterIncrement(Assembler* assembler,
1276 Register temp_reg) { 1252 Register temp_reg) {
1277 if (FLAG_optimization_counter_threshold >= 0) { 1253 if (FLAG_optimization_counter_threshold >= 0) {
1278 Register ic_reg = RBX; 1254 Register ic_reg = RBX;
1279 Register func_reg = temp_reg; 1255 Register func_reg = temp_reg;
1280 ASSERT(ic_reg != func_reg); 1256 ASSERT(ic_reg != func_reg);
1281 __ Comment("Increment function counter"); 1257 __ Comment("Increment function counter");
1282 __ movq(func_reg, FieldAddress(ic_reg, ICData::owner_offset())); 1258 __ movq(func_reg, FieldAddress(ic_reg, ICData::owner_offset()));
1283 __ incl(FieldAddress(func_reg, Function::usage_counter_offset())); 1259 __ incl(FieldAddress(func_reg, Function::usage_counter_offset()));
1284 } 1260 }
1285 } 1261 }
1286 1262
1287
1288 // Note: RBX must be preserved. 1263 // Note: RBX must be preserved.
1289 // Attempt a quick Smi operation for known operations ('kind'). The ICData 1264 // Attempt a quick Smi operation for known operations ('kind'). The ICData
1290 // must have been primed with a Smi/Smi check that will be used for counting 1265 // must have been primed with a Smi/Smi check that will be used for counting
1291 // the invocations. 1266 // the invocations.
1292 static void EmitFastSmiOp(Assembler* assembler, 1267 static void EmitFastSmiOp(Assembler* assembler,
1293 Token::Kind kind, 1268 Token::Kind kind,
1294 intptr_t num_args, 1269 intptr_t num_args,
1295 Label* not_smi_or_overflow) { 1270 Label* not_smi_or_overflow) {
1296 __ Comment("Fast Smi op"); 1271 __ Comment("Fast Smi op");
1297 ASSERT(num_args == 2); 1272 ASSERT(num_args == 2);
(...skipping 50 matching lines...) Expand 10 before | Expand all | Expand 10 after
1348 1323
1349 if (FLAG_optimization_counter_threshold >= 0) { 1324 if (FLAG_optimization_counter_threshold >= 0) {
1350 const intptr_t count_offset = ICData::CountIndexFor(num_args) * kWordSize; 1325 const intptr_t count_offset = ICData::CountIndexFor(num_args) * kWordSize;
1351 // Update counter, ignore overflow. 1326 // Update counter, ignore overflow.
1352 __ addq(Address(R13, count_offset), Immediate(Smi::RawValue(1))); 1327 __ addq(Address(R13, count_offset), Immediate(Smi::RawValue(1)));
1353 } 1328 }
1354 1329
1355 __ ret(); 1330 __ ret();
1356 } 1331 }
1357 1332
1358
1359 // Generate inline cache check for 'num_args'. 1333 // Generate inline cache check for 'num_args'.
1360 // RBX: Inline cache data object. 1334 // RBX: Inline cache data object.
1361 // TOS(0): return address 1335 // TOS(0): return address
1362 // Control flow: 1336 // Control flow:
1363 // - If receiver is null -> jump to IC miss. 1337 // - If receiver is null -> jump to IC miss.
1364 // - If receiver is Smi -> load Smi class. 1338 // - If receiver is Smi -> load Smi class.
1365 // - If receiver is not-Smi -> load receiver's class. 1339 // - If receiver is not-Smi -> load receiver's class.
1366 // - Check if 'num_args' (including receiver) match any IC data group. 1340 // - Check if 'num_args' (including receiver) match any IC data group.
1367 // - Match found -> jump to target. 1341 // - Match found -> jump to target.
1368 // - Match not found -> jump to IC miss. 1342 // - Match not found -> jump to IC miss.
(...skipping 147 matching lines...) Expand 10 before | Expand all | Expand 10 after
1516 __ EnterStubFrame(); 1490 __ EnterStubFrame();
1517 __ pushq(RBX); 1491 __ pushq(RBX);
1518 __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0); 1492 __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
1519 __ popq(RBX); 1493 __ popq(RBX);
1520 __ RestoreCodePointer(); 1494 __ RestoreCodePointer();
1521 __ LeaveStubFrame(); 1495 __ LeaveStubFrame();
1522 __ jmp(&done_stepping); 1496 __ jmp(&done_stepping);
1523 } 1497 }
1524 } 1498 }
1525 1499
1526
1527 // Use inline cache data array to invoke the target or continue in inline 1500 // Use inline cache data array to invoke the target or continue in inline
1528 // cache miss handler. Stub for 1-argument check (receiver class). 1501 // cache miss handler. Stub for 1-argument check (receiver class).
1529 // RBX: Inline cache data object. 1502 // RBX: Inline cache data object.
1530 // TOS(0): Return address. 1503 // TOS(0): Return address.
1531 // Inline cache data object structure: 1504 // Inline cache data object structure:
1532 // 0: function-name 1505 // 0: function-name
1533 // 1: N, number of arguments checked. 1506 // 1: N, number of arguments checked.
1534 // 2 .. (length - 1): group of checks, each check containing: 1507 // 2 .. (length - 1): group of checks, each check containing:
1535 // - N classes. 1508 // - N classes.
1536 // - 1 target function. 1509 // - 1 target function.
1537 void StubCode::GenerateOneArgCheckInlineCacheStub(Assembler* assembler) { 1510 void StubCode::GenerateOneArgCheckInlineCacheStub(Assembler* assembler) {
1538 GenerateUsageCounterIncrement(assembler, RCX); 1511 GenerateUsageCounterIncrement(assembler, RCX);
1539 GenerateNArgsCheckInlineCacheStub( 1512 GenerateNArgsCheckInlineCacheStub(
1540 assembler, 1, kInlineCacheMissHandlerOneArgRuntimeEntry, Token::kILLEGAL); 1513 assembler, 1, kInlineCacheMissHandlerOneArgRuntimeEntry, Token::kILLEGAL);
1541 } 1514 }
1542 1515
1543
1544 void StubCode::GenerateTwoArgsCheckInlineCacheStub(Assembler* assembler) { 1516 void StubCode::GenerateTwoArgsCheckInlineCacheStub(Assembler* assembler) {
1545 GenerateUsageCounterIncrement(assembler, RCX); 1517 GenerateUsageCounterIncrement(assembler, RCX);
1546 GenerateNArgsCheckInlineCacheStub(assembler, 2, 1518 GenerateNArgsCheckInlineCacheStub(assembler, 2,
1547 kInlineCacheMissHandlerTwoArgsRuntimeEntry, 1519 kInlineCacheMissHandlerTwoArgsRuntimeEntry,
1548 Token::kILLEGAL); 1520 Token::kILLEGAL);
1549 } 1521 }
1550 1522
1551
1552 void StubCode::GenerateSmiAddInlineCacheStub(Assembler* assembler) { 1523 void StubCode::GenerateSmiAddInlineCacheStub(Assembler* assembler) {
1553 GenerateUsageCounterIncrement(assembler, RCX); 1524 GenerateUsageCounterIncrement(assembler, RCX);
1554 GenerateNArgsCheckInlineCacheStub( 1525 GenerateNArgsCheckInlineCacheStub(
1555 assembler, 2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kADD); 1526 assembler, 2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kADD);
1556 } 1527 }
1557 1528
1558
1559 void StubCode::GenerateSmiSubInlineCacheStub(Assembler* assembler) { 1529 void StubCode::GenerateSmiSubInlineCacheStub(Assembler* assembler) {
1560 GenerateUsageCounterIncrement(assembler, RCX); 1530 GenerateUsageCounterIncrement(assembler, RCX);
1561 GenerateNArgsCheckInlineCacheStub( 1531 GenerateNArgsCheckInlineCacheStub(
1562 assembler, 2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kSUB); 1532 assembler, 2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kSUB);
1563 } 1533 }
1564 1534
1565
1566 void StubCode::GenerateSmiEqualInlineCacheStub(Assembler* assembler) { 1535 void StubCode::GenerateSmiEqualInlineCacheStub(Assembler* assembler) {
1567 GenerateUsageCounterIncrement(assembler, RCX); 1536 GenerateUsageCounterIncrement(assembler, RCX);
1568 GenerateNArgsCheckInlineCacheStub( 1537 GenerateNArgsCheckInlineCacheStub(
1569 assembler, 2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kEQ); 1538 assembler, 2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kEQ);
1570 } 1539 }
1571 1540
1572
1573 // Use inline cache data array to invoke the target or continue in inline 1541 // Use inline cache data array to invoke the target or continue in inline
1574 // cache miss handler. Stub for 1-argument check (receiver class). 1542 // cache miss handler. Stub for 1-argument check (receiver class).
1575 // RDI: function which counter needs to be incremented. 1543 // RDI: function which counter needs to be incremented.
1576 // RBX: Inline cache data object. 1544 // RBX: Inline cache data object.
1577 // TOS(0): Return address. 1545 // TOS(0): Return address.
1578 // Inline cache data object structure: 1546 // Inline cache data object structure:
1579 // 0: function-name 1547 // 0: function-name
1580 // 1: N, number of arguments checked. 1548 // 1: N, number of arguments checked.
1581 // 2 .. (length - 1): group of checks, each check containing: 1549 // 2 .. (length - 1): group of checks, each check containing:
1582 // - N classes. 1550 // - N classes.
1583 // - 1 target function. 1551 // - 1 target function.
1584 void StubCode::GenerateOneArgOptimizedCheckInlineCacheStub( 1552 void StubCode::GenerateOneArgOptimizedCheckInlineCacheStub(
1585 Assembler* assembler) { 1553 Assembler* assembler) {
1586 GenerateOptimizedUsageCounterIncrement(assembler); 1554 GenerateOptimizedUsageCounterIncrement(assembler);
1587 GenerateNArgsCheckInlineCacheStub(assembler, 1, 1555 GenerateNArgsCheckInlineCacheStub(assembler, 1,
1588 kInlineCacheMissHandlerOneArgRuntimeEntry, 1556 kInlineCacheMissHandlerOneArgRuntimeEntry,
1589 Token::kILLEGAL, true /* optimized */); 1557 Token::kILLEGAL, true /* optimized */);
1590 } 1558 }
1591 1559
1592
1593 void StubCode::GenerateTwoArgsOptimizedCheckInlineCacheStub( 1560 void StubCode::GenerateTwoArgsOptimizedCheckInlineCacheStub(
1594 Assembler* assembler) { 1561 Assembler* assembler) {
1595 GenerateOptimizedUsageCounterIncrement(assembler); 1562 GenerateOptimizedUsageCounterIncrement(assembler);
1596 GenerateNArgsCheckInlineCacheStub(assembler, 2, 1563 GenerateNArgsCheckInlineCacheStub(assembler, 2,
1597 kInlineCacheMissHandlerTwoArgsRuntimeEntry, 1564 kInlineCacheMissHandlerTwoArgsRuntimeEntry,
1598 Token::kILLEGAL, true /* optimized */); 1565 Token::kILLEGAL, true /* optimized */);
1599 } 1566 }
1600 1567
1601
1602 // Intermediary stub between a static call and its target. ICData contains 1568 // Intermediary stub between a static call and its target. ICData contains
1603 // the target function and the call count. 1569 // the target function and the call count.
1604 // RBX: ICData 1570 // RBX: ICData
1605 void StubCode::GenerateZeroArgsUnoptimizedStaticCallStub(Assembler* assembler) { 1571 void StubCode::GenerateZeroArgsUnoptimizedStaticCallStub(Assembler* assembler) {
1606 GenerateUsageCounterIncrement(assembler, RCX); 1572 GenerateUsageCounterIncrement(assembler, RCX);
1607 #if defined(DEBUG) 1573 #if defined(DEBUG)
1608 { 1574 {
1609 Label ok; 1575 Label ok;
1610 // Check that the IC data array has NumArgsTested() == 0. 1576 // Check that the IC data array has NumArgsTested() == 0.
1611 // 'NumArgsTested' is stored in the least significant bits of 'state_bits'. 1577 // 'NumArgsTested' is stored in the least significant bits of 'state_bits'.
(...skipping 49 matching lines...) Expand 10 before | Expand all | Expand 10 after
1661 __ EnterStubFrame(); 1627 __ EnterStubFrame();
1662 __ pushq(RBX); // Preserve IC data object. 1628 __ pushq(RBX); // Preserve IC data object.
1663 __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0); 1629 __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
1664 __ popq(RBX); 1630 __ popq(RBX);
1665 __ RestoreCodePointer(); 1631 __ RestoreCodePointer();
1666 __ LeaveStubFrame(); 1632 __ LeaveStubFrame();
1667 __ jmp(&done_stepping, Assembler::kNearJump); 1633 __ jmp(&done_stepping, Assembler::kNearJump);
1668 } 1634 }
1669 } 1635 }
1670 1636
1671
1672 void StubCode::GenerateOneArgUnoptimizedStaticCallStub(Assembler* assembler) { 1637 void StubCode::GenerateOneArgUnoptimizedStaticCallStub(Assembler* assembler) {
1673 GenerateUsageCounterIncrement(assembler, RCX); 1638 GenerateUsageCounterIncrement(assembler, RCX);
1674 GenerateNArgsCheckInlineCacheStub( 1639 GenerateNArgsCheckInlineCacheStub(
1675 assembler, 1, kStaticCallMissHandlerOneArgRuntimeEntry, Token::kILLEGAL); 1640 assembler, 1, kStaticCallMissHandlerOneArgRuntimeEntry, Token::kILLEGAL);
1676 } 1641 }
1677 1642
1678
1679 void StubCode::GenerateTwoArgsUnoptimizedStaticCallStub(Assembler* assembler) { 1643 void StubCode::GenerateTwoArgsUnoptimizedStaticCallStub(Assembler* assembler) {
1680 GenerateUsageCounterIncrement(assembler, RCX); 1644 GenerateUsageCounterIncrement(assembler, RCX);
1681 GenerateNArgsCheckInlineCacheStub( 1645 GenerateNArgsCheckInlineCacheStub(
1682 assembler, 2, kStaticCallMissHandlerTwoArgsRuntimeEntry, Token::kILLEGAL); 1646 assembler, 2, kStaticCallMissHandlerTwoArgsRuntimeEntry, Token::kILLEGAL);
1683 } 1647 }
1684 1648
1685
1686 // Stub for compiling a function and jumping to the compiled code. 1649 // Stub for compiling a function and jumping to the compiled code.
1687 // RCX: IC-Data (for methods). 1650 // RCX: IC-Data (for methods).
1688 // R10: Arguments descriptor. 1651 // R10: Arguments descriptor.
1689 // RAX: Function. 1652 // RAX: Function.
1690 void StubCode::GenerateLazyCompileStub(Assembler* assembler) { 1653 void StubCode::GenerateLazyCompileStub(Assembler* assembler) {
1691 __ EnterStubFrame(); 1654 __ EnterStubFrame();
1692 __ pushq(R10); // Preserve arguments descriptor array. 1655 __ pushq(R10); // Preserve arguments descriptor array.
1693 __ pushq(RBX); // Preserve IC data object. 1656 __ pushq(RBX); // Preserve IC data object.
1694 __ pushq(RAX); // Pass function. 1657 __ pushq(RAX); // Pass function.
1695 __ CallRuntime(kCompileFunctionRuntimeEntry, 1); 1658 __ CallRuntime(kCompileFunctionRuntimeEntry, 1);
1696 __ popq(RAX); // Restore function. 1659 __ popq(RAX); // Restore function.
1697 __ popq(RBX); // Restore IC data array. 1660 __ popq(RBX); // Restore IC data array.
1698 __ popq(R10); // Restore arguments descriptor array. 1661 __ popq(R10); // Restore arguments descriptor array.
1699 __ LeaveStubFrame(); 1662 __ LeaveStubFrame();
1700 1663
1701 __ movq(CODE_REG, FieldAddress(RAX, Function::code_offset())); 1664 __ movq(CODE_REG, FieldAddress(RAX, Function::code_offset()));
1702 __ movq(RAX, FieldAddress(RAX, Function::entry_point_offset())); 1665 __ movq(RAX, FieldAddress(RAX, Function::entry_point_offset()));
1703 __ jmp(RAX); 1666 __ jmp(RAX);
1704 } 1667 }
1705 1668
1706
1707 // RBX: Contains an ICData. 1669 // RBX: Contains an ICData.
1708 // TOS(0): return address (Dart code). 1670 // TOS(0): return address (Dart code).
1709 void StubCode::GenerateICCallBreakpointStub(Assembler* assembler) { 1671 void StubCode::GenerateICCallBreakpointStub(Assembler* assembler) {
1710 __ EnterStubFrame(); 1672 __ EnterStubFrame();
1711 __ pushq(RBX); // Preserve IC data. 1673 __ pushq(RBX); // Preserve IC data.
1712 __ pushq(Immediate(0)); // Result slot. 1674 __ pushq(Immediate(0)); // Result slot.
1713 __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0); 1675 __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0);
1714 __ popq(CODE_REG); // Original stub. 1676 __ popq(CODE_REG); // Original stub.
1715 __ popq(RBX); // Restore IC data. 1677 __ popq(RBX); // Restore IC data.
1716 __ LeaveStubFrame(); 1678 __ LeaveStubFrame();
1717 1679
1718 __ movq(RAX, FieldAddress(CODE_REG, Code::entry_point_offset())); 1680 __ movq(RAX, FieldAddress(CODE_REG, Code::entry_point_offset()));
1719 __ jmp(RAX); // Jump to original stub. 1681 __ jmp(RAX); // Jump to original stub.
1720 } 1682 }
1721 1683
1722
1723 // TOS(0): return address (Dart code). 1684 // TOS(0): return address (Dart code).
1724 void StubCode::GenerateRuntimeCallBreakpointStub(Assembler* assembler) { 1685 void StubCode::GenerateRuntimeCallBreakpointStub(Assembler* assembler) {
1725 __ EnterStubFrame(); 1686 __ EnterStubFrame();
1726 __ pushq(Immediate(0)); // Result slot. 1687 __ pushq(Immediate(0)); // Result slot.
1727 __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0); 1688 __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0);
1728 __ popq(CODE_REG); // Original stub. 1689 __ popq(CODE_REG); // Original stub.
1729 __ LeaveStubFrame(); 1690 __ LeaveStubFrame();
1730 1691
1731 __ movq(RAX, FieldAddress(CODE_REG, Code::entry_point_offset())); 1692 __ movq(RAX, FieldAddress(CODE_REG, Code::entry_point_offset()));
1732 __ jmp(RAX); // Jump to original stub. 1693 __ jmp(RAX); // Jump to original stub.
1733 } 1694 }
1734 1695
1735
1736 // Called only from unoptimized code. 1696 // Called only from unoptimized code.
1737 void StubCode::GenerateDebugStepCheckStub(Assembler* assembler) { 1697 void StubCode::GenerateDebugStepCheckStub(Assembler* assembler) {
1738 // Check single stepping. 1698 // Check single stepping.
1739 Label stepping, done_stepping; 1699 Label stepping, done_stepping;
1740 __ LoadIsolate(RAX); 1700 __ LoadIsolate(RAX);
1741 __ movzxb(RAX, Address(RAX, Isolate::single_step_offset())); 1701 __ movzxb(RAX, Address(RAX, Isolate::single_step_offset()));
1742 __ cmpq(RAX, Immediate(0)); 1702 __ cmpq(RAX, Immediate(0));
1743 __ j(NOT_EQUAL, &stepping, Assembler::kNearJump); 1703 __ j(NOT_EQUAL, &stepping, Assembler::kNearJump);
1744 __ Bind(&done_stepping); 1704 __ Bind(&done_stepping);
1745 __ ret(); 1705 __ ret();
1746 1706
1747 __ Bind(&stepping); 1707 __ Bind(&stepping);
1748 __ EnterStubFrame(); 1708 __ EnterStubFrame();
1749 __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0); 1709 __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
1750 __ LeaveStubFrame(); 1710 __ LeaveStubFrame();
1751 __ jmp(&done_stepping, Assembler::kNearJump); 1711 __ jmp(&done_stepping, Assembler::kNearJump);
1752 } 1712 }
1753 1713
1754
1755 // Used to check class and type arguments. Arguments passed on stack: 1714 // Used to check class and type arguments. Arguments passed on stack:
1756 // TOS + 0: return address. 1715 // TOS + 0: return address.
1757 // TOS + 1: function type arguments (only if n == 4, can be raw_null). 1716 // TOS + 1: function type arguments (only if n == 4, can be raw_null).
1758 // TOS + 2: instantiator type arguments (only if n == 4, can be raw_null). 1717 // TOS + 2: instantiator type arguments (only if n == 4, can be raw_null).
1759 // TOS + 3: instance. 1718 // TOS + 3: instance.
1760 // TOS + 4: SubtypeTestCache. 1719 // TOS + 4: SubtypeTestCache.
1761 // Result in RCX: null -> not found, otherwise result (true or false). 1720 // Result in RCX: null -> not found, otherwise result (true or false).
1762 static void GenerateSubtypeNTestCacheStub(Assembler* assembler, int n) { 1721 static void GenerateSubtypeNTestCacheStub(Assembler* assembler, int n) {
1763 ASSERT((n == 1) || (n == 2) || (n == 4)); 1722 ASSERT((n == 1) || (n == 2) || (n == 4));
1764 const intptr_t kFunctionTypeArgumentsInBytes = 1 * kWordSize; 1723 const intptr_t kFunctionTypeArgumentsInBytes = 1 * kWordSize;
(...skipping 71 matching lines...) Expand 10 before | Expand all | Expand 10 after
1836 // Fall through to not found. 1795 // Fall through to not found.
1837 __ Bind(&not_found); 1796 __ Bind(&not_found);
1838 __ movq(RCX, R9); 1797 __ movq(RCX, R9);
1839 __ ret(); 1798 __ ret();
1840 1799
1841 __ Bind(&found); 1800 __ Bind(&found);
1842 __ movq(RCX, Address(RDX, kWordSize * SubtypeTestCache::kTestResult)); 1801 __ movq(RCX, Address(RDX, kWordSize * SubtypeTestCache::kTestResult));
1843 __ ret(); 1802 __ ret();
1844 } 1803 }
1845 1804
1846
1847 // Used to check class and type arguments. Arguments passed on stack: 1805 // Used to check class and type arguments. Arguments passed on stack:
1848 // TOS + 0: return address. 1806 // TOS + 0: return address.
1849 // TOS + 1: raw_null. 1807 // TOS + 1: raw_null.
1850 // TOS + 2: raw_null. 1808 // TOS + 2: raw_null.
1851 // TOS + 3: instance. 1809 // TOS + 3: instance.
1852 // TOS + 4: SubtypeTestCache. 1810 // TOS + 4: SubtypeTestCache.
1853 // Result in RCX: null -> not found, otherwise result (true or false). 1811 // Result in RCX: null -> not found, otherwise result (true or false).
1854 void StubCode::GenerateSubtype1TestCacheStub(Assembler* assembler) { 1812 void StubCode::GenerateSubtype1TestCacheStub(Assembler* assembler) {
1855 GenerateSubtypeNTestCacheStub(assembler, 1); 1813 GenerateSubtypeNTestCacheStub(assembler, 1);
1856 } 1814 }
1857 1815
1858
1859 // Used to check class and type arguments. Arguments passed on stack: 1816 // Used to check class and type arguments. Arguments passed on stack:
1860 // TOS + 0: return address. 1817 // TOS + 0: return address.
1861 // TOS + 1: raw_null. 1818 // TOS + 1: raw_null.
1862 // TOS + 2: raw_null. 1819 // TOS + 2: raw_null.
1863 // TOS + 3: instance. 1820 // TOS + 3: instance.
1864 // TOS + 4: SubtypeTestCache. 1821 // TOS + 4: SubtypeTestCache.
1865 // Result in RCX: null -> not found, otherwise result (true or false). 1822 // Result in RCX: null -> not found, otherwise result (true or false).
1866 void StubCode::GenerateSubtype2TestCacheStub(Assembler* assembler) { 1823 void StubCode::GenerateSubtype2TestCacheStub(Assembler* assembler) {
1867 GenerateSubtypeNTestCacheStub(assembler, 2); 1824 GenerateSubtypeNTestCacheStub(assembler, 2);
1868 } 1825 }
1869 1826
1870
1871 // Used to check class and type arguments. Arguments passed on stack: 1827 // Used to check class and type arguments. Arguments passed on stack:
1872 // TOS + 0: return address. 1828 // TOS + 0: return address.
1873 // TOS + 1: function type arguments (can be raw_null). 1829 // TOS + 1: function type arguments (can be raw_null).
1874 // TOS + 2: instantiator type arguments (can be raw_null). 1830 // TOS + 2: instantiator type arguments (can be raw_null).
1875 // TOS + 3: instance. 1831 // TOS + 3: instance.
1876 // TOS + 4: SubtypeTestCache. 1832 // TOS + 4: SubtypeTestCache.
1877 // Result in RCX: null -> not found, otherwise result (true or false). 1833 // Result in RCX: null -> not found, otherwise result (true or false).
1878 void StubCode::GenerateSubtype4TestCacheStub(Assembler* assembler) { 1834 void StubCode::GenerateSubtype4TestCacheStub(Assembler* assembler) {
1879 GenerateSubtypeNTestCacheStub(assembler, 4); 1835 GenerateSubtypeNTestCacheStub(assembler, 4);
1880 } 1836 }
1881 1837
1882
1883 // Return the current stack pointer address, used to stack alignment 1838 // Return the current stack pointer address, used to stack alignment
1884 // checks. 1839 // checks.
1885 // TOS + 0: return address 1840 // TOS + 0: return address
1886 // Result in RAX. 1841 // Result in RAX.
1887 void StubCode::GenerateGetCStackPointerStub(Assembler* assembler) { 1842 void StubCode::GenerateGetCStackPointerStub(Assembler* assembler) {
1888 __ leaq(RAX, Address(RSP, kWordSize)); 1843 __ leaq(RAX, Address(RSP, kWordSize));
1889 __ ret(); 1844 __ ret();
1890 } 1845 }
1891 1846
1892
1893 // Jump to a frame on the call stack. 1847 // Jump to a frame on the call stack.
1894 // TOS + 0: return address 1848 // TOS + 0: return address
1895 // Arg1: program counter 1849 // Arg1: program counter
1896 // Arg2: stack pointer 1850 // Arg2: stack pointer
1897 // Arg3: frame_pointer 1851 // Arg3: frame_pointer
1898 // Arg4: thread 1852 // Arg4: thread
1899 // No Result. 1853 // No Result.
1900 void StubCode::GenerateJumpToFrameStub(Assembler* assembler) { 1854 void StubCode::GenerateJumpToFrameStub(Assembler* assembler) {
1901 __ movq(THR, CallingConventions::kArg4Reg); 1855 __ movq(THR, CallingConventions::kArg4Reg);
1902 __ movq(RBP, CallingConventions::kArg3Reg); 1856 __ movq(RBP, CallingConventions::kArg3Reg);
1903 __ movq(RSP, CallingConventions::kArg2Reg); 1857 __ movq(RSP, CallingConventions::kArg2Reg);
1904 // Set the tag. 1858 // Set the tag.
1905 __ movq(Assembler::VMTagAddress(), Immediate(VMTag::kDartTagId)); 1859 __ movq(Assembler::VMTagAddress(), Immediate(VMTag::kDartTagId));
1906 // Clear top exit frame. 1860 // Clear top exit frame.
1907 __ movq(Address(THR, Thread::top_exit_frame_info_offset()), Immediate(0)); 1861 __ movq(Address(THR, Thread::top_exit_frame_info_offset()), Immediate(0));
1908 // Restore the pool pointer. 1862 // Restore the pool pointer.
1909 __ RestoreCodePointer(); 1863 __ RestoreCodePointer();
1910 __ LoadPoolPointer(PP); 1864 __ LoadPoolPointer(PP);
1911 __ jmp(CallingConventions::kArg1Reg); // Jump to program counter. 1865 __ jmp(CallingConventions::kArg1Reg); // Jump to program counter.
1912 } 1866 }
1913 1867
1914
1915 // Run an exception handler. Execution comes from JumpToFrame stub. 1868 // Run an exception handler. Execution comes from JumpToFrame stub.
1916 // 1869 //
1917 // The arguments are stored in the Thread object. 1870 // The arguments are stored in the Thread object.
1918 // No result. 1871 // No result.
1919 void StubCode::GenerateRunExceptionHandlerStub(Assembler* assembler) { 1872 void StubCode::GenerateRunExceptionHandlerStub(Assembler* assembler) {
1920 ASSERT(kExceptionObjectReg == RAX); 1873 ASSERT(kExceptionObjectReg == RAX);
1921 ASSERT(kStackTraceObjectReg == RDX); 1874 ASSERT(kStackTraceObjectReg == RDX);
1922 __ movq(CallingConventions::kArg1Reg, 1875 __ movq(CallingConventions::kArg1Reg,
1923 Address(THR, Thread::resume_pc_offset())); 1876 Address(THR, Thread::resume_pc_offset()));
1924 1877
1925 // Load the exception from the current thread. 1878 // Load the exception from the current thread.
1926 Address exception_addr(THR, Thread::active_exception_offset()); 1879 Address exception_addr(THR, Thread::active_exception_offset());
1927 __ movq(kExceptionObjectReg, exception_addr); 1880 __ movq(kExceptionObjectReg, exception_addr);
1928 __ movq(exception_addr, Immediate(0)); 1881 __ movq(exception_addr, Immediate(0));
1929 1882
1930 // Load the stacktrace from the current thread. 1883 // Load the stacktrace from the current thread.
1931 Address stacktrace_addr(THR, Thread::active_stacktrace_offset()); 1884 Address stacktrace_addr(THR, Thread::active_stacktrace_offset());
1932 __ movq(kStackTraceObjectReg, stacktrace_addr); 1885 __ movq(kStackTraceObjectReg, stacktrace_addr);
1933 __ movq(stacktrace_addr, Immediate(0)); 1886 __ movq(stacktrace_addr, Immediate(0));
1934 1887
1935 __ jmp(CallingConventions::kArg1Reg); // Jump to continuation point. 1888 __ jmp(CallingConventions::kArg1Reg); // Jump to continuation point.
1936 } 1889 }
1937 1890
1938
1939 // Deoptimize a frame on the call stack before rewinding. 1891 // Deoptimize a frame on the call stack before rewinding.
1940 // The arguments are stored in the Thread object. 1892 // The arguments are stored in the Thread object.
1941 // No result. 1893 // No result.
1942 void StubCode::GenerateDeoptForRewindStub(Assembler* assembler) { 1894 void StubCode::GenerateDeoptForRewindStub(Assembler* assembler) {
1943 // Push zap value instead of CODE_REG. 1895 // Push zap value instead of CODE_REG.
1944 __ pushq(Immediate(kZapCodeReg)); 1896 __ pushq(Immediate(kZapCodeReg));
1945 1897
1946 // Push the deopt pc. 1898 // Push the deopt pc.
1947 __ pushq(Address(THR, Thread::resume_pc_offset())); 1899 __ pushq(Address(THR, Thread::resume_pc_offset()));
1948 GenerateDeoptimizationSequence(assembler, kEagerDeopt); 1900 GenerateDeoptimizationSequence(assembler, kEagerDeopt);
1949 1901
1950 // After we have deoptimized, jump to the correct frame. 1902 // After we have deoptimized, jump to the correct frame.
1951 __ EnterStubFrame(); 1903 __ EnterStubFrame();
1952 __ CallRuntime(kRewindPostDeoptRuntimeEntry, 0); 1904 __ CallRuntime(kRewindPostDeoptRuntimeEntry, 0);
1953 __ LeaveStubFrame(); 1905 __ LeaveStubFrame();
1954 __ int3(); 1906 __ int3();
1955 } 1907 }
1956 1908
1957
1958 // Calls to the runtime to optimize the given function. 1909 // Calls to the runtime to optimize the given function.
1959 // RDI: function to be reoptimized. 1910 // RDI: function to be reoptimized.
1960 // R10: argument descriptor (preserved). 1911 // R10: argument descriptor (preserved).
1961 void StubCode::GenerateOptimizeFunctionStub(Assembler* assembler) { 1912 void StubCode::GenerateOptimizeFunctionStub(Assembler* assembler) {
1962 __ EnterStubFrame(); 1913 __ EnterStubFrame();
1963 __ pushq(R10); // Preserve args descriptor. 1914 __ pushq(R10); // Preserve args descriptor.
1964 __ pushq(Immediate(0)); // Result slot. 1915 __ pushq(Immediate(0)); // Result slot.
1965 __ pushq(RDI); // Arg0: function to optimize 1916 __ pushq(RDI); // Arg0: function to optimize
1966 __ CallRuntime(kOptimizeInvokedFunctionRuntimeEntry, 1); 1917 __ CallRuntime(kOptimizeInvokedFunctionRuntimeEntry, 1);
1967 __ popq(RAX); // Discard argument. 1918 __ popq(RAX); // Discard argument.
1968 __ popq(RAX); // Get Code object. 1919 __ popq(RAX); // Get Code object.
1969 __ popq(R10); // Restore argument descriptor. 1920 __ popq(R10); // Restore argument descriptor.
1970 __ LeaveStubFrame(); 1921 __ LeaveStubFrame();
1971 __ movq(CODE_REG, FieldAddress(RAX, Function::code_offset())); 1922 __ movq(CODE_REG, FieldAddress(RAX, Function::code_offset()));
1972 __ movq(RCX, FieldAddress(RAX, Function::entry_point_offset())); 1923 __ movq(RCX, FieldAddress(RAX, Function::entry_point_offset()));
1973 __ jmp(RCX); 1924 __ jmp(RCX);
1974 __ int3(); 1925 __ int3();
1975 } 1926 }
1976 1927
1977
1978 // Does identical check (object references are equal or not equal) with special 1928 // Does identical check (object references are equal or not equal) with special
1979 // checks for boxed numbers. 1929 // checks for boxed numbers.
1980 // Left and right are pushed on stack. 1930 // Left and right are pushed on stack.
1981 // Return ZF set. 1931 // Return ZF set.
1982 // Note: A Mint cannot contain a value that would fit in Smi, a Bigint 1932 // Note: A Mint cannot contain a value that would fit in Smi, a Bigint
1983 // cannot contain a value that fits in Mint or Smi. 1933 // cannot contain a value that fits in Mint or Smi.
1984 static void GenerateIdenticalWithNumberCheckStub(Assembler* assembler, 1934 static void GenerateIdenticalWithNumberCheckStub(Assembler* assembler,
1985 const Register left, 1935 const Register left,
1986 const Register right) { 1936 const Register right) {
1987 Label reference_compare, done, check_mint, check_bigint; 1937 Label reference_compare, done, check_mint, check_bigint;
(...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after
2024 // Result in RAX, 0 means equal. 1974 // Result in RAX, 0 means equal.
2025 __ LeaveStubFrame(); 1975 __ LeaveStubFrame();
2026 __ cmpq(RAX, Immediate(0)); 1976 __ cmpq(RAX, Immediate(0));
2027 __ jmp(&done); 1977 __ jmp(&done);
2028 1978
2029 __ Bind(&reference_compare); 1979 __ Bind(&reference_compare);
2030 __ cmpq(left, right); 1980 __ cmpq(left, right);
2031 __ Bind(&done); 1981 __ Bind(&done);
2032 } 1982 }
2033 1983
2034
2035 // Called only from unoptimized code. All relevant registers have been saved. 1984 // Called only from unoptimized code. All relevant registers have been saved.
2036 // TOS + 0: return address 1985 // TOS + 0: return address
2037 // TOS + 1: right argument. 1986 // TOS + 1: right argument.
2038 // TOS + 2: left argument. 1987 // TOS + 2: left argument.
2039 // Returns ZF set. 1988 // Returns ZF set.
2040 void StubCode::GenerateUnoptimizedIdenticalWithNumberCheckStub( 1989 void StubCode::GenerateUnoptimizedIdenticalWithNumberCheckStub(
2041 Assembler* assembler) { 1990 Assembler* assembler) {
2042 // Check single stepping. 1991 // Check single stepping.
2043 Label stepping, done_stepping; 1992 Label stepping, done_stepping;
2044 if (FLAG_support_debugger) { 1993 if (FLAG_support_debugger) {
(...skipping 15 matching lines...) Expand all
2060 if (FLAG_support_debugger) { 2009 if (FLAG_support_debugger) {
2061 __ Bind(&stepping); 2010 __ Bind(&stepping);
2062 __ EnterStubFrame(); 2011 __ EnterStubFrame();
2063 __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0); 2012 __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
2064 __ RestoreCodePointer(); 2013 __ RestoreCodePointer();
2065 __ LeaveStubFrame(); 2014 __ LeaveStubFrame();
2066 __ jmp(&done_stepping); 2015 __ jmp(&done_stepping);
2067 } 2016 }
2068 } 2017 }
2069 2018
2070
// Called from optimized code only.
// TOS + 0: return address
// TOS + 1: right argument.
// TOS + 2: left argument.
// Returns ZF set.
void StubCode::GenerateOptimizedIdenticalWithNumberCheckStub(
    Assembler* assembler) {
  const Register left = RAX;
  const Register right = RDX;

  // Load both operands from the caller's stack slots (just above the
  // return address) and delegate to the shared helper, which performs the
  // identity check with special-casing for boxed numbers and leaves the
  // comparison result in ZF.
  __ movq(left, Address(RSP, 2 * kWordSize));
  __ movq(right, Address(RSP, 1 * kWordSize));
  GenerateIdenticalWithNumberCheckStub(assembler, left, right);
  __ ret();
}
2086 2034
2087
2088 // Called from megamorphic calls. 2035 // Called from megamorphic calls.
2089 // RDI: receiver 2036 // RDI: receiver
2090 // RBX: MegamorphicCache (preserved) 2037 // RBX: MegamorphicCache (preserved)
2091 // Passed to target: 2038 // Passed to target:
2092 // CODE_REG: target Code 2039 // CODE_REG: target Code
2093 // R10: arguments descriptor 2040 // R10: arguments descriptor
2094 void StubCode::GenerateMegamorphicCallStub(Assembler* assembler) { 2041 void StubCode::GenerateMegamorphicCallStub(Assembler* assembler) {
2095 // Jump if receiver is a smi. 2042 // Jump if receiver is a smi.
2096 Label smi_case; 2043 Label smi_case;
2097 __ testq(RDI, Immediate(kSmiTagMask)); 2044 __ testq(RDI, Immediate(kSmiTagMask));
(...skipping 50 matching lines...) Expand 10 before | Expand all | Expand 10 after
2148 // Try next entry in the table. 2095 // Try next entry in the table.
2149 __ AddImmediate(RCX, Immediate(Smi::RawValue(1))); 2096 __ AddImmediate(RCX, Immediate(Smi::RawValue(1)));
2150 __ jmp(&loop); 2097 __ jmp(&loop);
2151 2098
2152 // Load cid for the Smi case. 2099 // Load cid for the Smi case.
2153 __ Bind(&smi_case); 2100 __ Bind(&smi_case);
2154 __ movq(RAX, Immediate(kSmiCid)); 2101 __ movq(RAX, Immediate(kSmiCid));
2155 __ jmp(&cid_loaded); 2102 __ jmp(&cid_loaded);
2156 } 2103 }
2157 2104
2158
2159 // Called from switchable IC calls. 2105 // Called from switchable IC calls.
2160 // RDI: receiver 2106 // RDI: receiver
2161 // RBX: ICData (preserved) 2107 // RBX: ICData (preserved)
2162 // Passed to target: 2108 // Passed to target:
2163 // CODE_REG: target Code object 2109 // CODE_REG: target Code object
2164 // R10: arguments descriptor 2110 // R10: arguments descriptor
2165 void StubCode::GenerateICCallThroughFunctionStub(Assembler* assembler) { 2111 void StubCode::GenerateICCallThroughFunctionStub(Assembler* assembler) {
2166 Label loop, found, miss; 2112 Label loop, found, miss;
2167 __ movq(R13, FieldAddress(RBX, ICData::ic_data_offset())); 2113 __ movq(R13, FieldAddress(RBX, ICData::ic_data_offset()));
2168 __ movq(R10, FieldAddress(RBX, ICData::arguments_descriptor_offset())); 2114 __ movq(R10, FieldAddress(RBX, ICData::arguments_descriptor_offset()));
(...skipping 22 matching lines...) Expand all
2191 __ movq(CODE_REG, FieldAddress(RAX, Function::code_offset())); 2137 __ movq(CODE_REG, FieldAddress(RAX, Function::code_offset()));
2192 __ jmp(RCX); 2138 __ jmp(RCX);
2193 2139
2194 __ Bind(&miss); 2140 __ Bind(&miss);
2195 __ LoadIsolate(RAX); 2141 __ LoadIsolate(RAX);
2196 __ movq(CODE_REG, Address(RAX, Isolate::ic_miss_code_offset())); 2142 __ movq(CODE_REG, Address(RAX, Isolate::ic_miss_code_offset()));
2197 __ movq(RCX, FieldAddress(CODE_REG, Code::entry_point_offset())); 2143 __ movq(RCX, FieldAddress(CODE_REG, Code::entry_point_offset()));
2198 __ jmp(RCX); 2144 __ jmp(RCX);
2199 } 2145 }
2200 2146
2201
2202 void StubCode::GenerateICCallThroughCodeStub(Assembler* assembler) { 2147 void StubCode::GenerateICCallThroughCodeStub(Assembler* assembler) {
2203 Label loop, found, miss; 2148 Label loop, found, miss;
2204 __ movq(R13, FieldAddress(RBX, ICData::ic_data_offset())); 2149 __ movq(R13, FieldAddress(RBX, ICData::ic_data_offset()));
2205 __ movq(R10, FieldAddress(RBX, ICData::arguments_descriptor_offset())); 2150 __ movq(R10, FieldAddress(RBX, ICData::arguments_descriptor_offset()));
2206 __ leaq(R13, FieldAddress(R13, Array::data_offset())); 2151 __ leaq(R13, FieldAddress(R13, Array::data_offset()));
2207 // R13: first IC entry 2152 // R13: first IC entry
2208 __ LoadTaggedClassIdMayBeSmi(RAX, RDI); 2153 __ LoadTaggedClassIdMayBeSmi(RAX, RDI);
2209 // RAX: receiver cid as Smi 2154 // RAX: receiver cid as Smi
2210 2155
2211 __ Bind(&loop); 2156 __ Bind(&loop);
(...skipping 16 matching lines...) Expand all
2228 __ movq(CODE_REG, Address(R13, code_offset)); 2173 __ movq(CODE_REG, Address(R13, code_offset));
2229 __ jmp(RCX); 2174 __ jmp(RCX);
2230 2175
2231 __ Bind(&miss); 2176 __ Bind(&miss);
2232 __ LoadIsolate(RAX); 2177 __ LoadIsolate(RAX);
2233 __ movq(CODE_REG, Address(RAX, Isolate::ic_miss_code_offset())); 2178 __ movq(CODE_REG, Address(RAX, Isolate::ic_miss_code_offset()));
2234 __ movq(RCX, FieldAddress(CODE_REG, Code::entry_point_offset())); 2179 __ movq(RCX, FieldAddress(CODE_REG, Code::entry_point_offset()));
2235 __ jmp(RCX); 2180 __ jmp(RCX);
2236 } 2181 }
2237 2182
2238
// Resolves a call site that has never been executed: calls the runtime to
// link it, then tail-calls the IC lookup stub with the resulting IC data.
// RDI: receiver
// RBX: UnlinkedCall
void StubCode::GenerateUnlinkedCallStub(Assembler* assembler) {
  __ EnterStubFrame();
  __ pushq(RDI);  // Preserve receiver.

  __ pushq(Immediate(0));  // Result slot.
  __ pushq(RDI);           // Arg0: Receiver
  __ pushq(RBX);           // Arg1: UnlinkedCall
  __ CallRuntime(kUnlinkedCallRuntimeEntry, 2);
  __ popq(RBX);  // Discard Arg1.
  __ popq(RBX);  // Discard Arg0.
  __ popq(RBX);  // result = IC

  __ popq(RDI);  // Restore receiver.
  __ LeaveStubFrame();

  // Tail-call the IC lookup stub through its checked entry point, with the
  // receiver in RDI and the new IC in RBX.
  __ movq(CODE_REG, Address(THR, Thread::ic_lookup_through_code_stub_offset()));
  __ movq(RCX, FieldAddress(CODE_REG, Code::checked_entry_point_offset()));
  __ jmp(RCX);
}
2260 2204
2261
2262 // Called from switchable IC calls. 2205 // Called from switchable IC calls.
2263 // RDI: receiver 2206 // RDI: receiver
2264 // RBX: SingleTargetCache 2207 // RBX: SingleTargetCache
2265 // Passed to target:: 2208 // Passed to target::
2266 // CODE_REG: target Code object 2209 // CODE_REG: target Code object
2267 void StubCode::GenerateSingleTargetCallStub(Assembler* assembler) { 2210 void StubCode::GenerateSingleTargetCallStub(Assembler* assembler) {
2268 Label miss; 2211 Label miss;
2269 __ LoadClassIdMayBeSmi(RAX, RDI); 2212 __ LoadClassIdMayBeSmi(RAX, RDI);
2270 __ movzxw(R9, FieldAddress(RBX, SingleTargetCache::lower_limit_offset())); 2213 __ movzxw(R9, FieldAddress(RBX, SingleTargetCache::lower_limit_offset()));
2271 __ movzxw(R10, FieldAddress(RBX, SingleTargetCache::upper_limit_offset())); 2214 __ movzxw(R10, FieldAddress(RBX, SingleTargetCache::upper_limit_offset()));
(...skipping 16 matching lines...) Expand all
2288 __ popq(RBX); // result = IC 2231 __ popq(RBX); // result = IC
2289 2232
2290 __ popq(RDI); // Restore receiver. 2233 __ popq(RDI); // Restore receiver.
2291 __ LeaveStubFrame(); 2234 __ LeaveStubFrame();
2292 2235
2293 __ movq(CODE_REG, Address(THR, Thread::ic_lookup_through_code_stub_offset())); 2236 __ movq(CODE_REG, Address(THR, Thread::ic_lookup_through_code_stub_offset()));
2294 __ movq(RCX, FieldAddress(CODE_REG, Code::checked_entry_point_offset())); 2237 __ movq(RCX, FieldAddress(CODE_REG, Code::checked_entry_point_offset()));
2295 __ jmp(RCX); 2238 __ jmp(RCX);
2296 } 2239 }
2297 2240
2298
// Handles a miss of the monomorphic checked entry: asks the runtime for an
// IC for this receiver, then tail-calls the IC lookup stub with it.
// Called from the monomorphic checked entry.
// RDI: receiver
void StubCode::GenerateMonomorphicMissStub(Assembler* assembler) {
  // Load this stub's own Code object before entering the frame — presumably
  // so the stub frame records the correct code object; confirm against
  // EnterStubFrame's expectations.
  __ movq(CODE_REG, Address(THR, Thread::monomorphic_miss_stub_offset()));
  __ EnterStubFrame();
  __ pushq(RDI);  // Preserve receiver.

  __ pushq(Immediate(0));  // Result slot.
  __ pushq(RDI);           // Arg0: Receiver
  __ CallRuntime(kMonomorphicMissRuntimeEntry, 1);
  __ popq(RBX);  // Discard Arg0.
  __ popq(RBX);  // result = IC

  __ popq(RDI);  // Restore receiver.
  __ LeaveStubFrame();

  // Tail-call the IC lookup stub through its checked entry point, with the
  // receiver in RDI and the new IC in RBX.
  __ movq(CODE_REG, Address(THR, Thread::ic_lookup_through_code_stub_offset()));
  __ movq(RCX, FieldAddress(CODE_REG, Code::checked_entry_point_offset()));
  __ jmp(RCX);
}
2319 2261
2320
// Marker stub: frames awaiting materialization are never meant to be
// executed, so the stub is a single breakpoint trap (int3).
void StubCode::GenerateFrameAwaitingMaterializationStub(Assembler* assembler) {
  __ int3();
}
2324 2265
2325
// Marker stub: marks a gap between asynchronous frames and is never meant
// to be executed, so the stub is a single breakpoint trap (int3).
void StubCode::GenerateAsynchronousGapMarkerStub(Assembler* assembler) {
  __ int3();
}
2329 2269
2330 } // namespace dart 2270 } // namespace dart
2331 2271
2332 #endif // defined TARGET_ARCH_X64 2272 #endif // defined TARGET_ARCH_X64
OLDNEW
« no previous file with comments | « runtime/vm/stub_code_ia32_test.cc ('k') | runtime/vm/stub_code_x64_test.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698