Chromium Code Reviews

Side by Side Diff: runtime/vm/stub_code_x64.cc

Issue 1268783003: Simplify constant pool usage in x64 code generator (by removing extra argument) (Closed) Base URL: git@github.com:dart-lang/sdk.git@master
Patch Set: address comments | Created 5 years, 4 months ago
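In short, this patch set drops the explicit pool-pointer operand (PP) from assembler helpers such as PushObject, LoadObject, LoadPoolPointer, AddImmediate, and LoadClass; the assembler now tracks the pool pointer itself and gates its use through set_constant_pool_allowed(). The snippet below is a toy model of that convention, not Dart VM code: ToyAssembler and its string operands are invented for illustration, and only the shape of the calls mirrors the diff.

#include <cassert>
#include <iostream>
#include <string>

// Toy stand-in for the VM macro assembler: it remembers whether constant-pool
// loads are currently legal instead of taking PP at every call site.
class ToyAssembler {
 public:
  void LoadPoolPointer() { constant_pool_allowed_ = true; }
  void set_constant_pool_allowed(bool allowed) { constant_pool_allowed_ = allowed; }

  // Before this CL a call site would read LoadObject(dst, obj, PP);
  // now PP is implicit and guarded by the flag above.
  void LoadObject(const std::string& dst, const std::string& obj) {
    assert(constant_pool_allowed_ && "object loads need the constant pool");
    std::cout << "load " << obj << " into " << dst << " via PP\n";
  }

 private:
  bool constant_pool_allowed_ = false;
};

int main() {
  ToyAssembler assembler;
  assembler.LoadPoolPointer();                 // e.g. in the invocation stub prologue
  assembler.LoadObject("R12", "null_object");  // no explicit PP argument any more
  assembler.set_constant_pool_allowed(false);  // e.g. before LeaveFrame in the epilogue
  return 0;
}

The apparent design point is that call sites stop threading PP through every macro, and the single set_constant_pool_allowed(false) before LeaveFrame (visible in the invocation-stub hunk below) marks the point past which constant-pool loads are no longer valid.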
OLD | NEW
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
2 // for details. All rights reserved. Use of this source code is governed by a 2 // for details. All rights reserved. Use of this source code is governed by a
3 // BSD-style license that can be found in the LICENSE file. 3 // BSD-style license that can be found in the LICENSE file.
4 4
5 #include "vm/globals.h" 5 #include "vm/globals.h"
6 #if defined(TARGET_ARCH_X64) 6 #if defined(TARGET_ARCH_X64)
7 7
8 #include "vm/assembler.h" 8 #include "vm/assembler.h"
9 #include "vm/compiler.h" 9 #include "vm/compiler.h"
10 #include "vm/dart_entry.h" 10 #include "vm/dart_entry.h"
(...skipping 258 matching lines...)
269 __ ret(); 269 __ ret();
270 } 270 }
271 271
272 272
273 // Input parameters: 273 // Input parameters:
274 // R10: arguments descriptor array. 274 // R10: arguments descriptor array.
275 void StubCode::GenerateCallStaticFunctionStub(Assembler* assembler) { 275 void StubCode::GenerateCallStaticFunctionStub(Assembler* assembler) {
276 __ EnterStubFrame(); 276 __ EnterStubFrame();
277 __ pushq(R10); // Preserve arguments descriptor array. 277 __ pushq(R10); // Preserve arguments descriptor array.
278 // Setup space on stack for return value. 278 // Setup space on stack for return value.
279 __ PushObject(Object::null_object(), PP); 279 __ PushObject(Object::null_object());
280 __ CallRuntime(kPatchStaticCallRuntimeEntry, 0); 280 __ CallRuntime(kPatchStaticCallRuntimeEntry, 0);
281 __ popq(RAX); // Get Code object result. 281 __ popq(RAX); // Get Code object result.
282 __ popq(R10); // Restore arguments descriptor array. 282 __ popq(R10); // Restore arguments descriptor array.
283 // Remove the stub frame as we are about to jump to the dart function. 283 // Remove the stub frame as we are about to jump to the dart function.
284 __ LeaveStubFrame(); 284 __ LeaveStubFrame();
285 285
286 __ movq(RBX, FieldAddress(RAX, Code::instructions_offset())); 286 __ movq(RBX, FieldAddress(RAX, Code::instructions_offset()));
287 __ addq(RBX, Immediate(Instructions::HeaderSize() - kHeapObjectTag)); 287 __ addq(RBX, Immediate(Instructions::HeaderSize() - kHeapObjectTag));
288 __ jmp(RBX); 288 __ jmp(RBX);
289 } 289 }
290 290
291 291
292 // Called from a static call only when an invalid code has been entered 292 // Called from a static call only when an invalid code has been entered
293 // (invalid because its function was optimized or deoptimized). 293 // (invalid because its function was optimized or deoptimized).
294 // R10: arguments descriptor array. 294 // R10: arguments descriptor array.
295 void StubCode::GenerateFixCallersTargetStub(Assembler* assembler) { 295 void StubCode::GenerateFixCallersTargetStub(Assembler* assembler) {
296 __ EnterStubFrame(); 296 __ EnterStubFrame();
297 __ pushq(R10); // Preserve arguments descriptor array. 297 __ pushq(R10); // Preserve arguments descriptor array.
298 // Setup space on stack for return value. 298 // Setup space on stack for return value.
299 __ PushObject(Object::null_object(), PP); 299 __ PushObject(Object::null_object());
300 __ CallRuntime(kFixCallersTargetRuntimeEntry, 0); 300 __ CallRuntime(kFixCallersTargetRuntimeEntry, 0);
301 __ popq(RAX); // Get Code object. 301 __ popq(RAX); // Get Code object.
302 __ popq(R10); // Restore arguments descriptor array. 302 __ popq(R10); // Restore arguments descriptor array.
303 __ movq(RAX, FieldAddress(RAX, Code::instructions_offset())); 303 __ movq(RAX, FieldAddress(RAX, Code::instructions_offset()));
304 __ addq(RAX, Immediate(Instructions::HeaderSize() - kHeapObjectTag)); 304 __ addq(RAX, Immediate(Instructions::HeaderSize() - kHeapObjectTag));
305 __ LeaveStubFrame(); 305 __ LeaveStubFrame();
306 __ jmp(RAX); 306 __ jmp(RAX);
307 __ int3(); 307 __ int3();
308 } 308 }
309 309
310 310
311 // Called from object allocate instruction when the allocation stub has been 311 // Called from object allocate instruction when the allocation stub has been
312 // disabled. 312 // disabled.
313 void StubCode::GenerateFixAllocationStubTargetStub(Assembler* assembler) { 313 void StubCode::GenerateFixAllocationStubTargetStub(Assembler* assembler) {
314 __ EnterStubFrame(); 314 __ EnterStubFrame();
315 // Setup space on stack for return value. 315 // Setup space on stack for return value.
316 __ PushObject(Object::null_object(), PP); 316 __ PushObject(Object::null_object());
317 __ CallRuntime(kFixAllocationStubTargetRuntimeEntry, 0); 317 __ CallRuntime(kFixAllocationStubTargetRuntimeEntry, 0);
318 __ popq(RAX); // Get Code object. 318 __ popq(RAX); // Get Code object.
319 __ movq(RAX, FieldAddress(RAX, Code::instructions_offset())); 319 __ movq(RAX, FieldAddress(RAX, Code::instructions_offset()));
320 __ addq(RAX, Immediate(Instructions::HeaderSize() - kHeapObjectTag)); 320 __ addq(RAX, Immediate(Instructions::HeaderSize() - kHeapObjectTag));
321 __ LeaveStubFrame(); 321 __ LeaveStubFrame();
322 __ jmp(RAX); 322 __ jmp(RAX);
323 __ int3(); 323 __ int3();
324 } 324 }
325 325
326 326
327 // Input parameters: 327 // Input parameters:
328 // R10: smi-tagged argument count, may be zero. 328 // R10: smi-tagged argument count, may be zero.
329 // RBP[kParamEndSlotFromFp + 1]: last argument. 329 // RBP[kParamEndSlotFromFp + 1]: last argument.
330 static void PushArgumentsArray(Assembler* assembler) { 330 static void PushArgumentsArray(Assembler* assembler) {
331 __ LoadObject(R12, Object::null_object(), PP); 331 __ LoadObject(R12, Object::null_object());
332 // Allocate array to store arguments of caller. 332 // Allocate array to store arguments of caller.
333 __ movq(RBX, R12); // Null element type for raw Array. 333 __ movq(RBX, R12); // Null element type for raw Array.
334 const ExternalLabel array_label(StubCode::AllocateArrayEntryPoint()); 334 const ExternalLabel array_label(StubCode::AllocateArrayEntryPoint());
335 __ call(&array_label); 335 __ call(&array_label);
336 __ SmiUntag(R10); 336 __ SmiUntag(R10);
337 // RAX: newly allocated array. 337 // RAX: newly allocated array.
338 // R10: length of the array (was preserved by the stub). 338 // R10: length of the array (was preserved by the stub).
339 __ pushq(RAX); // Array is in RAX and on top of stack. 339 __ pushq(RAX); // Array is in RAX and on top of stack.
340 __ leaq(R12, Address(RBP, R10, TIMES_8, kParamEndSlotFromFp * kWordSize)); 340 __ leaq(R12, Address(RBP, R10, TIMES_8, kParamEndSlotFromFp * kWordSize));
341 __ leaq(RBX, FieldAddress(RAX, Array::data_offset())); 341 __ leaq(RBX, FieldAddress(RAX, Array::data_offset()));
(...skipping 198 matching lines...)
540 __ movq(RAX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); 540 __ movq(RAX, FieldAddress(R10, ArgumentsDescriptor::count_offset()));
541 // Three words (saved pp, saved fp, stub's pc marker) 541 // Three words (saved pp, saved fp, stub's pc marker)
542 // in the stack above the return address. 542 // in the stack above the return address.
543 __ movq(RAX, Address(RSP, RAX, TIMES_4, 543 __ movq(RAX, Address(RSP, RAX, TIMES_4,
544 kSavedAboveReturnAddress * kWordSize)); 544 kSavedAboveReturnAddress * kWordSize));
545 // Preserve IC data and arguments descriptor. 545 // Preserve IC data and arguments descriptor.
546 __ pushq(RBX); 546 __ pushq(RBX);
547 __ pushq(R10); 547 __ pushq(R10);
548 548
549 // Space for the result of the runtime call. 549 // Space for the result of the runtime call.
550 __ PushObject(Object::null_object(), PP); 550 __ PushObject(Object::null_object());
551 __ pushq(RAX); // Receiver. 551 __ pushq(RAX); // Receiver.
552 __ pushq(RBX); // IC data. 552 __ pushq(RBX); // IC data.
553 __ pushq(R10); // Arguments descriptor. 553 __ pushq(R10); // Arguments descriptor.
554 __ CallRuntime(kMegamorphicCacheMissHandlerRuntimeEntry, 3); 554 __ CallRuntime(kMegamorphicCacheMissHandlerRuntimeEntry, 3);
555 // Discard arguments. 555 // Discard arguments.
556 __ popq(RAX); 556 __ popq(RAX);
557 __ popq(RAX); 557 __ popq(RAX);
558 __ popq(RAX); 558 __ popq(RAX);
559 __ popq(RAX); // Return value from the runtime call (function). 559 __ popq(RAX); // Return value from the runtime call (function).
560 __ popq(R10); // Restore arguments descriptor. 560 __ popq(R10); // Restore arguments descriptor.
(...skipping 12 matching lines...)
573 } 573 }
574 574
575 575
576 // Called for inline allocation of arrays. 576 // Called for inline allocation of arrays.
577 // Input parameters: 577 // Input parameters:
578 // R10 : Array length as Smi. 578 // R10 : Array length as Smi.
579 // RBX : array element type (either NULL or an instantiated type). 579 // RBX : array element type (either NULL or an instantiated type).
580 // NOTE: R10 cannot be clobbered here as the caller relies on it being saved. 580 // NOTE: R10 cannot be clobbered here as the caller relies on it being saved.
581 // The newly allocated object is returned in RAX. 581 // The newly allocated object is returned in RAX.
582 void StubCode::GenerateAllocateArrayStub(Assembler* assembler) { 582 void StubCode::GenerateAllocateArrayStub(Assembler* assembler) {
583 // Must load pool pointer before being able to patch.
584 Register new_pp = R13;
585 __ LoadPoolPointer(new_pp);
586 Label slow_case; 583 Label slow_case;
587 // Compute the size to be allocated, it is based on the array length 584 // Compute the size to be allocated, it is based on the array length
588 // and is computed as: 585 // and is computed as:
589 // RoundedAllocationSize((array_length * kWordSize) + sizeof(RawArray)). 586 // RoundedAllocationSize((array_length * kWordSize) + sizeof(RawArray)).
590 __ movq(RDI, R10); // Array Length. 587 __ movq(RDI, R10); // Array Length.
591 // Check that length is a positive Smi. 588 // Check that length is a positive Smi.
592 __ testq(RDI, Immediate(kSmiTagMask)); 589 __ testq(RDI, Immediate(kSmiTagMask));
593 if (FLAG_use_slow_path) { 590 if (FLAG_use_slow_path) {
594 __ jmp(&slow_case); 591 __ jmp(&slow_case);
595 } else { 592 } else {
(...skipping 71 matching lines...)
667 // Set the length field. 664 // Set the length field.
668 __ InitializeFieldNoBarrier(RAX, 665 __ InitializeFieldNoBarrier(RAX,
669 FieldAddress(RAX, Array::length_offset()), 666 FieldAddress(RAX, Array::length_offset()),
670 R10); 667 R10);
671 668
672 // Initialize all array elements to raw_null. 669 // Initialize all array elements to raw_null.
673 // RAX: new object start as a tagged pointer. 670 // RAX: new object start as a tagged pointer.
674 // RCX: new object end address. 671 // RCX: new object end address.
675 // RDI: iterator which initially points to the start of the variable 672 // RDI: iterator which initially points to the start of the variable
676 // data area to be initialized. 673 // data area to be initialized.
677 __ LoadObject(R12, Object::null_object(), PP); 674 __ LoadObject(R12, Object::null_object());
678 __ leaq(RDI, FieldAddress(RAX, sizeof(RawArray))); 675 __ leaq(RDI, FieldAddress(RAX, sizeof(RawArray)));
679 Label done; 676 Label done;
680 Label init_loop; 677 Label init_loop;
681 __ Bind(&init_loop); 678 __ Bind(&init_loop);
682 __ cmpq(RDI, RCX); 679 __ cmpq(RDI, RCX);
683 #if defined(DEBUG) 680 #if defined(DEBUG)
684 static const bool kJumpLength = Assembler::kFarJump; 681 static const bool kJumpLength = Assembler::kFarJump;
685 #else 682 #else
686 static const bool kJumpLength = Assembler::kNearJump; 683 static const bool kJumpLength = Assembler::kNearJump;
687 #endif // DEBUG 684 #endif // DEBUG
688 __ j(ABOVE_EQUAL, &done, kJumpLength); 685 __ j(ABOVE_EQUAL, &done, kJumpLength);
689 // No generational barrier needed, since we are storing null. 686 // No generational barrier needed, since we are storing null.
690 __ InitializeFieldNoBarrier(RAX, Address(RDI, 0), R12); 687 __ InitializeFieldNoBarrier(RAX, Address(RDI, 0), R12);
691 __ addq(RDI, Immediate(kWordSize)); 688 __ addq(RDI, Immediate(kWordSize));
692 __ jmp(&init_loop, kJumpLength); 689 __ jmp(&init_loop, kJumpLength);
693 __ Bind(&done); 690 __ Bind(&done);
694 __ ret(); // returns the newly allocated object in RAX. 691 __ ret(); // returns the newly allocated object in RAX.
695 692
696 // Unable to allocate the array using the fast inline code, just call 693 // Unable to allocate the array using the fast inline code, just call
697 // into the runtime. 694 // into the runtime.
698 __ Bind(&slow_case); 695 __ Bind(&slow_case);
699 // Create a stub frame as we are pushing some objects on the stack before 696 // Create a stub frame as we are pushing some objects on the stack before
700 // calling into the runtime. 697 // calling into the runtime.
701 __ EnterStubFrame(); 698 __ EnterStubFrame();
702 // Setup space on stack for return value. 699 // Setup space on stack for return value.
703 __ PushObject(Object::null_object(), PP); 700 __ PushObject(Object::null_object());
704 __ pushq(R10); // Array length as Smi. 701 __ pushq(R10); // Array length as Smi.
705 __ pushq(RBX); // Element type. 702 __ pushq(RBX); // Element type.
706 __ CallRuntime(kAllocateArrayRuntimeEntry, 2); 703 __ CallRuntime(kAllocateArrayRuntimeEntry, 2);
707 __ popq(RAX); // Pop element type argument. 704 __ popq(RAX); // Pop element type argument.
708 __ popq(R10); // Pop array length argument. 705 __ popq(R10); // Pop array length argument.
709 __ popq(RAX); // Pop return value from return slot. 706 __ popq(RAX); // Pop return value from return slot.
710 __ LeaveStubFrame(); 707 __ LeaveStubFrame();
711 __ ret(); 708 __ ret();
712 } 709 }
713 710
(...skipping 23 matching lines...)
737 const intptr_t kArgumentsDescOffset = -(kInitialOffset) * kWordSize; 734 const intptr_t kArgumentsDescOffset = -(kInitialOffset) * kWordSize;
738 __ pushq(kArgDescReg); 735 __ pushq(kArgDescReg);
739 736
740 // Save C++ ABI callee-saved registers. 737 // Save C++ ABI callee-saved registers.
741 __ PushRegisters(CallingConventions::kCalleeSaveCpuRegisters, 738 __ PushRegisters(CallingConventions::kCalleeSaveCpuRegisters,
742 CallingConventions::kCalleeSaveXmmRegisters); 739 CallingConventions::kCalleeSaveXmmRegisters);
743 740
744 // We now load the pool pointer (PP) as we are about to invoke Dart code and we 741 // We now load the pool pointer (PP) as we are about to invoke Dart code and we
745 // could potentially invoke some intrinsic functions which need the PP to be 742 // could potentially invoke some intrinsic functions which need the PP to be
746 // set up. 743 // set up.
747 __ LoadPoolPointer(PP); 744 __ LoadPoolPointer();
748 745
749 // If any additional (or fewer) values are pushed, the offsets in 746 // If any additional (or fewer) values are pushed, the offsets in
750 // kExitLinkSlotFromEntryFp will need to be changed. 747 // kExitLinkSlotFromEntryFp will need to be changed.
751 748
752 // Set up THR, which caches the current thread in Dart code. 749 // Set up THR, which caches the current thread in Dart code.
753 if (THR != kThreadReg) { 750 if (THR != kThreadReg) {
754 __ movq(THR, kThreadReg); 751 __ movq(THR, kThreadReg);
755 } 752 }
756 // Load Isolate pointer into kIsolateReg. 753 // Load Isolate pointer into kIsolateReg.
757 const Register kIsolateReg = RBX; 754 const Register kIsolateReg = RBX;
(...skipping 73 matching lines...)
831 __ LoadIsolate(kIsolateReg); 828 __ LoadIsolate(kIsolateReg);
832 __ popq(Address(THR, Thread::top_exit_frame_info_offset())); 829 __ popq(Address(THR, Thread::top_exit_frame_info_offset()));
833 __ popq(Address(THR, Thread::top_resource_offset())); 830 __ popq(Address(THR, Thread::top_resource_offset()));
834 831
835 // Restore the current VMTag from the stack. 832 // Restore the current VMTag from the stack.
836 __ popq(Address(kIsolateReg, Isolate::vm_tag_offset())); 833 __ popq(Address(kIsolateReg, Isolate::vm_tag_offset()));
837 834
838 // Restore C++ ABI callee-saved registers. 835 // Restore C++ ABI callee-saved registers.
839 __ PopRegisters(CallingConventions::kCalleeSaveCpuRegisters, 836 __ PopRegisters(CallingConventions::kCalleeSaveCpuRegisters,
840 CallingConventions::kCalleeSaveXmmRegisters); 837 CallingConventions::kCalleeSaveXmmRegisters);
838 __ set_constant_pool_allowed(false);
841 839
842 // Restore the frame pointer. 840 // Restore the frame pointer.
843 __ LeaveFrame(); 841 __ LeaveFrame();
844 842
845 __ ret(); 843 __ ret();
846 } 844 }
847 845
848 846
849 // Called for inline allocation of contexts. 847 // Called for inline allocation of contexts.
850 // Input: 848 // Input:
851 // R10: number of context variables. 849 // R10: number of context variables.
852 // Output: 850 // Output:
853 // RAX: new allocated RawContext object. 851 // RAX: new allocated RawContext object.
854 void StubCode::GenerateAllocateContextStub(Assembler* assembler) { 852 void StubCode::GenerateAllocateContextStub(Assembler* assembler) {
855 __ LoadObject(R12, Object::null_object(), PP); 853 __ LoadObject(R12, Object::null_object());
856 if (FLAG_inline_alloc) { 854 if (FLAG_inline_alloc) {
857 Label slow_case; 855 Label slow_case;
858 // First compute the rounded instance size. 856 // First compute the rounded instance size.
859 // R10: number of context variables. 857 // R10: number of context variables.
860 intptr_t fixed_size = (sizeof(RawContext) + kObjectAlignment - 1); 858 intptr_t fixed_size = (sizeof(RawContext) + kObjectAlignment - 1);
861 __ leaq(R13, Address(R10, TIMES_8, fixed_size)); 859 __ leaq(R13, Address(R10, TIMES_8, fixed_size));
862 __ andq(R13, Immediate(-kObjectAlignment)); 860 __ andq(R13, Immediate(-kObjectAlignment));
863 861
864 // Now allocate the object. 862 // Now allocate the object.
865 // R10: number of context variables. 863 // R10: number of context variables.
(...skipping 192 matching lines...)
1058 // The generated code is different if the class is parameterized. 1056 // The generated code is different if the class is parameterized.
1059 const bool is_cls_parameterized = cls.NumTypeArguments() > 0; 1057 const bool is_cls_parameterized = cls.NumTypeArguments() > 0;
1060 ASSERT(!is_cls_parameterized || 1058 ASSERT(!is_cls_parameterized ||
1061 (cls.type_arguments_field_offset() != Class::kNoTypeArguments)); 1059 (cls.type_arguments_field_offset() != Class::kNoTypeArguments));
1062 // kInlineInstanceSize is a constant used as a threshold for determining 1060 // kInlineInstanceSize is a constant used as a threshold for determining
1063 // when the object initialization should be done as a loop or as 1061 // when the object initialization should be done as a loop or as
1064 // straight line code. 1062 // straight line code.
1065 const int kInlineInstanceSize = 12; // In words. 1063 const int kInlineInstanceSize = 12; // In words.
1066 const intptr_t instance_size = cls.instance_size(); 1064 const intptr_t instance_size = cls.instance_size();
1067 ASSERT(instance_size > 0); 1065 ASSERT(instance_size > 0);
1068 __ LoadObject(R12, Object::null_object(), PP); 1066 __ LoadObject(R12, Object::null_object());
1069 if (is_cls_parameterized) { 1067 if (is_cls_parameterized) {
1070 __ movq(RDX, Address(RSP, kObjectTypeArgumentsOffset)); 1068 __ movq(RDX, Address(RSP, kObjectTypeArgumentsOffset));
1071 // RDX: instantiated type arguments. 1069 // RDX: instantiated type arguments.
1072 } 1070 }
1073 if (FLAG_inline_alloc && Heap::IsAllocatableInNewSpace(instance_size) && 1071 if (FLAG_inline_alloc && Heap::IsAllocatableInNewSpace(instance_size) &&
1074 !cls.trace_allocation()) { 1072 !cls.trace_allocation()) {
1075 Label slow_case; 1073 Label slow_case;
1076 // Allocate the object and update top to point to 1074 // Allocate the object and update top to point to
1077 // next object start and initialize the allocated object. 1075 // next object start and initialize the allocated object.
1078 // RDX: instantiated type arguments (if is_cls_parameterized). 1076 // RDX: instantiated type arguments (if is_cls_parameterized).
(...skipping 75 matching lines...)
1154 // RAX: new object (tagged). 1152 // RAX: new object (tagged).
1155 __ ret(); 1153 __ ret();
1156 1154
1157 __ Bind(&slow_case); 1155 __ Bind(&slow_case);
1158 } 1156 }
1159 // If is_cls_parameterized: 1157 // If is_cls_parameterized:
1160 // RDX: new object type arguments. 1158 // RDX: new object type arguments.
1161 // Create a stub frame. 1159 // Create a stub frame.
1162 __ EnterStubFrame(); // Uses PP to access class object. 1160 __ EnterStubFrame(); // Uses PP to access class object.
1163 __ pushq(R12); // Setup space on stack for return value. 1161 __ pushq(R12); // Setup space on stack for return value.
1164 __ PushObject(cls, PP); // Push class of object to be allocated. 1162 __ PushObject(cls); // Push class of object to be allocated.
1165 if (is_cls_parameterized) { 1163 if (is_cls_parameterized) {
1166 __ pushq(RDX); // Push type arguments of object to be allocated. 1164 __ pushq(RDX); // Push type arguments of object to be allocated.
1167 } else { 1165 } else {
1168 __ pushq(R12); // Push null type arguments. 1166 __ pushq(R12); // Push null type arguments.
1169 } 1167 }
1170 __ CallRuntime(kAllocateObjectRuntimeEntry, 2); // Allocate object. 1168 __ CallRuntime(kAllocateObjectRuntimeEntry, 2); // Allocate object.
1171 __ popq(RAX); // Pop argument (type arguments of object). 1169 __ popq(RAX); // Pop argument (type arguments of object).
1172 __ popq(RAX); // Pop argument (class of object). 1170 __ popq(RAX); // Pop argument (class of object).
1173 __ popq(RAX); // Pop result (newly allocated object). 1171 __ popq(RAX); // Pop result (newly allocated object).
1174 // RAX: new object 1172 // RAX: new object
(...skipping 12 matching lines...)
1187 // RSP : points to return address. 1185 // RSP : points to return address.
1188 // RSP + 8 : address of last argument. 1186 // RSP + 8 : address of last argument.
1189 // R10 : arguments descriptor array. 1187 // R10 : arguments descriptor array.
1190 void StubCode::GenerateCallClosureNoSuchMethodStub(Assembler* assembler) { 1188 void StubCode::GenerateCallClosureNoSuchMethodStub(Assembler* assembler) {
1191 __ EnterStubFrame(); 1189 __ EnterStubFrame();
1192 1190
1193 // Load the receiver. 1191 // Load the receiver.
1194 __ movq(R13, FieldAddress(R10, ArgumentsDescriptor::count_offset())); 1192 __ movq(R13, FieldAddress(R10, ArgumentsDescriptor::count_offset()));
1195 __ movq(RAX, Address(RBP, R13, TIMES_4, kParamEndSlotFromFp * kWordSize)); 1193 __ movq(RAX, Address(RBP, R13, TIMES_4, kParamEndSlotFromFp * kWordSize));
1196 1194
1197 __ LoadObject(R12, Object::null_object(), PP); 1195 __ LoadObject(R12, Object::null_object());
1198 __ pushq(R12); // Setup space on stack for result from noSuchMethod. 1196 __ pushq(R12); // Setup space on stack for result from noSuchMethod.
1199 __ pushq(RAX); // Receiver. 1197 __ pushq(RAX); // Receiver.
1200 __ pushq(R10); // Arguments descriptor array. 1198 __ pushq(R10); // Arguments descriptor array.
1201 1199
1202 __ movq(R10, R13); // Smi-tagged arguments array length. 1200 __ movq(R10, R13); // Smi-tagged arguments array length.
1203 PushArgumentsArray(assembler); 1201 PushArgumentsArray(assembler);
1204 1202
1205 const intptr_t kNumArgs = 3; 1203 const intptr_t kNumArgs = 3;
1206 __ CallRuntime(kInvokeClosureNoSuchMethodRuntimeEntry, kNumArgs); 1204 __ CallRuntime(kInvokeClosureNoSuchMethodRuntimeEntry, kNumArgs);
1207 // noSuchMethod on closures always throws an error, so it will never return. 1205 // noSuchMethod on closures always throws an error, so it will never return.
(...skipping 66 matching lines...)
1274 } 1272 }
1275 case Token::kSUB: { 1273 case Token::kSUB: {
1276 __ subq(RAX, RCX); 1274 __ subq(RAX, RCX);
1277 __ j(OVERFLOW, not_smi_or_overflow); 1275 __ j(OVERFLOW, not_smi_or_overflow);
1278 break; 1276 break;
1279 } 1277 }
1280 case Token::kEQ: { 1278 case Token::kEQ: {
1281 Label done, is_true; 1279 Label done, is_true;
1282 __ cmpq(RAX, RCX); 1280 __ cmpq(RAX, RCX);
1283 __ j(EQUAL, &is_true, Assembler::kNearJump); 1281 __ j(EQUAL, &is_true, Assembler::kNearJump);
1284 __ LoadObject(RAX, Bool::False(), PP); 1282 __ LoadObject(RAX, Bool::False());
1285 __ jmp(&done, Assembler::kNearJump); 1283 __ jmp(&done, Assembler::kNearJump);
1286 __ Bind(&is_true); 1284 __ Bind(&is_true);
1287 __ LoadObject(RAX, Bool::True(), PP); 1285 __ LoadObject(RAX, Bool::True());
1288 __ Bind(&done); 1286 __ Bind(&done);
1289 break; 1287 break;
1290 } 1288 }
1291 default: UNIMPLEMENTED(); 1289 default: UNIMPLEMENTED();
1292 } 1290 }
1293 1291
1294 1292
1295 if (should_update_result_range) { 1293 if (should_update_result_range) {
1296 Label done; 1294 Label done;
1297 __ movq(RSI, RAX); 1295 __ movq(RSI, RAX);
(...skipping 147 matching lines...)
1445 1443
1446 const intptr_t entry_size = ICData::TestEntryLengthFor(num_args) * kWordSize; 1444 const intptr_t entry_size = ICData::TestEntryLengthFor(num_args) * kWordSize;
1447 __ addq(R12, Immediate(entry_size)); // Next entry. 1445 __ addq(R12, Immediate(entry_size)); // Next entry.
1448 __ movq(R13, Address(R12, 0)); // Next class ID. 1446 __ movq(R13, Address(R12, 0)); // Next class ID.
1449 1447
1450 __ Bind(&test); 1448 __ Bind(&test);
1451 __ cmpq(R13, Immediate(Smi::RawValue(kIllegalCid))); // Done? 1449 __ cmpq(R13, Immediate(Smi::RawValue(kIllegalCid))); // Done?
1452 __ j(NOT_EQUAL, &loop, Assembler::kNearJump); 1450 __ j(NOT_EQUAL, &loop, Assembler::kNearJump);
1453 1451
1454 __ Comment("IC miss"); 1452 __ Comment("IC miss");
1455 __ LoadObject(R12, Object::null_object(), PP); 1453 __ LoadObject(R12, Object::null_object());
1456 // Compute address of arguments (first read number of arguments from 1454 // Compute address of arguments (first read number of arguments from
1457 // arguments descriptor array and then compute address on the stack). 1455 // arguments descriptor array and then compute address on the stack).
1458 __ movq(RAX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); 1456 __ movq(RAX, FieldAddress(R10, ArgumentsDescriptor::count_offset()));
1459 __ leaq(RAX, Address(RSP, RAX, TIMES_4, 0)); // RAX is Smi. 1457 __ leaq(RAX, Address(RSP, RAX, TIMES_4, 0)); // RAX is Smi.
1460 __ EnterStubFrame(); 1458 __ EnterStubFrame();
1461 __ pushq(R10); // Preserve arguments descriptor array. 1459 __ pushq(R10); // Preserve arguments descriptor array.
1462 __ pushq(RBX); // Preserve IC data object. 1460 __ pushq(RBX); // Preserve IC data object.
1463 __ pushq(R12); // Setup space on stack for result (target code object). 1461 __ pushq(R12); // Setup space on stack for result (target code object).
1464 // Push call arguments. 1462 // Push call arguments.
1465 for (intptr_t i = 0; i < num_args; i++) { 1463 for (intptr_t i = 0; i < num_args; i++) {
(...skipping 297 matching lines...)
1763 1761
1764 1762
1765 // RBX: Contains an ICData. 1763 // RBX: Contains an ICData.
1766 // TOS(0): return address (Dart code). 1764 // TOS(0): return address (Dart code).
1767 void StubCode::GenerateICCallBreakpointStub(Assembler* assembler) { 1765 void StubCode::GenerateICCallBreakpointStub(Assembler* assembler) {
1768 __ EnterStubFrame(); 1766 __ EnterStubFrame();
1769 // Preserve IC data. 1767 // Preserve IC data.
1770 __ pushq(RBX); 1768 __ pushq(RBX);
1771 // Room for result. Debugger stub returns address of the 1769 // Room for result. Debugger stub returns address of the
1772 // unpatched runtime stub. 1770 // unpatched runtime stub.
1773 __ LoadObject(R12, Object::null_object(), PP); 1771 __ LoadObject(R12, Object::null_object());
1774 __ pushq(R12); // Room for result. 1772 __ pushq(R12); // Room for result.
1775 __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0); 1773 __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0);
1776 __ popq(RAX); // Address of original. 1774 __ popq(RAX); // Address of original.
1777 __ popq(RBX); // Restore IC data. 1775 __ popq(RBX); // Restore IC data.
1778 __ LeaveStubFrame(); 1776 __ LeaveStubFrame();
1779 __ jmp(RAX); // Jump to original stub. 1777 __ jmp(RAX); // Jump to original stub.
1780 } 1778 }
1781 1779
1782 1780
1783 // TOS(0): return address (Dart code). 1781 // TOS(0): return address (Dart code).
1784 void StubCode::GenerateRuntimeCallBreakpointStub(Assembler* assembler) { 1782 void StubCode::GenerateRuntimeCallBreakpointStub(Assembler* assembler) {
1785 __ EnterStubFrame(); 1783 __ EnterStubFrame();
1786 // Room for result. Debugger stub returns address of the 1784 // Room for result. Debugger stub returns address of the
1787 // unpatched runtime stub. 1785 // unpatched runtime stub.
1788 __ LoadObject(R12, Object::null_object(), PP); 1786 __ LoadObject(R12, Object::null_object());
1789 __ pushq(R12); // Room for result. 1787 __ pushq(R12); // Room for result.
1790 __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0); 1788 __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0);
1791 __ popq(RAX); // Address of original. 1789 __ popq(RAX); // Address of original.
1792 __ LeaveStubFrame(); 1790 __ LeaveStubFrame();
1793 __ jmp(RAX); // Jump to original stub. 1791 __ jmp(RAX); // Jump to original stub.
1794 } 1792 }
1795 1793
1796 1794
1797 // Called only from unoptimized code. 1795 // Called only from unoptimized code.
1798 void StubCode::GenerateDebugStepCheckStub(Assembler* assembler) { 1796 void StubCode::GenerateDebugStepCheckStub(Assembler* assembler) {
(...skipping 19 matching lines...)
1818 // TOS + 1: instantiator type arguments (can be NULL). 1816 // TOS + 1: instantiator type arguments (can be NULL).
1819 // TOS + 2: instance. 1817 // TOS + 2: instance.
1820 // TOS + 3: SubtypeTestCache. 1818 // TOS + 3: SubtypeTestCache.
1821 // Result in RCX: null -> not found, otherwise result (true or false). 1819 // Result in RCX: null -> not found, otherwise result (true or false).
1822 static void GenerateSubtypeNTestCacheStub(Assembler* assembler, int n) { 1820 static void GenerateSubtypeNTestCacheStub(Assembler* assembler, int n) {
1823 ASSERT((1 <= n) && (n <= 3)); 1821 ASSERT((1 <= n) && (n <= 3));
1824 const intptr_t kInstantiatorTypeArgumentsInBytes = 1 * kWordSize; 1822 const intptr_t kInstantiatorTypeArgumentsInBytes = 1 * kWordSize;
1825 const intptr_t kInstanceOffsetInBytes = 2 * kWordSize; 1823 const intptr_t kInstanceOffsetInBytes = 2 * kWordSize;
1826 const intptr_t kCacheOffsetInBytes = 3 * kWordSize; 1824 const intptr_t kCacheOffsetInBytes = 3 * kWordSize;
1827 __ movq(RAX, Address(RSP, kInstanceOffsetInBytes)); 1825 __ movq(RAX, Address(RSP, kInstanceOffsetInBytes));
1828 __ LoadObject(R12, Object::null_object(), PP); 1826 __ LoadObject(R12, Object::null_object());
1829 if (n > 1) { 1827 if (n > 1) {
1830 __ LoadClass(R10, RAX, kNoRegister); 1828 __ LoadClass(R10, RAX);
1831 // Compute instance type arguments into R13. 1829 // Compute instance type arguments into R13.
1832 Label has_no_type_arguments; 1830 Label has_no_type_arguments;
1833 __ movq(R13, R12); 1831 __ movq(R13, R12);
1834 __ movl(RDI, FieldAddress(R10, 1832 __ movl(RDI, FieldAddress(R10,
1835 Class::type_arguments_field_offset_in_words_offset())); 1833 Class::type_arguments_field_offset_in_words_offset()));
1836 __ cmpl(RDI, Immediate(Class::kNoTypeArguments)); 1834 __ cmpl(RDI, Immediate(Class::kNoTypeArguments));
1837 __ j(EQUAL, &has_no_type_arguments, Assembler::kNearJump); 1835 __ j(EQUAL, &has_no_type_arguments, Assembler::kNearJump);
1838 __ movq(R13, FieldAddress(RAX, RDI, TIMES_8, 0)); 1836 __ movq(R13, FieldAddress(RAX, RDI, TIMES_8, 0));
1839 __ Bind(&has_no_type_arguments); 1837 __ Bind(&has_no_type_arguments);
1840 } 1838 }
(...skipping 128 matching lines...)
1969 Immediate(0)); 1967 Immediate(0));
1970 __ jmp(CallingConventions::kArg1Reg); // Jump to the exception handler code. 1968 __ jmp(CallingConventions::kArg1Reg); // Jump to the exception handler code.
1971 } 1969 }
1972 1970
1973 1971
1974 // Calls to the runtime to optimize the given function. 1972 // Calls to the runtime to optimize the given function.
1975 // RDI: function to be reoptimized. 1973 // RDI: function to be reoptimized.
1976 // R10: argument descriptor (preserved). 1974 // R10: argument descriptor (preserved).
1977 void StubCode::GenerateOptimizeFunctionStub(Assembler* assembler) { 1975 void StubCode::GenerateOptimizeFunctionStub(Assembler* assembler) {
1978 __ EnterStubFrame(); 1976 __ EnterStubFrame();
1979 __ LoadObject(R12, Object::null_object(), PP); 1977 __ LoadObject(R12, Object::null_object());
1980 __ pushq(R10); 1978 __ pushq(R10);
1981 __ pushq(R12); // Setup space on stack for return value. 1979 __ pushq(R12); // Setup space on stack for return value.
1982 __ pushq(RDI); 1980 __ pushq(RDI);
1983 __ CallRuntime(kOptimizeInvokedFunctionRuntimeEntry, 1); 1981 __ CallRuntime(kOptimizeInvokedFunctionRuntimeEntry, 1);
1984 __ popq(RAX); // Discard argument. 1982 __ popq(RAX); // Discard argument.
1985 __ popq(RAX); // Get Code object. 1983 __ popq(RAX); // Get Code object.
1986 __ popq(R10); // Restore argument descriptor. 1984 __ popq(R10); // Restore argument descriptor.
1987 __ movq(RAX, FieldAddress(RAX, Code::instructions_offset())); 1985 __ movq(RAX, FieldAddress(RAX, Code::instructions_offset()));
1988 __ addq(RAX, Immediate(Instructions::HeaderSize() - kHeapObjectTag)); 1986 __ addq(RAX, Immediate(Instructions::HeaderSize() - kHeapObjectTag));
1989 __ LeaveStubFrame(); 1987 __ LeaveStubFrame();
(...skipping 125 matching lines...)
2115 __ movq(RDI, FieldAddress(cache, MegamorphicCache::buckets_offset())); 2113 __ movq(RDI, FieldAddress(cache, MegamorphicCache::buckets_offset()));
2116 __ movq(RBX, FieldAddress(cache, MegamorphicCache::mask_offset())); 2114 __ movq(RBX, FieldAddress(cache, MegamorphicCache::mask_offset()));
2117 // RDI: cache buckets array. 2115 // RDI: cache buckets array.
2118 // RBX: mask. 2116 // RBX: mask.
2119 __ movq(RCX, RAX); 2117 __ movq(RCX, RAX);
2120 2118
2121 Label loop, update, call_target_function; 2119 Label loop, update, call_target_function;
2122 __ jmp(&loop); 2120 __ jmp(&loop);
2123 2121
2124 __ Bind(&update); 2122 __ Bind(&update);
2125 __ AddImmediate(RCX, Immediate(Smi::RawValue(1)), PP); 2123 __ AddImmediate(RCX, Immediate(Smi::RawValue(1)));
2126 __ Bind(&loop); 2124 __ Bind(&loop);
2127 __ andq(RCX, RBX); 2125 __ andq(RCX, RBX);
2128 const intptr_t base = Array::data_offset(); 2126 const intptr_t base = Array::data_offset();
2129 // RCX is smi tagged, but table entries are two words, so TIMES_8. 2127 // RCX is smi tagged, but table entries are two words, so TIMES_8.
2130 __ movq(RDX, FieldAddress(RDI, RCX, TIMES_8, base)); 2128 __ movq(RDX, FieldAddress(RDI, RCX, TIMES_8, base));
2131 2129
2132 ASSERT(kIllegalCid == 0); 2130 ASSERT(kIllegalCid == 0);
2133 __ testq(RDX, RDX); 2131 __ testq(RDX, RDX);
2134 __ j(ZERO, &call_target_function, Assembler::kNearJump); 2132 __ j(ZERO, &call_target_function, Assembler::kNearJump);
2135 __ cmpq(RDX, RAX); 2133 __ cmpq(RDX, RAX);
2136 __ j(NOT_EQUAL, &update, Assembler::kNearJump); 2134 __ j(NOT_EQUAL, &update, Assembler::kNearJump);
2137 2135
2138 __ Bind(&call_target_function); 2136 __ Bind(&call_target_function);
2139 // Call the target found in the cache. For a class id match, this is a 2137 // Call the target found in the cache. For a class id match, this is a
2140 // proper target for the given name and arguments descriptor. If the 2138 // proper target for the given name and arguments descriptor. If the
2141 // illegal class id was found, the target is a cache miss handler that can 2139 // illegal class id was found, the target is a cache miss handler that can
2142 // be invoked as a normal Dart function. 2140 // be invoked as a normal Dart function.
2143 __ movq(RAX, FieldAddress(RDI, RCX, TIMES_8, base + kWordSize)); 2141 __ movq(RAX, FieldAddress(RDI, RCX, TIMES_8, base + kWordSize));
2144 __ movq(target, FieldAddress(RAX, Function::instructions_offset())); 2142 __ movq(target, FieldAddress(RAX, Function::instructions_offset()));
2145 // TODO(srdjan): Evaluate performance impact of moving the instruction below 2143 // TODO(srdjan): Evaluate performance impact of moving the instruction below
2146 // to the call site, instead of having it here. 2144 // to the call site, instead of having it here.
2147 __ AddImmediate( 2145 __ AddImmediate(
2148 target, Immediate(Instructions::HeaderSize() - kHeapObjectTag), PP); 2146 target, Immediate(Instructions::HeaderSize() - kHeapObjectTag));
2149 } 2147 }
2150 2148
2151 2149
2152 // Called from megamorphic calls. 2150 // Called from megamorphic calls.
2153 // RDI: receiver. 2151 // RDI: receiver.
2154 // RBX: lookup cache. 2152 // RBX: lookup cache.
2155 // Result: 2153 // Result:
2156 // RCX: entry point. 2154 // RCX: entry point.
2157 void StubCode::GenerateMegamorphicLookupStub(Assembler* assembler) { 2155 void StubCode::GenerateMegamorphicLookupStub(Assembler* assembler) {
2158 EmitMegamorphicLookup(assembler, RDI, RBX, RCX); 2156 EmitMegamorphicLookup(assembler, RDI, RBX, RCX);
2159 __ ret(); 2157 __ ret();
2160 } 2158 }
2161 2159
2162 } // namespace dart 2160 } // namespace dart
2163 2161
2164 #endif // defined TARGET_ARCH_X64 2162 #endif // defined TARGET_ARCH_X64
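For reviewers skimming, here is the recurring edit in this file condensed from the hunks above (call-site fragments quoted from the diff, not a buildable stub); the only behavioral addition is the set_constant_pool_allowed(false) call before LeaveFrame in the invocation stub epilogue.

Before:  __ PushObject(Object::null_object(), PP);
After:   __ PushObject(Object::null_object());

Before:  __ LoadObject(R12, Object::null_object(), PP);
After:   __ LoadObject(R12, Object::null_object());

Before:  __ LoadPoolPointer(PP);
After:   __ LoadPoolPointer();

Before:  __ AddImmediate(RCX, Immediate(Smi::RawValue(1)), PP);
After:   __ AddImmediate(RCX, Immediate(Smi::RawValue(1)));

Before:  __ LoadClass(R10, RAX, kNoRegister);
After:   __ LoadClass(R10, RAX);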