| OLD | NEW | 
|---|---|
| 1 // Copyright (c) 2013, the Dart project authors.  Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors.  Please see the AUTHORS file | 
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a | 
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. | 
| 4 | 4 | 
| 5 #include "vm/globals.h" | 5 #include "vm/globals.h" | 
| 6 #if defined(TARGET_ARCH_X64) | 6 #if defined(TARGET_ARCH_X64) | 
| 7 | 7 | 
| 8 #include "vm/assembler.h" | 8 #include "vm/assembler.h" | 
| 9 #include "vm/compiler.h" | 9 #include "vm/compiler.h" | 
| 10 #include "vm/dart_entry.h" | 10 #include "vm/dart_entry.h" | 
| 11 #include "vm/flow_graph_compiler.h" | 11 #include "vm/flow_graph_compiler.h" | 
| 12 #include "vm/heap.h" | 12 #include "vm/heap.h" | 
| 13 #include "vm/instructions.h" | 13 #include "vm/instructions.h" | 
| 14 #include "vm/object_store.h" | 14 #include "vm/object_store.h" | 
| 15 #include "vm/resolver.h" | 15 #include "vm/resolver.h" | 
| 16 #include "vm/scavenger.h" | 16 #include "vm/scavenger.h" | 
| 17 #include "vm/stack_frame.h" | 17 #include "vm/stack_frame.h" | 
| 18 #include "vm/stub_code.h" | 18 #include "vm/stub_code.h" | 
| 19 #include "vm/tags.h" | 19 #include "vm/tags.h" | 
| 20 | 20 | 
| 21 #define __ assembler-> | 21 #define __ assembler-> | 
| 22 | 22 | 
| 23 namespace dart { | 23 namespace dart { | 
| 24 | 24 | 
| 25 DEFINE_FLAG(bool, inline_alloc, true, "Inline allocation of objects."); | 25 DEFINE_FLAG(bool, inline_alloc, true, "Inline allocation of objects."); | 
| 26 DEFINE_FLAG(bool, use_slow_path, false, | 26 DEFINE_FLAG(bool, | 
| 27     "Set to true for debugging & verifying the slow paths."); | 27             use_slow_path, | 
|  | 28             false, | 
|  | 29             "Set to true for debugging & verifying the slow paths."); | 
| 28 DECLARE_FLAG(bool, trace_optimized_ic_calls); | 30 DECLARE_FLAG(bool, trace_optimized_ic_calls); | 
| 29 | 31 | 
| 30 // Input parameters: | 32 // Input parameters: | 
| 31 //   RSP : points to return address. | 33 //   RSP : points to return address. | 
| 32 //   RSP + 8 : address of last argument in argument array. | 34 //   RSP + 8 : address of last argument in argument array. | 
| 33 //   RSP + 8*R10 : address of first argument in argument array. | 35 //   RSP + 8*R10 : address of first argument in argument array. | 
| 34 //   RSP + 8*R10 + 8 : address of return value. | 36 //   RSP + 8*R10 + 8 : address of return value. | 
| 35 //   RBX : address of the runtime function to call. | 37 //   RBX : address of the runtime function to call. | 
| 36 //   R10 : number of arguments to the call. | 38 //   R10 : number of arguments to the call. | 
| 37 // Must preserve callee saved registers R12 and R13. | 39 // Must preserve callee saved registers R12 and R13. | 
| 38 void StubCode::GenerateCallToRuntimeStub(Assembler* assembler) { | 40 void StubCode::GenerateCallToRuntimeStub(Assembler* assembler) { | 
| 39   const intptr_t thread_offset = NativeArguments::thread_offset(); | 41   const intptr_t thread_offset = NativeArguments::thread_offset(); | 
| 40   const intptr_t argc_tag_offset = NativeArguments::argc_tag_offset(); | 42   const intptr_t argc_tag_offset = NativeArguments::argc_tag_offset(); | 
| 41   const intptr_t argv_offset = NativeArguments::argv_offset(); | 43   const intptr_t argv_offset = NativeArguments::argv_offset(); | 
| 42   const intptr_t retval_offset = NativeArguments::retval_offset(); | 44   const intptr_t retval_offset = NativeArguments::retval_offset(); | 
| 43 | 45 | 
| 44   __ EnterStubFrame(); | 46   __ EnterStubFrame(); | 
| 45 | 47 | 
| 46   // Save exit frame information to enable stack walking as we are about | 48   // Save exit frame information to enable stack walking as we are about | 
| 47   // to transition to Dart VM C++ code. | 49   // to transition to Dart VM C++ code. | 
| 48   __ movq(Address(THR, Thread::top_exit_frame_info_offset()), RBP); | 50   __ movq(Address(THR, Thread::top_exit_frame_info_offset()), RBP); | 
| 49 | 51 | 
| 50 #if defined(DEBUG) | 52 #if defined(DEBUG) | 
| 51   { Label ok; | 53   { | 
|  | 54     Label ok; | 
| 52     // Check that we are always entering from Dart code. | 55     // Check that we are always entering from Dart code. | 
| 53     __ movq(RAX, Immediate(VMTag::kDartTagId)); | 56     __ movq(RAX, Immediate(VMTag::kDartTagId)); | 
| 54     __ cmpq(RAX, Assembler::VMTagAddress()); | 57     __ cmpq(RAX, Assembler::VMTagAddress()); | 
| 55     __ j(EQUAL, &ok, Assembler::kNearJump); | 58     __ j(EQUAL, &ok, Assembler::kNearJump); | 
| 56     __ Stop("Not coming from Dart code."); | 59     __ Stop("Not coming from Dart code."); | 
| 57     __ Bind(&ok); | 60     __ Bind(&ok); | 
| 58   } | 61   } | 
| 59 #endif | 62 #endif | 
| 60 | 63 | 
| 61   // Mark that the thread is executing VM code. | 64   // Mark that the thread is executing VM code. | 
| 62   __ movq(Assembler::VMTagAddress(), RBX); | 65   __ movq(Assembler::VMTagAddress(), RBX); | 
| 63 | 66 | 
| 64   // Reserve space for arguments and align frame before entering C++ world. | 67   // Reserve space for arguments and align frame before entering C++ world. | 
| 65   __ subq(RSP, Immediate(sizeof(NativeArguments))); | 68   __ subq(RSP, Immediate(sizeof(NativeArguments))); | 
| 66   if (OS::ActivationFrameAlignment() > 1) { | 69   if (OS::ActivationFrameAlignment() > 1) { | 
| 67     __ andq(RSP, Immediate(~(OS::ActivationFrameAlignment() - 1))); | 70     __ andq(RSP, Immediate(~(OS::ActivationFrameAlignment() - 1))); | 
| 68   } | 71   } | 
| 69 | 72 | 
| 70   // Pass NativeArguments structure by value and call runtime. | 73   // Pass NativeArguments structure by value and call runtime. | 
| 71   __ movq(Address(RSP, thread_offset), THR);  // Set thread in NativeArgs. | 74   __ movq(Address(RSP, thread_offset), THR);  // Set thread in NativeArgs. | 
| 72   // There are no runtime calls to closures, so we do not need to set the tag | 75   // There are no runtime calls to closures, so we do not need to set the tag | 
| 73   // bits kClosureFunctionBit and kInstanceFunctionBit in argc_tag_. | 76   // bits kClosureFunctionBit and kInstanceFunctionBit in argc_tag_. | 
| 74   __ movq(Address(RSP, argc_tag_offset), R10);  // Set argc in NativeArguments. | 77   __ movq(Address(RSP, argc_tag_offset), R10);  // Set argc in NativeArguments. | 
| 75   // Compute argv. | 78   // Compute argv. | 
| 76   __ leaq(RAX, Address(RBP, R10, TIMES_8, kParamEndSlotFromFp * kWordSize)); | 79   __ leaq(RAX, Address(RBP, R10, TIMES_8, kParamEndSlotFromFp * kWordSize)); | 
| 77   __ movq(Address(RSP, argv_offset), RAX);  // Set argv in NativeArguments. | 80   __ movq(Address(RSP, argv_offset), RAX);    // Set argv in NativeArguments. | 
| 78   __ addq(RAX, Immediate(1 * kWordSize));  // Retval is next to 1st argument. | 81   __ addq(RAX, Immediate(1 * kWordSize));     // Retval is next to 1st argument. | 
| 79   __ movq(Address(RSP, retval_offset), RAX);  // Set retval in NativeArguments. | 82   __ movq(Address(RSP, retval_offset), RAX);  // Set retval in NativeArguments. | 
| 80 #if defined(_WIN64) | 83 #if defined(_WIN64) | 
| 81   ASSERT(sizeof(NativeArguments) > CallingConventions::kRegisterTransferLimit); | 84   ASSERT(sizeof(NativeArguments) > CallingConventions::kRegisterTransferLimit); | 
| 82   __ movq(CallingConventions::kArg1Reg, RSP); | 85   __ movq(CallingConventions::kArg1Reg, RSP); | 
| 83 #endif | 86 #endif | 
| 84   __ CallCFunction(RBX); | 87   __ CallCFunction(RBX); | 
| 85 | 88 | 
| 86   // Mark that the thread is executing Dart code. | 89   // Mark that the thread is executing Dart code. | 
| 87   __ movq(Assembler::VMTagAddress(), Immediate(VMTag::kDartTagId)); | 90   __ movq(Assembler::VMTagAddress(), Immediate(VMTag::kDartTagId)); | 
| 88 | 91 | 
| (...skipping 11 matching lines...) |  |
| 100 } | 103 } | 
| 101 END_LEAF_RUNTIME_ENTRY | 104 END_LEAF_RUNTIME_ENTRY | 
| 102 | 105 | 
| 103 | 106 | 
| 104 // Input parameters: | 107 // Input parameters: | 
| 105 //   RSP : points to return address. | 108 //   RSP : points to return address. | 
| 106 //   RDI : stop message (const char*). | 109 //   RDI : stop message (const char*). | 
| 107 // Must preserve all registers. | 110 // Must preserve all registers. | 
| 108 void StubCode::GeneratePrintStopMessageStub(Assembler* assembler) { | 111 void StubCode::GeneratePrintStopMessageStub(Assembler* assembler) { | 
| 109   __ EnterCallRuntimeFrame(0); | 112   __ EnterCallRuntimeFrame(0); | 
| 110   // Call the runtime leaf function. RDI already contains the parameter. | 113 // Call the runtime leaf function. RDI already contains the parameter. | 
| 111 #if defined(_WIN64) | 114 #if defined(_WIN64) | 
| 112   __ movq(CallingConventions::kArg1Reg, RDI); | 115   __ movq(CallingConventions::kArg1Reg, RDI); | 
| 113 #endif | 116 #endif | 
| 114   __ CallRuntime(kPrintStopMessageRuntimeEntry, 1); | 117   __ CallRuntime(kPrintStopMessageRuntimeEntry, 1); | 
| 115   __ LeaveCallRuntimeFrame(); | 118   __ LeaveCallRuntimeFrame(); | 
| 116   __ ret(); | 119   __ ret(); | 
| 117 } | 120 } | 
| 118 | 121 | 
| 119 | 122 | 
| 120 // Input parameters: | 123 // Input parameters: | 
| (...skipping 13 matching lines...) |  |
| 134   const intptr_t retval_offset = | 137   const intptr_t retval_offset = | 
| 135       NativeArguments::retval_offset() + native_args_struct_offset; | 138       NativeArguments::retval_offset() + native_args_struct_offset; | 
| 136 | 139 | 
| 137   __ EnterStubFrame(); | 140   __ EnterStubFrame(); | 
| 138 | 141 | 
| 139   // Save exit frame information to enable stack walking as we are about | 142   // Save exit frame information to enable stack walking as we are about | 
| 140   // to transition to native code. | 143   // to transition to native code. | 
| 141   __ movq(Address(THR, Thread::top_exit_frame_info_offset()), RBP); | 144   __ movq(Address(THR, Thread::top_exit_frame_info_offset()), RBP); | 
| 142 | 145 | 
| 143 #if defined(DEBUG) | 146 #if defined(DEBUG) | 
| 144   { Label ok; | 147   { | 
|  | 148     Label ok; | 
| 145     // Check that we are always entering from Dart code. | 149     // Check that we are always entering from Dart code. | 
| 146     __ movq(R8, Immediate(VMTag::kDartTagId)); | 150     __ movq(R8, Immediate(VMTag::kDartTagId)); | 
| 147     __ cmpq(R8, Assembler::VMTagAddress()); | 151     __ cmpq(R8, Assembler::VMTagAddress()); | 
| 148     __ j(EQUAL, &ok, Assembler::kNearJump); | 152     __ j(EQUAL, &ok, Assembler::kNearJump); | 
| 149     __ Stop("Not coming from Dart code."); | 153     __ Stop("Not coming from Dart code."); | 
| 150     __ Bind(&ok); | 154     __ Bind(&ok); | 
| 151   } | 155   } | 
| 152 #endif | 156 #endif | 
| 153 | 157 | 
| 154   // Mark that the thread is executing native code. | 158   // Mark that the thread is executing native code. | 
| 155   __ movq(Assembler::VMTagAddress(), RBX); | 159   __ movq(Assembler::VMTagAddress(), RBX); | 
| 156 | 160 | 
| 157   // Reserve space for the native arguments structure passed on the stack (the | 161   // Reserve space for the native arguments structure passed on the stack (the | 
| 158   // outgoing pointer parameter to the native arguments structure is passed in | 162   // outgoing pointer parameter to the native arguments structure is passed in | 
| 159   // RDI) and align frame before entering the C++ world. | 163   // RDI) and align frame before entering the C++ world. | 
| 160   __ subq(RSP, Immediate(sizeof(NativeArguments))); | 164   __ subq(RSP, Immediate(sizeof(NativeArguments))); | 
| 161   if (OS::ActivationFrameAlignment() > 1) { | 165   if (OS::ActivationFrameAlignment() > 1) { | 
| 162     __ andq(RSP, Immediate(~(OS::ActivationFrameAlignment() - 1))); | 166     __ andq(RSP, Immediate(~(OS::ActivationFrameAlignment() - 1))); | 
| 163   } | 167   } | 
| 164 | 168 | 
| 165   // Pass NativeArguments structure by value and call native function. | 169   // Pass NativeArguments structure by value and call native function. | 
| 166   __ movq(Address(RSP, thread_offset), THR);  // Set thread in NativeArgs. | 170   __ movq(Address(RSP, thread_offset), THR);    // Set thread in NativeArgs. | 
| 167   __ movq(Address(RSP, argc_tag_offset), R10);  // Set argc in NativeArguments. | 171   __ movq(Address(RSP, argc_tag_offset), R10);  // Set argc in NativeArguments. | 
| 168   __ movq(Address(RSP, argv_offset), RAX);  // Set argv in NativeArguments. | 172   __ movq(Address(RSP, argv_offset), RAX);      // Set argv in NativeArguments. | 
| 169   __ leaq(RAX, Address(RBP, 2 * kWordSize));  // Compute return value addr. | 173   __ leaq(RAX, Address(RBP, 2 * kWordSize));    // Compute return value addr. | 
| 170   __ movq(Address(RSP, retval_offset), RAX);  // Set retval in NativeArguments. | 174   __ movq(Address(RSP, retval_offset), RAX);  // Set retval in NativeArguments. | 
| 171 | 175 | 
| 172   // Pass the pointer to the NativeArguments. | 176   // Pass the pointer to the NativeArguments. | 
| 173   __ movq(CallingConventions::kArg1Reg, RSP); | 177   __ movq(CallingConventions::kArg1Reg, RSP); | 
| 174   // Pass pointer to function entrypoint. | 178   // Pass pointer to function entrypoint. | 
| 175   __ movq(CallingConventions::kArg2Reg, RBX); | 179   __ movq(CallingConventions::kArg2Reg, RBX); | 
| 176 | 180 | 
| 177   __ movq(RAX, Address(THR, Thread::native_call_wrapper_entry_point_offset())); | 181   __ movq(RAX, Address(THR, Thread::native_call_wrapper_entry_point_offset())); | 
| 178   __ CallCFunction(RAX); | 182   __ CallCFunction(RAX); | 
| 179 | 183 | 
| (...skipping 25 matching lines...) |  |
| 205   const intptr_t retval_offset = | 209   const intptr_t retval_offset = | 
| 206       NativeArguments::retval_offset() + native_args_struct_offset; | 210       NativeArguments::retval_offset() + native_args_struct_offset; | 
| 207 | 211 | 
| 208   __ EnterStubFrame(); | 212   __ EnterStubFrame(); | 
| 209 | 213 | 
| 210   // Save exit frame information to enable stack walking as we are about | 214   // Save exit frame information to enable stack walking as we are about | 
| 211   // to transition to native code. | 215   // to transition to native code. | 
| 212   __ movq(Address(THR, Thread::top_exit_frame_info_offset()), RBP); | 216   __ movq(Address(THR, Thread::top_exit_frame_info_offset()), RBP); | 
| 213 | 217 | 
| 214 #if defined(DEBUG) | 218 #if defined(DEBUG) | 
| 215   { Label ok; | 219   { | 
|  | 220     Label ok; | 
| 216     // Check that we are always entering from Dart code. | 221     // Check that we are always entering from Dart code. | 
| 217     __ movq(R8, Immediate(VMTag::kDartTagId)); | 222     __ movq(R8, Immediate(VMTag::kDartTagId)); | 
| 218     __ cmpq(R8, Assembler::VMTagAddress()); | 223     __ cmpq(R8, Assembler::VMTagAddress()); | 
| 219     __ j(EQUAL, &ok, Assembler::kNearJump); | 224     __ j(EQUAL, &ok, Assembler::kNearJump); | 
| 220     __ Stop("Not coming from Dart code."); | 225     __ Stop("Not coming from Dart code."); | 
| 221     __ Bind(&ok); | 226     __ Bind(&ok); | 
| 222   } | 227   } | 
| 223 #endif | 228 #endif | 
| 224 | 229 | 
| 225   // Mark that the thread is executing native code. | 230   // Mark that the thread is executing native code. | 
| 226   __ movq(Assembler::VMTagAddress(), RBX); | 231   __ movq(Assembler::VMTagAddress(), RBX); | 
| 227 | 232 | 
| 228   // Reserve space for the native arguments structure passed on the stack (the | 233   // Reserve space for the native arguments structure passed on the stack (the | 
| 229   // outgoing pointer parameter to the native arguments structure is passed in | 234   // outgoing pointer parameter to the native arguments structure is passed in | 
| 230   // RDI) and align frame before entering the C++ world. | 235   // RDI) and align frame before entering the C++ world. | 
| 231   __ subq(RSP, Immediate(sizeof(NativeArguments))); | 236   __ subq(RSP, Immediate(sizeof(NativeArguments))); | 
| 232   if (OS::ActivationFrameAlignment() > 1) { | 237   if (OS::ActivationFrameAlignment() > 1) { | 
| 233     __ andq(RSP, Immediate(~(OS::ActivationFrameAlignment() - 1))); | 238     __ andq(RSP, Immediate(~(OS::ActivationFrameAlignment() - 1))); | 
| 234   } | 239   } | 
| 235 | 240 | 
| 236   // Pass NativeArguments structure by value and call native function. | 241   // Pass NativeArguments structure by value and call native function. | 
| 237   __ movq(Address(RSP, thread_offset), THR);  // Set thread in NativeArgs. | 242   __ movq(Address(RSP, thread_offset), THR);    // Set thread in NativeArgs. | 
| 238   __ movq(Address(RSP, argc_tag_offset), R10);  // Set argc in NativeArguments. | 243   __ movq(Address(RSP, argc_tag_offset), R10);  // Set argc in NativeArguments. | 
| 239   __ movq(Address(RSP, argv_offset), RAX);  // Set argv in NativeArguments. | 244   __ movq(Address(RSP, argv_offset), RAX);      // Set argv in NativeArguments. | 
| 240   __ leaq(RAX, Address(RBP, 2 * kWordSize));  // Compute return value addr. | 245   __ leaq(RAX, Address(RBP, 2 * kWordSize));    // Compute return value addr. | 
| 241   __ movq(Address(RSP, retval_offset), RAX);  // Set retval in NativeArguments. | 246   __ movq(Address(RSP, retval_offset), RAX);  // Set retval in NativeArguments. | 
| 242 | 247 | 
| 243   // Pass the pointer to the NativeArguments. | 248   // Pass the pointer to the NativeArguments. | 
| 244   __ movq(CallingConventions::kArg1Reg, RSP); | 249   __ movq(CallingConventions::kArg1Reg, RSP); | 
| 245   __ CallCFunction(RBX); | 250   __ CallCFunction(RBX); | 
| 246 | 251 | 
| 247   // Mark that the thread is executing Dart code. | 252   // Mark that the thread is executing Dart code. | 
| 248   __ movq(Assembler::VMTagAddress(), Immediate(VMTag::kDartTagId)); | 253   __ movq(Assembler::VMTagAddress(), Immediate(VMTag::kDartTagId)); | 
| 249 | 254 | 
| 250   // Reset exit frame information in Isolate structure. | 255   // Reset exit frame information in Isolate structure. | 
| 251   __ movq(Address(THR, Thread::top_exit_frame_info_offset()), Immediate(0)); | 256   __ movq(Address(THR, Thread::top_exit_frame_info_offset()), Immediate(0)); | 
| 252 | 257 | 
| 253   __ LeaveStubFrame(); | 258   __ LeaveStubFrame(); | 
| 254   __ ret(); | 259   __ ret(); | 
| 255 } | 260 } | 
| 256 | 261 | 
| 257 | 262 | 
| 258 // Input parameters: | 263 // Input parameters: | 
| 259 //   R10: arguments descriptor array. | 264 //   R10: arguments descriptor array. | 
| 260 void StubCode::GenerateCallStaticFunctionStub(Assembler* assembler) { | 265 void StubCode::GenerateCallStaticFunctionStub(Assembler* assembler) { | 
| 261   __ EnterStubFrame(); | 266   __ EnterStubFrame(); | 
| 262   __ pushq(R10);  // Preserve arguments descriptor array. | 267   __ pushq(R10);  // Preserve arguments descriptor array. | 
| 263   // Setup space on stack for return value. | 268   // Setup space on stack for return value. | 
| 264   __ pushq(Immediate(0)); | 269   __ pushq(Immediate(0)); | 
| 265   __ CallRuntime(kPatchStaticCallRuntimeEntry, 0); | 270   __ CallRuntime(kPatchStaticCallRuntimeEntry, 0); | 
| 266   __ popq(CODE_REG);  // Get Code object result. | 271   __ popq(CODE_REG);  // Get Code object result. | 
| 267   __ popq(R10);  // Restore arguments descriptor array. | 272   __ popq(R10);       // Restore arguments descriptor array. | 
| 268   // Remove the stub frame as we are about to jump to the dart function. | 273   // Remove the stub frame as we are about to jump to the dart function. | 
| 269   __ LeaveStubFrame(); | 274   __ LeaveStubFrame(); | 
| 270 | 275 | 
| 271   __ movq(RBX, FieldAddress(CODE_REG, Code::entry_point_offset())); | 276   __ movq(RBX, FieldAddress(CODE_REG, Code::entry_point_offset())); | 
| 272   __ jmp(RBX); | 277   __ jmp(RBX); | 
| 273 } | 278 } | 
| 274 | 279 | 
| 275 | 280 | 
| 276 // Called from a static call only when an invalid code has been entered | 281 // Called from a static call only when an invalid code has been entered | 
| 277 // (invalid because its function was optimized or deoptimized). | 282 // (invalid because its function was optimized or deoptimized). | 
| 278 // R10: arguments descriptor array. | 283 // R10: arguments descriptor array. | 
| 279 void StubCode::GenerateFixCallersTargetStub(Assembler* assembler) { | 284 void StubCode::GenerateFixCallersTargetStub(Assembler* assembler) { | 
| 280   // Load code pointer to this stub from the thread: | 285   // Load code pointer to this stub from the thread: | 
| 281   // The one that is passed in, is not correct - it points to the code object | 286   // The one that is passed in, is not correct - it points to the code object | 
| 282   // that needs to be replaced. | 287   // that needs to be replaced. | 
| 283   __ movq(CODE_REG, Address(THR, Thread::fix_callers_target_code_offset())); | 288   __ movq(CODE_REG, Address(THR, Thread::fix_callers_target_code_offset())); | 
| 284   __ EnterStubFrame(); | 289   __ EnterStubFrame(); | 
| 285   __ pushq(R10);  // Preserve arguments descriptor array. | 290   __ pushq(R10);  // Preserve arguments descriptor array. | 
| 286   // Setup space on stack for return value. | 291   // Setup space on stack for return value. | 
| 287   __ pushq(Immediate(0)); | 292   __ pushq(Immediate(0)); | 
| 288   __ CallRuntime(kFixCallersTargetRuntimeEntry, 0); | 293   __ CallRuntime(kFixCallersTargetRuntimeEntry, 0); | 
| 289   __ popq(CODE_REG);  // Get Code object. | 294   __ popq(CODE_REG);  // Get Code object. | 
| 290   __ popq(R10);  // Restore arguments descriptor array. | 295   __ popq(R10);       // Restore arguments descriptor array. | 
| 291   __ movq(RAX, FieldAddress(CODE_REG, Code::entry_point_offset())); | 296   __ movq(RAX, FieldAddress(CODE_REG, Code::entry_point_offset())); | 
| 292   __ LeaveStubFrame(); | 297   __ LeaveStubFrame(); | 
| 293   __ jmp(RAX); | 298   __ jmp(RAX); | 
| 294   __ int3(); | 299   __ int3(); | 
| 295 } | 300 } | 
| 296 | 301 | 
| 297 | 302 | 
| 298 // Called from object allocate instruction when the allocation stub has been | 303 // Called from object allocate instruction when the allocation stub has been | 
| 299 // disabled. | 304 // disabled. | 
| 300 void StubCode::GenerateFixAllocationStubTargetStub(Assembler* assembler) { | 305 void StubCode::GenerateFixAllocationStubTargetStub(Assembler* assembler) { | 
| (...skipping 104 matching lines...) |  |
| 405   __ subq(RSP, Immediate(kNumberOfXmmRegisters * kFpuRegisterSize)); | 410   __ subq(RSP, Immediate(kNumberOfXmmRegisters * kFpuRegisterSize)); | 
| 406   intptr_t offset = 0; | 411   intptr_t offset = 0; | 
| 407   for (intptr_t reg_idx = 0; reg_idx < kNumberOfXmmRegisters; ++reg_idx) { | 412   for (intptr_t reg_idx = 0; reg_idx < kNumberOfXmmRegisters; ++reg_idx) { | 
| 408     XmmRegister xmm_reg = static_cast<XmmRegister>(reg_idx); | 413     XmmRegister xmm_reg = static_cast<XmmRegister>(reg_idx); | 
| 409     __ movups(Address(RSP, offset), xmm_reg); | 414     __ movups(Address(RSP, offset), xmm_reg); | 
| 410     offset += kFpuRegisterSize; | 415     offset += kFpuRegisterSize; | 
| 411   } | 416   } | 
| 412 | 417 | 
| 413   // Pass address of saved registers block. | 418   // Pass address of saved registers block. | 
| 414   __ movq(CallingConventions::kArg1Reg, RSP); | 419   __ movq(CallingConventions::kArg1Reg, RSP); | 
| 415   bool is_lazy = (kind == kLazyDeoptFromReturn) || | 420   bool is_lazy = | 
| 416                  (kind == kLazyDeoptFromThrow); | 421       (kind == kLazyDeoptFromReturn) || (kind == kLazyDeoptFromThrow); | 
| 417   __ movq(CallingConventions::kArg2Reg, Immediate(is_lazy ? 1 : 0)); | 422   __ movq(CallingConventions::kArg2Reg, Immediate(is_lazy ? 1 : 0)); | 
| 418   __ ReserveAlignedFrameSpace(0);  // Ensure stack is aligned before the call. | 423   __ ReserveAlignedFrameSpace(0);  // Ensure stack is aligned before the call. | 
| 419   __ CallRuntime(kDeoptimizeCopyFrameRuntimeEntry, 2); | 424   __ CallRuntime(kDeoptimizeCopyFrameRuntimeEntry, 2); | 
| 420   // Result (RAX) is stack-size (FP - SP) in bytes. | 425   // Result (RAX) is stack-size (FP - SP) in bytes. | 
| 421 | 426 | 
| 422   if (kind == kLazyDeoptFromReturn) { | 427   if (kind == kLazyDeoptFromReturn) { | 
| 423     // Restore result into RBX temporarily. | 428     // Restore result into RBX temporarily. | 
| 424     __ movq(RBX, Address(RBP, saved_result_slot_from_fp * kWordSize)); | 429     __ movq(RBX, Address(RBP, saved_result_slot_from_fp * kWordSize)); | 
| 425   } else if (kind == kLazyDeoptFromThrow) { | 430   } else if (kind == kLazyDeoptFromThrow) { | 
| 426     // Restore result into RBX temporarily. | 431     // Restore result into RBX temporarily. | 
| 427     __ movq(RBX, Address(RBP, saved_exception_slot_from_fp * kWordSize)); | 432     __ movq(RBX, Address(RBP, saved_exception_slot_from_fp * kWordSize)); | 
| 428     __ movq(RDX, Address(RBP, saved_stacktrace_slot_from_fp * kWordSize)); | 433     __ movq(RDX, Address(RBP, saved_stacktrace_slot_from_fp * kWordSize)); | 
| 429   } | 434   } | 
| 430 | 435 | 
| 431   // There is a Dart Frame on the stack. We must restore PP and leave frame. | 436   // There is a Dart Frame on the stack. We must restore PP and leave frame. | 
| 432   __ RestoreCodePointer(); | 437   __ RestoreCodePointer(); | 
| 433   __ LeaveStubFrame(); | 438   __ LeaveStubFrame(); | 
| 434 | 439 | 
| 435   __ popq(RCX);   // Preserve return address. | 440   __ popq(RCX);       // Preserve return address. | 
| 436   __ movq(RSP, RBP);  // Discard optimized frame. | 441   __ movq(RSP, RBP);  // Discard optimized frame. | 
| 437   __ subq(RSP, RAX);  // Reserve space for deoptimized frame. | 442   __ subq(RSP, RAX);  // Reserve space for deoptimized frame. | 
| 438   __ pushq(RCX);  // Restore return address. | 443   __ pushq(RCX);      // Restore return address. | 
| 439 | 444 | 
| 440   // DeoptimizeFillFrame expects a Dart frame, i.e. EnterDartFrame(0), but there | 445   // DeoptimizeFillFrame expects a Dart frame, i.e. EnterDartFrame(0), but there | 
| 441   // is no need to set the correct PC marker or load PP, since they get patched. | 446   // is no need to set the correct PC marker or load PP, since they get patched. | 
| 442   __ EnterStubFrame(); | 447   __ EnterStubFrame(); | 
| 443 | 448 | 
| 444   if (kind == kLazyDeoptFromReturn) { | 449   if (kind == kLazyDeoptFromReturn) { | 
| 445     __ pushq(RBX);  // Preserve result as first local. | 450     __ pushq(RBX);  // Preserve result as first local. | 
| 446   } else if (kind == kLazyDeoptFromThrow) { | 451   } else if (kind == kLazyDeoptFromThrow) { | 
| 447     __ pushq(RBX);  // Preserve exception as first local. | 452     __ pushq(RBX);  // Preserve exception as first local. | 
| 448     __ pushq(RDX);  // Preserve stacktrace as second local. | 453     __ pushq(RDX);  // Preserve stacktrace as second local. | 
| (...skipping 34 matching lines...) |  |
| 483   __ popq(RBX); | 488   __ popq(RBX); | 
| 484   __ SmiUntag(RBX); | 489   __ SmiUntag(RBX); | 
| 485   if (kind == kLazyDeoptFromReturn) { | 490   if (kind == kLazyDeoptFromReturn) { | 
| 486     __ popq(RAX);  // Restore result. | 491     __ popq(RAX);  // Restore result. | 
| 487   } else if (kind == kLazyDeoptFromThrow) { | 492   } else if (kind == kLazyDeoptFromThrow) { | 
| 488     __ popq(RDX);  // Restore stacktrace. | 493     __ popq(RDX);  // Restore stacktrace. | 
| 489     __ popq(RAX);  // Restore exception. | 494     __ popq(RAX);  // Restore exception. | 
| 490   } | 495   } | 
| 491   __ LeaveStubFrame(); | 496   __ LeaveStubFrame(); | 
| 492 | 497 | 
| 493   __ popq(RCX);  // Pop return address. | 498   __ popq(RCX);       // Pop return address. | 
| 494   __ addq(RSP, RBX);  // Remove materialization arguments. | 499   __ addq(RSP, RBX);  // Remove materialization arguments. | 
| 495   __ pushq(RCX);  // Push return address. | 500   __ pushq(RCX);      // Push return address. | 
| 496   __ ret(); | 501   __ ret(); | 
| 497 } | 502 } | 
| 498 | 503 | 
| 499 | 504 | 
| 500 // RAX: result, must be preserved | 505 // RAX: result, must be preserved | 
| 501 void StubCode::GenerateDeoptimizeLazyFromReturnStub(Assembler* assembler) { | 506 void StubCode::GenerateDeoptimizeLazyFromReturnStub(Assembler* assembler) { | 
| 502   // Push zap value instead of CODE_REG for lazy deopt. | 507   // Push zap value instead of CODE_REG for lazy deopt. | 
| 503   __ pushq(Immediate(0xf1f1f1f1)); | 508   __ pushq(Immediate(0xf1f1f1f1)); | 
| 504   // Return address for "call" to deopt stub. | 509   // Return address for "call" to deopt stub. | 
| 505   __ pushq(Immediate(0xe1e1e1e1)); | 510   __ pushq(Immediate(0xe1e1e1e1)); | 
| (...skipping 22 matching lines...) |  |
| 528 static void GenerateDispatcherCode(Assembler* assembler, | 533 static void GenerateDispatcherCode(Assembler* assembler, | 
| 529                                    Label* call_target_function) { | 534                                    Label* call_target_function) { | 
| 530   __ Comment("NoSuchMethodDispatch"); | 535   __ Comment("NoSuchMethodDispatch"); | 
| 531   // When lazily generated invocation dispatchers are disabled, the | 536   // When lazily generated invocation dispatchers are disabled, the | 
| 532   // miss-handler may return null. | 537   // miss-handler may return null. | 
| 533   __ CompareObject(RAX, Object::null_object()); | 538   __ CompareObject(RAX, Object::null_object()); | 
| 534   __ j(NOT_EQUAL, call_target_function); | 539   __ j(NOT_EQUAL, call_target_function); | 
| 535   __ EnterStubFrame(); | 540   __ EnterStubFrame(); | 
| 536   // Load the receiver. | 541   // Load the receiver. | 
| 537   __ movq(RDI, FieldAddress(R10, ArgumentsDescriptor::count_offset())); | 542   __ movq(RDI, FieldAddress(R10, ArgumentsDescriptor::count_offset())); | 
| 538   __ movq(RAX, Address( | 543   __ movq(RAX, Address(RBP, RDI, TIMES_HALF_WORD_SIZE, | 
| 539       RBP, RDI, TIMES_HALF_WORD_SIZE, kParamEndSlotFromFp * kWordSize)); | 544                        kParamEndSlotFromFp * kWordSize)); | 
| 540   __ pushq(Immediate(0));  // Setup space on stack for result. | 545   __ pushq(Immediate(0));  // Setup space on stack for result. | 
| 541   __ pushq(RAX);  // Receiver. | 546   __ pushq(RAX);           // Receiver. | 
| 542   __ pushq(RBX);  // ICData/MegamorphicCache. | 547   __ pushq(RBX);           // ICData/MegamorphicCache. | 
| 543   __ pushq(R10);  // Arguments descriptor array. | 548   __ pushq(R10);           // Arguments descriptor array. | 
| 544   __ movq(R10, RDI); | 549   __ movq(R10, RDI); | 
| 545   // EDX: Smi-tagged arguments array length. | 550   // EDX: Smi-tagged arguments array length. | 
| 546   PushArgumentsArray(assembler); | 551   PushArgumentsArray(assembler); | 
| 547   const intptr_t kNumArgs = 4; | 552   const intptr_t kNumArgs = 4; | 
| 548   __ CallRuntime(kInvokeNoSuchMethodDispatcherRuntimeEntry, kNumArgs); | 553   __ CallRuntime(kInvokeNoSuchMethodDispatcherRuntimeEntry, kNumArgs); | 
| 549   __ Drop(4); | 554   __ Drop(4); | 
| 550   __ popq(RAX);  // Return value. | 555   __ popq(RAX);  // Return value. | 
| 551   __ LeaveStubFrame(); | 556   __ LeaveStubFrame(); | 
| 552   __ ret(); | 557   __ ret(); | 
| 553 } | 558 } | 
| 554 | 559 | 
| 555 | 560 | 
| 556 void StubCode::GenerateMegamorphicMissStub(Assembler* assembler) { | 561 void StubCode::GenerateMegamorphicMissStub(Assembler* assembler) { | 
| 557   __ EnterStubFrame(); | 562   __ EnterStubFrame(); | 
| 558   // Load the receiver into RAX.  The argument count in the arguments | 563   // Load the receiver into RAX.  The argument count in the arguments | 
| 559   // descriptor in R10 is a smi. | 564   // descriptor in R10 is a smi. | 
| 560   __ movq(RAX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); | 565   __ movq(RAX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); | 
| 561   // Three words (saved pp, saved fp, stub's pc marker) | 566   // Three words (saved pp, saved fp, stub's pc marker) | 
| 562   // in the stack above the return address. | 567   // in the stack above the return address. | 
| 563   __ movq(RAX, Address(RSP, RAX, TIMES_4, | 568   __ movq(RAX, | 
| 564                        kSavedAboveReturnAddress * kWordSize)); | 569           Address(RSP, RAX, TIMES_4, kSavedAboveReturnAddress * kWordSize)); | 
| 565   // Preserve IC data and arguments descriptor. | 570   // Preserve IC data and arguments descriptor. | 
| 566   __ pushq(RBX); | 571   __ pushq(RBX); | 
| 567   __ pushq(R10); | 572   __ pushq(R10); | 
| 568 | 573 | 
| 569   // Space for the result of the runtime call. | 574   // Space for the result of the runtime call. | 
| 570   __ pushq(Immediate(0)); | 575   __ pushq(Immediate(0)); | 
| 571   __ pushq(RAX);  // Receiver. | 576   __ pushq(RAX);  // Receiver. | 
| 572   __ pushq(RBX);  // IC data. | 577   __ pushq(RBX);  // IC data. | 
| 573   __ pushq(R10);  // Arguments descriptor. | 578   __ pushq(R10);  // Arguments descriptor. | 
| 574   __ CallRuntime(kMegamorphicCacheMissHandlerRuntimeEntry, 3); | 579   __ CallRuntime(kMegamorphicCacheMissHandlerRuntimeEntry, 3); | 
| (...skipping 38 matching lines...) |  |
| 613   } | 618   } | 
| 614   __ cmpq(RDI, Immediate(0)); | 619   __ cmpq(RDI, Immediate(0)); | 
| 615   __ j(LESS, &slow_case); | 620   __ j(LESS, &slow_case); | 
| 616   // Check for maximum allowed length. | 621   // Check for maximum allowed length. | 
| 617   const Immediate& max_len = | 622   const Immediate& max_len = | 
| 618       Immediate(reinterpret_cast<int64_t>(Smi::New(Array::kMaxElements))); | 623       Immediate(reinterpret_cast<int64_t>(Smi::New(Array::kMaxElements))); | 
| 619   __ cmpq(RDI, max_len); | 624   __ cmpq(RDI, max_len); | 
| 620   __ j(GREATER, &slow_case); | 625   __ j(GREATER, &slow_case); | 
| 621 | 626 | 
| 622   // Check for allocation tracing. | 627   // Check for allocation tracing. | 
| 623   NOT_IN_PRODUCT(__ MaybeTraceAllocation(kArrayCid, | 628   NOT_IN_PRODUCT( | 
| 624                                          &slow_case, | 629       __ MaybeTraceAllocation(kArrayCid, &slow_case, Assembler::kFarJump)); | 
| 625                                          Assembler::kFarJump)); |  | 
| 626 | 630 | 
| 627   const intptr_t fixed_size = sizeof(RawArray) + kObjectAlignment - 1; | 631   const intptr_t fixed_size = sizeof(RawArray) + kObjectAlignment - 1; | 
| 628   __ leaq(RDI, Address(RDI, TIMES_4, fixed_size));  // RDI is a Smi. | 632   __ leaq(RDI, Address(RDI, TIMES_4, fixed_size));  // RDI is a Smi. | 
| 629   ASSERT(kSmiTagShift == 1); | 633   ASSERT(kSmiTagShift == 1); | 
| 630   __ andq(RDI, Immediate(-kObjectAlignment)); | 634   __ andq(RDI, Immediate(-kObjectAlignment)); | 
| 631 | 635 | 
| 632   const intptr_t cid = kArrayCid; | 636   const intptr_t cid = kArrayCid; | 
| 633   Heap::Space space = Heap::kNew; | 637   Heap::Space space = Heap::kNew; | 
| 634   __ movq(R13, Address(THR, Thread::heap_offset())); | 638   __ movq(R13, Address(THR, Thread::heap_offset())); | 
| 635   __ movq(RAX, Address(R13, Heap::TopOffset(space))); | 639   __ movq(RAX, Address(R13, Heap::TopOffset(space))); | 
| (...skipping 31 matching lines...) |  |
| 667     __ Bind(&done); | 671     __ Bind(&done); | 
| 668 | 672 | 
| 669     // Get the class index and insert it into the tags. | 673     // Get the class index and insert it into the tags. | 
| 670     __ orq(RDI, Immediate(RawObject::ClassIdTag::encode(cid))); | 674     __ orq(RDI, Immediate(RawObject::ClassIdTag::encode(cid))); | 
| 671     __ movq(FieldAddress(RAX, Array::tags_offset()), RDI);  // Tags. | 675     __ movq(FieldAddress(RAX, Array::tags_offset()), RDI);  // Tags. | 
| 672   } | 676   } | 
| 673 | 677 | 
| 674   // RAX: new object start as a tagged pointer. | 678   // RAX: new object start as a tagged pointer. | 
| 675   // Store the type argument field. | 679   // Store the type argument field. | 
| 676   // No generational barrier needed, since we store into a new object. | 680   // No generational barrier needed, since we store into a new object. | 
| 677   __ StoreIntoObjectNoBarrier(RAX, | 681   __ StoreIntoObjectNoBarrier( | 
| 678                               FieldAddress(RAX, Array::type_arguments_offset()), | 682       RAX, FieldAddress(RAX, Array::type_arguments_offset()), RBX); | 
| 679                               RBX); |  | 
| 680 | 683 | 
| 681   // Set the length field. | 684   // Set the length field. | 
| 682   __ StoreIntoObjectNoBarrier(RAX, | 685   __ StoreIntoObjectNoBarrier(RAX, FieldAddress(RAX, Array::length_offset()), | 
| 683                               FieldAddress(RAX, Array::length_offset()), |  | 
| 684                               R10); | 686                               R10); | 
| 685 | 687 | 
| 686   // Initialize all array elements to raw_null. | 688   // Initialize all array elements to raw_null. | 
| 687   // RAX: new object start as a tagged pointer. | 689   // RAX: new object start as a tagged pointer. | 
| 688   // RCX: new object end address. | 690   // RCX: new object end address. | 
| 689   // RDI: iterator which initially points to the start of the variable | 691   // RDI: iterator which initially points to the start of the variable | 
| 690   // data area to be initialized. | 692   // data area to be initialized. | 
| 691   __ LoadObject(R12, Object::null_object()); | 693   __ LoadObject(R12, Object::null_object()); | 
| 692   __ leaq(RDI, FieldAddress(RAX, sizeof(RawArray))); | 694   __ leaq(RDI, FieldAddress(RAX, sizeof(RawArray))); | 
| 693   Label done; | 695   Label done; | 
| (...skipping 37 matching lines...) |  |
| 731 //   RSP : points to return address. | 733 //   RSP : points to return address. | 
| 732 //   RDI : target code | 734 //   RDI : target code | 
| 733 //   RSI : arguments descriptor array. | 735 //   RSI : arguments descriptor array. | 
| 734 //   RDX : arguments array. | 736 //   RDX : arguments array. | 
| 735 //   RCX : current thread. | 737 //   RCX : current thread. | 
| 736 void StubCode::GenerateInvokeDartCodeStub(Assembler* assembler) { | 738 void StubCode::GenerateInvokeDartCodeStub(Assembler* assembler) { | 
| 737   // Save frame pointer coming in. | 739   // Save frame pointer coming in. | 
| 738   __ EnterFrame(0); | 740   __ EnterFrame(0); | 
| 739 | 741 | 
| 740   const Register kTargetCodeReg = CallingConventions::kArg1Reg; | 742   const Register kTargetCodeReg = CallingConventions::kArg1Reg; | 
| 741   const Register kArgDescReg    = CallingConventions::kArg2Reg; | 743   const Register kArgDescReg = CallingConventions::kArg2Reg; | 
| 742   const Register kArgsReg       = CallingConventions::kArg3Reg; | 744   const Register kArgsReg = CallingConventions::kArg3Reg; | 
| 743   const Register kThreadReg     = CallingConventions::kArg4Reg; | 745   const Register kThreadReg = CallingConventions::kArg4Reg; | 
| 744 | 746 | 
| 745   // Push code object to PC marker slot. | 747   // Push code object to PC marker slot. | 
| 746   __ pushq(Address(kThreadReg, Thread::invoke_dart_code_stub_offset())); | 748   __ pushq(Address(kThreadReg, Thread::invoke_dart_code_stub_offset())); | 
| 747 | 749 | 
| 748   // At this point, the stack looks like: | 750   // At this point, the stack looks like: | 
| 749   // | stub code object | 751   // | stub code object | 
| 750   // | saved RBP                                      | <-- RBP | 752   // | saved RBP                                      | <-- RBP | 
| 751   // | saved PC (return to DartEntry::InvokeFunction) | | 753   // | saved PC (return to DartEntry::InvokeFunction) | | 
| 752 | 754 | 
| 753   const intptr_t kInitialOffset = 2; | 755   const intptr_t kInitialOffset = 2; | 
| 754   // Save arguments descriptor array. | 756   // Save arguments descriptor array. | 
| 755   const intptr_t kArgumentsDescOffset = -(kInitialOffset) * kWordSize; | 757   const intptr_t kArgumentsDescOffset = -(kInitialOffset)*kWordSize; | 
| 756   __ pushq(kArgDescReg); | 758   __ pushq(kArgDescReg); | 
| 757 | 759 | 
| 758   // Save C++ ABI callee-saved registers. | 760   // Save C++ ABI callee-saved registers. | 
| 759   __ PushRegisters(CallingConventions::kCalleeSaveCpuRegisters, | 761   __ PushRegisters(CallingConventions::kCalleeSaveCpuRegisters, | 
| 760                    CallingConventions::kCalleeSaveXmmRegisters); | 762                    CallingConventions::kCalleeSaveXmmRegisters); | 
| 761 | 763 | 
| 762   // If any additional (or fewer) values are pushed, the offsets in | 764   // If any additional (or fewer) values are pushed, the offsets in | 
| 763   // kExitLinkSlotFromEntryFp will need to be changed. | 765   // kExitLinkSlotFromEntryFp will need to be changed. | 
| 764 | 766 | 
| 765   // Set up THR, which caches the current thread in Dart code. | 767   // Set up THR, which caches the current thread in Dart code. | 
| 766   if (THR != kThreadReg) { | 768   if (THR != kThreadReg) { | 
| 767     __ movq(THR, kThreadReg); | 769     __ movq(THR, kThreadReg); | 
| 768   } | 770   } | 
| 769 | 771 | 
| 770   // Save the current VMTag on the stack. | 772   // Save the current VMTag on the stack. | 
| 771   __ movq(RAX, Assembler::VMTagAddress()); | 773   __ movq(RAX, Assembler::VMTagAddress()); | 
| 772   __ pushq(RAX); | 774   __ pushq(RAX); | 
| 773 | 775 | 
| 774   // Mark that the thread is executing Dart code. | 776   // Mark that the thread is executing Dart code. | 
| 775   __ movq(Assembler::VMTagAddress(), Immediate(VMTag::kDartTagId)); | 777   __ movq(Assembler::VMTagAddress(), Immediate(VMTag::kDartTagId)); | 
| 776 | 778 | 
| 777   // Save top resource and top exit frame info. Use RAX as a temporary register. | 779   // Save top resource and top exit frame info. Use RAX as a temporary register. | 
| 778   // StackFrameIterator reads the top exit frame info saved in this frame. | 780   // StackFrameIterator reads the top exit frame info saved in this frame. | 
| 779   __ movq(RAX, Address(THR, Thread::top_resource_offset())); | 781   __ movq(RAX, Address(THR, Thread::top_resource_offset())); | 
| 780   __ pushq(RAX); | 782   __ pushq(RAX); | 
| 781   __ movq(Address(THR, Thread::top_resource_offset()), | 783   __ movq(Address(THR, Thread::top_resource_offset()), Immediate(0)); | 
| 782           Immediate(0)); |  | 
| 783   __ movq(RAX, Address(THR, Thread::top_exit_frame_info_offset())); | 784   __ movq(RAX, Address(THR, Thread::top_exit_frame_info_offset())); | 
| 784   // The constant kExitLinkSlotFromEntryFp must be kept in sync with the | 785   // The constant kExitLinkSlotFromEntryFp must be kept in sync with the | 
| 785   // code below. | 786   // code below. | 
| 786   __ pushq(RAX); | 787   __ pushq(RAX); | 
| 787 #if defined(DEBUG) | 788 #if defined(DEBUG) | 
| 788   { | 789   { | 
| 789     Label ok; | 790     Label ok; | 
| 790     __ leaq(RAX, Address(RBP, kExitLinkSlotFromEntryFp * kWordSize)); | 791     __ leaq(RAX, Address(RBP, kExitLinkSlotFromEntryFp * kWordSize)); | 
| 791     __ cmpq(RAX, RSP); | 792     __ cmpq(RAX, RSP); | 
| 792     __ j(EQUAL, &ok); | 793     __ j(EQUAL, &ok); | 
| 793     __ Stop("kExitLinkSlotFromEntryFp mismatch"); | 794     __ Stop("kExitLinkSlotFromEntryFp mismatch"); | 
| 794     __ Bind(&ok); | 795     __ Bind(&ok); | 
| 795   } | 796   } | 
| 796 #endif | 797 #endif | 
| 797 | 798 | 
| 798   __ movq(Address(THR, Thread::top_exit_frame_info_offset()), | 799   __ movq(Address(THR, Thread::top_exit_frame_info_offset()), Immediate(0)); | 
| 799           Immediate(0)); |  | 
| 800 | 800 | 
| 801   // Load arguments descriptor array into R10, which is passed to Dart code. | 801   // Load arguments descriptor array into R10, which is passed to Dart code. | 
| 802   __ movq(R10, Address(kArgDescReg, VMHandles::kOffsetOfRawPtrInHandle)); | 802   __ movq(R10, Address(kArgDescReg, VMHandles::kOffsetOfRawPtrInHandle)); | 
| 803 | 803 | 
| 804   // Push arguments. At this point we only need to preserve kTargetCodeReg. | 804   // Push arguments. At this point we only need to preserve kTargetCodeReg. | 
| 805   ASSERT(kTargetCodeReg != RDX); | 805   ASSERT(kTargetCodeReg != RDX); | 
| 806 | 806 | 
| 807   // Load number of arguments into RBX. | 807   // Load number of arguments into RBX. | 
| 808   __ movq(RBX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); | 808   __ movq(RBX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); | 
| 809   __ SmiUntag(RBX); | 809   __ SmiUntag(RBX); | 
| (...skipping 58 matching lines...) |  |
| 868   __ LoadObject(R9, Object::null_object()); | 868   __ LoadObject(R9, Object::null_object()); | 
| 869   if (FLAG_inline_alloc) { | 869   if (FLAG_inline_alloc) { | 
| 870     Label slow_case; | 870     Label slow_case; | 
| 871     // First compute the rounded instance size. | 871     // First compute the rounded instance size. | 
| 872     // R10: number of context variables. | 872     // R10: number of context variables. | 
| 873     intptr_t fixed_size = (sizeof(RawContext) + kObjectAlignment - 1); | 873     intptr_t fixed_size = (sizeof(RawContext) + kObjectAlignment - 1); | 
| 874     __ leaq(R13, Address(R10, TIMES_8, fixed_size)); | 874     __ leaq(R13, Address(R10, TIMES_8, fixed_size)); | 
| 875     __ andq(R13, Immediate(-kObjectAlignment)); | 875     __ andq(R13, Immediate(-kObjectAlignment)); | 
| 876 | 876 | 
| 877     // Check for allocation tracing. | 877     // Check for allocation tracing. | 
| 878     NOT_IN_PRODUCT(__ MaybeTraceAllocation(kContextCid, | 878     NOT_IN_PRODUCT( | 
| 879                                            &slow_case, | 879         __ MaybeTraceAllocation(kContextCid, &slow_case, Assembler::kFarJump)); | 
| 880                                            Assembler::kFarJump)); |  | 
| 881 | 880 | 
| 882     // Now allocate the object. | 881     // Now allocate the object. | 
| 883     // R10: number of context variables. | 882     // R10: number of context variables. | 
| 884     const intptr_t cid = kContextCid; | 883     const intptr_t cid = kContextCid; | 
| 885     Heap::Space space = Heap::kNew; | 884     Heap::Space space = Heap::kNew; | 
| 886     __ movq(RCX, Address(THR, Thread::heap_offset())); | 885     __ movq(RCX, Address(THR, Thread::heap_offset())); | 
| 887     __ movq(RAX, Address(RCX, Heap::TopOffset(space))); | 886     __ movq(RAX, Address(RCX, Heap::TopOffset(space))); | 
| 888     __ addq(R13, RAX); | 887     __ addq(R13, RAX); | 
| 889     // Check if the allocation fits into the remaining space. | 888     // Check if the allocation fits into the remaining space. | 
| 890     // RAX: potential new object. | 889     // RAX: potential new object. | 
| (...skipping 33 matching lines...) |  |
| 924       __ jmp(&done); | 923       __ jmp(&done); | 
| 925 | 924 | 
| 926       __ Bind(&size_tag_overflow); | 925       __ Bind(&size_tag_overflow); | 
| 927       // Set overflow size tag value. | 926       // Set overflow size tag value. | 
| 928       __ movq(R13, Immediate(0)); | 927       __ movq(R13, Immediate(0)); | 
| 929 | 928 | 
| 930       __ Bind(&done); | 929       __ Bind(&done); | 
| 931       // RAX: new object. | 930       // RAX: new object. | 
| 932       // R10: number of context variables. | 931       // R10: number of context variables. | 
| 933       // R13: size and bit tags. | 932       // R13: size and bit tags. | 
| 934       __ orq(R13, | 933       __ orq(R13, Immediate(RawObject::ClassIdTag::encode(cid))); | 
| 935              Immediate(RawObject::ClassIdTag::encode(cid))); |  | 
| 936       __ movq(FieldAddress(RAX, Context::tags_offset()), R13);  // Tags. | 934       __ movq(FieldAddress(RAX, Context::tags_offset()), R13);  // Tags. | 
| 937     } | 935     } | 
| 938 | 936 | 
| 939     // Set up number of context variables field. | 937     // Set up number of context variables field. | 
| 940     // RAX: new object. | 938     // RAX: new object. | 
| 941     // R10: number of context variables as integer value (not object). | 939     // R10: number of context variables as integer value (not object). | 
| 942     __ movq(FieldAddress(RAX, Context::num_variables_offset()), R10); | 940     __ movq(FieldAddress(RAX, Context::num_variables_offset()), R10); | 
| 943 | 941 | 
| 944     // Setup the parent field. | 942     // Setup the parent field. | 
| 945     // RAX: new object. | 943     // RAX: new object. | 
| 946     // R10: number of context variables. | 944     // R10: number of context variables. | 
| 947     // No generational barrier needed, since we are storing null. | 945     // No generational barrier needed, since we are storing null. | 
| 948     __ StoreIntoObjectNoBarrier(RAX, | 946     __ StoreIntoObjectNoBarrier( | 
| 949                                 FieldAddress(RAX, Context::parent_offset()), | 947         RAX, FieldAddress(RAX, Context::parent_offset()), R9); | 
| 950                                 R9); |  | 
| 951 | 948 | 
| 952     // Initialize the context variables. | 949     // Initialize the context variables. | 
| 953     // RAX: new object. | 950     // RAX: new object. | 
| 954     // R10: number of context variables. | 951     // R10: number of context variables. | 
| 955     { | 952     { | 
| 956       Label loop, entry; | 953       Label loop, entry; | 
| 957       __ leaq(R13, FieldAddress(RAX, Context::variable_offset(0))); | 954       __ leaq(R13, FieldAddress(RAX, Context::variable_offset(0))); | 
| 958 #if defined(DEBUG) | 955 #if defined(DEBUG) | 
| 959       static const bool kJumpLength = Assembler::kFarJump; | 956       static const bool kJumpLength = Assembler::kFarJump; | 
| 960 #else | 957 #else | 
| 961       static const bool kJumpLength = Assembler::kNearJump; | 958       static const bool kJumpLength = Assembler::kNearJump; | 
| 962 #endif  // DEBUG | 959 #endif  // DEBUG | 
| 963       __ jmp(&entry, kJumpLength); | 960       __ jmp(&entry, kJumpLength); | 
| 964       __ Bind(&loop); | 961       __ Bind(&loop); | 
| 965       __ decq(R10); | 962       __ decq(R10); | 
| 966       // No generational barrier needed, since we are storing null. | 963       // No generational barrier needed, since we are storing null. | 
| 967       __ StoreIntoObjectNoBarrier(RAX, | 964       __ StoreIntoObjectNoBarrier(RAX, Address(R13, R10, TIMES_8, 0), R9); | 
| 968                                   Address(R13, R10, TIMES_8, 0), |  | 
| 969                                   R9); |  | 
| 970       __ Bind(&entry); | 965       __ Bind(&entry); | 
| 971       __ cmpq(R10, Immediate(0)); | 966       __ cmpq(R10, Immediate(0)); | 
| 972       __ j(NOT_EQUAL, &loop, Assembler::kNearJump); | 967       __ j(NOT_EQUAL, &loop, Assembler::kNearJump); | 
| 973     } | 968     } | 
| 974 | 969 | 
| 975     // Done allocating and initializing the context. | 970     // Done allocating and initializing the context. | 
| 976     // RAX: new object. | 971     // RAX: new object. | 
| 977     __ ret(); | 972     __ ret(); | 
| 978 | 973 | 
| 979     __ Bind(&slow_case); | 974     __ Bind(&slow_case); | 
| (...skipping 136 matching lines...) |  |
| 1116     // Initialize the remaining words of the object. | 1111     // Initialize the remaining words of the object. | 
| 1117     // RAX: new object (tagged). | 1112     // RAX: new object (tagged). | 
| 1118     // RBX: next object start. | 1113     // RBX: next object start. | 
| 1119     // RDX: new object type arguments (if is_cls_parameterized). | 1114     // RDX: new object type arguments (if is_cls_parameterized). | 
| 1120     // R9: raw null. | 1115     // R9: raw null. | 
| 1121     // First try inlining the initialization without a loop. | 1116     // First try inlining the initialization without a loop. | 
| 1122     if (instance_size < (kInlineInstanceSize * kWordSize)) { | 1117     if (instance_size < (kInlineInstanceSize * kWordSize)) { | 
| 1123       // Check if the object contains any non-header fields. | 1118       // Check if the object contains any non-header fields. | 
| 1124       // Small objects are initialized using a consecutive set of writes. | 1119       // Small objects are initialized using a consecutive set of writes. | 
| 1125       for (intptr_t current_offset = Instance::NextFieldOffset(); | 1120       for (intptr_t current_offset = Instance::NextFieldOffset(); | 
| 1126            current_offset < instance_size; | 1121            current_offset < instance_size; current_offset += kWordSize) { | 
| 1127            current_offset += kWordSize) { | 1122         __ StoreIntoObjectNoBarrier(RAX, FieldAddress(RAX, current_offset), R9); | 
| 1128         __ StoreIntoObjectNoBarrier(RAX, |  | 
| 1129                                     FieldAddress(RAX, current_offset), |  | 
| 1130                                     R9); |  | 
| 1131       } | 1123       } | 
| 1132     } else { | 1124     } else { | 
| 1133       __ leaq(RCX, FieldAddress(RAX, Instance::NextFieldOffset())); | 1125       __ leaq(RCX, FieldAddress(RAX, Instance::NextFieldOffset())); | 
| 1134       // Loop until the whole object is initialized. | 1126       // Loop until the whole object is initialized. | 
| 1135       // RAX: new object (tagged). | 1127       // RAX: new object (tagged). | 
| 1136       // RBX: next object start. | 1128       // RBX: next object start. | 
| 1137       // RCX: next word to be initialized. | 1129       // RCX: next word to be initialized. | 
| 1138       // RDX: new object type arguments (if is_cls_parameterized). | 1130       // RDX: new object type arguments (if is_cls_parameterized). | 
| 1139       Label init_loop; | 1131       Label init_loop; | 
| 1140       Label done; | 1132       Label done; | 
| (...skipping 20 matching lines...) |  |
| 1161     // Done allocating and initializing the instance. | 1153     // Done allocating and initializing the instance. | 
| 1162     // RAX: new object (tagged). | 1154     // RAX: new object (tagged). | 
| 1163     __ ret(); | 1155     __ ret(); | 
| 1164 | 1156 | 
| 1165     __ Bind(&slow_case); | 1157     __ Bind(&slow_case); | 
| 1166   } | 1158   } | 
| 1167   // If is_cls_parameterized: | 1159   // If is_cls_parameterized: | 
| 1168   // RDX: new object type arguments. | 1160   // RDX: new object type arguments. | 
| 1169   // Create a stub frame. | 1161   // Create a stub frame. | 
| 1170   __ EnterStubFrame();  // Uses PP to access class object. | 1162   __ EnterStubFrame();  // Uses PP to access class object. | 
| 1171   __ pushq(R9);  // Setup space on stack for return value. | 1163   __ pushq(R9);         // Setup space on stack for return value. | 
| 1172   __ PushObject(cls);  // Push class of object to be allocated. | 1164   __ PushObject(cls);   // Push class of object to be allocated. | 
| 1173   if (is_cls_parameterized) { | 1165   if (is_cls_parameterized) { | 
| 1174     __ pushq(RDX);  // Push type arguments of object to be allocated. | 1166     __ pushq(RDX);  // Push type arguments of object to be allocated. | 
| 1175   } else { | 1167   } else { | 
| 1176     __ pushq(R9);  // Push null type arguments. | 1168     __ pushq(R9);  // Push null type arguments. | 
| 1177   } | 1169   } | 
| 1178   __ CallRuntime(kAllocateObjectRuntimeEntry, 2);  // Allocate object. | 1170   __ CallRuntime(kAllocateObjectRuntimeEntry, 2);  // Allocate object. | 
| 1179   __ popq(RAX);  // Pop argument (type arguments of object). | 1171   __ popq(RAX);  // Pop argument (type arguments of object). | 
| 1180   __ popq(RAX);  // Pop argument (class of object). | 1172   __ popq(RAX);  // Pop argument (class of object). | 
| 1181   __ popq(RAX);  // Pop result (newly allocated object). | 1173   __ popq(RAX);  // Pop result (newly allocated object). | 
| 1182   // RAX: new object | 1174   // RAX: new object | 
| (...skipping 11 matching lines...) |  |
| 1194 //   RSP + 8 : address of last argument. | 1186 //   RSP + 8 : address of last argument. | 
| 1195 //   R10 : arguments descriptor array. | 1187 //   R10 : arguments descriptor array. | 
| 1196 void StubCode::GenerateCallClosureNoSuchMethodStub(Assembler* assembler) { | 1188 void StubCode::GenerateCallClosureNoSuchMethodStub(Assembler* assembler) { | 
| 1197   __ EnterStubFrame(); | 1189   __ EnterStubFrame(); | 
| 1198 | 1190 | 
| 1199   // Load the receiver. | 1191   // Load the receiver. | 
| 1200   __ movq(R13, FieldAddress(R10, ArgumentsDescriptor::count_offset())); | 1192   __ movq(R13, FieldAddress(R10, ArgumentsDescriptor::count_offset())); | 
| 1201   __ movq(RAX, Address(RBP, R13, TIMES_4, kParamEndSlotFromFp * kWordSize)); | 1193   __ movq(RAX, Address(RBP, R13, TIMES_4, kParamEndSlotFromFp * kWordSize)); | 
| 1202 | 1194 | 
| 1203   __ pushq(Immediate(0));  // Result slot. | 1195   __ pushq(Immediate(0));  // Result slot. | 
| 1204   __ pushq(RAX);  // Receiver. | 1196   __ pushq(RAX);           // Receiver. | 
| 1205   __ pushq(R10);  // Arguments descriptor array. | 1197   __ pushq(R10);           // Arguments descriptor array. | 
| 1206 | 1198 | 
| 1207   __ movq(R10, R13);  // Smi-tagged arguments array length. | 1199   __ movq(R10, R13);  // Smi-tagged arguments array length. | 
| 1208   PushArgumentsArray(assembler); | 1200   PushArgumentsArray(assembler); | 
| 1209 | 1201 | 
| 1210   const intptr_t kNumArgs = 3; | 1202   const intptr_t kNumArgs = 3; | 
| 1211   __ CallRuntime(kInvokeClosureNoSuchMethodRuntimeEntry, kNumArgs); | 1203   __ CallRuntime(kInvokeClosureNoSuchMethodRuntimeEntry, kNumArgs); | 
| 1212   // noSuchMethod on closures always throws an error, so it will never return. | 1204   // noSuchMethod on closures always throws an error, so it will never return. | 
| 1213   __ int3(); | 1205   __ int3(); | 
| 1214 } | 1206 } | 
| 1215 | 1207 | 
| 1216 | 1208 | 
| 1217 // Cannot use function object from ICData as it may be the inlined | 1209 // Cannot use function object from ICData as it may be the inlined | 
| 1218 // function and not the top-scope function. | 1210 // function and not the top-scope function. | 
| 1219 void StubCode::GenerateOptimizedUsageCounterIncrement(Assembler* assembler) { | 1211 void StubCode::GenerateOptimizedUsageCounterIncrement(Assembler* assembler) { | 
| 1220   Register ic_reg = RBX; | 1212   Register ic_reg = RBX; | 
| 1221   Register func_reg = RDI; | 1213   Register func_reg = RDI; | 
| 1222   if (FLAG_trace_optimized_ic_calls) { | 1214   if (FLAG_trace_optimized_ic_calls) { | 
| 1223     __ EnterStubFrame(); | 1215     __ EnterStubFrame(); | 
| 1224     __ pushq(func_reg);     // Preserve | 1216     __ pushq(func_reg);  // Preserve | 
| 1225     __ pushq(ic_reg);       // Preserve. | 1217     __ pushq(ic_reg);    // Preserve. | 
| 1226     __ pushq(ic_reg);       // Argument. | 1218     __ pushq(ic_reg);    // Argument. | 
| 1227     __ pushq(func_reg);     // Argument. | 1219     __ pushq(func_reg);  // Argument. | 
| 1228     __ CallRuntime(kTraceICCallRuntimeEntry, 2); | 1220     __ CallRuntime(kTraceICCallRuntimeEntry, 2); | 
| 1229     __ popq(RAX);          // Discard argument. | 1221     __ popq(RAX);       // Discard argument. | 
| 1230     __ popq(RAX);          // Discard argument. | 1222     __ popq(RAX);       // Discard argument. | 
| 1231     __ popq(ic_reg);       // Restore. | 1223     __ popq(ic_reg);    // Restore. | 
| 1232     __ popq(func_reg);     // Restore. | 1224     __ popq(func_reg);  // Restore. | 
| 1233     __ LeaveStubFrame(); | 1225     __ LeaveStubFrame(); | 
| 1234   } | 1226   } | 
| 1235   __ incl(FieldAddress(func_reg, Function::usage_counter_offset())); | 1227   __ incl(FieldAddress(func_reg, Function::usage_counter_offset())); | 
| 1236 } | 1228 } | 
| 1237 | 1229 | 
| 1238 | 1230 | 
| 1239 // Loads function into 'temp_reg', preserves 'ic_reg'. | 1231 // Loads function into 'temp_reg', preserves 'ic_reg'. | 
| 1240 void StubCode::GenerateUsageCounterIncrement(Assembler* assembler, | 1232 void StubCode::GenerateUsageCounterIncrement(Assembler* assembler, | 
| 1241                                              Register temp_reg) { | 1233                                              Register temp_reg) { | 
| 1242   if (FLAG_optimization_counter_threshold >= 0) { | 1234   if (FLAG_optimization_counter_threshold >= 0) { | 
| (...skipping 10 matching lines...) |
| 1253 // Note: RBX must be preserved. | 1245 // Note: RBX must be preserved. | 
| 1254 // Attempt a quick Smi operation for known operations ('kind'). The ICData | 1246 // Attempt a quick Smi operation for known operations ('kind'). The ICData | 
| 1255 // must have been primed with a Smi/Smi check that will be used for counting | 1247 // must have been primed with a Smi/Smi check that will be used for counting | 
| 1256 // the invocations. | 1248 // the invocations. | 
| 1257 static void EmitFastSmiOp(Assembler* assembler, | 1249 static void EmitFastSmiOp(Assembler* assembler, | 
| 1258                           Token::Kind kind, | 1250                           Token::Kind kind, | 
| 1259                           intptr_t num_args, | 1251                           intptr_t num_args, | 
| 1260                           Label* not_smi_or_overflow) { | 1252                           Label* not_smi_or_overflow) { | 
| 1261   __ Comment("Fast Smi op"); | 1253   __ Comment("Fast Smi op"); | 
| 1262   ASSERT(num_args == 2); | 1254   ASSERT(num_args == 2); | 
| 1263   __ movq(RCX, Address(RSP, + 1 * kWordSize));  // Right | 1255   __ movq(RCX, Address(RSP, +1 * kWordSize));  // Right | 
| 1264   __ movq(RAX, Address(RSP, + 2 * kWordSize));  // Left. | 1256   __ movq(RAX, Address(RSP, +2 * kWordSize));  // Left. | 
| 1265   __ movq(R13, RCX); | 1257   __ movq(R13, RCX); | 
| 1266   __ orq(R13, RAX); | 1258   __ orq(R13, RAX); | 
| 1267   __ testq(R13, Immediate(kSmiTagMask)); | 1259   __ testq(R13, Immediate(kSmiTagMask)); | 
| 1268   __ j(NOT_ZERO, not_smi_or_overflow); | 1260   __ j(NOT_ZERO, not_smi_or_overflow); | 
| 1269   switch (kind) { | 1261   switch (kind) { | 
| 1270     case Token::kADD: { | 1262     case Token::kADD: { | 
| 1271       __ addq(RAX, RCX); | 1263       __ addq(RAX, RCX); | 
| 1272       __ j(OVERFLOW, not_smi_or_overflow); | 1264       __ j(OVERFLOW, not_smi_or_overflow); | 
| 1273       break; | 1265       break; | 
| 1274     } | 1266     } | 
| 1275     case Token::kSUB: { | 1267     case Token::kSUB: { | 
| 1276       __ subq(RAX, RCX); | 1268       __ subq(RAX, RCX); | 
| 1277       __ j(OVERFLOW, not_smi_or_overflow); | 1269       __ j(OVERFLOW, not_smi_or_overflow); | 
| 1278       break; | 1270       break; | 
| 1279     } | 1271     } | 
| 1280     case Token::kEQ: { | 1272     case Token::kEQ: { | 
| 1281       Label done, is_true; | 1273       Label done, is_true; | 
| 1282       __ cmpq(RAX, RCX); | 1274       __ cmpq(RAX, RCX); | 
| 1283       __ j(EQUAL, &is_true, Assembler::kNearJump); | 1275       __ j(EQUAL, &is_true, Assembler::kNearJump); | 
| 1284       __ LoadObject(RAX, Bool::False()); | 1276       __ LoadObject(RAX, Bool::False()); | 
| 1285       __ jmp(&done, Assembler::kNearJump); | 1277       __ jmp(&done, Assembler::kNearJump); | 
| 1286       __ Bind(&is_true); | 1278       __ Bind(&is_true); | 
| 1287       __ LoadObject(RAX, Bool::True()); | 1279       __ LoadObject(RAX, Bool::True()); | 
| 1288       __ Bind(&done); | 1280       __ Bind(&done); | 
| 1289       break; | 1281       break; | 
| 1290     } | 1282     } | 
| 1291     default: UNIMPLEMENTED(); | 1283     default: | 
|  | 1284       UNIMPLEMENTED(); | 
| 1292   } | 1285   } | 
| 1293 | 1286 | 
| 1294   // RBX: IC data object (preserved). | 1287   // RBX: IC data object (preserved). | 
| 1295   __ movq(R13, FieldAddress(RBX, ICData::ic_data_offset())); | 1288   __ movq(R13, FieldAddress(RBX, ICData::ic_data_offset())); | 
| 1296   // R13: ic_data_array with check entries: classes and target functions. | 1289   // R13: ic_data_array with check entries: classes and target functions. | 
| 1297   __ leaq(R13, FieldAddress(R13, Array::data_offset())); | 1290   __ leaq(R13, FieldAddress(R13, Array::data_offset())); | 
| 1298   // R13: points directly to the first ic data array element. | 1291 // R13: points directly to the first ic data array element. | 
| 1299 #if defined(DEBUG) | 1292 #if defined(DEBUG) | 
| 1300   // Check that first entry is for Smi/Smi. | 1293   // Check that first entry is for Smi/Smi. | 
| 1301   Label error, ok; | 1294   Label error, ok; | 
| 1302   const Immediate& imm_smi_cid = | 1295   const Immediate& imm_smi_cid = | 
| 1303       Immediate(reinterpret_cast<intptr_t>(Smi::New(kSmiCid))); | 1296       Immediate(reinterpret_cast<intptr_t>(Smi::New(kSmiCid))); | 
| 1304   __ cmpq(Address(R13, 0 * kWordSize), imm_smi_cid); | 1297   __ cmpq(Address(R13, 0 * kWordSize), imm_smi_cid); | 
| 1305   __ j(NOT_EQUAL, &error, Assembler::kNearJump); | 1298   __ j(NOT_EQUAL, &error, Assembler::kNearJump); | 
| 1306   __ cmpq(Address(R13, 1 * kWordSize), imm_smi_cid); | 1299   __ cmpq(Address(R13, 1 * kWordSize), imm_smi_cid); | 
| 1307   __ j(EQUAL, &ok, Assembler::kNearJump); | 1300   __ j(EQUAL, &ok, Assembler::kNearJump); | 
| 1308   __ Bind(&error); | 1301   __ Bind(&error); | 
| (...skipping 26 matching lines...) |
| 1335 // - Match found -> jump to target. | 1328 // - Match found -> jump to target. | 
| 1336 // - Match not found -> jump to IC miss. | 1329 // - Match not found -> jump to IC miss. | 
| 1337 void StubCode::GenerateNArgsCheckInlineCacheStub( | 1330 void StubCode::GenerateNArgsCheckInlineCacheStub( | 
| 1338     Assembler* assembler, | 1331     Assembler* assembler, | 
| 1339     intptr_t num_args, | 1332     intptr_t num_args, | 
| 1340     const RuntimeEntry& handle_ic_miss, | 1333     const RuntimeEntry& handle_ic_miss, | 
| 1341     Token::Kind kind, | 1334     Token::Kind kind, | 
| 1342     bool optimized) { | 1335     bool optimized) { | 
| 1343   ASSERT(num_args > 0); | 1336   ASSERT(num_args > 0); | 
| 1344 #if defined(DEBUG) | 1337 #if defined(DEBUG) | 
| 1345   { Label ok; | 1338   { | 
|  | 1339     Label ok; | 
| 1346     // Check that the IC data array has NumArgsTested() == num_args. | 1340     // Check that the IC data array has NumArgsTested() == num_args. | 
| 1347     // 'NumArgsTested' is stored in the least significant bits of 'state_bits'. | 1341     // 'NumArgsTested' is stored in the least significant bits of 'state_bits'. | 
| 1348     __ movl(RCX, FieldAddress(RBX, ICData::state_bits_offset())); | 1342     __ movl(RCX, FieldAddress(RBX, ICData::state_bits_offset())); | 
| 1349     ASSERT(ICData::NumArgsTestedShift() == 0);  // No shift needed. | 1343     ASSERT(ICData::NumArgsTestedShift() == 0);  // No shift needed. | 
| 1350     __ andq(RCX, Immediate(ICData::NumArgsTestedMask())); | 1344     __ andq(RCX, Immediate(ICData::NumArgsTestedMask())); | 
| 1351     __ cmpq(RCX, Immediate(num_args)); | 1345     __ cmpq(RCX, Immediate(num_args)); | 
| 1352     __ j(EQUAL, &ok, Assembler::kNearJump); | 1346     __ j(EQUAL, &ok, Assembler::kNearJump); | 
| 1353     __ Stop("Incorrect stub for IC data"); | 1347     __ Stop("Incorrect stub for IC data"); | 
| 1354     __ Bind(&ok); | 1348     __ Bind(&ok); | 
| 1355   } | 1349   } | 
| 1356 #endif  // DEBUG | 1350 #endif  // DEBUG | 
| 1357 | 1351 | 
| 1358   Label stepping, done_stepping; | 1352   Label stepping, done_stepping; | 
| 1359   if (FLAG_support_debugger && !optimized) { | 1353   if (FLAG_support_debugger && !optimized) { | 
| 1360     __ Comment("Check single stepping"); | 1354     __ Comment("Check single stepping"); | 
| 1361     __ LoadIsolate(RAX); | 1355     __ LoadIsolate(RAX); | 
| 1362     __ cmpb(Address(RAX, Isolate::single_step_offset()), Immediate(0)); | 1356     __ cmpb(Address(RAX, Isolate::single_step_offset()), Immediate(0)); | 
| 1363     __ j(NOT_EQUAL, &stepping); | 1357     __ j(NOT_EQUAL, &stepping); | 
| 1364     __ Bind(&done_stepping); | 1358     __ Bind(&done_stepping); | 
| 1365   } | 1359   } | 
| 1366 | 1360 | 
| 1367   Label not_smi_or_overflow; | 1361   Label not_smi_or_overflow; | 
| 1368   if (kind != Token::kILLEGAL) { | 1362   if (kind != Token::kILLEGAL) { | 
| 1369     EmitFastSmiOp( | 1363     EmitFastSmiOp(assembler, kind, num_args, &not_smi_or_overflow); | 
| 1370         assembler, |  | 
| 1371         kind, |  | 
| 1372         num_args, |  | 
| 1373         &not_smi_or_overflow); |  | 
| 1374   } | 1364   } | 
| 1375   __ Bind(&not_smi_or_overflow); | 1365   __ Bind(&not_smi_or_overflow); | 
| 1376 | 1366 | 
| 1377   __ Comment("Extract ICData initial values and receiver cid"); | 1367   __ Comment("Extract ICData initial values and receiver cid"); | 
| 1378   // Load arguments descriptor into R10. | 1368   // Load arguments descriptor into R10. | 
| 1379   __ movq(R10, FieldAddress(RBX, ICData::arguments_descriptor_offset())); | 1369   __ movq(R10, FieldAddress(RBX, ICData::arguments_descriptor_offset())); | 
| 1380   // Loop that checks if there is an IC data match. | 1370   // Loop that checks if there is an IC data match. | 
| 1381   Label loop, update, test, found; | 1371   Label loop, update, test, found; | 
| 1382   // RBX: IC data object (preserved). | 1372   // RBX: IC data object (preserved). | 
| 1383   __ movq(R13, FieldAddress(RBX, ICData::ic_data_offset())); | 1373   __ movq(R13, FieldAddress(RBX, ICData::ic_data_offset())); | 
| 1384   // R13: ic_data_array with check entries: classes and target functions. | 1374   // R13: ic_data_array with check entries: classes and target functions. | 
| 1385   __ leaq(R13, FieldAddress(R13, Array::data_offset())); | 1375   __ leaq(R13, FieldAddress(R13, Array::data_offset())); | 
| 1386   // R13: points directly to the first ic data array element. | 1376   // R13: points directly to the first ic data array element. | 
| 1387 | 1377 | 
| 1388   // Get the receiver's class ID (first read number of arguments from | 1378   // Get the receiver's class ID (first read number of arguments from | 
| 1389   // arguments descriptor array and then access the receiver from the stack). | 1379   // arguments descriptor array and then access the receiver from the stack). | 
| 1390   __ movq(RAX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); | 1380   __ movq(RAX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); | 
| 1391   __ movq(R9, Address(RSP, RAX, TIMES_4, 0));  // RAX (argument count) is Smi. | 1381   __ movq(R9, Address(RSP, RAX, TIMES_4, 0));  // RAX (argument count) is Smi. | 
| 1392   __ LoadTaggedClassIdMayBeSmi(RAX, R9); | 1382   __ LoadTaggedClassIdMayBeSmi(RAX, R9); | 
| 1393   // RAX: receiver's class ID as smi. | 1383   // RAX: receiver's class ID as smi. | 
| 1394   __ movq(R9, Address(R13, 0));  // First class ID (Smi) to check. | 1384   __ movq(R9, Address(R13, 0));  // First class ID (Smi) to check. | 
| 1395   __ jmp(&test); | 1385   __ jmp(&test); | 
| 1396 | 1386 | 
| 1397   __ Comment("ICData loop"); | 1387   __ Comment("ICData loop"); | 
| 1398   __ Bind(&loop); | 1388   __ Bind(&loop); | 
| 1399   for (int i = 0; i < num_args; i++) { | 1389   for (int i = 0; i < num_args; i++) { | 
| 1400     if (i > 0) { | 1390     if (i > 0) { | 
| 1401       // If not the first, load the next argument's class ID. | 1391       // If not the first, load the next argument's class ID. | 
| 1402       __ movq(RAX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); | 1392       __ movq(RAX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); | 
| 1403       __ movq(R9, Address(RSP, RAX, TIMES_4, - i * kWordSize)); | 1393       __ movq(R9, Address(RSP, RAX, TIMES_4, -i * kWordSize)); | 
| 1404       __ LoadTaggedClassIdMayBeSmi(RAX, R9); | 1394       __ LoadTaggedClassIdMayBeSmi(RAX, R9); | 
| 1405       // RAX: next argument class ID (smi). | 1395       // RAX: next argument class ID (smi). | 
| 1406       __ movq(R9, Address(R13, i * kWordSize)); | 1396       __ movq(R9, Address(R13, i * kWordSize)); | 
| 1407       // R9: next class ID to check (smi). | 1397       // R9: next class ID to check (smi). | 
| 1408     } | 1398     } | 
| 1409     __ cmpq(RAX, R9);  // Class id match? | 1399     __ cmpq(RAX, R9);  // Class id match? | 
| 1410     if (i < (num_args - 1)) { | 1400     if (i < (num_args - 1)) { | 
| 1411       __ j(NOT_EQUAL, &update);  // Continue. | 1401       __ j(NOT_EQUAL, &update);  // Continue. | 
| 1412     } else { | 1402     } else { | 
| 1413       // Last check, all checks before matched. | 1403       // Last check, all checks before matched. | 
| 1414       __ j(EQUAL, &found);  // Break. | 1404       __ j(EQUAL, &found);  // Break. | 
| 1415     } | 1405     } | 
| 1416   } | 1406   } | 
| 1417   __ Bind(&update); | 1407   __ Bind(&update); | 
| 1418   // Reload receiver class ID.  It has not been destroyed when num_args == 1. | 1408   // Reload receiver class ID.  It has not been destroyed when num_args == 1. | 
| 1419   if (num_args > 1) { | 1409   if (num_args > 1) { | 
| 1420     __ movq(RAX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); | 1410     __ movq(RAX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); | 
| 1421     __ movq(R9, Address(RSP, RAX, TIMES_4, 0)); | 1411     __ movq(R9, Address(RSP, RAX, TIMES_4, 0)); | 
| 1422     __ LoadTaggedClassIdMayBeSmi(RAX, R9); | 1412     __ LoadTaggedClassIdMayBeSmi(RAX, R9); | 
| 1423   } | 1413   } | 
| 1424 | 1414 | 
| 1425   const intptr_t entry_size = ICData::TestEntryLengthFor(num_args) * kWordSize; | 1415   const intptr_t entry_size = ICData::TestEntryLengthFor(num_args) * kWordSize; | 
| 1426   __ addq(R13, Immediate(entry_size));  // Next entry. | 1416   __ addq(R13, Immediate(entry_size));  // Next entry. | 
| 1427   __ movq(R9, Address(R13, 0));  // Next class ID. | 1417   __ movq(R9, Address(R13, 0));         // Next class ID. | 
| 1428 | 1418 | 
| 1429   __ Bind(&test); | 1419   __ Bind(&test); | 
| 1430   __ cmpq(R9, Immediate(Smi::RawValue(kIllegalCid)));  // Done? | 1420   __ cmpq(R9, Immediate(Smi::RawValue(kIllegalCid)));  // Done? | 
| 1431   __ j(NOT_EQUAL, &loop, Assembler::kNearJump); | 1421   __ j(NOT_EQUAL, &loop, Assembler::kNearJump); | 
| 1432 | 1422 | 
| 1433   __ Comment("IC miss"); | 1423   __ Comment("IC miss"); | 
| 1434   // Compute address of arguments (first read number of arguments from | 1424   // Compute address of arguments (first read number of arguments from | 
| 1435   // arguments descriptor array and then compute address on the stack). | 1425   // arguments descriptor array and then compute address on the stack). | 
| 1436   __ movq(RAX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); | 1426   __ movq(RAX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); | 
| 1437   __ leaq(RAX, Address(RSP, RAX, TIMES_4, 0));  // RAX is Smi. | 1427   __ leaq(RAX, Address(RSP, RAX, TIMES_4, 0));  // RAX is Smi. | 
| 1438   __ EnterStubFrame(); | 1428   __ EnterStubFrame(); | 
| 1439   __ pushq(R10);  // Preserve arguments descriptor array. | 1429   __ pushq(R10);           // Preserve arguments descriptor array. | 
| 1440   __ pushq(RBX);  // Preserve IC data object. | 1430   __ pushq(RBX);           // Preserve IC data object. | 
| 1441   __ pushq(Immediate(0));  // Result slot. | 1431   __ pushq(Immediate(0));  // Result slot. | 
| 1442   // Push call arguments. | 1432   // Push call arguments. | 
| 1443   for (intptr_t i = 0; i < num_args; i++) { | 1433   for (intptr_t i = 0; i < num_args; i++) { | 
| 1444     __ movq(RCX, Address(RAX, -kWordSize * i)); | 1434     __ movq(RCX, Address(RAX, -kWordSize * i)); | 
| 1445     __ pushq(RCX); | 1435     __ pushq(RCX); | 
| 1446   } | 1436   } | 
| 1447   __ pushq(RBX);  // Pass IC data object. | 1437   __ pushq(RBX);  // Pass IC data object. | 
| 1448   __ CallRuntime(handle_ic_miss, num_args + 1); | 1438   __ CallRuntime(handle_ic_miss, num_args + 1); | 
| 1449   // Remove the call arguments pushed earlier, including the IC data object. | 1439   // Remove the call arguments pushed earlier, including the IC data object. | 
| 1450   for (intptr_t i = 0; i < num_args + 1; i++) { | 1440   for (intptr_t i = 0; i < num_args + 1; i++) { | 
| (...skipping 53 matching lines...) |
| 1504 //  RBX: Inline cache data object. | 1494 //  RBX: Inline cache data object. | 
| 1505 //  TOS(0): Return address. | 1495 //  TOS(0): Return address. | 
| 1506 // Inline cache data object structure: | 1496 // Inline cache data object structure: | 
| 1507 // 0: function-name | 1497 // 0: function-name | 
| 1508 // 1: N, number of arguments checked. | 1498 // 1: N, number of arguments checked. | 
| 1509 // 2 .. (length - 1): group of checks, each check containing: | 1499 // 2 .. (length - 1): group of checks, each check containing: | 
| 1510 //   - N classes. | 1500 //   - N classes. | 
| 1511 //   - 1 target function. | 1501 //   - 1 target function. | 
| 1512 void StubCode::GenerateOneArgCheckInlineCacheStub(Assembler* assembler) { | 1502 void StubCode::GenerateOneArgCheckInlineCacheStub(Assembler* assembler) { | 
| 1513   GenerateUsageCounterIncrement(assembler, RCX); | 1503   GenerateUsageCounterIncrement(assembler, RCX); | 
| 1514   GenerateNArgsCheckInlineCacheStub(assembler, 1, | 1504   GenerateNArgsCheckInlineCacheStub( | 
| 1515       kInlineCacheMissHandlerOneArgRuntimeEntry, | 1505       assembler, 1, kInlineCacheMissHandlerOneArgRuntimeEntry, Token::kILLEGAL); | 
| 1516       Token::kILLEGAL); |  | 
| 1517 } | 1506 } | 
| 1518 | 1507 | 
| 1519 | 1508 | 
| 1520 void StubCode::GenerateTwoArgsCheckInlineCacheStub(Assembler* assembler) { | 1509 void StubCode::GenerateTwoArgsCheckInlineCacheStub(Assembler* assembler) { | 
| 1521   GenerateUsageCounterIncrement(assembler, RCX); | 1510   GenerateUsageCounterIncrement(assembler, RCX); | 
| 1522   GenerateNArgsCheckInlineCacheStub(assembler, 2, | 1511   GenerateNArgsCheckInlineCacheStub(assembler, 2, | 
| 1523       kInlineCacheMissHandlerTwoArgsRuntimeEntry, | 1512                                     kInlineCacheMissHandlerTwoArgsRuntimeEntry, | 
| 1524       Token::kILLEGAL); | 1513                                     Token::kILLEGAL); | 
| 1525 } | 1514 } | 
| 1526 | 1515 | 
| 1527 | 1516 | 
| 1528 void StubCode::GenerateSmiAddInlineCacheStub(Assembler* assembler) { | 1517 void StubCode::GenerateSmiAddInlineCacheStub(Assembler* assembler) { | 
| 1529   GenerateUsageCounterIncrement(assembler, RCX); | 1518   GenerateUsageCounterIncrement(assembler, RCX); | 
| 1530   GenerateNArgsCheckInlineCacheStub(assembler, 2, | 1519   GenerateNArgsCheckInlineCacheStub( | 
| 1531       kInlineCacheMissHandlerTwoArgsRuntimeEntry, | 1520       assembler, 2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kADD); | 
| 1532       Token::kADD); |  | 
| 1533 } | 1521 } | 
| 1534 | 1522 | 
| 1535 | 1523 | 
| 1536 void StubCode::GenerateSmiSubInlineCacheStub(Assembler* assembler) { | 1524 void StubCode::GenerateSmiSubInlineCacheStub(Assembler* assembler) { | 
| 1537   GenerateUsageCounterIncrement(assembler, RCX); | 1525   GenerateUsageCounterIncrement(assembler, RCX); | 
| 1538   GenerateNArgsCheckInlineCacheStub(assembler, 2, | 1526   GenerateNArgsCheckInlineCacheStub( | 
| 1539       kInlineCacheMissHandlerTwoArgsRuntimeEntry, | 1527       assembler, 2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kSUB); | 
| 1540       Token::kSUB); |  | 
| 1541 } | 1528 } | 
| 1542 | 1529 | 
| 1543 | 1530 | 
| 1544 void StubCode::GenerateSmiEqualInlineCacheStub(Assembler* assembler) { | 1531 void StubCode::GenerateSmiEqualInlineCacheStub(Assembler* assembler) { | 
| 1545   GenerateUsageCounterIncrement(assembler, RCX); | 1532   GenerateUsageCounterIncrement(assembler, RCX); | 
| 1546   GenerateNArgsCheckInlineCacheStub(assembler, 2, | 1533   GenerateNArgsCheckInlineCacheStub( | 
| 1547       kInlineCacheMissHandlerTwoArgsRuntimeEntry, | 1534       assembler, 2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kEQ); | 
| 1548       Token::kEQ); |  | 
| 1549 } | 1535 } | 
| 1550 | 1536 | 
| 1551 | 1537 | 
| 1552 // Use inline cache data array to invoke the target or continue in inline | 1538 // Use inline cache data array to invoke the target or continue in inline | 
| 1553 // cache miss handler. Stub for 1-argument check (receiver class). | 1539 // cache miss handler. Stub for 1-argument check (receiver class). | 
| 1554 //  RDI: function which counter needs to be incremented. | 1540 //  RDI: function which counter needs to be incremented. | 
| 1555 //  RBX: Inline cache data object. | 1541 //  RBX: Inline cache data object. | 
| 1556 //  TOS(0): Return address. | 1542 //  TOS(0): Return address. | 
| 1557 // Inline cache data object structure: | 1543 // Inline cache data object structure: | 
| 1558 // 0: function-name | 1544 // 0: function-name | 
| 1559 // 1: N, number of arguments checked. | 1545 // 1: N, number of arguments checked. | 
| 1560 // 2 .. (length - 1): group of checks, each check containing: | 1546 // 2 .. (length - 1): group of checks, each check containing: | 
| 1561 //   - N classes. | 1547 //   - N classes. | 
| 1562 //   - 1 target function. | 1548 //   - 1 target function. | 
| 1563 void StubCode::GenerateOneArgOptimizedCheckInlineCacheStub( | 1549 void StubCode::GenerateOneArgOptimizedCheckInlineCacheStub( | 
| 1564     Assembler* assembler) { | 1550     Assembler* assembler) { | 
| 1565   GenerateOptimizedUsageCounterIncrement(assembler); | 1551   GenerateOptimizedUsageCounterIncrement(assembler); | 
| 1566   GenerateNArgsCheckInlineCacheStub(assembler, 1, | 1552   GenerateNArgsCheckInlineCacheStub(assembler, 1, | 
| 1567       kInlineCacheMissHandlerOneArgRuntimeEntry, | 1553                                     kInlineCacheMissHandlerOneArgRuntimeEntry, | 
| 1568       Token::kILLEGAL, | 1554                                     Token::kILLEGAL, true /* optimized */); | 
| 1569       true /* optimized */); |  | 
| 1570 } | 1555 } | 
| 1571 | 1556 | 
| 1572 | 1557 | 
| 1573 void StubCode::GenerateTwoArgsOptimizedCheckInlineCacheStub( | 1558 void StubCode::GenerateTwoArgsOptimizedCheckInlineCacheStub( | 
| 1574     Assembler* assembler) { | 1559     Assembler* assembler) { | 
| 1575   GenerateOptimizedUsageCounterIncrement(assembler); | 1560   GenerateOptimizedUsageCounterIncrement(assembler); | 
| 1576   GenerateNArgsCheckInlineCacheStub(assembler, 2, | 1561   GenerateNArgsCheckInlineCacheStub(assembler, 2, | 
| 1577       kInlineCacheMissHandlerTwoArgsRuntimeEntry, | 1562                                     kInlineCacheMissHandlerTwoArgsRuntimeEntry, | 
| 1578       Token::kILLEGAL, | 1563                                     Token::kILLEGAL, true /* optimized */); | 
| 1579       true /* optimized */); |  | 
| 1580 } | 1564 } | 
| 1581 | 1565 | 
| 1582 | 1566 | 
| 1583 // Intermediary stub between a static call and its target. ICData contains | 1567 // Intermediary stub between a static call and its target. ICData contains | 
| 1584 // the target function and the call count. | 1568 // the target function and the call count. | 
| 1585 // RBX: ICData | 1569 // RBX: ICData | 
| 1586 void StubCode::GenerateZeroArgsUnoptimizedStaticCallStub(Assembler* assembler) { | 1570 void StubCode::GenerateZeroArgsUnoptimizedStaticCallStub(Assembler* assembler) { | 
| 1587   GenerateUsageCounterIncrement(assembler, RCX); | 1571   GenerateUsageCounterIncrement(assembler, RCX); | 
| 1588 #if defined(DEBUG) | 1572 #if defined(DEBUG) | 
| 1589   { Label ok; | 1573   { | 
|  | 1574     Label ok; | 
| 1590     // Check that the IC data array has NumArgsTested() == 0. | 1575     // Check that the IC data array has NumArgsTested() == 0. | 
| 1591     // 'NumArgsTested' is stored in the least significant bits of 'state_bits'. | 1576     // 'NumArgsTested' is stored in the least significant bits of 'state_bits'. | 
| 1592     __ movl(RCX, FieldAddress(RBX, ICData::state_bits_offset())); | 1577     __ movl(RCX, FieldAddress(RBX, ICData::state_bits_offset())); | 
| 1593     ASSERT(ICData::NumArgsTestedShift() == 0);  // No shift needed. | 1578     ASSERT(ICData::NumArgsTestedShift() == 0);  // No shift needed. | 
| 1594     __ andq(RCX, Immediate(ICData::NumArgsTestedMask())); | 1579     __ andq(RCX, Immediate(ICData::NumArgsTestedMask())); | 
| 1595     __ cmpq(RCX, Immediate(0)); | 1580     __ cmpq(RCX, Immediate(0)); | 
| 1596     __ j(EQUAL, &ok, Assembler::kNearJump); | 1581     __ j(EQUAL, &ok, Assembler::kNearJump); | 
| 1597     __ Stop("Incorrect IC data for unoptimized static call"); | 1582     __ Stop("Incorrect IC data for unoptimized static call"); | 
| 1598     __ Bind(&ok); | 1583     __ Bind(&ok); | 
| 1599   } | 1584   } | 
| 1600 #endif  // DEBUG | 1585 #endif  // DEBUG | 
| 1601 | 1586 | 
| 1602   // Check single stepping. | 1587   // Check single stepping. | 
| 1603   Label stepping, done_stepping; | 1588   Label stepping, done_stepping; | 
| 1604   if (FLAG_support_debugger) { | 1589   if (FLAG_support_debugger) { | 
| 1605     __ LoadIsolate(RAX); | 1590     __ LoadIsolate(RAX); | 
| 1606     __ movzxb(RAX, Address(RAX, Isolate::single_step_offset())); | 1591     __ movzxb(RAX, Address(RAX, Isolate::single_step_offset())); | 
| 1607     __ cmpq(RAX, Immediate(0)); | 1592     __ cmpq(RAX, Immediate(0)); | 
| 1608 #if defined(DEBUG) | 1593 #if defined(DEBUG) | 
| 1609       static const bool kJumpLength = Assembler::kFarJump; | 1594     static const bool kJumpLength = Assembler::kFarJump; | 
| 1610 #else | 1595 #else | 
| 1611       static const bool kJumpLength = Assembler::kNearJump; | 1596     static const bool kJumpLength = Assembler::kNearJump; | 
| 1612 #endif  // DEBUG | 1597 #endif  // DEBUG | 
| 1613     __ j(NOT_EQUAL, &stepping, kJumpLength); | 1598     __ j(NOT_EQUAL, &stepping, kJumpLength); | 
| 1614     __ Bind(&done_stepping); | 1599     __ Bind(&done_stepping); | 
| 1615   } | 1600   } | 
| 1616 | 1601 | 
| 1617   // RBX: IC data object (preserved). | 1602   // RBX: IC data object (preserved). | 
| 1618   __ movq(R12, FieldAddress(RBX, ICData::ic_data_offset())); | 1603   __ movq(R12, FieldAddress(RBX, ICData::ic_data_offset())); | 
| 1619   // R12: ic_data_array with entries: target functions and count. | 1604   // R12: ic_data_array with entries: target functions and count. | 
| 1620   __ leaq(R12, FieldAddress(R12, Array::data_offset())); | 1605   __ leaq(R12, FieldAddress(R12, Array::data_offset())); | 
| 1621   // R12: points directly to the first ic data array element. | 1606   // R12: points directly to the first ic data array element. | 
| (...skipping 27 matching lines...) |
| 1649     __ RestoreCodePointer(); | 1634     __ RestoreCodePointer(); | 
| 1650     __ LeaveStubFrame(); | 1635     __ LeaveStubFrame(); | 
| 1651     __ jmp(&done_stepping, Assembler::kNearJump); | 1636     __ jmp(&done_stepping, Assembler::kNearJump); | 
| 1652   } | 1637   } | 
| 1653 } | 1638 } | 
| 1654 | 1639 | 
| 1655 | 1640 | 
| 1656 void StubCode::GenerateOneArgUnoptimizedStaticCallStub(Assembler* assembler) { | 1641 void StubCode::GenerateOneArgUnoptimizedStaticCallStub(Assembler* assembler) { | 
| 1657   GenerateUsageCounterIncrement(assembler, RCX); | 1642   GenerateUsageCounterIncrement(assembler, RCX); | 
| 1658   GenerateNArgsCheckInlineCacheStub( | 1643   GenerateNArgsCheckInlineCacheStub( | 
| 1659       assembler, | 1644       assembler, 1, kStaticCallMissHandlerOneArgRuntimeEntry, Token::kILLEGAL); | 
| 1660       1, |  | 
| 1661       kStaticCallMissHandlerOneArgRuntimeEntry, |  | 
| 1662       Token::kILLEGAL); |  | 
| 1663 } | 1645 } | 
| 1664 | 1646 | 
| 1665 | 1647 | 
| 1666 void StubCode::GenerateTwoArgsUnoptimizedStaticCallStub(Assembler* assembler) { | 1648 void StubCode::GenerateTwoArgsUnoptimizedStaticCallStub(Assembler* assembler) { | 
| 1667   GenerateUsageCounterIncrement(assembler, RCX); | 1649   GenerateUsageCounterIncrement(assembler, RCX); | 
| 1668   GenerateNArgsCheckInlineCacheStub(assembler, | 1650   GenerateNArgsCheckInlineCacheStub( | 
| 1669       2, | 1651       assembler, 2, kStaticCallMissHandlerTwoArgsRuntimeEntry, Token::kILLEGAL); | 
| 1670       kStaticCallMissHandlerTwoArgsRuntimeEntry, |  | 
| 1671       Token::kILLEGAL); |  | 
| 1672 } | 1652 } | 
| 1673 | 1653 | 
| 1674 | 1654 | 
| 1675 // Stub for compiling a function and jumping to the compiled code. | 1655 // Stub for compiling a function and jumping to the compiled code. | 
| 1676 // RCX: IC-Data (for methods). | 1656 // RCX: IC-Data (for methods). | 
| 1677 // R10: Arguments descriptor. | 1657 // R10: Arguments descriptor. | 
| 1678 // RAX: Function. | 1658 // RAX: Function. | 
| 1679 void StubCode::GenerateLazyCompileStub(Assembler* assembler) { | 1659 void StubCode::GenerateLazyCompileStub(Assembler* assembler) { | 
| 1680   __ EnterStubFrame(); | 1660   __ EnterStubFrame(); | 
| 1681   __ pushq(R10);  // Preserve arguments descriptor array. | 1661   __ pushq(R10);  // Preserve arguments descriptor array. | 
| 1682   __ pushq(RBX);  // Preserve IC data object. | 1662   __ pushq(RBX);  // Preserve IC data object. | 
| 1683   __ pushq(RAX);  // Pass function. | 1663   __ pushq(RAX);  // Pass function. | 
| 1684   __ CallRuntime(kCompileFunctionRuntimeEntry, 1); | 1664   __ CallRuntime(kCompileFunctionRuntimeEntry, 1); | 
| 1685   __ popq(RAX);  // Restore function. | 1665   __ popq(RAX);  // Restore function. | 
| 1686   __ popq(RBX);  // Restore IC data array. | 1666   __ popq(RBX);  // Restore IC data array. | 
| 1687   __ popq(R10);  // Restore arguments descriptor array. | 1667   __ popq(R10);  // Restore arguments descriptor array. | 
| 1688   __ LeaveStubFrame(); | 1668   __ LeaveStubFrame(); | 
| 1689 | 1669 | 
| 1690   __ movq(CODE_REG, FieldAddress(RAX, Function::code_offset())); | 1670   __ movq(CODE_REG, FieldAddress(RAX, Function::code_offset())); | 
| 1691   __ movq(RAX, FieldAddress(RAX, Function::entry_point_offset())); | 1671   __ movq(RAX, FieldAddress(RAX, Function::entry_point_offset())); | 
| 1692   __ jmp(RAX); | 1672   __ jmp(RAX); | 
| 1693 } | 1673 } | 
| 1694 | 1674 | 
| 1695 | 1675 | 
| 1696 // RBX: Contains an ICData. | 1676 // RBX: Contains an ICData. | 
| 1697 // TOS(0): return address (Dart code). | 1677 // TOS(0): return address (Dart code). | 
| 1698 void StubCode::GenerateICCallBreakpointStub(Assembler* assembler) { | 1678 void StubCode::GenerateICCallBreakpointStub(Assembler* assembler) { | 
| 1699   __ EnterStubFrame(); | 1679   __ EnterStubFrame(); | 
| 1700   __ pushq(RBX);  // Preserve IC data. | 1680   __ pushq(RBX);           // Preserve IC data. | 
| 1701   __ pushq(Immediate(0));  // Result slot. | 1681   __ pushq(Immediate(0));  // Result slot. | 
| 1702   __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0); | 1682   __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0); | 
| 1703   __ popq(CODE_REG);  // Original stub. | 1683   __ popq(CODE_REG);  // Original stub. | 
| 1704   __ popq(RBX);  // Restore IC data. | 1684   __ popq(RBX);       // Restore IC data. | 
| 1705   __ LeaveStubFrame(); | 1685   __ LeaveStubFrame(); | 
| 1706 | 1686 | 
| 1707   __ movq(RAX, FieldAddress(CODE_REG, Code::entry_point_offset())); | 1687   __ movq(RAX, FieldAddress(CODE_REG, Code::entry_point_offset())); | 
| 1708   __ jmp(RAX);   // Jump to original stub. | 1688   __ jmp(RAX);  // Jump to original stub. | 
| 1709 } | 1689 } | 
| 1710 | 1690 | 
| 1711 | 1691 | 
| 1712 //  TOS(0): return address (Dart code). | 1692 //  TOS(0): return address (Dart code). | 
| 1713 void StubCode::GenerateRuntimeCallBreakpointStub(Assembler* assembler) { | 1693 void StubCode::GenerateRuntimeCallBreakpointStub(Assembler* assembler) { | 
| 1714   __ EnterStubFrame(); | 1694   __ EnterStubFrame(); | 
| 1715   __ pushq(Immediate(0));  // Result slot. | 1695   __ pushq(Immediate(0));  // Result slot. | 
| 1716   __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0); | 1696   __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0); | 
| 1717   __ popq(CODE_REG);  // Original stub. | 1697   __ popq(CODE_REG);  // Original stub. | 
| 1718   __ LeaveStubFrame(); | 1698   __ LeaveStubFrame(); | 
| 1719 | 1699 | 
| 1720   __ movq(RAX, FieldAddress(CODE_REG, Code::entry_point_offset())); | 1700   __ movq(RAX, FieldAddress(CODE_REG, Code::entry_point_offset())); | 
| 1721   __ jmp(RAX);   // Jump to original stub. | 1701   __ jmp(RAX);  // Jump to original stub. | 
| 1722 } | 1702 } | 
| 1723 | 1703 | 
| 1724 | 1704 | 
| 1725 // Called only from unoptimized code. | 1705 // Called only from unoptimized code. | 
| 1726 void StubCode::GenerateDebugStepCheckStub(Assembler* assembler) { | 1706 void StubCode::GenerateDebugStepCheckStub(Assembler* assembler) { | 
| 1727   // Check single stepping. | 1707   // Check single stepping. | 
| 1728   Label stepping, done_stepping; | 1708   Label stepping, done_stepping; | 
| 1729   __ LoadIsolate(RAX); | 1709   __ LoadIsolate(RAX); | 
| 1730   __ movzxb(RAX, Address(RAX, Isolate::single_step_offset())); | 1710   __ movzxb(RAX, Address(RAX, Isolate::single_step_offset())); | 
| 1731   __ cmpq(RAX, Immediate(0)); | 1711   __ cmpq(RAX, Immediate(0)); | 
| (...skipping 20 matching lines...) |
| 1752   const intptr_t kInstantiatorTypeArgumentsInBytes = 1 * kWordSize; | 1732   const intptr_t kInstantiatorTypeArgumentsInBytes = 1 * kWordSize; | 
| 1753   const intptr_t kInstanceOffsetInBytes = 2 * kWordSize; | 1733   const intptr_t kInstanceOffsetInBytes = 2 * kWordSize; | 
| 1754   const intptr_t kCacheOffsetInBytes = 3 * kWordSize; | 1734   const intptr_t kCacheOffsetInBytes = 3 * kWordSize; | 
| 1755   __ movq(RAX, Address(RSP, kInstanceOffsetInBytes)); | 1735   __ movq(RAX, Address(RSP, kInstanceOffsetInBytes)); | 
| 1756   __ LoadObject(R9, Object::null_object()); | 1736   __ LoadObject(R9, Object::null_object()); | 
| 1757   if (n > 1) { | 1737   if (n > 1) { | 
| 1758     __ LoadClass(R10, RAX); | 1738     __ LoadClass(R10, RAX); | 
| 1759     // Compute instance type arguments into R13. | 1739     // Compute instance type arguments into R13. | 
| 1760     Label has_no_type_arguments; | 1740     Label has_no_type_arguments; | 
| 1761     __ movq(R13, R9); | 1741     __ movq(R13, R9); | 
| 1762     __ movl(RDI, FieldAddress(R10, | 1742     __ movl(RDI, | 
| 1763         Class::type_arguments_field_offset_in_words_offset())); | 1743             FieldAddress(R10, | 
|  | 1744                          Class::type_arguments_field_offset_in_words_offset())); | 
| 1764     __ cmpl(RDI, Immediate(Class::kNoTypeArguments)); | 1745     __ cmpl(RDI, Immediate(Class::kNoTypeArguments)); | 
| 1765     __ j(EQUAL, &has_no_type_arguments, Assembler::kNearJump); | 1746     __ j(EQUAL, &has_no_type_arguments, Assembler::kNearJump); | 
| 1766     __ movq(R13, FieldAddress(RAX, RDI, TIMES_8, 0)); | 1747     __ movq(R13, FieldAddress(RAX, RDI, TIMES_8, 0)); | 
| 1767     __ Bind(&has_no_type_arguments); | 1748     __ Bind(&has_no_type_arguments); | 
| 1768   } | 1749   } | 
| 1769   __ LoadClassId(R10, RAX); | 1750   __ LoadClassId(R10, RAX); | 
| 1770   // RAX: instance, R10: instance class id. | 1751   // RAX: instance, R10: instance class id. | 
| 1771   // R13: instance type arguments or null, used only if n > 1. | 1752   // R13: instance type arguments or null, used only if n > 1. | 
| 1772   __ movq(RDX, Address(RSP, kCacheOffsetInBytes)); | 1753   __ movq(RDX, Address(RSP, kCacheOffsetInBytes)); | 
| 1773   // RDX: SubtypeTestCache. | 1754   // RDX: SubtypeTestCache. | 
| 1774   __ movq(RDX, FieldAddress(RDX, SubtypeTestCache::cache_offset())); | 1755   __ movq(RDX, FieldAddress(RDX, SubtypeTestCache::cache_offset())); | 
| 1775   __ addq(RDX, Immediate(Array::data_offset() - kHeapObjectTag)); | 1756   __ addq(RDX, Immediate(Array::data_offset() - kHeapObjectTag)); | 
| 1776   // RDX: Entry start. | 1757   // RDX: Entry start. | 
| 1777   // R10: instance class id. | 1758   // R10: instance class id. | 
| 1778   // R13: instance type arguments. | 1759   // R13: instance type arguments. | 
| 1779   Label loop, found, not_found, next_iteration; | 1760   Label loop, found, not_found, next_iteration; | 
| 1780   __ SmiTag(R10); | 1761   __ SmiTag(R10); | 
| 1781   __ cmpq(R10, Immediate(Smi::RawValue(kClosureCid))); | 1762   __ cmpq(R10, Immediate(Smi::RawValue(kClosureCid))); | 
| 1782   __ j(NOT_EQUAL, &loop, Assembler::kNearJump); | 1763   __ j(NOT_EQUAL, &loop, Assembler::kNearJump); | 
| 1783   __ movq(R10, FieldAddress(RAX, Closure::function_offset())); | 1764   __ movq(R10, FieldAddress(RAX, Closure::function_offset())); | 
| 1784   // R10: instance class id as Smi or function. | 1765   // R10: instance class id as Smi or function. | 
| 1785   __ Bind(&loop); | 1766   __ Bind(&loop); | 
| 1786   __ movq(RDI, | 1767   __ movq(RDI, Address(RDX, kWordSize * | 
| 1787           Address(RDX, | 1768                                 SubtypeTestCache::kInstanceClassIdOrFunction)); | 
| 1788                   kWordSize * SubtypeTestCache::kInstanceClassIdOrFunction)); |  | 
| 1789   __ cmpq(RDI, R9); | 1769   __ cmpq(RDI, R9); | 
| 1790   __ j(EQUAL, &not_found, Assembler::kNearJump); | 1770   __ j(EQUAL, &not_found, Assembler::kNearJump); | 
| 1791   __ cmpq(RDI, R10); | 1771   __ cmpq(RDI, R10); | 
| 1792   if (n == 1) { | 1772   if (n == 1) { | 
| 1793     __ j(EQUAL, &found, Assembler::kNearJump); | 1773     __ j(EQUAL, &found, Assembler::kNearJump); | 
| 1794   } else { | 1774   } else { | 
| 1795     __ j(NOT_EQUAL, &next_iteration, Assembler::kNearJump); | 1775     __ j(NOT_EQUAL, &next_iteration, Assembler::kNearJump); | 
| 1796     __ movq(RDI, | 1776     __ movq(RDI, | 
| 1797         Address(RDX, kWordSize * SubtypeTestCache::kInstanceTypeArguments)); | 1777             Address(RDX, kWordSize * SubtypeTestCache::kInstanceTypeArguments)); | 
| 1798     __ cmpq(RDI, R13); | 1778     __ cmpq(RDI, R13); | 
| 1799     if (n == 2) { | 1779     if (n == 2) { | 
| 1800       __ j(EQUAL, &found, Assembler::kNearJump); | 1780       __ j(EQUAL, &found, Assembler::kNearJump); | 
| 1801     } else { | 1781     } else { | 
| 1802       __ j(NOT_EQUAL, &next_iteration, Assembler::kNearJump); | 1782       __ j(NOT_EQUAL, &next_iteration, Assembler::kNearJump); | 
| 1803       __ movq(RDI, | 1783       __ movq(RDI, | 
| 1804           Address(RDX, | 1784               Address(RDX, kWordSize * | 
| 1805                   kWordSize * SubtypeTestCache::kInstantiatorTypeArguments)); | 1785                                SubtypeTestCache::kInstantiatorTypeArguments)); | 
| 1806       __ cmpq(RDI, Address(RSP, kInstantiatorTypeArgumentsInBytes)); | 1786       __ cmpq(RDI, Address(RSP, kInstantiatorTypeArgumentsInBytes)); | 
| 1807       __ j(EQUAL, &found, Assembler::kNearJump); | 1787       __ j(EQUAL, &found, Assembler::kNearJump); | 
| 1808     } | 1788     } | 
| 1809   } | 1789   } | 
| 1810 | 1790 | 
| 1811   __ Bind(&next_iteration); | 1791   __ Bind(&next_iteration); | 
| 1812   __ addq(RDX, Immediate(kWordSize * SubtypeTestCache::kTestEntryLength)); | 1792   __ addq(RDX, Immediate(kWordSize * SubtypeTestCache::kTestEntryLength)); | 
| 1813   __ jmp(&loop, Assembler::kNearJump); | 1793   __ jmp(&loop, Assembler::kNearJump); | 
| 1814   // Fall through to not found. | 1794   // Fall through to not found. | 
| 1815   __ Bind(&not_found); | 1795   __ Bind(&not_found); | 
| (...skipping 72 matching lines...) |
| 1888   Register stacktrace_reg = CallingConventions::kArg5Reg; | 1868   Register stacktrace_reg = CallingConventions::kArg5Reg; | 
| 1889   __ movq(THR, CallingConventions::kArg6Reg); | 1869   __ movq(THR, CallingConventions::kArg6Reg); | 
| 1890 #endif | 1870 #endif | 
| 1891   __ movq(RBP, CallingConventions::kArg3Reg); | 1871   __ movq(RBP, CallingConventions::kArg3Reg); | 
| 1892   __ movq(RSP, CallingConventions::kArg2Reg); | 1872   __ movq(RSP, CallingConventions::kArg2Reg); | 
| 1893   __ movq(kStackTraceObjectReg, stacktrace_reg); | 1873   __ movq(kStackTraceObjectReg, stacktrace_reg); | 
| 1894   __ movq(kExceptionObjectReg, CallingConventions::kArg4Reg); | 1874   __ movq(kExceptionObjectReg, CallingConventions::kArg4Reg); | 
| 1895   // Set the tag. | 1875   // Set the tag. | 
| 1896   __ movq(Assembler::VMTagAddress(), Immediate(VMTag::kDartTagId)); | 1876   __ movq(Assembler::VMTagAddress(), Immediate(VMTag::kDartTagId)); | 
| 1897   // Clear top exit frame. | 1877   // Clear top exit frame. | 
| 1898   __ movq(Address(THR, Thread::top_exit_frame_info_offset()), | 1878   __ movq(Address(THR, Thread::top_exit_frame_info_offset()), Immediate(0)); | 
| 1899           Immediate(0)); |  | 
| 1900   // Restore the pool pointer. | 1879   // Restore the pool pointer. | 
| 1901   __ RestoreCodePointer(); | 1880   __ RestoreCodePointer(); | 
| 1902   __ LoadPoolPointer(PP); | 1881   __ LoadPoolPointer(PP); | 
| 1903   __ jmp(CallingConventions::kArg1Reg);  // Jump to the exception handler code. | 1882   __ jmp(CallingConventions::kArg1Reg);  // Jump to the exception handler code. | 
| 1904 } | 1883 } | 
| 1905 | 1884 | 
| 1906 | 1885 | 
| 1907 // Calls to the runtime to optimize the given function. | 1886 // Calls to the runtime to optimize the given function. | 
| 1908 // RDI: function to be reoptimized. | 1887 // RDI: function to be reoptimized. | 
| 1909 // R10: argument descriptor (preserved). | 1888 // R10: argument descriptor (preserved). | 
| 1910 void StubCode::GenerateOptimizeFunctionStub(Assembler* assembler) { | 1889 void StubCode::GenerateOptimizeFunctionStub(Assembler* assembler) { | 
| 1911   __ EnterStubFrame(); | 1890   __ EnterStubFrame(); | 
| 1912   __ pushq(R10);  // Preserve args descriptor. | 1891   __ pushq(R10);           // Preserve args descriptor. | 
| 1913   __ pushq(Immediate(0));  // Result slot. | 1892   __ pushq(Immediate(0));  // Result slot. | 
| 1914   __ pushq(RDI);  // Arg0: function to optimize | 1893   __ pushq(RDI);           // Arg0: function to optimize | 
| 1915   __ CallRuntime(kOptimizeInvokedFunctionRuntimeEntry, 1); | 1894   __ CallRuntime(kOptimizeInvokedFunctionRuntimeEntry, 1); | 
| 1916   __ popq(RAX);  // Discard argument. | 1895   __ popq(RAX);  // Discard argument. | 
| 1917   __ popq(RAX);  // Get Code object. | 1896   __ popq(RAX);  // Get Code object. | 
| 1918   __ popq(R10);  // Restore argument descriptor. | 1897   __ popq(R10);  // Restore argument descriptor. | 
| 1919   __ LeaveStubFrame(); | 1898   __ LeaveStubFrame(); | 
| 1920   __ movq(CODE_REG, FieldAddress(RAX, Function::code_offset())); | 1899   __ movq(CODE_REG, FieldAddress(RAX, Function::code_offset())); | 
| 1921   __ movq(RCX, FieldAddress(RAX, Function::entry_point_offset())); | 1900   __ movq(RCX, FieldAddress(RAX, Function::entry_point_offset())); | 
| 1922   __ jmp(RCX); | 1901   __ jmp(RCX); | 
| 1923   __ int3(); | 1902   __ int3(); | 
| 1924 } | 1903 } | 
| (...skipping 342 matching lines...) |
| 2267 } | 2246 } | 
| 2268 | 2247 | 
| 2269 | 2248 | 
| 2270 void StubCode::GenerateFrameAwaitingMaterializationStub(Assembler* assembler) { | 2249 void StubCode::GenerateFrameAwaitingMaterializationStub(Assembler* assembler) { | 
| 2271   __ int3(); | 2250   __ int3(); | 
| 2272 } | 2251 } | 
| 2273 | 2252 | 
| 2274 }  // namespace dart | 2253 }  // namespace dart | 
| 2275 | 2254 | 
| 2276 #endif  // defined TARGET_ARCH_X64 | 2255 #endif  // defined TARGET_ARCH_X64 | 
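
For readers following the EmitFastSmiOp path in the diff above: the stub classifies both operands with a single OR-and-test against kSmiTagMask (a Smi carries a zero tag bit), then performs the add or subtract directly on the tagged values and bails out to the regular IC path on overflow. A minimal C++ sketch of that logic, assuming a 1-bit Smi tag of zero; the helper names and the use of the GCC/Clang builtin `__builtin_add_overflow` are illustrative, not VM code:

```cpp
#include <cstdint>
#include <optional>

// Hypothetical 1-bit Smi tagging: tagged = value << 1, so bit 0 is always 0.
constexpr intptr_t kSmiTagMask = 1;

// Mirrors "orq R13, RAX; testq R13, Immediate(kSmiTagMask)": one test
// classifies both operands, since the OR has a set tag bit iff either does.
inline bool BothSmis(intptr_t a, intptr_t b) {
  return ((a | b) & kSmiTagMask) == 0;
}

// Fast-path add on tagged values; nullopt is where the stub jumps to
// not_smi_or_overflow and lets the normal IC machinery handle the call.
inline std::optional<intptr_t> FastSmiAdd(intptr_t a, intptr_t b) {
  if (!BothSmis(a, b)) return std::nullopt;
  intptr_t result;
  if (__builtin_add_overflow(a, b, &result)) return std::nullopt;
  return result;  // still a valid tagged Smi: 2x + 2y == 2(x + y)
}
```

Because a tagged Smi is just the value shifted left by one, the tagged sum of two Smis is already the tagged result, which is why the stub can add RAX and RCX without untagging either operand.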
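
The loop emitted by GenerateNArgsCheckInlineCacheStub can be read as a linear scan over fixed-width entries in the IC data array: num_args class-id Smis per entry, compared against the actual argument class ids, with a Smi::RawValue(kIllegalCid) sentinel terminating the array and the entry width coming from ICData::TestEntryLengthFor(num_args). A C++ sketch of that scan under simplified assumptions (a flattened intptr_t array; the sentinel value and the fields past the class ids are illustrative only):

```cpp
#include <cstdint>
#include <vector>

// Assumed flat layout per check: num_args class ids, then target and count;
// the array is terminated by a sentinel class id in the first slot.
constexpr intptr_t kIllegalCid = -1;  // sentinel value chosen for illustration

// Returns the start index of the matching entry, or -1 for an IC miss.
intptr_t FindICEntry(const std::vector<intptr_t>& ic_data,
                     const std::vector<intptr_t>& arg_cids,
                     size_t entry_length) {
  const size_t num_args = arg_cids.size();
  for (size_t entry = 0; ic_data[entry] != kIllegalCid;
       entry += entry_length) {               // "addq R13, Immediate(entry_size)"
    bool match = true;
    for (size_t i = 0; i < num_args; i++) {   // per-argument class-id compare
      if (ic_data[entry + i] != arg_cids[i]) {
        match = false;
        break;
      }
    }
    if (match) return static_cast<intptr_t>(entry);  // "found": invoke target
  }
  return -1;  // fall through to the IC miss runtime entry
}
```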
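
The 1/2/3-argument subtype-test stub later in the diff probes the SubtypeTestCache the same way: compare the instance class id (or, for closures, the function), then for n > 1 the instance type arguments, and for n > 2 the instantiator type arguments, stopping when the class-id slot holds null. A compact C++ sketch under simplified assumptions (a plain struct per entry, with 0 standing in for the null sentinel the stub compares against R9):

```cpp
#include <cstdint>

// Assumed entry shape; the three slots correspond to the stub's
// kInstanceClassIdOrFunction, kInstanceTypeArguments and
// kInstantiatorTypeArguments words.
struct STCEntry {
  intptr_t cid_or_function;
  intptr_t instance_type_args;
  intptr_t instantiator_type_args;
};

// Probe with n in {1, 2, 3} keys; a zero class-id slot marks the end.
const STCEntry* ProbeSubtypeTestCache(const STCEntry* entries, int n,
                                      intptr_t cid_or_function,
                                      intptr_t instance_type_args,
                                      intptr_t instantiator_type_args) {
  for (const STCEntry* e = entries; e->cid_or_function != 0; ++e) {
    if (e->cid_or_function != cid_or_function) continue;
    if (n > 1 && e->instance_type_args != instance_type_args) continue;
    if (n > 2 && e->instantiator_type_args != instantiator_type_args) continue;
    return e;  // found: the stub then hands back the cached test result
  }
  return nullptr;  // not found: defer to the runtime
}
```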