| OLD | NEW |
| 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 #include "vm/globals.h" | 5 #include "vm/globals.h" |
| 6 #if defined(TARGET_ARCH_IA32) | 6 #if defined(TARGET_ARCH_IA32) |
| 7 | 7 |
| 8 #include "vm/assembler.h" | 8 #include "vm/assembler.h" |
| 9 #include "vm/compiler.h" | 9 #include "vm/compiler.h" |
| 10 #include "vm/dart_entry.h" | 10 #include "vm/dart_entry.h" |
| 11 #include "vm/flow_graph_compiler.h" | 11 #include "vm/flow_graph_compiler.h" |
| 12 #include "vm/instructions.h" | 12 #include "vm/instructions.h" |
| 13 #include "vm/heap.h" | 13 #include "vm/heap.h" |
| 14 #include "vm/object_store.h" | 14 #include "vm/object_store.h" |
| 15 #include "vm/resolver.h" | 15 #include "vm/resolver.h" |
| 16 #include "vm/scavenger.h" | 16 #include "vm/scavenger.h" |
| 17 #include "vm/stack_frame.h" | 17 #include "vm/stack_frame.h" |
| 18 #include "vm/stub_code.h" | 18 #include "vm/stub_code.h" |
| 19 #include "vm/tags.h" | 19 #include "vm/tags.h" |
| 20 | 20 |
| 21 | 21 |
| 22 #define __ assembler-> | 22 #define __ assembler-> |
| 23 | 23 |
| 24 namespace dart { | 24 namespace dart { |
| 25 | 25 |
| 26 DEFINE_FLAG(bool, inline_alloc, true, "Inline allocation of objects."); | 26 DEFINE_FLAG(bool, inline_alloc, true, "Inline allocation of objects."); |
| 27 DEFINE_FLAG(bool, use_slow_path, false, | 27 DEFINE_FLAG(bool, |
| 28 "Set to true for debugging & verifying the slow paths."); | 28 use_slow_path, |
| | 29 false, |
| | 30 "Set to true for debugging & verifying the slow paths."); |
| 29 DECLARE_FLAG(bool, trace_optimized_ic_calls); | 31 DECLARE_FLAG(bool, trace_optimized_ic_calls); |
| 30 | 32 |
| 31 #define INT32_SIZEOF(x) static_cast<int32_t>(sizeof(x)) | 33 #define INT32_SIZEOF(x) static_cast<int32_t>(sizeof(x)) |
| 32 | 34 |
| 33 // Input parameters: | 35 // Input parameters: |
| 34 // ESP : points to return address. | 36 // ESP : points to return address. |
| 35 // ESP + 4 : address of last argument in argument array. | 37 // ESP + 4 : address of last argument in argument array. |
| 36 // ESP + 4*EDX : address of first argument in argument array. | 38 // ESP + 4*EDX : address of first argument in argument array. |
| 37 // ESP + 4*EDX + 4 : address of return value. | 39 // ESP + 4*EDX + 4 : address of return value. |
| 38 // ECX : address of the runtime function to call. | 40 // ECX : address of the runtime function to call. |
| 39 // EDX : number of arguments to the call. | 41 // EDX : number of arguments to the call. |
| 40 // Must preserve callee saved registers EDI and EBX. | 42 // Must preserve callee saved registers EDI and EBX. |
| 41 void StubCode::GenerateCallToRuntimeStub(Assembler* assembler) { | 43 void StubCode::GenerateCallToRuntimeStub(Assembler* assembler) { |
| 42 const intptr_t thread_offset = NativeArguments::thread_offset(); | 44 const intptr_t thread_offset = NativeArguments::thread_offset(); |
| 43 const intptr_t argc_tag_offset = NativeArguments::argc_tag_offset(); | 45 const intptr_t argc_tag_offset = NativeArguments::argc_tag_offset(); |
| 44 const intptr_t argv_offset = NativeArguments::argv_offset(); | 46 const intptr_t argv_offset = NativeArguments::argv_offset(); |
| 45 const intptr_t retval_offset = NativeArguments::retval_offset(); | 47 const intptr_t retval_offset = NativeArguments::retval_offset(); |
| 46 | 48 |
| 47 __ EnterFrame(0); | 49 __ EnterFrame(0); |
| 48 | 50 |
| 49 // Save exit frame information to enable stack walking as we are about | 51 // Save exit frame information to enable stack walking as we are about |
| 50 // to transition to Dart VM C++ code. | 52 // to transition to Dart VM C++ code. |
| 51 __ movl(Address(THR, Thread::top_exit_frame_info_offset()), EBP); | 53 __ movl(Address(THR, Thread::top_exit_frame_info_offset()), EBP); |
| 52 | 54 |
| 53 #if defined(DEBUG) | 55 #if defined(DEBUG) |
| 54 { Label ok; | 56 { |
| | 57 Label ok; |
| 55 // Check that we are always entering from Dart code. | 58 // Check that we are always entering from Dart code. |
| 56 __ cmpl(Assembler::VMTagAddress(), Immediate(VMTag::kDartTagId)); | 59 __ cmpl(Assembler::VMTagAddress(), Immediate(VMTag::kDartTagId)); |
| 57 __ j(EQUAL, &ok, Assembler::kNearJump); | 60 __ j(EQUAL, &ok, Assembler::kNearJump); |
| 58 __ Stop("Not coming from Dart code."); | 61 __ Stop("Not coming from Dart code."); |
| 59 __ Bind(&ok); | 62 __ Bind(&ok); |
| 60 } | 63 } |
| 61 #endif | 64 #endif |
| 62 | 65 |
| 63 // Mark that the thread is executing VM code. | 66 // Mark that the thread is executing VM code. |
| 64 __ movl(Assembler::VMTagAddress(), ECX); | 67 __ movl(Assembler::VMTagAddress(), ECX); |
| 65 | 68 |
| 66 // Reserve space for arguments and align frame before entering C++ world. | 69 // Reserve space for arguments and align frame before entering C++ world. |
| 67 __ AddImmediate(ESP, Immediate(-INT32_SIZEOF(NativeArguments))); | 70 __ AddImmediate(ESP, Immediate(-INT32_SIZEOF(NativeArguments))); |
| 68 if (OS::ActivationFrameAlignment() > 1) { | 71 if (OS::ActivationFrameAlignment() > 1) { |
| 69 __ andl(ESP, Immediate(~(OS::ActivationFrameAlignment() - 1))); | 72 __ andl(ESP, Immediate(~(OS::ActivationFrameAlignment() - 1))); |
| 70 } | 73 } |
| 71 | 74 |
| 72 // Pass NativeArguments structure by value and call runtime. | 75 // Pass NativeArguments structure by value and call runtime. |
| 73 __ movl(Address(ESP, thread_offset), THR); // Set thread in NativeArgs. | 76 __ movl(Address(ESP, thread_offset), THR); // Set thread in NativeArgs. |
| 74 // There are no runtime calls to closures, so we do not need to set the tag | 77 // There are no runtime calls to closures, so we do not need to set the tag |
| 75 // bits kClosureFunctionBit and kInstanceFunctionBit in argc_tag_. | 78 // bits kClosureFunctionBit and kInstanceFunctionBit in argc_tag_. |
| 76 __ movl(Address(ESP, argc_tag_offset), EDX); // Set argc in NativeArguments. | 79 __ movl(Address(ESP, argc_tag_offset), EDX); // Set argc in NativeArguments. |
| 77 // Compute argv. | 80 // Compute argv. |
| 78 __ leal(EAX, Address(EBP, EDX, TIMES_4, kParamEndSlotFromFp * kWordSize)); | 81 __ leal(EAX, Address(EBP, EDX, TIMES_4, kParamEndSlotFromFp * kWordSize)); |
| 79 __ movl(Address(ESP, argv_offset), EAX); // Set argv in NativeArguments. | 82 __ movl(Address(ESP, argv_offset), EAX); // Set argv in NativeArguments. |
| 80 __ addl(EAX, Immediate(1 * kWordSize)); // Retval is next to 1st argument. | 83 __ addl(EAX, Immediate(1 * kWordSize)); // Retval is next to 1st argument. |
| 81 __ movl(Address(ESP, retval_offset), EAX); // Set retval in NativeArguments. | 84 __ movl(Address(ESP, retval_offset), EAX); // Set retval in NativeArguments. |
| 82 __ call(ECX); | 85 __ call(ECX); |
| 83 | 86 |
| 84 __ movl(Assembler::VMTagAddress(), Immediate(VMTag::kDartTagId)); | 87 __ movl(Assembler::VMTagAddress(), Immediate(VMTag::kDartTagId)); |
| 85 | 88 |
| 86 // Reset exit frame information in Isolate structure. | 89 // Reset exit frame information in Isolate structure. |
| 87 __ movl(Address(THR, Thread::top_exit_frame_info_offset()), Immediate(0)); | 90 __ movl(Address(THR, Thread::top_exit_frame_info_offset()), Immediate(0)); |
| 88 | 91 |
| 89 __ LeaveFrame(); | 92 __ LeaveFrame(); |
| 90 __ ret(); | 93 __ ret(); |
| (...skipping 39 matching lines...) |
| 130 NativeArguments::retval_offset() + native_args_struct_offset; | 133 NativeArguments::retval_offset() + native_args_struct_offset; |
| 131 | 134 |
| 132 __ EnterFrame(0); | 135 __ EnterFrame(0); |
| 133 | 136 |
| 134 | 137 |
| 135 // Save exit frame information to enable stack walking as we are about | 138 // Save exit frame information to enable stack walking as we are about |
| 136 // to transition to dart VM code. | 139 // to transition to dart VM code. |
| 137 __ movl(Address(THR, Thread::top_exit_frame_info_offset()), EBP); | 140 __ movl(Address(THR, Thread::top_exit_frame_info_offset()), EBP); |
| 138 | 141 |
| 139 #if defined(DEBUG) | 142 #if defined(DEBUG) |
| 140 { Label ok; | 143 { |
| | 144 Label ok; |
| 141 // Check that we are always entering from Dart code. | 145 // Check that we are always entering from Dart code. |
| 142 __ cmpl(Assembler::VMTagAddress(), Immediate(VMTag::kDartTagId)); | 146 __ cmpl(Assembler::VMTagAddress(), Immediate(VMTag::kDartTagId)); |
| 143 __ j(EQUAL, &ok, Assembler::kNearJump); | 147 __ j(EQUAL, &ok, Assembler::kNearJump); |
| 144 __ Stop("Not coming from Dart code."); | 148 __ Stop("Not coming from Dart code."); |
| 145 __ Bind(&ok); | 149 __ Bind(&ok); |
| 146 } | 150 } |
| 147 #endif | 151 #endif |
| 148 | 152 |
| 149 // Mark that the thread is executing native code. | 153 // Mark that the thread is executing native code. |
| 150 __ movl(Assembler::VMTagAddress(), ECX); | 154 __ movl(Assembler::VMTagAddress(), ECX); |
| 151 | 155 |
| 152 // Reserve space for the native arguments structure, the outgoing parameters | 156 // Reserve space for the native arguments structure, the outgoing parameters |
| 153 // (pointer to the native arguments structure, the C function entry point) | 157 // (pointer to the native arguments structure, the C function entry point) |
| 154 // and align frame before entering the C++ world. | 158 // and align frame before entering the C++ world. |
| 155 __ AddImmediate(ESP, | 159 __ AddImmediate(ESP, |
| 156 Immediate(-INT32_SIZEOF(NativeArguments) - (2 * kWordSize))); | 160 Immediate(-INT32_SIZEOF(NativeArguments) - (2 * kWordSize))); |
| 157 if (OS::ActivationFrameAlignment() > 1) { | 161 if (OS::ActivationFrameAlignment() > 1) { |
| 158 __ andl(ESP, Immediate(~(OS::ActivationFrameAlignment() - 1))); | 162 __ andl(ESP, Immediate(~(OS::ActivationFrameAlignment() - 1))); |
| 159 } | 163 } |
| 160 | 164 |
| 161 // Pass NativeArguments structure by value and call native function. | 165 // Pass NativeArguments structure by value and call native function. |
| 162 __ movl(Address(ESP, thread_offset), THR); // Set thread in NativeArgs. | 166 __ movl(Address(ESP, thread_offset), THR); // Set thread in NativeArgs. |
| 163 __ movl(Address(ESP, argc_tag_offset), EDX); // Set argc in NativeArguments. | 167 __ movl(Address(ESP, argc_tag_offset), EDX); // Set argc in NativeArguments. |
| 164 __ movl(Address(ESP, argv_offset), EAX); // Set argv in NativeArguments. | 168 __ movl(Address(ESP, argv_offset), EAX); // Set argv in NativeArguments. |
| 165 __ leal(EAX, Address(EBP, 2 * kWordSize)); // Compute return value addr. | 169 __ leal(EAX, Address(EBP, 2 * kWordSize)); // Compute return value addr. |
| 166 __ movl(Address(ESP, retval_offset), EAX); // Set retval in NativeArguments. | 170 __ movl(Address(ESP, retval_offset), EAX); // Set retval in NativeArguments. |
| 167 __ leal(EAX, Address(ESP, 2 * kWordSize)); // Pointer to the NativeArguments. | 171 __ leal(EAX, Address(ESP, 2 * kWordSize)); // Pointer to the NativeArguments. |
| 168 __ movl(Address(ESP, 0), EAX); // Pass the pointer to the NativeArguments. | 172 __ movl(Address(ESP, 0), EAX); // Pass the pointer to the NativeArguments. |
| 169 | 173 |
| 170 __ movl(Address(ESP, kWordSize), ECX); // Function to call. | 174 __ movl(Address(ESP, kWordSize), ECX); // Function to call. |
| 171 ExternalLabel label(NativeEntry::NativeCallWrapperEntry()); | 175 ExternalLabel label(NativeEntry::NativeCallWrapperEntry()); |
| 172 __ call(&label); | 176 __ call(&label); |
| 173 | 177 |
| 174 __ movl(Assembler::VMTagAddress(), Immediate(VMTag::kDartTagId)); | 178 __ movl(Assembler::VMTagAddress(), Immediate(VMTag::kDartTagId)); |
| 175 | 179 |
| (...skipping 22 matching lines...) |
| 198 const intptr_t retval_offset = | 202 const intptr_t retval_offset = |
| 199 NativeArguments::retval_offset() + native_args_struct_offset; | 203 NativeArguments::retval_offset() + native_args_struct_offset; |
| 200 | 204 |
| 201 __ EnterFrame(0); | 205 __ EnterFrame(0); |
| 202 | 206 |
| 203 // Save exit frame information to enable stack walking as we are about | 207 // Save exit frame information to enable stack walking as we are about |
| 204 // to transition to dart VM code. | 208 // to transition to dart VM code. |
| 205 __ movl(Address(THR, Thread::top_exit_frame_info_offset()), EBP); | 209 __ movl(Address(THR, Thread::top_exit_frame_info_offset()), EBP); |
| 206 | 210 |
| 207 #if defined(DEBUG) | 211 #if defined(DEBUG) |
| 208 { Label ok; | 212 { |
| | 213 Label ok; |
| 209 // Check that we are always entering from Dart code. | 214 // Check that we are always entering from Dart code. |
| 210 __ cmpl(Assembler::VMTagAddress(), Immediate(VMTag::kDartTagId)); | 215 __ cmpl(Assembler::VMTagAddress(), Immediate(VMTag::kDartTagId)); |
| 211 __ j(EQUAL, &ok, Assembler::kNearJump); | 216 __ j(EQUAL, &ok, Assembler::kNearJump); |
| 212 __ Stop("Not coming from Dart code."); | 217 __ Stop("Not coming from Dart code."); |
| 213 __ Bind(&ok); | 218 __ Bind(&ok); |
| 214 } | 219 } |
| 215 #endif | 220 #endif |
| 216 | 221 |
| 217 // Mark that the thread is executing native code. | 222 // Mark that the thread is executing native code. |
| 218 __ movl(Assembler::VMTagAddress(), ECX); | 223 __ movl(Assembler::VMTagAddress(), ECX); |
| 219 | 224 |
| 220 // Reserve space for the native arguments structure, the outgoing parameter | 225 // Reserve space for the native arguments structure, the outgoing parameter |
| 221 // (pointer to the native arguments structure) and align frame before | 226 // (pointer to the native arguments structure) and align frame before |
| 222 // entering the C++ world. | 227 // entering the C++ world. |
| 223 __ AddImmediate(ESP, Immediate(-INT32_SIZEOF(NativeArguments) - kWordSize)); | 228 __ AddImmediate(ESP, Immediate(-INT32_SIZEOF(NativeArguments) - kWordSize)); |
| 224 if (OS::ActivationFrameAlignment() > 1) { | 229 if (OS::ActivationFrameAlignment() > 1) { |
| 225 __ andl(ESP, Immediate(~(OS::ActivationFrameAlignment() - 1))); | 230 __ andl(ESP, Immediate(~(OS::ActivationFrameAlignment() - 1))); |
| 226 } | 231 } |
| 227 | 232 |
| 228 // Pass NativeArguments structure by value and call native function. | 233 // Pass NativeArguments structure by value and call native function. |
| 229 __ movl(Address(ESP, thread_offset), THR); // Set thread in NativeArgs. | 234 __ movl(Address(ESP, thread_offset), THR); // Set thread in NativeArgs. |
| 230 __ movl(Address(ESP, argc_tag_offset), EDX); // Set argc in NativeArguments. | 235 __ movl(Address(ESP, argc_tag_offset), EDX); // Set argc in NativeArguments. |
| 231 __ movl(Address(ESP, argv_offset), EAX); // Set argv in NativeArguments. | 236 __ movl(Address(ESP, argv_offset), EAX); // Set argv in NativeArguments. |
| 232 __ leal(EAX, Address(EBP, 2 * kWordSize)); // Compute return value addr. | 237 __ leal(EAX, Address(EBP, 2 * kWordSize)); // Compute return value addr. |
| 233 __ movl(Address(ESP, retval_offset), EAX); // Set retval in NativeArguments. | 238 __ movl(Address(ESP, retval_offset), EAX); // Set retval in NativeArguments. |
| 234 __ leal(EAX, Address(ESP, kWordSize)); // Pointer to the NativeArguments. | 239 __ leal(EAX, Address(ESP, kWordSize)); // Pointer to the NativeArguments. |
| 235 __ movl(Address(ESP, 0), EAX); // Pass the pointer to the NativeArguments. | 240 __ movl(Address(ESP, 0), EAX); // Pass the pointer to the NativeArguments. |
| 236 __ call(ECX); | 241 __ call(ECX); |
| 237 | 242 |
| 238 __ movl(Assembler::VMTagAddress(), Immediate(VMTag::kDartTagId)); | 243 __ movl(Assembler::VMTagAddress(), Immediate(VMTag::kDartTagId)); |
| 239 | 244 |
| 240 // Reset exit frame information in Isolate structure. | 245 // Reset exit frame information in Isolate structure. |
| 241 __ movl(Address(THR, Thread::top_exit_frame_info_offset()), Immediate(0)); | 246 __ movl(Address(THR, Thread::top_exit_frame_info_offset()), Immediate(0)); |
| 242 | 247 |
| 243 __ LeaveFrame(); | 248 __ LeaveFrame(); |
| 244 __ ret(); | 249 __ ret(); |
| 245 } | 250 } |
| 246 | 251 |
| 247 | 252 |
| 248 // Input parameters: | 253 // Input parameters: |
| 249 // EDX: arguments descriptor array. | 254 // EDX: arguments descriptor array. |
| 250 void StubCode::GenerateCallStaticFunctionStub(Assembler* assembler) { | 255 void StubCode::GenerateCallStaticFunctionStub(Assembler* assembler) { |
| 251 __ EnterStubFrame(); | 256 __ EnterStubFrame(); |
| 252 __ pushl(EDX); // Preserve arguments descriptor array. | 257 __ pushl(EDX); // Preserve arguments descriptor array. |
| 253 __ pushl(Immediate(0)); // Setup space on stack for return value. | 258 __ pushl(Immediate(0)); // Setup space on stack for return value. |
| 254 __ CallRuntime(kPatchStaticCallRuntimeEntry, 0); | 259 __ CallRuntime(kPatchStaticCallRuntimeEntry, 0); |
| 255 __ popl(EAX); // Get Code object result. | 260 __ popl(EAX); // Get Code object result. |
| 256 __ popl(EDX); // Restore arguments descriptor array. | 261 __ popl(EDX); // Restore arguments descriptor array. |
| 257 // Remove the stub frame as we are about to jump to the dart function. | 262 // Remove the stub frame as we are about to jump to the dart function. |
| 258 __ LeaveFrame(); | 263 __ LeaveFrame(); |
| 259 | 264 |
| 260 __ movl(ECX, FieldAddress(EAX, Code::entry_point_offset())); | 265 __ movl(ECX, FieldAddress(EAX, Code::entry_point_offset())); |
| 261 __ jmp(ECX); | 266 __ jmp(ECX); |
| 262 } | 267 } |
| 263 | 268 |
| 264 | 269 |
| 265 // Called from a static call only when an invalid code has been entered | 270 // Called from a static call only when an invalid code has been entered |
| 266 // (invalid because its function was optimized or deoptimized). | 271 // (invalid because its function was optimized or deoptimized). |
| 267 // EDX: arguments descriptor array. | 272 // EDX: arguments descriptor array. |
| 268 void StubCode::GenerateFixCallersTargetStub(Assembler* assembler) { | 273 void StubCode::GenerateFixCallersTargetStub(Assembler* assembler) { |
| 269 // Create a stub frame as we are pushing some objects on the stack before | 274 // Create a stub frame as we are pushing some objects on the stack before |
| 270 // calling into the runtime. | 275 // calling into the runtime. |
| 271 __ EnterStubFrame(); | 276 __ EnterStubFrame(); |
| 272 __ pushl(EDX); // Preserve arguments descriptor array. | 277 __ pushl(EDX); // Preserve arguments descriptor array. |
| 273 __ pushl(Immediate(0)); // Setup space on stack for return value. | 278 __ pushl(Immediate(0)); // Setup space on stack for return value. |
| 274 __ CallRuntime(kFixCallersTargetRuntimeEntry, 0); | 279 __ CallRuntime(kFixCallersTargetRuntimeEntry, 0); |
| 275 __ popl(EAX); // Get Code object. | 280 __ popl(EAX); // Get Code object. |
| 276 __ popl(EDX); // Restore arguments descriptor array. | 281 __ popl(EDX); // Restore arguments descriptor array. |
| 277 __ movl(EAX, FieldAddress(EAX, Code::entry_point_offset())); | 282 __ movl(EAX, FieldAddress(EAX, Code::entry_point_offset())); |
| 278 __ LeaveFrame(); | 283 __ LeaveFrame(); |
| 279 __ jmp(EAX); | 284 __ jmp(EAX); |
| 280 __ int3(); | 285 __ int3(); |
| 281 } | 286 } |
| 282 | 287 |
| (...skipping 97 matching lines...) |
| 380 intptr_t offset = 0; | 385 intptr_t offset = 0; |
| 381 for (intptr_t reg_idx = 0; reg_idx < kNumberOfXmmRegisters; ++reg_idx) { | 386 for (intptr_t reg_idx = 0; reg_idx < kNumberOfXmmRegisters; ++reg_idx) { |
| 382 XmmRegister xmm_reg = static_cast<XmmRegister>(reg_idx); | 387 XmmRegister xmm_reg = static_cast<XmmRegister>(reg_idx); |
| 383 __ movups(Address(ESP, offset), xmm_reg); | 388 __ movups(Address(ESP, offset), xmm_reg); |
| 384 offset += kFpuRegisterSize; | 389 offset += kFpuRegisterSize; |
| 385 } | 390 } |
| 386 | 391 |
| 387 __ movl(ECX, ESP); // Preserve saved registers block. | 392 __ movl(ECX, ESP); // Preserve saved registers block. |
| 388 __ ReserveAlignedFrameSpace(2 * kWordSize); | 393 __ ReserveAlignedFrameSpace(2 * kWordSize); |
| 389 __ movl(Address(ESP, 0 * kWordSize), ECX); // Start of register block. | 394 __ movl(Address(ESP, 0 * kWordSize), ECX); // Start of register block. |
| 390 bool is_lazy = (kind == kLazyDeoptFromReturn) || | 395 bool is_lazy = |
| 391 (kind == kLazyDeoptFromThrow); | 396 (kind == kLazyDeoptFromReturn) || (kind == kLazyDeoptFromThrow); |
| 392 __ movl(Address(ESP, 1 * kWordSize), Immediate(is_lazy ? 1 : 0)); | 397 __ movl(Address(ESP, 1 * kWordSize), Immediate(is_lazy ? 1 : 0)); |
| 393 __ CallRuntime(kDeoptimizeCopyFrameRuntimeEntry, 2); | 398 __ CallRuntime(kDeoptimizeCopyFrameRuntimeEntry, 2); |
| 394 // Result (EAX) is stack-size (FP - SP) in bytes. | 399 // Result (EAX) is stack-size (FP - SP) in bytes. |
| 395 | 400 |
| 396 if (kind == kLazyDeoptFromReturn) { | 401 if (kind == kLazyDeoptFromReturn) { |
| 397 // Restore result into EBX temporarily. | 402 // Restore result into EBX temporarily. |
| 398 __ movl(EBX, Address(EBP, saved_result_slot_from_fp * kWordSize)); | 403 __ movl(EBX, Address(EBP, saved_result_slot_from_fp * kWordSize)); |
| 399 } else if (kind == kLazyDeoptFromThrow) { | 404 } else if (kind == kLazyDeoptFromThrow) { |
| 400 // Restore result into EBX temporarily. | 405 // Restore result into EBX temporarily. |
| 401 __ movl(EBX, Address(EBP, saved_exception_slot_from_fp * kWordSize)); | 406 __ movl(EBX, Address(EBP, saved_exception_slot_from_fp * kWordSize)); |
| 402 __ movl(ECX, Address(EBP, saved_stacktrace_slot_from_fp * kWordSize)); | 407 __ movl(ECX, Address(EBP, saved_stacktrace_slot_from_fp * kWordSize)); |
| 403 } | 408 } |
| 404 | 409 |
| 405 __ LeaveFrame(); | 410 __ LeaveFrame(); |
| 406 __ popl(EDX); // Preserve return address. | 411 __ popl(EDX); // Preserve return address. |
| 407 __ movl(ESP, EBP); // Discard optimized frame. | 412 __ movl(ESP, EBP); // Discard optimized frame. |
| 408 __ subl(ESP, EAX); // Reserve space for deoptimized frame. | 413 __ subl(ESP, EAX); // Reserve space for deoptimized frame. |
| 409 __ pushl(EDX); // Restore return address. | 414 __ pushl(EDX); // Restore return address. |
| 410 | 415 |
| 411 // Leaf runtime function DeoptimizeFillFrame expects a Dart frame. | 416 // Leaf runtime function DeoptimizeFillFrame expects a Dart frame. |
| 412 __ EnterDartFrame(0); | 417 __ EnterDartFrame(0); |
| 413 if (kind == kLazyDeoptFromReturn) { | 418 if (kind == kLazyDeoptFromReturn) { |
| 414 __ pushl(EBX); // Preserve result as first local. | 419 __ pushl(EBX); // Preserve result as first local. |
| 415 } else if (kind == kLazyDeoptFromThrow) { | 420 } else if (kind == kLazyDeoptFromThrow) { |
| 416 __ pushl(EBX); // Preserve exception as first local. | 421 __ pushl(EBX); // Preserve exception as first local. |
| 417 __ pushl(ECX); // Preserve stacktrace as first local. | 422 __ pushl(ECX); // Preserve stacktrace as first local. |
| 418 } | 423 } |
| 419 __ ReserveAlignedFrameSpace(1 * kWordSize); | 424 __ ReserveAlignedFrameSpace(1 * kWordSize); |
| (...skipping 27 matching lines...) |
| 447 __ popl(EBX); | 452 __ popl(EBX); |
| 448 __ SmiUntag(EBX); | 453 __ SmiUntag(EBX); |
| 449 if (kind == kLazyDeoptFromReturn) { | 454 if (kind == kLazyDeoptFromReturn) { |
| 450 __ popl(EAX); // Restore result. | 455 __ popl(EAX); // Restore result. |
| 451 } else if (kind == kLazyDeoptFromThrow) { | 456 } else if (kind == kLazyDeoptFromThrow) { |
| 452 __ popl(EDX); // Restore exception. | 457 __ popl(EDX); // Restore exception. |
| 453 __ popl(EAX); // Restore stacktrace. | 458 __ popl(EAX); // Restore stacktrace. |
| 454 } | 459 } |
| 455 __ LeaveFrame(); | 460 __ LeaveFrame(); |
| 456 | 461 |
| 457 __ popl(ECX); // Pop return address. | 462 __ popl(ECX); // Pop return address. |
| 458 __ addl(ESP, EBX); // Remove materialization arguments. | 463 __ addl(ESP, EBX); // Remove materialization arguments. |
| 459 __ pushl(ECX); // Push return address. | 464 __ pushl(ECX); // Push return address. |
| 460 __ ret(); | 465 __ ret(); |
| 461 } | 466 } |
| 462 | 467 |
| 463 | 468 |
| 464 // EAX: result, must be preserved | 469 // EAX: result, must be preserved |
| 465 void StubCode::GenerateDeoptimizeLazyFromReturnStub(Assembler* assembler) { | 470 void StubCode::GenerateDeoptimizeLazyFromReturnStub(Assembler* assembler) { |
| 466 // Return address for "call" to deopt stub. | 471 // Return address for "call" to deopt stub. |
| 467 __ pushl(Immediate(0xe1e1e1e1)); | 472 __ pushl(Immediate(0xe1e1e1e1)); |
| 468 GenerateDeoptimizationSequence(assembler, kLazyDeoptFromReturn); | 473 GenerateDeoptimizationSequence(assembler, kLazyDeoptFromReturn); |
| 469 } | 474 } |
| (...skipping 18 matching lines...) |
| 488 __ Comment("NoSuchMethodDispatch"); | 493 __ Comment("NoSuchMethodDispatch"); |
| 489 // When lazily generated invocation dispatchers are disabled, the | 494 // When lazily generated invocation dispatchers are disabled, the |
| 490 // miss-handler may return null. | 495 // miss-handler may return null. |
| 491 const Immediate& raw_null = | 496 const Immediate& raw_null = |
| 492 Immediate(reinterpret_cast<intptr_t>(Object::null())); | 497 Immediate(reinterpret_cast<intptr_t>(Object::null())); |
| 493 __ cmpl(EAX, raw_null); | 498 __ cmpl(EAX, raw_null); |
| 494 __ j(NOT_EQUAL, call_target_function); | 499 __ j(NOT_EQUAL, call_target_function); |
| 495 __ EnterStubFrame(); | 500 __ EnterStubFrame(); |
| 496 // Load the receiver. | 501 // Load the receiver. |
| 497 __ movl(EDI, FieldAddress(EDX, ArgumentsDescriptor::count_offset())); | 502 __ movl(EDI, FieldAddress(EDX, ArgumentsDescriptor::count_offset())); |
| 498 __ movl(EAX, Address( | 503 __ movl(EAX, Address(EBP, EDI, TIMES_HALF_WORD_SIZE, |
| 499 EBP, EDI, TIMES_HALF_WORD_SIZE, kParamEndSlotFromFp * kWordSize)); | 504 kParamEndSlotFromFp * kWordSize)); |
| 500 __ pushl(Immediate(0)); // Setup space on stack for result. | 505 __ pushl(Immediate(0)); // Setup space on stack for result. |
| 501 __ pushl(EAX); // Receiver. | 506 __ pushl(EAX); // Receiver. |
| 502 __ pushl(ECX); // ICData/MegamorphicCache. | 507 __ pushl(ECX); // ICData/MegamorphicCache. |
| 503 __ pushl(EDX); // Arguments descriptor array. | 508 __ pushl(EDX); // Arguments descriptor array. |
| 504 __ movl(EDX, EDI); | 509 __ movl(EDX, EDI); |
| 505 // EDX: Smi-tagged arguments array length. | 510 // EDX: Smi-tagged arguments array length. |
| 506 PushArgumentsArray(assembler); | 511 PushArgumentsArray(assembler); |
| 507 const intptr_t kNumArgs = 4; | 512 const intptr_t kNumArgs = 4; |
| 508 __ CallRuntime(kInvokeNoSuchMethodDispatcherRuntimeEntry, kNumArgs); | 513 __ CallRuntime(kInvokeNoSuchMethodDispatcherRuntimeEntry, kNumArgs); |
| 509 __ Drop(4); | 514 __ Drop(4); |
| 510 __ popl(EAX); // Return value. | 515 __ popl(EAX); // Return value. |
| 511 __ LeaveFrame(); | 516 __ LeaveFrame(); |
| 512 __ ret(); | 517 __ ret(); |
| 513 } | 518 } |
| 514 | 519 |
| 515 | 520 |
| 516 void StubCode::GenerateMegamorphicMissStub(Assembler* assembler) { | 521 void StubCode::GenerateMegamorphicMissStub(Assembler* assembler) { |
| 517 __ EnterStubFrame(); | 522 __ EnterStubFrame(); |
| 518 // Load the receiver into EAX. The argument count in the arguments | 523 // Load the receiver into EAX. The argument count in the arguments |
| 519 // descriptor in EDX is a smi. | 524 // descriptor in EDX is a smi. |
| 520 __ movl(EAX, FieldAddress(EDX, ArgumentsDescriptor::count_offset())); | 525 __ movl(EAX, FieldAddress(EDX, ArgumentsDescriptor::count_offset())); |
| 521 // Two words (saved fp, stub's pc marker) in the stack above the return | 526 // Two words (saved fp, stub's pc marker) in the stack above the return |
| 522 // address. | 527 // address. |
| 523 __ movl(EAX, Address(ESP, EAX, TIMES_2, 2 * kWordSize)); | 528 __ movl(EAX, Address(ESP, EAX, TIMES_2, 2 * kWordSize)); |
| 524 // Preserve IC data and arguments descriptor. | 529 // Preserve IC data and arguments descriptor. |
| 525 __ pushl(ECX); | 530 __ pushl(ECX); |
| 526 __ pushl(EDX); | 531 __ pushl(EDX); |
| 527 | 532 |
| 528 __ pushl(Immediate(0)); // Space for the result of the runtime call. | 533 __ pushl(Immediate(0)); // Space for the result of the runtime call. |
| 529 __ pushl(EAX); // Pass receiver. | 534 __ pushl(EAX); // Pass receiver. |
| 530 __ pushl(ECX); // Pass IC data. | 535 __ pushl(ECX); // Pass IC data. |
| 531 __ pushl(EDX); // Pass arguments descriptor. | 536 __ pushl(EDX); // Pass arguments descriptor. |
| 532 __ CallRuntime(kMegamorphicCacheMissHandlerRuntimeEntry, 3); | 537 __ CallRuntime(kMegamorphicCacheMissHandlerRuntimeEntry, 3); |
| 533 // Discard arguments. | 538 // Discard arguments. |
| 534 __ popl(EAX); | 539 __ popl(EAX); |
| 535 __ popl(EAX); | 540 __ popl(EAX); |
| 536 __ popl(EAX); | 541 __ popl(EAX); |
| 537 __ popl(EAX); // Return value from the runtime call (function). | 542 __ popl(EAX); // Return value from the runtime call (function). |
| 538 __ popl(EDX); // Restore arguments descriptor. | 543 __ popl(EDX); // Restore arguments descriptor. |
| 539 __ popl(ECX); // Restore IC data. | 544 __ popl(ECX); // Restore IC data. |
| 540 __ LeaveFrame(); | 545 __ LeaveFrame(); |
| 541 | 546 |
| (...skipping 21 matching lines...) |
| 563 // RoundedAllocationSize((array_length * kWordSize) + sizeof(RawArray)). | 568 // RoundedAllocationSize((array_length * kWordSize) + sizeof(RawArray)). |
| 564 // Assert that length is a Smi. | 569 // Assert that length is a Smi. |
| 565 __ testl(EDX, Immediate(kSmiTagMask)); | 570 __ testl(EDX, Immediate(kSmiTagMask)); |
| 566 | 571 |
| 567 if (FLAG_use_slow_path) { | 572 if (FLAG_use_slow_path) { |
| 568 __ jmp(&slow_case); | 573 __ jmp(&slow_case); |
| 569 } else { | 574 } else { |
| 570 __ j(NOT_ZERO, &slow_case); | 575 __ j(NOT_ZERO, &slow_case); |
| 571 } | 576 } |
| 572 __ cmpl(EDX, Immediate(0)); | 577 __ cmpl(EDX, Immediate(0)); |
| 573 __ j(LESS, &slow_case); | 578 __ j(LESS, &slow_case); |
| 574 | 579 |
| 575 // Check for maximum allowed length. | 580 // Check for maximum allowed length. |
| 576 const Immediate& max_len = | 581 const Immediate& max_len = |
| 577 Immediate(reinterpret_cast<int32_t>(Smi::New(Array::kMaxElements))); | 582 Immediate(reinterpret_cast<int32_t>(Smi::New(Array::kMaxElements))); |
| 578 __ cmpl(EDX, max_len); | 583 __ cmpl(EDX, max_len); |
| 579 __ j(GREATER, &slow_case); | 584 __ j(GREATER, &slow_case); |
| 580 | 585 |
| 581 NOT_IN_PRODUCT(__ MaybeTraceAllocation(kArrayCid, | 586 NOT_IN_PRODUCT( |
| 582 EAX, | 587 __ MaybeTraceAllocation(kArrayCid, EAX, &slow_case, Assembler::kFarJump)); |
| 583 &slow_case, | |
| 584 Assembler::kFarJump)); | |
| 585 | 588 |
| 586 const intptr_t fixed_size = sizeof(RawArray) + kObjectAlignment - 1; | 589 const intptr_t fixed_size = sizeof(RawArray) + kObjectAlignment - 1; |
| 587 __ leal(EBX, Address(EDX, TIMES_2, fixed_size)); // EDX is Smi. | 590 __ leal(EBX, Address(EDX, TIMES_2, fixed_size)); // EDX is Smi. |
| 588 ASSERT(kSmiTagShift == 1); | 591 ASSERT(kSmiTagShift == 1); |
| 589 __ andl(EBX, Immediate(-kObjectAlignment)); | 592 __ andl(EBX, Immediate(-kObjectAlignment)); |
| 590 | 593 |
| 591 // ECX: array element type. | 594 // ECX: array element type. |
| 592 // EDX: array length as Smi. | 595 // EDX: array length as Smi. |
| 593 // EBX: allocation size. | 596 // EBX: allocation size. |
| 594 | 597 |
| (...skipping 40 matching lines...) |
| 635 // Get the class index and insert it into the tags. | 638 // Get the class index and insert it into the tags. |
| 636 __ orl(EDI, Immediate(RawObject::ClassIdTag::encode(cid))); | 639 __ orl(EDI, Immediate(RawObject::ClassIdTag::encode(cid))); |
| 637 __ movl(FieldAddress(EAX, Array::tags_offset()), EDI); // Tags. | 640 __ movl(FieldAddress(EAX, Array::tags_offset()), EDI); // Tags. |
| 638 } | 641 } |
| 639 // EAX: new object start as a tagged pointer. | 642 // EAX: new object start as a tagged pointer. |
| 640 // EBX: allocation size. | 643 // EBX: allocation size. |
| 641 // ECX: array element type. | 644 // ECX: array element type. |
| 642 // EDX: Array length as Smi (preserved). | 645 // EDX: Array length as Smi (preserved). |
| 643 // Store the type argument field. | 646 // Store the type argument field. |
| 644 // No generational barrier needed, since we store into a new object. | 647 // No generational barrier needed, since we store into a new object. |
| 645 __ StoreIntoObjectNoBarrier(EAX, | 648 __ StoreIntoObjectNoBarrier( |
| 646 FieldAddress(EAX, Array::type_arguments_offset()), | 649 EAX, FieldAddress(EAX, Array::type_arguments_offset()), ECX); |
| 647 ECX); | |
| 648 | 650 |
| 649 // Set the length field. | 651 // Set the length field. |
| 650 __ StoreIntoObjectNoBarrier(EAX, | 652 __ StoreIntoObjectNoBarrier(EAX, FieldAddress(EAX, Array::length_offset()), |
| 651 FieldAddress(EAX, Array::length_offset()), | |
| 652 EDX); | 653 EDX); |
| 653 | 654 |
| 654 // Initialize all array elements to raw_null. | 655 // Initialize all array elements to raw_null. |
| 655 // EAX: new object start as a tagged pointer. | 656 // EAX: new object start as a tagged pointer. |
| 656 // EBX: allocation size. | 657 // EBX: allocation size. |
| 657 // EDI: iterator which initially points to the start of the variable | 658 // EDI: iterator which initially points to the start of the variable |
| 658 // data area to be initialized. | 659 // data area to be initialized. |
| 659 // ECX: array element type. | 660 // ECX: array element type. |
| 660 // EDX: array length as Smi. | 661 // EDX: array length as Smi. |
| 661 __ leal(EBX, FieldAddress(EAX, EBX, TIMES_1, 0)); | 662 __ leal(EBX, FieldAddress(EAX, EBX, TIMES_1, 0)); |
| (...skipping 10 matching lines...) |
| 672 __ Bind(&done); | 673 __ Bind(&done); |
| 673 __ ret(); // returns the newly allocated object in EAX. | 674 __ ret(); // returns the newly allocated object in EAX. |
| 674 | 675 |
| 675 // Unable to allocate the array using the fast inline code, just call | 676 // Unable to allocate the array using the fast inline code, just call |
| 676 // into the runtime. | 677 // into the runtime. |
| 677 __ Bind(&slow_case); | 678 __ Bind(&slow_case); |
| 678 // Create a stub frame as we are pushing some objects on the stack before | 679 // Create a stub frame as we are pushing some objects on the stack before |
| 679 // calling into the runtime. | 680 // calling into the runtime. |
| 680 __ EnterStubFrame(); | 681 __ EnterStubFrame(); |
| 681 __ pushl(Immediate(0)); // Setup space on stack for return value. | 682 __ pushl(Immediate(0)); // Setup space on stack for return value. |
| 682 __ pushl(EDX); // Array length as Smi. | 683 __ pushl(EDX); // Array length as Smi. |
| 683 __ pushl(ECX); // Element type. | 684 __ pushl(ECX); // Element type. |
| 684 __ CallRuntime(kAllocateArrayRuntimeEntry, 2); | 685 __ CallRuntime(kAllocateArrayRuntimeEntry, 2); |
| 685 __ popl(EAX); // Pop element type argument. | 686 __ popl(EAX); // Pop element type argument. |
| 686 __ popl(EDX); // Pop array length argument (preserved). | 687 __ popl(EDX); // Pop array length argument (preserved). |
| 687 __ popl(EAX); // Pop return value from return slot. | 688 __ popl(EAX); // Pop return value from return slot. |
| 688 __ LeaveFrame(); | 689 __ LeaveFrame(); |
| 689 __ ret(); | 690 __ ret(); |
| 690 } | 691 } |
| 691 | 692 |
| 692 | 693 |
| 693 // Called when invoking dart code from C++ (VM code). | 694 // Called when invoking dart code from C++ (VM code). |
| (...skipping 113 matching lines...) |
| 807 // EBX and EDX are destroyed. | 808 // EBX and EDX are destroyed. |
| 808 void StubCode::GenerateAllocateContextStub(Assembler* assembler) { | 809 void StubCode::GenerateAllocateContextStub(Assembler* assembler) { |
| 809 if (FLAG_inline_alloc) { | 810 if (FLAG_inline_alloc) { |
| 810 Label slow_case; | 811 Label slow_case; |
| 811 // First compute the rounded instance size. | 812 // First compute the rounded instance size. |
| 812 // EDX: number of context variables. | 813 // EDX: number of context variables. |
| 813 intptr_t fixed_size = (sizeof(RawContext) + kObjectAlignment - 1); | 814 intptr_t fixed_size = (sizeof(RawContext) + kObjectAlignment - 1); |
| 814 __ leal(EBX, Address(EDX, TIMES_4, fixed_size)); | 815 __ leal(EBX, Address(EDX, TIMES_4, fixed_size)); |
| 815 __ andl(EBX, Immediate(-kObjectAlignment)); | 816 __ andl(EBX, Immediate(-kObjectAlignment)); |
| 816 | 817 |
| 817 NOT_IN_PRODUCT(__ MaybeTraceAllocation(kContextCid, | 818 NOT_IN_PRODUCT(__ MaybeTraceAllocation(kContextCid, EAX, &slow_case, |
| 818 EAX, | |
| 819 &slow_case, | |
| 820 Assembler::kFarJump)); | 819 Assembler::kFarJump)); |
| 821 | 820 |
| 822 // Now allocate the object. | 821 // Now allocate the object. |
| 823 // EDX: number of context variables. | 822 // EDX: number of context variables. |
| 824 const intptr_t cid = kContextCid; | 823 const intptr_t cid = kContextCid; |
| 825 Heap::Space space = Heap::kNew; | 824 Heap::Space space = Heap::kNew; |
| 826 __ movl(ECX, Address(THR, Thread::heap_offset())); | 825 __ movl(ECX, Address(THR, Thread::heap_offset())); |
| 827 __ movl(EAX, Address(ECX, Heap::TopOffset(space))); | 826 __ movl(EAX, Address(ECX, Heap::TopOffset(space))); |
| 828 __ addl(EBX, EAX); | 827 __ addl(EBX, EAX); |
| 829 // Check if the allocation fits into the remaining space. | 828 // Check if the allocation fits into the remaining space. |
| (...skipping 37 matching lines...) |
| 867 __ jmp(&done); | 866 __ jmp(&done); |
| 868 | 867 |
| 869 __ Bind(&size_tag_overflow); | 868 __ Bind(&size_tag_overflow); |
| 870 // Set overflow size tag value. | 869 // Set overflow size tag value. |
| 871 __ movl(EBX, Immediate(0)); | 870 __ movl(EBX, Immediate(0)); |
| 872 | 871 |
| 873 __ Bind(&done); | 872 __ Bind(&done); |
| 874 // EAX: new object. | 873 // EAX: new object. |
| 875 // EDX: number of context variables. | 874 // EDX: number of context variables. |
| 876 // EBX: size and bit tags. | 875 // EBX: size and bit tags. |
| 877 __ orl(EBX, | 876 __ orl(EBX, Immediate(RawObject::ClassIdTag::encode(cid))); |
| 878 Immediate(RawObject::ClassIdTag::encode(cid))); | |
| 879 __ movl(FieldAddress(EAX, Context::tags_offset()), EBX); // Tags. | 877 __ movl(FieldAddress(EAX, Context::tags_offset()), EBX); // Tags. |
| 880 } | 878 } |
| 881 | 879 |
| 882 // Set up the number of context variables field. | 880 // Set up the number of context variables field. |
| 883 // EAX: new object. | 881 // EAX: new object. |
| 884 // EDX: number of context variables as integer value (not object). | 882 // EDX: number of context variables as integer value (not object). |
| 885 __ movl(FieldAddress(EAX, Context::num_variables_offset()), EDX); | 883 __ movl(FieldAddress(EAX, Context::num_variables_offset()), EDX); |
| 886 | 884 |
| 887 // Setup the parent field. | 885 // Setup the parent field. |
| 888 // EAX: new object. | 886 // EAX: new object. |
| 889 // EDX: number of context variables. | 887 // EDX: number of context variables. |
| 890 // No generational barrier needed, since we are storing null. | 888 // No generational barrier needed, since we are storing null. |
| 891 __ StoreIntoObjectNoBarrier(EAX, | 889 __ StoreIntoObjectNoBarrier(EAX, |
| 892 FieldAddress(EAX, Context::parent_offset()), | 890 FieldAddress(EAX, Context::parent_offset()), |
| 893 Object::null_object()); | 891 Object::null_object()); |
| 894 | 892 |
| 895 // Initialize the context variables. | 893 // Initialize the context variables. |
| 896 // EAX: new object. | 894 // EAX: new object. |
| 897 // EDX: number of context variables. | 895 // EDX: number of context variables. |
| 898 { | 896 { |
| 899 Label loop, entry; | 897 Label loop, entry; |
| 900 __ leal(EBX, FieldAddress(EAX, Context::variable_offset(0))); | 898 __ leal(EBX, FieldAddress(EAX, Context::variable_offset(0))); |
| 901 | 899 |
| 902 __ jmp(&entry, Assembler::kNearJump); | 900 __ jmp(&entry, Assembler::kNearJump); |
| 903 __ Bind(&loop); | 901 __ Bind(&loop); |
| 904 __ decl(EDX); | 902 __ decl(EDX); |
| 905 // No generational barrier needed, since we are storing null. | 903 // No generational barrier needed, since we are storing null. |
| 906 __ StoreIntoObjectNoBarrier(EAX, | 904 __ StoreIntoObjectNoBarrier(EAX, Address(EBX, EDX, TIMES_4, 0), |
| 907 Address(EBX, EDX, TIMES_4, 0), | |
| 908 Object::null_object()); | 905 Object::null_object()); |
| 909 __ Bind(&entry); | 906 __ Bind(&entry); |
| 910 __ cmpl(EDX, Immediate(0)); | 907 __ cmpl(EDX, Immediate(0)); |
| 911 __ j(NOT_EQUAL, &loop, Assembler::kNearJump); | 908 __ j(NOT_EQUAL, &loop, Assembler::kNearJump); |
| 912 } | 909 } |
| 913 | 910 |
| 914 // Done allocating and initializing the context. | 911 // Done allocating and initializing the context. |
| 915 // EAX: new object. | 912 // EAX: new object. |
| 916 __ ret(); | 913 __ ret(); |
| 917 | 914 |
| (...skipping 83 matching lines...) |
| 1001 } | 998 } |
| 1002 | 999 |
| 1003 | 1000 |
| 1004 // Called for inline allocation of objects. | 1001 // Called for inline allocation of objects. |
| 1005 // Input parameters: | 1002 // Input parameters: |
| 1006 // ESP + 4 : type arguments object (only if class is parameterized). | 1003 // ESP + 4 : type arguments object (only if class is parameterized). |
| 1007 // ESP : points to return address. | 1004 // ESP : points to return address. |
| 1008 // Uses EAX, EBX, ECX, EDX, EDI as temporary registers. | 1005 // Uses EAX, EBX, ECX, EDX, EDI as temporary registers. |
| 1009 // Returns patch_code_pc offset where patching code for disabling the stub | 1006 // Returns patch_code_pc offset where patching code for disabling the stub |
| 1010 // has been generated (similar to regularly generated Dart code). | 1007 // has been generated (similar to regularly generated Dart code). |
| 1011 void StubCode::GenerateAllocationStubForClass( | 1008 void StubCode::GenerateAllocationStubForClass(Assembler* assembler, |
| 1012 Assembler* assembler, const Class& cls) { | 1009 const Class& cls) { |
| 1013 const intptr_t kObjectTypeArgumentsOffset = 1 * kWordSize; | 1010 const intptr_t kObjectTypeArgumentsOffset = 1 * kWordSize; |
| 1014 const Immediate& raw_null = | 1011 const Immediate& raw_null = |
| 1015 Immediate(reinterpret_cast<intptr_t>(Object::null())); | 1012 Immediate(reinterpret_cast<intptr_t>(Object::null())); |
| 1016 // The generated code is different if the class is parameterized. | 1013 // The generated code is different if the class is parameterized. |
| 1017 const bool is_cls_parameterized = cls.NumTypeArguments() > 0; | 1014 const bool is_cls_parameterized = cls.NumTypeArguments() > 0; |
| 1018 ASSERT(!is_cls_parameterized || | 1015 ASSERT(!is_cls_parameterized || |
| 1019 (cls.type_arguments_field_offset() != Class::kNoTypeArguments)); | 1016 (cls.type_arguments_field_offset() != Class::kNoTypeArguments)); |
| 1020 // kInlineInstanceSize is a constant used as a threshold for determining | 1017 // kInlineInstanceSize is a constant used as a threshold for determining |
| 1021 // when the object initialization should be done as a loop or as | 1018 // when the object initialization should be done as a loop or as |
| 1022 // straight line code. | 1019 // straight line code. |
| (...skipping 42 matching lines...) |
| 1065 // Initialize the remaining words of the object. | 1062 // Initialize the remaining words of the object. |
| 1066 | 1063 |
| 1067 // EAX: new object (tagged). | 1064 // EAX: new object (tagged). |
| 1068 // EBX: next object start. | 1065 // EBX: next object start. |
| 1069 // EDX: new object type arguments (if is_cls_parameterized). | 1066 // EDX: new object type arguments (if is_cls_parameterized). |
| 1070 // First try inlining the initialization without a loop. | 1067 // First try inlining the initialization without a loop. |
| 1071 if (instance_size < (kInlineInstanceSize * kWordSize)) { | 1068 if (instance_size < (kInlineInstanceSize * kWordSize)) { |
| 1072 // Check if the object contains any non-header fields. | 1069 // Check if the object contains any non-header fields. |
| 1073 // Small objects are initialized using a consecutive set of writes. | 1070 // Small objects are initialized using a consecutive set of writes. |
| 1074 for (intptr_t current_offset = Instance::NextFieldOffset(); | 1071 for (intptr_t current_offset = Instance::NextFieldOffset(); |
| 1075 current_offset < instance_size; | 1072 current_offset < instance_size; current_offset += kWordSize) { |
| 1076 current_offset += kWordSize) { | 1073 __ StoreIntoObjectNoBarrier(EAX, FieldAddress(EAX, current_offset), |
| 1077 __ StoreIntoObjectNoBarrier(EAX, | |
| 1078 FieldAddress(EAX, current_offset), | |
| 1079 Object::null_object()); | 1074 Object::null_object()); |
| 1080 } | 1075 } |
| 1081 } else { | 1076 } else { |
| 1082 __ leal(ECX, FieldAddress(EAX, Instance::NextFieldOffset())); | 1077 __ leal(ECX, FieldAddress(EAX, Instance::NextFieldOffset())); |
| 1083 // Loop until the whole object is initialized. | 1078 // Loop until the whole object is initialized. |
| 1084 // EAX: new object (tagged). | 1079 // EAX: new object (tagged). |
| 1085 // EBX: next object start. | 1080 // EBX: next object start. |
| 1086 // ECX: next word to be initialized. | 1081 // ECX: next word to be initialized. |
| 1087 // EDX: new object type arguments (if is_cls_parameterized). | 1082 // EDX: new object type arguments (if is_cls_parameterized). |
| 1088 Label init_loop; | 1083 Label init_loop; |
| 1089 Label done; | 1084 Label done; |
| 1090 __ Bind(&init_loop); | 1085 __ Bind(&init_loop); |
| 1091 __ cmpl(ECX, EBX); | 1086 __ cmpl(ECX, EBX); |
| 1092 __ j(ABOVE_EQUAL, &done, Assembler::kNearJump); | 1087 __ j(ABOVE_EQUAL, &done, Assembler::kNearJump); |
| 1093 __ StoreIntoObjectNoBarrier(EAX, | 1088 __ StoreIntoObjectNoBarrier(EAX, Address(ECX, 0), Object::null_object()); |
| 1094 Address(ECX, 0), | |
| 1095 Object::null_object()); | |
| 1096 __ addl(ECX, Immediate(kWordSize)); | 1089 __ addl(ECX, Immediate(kWordSize)); |
| 1097 __ jmp(&init_loop, Assembler::kNearJump); | 1090 __ jmp(&init_loop, Assembler::kNearJump); |
| 1098 __ Bind(&done); | 1091 __ Bind(&done); |
| 1099 } | 1092 } |
| 1100 if (is_cls_parameterized) { | 1093 if (is_cls_parameterized) { |
| 1101 // EAX: new object (tagged). | 1094 // EAX: new object (tagged). |
| 1102 // EDX: new object type arguments. | 1095 // EDX: new object type arguments. |
| 1103 // Set the type arguments in the new object. | 1096 // Set the type arguments in the new object. |
| 1104 intptr_t offset = cls.type_arguments_field_offset(); | 1097 intptr_t offset = cls.type_arguments_field_offset(); |
| 1105 __ StoreIntoObjectNoBarrier(EAX, FieldAddress(EAX, offset), EDX); | 1098 __ StoreIntoObjectNoBarrier(EAX, FieldAddress(EAX, offset), EDX); |
| (...skipping 36 matching lines...) |
| 1142 // EDX : arguments descriptor array. | 1135 // EDX : arguments descriptor array. |
| 1143 // Uses EAX, EBX, EDI as temporary registers. | 1136 // Uses EAX, EBX, EDI as temporary registers. |
| 1144 void StubCode::GenerateCallClosureNoSuchMethodStub(Assembler* assembler) { | 1137 void StubCode::GenerateCallClosureNoSuchMethodStub(Assembler* assembler) { |
| 1145 __ EnterStubFrame(); | 1138 __ EnterStubFrame(); |
| 1146 | 1139 |
| 1147 // Load the receiver. | 1140 // Load the receiver. |
| 1148 __ movl(EDI, FieldAddress(EDX, ArgumentsDescriptor::count_offset())); | 1141 __ movl(EDI, FieldAddress(EDX, ArgumentsDescriptor::count_offset())); |
| 1149 __ movl(EAX, Address(EBP, EDI, TIMES_2, kParamEndSlotFromFp * kWordSize)); | 1142 __ movl(EAX, Address(EBP, EDI, TIMES_2, kParamEndSlotFromFp * kWordSize)); |
| 1150 | 1143 |
| 1151 __ pushl(Immediate(0)); // Setup space on stack for result from noSuchMethod. | 1144 __ pushl(Immediate(0)); // Setup space on stack for result from noSuchMethod. |
| 1152 __ pushl(EAX); // Receiver. | 1145 __ pushl(EAX); // Receiver. |
| 1153 __ pushl(EDX); // Arguments descriptor array. | 1146 __ pushl(EDX); // Arguments descriptor array. |
| 1154 | 1147 |
| 1155 __ movl(EDX, EDI); | 1148 __ movl(EDX, EDI); |
| 1156 // EDX: Smi-tagged arguments array length. | 1149 // EDX: Smi-tagged arguments array length. |
| 1157 PushArgumentsArray(assembler); | 1150 PushArgumentsArray(assembler); |
| 1158 | 1151 |
| 1159 const intptr_t kNumArgs = 3; | 1152 const intptr_t kNumArgs = 3; |
| 1160 __ CallRuntime(kInvokeClosureNoSuchMethodRuntimeEntry, kNumArgs); | 1153 __ CallRuntime(kInvokeClosureNoSuchMethodRuntimeEntry, kNumArgs); |
| 1161 // noSuchMethod on closures always throws an error, so it will never return. | 1154 // noSuchMethod on closures always throws an error, so it will never return. |
| 1162 __ int3(); | 1155 __ int3(); |
| 1163 } | 1156 } |
| 1164 | 1157 |
| 1165 | 1158 |
| 1166 // Cannot use function object from ICData as it may be the inlined | 1159 // Cannot use function object from ICData as it may be the inlined |
| 1167 // function and not the top-scope function. | 1160 // function and not the top-scope function. |
| 1168 void StubCode::GenerateOptimizedUsageCounterIncrement(Assembler* assembler) { | 1161 void StubCode::GenerateOptimizedUsageCounterIncrement(Assembler* assembler) { |
| 1169 Register ic_reg = ECX; | 1162 Register ic_reg = ECX; |
| 1170 Register func_reg = EBX; | 1163 Register func_reg = EBX; |
| 1171 if (FLAG_trace_optimized_ic_calls) { | 1164 if (FLAG_trace_optimized_ic_calls) { |
| 1172 __ EnterStubFrame(); | 1165 __ EnterStubFrame(); |
| 1173 __ pushl(func_reg); // Preserve | 1166 __ pushl(func_reg); // Preserve |
| 1174 __ pushl(ic_reg); // Preserve. | 1167 __ pushl(ic_reg); // Preserve. |
| 1175 __ pushl(ic_reg); // Argument. | 1168 __ pushl(ic_reg); // Argument. |
| 1176 __ pushl(func_reg); // Argument. | 1169 __ pushl(func_reg); // Argument. |
| 1177 __ CallRuntime(kTraceICCallRuntimeEntry, 2); | 1170 __ CallRuntime(kTraceICCallRuntimeEntry, 2); |
| 1178 __ popl(EAX); // Discard argument; | 1171 __ popl(EAX); // Discard argument; |
| 1179 __ popl(EAX); // Discard argument; | 1172 __ popl(EAX); // Discard argument; |
| 1180 __ popl(ic_reg); // Restore. | 1173 __ popl(ic_reg); // Restore. |
| 1181 __ popl(func_reg); // Restore. | 1174 __ popl(func_reg); // Restore. |
| 1182 __ LeaveFrame(); | 1175 __ LeaveFrame(); |
| 1183 } | 1176 } |
| 1184 __ incl(FieldAddress(func_reg, Function::usage_counter_offset())); | 1177 __ incl(FieldAddress(func_reg, Function::usage_counter_offset())); |
| 1185 } | 1178 } |
| 1186 | 1179 |
| 1187 | 1180 |
| 1188 // Loads function into 'temp_reg'. | 1181 // Loads function into 'temp_reg'. |
| 1189 void StubCode::GenerateUsageCounterIncrement(Assembler* assembler, | 1182 void StubCode::GenerateUsageCounterIncrement(Assembler* assembler, |
| 1190 Register temp_reg) { | 1183 Register temp_reg) { |
| 1191 if (FLAG_optimization_counter_threshold >= 0) { | 1184 if (FLAG_optimization_counter_threshold >= 0) { |
| (...skipping 10 matching lines...) |
| 1202 // Note: ECX must be preserved. | 1195 // Note: ECX must be preserved. |
| 1203 // Attempt a quick Smi operation for known operations ('kind'). The ICData | 1196 // Attempt a quick Smi operation for known operations ('kind'). The ICData |
| 1204 // must have been primed with a Smi/Smi check that will be used for counting | 1197 // must have been primed with a Smi/Smi check that will be used for counting |
| 1205 // the invocations. | 1198 // the invocations. |
| 1206 static void EmitFastSmiOp(Assembler* assembler, | 1199 static void EmitFastSmiOp(Assembler* assembler, |
| 1207 Token::Kind kind, | 1200 Token::Kind kind, |
| 1208 intptr_t num_args, | 1201 intptr_t num_args, |
| 1209 Label* not_smi_or_overflow) { | 1202 Label* not_smi_or_overflow) { |
| 1210 __ Comment("Fast Smi op"); | 1203 __ Comment("Fast Smi op"); |
| 1211 ASSERT(num_args == 2); | 1204 ASSERT(num_args == 2); |
| 1212 __ movl(EDI, Address(ESP, + 1 * kWordSize)); // Right | 1205 __ movl(EDI, Address(ESP, +1 * kWordSize)); // Right |
| 1213 __ movl(EAX, Address(ESP, + 2 * kWordSize)); // Left | 1206 __ movl(EAX, Address(ESP, +2 * kWordSize)); // Left |
| 1214 __ movl(EBX, EDI); | 1207 __ movl(EBX, EDI); |
| 1215 __ orl(EBX, EAX); | 1208 __ orl(EBX, EAX); |
| 1216 __ testl(EBX, Immediate(kSmiTagMask)); | 1209 __ testl(EBX, Immediate(kSmiTagMask)); |
| 1217 __ j(NOT_ZERO, not_smi_or_overflow, Assembler::kNearJump); | 1210 __ j(NOT_ZERO, not_smi_or_overflow, Assembler::kNearJump); |
| 1218 switch (kind) { | 1211 switch (kind) { |
| 1219 case Token::kADD: { | 1212 case Token::kADD: { |
| 1220 __ addl(EAX, EDI); | 1213 __ addl(EAX, EDI); |
| 1221 __ j(OVERFLOW, not_smi_or_overflow, Assembler::kNearJump); | 1214 __ j(OVERFLOW, not_smi_or_overflow, Assembler::kNearJump); |
| 1222 break; | 1215 break; |
| 1223 } | 1216 } |
| (...skipping 12 matching lines...) |
| 1236 Label done, is_true; | 1229 Label done, is_true; |
| 1237 __ cmpl(EAX, EDI); | 1230 __ cmpl(EAX, EDI); |
| 1238 __ j(EQUAL, &is_true, Assembler::kNearJump); | 1231 __ j(EQUAL, &is_true, Assembler::kNearJump); |
| 1239 __ LoadObject(EAX, Bool::False()); | 1232 __ LoadObject(EAX, Bool::False()); |
| 1240 __ jmp(&done, Assembler::kNearJump); | 1233 __ jmp(&done, Assembler::kNearJump); |
| 1241 __ Bind(&is_true); | 1234 __ Bind(&is_true); |
| 1242 __ LoadObject(EAX, Bool::True()); | 1235 __ LoadObject(EAX, Bool::True()); |
| 1243 __ Bind(&done); | 1236 __ Bind(&done); |
| 1244 break; | 1237 break; |
| 1245 } | 1238 } |
| 1246 default: UNIMPLEMENTED(); | 1239 default: |
| | 1240 UNIMPLEMENTED(); |
| 1247 } | 1241 } |
| 1248 | 1242 |
| 1249 // ECX: IC data object. | 1243 // ECX: IC data object. |
| 1250 __ movl(EBX, FieldAddress(ECX, ICData::ic_data_offset())); | 1244 __ movl(EBX, FieldAddress(ECX, ICData::ic_data_offset())); |
| 1251 // EBX: ic_data_array with check entries: classes and target functions. | 1245 // EBX: ic_data_array with check entries: classes and target functions. |
| 1252 __ leal(EBX, FieldAddress(EBX, Array::data_offset())); | 1246 __ leal(EBX, FieldAddress(EBX, Array::data_offset())); |
| 1253 #if defined(DEBUG) | 1247 #if defined(DEBUG) |
| 1254 // Check that first entry is for Smi/Smi. | 1248 // Check that first entry is for Smi/Smi. |
| 1255 Label error, ok; | 1249 Label error, ok; |
| 1256 const Immediate& imm_smi_cid = | 1250 const Immediate& imm_smi_cid = |
| (...skipping 30 matching lines...) |
| 1287 // - Match found -> jump to target. | 1281 // - Match found -> jump to target. |
| 1288 // - Match not found -> jump to IC miss. | 1282 // - Match not found -> jump to IC miss. |
| 1289 void StubCode::GenerateNArgsCheckInlineCacheStub( | 1283 void StubCode::GenerateNArgsCheckInlineCacheStub( |
| 1290 Assembler* assembler, | 1284 Assembler* assembler, |
| 1291 intptr_t num_args, | 1285 intptr_t num_args, |
| 1292 const RuntimeEntry& handle_ic_miss, | 1286 const RuntimeEntry& handle_ic_miss, |
| 1293 Token::Kind kind, | 1287 Token::Kind kind, |
| 1294 bool optimized) { | 1288 bool optimized) { |
| 1295 ASSERT(num_args > 0); | 1289 ASSERT(num_args > 0); |
| 1296 #if defined(DEBUG) | 1290 #if defined(DEBUG) |
| 1297 { Label ok; | 1291 { |
| | 1292 Label ok; |
| 1298 // Check that the IC data array has NumArgsTested() == num_args. | 1293 // Check that the IC data array has NumArgsTested() == num_args. |
| 1299 // 'NumArgsTested' is stored in the least significant bits of 'state_bits'. | 1294 // 'NumArgsTested' is stored in the least significant bits of 'state_bits'. |
| 1300 __ movl(EBX, FieldAddress(ECX, ICData::state_bits_offset())); | 1295 __ movl(EBX, FieldAddress(ECX, ICData::state_bits_offset())); |
| 1301 ASSERT(ICData::NumArgsTestedShift() == 0); // No shift needed. | 1296 ASSERT(ICData::NumArgsTestedShift() == 0); // No shift needed. |
| 1302 __ andl(EBX, Immediate(ICData::NumArgsTestedMask())); | 1297 __ andl(EBX, Immediate(ICData::NumArgsTestedMask())); |
| 1303 __ cmpl(EBX, Immediate(num_args)); | 1298 __ cmpl(EBX, Immediate(num_args)); |
| 1304 __ j(EQUAL, &ok, Assembler::kNearJump); | 1299 __ j(EQUAL, &ok, Assembler::kNearJump); |
| 1305 __ Stop("Incorrect stub for IC data"); | 1300 __ Stop("Incorrect stub for IC data"); |
| 1306 __ Bind(&ok); | 1301 __ Bind(&ok); |
| 1307 } | 1302 } |
| (...skipping 34 matching lines...) |
| 1342 __ movl(EDI, Address(EBX, 0)); // First class id (smi) to check. | 1337 __ movl(EDI, Address(EBX, 0)); // First class id (smi) to check. |
| 1343 Label loop, update, test, found; | 1338 Label loop, update, test, found; |
| 1344 __ jmp(&test); | 1339 __ jmp(&test); |
| 1345 | 1340 |
| 1346 __ Comment("ICData loop"); | 1341 __ Comment("ICData loop"); |
| 1347 __ Bind(&loop); | 1342 __ Bind(&loop); |
| 1348 for (int i = 0; i < num_args; i++) { | 1343 for (int i = 0; i < num_args; i++) { |
| 1349 if (i > 0) { | 1344 if (i > 0) { |
| 1350 // If not the first, load the next argument's class ID. | 1345 // If not the first, load the next argument's class ID. |
| 1351 __ movl(EAX, FieldAddress(EDX, ArgumentsDescriptor::count_offset())); | 1346 __ movl(EAX, FieldAddress(EDX, ArgumentsDescriptor::count_offset())); |
| 1352 __ movl(EDI, Address(ESP, EAX, TIMES_2, - i * kWordSize)); | 1347 __ movl(EDI, Address(ESP, EAX, TIMES_2, -i * kWordSize)); |
| 1353 __ LoadTaggedClassIdMayBeSmi(EAX, EDI); | 1348 __ LoadTaggedClassIdMayBeSmi(EAX, EDI); |
| 1354 | 1349 |
| 1355 // EAX: next argument class ID (smi). | 1350 // EAX: next argument class ID (smi). |
| 1356 __ movl(EDI, Address(EBX, i * kWordSize)); | 1351 __ movl(EDI, Address(EBX, i * kWordSize)); |
| 1357 // EDI: next class ID to check (smi). | 1352 // EDI: next class ID to check (smi). |
| 1358 } | 1353 } |
| 1359 __ cmpl(EAX, EDI); // Class id match? | 1354 __ cmpl(EAX, EDI); // Class id match? |
| 1360 if (i < (num_args - 1)) { | 1355 if (i < (num_args - 1)) { |
| 1361 __ j(NOT_EQUAL, &update); // Continue. | 1356 __ j(NOT_EQUAL, &update); // Continue. |
| 1362 } else { | 1357 } else { |
| 1363 // Last check, all checks before matched. | 1358 // Last check, all checks before matched. |
| 1364 __ j(EQUAL, &found); // Break. | 1359 __ j(EQUAL, &found); // Break. |
| 1365 } | 1360 } |
| 1366 } | 1361 } |
| 1367 __ Bind(&update); | 1362 __ Bind(&update); |
| 1368 // Reload receiver class ID. It has not been destroyed when num_args == 1. | 1363 // Reload receiver class ID. It has not been destroyed when num_args == 1. |
| 1369 if (num_args > 1) { | 1364 if (num_args > 1) { |
| 1370 __ movl(EAX, FieldAddress(EDX, ArgumentsDescriptor::count_offset())); | 1365 __ movl(EAX, FieldAddress(EDX, ArgumentsDescriptor::count_offset())); |
| 1371 __ movl(EDI, Address(ESP, EAX, TIMES_2, 0)); | 1366 __ movl(EDI, Address(ESP, EAX, TIMES_2, 0)); |
| 1372 __ LoadTaggedClassIdMayBeSmi(EAX, EDI); | 1367 __ LoadTaggedClassIdMayBeSmi(EAX, EDI); |
| 1373 } | 1368 } |
| 1374 | 1369 |
| 1375 const intptr_t entry_size = ICData::TestEntryLengthFor(num_args) * kWordSize; | 1370 const intptr_t entry_size = ICData::TestEntryLengthFor(num_args) * kWordSize; |
| 1376 __ addl(EBX, Immediate(entry_size)); // Next entry. | 1371 __ addl(EBX, Immediate(entry_size)); // Next entry. |
| 1377 __ movl(EDI, Address(EBX, 0)); // Next class ID. | 1372 __ movl(EDI, Address(EBX, 0)); // Next class ID. |
| 1378 | 1373 |
| 1379 __ Bind(&test); | 1374 __ Bind(&test); |
| 1380 __ cmpl(EDI, Immediate(Smi::RawValue(kIllegalCid))); // Done? | 1375 __ cmpl(EDI, Immediate(Smi::RawValue(kIllegalCid))); // Done? |
| 1381 __ j(NOT_EQUAL, &loop, Assembler::kNearJump); | 1376 __ j(NOT_EQUAL, &loop, Assembler::kNearJump); |
| 1382 | 1377 |
| 1383 __ Comment("IC miss"); | 1378 __ Comment("IC miss"); |
| 1384 // Compute address of arguments (first read number of arguments from | 1379 // Compute address of arguments (first read number of arguments from |
| 1385 // arguments descriptor array and then compute address on the stack). | 1380 // arguments descriptor array and then compute address on the stack). |
| 1386 __ movl(EAX, FieldAddress(EDX, ArgumentsDescriptor::count_offset())); | 1381 __ movl(EAX, FieldAddress(EDX, ArgumentsDescriptor::count_offset())); |
| 1387 __ leal(EAX, Address(ESP, EAX, TIMES_2, 0)); // EAX is Smi. | 1382 __ leal(EAX, Address(ESP, EAX, TIMES_2, 0)); // EAX is Smi. |
| 1388 // Create a stub frame as we are pushing some objects on the stack before | 1383 // Create a stub frame as we are pushing some objects on the stack before |
| 1389 // calling into the runtime. | 1384 // calling into the runtime. |
| 1390 __ EnterStubFrame(); | 1385 __ EnterStubFrame(); |
| 1391 __ pushl(EDX); // Preserve arguments descriptor array. | 1386 __ pushl(EDX); // Preserve arguments descriptor array. |
| 1392 __ pushl(ECX); // Preserve IC data object. | 1387 __ pushl(ECX); // Preserve IC data object. |
| 1393 __ pushl(Immediate(0)); // Result slot. | 1388 __ pushl(Immediate(0)); // Result slot. |
| 1394 // Push call arguments. | 1389 // Push call arguments. |
| 1395 for (intptr_t i = 0; i < num_args; i++) { | 1390 for (intptr_t i = 0; i < num_args; i++) { |
| 1396 __ movl(EBX, Address(EAX, -kWordSize * i)); | 1391 __ movl(EBX, Address(EAX, -kWordSize * i)); |
| 1397 __ pushl(EBX); | 1392 __ pushl(EBX); |
| 1398 } | 1393 } |
| 1399 __ pushl(ECX); // Pass IC data object. | 1394 __ pushl(ECX); // Pass IC data object. |
| 1400 __ CallRuntime(handle_ic_miss, num_args + 1); | 1395 __ CallRuntime(handle_ic_miss, num_args + 1); |
| 1401 // Remove the call arguments pushed earlier, including the IC data object. | 1396 // Remove the call arguments pushed earlier, including the IC data object. |
| 1402 for (intptr_t i = 0; i < num_args + 1; i++) { | 1397 for (intptr_t i = 0; i < num_args + 1; i++) { |
| (...skipping 48 matching lines...) |
| 1451 // ECX: Inline cache data object. | 1446 // ECX: Inline cache data object. |
| 1452 // TOS(0): Return address. | 1447 // TOS(0): Return address. |
| 1453 // Inline cache data object structure: | 1448 // Inline cache data object structure: |
| 1454 // 0: function-name | 1449 // 0: function-name |
| 1455 // 1: N, number of arguments checked. | 1450 // 1: N, number of arguments checked. |
| 1456 // 2 .. (length - 1): group of checks, each check containing: | 1451 // 2 .. (length - 1): group of checks, each check containing: |
| 1457 // - N classes. | 1452 // - N classes. |
| 1458 // - 1 target function. | 1453 // - 1 target function. |
| 1459 void StubCode::GenerateOneArgCheckInlineCacheStub(Assembler* assembler) { | 1454 void StubCode::GenerateOneArgCheckInlineCacheStub(Assembler* assembler) { |
| 1460 GenerateUsageCounterIncrement(assembler, EBX); | 1455 GenerateUsageCounterIncrement(assembler, EBX); |
| 1461 GenerateNArgsCheckInlineCacheStub(assembler, 1, | 1456 GenerateNArgsCheckInlineCacheStub( |
| 1462 kInlineCacheMissHandlerOneArgRuntimeEntry, | 1457 assembler, 1, kInlineCacheMissHandlerOneArgRuntimeEntry, Token::kILLEGAL); |
| 1463 Token::kILLEGAL); | |
| 1464 } | 1458 } |
| 1465 | 1459 |
| 1466 | 1460 |
| 1467 void StubCode::GenerateTwoArgsCheckInlineCacheStub(Assembler* assembler) { | 1461 void StubCode::GenerateTwoArgsCheckInlineCacheStub(Assembler* assembler) { |
| 1468 GenerateUsageCounterIncrement(assembler, EBX); | 1462 GenerateUsageCounterIncrement(assembler, EBX); |
| 1469 GenerateNArgsCheckInlineCacheStub(assembler, 2, | 1463 GenerateNArgsCheckInlineCacheStub(assembler, 2, |
| 1470 kInlineCacheMissHandlerTwoArgsRuntimeEntry, | 1464 kInlineCacheMissHandlerTwoArgsRuntimeEntry, |
| 1471 Token::kILLEGAL); | 1465 Token::kILLEGAL); |
| 1472 } | 1466 } |
| 1473 | 1467 |
| 1474 | 1468 |
| 1475 void StubCode::GenerateSmiAddInlineCacheStub(Assembler* assembler) { | 1469 void StubCode::GenerateSmiAddInlineCacheStub(Assembler* assembler) { |
| 1476 GenerateUsageCounterIncrement(assembler, EBX); | 1470 GenerateUsageCounterIncrement(assembler, EBX); |
| 1477 GenerateNArgsCheckInlineCacheStub(assembler, 2, | 1471 GenerateNArgsCheckInlineCacheStub( |
| 1478 kInlineCacheMissHandlerTwoArgsRuntimeEntry, | 1472 assembler, 2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kADD); |
| 1479 Token::kADD); | |
| 1480 } | 1473 } |
| 1481 | 1474 |
| 1482 | 1475 |
| 1483 void StubCode::GenerateSmiSubInlineCacheStub(Assembler* assembler) { | 1476 void StubCode::GenerateSmiSubInlineCacheStub(Assembler* assembler) { |
| 1484 GenerateUsageCounterIncrement(assembler, EBX); | 1477 GenerateUsageCounterIncrement(assembler, EBX); |
| 1485 GenerateNArgsCheckInlineCacheStub(assembler, 2, | 1478 GenerateNArgsCheckInlineCacheStub( |
| 1486 kInlineCacheMissHandlerTwoArgsRuntimeEntry, | 1479 assembler, 2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kSUB); |
| 1487 Token::kSUB); | |
| 1488 } | 1480 } |
| 1489 | 1481 |
| 1490 | 1482 |
| 1491 void StubCode::GenerateSmiEqualInlineCacheStub(Assembler* assembler) { | 1483 void StubCode::GenerateSmiEqualInlineCacheStub(Assembler* assembler) { |
| 1492 GenerateUsageCounterIncrement(assembler, EBX); | 1484 GenerateUsageCounterIncrement(assembler, EBX); |
| 1493 GenerateNArgsCheckInlineCacheStub(assembler, 2, | 1485 GenerateNArgsCheckInlineCacheStub( |
| 1494 kInlineCacheMissHandlerTwoArgsRuntimeEntry, | 1486 assembler, 2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kEQ); |
| 1495 Token::kEQ); | |
| 1496 } | 1487 } |
| 1497 | 1488 |
| 1498 | 1489 |
| 1499 // Use inline cache data array to invoke the target or continue in inline | 1490 // Use inline cache data array to invoke the target or continue in inline |
| 1500 // cache miss handler. Stub for 1-argument check (receiver class). | 1491 // cache miss handler. Stub for 1-argument check (receiver class). |
| 1501 // EDI: function which counter needs to be incremented. | 1492 // EDI: function which counter needs to be incremented. |
| 1502 // ECX: Inline cache data object. | 1493 // ECX: Inline cache data object. |
| 1503 // TOS(0): Return address. | 1494 // TOS(0): Return address. |
| 1504 // Inline cache data object structure: | 1495 // Inline cache data object structure: |
| 1505 // 0: function-name | 1496 // 0: function-name |
| 1506 // 1: N, number of arguments checked. | 1497 // 1: N, number of arguments checked. |
| 1507 // 2 .. (length - 1): group of checks, each check containing: | 1498 // 2 .. (length - 1): group of checks, each check containing: |
| 1508 // - N classes. | 1499 // - N classes. |
| 1509 // - 1 target function. | 1500 // - 1 target function. |
| 1510 void StubCode::GenerateOneArgOptimizedCheckInlineCacheStub( | 1501 void StubCode::GenerateOneArgOptimizedCheckInlineCacheStub( |
| 1511 Assembler* assembler) { | 1502 Assembler* assembler) { |
| 1512 GenerateOptimizedUsageCounterIncrement(assembler); | 1503 GenerateOptimizedUsageCounterIncrement(assembler); |
| 1513 GenerateNArgsCheckInlineCacheStub(assembler, 1, | 1504 GenerateNArgsCheckInlineCacheStub(assembler, 1, |
| 1514 kInlineCacheMissHandlerOneArgRuntimeEntry, | 1505 kInlineCacheMissHandlerOneArgRuntimeEntry, |
| 1515 Token::kILLEGAL, | 1506 Token::kILLEGAL, true /* optimized */); |
| 1516 true /* optimized */); | |
| 1517 } | 1507 } |
| 1518 | 1508 |
| 1519 | 1509 |
| 1520 void StubCode::GenerateTwoArgsOptimizedCheckInlineCacheStub( | 1510 void StubCode::GenerateTwoArgsOptimizedCheckInlineCacheStub( |
| 1521 Assembler* assembler) { | 1511 Assembler* assembler) { |
| 1522 GenerateOptimizedUsageCounterIncrement(assembler); | 1512 GenerateOptimizedUsageCounterIncrement(assembler); |
| 1523 GenerateNArgsCheckInlineCacheStub(assembler, 2, | 1513 GenerateNArgsCheckInlineCacheStub(assembler, 2, |
| 1524 kInlineCacheMissHandlerTwoArgsRuntimeEntry, | 1514 kInlineCacheMissHandlerTwoArgsRuntimeEntry, |
| 1525 Token::kILLEGAL, | 1515 Token::kILLEGAL, true /* optimized */); |
| 1526 true /* optimized */); | |
| 1527 } | 1516 } |
| 1528 | 1517 |
| 1529 | 1518 |
| 1530 // Intermediary stub between a static call and its target. ICData contains | 1519 // Intermediary stub between a static call and its target. ICData contains |
| 1531 // the target function and the call count. | 1520 // the target function and the call count. |
| 1532 // ECX: ICData | 1521 // ECX: ICData |
| 1533 void StubCode::GenerateZeroArgsUnoptimizedStaticCallStub(Assembler* assembler) { | 1522 void StubCode::GenerateZeroArgsUnoptimizedStaticCallStub(Assembler* assembler) { |
| 1534 GenerateUsageCounterIncrement(assembler, EBX); | 1523 GenerateUsageCounterIncrement(assembler, EBX); |
| 1535 | 1524 |
| 1536 #if defined(DEBUG) | 1525 #if defined(DEBUG) |
| 1537 { Label ok; | 1526 { |
| 1527 Label ok; |
| 1538 // Check that the IC data array has NumArgsTested() == num_args. | 1528 // Check that the IC data array has NumArgsTested() == num_args. |
| 1539 // 'NumArgsTested' is stored in the least significant bits of 'state_bits'. | 1529 // 'NumArgsTested' is stored in the least significant bits of 'state_bits'. |
| 1540 __ movl(EBX, FieldAddress(ECX, ICData::state_bits_offset())); | 1530 __ movl(EBX, FieldAddress(ECX, ICData::state_bits_offset())); |
| 1541 ASSERT(ICData::NumArgsTestedShift() == 0); // No shift needed. | 1531 ASSERT(ICData::NumArgsTestedShift() == 0); // No shift needed. |
| 1542 __ andl(EBX, Immediate(ICData::NumArgsTestedMask())); | 1532 __ andl(EBX, Immediate(ICData::NumArgsTestedMask())); |
| 1543 __ cmpl(EBX, Immediate(0)); | 1533 __ cmpl(EBX, Immediate(0)); |
| 1544 __ j(EQUAL, &ok, Assembler::kNearJump); | 1534 __ j(EQUAL, &ok, Assembler::kNearJump); |
| 1545 __ Stop("Incorrect IC data for unoptimized static call"); | 1535 __ Stop("Incorrect IC data for unoptimized static call"); |
| 1546 __ Bind(&ok); | 1536 __ Bind(&ok); |
| 1547 } | 1537 } |
| (...skipping 40 matching lines...) |
| 1588 __ popl(ECX); | 1578 __ popl(ECX); |
| 1589 __ LeaveFrame(); | 1579 __ LeaveFrame(); |
| 1590 __ jmp(&done_stepping, Assembler::kNearJump); | 1580 __ jmp(&done_stepping, Assembler::kNearJump); |
| 1591 } | 1581 } |
| 1592 } | 1582 } |
| 1593 | 1583 |
| 1594 | 1584 |
| 1595 void StubCode::GenerateOneArgUnoptimizedStaticCallStub(Assembler* assembler) { | 1585 void StubCode::GenerateOneArgUnoptimizedStaticCallStub(Assembler* assembler) { |
| 1596 GenerateUsageCounterIncrement(assembler, EBX); | 1586 GenerateUsageCounterIncrement(assembler, EBX); |
| 1597 GenerateNArgsCheckInlineCacheStub( | 1587 GenerateNArgsCheckInlineCacheStub( |
| 1598 assembler, 1, kStaticCallMissHandlerOneArgRuntimeEntry, | 1588 assembler, 1, kStaticCallMissHandlerOneArgRuntimeEntry, Token::kILLEGAL); |
| 1599 Token::kILLEGAL); | |
| 1600 } | 1589 } |
| 1601 | 1590 |
| 1602 | 1591 |
| 1603 void StubCode::GenerateTwoArgsUnoptimizedStaticCallStub(Assembler* assembler) { | 1592 void StubCode::GenerateTwoArgsUnoptimizedStaticCallStub(Assembler* assembler) { |
| 1604 GenerateUsageCounterIncrement(assembler, EBX); | 1593 GenerateUsageCounterIncrement(assembler, EBX); |
| 1605 GenerateNArgsCheckInlineCacheStub(assembler, 2, | 1594 GenerateNArgsCheckInlineCacheStub( |
| 1606 kStaticCallMissHandlerTwoArgsRuntimeEntry, | 1595 assembler, 2, kStaticCallMissHandlerTwoArgsRuntimeEntry, Token::kILLEGAL); |
| 1607 Token::kILLEGAL); | |
| 1608 } | 1596 } |
| 1609 | 1597 |
| 1610 | 1598 |
| 1611 // Stub for compiling a function and jumping to the compiled code. | 1599 // Stub for compiling a function and jumping to the compiled code. |
| 1612 // ECX: IC-Data (for methods). | 1600 // ECX: IC-Data (for methods). |
| 1613 // EDX: Arguments descriptor. | 1601 // EDX: Arguments descriptor. |
| 1614 // EAX: Function. | 1602 // EAX: Function. |
| 1615 void StubCode::GenerateLazyCompileStub(Assembler* assembler) { | 1603 void StubCode::GenerateLazyCompileStub(Assembler* assembler) { |
| 1616 __ EnterStubFrame(); | 1604 __ EnterStubFrame(); |
| 1617 __ pushl(EDX); // Preserve arguments descriptor array. | 1605 __ pushl(EDX); // Preserve arguments descriptor array. |
| (...skipping 74 matching lines...) |
| 1692 const intptr_t kCacheOffsetInBytes = 3 * kWordSize; | 1680 const intptr_t kCacheOffsetInBytes = 3 * kWordSize; |
| 1693 const Immediate& raw_null = | 1681 const Immediate& raw_null = |
| 1694 Immediate(reinterpret_cast<intptr_t>(Object::null())); | 1682 Immediate(reinterpret_cast<intptr_t>(Object::null())); |
| 1695 __ movl(EAX, Address(ESP, kInstanceOffsetInBytes)); | 1683 __ movl(EAX, Address(ESP, kInstanceOffsetInBytes)); |
| 1696 if (n > 1) { | 1684 if (n > 1) { |
| 1697 // Get instance type arguments. | 1685 // Get instance type arguments. |
| 1698 __ LoadClass(ECX, EAX, EBX); | 1686 __ LoadClass(ECX, EAX, EBX); |
| 1699 // Compute instance type arguments into EBX. | 1687 // Compute instance type arguments into EBX. |
| 1700 Label has_no_type_arguments; | 1688 Label has_no_type_arguments; |
| 1701 __ movl(EBX, raw_null); | 1689 __ movl(EBX, raw_null); |
| 1702 __ movl(EDI, FieldAddress(ECX, | 1690 __ movl(EDI, |
| 1703 Class::type_arguments_field_offset_in_words_offset())); | 1691 FieldAddress(ECX, |
| 1692 Class::type_arguments_field_offset_in_words_offset())); |
| 1704 __ cmpl(EDI, Immediate(Class::kNoTypeArguments)); | 1693 __ cmpl(EDI, Immediate(Class::kNoTypeArguments)); |
| 1705 __ j(EQUAL, &has_no_type_arguments, Assembler::kNearJump); | 1694 __ j(EQUAL, &has_no_type_arguments, Assembler::kNearJump); |
| 1706 __ movl(EBX, FieldAddress(EAX, EDI, TIMES_4, 0)); | 1695 __ movl(EBX, FieldAddress(EAX, EDI, TIMES_4, 0)); |
| 1707 __ Bind(&has_no_type_arguments); | 1696 __ Bind(&has_no_type_arguments); |
| 1708 } | 1697 } |
| 1709 __ LoadClassId(ECX, EAX); | 1698 __ LoadClassId(ECX, EAX); |
| 1710 // EAX: instance, ECX: instance class id. | 1699 // EAX: instance, ECX: instance class id. |
| 1711 // EBX: instance type arguments (null if none), used only if n > 1. | 1700 // EBX: instance type arguments (null if none), used only if n > 1. |
| 1712 __ movl(EDX, Address(ESP, kCacheOffsetInBytes)); | 1701 __ movl(EDX, Address(ESP, kCacheOffsetInBytes)); |
| 1713 // EDX: SubtypeTestCache. | 1702 // EDX: SubtypeTestCache. |
| 1714 __ movl(EDX, FieldAddress(EDX, SubtypeTestCache::cache_offset())); | 1703 __ movl(EDX, FieldAddress(EDX, SubtypeTestCache::cache_offset())); |
| 1715 __ addl(EDX, Immediate(Array::data_offset() - kHeapObjectTag)); | 1704 __ addl(EDX, Immediate(Array::data_offset() - kHeapObjectTag)); |
| 1716 | 1705 |
| 1717 Label loop, found, not_found, next_iteration; | 1706 Label loop, found, not_found, next_iteration; |
| 1718 // EDX: Entry start. | 1707 // EDX: Entry start. |
| 1719 // ECX: instance class id. | 1708 // ECX: instance class id. |
| 1720 // EBX: instance type arguments. | 1709 // EBX: instance type arguments. |
| 1721 __ SmiTag(ECX); | 1710 __ SmiTag(ECX); |
| 1722 __ cmpl(ECX, Immediate(Smi::RawValue(kClosureCid))); | 1711 __ cmpl(ECX, Immediate(Smi::RawValue(kClosureCid))); |
| 1723 __ j(NOT_EQUAL, &loop, Assembler::kNearJump); | 1712 __ j(NOT_EQUAL, &loop, Assembler::kNearJump); |
| 1724 __ movl(ECX, FieldAddress(EAX, Closure::function_offset())); | 1713 __ movl(ECX, FieldAddress(EAX, Closure::function_offset())); |
| 1725 // ECX: instance class id as Smi or function. | 1714 // ECX: instance class id as Smi or function. |
| 1726 __ Bind(&loop); | 1715 __ Bind(&loop); |
| 1727 __ movl(EDI, | 1716 __ movl(EDI, Address(EDX, kWordSize * |
| 1728 Address(EDX, | 1717 SubtypeTestCache::kInstanceClassIdOrFunction)); |
| 1729 kWordSize * SubtypeTestCache::kInstanceClassIdOrFunction)); | |
| 1730 __ cmpl(EDI, raw_null); | 1718 __ cmpl(EDI, raw_null); |
| 1731 __ j(EQUAL, ¬_found, Assembler::kNearJump); | 1719 __ j(EQUAL, ¬_found, Assembler::kNearJump); |
| 1732 __ cmpl(EDI, ECX); | 1720 __ cmpl(EDI, ECX); |
| 1733 if (n == 1) { | 1721 if (n == 1) { |
| 1734 __ j(EQUAL, &found, Assembler::kNearJump); | 1722 __ j(EQUAL, &found, Assembler::kNearJump); |
| 1735 } else { | 1723 } else { |
| 1736 __ j(NOT_EQUAL, &next_iteration, Assembler::kNearJump); | 1724 __ j(NOT_EQUAL, &next_iteration, Assembler::kNearJump); |
| 1737 __ movl(EDI, | 1725 __ movl(EDI, |
| 1738 Address(EDX, kWordSize * SubtypeTestCache::kInstanceTypeArguments)); | 1726 Address(EDX, kWordSize * SubtypeTestCache::kInstanceTypeArguments)); |
| 1739 __ cmpl(EDI, EBX); | 1727 __ cmpl(EDI, EBX); |
| 1740 if (n == 2) { | 1728 if (n == 2) { |
| 1741 __ j(EQUAL, &found, Assembler::kNearJump); | 1729 __ j(EQUAL, &found, Assembler::kNearJump); |
| 1742 } else { | 1730 } else { |
| 1743 __ j(NOT_EQUAL, &next_iteration, Assembler::kNearJump); | 1731 __ j(NOT_EQUAL, &next_iteration, Assembler::kNearJump); |
| 1744 __ movl(EDI, | 1732 __ movl(EDI, |
| 1745 Address(EDX, kWordSize * | 1733 Address(EDX, kWordSize * |
| 1746 SubtypeTestCache::kInstantiatorTypeArguments)); | 1734 SubtypeTestCache::kInstantiatorTypeArguments)); |
| 1747 __ cmpl(EDI, Address(ESP, kInstantiatorTypeArgumentsInBytes)); | 1735 __ cmpl(EDI, Address(ESP, kInstantiatorTypeArgumentsInBytes)); |
| 1748 __ j(EQUAL, &found, Assembler::kNearJump); | 1736 __ j(EQUAL, &found, Assembler::kNearJump); |
| 1749 } | 1737 } |
| 1750 } | 1738 } |
| 1751 __ Bind(&next_iteration); | 1739 __ Bind(&next_iteration); |
| 1752 __ addl(EDX, Immediate(kWordSize * SubtypeTestCache::kTestEntryLength)); | 1740 __ addl(EDX, Immediate(kWordSize * SubtypeTestCache::kTestEntryLength)); |
| 1753 __ jmp(&loop, Assembler::kNearJump); | 1741 __ jmp(&loop, Assembler::kNearJump); |
| 1754 // Fall through to not found. | 1742 // Fall through to not found. |
| 1755 __ Bind(¬_found); | 1743 __ Bind(¬_found); |
| 1756 __ movl(ECX, raw_null); | 1744 __ movl(ECX, raw_null); |
| (...skipping 308 matching lines...) |
| 2065 } | 2053 } |
| 2066 | 2054 |
| 2067 | 2055 |
| 2068 void StubCode::GenerateFrameAwaitingMaterializationStub(Assembler* assembler) { | 2056 void StubCode::GenerateFrameAwaitingMaterializationStub(Assembler* assembler) { |
| 2069 __ int3(); | 2057 __ int3(); |
| 2070 } | 2058 } |
| 2071 | 2059 |
| 2072 } // namespace dart | 2060 } // namespace dart |
| 2073 | 2061 |
| 2074 #endif // defined TARGET_ARCH_IA32 | 2062 #endif // defined TARGET_ARCH_IA32 |
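A note for readers of the hunks above: the kADD, kSUB and kEQ inline-cache stubs all try a Smi-only fast path before falling into the shared ICData lookup, and the rows at the top of this section show the kEQ tail, which compares the two operands and loads Bool::True() or Bool::False(). Below is a minimal C++ sketch of that fast path. It is illustrative only: TryFastSmiOp is a hypothetical name, the tagging model is simplified, and the overflow fallback for kADD/kSUB is assumed from the surrounding stub code rather than shown verbatim in this diff.

```cpp
// Illustrative sketch, not VM code: models the Smi fast path of the
// kADD/kSUB/kEQ inline-cache stubs on tagged machine words.
#include <cstdint>
#include <optional>

namespace sketch {

// A Smi is a word with the low tag bit clear; the payload is shifted left.
constexpr intptr_t kSmiTagMask = 1;

inline bool IsSmi(intptr_t raw) { return (raw & kSmiTagMask) == 0; }

enum class Op { kAdd, kSub, kEq };

// Returns the raw result word on success, or std::nullopt when the operands
// are not both Smis or the arithmetic overflows, in which case the stub
// continues with the generic ICData lookup.
std::optional<intptr_t> TryFastSmiOp(Op op, intptr_t left, intptr_t right,
                                     intptr_t true_obj, intptr_t false_obj) {
  if (!IsSmi(left) || !IsSmi(right)) return std::nullopt;
  switch (op) {
    case Op::kAdd: {
      intptr_t result;
      // GCC/Clang builtin; detects signed overflow of the tagged sum.
      if (__builtin_add_overflow(left, right, &result)) return std::nullopt;
      return result;  // Still a validly tagged Smi.
    }
    case Op::kSub: {
      intptr_t result;
      if (__builtin_sub_overflow(left, right, &result)) return std::nullopt;
      return result;
    }
    case Op::kEq:
      // Mirrors the cmpl / LoadObject(Bool::True or False) sequence above.
      return (left == right) ? true_obj : false_obj;
  }
  return std::nullopt;
}

}  // namespace sketch
```

On any failure the stub simply falls through to the generic lookup sketched next.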
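GenerateNArgsCheckInlineCacheStub emits the loop labelled "ICData loop" above: it walks the flat ic_data array whose layout is documented in the stub comments (per check, N class ids followed by a target function), stops at the kIllegalCid sentinel, and calls the miss handler when nothing matches. The following is a plain C++ sketch of that search, assuming each entry is the checked class ids plus a target and a call count; FindICTarget is a hypothetical name and the -1 sentinel value stands in for the VM's real kIllegalCid.

```cpp
// Illustrative sketch, not VM code: what the emitted ICData loop computes.
#include <cstdint>
#include <vector>

namespace sketch {

constexpr intptr_t kIllegalCid = -1;  // Stand-in for the VM's sentinel cid.

// ic_data holds back-to-back entries of
//   [cid_0, ..., cid_{num_args-1}, target, count]
// terminated by an entry whose first word is kIllegalCid.
// Returns the index of the matching target slot, or -1 on an IC miss.
intptr_t FindICTarget(const std::vector<intptr_t>& ic_data,
                      const std::vector<intptr_t>& arg_cids) {
  const size_t num_args = arg_cids.size();
  const size_t entry_length = num_args + 2;  // cids + target + count.
  for (size_t entry = 0; ic_data[entry] != kIllegalCid;
       entry += entry_length) {
    bool match = true;
    for (size_t i = 0; i < num_args; i++) {
      if (ic_data[entry + i] != arg_cids[i]) {
        match = false;  // The stub's "update" label: try the next entry.
        break;
      }
    }
    if (match) {
      return static_cast<intptr_t>(entry + num_args);  // "found": target slot.
    }
  }
  return -1;  // Fell off the end: call the IC miss handler.
}

}  // namespace sketch
```

The real stub additionally reloads the receiver's class id before advancing when num_args > 1, and on a hit it tail-calls the cached target rather than returning an index.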
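The subtype-test cache stub performs the linear probe visible above: each cache entry records the instance's class id (or its function, for closures), the instance type arguments, and the instantiator type arguments, with a null first word terminating the cache; depending on n it compares one, two or three of those keys. Here is a sketch under those assumptions. ProbeSubtypeTestCache and the Entry struct are invented for readability; the real code indexes a raw Array using the SubtypeTestCache constants referenced in the diff.

```cpp
// Illustrative sketch, not VM code: the linear probe emitted for n = 1..3.
namespace sketch {

struct Entry {
  const void* instance_cid_or_function;     // null marks the end of the cache.
  const void* instance_type_arguments;      // compared when n >= 2.
  const void* instantiator_type_arguments;  // compared when n == 3.
  const void* test_result;                  // Bool::True()/False() in the VM.
};

// Returns the cached result, or nullptr when no entry matches and the stub
// must fall back to the runtime.
const void* ProbeSubtypeTestCache(const Entry* entries, int n,
                                  const void* cid_or_function,
                                  const void* type_arguments,
                                  const void* instantiator_type_arguments) {
  for (const Entry* e = entries; e->instance_cid_or_function != nullptr; ++e) {
    if (e->instance_cid_or_function != cid_or_function) continue;
    if (n == 1) return e->test_result;
    if (e->instance_type_arguments != type_arguments) continue;
    if (n == 2) return e->test_result;
    if (e->instantiator_type_arguments != instantiator_type_arguments) continue;
    return e->test_result;
  }
  return nullptr;  // The stub's "not_found" label.
}

}  // namespace sketch
```

On a hit the stub loads the cached result (in rows skipped above); on a miss it leaves null in ECX, which callers interpret as "ask the runtime", matching the not_found path in the diff.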