OLD | NEW |
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" | 5 #include "vm/globals.h" |
6 #if defined(TARGET_ARCH_IA32) | 6 #if defined(TARGET_ARCH_IA32) |
7 | 7 |
8 #include "vm/assembler.h" | 8 #include "vm/assembler.h" |
9 #include "vm/compiler.h" | 9 #include "vm/compiler.h" |
10 #include "vm/dart_entry.h" | 10 #include "vm/dart_entry.h" |
11 #include "vm/flow_graph_compiler.h" | 11 #include "vm/flow_graph_compiler.h" |
| 12 #include "vm/heap.h" |
12 #include "vm/instructions.h" | 13 #include "vm/instructions.h" |
13 #include "vm/heap.h" | |
14 #include "vm/object_store.h" | 14 #include "vm/object_store.h" |
15 #include "vm/resolver.h" | 15 #include "vm/resolver.h" |
16 #include "vm/scavenger.h" | 16 #include "vm/scavenger.h" |
17 #include "vm/stack_frame.h" | 17 #include "vm/stack_frame.h" |
18 #include "vm/stub_code.h" | 18 #include "vm/stub_code.h" |
19 #include "vm/tags.h" | 19 #include "vm/tags.h" |
20 | 20 |
21 | |
22 #define __ assembler-> | 21 #define __ assembler-> |
23 | 22 |
24 namespace dart { | 23 namespace dart { |
25 | 24 |
26 DEFINE_FLAG(bool, inline_alloc, true, "Inline allocation of objects."); | 25 DEFINE_FLAG(bool, inline_alloc, true, "Inline allocation of objects."); |
27 DEFINE_FLAG(bool, | 26 DEFINE_FLAG(bool, |
28 use_slow_path, | 27 use_slow_path, |
29 false, | 28 false, |
30 "Set to true for debugging & verifying the slow paths."); | 29 "Set to true for debugging & verifying the slow paths."); |
31 DECLARE_FLAG(bool, trace_optimized_ic_calls); | 30 DECLARE_FLAG(bool, trace_optimized_ic_calls); |
(...skipping 54 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
86 | 85 |
87 __ movl(Assembler::VMTagAddress(), Immediate(VMTag::kDartTagId)); | 86 __ movl(Assembler::VMTagAddress(), Immediate(VMTag::kDartTagId)); |
88 | 87 |
89 // Reset exit frame information in Isolate structure. | 88 // Reset exit frame information in Isolate structure. |
90 __ movl(Address(THR, Thread::top_exit_frame_info_offset()), Immediate(0)); | 89 __ movl(Address(THR, Thread::top_exit_frame_info_offset()), Immediate(0)); |
91 | 90 |
92 __ LeaveFrame(); | 91 __ LeaveFrame(); |
93 __ ret(); | 92 __ ret(); |
94 } | 93 } |
95 | 94 |
96 | |
97 // Print the stop message. | 95 // Print the stop message. |
98 DEFINE_LEAF_RUNTIME_ENTRY(void, PrintStopMessage, 1, const char* message) { | 96 DEFINE_LEAF_RUNTIME_ENTRY(void, PrintStopMessage, 1, const char* message) { |
99 OS::Print("Stop message: %s\n", message); | 97 OS::Print("Stop message: %s\n", message); |
100 } | 98 } |
101 END_LEAF_RUNTIME_ENTRY | 99 END_LEAF_RUNTIME_ENTRY |
102 | 100 |
103 | |
104 // Input parameters: | 101 // Input parameters: |
105 // ESP : points to return address. | 102 // ESP : points to return address. |
106 // EAX : stop message (const char*). | 103 // EAX : stop message (const char*). |
107 // Must preserve all registers, except EAX. | 104 // Must preserve all registers, except EAX. |
108 void StubCode::GeneratePrintStopMessageStub(Assembler* assembler) { | 105 void StubCode::GeneratePrintStopMessageStub(Assembler* assembler) { |
109 __ EnterCallRuntimeFrame(1 * kWordSize); | 106 __ EnterCallRuntimeFrame(1 * kWordSize); |
110 __ movl(Address(ESP, 0), EAX); | 107 __ movl(Address(ESP, 0), EAX); |
111 __ CallRuntime(kPrintStopMessageRuntimeEntry, 1); | 108 __ CallRuntime(kPrintStopMessageRuntimeEntry, 1); |
112 __ LeaveCallRuntimeFrame(); | 109 __ LeaveCallRuntimeFrame(); |
113 __ ret(); | 110 __ ret(); |
114 } | 111 } |
115 | 112 |
116 | |
117 // Input parameters: | 113 // Input parameters: |
118 // ESP : points to return address. | 114 // ESP : points to return address. |
119 // ESP + 4 : address of return value. | 115 // ESP + 4 : address of return value. |
120 // EAX : address of first argument in argument array. | 116 // EAX : address of first argument in argument array. |
121 // ECX : address of the native function to call. | 117 // ECX : address of the native function to call. |
122 // EDX : argc_tag including number of arguments and function kind. | 118 // EDX : argc_tag including number of arguments and function kind. |
123 static void GenerateCallNativeWithWrapperStub(Assembler* assembler, | 119 static void GenerateCallNativeWithWrapperStub(Assembler* assembler, |
124 ExternalLabel* wrapper) { | 120 ExternalLabel* wrapper) { |
125 const intptr_t native_args_struct_offset = | 121 const intptr_t native_args_struct_offset = |
126 NativeEntry::kNumCallWrapperArguments * kWordSize; | 122 NativeEntry::kNumCallWrapperArguments * kWordSize; |
127 const intptr_t thread_offset = | 123 const intptr_t thread_offset = |
128 NativeArguments::thread_offset() + native_args_struct_offset; | 124 NativeArguments::thread_offset() + native_args_struct_offset; |
129 const intptr_t argc_tag_offset = | 125 const intptr_t argc_tag_offset = |
130 NativeArguments::argc_tag_offset() + native_args_struct_offset; | 126 NativeArguments::argc_tag_offset() + native_args_struct_offset; |
131 const intptr_t argv_offset = | 127 const intptr_t argv_offset = |
132 NativeArguments::argv_offset() + native_args_struct_offset; | 128 NativeArguments::argv_offset() + native_args_struct_offset; |
133 const intptr_t retval_offset = | 129 const intptr_t retval_offset = |
134 NativeArguments::retval_offset() + native_args_struct_offset; | 130 NativeArguments::retval_offset() + native_args_struct_offset; |
135 | 131 |
136 __ EnterFrame(0); | 132 __ EnterFrame(0); |
137 | 133 |
138 | |
139 // Save exit frame information to enable stack walking as we are about | 134 // Save exit frame information to enable stack walking as we are about |
140 // to transition to dart VM code. | 135 // to transition to dart VM code. |
141 __ movl(Address(THR, Thread::top_exit_frame_info_offset()), EBP); | 136 __ movl(Address(THR, Thread::top_exit_frame_info_offset()), EBP); |
142 | 137 |
143 #if defined(DEBUG) | 138 #if defined(DEBUG) |
144 { | 139 { |
145 Label ok; | 140 Label ok; |
146 // Check that we are always entering from Dart code. | 141 // Check that we are always entering from Dart code. |
147 __ cmpl(Assembler::VMTagAddress(), Immediate(VMTag::kDartTagId)); | 142 __ cmpl(Assembler::VMTagAddress(), Immediate(VMTag::kDartTagId)); |
148 __ j(EQUAL, &ok, Assembler::kNearJump); | 143 __ j(EQUAL, &ok, Assembler::kNearJump); |
(...skipping 28 matching lines...) Expand all Loading... |
177 | 172 |
178 __ movl(Assembler::VMTagAddress(), Immediate(VMTag::kDartTagId)); | 173 __ movl(Assembler::VMTagAddress(), Immediate(VMTag::kDartTagId)); |
179 | 174 |
180 // Reset exit frame information in Isolate structure. | 175 // Reset exit frame information in Isolate structure. |
181 __ movl(Address(THR, Thread::top_exit_frame_info_offset()), Immediate(0)); | 176 __ movl(Address(THR, Thread::top_exit_frame_info_offset()), Immediate(0)); |
182 | 177 |
183 __ LeaveFrame(); | 178 __ LeaveFrame(); |
184 __ ret(); | 179 __ ret(); |
185 } | 180 } |
186 | 181 |
187 | |
188 void StubCode::GenerateCallNoScopeNativeStub(Assembler* assembler) { | 182 void StubCode::GenerateCallNoScopeNativeStub(Assembler* assembler) { |
189 ExternalLabel wrapper(NativeEntry::NoScopeNativeCallWrapperEntry()); | 183 ExternalLabel wrapper(NativeEntry::NoScopeNativeCallWrapperEntry()); |
190 GenerateCallNativeWithWrapperStub(assembler, &wrapper); | 184 GenerateCallNativeWithWrapperStub(assembler, &wrapper); |
191 } | 185 } |
192 | 186 |
193 | |
194 void StubCode::GenerateCallAutoScopeNativeStub(Assembler* assembler) { | 187 void StubCode::GenerateCallAutoScopeNativeStub(Assembler* assembler) { |
195 ExternalLabel wrapper(NativeEntry::AutoScopeNativeCallWrapperEntry()); | 188 ExternalLabel wrapper(NativeEntry::AutoScopeNativeCallWrapperEntry()); |
196 GenerateCallNativeWithWrapperStub(assembler, &wrapper); | 189 GenerateCallNativeWithWrapperStub(assembler, &wrapper); |
197 } | 190 } |
198 | 191 |
199 | |
200 // Input parameters: | 192 // Input parameters: |
201 // ESP : points to return address. | 193 // ESP : points to return address. |
202 // ESP + 4 : address of return value. | 194 // ESP + 4 : address of return value. |
203 // EAX : address of first argument in argument array. | 195 // EAX : address of first argument in argument array. |
204 // ECX : address of the native function to call. | 196 // ECX : address of the native function to call. |
205 // EDX : argc_tag including number of arguments and function kind. | 197 // EDX : argc_tag including number of arguments and function kind. |
206 void StubCode::GenerateCallBootstrapNativeStub(Assembler* assembler) { | 198 void StubCode::GenerateCallBootstrapNativeStub(Assembler* assembler) { |
207 const intptr_t native_args_struct_offset = kWordSize; | 199 const intptr_t native_args_struct_offset = kWordSize; |
208 const intptr_t thread_offset = | 200 const intptr_t thread_offset = |
209 NativeArguments::thread_offset() + native_args_struct_offset; | 201 NativeArguments::thread_offset() + native_args_struct_offset; |
(...skipping 44 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
254 | 246 |
255 __ movl(Assembler::VMTagAddress(), Immediate(VMTag::kDartTagId)); | 247 __ movl(Assembler::VMTagAddress(), Immediate(VMTag::kDartTagId)); |
256 | 248 |
257 // Reset exit frame information in Isolate structure. | 249 // Reset exit frame information in Isolate structure. |
258 __ movl(Address(THR, Thread::top_exit_frame_info_offset()), Immediate(0)); | 250 __ movl(Address(THR, Thread::top_exit_frame_info_offset()), Immediate(0)); |
259 | 251 |
260 __ LeaveFrame(); | 252 __ LeaveFrame(); |
261 __ ret(); | 253 __ ret(); |
262 } | 254 } |
263 | 255 |
264 | |
265 // Input parameters: | 256 // Input parameters: |
266 // EDX: arguments descriptor array. | 257 // EDX: arguments descriptor array. |
267 void StubCode::GenerateCallStaticFunctionStub(Assembler* assembler) { | 258 void StubCode::GenerateCallStaticFunctionStub(Assembler* assembler) { |
268 __ EnterStubFrame(); | 259 __ EnterStubFrame(); |
269 __ pushl(EDX); // Preserve arguments descriptor array. | 260 __ pushl(EDX); // Preserve arguments descriptor array. |
270 __ pushl(Immediate(0)); // Setup space on stack for return value. | 261 __ pushl(Immediate(0)); // Setup space on stack for return value. |
271 __ CallRuntime(kPatchStaticCallRuntimeEntry, 0); | 262 __ CallRuntime(kPatchStaticCallRuntimeEntry, 0); |
272 __ popl(EAX); // Get Code object result. | 263 __ popl(EAX); // Get Code object result. |
273 __ popl(EDX); // Restore arguments descriptor array. | 264 __ popl(EDX); // Restore arguments descriptor array. |
274 // Remove the stub frame as we are about to jump to the dart function. | 265 // Remove the stub frame as we are about to jump to the dart function. |
275 __ LeaveFrame(); | 266 __ LeaveFrame(); |
276 | 267 |
277 __ movl(ECX, FieldAddress(EAX, Code::entry_point_offset())); | 268 __ movl(ECX, FieldAddress(EAX, Code::entry_point_offset())); |
278 __ jmp(ECX); | 269 __ jmp(ECX); |
279 } | 270 } |
280 | 271 |
281 | |
282 // Called from a static call only when an invalid code has been entered | 272 // Called from a static call only when an invalid code has been entered |
283 // (invalid because its function was optimized or deoptimized). | 273 // (invalid because its function was optimized or deoptimized). |
284 // EDX: arguments descriptor array. | 274 // EDX: arguments descriptor array. |
285 void StubCode::GenerateFixCallersTargetStub(Assembler* assembler) { | 275 void StubCode::GenerateFixCallersTargetStub(Assembler* assembler) { |
286 // Create a stub frame as we are pushing some objects on the stack before | 276 // Create a stub frame as we are pushing some objects on the stack before |
287 // calling into the runtime. | 277 // calling into the runtime. |
288 __ EnterStubFrame(); | 278 __ EnterStubFrame(); |
289 __ pushl(EDX); // Preserve arguments descriptor array. | 279 __ pushl(EDX); // Preserve arguments descriptor array. |
290 __ pushl(Immediate(0)); // Setup space on stack for return value. | 280 __ pushl(Immediate(0)); // Setup space on stack for return value. |
291 __ CallRuntime(kFixCallersTargetRuntimeEntry, 0); | 281 __ CallRuntime(kFixCallersTargetRuntimeEntry, 0); |
292 __ popl(EAX); // Get Code object. | 282 __ popl(EAX); // Get Code object. |
293 __ popl(EDX); // Restore arguments descriptor array. | 283 __ popl(EDX); // Restore arguments descriptor array. |
294 __ movl(EAX, FieldAddress(EAX, Code::entry_point_offset())); | 284 __ movl(EAX, FieldAddress(EAX, Code::entry_point_offset())); |
295 __ LeaveFrame(); | 285 __ LeaveFrame(); |
296 __ jmp(EAX); | 286 __ jmp(EAX); |
297 __ int3(); | 287 __ int3(); |
298 } | 288 } |
299 | 289 |
300 | |
301 // Called from object allocate instruction when the allocation stub has been | 290 // Called from object allocate instruction when the allocation stub has been |
302 // disabled. | 291 // disabled. |
303 void StubCode::GenerateFixAllocationStubTargetStub(Assembler* assembler) { | 292 void StubCode::GenerateFixAllocationStubTargetStub(Assembler* assembler) { |
304 __ EnterStubFrame(); | 293 __ EnterStubFrame(); |
305 __ pushl(Immediate(0)); // Setup space on stack for return value. | 294 __ pushl(Immediate(0)); // Setup space on stack for return value. |
306 __ CallRuntime(kFixAllocationStubTargetRuntimeEntry, 0); | 295 __ CallRuntime(kFixAllocationStubTargetRuntimeEntry, 0); |
307 __ popl(EAX); // Get Code object. | 296 __ popl(EAX); // Get Code object. |
308 __ movl(EAX, FieldAddress(EAX, Code::entry_point_offset())); | 297 __ movl(EAX, FieldAddress(EAX, Code::entry_point_offset())); |
309 __ LeaveFrame(); | 298 __ LeaveFrame(); |
310 __ jmp(EAX); | 299 __ jmp(EAX); |
311 __ int3(); | 300 __ int3(); |
312 } | 301 } |
313 | 302 |
314 | |
315 // Input parameters: | 303 // Input parameters: |
316 // EDX: smi-tagged argument count, may be zero. | 304 // EDX: smi-tagged argument count, may be zero. |
317 // EBP[kParamEndSlotFromFp + 1]: last argument. | 305 // EBP[kParamEndSlotFromFp + 1]: last argument. |
318 // Uses EAX, EBX, ECX, EDX, EDI. | 306 // Uses EAX, EBX, ECX, EDX, EDI. |
319 static void PushArgumentsArray(Assembler* assembler) { | 307 static void PushArgumentsArray(Assembler* assembler) { |
320 // Allocate array to store arguments of caller. | 308 // Allocate array to store arguments of caller. |
321 const Immediate& raw_null = | 309 const Immediate& raw_null = |
322 Immediate(reinterpret_cast<intptr_t>(Object::null())); | 310 Immediate(reinterpret_cast<intptr_t>(Object::null())); |
323 __ movl(ECX, raw_null); // Null element type for raw Array. | 311 __ movl(ECX, raw_null); // Null element type for raw Array. |
324 __ Call(*StubCode::AllocateArray_entry()); | 312 __ Call(*StubCode::AllocateArray_entry()); |
(...skipping 11 matching lines...) Expand all Loading... |
336 __ movl(EDI, Address(EBX, 0)); | 324 __ movl(EDI, Address(EBX, 0)); |
337 // Generational barrier is needed, array is not necessarily in new space. | 325 // Generational barrier is needed, array is not necessarily in new space. |
338 __ StoreIntoObject(EAX, Address(ECX, 0), EDI); | 326 __ StoreIntoObject(EAX, Address(ECX, 0), EDI); |
339 __ AddImmediate(ECX, Immediate(kWordSize)); | 327 __ AddImmediate(ECX, Immediate(kWordSize)); |
340 __ AddImmediate(EBX, Immediate(-kWordSize)); | 328 __ AddImmediate(EBX, Immediate(-kWordSize)); |
341 __ Bind(&loop_condition); | 329 __ Bind(&loop_condition); |
342 __ decl(EDX); | 330 __ decl(EDX); |
343 __ j(POSITIVE, &loop, Assembler::kNearJump); | 331 __ j(POSITIVE, &loop, Assembler::kNearJump); |
344 } | 332 } |
345 | 333 |
346 | |
347 // Used by eager and lazy deoptimization. Preserve result in EAX if necessary. | 334 // Used by eager and lazy deoptimization. Preserve result in EAX if necessary. |
348 // This stub translates optimized frame into unoptimized frame. The optimized | 335 // This stub translates optimized frame into unoptimized frame. The optimized |
349 // frame can contain values in registers and on stack, the unoptimized | 336 // frame can contain values in registers and on stack, the unoptimized |
350 // frame contains all values on stack. | 337 // frame contains all values on stack. |
351 // Deoptimization occurs in following steps: | 338 // Deoptimization occurs in following steps: |
352 // - Push all registers that can contain values. | 339 // - Push all registers that can contain values. |
353 // - Call C routine to copy the stack and saved registers into temporary buffer. | 340 // - Call C routine to copy the stack and saved registers into temporary buffer. |
354 // - Adjust caller's frame to correct unoptimized frame size. | 341 // - Adjust caller's frame to correct unoptimized frame size. |
355 // - Fill the unoptimized frame. | 342 // - Fill the unoptimized frame. |
356 // - Materialize objects that require allocation (e.g. Double instances). | 343 // - Materialize objects that require allocation (e.g. Double instances). |
(...skipping 111 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
468 __ popl(EAX); // Restore stacktrace. | 455 __ popl(EAX); // Restore stacktrace. |
469 } | 456 } |
470 __ LeaveFrame(); | 457 __ LeaveFrame(); |
471 | 458 |
472 __ popl(ECX); // Pop return address. | 459 __ popl(ECX); // Pop return address. |
473 __ addl(ESP, EBX); // Remove materialization arguments. | 460 __ addl(ESP, EBX); // Remove materialization arguments. |
474 __ pushl(ECX); // Push return address. | 461 __ pushl(ECX); // Push return address. |
475 // The caller is responsible for emitting the return instruction. | 462 // The caller is responsible for emitting the return instruction. |
476 } | 463 } |
477 | 464 |
478 | |
479 // EAX: result, must be preserved | 465 // EAX: result, must be preserved |
480 void StubCode::GenerateDeoptimizeLazyFromReturnStub(Assembler* assembler) { | 466 void StubCode::GenerateDeoptimizeLazyFromReturnStub(Assembler* assembler) { |
481 // Return address for "call" to deopt stub. | 467 // Return address for "call" to deopt stub. |
482 __ pushl(Immediate(kZapReturnAddress)); | 468 __ pushl(Immediate(kZapReturnAddress)); |
483 GenerateDeoptimizationSequence(assembler, kLazyDeoptFromReturn); | 469 GenerateDeoptimizationSequence(assembler, kLazyDeoptFromReturn); |
484 __ ret(); | 470 __ ret(); |
485 } | 471 } |
486 | 472 |
487 | |
488 // EAX: exception, must be preserved | 473 // EAX: exception, must be preserved |
489 // EDX: stacktrace, must be preserved | 474 // EDX: stacktrace, must be preserved |
490 void StubCode::GenerateDeoptimizeLazyFromThrowStub(Assembler* assembler) { | 475 void StubCode::GenerateDeoptimizeLazyFromThrowStub(Assembler* assembler) { |
491 // Return address for "call" to deopt stub. | 476 // Return address for "call" to deopt stub. |
492 __ pushl(Immediate(kZapReturnAddress)); | 477 __ pushl(Immediate(kZapReturnAddress)); |
493 GenerateDeoptimizationSequence(assembler, kLazyDeoptFromThrow); | 478 GenerateDeoptimizationSequence(assembler, kLazyDeoptFromThrow); |
494 __ ret(); | 479 __ ret(); |
495 } | 480 } |
496 | 481 |
497 | |
498 void StubCode::GenerateDeoptimizeStub(Assembler* assembler) { | 482 void StubCode::GenerateDeoptimizeStub(Assembler* assembler) { |
499 GenerateDeoptimizationSequence(assembler, kEagerDeopt); | 483 GenerateDeoptimizationSequence(assembler, kEagerDeopt); |
500 __ ret(); | 484 __ ret(); |
501 } | 485 } |
502 | 486 |
503 | |
504 static void GenerateDispatcherCode(Assembler* assembler, | 487 static void GenerateDispatcherCode(Assembler* assembler, |
505 Label* call_target_function) { | 488 Label* call_target_function) { |
506 __ Comment("NoSuchMethodDispatch"); | 489 __ Comment("NoSuchMethodDispatch"); |
507 // When lazily generated invocation dispatchers are disabled, the | 490 // When lazily generated invocation dispatchers are disabled, the |
508 // miss-handler may return null. | 491 // miss-handler may return null. |
509 const Immediate& raw_null = | 492 const Immediate& raw_null = |
510 Immediate(reinterpret_cast<intptr_t>(Object::null())); | 493 Immediate(reinterpret_cast<intptr_t>(Object::null())); |
511 __ cmpl(EAX, raw_null); | 494 __ cmpl(EAX, raw_null); |
512 __ j(NOT_EQUAL, call_target_function); | 495 __ j(NOT_EQUAL, call_target_function); |
513 __ EnterStubFrame(); | 496 __ EnterStubFrame(); |
(...skipping 18 matching lines...) Expand all Loading... |
532 // EDX: Smi-tagged arguments array length. | 515 // EDX: Smi-tagged arguments array length. |
533 PushArgumentsArray(assembler); | 516 PushArgumentsArray(assembler); |
534 const intptr_t kNumArgs = 4; | 517 const intptr_t kNumArgs = 4; |
535 __ CallRuntime(kInvokeNoSuchMethodDispatcherRuntimeEntry, kNumArgs); | 518 __ CallRuntime(kInvokeNoSuchMethodDispatcherRuntimeEntry, kNumArgs); |
536 __ Drop(4); | 519 __ Drop(4); |
537 __ popl(EAX); // Return value. | 520 __ popl(EAX); // Return value. |
538 __ LeaveFrame(); | 521 __ LeaveFrame(); |
539 __ ret(); | 522 __ ret(); |
540 } | 523 } |
541 | 524 |
542 | |
543 void StubCode::GenerateMegamorphicMissStub(Assembler* assembler) { | 525 void StubCode::GenerateMegamorphicMissStub(Assembler* assembler) { |
544 __ EnterStubFrame(); | 526 __ EnterStubFrame(); |
545 // Load the receiver into EAX. The argument count in the arguments | 527 // Load the receiver into EAX. The argument count in the arguments |
546 // descriptor in EDX is a smi. | 528 // descriptor in EDX is a smi. |
547 __ movl(EAX, FieldAddress(EDX, ArgumentsDescriptor::count_offset())); | 529 __ movl(EAX, FieldAddress(EDX, ArgumentsDescriptor::count_offset())); |
548 // Two words (saved fp, stub's pc marker) in the stack above the return | 530 // Two words (saved fp, stub's pc marker) in the stack above the return |
549 // address. | 531 // address. |
550 __ movl(EAX, Address(ESP, EAX, TIMES_2, 2 * kWordSize)); | 532 __ movl(EAX, Address(ESP, EAX, TIMES_2, 2 * kWordSize)); |
551 // Preserve IC data and arguments descriptor. | 533 // Preserve IC data and arguments descriptor. |
552 __ pushl(ECX); | 534 __ pushl(ECX); |
(...skipping 16 matching lines...) Expand all Loading... |
569 if (!FLAG_lazy_dispatchers) { | 551 if (!FLAG_lazy_dispatchers) { |
570 Label call_target_function; | 552 Label call_target_function; |
571 GenerateDispatcherCode(assembler, &call_target_function); | 553 GenerateDispatcherCode(assembler, &call_target_function); |
572 __ Bind(&call_target_function); | 554 __ Bind(&call_target_function); |
573 } | 555 } |
574 | 556 |
575 __ movl(EBX, FieldAddress(EAX, Function::entry_point_offset())); | 557 __ movl(EBX, FieldAddress(EAX, Function::entry_point_offset())); |
576 __ jmp(EBX); | 558 __ jmp(EBX); |
577 } | 559 } |
578 | 560 |
579 | |
580 // Called for inline allocation of arrays. | 561 // Called for inline allocation of arrays. |
581 // Input parameters: | 562 // Input parameters: |
582 // EDX : Array length as Smi (must be preserved). | 563 // EDX : Array length as Smi (must be preserved). |
583 // ECX : array element type (either NULL or an instantiated type). | 564 // ECX : array element type (either NULL or an instantiated type). |
584 // Uses EAX, EBX, ECX, EDI as temporary registers. | 565 // Uses EAX, EBX, ECX, EDI as temporary registers. |
585 // The newly allocated object is returned in EAX. | 566 // The newly allocated object is returned in EAX. |
586 void StubCode::GenerateAllocateArrayStub(Assembler* assembler) { | 567 void StubCode::GenerateAllocateArrayStub(Assembler* assembler) { |
587 Label slow_case; | 568 Label slow_case; |
588 // Compute the size to be allocated, it is based on the array length | 569 // Compute the size to be allocated, it is based on the array length |
589 // and is computed as: | 570 // and is computed as: |
(...skipping 117 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
707 __ pushl(EDX); // Array length as Smi. | 688 __ pushl(EDX); // Array length as Smi. |
708 __ pushl(ECX); // Element type. | 689 __ pushl(ECX); // Element type. |
709 __ CallRuntime(kAllocateArrayRuntimeEntry, 2); | 690 __ CallRuntime(kAllocateArrayRuntimeEntry, 2); |
710 __ popl(EAX); // Pop element type argument. | 691 __ popl(EAX); // Pop element type argument. |
711 __ popl(EDX); // Pop array length argument (preserved). | 692 __ popl(EDX); // Pop array length argument (preserved). |
712 __ popl(EAX); // Pop return value from return slot. | 693 __ popl(EAX); // Pop return value from return slot. |
713 __ LeaveFrame(); | 694 __ LeaveFrame(); |
714 __ ret(); | 695 __ ret(); |
715 } | 696 } |
716 | 697 |
717 | |
718 // Called when invoking dart code from C++ (VM code). | 698 // Called when invoking dart code from C++ (VM code). |
719 // Input parameters: | 699 // Input parameters: |
720 // ESP : points to return address. | 700 // ESP : points to return address. |
721 // ESP + 4 : code object of the dart function to call. | 701 // ESP + 4 : code object of the dart function to call. |
722 // ESP + 8 : arguments descriptor array. | 702 // ESP + 8 : arguments descriptor array. |
723 // ESP + 12 : arguments array. | 703 // ESP + 12 : arguments array. |
724 // ESP + 16 : current thread. | 704 // ESP + 16 : current thread. |
725 // Uses EAX, EDX, ECX, EDI as temporary registers. | 705 // Uses EAX, EDX, ECX, EDI as temporary registers. |
726 void StubCode::GenerateInvokeDartCodeStub(Assembler* assembler) { | 706 void StubCode::GenerateInvokeDartCodeStub(Assembler* assembler) { |
727 const intptr_t kTargetCodeOffset = 2 * kWordSize; | 707 const intptr_t kTargetCodeOffset = 2 * kWordSize; |
(...skipping 89 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
817 __ popl(EDI); | 797 __ popl(EDI); |
818 __ popl(ESI); | 798 __ popl(ESI); |
819 __ popl(EBX); | 799 __ popl(EBX); |
820 | 800 |
821 // Restore the frame pointer. | 801 // Restore the frame pointer. |
822 __ LeaveFrame(); | 802 __ LeaveFrame(); |
823 | 803 |
824 __ ret(); | 804 __ ret(); |
825 } | 805 } |
826 | 806 |
827 | |
828 // Called for inline allocation of contexts. | 807 // Called for inline allocation of contexts. |
829 // Input: | 808 // Input: |
830 // EDX: number of context variables. | 809 // EDX: number of context variables. |
831 // Output: | 810 // Output: |
832 // EAX: new allocated RawContext object. | 811 // EAX: new allocated RawContext object. |
833 // EBX and EDX are destroyed. | 812 // EBX and EDX are destroyed. |
834 void StubCode::GenerateAllocateContextStub(Assembler* assembler) { | 813 void StubCode::GenerateAllocateContextStub(Assembler* assembler) { |
835 if (FLAG_inline_alloc) { | 814 if (FLAG_inline_alloc) { |
836 Label slow_case; | 815 Label slow_case; |
837 // First compute the rounded instance size. | 816 // First compute the rounded instance size. |
(...skipping 110 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
948 __ pushl(EDX); | 927 __ pushl(EDX); |
949 __ CallRuntime(kAllocateContextRuntimeEntry, 1); // Allocate context. | 928 __ CallRuntime(kAllocateContextRuntimeEntry, 1); // Allocate context. |
950 __ popl(EAX); // Pop number of context variables argument. | 929 __ popl(EAX); // Pop number of context variables argument. |
951 __ popl(EAX); // Pop the new context object. | 930 __ popl(EAX); // Pop the new context object. |
952 // EAX: new object | 931 // EAX: new object |
953 // Restore the frame pointer. | 932 // Restore the frame pointer. |
954 __ LeaveFrame(); | 933 __ LeaveFrame(); |
955 __ ret(); | 934 __ ret(); |
956 } | 935 } |
957 | 936 |
958 | |
959 // Helper stub to implement Assembler::StoreIntoObject. | 937 // Helper stub to implement Assembler::StoreIntoObject. |
960 // Input parameters: | 938 // Input parameters: |
961 // EDX: Address being stored | 939 // EDX: Address being stored |
962 void StubCode::GenerateUpdateStoreBufferStub(Assembler* assembler) { | 940 void StubCode::GenerateUpdateStoreBufferStub(Assembler* assembler) { |
963 // Save values being destroyed. | 941 // Save values being destroyed. |
964 __ pushl(EAX); | 942 __ pushl(EAX); |
965 __ pushl(ECX); | 943 __ pushl(ECX); |
966 | 944 |
967 Label add_to_buffer; | 945 Label add_to_buffer; |
968 // Check whether this object has already been remembered. Skip adding to the | 946 // Check whether this object has already been remembered. Skip adding to the |
(...skipping 47 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1016 // Setup frame, push callee-saved registers. | 994 // Setup frame, push callee-saved registers. |
1017 | 995 |
1018 __ EnterCallRuntimeFrame(1 * kWordSize); | 996 __ EnterCallRuntimeFrame(1 * kWordSize); |
1019 __ movl(Address(ESP, 0), THR); // Push the thread as the only argument. | 997 __ movl(Address(ESP, 0), THR); // Push the thread as the only argument. |
1020 __ CallRuntime(kStoreBufferBlockProcessRuntimeEntry, 1); | 998 __ CallRuntime(kStoreBufferBlockProcessRuntimeEntry, 1); |
1021 // Restore callee-saved registers, tear down frame. | 999 // Restore callee-saved registers, tear down frame. |
1022 __ LeaveCallRuntimeFrame(); | 1000 __ LeaveCallRuntimeFrame(); |
1023 __ ret(); | 1001 __ ret(); |
1024 } | 1002 } |
1025 | 1003 |
1026 | |
1027 // Called for inline allocation of objects. | 1004 // Called for inline allocation of objects. |
1028 // Input parameters: | 1005 // Input parameters: |
1029 // ESP + 4 : type arguments object (only if class is parameterized). | 1006 // ESP + 4 : type arguments object (only if class is parameterized). |
1030 // ESP : points to return address. | 1007 // ESP : points to return address. |
1031 // Uses EAX, EBX, ECX, EDX, EDI as temporary registers. | 1008 // Uses EAX, EBX, ECX, EDX, EDI as temporary registers. |
1032 // Returns patch_code_pc offset where patching code for disabling the stub | 1009 // Returns patch_code_pc offset where patching code for disabling the stub |
1033 // has been generated (similar to regularly generated Dart code). | 1010 // has been generated (similar to regularly generated Dart code). |
1034 void StubCode::GenerateAllocationStubForClass(Assembler* assembler, | 1011 void StubCode::GenerateAllocationStubForClass(Assembler* assembler, |
1035 const Class& cls) { | 1012 const Class& cls) { |
1036 const intptr_t kObjectTypeArgumentsOffset = 1 * kWordSize; | 1013 const intptr_t kObjectTypeArgumentsOffset = 1 * kWordSize; |
(...skipping 107 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1144 __ CallRuntime(kAllocateObjectRuntimeEntry, 2); // Allocate object. | 1121 __ CallRuntime(kAllocateObjectRuntimeEntry, 2); // Allocate object. |
1145 __ popl(EAX); // Pop argument (type arguments of object). | 1122 __ popl(EAX); // Pop argument (type arguments of object). |
1146 __ popl(EAX); // Pop argument (class of object). | 1123 __ popl(EAX); // Pop argument (class of object). |
1147 __ popl(EAX); // Pop result (newly allocated object). | 1124 __ popl(EAX); // Pop result (newly allocated object). |
1148 // EAX: new object | 1125 // EAX: new object |
1149 // Restore the frame pointer. | 1126 // Restore the frame pointer. |
1150 __ LeaveFrame(); | 1127 __ LeaveFrame(); |
1151 __ ret(); | 1128 __ ret(); |
1152 } | 1129 } |
1153 | 1130 |
1154 | |
1155 // Called for invoking "dynamic noSuchMethod(Invocation invocation)" function | 1131 // Called for invoking "dynamic noSuchMethod(Invocation invocation)" function |
1156 // from the entry code of a dart function after an error in passed argument | 1132 // from the entry code of a dart function after an error in passed argument |
1157 // name or number is detected. | 1133 // name or number is detected. |
1158 // Input parameters: | 1134 // Input parameters: |
1159 // ESP : points to return address. | 1135 // ESP : points to return address. |
1160 // ESP + 4 : address of last argument. | 1136 // ESP + 4 : address of last argument. |
1161 // EDX : arguments descriptor array. | 1137 // EDX : arguments descriptor array. |
1162 // Uses EAX, EBX, EDI as temporary registers. | 1138 // Uses EAX, EBX, EDI as temporary registers. |
1163 void StubCode::GenerateCallClosureNoSuchMethodStub(Assembler* assembler) { | 1139 void StubCode::GenerateCallClosureNoSuchMethodStub(Assembler* assembler) { |
1164 __ EnterStubFrame(); | 1140 __ EnterStubFrame(); |
(...skipping 17 matching lines...) Expand all Loading... |
1182 | 1158 |
1183 // EDX: Smi-tagged arguments array length. | 1159 // EDX: Smi-tagged arguments array length. |
1184 PushArgumentsArray(assembler); | 1160 PushArgumentsArray(assembler); |
1185 | 1161 |
1186 const intptr_t kNumArgs = 3; | 1162 const intptr_t kNumArgs = 3; |
1187 __ CallRuntime(kInvokeClosureNoSuchMethodRuntimeEntry, kNumArgs); | 1163 __ CallRuntime(kInvokeClosureNoSuchMethodRuntimeEntry, kNumArgs); |
1188 // noSuchMethod on closures always throws an error, so it will never return. | 1164 // noSuchMethod on closures always throws an error, so it will never return. |
1189 __ int3(); | 1165 __ int3(); |
1190 } | 1166 } |
1191 | 1167 |
1192 | |
1193 // Cannot use function object from ICData as it may be the inlined | 1168 // Cannot use function object from ICData as it may be the inlined |
1194 // function and not the top-scope function. | 1169 // function and not the top-scope function. |
1195 void StubCode::GenerateOptimizedUsageCounterIncrement(Assembler* assembler) { | 1170 void StubCode::GenerateOptimizedUsageCounterIncrement(Assembler* assembler) { |
1196 Register ic_reg = ECX; | 1171 Register ic_reg = ECX; |
1197 Register func_reg = EBX; | 1172 Register func_reg = EBX; |
1198 if (FLAG_trace_optimized_ic_calls) { | 1173 if (FLAG_trace_optimized_ic_calls) { |
1199 __ EnterStubFrame(); | 1174 __ EnterStubFrame(); |
1200 __ pushl(func_reg); // Preserve | 1175 __ pushl(func_reg); // Preserve |
1201 __ pushl(ic_reg); // Preserve. | 1176 __ pushl(ic_reg); // Preserve. |
1202 __ pushl(ic_reg); // Argument. | 1177 __ pushl(ic_reg); // Argument. |
1203 __ pushl(func_reg); // Argument. | 1178 __ pushl(func_reg); // Argument. |
1204 __ CallRuntime(kTraceICCallRuntimeEntry, 2); | 1179 __ CallRuntime(kTraceICCallRuntimeEntry, 2); |
1205 __ popl(EAX); // Discard argument; | 1180 __ popl(EAX); // Discard argument; |
1206 __ popl(EAX); // Discard argument; | 1181 __ popl(EAX); // Discard argument; |
1207 __ popl(ic_reg); // Restore. | 1182 __ popl(ic_reg); // Restore. |
1208 __ popl(func_reg); // Restore. | 1183 __ popl(func_reg); // Restore. |
1209 __ LeaveFrame(); | 1184 __ LeaveFrame(); |
1210 } | 1185 } |
1211 __ incl(FieldAddress(func_reg, Function::usage_counter_offset())); | 1186 __ incl(FieldAddress(func_reg, Function::usage_counter_offset())); |
1212 } | 1187 } |
1213 | 1188 |
1214 | |
1215 // Loads function into 'temp_reg'. | 1189 // Loads function into 'temp_reg'. |
1216 void StubCode::GenerateUsageCounterIncrement(Assembler* assembler, | 1190 void StubCode::GenerateUsageCounterIncrement(Assembler* assembler, |
1217 Register temp_reg) { | 1191 Register temp_reg) { |
1218 if (FLAG_optimization_counter_threshold >= 0) { | 1192 if (FLAG_optimization_counter_threshold >= 0) { |
1219 Register ic_reg = ECX; | 1193 Register ic_reg = ECX; |
1220 Register func_reg = temp_reg; | 1194 Register func_reg = temp_reg; |
1221 ASSERT(ic_reg != func_reg); | 1195 ASSERT(ic_reg != func_reg); |
1222 __ Comment("Increment function counter"); | 1196 __ Comment("Increment function counter"); |
1223 __ movl(func_reg, FieldAddress(ic_reg, ICData::owner_offset())); | 1197 __ movl(func_reg, FieldAddress(ic_reg, ICData::owner_offset())); |
1224 __ incl(FieldAddress(func_reg, Function::usage_counter_offset())); | 1198 __ incl(FieldAddress(func_reg, Function::usage_counter_offset())); |
1225 } | 1199 } |
1226 } | 1200 } |
1227 | 1201 |
1228 | |
1229 // Note: ECX must be preserved. | 1202 // Note: ECX must be preserved. |
1230 // Attempt a quick Smi operation for known operations ('kind'). The ICData | 1203 // Attempt a quick Smi operation for known operations ('kind'). The ICData |
1231 // must have been primed with a Smi/Smi check that will be used for counting | 1204 // must have been primed with a Smi/Smi check that will be used for counting |
1232 // the invocations. | 1205 // the invocations. |
1233 static void EmitFastSmiOp(Assembler* assembler, | 1206 static void EmitFastSmiOp(Assembler* assembler, |
1234 Token::Kind kind, | 1207 Token::Kind kind, |
1235 intptr_t num_args, | 1208 intptr_t num_args, |
1236 Label* not_smi_or_overflow) { | 1209 Label* not_smi_or_overflow) { |
1237 __ Comment("Fast Smi op"); | 1210 __ Comment("Fast Smi op"); |
1238 ASSERT(num_args == 2); | 1211 ASSERT(num_args == 2); |
(...skipping 53 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1292 __ Bind(&ok); | 1265 __ Bind(&ok); |
1293 #endif | 1266 #endif |
1294 if (FLAG_optimization_counter_threshold >= 0) { | 1267 if (FLAG_optimization_counter_threshold >= 0) { |
1295 const intptr_t count_offset = ICData::CountIndexFor(num_args) * kWordSize; | 1268 const intptr_t count_offset = ICData::CountIndexFor(num_args) * kWordSize; |
1296 // Update counter, ignore overflow. | 1269 // Update counter, ignore overflow. |
1297 __ addl(Address(EBX, count_offset), Immediate(Smi::RawValue(1))); | 1270 __ addl(Address(EBX, count_offset), Immediate(Smi::RawValue(1))); |
1298 } | 1271 } |
1299 __ ret(); | 1272 __ ret(); |
1300 } | 1273 } |
1301 | 1274 |
1302 | |
1303 // Generate inline cache check for 'num_args'. | 1275 // Generate inline cache check for 'num_args'. |
1304 // ECX: Inline cache data object. | 1276 // ECX: Inline cache data object. |
1305 // TOS(0): return address | 1277 // TOS(0): return address |
1306 // Control flow: | 1278 // Control flow: |
1307 // - If receiver is null -> jump to IC miss. | 1279 // - If receiver is null -> jump to IC miss. |
1308 // - If receiver is Smi -> load Smi class. | 1280 // - If receiver is Smi -> load Smi class. |
1309 // - If receiver is not-Smi -> load receiver's class. | 1281 // - If receiver is not-Smi -> load receiver's class. |
1310 // - Check if 'num_args' (including receiver) match any IC data group. | 1282 // - Check if 'num_args' (including receiver) match any IC data group. |
1311 // - Match found -> jump to target. | 1283 // - Match found -> jump to target. |
1312 // - Match not found -> jump to IC miss. | 1284 // - Match not found -> jump to IC miss. |
(...skipping 80 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1393 // Load receiver into EDI. | 1365 // Load receiver into EDI. |
1394 __ movl(EDI, Address(ESP, EAX, TIMES_2, 0)); | 1366 __ movl(EDI, Address(ESP, EAX, TIMES_2, 0)); |
1395 __ LoadTaggedClassIdMayBeSmi(EDI, EDI); | 1367 __ LoadTaggedClassIdMayBeSmi(EDI, EDI); |
1396 __ cmpl(EDI, Address(EBX, 0)); // Class id match? | 1368 __ cmpl(EDI, Address(EBX, 0)); // Class id match? |
1397 __ j(NOT_EQUAL, &update); // Continue. | 1369 __ j(NOT_EQUAL, &update); // Continue. |
1398 | 1370 |
1399 // Load second argument into EDI. | 1371 // Load second argument into EDI. |
1400 __ movl(EDI, Address(ESP, EAX, TIMES_2, -kWordSize)); | 1372 __ movl(EDI, Address(ESP, EAX, TIMES_2, -kWordSize)); |
1401 __ LoadTaggedClassIdMayBeSmi(EDI, EDI); | 1373 __ LoadTaggedClassIdMayBeSmi(EDI, EDI); |
1402 __ cmpl(EDI, Address(EBX, kWordSize)); // Class id match? | 1374 __ cmpl(EDI, Address(EBX, kWordSize)); // Class id match? |
1403 __ j(EQUAL, &found); // Break. | 1375 __ j(EQUAL, &found); // Break. |
1404 | 1376 |
1405 __ Bind(&update); | 1377 __ Bind(&update); |
1406 __ addl(EBX, Immediate(entry_size)); // Next entry. | 1378 __ addl(EBX, Immediate(entry_size)); // Next entry. |
1407 __ cmpl(Address(EBX, -entry_size), | 1379 __ cmpl(Address(EBX, -entry_size), |
1408 Immediate(Smi::RawValue(kIllegalCid))); // Done? | 1380 Immediate(Smi::RawValue(kIllegalCid))); // Done? |
1409 } | 1381 } |
1410 | 1382 |
1411 if (unroll == 0) { | 1383 if (unroll == 0) { |
1412 __ j(NOT_EQUAL, &loop); | 1384 __ j(NOT_EQUAL, &loop); |
1413 } else { | 1385 } else { |
(...skipping 55 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1469 __ Bind(&stepping); | 1441 __ Bind(&stepping); |
1470 __ EnterStubFrame(); | 1442 __ EnterStubFrame(); |
1471 __ pushl(ECX); | 1443 __ pushl(ECX); |
1472 __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0); | 1444 __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0); |
1473 __ popl(ECX); | 1445 __ popl(ECX); |
1474 __ LeaveFrame(); | 1446 __ LeaveFrame(); |
1475 __ jmp(&done_stepping); | 1447 __ jmp(&done_stepping); |
1476 } | 1448 } |
1477 } | 1449 } |
1478 | 1450 |
1479 | |
1480 // Use inline cache data array to invoke the target or continue in inline | 1451 // Use inline cache data array to invoke the target or continue in inline |
1481 // cache miss handler. Stub for 1-argument check (receiver class). | 1452 // cache miss handler. Stub for 1-argument check (receiver class). |
1482 // ECX: Inline cache data object. | 1453 // ECX: Inline cache data object. |
1483 // TOS(0): Return address. | 1454 // TOS(0): Return address. |
1484 // Inline cache data object structure: | 1455 // Inline cache data object structure: |
1485 // 0: function-name | 1456 // 0: function-name |
1486 // 1: N, number of arguments checked. | 1457 // 1: N, number of arguments checked. |
1487 // 2 .. (length - 1): group of checks, each check containing: | 1458 // 2 .. (length - 1): group of checks, each check containing: |
1488 // - N classes. | 1459 // - N classes. |
1489 // - 1 target function. | 1460 // - 1 target function. |
1490 void StubCode::GenerateOneArgCheckInlineCacheStub(Assembler* assembler) { | 1461 void StubCode::GenerateOneArgCheckInlineCacheStub(Assembler* assembler) { |
1491 GenerateUsageCounterIncrement(assembler, EBX); | 1462 GenerateUsageCounterIncrement(assembler, EBX); |
1492 GenerateNArgsCheckInlineCacheStub( | 1463 GenerateNArgsCheckInlineCacheStub( |
1493 assembler, 1, kInlineCacheMissHandlerOneArgRuntimeEntry, Token::kILLEGAL); | 1464 assembler, 1, kInlineCacheMissHandlerOneArgRuntimeEntry, Token::kILLEGAL); |
1494 } | 1465 } |
1495 | 1466 |
1496 | |
1497 void StubCode::GenerateTwoArgsCheckInlineCacheStub(Assembler* assembler) { | 1467 void StubCode::GenerateTwoArgsCheckInlineCacheStub(Assembler* assembler) { |
1498 GenerateUsageCounterIncrement(assembler, EBX); | 1468 GenerateUsageCounterIncrement(assembler, EBX); |
1499 GenerateNArgsCheckInlineCacheStub(assembler, 2, | 1469 GenerateNArgsCheckInlineCacheStub(assembler, 2, |
1500 kInlineCacheMissHandlerTwoArgsRuntimeEntry, | 1470 kInlineCacheMissHandlerTwoArgsRuntimeEntry, |
1501 Token::kILLEGAL); | 1471 Token::kILLEGAL); |
1502 } | 1472 } |
1503 | 1473 |
1504 | |
1505 void StubCode::GenerateSmiAddInlineCacheStub(Assembler* assembler) { | 1474 void StubCode::GenerateSmiAddInlineCacheStub(Assembler* assembler) { |
1506 GenerateUsageCounterIncrement(assembler, EBX); | 1475 GenerateUsageCounterIncrement(assembler, EBX); |
1507 GenerateNArgsCheckInlineCacheStub( | 1476 GenerateNArgsCheckInlineCacheStub( |
1508 assembler, 2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kADD); | 1477 assembler, 2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kADD); |
1509 } | 1478 } |
1510 | 1479 |
1511 | |
1512 void StubCode::GenerateSmiSubInlineCacheStub(Assembler* assembler) { | 1480 void StubCode::GenerateSmiSubInlineCacheStub(Assembler* assembler) { |
1513 GenerateUsageCounterIncrement(assembler, EBX); | 1481 GenerateUsageCounterIncrement(assembler, EBX); |
1514 GenerateNArgsCheckInlineCacheStub( | 1482 GenerateNArgsCheckInlineCacheStub( |
1515 assembler, 2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kSUB); | 1483 assembler, 2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kSUB); |
1516 } | 1484 } |
1517 | 1485 |
1518 | |
1519 void StubCode::GenerateSmiEqualInlineCacheStub(Assembler* assembler) { | 1486 void StubCode::GenerateSmiEqualInlineCacheStub(Assembler* assembler) { |
1520 GenerateUsageCounterIncrement(assembler, EBX); | 1487 GenerateUsageCounterIncrement(assembler, EBX); |
1521 GenerateNArgsCheckInlineCacheStub( | 1488 GenerateNArgsCheckInlineCacheStub( |
1522 assembler, 2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kEQ); | 1489 assembler, 2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kEQ); |
1523 } | 1490 } |
1524 | 1491 |
1525 | |
1526 // Use inline cache data array to invoke the target or continue in inline | 1492 // Use inline cache data array to invoke the target or continue in inline |
1527 // cache miss handler. Stub for 1-argument check (receiver class). | 1493 // cache miss handler. Stub for 1-argument check (receiver class). |
1528 // EDI: function which counter needs to be incremented. | 1494 // EDI: function which counter needs to be incremented. |
1529 // ECX: Inline cache data object. | 1495 // ECX: Inline cache data object. |
1530 // TOS(0): Return address. | 1496 // TOS(0): Return address. |
1531 // Inline cache data object structure: | 1497 // Inline cache data object structure: |
1532 // 0: function-name | 1498 // 0: function-name |
1533 // 1: N, number of arguments checked. | 1499 // 1: N, number of arguments checked. |
1534 // 2 .. (length - 1): group of checks, each check containing: | 1500 // 2 .. (length - 1): group of checks, each check containing: |
1535 // - N classes. | 1501 // - N classes. |
1536 // - 1 target function. | 1502 // - 1 target function. |
1537 void StubCode::GenerateOneArgOptimizedCheckInlineCacheStub( | 1503 void StubCode::GenerateOneArgOptimizedCheckInlineCacheStub( |
1538 Assembler* assembler) { | 1504 Assembler* assembler) { |
1539 GenerateOptimizedUsageCounterIncrement(assembler); | 1505 GenerateOptimizedUsageCounterIncrement(assembler); |
1540 GenerateNArgsCheckInlineCacheStub(assembler, 1, | 1506 GenerateNArgsCheckInlineCacheStub(assembler, 1, |
1541 kInlineCacheMissHandlerOneArgRuntimeEntry, | 1507 kInlineCacheMissHandlerOneArgRuntimeEntry, |
1542 Token::kILLEGAL, true /* optimized */); | 1508 Token::kILLEGAL, true /* optimized */); |
1543 } | 1509 } |
1544 | 1510 |
1545 | |
1546 void StubCode::GenerateTwoArgsOptimizedCheckInlineCacheStub( | 1511 void StubCode::GenerateTwoArgsOptimizedCheckInlineCacheStub( |
1547 Assembler* assembler) { | 1512 Assembler* assembler) { |
1548 GenerateOptimizedUsageCounterIncrement(assembler); | 1513 GenerateOptimizedUsageCounterIncrement(assembler); |
1549 GenerateNArgsCheckInlineCacheStub(assembler, 2, | 1514 GenerateNArgsCheckInlineCacheStub(assembler, 2, |
1550 kInlineCacheMissHandlerTwoArgsRuntimeEntry, | 1515 kInlineCacheMissHandlerTwoArgsRuntimeEntry, |
1551 Token::kILLEGAL, true /* optimized */); | 1516 Token::kILLEGAL, true /* optimized */); |
1552 } | 1517 } |
1553 | 1518 |
1554 | |
1555 // Intermediary stub between a static call and its target. ICData contains | 1519 // Intermediary stub between a static call and its target. ICData contains |
1556 // the target function and the call count. | 1520 // the target function and the call count. |
1557 // ECX: ICData | 1521 // ECX: ICData |
1558 void StubCode::GenerateZeroArgsUnoptimizedStaticCallStub(Assembler* assembler) { | 1522 void StubCode::GenerateZeroArgsUnoptimizedStaticCallStub(Assembler* assembler) { |
1559 GenerateUsageCounterIncrement(assembler, EBX); | 1523 GenerateUsageCounterIncrement(assembler, EBX); |
1560 | 1524 |
1561 #if defined(DEBUG) | 1525 #if defined(DEBUG) |
1562 { | 1526 { |
1563 Label ok; | 1527 Label ok; |
1564 // Check that the IC data array has NumArgsTested() == num_args. | 1528 // Check that the IC data array has NumArgsTested() == num_args. |
(...skipping 41 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1606 __ Bind(&stepping); | 1570 __ Bind(&stepping); |
1607 __ EnterStubFrame(); | 1571 __ EnterStubFrame(); |
1608 __ pushl(ECX); | 1572 __ pushl(ECX); |
1609 __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0); | 1573 __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0); |
1610 __ popl(ECX); | 1574 __ popl(ECX); |
1611 __ LeaveFrame(); | 1575 __ LeaveFrame(); |
1612 __ jmp(&done_stepping, Assembler::kNearJump); | 1576 __ jmp(&done_stepping, Assembler::kNearJump); |
1613 } | 1577 } |
1614 } | 1578 } |
1615 | 1579 |
1616 | |
1617 void StubCode::GenerateOneArgUnoptimizedStaticCallStub(Assembler* assembler) { | 1580 void StubCode::GenerateOneArgUnoptimizedStaticCallStub(Assembler* assembler) { |
1618 GenerateUsageCounterIncrement(assembler, EBX); | 1581 GenerateUsageCounterIncrement(assembler, EBX); |
1619 GenerateNArgsCheckInlineCacheStub( | 1582 GenerateNArgsCheckInlineCacheStub( |
1620 assembler, 1, kStaticCallMissHandlerOneArgRuntimeEntry, Token::kILLEGAL); | 1583 assembler, 1, kStaticCallMissHandlerOneArgRuntimeEntry, Token::kILLEGAL); |
1621 } | 1584 } |
1622 | 1585 |
1623 | |
1624 void StubCode::GenerateTwoArgsUnoptimizedStaticCallStub(Assembler* assembler) { | 1586 void StubCode::GenerateTwoArgsUnoptimizedStaticCallStub(Assembler* assembler) { |
1625 GenerateUsageCounterIncrement(assembler, EBX); | 1587 GenerateUsageCounterIncrement(assembler, EBX); |
1626 GenerateNArgsCheckInlineCacheStub( | 1588 GenerateNArgsCheckInlineCacheStub( |
1627 assembler, 2, kStaticCallMissHandlerTwoArgsRuntimeEntry, Token::kILLEGAL); | 1589 assembler, 2, kStaticCallMissHandlerTwoArgsRuntimeEntry, Token::kILLEGAL); |
1628 } | 1590 } |
1629 | 1591 |
1630 | |
// Stub for compiling a function and jumping to the compiled code.
// ECX: IC-Data (for methods).
// EDX: Arguments descriptor.
// EAX: Function.
void StubCode::GenerateLazyCompileStub(Assembler* assembler) {
  __ EnterStubFrame();
  __ pushl(EDX);  // Preserve arguments descriptor array.
  __ pushl(ECX);  // Preserve IC data object.
  __ pushl(EAX);  // Pass function.
  __ CallRuntime(kCompileFunctionRuntimeEntry, 1);
  __ popl(EAX);  // Restore function.
  __ popl(ECX);  // Restore IC data array.
  __ popl(EDX);  // Restore arguments descriptor array.
  __ LeaveFrame();

  // Tail-jump into the function's entry point; the caller's return address
  // is still on the stack, so the compiled code returns to the original
  // caller.
  __ movl(EAX, FieldAddress(EAX, Function::entry_point_offset()));
  __ jmp(EAX);
}
1649 | 1610 |
1650 | |
// Calls the debugger's breakpoint handler to obtain the original
// (unpatched) stub, then tail-jumps to that stub with the ICData restored.
// ECX: Contains an ICData.
void StubCode::GenerateICCallBreakpointStub(Assembler* assembler) {
  __ EnterStubFrame();
  // Save IC data.
  __ pushl(ECX);
  // Room for result. Debugger stub returns address of the
  // unpatched runtime stub.
  __ pushl(Immediate(0));  // Room for result.
  __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0);
  __ popl(EAX);  // Code of original stub.
  __ popl(ECX);  // Restore IC data.
  __ LeaveFrame();
  // Jump to original stub.
  __ movl(EAX, FieldAddress(EAX, Code::entry_point_offset()));
  __ jmp(EAX);
}
1667 | 1627 |
1668 | |
// Calls the debugger's breakpoint handler to obtain the original
// (unpatched) runtime stub, then tail-jumps to it. No registers are
// preserved beyond what the stub frame saves.
void StubCode::GenerateRuntimeCallBreakpointStub(Assembler* assembler) {
  __ EnterStubFrame();
  // Room for result. Debugger stub returns address of the
  // unpatched runtime stub.
  __ pushl(Immediate(0));  // Room for result.
  __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0);
  __ popl(EAX);  // Code of the original stub
  __ LeaveFrame();
  // Jump to original stub.
  __ movl(EAX, FieldAddress(EAX, Code::entry_point_offset()));
  __ jmp(EAX);
}
1681 | 1640 |
1682 | |
// Called only from unoptimized code.
// Reads the isolate's single-step flag; when set, calls the runtime
// single-step handler inside a stub frame, then returns either way.
void StubCode::GenerateDebugStepCheckStub(Assembler* assembler) {
  // Check single stepping.
  Label stepping, done_stepping;
  __ LoadIsolate(EAX);
  // single_step is a byte flag; non-zero means the debugger is stepping.
  __ movzxb(EAX, Address(EAX, Isolate::single_step_offset()));
  __ cmpl(EAX, Immediate(0));
  __ j(NOT_EQUAL, &stepping, Assembler::kNearJump);
  __ Bind(&done_stepping);
  __ ret();

  // Slow path: notify the debugger, then rejoin the fast path's return.
  __ Bind(&stepping);
  __ EnterStubFrame();
  __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
  __ LeaveFrame();
  __ jmp(&done_stepping, Assembler::kNearJump);
}
1700 | 1658 |
1701 | |
1702 // Used to check class and type arguments. Arguments passed on stack: | 1659 // Used to check class and type arguments. Arguments passed on stack: |
1703 // TOS + 0: return address. | 1660 // TOS + 0: return address. |
1704 // TOS + 1: function type arguments (only if n == 4, can be raw_null). | 1661 // TOS + 1: function type arguments (only if n == 4, can be raw_null). |
1705 // TOS + 2: instantiator type arguments (only if n == 4, can be raw_null). | 1662 // TOS + 2: instantiator type arguments (only if n == 4, can be raw_null). |
1706 // TOS + 3: instance. | 1663 // TOS + 3: instance. |
1707 // TOS + 4: SubtypeTestCache. | 1664 // TOS + 4: SubtypeTestCache. |
1708 // Result in ECX: null -> not found, otherwise result (true or false). | 1665 // Result in ECX: null -> not found, otherwise result (true or false). |
1709 static void GenerateSubtypeNTestCacheStub(Assembler* assembler, int n) { | 1666 static void GenerateSubtypeNTestCacheStub(Assembler* assembler, int n) { |
1710 ASSERT((n == 1) || (n == 2) || (n == 4)); | 1667 ASSERT((n == 1) || (n == 2) || (n == 4)); |
1711 const intptr_t kFunctionTypeArgumentsInBytes = 1 * kWordSize; | 1668 const intptr_t kFunctionTypeArgumentsInBytes = 1 * kWordSize; |
(...skipping 72 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1784 // Fall through to not found. | 1741 // Fall through to not found. |
1785 __ Bind(¬_found); | 1742 __ Bind(¬_found); |
1786 __ movl(ECX, raw_null); | 1743 __ movl(ECX, raw_null); |
1787 __ ret(); | 1744 __ ret(); |
1788 | 1745 |
1789 __ Bind(&found); | 1746 __ Bind(&found); |
1790 __ movl(ECX, Address(EDX, kWordSize * SubtypeTestCache::kTestResult)); | 1747 __ movl(ECX, Address(EDX, kWordSize * SubtypeTestCache::kTestResult)); |
1791 __ ret(); | 1748 __ ret(); |
1792 } | 1749 } |
1793 | 1750 |
1794 | |
1795 // Used to check class and type arguments. Arguments passed on stack: | 1751 // Used to check class and type arguments. Arguments passed on stack: |
1796 // TOS + 0: return address. | 1752 // TOS + 0: return address. |
1797 // TOS + 1: raw_null. | 1753 // TOS + 1: raw_null. |
1798 // TOS + 2: raw_null. | 1754 // TOS + 2: raw_null. |
1799 // TOS + 3: instance. | 1755 // TOS + 3: instance. |
1800 // TOS + 4: SubtypeTestCache. | 1756 // TOS + 4: SubtypeTestCache. |
1801 // Result in ECX: null -> not found, otherwise result (true or false). | 1757 // Result in ECX: null -> not found, otherwise result (true or false). |
1802 void StubCode::GenerateSubtype1TestCacheStub(Assembler* assembler) { | 1758 void StubCode::GenerateSubtype1TestCacheStub(Assembler* assembler) { |
1803 GenerateSubtypeNTestCacheStub(assembler, 1); | 1759 GenerateSubtypeNTestCacheStub(assembler, 1); |
1804 } | 1760 } |
1805 | 1761 |
1806 | |
1807 // Used to check class and type arguments. Arguments passed on stack: | 1762 // Used to check class and type arguments. Arguments passed on stack: |
1808 // TOS + 0: return address. | 1763 // TOS + 0: return address. |
1809 // TOS + 1: raw_null. | 1764 // TOS + 1: raw_null. |
1810 // TOS + 2: raw_null. | 1765 // TOS + 2: raw_null. |
1811 // TOS + 3: instance. | 1766 // TOS + 3: instance. |
1812 // TOS + 4: SubtypeTestCache. | 1767 // TOS + 4: SubtypeTestCache. |
1813 // Result in ECX: null -> not found, otherwise result (true or false). | 1768 // Result in ECX: null -> not found, otherwise result (true or false). |
1814 void StubCode::GenerateSubtype2TestCacheStub(Assembler* assembler) { | 1769 void StubCode::GenerateSubtype2TestCacheStub(Assembler* assembler) { |
1815 GenerateSubtypeNTestCacheStub(assembler, 2); | 1770 GenerateSubtypeNTestCacheStub(assembler, 2); |
1816 } | 1771 } |
1817 | 1772 |
1818 | |
1819 // Used to check class and type arguments. Arguments passed on stack: | 1773 // Used to check class and type arguments. Arguments passed on stack: |
1820 // TOS + 0: return address. | 1774 // TOS + 0: return address. |
1821 // TOS + 1: function type arguments (can be raw_null). | 1775 // TOS + 1: function type arguments (can be raw_null). |
1822 // TOS + 2: instantiator type arguments (can be raw_null). | 1776 // TOS + 2: instantiator type arguments (can be raw_null). |
1823 // TOS + 3: instance. | 1777 // TOS + 3: instance. |
1824 // TOS + 4: SubtypeTestCache. | 1778 // TOS + 4: SubtypeTestCache. |
1825 // Result in ECX: null -> not found, otherwise result (true or false). | 1779 // Result in ECX: null -> not found, otherwise result (true or false). |
1826 void StubCode::GenerateSubtype4TestCacheStub(Assembler* assembler) { | 1780 void StubCode::GenerateSubtype4TestCacheStub(Assembler* assembler) { |
1827 GenerateSubtypeNTestCacheStub(assembler, 4); | 1781 GenerateSubtypeNTestCacheStub(assembler, 4); |
1828 } | 1782 } |
1829 | 1783 |
1830 | |
// Return the current stack pointer address, used to do stack alignment checks.
// TOS + 0: return address
// Result in EAX.
void StubCode::GenerateGetCStackPointerStub(Assembler* assembler) {
  // The caller's SP is ESP plus one word, skipping the pushed return
  // address.
  __ leal(EAX, Address(ESP, kWordSize));
  __ ret();
}
1838 | 1791 |
1839 | |
// Jump to a frame on the call stack.
// TOS + 0: return address
// TOS + 1: program_counter
// TOS + 2: stack_pointer
// TOS + 3: frame_pointer
// TOS + 4: thread
// No Result.
void StubCode::GenerateJumpToFrameStub(Assembler* assembler) {
  __ movl(THR, Address(ESP, 4 * kWordSize));  // Load target thread.
  __ movl(EBP, Address(ESP, 3 * kWordSize));  // Load target frame_pointer.
  __ movl(EBX, Address(ESP, 1 * kWordSize));  // Load target PC into EBX.
  // ESP must be overwritten last: all the loads above address their
  // operands relative to the incoming ESP.
  __ movl(ESP, Address(ESP, 2 * kWordSize));  // Load target stack_pointer.
  // Set tag.
  __ movl(Assembler::VMTagAddress(), Immediate(VMTag::kDartTagId));
  // Clear top exit frame.
  __ movl(Address(THR, Thread::top_exit_frame_info_offset()), Immediate(0));
  __ jmp(EBX);  // Jump to the exception handler code.
}
1858 | 1810 |
1859 | |
// Run an exception handler. Execution comes from JumpToFrame stub.
//
// The arguments are stored in the Thread object.
// No result.
void StubCode::GenerateRunExceptionHandlerStub(Assembler* assembler) {
  ASSERT(kExceptionObjectReg == EAX);
  ASSERT(kStackTraceObjectReg == EDX);
  // Resumption PC was stored on the thread before jumping here.
  __ movl(EBX, Address(THR, Thread::resume_pc_offset()));

  // Load the exception from the current thread.
  Address exception_addr(THR, Thread::active_exception_offset());
  __ movl(kExceptionObjectReg, exception_addr);
  // Clear the slot so the thread no longer holds the exception.
  __ movl(exception_addr, Immediate(0));

  // Load the stacktrace from the current thread.
  Address stacktrace_addr(THR, Thread::active_stacktrace_offset());
  __ movl(kStackTraceObjectReg, stacktrace_addr);
  // Clear the slot so the thread no longer holds the stacktrace.
  __ movl(stacktrace_addr, Immediate(0));

  __ jmp(EBX);  // Jump to continuation point.
}
1881 | 1832 |
1882 | |
// Deoptimize a frame on the call stack before rewinding.
// The arguments are stored in the Thread object.
// No result.
void StubCode::GenerateDeoptForRewindStub(Assembler* assembler) {
  // Push the deopt pc.
  __ pushl(Address(THR, Thread::resume_pc_offset()));
  GenerateDeoptimizationSequence(assembler, kEagerDeopt);

  // After we have deoptimized, jump to the correct frame.
  __ EnterStubFrame();
  __ CallRuntime(kRewindPostDeoptRuntimeEntry, 0);
  __ LeaveFrame();
  // The rewind runtime entry is not expected to return; trap if it does.
  __ int3();
}
1897 | 1847 |
1898 | |
// Calls to the runtime to optimize the given function.
// EBX: function to be reoptimized.
// EDX: argument descriptor (preserved).
// On success, tail-calls the (re)compiled code of the function.
void StubCode::GenerateOptimizeFunctionStub(Assembler* assembler) {
  __ EnterStubFrame();
  __ pushl(EDX);                   // Preserve argument descriptor.
  __ pushl(Immediate(0));          // Setup space on stack for return value.
  __ pushl(EBX);                   // Argument: the function to optimize.
  __ CallRuntime(kOptimizeInvokedFunctionRuntimeEntry, 1);
  __ popl(EAX);  // Discard argument.
  __ popl(EAX);  // Get Function object
  __ popl(EDX);  // Restore argument descriptor.
  __ LeaveFrame();
  // Tail-call into the function's current code: load its Code object and
  // entry point, then jump there (the callee sees the original arguments).
  __ movl(CODE_REG, FieldAddress(EAX, Function::code_offset()));
  __ movl(EAX, FieldAddress(EAX, Function::entry_point_offset()));
  __ jmp(EAX);
  __ int3();  // Unreachable; trap if the jump ever falls through.
}
1917 | 1866 |
1918 | |
1919 // Does identical check (object references are equal or not equal) with special | 1867 // Does identical check (object references are equal or not equal) with special |
1920 // checks for boxed numbers. | 1868 // checks for boxed numbers. |
1921 // Return ZF set. | 1869 // Return ZF set. |
1922 // Note: A Mint cannot contain a value that would fit in Smi, a Bigint | 1870 // Note: A Mint cannot contain a value that would fit in Smi, a Bigint |
1923 // cannot contain a value that fits in Mint or Smi. | 1871 // cannot contain a value that fits in Mint or Smi. |
1924 static void GenerateIdenticalWithNumberCheckStub(Assembler* assembler, | 1872 static void GenerateIdenticalWithNumberCheckStub(Assembler* assembler, |
1925 const Register left, | 1873 const Register left, |
1926 const Register right, | 1874 const Register right, |
1927 const Register temp) { | 1875 const Register temp) { |
1928 Label reference_compare, done, check_mint, check_bigint; | 1876 Label reference_compare, done, check_mint, check_bigint; |
(...skipping 42 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1971 // Result in EAX, 0 means equal. | 1919 // Result in EAX, 0 means equal. |
1972 __ LeaveFrame(); | 1920 __ LeaveFrame(); |
1973 __ cmpl(EAX, Immediate(0)); | 1921 __ cmpl(EAX, Immediate(0)); |
1974 __ jmp(&done); | 1922 __ jmp(&done); |
1975 | 1923 |
1976 __ Bind(&reference_compare); | 1924 __ Bind(&reference_compare); |
1977 __ cmpl(left, right); | 1925 __ cmpl(left, right); |
1978 __ Bind(&done); | 1926 __ Bind(&done); |
1979 } | 1927 } |
1980 | 1928 |
1981 | |
1982 // Called only from unoptimized code. All relevant registers have been saved. | 1929 // Called only from unoptimized code. All relevant registers have been saved. |
1983 // TOS + 0: return address | 1930 // TOS + 0: return address |
1984 // TOS + 1: right argument. | 1931 // TOS + 1: right argument. |
1985 // TOS + 2: left argument. | 1932 // TOS + 2: left argument. |
1986 // Returns ZF set. | 1933 // Returns ZF set. |
1987 void StubCode::GenerateUnoptimizedIdenticalWithNumberCheckStub( | 1934 void StubCode::GenerateUnoptimizedIdenticalWithNumberCheckStub( |
1988 Assembler* assembler) { | 1935 Assembler* assembler) { |
1989 // Check single stepping. | 1936 // Check single stepping. |
1990 Label stepping, done_stepping; | 1937 Label stepping, done_stepping; |
1991 if (FLAG_support_debugger) { | 1938 if (FLAG_support_debugger) { |
(...skipping 14 matching lines...) Expand all Loading... |
2006 | 1953 |
2007 if (FLAG_support_debugger) { | 1954 if (FLAG_support_debugger) { |
2008 __ Bind(&stepping); | 1955 __ Bind(&stepping); |
2009 __ EnterStubFrame(); | 1956 __ EnterStubFrame(); |
2010 __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0); | 1957 __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0); |
2011 __ LeaveFrame(); | 1958 __ LeaveFrame(); |
2012 __ jmp(&done_stepping); | 1959 __ jmp(&done_stepping); |
2013 } | 1960 } |
2014 } | 1961 } |
2015 | 1962 |
2016 | |
2017 // Called from optimized code only. | 1963 // Called from optimized code only. |
2018 // TOS + 0: return address | 1964 // TOS + 0: return address |
2019 // TOS + 1: right argument. | 1965 // TOS + 1: right argument. |
2020 // TOS + 2: left argument. | 1966 // TOS + 2: left argument. |
2021 // Returns ZF set. | 1967 // Returns ZF set. |
2022 void StubCode::GenerateOptimizedIdenticalWithNumberCheckStub( | 1968 void StubCode::GenerateOptimizedIdenticalWithNumberCheckStub( |
2023 Assembler* assembler) { | 1969 Assembler* assembler) { |
2024 const Register left = EAX; | 1970 const Register left = EAX; |
2025 const Register right = EDX; | 1971 const Register right = EDX; |
2026 const Register temp = ECX; | 1972 const Register temp = ECX; |
2027 __ movl(left, Address(ESP, 2 * kWordSize)); | 1973 __ movl(left, Address(ESP, 2 * kWordSize)); |
2028 __ movl(right, Address(ESP, 1 * kWordSize)); | 1974 __ movl(right, Address(ESP, 1 * kWordSize)); |
2029 GenerateIdenticalWithNumberCheckStub(assembler, left, right, temp); | 1975 GenerateIdenticalWithNumberCheckStub(assembler, left, right, temp); |
2030 __ ret(); | 1976 __ ret(); |
2031 } | 1977 } |
2032 | 1978 |
2033 | |
2034 // Called from megamorphic calls. | 1979 // Called from megamorphic calls. |
2035 // EBX: receiver | 1980 // EBX: receiver |
2036 // ECX: MegamorphicCache (preserved) | 1981 // ECX: MegamorphicCache (preserved) |
2037 // Passed to target: | 1982 // Passed to target: |
2038 // EBX: target entry point | 1983 // EBX: target entry point |
2039 // EDX: argument descriptor | 1984 // EDX: argument descriptor |
2040 void StubCode::GenerateMegamorphicCallStub(Assembler* assembler) { | 1985 void StubCode::GenerateMegamorphicCallStub(Assembler* assembler) { |
2041 // Jump if receiver is a smi. | 1986 // Jump if receiver is a smi. |
2042 Label smi_case; | 1987 Label smi_case; |
2043 // Check if object (in tmp) is a Smi. | 1988 // Check if object (in tmp) is a Smi. |
(...skipping 59 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2103 | 2048 |
// Called from switchable IC calls.
// EBX: receiver
// ECX: ICData (preserved)
// Passed to target:
// EDX: arguments descriptor
void StubCode::GenerateICCallThroughFunctionStub(Assembler* assembler) {
  // Not implemented on IA32; trap immediately if this stub is ever entered.
  __ int3();
}
2112 | 2057 |
2113 | |
void StubCode::GenerateICCallThroughCodeStub(Assembler* assembler) {
  // Not implemented on IA32; trap immediately if this stub is ever entered.
  __ int3();
}
2117 | 2061 |
2118 | |
void StubCode::GenerateUnlinkedCallStub(Assembler* assembler) {
  // Not implemented on IA32; trap immediately if this stub is ever entered.
  __ int3();
}
2122 | 2065 |
2123 | |
void StubCode::GenerateSingleTargetCallStub(Assembler* assembler) {
  // Not implemented on IA32; trap immediately if this stub is ever entered.
  __ int3();
}
2127 | 2069 |
2128 | |
void StubCode::GenerateMonomorphicMissStub(Assembler* assembler) {
  // Not implemented on IA32; trap immediately if this stub is ever entered.
  __ int3();
}
2132 | 2073 |
2133 | |
// Marker stub: never executed directly. Its code object identifies frames
// whose contents are awaiting materialization during deoptimization; trap
// if control ever reaches it.
void StubCode::GenerateFrameAwaitingMaterializationStub(Assembler* assembler) {
  __ int3();
}
2137 | 2077 |
2138 | |
// Marker stub: never executed directly. Its code object marks asynchronous
// gaps in stack traces; trap if control ever reaches it.
// NOTE(review): marker role inferred from the stub name — confirm against
// the stack-trace builder before relying on it.
void StubCode::GenerateAsynchronousGapMarkerStub(Assembler* assembler) {
  __ int3();
}
2142 | 2081 |
2143 } // namespace dart | 2082 } // namespace dart |
2144 | 2083 |
2145 #endif // defined TARGET_ARCH_IA32 | 2084 #endif // defined TARGET_ARCH_IA32 |
OLD | NEW |