Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(269)

Side by Side Diff: runtime/vm/stub_code_arm.cc

Issue 2974233002: VM: Re-format to use at most one newline between functions (Closed)
Patch Set: Rebase and merge Created 3 years, 5 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « runtime/vm/stub_code.cc ('k') | runtime/vm/stub_code_arm64.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
2 // for details. All rights reserved. Use of this source code is governed by a 2 // for details. All rights reserved. Use of this source code is governed by a
3 // BSD-style license that can be found in the LICENSE file. 3 // BSD-style license that can be found in the LICENSE file.
4 4
5 #include "vm/globals.h" 5 #include "vm/globals.h"
6 #if defined(TARGET_ARCH_ARM) 6 #if defined(TARGET_ARCH_ARM)
7 7
8 #include "vm/assembler.h" 8 #include "vm/assembler.h"
9 #include "vm/compiler.h" 9 #include "vm/compiler.h"
10 #include "vm/cpu.h" 10 #include "vm/cpu.h"
(...skipping 85 matching lines...) Expand 10 before | Expand all | Expand 10 after
96 __ StoreToOffset(kWord, R2, THR, Thread::vm_tag_offset()); 96 __ StoreToOffset(kWord, R2, THR, Thread::vm_tag_offset());
97 97
98 // Reset exit frame information in Isolate structure. 98 // Reset exit frame information in Isolate structure.
99 __ LoadImmediate(R2, 0); 99 __ LoadImmediate(R2, 0);
100 __ StoreToOffset(kWord, R2, THR, Thread::top_exit_frame_info_offset()); 100 __ StoreToOffset(kWord, R2, THR, Thread::top_exit_frame_info_offset());
101 101
102 __ LeaveStubFrame(); 102 __ LeaveStubFrame();
103 __ Ret(); 103 __ Ret();
104 } 104 }
105 105
106
107 // Print the stop message. 106 // Print the stop message.
108 DEFINE_LEAF_RUNTIME_ENTRY(void, PrintStopMessage, 1, const char* message) { 107 DEFINE_LEAF_RUNTIME_ENTRY(void, PrintStopMessage, 1, const char* message) {
109 OS::Print("Stop message: %s\n", message); 108 OS::Print("Stop message: %s\n", message);
110 } 109 }
111 END_LEAF_RUNTIME_ENTRY 110 END_LEAF_RUNTIME_ENTRY
112 111
113
114 // Input parameters: 112 // Input parameters:
115 // R0 : stop message (const char*). 113 // R0 : stop message (const char*).
116 // Must preserve all registers. 114 // Must preserve all registers.
117 void StubCode::GeneratePrintStopMessageStub(Assembler* assembler) { 115 void StubCode::GeneratePrintStopMessageStub(Assembler* assembler) {
118 __ EnterCallRuntimeFrame(0); 116 __ EnterCallRuntimeFrame(0);
119 // Call the runtime leaf function. R0 already contains the parameter. 117 // Call the runtime leaf function. R0 already contains the parameter.
120 __ CallRuntime(kPrintStopMessageRuntimeEntry, 1); 118 __ CallRuntime(kPrintStopMessageRuntimeEntry, 1);
121 __ LeaveCallRuntimeFrame(); 119 __ LeaveCallRuntimeFrame();
122 __ Ret(); 120 __ Ret();
123 } 121 }
124 122
125
126 // Input parameters: 123 // Input parameters:
127 // LR : return address. 124 // LR : return address.
128 // SP : address of return value. 125 // SP : address of return value.
129 // R9 : address of the native function to call. 126 // R9 : address of the native function to call.
130 // R2 : address of first argument in argument array. 127 // R2 : address of first argument in argument array.
131 // R1 : argc_tag including number of arguments and function kind. 128 // R1 : argc_tag including number of arguments and function kind.
132 static void GenerateCallNativeWithWrapperStub(Assembler* assembler, 129 static void GenerateCallNativeWithWrapperStub(Assembler* assembler,
133 Address wrapper) { 130 Address wrapper) {
134 const intptr_t thread_offset = NativeArguments::thread_offset(); 131 const intptr_t thread_offset = NativeArguments::thread_offset();
135 const intptr_t argc_tag_offset = NativeArguments::argc_tag_offset(); 132 const intptr_t argc_tag_offset = NativeArguments::argc_tag_offset();
(...skipping 62 matching lines...) Expand 10 before | Expand all | Expand 10 after
198 __ StoreToOffset(kWord, R2, THR, Thread::vm_tag_offset()); 195 __ StoreToOffset(kWord, R2, THR, Thread::vm_tag_offset());
199 196
200 // Reset exit frame information in Isolate structure. 197 // Reset exit frame information in Isolate structure.
201 __ LoadImmediate(R2, 0); 198 __ LoadImmediate(R2, 0);
202 __ StoreToOffset(kWord, R2, THR, Thread::top_exit_frame_info_offset()); 199 __ StoreToOffset(kWord, R2, THR, Thread::top_exit_frame_info_offset());
203 200
204 __ LeaveStubFrame(); 201 __ LeaveStubFrame();
205 __ Ret(); 202 __ Ret();
206 } 203 }
207 204
208
209 void StubCode::GenerateCallNoScopeNativeStub(Assembler* assembler) { 205 void StubCode::GenerateCallNoScopeNativeStub(Assembler* assembler) {
210 GenerateCallNativeWithWrapperStub( 206 GenerateCallNativeWithWrapperStub(
211 assembler, 207 assembler,
212 Address(THR, Thread::no_scope_native_wrapper_entry_point_offset())); 208 Address(THR, Thread::no_scope_native_wrapper_entry_point_offset()));
213 } 209 }
214 210
215
216 void StubCode::GenerateCallAutoScopeNativeStub(Assembler* assembler) { 211 void StubCode::GenerateCallAutoScopeNativeStub(Assembler* assembler) {
217 GenerateCallNativeWithWrapperStub( 212 GenerateCallNativeWithWrapperStub(
218 assembler, 213 assembler,
219 Address(THR, Thread::auto_scope_native_wrapper_entry_point_offset())); 214 Address(THR, Thread::auto_scope_native_wrapper_entry_point_offset()));
220 } 215 }
221 216
222
223 // Input parameters: 217 // Input parameters:
224 // LR : return address. 218 // LR : return address.
225 // SP : address of return value. 219 // SP : address of return value.
226 // R9 : address of the native function to call. 220 // R9 : address of the native function to call.
227 // R2 : address of first argument in argument array. 221 // R2 : address of first argument in argument array.
228 // R1 : argc_tag including number of arguments and function kind. 222 // R1 : argc_tag including number of arguments and function kind.
229 void StubCode::GenerateCallBootstrapNativeStub(Assembler* assembler) { 223 void StubCode::GenerateCallBootstrapNativeStub(Assembler* assembler) {
230 const intptr_t thread_offset = NativeArguments::thread_offset(); 224 const intptr_t thread_offset = NativeArguments::thread_offset();
231 const intptr_t argc_tag_offset = NativeArguments::argc_tag_offset(); 225 const intptr_t argc_tag_offset = NativeArguments::argc_tag_offset();
232 const intptr_t argv_offset = NativeArguments::argv_offset(); 226 const intptr_t argv_offset = NativeArguments::argv_offset();
(...skipping 58 matching lines...) Expand 10 before | Expand all | Expand 10 after
291 __ StoreToOffset(kWord, R2, THR, Thread::vm_tag_offset()); 285 __ StoreToOffset(kWord, R2, THR, Thread::vm_tag_offset());
292 286
293 // Reset exit frame information in Isolate structure. 287 // Reset exit frame information in Isolate structure.
294 __ LoadImmediate(R2, 0); 288 __ LoadImmediate(R2, 0);
295 __ StoreToOffset(kWord, R2, THR, Thread::top_exit_frame_info_offset()); 289 __ StoreToOffset(kWord, R2, THR, Thread::top_exit_frame_info_offset());
296 290
297 __ LeaveStubFrame(); 291 __ LeaveStubFrame();
298 __ Ret(); 292 __ Ret();
299 } 293 }
300 294
301
302 // Input parameters: 295 // Input parameters:
303 // R4: arguments descriptor array. 296 // R4: arguments descriptor array.
304 void StubCode::GenerateCallStaticFunctionStub(Assembler* assembler) { 297 void StubCode::GenerateCallStaticFunctionStub(Assembler* assembler) {
305 // Create a stub frame as we are pushing some objects on the stack before 298 // Create a stub frame as we are pushing some objects on the stack before
306 // calling into the runtime. 299 // calling into the runtime.
307 __ EnterStubFrame(); 300 __ EnterStubFrame();
308 // Setup space on stack for return value and preserve arguments descriptor. 301 // Setup space on stack for return value and preserve arguments descriptor.
309 __ LoadImmediate(R0, 0); 302 __ LoadImmediate(R0, 0);
310 __ PushList((1 << R0) | (1 << R4)); 303 __ PushList((1 << R0) | (1 << R4));
311 __ CallRuntime(kPatchStaticCallRuntimeEntry, 0); 304 __ CallRuntime(kPatchStaticCallRuntimeEntry, 0);
312 // Get Code object result and restore arguments descriptor array. 305 // Get Code object result and restore arguments descriptor array.
313 __ PopList((1 << R0) | (1 << R4)); 306 __ PopList((1 << R0) | (1 << R4));
314 // Remove the stub frame. 307 // Remove the stub frame.
315 __ LeaveStubFrame(); 308 __ LeaveStubFrame();
316 // Jump to the dart function. 309 // Jump to the dart function.
317 __ mov(CODE_REG, Operand(R0)); 310 __ mov(CODE_REG, Operand(R0));
318 __ ldr(R0, FieldAddress(R0, Code::entry_point_offset())); 311 __ ldr(R0, FieldAddress(R0, Code::entry_point_offset()));
319 __ bx(R0); 312 __ bx(R0);
320 } 313 }
321 314
322
323 // Called from a static call only when an invalid code has been entered 315 // Called from a static call only when an invalid code has been entered
324 // (invalid because its function was optimized or deoptimized). 316 // (invalid because its function was optimized or deoptimized).
325 // R4: arguments descriptor array. 317 // R4: arguments descriptor array.
326 void StubCode::GenerateFixCallersTargetStub(Assembler* assembler) { 318 void StubCode::GenerateFixCallersTargetStub(Assembler* assembler) {
327 // Load code pointer to this stub from the thread: 319 // Load code pointer to this stub from the thread:
328 // The one that is passed in, is not correct - it points to the code object 320 // The one that is passed in, is not correct - it points to the code object
329 // that needs to be replaced. 321 // that needs to be replaced.
330 __ ldr(CODE_REG, Address(THR, Thread::fix_callers_target_code_offset())); 322 __ ldr(CODE_REG, Address(THR, Thread::fix_callers_target_code_offset()));
331 // Create a stub frame as we are pushing some objects on the stack before 323 // Create a stub frame as we are pushing some objects on the stack before
332 // calling into the runtime. 324 // calling into the runtime.
333 __ EnterStubFrame(); 325 __ EnterStubFrame();
334 // Setup space on stack for return value and preserve arguments descriptor. 326 // Setup space on stack for return value and preserve arguments descriptor.
335 __ LoadImmediate(R0, 0); 327 __ LoadImmediate(R0, 0);
336 __ PushList((1 << R0) | (1 << R4)); 328 __ PushList((1 << R0) | (1 << R4));
337 __ CallRuntime(kFixCallersTargetRuntimeEntry, 0); 329 __ CallRuntime(kFixCallersTargetRuntimeEntry, 0);
338 // Get Code object result and restore arguments descriptor array. 330 // Get Code object result and restore arguments descriptor array.
339 __ PopList((1 << R0) | (1 << R4)); 331 __ PopList((1 << R0) | (1 << R4));
340 // Remove the stub frame. 332 // Remove the stub frame.
341 __ LeaveStubFrame(); 333 __ LeaveStubFrame();
342 // Jump to the dart function. 334 // Jump to the dart function.
343 __ mov(CODE_REG, Operand(R0)); 335 __ mov(CODE_REG, Operand(R0));
344 __ ldr(R0, FieldAddress(R0, Code::entry_point_offset())); 336 __ ldr(R0, FieldAddress(R0, Code::entry_point_offset()));
345 __ bx(R0); 337 __ bx(R0);
346 } 338 }
347 339
348
349 // Called from object allocate instruction when the allocation stub has been 340 // Called from object allocate instruction when the allocation stub has been
350 // disabled. 341 // disabled.
351 void StubCode::GenerateFixAllocationStubTargetStub(Assembler* assembler) { 342 void StubCode::GenerateFixAllocationStubTargetStub(Assembler* assembler) {
352 // Load code pointer to this stub from the thread: 343 // Load code pointer to this stub from the thread:
353 // The one that is passed in, is not correct - it points to the code object 344 // The one that is passed in, is not correct - it points to the code object
354 // that needs to be replaced. 345 // that needs to be replaced.
355 __ ldr(CODE_REG, Address(THR, Thread::fix_allocation_stub_code_offset())); 346 __ ldr(CODE_REG, Address(THR, Thread::fix_allocation_stub_code_offset()));
356 __ EnterStubFrame(); 347 __ EnterStubFrame();
357 // Setup space on stack for return value. 348 // Setup space on stack for return value.
358 __ LoadImmediate(R0, 0); 349 __ LoadImmediate(R0, 0);
359 __ Push(R0); 350 __ Push(R0);
360 __ CallRuntime(kFixAllocationStubTargetRuntimeEntry, 0); 351 __ CallRuntime(kFixAllocationStubTargetRuntimeEntry, 0);
361 // Get Code object result. 352 // Get Code object result.
362 __ Pop(R0); 353 __ Pop(R0);
363 // Remove the stub frame. 354 // Remove the stub frame.
364 __ LeaveStubFrame(); 355 __ LeaveStubFrame();
365 // Jump to the dart function. 356 // Jump to the dart function.
366 __ mov(CODE_REG, Operand(R0)); 357 __ mov(CODE_REG, Operand(R0));
367 __ ldr(R0, FieldAddress(R0, Code::entry_point_offset())); 358 __ ldr(R0, FieldAddress(R0, Code::entry_point_offset()));
368 __ bx(R0); 359 __ bx(R0);
369 } 360 }
370 361
371
372 // Input parameters: 362 // Input parameters:
373 // R2: smi-tagged argument count, may be zero. 363 // R2: smi-tagged argument count, may be zero.
374 // FP[kParamEndSlotFromFp + 1]: last argument. 364 // FP[kParamEndSlotFromFp + 1]: last argument.
375 static void PushArgumentsArray(Assembler* assembler) { 365 static void PushArgumentsArray(Assembler* assembler) {
376 // Allocate array to store arguments of caller. 366 // Allocate array to store arguments of caller.
377 __ LoadObject(R1, Object::null_object()); 367 __ LoadObject(R1, Object::null_object());
378 // R1: null element type for raw Array. 368 // R1: null element type for raw Array.
379 // R2: smi-tagged argument count, may be zero. 369 // R2: smi-tagged argument count, may be zero.
380 __ BranchLink(*StubCode::AllocateArray_entry()); 370 __ BranchLink(*StubCode::AllocateArray_entry());
381 // R0: newly allocated array. 371 // R0: newly allocated array.
382 // R2: smi-tagged argument count, may be zero (was preserved by the stub). 372 // R2: smi-tagged argument count, may be zero (was preserved by the stub).
383 __ Push(R0); // Array is in R0 and on top of stack. 373 __ Push(R0); // Array is in R0 and on top of stack.
384 __ AddImmediate(R1, FP, kParamEndSlotFromFp * kWordSize); 374 __ AddImmediate(R1, FP, kParamEndSlotFromFp * kWordSize);
385 __ AddImmediate(R3, R0, Array::data_offset() - kHeapObjectTag); 375 __ AddImmediate(R3, R0, Array::data_offset() - kHeapObjectTag);
386 // Copy arguments from stack to array (starting at the end). 376 // Copy arguments from stack to array (starting at the end).
387 // R1: address just beyond last argument on stack. 377 // R1: address just beyond last argument on stack.
388 // R3: address of first argument in array. 378 // R3: address of first argument in array.
389 Label enter; 379 Label enter;
390 __ b(&enter); 380 __ b(&enter);
391 Label loop; 381 Label loop;
392 __ Bind(&loop); 382 __ Bind(&loop);
393 __ ldr(IP, Address(R1, kWordSize, Address::PreIndex)); 383 __ ldr(IP, Address(R1, kWordSize, Address::PreIndex));
394 // Generational barrier is needed, array is not necessarily in new space. 384 // Generational barrier is needed, array is not necessarily in new space.
395 __ StoreIntoObject(R0, Address(R3, R2, LSL, 1), IP); 385 __ StoreIntoObject(R0, Address(R3, R2, LSL, 1), IP);
396 __ Bind(&enter); 386 __ Bind(&enter);
397 __ subs(R2, R2, Operand(Smi::RawValue(1))); // R2 is Smi. 387 __ subs(R2, R2, Operand(Smi::RawValue(1))); // R2 is Smi.
398 __ b(&loop, PL); 388 __ b(&loop, PL);
399 } 389 }
400 390
401
402 // Used by eager and lazy deoptimization. Preserve result in R0 if necessary. 391 // Used by eager and lazy deoptimization. Preserve result in R0 if necessary.
403 // This stub translates optimized frame into unoptimized frame. The optimized 392 // This stub translates optimized frame into unoptimized frame. The optimized
404 // frame can contain values in registers and on stack, the unoptimized 393 // frame can contain values in registers and on stack, the unoptimized
405 // frame contains all values on stack. 394 // frame contains all values on stack.
406 // Deoptimization occurs in following steps: 395 // Deoptimization occurs in following steps:
407 // - Push all registers that can contain values. 396 // - Push all registers that can contain values.
408 // - Call C routine to copy the stack and saved registers into temporary buffer. 397 // - Call C routine to copy the stack and saved registers into temporary buffer.
409 // - Adjust caller's frame to correct unoptimized frame size. 398 // - Adjust caller's frame to correct unoptimized frame size.
410 // - Fill the unoptimized frame. 399 // - Fill the unoptimized frame.
411 // - Materialize objects that require allocation (e.g. Double instances). 400 // - Materialize objects that require allocation (e.g. Double instances).
(...skipping 136 matching lines...) Expand 10 before | Expand all | Expand 10 after
548 } else if (kind == kLazyDeoptFromThrow) { 537 } else if (kind == kLazyDeoptFromThrow) {
549 __ Pop(R1); // Restore stacktrace. 538 __ Pop(R1); // Restore stacktrace.
550 __ Pop(R0); // Restore exception. 539 __ Pop(R0); // Restore exception.
551 } 540 }
552 __ LeaveStubFrame(); 541 __ LeaveStubFrame();
553 // Remove materialization arguments. 542 // Remove materialization arguments.
554 __ add(SP, SP, Operand(R2, ASR, kSmiTagSize)); 543 __ add(SP, SP, Operand(R2, ASR, kSmiTagSize));
555 // The caller is responsible for emitting the return instruction. 544 // The caller is responsible for emitting the return instruction.
556 } 545 }
557 546
558
559 // R0: result, must be preserved 547 // R0: result, must be preserved
560 void StubCode::GenerateDeoptimizeLazyFromReturnStub(Assembler* assembler) { 548 void StubCode::GenerateDeoptimizeLazyFromReturnStub(Assembler* assembler) {
561 // Push zap value instead of CODE_REG for lazy deopt. 549 // Push zap value instead of CODE_REG for lazy deopt.
562 __ LoadImmediate(IP, kZapCodeReg); 550 __ LoadImmediate(IP, kZapCodeReg);
563 __ Push(IP); 551 __ Push(IP);
564 // Return address for "call" to deopt stub. 552 // Return address for "call" to deopt stub.
565 __ LoadImmediate(LR, kZapReturnAddress); 553 __ LoadImmediate(LR, kZapReturnAddress);
566 __ ldr(CODE_REG, Address(THR, Thread::lazy_deopt_from_return_stub_offset())); 554 __ ldr(CODE_REG, Address(THR, Thread::lazy_deopt_from_return_stub_offset()));
567 GenerateDeoptimizationSequence(assembler, kLazyDeoptFromReturn); 555 GenerateDeoptimizationSequence(assembler, kLazyDeoptFromReturn);
568 __ Ret(); 556 __ Ret();
569 } 557 }
570 558
571
572 // R0: exception, must be preserved 559 // R0: exception, must be preserved
573 // R1: stacktrace, must be preserved 560 // R1: stacktrace, must be preserved
574 void StubCode::GenerateDeoptimizeLazyFromThrowStub(Assembler* assembler) { 561 void StubCode::GenerateDeoptimizeLazyFromThrowStub(Assembler* assembler) {
575 // Push zap value instead of CODE_REG for lazy deopt. 562 // Push zap value instead of CODE_REG for lazy deopt.
576 __ LoadImmediate(IP, kZapCodeReg); 563 __ LoadImmediate(IP, kZapCodeReg);
577 __ Push(IP); 564 __ Push(IP);
578 // Return address for "call" to deopt stub. 565 // Return address for "call" to deopt stub.
579 __ LoadImmediate(LR, kZapReturnAddress); 566 __ LoadImmediate(LR, kZapReturnAddress);
580 __ ldr(CODE_REG, Address(THR, Thread::lazy_deopt_from_throw_stub_offset())); 567 __ ldr(CODE_REG, Address(THR, Thread::lazy_deopt_from_throw_stub_offset()));
581 GenerateDeoptimizationSequence(assembler, kLazyDeoptFromThrow); 568 GenerateDeoptimizationSequence(assembler, kLazyDeoptFromThrow);
582 __ Ret(); 569 __ Ret();
583 } 570 }
584 571
585
586 void StubCode::GenerateDeoptimizeStub(Assembler* assembler) { 572 void StubCode::GenerateDeoptimizeStub(Assembler* assembler) {
587 GenerateDeoptimizationSequence(assembler, kEagerDeopt); 573 GenerateDeoptimizationSequence(assembler, kEagerDeopt);
588 __ Ret(); 574 __ Ret();
589 } 575 }
590 576
591
592 static void GenerateDispatcherCode(Assembler* assembler, 577 static void GenerateDispatcherCode(Assembler* assembler,
593 Label* call_target_function) { 578 Label* call_target_function) {
594 __ Comment("NoSuchMethodDispatch"); 579 __ Comment("NoSuchMethodDispatch");
595 // When lazily generated invocation dispatchers are disabled, the 580 // When lazily generated invocation dispatchers are disabled, the
596 // miss-handler may return null. 581 // miss-handler may return null.
597 __ CompareObject(R0, Object::null_object()); 582 __ CompareObject(R0, Object::null_object());
598 __ b(call_target_function, NE); 583 __ b(call_target_function, NE);
599 __ EnterStubFrame(); 584 __ EnterStubFrame();
600 // Load the receiver. 585 // Load the receiver.
601 __ ldr(R2, FieldAddress(R4, ArgumentsDescriptor::count_offset())); 586 __ ldr(R2, FieldAddress(R4, ArgumentsDescriptor::count_offset()));
(...skipping 13 matching lines...) Expand all
615 // R2: Smi-tagged arguments array length. 600 // R2: Smi-tagged arguments array length.
616 PushArgumentsArray(assembler); 601 PushArgumentsArray(assembler);
617 const intptr_t kNumArgs = 4; 602 const intptr_t kNumArgs = 4;
618 __ CallRuntime(kInvokeNoSuchMethodDispatcherRuntimeEntry, kNumArgs); 603 __ CallRuntime(kInvokeNoSuchMethodDispatcherRuntimeEntry, kNumArgs);
619 __ Drop(4); 604 __ Drop(4);
620 __ Pop(R0); // Return value. 605 __ Pop(R0); // Return value.
621 __ LeaveStubFrame(); 606 __ LeaveStubFrame();
622 __ Ret(); 607 __ Ret();
623 } 608 }
624 609
625
626 void StubCode::GenerateMegamorphicMissStub(Assembler* assembler) { 610 void StubCode::GenerateMegamorphicMissStub(Assembler* assembler) {
627 __ EnterStubFrame(); 611 __ EnterStubFrame();
628 612
629 // Load the receiver. 613 // Load the receiver.
630 __ ldr(R2, FieldAddress(R4, ArgumentsDescriptor::count_offset())); 614 __ ldr(R2, FieldAddress(R4, ArgumentsDescriptor::count_offset()));
631 __ add(IP, FP, Operand(R2, LSL, 1)); // R2 is Smi. 615 __ add(IP, FP, Operand(R2, LSL, 1)); // R2 is Smi.
632 __ ldr(R8, Address(IP, kParamEndSlotFromFp * kWordSize)); 616 __ ldr(R8, Address(IP, kParamEndSlotFromFp * kWordSize));
633 617
634 // Preserve IC data and arguments descriptor. 618 // Preserve IC data and arguments descriptor.
635 __ PushList((1 << R4) | (1 << R9)); 619 __ PushList((1 << R4) | (1 << R9));
(...skipping 19 matching lines...) Expand all
655 GenerateDispatcherCode(assembler, &call_target_function); 639 GenerateDispatcherCode(assembler, &call_target_function);
656 __ Bind(&call_target_function); 640 __ Bind(&call_target_function);
657 } 641 }
658 642
659 // Tail-call to target function. 643 // Tail-call to target function.
660 __ ldr(CODE_REG, FieldAddress(R0, Function::code_offset())); 644 __ ldr(CODE_REG, FieldAddress(R0, Function::code_offset()));
661 __ ldr(R2, FieldAddress(R0, Function::entry_point_offset())); 645 __ ldr(R2, FieldAddress(R0, Function::entry_point_offset()));
662 __ bx(R2); 646 __ bx(R2);
663 } 647 }
664 648
665
666 // Called for inline allocation of arrays. 649 // Called for inline allocation of arrays.
667 // Input parameters: 650 // Input parameters:
668 // LR: return address. 651 // LR: return address.
669 // R1: array element type (either NULL or an instantiated type). 652 // R1: array element type (either NULL or an instantiated type).
670 // R2: array length as Smi (must be preserved). 653 // R2: array length as Smi (must be preserved).
671 // The newly allocated object is returned in R0. 654 // The newly allocated object is returned in R0.
672 void StubCode::GenerateAllocateArrayStub(Assembler* assembler) { 655 void StubCode::GenerateAllocateArrayStub(Assembler* assembler) {
673 Label slow_case; 656 Label slow_case;
674 // Compute the size to be allocated, it is based on the array length 657 // Compute the size to be allocated, it is based on the array length
675 // and is computed as: 658 // and is computed as:
(...skipping 102 matching lines...) Expand 10 before | Expand all | Expand 10 after
778 // Push array length as Smi and element type. 761 // Push array length as Smi and element type.
779 __ PushList((1 << R1) | (1 << R2) | (1 << IP)); 762 __ PushList((1 << R1) | (1 << R2) | (1 << IP));
780 __ CallRuntime(kAllocateArrayRuntimeEntry, 2); 763 __ CallRuntime(kAllocateArrayRuntimeEntry, 2);
781 // Pop arguments; result is popped in IP. 764 // Pop arguments; result is popped in IP.
782 __ PopList((1 << R1) | (1 << R2) | (1 << IP)); // R2 is restored. 765 __ PopList((1 << R1) | (1 << R2) | (1 << IP)); // R2 is restored.
783 __ mov(R0, Operand(IP)); 766 __ mov(R0, Operand(IP));
784 __ LeaveStubFrame(); 767 __ LeaveStubFrame();
785 __ Ret(); 768 __ Ret();
786 } 769 }
787 770
788
789 // Called when invoking Dart code from C++ (VM code). 771 // Called when invoking Dart code from C++ (VM code).
790 // Input parameters: 772 // Input parameters:
791 // LR : points to return address. 773 // LR : points to return address.
792 // R0 : code object of the Dart function to call. 774 // R0 : code object of the Dart function to call.
793 // R1 : arguments descriptor array. 775 // R1 : arguments descriptor array.
794 // R2 : arguments array. 776 // R2 : arguments array.
795 // R3 : current thread. 777 // R3 : current thread.
796 void StubCode::GenerateInvokeDartCodeStub(Assembler* assembler) { 778 void StubCode::GenerateInvokeDartCodeStub(Assembler* assembler) {
797 // Save frame pointer coming in. 779 // Save frame pointer coming in.
798 __ EnterFrame((1 << FP) | (1 << LR), 0); 780 __ EnterFrame((1 << FP) | (1 << LR), 0);
(...skipping 100 matching lines...) Expand 10 before | Expand all | Expand 10 after
899 } 881 }
900 // Restore CPU registers. 882 // Restore CPU registers.
901 __ PopList(kAbiPreservedCpuRegs); 883 __ PopList(kAbiPreservedCpuRegs);
902 __ set_constant_pool_allowed(false); 884 __ set_constant_pool_allowed(false);
903 885
904 // Restore the frame pointer and return. 886 // Restore the frame pointer and return.
905 __ LeaveFrame((1 << FP) | (1 << LR)); 887 __ LeaveFrame((1 << FP) | (1 << LR));
906 __ Ret(); 888 __ Ret();
907 } 889 }
908 890
909
910 // Called for inline allocation of contexts. 891 // Called for inline allocation of contexts.
911 // Input: 892 // Input:
912 // R1: number of context variables. 893 // R1: number of context variables.
913 // Output: 894 // Output:
914 // R0: new allocated RawContext object. 895 // R0: new allocated RawContext object.
915 void StubCode::GenerateAllocateContextStub(Assembler* assembler) { 896 void StubCode::GenerateAllocateContextStub(Assembler* assembler) {
916 if (FLAG_inline_alloc) { 897 if (FLAG_inline_alloc) {
917 Label slow_case; 898 Label slow_case;
918 // First compute the rounded instance size. 899 // First compute the rounded instance size.
919 // R1: number of context variables. 900 // R1: number of context variables.
(...skipping 101 matching lines...) Expand 10 before | Expand all | Expand 10 after
1021 __ PushList((1 << R1) | (1 << R2)); 1002 __ PushList((1 << R1) | (1 << R2));
1022 __ CallRuntime(kAllocateContextRuntimeEntry, 1); // Allocate context. 1003 __ CallRuntime(kAllocateContextRuntimeEntry, 1); // Allocate context.
1023 __ Drop(1); // Pop number of context variables argument. 1004 __ Drop(1); // Pop number of context variables argument.
1024 __ Pop(R0); // Pop the new context object. 1005 __ Pop(R0); // Pop the new context object.
1025 // R0: new object 1006 // R0: new object
1026 // Restore the frame pointer. 1007 // Restore the frame pointer.
1027 __ LeaveStubFrame(); 1008 __ LeaveStubFrame();
1028 __ Ret(); 1009 __ Ret();
1029 } 1010 }
1030 1011
1031
1032 // Helper stub to implement Assembler::StoreIntoObject. 1012 // Helper stub to implement Assembler::StoreIntoObject.
1033 // Input parameters: 1013 // Input parameters:
1034 // R0: address (i.e. object) being stored into. 1014 // R0: address (i.e. object) being stored into.
1035 void StubCode::GenerateUpdateStoreBufferStub(Assembler* assembler) { 1015 void StubCode::GenerateUpdateStoreBufferStub(Assembler* assembler) {
1036 // Save values being destroyed. 1016 // Save values being destroyed.
1037 __ PushList((1 << R1) | (1 << R2) | (1 << R3)); 1017 __ PushList((1 << R1) | (1 << R2) | (1 << R3));
1038 1018
1039 Label add_to_buffer; 1019 Label add_to_buffer;
1040 // Check whether this object has already been remembered. Skip adding to the 1020 // Check whether this object has already been remembered. Skip adding to the
1041 // store buffer if the object is in the store buffer already. 1021 // store buffer if the object is in the store buffer already.
(...skipping 52 matching lines...) Expand 10 before | Expand all | Expand 10 after
1094 // Setup frame, push callee-saved registers. 1074 // Setup frame, push callee-saved registers.
1095 1075
1096 __ EnterCallRuntimeFrame(0 * kWordSize); 1076 __ EnterCallRuntimeFrame(0 * kWordSize);
1097 __ mov(R0, Operand(THR)); 1077 __ mov(R0, Operand(THR));
1098 __ CallRuntime(kStoreBufferBlockProcessRuntimeEntry, 1); 1078 __ CallRuntime(kStoreBufferBlockProcessRuntimeEntry, 1);
1099 // Restore callee-saved registers, tear down frame. 1079 // Restore callee-saved registers, tear down frame.
1100 __ LeaveCallRuntimeFrame(); 1080 __ LeaveCallRuntimeFrame();
1101 __ Ret(); 1081 __ Ret();
1102 } 1082 }
1103 1083
1104
1105 // Called for inline allocation of objects. 1084 // Called for inline allocation of objects.
1106 // Input parameters: 1085 // Input parameters:
1107 // LR : return address. 1086 // LR : return address.
1108 // SP + 0 : type arguments object (only if class is parameterized). 1087 // SP + 0 : type arguments object (only if class is parameterized).
1109 void StubCode::GenerateAllocationStubForClass(Assembler* assembler, 1088 void StubCode::GenerateAllocationStubForClass(Assembler* assembler,
1110 const Class& cls) { 1089 const Class& cls) {
1111 // Must load pool pointer before being able to patch. 1090 // Must load pool pointer before being able to patch.
1112 Register new_pp = NOTFP; 1091 Register new_pp = NOTFP;
1113 __ LoadPoolPointer(new_pp); 1092 __ LoadPoolPointer(new_pp);
1114 // The generated code is different if the class is parameterized. 1093 // The generated code is different if the class is parameterized.
(...skipping 116 matching lines...) Expand 10 before | Expand all | Expand 10 after
1231 } 1210 }
1232 __ CallRuntime(kAllocateObjectRuntimeEntry, 2); // Allocate object. 1211 __ CallRuntime(kAllocateObjectRuntimeEntry, 2); // Allocate object.
1233 __ Drop(2); // Pop arguments. 1212 __ Drop(2); // Pop arguments.
1234 __ Pop(R0); // Pop result (newly allocated object). 1213 __ Pop(R0); // Pop result (newly allocated object).
1235 // R0: new object 1214 // R0: new object
1236 // Restore the frame pointer. 1215 // Restore the frame pointer.
1237 __ LeaveStubFrame(); 1216 __ LeaveStubFrame();
1238 __ Ret(); 1217 __ Ret();
1239 } 1218 }
1240 1219
1241
1242 // Called for invoking "dynamic noSuchMethod(Invocation invocation)" function 1220 // Called for invoking "dynamic noSuchMethod(Invocation invocation)" function
1243 // from the entry code of a dart function after an error in passed argument 1221 // from the entry code of a dart function after an error in passed argument
1244 // name or number is detected. 1222 // name or number is detected.
1245 // Input parameters: 1223 // Input parameters:
1246 // LR : return address. 1224 // LR : return address.
1247 // SP : address of last argument. 1225 // SP : address of last argument.
1248 // R4: arguments descriptor array. 1226 // R4: arguments descriptor array.
1249 void StubCode::GenerateCallClosureNoSuchMethodStub(Assembler* assembler) { 1227 void StubCode::GenerateCallClosureNoSuchMethodStub(Assembler* assembler) {
1250 __ EnterStubFrame(); 1228 __ EnterStubFrame();
1251 1229
(...skipping 15 matching lines...) Expand all
1267 1245
1268 // R2: Smi-tagged arguments array length. 1246 // R2: Smi-tagged arguments array length.
1269 PushArgumentsArray(assembler); 1247 PushArgumentsArray(assembler);
1270 1248
1271 const intptr_t kNumArgs = 3; 1249 const intptr_t kNumArgs = 3;
1272 __ CallRuntime(kInvokeClosureNoSuchMethodRuntimeEntry, kNumArgs); 1250 __ CallRuntime(kInvokeClosureNoSuchMethodRuntimeEntry, kNumArgs);
1273 // noSuchMethod on closures always throws an error, so it will never return. 1251 // noSuchMethod on closures always throws an error, so it will never return.
1274 __ bkpt(0); 1252 __ bkpt(0);
1275 } 1253 }
1276 1254
1277
// Bumps the usage counter of an optimized function so the compiler can decide
// when to re-optimize/deoptimize it.
// R8: function object.
// R9: inline cache data object.
// Cannot use function object from ICData as it may be the inlined
// function and not the top-scope function.
void StubCode::GenerateOptimizedUsageCounterIncrement(Assembler* assembler) {
  Register ic_reg = R9;
  Register func_reg = R8;
  if (FLAG_trace_optimized_ic_calls) {
    // Tracing path: call the runtime, preserving R8/R9 around the call.
    __ EnterStubFrame();
    __ PushList((1 << R9) | (1 << R8));  // Preserve.
    __ Push(ic_reg);                     // Argument.
    __ Push(func_reg);                   // Argument.
    __ CallRuntime(kTraceICCallRuntimeEntry, 2);
    __ Drop(2);                          // Discard argument;
    __ PopList((1 << R9) | (1 << R8));   // Restore.
    __ LeaveStubFrame();
  }
  // Load, increment and store back Function::usage_counter_.
  // NOTFP is used as a scratch register here.
  __ ldr(NOTFP, FieldAddress(func_reg, Function::usage_counter_offset()));
  __ add(NOTFP, NOTFP, Operand(1));
  __ str(NOTFP, FieldAddress(func_reg, Function::usage_counter_offset()));
}
1299 1276
1300
// Bumps the usage counter of the function owning the ICData in R9.
// Loads function into 'temp_reg'.
void StubCode::GenerateUsageCounterIncrement(Assembler* assembler,
                                             Register temp_reg) {
  // A negative threshold disables optimization counters entirely, so the
  // increment is skipped in that configuration.
  if (FLAG_optimization_counter_threshold >= 0) {
    Register ic_reg = R9;
    Register func_reg = temp_reg;
    ASSERT(temp_reg == R8);
    __ Comment("Increment function counter");
    // The function is reached through ICData::owner_.
    __ ldr(func_reg, FieldAddress(ic_reg, ICData::owner_offset()));
    __ ldr(NOTFP, FieldAddress(func_reg, Function::usage_counter_offset()));
    __ add(NOTFP, NOTFP, Operand(1));
    __ str(NOTFP, FieldAddress(func_reg, Function::usage_counter_offset()));
  }
}
1315 1291
1316
1317 // Note: R9 must be preserved. 1292 // Note: R9 must be preserved.
1318 // Attempt a quick Smi operation for known operations ('kind'). The ICData 1293 // Attempt a quick Smi operation for known operations ('kind'). The ICData
1319 // must have been primed with a Smi/Smi check that will be used for counting 1294 // must have been primed with a Smi/Smi check that will be used for counting
1320 // the invocations. 1295 // the invocations.
1321 static void EmitFastSmiOp(Assembler* assembler, 1296 static void EmitFastSmiOp(Assembler* assembler,
1322 Token::Kind kind, 1297 Token::Kind kind,
1323 intptr_t num_args, 1298 intptr_t num_args,
1324 Label* not_smi_or_overflow) { 1299 Label* not_smi_or_overflow) {
1325 __ Comment("Fast Smi op"); 1300 __ Comment("Fast Smi op");
1326 __ ldr(R0, Address(SP, 0 * kWordSize)); 1301 __ ldr(R0, Address(SP, 0 * kWordSize));
(...skipping 43 matching lines...) Expand 10 before | Expand all | Expand 10 after
1370 if (FLAG_optimization_counter_threshold >= 0) { 1345 if (FLAG_optimization_counter_threshold >= 0) {
1371 // Update counter, ignore overflow. 1346 // Update counter, ignore overflow.
1372 const intptr_t count_offset = ICData::CountIndexFor(num_args) * kWordSize; 1347 const intptr_t count_offset = ICData::CountIndexFor(num_args) * kWordSize;
1373 __ LoadFromOffset(kWord, R1, R8, count_offset); 1348 __ LoadFromOffset(kWord, R1, R8, count_offset);
1374 __ adds(R1, R1, Operand(Smi::RawValue(1))); 1349 __ adds(R1, R1, Operand(Smi::RawValue(1)));
1375 __ StoreIntoSmiField(Address(R8, count_offset), R1); 1350 __ StoreIntoSmiField(Address(R8, count_offset), R1);
1376 } 1351 }
1377 __ Ret(); 1352 __ Ret();
1378 } 1353 }
1379 1354
1380
1381 // Generate inline cache check for 'num_args'. 1355 // Generate inline cache check for 'num_args'.
1382 // LR: return address. 1356 // LR: return address.
1383 // R9: inline cache data object. 1357 // R9: inline cache data object.
1384 // Control flow: 1358 // Control flow:
1385 // - If receiver is null -> jump to IC miss. 1359 // - If receiver is null -> jump to IC miss.
1386 // - If receiver is Smi -> load Smi class. 1360 // - If receiver is Smi -> load Smi class.
1387 // - If receiver is not-Smi -> load receiver's class. 1361 // - If receiver is not-Smi -> load receiver's class.
1388 // - Check if 'num_args' (including receiver) match any IC data group. 1362 // - Check if 'num_args' (including receiver) match any IC data group.
1389 // - Match found -> jump to target. 1363 // - Match found -> jump to target.
1390 // - Match not found -> jump to IC miss. 1364 // - Match not found -> jump to IC miss.
(...skipping 154 matching lines...) Expand 10 before | Expand all | Expand 10 after
1545 __ EnterStubFrame(); 1519 __ EnterStubFrame();
1546 __ Push(R9); // Preserve IC data. 1520 __ Push(R9); // Preserve IC data.
1547 __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0); 1521 __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
1548 __ Pop(R9); 1522 __ Pop(R9);
1549 __ RestoreCodePointer(); 1523 __ RestoreCodePointer();
1550 __ LeaveStubFrame(); 1524 __ LeaveStubFrame();
1551 __ b(&done_stepping); 1525 __ b(&done_stepping);
1552 } 1526 }
1553 } 1527 }
1554 1528
1555
// Use inline cache data array to invoke the target or continue in inline
// cache miss handler. Stub for 1-argument check (receiver class).
// LR: return address.
// R9: inline cache data object.
// Inline cache data object structure:
// 0: function-name
// 1: N, number of arguments checked.
// 2 .. (length - 1): group of checks, each check containing:
//   - N classes.
//   - 1 target function.
void StubCode::GenerateOneArgCheckInlineCacheStub(Assembler* assembler) {
  GenerateUsageCounterIncrement(assembler, R8);
  // Token::kILLEGAL: no fast Smi-operation shortcut for generic 1-arg calls.
  GenerateNArgsCheckInlineCacheStub(
      assembler, 1, kInlineCacheMissHandlerOneArgRuntimeEntry, Token::kILLEGAL);
}
1571 1544
1572
// Same as the 1-argument IC stub above, but checks two arguments
// (receiver class plus one argument class).
void StubCode::GenerateTwoArgsCheckInlineCacheStub(Assembler* assembler) {
  GenerateUsageCounterIncrement(assembler, R8);
  GenerateNArgsCheckInlineCacheStub(assembler, 2,
                                    kInlineCacheMissHandlerTwoArgsRuntimeEntry,
                                    Token::kILLEGAL);
}
1579 1551
1580
// Two-argument IC stub with a fast path for Smi '+' (Token::kADD).
void StubCode::GenerateSmiAddInlineCacheStub(Assembler* assembler) {
  GenerateUsageCounterIncrement(assembler, R8);
  GenerateNArgsCheckInlineCacheStub(
      assembler, 2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kADD);
}
1586 1557
1587
// Two-argument IC stub with a fast path for Smi '-' (Token::kSUB).
void StubCode::GenerateSmiSubInlineCacheStub(Assembler* assembler) {
  GenerateUsageCounterIncrement(assembler, R8);
  GenerateNArgsCheckInlineCacheStub(
      assembler, 2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kSUB);
}
1593 1563
1594
// Two-argument IC stub with a fast path for Smi '==' (Token::kEQ).
void StubCode::GenerateSmiEqualInlineCacheStub(Assembler* assembler) {
  GenerateUsageCounterIncrement(assembler, R8);
  GenerateNArgsCheckInlineCacheStub(
      assembler, 2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kEQ);
}
1600 1569
1601
// 1-argument IC stub variant for optimized code: uses the optimized
// usage-counter increment (function object in R8, not read from ICData).
void StubCode::GenerateOneArgOptimizedCheckInlineCacheStub(
    Assembler* assembler) {
  GenerateOptimizedUsageCounterIncrement(assembler);
  GenerateNArgsCheckInlineCacheStub(assembler, 1,
                                    kInlineCacheMissHandlerOneArgRuntimeEntry,
                                    Token::kILLEGAL, true /* optimized */);
}
1609 1577
1610
// 2-argument IC stub variant for optimized code; see the 1-argument
// optimized variant above.
void StubCode::GenerateTwoArgsOptimizedCheckInlineCacheStub(
    Assembler* assembler) {
  GenerateOptimizedUsageCounterIncrement(assembler);
  GenerateNArgsCheckInlineCacheStub(assembler, 2,
                                    kInlineCacheMissHandlerTwoArgsRuntimeEntry,
                                    Token::kILLEGAL, true /* optimized */);
}
1618 1585
1619
1620 // Intermediary stub between a static call and its target. ICData contains 1586 // Intermediary stub between a static call and its target. ICData contains
1621 // the target function and the call count. 1587 // the target function and the call count.
1622 // R9: ICData 1588 // R9: ICData
1623 void StubCode::GenerateZeroArgsUnoptimizedStaticCallStub(Assembler* assembler) { 1589 void StubCode::GenerateZeroArgsUnoptimizedStaticCallStub(Assembler* assembler) {
1624 GenerateUsageCounterIncrement(assembler, R8); 1590 GenerateUsageCounterIncrement(assembler, R8);
1625 #if defined(DEBUG) 1591 #if defined(DEBUG)
1626 { 1592 {
1627 Label ok; 1593 Label ok;
1628 // Check that the IC data array has NumArgsTested() == 0. 1594 // Check that the IC data array has NumArgsTested() == 0.
1629 // 'NumArgsTested' is stored in the least significant bits of 'state_bits'. 1595 // 'NumArgsTested' is stored in the least significant bits of 'state_bits'.
(...skipping 46 matching lines...) Expand 10 before | Expand all | Expand 10 after
1676 __ EnterStubFrame(); 1642 __ EnterStubFrame();
1677 __ Push(R9); // Preserve IC data. 1643 __ Push(R9); // Preserve IC data.
1678 __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0); 1644 __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
1679 __ Pop(R9); 1645 __ Pop(R9);
1680 __ RestoreCodePointer(); 1646 __ RestoreCodePointer();
1681 __ LeaveStubFrame(); 1647 __ LeaveStubFrame();
1682 __ b(&done_stepping); 1648 __ b(&done_stepping);
1683 } 1649 }
1684 } 1650 }
1685 1651
1686
// Unoptimized static call stub checking one argument; misses go to the
// static-call miss handler instead of the instance-call one.
void StubCode::GenerateOneArgUnoptimizedStaticCallStub(Assembler* assembler) {
  GenerateUsageCounterIncrement(assembler, R8);
  GenerateNArgsCheckInlineCacheStub(
      assembler, 1, kStaticCallMissHandlerOneArgRuntimeEntry, Token::kILLEGAL);
}
1692 1657
1693
// Unoptimized static call stub checking two arguments; see the 1-argument
// variant above.
void StubCode::GenerateTwoArgsUnoptimizedStaticCallStub(Assembler* assembler) {
  GenerateUsageCounterIncrement(assembler, R8);
  GenerateNArgsCheckInlineCacheStub(
      assembler, 2, kStaticCallMissHandlerTwoArgsRuntimeEntry, Token::kILLEGAL);
}
1699 1663
1700
// Stub for compiling a function and jumping to the compiled code.
// R9: IC-Data (for methods).
// R4: Arguments descriptor.
// R0: Function.
void StubCode::GenerateLazyCompileStub(Assembler* assembler) {
  // Preserve arg desc. and IC data object.
  __ EnterStubFrame();
  __ PushList((1 << R4) | (1 << R9));
  __ Push(R0);  // Pass function.
  __ CallRuntime(kCompileFunctionRuntimeEntry, 1);
  __ Pop(R0);  // Restore argument.
  __ PopList((1 << R4) | (1 << R9));  // Restore arg desc. and IC data.
  __ LeaveStubFrame();

  // Tail-call the freshly compiled code through the function's entry point.
  __ ldr(CODE_REG, FieldAddress(R0, Function::code_offset()));
  __ ldr(R2, FieldAddress(R0, Function::entry_point_offset()));
  __ bx(R2);
}
1719 1682
1720
// Breakpoint trampoline for IC calls: asks the runtime for the code to run
// and tail-calls it.
// R9: Contains an ICData.
void StubCode::GenerateICCallBreakpointStub(Assembler* assembler) {
  __ EnterStubFrame();
  __ LoadImmediate(R0, 0);
  // Preserve arguments descriptor and make room for result.
  __ PushList((1 << R0) | (1 << R9));
  __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0);
  // R0 now holds the result slot filled in by the runtime.
  __ PopList((1 << R0) | (1 << R9));
  __ LeaveStubFrame();
  __ mov(CODE_REG, Operand(R0));
  __ ldr(R0, FieldAddress(CODE_REG, Code::entry_point_offset()));
  __ bx(R0);
}
1734 1696
1735
// Breakpoint trampoline for runtime calls: fetches the code object to resume
// from the runtime and tail-calls its entry point.
void StubCode::GenerateRuntimeCallBreakpointStub(Assembler* assembler) {
  __ EnterStubFrame();
  __ LoadImmediate(R0, 0);
  // Make room for result.
  __ PushList((1 << R0));
  __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0);
  // Pop the runtime's result directly into CODE_REG.
  __ PopList((1 << CODE_REG));
  __ LeaveStubFrame();
  __ ldr(R0, FieldAddress(CODE_REG, Code::entry_point_offset()));
  __ bx(R0);
}
1747 1708
1748
// Called only from unoptimized code. All relevant registers have been saved.
void StubCode::GenerateDebugStepCheckStub(Assembler* assembler) {
  // Check single stepping: fast path returns immediately when the isolate's
  // single_step flag is clear.
  Label stepping, done_stepping;
  __ LoadIsolate(R1);
  __ ldrb(R1, Address(R1, Isolate::single_step_offset()));
  __ CompareImmediate(R1, 0);
  __ b(&stepping, NE);
  __ Bind(&done_stepping);
  __ Ret();

  // Slow path: notify the debugger, then retry via done_stepping.
  __ Bind(&stepping);
  __ EnterStubFrame();
  __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
  __ LeaveStubFrame();
  __ b(&done_stepping);
}
1766 1726
1767
1768 // Used to check class and type arguments. Arguments passed in registers: 1727 // Used to check class and type arguments. Arguments passed in registers:
1769 // LR: return address. 1728 // LR: return address.
1770 // R0: instance (must be preserved). 1729 // R0: instance (must be preserved).
1771 // R2: instantiator type arguments (only if n == 4, can be raw_null). 1730 // R2: instantiator type arguments (only if n == 4, can be raw_null).
1772 // R1: function type arguments (only if n == 4, can be raw_null). 1731 // R1: function type arguments (only if n == 4, can be raw_null).
1773 // R3: SubtypeTestCache. 1732 // R3: SubtypeTestCache.
1774 // Result in R1: null -> not found, otherwise result (true or false). 1733 // Result in R1: null -> not found, otherwise result (true or false).
1775 static void GenerateSubtypeNTestCacheStub(Assembler* assembler, int n) { 1734 static void GenerateSubtypeNTestCacheStub(Assembler* assembler, int n) {
1776 ASSERT((n == 1) || (n == 2) || (n == 4)); 1735 ASSERT((n == 1) || (n == 2) || (n == 4));
1777 if (n > 1) { 1736 if (n > 1) {
(...skipping 65 matching lines...) Expand 10 before | Expand all | Expand 10 after
1843 // Fall through to not found. 1802 // Fall through to not found.
1844 __ Bind(&not_found); 1803 __ Bind(&not_found);
1845 __ LoadObject(R1, Object::null_object()); 1804 __ LoadObject(R1, Object::null_object());
1846 __ Ret(); 1805 __ Ret();
1847 1806
1848 __ Bind(&found); 1807 __ Bind(&found);
1849 __ ldr(R1, Address(R3, kWordSize * SubtypeTestCache::kTestResult)); 1808 __ ldr(R1, Address(R3, kWordSize * SubtypeTestCache::kTestResult));
1850 __ Ret(); 1809 __ Ret();
1851 } 1810 }
1852 1811
1853
// Used to check class and type arguments. Arguments passed in registers:
// LR: return address.
// R0: instance (must be preserved).
// R2: unused.
// R1: unused.
// R3: SubtypeTestCache.
// Result in R1: null -> not found, otherwise result (true or false).
void StubCode::GenerateSubtype1TestCacheStub(Assembler* assembler) {
  // 1-entry check: instance class id only.
  GenerateSubtypeNTestCacheStub(assembler, 1);
}
1864 1822
1865
// Used to check class and type arguments. Arguments passed in registers:
// LR: return address.
// R0: instance (must be preserved).
// R2: unused.
// R1: unused.
// R3: SubtypeTestCache.
// Result in R1: null -> not found, otherwise result (true or false).
void StubCode::GenerateSubtype2TestCacheStub(Assembler* assembler) {
  // 2-entry check; see GenerateSubtypeNTestCacheStub for the cache layout.
  GenerateSubtypeNTestCacheStub(assembler, 2);
}
1876 1833
1877
// Used to check class and type arguments. Arguments passed in registers:
// LR: return address.
// R0: instance (must be preserved).
// R2: instantiator type arguments (can be raw_null).
// R1: function type arguments (can be raw_null).
// R3: SubtypeTestCache.
// Result in R1: null -> not found, otherwise result (true or false).
void StubCode::GenerateSubtype4TestCacheStub(Assembler* assembler) {
  // 4-entry check: also compares instantiator/function type arguments.
  GenerateSubtypeNTestCacheStub(assembler, 4);
}
1888 1844
1889
// Return the current stack pointer address, used to do stack alignment checks.
void StubCode::GenerateGetCStackPointerStub(Assembler* assembler) {
  __ mov(R0, Operand(SP));
  __ Ret();
}
1895 1850
1896
// Jump to a frame on the call stack.
// LR: return address.
// R0: program_counter.
// R1: stack_pointer.
// R2: frame_pointer.
// R3: thread.
// Does not return.
void StubCode::GenerateJumpToFrameStub(Assembler* assembler) {
  ASSERT(kExceptionObjectReg == R0);
  ASSERT(kStackTraceObjectReg == R1);
  // Stage the new SP in IP first so the other register moves below cannot
  // clobber it before it is installed.
  __ mov(IP, Operand(R1));   // Copy Stack pointer into IP.
  __ mov(LR, Operand(R0));   // Program counter.
  __ mov(THR, Operand(R3));  // Thread.
  __ mov(FP, Operand(R2));   // Frame_pointer.
  __ mov(SP, Operand(IP));   // Set Stack pointer.
  // Set the tag.
  __ LoadImmediate(R2, VMTag::kDartTagId);
  __ StoreToOffset(kWord, R2, THR, Thread::vm_tag_offset());
  // Clear top exit frame.
  __ LoadImmediate(R2, 0);
  __ StoreToOffset(kWord, R2, THR, Thread::top_exit_frame_info_offset());
  // Restore the pool pointer.
  __ RestoreCodePointer();
  __ LoadPoolPointer();
  __ bx(LR);  // Jump to continuation point.
}
1923 1877
1924
// Run an exception handler. Execution comes from JumpToFrame
// stub or from the simulator.
//
// The arguments are stored in the Thread object.
// Does not return.
void StubCode::GenerateRunExceptionHandlerStub(Assembler* assembler) {
  __ LoadFromOffset(kWord, LR, THR, Thread::resume_pc_offset());
  // R2 = 0 is used below to clear each Thread slot after it is read.
  __ LoadImmediate(R2, 0);

  // Exception object.
  __ LoadFromOffset(kWord, R0, THR, Thread::active_exception_offset());
  __ StoreToOffset(kWord, R2, THR, Thread::active_exception_offset());

  // StackTrace object.
  __ LoadFromOffset(kWord, R1, THR, Thread::active_stacktrace_offset());
  __ StoreToOffset(kWord, R2, THR, Thread::active_stacktrace_offset());

  __ bx(LR);  // Jump to the exception handler code.
}
1944 1897
1945
// Deoptimize a frame on the call stack before rewinding.
// The arguments are stored in the Thread object.
// No result.
void StubCode::GenerateDeoptForRewindStub(Assembler* assembler) {
  // Push zap value instead of CODE_REG.
  __ LoadImmediate(IP, kZapCodeReg);
  __ Push(IP);

  // Load the deopt pc into LR.
  __ LoadFromOffset(kWord, LR, THR, Thread::resume_pc_offset());
  GenerateDeoptimizationSequence(assembler, kEagerDeopt);

  // After we have deoptimized, jump to the correct frame.
  __ EnterStubFrame();
  __ CallRuntime(kRewindPostDeoptRuntimeEntry, 0);
  __ LeaveStubFrame();
  __ bkpt(0);  // Trap if execution ever falls through.
}
1964 1916
1965
// Calls to the runtime to optimize the given function.
// R8: function to be reoptimized.
// R4: argument descriptor (preserved).
void StubCode::GenerateOptimizeFunctionStub(Assembler* assembler) {
  __ EnterStubFrame();
  __ Push(R4);
  __ LoadImmediate(IP, 0);
  __ Push(IP);  // Setup space on stack for return value.
  __ Push(R8);
  __ CallRuntime(kOptimizeInvokedFunctionRuntimeEntry, 1);
  __ Pop(R0);  // Discard argument.
  __ Pop(R0);  // Get Function object
  __ Pop(R4);  // Restore argument descriptor.
  __ LeaveStubFrame();
  // Tail-call the (re)optimized code via the function's entry point.
  __ ldr(CODE_REG, FieldAddress(R0, Function::code_offset()));
  __ ldr(R1, FieldAddress(R0, Function::entry_point_offset()));
  __ bx(R1);
  __ bkpt(0);  // Unreachable: bx above does not return here.
}
1985 1936
1986
1987 // Does identical check (object references are equal or not equal) with special 1937 // Does identical check (object references are equal or not equal) with special
1988 // checks for boxed numbers. 1938 // checks for boxed numbers.
1989 // LR: return address. 1939 // LR: return address.
1990 // Return Zero condition flag set if equal. 1940 // Return Zero condition flag set if equal.
1991 // Note: A Mint cannot contain a value that would fit in Smi, a Bigint 1941 // Note: A Mint cannot contain a value that would fit in Smi, a Bigint
1992 // cannot contain a value that fits in Mint or Smi. 1942 // cannot contain a value that fits in Mint or Smi.
1993 static void GenerateIdenticalWithNumberCheckStub(Assembler* assembler, 1943 static void GenerateIdenticalWithNumberCheckStub(Assembler* assembler,
1994 const Register left, 1944 const Register left,
1995 const Register right, 1945 const Register right,
1996 const Register temp) { 1946 const Register temp) {
(...skipping 46 matching lines...) Expand 10 before | Expand all | Expand 10 after
2043 // Result in R0, 0 means equal. 1993 // Result in R0, 0 means equal.
2044 __ LeaveStubFrame(); 1994 __ LeaveStubFrame();
2045 __ cmp(R0, Operand(0)); 1995 __ cmp(R0, Operand(0));
2046 __ b(&done); 1996 __ b(&done);
2047 1997
2048 __ Bind(&reference_compare); 1998 __ Bind(&reference_compare);
2049 __ cmp(left, Operand(right)); 1999 __ cmp(left, Operand(right));
2050 __ Bind(&done); 2000 __ Bind(&done);
2051 } 2001 }
2052 2002
2053
2054 // Called only from unoptimized code. All relevant registers have been saved. 2003 // Called only from unoptimized code. All relevant registers have been saved.
2055 // LR: return address. 2004 // LR: return address.
2056 // SP + 4: left operand. 2005 // SP + 4: left operand.
2057 // SP + 0: right operand. 2006 // SP + 0: right operand.
2058 // Return Zero condition flag set if equal. 2007 // Return Zero condition flag set if equal.
2059 void StubCode::GenerateUnoptimizedIdenticalWithNumberCheckStub( 2008 void StubCode::GenerateUnoptimizedIdenticalWithNumberCheckStub(
2060 Assembler* assembler) { 2009 Assembler* assembler) {
2061 // Check single stepping. 2010 // Check single stepping.
2062 Label stepping, done_stepping; 2011 Label stepping, done_stepping;
2063 if (FLAG_support_debugger) { 2012 if (FLAG_support_debugger) {
(...skipping 15 matching lines...) Expand all
2079 if (FLAG_support_debugger) { 2028 if (FLAG_support_debugger) {
2080 __ Bind(&stepping); 2029 __ Bind(&stepping);
2081 __ EnterStubFrame(); 2030 __ EnterStubFrame();
2082 __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0); 2031 __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
2083 __ RestoreCodePointer(); 2032 __ RestoreCodePointer();
2084 __ LeaveStubFrame(); 2033 __ LeaveStubFrame();
2085 __ b(&done_stepping); 2034 __ b(&done_stepping);
2086 } 2035 }
2087 } 2036 }
2088 2037
2089
2090 // Called from optimized code only. 2038 // Called from optimized code only.
2091 // LR: return address. 2039 // LR: return address.
2092 // SP + 4: left operand. 2040 // SP + 4: left operand.
2093 // SP + 0: right operand. 2041 // SP + 0: right operand.
2094 // Return Zero condition flag set if equal. 2042 // Return Zero condition flag set if equal.
2095 void StubCode::GenerateOptimizedIdenticalWithNumberCheckStub( 2043 void StubCode::GenerateOptimizedIdenticalWithNumberCheckStub(
2096 Assembler* assembler) { 2044 Assembler* assembler) {
2097 const Register temp = R2; 2045 const Register temp = R2;
2098 const Register left = R1; 2046 const Register left = R1;
2099 const Register right = R0; 2047 const Register right = R0;
2100 __ ldr(left, Address(SP, 1 * kWordSize)); 2048 __ ldr(left, Address(SP, 1 * kWordSize));
2101 __ ldr(right, Address(SP, 0 * kWordSize)); 2049 __ ldr(right, Address(SP, 0 * kWordSize));
2102 GenerateIdenticalWithNumberCheckStub(assembler, left, right, temp); 2050 GenerateIdenticalWithNumberCheckStub(assembler, left, right, temp);
2103 __ Ret(); 2051 __ Ret();
2104 } 2052 }
2105 2053
2106
2107 // Called from megamorphic calls. 2054 // Called from megamorphic calls.
2108 // R0: receiver 2055 // R0: receiver
2109 // R9: MegamorphicCache (preserved) 2056 // R9: MegamorphicCache (preserved)
2110 // Passed to target: 2057 // Passed to target:
2111 // CODE_REG: target Code 2058 // CODE_REG: target Code
2112 // R4: arguments descriptor 2059 // R4: arguments descriptor
2113 void StubCode::GenerateMegamorphicCallStub(Assembler* assembler) { 2060 void StubCode::GenerateMegamorphicCallStub(Assembler* assembler) {
2114 __ LoadTaggedClassIdMayBeSmi(R0, R0); 2061 __ LoadTaggedClassIdMayBeSmi(R0, R0);
2115 // R0: receiver cid as Smi. 2062 // R0: receiver cid as Smi.
2116 __ ldr(R2, FieldAddress(R9, MegamorphicCache::buckets_offset())); 2063 __ ldr(R2, FieldAddress(R9, MegamorphicCache::buckets_offset()));
(...skipping 34 matching lines...) Expand 10 before | Expand all | Expand 10 after
2151 __ Bind(&probe_failed); 2098 __ Bind(&probe_failed);
2152 ASSERT(kIllegalCid == 0); 2099 ASSERT(kIllegalCid == 0);
2153 __ tst(R6, Operand(R6)); 2100 __ tst(R6, Operand(R6));
2154 __ b(&load_target, EQ); // branch if miss. 2101 __ b(&load_target, EQ); // branch if miss.
2155 2102
2156 // Try next entry in the table. 2103 // Try next entry in the table.
2157 __ AddImmediate(R3, Smi::RawValue(1)); 2104 __ AddImmediate(R3, Smi::RawValue(1));
2158 __ b(&loop); 2105 __ b(&loop);
2159 } 2106 }
2160 2107
2161
2162 // Called from switchable IC calls. 2108 // Called from switchable IC calls.
2163 // R0: receiver 2109 // R0: receiver
2164 // R9: ICData (preserved) 2110 // R9: ICData (preserved)
2165 // Passed to target: 2111 // Passed to target:
2166 // CODE_REG: target Code object 2112 // CODE_REG: target Code object
2167 // R4: arguments descriptor 2113 // R4: arguments descriptor
2168 void StubCode::GenerateICCallThroughFunctionStub(Assembler* assembler) { 2114 void StubCode::GenerateICCallThroughFunctionStub(Assembler* assembler) {
2169 Label loop, found, miss; 2115 Label loop, found, miss;
2170 __ ldr(R4, FieldAddress(R9, ICData::arguments_descriptor_offset())); 2116 __ ldr(R4, FieldAddress(R9, ICData::arguments_descriptor_offset()));
2171 __ ldr(R8, FieldAddress(R9, ICData::ic_data_offset())); 2117 __ ldr(R8, FieldAddress(R9, ICData::ic_data_offset()));
(...skipping 20 matching lines...) Expand all
2192 __ ldr(CODE_REG, FieldAddress(R0, Function::code_offset())); 2138 __ ldr(CODE_REG, FieldAddress(R0, Function::code_offset()));
2193 __ bx(R1); 2139 __ bx(R1);
2194 2140
2195 __ Bind(&miss); 2141 __ Bind(&miss);
2196 __ LoadIsolate(R2); 2142 __ LoadIsolate(R2);
2197 __ ldr(CODE_REG, Address(R2, Isolate::ic_miss_code_offset())); 2143 __ ldr(CODE_REG, Address(R2, Isolate::ic_miss_code_offset()));
2198 __ ldr(R1, FieldAddress(CODE_REG, Code::entry_point_offset())); 2144 __ ldr(R1, FieldAddress(CODE_REG, Code::entry_point_offset()));
2199 __ bx(R1); 2145 __ bx(R1);
2200 } 2146 }
2201 2147
2202
2203 void StubCode::GenerateICCallThroughCodeStub(Assembler* assembler) { 2148 void StubCode::GenerateICCallThroughCodeStub(Assembler* assembler) {
2204 Label loop, found, miss; 2149 Label loop, found, miss;
2205 __ ldr(R4, FieldAddress(R9, ICData::arguments_descriptor_offset())); 2150 __ ldr(R4, FieldAddress(R9, ICData::arguments_descriptor_offset()));
2206 __ ldr(R8, FieldAddress(R9, ICData::ic_data_offset())); 2151 __ ldr(R8, FieldAddress(R9, ICData::ic_data_offset()));
2207 __ AddImmediate(R8, Array::data_offset() - kHeapObjectTag); 2152 __ AddImmediate(R8, Array::data_offset() - kHeapObjectTag);
2208 // R8: first IC entry 2153 // R8: first IC entry
2209 __ LoadTaggedClassIdMayBeSmi(R1, R0); 2154 __ LoadTaggedClassIdMayBeSmi(R1, R0);
2210 // R1: receiver cid as Smi 2155 // R1: receiver cid as Smi
2211 2156
2212 __ Bind(&loop); 2157 __ Bind(&loop);
(...skipping 14 matching lines...) Expand all
2227 __ ldr(CODE_REG, Address(R8, code_offset)); 2172 __ ldr(CODE_REG, Address(R8, code_offset));
2228 __ bx(R1); 2173 __ bx(R1);
2229 2174
2230 __ Bind(&miss); 2175 __ Bind(&miss);
2231 __ LoadIsolate(R2); 2176 __ LoadIsolate(R2);
2232 __ ldr(CODE_REG, Address(R2, Isolate::ic_miss_code_offset())); 2177 __ ldr(CODE_REG, Address(R2, Isolate::ic_miss_code_offset()));
2233 __ ldr(R1, FieldAddress(CODE_REG, Code::entry_point_offset())); 2178 __ ldr(R1, FieldAddress(CODE_REG, Code::entry_point_offset()));
2234 __ bx(R1); 2179 __ bx(R1);
2235 } 2180 }
2236 2181
2237
2238 // Called from switchable IC calls. 2182 // Called from switchable IC calls.
2239 // R0: receiver 2183 // R0: receiver
2240 // R9: UnlinkedCall 2184 // R9: UnlinkedCall
2241 void StubCode::GenerateUnlinkedCallStub(Assembler* assembler) { 2185 void StubCode::GenerateUnlinkedCallStub(Assembler* assembler) {
2242 __ EnterStubFrame(); 2186 __ EnterStubFrame();
2243 __ Push(R0); // Preserve receiver. 2187 __ Push(R0); // Preserve receiver.
2244 2188
2245 __ LoadImmediate(IP, 0); 2189 __ LoadImmediate(IP, 0);
2246 __ Push(IP); // Result slot 2190 __ Push(IP); // Result slot
2247 __ Push(R0); // Arg0: Receiver 2191 __ Push(R0); // Arg0: Receiver
2248 __ Push(R9); // Arg1: UnlinkedCall 2192 __ Push(R9); // Arg1: UnlinkedCall
2249 __ CallRuntime(kUnlinkedCallRuntimeEntry, 2); 2193 __ CallRuntime(kUnlinkedCallRuntimeEntry, 2);
2250 __ Drop(2); 2194 __ Drop(2);
2251 __ Pop(R9); // result = IC 2195 __ Pop(R9); // result = IC
2252 2196
2253 __ Pop(R0); // Restore receiver. 2197 __ Pop(R0); // Restore receiver.
2254 __ LeaveStubFrame(); 2198 __ LeaveStubFrame();
2255 2199
2256 __ ldr(CODE_REG, Address(THR, Thread::ic_lookup_through_code_stub_offset())); 2200 __ ldr(CODE_REG, Address(THR, Thread::ic_lookup_through_code_stub_offset()));
2257 __ ldr(R1, FieldAddress(CODE_REG, Code::checked_entry_point_offset())); 2201 __ ldr(R1, FieldAddress(CODE_REG, Code::checked_entry_point_offset()));
2258 __ bx(R1); 2202 __ bx(R1);
2259 } 2203 }
2260 2204
2261
2262 // Called from switchable IC calls. 2205 // Called from switchable IC calls.
2263 // R0: receiver 2206 // R0: receiver
2264 // R9: SingleTargetCache 2207 // R9: SingleTargetCache
2265 // Passed to target: 2208 // Passed to target:
2266 // CODE_REG: target Code object 2209 // CODE_REG: target Code object
2267 void StubCode::GenerateSingleTargetCallStub(Assembler* assembler) { 2210 void StubCode::GenerateSingleTargetCallStub(Assembler* assembler) {
2268 Label miss; 2211 Label miss;
2269 __ LoadClassIdMayBeSmi(R1, R0); 2212 __ LoadClassIdMayBeSmi(R1, R0);
2270 __ ldrh(R2, FieldAddress(R9, SingleTargetCache::lower_limit_offset())); 2213 __ ldrh(R2, FieldAddress(R9, SingleTargetCache::lower_limit_offset()));
2271 __ ldrh(R3, FieldAddress(R9, SingleTargetCache::upper_limit_offset())); 2214 __ ldrh(R3, FieldAddress(R9, SingleTargetCache::upper_limit_offset()));
(...skipping 19 matching lines...) Expand all
2291 __ Pop(R9); // result = IC 2234 __ Pop(R9); // result = IC
2292 2235
2293 __ Pop(R0); // Restore receiver. 2236 __ Pop(R0); // Restore receiver.
2294 __ LeaveStubFrame(); 2237 __ LeaveStubFrame();
2295 2238
2296 __ ldr(CODE_REG, Address(THR, Thread::ic_lookup_through_code_stub_offset())); 2239 __ ldr(CODE_REG, Address(THR, Thread::ic_lookup_through_code_stub_offset()));
2297 __ ldr(R1, FieldAddress(CODE_REG, Code::checked_entry_point_offset())); 2240 __ ldr(R1, FieldAddress(CODE_REG, Code::checked_entry_point_offset()));
2298 __ bx(R1); 2241 __ bx(R1);
2299 } 2242 }
2300 2243
2301
2302 // Called from the monomorphic checked entry. 2244 // Called from the monomorphic checked entry.
2303 // R0: receiver 2245 // R0: receiver
2304 void StubCode::GenerateMonomorphicMissStub(Assembler* assembler) { 2246 void StubCode::GenerateMonomorphicMissStub(Assembler* assembler) {
2305 __ ldr(CODE_REG, Address(THR, Thread::monomorphic_miss_stub_offset())); 2247 __ ldr(CODE_REG, Address(THR, Thread::monomorphic_miss_stub_offset()));
2306 __ EnterStubFrame(); 2248 __ EnterStubFrame();
2307 __ Push(R0); // Preserve receiver. 2249 __ Push(R0); // Preserve receiver.
2308 2250
2309 __ LoadImmediate(IP, 0); 2251 __ LoadImmediate(IP, 0);
2310 __ Push(IP); // Result slot 2252 __ Push(IP); // Result slot
2311 __ Push(R0); // Arg0: Receiver 2253 __ Push(R0); // Arg0: Receiver
2312 __ CallRuntime(kMonomorphicMissRuntimeEntry, 1); 2254 __ CallRuntime(kMonomorphicMissRuntimeEntry, 1);
2313 __ Drop(1); 2255 __ Drop(1);
2314 __ Pop(R9); // result = IC 2256 __ Pop(R9); // result = IC
2315 2257
2316 __ Pop(R0); // Restore receiver. 2258 __ Pop(R0); // Restore receiver.
2317 __ LeaveStubFrame(); 2259 __ LeaveStubFrame();
2318 2260
2319 __ ldr(CODE_REG, Address(THR, Thread::ic_lookup_through_code_stub_offset())); 2261 __ ldr(CODE_REG, Address(THR, Thread::ic_lookup_through_code_stub_offset()));
2320 __ ldr(R1, FieldAddress(CODE_REG, Code::checked_entry_point_offset())); 2262 __ ldr(R1, FieldAddress(CODE_REG, Code::checked_entry_point_offset()));
2321 __ bx(R1); 2263 __ bx(R1);
2322 } 2264 }
2323 2265
2324
2325 void StubCode::GenerateFrameAwaitingMaterializationStub(Assembler* assembler) { 2266 void StubCode::GenerateFrameAwaitingMaterializationStub(Assembler* assembler) {
2326 __ bkpt(0); 2267 __ bkpt(0);
2327 } 2268 }
2328 2269
2329
2330 void StubCode::GenerateAsynchronousGapMarkerStub(Assembler* assembler) { 2270 void StubCode::GenerateAsynchronousGapMarkerStub(Assembler* assembler) {
2331 __ bkpt(0); 2271 __ bkpt(0);
2332 } 2272 }
2333 2273
2334 } // namespace dart 2274 } // namespace dart
2335 2275
2336 #endif // defined TARGET_ARCH_ARM 2276 #endif // defined TARGET_ARCH_ARM
OLDNEW
« no previous file with comments | « runtime/vm/stub_code.cc ('k') | runtime/vm/stub_code_arm64.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698