OLD | NEW |
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" | 5 #include "vm/globals.h" |
6 #if defined(TARGET_ARCH_X64) | 6 #if defined(TARGET_ARCH_X64) |
7 | 7 |
8 #include "vm/assembler.h" | 8 #include "vm/assembler.h" |
9 #include "vm/compiler.h" | 9 #include "vm/compiler.h" |
10 #include "vm/dart_entry.h" | 10 #include "vm/dart_entry.h" |
(...skipping 259 matching lines...)
270 | 270 |
271 | 271 |
272 // Input parameters: | 272 // Input parameters: |
273 // R10: arguments descriptor array. | 273 // R10: arguments descriptor array. |
274 void StubCode::GenerateCallStaticFunctionStub(Assembler* assembler) { | 274 void StubCode::GenerateCallStaticFunctionStub(Assembler* assembler) { |
275 __ EnterStubFrame(); | 275 __ EnterStubFrame(); |
276 __ pushq(R10); // Preserve arguments descriptor array. | 276 __ pushq(R10); // Preserve arguments descriptor array. |
277 // Setup space on stack for return value. | 277 // Setup space on stack for return value. |
278 __ PushObject(Object::null_object(), PP); | 278 __ PushObject(Object::null_object(), PP); |
279 __ CallRuntime(kPatchStaticCallRuntimeEntry, 0); | 279 __ CallRuntime(kPatchStaticCallRuntimeEntry, 0); |
280 __ popq(RAX); // Get Code object result. | 280 __ popq(CODE_REG); // Get Code object result. |
281 __ popq(R10); // Restore arguments descriptor array. | 281 __ popq(R10); // Restore arguments descriptor array. |
282 // Remove the stub frame as we are about to jump to the dart function. | 282 // Remove the stub frame as we are about to jump to the dart function. |
283 __ LeaveStubFrame(); | 283 __ LeaveStubFrame(); |
284 | 284 |
285 __ movq(RBX, FieldAddress(RAX, Code::instructions_offset())); | 285 __ movq(RBX, FieldAddress(CODE_REG, Code::instructions_offset())); |
286 __ addq(RBX, Immediate(Instructions::HeaderSize() - kHeapObjectTag)); | 286 __ addq(RBX, Immediate(Instructions::HeaderSize() - kHeapObjectTag)); |
287 __ jmp(RBX); | 287 __ jmp(RBX); |
288 } | 288 } |
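
Note: the stubs in this file now route through a tagged Code object rather than a raw entry address; the jump target is the Code object's Instructions, offset past the Instructions header and with the heap-object tag stripped. A standalone sketch of that address arithmetic (the tag and header-size values below are stand-ins for the real VM constants):

    #include <cstdint>
    #include <cstring>

    // Stand-ins for VM constants; the real values come from the VM headers.
    static const uintptr_t kHeapObjectTagSketch = 1;            // tag bit on heap pointers
    static const uintptr_t kInstructionsHeaderSizeSketch = 32;  // assumed header size

    // Mirrors:
    //   movq(RBX, FieldAddress(CODE_REG, Code::instructions_offset()));
    //   addq(RBX, Immediate(Instructions::HeaderSize() - kHeapObjectTag));
    // FieldAddress(reg, off) reads memory at reg + off - tag, since reg holds a
    // tagged pointer.
    uintptr_t EntryPointFromCode(uintptr_t tagged_code, uintptr_t instructions_offset) {
      uintptr_t tagged_instructions = 0;
      std::memcpy(&tagged_instructions,
                  reinterpret_cast<const void*>(
                      tagged_code + instructions_offset - kHeapObjectTagSketch),
                  sizeof(tagged_instructions));
      return tagged_instructions + kInstructionsHeaderSizeSketch - kHeapObjectTagSketch;
    }
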
289 | 289 |
290 | 290 |
291 // Called from a static call only when an invalid code has been entered | 291 // Called from a static call only when an invalid code has been entered |
292 // (invalid because its function was optimized or deoptimized). | 292 // (invalid because its function was optimized or deoptimized). |
293 // R10: arguments descriptor array. | 293 // R10: arguments descriptor array. |
294 void StubCode::GenerateFixCallersTargetStub(Assembler* assembler) { | 294 void StubCode::GenerateFixCallersTargetStub(Assembler* assembler) { |
295 __ EnterStubFrame(); | 295 __ EnterStubFrame(); |
296 __ pushq(R10); // Preserve arguments descriptor array. | 296 __ pushq(R10); // Preserve arguments descriptor array. |
297 // Setup space on stack for return value. | 297 // Setup space on stack for return value. |
298 __ PushObject(Object::null_object(), PP); | 298 __ PushObject(Object::null_object(), PP); |
299 __ CallRuntime(kFixCallersTargetRuntimeEntry, 0); | 299 __ CallRuntime(kFixCallersTargetRuntimeEntry, 0); |
300 __ popq(RAX); // Get Code object. | 300 __ popq(CODE_REG); // Get Code object. |
301 __ popq(R10); // Restore arguments descriptor array. | 301 __ popq(R10); // Restore arguments descriptor array. |
302 __ movq(RAX, FieldAddress(RAX, Code::instructions_offset())); | 302 __ movq(RAX, FieldAddress(CODE_REG, Code::instructions_offset())); |
303 __ addq(RAX, Immediate(Instructions::HeaderSize() - kHeapObjectTag)); | 303 __ addq(RAX, Immediate(Instructions::HeaderSize() - kHeapObjectTag)); |
304 __ LeaveStubFrame(); | 304 __ LeaveStubFrame(); |
305 __ jmp(RAX); | 305 __ jmp(RAX); |
306 __ int3(); | 306 __ int3(); |
307 } | 307 } |
308 | 308 |
309 | 309 |
310 // Called from object allocate instruction when the allocation stub has been | 310 // Called from object allocate instruction when the allocation stub has been |
311 // disabled. | 311 // disabled. |
312 void StubCode::GenerateFixAllocationStubTargetStub(Assembler* assembler) { | 312 void StubCode::GenerateFixAllocationStubTargetStub(Assembler* assembler) { |
313 __ EnterStubFrame(); | 313 __ EnterStubFrame(); |
314 // Setup space on stack for return value. | 314 // Setup space on stack for return value. |
315 __ PushObject(Object::null_object(), PP); | 315 __ PushObject(Object::null_object(), PP); |
316 __ CallRuntime(kFixAllocationStubTargetRuntimeEntry, 0); | 316 __ CallRuntime(kFixAllocationStubTargetRuntimeEntry, 0); |
317 __ popq(RAX); // Get Code object. | 317 __ popq(CODE_REG); // Get Code object. |
318 __ movq(RAX, FieldAddress(RAX, Code::instructions_offset())); | 318 __ movq(RAX, FieldAddress(CODE_REG, Code::instructions_offset())); |
319 __ addq(RAX, Immediate(Instructions::HeaderSize() - kHeapObjectTag)); | 319 __ addq(RAX, Immediate(Instructions::HeaderSize() - kHeapObjectTag)); |
320 __ LeaveStubFrame(); | 320 __ LeaveStubFrame(); |
321 __ jmp(RAX); | 321 __ jmp(RAX); |
322 __ int3(); | 322 __ int3(); |
323 } | 323 } |
324 | 324 |
325 | 325 |
326 // Called from array allocate instruction when the allocation stub has been | 326 // Called from array allocate instruction when the allocation stub has been |
327 // disabled. | 327 // disabled. |
328 // R10: length (preserved). | 328 // R10: length (preserved). |
329 // RBX: element type (preserved). | 329 // RBX: element type (preserved). |
330 void StubCode::GenerateFixAllocateArrayStubTargetStub(Assembler* assembler) { | 330 void StubCode::GenerateFixAllocateArrayStubTargetStub(Assembler* assembler) { |
331 __ EnterStubFrame(); | 331 __ EnterStubFrame(); |
332 __ pushq(R10); // Preserve length. | 332 __ pushq(R10); // Preserve length. |
333 __ pushq(RBX); // Preserve element type. | 333 __ pushq(RBX); // Preserve element type. |
334 // Setup space on stack for return value. | 334 // Setup space on stack for return value. |
335 __ PushObject(Object::null_object(), PP); | 335 __ PushObject(Object::null_object(), PP); |
336 __ CallRuntime(kFixAllocationStubTargetRuntimeEntry, 0); | 336 __ CallRuntime(kFixAllocationStubTargetRuntimeEntry, 0); |
337 __ popq(RAX); // Get Code object. | 337 __ popq(CODE_REG); // Get Code object. |
338 __ popq(RBX); // Restore element type. | 338 __ popq(RBX); // Restore element type. |
339 __ popq(R10); // Restore length. | 339 __ popq(R10); // Restore length. |
340 __ movq(RAX, FieldAddress(RAX, Code::instructions_offset())); | 340 __ movq(RAX, FieldAddress(CODE_REG, Code::instructions_offset())); |
341 __ addq(RAX, Immediate(Instructions::HeaderSize() - kHeapObjectTag)); | 341 __ addq(RAX, Immediate(Instructions::HeaderSize() - kHeapObjectTag)); |
342 __ LeaveStubFrame(); | 342 __ LeaveStubFrame(); |
343 __ jmp(RAX); | 343 __ jmp(RAX); |
344 __ int3(); | 344 __ int3(); |
345 } | 345 } |
346 | 346 |
347 | 347 |
348 // Input parameters: | 348 // Input parameters: |
349 // R10: smi-tagged argument count, may be zero. | 349 // R10: smi-tagged argument count, may be zero. |
350 // RBP[kParamEndSlotFromFp + 1]: last argument. | 350 // RBP[kParamEndSlotFromFp + 1]: last argument. |
351 static void PushArgumentsArray(Assembler* assembler) { | 351 static void PushArgumentsArray(Assembler* assembler) { |
352 StubCode* stub_code = Isolate::Current()->stub_code(); | 352 StubCode* stub_code = Isolate::Current()->stub_code(); |
353 | 353 |
354 __ LoadObject(R12, Object::null_object(), PP); | 354 __ LoadObject(R12, Object::null_object(), PP); |
355 // Allocate array to store arguments of caller. | 355 // Allocate array to store arguments of caller. |
356 __ movq(RBX, R12); // Null element type for raw Array. | 356 __ movq(RBX, R12); // Null element type for raw Array. |
357 const Code& array_stub = Code::Handle(stub_code->GetAllocateArrayStub()); | 357 const Code& array_stub = Code::Handle(stub_code->GetAllocateArrayStub()); |
358 const ExternalLabel array_label(array_stub.EntryPoint()); | 358 __ Call(array_stub, PP); |
359 __ call(&array_label); | |
360 __ SmiUntag(R10); | 359 __ SmiUntag(R10); |
361 // RAX: newly allocated array. | 360 // RAX: newly allocated array. |
362 // R10: length of the array (was preserved by the stub). | 361 // R10: length of the array (was preserved by the stub). |
363 __ pushq(RAX); // Array is in RAX and on top of stack. | 362 __ pushq(RAX); // Array is in RAX and on top of stack. |
364 __ leaq(R12, Address(RBP, R10, TIMES_8, kParamEndSlotFromFp * kWordSize)); | 363 __ leaq(R12, Address(RBP, R10, TIMES_8, kParamEndSlotFromFp * kWordSize)); |
365 __ leaq(RBX, FieldAddress(RAX, Array::data_offset())); | 364 __ leaq(RBX, FieldAddress(RAX, Array::data_offset())); |
366 // R12: address of first argument on stack. | 365 // R12: address of first argument on stack. |
367 // RBX: address of first argument in array. | 366 // RBX: address of first argument in array. |
368 Label loop, loop_condition; | 367 Label loop, loop_condition; |
369 #if defined(DEBUG) | 368 #if defined(DEBUG) |
370 static const bool kJumpLength = Assembler::kFarJump; | 369 static const bool kJumpLength = Assembler::kFarJump; |
371 #else | 370 #else |
372 static const bool kJumpLength = Assembler::kNearJump; | 371 static const bool kJumpLength = Assembler::kNearJump; |
373 #endif // DEBUG | 372 #endif // DEBUG |
374 __ jmp(&loop_condition, kJumpLength); | 373 __ jmp(&loop_condition, kJumpLength); |
375 __ Bind(&loop); | 374 __ Bind(&loop); |
376 __ movq(RDI, Address(R12, 0)); | 375 __ movq(RDI, Address(R12, 0)); |
377 // No generational barrier needed, since array is in new space. | 376 // No generational barrier needed, since array is in new space. |
378 __ InitializeFieldNoBarrier(RAX, Address(RBX, 0), RDI); | 377 __ InitializeFieldNoBarrier(RAX, Address(RBX, 0), RDI); |
379 __ addq(RBX, Immediate(kWordSize)); | 378 __ addq(RBX, Immediate(kWordSize)); |
380 __ subq(R12, Immediate(kWordSize)); | 379 __ subq(R12, Immediate(kWordSize)); |
381 __ Bind(&loop_condition); | 380 __ Bind(&loop_condition); |
382 __ decq(R10); | 381 __ decq(R10); |
383 __ j(POSITIVE, &loop, Assembler::kNearJump); | 382 __ j(POSITIVE, &loop, Assembler::kNearJump); |
384 } | 383 } |
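
Note: PushArgumentsArray copies the caller's arguments from the stack into the freshly allocated array: R12 starts at the first (highest-addressed) argument and steps down one word per iteration while RBX steps forward through the array's data area. Reduced to plain C++ over untyped pointers, the loop is roughly the sketch below (void* stands in for the VM's tagged object pointers):

    #include <cstdint>
    #include <vector>

    std::vector<const void*> CopyArgumentsToArray(const void* const* first_arg_slot,
                                                  int64_t count) {
      std::vector<const void*> data(static_cast<size_t>(count));
      const void* const* stack_cursor = first_arg_slot;  // R12
      for (int64_t i = 0; i < count; ++i) {
        data[static_cast<size_t>(i)] = *stack_cursor;  // InitializeFieldNoBarrier(...)
        --stack_cursor;  // subq(R12, Immediate(kWordSize)): next argument sits one word lower
      }
      return data;
    }
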
385 | 384 |
386 | 385 |
387 DECLARE_LEAF_RUNTIME_ENTRY(intptr_t, DeoptimizeCopyFrame, | 386 DECLARE_LEAF_RUNTIME_ENTRY(intptr_t, DeoptimizeCopyFrame, |
388 intptr_t deopt_reason, | 387 uword saved_registers_address, |
389 uword saved_registers_address); | 388 uword is_lazy_deopt); |
390 | 389 |
391 DECLARE_LEAF_RUNTIME_ENTRY(void, DeoptimizeFillFrame, uword last_fp); | 390 DECLARE_LEAF_RUNTIME_ENTRY(void, DeoptimizeFillFrame, uword last_fp); |
392 | 391 |
393 | 392 |
| 393 enum DeoptStubKind { |
| 394 kLazyDeopt, |
| 395 kEagerDeopt |
| 396 }; |
| 397 |
394 // Used by eager and lazy deoptimization. Preserve result in RAX if necessary. | 398 // Used by eager and lazy deoptimization. Preserve result in RAX if necessary. |
395 // This stub translates optimized frame into unoptimized frame. The optimized | 399 // This stub translates optimized frame into unoptimized frame. The optimized |
396 // frame can contain values in registers and on stack, the unoptimized | 400 // frame can contain values in registers and on stack, the unoptimized |
397 // frame contains all values on stack. | 401 // frame contains all values on stack. |
398 // Deoptimization occurs in following steps: | 402 // Deoptimization occurs in following steps: |
399 // - Push all registers that can contain values. | 403 // - Push all registers that can contain values. |
400 // - Call C routine to copy the stack and saved registers into temporary buffer. | 404 // - Call C routine to copy the stack and saved registers into temporary buffer. |
401 // - Adjust caller's frame to correct unoptimized frame size. | 405 // - Adjust caller's frame to correct unoptimized frame size. |
402 // - Fill the unoptimized frame. | 406 // - Fill the unoptimized frame. |
403 // - Materialize objects that require allocation (e.g. Double instances). | 407 // - Materialize objects that require allocation (e.g. Double instances). |
404 // GC can occur only after frame is fully rewritten. | 408 // GC can occur only after frame is fully rewritten. |
405 // Stack after EnterDartFrame(0, PP, kNoRegister) below: | 409 // Stack after EnterDartFrame(0, PP, kNoRegister) below: |
406 // +------------------+ | 410 // +------------------+ |
407 // | Saved PP | <- PP | 411 // | Saved PP | <- PP |
408 // +------------------+ | 412 // +------------------+ |
409 // | PC marker | <- TOS | 413 // | PC marker | <- TOS |
410 // +------------------+ | 414 // +------------------+ |
411 // | Saved FP | <- FP of stub | 415 // | Saved FP | <- FP of stub |
412 // +------------------+ | 416 // +------------------+ |
413 // | return-address | (deoptimization point) | 417 // | return-address | (deoptimization point) |
414 // +------------------+ | 418 // +------------------+ |
415 // | ... | <- SP of optimized frame | 419 // | ... | <- SP of optimized frame |
416 // | 420 // |
417 // Parts of the code cannot GC, part of the code can GC. | 421 // Parts of the code cannot GC, part of the code can GC. |
418 static void GenerateDeoptimizationSequence(Assembler* assembler, | 422 static void GenerateDeoptimizationSequence(Assembler* assembler, |
419 bool preserve_result) { | 423 DeoptStubKind kind) { |
420 // DeoptimizeCopyFrame expects a Dart frame, i.e. EnterDartFrame(0), but there | 424 // DeoptimizeCopyFrame expects a Dart frame, i.e. EnterDartFrame(0), but there |
421 // is no need to set the correct PC marker or load PP, since they get patched. | 425 // is no need to set the correct PC marker or load PP, since they get patched. |
422 __ EnterFrame(0); | 426 __ EnterDartFrame(0, kNoRegister); |
423 __ pushq(Immediate(0)); | |
424 __ pushq(PP); | |
425 | 427 |
426 // The code in this frame may not cause GC. kDeoptimizeCopyFrameRuntimeEntry | 428 // The code in this frame may not cause GC. kDeoptimizeCopyFrameRuntimeEntry |
427 // and kDeoptimizeFillFrameRuntimeEntry are leaf runtime calls. | 429 // and kDeoptimizeFillFrameRuntimeEntry are leaf runtime calls. |
428 const intptr_t saved_result_slot_from_fp = | 430 const intptr_t saved_result_slot_from_fp = |
429 kFirstLocalSlotFromFp + 1 - (kNumberOfCpuRegisters - RAX); | 431 kFirstLocalSlotFromFp + 1 - (kNumberOfCpuRegisters - RAX); |
430 // Result in RAX is preserved as part of pushing all registers below. | 432 // Result in RAX is preserved as part of pushing all registers below. |
431 | 433 |
432 // Push registers in their enumeration order: lowest register number at | 434 // Push registers in their enumeration order: lowest register number at |
433 // lowest address. | 435 // lowest address. |
434 for (intptr_t i = kNumberOfCpuRegisters - 1; i >= 0; i--) { | 436 for (intptr_t i = kNumberOfCpuRegisters - 1; i >= 0; i--) { |
435 __ pushq(static_cast<Register>(i)); | 437 __ pushq(static_cast<Register>(i)); |
436 } | 438 } |
437 __ subq(RSP, Immediate(kNumberOfXmmRegisters * kFpuRegisterSize)); | 439 __ subq(RSP, Immediate(kNumberOfXmmRegisters * kFpuRegisterSize)); |
438 intptr_t offset = 0; | 440 intptr_t offset = 0; |
439 for (intptr_t reg_idx = 0; reg_idx < kNumberOfXmmRegisters; ++reg_idx) { | 441 for (intptr_t reg_idx = 0; reg_idx < kNumberOfXmmRegisters; ++reg_idx) { |
440 XmmRegister xmm_reg = static_cast<XmmRegister>(reg_idx); | 442 XmmRegister xmm_reg = static_cast<XmmRegister>(reg_idx); |
441 __ movups(Address(RSP, offset), xmm_reg); | 443 __ movups(Address(RSP, offset), xmm_reg); |
442 offset += kFpuRegisterSize; | 444 offset += kFpuRegisterSize; |
443 } | 445 } |
444 | 446 |
445 // Pass address of saved registers block. | 447 // Pass address of saved registers block. |
446 __ movq(CallingConventions::kArg1Reg, RSP); | 448 __ movq(CallingConventions::kArg1Reg, RSP); |
| 449 __ movq(CallingConventions::kArg2Reg, Immediate(kind == kLazyDeopt ? 1 : 0)); |
447 __ ReserveAlignedFrameSpace(0); // Ensure stack is aligned before the call. | 450 __ ReserveAlignedFrameSpace(0); // Ensure stack is aligned before the call. |
448 __ CallRuntime(kDeoptimizeCopyFrameRuntimeEntry, 1); | 451 __ CallRuntime(kDeoptimizeCopyFrameRuntimeEntry, 2); |
449 // Result (RAX) is stack-size (FP - SP) in bytes. | 452 // Result (RAX) is stack-size (FP - SP) in bytes. |
450 | 453 |
451 if (preserve_result) { | 454 if (kind == kLazyDeopt) { |
452 // Restore result into RBX temporarily. | 455 // Restore result into RBX temporarily. |
453 __ movq(RBX, Address(RBP, saved_result_slot_from_fp * kWordSize)); | 456 __ movq(RBX, Address(RBP, saved_result_slot_from_fp * kWordSize)); |
454 } | 457 } |
455 | 458 |
456 // There is a Dart Frame on the stack. We must restore PP and leave frame. | 459 // There is a Dart Frame on the stack. We must restore PP and leave frame. |
| 460 __ RestoreCodePointer(); |
457 __ LeaveDartFrame(); | 461 __ LeaveDartFrame(); |
458 | 462 |
459 __ popq(RCX); // Preserve return address. | 463 __ popq(RCX); // Preserve return address. |
460 __ movq(RSP, RBP); // Discard optimized frame. | 464 __ movq(RSP, RBP); // Discard optimized frame. |
461 __ subq(RSP, RAX); // Reserve space for deoptimized frame. | 465 __ subq(RSP, RAX); // Reserve space for deoptimized frame. |
462 __ pushq(RCX); // Restore return address. | 466 __ pushq(RCX); // Restore return address. |
463 | 467 |
464 // DeoptimizeFillFrame expects a Dart frame, i.e. EnterDartFrame(0), but there | 468 // DeoptimizeFillFrame expects a Dart frame, i.e. EnterDartFrame(0), but there |
465 // is no need to set the correct PC marker or load PP, since they get patched. | 469 // is no need to set the correct PC marker or load PP, since they get patched. |
466 __ EnterFrame(0); | 470 __ EnterDartFrame(0, kNoRegister); |
467 __ pushq(Immediate(0)); | |
468 __ pushq(PP); | |
469 | 471 |
470 if (preserve_result) { | 472 if (kind == kLazyDeopt) { |
471 __ pushq(RBX); // Preserve result as first local. | 473 __ pushq(RBX); // Preserve result as first local. |
472 } | 474 } |
473 __ ReserveAlignedFrameSpace(0); | 475 __ ReserveAlignedFrameSpace(0); |
474 // Pass last FP as a parameter. | 476 // Pass last FP as a parameter. |
475 __ movq(CallingConventions::kArg1Reg, RBP); | 477 __ movq(CallingConventions::kArg1Reg, RBP); |
476 __ CallRuntime(kDeoptimizeFillFrameRuntimeEntry, 1); | 478 __ CallRuntime(kDeoptimizeFillFrameRuntimeEntry, 1); |
477 if (preserve_result) { | 479 if (kind == kLazyDeopt) { |
478 // Restore result into RBX. | 480 // Restore result into RBX. |
479 __ movq(RBX, Address(RBP, kFirstLocalSlotFromFp * kWordSize)); | 481 __ movq(RBX, Address(RBP, kFirstLocalSlotFromFp * kWordSize)); |
480 } | 482 } |
481 // Code above cannot cause GC. | 483 // Code above cannot cause GC. |
482 // There is a Dart Frame on the stack. We must restore PP and leave frame. | 484 // There is a Dart Frame on the stack. We must restore PP and leave frame. |
| 485 __ RestoreCodePointer(); |
483 __ LeaveDartFrame(); | 486 __ LeaveDartFrame(); |
484 | 487 |
485 // Frame is fully rewritten at this point and it is safe to perform a GC. | 488 // Frame is fully rewritten at this point and it is safe to perform a GC. |
486 // Materialize any objects that were deferred by FillFrame because they | 489 // Materialize any objects that were deferred by FillFrame because they |
487 // require allocation. | 490 // require allocation. |
488 // Enter stub frame with loading PP. The caller's PP is not materialized yet. | 491 // Enter stub frame with loading PP. The caller's PP is not materialized yet. |
489 __ EnterStubFrame(); | 492 __ EnterStubFrame(); |
490 if (preserve_result) { | 493 if (kind == kLazyDeopt) { |
491 __ pushq(Immediate(0)); // Workaround for dropped stack slot during GC. | 494 __ pushq(Immediate(0)); // Workaround for dropped stack slot during GC. |
492 __ pushq(RBX); // Preserve result, it will be GC-d here. | 495 __ pushq(RBX); // Preserve result, it will be GC-d here. |
493 } | 496 } |
494 __ pushq(Immediate(Smi::RawValue(0))); // Space for the result. | 497 __ pushq(Immediate(Smi::RawValue(0))); // Space for the result. |
495 __ CallRuntime(kDeoptimizeMaterializeRuntimeEntry, 0); | 498 __ CallRuntime(kDeoptimizeMaterializeRuntimeEntry, 0); |
496 // Result tells stub how many bytes to remove from the expression stack | 499 // Result tells stub how many bytes to remove from the expression stack |
497 // of the bottom-most frame. They were used as materialization arguments. | 500 // of the bottom-most frame. They were used as materialization arguments. |
498 __ popq(RBX); | 501 __ popq(RBX); |
499 __ SmiUntag(RBX); | 502 __ SmiUntag(RBX); |
500 if (preserve_result) { | 503 if (kind == kLazyDeopt) { |
501 __ popq(RAX); // Restore result. | 504 __ popq(RAX); // Restore result. |
502 __ Drop(1); // Workaround for dropped stack slot during GC. | 505 __ Drop(1); // Workaround for dropped stack slot during GC. |
503 } | 506 } |
504 __ LeaveStubFrame(); | 507 __ LeaveStubFrame(); |
505 | 508 |
506 __ popq(RCX); // Pop return address. | 509 __ popq(RCX); // Pop return address. |
507 __ addq(RSP, RBX); // Remove materialization arguments. | 510 __ addq(RSP, RBX); // Remove materialization arguments. |
508 __ pushq(RCX); // Push return address. | 511 __ pushq(RCX); // Push return address. |
509 __ ret(); | 512 __ ret(); |
510 } | 513 } |
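
Note: the saved_result_slot_from_fp constant used above follows from the push order. The CPU registers are pushed from the highest enumeration index down to 0, so the register with index i lands (kNumberOfCpuRegisters - 1 - i) slots below the first local slot, which is algebraically the closed form in the code. A standalone recheck (16 registers and RAX == 0 are the x64 facts the stub relies on; the first-local slot value is an assumed stand-in):

    #include <cassert>

    int main() {
      const int kNumberOfCpuRegisters = 16;  // x64 general-purpose registers
      const int RAX = 0;                     // enumeration index of RAX
      const int kFirstLocalSlotFromFp = -2;  // assumed stand-in value

      // Push order: index 15 first, ..., index 0 (RAX) last.
      const int slot_by_push_order =
          kFirstLocalSlotFromFp - (kNumberOfCpuRegisters - 1 - RAX);
      // Closed form used by GenerateDeoptimizationSequence.
      const int saved_result_slot_from_fp =
          kFirstLocalSlotFromFp + 1 - (kNumberOfCpuRegisters - RAX);
      assert(slot_by_push_order == saved_result_slot_from_fp);
      return 0;
    }
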
511 | 514 |
512 | 515 |
513 // TOS: return address + call-instruction-size (5 bytes). | 516 // TOS: return address + call-instruction-size (5 bytes). |
514 // RAX: result, must be preserved | 517 // RAX: result, must be preserved |
515 void StubCode::GenerateDeoptimizeLazyStub(Assembler* assembler) { | 518 void StubCode::GenerateDeoptimizeLazyStub(Assembler* assembler) { |
516 // Correct return address to point just after the call that is being | 519 // Correct return address to point just after the call that is being |
517 // deoptimized. | 520 // deoptimized. |
518 __ popq(RBX); | 521 __ popq(RBX); |
519 __ subq(RBX, Immediate(ShortCallPattern::InstructionLength())); | 522 __ subq(RBX, Immediate(ShortCallPattern::InstructionLength())); |
520 __ pushq(RBX); | 523 __ pushq(RBX); |
521 GenerateDeoptimizationSequence(assembler, true); // Preserve RAX. | 524 GenerateDeoptimizationSequence(assembler, kLazyDeopt); |
522 } | 525 } |
523 | 526 |
524 | 527 |
525 void StubCode::GenerateDeoptimizeStub(Assembler* assembler) { | 528 void StubCode::GenerateDeoptimizeStub(Assembler* assembler) { |
526 GenerateDeoptimizationSequence(assembler, false); // Don't preserve RAX. | 529 GenerateDeoptimizationSequence(assembler, kEagerDeopt); |
527 } | 530 } |
528 | 531 |
529 | 532 |
530 void StubCode::GenerateMegamorphicMissStub(Assembler* assembler) { | 533 void StubCode::GenerateMegamorphicMissStub(Assembler* assembler) { |
531 __ EnterStubFrame(); | 534 __ EnterStubFrame(); |
532 // Load the receiver into RAX. The argument count in the arguments | 535 // Load the receiver into RAX. The argument count in the arguments |
533 // descriptor in R10 is a smi. | 536 // descriptor in R10 is a smi. |
534 __ movq(RAX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); | 537 __ movq(RAX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); |
535 // Three words (saved pp, saved fp, stub's pc marker) | 538 // Three words (saved pp, saved fp, stub's pc marker) |
536 // in the stack above the return address. | 539 // in the stack above the return address. |
537 __ movq(RAX, Address(RSP, RAX, TIMES_4, | 540 __ movq(RAX, Address(RSP, RAX, TIMES_4, |
538 kSavedAboveReturnAddress * kWordSize)); | 541 kSavedAboveReturnAddress * kWordSize)); |
539 // Preserve IC data and arguments descriptor. | 542 // Preserve IC data and arguments descriptor. |
540 __ pushq(RBX); | 543 __ pushq(RBX); |
541 __ pushq(R10); | 544 __ pushq(R10); |
542 | 545 |
543 // Space for the result of the runtime call. | 546 // Space for the result of the runtime call. |
544 __ PushObject(Object::null_object(), PP); | 547 __ PushObject(Object::null_object(), PP); |
545 __ pushq(RAX); // Receiver. | 548 __ pushq(RAX); // Receiver. |
546 __ pushq(RBX); // IC data. | 549 __ pushq(RBX); // IC data. |
547 __ pushq(R10); // Arguments descriptor. | 550 __ pushq(R10); // Arguments descriptor. |
548 __ CallRuntime(kMegamorphicCacheMissHandlerRuntimeEntry, 3); | 551 __ CallRuntime(kMegamorphicCacheMissHandlerRuntimeEntry, 3); |
549 // Discard arguments. | 552 // Discard arguments. |
550 __ popq(RAX); | 553 __ popq(RAX); |
551 __ popq(RAX); | 554 __ popq(RAX); |
552 __ popq(RAX); | 555 __ popq(RAX); |
553 __ popq(RAX); // Return value from the runtime call (function). | 556 __ popq(RAX); // Return value from the runtime call (function). |
554 __ popq(R10); // Restore arguments descriptor. | 557 __ popq(R10); // Restore arguments descriptor. |
555 __ popq(RBX); // Restore IC data. | 558 __ popq(RBX); // Restore IC data. |
| 559 __ RestoreCodePointer(); |
556 __ LeaveStubFrame(); | 560 __ LeaveStubFrame(); |
557 | 561 |
558 __ movq(RCX, FieldAddress(RAX, Function::instructions_offset())); | 562 __ movq(CODE_REG, FieldAddress(RAX, Function::code_offset())); |
| 563 __ movq(RCX, FieldAddress(CODE_REG, Code::instructions_offset())); |
559 __ addq(RCX, Immediate(Instructions::HeaderSize() - kHeapObjectTag)); | 564 __ addq(RCX, Immediate(Instructions::HeaderSize() - kHeapObjectTag)); |
560 __ jmp(RCX); | 565 __ jmp(RCX); |
561 } | 566 } |
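
Note: the receiver load near the top of this stub (Address(RSP, RAX, TIMES_4, kSavedAboveReturnAddress * kWordSize)) scales the Smi-tagged argument count by 4. Because a Dart Smi is the value shifted left by one, Smi * 4 equals count * 8, the distance in 8-byte words on x64; the same trick is used later when dropping arguments after the Dart call in the invoke stub. A standalone recheck of the arithmetic:

    #include <cassert>
    #include <cstdint>

    int main() {
      const int64_t kWordSize = 8;     // x64 word size
      const int64_t kSmiTagSize = 1;   // Dart Smi: value << 1
      for (int64_t count = 0; count < 1000; ++count) {
        const int64_t smi_count = count << kSmiTagSize;  // what the register holds
        assert(smi_count * 4 == count * kWordSize);      // TIMES_4 scaling
      }
      return 0;
    }
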
562 | 567 |
563 | 568 |
564 // Called for inline allocation of arrays. | 569 // Called for inline allocation of arrays. |
565 // Input parameters: | 570 // Input parameters: |
566 // R10 : Array length as Smi. | 571 // R10 : Array length as Smi. |
567 // RBX : array element type (either NULL or an instantiated type). | 572 // RBX : array element type (either NULL or an instantiated type). |
568 // NOTE: R10 cannot be clobbered here as the caller relies on it being saved. | 573 // NOTE: R10 cannot be clobbered here as the caller relies on it being saved. |
(...skipping 115 matching lines...)
684 __ pushq(R10); // Array length as Smi. | 689 __ pushq(R10); // Array length as Smi. |
685 __ pushq(RBX); // Element type. | 690 __ pushq(RBX); // Element type. |
686 __ CallRuntime(kAllocateArrayRuntimeEntry, 2); | 691 __ CallRuntime(kAllocateArrayRuntimeEntry, 2); |
687 __ popq(RAX); // Pop element type argument. | 692 __ popq(RAX); // Pop element type argument. |
688 __ popq(R10); // Pop array length argument. | 693 __ popq(R10); // Pop array length argument. |
689 __ popq(RAX); // Pop return value from return slot. | 694 __ popq(RAX); // Pop return value from return slot. |
690 __ LeaveStubFrame(); | 695 __ LeaveStubFrame(); |
691 __ ret(); | 696 __ ret(); |
692 *patch_code_pc_offset = assembler->CodeSize(); | 697 *patch_code_pc_offset = assembler->CodeSize(); |
693 StubCode* stub_code = Isolate::Current()->stub_code(); | 698 StubCode* stub_code = Isolate::Current()->stub_code(); |
694 __ JmpPatchable(&stub_code->FixAllocateArrayStubTargetLabel(), new_pp); | 699 __ JmpPatchable(Code::Handle( |
| 700 stub_code->FixAllocateArrayStubTargetCode()), new_pp); |
695 } | 701 } |
696 | 702 |
697 | 703 |
698 // Called when invoking Dart code from C++ (VM code). | 704 // Called when invoking Dart code from C++ (VM code). |
699 // Input parameters: | 705 // Input parameters: |
700 // RSP : points to return address. | 706 // RSP : points to return address. |
701 // RDI : entrypoint of the Dart function to call. | 707 // RDI : target code |
702 // RSI : arguments descriptor array. | 708 // RSI : arguments descriptor array. |
703 // RDX : arguments array. | 709 // RDX : arguments array. |
704 // RCX : current thread. | 710 // RCX : current thread. |
705 void StubCode::GenerateInvokeDartCodeStub(Assembler* assembler) { | 711 void StubCode::GenerateInvokeDartCodeStub(Assembler* assembler) { |
706 // Save frame pointer coming in. | 712 // Save frame pointer coming in. |
707 __ EnterFrame(0); | 713 __ EnterFrame(0); |
708 | 714 |
709 const Register kEntryPointReg = CallingConventions::kArg1Reg; | 715 const Register kStubCodeReg = CallingConventions::kArg1Reg; |
710 const Register kArgDescReg = CallingConventions::kArg2Reg; | 716 const Register kTargetCodeReg = CallingConventions::kArg2Reg; |
711 const Register kArgsReg = CallingConventions::kArg3Reg; | 717 const Register kArgDescReg = CallingConventions::kArg3Reg; |
712 const Register kThreadReg = CallingConventions::kArg4Reg; | 718 const Register kArgsReg = CallingConventions::kArg4Reg; |
| 719 |
| 720 // Set up THR, which caches the current thread in Dart code. |
| 721 #if defined(_WIN64) |
| 722 __ movq(THR, Address(RSP, 5 * kWordSize)); |
| 723 #else |
| 724 const Register kThreadReg = CallingConventions::kArg5Reg; |
| 725 if (THR != kThreadReg) { |
| 726 __ movq(THR, kThreadReg); |
| 727 } |
| 728 #endif |
713 | 729 |
714 // At this point, the stack looks like: | 730 // At this point, the stack looks like: |
715 // | saved RBP | <-- RBP | 731 // | saved RBP | <-- RBP |
716 // | saved PC (return to DartEntry::InvokeFunction) | | 732 // | saved PC (return to DartEntry::InvokeFunction) | |
717 | 733 |
718 const intptr_t kInitialOffset = 1; | 734 const intptr_t kArgumentsDescOffset = -1 * kWordSize; |
719 // Save arguments descriptor array. | 735 const intptr_t kCodePointerOffset = -2 * kWordSize; |
720 const intptr_t kArgumentsDescOffset = -(kInitialOffset) * kWordSize; | |
721 __ pushq(kArgDescReg); | 736 __ pushq(kArgDescReg); |
| 737 __ pushq(Address(kStubCodeReg, VMHandles::kOffsetOfRawPtrInHandle)); |
722 | 738 |
723 // Save C++ ABI callee-saved registers. | 739 // Save C++ ABI callee-saved registers. |
724 __ PushRegisters(CallingConventions::kCalleeSaveCpuRegisters, | 740 __ PushRegisters(CallingConventions::kCalleeSaveCpuRegisters, |
725 CallingConventions::kCalleeSaveXmmRegisters); | 741 CallingConventions::kCalleeSaveXmmRegisters); |
726 | 742 |
727 // We now load the pool pointer(PP) as we are about to invoke dart code and we | 743 // We now load the pool pointer(PP) as we are about to invoke dart code and we |
728 // could potentially invoke some intrinsic functions which need the PP to be | 744 // could potentially invoke some intrinsic functions which need the PP to be |
729 // set up. | 745 // set up. |
| 746 __ movq(CODE_REG, Address(kStubCodeReg, VMHandles::kOffsetOfRawPtrInHandle)); |
730 __ LoadPoolPointer(PP); | 747 __ LoadPoolPointer(PP); |
731 | 748 |
732 // If any additional (or fewer) values are pushed, the offsets in | 749 // If any additional (or fewer) values are pushed, the offsets in |
733 // kExitLinkSlotFromEntryFp will need to be changed. | 750 // kExitLinkSlotFromEntryFp will need to be changed. |
734 | 751 |
735 // Set up THR, which caches the current thread in Dart code. | |
736 if (THR != kThreadReg) { | |
737 __ movq(THR, kThreadReg); | |
738 } | |
739 // Load Isolate pointer into kIsolateReg. | 752 // Load Isolate pointer into kIsolateReg. |
740 const Register kIsolateReg = RBX; | 753 const Register kIsolateReg = RBX; |
741 __ LoadIsolate(kIsolateReg); | 754 __ LoadIsolate(kIsolateReg); |
742 | 755 |
743 // Save the current VMTag on the stack. | 756 // Save the current VMTag on the stack. |
744 __ movq(RAX, Address(kIsolateReg, Isolate::vm_tag_offset())); | 757 __ movq(RAX, Address(kIsolateReg, Isolate::vm_tag_offset())); |
745 __ pushq(RAX); | 758 __ pushq(RAX); |
746 | 759 |
747 // Mark that the isolate is executing Dart code. | 760 // Mark that the isolate is executing Dart code. |
748 __ movq(Address(kIsolateReg, Isolate::vm_tag_offset()), | 761 __ movq(Address(kIsolateReg, Isolate::vm_tag_offset()), |
749 Immediate(VMTag::kDartTagId)); | 762 Immediate(VMTag::kDartTagId)); |
750 | 763 |
751 // Save top resource and top exit frame info. Use RAX as a temporary register. | 764 // Save top resource and top exit frame info. Use RAX as a temporary register. |
752 // StackFrameIterator reads the top exit frame info saved in this frame. | 765 // StackFrameIterator reads the top exit frame info saved in this frame. |
753 __ movq(RAX, Address(kIsolateReg, Isolate::top_resource_offset())); | 766 __ movq(RAX, Address(kIsolateReg, Isolate::top_resource_offset())); |
754 __ pushq(RAX); | 767 __ pushq(RAX); |
755 __ movq(Address(kIsolateReg, Isolate::top_resource_offset()), | 768 __ movq(Address(kIsolateReg, Isolate::top_resource_offset()), |
756 Immediate(0)); | 769 Immediate(0)); |
757 __ movq(RAX, Address(kIsolateReg, Isolate::top_exit_frame_info_offset())); | 770 __ movq(RAX, Address(kIsolateReg, Isolate::top_exit_frame_info_offset())); |
758 // The constant kExitLinkSlotFromEntryFp must be kept in sync with the | 771 // The constant kExitLinkSlotFromEntryFp must be kept in sync with the |
759 // code below. | 772 // code below. |
760 __ pushq(RAX); | 773 __ pushq(RAX); |
761 #if defined(DEBUG) | 774 #if defined(DEBUG) |
762 { | 775 { |
763 Label ok; | 776 Label ok; |
764 __ leaq(RAX, Address(RBP, kExitLinkSlotFromEntryFp * kWordSize)); | 777 __ leaq(RAX, Address(RBP, kExitLinkSlotFromEntryFp * kWordSize)); |
765 __ cmpq(RAX, RSP); | 778 __ cmpq(RAX, RSP); |
766 __ j(EQUAL, &ok); | 779 __ j(EQUAL, &ok); |
| 780 __ int3(); |
767 __ Stop("kExitLinkSlotFromEntryFp mismatch"); | 781 __ Stop("kExitLinkSlotFromEntryFp mismatch"); |
768 __ Bind(&ok); | 782 __ Bind(&ok); |
769 } | 783 } |
770 #endif | 784 #endif |
771 __ movq(Address(kIsolateReg, Isolate::top_exit_frame_info_offset()), | 785 __ movq(Address(kIsolateReg, Isolate::top_exit_frame_info_offset()), |
772 Immediate(0)); | 786 Immediate(0)); |
773 | 787 |
774 // Load arguments descriptor array into R10, which is passed to Dart code. | 788 // Load arguments descriptor array into R10, which is passed to Dart code. |
775 __ movq(R10, Address(kArgDescReg, VMHandles::kOffsetOfRawPtrInHandle)); | 789 __ movq(R10, Address(kArgDescReg, VMHandles::kOffsetOfRawPtrInHandle)); |
776 | 790 |
777 // Push arguments. At this point we only need to preserve kEntryPointReg. | 791 // Push arguments. At this point we only need to preserve kTargetCodeReg. |
778 ASSERT(kEntryPointReg != RDX); | 792 ASSERT(kTargetCodeReg != RDX); |
779 | 793 |
780 // Load number of arguments into RBX. | 794 // Load number of arguments into RBX. |
781 __ movq(RBX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); | 795 __ movq(RBX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); |
782 __ SmiUntag(RBX); | 796 __ SmiUntag(RBX); |
783 | 797 |
784 // Compute address of 'arguments array' data area into RDX. | 798 // Compute address of 'arguments array' data area into RDX. |
785 __ movq(RDX, Address(kArgsReg, VMHandles::kOffsetOfRawPtrInHandle)); | 799 __ movq(RDX, Address(kArgsReg, VMHandles::kOffsetOfRawPtrInHandle)); |
786 __ leaq(RDX, FieldAddress(RDX, Array::data_offset())); | 800 __ leaq(RDX, FieldAddress(RDX, Array::data_offset())); |
787 | 801 |
788 // Set up arguments for the Dart call. | 802 // Set up arguments for the Dart call. |
789 Label push_arguments; | 803 Label push_arguments; |
790 Label done_push_arguments; | 804 Label done_push_arguments; |
791 __ testq(RBX, RBX); // check if there are arguments. | 805 __ testq(RBX, RBX); // check if there are arguments. |
792 __ j(ZERO, &done_push_arguments, Assembler::kNearJump); | 806 __ j(ZERO, &done_push_arguments, Assembler::kNearJump); |
793 __ movq(RAX, Immediate(0)); | 807 __ movq(RAX, Immediate(0)); |
794 __ Bind(&push_arguments); | 808 __ Bind(&push_arguments); |
795 __ pushq(Address(RDX, RAX, TIMES_8, 0)); | 809 __ pushq(Address(RDX, RAX, TIMES_8, 0)); |
796 __ incq(RAX); | 810 __ incq(RAX); |
797 __ cmpq(RAX, RBX); | 811 __ cmpq(RAX, RBX); |
798 __ j(LESS, &push_arguments, Assembler::kNearJump); | 812 __ j(LESS, &push_arguments, Assembler::kNearJump); |
799 __ Bind(&done_push_arguments); | 813 __ Bind(&done_push_arguments); |
800 | 814 |
801 // Call the Dart code entrypoint. | 815 // Call the Dart code entrypoint. |
802 __ call(kEntryPointReg); // R10 is the arguments descriptor array. | 816 __ movq(CODE_REG, |
| 817 Address(kTargetCodeReg, VMHandles::kOffsetOfRawPtrInHandle)); |
| 818 __ movq(kTargetCodeReg, FieldAddress(CODE_REG, Code::instructions_offset())); |
| 819 __ addq(kTargetCodeReg, |
| 820 Immediate(Instructions::HeaderSize() - kHeapObjectTag)); |
| 821 __ call(kTargetCodeReg); // R10 is the arguments descriptor array. |
| 822 |
| 823 // If we arrived here from JumpToExceptionHandler then PP would not be |
| 824 // set correctly. Reload it from the code object. |
| 825 // TODO(vegorov): set PP in the JumpToExceptionHandler to avoid reloading. |
| 826 __ movq(CODE_REG, Address(RBP, kCodePointerOffset)); |
| 827 __ LoadPoolPointer(PP); |
803 | 828 |
804 // Read the saved arguments descriptor array to obtain the number of passed | 829 // Read the saved arguments descriptor array to obtain the number of passed |
805 // arguments. | 830 // arguments. |
806 __ movq(kArgDescReg, Address(RBP, kArgumentsDescOffset)); | 831 __ movq(kArgDescReg, Address(RBP, kArgumentsDescOffset)); |
807 __ movq(R10, Address(kArgDescReg, VMHandles::kOffsetOfRawPtrInHandle)); | 832 __ movq(R10, Address(kArgDescReg, VMHandles::kOffsetOfRawPtrInHandle)); |
808 __ movq(RDX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); | 833 __ movq(RDX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); |
809 // Get rid of arguments pushed on the stack. | 834 // Get rid of arguments pushed on the stack. |
810 __ leaq(RSP, Address(RSP, RDX, TIMES_4, 0)); // RDX is a Smi. | 835 __ leaq(RSP, Address(RSP, RDX, TIMES_4, 0)); // RDX is a Smi. |
811 | 836 |
812 // Restore the saved top exit frame info and top resource back into the | 837 // Restore the saved top exit frame info and top resource back into the |
(...skipping 15 matching lines...)
828 __ ret(); | 853 __ ret(); |
829 } | 854 } |
830 | 855 |
831 | 856 |
832 // Called for inline allocation of contexts. | 857 // Called for inline allocation of contexts. |
833 // Input: | 858 // Input: |
834 // R10: number of context variables. | 859 // R10: number of context variables. |
835 // Output: | 860 // Output: |
836 // RAX: new allocated RawContext object. | 861 // RAX: new allocated RawContext object. |
837 void StubCode::GenerateAllocateContextStub(Assembler* assembler) { | 862 void StubCode::GenerateAllocateContextStub(Assembler* assembler) { |
838 __ LoadObject(R12, Object::null_object(), PP); | 863 __ LoadObject(R9, Object::null_object(), PP); |
839 if (FLAG_inline_alloc) { | 864 if (FLAG_inline_alloc) { |
840 Label slow_case; | 865 Label slow_case; |
841 Isolate* isolate = Isolate::Current(); | 866 Isolate* isolate = Isolate::Current(); |
842 Heap* heap = isolate->heap(); | 867 Heap* heap = isolate->heap(); |
843 // First compute the rounded instance size. | 868 // First compute the rounded instance size. |
844 // R10: number of context variables. | 869 // R10: number of context variables. |
845 intptr_t fixed_size = (sizeof(RawContext) + kObjectAlignment - 1); | 870 intptr_t fixed_size = (sizeof(RawContext) + kObjectAlignment - 1); |
846 __ leaq(R13, Address(R10, TIMES_8, fixed_size)); | 871 __ leaq(R13, Address(R10, TIMES_8, fixed_size)); |
847 __ andq(R13, Immediate(-kObjectAlignment)); | 872 __ andq(R13, Immediate(-kObjectAlignment)); |
848 | 873 |
(...skipping 57 matching lines...)
906 // RAX: new object. | 931 // RAX: new object. |
907 // R10: number of context variables as integer value (not object). | 932 // R10: number of context variables as integer value (not object). |
908 __ movq(FieldAddress(RAX, Context::num_variables_offset()), R10); | 933 __ movq(FieldAddress(RAX, Context::num_variables_offset()), R10); |
909 | 934 |
910 // Setup the parent field. | 935 // Setup the parent field. |
911 // RAX: new object. | 936 // RAX: new object. |
912 // R10: number of context variables. | 937 // R10: number of context variables. |
913 // No generational barrier needed, since we are storing null. | 938 // No generational barrier needed, since we are storing null. |
914 __ InitializeFieldNoBarrier(RAX, | 939 __ InitializeFieldNoBarrier(RAX, |
915 FieldAddress(RAX, Context::parent_offset()), | 940 FieldAddress(RAX, Context::parent_offset()), |
916 R12); | 941 R9); |
917 | 942 |
918 // Initialize the context variables. | 943 // Initialize the context variables. |
919 // RAX: new object. | 944 // RAX: new object. |
920 // R10: number of context variables. | 945 // R10: number of context variables. |
921 { | 946 { |
922 Label loop, entry; | 947 Label loop, entry; |
923 __ leaq(R13, FieldAddress(RAX, Context::variable_offset(0))); | 948 __ leaq(R13, FieldAddress(RAX, Context::variable_offset(0))); |
924 #if defined(DEBUG) | 949 #if defined(DEBUG) |
925 static const bool kJumpLength = Assembler::kFarJump; | 950 static const bool kJumpLength = Assembler::kFarJump; |
926 #else | 951 #else |
927 static const bool kJumpLength = Assembler::kNearJump; | 952 static const bool kJumpLength = Assembler::kNearJump; |
928 #endif // DEBUG | 953 #endif // DEBUG |
929 __ jmp(&entry, kJumpLength); | 954 __ jmp(&entry, kJumpLength); |
930 __ Bind(&loop); | 955 __ Bind(&loop); |
931 __ decq(R10); | 956 __ decq(R10); |
932 // No generational barrier needed, since we are storing null. | 957 // No generational barrier needed, since we are storing null. |
933 __ InitializeFieldNoBarrier(RAX, | 958 __ InitializeFieldNoBarrier(RAX, |
934 Address(R13, R10, TIMES_8, 0), | 959 Address(R13, R10, TIMES_8, 0), |
935 R12); | 960 R9); |
936 __ Bind(&entry); | 961 __ Bind(&entry); |
937 __ cmpq(R10, Immediate(0)); | 962 __ cmpq(R10, Immediate(0)); |
938 __ j(NOT_EQUAL, &loop, Assembler::kNearJump); | 963 __ j(NOT_EQUAL, &loop, Assembler::kNearJump); |
939 } | 964 } |
940 | 965 |
941 // Done allocating and initializing the context. | 966 // Done allocating and initializing the context. |
942 // RAX: new object. | 967 // RAX: new object. |
943 __ ret(); | 968 __ ret(); |
944 | 969 |
945 __ Bind(&slow_case); | 970 __ Bind(&slow_case); |
946 } | 971 } |
947 // Create a stub frame. | 972 // Create a stub frame. |
948 __ EnterStubFrame(); | 973 __ EnterStubFrame(); |
949 __ pushq(R12); // Setup space on stack for the return value. | 974 __ pushq(R9); // Setup space on stack for the return value. |
950 __ SmiTag(R10); | 975 __ SmiTag(R10); |
951 __ pushq(R10); // Push number of context variables. | 976 __ pushq(R10); // Push number of context variables. |
952 __ CallRuntime(kAllocateContextRuntimeEntry, 1); // Allocate context. | 977 __ CallRuntime(kAllocateContextRuntimeEntry, 1); // Allocate context. |
953 __ popq(RAX); // Pop number of context variables argument. | 978 __ popq(RAX); // Pop number of context variables argument. |
954 __ popq(RAX); // Pop the new context object. | 979 __ popq(RAX); // Pop the new context object. |
955 // RAX: new object | 980 // RAX: new object |
956 // Restore the frame pointer. | 981 // Restore the frame pointer. |
957 __ LeaveStubFrame(); | 982 __ LeaveStubFrame(); |
958 __ ret(); | 983 __ ret(); |
959 } | 984 } |
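
Note: the inline path of AllocateContext sizes the object as (number_of_variables * 8 + sizeof(RawContext) + kObjectAlignment - 1) & -kObjectAlignment: the leaq folds in the biased fixed size and the andq rounds down to an alignment boundary, which together round the true size up. A standalone recheck of that identity (both constants below are assumed stand-ins; the real ones come from the VM headers):

    #include <cassert>
    #include <cstdint>

    int main() {
      const int64_t kObjectAlignment = 16;  // assumed
      const int64_t kRawContextSize = 24;   // hypothetical sizeof(RawContext)
      const int64_t fixed_size = kRawContextSize + kObjectAlignment - 1;
      for (int64_t num_vars = 0; num_vars < 1000; ++num_vars) {
        // leaq(R13, Address(R10, TIMES_8, fixed_size));
        // andq(R13, Immediate(-kObjectAlignment));
        const int64_t stub_size = (num_vars * 8 + fixed_size) & -kObjectAlignment;
        // Round-up of the true object size to the alignment boundary.
        const int64_t true_size = kRawContextSize + num_vars * 8;
        const int64_t rounded_up =
            ((true_size + kObjectAlignment - 1) / kObjectAlignment) * kObjectAlignment;
        assert(stub_size == rounded_up);
      }
      return 0;
    }
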
(...skipping 80 matching lines...)
1040 // The generated code is different if the class is parameterized. | 1065 // The generated code is different if the class is parameterized. |
1041 const bool is_cls_parameterized = cls.NumTypeArguments() > 0; | 1066 const bool is_cls_parameterized = cls.NumTypeArguments() > 0; |
1042 ASSERT(!is_cls_parameterized || | 1067 ASSERT(!is_cls_parameterized || |
1043 (cls.type_arguments_field_offset() != Class::kNoTypeArguments)); | 1068 (cls.type_arguments_field_offset() != Class::kNoTypeArguments)); |
1044 // kInlineInstanceSize is a constant used as a threshold for determining | 1069 // kInlineInstanceSize is a constant used as a threshold for determining |
1045 // when the object initialization should be done as a loop or as | 1070 // when the object initialization should be done as a loop or as |
1046 // straight line code. | 1071 // straight line code. |
1047 const int kInlineInstanceSize = 12; // In words. | 1072 const int kInlineInstanceSize = 12; // In words. |
1048 const intptr_t instance_size = cls.instance_size(); | 1073 const intptr_t instance_size = cls.instance_size(); |
1049 ASSERT(instance_size > 0); | 1074 ASSERT(instance_size > 0); |
1050 __ LoadObject(R12, Object::null_object(), PP); | 1075 __ LoadObject(R9, Object::null_object(), PP); |
1051 if (is_cls_parameterized) { | 1076 if (is_cls_parameterized) { |
1052 __ movq(RDX, Address(RSP, kObjectTypeArgumentsOffset)); | 1077 __ movq(RDX, Address(RSP, kObjectTypeArgumentsOffset)); |
1053 // RDX: instantiated type arguments. | 1078 // RDX: instantiated type arguments. |
1054 } | 1079 } |
1055 if (FLAG_inline_alloc && Heap::IsAllocatableInNewSpace(instance_size)) { | 1080 if (FLAG_inline_alloc && Heap::IsAllocatableInNewSpace(instance_size)) { |
1056 Label slow_case; | 1081 Label slow_case; |
1057 // Allocate the object and update top to point to | 1082 // Allocate the object and update top to point to |
1058 // next object start and initialize the allocated object. | 1083 // next object start and initialize the allocated object. |
1059 // RDX: instantiated type arguments (if is_cls_parameterized). | 1084 // RDX: instantiated type arguments (if is_cls_parameterized). |
1060 Heap* heap = Isolate::Current()->heap(); | 1085 Heap* heap = Isolate::Current()->heap(); |
(...skipping 23 matching lines...)
1084 tags = RawObject::SizeTag::update(instance_size, tags); | 1109 tags = RawObject::SizeTag::update(instance_size, tags); |
1085 ASSERT(cls.id() != kIllegalCid); | 1110 ASSERT(cls.id() != kIllegalCid); |
1086 tags = RawObject::ClassIdTag::update(cls.id(), tags); | 1111 tags = RawObject::ClassIdTag::update(cls.id(), tags); |
1087 __ movq(Address(RAX, Instance::tags_offset()), Immediate(tags)); | 1112 __ movq(Address(RAX, Instance::tags_offset()), Immediate(tags)); |
1088 __ addq(RAX, Immediate(kHeapObjectTag)); | 1113 __ addq(RAX, Immediate(kHeapObjectTag)); |
1089 | 1114 |
1090 // Initialize the remaining words of the object. | 1115 // Initialize the remaining words of the object. |
1091 // RAX: new object (tagged). | 1116 // RAX: new object (tagged). |
1092 // RBX: next object start. | 1117 // RBX: next object start. |
1093 // RDX: new object type arguments (if is_cls_parameterized). | 1118 // RDX: new object type arguments (if is_cls_parameterized). |
1094 // R12: raw null. | 1119 // R9: raw null. |
1095 // First try inlining the initialization without a loop. | 1120 // First try inlining the initialization without a loop. |
1096 if (instance_size < (kInlineInstanceSize * kWordSize)) { | 1121 if (instance_size < (kInlineInstanceSize * kWordSize)) { |
1097 // Check if the object contains any non-header fields. | 1122 // Check if the object contains any non-header fields. |
1098 // Small objects are initialized using a consecutive set of writes. | 1123 // Small objects are initialized using a consecutive set of writes. |
1099 for (intptr_t current_offset = Instance::NextFieldOffset(); | 1124 for (intptr_t current_offset = Instance::NextFieldOffset(); |
1100 current_offset < instance_size; | 1125 current_offset < instance_size; |
1101 current_offset += kWordSize) { | 1126 current_offset += kWordSize) { |
1102 __ InitializeFieldNoBarrier(RAX, | 1127 __ InitializeFieldNoBarrier(RAX, |
1103 FieldAddress(RAX, current_offset), | 1128 FieldAddress(RAX, current_offset), |
1104 R12); | 1129 R9); |
1105 } | 1130 } |
1106 } else { | 1131 } else { |
1107 __ leaq(RCX, FieldAddress(RAX, Instance::NextFieldOffset())); | 1132 __ leaq(RCX, FieldAddress(RAX, Instance::NextFieldOffset())); |
1108 // Loop until the whole object is initialized. | 1133 // Loop until the whole object is initialized. |
1109 // RAX: new object (tagged). | 1134 // RAX: new object (tagged). |
1110 // RBX: next object start. | 1135 // RBX: next object start. |
1111 // RCX: next word to be initialized. | 1136 // RCX: next word to be initialized. |
1112 // RDX: new object type arguments (if is_cls_parameterized). | 1137 // RDX: new object type arguments (if is_cls_parameterized). |
1113 Label init_loop; | 1138 Label init_loop; |
1114 Label done; | 1139 Label done; |
1115 __ Bind(&init_loop); | 1140 __ Bind(&init_loop); |
1116 __ cmpq(RCX, RBX); | 1141 __ cmpq(RCX, RBX); |
1117 #if defined(DEBUG) | 1142 #if defined(DEBUG) |
1118 static const bool kJumpLength = Assembler::kFarJump; | 1143 static const bool kJumpLength = Assembler::kFarJump; |
1119 #else | 1144 #else |
1120 static const bool kJumpLength = Assembler::kNearJump; | 1145 static const bool kJumpLength = Assembler::kNearJump; |
1121 #endif // DEBUG | 1146 #endif // DEBUG |
1122 __ j(ABOVE_EQUAL, &done, kJumpLength); | 1147 __ j(ABOVE_EQUAL, &done, kJumpLength); |
1123 __ InitializeFieldNoBarrier(RAX, Address(RCX, 0), R12); | 1148 __ InitializeFieldNoBarrier(RAX, Address(RCX, 0), R9); |
1124 __ addq(RCX, Immediate(kWordSize)); | 1149 __ addq(RCX, Immediate(kWordSize)); |
1125 __ jmp(&init_loop, Assembler::kNearJump); | 1150 __ jmp(&init_loop, Assembler::kNearJump); |
1126 __ Bind(&done); | 1151 __ Bind(&done); |
1127 } | 1152 } |
1128 if (is_cls_parameterized) { | 1153 if (is_cls_parameterized) { |
1129 // RDX: new object type arguments. | 1154 // RDX: new object type arguments. |
1130 // Set the type arguments in the new object. | 1155 // Set the type arguments in the new object. |
1131 intptr_t offset = cls.type_arguments_field_offset(); | 1156 intptr_t offset = cls.type_arguments_field_offset(); |
1132 __ InitializeFieldNoBarrier(RAX, FieldAddress(RAX, offset), RDX); | 1157 __ InitializeFieldNoBarrier(RAX, FieldAddress(RAX, offset), RDX); |
1133 } | 1158 } |
1134 // Done allocating and initializing the instance. | 1159 // Done allocating and initializing the instance. |
1135 // RAX: new object (tagged). | 1160 // RAX: new object (tagged). |
1136 __ ret(); | 1161 __ ret(); |
1137 | 1162 |
1138 __ Bind(&slow_case); | 1163 __ Bind(&slow_case); |
1139 } | 1164 } |
1140 // If is_cls_parameterized: | 1165 // If is_cls_parameterized: |
1141 // RDX: new object type arguments. | 1166 // RDX: new object type arguments. |
1142 // Create a stub frame. | 1167 // Create a stub frame. |
1143 __ EnterStubFrame(); // Uses PP to access class object. | 1168 __ EnterStubFrame(); // Uses PP to access class object. |
1144 __ pushq(R12); // Setup space on stack for return value. | 1169 __ pushq(R9); // Setup space on stack for return value. |
1145 __ PushObject(cls, PP); // Push class of object to be allocated. | 1170 __ PushObject(cls, PP); // Push class of object to be allocated. |
1146 if (is_cls_parameterized) { | 1171 if (is_cls_parameterized) { |
1147 __ pushq(RDX); // Push type arguments of object to be allocated. | 1172 __ pushq(RDX); // Push type arguments of object to be allocated. |
1148 } else { | 1173 } else { |
1149 __ pushq(R12); // Push null type arguments. | 1174 __ pushq(R9); // Push null type arguments. |
1150 } | 1175 } |
1151 __ CallRuntime(kAllocateObjectRuntimeEntry, 2); // Allocate object. | 1176 __ CallRuntime(kAllocateObjectRuntimeEntry, 2); // Allocate object. |
1152 __ popq(RAX); // Pop argument (type arguments of object). | 1177 __ popq(RAX); // Pop argument (type arguments of object). |
1153 __ popq(RAX); // Pop argument (class of object). | 1178 __ popq(RAX); // Pop argument (class of object). |
1154 __ popq(RAX); // Pop result (newly allocated object). | 1179 __ popq(RAX); // Pop result (newly allocated object). |
1155 // RAX: new object | 1180 // RAX: new object |
1156 // Restore the frame pointer. | 1181 // Restore the frame pointer. |
1157 __ LeaveStubFrame(); | 1182 __ LeaveStubFrame(); |
1158 __ ret(); | 1183 __ ret(); |
1159 *patch_code_pc_offset = assembler->CodeSize(); | 1184 *patch_code_pc_offset = assembler->CodeSize(); |
1160 StubCode* stub_code = Isolate::Current()->stub_code(); | 1185 StubCode* stub_code = Isolate::Current()->stub_code(); |
1161 __ JmpPatchable(&stub_code->FixAllocationStubTargetLabel(), new_pp); | 1186 __ JmpPatchable(Code::Handle( |
| 1187 stub_code->FixAllocationStubTargetCode()), new_pp); |
1162 } | 1188 } |
1163 | 1189 |
1164 | 1190 |
1165 // Called for invoking "dynamic noSuchMethod(Invocation invocation)" function | 1191 // Called for invoking "dynamic noSuchMethod(Invocation invocation)" function |
1166 // from the entry code of a dart function after an error in passed argument | 1192 // from the entry code of a dart function after an error in passed argument |
1167 // name or number is detected. | 1193 // name or number is detected. |
1168 // Input parameters: | 1194 // Input parameters: |
1169 // RSP : points to return address. | 1195 // RSP : points to return address. |
1170 // RSP + 8 : address of last argument. | 1196 // RSP + 8 : address of last argument. |
1171 // R10 : arguments descriptor array. | 1197 // R10 : arguments descriptor array. |
(...skipping 65 matching lines...)
1237 Label* not_smi_or_overflow, | 1263 Label* not_smi_or_overflow, |
1238 bool should_update_result_range) { | 1264 bool should_update_result_range) { |
1239 __ Comment("Fast Smi op"); | 1265 __ Comment("Fast Smi op"); |
1240 if (FLAG_throw_on_javascript_int_overflow) { | 1266 if (FLAG_throw_on_javascript_int_overflow) { |
1241 // The overflow check is more complex than implemented below. | 1267 // The overflow check is more complex than implemented below. |
1242 return; | 1268 return; |
1243 } | 1269 } |
1244 ASSERT(num_args == 2); | 1270 ASSERT(num_args == 2); |
1245 __ movq(RCX, Address(RSP, + 1 * kWordSize)); // Right | 1271 __ movq(RCX, Address(RSP, + 1 * kWordSize)); // Right |
1246 __ movq(RAX, Address(RSP, + 2 * kWordSize)); // Left. | 1272 __ movq(RAX, Address(RSP, + 2 * kWordSize)); // Left. |
1247 __ movq(R12, RCX); | 1273 __ movq(R13, RCX); |
1248 __ orq(R12, RAX); | 1274 __ orq(R13, RAX); |
1249 __ testq(R12, Immediate(kSmiTagMask)); | 1275 __ testq(R13, Immediate(kSmiTagMask)); |
1250 __ j(NOT_ZERO, not_smi_or_overflow); | 1276 __ j(NOT_ZERO, not_smi_or_overflow); |
1251 switch (kind) { | 1277 switch (kind) { |
1252 case Token::kADD: { | 1278 case Token::kADD: { |
1253 __ addq(RAX, RCX); | 1279 __ addq(RAX, RCX); |
1254 __ j(OVERFLOW, not_smi_or_overflow); | 1280 __ j(OVERFLOW, not_smi_or_overflow); |
1255 break; | 1281 break; |
1256 } | 1282 } |
1257 case Token::kSUB: { | 1283 case Token::kSUB: { |
1258 __ subq(RAX, RCX); | 1284 __ subq(RAX, RCX); |
1259 __ j(OVERFLOW, not_smi_or_overflow); | 1285 __ j(OVERFLOW, not_smi_or_overflow); |
(...skipping 15 matching lines...) Expand all Loading... |
1275 | 1301 |
1276 | 1302 |
1277 if (should_update_result_range) { | 1303 if (should_update_result_range) { |
1278 Label done; | 1304 Label done; |
1279 __ movq(RSI, RAX); | 1305 __ movq(RSI, RAX); |
1280 __ UpdateRangeFeedback(RSI, 2, RBX, RCX, &done); | 1306 __ UpdateRangeFeedback(RSI, 2, RBX, RCX, &done); |
1281 __ Bind(&done); | 1307 __ Bind(&done); |
1282 } | 1308 } |
1283 | 1309 |
1284 // RBX: IC data object (preserved). | 1310 // RBX: IC data object (preserved). |
1285 __ movq(R12, FieldAddress(RBX, ICData::ic_data_offset())); | 1311 __ movq(R13, FieldAddress(RBX, ICData::ic_data_offset())); |
1286 // R12: ic_data_array with check entries: classes and target functions. | 1312 // R13: ic_data_array with check entries: classes and target functions. |
1287 __ leaq(R12, FieldAddress(R12, Array::data_offset())); | 1313 __ leaq(R13, FieldAddress(R13, Array::data_offset())); |
1288 // R12: points directly to the first ic data array element. | 1314 // R13: points directly to the first ic data array element. |
1289 #if defined(DEBUG) | 1315 #if defined(DEBUG) |
1290 // Check that first entry is for Smi/Smi. | 1316 // Check that first entry is for Smi/Smi. |
1291 Label error, ok; | 1317 Label error, ok; |
1292 const Immediate& imm_smi_cid = | 1318 const Immediate& imm_smi_cid = |
1293 Immediate(reinterpret_cast<intptr_t>(Smi::New(kSmiCid))); | 1319 Immediate(reinterpret_cast<intptr_t>(Smi::New(kSmiCid))); |
1294 __ cmpq(Address(R12, 0 * kWordSize), imm_smi_cid); | 1320 __ cmpq(Address(R13, 0 * kWordSize), imm_smi_cid); |
1295 __ j(NOT_EQUAL, &error, Assembler::kNearJump); | 1321 __ j(NOT_EQUAL, &error, Assembler::kNearJump); |
1296 __ cmpq(Address(R12, 1 * kWordSize), imm_smi_cid); | 1322 __ cmpq(Address(R13, 1 * kWordSize), imm_smi_cid); |
1297 __ j(EQUAL, &ok, Assembler::kNearJump); | 1323 __ j(EQUAL, &ok, Assembler::kNearJump); |
1298 __ Bind(&error); | 1324 __ Bind(&error); |
1299 __ Stop("Incorrect IC data"); | 1325 __ Stop("Incorrect IC data"); |
1300 __ Bind(&ok); | 1326 __ Bind(&ok); |
1301 #endif | 1327 #endif |
1302 | 1328 |
1303 if (FLAG_optimization_counter_threshold >= 0) { | 1329 if (FLAG_optimization_counter_threshold >= 0) { |
1304 const intptr_t count_offset = ICData::CountIndexFor(num_args) * kWordSize; | 1330 const intptr_t count_offset = ICData::CountIndexFor(num_args) * kWordSize; |
1305 // Update counter. | 1331 // Update counter. |
1306 __ movq(R8, Address(R12, count_offset)); | 1332 __ movq(R8, Address(R13, count_offset)); |
1307 __ addq(R8, Immediate(Smi::RawValue(1))); | 1333 __ addq(R8, Immediate(Smi::RawValue(1))); |
1308 __ movq(R13, Immediate(Smi::RawValue(Smi::kMaxValue))); | 1334 __ movq(R9, Immediate(Smi::RawValue(Smi::kMaxValue))); |
1309 __ cmovnoq(R13, R8); | 1335 __ cmovnoq(R9, R8); |
1310 __ StoreIntoSmiField(Address(R12, count_offset), R13); | 1336 __ StoreIntoSmiField(Address(R13, count_offset), R9); |
1311 } | 1337 } |
1312 | 1338 |
1313 __ ret(); | 1339 __ ret(); |
1314 } | 1340 } |
1315 | 1341 |
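
The fast path above leans on two x64 idioms that are easy to miss: because Smis carry a zero tag bit, OR-ing the two tagged operands and testing against kSmiTagMask checks both for Smi-ness at once, and tagged add/sub can be done directly on the tagged words; the call-count update is a saturating increment built from addq plus cmovnoq. A minimal C++ sketch of the same logic, assuming the usual 1-bit Smi tagging rather than the VM's real headers (and GCC/Clang's __builtin_add_overflow):

    // Sketch only: assumes tag bit 0 == 0 for Smis and Smi::RawValue(1) == 2.
    #include <cstdint>
    #include <limits>

    constexpr intptr_t kSmiTagMask = 1;

    inline bool BothSmis(intptr_t left, intptr_t right) {
      // Mirrors orq + testq: the low bit of (left | right) is set iff either is non-Smi.
      return ((left | right) & kSmiTagMask) == 0;
    }

    inline bool TaggedSmiAdd(intptr_t left, intptr_t right, intptr_t* result) {
      // Adding two tagged Smis yields the tagged sum; the stub checks overflow
      // with j(OVERFLOW, ...) right after addq.
      return !__builtin_add_overflow(left, right, result);
    }

    inline intptr_t SaturatingBump(intptr_t tagged_count) {
      // Mirrors addq/cmovnoq: bump the Smi count, clamp on overflow.
      intptr_t bumped;
      if (__builtin_add_overflow(tagged_count, intptr_t{2}, &bumped)) {
        return std::numeric_limits<intptr_t>::max() - 1;  // stand-in for tagged Smi::kMaxValue
      }
      return bumped;
    }
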
1316 | 1342 |
1317 // Generate inline cache check for 'num_args'. | 1343 // Generate inline cache check for 'num_args'. |
1318 // RBX: Inline cache data object. | 1344 // RBX: Inline cache data object. |
1319 // TOS(0): return address | 1345 // TOS(0): return address |
1320 // Control flow: | 1346 // Control flow: |
(...skipping 55 matching lines...)
1376 range_collection_mode == kCollectRanges); | 1402 range_collection_mode == kCollectRanges); |
1377 } | 1403 } |
1378 __ Bind(&not_smi_or_overflow); | 1404 __ Bind(&not_smi_or_overflow); |
1379 | 1405 |
1380 __ Comment("Extract ICData initial values and receiver cid"); | 1406 __ Comment("Extract ICData initial values and receiver cid"); |
1381 // Load arguments descriptor into R10. | 1407 // Load arguments descriptor into R10. |
1382 __ movq(R10, FieldAddress(RBX, ICData::arguments_descriptor_offset())); | 1408 __ movq(R10, FieldAddress(RBX, ICData::arguments_descriptor_offset())); |
1383 // Loop that checks if there is an IC data match. | 1409 // Loop that checks if there is an IC data match. |
1384 Label loop, update, test, found; | 1410 Label loop, update, test, found; |
1385 // RBX: IC data object (preserved). | 1411 // RBX: IC data object (preserved). |
1386 __ movq(R12, FieldAddress(RBX, ICData::ic_data_offset())); | 1412 __ movq(R13, FieldAddress(RBX, ICData::ic_data_offset())); |
1387 // R12: ic_data_array with check entries: classes and target functions. | 1413 // R13: ic_data_array with check entries: classes and target functions. |
1388 __ leaq(R12, FieldAddress(R12, Array::data_offset())); | 1414 __ leaq(R13, FieldAddress(R13, Array::data_offset())); |
1389 // R12: points directly to the first ic data array element. | 1415 // R13: points directly to the first ic data array element. |
1390 | 1416 |
1391 // Get the receiver's class ID (first read number of arguments from | 1417 // Get the receiver's class ID (first read number of arguments from |
1392 // arguments descriptor array and then access the receiver from the stack). | 1418 // arguments descriptor array and then access the receiver from the stack). |
1393 __ movq(RAX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); | 1419 __ movq(RAX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); |
1394 __ movq(R13, Address(RSP, RAX, TIMES_4, 0)); // RAX (argument count) is Smi. | 1420 __ movq(R9, Address(RSP, RAX, TIMES_4, 0)); // RAX (argument count) is Smi. |
1395 __ LoadTaggedClassIdMayBeSmi(RAX, R13); | 1421 __ LoadTaggedClassIdMayBeSmi(RAX, R9); |
1396 // RAX: receiver's class ID as smi. | 1422 // RAX: receiver's class ID as smi. |
1397 __ movq(R13, Address(R12, 0)); // First class ID (Smi) to check. | 1423 __ movq(R9, Address(R13, 0)); // First class ID (Smi) to check. |
1398 __ jmp(&test); | 1424 __ jmp(&test); |
1399 | 1425 |
1400 __ Comment("ICData loop"); | 1426 __ Comment("ICData loop"); |
1401 __ Bind(&loop); | 1427 __ Bind(&loop); |
1402 for (int i = 0; i < num_args; i++) { | 1428 for (int i = 0; i < num_args; i++) { |
1403 if (i > 0) { | 1429 if (i > 0) { |
1404 // If not the first, load the next argument's class ID. | 1430 // If not the first, load the next argument's class ID. |
1405 __ movq(RAX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); | 1431 __ movq(RAX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); |
1406 __ movq(R13, Address(RSP, RAX, TIMES_4, - i * kWordSize)); | 1432 __ movq(R9, Address(RSP, RAX, TIMES_4, - i * kWordSize)); |
1407 __ LoadTaggedClassIdMayBeSmi(RAX, R13); | 1433 __ LoadTaggedClassIdMayBeSmi(RAX, R9); |
1408 // RAX: next argument class ID (smi). | 1434 // RAX: next argument class ID (smi). |
1409 __ movq(R13, Address(R12, i * kWordSize)); | 1435 __ movq(R9, Address(R13, i * kWordSize)); |
1410 // R13: next class ID to check (smi). | 1436 // R9: next class ID to check (smi). |
1411 } | 1437 } |
1412 __ cmpq(RAX, R13); // Class id match? | 1438 __ cmpq(RAX, R9); // Class id match? |
1413 if (i < (num_args - 1)) { | 1439 if (i < (num_args - 1)) { |
1414 __ j(NOT_EQUAL, &update); // Continue. | 1440 __ j(NOT_EQUAL, &update); // Continue. |
1415 } else { | 1441 } else { |
1416 // Last check, all checks before matched. | 1442 // Last check, all checks before matched. |
1417 __ j(EQUAL, &found); // Break. | 1443 __ j(EQUAL, &found); // Break. |
1418 } | 1444 } |
1419 } | 1445 } |
1420 __ Bind(&update); | 1446 __ Bind(&update); |
1421 // Reload receiver class ID. It has not been destroyed when num_args == 1. | 1447 // Reload receiver class ID. It has not been destroyed when num_args == 1. |
1422 if (num_args > 1) { | 1448 if (num_args > 1) { |
1423 __ movq(RAX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); | 1449 __ movq(RAX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); |
1424 __ movq(R13, Address(RSP, RAX, TIMES_4, 0)); | 1450 __ movq(R9, Address(RSP, RAX, TIMES_4, 0)); |
1425 __ LoadTaggedClassIdMayBeSmi(RAX, R13); | 1451 __ LoadTaggedClassIdMayBeSmi(RAX, R9); |
1426 } | 1452 } |
1427 | 1453 |
1428 const intptr_t entry_size = ICData::TestEntryLengthFor(num_args) * kWordSize; | 1454 const intptr_t entry_size = ICData::TestEntryLengthFor(num_args) * kWordSize; |
1429 __ addq(R12, Immediate(entry_size)); // Next entry. | 1455 __ addq(R13, Immediate(entry_size)); // Next entry. |
1430 __ movq(R13, Address(R12, 0)); // Next class ID. | 1456 __ movq(R9, Address(R13, 0)); // Next class ID. |
1431 | 1457 |
1432 __ Bind(&test); | 1458 __ Bind(&test); |
1433 __ cmpq(R13, Immediate(Smi::RawValue(kIllegalCid))); // Done? | 1459 __ cmpq(R9, Immediate(Smi::RawValue(kIllegalCid))); // Done? |
1434 __ j(NOT_EQUAL, &loop, Assembler::kNearJump); | 1460 __ j(NOT_EQUAL, &loop, Assembler::kNearJump); |
1435 | 1461 |
1436 __ Comment("IC miss"); | 1462 __ Comment("IC miss"); |
1437 __ LoadObject(R12, Object::null_object(), PP); | 1463 __ LoadObject(R13, Object::null_object(), PP); |
1438 // Compute address of arguments (first read number of arguments from | 1464 // Compute address of arguments (first read number of arguments from |
1439 // arguments descriptor array and then compute address on the stack). | 1465 // arguments descriptor array and then compute address on the stack). |
1440 __ movq(RAX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); | 1466 __ movq(RAX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); |
1441 __ leaq(RAX, Address(RSP, RAX, TIMES_4, 0)); // RAX is Smi. | 1467 __ leaq(RAX, Address(RSP, RAX, TIMES_4, 0)); // RAX is Smi. |
1442 __ EnterStubFrame(); | 1468 __ EnterStubFrame(); |
1443 __ pushq(R10); // Preserve arguments descriptor array. | 1469 __ pushq(R10); // Preserve arguments descriptor array. |
1444 __ pushq(RBX); // Preserve IC data object. | 1470 __ pushq(RBX); // Preserve IC data object. |
1445 __ pushq(R12); // Setup space on stack for result (target code object). | 1471 __ pushq(R13); // Setup space on stack for result (target code object). |
1446 // Push call arguments. | 1472 // Push call arguments. |
1447 for (intptr_t i = 0; i < num_args; i++) { | 1473 for (intptr_t i = 0; i < num_args; i++) { |
1448 __ movq(RCX, Address(RAX, -kWordSize * i)); | 1474 __ movq(RCX, Address(RAX, -kWordSize * i)); |
1449 __ pushq(RCX); | 1475 __ pushq(RCX); |
1450 } | 1476 } |
1451 __ pushq(RBX); // Pass IC data object. | 1477 __ pushq(RBX); // Pass IC data object. |
1452 __ CallRuntime(handle_ic_miss, num_args + 1); | 1478 __ CallRuntime(handle_ic_miss, num_args + 1); |
1453 // Remove the call arguments pushed earlier, including the IC data object. | 1479 // Remove the call arguments pushed earlier, including the IC data object. |
1454 for (intptr_t i = 0; i < num_args + 1; i++) { | 1480 for (intptr_t i = 0; i < num_args + 1; i++) { |
1455 __ popq(RAX); | 1481 __ popq(RAX); |
1456 } | 1482 } |
1457 __ popq(RAX); // Pop returned function object into RAX. | 1483 __ popq(RAX); // Pop returned function object into RAX. |
1458 __ popq(RBX); // Restore IC data array. | 1484 __ popq(RBX); // Restore IC data array. |
1459 __ popq(R10); // Restore arguments descriptor array. | 1485 __ popq(R10); // Restore arguments descriptor array. |
| 1486 if (range_collection_mode == kCollectRanges) { |
| 1487 __ RestoreCodePointer(); |
| 1488 } |
1460 __ LeaveStubFrame(); | 1489 __ LeaveStubFrame(); |
1461 Label call_target_function; | 1490 Label call_target_function; |
1462 __ jmp(&call_target_function); | 1491 __ jmp(&call_target_function); |
1463 | 1492 |
1464 __ Bind(&found); | 1493 __ Bind(&found); |
1465 // R12: Pointer to an IC data check group. | 1494 // R13: Pointer to an IC data check group. |
1466 const intptr_t target_offset = ICData::TargetIndexFor(num_args) * kWordSize; | 1495 const intptr_t target_offset = ICData::TargetIndexFor(num_args) * kWordSize; |
1467 const intptr_t count_offset = ICData::CountIndexFor(num_args) * kWordSize; | 1496 const intptr_t count_offset = ICData::CountIndexFor(num_args) * kWordSize; |
1468 __ movq(RAX, Address(R12, target_offset)); | 1497 __ movq(RAX, Address(R13, target_offset)); |
1469 | 1498 |
1470 if (FLAG_optimization_counter_threshold >= 0) { | 1499 if (FLAG_optimization_counter_threshold >= 0) { |
1471 // Update counter. | 1500 // Update counter. |
1472 __ Comment("Update caller's counter"); | 1501 __ Comment("Update caller's counter"); |
1473 __ movq(R8, Address(R12, count_offset)); | 1502 __ movq(R8, Address(R13, count_offset)); |
1474 __ addq(R8, Immediate(Smi::RawValue(1))); | 1503 __ addq(R8, Immediate(Smi::RawValue(1))); |
1475 __ movq(R13, Immediate(Smi::RawValue(Smi::kMaxValue))); | 1504 __ movq(R9, Immediate(Smi::RawValue(Smi::kMaxValue))); |
1476 __ cmovnoq(R13, R8); | 1505 __ cmovnoq(R9, R8); |
1477 __ StoreIntoSmiField(Address(R12, count_offset), R13); | 1506 __ StoreIntoSmiField(Address(R13, count_offset), R9); |
1478 } | 1507 } |
1479 | 1508 |
1480 __ Comment("Call target"); | 1509 __ Comment("Call target"); |
1481 __ Bind(&call_target_function); | 1510 __ Bind(&call_target_function); |
1482 // RAX: Target function. | 1511 // RAX: Target function. |
1483 Label is_compiled; | 1512 Label is_compiled; |
1484 __ movq(RCX, FieldAddress(RAX, Function::instructions_offset())); | |
1485 __ addq(RCX, Immediate(Instructions::HeaderSize() - kHeapObjectTag)); | |
1486 if (range_collection_mode == kCollectRanges) { | 1513 if (range_collection_mode == kCollectRanges) { |
| 1514 __ movq(R13, FieldAddress(RAX, Function::code_offset())); |
| 1515 __ movq(RCX, FieldAddress(R13, Code::instructions_offset())); |
| 1516 __ addq(RCX, Immediate(Instructions::HeaderSize() - kHeapObjectTag)); |
1487 __ movq(R8, Address(RSP, + 1 * kWordSize)); | 1517 __ movq(R8, Address(RSP, + 1 * kWordSize)); |
1488 if (num_args == 2) { | 1518 if (num_args == 2) { |
1489 __ movq(R13, Address(RSP, + 2 * kWordSize)); | 1519 __ movq(R9, Address(RSP, + 2 * kWordSize)); |
1490 } | 1520 } |
1491 __ EnterStubFrame(); | 1521 __ EnterStubFrame(); |
1492 __ pushq(RBX); | 1522 __ pushq(RBX); |
1493 if (num_args == 2) { | 1523 if (num_args == 2) { |
1494 __ pushq(R13); | 1524 __ pushq(R9); |
1495 } | 1525 } |
1496 __ pushq(R8); | 1526 __ pushq(R8); |
| 1527 __ movq(CODE_REG, R13); |
1497 __ call(RCX); | 1528 __ call(RCX); |
1498 | 1529 |
1499 Label done; | 1530 Label done; |
1500 __ movq(RDX, RAX); | 1531 __ movq(RDX, RAX); |
1501 __ movq(RBX, Address(RBP, kFirstLocalSlotFromFp * kWordSize)); | 1532 __ movq(RBX, Address(RBP, kFirstLocalSlotFromFp * kWordSize)); |
1502 __ UpdateRangeFeedback(RDX, 2, RBX, RCX, &done); | 1533 __ UpdateRangeFeedback(RDX, 2, RBX, RCX, &done); |
1503 __ Bind(&done); | 1534 __ Bind(&done); |
1504 __ LeaveStubFrame(); | 1535 __ LeaveStubFrame(); |
1505 __ ret(); | 1536 __ ret(); |
1506 } else { | 1537 } else { |
| 1538 __ movq(CODE_REG, FieldAddress(RAX, Function::code_offset())); |
| 1539 __ movq(RCX, FieldAddress(CODE_REG, Code::instructions_offset())); |
| 1540 __ addq(RCX, Immediate(Instructions::HeaderSize() - kHeapObjectTag)); |
1507 __ jmp(RCX); | 1541 __ jmp(RCX); |
1508 } | 1542 } |
1509 | 1543 |
1510 if (FLAG_support_debugger && !optimized) { | 1544 if (FLAG_support_debugger && !optimized) { |
1511 __ Bind(&stepping); | 1545 __ Bind(&stepping); |
1512 __ EnterStubFrame(); | 1546 __ EnterStubFrame(); |
1513 __ pushq(RBX); | 1547 __ pushq(RBX); |
1514 __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0); | 1548 __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0); |
1515 __ popq(RBX); | 1549 __ popq(RBX); |
| 1550 __ RestoreCodePointer(); |
1516 __ LeaveStubFrame(); | 1551 __ LeaveStubFrame(); |
1517 __ jmp(&done_stepping); | 1552 __ jmp(&done_stepping); |
1518 } | 1553 } |
1519 } | 1554 } |
1520 | 1555 |
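
The loop in the stub above is a linear scan over the ICData check array: each group is ICData::TestEntryLengthFor(num_args) words holding num_args class-id Smis, the target Function, and a call count, and the list ends with a kIllegalCid sentinel. A rough C++ model of that lookup, with assumed types standing in for the VM's raw objects:

    // Model of the scan only; the real indices come from ICData::TargetIndexFor
    // and ICData::CountIndexFor.
    #include <cstdint>
    #include <vector>

    struct ICEntry {
      std::vector<intptr_t> class_ids;  // num_args argument class ids (Smi-tagged in the VM)
      void* target;                     // target Function
      intptr_t count;                   // call count (bumped, saturating, on a hit)
    };

    // Returns the matching target, or nullptr for an IC miss, in which case the
    // stub calls the runtime's handle_ic_miss entry and patches the IC data.
    void* LookupIC(const std::vector<ICEntry>& ic_data,
                   const std::vector<intptr_t>& arg_cids) {
      for (const ICEntry& entry : ic_data) {   // terminated by kIllegalCid in the VM
        if (entry.class_ids == arg_cids) return entry.target;
      }
      return nullptr;
    }
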
1521 | 1556 |
1522 // Use inline cache data array to invoke the target or continue in inline | 1557 // Use inline cache data array to invoke the target or continue in inline |
1523 // cache miss handler. Stub for 1-argument check (receiver class). | 1558 // cache miss handler. Stub for 1-argument check (receiver class). |
1524 // RBX: Inline cache data object. | 1559 // RBX: Inline cache data object. |
1525 // TOS(0): Return address. | 1560 // TOS(0): Return address. |
(...skipping 148 matching lines...)
1674 __ movq(R13, Immediate(Smi::RawValue(Smi::kMaxValue))); | 1709 __ movq(R13, Immediate(Smi::RawValue(Smi::kMaxValue))); |
1675 __ cmovnoq(R13, R8); | 1710 __ cmovnoq(R13, R8); |
1676 __ StoreIntoSmiField(Address(R12, count_offset), R13); | 1711 __ StoreIntoSmiField(Address(R12, count_offset), R13); |
1677 } | 1712 } |
1678 | 1713 |
1679 // Load arguments descriptor into R10. | 1714 // Load arguments descriptor into R10. |
1680 __ movq(R10, FieldAddress(RBX, ICData::arguments_descriptor_offset())); | 1715 __ movq(R10, FieldAddress(RBX, ICData::arguments_descriptor_offset())); |
1681 | 1716 |
1682 // Get function and call it, if possible. | 1717 // Get function and call it, if possible. |
1683 __ movq(RAX, Address(R12, target_offset)); | 1718 __ movq(RAX, Address(R12, target_offset)); |
1684 __ movq(RCX, FieldAddress(RAX, Function::instructions_offset())); | 1719 __ movq(CODE_REG, FieldAddress(RAX, Function::code_offset())); |
| 1720 __ movq(RCX, FieldAddress(CODE_REG, Code::instructions_offset())); |
1685 // RCX: Target instructions. | 1721 // RCX: Target instructions. |
1686 __ addq(RCX, Immediate(Instructions::HeaderSize() - kHeapObjectTag)); | 1722 __ addq(RCX, Immediate(Instructions::HeaderSize() - kHeapObjectTag)); |
1687 __ jmp(RCX); | 1723 __ jmp(RCX); |
1688 | 1724 |
1689 if (FLAG_support_debugger) { | 1725 if (FLAG_support_debugger) { |
1690 __ Bind(&stepping); | 1726 __ Bind(&stepping); |
1691 __ EnterStubFrame(); | 1727 __ EnterStubFrame(); |
1692 __ pushq(RBX); // Preserve IC data object. | 1728 __ pushq(RBX); // Preserve IC data object. |
1693 __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0); | 1729 __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0); |
1694 __ popq(RBX); | 1730 __ popq(RBX); |
(...skipping 32 matching lines...)
1727 __ EnterStubFrame(); | 1763 __ EnterStubFrame(); |
1728 __ pushq(R10); // Preserve arguments descriptor array. | 1764 __ pushq(R10); // Preserve arguments descriptor array. |
1729 __ pushq(RBX); // Preserve IC data object. | 1765 __ pushq(RBX); // Preserve IC data object. |
1730 __ pushq(RAX); // Pass function. | 1766 __ pushq(RAX); // Pass function. |
1731 __ CallRuntime(kCompileFunctionRuntimeEntry, 1); | 1767 __ CallRuntime(kCompileFunctionRuntimeEntry, 1); |
1732 __ popq(RAX); // Restore function. | 1768 __ popq(RAX); // Restore function. |
1733 __ popq(RBX); // Restore IC data array. | 1769 __ popq(RBX); // Restore IC data array. |
1734 __ popq(R10); // Restore arguments descriptor array. | 1770 __ popq(R10); // Restore arguments descriptor array. |
1735 __ LeaveStubFrame(); | 1771 __ LeaveStubFrame(); |
1736 | 1772 |
1737 __ movq(RAX, FieldAddress(RAX, Function::instructions_offset())); | 1773 __ movq(CODE_REG, FieldAddress(RAX, Function::code_offset())); |
| 1774 __ movq(RAX, FieldAddress(CODE_REG, Code::instructions_offset())); |
1738 __ addq(RAX, Immediate(Instructions::HeaderSize() - kHeapObjectTag)); | 1775 __ addq(RAX, Immediate(Instructions::HeaderSize() - kHeapObjectTag)); |
1739 __ jmp(RAX); | 1776 __ jmp(RAX); |
1740 } | 1777 } |
1741 | 1778 |
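
The tail of the stub above shows the pattern this change converges on everywhere: load the Function's Code object into CODE_REG, load its Instructions, and jump past the Instructions header. A small sketch of the address arithmetic, with the offsets passed in as parameters since the concrete values live in the VM headers; kHeapObjectTag is the usual 1-bit object tag:

    #include <cstdint>

    constexpr intptr_t kHeapObjectTag = 1;

    // Computes the entry PC for a tagged Function pointer, mirroring
    //   movq CODE_REG, FieldAddress(RAX, Function::code_offset())
    //   movq RAX, FieldAddress(CODE_REG, Code::instructions_offset())
    //   addq RAX, Instructions::HeaderSize() - kHeapObjectTag
    inline uintptr_t EntryPoint(uintptr_t tagged_function,
                                intptr_t code_offset,            // Function::code_offset()
                                intptr_t instructions_offset,    // Code::instructions_offset()
                                intptr_t instructions_header_size) {
      // FieldAddress(obj, off) dereferences (obj - kHeapObjectTag + off).
      uintptr_t tagged_code = *reinterpret_cast<uintptr_t*>(
          tagged_function - kHeapObjectTag + code_offset);
      uintptr_t tagged_instructions = *reinterpret_cast<uintptr_t*>(
          tagged_code - kHeapObjectTag + instructions_offset);
      // The first executable byte sits right after the Instructions header.
      return tagged_instructions + instructions_header_size - kHeapObjectTag;
    }
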
1742 | 1779 |
1743 // RBX: Contains an ICData. | 1780 // RBX: Contains an ICData. |
1744 // TOS(0): return address (Dart code). | 1781 // TOS(0): return address (Dart code). |
1745 void StubCode::GenerateICCallBreakpointStub(Assembler* assembler) { | 1782 void StubCode::GenerateICCallBreakpointStub(Assembler* assembler) { |
1746 __ EnterStubFrame(); | 1783 __ EnterStubFrame(); |
1747 // Preserve IC data. | 1784 // Preserve IC data. |
1748 __ pushq(RBX); | 1785 __ pushq(RBX); |
1749 // Room for result. Debugger stub returns address of the | 1786 // Room for result. Debugger stub returns address of the |
1750 // unpatched runtime stub. | 1787 // unpatched runtime stub. |
1751 __ LoadObject(R12, Object::null_object(), PP); | 1788 __ LoadObject(R12, Object::null_object(), PP); |
1752 __ pushq(R12); // Room for result. | 1789 __ pushq(R12); // Room for result. |
1753 __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0); | 1790 __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0); |
1754 __ popq(RAX); // Address of original. | 1791 __ popq(CODE_REG); // Address of original. |
1755 __ popq(RBX); // Restore IC data. | 1792 __ popq(RBX); // Restore IC data. |
1756 __ LeaveStubFrame(); | 1793 __ LeaveStubFrame(); |
| 1794 |
| 1795 __ movq(RAX, FieldAddress(CODE_REG, Code::instructions_offset())); |
| 1796 __ addq(RAX, Immediate(Instructions::HeaderSize() - kHeapObjectTag)); |
1757 __ jmp(RAX); // Jump to original stub. | 1797 __ jmp(RAX); // Jump to original stub. |
1758 } | 1798 } |
1759 | 1799 |
1760 | 1800 |
1761 // TOS(0): return address (Dart code). | 1801 // TOS(0): return address (Dart code). |
1762 void StubCode::GenerateRuntimeCallBreakpointStub(Assembler* assembler) { | 1802 void StubCode::GenerateRuntimeCallBreakpointStub(Assembler* assembler) { |
1763 __ EnterStubFrame(); | 1803 __ EnterStubFrame(); |
1764 // Room for result. Debugger stub returns address of the | 1804 // Room for result. Debugger stub returns address of the |
1765 // unpatched runtime stub. | 1805 // unpatched runtime stub. |
1766 __ LoadObject(R12, Object::null_object(), PP); | 1806 __ LoadObject(R12, Object::null_object(), PP); |
1767 __ pushq(R12); // Room for result. | 1807 __ pushq(R12); // Room for result. |
1768 __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0); | 1808 __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0); |
1769 __ popq(RAX); // Address of original. | 1809 __ popq(CODE_REG); // Address of original. |
1770 __ LeaveStubFrame(); | 1810 __ LeaveStubFrame(); |
| 1811 |
| 1812 __ movq(RAX, FieldAddress(CODE_REG, Code::instructions_offset())); |
| 1813 __ addq(RAX, Immediate(Instructions::HeaderSize() - kHeapObjectTag)); |
1771 __ jmp(RAX); // Jump to original stub. | 1814 __ jmp(RAX); // Jump to original stub. |
1772 } | 1815 } |
1773 | 1816 |
1774 | 1817 |
1775 // Called only from unoptimized code. | 1818 // Called only from unoptimized code. |
1776 void StubCode::GenerateDebugStepCheckStub(Assembler* assembler) { | 1819 void StubCode::GenerateDebugStepCheckStub(Assembler* assembler) { |
1777 // Check single stepping. | 1820 // Check single stepping. |
1778 Label stepping, done_stepping; | 1821 Label stepping, done_stepping; |
1779 __ LoadIsolate(RAX); | 1822 __ LoadIsolate(RAX); |
1780 __ movzxb(RAX, Address(RAX, Isolate::single_step_offset())); | 1823 __ movzxb(RAX, Address(RAX, Isolate::single_step_offset())); |
(...skipping 15 matching lines...)
1796 // TOS + 1: instantiator type arguments (can be NULL). | 1839 // TOS + 1: instantiator type arguments (can be NULL). |
1797 // TOS + 2: instance. | 1840 // TOS + 2: instance. |
1798 // TOS + 3: SubtypeTestCache. | 1841 // TOS + 3: SubtypeTestCache. |
1799 // Result in RCX: null -> not found, otherwise result (true or false). | 1842 // Result in RCX: null -> not found, otherwise result (true or false). |
1800 static void GenerateSubtypeNTestCacheStub(Assembler* assembler, int n) { | 1843 static void GenerateSubtypeNTestCacheStub(Assembler* assembler, int n) { |
1801 ASSERT((1 <= n) && (n <= 3)); | 1844 ASSERT((1 <= n) && (n <= 3)); |
1802 const intptr_t kInstantiatorTypeArgumentsInBytes = 1 * kWordSize; | 1845 const intptr_t kInstantiatorTypeArgumentsInBytes = 1 * kWordSize; |
1803 const intptr_t kInstanceOffsetInBytes = 2 * kWordSize; | 1846 const intptr_t kInstanceOffsetInBytes = 2 * kWordSize; |
1804 const intptr_t kCacheOffsetInBytes = 3 * kWordSize; | 1847 const intptr_t kCacheOffsetInBytes = 3 * kWordSize; |
1805 __ movq(RAX, Address(RSP, kInstanceOffsetInBytes)); | 1848 __ movq(RAX, Address(RSP, kInstanceOffsetInBytes)); |
1806 __ LoadObject(R12, Object::null_object(), PP); | 1849 __ LoadObject(R9, Object::null_object(), PP); |
1807 if (n > 1) { | 1850 if (n > 1) { |
1808 __ LoadClass(R10, RAX, kNoRegister); | 1851 __ LoadClass(R10, RAX, kNoRegister); |
1809 // Compute instance type arguments into R13. | 1852 // Compute instance type arguments into R13. |
1810 Label has_no_type_arguments; | 1853 Label has_no_type_arguments; |
1811 __ movq(R13, R12); | 1854 __ movq(R13, R9); |
1812 __ movl(RDI, FieldAddress(R10, | 1855 __ movl(RDI, FieldAddress(R10, |
1813 Class::type_arguments_field_offset_in_words_offset())); | 1856 Class::type_arguments_field_offset_in_words_offset())); |
1814 __ cmpl(RDI, Immediate(Class::kNoTypeArguments)); | 1857 __ cmpl(RDI, Immediate(Class::kNoTypeArguments)); |
1815 __ j(EQUAL, &has_no_type_arguments, Assembler::kNearJump); | 1858 __ j(EQUAL, &has_no_type_arguments, Assembler::kNearJump); |
1816 __ movq(R13, FieldAddress(RAX, RDI, TIMES_8, 0)); | 1859 __ movq(R13, FieldAddress(RAX, RDI, TIMES_8, 0)); |
1817 __ Bind(&has_no_type_arguments); | 1860 __ Bind(&has_no_type_arguments); |
1818 } | 1861 } |
1819 __ LoadClassId(R10, RAX); | 1862 __ LoadClassId(R10, RAX); |
1820 // RAX: instance, R10: instance class id. | 1863 // RAX: instance, R10: instance class id. |
1821 // R13: instance type arguments or null, used only if n > 1. | 1864 // R13: instance type arguments or null, used only if n > 1. |
1822 __ movq(RDX, Address(RSP, kCacheOffsetInBytes)); | 1865 __ movq(RDX, Address(RSP, kCacheOffsetInBytes)); |
1823 // RDX: SubtypeTestCache. | 1866 // RDX: SubtypeTestCache. |
1824 __ movq(RDX, FieldAddress(RDX, SubtypeTestCache::cache_offset())); | 1867 __ movq(RDX, FieldAddress(RDX, SubtypeTestCache::cache_offset())); |
1825 __ addq(RDX, Immediate(Array::data_offset() - kHeapObjectTag)); | 1868 __ addq(RDX, Immediate(Array::data_offset() - kHeapObjectTag)); |
1826 // RDX: Entry start. | 1869 // RDX: Entry start. |
1827 // R10: instance class id. | 1870 // R10: instance class id. |
1828 // R13: instance type arguments. | 1871 // R13: instance type arguments. |
1829 Label loop, found, not_found, next_iteration; | 1872 Label loop, found, not_found, next_iteration; |
1830 __ SmiTag(R10); | 1873 __ SmiTag(R10); |
1831 __ Bind(&loop); | 1874 __ Bind(&loop); |
1832 __ movq(RDI, Address(RDX, kWordSize * SubtypeTestCache::kInstanceClassId)); | 1875 __ movq(RDI, Address(RDX, kWordSize * SubtypeTestCache::kInstanceClassId)); |
1833 __ cmpq(RDI, R12); | 1876 __ cmpq(RDI, R9); |
1834 __ j(EQUAL, &not_found, Assembler::kNearJump); | 1877 __ j(EQUAL, &not_found, Assembler::kNearJump); |
1835 __ cmpq(RDI, R10); | 1878 __ cmpq(RDI, R10); |
1836 if (n == 1) { | 1879 if (n == 1) { |
1837 __ j(EQUAL, &found, Assembler::kNearJump); | 1880 __ j(EQUAL, &found, Assembler::kNearJump); |
1838 } else { | 1881 } else { |
1839 __ j(NOT_EQUAL, &next_iteration, Assembler::kNearJump); | 1882 __ j(NOT_EQUAL, &next_iteration, Assembler::kNearJump); |
1840 __ movq(RDI, | 1883 __ movq(RDI, |
1841 Address(RDX, kWordSize * SubtypeTestCache::kInstanceTypeArguments)); | 1884 Address(RDX, kWordSize * SubtypeTestCache::kInstanceTypeArguments)); |
1842 __ cmpq(RDI, R13); | 1885 __ cmpq(RDI, R13); |
1843 if (n == 2) { | 1886 if (n == 2) { |
1844 __ j(EQUAL, &found, Assembler::kNearJump); | 1887 __ j(EQUAL, &found, Assembler::kNearJump); |
1845 } else { | 1888 } else { |
1846 __ j(NOT_EQUAL, &next_iteration, Assembler::kNearJump); | 1889 __ j(NOT_EQUAL, &next_iteration, Assembler::kNearJump); |
1847 __ movq(RDI, | 1890 __ movq(RDI, |
1848 Address(RDX, | 1891 Address(RDX, |
1849 kWordSize * SubtypeTestCache::kInstantiatorTypeArguments)); | 1892 kWordSize * SubtypeTestCache::kInstantiatorTypeArguments)); |
1850 __ cmpq(RDI, Address(RSP, kInstantiatorTypeArgumentsInBytes)); | 1893 __ cmpq(RDI, Address(RSP, kInstantiatorTypeArgumentsInBytes)); |
1851 __ j(EQUAL, &found, Assembler::kNearJump); | 1894 __ j(EQUAL, &found, Assembler::kNearJump); |
1852 } | 1895 } |
1853 } | 1896 } |
1854 | 1897 |
1855 __ Bind(&next_iteration); | 1898 __ Bind(&next_iteration); |
1856 __ addq(RDX, Immediate(kWordSize * SubtypeTestCache::kTestEntryLength)); | 1899 __ addq(RDX, Immediate(kWordSize * SubtypeTestCache::kTestEntryLength)); |
1857 __ jmp(&loop, Assembler::kNearJump); | 1900 __ jmp(&loop, Assembler::kNearJump); |
1858 // Fall through to not found. | 1901 // Fall through to not found. |
1859 __ Bind(&not_found); | 1902 __ Bind(&not_found); |
1860 __ movq(RCX, R12); | 1903 __ movq(RCX, R9); |
1861 __ ret(); | 1904 __ ret(); |
1862 | 1905 |
1863 __ Bind(&found); | 1906 __ Bind(&found); |
1864 __ movq(RCX, Address(RDX, kWordSize * SubtypeTestCache::kTestResult)); | 1907 __ movq(RCX, Address(RDX, kWordSize * SubtypeTestCache::kTestResult)); |
1865 __ ret(); | 1908 __ ret(); |
1866 } | 1909 } |
1867 | 1910 |
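
The probe above walks SubtypeTestCache entries of kTestEntryLength words each (instance class id, instance type arguments, instantiator type arguments, test result) and stops at a null class-id slot. A rough model with assumed types, checking only the fields the stub compares for the given n:

    #include <optional>
    #include <vector>

    struct SubtypeTestEntry {
      const void* instance_class_id_or_null;    // null marks the end of the cache
      const void* instance_type_arguments;      // compared only when n > 1
      const void* instantiator_type_arguments;  // compared only when n == 3
      bool result;                              // cached answer (kTestResult slot)
    };

    // nullopt models the "null in RCX" not-found result.
    std::optional<bool> ProbeSubtypeCache(const std::vector<SubtypeTestEntry>& cache,
                                          const void* cid, const void* inst_args,
                                          const void* instantiator_args, int n) {
      for (const SubtypeTestEntry& e : cache) {
        if (e.instance_class_id_or_null == nullptr) break;  // not found
        if (e.instance_class_id_or_null != cid) continue;
        if (n > 1 && e.instance_type_arguments != inst_args) continue;
        if (n == 3 && e.instantiator_type_arguments != instantiator_args) continue;
        return e.result;                                    // found
      }
      return std::nullopt;
    }
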
1868 | 1911 |
1869 // Used to check class and type arguments. Arguments passed on stack: | 1912 // Used to check class and type arguments. Arguments passed on stack: |
1870 // TOS + 0: return address. | 1913 // TOS + 0: return address. |
(...skipping 82 matching lines...)
1953 // RDI: function to be reoptimized. | 1996 // RDI: function to be reoptimized. |
1954 // R10: argument descriptor (preserved). | 1997 // R10: argument descriptor (preserved). |
1955 void StubCode::GenerateOptimizeFunctionStub(Assembler* assembler) { | 1998 void StubCode::GenerateOptimizeFunctionStub(Assembler* assembler) { |
1956 __ EnterStubFrame(); | 1999 __ EnterStubFrame(); |
1957 __ LoadObject(R12, Object::null_object(), PP); | 2000 __ LoadObject(R12, Object::null_object(), PP); |
1958 __ pushq(R10); | 2001 __ pushq(R10); |
1959 __ pushq(R12); // Setup space on stack for return value. | 2002 __ pushq(R12); // Setup space on stack for return value. |
1960 __ pushq(RDI); | 2003 __ pushq(RDI); |
1961 __ CallRuntime(kOptimizeInvokedFunctionRuntimeEntry, 1); | 2004 __ CallRuntime(kOptimizeInvokedFunctionRuntimeEntry, 1); |
1962 __ popq(RAX); // Discard argument. | 2005 __ popq(RAX); // Discard argument. |
1963 __ popq(RAX); // Get Code object. | 2006 __ popq(CODE_REG); // Get Code object. |
1964 __ popq(R10); // Restore argument descriptor. | 2007 __ popq(R10); // Restore argument descriptor. |
1965 __ movq(RAX, FieldAddress(RAX, Code::instructions_offset())); | 2008 __ movq(RAX, FieldAddress(CODE_REG, Code::instructions_offset())); |
1966 __ addq(RAX, Immediate(Instructions::HeaderSize() - kHeapObjectTag)); | 2009 __ addq(RAX, Immediate(Instructions::HeaderSize() - kHeapObjectTag)); |
1967 __ LeaveStubFrame(); | 2010 __ LeaveStubFrame(); |
1968 __ jmp(RAX); | 2011 __ jmp(RAX); |
1969 __ int3(); | 2012 __ int3(); |
1970 } | 2013 } |
1971 | 2014 |
1972 | 2015 |
1973 DECLARE_LEAF_RUNTIME_ENTRY(intptr_t, | 2016 DECLARE_LEAF_RUNTIME_ENTRY(intptr_t, |
1974 BigintCompare, | 2017 BigintCompare, |
1975 RawBigint* left, | 2018 RawBigint* left, |
(...skipping 15 matching lines...) Expand all Loading... |
1991 // If any of the arguments is Smi do reference compare. | 2034 // If any of the arguments is Smi do reference compare. |
1992 __ testq(left, Immediate(kSmiTagMask)); | 2035 __ testq(left, Immediate(kSmiTagMask)); |
1993 __ j(ZERO, &reference_compare); | 2036 __ j(ZERO, &reference_compare); |
1994 __ testq(right, Immediate(kSmiTagMask)); | 2037 __ testq(right, Immediate(kSmiTagMask)); |
1995 __ j(ZERO, &reference_compare); | 2038 __ j(ZERO, &reference_compare); |
1996 | 2039 |
1997 // Value compare for two doubles. | 2040 // Value compare for two doubles. |
1998 __ CompareClassId(left, kDoubleCid); | 2041 __ CompareClassId(left, kDoubleCid); |
1999 __ j(NOT_EQUAL, &check_mint, Assembler::kNearJump); | 2042 __ j(NOT_EQUAL, &check_mint, Assembler::kNearJump); |
2000 __ CompareClassId(right, kDoubleCid); | 2043 __ CompareClassId(right, kDoubleCid); |
2001 __ j(NOT_EQUAL, &done, Assembler::kNearJump); | 2044 __ j(NOT_EQUAL, &done, Assembler::kFarJump); |
2002 | 2045 |
2003 // Double values bitwise compare. | 2046 // Double values bitwise compare. |
2004 __ movq(left, FieldAddress(left, Double::value_offset())); | 2047 __ movq(left, FieldAddress(left, Double::value_offset())); |
2005 __ cmpq(left, FieldAddress(right, Double::value_offset())); | 2048 __ cmpq(left, FieldAddress(right, Double::value_offset())); |
2006 __ jmp(&done, Assembler::kNearJump); | 2049 __ jmp(&done, Assembler::kFarJump); |
2007 | 2050 |
2008 __ Bind(&check_mint); | 2051 __ Bind(&check_mint); |
2009 __ CompareClassId(left, kMintCid); | 2052 __ CompareClassId(left, kMintCid); |
2010 __ j(NOT_EQUAL, &check_bigint, Assembler::kNearJump); | 2053 __ j(NOT_EQUAL, &check_bigint, Assembler::kNearJump); |
2011 __ CompareClassId(right, kMintCid); | 2054 __ CompareClassId(right, kMintCid); |
2012 __ j(NOT_EQUAL, &done, Assembler::kNearJump); | 2055 __ j(NOT_EQUAL, &done, Assembler::kFarJump); |
2013 __ movq(left, FieldAddress(left, Mint::value_offset())); | 2056 __ movq(left, FieldAddress(left, Mint::value_offset())); |
2014 __ cmpq(left, FieldAddress(right, Mint::value_offset())); | 2057 __ cmpq(left, FieldAddress(right, Mint::value_offset())); |
2015 __ jmp(&done, Assembler::kNearJump); | 2058 __ jmp(&done, Assembler::kFarJump); |
2016 | 2059 |
2017 __ Bind(&check_bigint); | 2060 __ Bind(&check_bigint); |
2018 __ CompareClassId(left, kBigintCid); | 2061 __ CompareClassId(left, kBigintCid); |
2019 __ j(NOT_EQUAL, &reference_compare, Assembler::kNearJump); | 2062 __ j(NOT_EQUAL, &reference_compare, Assembler::kFarJump); |
2020 __ CompareClassId(right, kBigintCid); | 2063 __ CompareClassId(right, kBigintCid); |
2021 __ j(NOT_EQUAL, &done, Assembler::kNearJump); | 2064 __ j(NOT_EQUAL, &done, Assembler::kFarJump); |
2022 __ EnterStubFrame(); | 2065 __ EnterStubFrame(); |
2023 __ ReserveAlignedFrameSpace(0); | 2066 __ ReserveAlignedFrameSpace(0); |
2024 __ movq(CallingConventions::kArg1Reg, left); | 2067 __ movq(CallingConventions::kArg1Reg, left); |
2025 __ movq(CallingConventions::kArg2Reg, right); | 2068 __ movq(CallingConventions::kArg2Reg, right); |
2026 __ CallRuntime(kBigintCompareRuntimeEntry, 2); | 2069 __ CallRuntime(kBigintCompareRuntimeEntry, 2); |
2027 // Result in RAX, 0 means equal. | 2070 // Result in RAX, 0 means equal. |
2028 __ LeaveStubFrame(); | 2071 __ LeaveStubFrame(); |
2029 __ cmpq(RAX, Immediate(0)); | 2072 __ cmpq(RAX, Immediate(0)); |
2030 __ jmp(&done); | 2073 __ jmp(&done); |
2031 | 2074 |
(...skipping 82 matching lines...)
2114 __ j(ZERO, &call_target_function, Assembler::kNearJump); | 2157 __ j(ZERO, &call_target_function, Assembler::kNearJump); |
2115 __ cmpq(RDX, RAX); | 2158 __ cmpq(RDX, RAX); |
2116 __ j(NOT_EQUAL, &update, Assembler::kNearJump); | 2159 __ j(NOT_EQUAL, &update, Assembler::kNearJump); |
2117 | 2160 |
2118 __ Bind(&call_target_function); | 2161 __ Bind(&call_target_function); |
2119 // Call the target found in the cache. For a class id match, this is a | 2162 // Call the target found in the cache. For a class id match, this is a |
2120 // proper target for the given name and arguments descriptor. If the | 2163 // proper target for the given name and arguments descriptor. If the |
2121 // illegal class id was found, the target is a cache miss handler that can | 2164 // illegal class id was found, the target is a cache miss handler that can |
2122 // be invoked as a normal Dart function. | 2165 // be invoked as a normal Dart function. |
2123 __ movq(RAX, FieldAddress(RDI, RCX, TIMES_8, base + kWordSize)); | 2166 __ movq(RAX, FieldAddress(RDI, RCX, TIMES_8, base + kWordSize)); |
2124 __ movq(target, FieldAddress(RAX, Function::instructions_offset())); | 2167 __ movq(CODE_REG, FieldAddress(RAX, Function::code_offset())); |
| 2168 __ movq(target, FieldAddress(CODE_REG, Code::instructions_offset())); |
2125 // TODO(srdjan): Evaluate performance impact of moving the instruction below | 2169 // TODO(srdjan): Evaluate performance impact of moving the instruction below |
2126 // to the call site, instead of having it here. | 2170 // to the call site, instead of having it here. |
2127 __ AddImmediate( | 2171 __ AddImmediate( |
2128 target, Immediate(Instructions::HeaderSize() - kHeapObjectTag), PP); | 2172 target, Immediate(Instructions::HeaderSize() - kHeapObjectTag), PP); |
2129 } | 2173 } |
2130 | 2174 |
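
The comment in EmitMegamorphicLookup captures why the megamorphic call site needs no miss branch: a probe that lands on the illegal class id still yields a callable target, namely the cache-miss handler. A toy model under assumed names (the probing scheme and entry layout here are simplified guesses; the real probe loop is in the portion of EmitMegamorphicLookup elided above):

    #include <cstdint>
    #include <vector>

    constexpr intptr_t kIllegalCidStandIn = -1;  // placeholder for kIllegalCid

    struct MegamorphicEntry {
      intptr_t class_id;  // kIllegalCid marks an empty slot
      void* target;       // empty slots are pre-filled with the miss handler
    };

    // mask is assumed to be the table length minus one (power-of-two table).
    void* MegamorphicLookup(const std::vector<MegamorphicEntry>& buckets,
                            intptr_t receiver_cid, intptr_t mask) {
      intptr_t i = receiver_cid & mask;
      while (true) {
        const MegamorphicEntry& e = buckets[i];
        // Either a real target for this cid or the miss handler stored under the
        // illegal cid; both are invoked as normal Dart functions by the caller.
        if (e.class_id == receiver_cid || e.class_id == kIllegalCidStandIn) return e.target;
        i = (i + 1) & mask;
      }
    }
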
2131 | 2175 |
2132 // Called from megamorphic calls. | 2176 // Called from megamorphic calls. |
2133 // RDI: receiver. | 2177 // RDI: receiver. |
2134 // RBX: lookup cache. | 2178 // RBX: lookup cache. |
2135 // Result: | 2179 // Result: |
2136 // RCX: entry point. | 2180 // RCX: entry point. |
2137 void StubCode::GenerateMegamorphicLookupStub(Assembler* assembler) { | 2181 void StubCode::GenerateMegamorphicLookupStub(Assembler* assembler) { |
2138 EmitMegamorphicLookup(assembler, RDI, RBX, RCX); | 2182 EmitMegamorphicLookup(assembler, RDI, RBX, RCX); |
2139 __ ret(); | 2183 __ ret(); |
2140 } | 2184 } |
2141 | 2185 |
2142 } // namespace dart | 2186 } // namespace dart |
2143 | 2187 |
2144 #endif // defined TARGET_ARCH_X64 | 2188 #endif // defined TARGET_ARCH_X64 |