OLD | NEW |
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" | 5 #include "vm/globals.h" |
6 #if defined(TARGET_ARCH_IA32) | 6 #if defined(TARGET_ARCH_IA32) |
7 | 7 |
8 #include "vm/assembler.h" | 8 #include "vm/assembler.h" |
9 #include "vm/compiler.h" | 9 #include "vm/compiler.h" |
10 #include "vm/dart_entry.h" | 10 #include "vm/dart_entry.h" |
(...skipping 341 matching lines...)
352 // | 352 // |
353 // Parts of the code cannot trigger GC; other parts can. | 353 // Parts of the code cannot trigger GC; other parts can. |
354 static void GenerateDeoptimizationSequence(Assembler* assembler, | 354 static void GenerateDeoptimizationSequence(Assembler* assembler, |
355 DeoptStubKind kind) { | 355 DeoptStubKind kind) { |
356 // Leaf runtime function DeoptimizeCopyFrame expects a Dart frame. | 356 // Leaf runtime function DeoptimizeCopyFrame expects a Dart frame. |
357 __ EnterDartFrame(0); | 357 __ EnterDartFrame(0); |
358 // The code in this frame may not cause GC. kDeoptimizeCopyFrameRuntimeEntry | 358 // The code in this frame may not cause GC. kDeoptimizeCopyFrameRuntimeEntry |
359 // and kDeoptimizeFillFrameRuntimeEntry are leaf runtime calls. | 359 // and kDeoptimizeFillFrameRuntimeEntry are leaf runtime calls. |
360 const intptr_t saved_result_slot_from_fp = | 360 const intptr_t saved_result_slot_from_fp = |
361 kFirstLocalSlotFromFp + 1 - (kNumberOfCpuRegisters - EAX); | 361 kFirstLocalSlotFromFp + 1 - (kNumberOfCpuRegisters - EAX); |
| 362 const intptr_t saved_exception_slot_from_fp = |
| 363 kFirstLocalSlotFromFp + 1 - (kNumberOfCpuRegisters - EAX); |
| 364 const intptr_t saved_stacktrace_slot_from_fp = |
| 365 kFirstLocalSlotFromFp + 1 - (kNumberOfCpuRegisters - EDX); |
362 // Result in EAX is preserved as part of pushing all registers below. | 366 // Result in EAX is preserved as part of pushing all registers below. |
363 | 367 |
364 // Push registers in their enumeration order: lowest register number at | 368 // Push registers in their enumeration order: lowest register number at |
365 // lowest address. | 369 // lowest address. |
366 for (intptr_t i = kNumberOfCpuRegisters - 1; i >= 0; i--) { | 370 for (intptr_t i = kNumberOfCpuRegisters - 1; i >= 0; i--) { |
367 if (i == CODE_REG) { | 371 if (i == CODE_REG) { |
368 // Save the original value of CODE_REG pushed before invoking this stub | 372 // Save the original value of CODE_REG pushed before invoking this stub |
369 // instead of the value used to call this stub. | 373 // instead of the value used to call this stub. |
370 __ pushl(Address(EBP, 2 * kWordSize)); | 374 __ pushl(Address(EBP, 2 * kWordSize)); |
371 } else { | 375 } else { |
372 __ pushl(static_cast<Register>(i)); | 376 __ pushl(static_cast<Register>(i)); |
373 } | 377 } |
374 } | 378 } |
375 __ subl(ESP, Immediate(kNumberOfXmmRegisters * kFpuRegisterSize)); | 379 __ subl(ESP, Immediate(kNumberOfXmmRegisters * kFpuRegisterSize)); |
376 intptr_t offset = 0; | 380 intptr_t offset = 0; |
377 for (intptr_t reg_idx = 0; reg_idx < kNumberOfXmmRegisters; ++reg_idx) { | 381 for (intptr_t reg_idx = 0; reg_idx < kNumberOfXmmRegisters; ++reg_idx) { |
378 XmmRegister xmm_reg = static_cast<XmmRegister>(reg_idx); | 382 XmmRegister xmm_reg = static_cast<XmmRegister>(reg_idx); |
379 __ movups(Address(ESP, offset), xmm_reg); | 383 __ movups(Address(ESP, offset), xmm_reg); |
380 offset += kFpuRegisterSize; | 384 offset += kFpuRegisterSize; |
381 } | 385 } |
382 | 386 |
383 __ movl(ECX, ESP); // Preserve saved registers block. | 387 __ movl(ECX, ESP); // Preserve saved registers block. |
384 __ ReserveAlignedFrameSpace(2 * kWordSize); | 388 __ ReserveAlignedFrameSpace(2 * kWordSize); |
385 __ movl(Address(ESP, 0 * kWordSize), ECX); // Start of register block. | 389 __ movl(Address(ESP, 0 * kWordSize), ECX); // Start of register block. |
386 __ movl(Address(ESP, 1 * kWordSize), Immediate(kind == kLazyDeopt ? 1 : 0)); | 390 bool is_lazy = (kind == kLazyDeoptFromReturn) || |
| 391 (kind == kLazyDeoptFromThrow); |
| 392 __ movl(Address(ESP, 1 * kWordSize), Immediate(is_lazy ? 1 : 0)); |
387 __ CallRuntime(kDeoptimizeCopyFrameRuntimeEntry, 2); | 393 __ CallRuntime(kDeoptimizeCopyFrameRuntimeEntry, 2); |
388 // Result (EAX) is stack-size (FP - SP) in bytes. | 394 // Result (EAX) is stack-size (FP - SP) in bytes. |
389 | 395 |
390 const bool preserve_result = (kind == kLazyDeopt); | 396 if (kind == kLazyDeoptFromReturn) { |
391 if (preserve_result) { | |
392 // Restore result into EBX temporarily. | 397 // Restore result into EBX temporarily. |
393 __ movl(EBX, Address(EBP, saved_result_slot_from_fp * kWordSize)); | 398 __ movl(EBX, Address(EBP, saved_result_slot_from_fp * kWordSize)); |
| 399 } else if (kind == kLazyDeoptFromThrow) { |
| 400 // Restore exception into EBX and stacktrace into ECX temporarily. |
| 401 __ movl(EBX, Address(EBP, saved_exception_slot_from_fp * kWordSize)); |
| 402 __ movl(ECX, Address(EBP, saved_stacktrace_slot_from_fp * kWordSize)); |
394 } | 403 } |
395 | 404 |
396 __ LeaveFrame(); | 405 __ LeaveFrame(); |
397 __ popl(EDX); // Preserve return address. | 406 __ popl(EDX); // Preserve return address. |
398 __ movl(ESP, EBP); // Discard optimized frame. | 407 __ movl(ESP, EBP); // Discard optimized frame. |
399 __ subl(ESP, EAX); // Reserve space for deoptimized frame. | 408 __ subl(ESP, EAX); // Reserve space for deoptimized frame. |
400 __ pushl(EDX); // Restore return address. | 409 __ pushl(EDX); // Restore return address. |
401 | 410 |
402 // Leaf runtime function DeoptimizeFillFrame expects a Dart frame. | 411 // Leaf runtime function DeoptimizeFillFrame expects a Dart frame. |
403 __ EnterDartFrame(0); | 412 __ EnterDartFrame(0); |
404 if (preserve_result) { | 413 if (kind == kLazyDeoptFromReturn) { |
405 __ pushl(EBX); // Preserve result as first local. | 414 __ pushl(EBX); // Preserve result as first local. |
| 415 } else if (kind == kLazyDeoptFromThrow) { |
| 416 __ pushl(EBX); // Preserve exception as first local. |
| 417 __ pushl(ECX); // Preserve stacktrace as first local. |
406 } | 418 } |
407 __ ReserveAlignedFrameSpace(1 * kWordSize); | 419 __ ReserveAlignedFrameSpace(1 * kWordSize); |
408 __ movl(Address(ESP, 0), EBP); // Pass last FP as parameter on stack. | 420 __ movl(Address(ESP, 0), EBP); // Pass last FP as parameter on stack. |
409 __ CallRuntime(kDeoptimizeFillFrameRuntimeEntry, 1); | 421 __ CallRuntime(kDeoptimizeFillFrameRuntimeEntry, 1); |
410 if (preserve_result) { | 422 if (kind == kLazyDeoptFromReturn) { |
411 // Restore result into EBX. | 423 // Restore result into EBX. |
412 __ movl(EBX, Address(EBP, kFirstLocalSlotFromFp * kWordSize)); | 424 __ movl(EBX, Address(EBP, kFirstLocalSlotFromFp * kWordSize)); |
| 425 } else if (kind == kLazyDeoptFromThrow) { |
| 426 // Restore exception into EBX and stacktrace into ECX. |
| 427 __ movl(EBX, Address(EBP, kFirstLocalSlotFromFp * kWordSize)); |
| 428 __ movl(ECX, Address(EBP, (kFirstLocalSlotFromFp - 1) * kWordSize)); |
413 } | 429 } |
414 // Code above cannot cause GC. | 430 // Code above cannot cause GC. |
415 __ LeaveFrame(); | 431 __ LeaveFrame(); |
416 | 432 |
417 // Frame is fully rewritten at this point and it is safe to perform a GC. | 433 // Frame is fully rewritten at this point and it is safe to perform a GC. |
418 // Materialize any objects that were deferred by FillFrame because they | 434 // Materialize any objects that were deferred by FillFrame because they |
419 // require allocation. | 435 // require allocation. |
420 __ EnterStubFrame(); | 436 __ EnterStubFrame(); |
421 if (preserve_result) { | 437 if (kind == kLazyDeoptFromReturn) { |
422 __ pushl(EBX); // Preserve result, it will be GC-d here. | 438 __ pushl(EBX); // Preserve result, it will be GC-d here. |
| 439 } else if (kind == kLazyDeoptFromThrow) { |
| 440 __ pushl(EBX); // Preserve exception, it will be GC-d here. |
| 441 __ pushl(ECX); // Preserve stacktrace, it will be GC-d here. |
423 } | 442 } |
424 __ pushl(Immediate(Smi::RawValue(0))); // Space for the result. | 443 __ pushl(Immediate(Smi::RawValue(0))); // Space for the result. |
425 __ CallRuntime(kDeoptimizeMaterializeRuntimeEntry, 0); | 444 __ CallRuntime(kDeoptimizeMaterializeRuntimeEntry, 0); |
426 // Result tells stub how many bytes to remove from the expression stack | 445 // Result tells stub how many bytes to remove from the expression stack |
427 // of the bottom-most frame. They were used as materialization arguments. | 446 // of the bottom-most frame. They were used as materialization arguments. |
428 __ popl(EBX); | 447 __ popl(EBX); |
429 __ SmiUntag(EBX); | 448 __ SmiUntag(EBX); |
430 if (preserve_result) { | 449 if (kind == kLazyDeoptFromReturn) { |
431 __ popl(EAX); // Restore result. | 450 __ popl(EAX); // Restore result. |
| 451 } else if (kind == kLazyDeoptFromThrow) { |
| 452 __ popl(EDX); // Restore stacktrace. |
| 453 __ popl(EAX); // Restore exception. |
432 } | 454 } |
433 __ LeaveFrame(); | 455 __ LeaveFrame(); |
434 | 456 |
435 __ popl(ECX); // Pop return address. | 457 __ popl(ECX); // Pop return address. |
436 __ addl(ESP, EBX); // Remove materialization arguments. | 458 __ addl(ESP, EBX); // Remove materialization arguments. |
437 __ pushl(ECX); // Push return address. | 459 __ pushl(ECX); // Push return address. |
438 __ ret(); | 460 __ ret(); |
439 } | 461 } |
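
The saved_*_slot_from_fp constants above fall out of the push loop: registers are pushed highest-number-first, so register r ends up (kNumberOfCpuRegisters - 1 - r) slots below the first local. A minimal standalone sketch of that arithmetic, assuming the IA32 values kNumberOfCpuRegisters = 8, EAX = 0, EDX = 2, and kFirstLocalSlotFromFp = -2 (restated here so the example compiles on its own; treat them as assumptions, not the VM's authoritative constants):

    #include <cstdio>

    int main() {
      // Assumed IA32 constants, mirroring the VM's register and
      // stack-frame headers; restated for a self-contained example.
      const int kNumberOfCpuRegisters = 8;
      const int kFirstLocalSlotFromFp = -2;
      const int EAX = 0;
      const int EDX = 2;
      // Register r sits (kNumberOfCpuRegisters - 1 - r) slots below the
      // first local, which is what the "+ 1" in the formula accounts for.
      const int result_or_exception_slot =
          kFirstLocalSlotFromFp + 1 - (kNumberOfCpuRegisters - EAX);
      const int stacktrace_slot =
          kFirstLocalSlotFromFp + 1 - (kNumberOfCpuRegisters - EDX);
      printf("EAX (result/exception) slot from FP: %d\n",
             result_or_exception_slot);                        // -9
      printf("EDX (stacktrace) slot from FP: %d\n", stacktrace_slot);  // -7
      return 0;
    }

This is also why saved_result_slot_from_fp and saved_exception_slot_from_fp coincide: the return value and the thrown exception both arrive in EAX, while the stacktrace arrives in EDX.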
440 | 462 |
441 | 463 |
442 // TOS: return address + call-instruction-size (5 bytes). | 464 // TOS: return address + call-instruction-size (5 bytes). |
443 // EAX: result, must be preserved | 465 // EAX: result, must be preserved |
444 void StubCode::GenerateDeoptimizeLazyStub(Assembler* assembler) { | 466 void StubCode::GenerateDeoptimizeLazyFromReturnStub(Assembler* assembler) { |
445 // Correct return address to point just after the call that is being | 467 // Correct return address to point just after the call that is being |
446 // deoptimized. | 468 // deoptimized. |
447 __ popl(EBX); | 469 __ popl(EBX); |
448 __ subl(EBX, Immediate(CallPattern::pattern_length_in_bytes())); | 470 __ subl(EBX, Immediate(CallPattern::pattern_length_in_bytes())); |
449 __ pushl(EBX); | 471 __ pushl(EBX); |
450 GenerateDeoptimizationSequence(assembler, kLazyDeopt); | 472 GenerateDeoptimizationSequence(assembler, kLazyDeoptFromReturn); |
451 } | 473 } |
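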
452 | 474 |
453 | 475 |
| 476 // TOS: return address + call-instruction-size (5 bytes). |
| 477 // EAX: exception, must be preserved |
| 478 // EDX: stacktrace, must be preserved |
| 479 void StubCode::GenerateDeoptimizeLazyFromThrowStub(Assembler* assembler) { |
| 480 // Correct return address to point just after the call that is being |
| 481 // deoptimized. |
| 482 __ popl(EBX); |
| 483 __ subl(EBX, Immediate(CallPattern::pattern_length_in_bytes())); |
| 484 __ pushl(EBX); |
| 485 GenerateDeoptimizationSequence(assembler, kLazyDeoptFromThrow); |
| 486 } |
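
Both lazy stubs open with the same three-instruction fixup: TOS holds the return address advanced by one call-pattern length, so popping it, subtracting CallPattern::pattern_length_in_bytes(), and pushing it back makes it point just after the deoptimized call. A standalone sketch of the adjustment, assuming the 5-byte IA32 near-call encoding (opcode E8 plus a 32-bit displacement) that the "5 bytes" comments above refer to; the address itself is hypothetical:

    #include <cstdint>
    #include <cstdio>

    int main() {
      // Assumed: an IA32 near call is E8 <imm32>, i.e. 5 bytes, matching
      // CallPattern::pattern_length_in_bytes() on this architecture.
      const uint32_t kCallPatternLengthInBytes = 5;
      // Hypothetical TOS value: the real return address plus one pattern.
      uint32_t tos = 0x08001234u + kCallPatternLengthInBytes;
      uint32_t corrected = tos - kCallPatternLengthInBytes;
      printf("corrected return address: 0x%08x\n", corrected);  // 0x08001234
      return 0;
    }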
| 487 |
| 488 |
454 void StubCode::GenerateDeoptimizeStub(Assembler* assembler) { | 489 void StubCode::GenerateDeoptimizeStub(Assembler* assembler) { |
455 GenerateDeoptimizationSequence(assembler, kEagerDeopt); | 490 GenerateDeoptimizationSequence(assembler, kEagerDeopt); |
456 } | 491 } |
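
For reference, the three DeoptStubKind values exercised by these entry points. This is a summary sketch, not the VM's actual declaration (which lives elsewhere in the sources and may order the enumerators differently):

    // Summary of the deopt entry points wired up in this patch.
    enum DeoptStubKind {
      kEagerDeopt,           // GenerateDeoptimizeStub: nothing live to keep.
      kLazyDeoptFromReturn,  // preserves the return value in EAX.
      kLazyDeoptFromThrow,   // preserves exception (EAX) and stacktrace (EDX).
    };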
457 | 492 |
458 | 493 |
459 static void GenerateDispatcherCode(Assembler* assembler, | 494 static void GenerateDispatcherCode(Assembler* assembler, |
460 Label* call_target_function) { | 495 Label* call_target_function) { |
461 __ Comment("NoSuchMethodDispatch"); | 496 __ Comment("NoSuchMethodDispatch"); |
462 // When lazily generated invocation dispatchers are disabled, the | 497 // When lazily generated invocation dispatchers are disabled, the |
463 // miss-handler may return null. | 498 // miss-handler may return null. |
(...skipping 1573 matching lines...)
2037 } | 2072 } |
2038 | 2073 |
2039 | 2074 |
2040 void StubCode::GenerateFrameAwaitingMaterializationStub(Assembler* assembler) { | 2075 void StubCode::GenerateFrameAwaitingMaterializationStub(Assembler* assembler) { |
2041 __ int3(); | 2076 __ int3(); |
2042 } | 2077 } |
2043 | 2078 |
2044 } // namespace dart | 2079 } // namespace dart |
2045 | 2080 |
2046 #endif // defined TARGET_ARCH_IA32 | 2081 #endif // defined TARGET_ARCH_IA32 |