Chromium Code Reviews

Unified Diff: src/x64/virtual-frame-x64.cc

Issue 141043: X64 implementation: Emit correct merge code for virtual frames at CFG merges. (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: Created 11 years, 6 months ago
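
Context for readers outside the V8 codebase: a virtual frame records, for each expression-stack slot, where the value currently lives (its memory slot, a register, an embedded constant, or a copy of another slot). At a control-flow merge, the frames arriving along different edges must describe identical layouts, so constants and copies have to be materialized and registers shuffled into agreed positions; that is what this patch implements for x64. The following is a minimal, hypothetical model of the "mergable" invariant, not the real V8 types:

#include <vector>

// Hypothetical, simplified stand-ins for V8's FrameElement/VirtualFrame;
// the real types live in src/virtual-frame.h and the x64 backend files.
enum class Kind { Invalid, Memory, Register, Constant, Copy };

struct Element {
  Kind kind = Kind::Memory;
  bool synced = true;  // True if the value is also stored in its stack slot.
};

struct Frame {
  std::vector<Element> elements;

  // A frame is mergable when every slot is backed by a real location
  // (memory or a register).  Constants and copies are code-generator
  // bookkeeping; frames meeting at a merge point cannot be reconciled
  // slot by slot until MakeMergable turns them into real locations.
  bool IsMergable() const {
    for (const Element& e : elements) {
      if (e.kind == Kind::Constant || e.kind == Kind::Copy) return false;
    }
    return true;
  }
};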
 // Copyright 2009 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 375 matching lines...)
   } else {
     // The stored-to slot holds the same value as the top but
     // unsynced.  (We do not have copies of constants yet.)
     ASSERT(top.is_constant());
     elements_[index].clear_sync();
   }
 }


 void VirtualFrame::MakeMergable() {
-  // UNIMPLEMENTED();
-}
-
-void VirtualFrame::MergeTo(VirtualFrame* a) {
-  UNIMPLEMENTED();
+  for (int i = 0; i < element_count(); i++) {
+    FrameElement element = elements_[i];
+
+    if (element.is_constant() || element.is_copy()) {
+      if (element.is_synced()) {
+        // Just spill.
+        elements_[i] = FrameElement::MemoryElement();
+      } else {
+        // Allocate to a register.
+        FrameElement backing_element;  // Invalid if not a copy.
+        if (element.is_copy()) {
+          backing_element = elements_[element.index()];
+        }
+        Result fresh = cgen()->allocator()->Allocate();
+        ASSERT(fresh.is_valid());  // A register was spilled if all were in use.
+        elements_[i] =
+            FrameElement::RegisterElement(fresh.reg(),
+                                          FrameElement::NOT_SYNCED);
+        Use(fresh.reg(), i);
+
+        // Emit a move.
+        if (element.is_constant()) {
+          __ Move(fresh.reg(), element.handle());
+        } else {
+          ASSERT(element.is_copy());
+          // Copies are only backed by register or memory locations.
+          if (backing_element.is_register()) {
+            // The backing store may have been spilled by allocating,
+            // but that's OK.  If it was, the value is right where we
+            // want it.
+            if (!fresh.reg().is(backing_element.reg())) {
+              __ movq(fresh.reg(), backing_element.reg());
+            }
+          } else {
+            ASSERT(backing_element.is_memory());
+            __ movq(fresh.reg(), Operand(rbp, fp_relative(element.index())));
+          }
+        }
+      }
+      // No need to set the copied flag---there are no copies.
+      elements_[i].set_static_type(element.static_type());
Lasse Reichstein 2009/06/22 12:43:27  This has changed in ia32 (to setting the type to S
William Hesse 2009/06/22 14:33:40  Done.
+    } else {
+      // Clear the copy flag of non-constant, non-copy elements.
+      // They cannot be copied because copies are not allowed.
+      // The copy flag is not relied on before the end of this loop,
+      // including when registers are spilled.
+      elements_[i].clear_copied();
+    }
+  }
+}
+
+
+void VirtualFrame::MergeTo(VirtualFrame* expected) {
+  Comment cmnt(masm(), "[ Merge frame");
+  // We should always be merging the code generator's current frame to an
+  // expected frame.
+  ASSERT(cgen()->frame() == this);
+
+  // Adjust the stack pointer upward (toward the top of the virtual
+  // frame) if necessary.
+  if (stack_pointer_ < expected->stack_pointer_) {
+    int difference = expected->stack_pointer_ - stack_pointer_;
+    stack_pointer_ = expected->stack_pointer_;
+    __ subq(rsp, Immediate(difference * kPointerSize));
+  }
+
+  MergeMoveRegistersToMemory(expected);
+  MergeMoveRegistersToRegisters(expected);
+  MergeMoveMemoryToRegisters(expected);
+
+  // Adjust the stack pointer downward if necessary.
+  if (stack_pointer_ > expected->stack_pointer_) {
+    int difference = stack_pointer_ - expected->stack_pointer_;
+    stack_pointer_ = expected->stack_pointer_;
+    __ addq(rsp, Immediate(difference * kPointerSize));
+  }
+
+  // At this point, the frames should be identical.
+  ASSERT(Equals(expected));
+}
+
+
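A note on the two rsp adjustments in MergeTo above: the frame is grown before any element moves and shrunk only after them, so every slot the merge writes lies inside the region reserved by the stack pointer, and on x64 growing means subtracting because the stack grows downward. A standalone illustration of the byte arithmetic, using kPointerSize = 8 as on x64 (toy code, not the V8 helpers):

#include <cstdio>

constexpr int kPointerSize = 8;  // x64 word size.

// Returns the number of bytes the frame height changes by when a frame of
// `current` slots must become one of `expected` slots (positive = grow).
int AdjustmentBytes(int current, int expected) {
  return (expected - current) * kPointerSize;
}

int main() {
  // Frame grows by three slots: the merge emits subq(rsp, 24).
  std::printf("subq rsp, %d\n", AdjustmentBytes(5, 8));
  // Frame shrinks by two slots: the merge emits addq(rsp, 16).
  std::printf("addq rsp, %d\n", -AdjustmentBytes(7, 5));
  return 0;
}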
+void VirtualFrame::MergeMoveRegistersToMemory(VirtualFrame* expected) {
+  ASSERT(stack_pointer_ >= expected->stack_pointer_);
+
+  // Move registers, constants, and copies to memory.  Perform moves
+  // from the top downward in the frame in order to leave the backing
+  // stores of copies in registers.
+  for (int i = element_count() - 1; i >= 0; i--) {
+    FrameElement target = expected->elements_[i];
+    if (target.is_register()) continue;  // Handle registers later.
+    if (target.is_memory()) {
+      FrameElement source = elements_[i];
+      switch (source.type()) {
+        case FrameElement::INVALID:
+          // Not a legal merge move.
+          UNREACHABLE();
+          break;
+
+        case FrameElement::MEMORY:
+          // Already in place.
+          break;
+
+        case FrameElement::REGISTER:
+          Unuse(source.reg());
+          if (!source.is_synced()) {
+            __ movq(Operand(rbp, fp_relative(i)), source.reg());
+          }
+          break;
+
+        case FrameElement::CONSTANT:
+          if (!source.is_synced()) {
+            __ Move(Operand(rbp, fp_relative(i)), source.handle());
+          }
+          break;
+
+        case FrameElement::COPY:
+          if (!source.is_synced()) {
+            int backing_index = source.index();
+            FrameElement backing_element = elements_[backing_index];
+            if (backing_element.is_memory()) {
+              __ movq(kScratchRegister,
+                      Operand(rbp, fp_relative(backing_index)));
+              __ movq(Operand(rbp, fp_relative(i)), kScratchRegister);
+            } else {
+              ASSERT(backing_element.is_register());
+              __ movq(Operand(rbp, fp_relative(i)), backing_element.reg());
+            }
+          }
+          break;
+      }
+    }
+    elements_[i] = target;
+  }
+}
+
+
+void VirtualFrame::MergeMoveRegistersToRegisters(VirtualFrame* expected) {
+  // We have already done X-to-memory moves.
+  ASSERT(stack_pointer_ >= expected->stack_pointer_);
+
+  for (int i = 0; i < RegisterAllocator::kNumRegisters; i++) {
+    // Move the right value into register i if it is currently in a register.
+    int index = expected->register_location(i);
+    int use_index = register_location(i);
+    // Skip if register i is unused in the target or else if the source is
+    // not a register (this is not a register-to-register move).
+    if (index == kIllegalIndex || !elements_[index].is_register()) continue;
+
+    Register target = RegisterAllocator::ToRegister(i);
+    Register source = elements_[index].reg();
+    if (index != use_index) {
+      if (use_index == kIllegalIndex) {  // Target is currently unused.
+        // Copy contents of source from source to target.
+        // Set frame element register to target.
+        Use(target, index);
+        Unuse(source);
+        __ movq(target, source);
+      } else {
+        // Exchange contents of registers source and target.
+        // Nothing except the register backing use_index has changed.
+        elements_[use_index].set_reg(source);
+        set_register_location(target, index);
+        set_register_location(source, use_index);
+        __ xchg(source, target);
+      }
+    }
+
+    if (!elements_[index].is_synced() &&
+        expected->elements_[index].is_synced()) {
+      __ movq(Operand(rbp, fp_relative(index)), target);
+    }
+    elements_[index] = expected->elements_[index];
+  }
+}
+
+
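In MergeMoveRegistersToRegisters above, a value whose source and target are both registers needs no scratch location: if the target register is free the code emits movq, and if it is occupied it emits xchg, and the bookkeeping updates mean any permutation of live registers resolves one cycle at a time. A standalone toy version of that exchange-only shuffle, with plain ints standing in for registers (hypothetical illustration, not V8 code):

#include <cstdio>
#include <utility>
#include <vector>

// Toy illustration only: route each value to its destination slot using
// pairwise exchanges, the way the merge code uses xchg when both the
// source and the target register are live.  dst[i] names the slot where
// the value currently in slot i must end up; dst is a permutation.
void ShuffleWithExchanges(std::vector<int>& regs, std::vector<int> dst) {
  const int n = static_cast<int>(regs.size());
  for (int i = 0; i < n; i++) {
    // Swap until the value sitting in slot i is the one that belongs there.
    while (dst[i] != i) {
      int j = dst[i];
      std::swap(regs[i], regs[j]);  // The "xchg" step.
      std::swap(dst[i], dst[j]);    // Slot j now holds its final value.
    }
  }
}

int main() {
  std::vector<int> regs = {10, 20, 30};
  // Slot 0's value belongs in slot 2, slot 1's in slot 0, slot 2's in slot 1.
  ShuffleWithExchanges(regs, {2, 0, 1});
  std::printf("%d %d %d\n", regs[0], regs[1], regs[2]);  // Prints: 20 30 10
  return 0;
}

Each exchange finalizes at least one slot, so the loop terminates after at most n - 1 swaps, which mirrors the emitted xchg sequence.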
+void VirtualFrame::MergeMoveMemoryToRegisters(VirtualFrame* expected) {
+  // Move memory, constants, and copies to registers.  This is the
+  // final step and since it is not done from the bottom up, but in
+  // register code order, we have special code to ensure that the backing
+  // elements of copies are in their correct locations when we
+  // encounter the copies.
+  for (int i = 0; i < RegisterAllocator::kNumRegisters; i++) {
+    int index = expected->register_location(i);
+    if (index != kIllegalIndex) {
+      FrameElement source = elements_[index];
+      FrameElement target = expected->elements_[index];
+      Register target_reg = RegisterAllocator::ToRegister(i);
+      ASSERT(target.reg().is(target_reg));
+      switch (source.type()) {
+        case FrameElement::INVALID:  // Fall through.
+          UNREACHABLE();
+          break;
+        case FrameElement::REGISTER:
+          ASSERT(source.Equals(target));
+          // Go to next iteration.  Skips Use(target_reg) and syncing
+          // below.  It is safe to skip syncing because a target
+          // register frame element would only be synced if all source
+          // elements were.
+          continue;
+          break;
+        case FrameElement::MEMORY:
+          ASSERT(index <= stack_pointer_);
+          __ movq(target_reg, Operand(rbp, fp_relative(index)));
+          break;
+
+        case FrameElement::CONSTANT:
+          __ Move(target_reg, source.handle());
+          break;
+
+        case FrameElement::COPY: {
+          int backing_index = source.index();
+          FrameElement backing = elements_[backing_index];
+          ASSERT(backing.is_memory() || backing.is_register());
+          if (backing.is_memory()) {
+            ASSERT(backing_index <= stack_pointer_);
+            // Code optimization if backing store should also move
+            // to a register: move backing store to its register first.
+            if (expected->elements_[backing_index].is_register()) {
+              FrameElement new_backing = expected->elements_[backing_index];
+              Register new_backing_reg = new_backing.reg();
+              ASSERT(!is_used(new_backing_reg));
+              elements_[backing_index] = new_backing;
+              Use(new_backing_reg, backing_index);
+              __ movq(new_backing_reg,
+                      Operand(rbp, fp_relative(backing_index)));
Lasse Reichstein 2009/06/22 12:43:27  Indentation.
William Hesse 2009/06/22 14:33:40  Done.
+              __ movq(target_reg, new_backing_reg);
+            } else {
+              __ movq(target_reg, Operand(rbp, fp_relative(backing_index)));
+            }
+          } else {
+            __ movq(target_reg, backing.reg());
+          }
+        }
+      }
+      // Ensure the proper sync state.
+      if (target.is_synced() && !source.is_synced()) {
+        __ movq(Operand(rbp, fp_relative(index)), target_reg);
+      }
+      Use(target_reg, index);
+      elements_[index] = target;
+    }
+  }
 }


 Result VirtualFrame::Pop() {
   FrameElement element = elements_.RemoveLast();
   int index = element_count();
   ASSERT(element.is_valid());

   bool pop_needed = (stack_pointer_ == index);
   if (pop_needed) {
(...skipping 69 matching lines...)
       // This function should not be called with synced elements.
       // (memory elements are always synced).
       UNREACHABLE();
       break;

     case FrameElement::REGISTER:
       __ movq(Operand(rbp, fp_relative(index)), element.reg());
       break;

     case FrameElement::CONSTANT:
-      if (element.handle()->IsSmi()) {
-        if (CodeGeneratorScope::Current()->IsUnsafeSmi(element.handle())) {
-          CodeGeneratorScope::Current()->LoadUnsafeSmi(kScratchRegister,
-                                                       element.handle());
-        } else {
-          __ movq(kScratchRegister, element.handle(), RelocInfo::NONE);
-        }
-      } else {
-        __ movq(kScratchRegister,
-                element.handle(),
-                RelocInfo::EMBEDDED_OBJECT);
-      }
-      __ movq(Operand(rbp, fp_relative(index)), kScratchRegister);
+      __ Move(Operand(rbp, fp_relative(index)), element.handle());
       break;

     case FrameElement::COPY: {
       int backing_index = element.index();
       FrameElement backing_element = elements_[backing_index];
       if (backing_element.is_memory()) {
         __ movq(kScratchRegister, Operand(rbp, fp_relative(backing_index)));
         __ movq(Operand(rbp, fp_relative(index)), kScratchRegister);
       } else {
         ASSERT(backing_element.is_register());
(...skipping 21 matching lines...)
     case FrameElement::MEMORY:
       // No memory elements exist above the stack pointer.
       UNREACHABLE();
       break;

     case FrameElement::REGISTER:
       __ push(element.reg());
       break;

     case FrameElement::CONSTANT:
-      if (element.handle()->IsSmi()) {
-        if (CodeGeneratorScope::Current()->IsUnsafeSmi(element.handle())) {
-          CodeGeneratorScope::Current()->LoadUnsafeSmi(kScratchRegister,
-                                                       element.handle());
-        } else {
-          CodeGeneratorScope::Current()->masm()->
-              movq(kScratchRegister, element.handle(), RelocInfo::NONE);
-        }
-      } else {
-        CodeGeneratorScope::Current()->masm()->
-            movq(kScratchRegister,
-                 element.handle(),
-                 RelocInfo::EMBEDDED_OBJECT);
-      }
+      __ Move(kScratchRegister, element.handle());
       __ push(kScratchRegister);
       break;

     case FrameElement::COPY: {
       int backing_index = element.index();
       FrameElement backing = elements_[backing_index];
       ASSERT(backing.is_memory() || backing.is_register());
       if (backing.is_memory()) {
         __ push(Operand(rbp, fp_relative(backing_index)));
       } else {
(...skipping 73 matching lines...)
   PrepareForCall(arg_count + 2, arg_count + 1);
   return RawCallCodeObject(ic, mode);
 }




 #undef __

 } } // namespace v8::internal
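
Both CONSTANT cases deleted above had open-coded the same smi-versus-heap-object dance; this patch routes them through the MacroAssembler Move overloads that this CL touches in src/x64/macro-assembler-x64.cc (the previous file in the review). As a hedged sketch, the shape of those helpers can be reconstructed from the deleted code; this is an inference, not the actual implementation, and the real helper presumably still funnels unsafe smis through LoadUnsafeSmi:

// Hedged sketch, reconstructed from the deleted inline code; not the
// actual implementation in src/x64/macro-assembler-x64.cc.
void MacroAssembler::Move(Register dst, Handle<Object> source) {
  if (source->IsSmi()) {
    // A smi encodes its integer in the tagged word itself, so the
    // immediate needs no relocation record (RelocInfo::NONE).
    movq(dst, source, RelocInfo::NONE);
  } else {
    // A heap object needs an EMBEDDED_OBJECT relocation entry so the GC
    // can update the embedded pointer if the object moves.
    movq(dst, source, RelocInfo::EMBEDDED_OBJECT);
  }
}

void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
  // x64 has no 64-bit immediate-to-memory move, so the value goes through
  // kScratchRegister, exactly as the deleted code did explicitly.
  Move(kScratchRegister, source);
  movq(dst, kScratchRegister);
}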
