Chromium Code Reviews

Side by Side Diff: src/ia32/code-stubs-ia32.h

Issue 7945009: Merge experimental/gc branch to the bleeding_edge. (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: Created 9 years, 3 months ago
// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
(...skipping 42 matching lines...)
  TranscendentalCache::Type type_;
  ArgumentType argument_type_;

  Major MajorKey() { return TranscendentalCache; }
  int MinorKey() { return type_ | argument_type_; }
  Runtime::FunctionId RuntimeFunction();
  void GenerateOperation(MacroAssembler* masm);
};


class StoreBufferOverflowStub: public CodeStub {
 public:
  explicit StoreBufferOverflowStub(SaveFPRegsMode save_fp)
      : save_doubles_(save_fp) { }

  void Generate(MacroAssembler* masm);

  virtual bool CompilingCallsToThisStubIsGCSafe() { return true; }
  static void GenerateFixedRegStubsAheadOfTime();
  virtual bool SometimesSetsUpAFrame() { return false; }

 private:
  SaveFPRegsMode save_doubles_;

  Major MajorKey() { return StoreBufferOverflow; }
  int MinorKey() { return (save_doubles_ == kSaveFPRegs) ? 1 : 0; }
};


class UnaryOpStub: public CodeStub {
 public:
  UnaryOpStub(Token::Value op,
              UnaryOverwriteMode mode,
              UnaryOpIC::TypeInfo operand_type = UnaryOpIC::UNINITIALIZED)
      : op_(op),
        mode_(mode),
        operand_type_(operand_type) {
  }

(...skipping 373 matching lines...)
  class IndexBits: public BitField<int, 6, 3> {};
  class LookupModeBits: public BitField<LookupMode, 9, 1> {};

  Register dictionary_;
  Register result_;
  Register index_;
  LookupMode mode_;
};


class RecordWriteStub: public CodeStub {
 public:
  RecordWriteStub(Register object,
                  Register value,
                  Register address,
                  RememberedSetAction remembered_set_action,
                  SaveFPRegsMode fp_mode)
      : object_(object),
        value_(value),
        address_(address),
        remembered_set_action_(remembered_set_action),
        save_fp_regs_mode_(fp_mode),
        regs_(object,   // An input reg.
              address,  // An input reg.
              value) {  // One scratch reg.
  }

  enum Mode {
    STORE_BUFFER_ONLY,
    INCREMENTAL,
    INCREMENTAL_COMPACTION
  };

  virtual bool CompilingCallsToThisStubIsGCSafe();
  static void GenerateFixedRegStubsAheadOfTime();
  virtual bool SometimesSetsUpAFrame() { return false; }

  static const byte kTwoByteNopInstruction = 0x3c;   // Cmpb al, #imm8.
  static const byte kTwoByteJumpInstruction = 0xeb;  // Jmp #imm8.

  static const byte kFiveByteNopInstruction = 0x3d;   // Cmpl eax, #imm32.
  static const byte kFiveByteJumpInstruction = 0xe9;  // Jmp #imm32.

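  // Note (added for illustration; layout inferred from the constants above
  // and from GetMode()/Patch() below): the stub is assumed to begin with a
  // two-byte instruction followed by a five-byte one, so its first seven
  // bytes can be toggled in place between three shapes:
  //   STORE_BUFFER_ONLY:       3c ..  3d .. .. .. ..  (2-byte nop, 5-byte nop)
  //   INCREMENTAL:             eb ..  3d .. .. .. ..  (2-byte jump)
  //   INCREMENTAL_COMPACTION:  3c ..  e9 .. .. .. ..  (2-byte nop, 5-byte jump)
  // GetMode() reads bytes 0 and 2 to recover the state and Patch() rewrites
  // them, flushing the instruction cache for exactly those seven bytes.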
  static Mode GetMode(Code* stub) {
    byte first_instruction = stub->instruction_start()[0];
    byte second_instruction = stub->instruction_start()[2];

    if (first_instruction == kTwoByteJumpInstruction) {
      return INCREMENTAL;
    }

    ASSERT(first_instruction == kTwoByteNopInstruction);

    if (second_instruction == kFiveByteJumpInstruction) {
      return INCREMENTAL_COMPACTION;
    }

    ASSERT(second_instruction == kFiveByteNopInstruction);

    return STORE_BUFFER_ONLY;
  }

  static void Patch(Code* stub, Mode mode) {
    switch (mode) {
      case STORE_BUFFER_ONLY:
        ASSERT(GetMode(stub) == INCREMENTAL ||
               GetMode(stub) == INCREMENTAL_COMPACTION);
        stub->instruction_start()[0] = kTwoByteNopInstruction;
        stub->instruction_start()[2] = kFiveByteNopInstruction;
        break;
      case INCREMENTAL:
        ASSERT(GetMode(stub) == STORE_BUFFER_ONLY);
        stub->instruction_start()[0] = kTwoByteJumpInstruction;
        break;
      case INCREMENTAL_COMPACTION:
        ASSERT(GetMode(stub) == STORE_BUFFER_ONLY);
        stub->instruction_start()[0] = kTwoByteNopInstruction;
        stub->instruction_start()[2] = kFiveByteJumpInstruction;
        break;
    }
    ASSERT(GetMode(stub) == mode);
    CPU::FlushICache(stub->instruction_start(), 7);
  }

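  // Usage sketch (hypothetical helper, not part of the stub's interface):
  // how a caller might move a generated RecordWrite stub between modes using
  // GetMode() and Patch() above.  Patch() asserts that a stub only switches
  // between STORE_BUFFER_ONLY and one of the incremental modes, so an
  // already-incremental stub has to be dropped back to STORE_BUFFER_ONLY
  // first.
  static void SetIncrementalModeSketch(Code* stub, bool compacting) {
    if (GetMode(stub) != STORE_BUFFER_ONLY) {
      Patch(stub, STORE_BUFFER_ONLY);  // Return to the plain mode first.
    }
    Patch(stub, compacting ? INCREMENTAL_COMPACTION : INCREMENTAL);
  }
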
 private:
  // This is a helper class for freeing up 3 scratch registers, where the
  // third is always ecx (needed for shift operations).  The input is two
  // registers that must be preserved and one scratch register provided by
  // the caller.
  class RegisterAllocation {
   public:
    RegisterAllocation(Register object,
                       Register address,
                       Register scratch0)
        : object_orig_(object),
          address_orig_(address),
          scratch0_orig_(scratch0),
          object_(object),
          address_(address),
          scratch0_(scratch0) {
      ASSERT(!AreAliased(scratch0, object, address, no_reg));
      scratch1_ = GetRegThatIsNotEcxOr(object_, address_, scratch0_);
      if (scratch0.is(ecx)) {
        scratch0_ = GetRegThatIsNotEcxOr(object_, address_, scratch1_);
      }
      if (object.is(ecx)) {
        object_ = GetRegThatIsNotEcxOr(address_, scratch0_, scratch1_);
      }
      if (address.is(ecx)) {
        address_ = GetRegThatIsNotEcxOr(object_, scratch0_, scratch1_);
      }
      ASSERT(!AreAliased(scratch0_, object_, address_, ecx));
    }

    void Save(MacroAssembler* masm) {
      ASSERT(!address_orig_.is(object_));
      ASSERT(object_.is(object_orig_) || address_.is(address_orig_));
      ASSERT(!AreAliased(object_, address_, scratch1_, scratch0_));
      ASSERT(!AreAliased(object_orig_, address_, scratch1_, scratch0_));
      ASSERT(!AreAliased(object_, address_orig_, scratch1_, scratch0_));
      // We don't have to save scratch0_orig_ because it was given to us as
      // a scratch register.  But if we had to switch to a different reg then
      // we should save the new scratch0_.
      if (!scratch0_.is(scratch0_orig_)) masm->push(scratch0_);
      if (!ecx.is(scratch0_orig_) &&
          !ecx.is(object_orig_) &&
          !ecx.is(address_orig_)) {
        masm->push(ecx);
      }
      masm->push(scratch1_);
      if (!address_.is(address_orig_)) {
        masm->push(address_);
        masm->mov(address_, address_orig_);
      }
      if (!object_.is(object_orig_)) {
        masm->push(object_);
        masm->mov(object_, object_orig_);
      }
    }

    void Restore(MacroAssembler* masm) {
      // These will have been preserved the entire time, so we just need to
      // move them back.  Only in one case is the orig_ reg different from
      // the plain one, since only one of them can alias with ecx.
      if (!object_.is(object_orig_)) {
        masm->mov(object_orig_, object_);
        masm->pop(object_);
      }
      if (!address_.is(address_orig_)) {
        masm->mov(address_orig_, address_);
        masm->pop(address_);
      }
      masm->pop(scratch1_);
      if (!ecx.is(scratch0_orig_) &&
          !ecx.is(object_orig_) &&
          !ecx.is(address_orig_)) {
        masm->pop(ecx);
      }
      if (!scratch0_.is(scratch0_orig_)) masm->pop(scratch0_);
    }

    // If we have to call into C then we need to save and restore all
    // caller-saved registers that were not already preserved.  The
    // caller-saved registers are eax, ecx and edx.  The three scratch
    // registers (incl. ecx) will be restored by other means so we don't
    // bother pushing them here.
    void SaveCallerSaveRegisters(MacroAssembler* masm, SaveFPRegsMode mode) {
      if (!scratch0_.is(eax) && !scratch1_.is(eax)) masm->push(eax);
      if (!scratch0_.is(edx) && !scratch1_.is(edx)) masm->push(edx);
      if (mode == kSaveFPRegs) {
        CpuFeatures::Scope scope(SSE2);
        masm->sub(Operand(esp),
                  Immediate(kDoubleSize * (XMMRegister::kNumRegisters - 1)));
        // Save all XMM registers except XMM0.
        for (int i = XMMRegister::kNumRegisters - 1; i > 0; i--) {
          XMMRegister reg = XMMRegister::from_code(i);
          masm->movdbl(Operand(esp, (i - 1) * kDoubleSize), reg);
        }
      }
    }

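    // Layout note (added for illustration, assuming XMMRegister::kNumRegisters
    // is 8 on ia32): SaveCallerSaveRegisters() above reserves 7 * kDoubleSize
    // bytes and stores xmm1..xmm7 at [esp + 0 * kDoubleSize] up to
    // [esp + 6 * kDoubleSize]; xmm0 is deliberately skipped.
    // RestoreCallerSaveRegisters() below reloads the same slots before
    // releasing the stack space.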
    inline void RestoreCallerSaveRegisters(MacroAssembler* masm,
                                           SaveFPRegsMode mode) {
      if (mode == kSaveFPRegs) {
        CpuFeatures::Scope scope(SSE2);
        // Restore all XMM registers except XMM0.
        for (int i = XMMRegister::kNumRegisters - 1; i > 0; i--) {
          XMMRegister reg = XMMRegister::from_code(i);
          masm->movdbl(reg, Operand(esp, (i - 1) * kDoubleSize));
        }
        masm->add(Operand(esp),
                  Immediate(kDoubleSize * (XMMRegister::kNumRegisters - 1)));
      }
      if (!scratch0_.is(edx) && !scratch1_.is(edx)) masm->pop(edx);
      if (!scratch0_.is(eax) && !scratch1_.is(eax)) masm->pop(eax);
    }

    inline Register object() { return object_; }
    inline Register address() { return address_; }
    inline Register scratch0() { return scratch0_; }
    inline Register scratch1() { return scratch1_; }

   private:
    Register object_orig_;
    Register address_orig_;
    Register scratch0_orig_;
    Register object_;
    Register address_;
    Register scratch0_;
    Register scratch1_;
    // Third scratch register is always ecx.

    Register GetRegThatIsNotEcxOr(Register r1,
                                  Register r2,
                                  Register r3) {
      for (int i = 0; i < Register::kNumAllocatableRegisters; i++) {
        Register candidate = Register::FromAllocationIndex(i);
        if (candidate.is(ecx)) continue;
        if (candidate.is(r1)) continue;
        if (candidate.is(r2)) continue;
        if (candidate.is(r3)) continue;
        return candidate;
      }
      UNREACHABLE();
      return no_reg;
    }
    friend class RecordWriteStub;
  };

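  // Usage sketch (hypothetical member, not part of this patch): the protocol
  // the code generator is expected to follow with the RegisterAllocation
  // helper above.  The stub's regs_ member is constructed with the two
  // registers that must survive (object and address) plus the value register
  // as the caller-provided scratch; Save() then makes scratch0(), scratch1()
  // and ecx freely usable, and Restore() must mirror it exactly.
  void RegisterProtocolSketch(MacroAssembler* masm) {
    regs_.Save(masm);
    // ... emit code that may clobber regs_.scratch0(), regs_.scratch1() and
    // ecx here ...
    regs_.Restore(masm);
  }
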
  enum OnNoNeedToInformIncrementalMarker {
    kReturnOnNoNeedToInformIncrementalMarker,
    kUpdateRememberedSetOnNoNeedToInformIncrementalMarker
  };

  void Generate(MacroAssembler* masm);
  void GenerateIncremental(MacroAssembler* masm, Mode mode);
  void CheckNeedsToInformIncrementalMarker(
      MacroAssembler* masm,
      OnNoNeedToInformIncrementalMarker on_no_need,
      Mode mode);
  void InformIncrementalMarker(MacroAssembler* masm, Mode mode);

  Major MajorKey() { return RecordWrite; }

  int MinorKey() {
    return ObjectBits::encode(object_.code()) |
           ValueBits::encode(value_.code()) |
           AddressBits::encode(address_.code()) |
           RememberedSetActionBits::encode(remembered_set_action_) |
           SaveFPRegsModeBits::encode(save_fp_regs_mode_);
  }

  bool MustBeInStubCache() {
    // All stubs must be registered in the stub cache, otherwise the
    // incremental marker would not be able to find and patch them.
    return true;
  }

  void Activate(Code* code) {
    code->GetHeap()->incremental_marking()->ActivateGeneratedStub(code);
  }

  class ObjectBits: public BitField<int, 0, 3> {};
  class ValueBits: public BitField<int, 3, 3> {};
  class AddressBits: public BitField<int, 6, 3> {};
  class RememberedSetActionBits: public BitField<RememberedSetAction, 9, 1> {};
  class SaveFPRegsModeBits: public BitField<SaveFPRegsMode, 10, 1> {};

  Register object_;
  Register value_;
  Register address_;
  RememberedSetAction remembered_set_action_;
  SaveFPRegsMode save_fp_regs_mode_;
  RegisterAllocation regs_;
};


} }  // namespace v8::internal

#endif  // V8_IA32_CODE_STUBS_IA32_H_