OLD | NEW |
---|---|
1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/interpreter/bytecode-array-builder.h" | 5 #include "src/interpreter/bytecode-array-builder.h" |
6 | 6 |
7 namespace v8 { | 7 namespace v8 { |
8 namespace internal { | 8 namespace internal { |
9 namespace interpreter { | 9 namespace interpreter { |
10 | 10 |
(...skipping 34 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
45 array_builder_.bytecodes()->at(operand_offset)); | 45 array_builder_.bytecodes()->at(operand_offset)); |
46 case OperandSize::kShort: | 46 case OperandSize::kShort: |
47 uint16_t operand = | 47 uint16_t operand = |
48 (array_builder_.bytecodes()->at(operand_offset) << 8) + | 48 (array_builder_.bytecodes()->at(operand_offset) << 8) + |
49 array_builder_.bytecodes()->at(operand_offset + 1); | 49 array_builder_.bytecodes()->at(operand_offset + 1); |
50 return static_cast<uint32_t>(operand); | 50 return static_cast<uint32_t>(operand); |
51 } | 51 } |
52 } | 52 } |
53 | 53 |
54 Handle<Object> GetConstantForIndexOperand(int operand_index) const { | 54 Handle<Object> GetConstantForIndexOperand(int operand_index) const { |
55 return array_builder_.constants_.at(GetOperand(operand_index)); | 55 return array_builder_.constant_array_builder()->at( |
56 GetOperand(operand_index)); | |
56 } | 57 } |
57 | 58 |
58 private: | 59 private: |
59 const BytecodeArrayBuilder& array_builder_; | 60 const BytecodeArrayBuilder& array_builder_; |
60 size_t previous_bytecode_start_; | 61 size_t previous_bytecode_start_; |
61 | 62 |
62 DISALLOW_COPY_AND_ASSIGN(PreviousBytecodeHelper); | 63 DISALLOW_COPY_AND_ASSIGN(PreviousBytecodeHelper); |
63 }; | 64 }; |
64 | 65 |
65 | 66 |
66 BytecodeArrayBuilder::BytecodeArrayBuilder(Isolate* isolate, Zone* zone) | 67 BytecodeArrayBuilder::BytecodeArrayBuilder(Isolate* isolate, Zone* zone) |
67 : isolate_(isolate), | 68 : isolate_(isolate), |
68 zone_(zone), | 69 zone_(zone), |
69 bytecodes_(zone), | 70 bytecodes_(zone), |
70 bytecode_generated_(false), | 71 bytecode_generated_(false), |
72 constant_array_builder_(isolate, zone), | |
71 last_block_end_(0), | 73 last_block_end_(0), |
72 last_bytecode_start_(~0), | 74 last_bytecode_start_(~0), |
73 exit_seen_in_block_(false), | 75 exit_seen_in_block_(false), |
74 unbound_jumps_(0), | 76 unbound_jumps_(0), |
75 constants_map_(isolate->heap(), zone), | |
76 constants_(zone), | |
77 parameter_count_(-1), | 77 parameter_count_(-1), |
78 local_register_count_(-1), | 78 local_register_count_(-1), |
79 context_register_count_(-1), | 79 context_register_count_(-1), |
80 temporary_register_count_(0), | 80 temporary_register_count_(0), |
81 free_temporaries_(zone) {} | 81 free_temporaries_(zone) {} |
82 | 82 |
83 | 83 |
84 BytecodeArrayBuilder::~BytecodeArrayBuilder() { DCHECK_EQ(0, unbound_jumps_); } | 84 BytecodeArrayBuilder::~BytecodeArrayBuilder() { DCHECK_EQ(0, unbound_jumps_); } |
85 | 85 |
86 | 86 |
(...skipping 50 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
137 | 137 |
138 | 138 |
139 bool BytecodeArrayBuilder::RegisterIsTemporary(Register reg) const { | 139 bool BytecodeArrayBuilder::RegisterIsTemporary(Register reg) const { |
140 return temporary_register_count_ > 0 && first_temporary_register() <= reg && | 140 return temporary_register_count_ > 0 && first_temporary_register() <= reg && |
141 reg <= last_temporary_register(); | 141 reg <= last_temporary_register(); |
142 } | 142 } |
143 | 143 |
144 | 144 |
145 Handle<BytecodeArray> BytecodeArrayBuilder::ToBytecodeArray() { | 145 Handle<BytecodeArray> BytecodeArrayBuilder::ToBytecodeArray() { |
146 DCHECK_EQ(bytecode_generated_, false); | 146 DCHECK_EQ(bytecode_generated_, false); |
147 | |
148 EnsureReturn(); | 147 EnsureReturn(); |
149 | 148 |
150 int bytecode_size = static_cast<int>(bytecodes_.size()); | 149 int bytecode_size = static_cast<int>(bytecodes_.size()); |
151 int register_count = fixed_register_count() + temporary_register_count_; | 150 int register_count = fixed_register_count() + temporary_register_count_; |
152 int frame_size = register_count * kPointerSize; | 151 int frame_size = register_count * kPointerSize; |
153 | |
154 Factory* factory = isolate_->factory(); | 152 Factory* factory = isolate_->factory(); |
155 int constants_count = static_cast<int>(constants_.size()); | |
156 Handle<FixedArray> constant_pool = | 153 Handle<FixedArray> constant_pool = |
157 factory->NewFixedArray(constants_count, TENURED); | 154 constant_array_builder()->ToFixedArray(factory, TENURED); |
158 for (int i = 0; i < constants_count; i++) { | |
159 constant_pool->set(i, *constants_[i]); | |
160 } | |
161 | |
162 Handle<BytecodeArray> output = | 155 Handle<BytecodeArray> output = |
163 factory->NewBytecodeArray(bytecode_size, &bytecodes_.front(), frame_size, | 156 factory->NewBytecodeArray(bytecode_size, &bytecodes_.front(), frame_size, |
164 parameter_count(), constant_pool); | 157 parameter_count(), constant_pool); |
165 bytecode_generated_ = true; | 158 bytecode_generated_ = true; |
166 return output; | 159 return output; |
167 } | 160 } |
168 | 161 |
169 | 162 |
170 template <size_t N> | 163 template <size_t N> |
171 void BytecodeArrayBuilder::Output(Bytecode bytecode, uint32_t(&operands)[N]) { | 164 void BytecodeArrayBuilder::Output(Bytecode bytecode, uint32_t(&operands)[N]) { |
(...skipping 575 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
747 return Bytecode::kJumpIfNullConstant; | 740 return Bytecode::kJumpIfNullConstant; |
748 case Bytecode::kJumpIfUndefined: | 741 case Bytecode::kJumpIfUndefined: |
749 return Bytecode::kJumpIfUndefinedConstant; | 742 return Bytecode::kJumpIfUndefinedConstant; |
750 default: | 743 default: |
751 UNREACHABLE(); | 744 UNREACHABLE(); |
752 return Bytecode::kJumpConstant; | 745 return Bytecode::kJumpConstant; |
753 } | 746 } |
754 } | 747 } |
755 | 748 |
756 | 749 |
757 void BytecodeArrayBuilder::PatchJump( | 750 // static |
758 const ZoneVector<uint8_t>::iterator& jump_target, | 751 Bytecode BytecodeArrayBuilder::GetJumpWithConstantWideOperand( |
759 ZoneVector<uint8_t>::iterator jump_location) { | 752 Bytecode jump_bytecode) { |
760 Bytecode jump_bytecode = Bytecodes::FromByte(*jump_location); | 753 switch (jump_bytecode) { |
761 int delta = static_cast<int>(jump_target - jump_location); | 754 case Bytecode::kJump: |
762 | 755 return Bytecode::kJumpConstantWide; |
763 DCHECK(Bytecodes::IsJump(jump_bytecode)); | 756 case Bytecode::kJumpIfTrue: |
764 DCHECK_EQ(Bytecodes::Size(jump_bytecode), 2); | 757 return Bytecode::kJumpIfTrueConstantWide; |
765 DCHECK_NE(delta, 0); | 758 case Bytecode::kJumpIfFalse: |
766 | 759 return Bytecode::kJumpIfFalseConstantWide; |
767 if (FitsInImm8Operand(delta)) { | 760 case Bytecode::kJumpIfToBooleanTrue: |
768 // Just update the operand | 761 return Bytecode::kJumpIfToBooleanTrueConstantWide; |
769 jump_location++; | 762 case Bytecode::kJumpIfToBooleanFalse: |
770 *jump_location = static_cast<uint8_t>(delta); | 763 return Bytecode::kJumpIfToBooleanFalseConstantWide; |
771 } else { | 764 case Bytecode::kJumpIfNull: |
772 // Update the jump type and operand | 765 return Bytecode::kJumpIfNullConstantWide; |
773 size_t entry = GetConstantPoolEntry(handle(Smi::FromInt(delta), isolate())); | 766 case Bytecode::kJumpIfUndefined: |
774 if (FitsInIdx8Operand(entry)) { | 767 return Bytecode::kJumpIfUndefinedConstantWide; |
775 jump_bytecode = GetJumpWithConstantOperand(jump_bytecode); | 768 default: |
776 *jump_location++ = Bytecodes::ToByte(jump_bytecode); | 769 UNREACHABLE(); |
777 *jump_location = static_cast<uint8_t>(entry); | 770 return Bytecode::kJumpConstantWide; |
778 } else { | |
779 // TODO(oth): OutputJump should reserve a constant pool entry | |
780 // when jump is written. The reservation should be used here if | |
781 // needed, or cancelled if not. This is due to the patch needing | |
782 // to match the size of the code it's replacing. In future, | |
783 // there will probably be a jump with 32-bit operand for cases | |
784 // when constant pool is full, but that needs to be emitted in | |
785 // OutputJump too. | |
786 UNIMPLEMENTED(); | |
787 } | |
788 } | 771 } |
789 unbound_jumps_--; | |
790 } | 772 } |
791 | 773 |
792 | 774 |
793 // static | 775 // static |
794 Bytecode BytecodeArrayBuilder::GetJumpWithToBoolean(Bytecode jump_bytecode) { | 776 Bytecode BytecodeArrayBuilder::GetJumpWithToBoolean(Bytecode jump_bytecode) { |
795 switch (jump_bytecode) { | 777 switch (jump_bytecode) { |
796 case Bytecode::kJump: | 778 case Bytecode::kJump: |
797 case Bytecode::kJumpIfNull: | 779 case Bytecode::kJumpIfNull: |
798 case Bytecode::kJumpIfUndefined: | 780 case Bytecode::kJumpIfUndefined: |
799 return jump_bytecode; | 781 return jump_bytecode; |
800 case Bytecode::kJumpIfTrue: | 782 case Bytecode::kJumpIfTrue: |
801 return Bytecode::kJumpIfToBooleanTrue; | 783 return Bytecode::kJumpIfToBooleanTrue; |
802 case Bytecode::kJumpIfFalse: | 784 case Bytecode::kJumpIfFalse: |
803 return Bytecode::kJumpIfToBooleanFalse; | 785 return Bytecode::kJumpIfToBooleanFalse; |
804 default: | 786 default: |
805 UNREACHABLE(); | 787 UNREACHABLE(); |
806 } | 788 } |
807 return static_cast<Bytecode>(-1); | 789 return static_cast<Bytecode>(-1); |
808 } | 790 } |
809 | 791 |
810 | 792 |
793 void BytecodeArrayBuilder::PatchJump( | |
794 const ZoneVector<uint8_t>::iterator& jump_target, | |
795 const ZoneVector<uint8_t>::iterator& jump_location) { | |
796 int delta = static_cast<int>(jump_target - jump_location); | |
797 Bytecode jump_bytecode = Bytecodes::FromByte(*jump_location); | |
798 auto operand_location = jump_location + 1; | |
799 auto reservation_token = | |
800 static_cast<ConstantArrayBuilder::ReservationToken>(*operand_location); | |
801 if (Bytecodes::IsJumpImmediate(jump_bytecode)) { | |
mythria
2015/12/24 15:53:44
Maybe we can check for the reservation token to be Id
oth
2015/12/27 08:42:34
Done.
| |
802 // A reservation for an entry in the constant array with an 8-bit index. | |
803 DCHECK(ConstantArrayBuilder::ReservationToken::kIdx8 == reservation_token); | |
804 if (FitsInImm8Operand(delta)) { | |
805 // The jump fits within the range of an Imm8 operand, so cancel | |
806 // the reservation and jump directly. | |
807 constant_array_builder()->DiscardReservedEntry(reservation_token); | |
808 *operand_location = static_cast<uint8_t>(delta); | |
809 } else { | |
810 // The jump does not fit in the range of an Imm8 operand, so commit | |
811 // the reservation and update the jump instruction and operand. | |
812 size_t entry = constant_array_builder()->CommitReservedEntry( | |
813 reservation_token, handle(Smi::FromInt(delta), isolate())); | |
814 DCHECK(FitsInIdx8Operand(entry)); | |
815 jump_bytecode = GetJumpWithConstantOperand(jump_bytecode); | |
816 *jump_location = Bytecodes::ToByte(jump_bytecode); | |
817 *operand_location = static_cast<uint8_t>(entry); | |
818 } | |
819 } else { | |
820 // A reservation for an entry in the constant array with a 16-bit index. | |
821 DCHECK(ConstantArrayBuilder::ReservationToken::kIdx16 == reservation_token); | |
822 DCHECK(Bytecodes::IsJumpConstantWide(jump_bytecode)); | |
823 size_t entry = constant_array_builder()->CommitReservedEntry( | |
824 reservation_token, handle(Smi::FromInt(delta), isolate())); | |
825 DCHECK(FitsInIdx16Operand(entry)); | |
826 uint8_t operand_bytes[2]; | |
827 WriteUnalignedUInt16(operand_bytes, static_cast<uint16_t>(entry)); | |
828 *operand_location++ = operand_bytes[0]; | |
829 *operand_location = operand_bytes[1]; | |
830 } | |
831 unbound_jumps_--; | |
832 } | |
833 | |
834 | |
811 BytecodeArrayBuilder& BytecodeArrayBuilder::OutputJump(Bytecode jump_bytecode, | 835 BytecodeArrayBuilder& BytecodeArrayBuilder::OutputJump(Bytecode jump_bytecode, |
812 BytecodeLabel* label) { | 836 BytecodeLabel* label) { |
813 // Don't emit dead code. | 837 // Don't emit dead code. |
814 if (exit_seen_in_block_) return *this; | 838 if (exit_seen_in_block_) return *this; |
815 | 839 |
816 // Check if the value in accumulator is boolean, if not choose an | 840 // Check if the value in accumulator is boolean, if not choose an |
817 // appropriate JumpIfToBoolean bytecode. | 841 // appropriate JumpIfToBoolean bytecode. |
818 if (NeedToBooleanCast()) { | 842 if (NeedToBooleanCast()) { |
819 jump_bytecode = GetJumpWithToBoolean(jump_bytecode); | 843 jump_bytecode = GetJumpWithToBoolean(jump_bytecode); |
820 } | 844 } |
821 | 845 |
822 int delta; | |
823 if (label->is_bound()) { | 846 if (label->is_bound()) { |
824 // Label has been bound already so this is a backwards jump. | 847 // Label has been bound already so this is a backwards jump. |
825 CHECK_GE(bytecodes()->size(), label->offset()); | 848 CHECK_GE(bytecodes()->size(), label->offset()); |
826 CHECK_LE(bytecodes()->size(), static_cast<size_t>(kMaxInt)); | 849 CHECK_LE(bytecodes()->size(), static_cast<size_t>(kMaxInt)); |
827 size_t abs_delta = bytecodes()->size() - label->offset(); | 850 size_t abs_delta = bytecodes()->size() - label->offset(); |
828 delta = -static_cast<int>(abs_delta); | 851 int delta = -static_cast<int>(abs_delta); |
852 | |
853 if (FitsInImm8Operand(delta)) { | |
854 Output(jump_bytecode, static_cast<uint8_t>(delta)); | |
855 } else { | |
856 size_t entry = | |
857 GetConstantPoolEntry(handle(Smi::FromInt(delta), isolate())); | |
858 if (FitsInIdx8Operand(entry)) { | |
859 Output(GetJumpWithConstantOperand(jump_bytecode), | |
860 static_cast<uint8_t>(entry)); | |
861 } else if (FitsInIdx16Operand(entry)) { | |
862 Output(GetJumpWithConstantWideOperand(jump_bytecode), | |
863 static_cast<uint16_t>(entry)); | |
864 } else { | |
865 UNREACHABLE(); | |
866 } | |
867 } | |
829 } else { | 868 } else { |
830 // Label has not yet been bound so this is a forward reference | 869 // Label has not yet been bound so this is a forward reference |
831 // that will be patched when the label is bound. | 870 // that will be patched when the label is bound. |
832 label->set_referrer(bytecodes()->size()); | 871 label->set_referrer(bytecodes()->size()); |
833 delta = 0; | |
834 unbound_jumps_++; | 872 unbound_jumps_++; |
873 auto token = constant_array_builder()->CreateReservedEntry(); | |
874 switch (token) { | |
875 case ConstantArrayBuilder::ReservationToken::kIdx8: { | |
876 Output(jump_bytecode, static_cast<uint8_t>(token)); | |
877 break; | |
878 } | |
879 case ConstantArrayBuilder::ReservationToken::kIdx16: { | |
880 uint16_t target = static_cast<uint16_t>(token); | |
881 target |= target << 8; | |
882 Output(GetJumpWithConstantWideOperand(jump_bytecode), target); | |
883 break; | |
884 } | |
885 } | |
835 } | 886 } |
836 | 887 |
837 if (FitsInImm8Operand(delta)) { | |
838 Output(jump_bytecode, static_cast<uint8_t>(delta)); | |
839 } else { | |
840 size_t entry = GetConstantPoolEntry(handle(Smi::FromInt(delta), isolate())); | |
841 if (FitsInIdx8Operand(entry)) { | |
842 Output(GetJumpWithConstantOperand(jump_bytecode), | |
843 static_cast<uint8_t>(entry)); | |
844 } else { | |
845 UNIMPLEMENTED(); | |
846 } | |
847 } | |
848 LeaveBasicBlock(); | 888 LeaveBasicBlock(); |
849 return *this; | 889 return *this; |
850 } | 890 } |
851 | 891 |
852 | 892 |
853 BytecodeArrayBuilder& BytecodeArrayBuilder::Jump(BytecodeLabel* label) { | 893 BytecodeArrayBuilder& BytecodeArrayBuilder::Jump(BytecodeLabel* label) { |
854 return OutputJump(Bytecode::kJump, label); | 894 return OutputJump(Bytecode::kJump, label); |
855 } | 895 } |
856 | 896 |
857 | 897 |
(...skipping 137 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
995 | 1035 |
996 | 1036 |
997 BytecodeArrayBuilder& BytecodeArrayBuilder::Delete(Register object, | 1037 BytecodeArrayBuilder& BytecodeArrayBuilder::Delete(Register object, |
998 LanguageMode language_mode) { | 1038 LanguageMode language_mode) { |
999 Output(BytecodeForDelete(language_mode), object.ToOperand()); | 1039 Output(BytecodeForDelete(language_mode), object.ToOperand()); |
1000 return *this; | 1040 return *this; |
1001 } | 1041 } |
1002 | 1042 |
1003 | 1043 |
1004 size_t BytecodeArrayBuilder::GetConstantPoolEntry(Handle<Object> object) { | 1044 size_t BytecodeArrayBuilder::GetConstantPoolEntry(Handle<Object> object) { |
1005 // These constants shouldn't be added to the constant pool, they should use | 1045 return constant_array_builder()->Insert(object); |
1006 // specialized bytecodes instead. | |
1007 DCHECK(!object.is_identical_to(isolate_->factory()->undefined_value())); | |
1008 DCHECK(!object.is_identical_to(isolate_->factory()->null_value())); | |
1009 DCHECK(!object.is_identical_to(isolate_->factory()->the_hole_value())); | |
1010 DCHECK(!object.is_identical_to(isolate_->factory()->true_value())); | |
1011 DCHECK(!object.is_identical_to(isolate_->factory()->false_value())); | |
1012 | |
1013 size_t* entry = constants_map_.Find(object); | |
1014 if (!entry) { | |
1015 entry = constants_map_.Get(object); | |
1016 *entry = constants_.size(); | |
1017 constants_.push_back(object); | |
1018 } | |
1019 DCHECK(constants_[*entry].is_identical_to(object)); | |
1020 return *entry; | |
1021 } | 1046 } |
1022 | 1047 |
1023 | 1048 |
1024 int BytecodeArrayBuilder::BorrowTemporaryRegister() { | 1049 int BytecodeArrayBuilder::BorrowTemporaryRegister() { |
1025 if (free_temporaries_.empty()) { | 1050 if (free_temporaries_.empty()) { |
1026 temporary_register_count_ += 1; | 1051 temporary_register_count_ += 1; |
1027 return last_temporary_register().index(); | 1052 return last_temporary_register().index(); |
1028 } else { | 1053 } else { |
1029 auto pos = free_temporaries_.begin(); | 1054 auto pos = free_temporaries_.begin(); |
1030 int retval = *pos; | 1055 int retval = *pos; |
(...skipping 502 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1533 DCHECK_GT(next_consecutive_count_, 0); | 1558 DCHECK_GT(next_consecutive_count_, 0); |
1534 builder_->BorrowConsecutiveTemporaryRegister(next_consecutive_register_); | 1559 builder_->BorrowConsecutiveTemporaryRegister(next_consecutive_register_); |
1535 allocated_.push_back(next_consecutive_register_); | 1560 allocated_.push_back(next_consecutive_register_); |
1536 next_consecutive_count_--; | 1561 next_consecutive_count_--; |
1537 return Register(next_consecutive_register_++); | 1562 return Register(next_consecutive_register_++); |
1538 } | 1563 } |
1539 | 1564 |
1540 } // namespace interpreter | 1565 } // namespace interpreter |
1541 } // namespace internal | 1566 } // namespace internal |
1542 } // namespace v8 | 1567 } // namespace v8 |
OLD | NEW |