| OLD | NEW |
| 1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 10 matching lines...) Expand all Loading... |
| 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
| 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
| 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
| 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
| 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
| 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 27 | 27 |
| 28 #ifndef V8_SAFEPOINT_TABLE_H_ | 28 #ifndef V8_SAFEPOINT_TABLE_H_ |
| 29 #define V8_SAFEPOINT_TABLE_H_ | 29 #define V8_SAFEPOINT_TABLE_H_ |
| 30 | 30 |
| 31 #include "v8.h" | 31 #include "heap.h" |
| 32 | |
| 33 #include "macro-assembler.h" | |
| 34 #include "zone.h" | 32 #include "zone.h" |
| 35 #include "zone-inl.h" | |
| 36 | 33 |
| 37 namespace v8 { | 34 namespace v8 { |
| 38 namespace internal { | 35 namespace internal { |
| 39 | 36 |
| 37 struct Register; |
| 38 |
| 39 class SafepointEntry BASE_EMBEDDED { |
| 40 public: |
| 41 SafepointEntry() : info_(0), bits_(NULL) {} |
| 42 |
| 43 SafepointEntry(unsigned info, uint8_t* bits) : info_(info), bits_(bits) { |
| 44 ASSERT(is_valid()); |
| 45 } |
| 46 |
| 47 bool is_valid() const { return bits_ != NULL; } |
| 48 |
| 49 bool Equals(const SafepointEntry& other) const { |
| 50 return info_ == other.info_ && bits_ == other.bits_; |
| 51 } |
| 52 |
| 53 void Reset() { |
| 54 info_ = 0; |
| 55 bits_ = NULL; |
| 56 } |
| 57 |
| 58 int deoptimization_index() const { |
| 59 ASSERT(is_valid()); |
| 60 return DeoptimizationIndexField::decode(info_); |
| 61 } |
| 62 |
| 63 int gap_code_size() const { |
| 64 ASSERT(is_valid()); |
| 65 return GapCodeSizeField::decode(info_); |
| 66 } |
| 67 |
| 68 int argument_count() const { |
| 69 ASSERT(is_valid()); |
| 70 return ArgumentsField::decode(info_); |
| 71 } |
| 72 |
| 73 uint8_t* bits() { |
| 74 ASSERT(is_valid()); |
| 75 return bits_; |
| 76 } |
| 77 |
| 78 bool HasRegisters() const; |
| 79 bool HasRegisterAt(int reg_index) const; |
| 80 |
| 81 // Reserve 13 bits for the gap code size. On ARM a constant pool can be |
| 82 // emitted when generating the gap code. The size of the const pool is less |
| 83 // than what can be represented in 12 bits, so 13 bits gives room for having |
| 84 // instructions before potentially emitting a constant pool. |
| 85 static const int kGapCodeSizeBits = 13; |
| 86 static const int kArgumentsFieldBits = 3; |
| 87 static const int kDeoptIndexBits = |
| 88 32 - kGapCodeSizeBits - kArgumentsFieldBits; |
| 89 class GapCodeSizeField: public BitField<unsigned, 0, kGapCodeSizeBits> {}; |
| 90 class DeoptimizationIndexField: public BitField<int, |
| 91 kGapCodeSizeBits, |
| 92 kDeoptIndexBits> {}; // NOLINT |
| 93 class ArgumentsField: public BitField<unsigned, |
| 94 kGapCodeSizeBits + kDeoptIndexBits, |
| 95 kArgumentsFieldBits> {}; // NOLINT |
| 96 private: |
| 97 unsigned info_; |
| 98 uint8_t* bits_; |
| 99 }; |
| 100 |
| 101 |
| 40 class SafepointTable BASE_EMBEDDED { | 102 class SafepointTable BASE_EMBEDDED { |
| 41 public: | 103 public: |
| 42 explicit SafepointTable(Code* code); | 104 explicit SafepointTable(Code* code); |
| 43 | 105 |
| 44 int size() const { | 106 int size() const { |
| 45 return kHeaderSize + | 107 return kHeaderSize + |
| 46 (length_ * (kPcAndDeoptimizationIndexSize + entry_size_)); } | 108 (length_ * (kPcAndDeoptimizationIndexSize + entry_size_)); } |
| 47 unsigned length() const { return length_; } | 109 unsigned length() const { return length_; } |
| 48 unsigned entry_size() const { return entry_size_; } | 110 unsigned entry_size() const { return entry_size_; } |
| 49 | 111 |
| 50 unsigned GetPcOffset(unsigned index) const { | 112 unsigned GetPcOffset(unsigned index) const { |
| 51 ASSERT(index < length_); | 113 ASSERT(index < length_); |
| 52 return Memory::uint32_at(GetPcOffsetLocation(index)); | 114 return Memory::uint32_at(GetPcOffsetLocation(index)); |
| 53 } | 115 } |
| 54 | 116 |
| 55 int GetDeoptimizationIndex(unsigned index) const { | 117 SafepointEntry GetEntry(unsigned index) const { |
| 56 ASSERT(index < length_); | 118 ASSERT(index < length_); |
| 57 unsigned value = Memory::uint32_at(GetDeoptimizationLocation(index)); | 119 unsigned info = Memory::uint32_at(GetInfoLocation(index)); |
| 58 return DeoptimizationIndexField::decode(value); | 120 uint8_t* bits = &Memory::uint8_at(entries_ + (index * entry_size_)); |
| 121 return SafepointEntry(info, bits); |
| 59 } | 122 } |
| 60 | 123 |
| 61 unsigned GetGapCodeSize(unsigned index) const { | 124 // Returns the entry for the given pc. |
| 62 ASSERT(index < length_); | 125 SafepointEntry FindEntry(Address pc) const; |
| 63 unsigned value = Memory::uint32_at(GetDeoptimizationLocation(index)); | |
| 64 return GapCodeSizeField::decode(value); | |
| 65 } | |
| 66 | |
| 67 uint8_t* GetEntry(unsigned index) const { | |
| 68 ASSERT(index < length_); | |
| 69 return &Memory::uint8_at(entries_ + (index * entry_size_)); | |
| 70 } | |
| 71 | |
| 72 class GapCodeSizeField: public BitField<unsigned, 0, 8> {}; | |
| 73 class DeoptimizationIndexField: public BitField<int, 8, 24> {}; | |
| 74 | |
| 75 static bool HasRegisters(uint8_t* entry); | |
| 76 static bool HasRegisterAt(uint8_t* entry, int reg_index); | |
| 77 | 126 |
| 78 void PrintEntry(unsigned index) const; | 127 void PrintEntry(unsigned index) const; |
| 79 | 128 |
| 80 private: | 129 private: |
| 81 static const uint8_t kNoRegisters = 0xFF; | 130 static const uint8_t kNoRegisters = 0xFF; |
| 82 | 131 |
| 83 static const int kLengthOffset = 0; | 132 static const int kLengthOffset = 0; |
| 84 static const int kEntrySizeOffset = kLengthOffset + kIntSize; | 133 static const int kEntrySizeOffset = kLengthOffset + kIntSize; |
| 85 static const int kHeaderSize = kEntrySizeOffset + kIntSize; | 134 static const int kHeaderSize = kEntrySizeOffset + kIntSize; |
| 86 | 135 |
| 87 static const int kPcSize = kIntSize; | 136 static const int kPcSize = kIntSize; |
| 88 static const int kDeoptimizationIndexSize = kIntSize; | 137 static const int kDeoptimizationIndexSize = kIntSize; |
| 89 static const int kPcAndDeoptimizationIndexSize = | 138 static const int kPcAndDeoptimizationIndexSize = |
| 90 kPcSize + kDeoptimizationIndexSize; | 139 kPcSize + kDeoptimizationIndexSize; |
| 91 | 140 |
| 92 Address GetPcOffsetLocation(unsigned index) const { | 141 Address GetPcOffsetLocation(unsigned index) const { |
| 93 return pc_and_deoptimization_indexes_ + | 142 return pc_and_deoptimization_indexes_ + |
| 94 (index * kPcAndDeoptimizationIndexSize); | 143 (index * kPcAndDeoptimizationIndexSize); |
| 95 } | 144 } |
| 96 | 145 |
| 97 Address GetDeoptimizationLocation(unsigned index) const { | 146 Address GetInfoLocation(unsigned index) const { |
| 98 return GetPcOffsetLocation(index) + kPcSize; | 147 return GetPcOffsetLocation(index) + kPcSize; |
| 99 } | 148 } |
| 100 | 149 |
| 101 static void PrintBits(uint8_t byte, int digits); | 150 static void PrintBits(uint8_t byte, int digits); |
| 102 | 151 |
| 103 AssertNoAllocation no_allocation_; | 152 AssertNoAllocation no_allocation_; |
| 104 Code* code_; | 153 Code* code_; |
| 105 unsigned length_; | 154 unsigned length_; |
| 106 unsigned entry_size_; | 155 unsigned entry_size_; |
| 107 | 156 |
| 108 Address pc_and_deoptimization_indexes_; | 157 Address pc_and_deoptimization_indexes_; |
| 109 Address entries_; | 158 Address entries_; |
| 110 | 159 |
| 111 friend class SafepointTableBuilder; | 160 friend class SafepointTableBuilder; |
| 161 friend class SafepointEntry; |
| 162 |
| 163 DISALLOW_COPY_AND_ASSIGN(SafepointTable); |
| 112 }; | 164 }; |
| 113 | 165 |
| 114 | 166 |
| 115 class Safepoint BASE_EMBEDDED { | 167 class Safepoint BASE_EMBEDDED { |
| 116 public: | 168 public: |
| 117 static const int kNoDeoptimizationIndex = 0x00ffffff; | 169 static const int kNoDeoptimizationIndex = |
| 170 (1 << (SafepointEntry::kDeoptIndexBits)) - 1; |
| 118 | 171 |
| 119 void DefinePointerSlot(int index) { indexes_->Add(index); } | 172 void DefinePointerSlot(int index) { indexes_->Add(index); } |
| 120 void DefinePointerRegister(Register reg) { registers_->Add(reg.code()); } | 173 void DefinePointerRegister(Register reg); |
| 121 | 174 |
| 122 private: | 175 private: |
| 123 Safepoint(ZoneList<int>* indexes, ZoneList<int>* registers) : | 176 Safepoint(ZoneList<int>* indexes, ZoneList<int>* registers) : |
| 124 indexes_(indexes), registers_(registers) { } | 177 indexes_(indexes), registers_(registers) { } |
| 125 ZoneList<int>* indexes_; | 178 ZoneList<int>* indexes_; |
| 126 ZoneList<int>* registers_; | 179 ZoneList<int>* registers_; |
| 127 | 180 |
| 128 friend class SafepointTableBuilder; | 181 friend class SafepointTableBuilder; |
| 129 }; | 182 }; |
| 130 | 183 |
| (...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 163 | 216 |
| 164 // Emit the safepoint table after the body. The number of bits per | 217 // Emit the safepoint table after the body. The number of bits per |
| 165 // entry must be enough to hold all the pointer indexes. | 218 // entry must be enough to hold all the pointer indexes. |
| 166 void Emit(Assembler* assembler, int bits_per_entry); | 219 void Emit(Assembler* assembler, int bits_per_entry); |
| 167 | 220 |
| 168 private: | 221 private: |
| 169 struct DeoptimizationInfo { | 222 struct DeoptimizationInfo { |
| 170 unsigned pc; | 223 unsigned pc; |
| 171 unsigned deoptimization_index; | 224 unsigned deoptimization_index; |
| 172 unsigned pc_after_gap; | 225 unsigned pc_after_gap; |
| 226 unsigned arguments; |
| 173 }; | 227 }; |
| 174 | 228 |
| 175 uint32_t EncodeDeoptimizationIndexAndGap(DeoptimizationInfo info); | 229 uint32_t EncodeExceptPC(const DeoptimizationInfo& info); |
| 176 | 230 |
| 177 ZoneList<DeoptimizationInfo> deoptimization_info_; | 231 ZoneList<DeoptimizationInfo> deoptimization_info_; |
| 178 ZoneList<ZoneList<int>*> indexes_; | 232 ZoneList<ZoneList<int>*> indexes_; |
| 179 ZoneList<ZoneList<int>*> registers_; | 233 ZoneList<ZoneList<int>*> registers_; |
| 180 | 234 |
| 181 bool emitted_; | 235 bool emitted_; |
| 182 unsigned offset_; | 236 unsigned offset_; |
| 183 | 237 |
| 184 DISALLOW_COPY_AND_ASSIGN(SafepointTableBuilder); | 238 DISALLOW_COPY_AND_ASSIGN(SafepointTableBuilder); |
| 185 }; | 239 }; |
| 186 | 240 |
| 187 } } // namespace v8::internal | 241 } } // namespace v8::internal |
| 188 | 242 |
| 189 #endif // V8_SAFEPOINT_TABLE_H_ | 243 #endif // V8_SAFEPOINT_TABLE_H_ |
| OLD | NEW |