OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 11 matching lines...) Expand all Loading... |
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
27 | 27 |
28 #ifndef V8_LITHIUM_H_ | 28 #ifndef V8_LITHIUM_H_ |
29 #define V8_LITHIUM_H_ | 29 #define V8_LITHIUM_H_ |
30 | 30 |
31 #include "hydrogen.h" | 31 #include "hydrogen.h" |
32 #include "lithium-allocator.h" | |
33 #include "safepoint-table.h" | 32 #include "safepoint-table.h" |
34 | 33 |
35 namespace v8 { | 34 namespace v8 { |
36 namespace internal { | 35 namespace internal { |
37 | 36 |
// Abstract operand of a Lithium (low-level) instruction.  A Kind tag and
// a signed index are bit-packed into the single word value_: the low
// kKindFieldWidth bits hold the kind, all remaining upper bits hold the
// index.
class LOperand: public ZoneObject {
 public:
  enum Kind {
    INVALID,
    UNALLOCATED,
    CONSTANT_OPERAND,
    STACK_SLOT,
    DOUBLE_STACK_SLOT,
    REGISTER,
    DOUBLE_REGISTER,
    ARGUMENT
  };

  LOperand() : value_(KindField::encode(INVALID)) { }

  Kind kind() const { return KindField::decode(value_); }
  // Arithmetic right shift of the signed representation recovers the
  // (possibly negative) index stored above the kind bits.
  int index() const { return static_cast<int>(value_) >> kKindFieldWidth; }
  bool IsConstantOperand() const { return kind() == CONSTANT_OPERAND; }
  bool IsStackSlot() const { return kind() == STACK_SLOT; }
  bool IsDoubleStackSlot() const { return kind() == DOUBLE_STACK_SLOT; }
  bool IsRegister() const { return kind() == REGISTER; }
  bool IsDoubleRegister() const { return kind() == DOUBLE_REGISTER; }
  bool IsArgument() const { return kind() == ARGUMENT; }
  bool IsUnallocated() const { return kind() == UNALLOCATED; }
  // Equality of the packed word, i.e. same kind AND same index.
  bool Equals(LOperand* other) const { return value_ == other->value_; }
  int VirtualRegister();

  void PrintTo(StringStream* stream);
  // Re-tags this operand in place with a new kind and index.
  void ConvertTo(Kind kind, int index) {
    value_ = KindField::encode(kind);
    value_ |= index << kKindFieldWidth;
    ASSERT(this->index() == index);  // Catches index overflow into kind bits.
  }

 protected:
  static const int kKindFieldWidth = 3;
  class KindField : public BitField<Kind, 0, kKindFieldWidth> { };

  LOperand(Kind kind, int index) { ConvertTo(kind, index); }

  unsigned value_;
};
| 79 |
| 80 |
// An operand not yet assigned by the register allocator.  Packs into the
// inherited value_ word: the allocation policy, the operand's lifetime
// within its instruction, a virtual register id, and — for fixed
// policies — a signed fixed index in the uppermost bits.
class LUnallocated: public LOperand {
 public:
  enum Policy {
    NONE,
    ANY,
    FIXED_REGISTER,
    FIXED_DOUBLE_REGISTER,
    FIXED_SLOT,
    MUST_HAVE_REGISTER,
    WRITABLE_REGISTER,
    SAME_AS_FIRST_INPUT,
    IGNORE
  };

  // Lifetime of operand inside the instruction.
  enum Lifetime {
    // USED_AT_START operand is guaranteed to be live only at
    // instruction start. Register allocator is free to assign the same register
    // to some other operand used inside instruction (i.e. temporary or
    // output).
    USED_AT_START,

    // USED_AT_END operand is treated as live until the end of
    // instruction. This means that register allocator will not reuse its
    // register for any other operand inside instruction.
    USED_AT_END
  };

  // All constructors default unspecified fields: fixed index 0,
  // lifetime USED_AT_END, virtual register 0 (set later).
  explicit LUnallocated(Policy policy) : LOperand(UNALLOCATED, 0) {
    Initialize(policy, 0, USED_AT_END);
  }

  LUnallocated(Policy policy, int fixed_index) : LOperand(UNALLOCATED, 0) {
    Initialize(policy, fixed_index, USED_AT_END);
  }

  LUnallocated(Policy policy, Lifetime lifetime) : LOperand(UNALLOCATED, 0) {
    Initialize(policy, 0, lifetime);
  }

  // The superclass has a KindField. Some policies have a signed fixed
  // index in the upper bits.
  static const int kPolicyWidth = 4;
  static const int kLifetimeWidth = 1;
  static const int kVirtualRegisterWidth = 17;

  static const int kPolicyShift = kKindFieldWidth;
  static const int kLifetimeShift = kPolicyShift + kPolicyWidth;
  static const int kVirtualRegisterShift = kLifetimeShift + kLifetimeWidth;
  static const int kFixedIndexShift =
      kVirtualRegisterShift + kVirtualRegisterWidth;

  class PolicyField : public BitField<Policy, kPolicyShift, kPolicyWidth> { };

  class LifetimeField
      : public BitField<Lifetime, kLifetimeShift, kLifetimeWidth> {
  };

  class VirtualRegisterField
      : public BitField<unsigned,
                        kVirtualRegisterShift,
                        kVirtualRegisterWidth> {
  };

  // NOTE(review): VirtualRegisterField only holds kVirtualRegisterWidth
  // (17) bits, so ids >= 1 << 17 cannot round-trip through
  // set_virtual_register(); this limit looks one bit too generous —
  // confirm against the allocator's bounds checks.
  static const int kMaxVirtualRegisters = 1 << (kVirtualRegisterWidth + 1);
  // NOTE(review): only 32 - kFixedIndexShift = 7 bits remain above the
  // shift, i.e. a signed range of [-64, 63]; fixed indices in [64, 128)
  // would trip the ASSERT in Initialize() — confirm intended limit.
  static const int kMaxFixedIndices = 128;

  bool HasIgnorePolicy() const { return policy() == IGNORE; }
  bool HasNoPolicy() const { return policy() == NONE; }
  bool HasAnyPolicy() const {
    return policy() == ANY;
  }
  bool HasFixedPolicy() const {
    return policy() == FIXED_REGISTER ||
        policy() == FIXED_DOUBLE_REGISTER ||
        policy() == FIXED_SLOT;
  }
  bool HasRegisterPolicy() const {
    return policy() == WRITABLE_REGISTER || policy() == MUST_HAVE_REGISTER;
  }
  bool HasSameAsInputPolicy() const {
    return policy() == SAME_AS_FIRST_INPUT;
  }
  Policy policy() const { return PolicyField::decode(value_); }
  void set_policy(Policy policy) {
    value_ &= ~PolicyField::mask();
    value_ |= PolicyField::encode(policy);
  }
  // Signed fixed index, recovered by arithmetic shift from the topmost bits.
  int fixed_index() const {
    return static_cast<int>(value_) >> kFixedIndexShift;
  }

  unsigned virtual_register() const {
    return VirtualRegisterField::decode(value_);
  }

  void set_virtual_register(unsigned id) {
    value_ &= ~VirtualRegisterField::mask();
    value_ |= VirtualRegisterField::encode(id);
  }

  // Returns a fresh ANY-policy operand for the same virtual register,
  // dropping every other constraint.
  LUnallocated* CopyUnconstrained() {
    LUnallocated* result = new LUnallocated(ANY);
    result->set_virtual_register(virtual_register());
    return result;
  }

  static LUnallocated* cast(LOperand* op) {
    ASSERT(op->IsUnallocated());
    return reinterpret_cast<LUnallocated*>(op);
  }

  bool IsUsedAtStart() {
    return LifetimeField::decode(value_) == USED_AT_START;
  }

 private:
  // ORs the policy, lifetime and fixed index into value_ (the UNALLOCATED
  // kind bits were already set by the LOperand base constructor).
  void Initialize(Policy policy, int fixed_index, Lifetime lifetime) {
    value_ |= PolicyField::encode(policy);
    value_ |= LifetimeField::encode(lifetime);
    value_ |= fixed_index << kFixedIndexShift;
    ASSERT(this->fixed_index() == fixed_index);
  }
};
| 205 |
| 206 |
// A single source->destination move handled by the gap resolver.  Plain
// value type (BASE_EMBEDDED); does not own its operands.
class LMoveOperands BASE_EMBEDDED {
 public:
  LMoveOperands(LOperand* source, LOperand* destination)
      : source_(source), destination_(destination) {
  }

  LOperand* source() const { return source_; }
  void set_source(LOperand* operand) { source_ = operand; }

  LOperand* destination() const { return destination_; }
  void set_destination(LOperand* operand) { destination_ = operand; }

  // The gap resolver marks moves as "in-progress" by clearing the
  // destination (but not the source).
  bool IsPending() const {
    return destination_ == NULL && source_ != NULL;
  }

  // True if this move's source is the given operand, i.e. performing the
  // given operand's move would clobber this move's (still unread) source.
  bool Blocks(LOperand* operand) const {
    return !IsEliminated() && source()->Equals(operand);
  }

  // A move is redundant if it's been eliminated, if its source and
  // destination are the same, or if its destination is unneeded.
  bool IsRedundant() const {
    return IsEliminated() || source_->Equals(destination_) || IsIgnored();
  }

  // A destination with the IGNORE allocation policy marks the move's
  // result as unneeded.
  bool IsIgnored() const {
    return destination_ != NULL &&
        destination_->IsUnallocated() &&
        LUnallocated::cast(destination_)->HasIgnorePolicy();
  }

  // We clear both operands to indicate move that's been eliminated.
  void Eliminate() { source_ = destination_ = NULL; }
  bool IsEliminated() const {
    ASSERT(source_ != NULL || destination_ == NULL);
    return source_ == NULL;
  }

 private:
  LOperand* source_;
  LOperand* destination_;
};
| 253 |
| 254 |
// Operand referring to a constant-pool entry by index.  Instances for
// small indices are shared from a static cache built by SetupCache().
class LConstantOperand: public LOperand {
 public:
  static LConstantOperand* Create(int index) {
    ASSERT(index >= 0);
    if (index < kNumCachedOperands) return &cache[index];
    return new LConstantOperand(index);
  }

  static LConstantOperand* cast(LOperand* op) {
    ASSERT(op->IsConstantOperand());
    return reinterpret_cast<LConstantOperand*>(op);
  }

  static void SetupCache();

 private:
  static const int kNumCachedOperands = 128;
  static LConstantOperand cache[];

  LConstantOperand() : LOperand() { }
  explicit LConstantOperand(int index) : LOperand(CONSTANT_OPERAND, index) { }
};
| 277 |
| 278 |
// Operand referring to an outgoing argument slot by index.  Unlike the
// other operand classes, arguments are not cached.
class LArgument: public LOperand {
 public:
  explicit LArgument(int index) : LOperand(ARGUMENT, index) { }

  static LArgument* cast(LOperand* op) {
    ASSERT(op->IsArgument());
    return reinterpret_cast<LArgument*>(op);
  }
};
| 288 |
| 289 |
// Operand referring to a (single-width) stack slot by index.  Instances
// for small indices are shared from a static cache built by SetupCache().
class LStackSlot: public LOperand {
 public:
  static LStackSlot* Create(int index) {
    ASSERT(index >= 0);
    if (index < kNumCachedOperands) return &cache[index];
    return new LStackSlot(index);
  }

  static LStackSlot* cast(LOperand* op) {
    ASSERT(op->IsStackSlot());
    return reinterpret_cast<LStackSlot*>(op);
  }

  static void SetupCache();

 private:
  static const int kNumCachedOperands = 128;
  static LStackSlot cache[];

  LStackSlot() : LOperand() { }
  explicit LStackSlot(int index) : LOperand(STACK_SLOT, index) { }
};
| 312 |
| 313 |
| 314 class LDoubleStackSlot: public LOperand { |
| 315 public: |
| 316 static LDoubleStackSlot* Create(int index) { |
| 317 ASSERT(index >= 0); |
| 318 if (index < kNumCachedOperands) return &cache[index]; |
| 319 return new LDoubleStackSlot(index); |
| 320 } |
| 321 |
| 322 static LDoubleStackSlot* cast(LOperand* op) { |
| 323 ASSERT(op->IsStackSlot()); |
| 324 return reinterpret_cast<LDoubleStackSlot*>(op); |
| 325 } |
| 326 |
| 327 static void SetupCache(); |
| 328 |
| 329 private: |
| 330 static const int kNumCachedOperands = 128; |
| 331 static LDoubleStackSlot cache[]; |
| 332 |
| 333 LDoubleStackSlot() : LOperand() { } |
| 334 explicit LDoubleStackSlot(int index) : LOperand(DOUBLE_STACK_SLOT, index) { } |
| 335 }; |
| 336 |
| 337 |
// Operand referring to a general-purpose register by allocation index.
// Instances for small indices are shared from a static cache built by
// SetupCache().
class LRegister: public LOperand {
 public:
  static LRegister* Create(int index) {
    ASSERT(index >= 0);
    if (index < kNumCachedOperands) return &cache[index];
    return new LRegister(index);
  }

  static LRegister* cast(LOperand* op) {
    ASSERT(op->IsRegister());
    return reinterpret_cast<LRegister*>(op);
  }

  static void SetupCache();

 private:
  static const int kNumCachedOperands = 16;
  static LRegister cache[];

  LRegister() : LOperand() { }
  explicit LRegister(int index) : LOperand(REGISTER, index) { }
};
| 360 |
| 361 |
// Operand referring to a double-precision register by allocation index.
// Instances for small indices are shared from a static cache built by
// SetupCache().
class LDoubleRegister: public LOperand {
 public:
  static LDoubleRegister* Create(int index) {
    ASSERT(index >= 0);
    if (index < kNumCachedOperands) return &cache[index];
    return new LDoubleRegister(index);
  }

  static LDoubleRegister* cast(LOperand* op) {
    ASSERT(op->IsDoubleRegister());
    return reinterpret_cast<LDoubleRegister*>(op);
  }

  static void SetupCache();

 private:
  static const int kNumCachedOperands = 16;
  static LDoubleRegister cache[];

  LDoubleRegister() : LOperand() { }
  explicit LDoubleRegister(int index) : LOperand(DOUBLE_REGISTER, index) { }
};
| 384 |
| 385 |
38 class LParallelMove : public ZoneObject { | 386 class LParallelMove : public ZoneObject { |
39 public: | 387 public: |
40 LParallelMove() : move_operands_(4) { } | 388 LParallelMove() : move_operands_(4) { } |
41 | 389 |
42 void AddMove(LOperand* from, LOperand* to) { | 390 void AddMove(LOperand* from, LOperand* to) { |
43 move_operands_.Add(LMoveOperands(from, to)); | 391 move_operands_.Add(LMoveOperands(from, to)); |
44 } | 392 } |
45 | 393 |
46 bool IsRedundant() const; | 394 bool IsRedundant() const; |
47 | 395 |
(...skipping 109 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
157 LOperand** spilled_double_registers_; | 505 LOperand** spilled_double_registers_; |
158 | 506 |
159 LEnvironment* outer_; | 507 LEnvironment* outer_; |
160 | 508 |
161 friend class LCodegen; | 509 friend class LCodegen; |
162 }; | 510 }; |
163 | 511 |
164 } } // namespace v8::internal | 512 } } // namespace v8::internal |
165 | 513 |
166 #endif // V8_LITHIUM_H_ | 514 #endif // V8_LITHIUM_H_ |
OLD | NEW |