| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 17 matching lines...) Expand all Loading... |
| 28 #ifndef V8_LITHIUM_H_ | 28 #ifndef V8_LITHIUM_H_ |
| 29 #define V8_LITHIUM_H_ | 29 #define V8_LITHIUM_H_ |
| 30 | 30 |
| 31 #include "allocation.h" | 31 #include "allocation.h" |
| 32 #include "hydrogen.h" | 32 #include "hydrogen.h" |
| 33 #include "safepoint-table.h" | 33 #include "safepoint-table.h" |
| 34 | 34 |
| 35 namespace v8 { | 35 namespace v8 { |
| 36 namespace internal { | 36 namespace internal { |
| 37 | 37 |
// Invokes V(LSubclassName, KIND_ENUM_VALUE) once per concrete allocated
// operand kind.  Used below to generate the Is##name() predicates on
// LOperand; the names must match the LOperand::Kind enumerators and the
// L<Name> subclasses declared later in this file.
// (No per-line comments: '//' inside a macro continuation would swallow
// the trailing backslash during line splicing.)
#define LITHIUM_OPERAND_LIST(V)               \
  V(ConstantOperand, CONSTANT_OPERAND)        \
  V(StackSlot, STACK_SLOT)                    \
  V(DoubleStackSlot, DOUBLE_STACK_SLOT)       \
  V(Float32x4StackSlot, FLOAT32x4_STACK_SLOT) \
  V(Int32x4StackSlot, INT32x4_STACK_SLOT)     \
  V(Register, REGISTER)                       \
  V(DoubleRegister, DOUBLE_REGISTER)          \
  V(Float32x4Register, FLOAT32x4_REGISTER)    \
  V(Int32x4Register, INT32x4_REGISTER)
| 44 | 48 |
| 45 | 49 |
// The register allocator's view of a value location.  The location kind and
// its index are packed into a single unsigned word: Kind occupies the low
// kKindFieldWidth bits, the (signed) index the remaining high bits.
class LOperand : public ZoneObject {
 public:
  enum Kind {
    INVALID,
    UNALLOCATED,
    CONSTANT_OPERAND,
    STACK_SLOT,
    DOUBLE_STACK_SLOT,
    FLOAT32x4_STACK_SLOT,  // 128-bit SIMD stack slot holding a float32x4.
    INT32x4_STACK_SLOT,    // 128-bit SIMD stack slot holding an int32x4.
    REGISTER,
    DOUBLE_REGISTER,
    FLOAT32x4_REGISTER,    // 128-bit SIMD register holding a float32x4.
    INT32x4_REGISTER,      // 128-bit SIMD register holding an int32x4.
    ARGUMENT
  };

  LOperand() : value_(KindField::encode(INVALID)) { }

  Kind kind() const { return KindField::decode(value_); }
  // Arithmetic right shift preserves the sign of negative indices.
  int index() const { return static_cast<int>(value_) >> kKindFieldWidth; }
  // Generates IsConstantOperand(), IsStackSlot(), ..., IsInt32x4Register().
  // Each predicate tests for its exact Kind only (IsStackSlot() is false for
  // the SIMD stack-slot kinds).
#define LITHIUM_OPERAND_PREDICATE(name, type) \
  bool Is##name() const { return kind() == type; }
  LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_PREDICATE)
  LITHIUM_OPERAND_PREDICATE(Argument, ARGUMENT)
  LITHIUM_OPERAND_PREDICATE(Unallocated, UNALLOCATED)
  LITHIUM_OPERAND_PREDICATE(Ignored, INVALID)
#undef LITHIUM_OPERAND_PREDICATE
  // True for either 128-bit SIMD register kind.  NOTE(review): "XMM" assumes
  // an ia32/x64 back end even though this header is shared — confirm the
  // naming is intended for other architectures.
  bool IsXMMRegister() const {
    return kind() == FLOAT32x4_REGISTER || kind() == INT32x4_REGISTER;
  }
  // True for either 128-bit SIMD stack-slot kind.
  bool IsXMMStackSlot() const {
    return kind() == FLOAT32x4_STACK_SLOT || kind() == INT32x4_STACK_SLOT;
  }
  // Equal when the packed encodings match exactly, or when both operands are
  // SIMD registers (resp. SIMD stack slots) with the same index.
  // NOTE(review): this deliberately makes a FLOAT32x4 and an INT32x4 location
  // with the same index compare equal — presumably because they alias the
  // same physical register/slot; confirm that callers rely on this.
  bool Equals(LOperand* other) const {
    return value_ == other->value_ || (index() == other->index() &&
        ((IsXMMRegister() && other->IsXMMRegister()) ||
        (IsXMMStackSlot() && other->IsXMMStackSlot())));
  }

  void PrintTo(StringStream* stream);
  // Re-packs kind and index; the ASSERT verifies the index survives the
  // round trip through the shifted encoding (i.e. it fits the field).
  void ConvertTo(Kind kind, int index) {
    value_ = KindField::encode(kind);
    value_ |= index << kKindFieldWidth;
    ASSERT(this->index() == index);
  }

  // Calls SetUpCache()/TearDownCache() for each subclass.
  static void SetUpCaches();
  static void TearDownCaches();

 protected:
  // 4 bits: the Kind enum now has 12 values (the four SIMD kinds pushed it
  // past the 8 that the previous 3-bit field could encode).  LUnallocated's
  // bitfield layout depends on this width (see STATIC_ASSERT there).
  static const int kKindFieldWidth = 4;
  class KindField : public BitField<Kind, 0, kKindFieldWidth> { };

  LOperand(Kind kind, int index) { ConvertTo(kind, index); }

  unsigned value_;  // Packed kind + index; see class comment.
};
| 91 | 109 |
| 92 | 110 |
| 93 class LUnallocated : public LOperand { | 111 class LUnallocated : public LOperand { |
| 94 public: | 112 public: |
| (...skipping 63 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 158 static LUnallocated* cast(LOperand* op) { | 176 static LUnallocated* cast(LOperand* op) { |
| 159 ASSERT(op->IsUnallocated()); | 177 ASSERT(op->IsUnallocated()); |
| 160 return reinterpret_cast<LUnallocated*>(op); | 178 return reinterpret_cast<LUnallocated*>(op); |
| 161 } | 179 } |
| 162 | 180 |
| 163 // The encoding used for LUnallocated operands depends on the policy that is | 181 // The encoding used for LUnallocated operands depends on the policy that is |
| 164 // stored within the operand. The FIXED_SLOT policy uses a compact encoding | 182 // stored within the operand. The FIXED_SLOT policy uses a compact encoding |
| 165 // because it accommodates a larger pay-load. | 183 // because it accommodates a larger pay-load. |
| 166 // | 184 // |
| 167 // For FIXED_SLOT policy: | 185 // For FIXED_SLOT policy: |
| 168 // +------------------------------------------+ | 186 // +-------------------------------------------+ |
| 169 // | slot_index | vreg | 0 | 001 | | 187 // | slot_index | vreg | 0 | 0001 | |
| 170 // +------------------------------------------+ | 188 // +-------------------------------------------+ |
| 171 // | 189 // |
| 172 // For all other (extended) policies: | 190 // For all other (extended) policies: |
| 173 // +------------------------------------------+ | 191 // +-------------------------------------------+ |
| 174 // | reg_index | L | PPP | vreg | 1 | 001 | L ... Lifetime | 192 // | reg_index | L | PPP | vreg | 1 | 0001 | L ... Lifetime |
| 175 // +------------------------------------------+ P ... Policy | 193 // +-------------------------------------------+ P ... Policy |
| 176 // | 194 // |
| 177 // The slot index is a signed value which requires us to decode it manually | 195 // The slot index is a signed value which requires us to decode it manually |
| 178 // instead of using the BitField utility class. | 196 // instead of using the BitField utility class. |
| 179 | 197 |
| 180 // The superclass has a KindField. | 198 // The superclass has a KindField. |
| 181 STATIC_ASSERT(kKindFieldWidth == 3); | 199 STATIC_ASSERT(kKindFieldWidth == 4); |
| 182 | 200 |
| 183 // BitFields for all unallocated operands. | 201 // BitFields for all unallocated operands. |
| 184 class BasicPolicyField : public BitField<BasicPolicy, 3, 1> {}; | 202 class BasicPolicyField : public BitField<BasicPolicy, 4, 1> {}; |
| 185 class VirtualRegisterField : public BitField<unsigned, 4, 18> {}; | 203 class VirtualRegisterField : public BitField<unsigned, 5, 18> {}; |
| 186 | 204 |
| 187 // BitFields specific to BasicPolicy::FIXED_SLOT. | 205 // BitFields specific to BasicPolicy::FIXED_SLOT. |
| 188 class FixedSlotIndexField : public BitField<int, 22, 10> {}; | 206 class FixedSlotIndexField : public BitField<int, 23, 9> {}; |
| 189 | 207 |
| 190 // BitFields specific to BasicPolicy::EXTENDED_POLICY. | 208 // BitFields specific to BasicPolicy::EXTENDED_POLICY. |
| 191 class ExtendedPolicyField : public BitField<ExtendedPolicy, 22, 3> {}; | 209 class ExtendedPolicyField : public BitField<ExtendedPolicy, 23, 3> {}; |
| 192 class LifetimeField : public BitField<Lifetime, 25, 1> {}; | 210 class LifetimeField : public BitField<Lifetime, 26, 1> {}; |
| 193 class FixedRegisterField : public BitField<int, 26, 6> {}; | 211 class FixedRegisterField : public BitField<int, 27, 5> {}; |
| 194 | 212 |
| 195 static const int kMaxVirtualRegisters = VirtualRegisterField::kMax + 1; | 213 static const int kMaxVirtualRegisters = VirtualRegisterField::kMax + 1; |
| 196 static const int kFixedSlotIndexWidth = FixedSlotIndexField::kSize; | 214 static const int kFixedSlotIndexWidth = FixedSlotIndexField::kSize; |
| 197 static const int kMaxFixedSlotIndex = (1 << (kFixedSlotIndexWidth - 1)) - 1; | 215 static const int kMaxFixedSlotIndex = (1 << (kFixedSlotIndexWidth - 1)) - 1; |
| 198 static const int kMinFixedSlotIndex = -(1 << (kFixedSlotIndexWidth - 1)); | 216 static const int kMinFixedSlotIndex = -(1 << (kFixedSlotIndexWidth - 1)); |
| 199 | 217 |
| 200 // Predicates for the operand policy. | 218 // Predicates for the operand policy. |
| 201 bool HasAnyPolicy() const { | 219 bool HasAnyPolicy() const { |
| 202 return basic_policy() == EXTENDED_POLICY && | 220 return basic_policy() == EXTENDED_POLICY && |
| 203 extended_policy() == ANY; | 221 extended_policy() == ANY; |
| (...skipping 192 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 396 | 414 |
| 397 private: | 415 private: |
| 398 static const int kNumCachedOperands = 128; | 416 static const int kNumCachedOperands = 128; |
| 399 static LDoubleStackSlot* cache; | 417 static LDoubleStackSlot* cache; |
| 400 | 418 |
| 401 LDoubleStackSlot() : LOperand() { } | 419 LDoubleStackSlot() : LOperand() { } |
| 402 explicit LDoubleStackSlot(int index) : LOperand(DOUBLE_STACK_SLOT, index) { } | 420 explicit LDoubleStackSlot(int index) : LOperand(DOUBLE_STACK_SLOT, index) { } |
| 403 }; | 421 }; |
| 404 | 422 |
| 405 | 423 |
| 424 class LFloat32x4StackSlot V8_FINAL : public LOperand { |
| 425 public: |
| 426 static LFloat32x4StackSlot* Create(int index, Zone* zone) { |
| 427 ASSERT(index >= 0); |
| 428 if (index < kNumCachedOperands) return &cache[index]; |
| 429 return new(zone) LFloat32x4StackSlot(index); |
| 430 } |
| 431 |
| 432 static LFloat32x4StackSlot* cast(LOperand* op) { |
| 433 ASSERT(op->IsStackSlot()); |
| 434 return reinterpret_cast<LFloat32x4StackSlot*>(op); |
| 435 } |
| 436 |
| 437 static void SetUpCache(); |
| 438 static void TearDownCache(); |
| 439 |
| 440 private: |
| 441 static const int kNumCachedOperands = 128; |
| 442 static LFloat32x4StackSlot* cache; |
| 443 |
| 444 LFloat32x4StackSlot() : LOperand() { } |
| 445 explicit LFloat32x4StackSlot(int index) |
| 446 : LOperand(FLOAT32x4_STACK_SLOT, index) { } |
| 447 }; |
| 448 |
| 449 |
| 450 class LInt32x4StackSlot V8_FINAL : public LOperand { |
| 451 public: |
| 452 static LInt32x4StackSlot* Create(int index, Zone* zone) { |
| 453 ASSERT(index >= 0); |
| 454 if (index < kNumCachedOperands) return &cache[index]; |
| 455 return new(zone) LInt32x4StackSlot(index); |
| 456 } |
| 457 |
| 458 static LInt32x4StackSlot* cast(LOperand* op) { |
| 459 ASSERT(op->IsStackSlot()); |
| 460 return reinterpret_cast<LInt32x4StackSlot*>(op); |
| 461 } |
| 462 |
| 463 static void SetUpCache(); |
| 464 static void TearDownCache(); |
| 465 |
| 466 private: |
| 467 static const int kNumCachedOperands = 128; |
| 468 static LInt32x4StackSlot* cache; |
| 469 |
| 470 LInt32x4StackSlot() : LOperand() { } |
| 471 explicit LInt32x4StackSlot(int index) |
| 472 : LOperand(INT32x4_STACK_SLOT, index) { } |
| 473 }; |
| 474 |
| 475 |
| 406 class LRegister V8_FINAL : public LOperand { | 476 class LRegister V8_FINAL : public LOperand { |
| 407 public: | 477 public: |
| 408 static LRegister* Create(int index, Zone* zone) { | 478 static LRegister* Create(int index, Zone* zone) { |
| 409 ASSERT(index >= 0); | 479 ASSERT(index >= 0); |
| 410 if (index < kNumCachedOperands) return &cache[index]; | 480 if (index < kNumCachedOperands) return &cache[index]; |
| 411 return new(zone) LRegister(index); | 481 return new(zone) LRegister(index); |
| 412 } | 482 } |
| 413 | 483 |
| 414 static LRegister* cast(LOperand* op) { | 484 static LRegister* cast(LOperand* op) { |
| 415 ASSERT(op->IsRegister()); | 485 ASSERT(op->IsRegister()); |
| (...skipping 30 matching lines...) Expand all Loading... |
| 446 | 516 |
| 447 private: | 517 private: |
| 448 static const int kNumCachedOperands = 16; | 518 static const int kNumCachedOperands = 16; |
| 449 static LDoubleRegister* cache; | 519 static LDoubleRegister* cache; |
| 450 | 520 |
| 451 LDoubleRegister() : LOperand() { } | 521 LDoubleRegister() : LOperand() { } |
| 452 explicit LDoubleRegister(int index) : LOperand(DOUBLE_REGISTER, index) { } | 522 explicit LDoubleRegister(int index) : LOperand(DOUBLE_REGISTER, index) { } |
| 453 }; | 523 }; |
| 454 | 524 |
| 455 | 525 |
| 526 class LFloat32x4Register V8_FINAL : public LOperand { |
| 527 public: |
| 528 static LFloat32x4Register* Create(int index, Zone* zone) { |
| 529 ASSERT(index >= 0); |
| 530 if (index < kNumCachedOperands) return &cache[index]; |
| 531 return new(zone) LFloat32x4Register(index); |
| 532 } |
| 533 |
| 534 static LFloat32x4Register* cast(LOperand* op) { |
| 535 ASSERT(op->IsFloat32x4Register()); |
| 536 return reinterpret_cast<LFloat32x4Register*>(op); |
| 537 } |
| 538 |
| 539 static void SetUpCache(); |
| 540 static void TearDownCache(); |
| 541 |
| 542 private: |
| 543 static const int kNumCachedOperands = 16; |
| 544 static LFloat32x4Register* cache; |
| 545 |
| 546 LFloat32x4Register() : LOperand() { } |
| 547 explicit LFloat32x4Register(int index) |
| 548 : LOperand(FLOAT32x4_REGISTER, index) { } |
| 549 }; |
| 550 |
| 551 |
| 552 class LInt32x4Register V8_FINAL : public LOperand { |
| 553 public: |
| 554 static LInt32x4Register* Create(int index, Zone* zone) { |
| 555 ASSERT(index >= 0); |
| 556 if (index < kNumCachedOperands) return &cache[index]; |
| 557 return new(zone) LInt32x4Register(index); |
| 558 } |
| 559 |
| 560 static LInt32x4Register* cast(LOperand* op) { |
| 561 ASSERT(op->IsInt32x4Register()); |
| 562 return reinterpret_cast<LInt32x4Register*>(op); |
| 563 } |
| 564 |
| 565 static void SetUpCache(); |
| 566 static void TearDownCache(); |
| 567 |
| 568 private: |
| 569 static const int kNumCachedOperands = 16; |
| 570 static LInt32x4Register* cache; |
| 571 |
| 572 LInt32x4Register() : LOperand() { } |
| 573 explicit LInt32x4Register(int index) |
| 574 : LOperand(INT32x4_REGISTER, index) { } |
| 575 }; |
| 576 |
| 577 |
| 456 class LParallelMove V8_FINAL : public ZoneObject { | 578 class LParallelMove V8_FINAL : public ZoneObject { |
| 457 public: | 579 public: |
| 458 explicit LParallelMove(Zone* zone) : move_operands_(4, zone) { } | 580 explicit LParallelMove(Zone* zone) : move_operands_(4, zone) { } |
| 459 | 581 |
| 460 void AddMove(LOperand* from, LOperand* to, Zone* zone) { | 582 void AddMove(LOperand* from, LOperand* to, Zone* zone) { |
| 461 move_operands_.Add(LMoveOperands(from, to), zone); | 583 move_operands_.Add(LMoveOperands(from, to), zone); |
| 462 } | 584 } |
| 463 | 585 |
| 464 bool IsRedundant() const; | 586 bool IsRedundant() const; |
| 465 | 587 |
| (...skipping 343 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 809 private: | 931 private: |
| 810 LChunk* chunk_; | 932 LChunk* chunk_; |
| 811 | 933 |
| 812 DISALLOW_COPY_AND_ASSIGN(LPhase); | 934 DISALLOW_COPY_AND_ASSIGN(LPhase); |
| 813 }; | 935 }; |
| 814 | 936 |
| 815 | 937 |
| 816 } } // namespace v8::internal | 938 } } // namespace v8::internal |
| 817 | 939 |
| 818 #endif // V8_LITHIUM_H_ | 940 #endif // V8_LITHIUM_H_ |
| OLD | NEW |