| Index: src/lithium.h
|
| ===================================================================
|
| --- src/lithium.h (revision 6389)
|
| +++ src/lithium.h (working copy)
|
| @@ -29,12 +29,360 @@
|
| #define V8_LITHIUM_H_
|
|
|
| #include "hydrogen.h"
|
| -#include "lithium-allocator.h"
|
| #include "safepoint-table.h"
|
|
|
| namespace v8 {
|
| namespace internal {
|
|
|
| +class LOperand: public ZoneObject {
|
| + public:
|
| + enum Kind {
|
| + INVALID,
|
| + UNALLOCATED,
|
| + CONSTANT_OPERAND,
|
| + STACK_SLOT,
|
| + DOUBLE_STACK_SLOT,
|
| + REGISTER,
|
| + DOUBLE_REGISTER,
|
| + ARGUMENT
|
| + };
|
| +
|
| + LOperand() : value_(KindField::encode(INVALID)) { }
|
| +
|
| + Kind kind() const { return KindField::decode(value_); }
|
| + int index() const { return static_cast<int>(value_) >> kKindFieldWidth; }
|
| + bool IsConstantOperand() const { return kind() == CONSTANT_OPERAND; }
|
| + bool IsStackSlot() const { return kind() == STACK_SLOT; }
|
| + bool IsDoubleStackSlot() const { return kind() == DOUBLE_STACK_SLOT; }
|
| + bool IsRegister() const { return kind() == REGISTER; }
|
| + bool IsDoubleRegister() const { return kind() == DOUBLE_REGISTER; }
|
| + bool IsArgument() const { return kind() == ARGUMENT; }
|
| + bool IsUnallocated() const { return kind() == UNALLOCATED; }
|
| + bool Equals(LOperand* other) const { return value_ == other->value_; }
|
| + int VirtualRegister();
|
| +
|
| + void PrintTo(StringStream* stream);
|
| + void ConvertTo(Kind kind, int index) {
|
| + value_ = KindField::encode(kind);
|
| + value_ |= index << kKindFieldWidth;
|
| + ASSERT(this->index() == index);
|
| + }
|
| +
|
| + protected:
|
| + static const int kKindFieldWidth = 3;
|
| + class KindField : public BitField<Kind, 0, kKindFieldWidth> { };
|
| +
|
| + LOperand(Kind kind, int index) { ConvertTo(kind, index); }
|
| +
|
| + unsigned value_;
|
| +};
|
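The class above packs the operand's Kind into the low kKindFieldWidth bits of value_ and the operand index into the remaining high bits, which is why index() recovers a signed index with a signed right shift. A minimal standalone sketch of that encoding (not part of the patch, using a simplified stand-in for V8's BitField template):

    #include <cassert>
    #include <cstdio>

    // Simplified stand-in for V8's BitField<T, shift, size> helper.
    template <class T, int shift, int size>
    struct BitFieldSketch {
      static unsigned encode(T value) {
        return static_cast<unsigned>(value) << shift;
      }
      static T decode(unsigned value) {
        return static_cast<T>((value >> shift) & ((1U << size) - 1));
      }
    };

    enum Kind { INVALID, UNALLOCATED, CONSTANT_OPERAND, STACK_SLOT,
                DOUBLE_STACK_SLOT, REGISTER, DOUBLE_REGISTER, ARGUMENT };

    static const int kKindFieldWidth = 3;
    typedef BitFieldSketch<Kind, 0, kKindFieldWidth> KindField;

    int main() {
      // Pack STACK_SLOT with index -2, the way LOperand::ConvertTo does.
      unsigned value = KindField::encode(STACK_SLOT);
      value |= static_cast<unsigned>(-2) << kKindFieldWidth;
      // The low bits give back the kind; a signed right shift of the value
      // reinterpreted as int gives back the signed index.
      assert(KindField::decode(value) == STACK_SLOT);
      assert((static_cast<int>(value) >> kKindFieldWidth) == -2);
      printf("kind=%d index=%d\n", static_cast<int>(KindField::decode(value)),
             static_cast<int>(value) >> kKindFieldWidth);
      return 0;
    }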
| +
|
| +
|
| +class LUnallocated: public LOperand {
|
| + public:
|
| + enum Policy {
|
| + NONE,
|
| + ANY,
|
| + FIXED_REGISTER,
|
| + FIXED_DOUBLE_REGISTER,
|
| + FIXED_SLOT,
|
| + MUST_HAVE_REGISTER,
|
| + WRITABLE_REGISTER,
|
| + SAME_AS_FIRST_INPUT,
|
| + IGNORE
|
| + };
|
| +
|
| + // Lifetime of an operand inside the instruction.
|
| + enum Lifetime {
|
| + // A USED_AT_START operand is guaranteed to be live only at
|
| + // instruction start. The register allocator is free to assign the
|
| + // same register to some other operand used inside the instruction
|
| + // (i.e. a temporary or the output).
|
| + USED_AT_START,
|
| +
|
| + // A USED_AT_END operand is treated as live until the end of the
|
| + // instruction. This means that the register allocator will not reuse
|
| + // its register for any other operand inside the instruction.
|
| + USED_AT_END
|
| + };
|
| +
|
| + explicit LUnallocated(Policy policy) : LOperand(UNALLOCATED, 0) {
|
| + Initialize(policy, 0, USED_AT_END);
|
| + }
|
| +
|
| + LUnallocated(Policy policy, int fixed_index) : LOperand(UNALLOCATED, 0) {
|
| + Initialize(policy, fixed_index, USED_AT_END);
|
| + }
|
| +
|
| + LUnallocated(Policy policy, Lifetime lifetime) : LOperand(UNALLOCATED, 0) {
|
| + Initialize(policy, 0, lifetime);
|
| + }
|
| +
|
| + // The superclass has a KindField. Some policies have a signed fixed
|
| + // index in the upper bits.
|
| + static const int kPolicyWidth = 4;
|
| + static const int kLifetimeWidth = 1;
|
| + static const int kVirtualRegisterWidth = 17;
|
| +
|
| + static const int kPolicyShift = kKindFieldWidth;
|
| + static const int kLifetimeShift = kPolicyShift + kPolicyWidth;
|
| + static const int kVirtualRegisterShift = kLifetimeShift + kLifetimeWidth;
|
| + static const int kFixedIndexShift =
|
| + kVirtualRegisterShift + kVirtualRegisterWidth;
|
| +
|
| + class PolicyField : public BitField<Policy, kPolicyShift, kPolicyWidth> { };
|
| +
|
| + class LifetimeField
|
| + : public BitField<Lifetime, kLifetimeShift, kLifetimeWidth> {
|
| + };
|
| +
|
| + class VirtualRegisterField
|
| + : public BitField<unsigned,
|
| + kVirtualRegisterShift,
|
| + kVirtualRegisterWidth> {
|
| + };
|
| +
|
| + static const int kMaxVirtualRegisters = 1 << (kVirtualRegisterWidth + 1);
|
| + static const int kMaxFixedIndices = 128;
|
| +
|
| + bool HasIgnorePolicy() const { return policy() == IGNORE; }
|
| + bool HasNoPolicy() const { return policy() == NONE; }
|
| + bool HasAnyPolicy() const {
|
| + return policy() == ANY;
|
| + }
|
| + bool HasFixedPolicy() const {
|
| + return policy() == FIXED_REGISTER ||
|
| + policy() == FIXED_DOUBLE_REGISTER ||
|
| + policy() == FIXED_SLOT;
|
| + }
|
| + bool HasRegisterPolicy() const {
|
| + return policy() == WRITABLE_REGISTER || policy() == MUST_HAVE_REGISTER;
|
| + }
|
| + bool HasSameAsInputPolicy() const {
|
| + return policy() == SAME_AS_FIRST_INPUT;
|
| + }
|
| + Policy policy() const { return PolicyField::decode(value_); }
|
| + void set_policy(Policy policy) {
|
| + value_ &= ~PolicyField::mask();
|
| + value_ |= PolicyField::encode(policy);
|
| + }
|
| + int fixed_index() const {
|
| + return static_cast<int>(value_) >> kFixedIndexShift;
|
| + }
|
| +
|
| + unsigned virtual_register() const {
|
| + return VirtualRegisterField::decode(value_);
|
| + }
|
| +
|
| + void set_virtual_register(unsigned id) {
|
| + value_ &= ~VirtualRegisterField::mask();
|
| + value_ |= VirtualRegisterField::encode(id);
|
| + }
|
| +
|
| + LUnallocated* CopyUnconstrained() {
|
| + LUnallocated* result = new LUnallocated(ANY);
|
| + result->set_virtual_register(virtual_register());
|
| + return result;
|
| + }
|
| +
|
| + static LUnallocated* cast(LOperand* op) {
|
| + ASSERT(op->IsUnallocated());
|
| + return reinterpret_cast<LUnallocated*>(op);
|
| + }
|
| +
|
| + bool IsUsedAtStart() {
|
| + return LifetimeField::decode(value_) == USED_AT_START;
|
| + }
|
| +
|
| + private:
|
| + void Initialize(Policy policy, int fixed_index, Lifetime lifetime) {
|
| + value_ |= PolicyField::encode(policy);
|
| + value_ |= LifetimeField::encode(lifetime);
|
| + value_ |= fixed_index << kFixedIndexShift;
|
| + ASSERT(this->fixed_index() == fixed_index);
|
| + }
|
| +};
|
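Taken together, the constants above give the following 32-bit layout for an LUnallocated's value_: kind in bits 0..2, policy in bits 3..6, lifetime in bit 7, virtual register in bits 8..24, and a signed fixed index in bits 25..31. A small standalone check of that arithmetic (not part of the patch):

    #include <cassert>

    int main() {
      const int kKindFieldWidth = 3;
      const int kPolicyWidth = 4;
      const int kLifetimeWidth = 1;
      const int kVirtualRegisterWidth = 17;

      const int kPolicyShift = kKindFieldWidth;                           // bits 3..6
      const int kLifetimeShift = kPolicyShift + kPolicyWidth;             // bit 7
      const int kVirtualRegisterShift = kLifetimeShift + kLifetimeWidth;  // bits 8..24
      const int kFixedIndexShift =
          kVirtualRegisterShift + kVirtualRegisterWidth;                  // bits 25..31

      // The signed fixed index gets the remaining 32 - 25 = 7 high bits,
      // i.e. 128 distinct values, which matches kMaxFixedIndices.
      assert(kFixedIndexShift == 25);
      assert((1 << (32 - kFixedIndexShift)) == 128);
      return 0;
    }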
| +
|
| +
|
| +class LMoveOperands BASE_EMBEDDED {
|
| + public:
|
| + LMoveOperands(LOperand* source, LOperand* destination)
|
| + : source_(source), destination_(destination) {
|
| + }
|
| +
|
| + LOperand* source() const { return source_; }
|
| + void set_source(LOperand* operand) { source_ = operand; }
|
| +
|
| + LOperand* destination() const { return destination_; }
|
| + void set_destination(LOperand* operand) { destination_ = operand; }
|
| +
|
| + // The gap resolver marks moves as "in-progress" by clearing the
|
| + // destination (but not the source).
|
| + bool IsPending() const {
|
| + return destination_ == NULL && source_ != NULL;
|
| + }
|
| +
|
| + // True if this move reads the given operand and thus blocks a move into it.
|
| + bool Blocks(LOperand* operand) const {
|
| + return !IsEliminated() && source()->Equals(operand);
|
| + }
|
| +
|
| + // A move is redundant if it's been eliminated, if its source and
|
| + // destination are the same, or if its destination is unneeded.
|
| + bool IsRedundant() const {
|
| + return IsEliminated() || source_->Equals(destination_) || IsIgnored();
|
| + }
|
| +
|
| + bool IsIgnored() const {
|
| + return destination_ != NULL &&
|
| + destination_->IsUnallocated() &&
|
| + LUnallocated::cast(destination_)->HasIgnorePolicy();
|
| + }
|
| +
|
| + // We clear both operands to indicate a move that has been eliminated.
|
| + void Eliminate() { source_ = destination_ = NULL; }
|
| + bool IsEliminated() const {
|
| + ASSERT(source_ != NULL || destination_ == NULL);
|
| + return source_ == NULL;
|
| + }
|
| +
|
| + private:
|
| + LOperand* source_;
|
| + LOperand* destination_;
|
| +};
|
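A move "blocks" an operand while it still needs to read it: the gap resolver may not perform a move into that operand until this move has been done or marked pending. A toy illustration of the Blocks()/IsPending() interplay on a register swap (not V8's actual GapResolver; names hypothetical):

    #include <cassert>
    #include <cstddef>

    // Toy stand-ins with the same Blocks()/IsPending() logic as LMoveOperands;
    // operand identity here plays the role of LOperand::Equals.
    struct Operand {
      int id;
      bool Equals(const Operand* other) const { return id == other->id; }
    };

    struct Move {
      Operand* source;
      Operand* destination;
      bool IsEliminated() const { return source == NULL; }
      bool IsPending() const { return destination == NULL && source != NULL; }
      bool Blocks(Operand* operand) const {
        return !IsEliminated() && source->Equals(operand);
      }
    };

    int main() {
      Operand r0 = { 0 };
      Operand r1 = { 1 };
      // A swap: r0 -> r1 and r1 -> r0. Each move blocks the other's
      // destination, because that destination still holds a value the other
      // move has yet to read.
      Move a = { &r0, &r1 };
      Move b = { &r1, &r0 };
      assert(a.Blocks(b.destination));
      assert(b.Blocks(a.destination));
      // Marking a move "in progress" clears its destination; it is then
      // pending but still blocks moves into its source.
      a.destination = NULL;
      assert(a.IsPending());
      assert(a.Blocks(&r0));
      return 0;
    }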
| +
|
| +
|
| +class LConstantOperand: public LOperand {
|
| + public:
|
| + static LConstantOperand* Create(int index) {
|
| + ASSERT(index >= 0);
|
| + if (index < kNumCachedOperands) return &cache[index];
|
| + return new LConstantOperand(index);
|
| + }
|
| +
|
| + static LConstantOperand* cast(LOperand* op) {
|
| + ASSERT(op->IsConstantOperand());
|
| + return reinterpret_cast<LConstantOperand*>(op);
|
| + }
|
| +
|
| + static void SetupCache();
|
| +
|
| + private:
|
| + static const int kNumCachedOperands = 128;
|
| + static LConstantOperand cache[];
|
| +
|
| + LConstantOperand() : LOperand() { }
|
| + explicit LConstantOperand(int index) : LOperand(CONSTANT_OPERAND, index) { }
|
| +};
|
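This Create()/cache pattern, repeated below for LStackSlot, LDoubleStackSlot, LRegister and LDoubleRegister, hands out a shared preallocated operand for small indices, so the common cases need no heap allocation and compare equal through LOperand::Equals. A self-contained toy version of the same pattern (hypothetical names, not V8 code):

    #include <cassert>

    // Toy operand class with a static cache for small indices, mirroring the
    // shape of LConstantOperand::Create() and SetupCache() above.
    class CachedOperand {
     public:
      static CachedOperand* Create(int index) {
        assert(index >= 0);
        if (index < kNumCached) return &cache_[index];
        return new CachedOperand(index);  // rare case: fall back to the heap
      }
      int index() const { return index_; }

      static void SetupCache() {
        for (int i = 0; i < kNumCached; i++) cache_[i].index_ = i;
      }

     private:
      CachedOperand() : index_(0) { }
      explicit CachedOperand(int index) : index_(index) { }

      static const int kNumCached = 128;
      static CachedOperand cache_[];
      int index_;
    };

    CachedOperand CachedOperand::cache_[CachedOperand::kNumCached];

    int main() {
      CachedOperand::SetupCache();
      // Small indices share one preallocated object; large ones allocate
      // (the heap objects are deliberately leaked in this sketch).
      assert(CachedOperand::Create(5) == CachedOperand::Create(5));
      assert(CachedOperand::Create(500) != CachedOperand::Create(500));
      return 0;
    }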
| +
|
| +
|
| +class LArgument: public LOperand {
|
| + public:
|
| + explicit LArgument(int index) : LOperand(ARGUMENT, index) { }
|
| +
|
| + static LArgument* cast(LOperand* op) {
|
| + ASSERT(op->IsArgument());
|
| + return reinterpret_cast<LArgument*>(op);
|
| + }
|
| +};
|
| +
|
| +
|
| +class LStackSlot: public LOperand {
|
| + public:
|
| + static LStackSlot* Create(int index) {
|
| + ASSERT(index >= 0);
|
| + if (index < kNumCachedOperands) return &cache[index];
|
| + return new LStackSlot(index);
|
| + }
|
| +
|
| + static LStackSlot* cast(LOperand* op) {
|
| + ASSERT(op->IsStackSlot());
|
| + return reinterpret_cast<LStackSlot*>(op);
|
| + }
|
| +
|
| + static void SetupCache();
|
| +
|
| + private:
|
| + static const int kNumCachedOperands = 128;
|
| + static LStackSlot cache[];
|
| +
|
| + LStackSlot() : LOperand() { }
|
| + explicit LStackSlot(int index) : LOperand(STACK_SLOT, index) { }
|
| +};
|
| +
|
| +
|
| +class LDoubleStackSlot: public LOperand {
|
| + public:
|
| + static LDoubleStackSlot* Create(int index) {
|
| + ASSERT(index >= 0);
|
| + if (index < kNumCachedOperands) return &cache[index];
|
| + return new LDoubleStackSlot(index);
|
| + }
|
| +
|
| + static LDoubleStackSlot* cast(LOperand* op) {
|
| + ASSERT(op->IsDoubleStackSlot());
|
| + return reinterpret_cast<LDoubleStackSlot*>(op);
|
| + }
|
| +
|
| + static void SetupCache();
|
| +
|
| + private:
|
| + static const int kNumCachedOperands = 128;
|
| + static LDoubleStackSlot cache[];
|
| +
|
| + LDoubleStackSlot() : LOperand() { }
|
| + explicit LDoubleStackSlot(int index) : LOperand(DOUBLE_STACK_SLOT, index) { }
|
| +};
|
| +
|
| +
|
| +class LRegister: public LOperand {
|
| + public:
|
| + static LRegister* Create(int index) {
|
| + ASSERT(index >= 0);
|
| + if (index < kNumCachedOperands) return &cache[index];
|
| + return new LRegister(index);
|
| + }
|
| +
|
| + static LRegister* cast(LOperand* op) {
|
| + ASSERT(op->IsRegister());
|
| + return reinterpret_cast<LRegister*>(op);
|
| + }
|
| +
|
| + static void SetupCache();
|
| +
|
| + private:
|
| + static const int kNumCachedOperands = 16;
|
| + static LRegister cache[];
|
| +
|
| + LRegister() : LOperand() { }
|
| + explicit LRegister(int index) : LOperand(REGISTER, index) { }
|
| +};
|
| +
|
| +
|
| +class LDoubleRegister: public LOperand {
|
| + public:
|
| + static LDoubleRegister* Create(int index) {
|
| + ASSERT(index >= 0);
|
| + if (index < kNumCachedOperands) return &cache[index];
|
| + return new LDoubleRegister(index);
|
| + }
|
| +
|
| + static LDoubleRegister* cast(LOperand* op) {
|
| + ASSERT(op->IsDoubleRegister());
|
| + return reinterpret_cast<LDoubleRegister*>(op);
|
| + }
|
| +
|
| + static void SetupCache();
|
| +
|
| + private:
|
| + static const int kNumCachedOperands = 16;
|
| + static LDoubleRegister cache[];
|
| +
|
| + LDoubleRegister() : LOperand() { }
|
| + explicit LDoubleRegister(int index) : LOperand(DOUBLE_REGISTER, index) { }
|
| +};
|
| +
|
| +
|
| class LParallelMove : public ZoneObject {
|
| public:
|
| LParallelMove() : move_operands_(4) { }
|
|
|