Chromium Code Reviews

| Index: src/interpreter/bytecode-peephole-optimizer.cc |
| diff --git a/src/interpreter/bytecode-peephole-optimizer.cc b/src/interpreter/bytecode-peephole-optimizer.cc |
| index 1108d8304f855f74dcd5b3f6e2965104f068684c..f303bcc3e562718bfc3c0d47d97e6520d3e13336 100644 |
| --- a/src/interpreter/bytecode-peephole-optimizer.cc |
| +++ b/src/interpreter/bytecode-peephole-optimizer.cc |
| @@ -12,10 +12,209 @@ namespace v8 { |
| namespace internal { |
| namespace interpreter { |
| +namespace { |
| + |
| +// Actions to take when a pair of bytecodes is encountered. A handler |
| +// exists for each action. |
| +enum class PeepholeAction : uint8_t { |
| + // Actions for non-jump bytecodes. |
| + kDefaultAction, |
| + kUpdateLastAction, |
| + kElideCurrentAction, |
| + kElideCurrentIfOperand0MatchesAction, |
| + kElideCurrentIfLoadingNameConstantAction, |
| + kElideLastAction, |
| + kChangeBytecodeAction, |
| + kTransformLdaStarToLdrLdarAction, |
| + |
| + // Actions for jump bytecodes. |
| + kDefaultJumpAction, |
| + kUpdateLastJumpAction, |
| + kChangeJumpBytecodeAction, |
| + kElideLastBeforeJumpAction, |
| +}; |
| + |
| +// Tuple of the action to take when a pair of bytecodes is encountered |
| +// and optional data to invoke the handler with. |
| +struct PeepholeActionAndData final { |
| + // Action to take when tuple of bytecodes encountered. |
| + PeepholeAction action; |
| + |
| + // Replacement bytecode (if valid). |
| + Bytecode bytecode; |
| +}; |
| + |
| +class PeepholeActionTable final { |
| + public: |
| + static void Initialize(); |
| + |
| + static const PeepholeActionAndData* const Lookup(Bytecode last, |
| + Bytecode current) { |
| + size_t index = ToIndex(last, current); |
| + return &table_[index]; |
| + } |
| + |
| + static bool initialized() { return initialized_; } |
| + |
| + private: |
| + static const size_t kNumberOfBytecodes = |
| + static_cast<size_t>(Bytecode::kLast) + 1; |
| + static const size_t kTableSize = kNumberOfBytecodes * kNumberOfBytecodes; |
| + |
| + static PeepholeActionAndData LookupActionAndData(Bytecode last, |
| + Bytecode current); |
| + static void InsertActionAndData(Bytecode last, Bytecode current, |
| + PeepholeActionAndData action_data); |
| + static Bytecode NextBytecode(Bytecode bytecode); |
| + static size_t ToIndex(Bytecode last, Bytecode current); |
| + |
| + static bool initialized_; |
| + static PeepholeActionAndData table_[kTableSize]; |
| +}; |
| + |
| +bool PeepholeActionTable::initialized_ = false; |
| +PeepholeActionAndData PeepholeActionTable::table_[kTableSize]; |
| + |
| +// static |
| +void PeepholeActionTable::Initialize() { |
| + DCHECK(!initialized_); |
| + for (Bytecode last = Bytecode::kWide; last <= Bytecode::kLast; |
| + last = NextBytecode(last)) { |
| + for (Bytecode current = Bytecode::kWide; current <= Bytecode::kLast; |
| + current = NextBytecode(current)) { |
| + PeepholeActionAndData action_data = LookupActionAndData(last, current); |
| + InsertActionAndData(last, current, action_data); |
| + } |
| + } |
| + initialized_ = true; |
| +} |
| + |
| +// static |
| +Bytecode PeepholeActionTable::NextBytecode(Bytecode bytecode) { |
| + return static_cast<Bytecode>(static_cast<size_t>(bytecode) + 1); |
| +} |
| + |
| +// static |
| +size_t PeepholeActionTable::ToIndex(Bytecode last, Bytecode current) { |
| + return static_cast<size_t>(last) * kNumberOfBytecodes + |
| + static_cast<size_t>(current); |
| +} |
| + |
| +// static |
| +void PeepholeActionTable::InsertActionAndData( |
| + Bytecode last, Bytecode current, PeepholeActionAndData action_data) { |
| + size_t index = ToIndex(last, current); |
| + table_[index] = action_data; |
| +} |
| + |
| +// static |
| +PeepholeActionAndData PeepholeActionTable::LookupActionAndData( |
| + Bytecode last, Bytecode current) { |
| + // TODO(oth): Investigate generating this table at compile time. |
| + |
| +  // Optimize various accumulator loads followed by a store of the |
| +  // accumulator into an equivalent register load plus a load of the |
| +  // accumulator from that register. The trailing accumulator load is |
| +  // side-effect free and is often followed by another accumulator |
| +  // load, so it can frequently be elided. |
| + if (current == Bytecode::kStar && last == Bytecode::kLdaNamedProperty) { |
| + return {PeepholeAction::kTransformLdaStarToLdrLdarAction, |
| + Bytecode::kLdrNamedProperty}; |
| + } else if (current == Bytecode::kStar && |
| + last == Bytecode::kLdaKeyedProperty) { |
| + return {PeepholeAction::kTransformLdaStarToLdrLdarAction, |
| + Bytecode::kLdrKeyedProperty}; |
| + } else if (current == Bytecode::kStar && last == Bytecode::kLdaGlobal) { |
| + return {PeepholeAction::kTransformLdaStarToLdrLdarAction, |
| + Bytecode::kLdrGlobal}; |
| + } else if (current == Bytecode::kStar && last == Bytecode::kLdaContextSlot) { |
| + return {PeepholeAction::kTransformLdaStarToLdrLdarAction, |
| + Bytecode::kLdrContextSlot}; |
| + } else if (current == Bytecode::kStar && last == Bytecode::kLdaUndefined) { |
| + return {PeepholeAction::kTransformLdaStarToLdrLdarAction, |
| + Bytecode::kLdrUndefined}; |
| + } |
| + |
| + // ToName optimizations: remove unnecessary ToName bytecodes. |
| + if (last == Bytecode::kLdaConstant && current == Bytecode::kToName) { |
| + return {PeepholeAction::kElideCurrentIfLoadingNameConstantAction, |
| + Bytecode::kIllegal}; |
| + } else if (Bytecodes::PutsNameInAccumulator(last) && |
| + current == Bytecode::kToName) { |
| + return {PeepholeAction::kElideCurrentAction, Bytecode::kIllegal}; |
| + } |
| + |
| +  // Nops are placeholders for holding source position information and |
| +  // can be elided if there is no source information. |
| + if (last == Bytecode::kNop) { |
| + if (Bytecodes::IsJump(current)) { |
| + return {PeepholeAction::kElideLastBeforeJumpAction, Bytecode::kIllegal}; |
| + } else { |
| + return {PeepholeAction::kElideLastAction, Bytecode::kIllegal}; |
| + } |
| + } |
| + |
| + // The accumulator is invisible to the debugger. If there is a sequence |
| + // of consecutive accumulator loads (that don't have side effects) then |
| + // only the final load is potentially visible. |
| + if (Bytecodes::IsAccumulatorLoadWithoutEffects(last) && |
| + Bytecodes::IsAccumulatorLoadWithoutEffects(current)) { |
| + return {PeepholeAction::kElideLastAction, Bytecode::kIllegal}; |
| + } |
| + |
| + // The current instruction clobbers the accumulator without reading |
| + // it. The load in the last instruction can be elided as it has no |
| + // effect. |
| + if (Bytecodes::IsAccumulatorLoadWithoutEffects(last) && |
| + Bytecodes::GetAccumulatorUse(current) == AccumulatorUse::kWrite) { |
| + return {PeepholeAction::kElideLastAction, Bytecode::kIllegal}; |
| + } |
| + |
| + // Ldar and Star make the accumulator and register hold equivalent |
| + // values. Only the first bytecode is needed if there's a sequence |
| + // of back-to-back Ldar and Star bytecodes with the same operand. |
| + if (Bytecodes::IsLdarOrStar(last) && Bytecodes::IsLdarOrStar(current)) { |
| + return {PeepholeAction::kElideCurrentIfOperand0MatchesAction, |
| + Bytecode::kIllegal}; |
| + } |
| + |
| + // Remove ToBoolean coercion from conditional jumps where possible. |
| + if (Bytecodes::WritesBooleanToAccumulator(last) && |
| + Bytecodes::IsJumpIfToBoolean(current)) { |
| + return {PeepholeAction::kChangeJumpBytecodeAction, |
| + Bytecodes::GetJumpWithoutToBoolean(current)}; |
| + } else if (Bytecodes::WritesBooleanToAccumulator(last) && |
| + current == Bytecode::kToBooleanLogicalNot) { |
| + return {PeepholeAction::kChangeBytecodeAction, Bytecode::kLogicalNot}; |
| + } |
| + |
| +  // If there is no last bytecode to optimize against, store the incoming |
| +  // bytecode, or for jumps, emit the incoming bytecode immediately. |
| + if (last == Bytecode::kIllegal) { |
| + if (Bytecodes::IsJump(current)) { |
| + return {PeepholeAction::kUpdateLastJumpAction, Bytecode::kIllegal}; |
| + } else { |
| + return {PeepholeAction::kUpdateLastAction, Bytecode::kIllegal}; |
| + } |
| + } |
| + |
| + // No matches, take the default action. |
| + if (Bytecodes::IsJump(current)) { |
| + return {PeepholeAction::kDefaultJumpAction, Bytecode::kIllegal}; |
| + } else { |
| + return {PeepholeAction::kDefaultAction, Bytecode::kIllegal}; |
| + } |
| +} |
| + |
| +} // namespace |
| + |
| BytecodePeepholeOptimizer::BytecodePeepholeOptimizer( |
| ConstantArrayBuilder* constant_array_builder, |
| BytecodePipelineStage* next_stage) |
| : constant_array_builder_(constant_array_builder), next_stage_(next_stage) { |
| + if (!PeepholeActionTable::initialized()) { |
| +    PeepholeActionTable::Initialize(); |

rmcilroy  2016/07/04 12:45:46
How does this play with threading? There could be …

| + } |
| InvalidateLast(); |
| } |
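
The reviewer's threading question above concerns the lazy, unguarded call to PeepholeActionTable::Initialize() in the constructor: two optimizers constructed on different threads could both observe initialized() as false and race on the shared table. Purely as an illustration of how such one-time setup is commonly serialized (this is not the CL's approach, and whether <mutex>/std::call_once is acceptable in this part of V8 is an assumption), here is a self-contained sketch with stand-in names:

// Hypothetical, self-contained sketch: ActionTable and
// EnsureTableInitialized are stand-ins, not the CL's PeepholeActionTable.
#include <iostream>
#include <mutex>
#include <thread>
#include <vector>

namespace {

struct ActionTable {
  // Stand-in for PeepholeActionTable::Initialize(): fills the lookup table.
  static void Initialize() { std::cout << "table initialized\n"; }
};

std::once_flag table_once;

void EnsureTableInitialized() {
  // std::call_once guarantees Initialize() runs exactly once, even when
  // called concurrently from several "optimizer" constructors.
  std::call_once(table_once, [] { ActionTable::Initialize(); });
}

}  // namespace

int main() {
  std::vector<std::thread> threads;
  for (int i = 0; i < 8; ++i) threads.emplace_back(EnsureTableInitialized);
  for (auto& t : threads) t.join();  // "table initialized" prints once.
}

An alternative that avoids runtime initialization altogether is generating the table at compile time, which the TODO(oth) in LookupActionAndData() already hints at.
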
| @@ -29,21 +228,6 @@ Handle<BytecodeArray> BytecodePeepholeOptimizer::ToBytecodeArray( |
| } |
| // override |
| -void BytecodePeepholeOptimizer::Write(BytecodeNode* node) { |
| - node = OptimizeAndEmitLast(node); |
| - if (node != nullptr) { |
| - SetLast(node); |
| - } |
| -} |
| - |
| -// override |
| -void BytecodePeepholeOptimizer::WriteJump(BytecodeNode* node, |
| - BytecodeLabel* label) { |
| - node = OptimizeAndEmitLast(node); |
| - next_stage_->WriteJump(node, label); |
| -} |
| - |
| -// override |
| void BytecodePeepholeOptimizer::BindLabel(BytecodeLabel* label) { |
| Flush(); |
| next_stage_->BindLabel(label); |
| @@ -52,14 +236,22 @@ void BytecodePeepholeOptimizer::BindLabel(BytecodeLabel* label) { |
| // override |
| void BytecodePeepholeOptimizer::BindLabel(const BytecodeLabel& target, |
| BytecodeLabel* label) { |
| - // There is no need to flush here, it will have been flushed when |target| |
| - // was bound. |
| + // There is no need to flush here, it will have been flushed when |
| + // |target| was bound. |
| next_stage_->BindLabel(target, label); |
| } |
| +// override |
| +void BytecodePeepholeOptimizer::WriteJump(BytecodeNode* node, |
| + BytecodeLabel* label) { |
| + Optimize(node); |
| + next_stage()->WriteJump(node, label); |
| +} |
| + |
| +// override |
| +void BytecodePeepholeOptimizer::Write(BytecodeNode* node) { Optimize(node); } |
| + |
| void BytecodePeepholeOptimizer::Flush() { |
| - // TODO(oth/rmcilroy): We could check CanElideLast() here to potentially |
| - // eliminate last rather than writing it. |
| if (LastIsValid()) { |
| next_stage_->Write(&last_); |
| InvalidateLast(); |
| @@ -86,50 +278,6 @@ Handle<Object> BytecodePeepholeOptimizer::GetConstantForIndexOperand( |
| return constant_array_builder_->At(index_operand); |
| } |
| -bool BytecodePeepholeOptimizer::LastBytecodePutsNameInAccumulator() const { |
| - DCHECK(LastIsValid()); |
| - return (last_.bytecode() == Bytecode::kTypeOf || |
| - last_.bytecode() == Bytecode::kToName || |
| - (last_.bytecode() == Bytecode::kLdaConstant && |
| - GetConstantForIndexOperand(&last_, 0)->IsName())); |
| -} |
| - |
| -void BytecodePeepholeOptimizer::TryToRemoveLastExpressionPosition( |
| - const BytecodeNode* const current) { |
| - if (current->source_info().is_valid() && |
| - last_.source_info().is_expression() && |
| - Bytecodes::IsWithoutExternalSideEffects(last_.bytecode())) { |
| - // The last bytecode has been marked as expression. It has no |
| - // external effects so can't throw and the current bytecode is a |
| - // source position. Remove the expression position on the last |
| - // bytecode to open up potential peephole optimizations and to |
| - // save the memory and perf cost of storing the unneeded |
| - // expression position. |
| - last_.source_info().set_invalid(); |
| - } |
| -} |
| - |
| -bool BytecodePeepholeOptimizer::CanElideCurrent( |
| - const BytecodeNode* const current) const { |
| - if (Bytecodes::IsLdarOrStar(last_.bytecode()) && |
| - Bytecodes::IsLdarOrStar(current->bytecode()) && |
| - current->operand(0) == last_.operand(0)) { |
| - // Ldar and Star make the accumulator and register hold equivalent |
| - // values. Only the first bytecode is needed if there's a sequence |
| - // of back-to-back Ldar and Star bytecodes with the same operand. |
| - return true; |
| - } else if (current->bytecode() == Bytecode::kToName && |
| - LastBytecodePutsNameInAccumulator()) { |
| - // If the previous bytecode ensured a name was in the accumulator, |
| - // the type coercion ToName() can be elided. |
| - return true; |
| - } else { |
| - // Additional candidates for eliding current: |
| - // (i) ToNumber if the last puts a number in the accumulator. |
| - return false; |
| - } |
| -} |
| - |
| bool BytecodePeepholeOptimizer::CanElideLastBasedOnSourcePosition( |
| const BytecodeNode* const current) const { |
| // |
| @@ -155,10 +303,7 @@ bool BytecodePeepholeOptimizer::CanElideLastBasedOnSourcePosition( |
| // The last bytecode can be elided for the MAYBE cases if the last |
| // bytecode is known not to throw. If it throws, the system would |
| // not have correct stack trace information. The appropriate check |
| - // for this would be Bytecodes::IsWithoutExternalSideEffects(), |
| - // which is checked in |
| - // BytecodePeepholeOptimizer::TransformLastAndCurrentBytecodes() to |
| - // keep the check here simple. |
| + // for this would be Bytecodes::IsWithoutExternalSideEffects(). |
| // |
| // In rare cases, bytecode generation produces consecutive bytecodes |
| // with the same expression positions. In principle, the latter of |
| @@ -190,136 +335,178 @@ void TransformLdaStarToLdrLdar(Bytecode new_bytecode, BytecodeNode* const last, |
| } // namespace |
| -bool BytecodePeepholeOptimizer::TransformLastAndCurrentBytecodes( |
| - BytecodeNode* const current) { |
| - if (current->bytecode() == Bytecode::kStar && |
| - !current->source_info().is_statement()) { |
| - // Note: If the Star is tagged with a statement position, we can't |
| - // perform this transform as the store to the register will |
| - // have the wrong ordering for stepping in the debugger. |
| - switch (last_.bytecode()) { |
| - case Bytecode::kLdaNamedProperty: |
| - TransformLdaStarToLdrLdar(Bytecode::kLdrNamedProperty, &last_, current); |
| - return true; |
| - case Bytecode::kLdaKeyedProperty: |
| - TransformLdaStarToLdrLdar(Bytecode::kLdrKeyedProperty, &last_, current); |
| - return true; |
| - case Bytecode::kLdaGlobal: |
| - TransformLdaStarToLdrLdar(Bytecode::kLdrGlobal, &last_, current); |
| - return true; |
| - case Bytecode::kLdaContextSlot: |
| - TransformLdaStarToLdrLdar(Bytecode::kLdrContextSlot, &last_, current); |
| - return true; |
| - case Bytecode::kLdaUndefined: |
| - TransformLdaStarToLdrLdar(Bytecode::kLdrUndefined, &last_, current); |
| - return true; |
| - default: |
| - break; |
| - } |
| - } |
| - return false; |
| +void BytecodePeepholeOptimizer::DefaultAction(BytecodeNode* const node) { |
| + DCHECK(LastIsValid()); |
| + DCHECK(!Bytecodes::IsJump(node->bytecode())); |
| + |
| + next_stage()->Write(last()); |
| + SetLast(node); |
| } |
| -bool BytecodePeepholeOptimizer::RemoveToBooleanFromJump( |
| - BytecodeNode* const current) { |
| - bool can_remove = Bytecodes::IsJumpIfToBoolean(current->bytecode()) && |
| - Bytecodes::WritesBooleanToAccumulator(last_.bytecode()); |
| - if (can_remove) { |
| - // Conditional jumps with boolean conditions are emiitted in |
| - // ToBoolean form by the bytecode array builder, |
| - // i.e. JumpIfToBooleanTrue rather JumpIfTrue. The ToBoolean |
| - // element can be removed if the previous bytecode put a boolean |
| - // value in the accumulator. |
| - Bytecode jump = Bytecodes::GetJumpWithoutToBoolean(current->bytecode()); |
| - current->set_bytecode(jump, current->operand(0)); |
| - } |
| - return can_remove; |
| +void BytecodePeepholeOptimizer::UpdateLastAction(BytecodeNode* const node) { |
| + DCHECK(!LastIsValid()); |
| + DCHECK(!Bytecodes::IsJump(node->bytecode())); |
| + |
| + SetLast(node); |
| } |
| -bool BytecodePeepholeOptimizer::RemoveToBooleanFromLogicalNot( |
| - BytecodeNode* const current) { |
| - bool can_remove = current->bytecode() == Bytecode::kToBooleanLogicalNot && |
| - Bytecodes::WritesBooleanToAccumulator(last_.bytecode()); |
| - if (can_remove) { |
| - // Logical-nots are emitted in ToBoolean form by the bytecode array |
| - // builder, The ToBoolean element can be removed if the previous bytecode |
| - // put a boolean value in the accumulator. |
| - current->set_bytecode(Bytecode::kLogicalNot); |
| +void BytecodePeepholeOptimizer::ElideCurrentAction(BytecodeNode* const node) { |
| + DCHECK(LastIsValid()); |
| + DCHECK(!Bytecodes::IsJump(node->bytecode())); |
| + |
| + if (node->source_info().is_valid()) { |
| + // Preserve the source information by replacing the node bytecode |
| + // with a no op bytecode. |
| + node->set_bytecode(Bytecode::kNop); |
| + DefaultAction(node); |
| + } else { |
| + // Nothing to do, keep last and wait for next bytecode to pair with it. |
| } |
| - return can_remove; |
| } |
| -bool BytecodePeepholeOptimizer::TransformCurrentBytecode( |
| - BytecodeNode* const current) { |
| - return RemoveToBooleanFromJump(current) || |
| - RemoveToBooleanFromLogicalNot(current); |
| -} |
| +void BytecodePeepholeOptimizer::ElideCurrentIfOperand0MatchesAction( |
| + BytecodeNode* const node) { |
| + DCHECK(LastIsValid()); |
| + DCHECK(!Bytecodes::IsJump(node->bytecode())); |
| -bool BytecodePeepholeOptimizer::CanElideLast( |
| - const BytecodeNode* const current) const { |
| - if (last_.bytecode() == Bytecode::kNop) { |
| - // Nop are placeholders for holding source position information. |
| - return true; |
| - } else if (Bytecodes::IsAccumulatorLoadWithoutEffects(current->bytecode()) && |
| - Bytecodes::IsAccumulatorLoadWithoutEffects(last_.bytecode())) { |
| - // The accumulator is invisible to the debugger. If there is a sequence of |
| - // consecutive accumulator loads (that don't have side effects) then only |
| - // the final load is potentially visible. |
| - return true; |
| - } else if (Bytecodes::GetAccumulatorUse(current->bytecode()) == |
| - AccumulatorUse::kWrite && |
| - Bytecodes::IsAccumulatorLoadWithoutEffects(last_.bytecode())) { |
| - // The current instruction clobbers the accumulator without reading it. The |
| - // load in the last instruction can be elided as it has no effect. |
| - return true; |
| + if (last()->operand(0) == node->operand(0)) { |
| + ElideCurrentAction(node); |
| } else { |
| - return false; |
| + DefaultAction(node); |
| } |
| } |
| -BytecodeNode* BytecodePeepholeOptimizer::Optimize(BytecodeNode* current) { |
| - TryToRemoveLastExpressionPosition(current); |
| +void BytecodePeepholeOptimizer::ElideCurrentIfLoadingNameConstantAction( |
| + BytecodeNode* const node) { |
| + DCHECK_EQ(last()->bytecode(), Bytecode::kLdaConstant); |
| + DCHECK(!Bytecodes::IsJump(node->bytecode())); |
| - if (TransformCurrentBytecode(current) || |
| - TransformLastAndCurrentBytecodes(current)) { |
| - return current; |
| + if (GetConstantForIndexOperand(last(), 0)->IsName()) { |
| + ElideCurrentAction(node); |
| + } else { |
| + DefaultAction(node); |
| } |
| +} |
| - if (CanElideCurrent(current)) { |
| - if (current->source_info().is_valid()) { |
| - // Preserve the source information by replacing the current bytecode |
| - // with a no op bytecode. |
| - current->set_bytecode(Bytecode::kNop); |
| - } else { |
| - current = nullptr; |
| +void BytecodePeepholeOptimizer::ElideLastAction(BytecodeNode* const node) { |
| + DCHECK(LastIsValid()); |
| + DCHECK(!Bytecodes::IsJump(node->bytecode())); |
| + |
| + if (CanElideLastBasedOnSourcePosition(node)) { |
| + if (last()->source_info().is_valid()) { |
| + // Node can not be valid per CanElideLastBasedOnSourcePosition(). |
| + node->source_info().Clone(last()->source_info()); |
| } |
| - return current; |
| + SetLast(node); |
| + } else { |
| + DefaultAction(node); |
| } |
| +} |
| - if (CanElideLast(current) && CanElideLastBasedOnSourcePosition(current)) { |
| - if (last_.source_info().is_valid()) { |
| - // Current can not be valid per CanElideLastBasedOnSourcePosition(). |
| - current->source_info().Clone(last_.source_info()); |
| - } |
| - InvalidateLast(); |
| - return current; |
| +void BytecodePeepholeOptimizer::ChangeBytecodeAction(BytecodeNode* node, |
| + Bytecode replacement) { |
| + DCHECK(LastIsValid()); |
| + DCHECK(!Bytecodes::IsJump(node->bytecode())); |
| + |
| + node->set_bytecode(replacement); |
| + DefaultAction(node); |
| +} |
| + |
| +void BytecodePeepholeOptimizer::TransformLdaStarToLdrLdarAction( |
| + BytecodeNode* node, Bytecode replacement) { |
| + DCHECK(LastIsValid()); |
| + DCHECK(!Bytecodes::IsJump(node->bytecode())); |
| + |
| + if (!node->source_info().is_statement()) { |
| + TransformLdaStarToLdrLdar(replacement, last(), node); |
| } |
| + DefaultAction(node); |
| +} |
| + |
| +void BytecodePeepholeOptimizer::DefaultJumpAction(BytecodeNode* node) { |
| + DCHECK(LastIsValid()); |
| + DCHECK(Bytecodes::IsJump(node->bytecode())); |
| - return current; |
| + next_stage()->Write(last()); |
| + InvalidateLast(); |
| } |
| -BytecodeNode* BytecodePeepholeOptimizer::OptimizeAndEmitLast( |
| - BytecodeNode* current) { |
| - // Attempt optimization if there is an earlier node to optimize with. |
| - if (LastIsValid()) { |
| - current = Optimize(current); |
| - // Only output the last node if it wasn't invalidated by the optimization. |
| - if (LastIsValid()) { |
| - next_stage_->Write(&last_); |
| - InvalidateLast(); |
| - } |
| +void BytecodePeepholeOptimizer::UpdateLastJumpAction(BytecodeNode* node) { |
| + DCHECK(!LastIsValid()); |
| + DCHECK(Bytecodes::IsJump(node->bytecode())); |
| +} |
| + |
| +void BytecodePeepholeOptimizer::ChangeJumpBytecodeAction(BytecodeNode* node, |
| + Bytecode replacement) { |
| + DCHECK(LastIsValid()); |
| + DCHECK(Bytecodes::IsJump(node->bytecode())); |
| + |
| + next_stage()->Write(last()); |
| + InvalidateLast(); |
| + node->set_bytecode(replacement, node->operand(0)); |
| +} |
| + |
| +void BytecodePeepholeOptimizer::ElideLastBeforeJumpAction(BytecodeNode* node) { |
| + DCHECK(LastIsValid()); |
| + DCHECK(Bytecodes::IsJump(node->bytecode())); |
| + |
| + if (!node->source_info().is_valid()) { |
| + node->source_info().Clone(last()->source_info()); |
| + } else { |
| + next_stage()->Write(last()); |
| + } |
| + InvalidateLast(); |
| +} |
| + |
| +void BytecodePeepholeOptimizer::Optimize(BytecodeNode* const node) { |
| +  // A single table is used for looking up peephole optimization |
| +  // matches as it has been observed to have better performance. This |
| +  // is in spite of the fact that jump bytecodes and non-jump bytecodes |
| +  // have different processing logic; in particular, a jump bytecode |
| +  // always needs to emit the jump via WriteJump(). |
| + const PeepholeActionAndData* const action_data = |
| + PeepholeActionTable::Lookup(last()->bytecode(), node->bytecode()); |

rmcilroy  2016/07/04 12:45:46
You could possibly store function pointers in the …

| + switch (action_data->action) { |
| + case PeepholeAction::kDefaultAction: |
| + DefaultAction(node); |
| + break; |
| + case PeepholeAction::kUpdateLastAction: |
| + UpdateLastAction(node); |
| + break; |
| + case PeepholeAction::kElideCurrentAction: |
| + ElideCurrentAction(node); |
| + break; |
| + case PeepholeAction::kElideCurrentIfOperand0MatchesAction: |
| + ElideCurrentIfOperand0MatchesAction(node); |
| + break; |
| + case PeepholeAction::kElideCurrentIfLoadingNameConstantAction: |
| + ElideCurrentIfLoadingNameConstantAction(node); |
| + break; |
| + case PeepholeAction::kElideLastAction: |
| + ElideLastAction(node); |
| + break; |
| + case PeepholeAction::kChangeBytecodeAction: |
| + ChangeBytecodeAction(node, action_data->bytecode); |
| + break; |
| + case PeepholeAction::kTransformLdaStarToLdrLdarAction: |
| + TransformLdaStarToLdrLdarAction(node, action_data->bytecode); |
| + break; |
| + case PeepholeAction::kDefaultJumpAction: |
| + DefaultJumpAction(node); |
| + break; |
| + case PeepholeAction::kUpdateLastJumpAction: |
| + UpdateLastJumpAction(node); |
| + break; |
| + case PeepholeAction::kChangeJumpBytecodeAction: |
| + ChangeJumpBytecodeAction(node, action_data->bytecode); |
| + break; |
| + case PeepholeAction::kElideLastBeforeJumpAction: |
| + ElideLastBeforeJumpAction(node); |
| + break; |
| + default: |
| + UNREACHABLE(); |
| + break; |
| } |
| - return current; |
| } |
| } // namespace interpreter |
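
rmcilroy's second (truncated) comment above appears to suggest storing handler function pointers in the table entries themselves, so the switch in Optimize() becomes a single indirect call. Below is a self-contained sketch of that idea; Optimizer, ActionAndData, and Index are invented names, not the CL's classes, and the handful of bytecodes is a toy enum:

#include <cstddef>
#include <cstdint>
#include <iostream>

namespace {

enum class Bytecode : uint8_t { kNop, kLdaZero, kStar, kLast = kStar };

class Optimizer;

// Table entry holding the handler directly instead of a PeepholeAction enum.
struct ActionAndData {
  void (Optimizer::*handler)(Bytecode node, Bytecode replacement);
  Bytecode bytecode;  // Optional replacement bytecode passed to the handler.
};

class Optimizer {
 public:
  Optimizer() {
    // Fill every (last, current) pair with the default handler, then
    // override the pairs that have a dedicated optimization.
    for (size_t i = 0; i < kCount * kCount; ++i) {
      table_[i] = {&Optimizer::DefaultAction, Bytecode::kNop};
    }
    table_[Index(Bytecode::kLdaZero, Bytecode::kStar)] = {
        &Optimizer::ElideCurrentAction, Bytecode::kNop};
  }

  void Optimize(Bytecode last, Bytecode current) {
    const ActionAndData& entry = table_[Index(last, current)];
    (this->*entry.handler)(current, entry.bytecode);  // No switch needed.
  }

 private:
  static constexpr size_t kCount = static_cast<size_t>(Bytecode::kLast) + 1;

  static size_t Index(Bytecode last, Bytecode current) {
    return static_cast<size_t>(last) * kCount + static_cast<size_t>(current);
  }

  void DefaultAction(Bytecode, Bytecode) { std::cout << "default\n"; }
  void ElideCurrentAction(Bytecode, Bytecode) { std::cout << "elide\n"; }

  ActionAndData table_[kCount * kCount];
};

}  // namespace

int main() {
  Optimizer o;
  o.Optimize(Bytecode::kLdaZero, Bytecode::kStar);  // Prints "elide".
  o.Optimize(Bytecode::kNop, Bytecode::kNop);       // Prints "default".
}

Keeping the table as plain data (a PeepholeAction enum plus an optional bytecode), as the CL does, trades one switch per lookup for entries that are trivially constant and leaves the door open to building the table at compile time.
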