Chromium Code Reviews
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/interpreter/bytecode-peephole-optimizer.h"

#include "src/interpreter/constant-array-builder.h"
#include "src/objects-inl.h"
#include "src/objects.h"

namespace v8 {
namespace internal {
namespace interpreter {

BytecodePeepholeOptimizer::BytecodePeepholeOptimizer(
    ConstantArrayBuilder* constant_array_builder,
    BytecodePipelineStage* next_stage)
    : constant_array_builder_(constant_array_builder),
      next_stage_(next_stage),
      last_(Bytecode::kNop),
rmcilroy 2016/05/12 12:15:13:
nit - as mentioned offline, would be nice to use k
oth 2016/05/12 14:59:52:
Done.
      last_is_valid_(false),
      last_is_discardable_(false) {}

void BytecodePeepholeOptimizer::InvalidateLast() { last_is_valid_ = false; }

bool BytecodePeepholeOptimizer::LastIsValid() const { return last_is_valid_; }

void BytecodePeepholeOptimizer::SetLast(const BytecodeNode* const node) {
  memcpy(&last_, node, sizeof(*node));
  last_is_valid_ = true;
  last_is_discardable_ = true;
}

// override
size_t BytecodePeepholeOptimizer::FlushForOffset() {
  size_t buffered_size = next_stage_->FlushForOffset();
  if (LastIsValid()) {
    if (last_.bytecode() == Bytecode::kNop &&
        !last_.source_info().is_statement()) {
rmcilroy 2016/05/12 12:15:13:
nit - please add a comment here as to why the Nop
oth 2016/05/12 14:59:52:
Done.
      InvalidateLast();
    } else {
      buffered_size += last_.Size();
      last_is_discardable_ = false;
    }
  }
  return buffered_size;
}
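
FlushForOffset() above drops a buffered Nop before reporting sizes, but only if it does not carry a statement source position. A minimal standalone sketch of that rule; Node, FlushedSize and their fields are illustrative assumptions, not V8 types:

#include <cstddef>
#include <iostream>

struct Node {
  bool is_nop;        // placeholder bytecode?
  bool is_statement;  // carries a statement source position?
  size_t size;        // encoded size in bytes
};

// Mirrors the rule in FlushForOffset(): a droppable Nop adds no size.
size_t FlushedSize(size_t downstream_size, const Node* buffered) {
  if (buffered == nullptr) return downstream_size;
  if (buffered->is_nop && !buffered->is_statement) return downstream_size;
  return downstream_size + buffered->size;
}

int main() {
  Node expression_nop{true, false, 1};
  Node statement_nop{true, true, 1};
  std::cout << FlushedSize(10, &expression_nop) << "\n";  // 10: Nop dropped
  std::cout << FlushedSize(10, &statement_nop) << "\n";   // 11: Nop kept
}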

// override
void BytecodePeepholeOptimizer::FlushBasicBlock() {
  if (LastIsValid()) {
    next_stage_->Write(&last_);
    InvalidateLast();
  }
  last_is_discardable_ = false;
  next_stage_->FlushBasicBlock();
}

// override
void BytecodePeepholeOptimizer::Write(BytecodeNode* node) {
  // Attempt optimization if there is an earlier node to optimize with.
  if (LastIsValid()) {
    node = Optimize(node);
    // Only output if optimization did not invalidate earlier node.
rmcilroy 2016/05/12 12:15:13:
nit - // Only output the last node if it wasn't in
oth 2016/05/12 14:59:52:
Done.
    if (LastIsValid()) {
      next_stage_->Write(&last_);
      InvalidateLast();
    }
  }

  if (node != nullptr) {
    SetLast(node);
  }
}
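
Write() implements a one-bytecode lookbehind: the incoming node is first given a chance to combine with the buffered one, the buffered node is then emitted to the next stage if it is still valid, and the possibly rewritten incoming node becomes the new buffer entry. A minimal sketch of that buffering pattern with toy types; OneDeepBuffer, Node and Flush() are illustrative assumptions, not the V8 pipeline classes:

#include <functional>
#include <iostream>
#include <optional>
#include <string>
#include <utility>

struct Node { std::string name; };

class OneDeepBuffer {
 public:
  explicit OneDeepBuffer(std::function<void(const Node&)> emit)
      : emit_(std::move(emit)) {}

  void Write(Node node) {
    if (last_) {
      // A real peephole stage would try to combine or elide here; this
      // sketch just forwards the buffered node unchanged.
      emit_(*last_);
    }
    last_ = std::move(node);
  }

  void Flush() {
    if (last_) {
      emit_(*last_);
      last_.reset();
    }
  }

 private:
  std::function<void(const Node&)> emit_;
  std::optional<Node> last_;
};

int main() {
  OneDeepBuffer buffer([](const Node& n) { std::cout << n.name << "\n"; });
  buffer.Write({"Ldar r0"});
  buffer.Write({"Star r1"});
  buffer.Flush();  // without Flush(), "Star r1" would stay buffered
}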

Handle<Object> BytecodePeepholeOptimizer::GetConstantForIndexOperand(
    const BytecodeNode* const node, int index) const {
  DCHECK_LE(index, node->operand_count());
  DCHECK_EQ(Bytecodes::GetOperandType(node->bytecode(), 0), OperandType::kIdx);
  uint32_t index_operand = node->operand(0);
  return constant_array_builder_->At(index_operand);
}

bool BytecodePeepholeOptimizer::LastBytecodePutsNameInAccumulator() const {
  DCHECK(LastIsValid());
  return (last_.bytecode() == Bytecode::kTypeOf ||
          last_.bytecode() == Bytecode::kToName ||
          (last_.bytecode() == Bytecode::kLdaConstant &&
           GetConstantForIndexOperand(&last_, 0)->IsName()));
rmcilroy 2016/05/12 12:15:13:
I wish we could put this in bytecodes.h but don't
oth 2016/05/12 14:59:52:
Acknowledged.
}

void BytecodePeepholeOptimizer::UpdateCurrentBytecode(BytecodeNode* current) {
  // Conditional jumps with boolean conditions are emitted in
  // ToBoolean form by the bytecode array builder,
  // i.e. JumpIfToBooleanTrue rather than JumpIfTrue. The ToBoolean element
  // can be removed if the previous bytecode put a boolean value in
  // the accumulator.
  if (Bytecodes::IsJumpIfToBoolean(current->bytecode()) &&
      Bytecodes::WritesBooleanToAccumulator(last_.bytecode())) {
    Bytecode jump = Bytecodes::GetJumpWithoutToBoolean(current->bytecode());
    current->set_bytecode(jump, current->operand(0), current->operand_scale());
  }
}
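
For example, a comparison bytecode already leaves a boolean in the accumulator, so a following JumpIfToBooleanTrue can become JumpIfTrue. A minimal sketch of that rewrite with a toy bytecode enum; Bc, StripToBoolean and the enumerators are illustrative assumptions, not V8's:

#include <iostream>

enum class Bc { kTestEqual, kLdaSmi, kJumpIfToBooleanTrue, kJumpIfTrue };

bool WritesBooleanToAccumulator(Bc b) { return b == Bc::kTestEqual; }

Bc StripToBoolean(Bc jump, Bc previous) {
  if (jump == Bc::kJumpIfToBooleanTrue &&
      WritesBooleanToAccumulator(previous)) {
    return Bc::kJumpIfTrue;  // ToBoolean coercion is redundant
  }
  return jump;
}

int main() {
  std::cout << (StripToBoolean(Bc::kJumpIfToBooleanTrue, Bc::kTestEqual) ==
                Bc::kJumpIfTrue)
            << "\n";  // 1: rewritten
  std::cout << (StripToBoolean(Bc::kJumpIfToBooleanTrue, Bc::kLdaSmi) ==
                Bc::kJumpIfTrue)
            << "\n";  // 0: no boolean guarantee, keep the ToBoolean form
}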

bool BytecodePeepholeOptimizer::CanElideCurrent(
    const BytecodeNode* const current) const {
  if (Bytecodes::IsLdarOrStar(last_.bytecode()) &&
      Bytecodes::IsLdarOrStar(current->bytecode()) &&
      current->operand(0) == last_.operand(0)) {
    // Ldar and Star make the accumulator and register hold equivalent
    // values. Only the first bytecode is needed if there's a sequence
    // of back-to-back of Ldar and Star bytecodes with the same operand.
rmcilroy 2016/05/12 12:15:13:
nit - remove second "of"
oth 2016/05/12 14:59:52:
Done.
    return true;
  } else if (current->bytecode() == Bytecode::kToName &&
             LastBytecodePutsNameInAccumulator()) {
    // If the previous bytecode ensured a name was in the accumulator,
    // the type coercion ToName() can be elided.
    return true;
  } else {
    return false;
  }
}
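
The first branch of CanElideCurrent() catches sequences such as "Star r0; Ldar r0", where the second bytecode copies a value that is already in place. A minimal sketch of that check with toy types; Bc, Node and CurrentIsRedundant are illustrative assumptions, and registers are reduced to a single integer operand:

#include <iostream>

enum class Bc { kLdar, kStar, kAdd };

struct Node {
  Bc bytecode;
  int reg;  // single register operand (toy simplification)
};

bool IsLdarOrStar(Bc b) { return b == Bc::kLdar || b == Bc::kStar; }

bool CurrentIsRedundant(const Node& last, const Node& current) {
  return IsLdarOrStar(last.bytecode) && IsLdarOrStar(current.bytecode) &&
         last.reg == current.reg;
}

int main() {
  Node star_r0{Bc::kStar, 0};
  Node ldar_r0{Bc::kLdar, 0};
  Node ldar_r1{Bc::kLdar, 1};
  std::cout << CurrentIsRedundant(star_r0, ldar_r0) << "\n";  // 1: elide
  std::cout << CurrentIsRedundant(star_r0, ldar_r1) << "\n";  // 0: other register
}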

bool BytecodePeepholeOptimizer::CanElideLast(
    const BytecodeNode* const current) const {
  if (!last_is_discardable_) {
    return false;
  }

  if (last_.bytecode() == Bytecode::kNop) {
    // Nops are placeholders for holding source position information
    // and can be elided.
    return true;
  } else if (Bytecodes::IsAccumulatorLoadWithoutEffects(current->bytecode()) &&
             Bytecodes::IsAccumulatorLoadWithoutEffects(last_.bytecode())) {
    // The accumulator is invisible to the debugger. If there is a sequence of
    // consecutive accumulator loads (that don't have side effects) then only
    // the final load is potentially visible.
    return true;
  } else {
    return false;
  }
}
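
CanElideLast() is the mirror case: here it is the buffered bytecode that becomes dead, for example when one effect-free accumulator load is immediately overwritten by another before anything could observe it. A minimal sketch of the decision with toy types; Bc and the helpers below are illustrative assumptions, not V8's:

#include <iostream>

enum class Bc { kNop, kLdaZero, kLdaSmi, kAdd };

bool IsAccumulatorLoadWithoutEffects(Bc b) {
  return b == Bc::kLdaZero || b == Bc::kLdaSmi;
}

bool CanElideLast(Bc last, Bc current, bool last_is_discardable) {
  if (!last_is_discardable) return false;
  if (last == Bc::kNop) return true;  // source-position placeholder
  return IsAccumulatorLoadWithoutEffects(current) &&
         IsAccumulatorLoadWithoutEffects(last);
}

int main() {
  std::cout << CanElideLast(Bc::kLdaZero, Bc::kLdaSmi, true) << "\n";  // 1
  std::cout << CanElideLast(Bc::kLdaZero, Bc::kAdd, true) << "\n";     // 0
  std::cout << CanElideLast(Bc::kNop, Bc::kAdd, false) << "\n";        // 0: kept
}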

BytecodeNode* BytecodePeepholeOptimizer::Optimize(BytecodeNode* current) {
  UpdateCurrentBytecode(current);

  if (CanElideCurrent(current)) {
    if (current->source_info().is_valid()) {
      current->set_bytecode(Bytecode::kNop);
    } else {
      current = nullptr;
    }
  } else if (CanElideLast(current)) {
    if (last_.source_info().is_valid()) {
      current->source_info().Update(last_.source_info());
    }
    InvalidateLast();
  }
  return current;
}
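
Optimize() is careful not to lose source positions: an elided current node that carries a valid position is degraded to a Nop rather than removed outright, and an elided last node passes its position on to the surviving node. A simplified sketch of those two cases with toy types; Node, ElideCurrent and ElideLast are illustrative assumptions, and the position merge is reduced to "fill in if missing":

#include <iostream>
#include <optional>

enum class Bc { kNop, kLdar, kStar };

struct Node {
  Bc bytecode;
  std::optional<int> source_position;
};

// Returns the node to keep, or std::nullopt if current disappears entirely.
std::optional<Node> ElideCurrent(Node current) {
  if (current.source_position) {
    current.bytecode = Bc::kNop;  // keep the position on a placeholder
    return current;
  }
  return std::nullopt;  // nothing worth keeping
}

void ElideLast(const Node& last, Node& current) {
  if (last.source_position && !current.source_position) {
    current.source_position = last.source_position;  // position moves forward
  }
}

int main() {
  std::optional<Node> kept = ElideCurrent({Bc::kLdar, 42});
  std::cout << (kept && kept->bytecode == Bc::kNop) << "\n";  // 1: became a Nop

  Node current{Bc::kStar, std::nullopt};
  ElideLast({Bc::kNop, 7}, current);
  std::cout << current.source_position.value_or(-1) << "\n";  // 7: inherited
}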

}  // namespace interpreter
}  // namespace internal
}  // namespace v8