| OLD | NEW |
| 1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2016 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/interpreter/bytecode-peephole-optimizer.h" | 5 #include "src/interpreter/bytecode-peephole-optimizer.h" |
| 6 | 6 |
| 7 // The following file is generated by mkpeephole. |
| 8 #include <bytecode-peephole-table.h> |
| 9 |
| 7 #include "src/interpreter/constant-array-builder.h" | 10 #include "src/interpreter/constant-array-builder.h" |
| 8 #include "src/objects-inl.h" | 11 #include "src/objects-inl.h" |
| 9 #include "src/objects.h" | 12 #include "src/objects.h" |
| 10 | 13 |
| 11 namespace v8 { | 14 namespace v8 { |
| 12 namespace internal { | 15 namespace internal { |
| 13 namespace interpreter { | 16 namespace interpreter { |
| 14 | 17 |
| 15 BytecodePeepholeOptimizer::BytecodePeepholeOptimizer( | 18 BytecodePeepholeOptimizer::BytecodePeepholeOptimizer( |
| 16 ConstantArrayBuilder* constant_array_builder, | 19 ConstantArrayBuilder* constant_array_builder, |
| 17 BytecodePipelineStage* next_stage) | 20 BytecodePipelineStage* next_stage) |
| 18 : constant_array_builder_(constant_array_builder), next_stage_(next_stage) { | 21 : constant_array_builder_(constant_array_builder), next_stage_(next_stage) { |
| 19 InvalidateLast(); | 22 InvalidateLast(); |
| 20 } | 23 } |
| 21 | 24 |
| 22 // override | 25 // override |
| 23 Handle<BytecodeArray> BytecodePeepholeOptimizer::ToBytecodeArray( | 26 Handle<BytecodeArray> BytecodePeepholeOptimizer::ToBytecodeArray( |
| 24 int fixed_register_count, int parameter_count, | 27 int fixed_register_count, int parameter_count, |
| 25 Handle<FixedArray> handler_table) { | 28 Handle<FixedArray> handler_table) { |
| 26 Flush(); | 29 Flush(); |
| 27 return next_stage_->ToBytecodeArray(fixed_register_count, parameter_count, | 30 return next_stage_->ToBytecodeArray(fixed_register_count, parameter_count, |
| 28 handler_table); | 31 handler_table); |
| 29 } | 32 } |
| 30 | 33 |
| 31 // override | 34 // override |
| 32 void BytecodePeepholeOptimizer::Write(BytecodeNode* node) { | |
| 33 node = OptimizeAndEmitLast(node); | |
| 34 if (node != nullptr) { | |
| 35 SetLast(node); | |
| 36 } | |
| 37 } | |
| 38 | |
| 39 // override | |
| 40 void BytecodePeepholeOptimizer::WriteJump(BytecodeNode* node, | |
| 41 BytecodeLabel* label) { | |
| 42 node = OptimizeAndEmitLast(node); | |
| 43 next_stage_->WriteJump(node, label); | |
| 44 } | |
| 45 | |
| 46 // override | |
| 47 void BytecodePeepholeOptimizer::BindLabel(BytecodeLabel* label) { | 35 void BytecodePeepholeOptimizer::BindLabel(BytecodeLabel* label) { |
| 48 Flush(); | 36 Flush(); |
| 49 next_stage_->BindLabel(label); | 37 next_stage_->BindLabel(label); |
| 50 } | 38 } |
| 51 | 39 |
| 52 // override | 40 // override |
| 53 void BytecodePeepholeOptimizer::BindLabel(const BytecodeLabel& target, | 41 void BytecodePeepholeOptimizer::BindLabel(const BytecodeLabel& target, |
| 54 BytecodeLabel* label) { | 42 BytecodeLabel* label) { |
| 55 // There is no need to flush here, it will have been flushed when |target| | 43 // There is no need to flush here, it will have been flushed when |
| 56 // was bound. | 44 // |target| was bound. |
| 57 next_stage_->BindLabel(target, label); | 45 next_stage_->BindLabel(target, label); |
| 58 } | 46 } |
| 59 | 47 |
| 48 // override |
| 49 void BytecodePeepholeOptimizer::WriteJump(BytecodeNode* node, |
| 50 BytecodeLabel* label) { |
| 51 Optimize(node); |
| 52 next_stage()->WriteJump(node, label); |
| 53 } |
| 54 |
| 55 // override |
| 56 void BytecodePeepholeOptimizer::Write(BytecodeNode* node) { Optimize(node); } |
| 57 |
| 60 void BytecodePeepholeOptimizer::Flush() { | 58 void BytecodePeepholeOptimizer::Flush() { |
| 61 // TODO(oth/rmcilroy): We could check CanElideLast() here to potentially | |
| 62 // eliminate last rather than writing it. | |
| 63 if (LastIsValid()) { | 59 if (LastIsValid()) { |
| 64 next_stage_->Write(&last_); | 60 next_stage_->Write(&last_); |
| 65 InvalidateLast(); | 61 InvalidateLast(); |
| 66 } | 62 } |
| 67 } | 63 } |
| 68 | 64 |
| 69 void BytecodePeepholeOptimizer::InvalidateLast() { | 65 void BytecodePeepholeOptimizer::InvalidateLast() { |
| 70 last_.set_bytecode(Bytecode::kIllegal); | 66 last_.set_bytecode(Bytecode::kIllegal); |
| 71 } | 67 } |
| 72 | 68 |
| 73 bool BytecodePeepholeOptimizer::LastIsValid() const { | 69 bool BytecodePeepholeOptimizer::LastIsValid() const { |
| 74 return last_.bytecode() != Bytecode::kIllegal; | 70 return last_.bytecode() != Bytecode::kIllegal; |
| 75 } | 71 } |
| 76 | 72 |
| 77 void BytecodePeepholeOptimizer::SetLast(const BytecodeNode* const node) { | 73 void BytecodePeepholeOptimizer::SetLast(const BytecodeNode* const node) { |
| 78 last_.Clone(node); | 74 last_.Clone(node); |
| 79 } | 75 } |
| 80 | 76 |
| 81 Handle<Object> BytecodePeepholeOptimizer::GetConstantForIndexOperand( | 77 Handle<Object> BytecodePeepholeOptimizer::GetConstantForIndexOperand( |
| 82 const BytecodeNode* const node, int index) const { | 78 const BytecodeNode* const node, int index) const { |
| 83 DCHECK_LE(index, node->operand_count()); | 79 DCHECK_LE(index, node->operand_count()); |
| 84 DCHECK_EQ(Bytecodes::GetOperandType(node->bytecode(), 0), OperandType::kIdx); | 80 DCHECK_EQ(Bytecodes::GetOperandType(node->bytecode(), 0), OperandType::kIdx); |
| 85 uint32_t index_operand = node->operand(0); | 81 uint32_t index_operand = node->operand(0); |
| 86 return constant_array_builder_->At(index_operand); | 82 return constant_array_builder_->At(index_operand); |
| 87 } | 83 } |
| 88 | 84 |
| 89 bool BytecodePeepholeOptimizer::LastBytecodePutsNameInAccumulator() const { | |
| 90 DCHECK(LastIsValid()); | |
| 91 return (last_.bytecode() == Bytecode::kTypeOf || | |
| 92 last_.bytecode() == Bytecode::kToName || | |
| 93 (last_.bytecode() == Bytecode::kLdaConstant && | |
| 94 GetConstantForIndexOperand(&last_, 0)->IsName())); | |
| 95 } | |
| 96 | |
| 97 void BytecodePeepholeOptimizer::TryToRemoveLastExpressionPosition( | |
| 98 const BytecodeNode* const current) { | |
| 99 if (current->source_info().is_valid() && | |
| 100 last_.source_info().is_expression() && | |
| 101 Bytecodes::IsWithoutExternalSideEffects(last_.bytecode())) { | |
| 102 // The last bytecode has been marked as expression. It has no | |
| 103 // external effects so can't throw and the current bytecode is a | |
| 104 // source position. Remove the expression position on the last | |
| 105 // bytecode to open up potential peephole optimizations and to | |
| 106 // save the memory and perf cost of storing the unneeded | |
| 107 // expression position. | |
| 108 last_.source_info().set_invalid(); | |
| 109 } | |
| 110 } | |
| 111 | |
| 112 bool BytecodePeepholeOptimizer::CanElideCurrent( | |
| 113 const BytecodeNode* const current) const { | |
| 114 if (Bytecodes::IsLdarOrStar(last_.bytecode()) && | |
| 115 Bytecodes::IsLdarOrStar(current->bytecode()) && | |
| 116 current->operand(0) == last_.operand(0)) { | |
| 117 // Ldar and Star make the accumulator and register hold equivalent | |
| 118 // values. Only the first bytecode is needed if there's a sequence | |
| 119 // of back-to-back Ldar and Star bytecodes with the same operand. | |
| 120 return true; | |
| 121 } else if (current->bytecode() == Bytecode::kToName && | |
| 122 LastBytecodePutsNameInAccumulator()) { | |
| 123 // If the previous bytecode ensured a name was in the accumulator, | |
| 124 // the type coercion ToName() can be elided. | |
| 125 return true; | |
| 126 } else { | |
| 127 // Additional candidates for eliding current: | |
| 128 // (i) current is Nop. | |
| 129 // (ii) ToNumber if the last puts a number in the accumulator. | |
| 130 return false; | |
| 131 } | |
| 132 } | |
| 133 | |
| 134 bool BytecodePeepholeOptimizer::CanElideLastBasedOnSourcePosition( | 85 bool BytecodePeepholeOptimizer::CanElideLastBasedOnSourcePosition( |
| 135 const BytecodeNode* const current) const { | 86 const BytecodeNode* const current) const { |
| 136 // | 87 // |
| 137 // The rules for allowing the elision of the last bytecode based | 88 // The rules for allowing the elision of the last bytecode based |
| 138 // on source position are: | 89 // on source position are: |
| 139 // | 90 // |
| 140 // C U R R E N T | 91 // C U R R E N T |
| 141 // +--------+--------+--------+ | 92 // +--------+--------+--------+ |
| 142 // | None | Expr | Stmt | | 93 // | None | Expr | Stmt | |
| 143 // L +--------+--------+--------+--------+ | 94 // L +--------+--------+--------+--------+ |
| 144 // | None | YES | YES | YES | | 95 // | None | YES | YES | YES | |
| 145 // A +--------+--------+--------+--------+ | 96 // A +--------+--------+--------+--------+ |
| 146 // | Expr | YES | MAYBE | MAYBE | | 97 // | Expr | YES | MAYBE | MAYBE | |
| 147 // S +--------+--------+--------+--------+ | 98 // S +--------+--------+--------+--------+ |
| 148 // | Stmt | YES | NO | NO | | 99 // | Stmt | YES | NO | NO | |
| 149 // T +--------+--------+--------+--------+ | 100 // T +--------+--------+--------+--------+ |
| 150 // | 101 // |
| 151 // The goal is not to lose any statement positions and not to lose useful | 102 // The goal is not to lose any statement positions and not to lose useful |
| 152 // expression positions. Whenever the last bytecode is elided its | 103 // expression positions. Whenever the last bytecode is elided its |
| 153 // source position information is applied to the current node | 104 // source position information is applied to the current node |
| 154 // updating it if necessary. | 105 // updating it if necessary. |
| 155 // | 106 // |
| 156 // The last bytecode can be elided for the MAYBE cases if the last | 107 // The last bytecode can be elided for the MAYBE cases if the last |
| 157 // bytecode is known not to throw. If it throws, the system would | 108 // bytecode is known not to throw. If it throws, the system would |
| 158 // not have correct stack trace information. The appropriate check | 109 // not have correct stack trace information. The appropriate check |
| 159 // for this would be Bytecodes::IsWithoutExternalSideEffects(), | 110 // for this would be Bytecodes::IsWithoutExternalSideEffects(). |
| 160 // which is checked in | |
| 161 // BytecodePeepholeOptimizer::TransformLastAndCurrentBytecodes() to | |
| 162 // keep the check here simple. | |
| 163 // | 111 // |
| 164 // In rare cases, bytecode generation produces consecutive bytecodes | 112 // In rare cases, bytecode generation produces consecutive bytecodes |
| 165 // with the same expression positions. In principle, the latter of | 113 // with the same expression positions. In principle, the latter of |
| 166 // these can be elided, but would make this function more expensive. | 114 // these can be elided, but would make this function more expensive. |
| 167 // | 115 // |
| 168 return (!last_.source_info().is_valid() || | 116 return (!last_.source_info().is_valid() || |
| 169 !current->source_info().is_valid()); | 117 !current->source_info().is_valid()); |
| 170 } | 118 } |
| 171 | 119 |
| 172 namespace { | 120 namespace { |
| 173 | 121 |
| 174 void TransformLdaStarToLdrLdar(Bytecode new_bytecode, BytecodeNode* const last, | 122 void TransformLdaStarToLdrLdar(Bytecode new_bytecode, BytecodeNode* const last, |
| 175 BytecodeNode* const current) { | 123 BytecodeNode* const current) { |
| 176 DCHECK_EQ(current->bytecode(), Bytecode::kStar); | 124 DCHECK_EQ(current->bytecode(), Bytecode::kStar); |
| 177 | 125 |
| 178 // | 126 // |
| 179 // An example transformation here would be: | 127 // An example transformation here would be: |
| 180 // | 128 // |
| 181 // LdaGlobal i0, i1 ____\ LdrGlobal i0, i1, R | 129 // LdaGlobal i0, i1 ____\ LdrGlobal i0, i1, R |
| 182 // Star R ====/ Ldar R | 130 // Star R ====/ Ldar R |
| 183 // | 131 // |
| 184 // which loads a global value into both a register and the | 132 // which loads a global value into both a register and the |
| 185 // accumulator. However, in the second form the Ldar can often be | 133 // accumulator. However, in the second form the Ldar can often be |
| 186 // peephole optimized away unlike the Star in the first form. | 134 // peephole optimized away unlike the Star in the first form. |
| 187 // | 135 // |
| 188 last->Transform(new_bytecode, current->operand(0)); | 136 last->Transform(new_bytecode, current->operand(0)); |
| 189 current->set_bytecode(Bytecode::kLdar, current->operand(0)); | 137 current->set_bytecode(Bytecode::kLdar, current->operand(0)); |
| 190 } | 138 } |
| 191 | 139 |
| 192 void TransformToBinaryOpWithSmiOnRhs(Bytecode new_bytecode, | 140 void TransformLdaSmiBinaryOpToBinaryOpWithSmi(Bytecode new_bytecode, |
| 193 BytecodeNode* const last, | 141 BytecodeNode* const last, |
| 194 BytecodeNode* const current) { | 142 BytecodeNode* const current) { |
| 195 DCHECK(Bytecodes::IsLdaSmiOrLdaZero(last->bytecode())); | 143 DCHECK_EQ(last->bytecode(), Bytecode::kLdaSmi); |
| 196 uint32_t imm_operand = | 144 current->set_bytecode(new_bytecode, last->operand(0), current->operand(0)); |
| 197 last->bytecode() == Bytecode::kLdaSmi ? last->operand(0) : 0; | |
| 198 current->set_bytecode(new_bytecode, imm_operand, current->operand(0)); | |
| 199 if (last->source_info().is_valid()) { | 145 if (last->source_info().is_valid()) { |
| 200 current->source_info().Clone(last->source_info()); | 146 current->source_info().Clone(last->source_info()); |
| 201 } | 147 } |
| 148 } |
| 149 |
| 150 void TransformLdaZeroBinaryOpToBinaryOpWithZero(Bytecode new_bytecode, |
| 151 BytecodeNode* const last, |
| 152 BytecodeNode* const current) { |
| 153 DCHECK_EQ(last->bytecode(), Bytecode::kLdaZero); |
| 154 current->set_bytecode(new_bytecode, 0, current->operand(0)); |
| 155 if (last->source_info().is_valid()) { |
| 156 current->source_info().Clone(last->source_info()); |
| 157 } |
| 202 } | 158 } |
| 203 | 159 |
| 204 } // namespace | 160 } // namespace |
| 205 | 161 |
| 206 bool BytecodePeepholeOptimizer::TransformLastAndCurrentBytecodes( | 162 void BytecodePeepholeOptimizer::DefaultAction( |
| 207 BytecodeNode* const current) { | 163 BytecodeNode* const node, const PeepholeActionAndData* action_data) { |
| 208 if (current->bytecode() == Bytecode::kStar && | 164 DCHECK(LastIsValid()); |
| 209 !current->source_info().is_statement()) { | 165 DCHECK(!Bytecodes::IsJump(node->bytecode())); |
| 210 // Note: If the Star is tagged with a statement position, we can't | |
| 211 // perform this transform as the store to the register will | |
| 212 // have the wrong ordering for stepping in the debugger. | |
| 213 switch (last_.bytecode()) { | |
| 214 case Bytecode::kLdaNamedProperty: | |
| 215 TransformLdaStarToLdrLdar(Bytecode::kLdrNamedProperty, &last_, current); | |
| 216 return true; | |
| 217 case Bytecode::kLdaKeyedProperty: | |
| 218 TransformLdaStarToLdrLdar(Bytecode::kLdrKeyedProperty, &last_, current); | |
| 219 return true; | |
| 220 case Bytecode::kLdaGlobal: | |
| 221 TransformLdaStarToLdrLdar(Bytecode::kLdrGlobal, &last_, current); | |
| 222 return true; | |
| 223 case Bytecode::kLdaContextSlot: | |
| 224 TransformLdaStarToLdrLdar(Bytecode::kLdrContextSlot, &last_, current); | |
| 225 return true; | |
| 226 case Bytecode::kLdaUndefined: | |
| 227 TransformLdaStarToLdrLdar(Bytecode::kLdrUndefined, &last_, current); | |
| 228 return true; | |
| 229 default: | |
| 230 break; | |
| 231 } | |
| 232 } else if (Bytecodes::IsLdaSmiOrLdaZero(last_.bytecode()) && | |
| 233 (!last_.source_info().is_valid() || | |
| 234 !current->source_info().is_valid())) { | |
| 235 switch (current->bytecode()) { | |
| 236 case Bytecode::kAdd: | |
| 237 TransformToBinaryOpWithSmiOnRhs(Bytecode::kAddSmi, &last_, current); | |
| 238 InvalidateLast(); | |
| 239 return true; | |
| 240 case Bytecode::kSub: | |
| 241 TransformToBinaryOpWithSmiOnRhs(Bytecode::kSubSmi, &last_, current); | |
| 242 InvalidateLast(); | |
| 243 return true; | |
| 244 case Bytecode::kBitwiseOr: | |
| 245 TransformToBinaryOpWithSmiOnRhs(Bytecode::kBitwiseOrSmi, &last_, | |
| 246 current); | |
| 247 InvalidateLast(); | |
| 248 return true; | |
| 249 case Bytecode::kBitwiseAnd: | |
| 250 TransformToBinaryOpWithSmiOnRhs(Bytecode::kBitwiseAndSmi, &last_, | |
| 251 current); | |
| 252 InvalidateLast(); | |
| 253 return true; | |
| 254 case Bytecode::kShiftLeft: | |
| 255 TransformToBinaryOpWithSmiOnRhs(Bytecode::kShiftLeftSmi, &last_, | |
| 256 current); | |
| 257 InvalidateLast(); | |
| 258 return true; | |
| 259 case Bytecode::kShiftRight: | |
| 260 TransformToBinaryOpWithSmiOnRhs(Bytecode::kShiftRightSmi, &last_, | |
| 261 current); | |
| 262 InvalidateLast(); | |
| 263 return true; | |
| 264 default: | |
| 265 break; | |
| 266 } | |
| 267 } | |
| 268 | 166 |
| 269 return false; | 167 next_stage()->Write(last()); |
| 168 SetLast(node); |
| 270 } | 169 } |
| 271 | 170 |
| 272 bool BytecodePeepholeOptimizer::RemoveToBooleanFromJump( | 171 void BytecodePeepholeOptimizer::UpdateLastAction( |
| 273 BytecodeNode* const current) { | 172 BytecodeNode* const node, const PeepholeActionAndData* action_data) { |
| 274 bool can_remove = Bytecodes::IsJumpIfToBoolean(current->bytecode()) && | 173 DCHECK(!LastIsValid()); |
| 275 Bytecodes::WritesBooleanToAccumulator(last_.bytecode()); | 174 DCHECK(!Bytecodes::IsJump(node->bytecode())); |
| 276 if (can_remove) { | 175 |
| 277 // Conditional jumps with boolean conditions are emitted in | 176 SetLast(node); |
| 278 // ToBoolean form by the bytecode array builder, | |
| 279 // i.e. JumpIfToBooleanTrue rather JumpIfTrue. The ToBoolean | |
| 280 // element can be removed if the previous bytecode put a boolean | |
| 281 // value in the accumulator. | |
| 282 Bytecode jump = Bytecodes::GetJumpWithoutToBoolean(current->bytecode()); | |
| 283 current->set_bytecode(jump, current->operand(0)); | |
| 284 } | |
| 285 return can_remove; | |
| 286 } | 177 } |
| 287 | 178 |
| 288 bool BytecodePeepholeOptimizer::RemoveToBooleanFromLogicalNot( | 179 void BytecodePeepholeOptimizer::ElideCurrentAction( |
| 289 BytecodeNode* const current) { | 180 BytecodeNode* const node, const PeepholeActionAndData* action_data) { |
| 290 bool can_remove = current->bytecode() == Bytecode::kToBooleanLogicalNot && | 181 DCHECK(LastIsValid()); |
| 291 Bytecodes::WritesBooleanToAccumulator(last_.bytecode()); | 182 DCHECK(!Bytecodes::IsJump(node->bytecode())); |
| 292 if (can_remove) { | |
| 293 // Logical-nots are emitted in ToBoolean form by the bytecode array | |
| 294 // builder, The ToBoolean element can be removed if the previous bytecode | |
| 295 // put a boolean value in the accumulator. | |
| 296 current->set_bytecode(Bytecode::kLogicalNot); | |
| 297 } | |
| 298 return can_remove; | |
| 299 } | |
| 300 | 183 |
| 301 bool BytecodePeepholeOptimizer::TransformCurrentBytecode( | 184 if (node->source_info().is_valid()) { |
| 302 BytecodeNode* const current) { | 185 // Preserve the source information by replacing the node bytecode |
| 303 return RemoveToBooleanFromJump(current) || | 186 // with a no op bytecode. |
| 304 RemoveToBooleanFromLogicalNot(current); | 187 node->set_bytecode(Bytecode::kNop); |
| 305 } | 188 DefaultAction(node); |
| 306 | |
| 307 bool BytecodePeepholeOptimizer::CanElideLast( | |
| 308 const BytecodeNode* const current) const { | |
| 309 if (last_.bytecode() == Bytecode::kNop) { | |
| 310 // Nop are placeholders for holding source position information. | |
| 311 return true; | |
| 312 } else if (Bytecodes::IsAccumulatorLoadWithoutEffects(current->bytecode()) && | |
| 313 Bytecodes::IsAccumulatorLoadWithoutEffects(last_.bytecode())) { | |
| 314 // The accumulator is invisible to the debugger. If there is a sequence of | |
| 315 // consecutive accumulator loads (that don't have side effects) then only | |
| 316 // the final load is potentially visible. | |
| 317 return true; | |
| 318 } else if (Bytecodes::GetAccumulatorUse(current->bytecode()) == | |
| 319 AccumulatorUse::kWrite && | |
| 320 Bytecodes::IsAccumulatorLoadWithoutEffects(last_.bytecode())) { | |
| 321 // The current instruction clobbers the accumulator without reading it. The | |
| 322 // load in the last instruction can be elided as it has no effect. | |
| 323 return true; | |
| 324 } else { | 189 } else { |
| 325 return false; | 190 // Nothing to do, keep last and wait for next bytecode to pair with it. |
| 326 } | 191 } |
| 327 } | 192 } |
| 328 | 193 |
| 329 BytecodeNode* BytecodePeepholeOptimizer::Optimize(BytecodeNode* current) { | 194 void BytecodePeepholeOptimizer::ElideCurrentIfOperand0MatchesAction( |
| 330 TryToRemoveLastExpressionPosition(current); | 195 BytecodeNode* const node, const PeepholeActionAndData* action_data) { |
| 331 if (TransformCurrentBytecode(current) || | 196 DCHECK(LastIsValid()); |
| 332 TransformLastAndCurrentBytecodes(current)) { | 197 DCHECK(!Bytecodes::IsJump(node->bytecode())); |
| 333 return current; | 198 |
| 199 if (last()->operand(0) == node->operand(0)) { |
| 200 ElideCurrentAction(node); |
| 201 } else { |
| 202 DefaultAction(node); |
| 334 } | 203 } |
| 335 | |
| 336 if (CanElideCurrent(current)) { | |
| 337 if (current->source_info().is_valid()) { | |
| 338 // Preserve the source information by replacing the current bytecode | |
| 339 // with a no op bytecode. | |
| 340 current->set_bytecode(Bytecode::kNop); | |
| 341 } else { | |
| 342 current = nullptr; | |
| 343 } | |
| 344 return current; | |
| 345 } | |
| 346 | |
| 347 if (CanElideLast(current) && CanElideLastBasedOnSourcePosition(current)) { | |
| 348 if (last_.source_info().is_valid()) { | |
| 349 // Current can not be valid per CanElideLastBasedOnSourcePosition(). | |
| 350 current->source_info().Clone(last_.source_info()); | |
| 351 } | |
| 352 InvalidateLast(); | |
| 353 return current; | |
| 354 } | |
| 355 | |
| 356 return current; | |
| 357 } | 204 } |
| 358 | 205 |
| 359 BytecodeNode* BytecodePeepholeOptimizer::OptimizeAndEmitLast( | 206 void BytecodePeepholeOptimizer::ElideCurrentIfLoadingNameConstantAction( |
| 360 BytecodeNode* current) { | 207 BytecodeNode* const node, const PeepholeActionAndData* action_data) { |
| 361 // Attempt optimization if there is an earlier node to optimize with. | 208 DCHECK_EQ(last()->bytecode(), Bytecode::kLdaConstant); |
| 362 if (LastIsValid()) { | 209 DCHECK(!Bytecodes::IsJump(node->bytecode())); |
| 363 current = Optimize(current); | 210 |
| 364 // Only output the last node if it wasn't invalidated by the optimization. | 211 if (GetConstantForIndexOperand(last(), 0)->IsName()) { |
| 365 if (LastIsValid()) { | 212 ElideCurrentAction(node); |
| 366 next_stage_->Write(&last_); | 213 } else { |
| 367 InvalidateLast(); | 214 DefaultAction(node); |
| 215 } |
| 216 } |
| 217 |
| 218 void BytecodePeepholeOptimizer::ElideLastAction( |
| 219 BytecodeNode* const node, const PeepholeActionAndData* action_data) { |
| 220 DCHECK(LastIsValid()); |
| 221 DCHECK(!Bytecodes::IsJump(node->bytecode())); |
| 222 |
| 223 if (CanElideLastBasedOnSourcePosition(node)) { |
| 224 if (last()->source_info().is_valid()) { |
| 225 // Node can not be valid per CanElideLastBasedOnSourcePosition(). |
| 226 node->source_info().Clone(last()->source_info()); |
| 368 } | 227 } |
| 228 SetLast(node); |
| 229 } else { |
| 230 DefaultAction(node); |
| 369 } | 231 } |
| 370 return current; | 232 } |
| 233 |
| 234 void BytecodePeepholeOptimizer::ChangeBytecodeAction( |
| 235 BytecodeNode* const node, const PeepholeActionAndData* action_data) { |
| 236 DCHECK(LastIsValid()); |
| 237 DCHECK(!Bytecodes::IsJump(node->bytecode())); |
| 238 |
| 239 node->set_bytecode(action_data->bytecode); |
| 240 DefaultAction(node); |
| 241 } |
| 242 |
| 243 void BytecodePeepholeOptimizer::TransformLdaStarToLdrLdarAction( |
| 244 BytecodeNode* const node, const PeepholeActionAndData* action_data) { |
| 245 DCHECK(LastIsValid()); |
| 246 DCHECK(!Bytecodes::IsJump(node->bytecode())); |
| 247 |
| 248 if (!node->source_info().is_statement()) { |
| 249 TransformLdaStarToLdrLdar(action_data->bytecode, last(), node); |
| 250 } |
| 251 DefaultAction(node); |
| 252 } |
| 253 |
| 254 void BytecodePeepholeOptimizer::TransformLdaSmiBinaryOpToBinaryOpWithSmiAction( |
| 255 BytecodeNode* const node, const PeepholeActionAndData* action_data) { |
| 256 DCHECK(LastIsValid()); |
| 257 DCHECK(!Bytecodes::IsJump(node->bytecode())); |
| 258 |
| 259 if (!node->source_info().is_valid() || !last()->source_info().is_valid()) { |
| 260 // Fused last and current into current. |
| 261 TransformLdaSmiBinaryOpToBinaryOpWithSmi(action_data->bytecode, last(), |
| 262 node); |
| 263 // Discard last. |
| 264 InvalidateLast(); |
| 265 // Immediately emit current node as nothing fuses with AddSmi, SubSmi, etc. |
| 266 next_stage()->Write(node); |
| 267 } else { |
| 268 DefaultAction(node); |
| 269 } |
| 270 } |
| 271 |
| 272 void BytecodePeepholeOptimizer:: |
| 273 TransformLdaZeroBinaryOpToBinaryOpWithZeroAction( |
| 274 BytecodeNode* const node, const PeepholeActionAndData* action_data) { |
| 275 DCHECK(LastIsValid()); |
| 276 DCHECK(!Bytecodes::IsJump(node->bytecode())); |
| 277 if (!node->source_info().is_valid() || !last()->source_info().is_valid()) { |
| 278 // Fused last and current into current. |
| 279 TransformLdaZeroBinaryOpToBinaryOpWithZero(action_data->bytecode, last(), |
| 280 node); |
| 281 // Discard last. |
| 282 InvalidateLast(); |
| 283 // Immediately emit current node as nothing fuses with AddSmi, SubSmi, etc. |
| 284 next_stage()->Write(node); |
| 285 } else { |
| 286 DefaultAction(node); |
| 287 } |
| 288 } |
| 289 |
| 290 void BytecodePeepholeOptimizer::DefaultJumpAction( |
| 291 BytecodeNode* const node, const PeepholeActionAndData* action_data) { |
| 292 DCHECK(LastIsValid()); |
| 293 DCHECK(Bytecodes::IsJump(node->bytecode())); |
| 294 |
| 295 next_stage()->Write(last()); |
| 296 InvalidateLast(); |
| 297 } |
| 298 |
| 299 void BytecodePeepholeOptimizer::UpdateLastJumpAction( |
| 300 BytecodeNode* const node, const PeepholeActionAndData* action_data) { |
| 301 DCHECK(!LastIsValid()); |
| 302 DCHECK(Bytecodes::IsJump(node->bytecode())); |
| 303 } |
| 304 |
| 305 void BytecodePeepholeOptimizer::ChangeJumpBytecodeAction( |
| 306 BytecodeNode* const node, const PeepholeActionAndData* action_data) { |
| 307 DCHECK(LastIsValid()); |
| 308 DCHECK(Bytecodes::IsJump(node->bytecode())); |
| 309 |
| 310 next_stage()->Write(last()); |
| 311 InvalidateLast(); |
| 312 node->set_bytecode(action_data->bytecode, node->operand(0)); |
| 313 } |
| 314 |
| 315 void BytecodePeepholeOptimizer::ElideLastBeforeJumpAction( |
| 316 BytecodeNode* const node, const PeepholeActionAndData* action_data) { |
| 317 DCHECK(LastIsValid()); |
| 318 DCHECK(Bytecodes::IsJump(node->bytecode())); |
| 319 |
| 320 if (!node->source_info().is_valid()) { |
| 321 node->source_info().Clone(last()->source_info()); |
| 322 } else { |
| 323 next_stage()->Write(last()); |
| 324 } |
| 325 InvalidateLast(); |
| 326 } |
| 327 |
| 328 void BytecodePeepholeOptimizer::Optimize(BytecodeNode* const node) { |
| 329 // A single table is used for looking up peephole optimization |
| 330 // matches as it is observed to have better performance. This is |
| 331 // in spite of the fact that jump bytecodes and non-jump bytecodes |
| 332 // have different processing logic, in particular a jump bytecode |
| 333 // always needs to emit the jump via WriteJump(). |
| 334 // |
| 335 // The PeepholeActionTable class is emitted by mkpeephole.cc. |
| 336 const PeepholeActionAndData* const action_data = |
| 337 PeepholeActionTable::Lookup(last()->bytecode(), node->bytecode()); |
| 338 switch (action_data->action) { |
| 339 #define CASE(Action) \ |
| 340 case PeepholeAction::k##Action: \ |
| 341 Action(node, action_data); \ |
| 342 break; |
| 343 PEEPHOLE_ACTION_LIST(CASE) |
| 344 #undef CASE |
| 345 default: |
| 346 UNREACHABLE(); |
| 347 break; |
| 348 } |
| 371 } | 349 } |
| 372 | 350 |
| 373 } // namespace interpreter | 351 } // namespace interpreter |
| 374 } // namespace internal | 352 } // namespace internal |
| 375 } // namespace v8 | 353 } // namespace v8 |
| OLD | NEW |