OLD | NEW |
1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/interpreter/bytecode-peephole-optimizer.h" | 5 #include "src/interpreter/bytecode-peephole-optimizer.h" |
6 | 6 |
7 #include "src/interpreter/constant-array-builder.h" | 7 #include "src/interpreter/constant-array-builder.h" |
8 #include "src/objects-inl.h" | 8 #include "src/objects-inl.h" |
9 #include "src/objects.h" | 9 #include "src/objects.h" |
10 | 10 |
11 namespace v8 { | 11 namespace v8 { |
12 namespace internal { | 12 namespace internal { |
13 namespace interpreter { | 13 namespace interpreter { |
14 | 14 |
15 BytecodePeepholeOptimizer::BytecodePeepholeOptimizer( | 15 BytecodePeepholeOptimizer::BytecodePeepholeOptimizer( |
16 ConstantArrayBuilder* constant_array_builder, | 16 ConstantArrayBuilder* constant_array_builder, |
17 BytecodePipelineStage* next_stage) | 17 BytecodePipelineStage* next_stage) |
18 : constant_array_builder_(constant_array_builder), | 18 : constant_array_builder_(constant_array_builder), next_stage_(next_stage) { |
19 next_stage_(next_stage), | |
20 last_is_discardable_(false) { | |
21 InvalidateLast(); | 19 InvalidateLast(); |
22 } | 20 } |
23 | 21 |
| 22 // override |
| 23 Handle<BytecodeArray> BytecodePeepholeOptimizer::ToBytecodeArray( |
| 24 int fixed_register_count, int parameter_count, |
| 25 Handle<FixedArray> handler_table) { |
| 26 Flush(); |
| 27 return next_stage_->ToBytecodeArray(fixed_register_count, parameter_count, |
| 28 handler_table); |
| 29 } |
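Note: the methods in this block form the stage's output path. ToBytecodeArray(), WriteJump(), and the BindLabel() overloads each flush or forward the one-node lookback buffer |last_| before delegating downstream, so no buffered bytecode can be dropped. A hedged wiring sketch, assuming some concrete downstream stage |writer| whose construction is elided (it is not part of this CL):

    // |writer| is any BytecodePipelineStage that actually emits bytecode.
    BytecodePeepholeOptimizer peephole(&constant_array_builder, &writer);
    // Nodes stream in through Write()/WriteJump(); materialization first
    // flushes the buffered |last_| node, then delegates downstream:
    Handle<BytecodeArray> array = peephole.ToBytecodeArray(
        fixed_register_count, parameter_count, handler_table);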
| 30 |
| 31 // override |
| 32 void BytecodePeepholeOptimizer::Write(BytecodeNode* node) { |
| 33 node = OptimizeAndEmitLast(node); |
| 34 if (node != nullptr) { |
| 35 SetLast(node); |
| 36 } |
| 37 } |
| 38 |
| 39 // override |
| 40 void BytecodePeepholeOptimizer::WriteJump(BytecodeNode* node, |
| 41 BytecodeLabel* label) { |
| 42 node = OptimizeAndEmitLast(node); |
| 43 next_stage_->WriteJump(node, label); |
| 44 } |
| 45 |
| 46 // override |
| 47 void BytecodePeepholeOptimizer::BindLabel(BytecodeLabel* label) { |
| 48 Flush(); |
| 49 next_stage_->BindLabel(label); |
| 50 } |
| 51 |
| 52 // override |
| 53 void BytecodePeepholeOptimizer::BindLabel(const BytecodeLabel& target, |
| 54 BytecodeLabel* label) { |
| 55 // There is no need to flush here; it will have been flushed when |target| |
| 56 // was bound. |
| 57 next_stage_->BindLabel(target, label); |
| 58 } |
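The asymmetry between the two BindLabel() overloads is deliberate: binding a fresh label introduces a jump target, so the peephole window must not extend across it and |last_| is flushed; binding an alias to an already-bound |target| introduces no new position. A hedged usage sketch (variable names are illustrative):

    BytecodeLabel target, alias;
    peephole.BindLabel(&target);         // block boundary: flushes |last_|
    peephole.BindLabel(target, &alias);  // same offset; already flushed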
| 59 |
| 60 void BytecodePeepholeOptimizer::Flush() { |
| 61 // TODO(oth/rmcilroy): We could check CanElideLast() here to potentially |
| 62 // eliminate last rather than writing it. |
| 63 if (LastIsValid()) { |
| 64 next_stage_->Write(&last_); |
| 65 InvalidateLast(); |
| 66 } |
| 67 } |
| 68 |
24 void BytecodePeepholeOptimizer::InvalidateLast() { | 69 void BytecodePeepholeOptimizer::InvalidateLast() { |
25 last_.set_bytecode(Bytecode::kIllegal); | 70 last_.set_bytecode(Bytecode::kIllegal); |
26 } | 71 } |
27 | 72 |
28 bool BytecodePeepholeOptimizer::LastIsValid() const { | 73 bool BytecodePeepholeOptimizer::LastIsValid() const { |
29 return last_.bytecode() != Bytecode::kIllegal; | 74 return last_.bytecode() != Bytecode::kIllegal; |
30 } | 75 } |
31 | 76 |
32 void BytecodePeepholeOptimizer::SetLast(const BytecodeNode* const node) { | 77 void BytecodePeepholeOptimizer::SetLast(const BytecodeNode* const node) { |
33 last_.Clone(node); | 78 last_.Clone(node); |
34 last_is_discardable_ = true; | |
35 } | |
36 | |
37 // override | |
38 size_t BytecodePeepholeOptimizer::FlushForOffset() { | |
39 size_t buffered_size = next_stage_->FlushForOffset(); | |
40 if (LastIsValid()) { | |
41 if (last_.bytecode() == Bytecode::kNop && | |
42 !last_.source_info().is_statement()) { | |
43 // The Nop can be dropped as it doesn't have a statement | |
44 // position for the debugger and doesn't have any effects by | |
45 // definition. | |
46 InvalidateLast(); | |
47 } else { | |
48 buffered_size += last_.Size(); | |
49 last_is_discardable_ = false; | |
50 } | |
51 } | |
52 return buffered_size; | |
53 } | |
54 | |
55 // override | |
56 void BytecodePeepholeOptimizer::FlushBasicBlock() { | |
57 if (LastIsValid()) { | |
58 next_stage_->Write(&last_); | |
59 InvalidateLast(); | |
60 } | |
61 next_stage_->FlushBasicBlock(); | |
62 } | |
63 | |
64 // override | |
65 void BytecodePeepholeOptimizer::Write(BytecodeNode* node) { | |
66 // Attempt optimization if there is an earlier node to optimize with. | |
67 if (LastIsValid()) { | |
68 node = Optimize(node); | |
69 // Only output the last node if it wasn't invalidated by the optimization. | |
70 if (LastIsValid()) { | |
71 next_stage_->Write(&last_); | |
72 InvalidateLast(); | |
73 } | |
74 } | |
75 | |
76 if (node != nullptr) { | |
77 SetLast(node); | |
78 } | |
79 } | 79 } |
80 | 80 |
81 Handle<Object> BytecodePeepholeOptimizer::GetConstantForIndexOperand( | 81 Handle<Object> BytecodePeepholeOptimizer::GetConstantForIndexOperand( |
82 const BytecodeNode* const node, int index) const { | 82 const BytecodeNode* const node, int index) const { |
83 DCHECK_LT(index, node->operand_count()); | 83 DCHECK_LT(index, node->operand_count()); |
84 DCHECK_EQ(Bytecodes::GetOperandType(node->bytecode(), 0), OperandType::kIdx); | 84 DCHECK_EQ(Bytecodes::GetOperandType(node->bytecode(), 0), OperandType::kIdx); |
85 uint32_t index_operand = node->operand(0); | 85 uint32_t index_operand = node->operand(0); |
86 return constant_array_builder_->At(index_operand); | 86 return constant_array_builder_->At(index_operand); |
87 } | 87 } |
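This helper lets the pattern rules peek at the actual constant behind an index operand of a buffered node. A hedged illustration; the bytecode and slot number here are invented for the example:

    // If |last_| is, say, "LdaConstant [7]", operand 0 is the pool slot:
    Handle<Object> constant = GetConstantForIndexOperand(&last_, 0);
    // |constant| is whatever ConstantArrayBuilder::At(7) currently holds.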
88 | 88 |
(...skipping 164 matching lines...) |
253 } | 253 } |
254 | 254 |
255 bool BytecodePeepholeOptimizer::TransformLastAndCurrentBytecodes( | 255 bool BytecodePeepholeOptimizer::TransformLastAndCurrentBytecodes( |
256 BytecodeNode* const current) { | 256 BytecodeNode* const current) { |
257 return RemoveToBooleanFromJump(current) || | 257 return RemoveToBooleanFromJump(current) || |
258 RemoveToBooleanFromLogicalNot(current) || ChangeLdaToLdr(current); | 258 RemoveToBooleanFromLogicalNot(current) || ChangeLdaToLdr(current); |
259 } | 259 } |
260 | 260 |
261 bool BytecodePeepholeOptimizer::CanElideLast( | 261 bool BytecodePeepholeOptimizer::CanElideLast( |
262 const BytecodeNode* const current) const { | 262 const BytecodeNode* const current) const { |
263 if (!last_is_discardable_) { | |
264 return false; | |
265 } | |
266 | |
267 if (last_.bytecode() == Bytecode::kNop) { | 263 if (last_.bytecode() == Bytecode::kNop) { |
268 // Nops are placeholders for holding source position information. | 264 // Nops are placeholders for holding source position information. |
269 return true; | 265 return true; |
270 } else if (Bytecodes::IsAccumulatorLoadWithoutEffects(current->bytecode()) && | 266 } else if (Bytecodes::IsAccumulatorLoadWithoutEffects(current->bytecode()) && |
271 Bytecodes::IsAccumulatorLoadWithoutEffects(last_.bytecode())) { | 267 Bytecodes::IsAccumulatorLoadWithoutEffects(last_.bytecode())) { |
272 // The accumulator is invisible to the debugger. If there is a sequence of | 268 // The accumulator is invisible to the debugger. If there is a sequence of |
273 // consecutive accumulator loads (that don't have side effects) then only | 269 // consecutive accumulator loads (that don't have side effects) then only |
274 // the final load is potentially visible. | 270 // the final load is potentially visible. |
275 return true; | 271 return true; |
276 } else if (Bytecodes::GetAccumulatorUse(current->bytecode()) == | 272 } else if (Bytecodes::GetAccumulatorUse(current->bytecode()) == |
(...skipping 27 matching lines...) |
304 | 300 |
305 if (CanElideLast(current) && CanElideLastBasedOnSourcePosition(current)) { | 301 if (CanElideLast(current) && CanElideLastBasedOnSourcePosition(current)) { |
306 current->source_info().Update(last_.source_info()); | 302 current->source_info().Update(last_.source_info()); |
307 InvalidateLast(); | 303 InvalidateLast(); |
308 return current; | 304 return current; |
309 } | 305 } |
310 | 306 |
311 return current; | 307 return current; |
312 } | 308 } |
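A hedged before/after illustration of the accumulator-load elision handled above (mnemonics as used by the interpreter; the operand value is invented):

    // Before peephole:                 // After peephole:
    //   LdaZero        <- |last_|      //   LdaSmi [42]
    //   LdaSmi [42]    <- |current|
    // Both loads are side-effect-free and only the accumulator observes
    // them, so |last_| is dropped; its source position, if any, migrates
    // onto |current| via source_info().Update() before InvalidateLast().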
313 | 309 |
| 310 BytecodeNode* BytecodePeepholeOptimizer::OptimizeAndEmitLast( |
| 311 BytecodeNode* current) { |
| 312 // Attempt optimization if there is an earlier node to optimize with. |
| 313 if (LastIsValid()) { |
| 314 current = Optimize(current); |
| 315 // Only output the last node if it wasn't invalidated by the optimization. |
| 316 if (LastIsValid()) { |
| 317 next_stage_->Write(&last_); |
| 318 InvalidateLast(); |
| 319 } |
| 320 } |
| 321 return current; |
| 322 } |
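Both Write() and WriteJump() funnel through this helper, so the lookback window is maintained uniformly; the difference is that jumps are forwarded downstream immediately while plain nodes are re-buffered via SetLast(). A minimal call-sequence sketch with invented node names:

    peephole.Write(&a);                 // no |last_| yet: a is buffered
    peephole.Write(&b);                 // Optimize(b) against a; a emitted unless elided
    peephole.WriteJump(&jump, &label);  // pending node handled, jump emitted now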
| 323 |
314 } // namespace interpreter | 324 } // namespace interpreter |
315 } // namespace internal | 325 } // namespace internal |
316 } // namespace v8 | 326 } // namespace v8 |