| OLD | NEW |
| 1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/compiler/instruction-scheduler.h" | 5 #include "src/compiler/instruction-scheduler.h" |
| 6 | 6 |
| 7 namespace v8 { | 7 namespace v8 { |
| 8 namespace internal { | 8 namespace internal { |
| 9 namespace compiler { | 9 namespace compiler { |
| 10 | 10 |
| (...skipping 18 matching lines...) |
| 29 case kX64Or: | 29 case kX64Or: |
| 30 case kX64Or32: | 30 case kX64Or32: |
| 31 case kX64Xor: | 31 case kX64Xor: |
| 32 case kX64Xor32: | 32 case kX64Xor32: |
| 33 case kX64Sub: | 33 case kX64Sub: |
| 34 case kX64Sub32: | 34 case kX64Sub32: |
| 35 case kX64Imul: | 35 case kX64Imul: |
| 36 case kX64Imul32: | 36 case kX64Imul32: |
| 37 case kX64ImulHigh32: | 37 case kX64ImulHigh32: |
| 38 case kX64UmulHigh32: | 38 case kX64UmulHigh32: |
| 39 case kX64Idiv: | |
| 40 case kX64Idiv32: | |
| 41 case kX64Udiv: | |
| 42 case kX64Udiv32: | |
| 43 case kX64Not: | 39 case kX64Not: |
| 44 case kX64Not32: | 40 case kX64Not32: |
| 45 case kX64Neg: | 41 case kX64Neg: |
| 46 case kX64Neg32: | 42 case kX64Neg32: |
| 47 case kX64Shl: | 43 case kX64Shl: |
| 48 case kX64Shl32: | 44 case kX64Shl32: |
| 49 case kX64Shr: | 45 case kX64Shr: |
| 50 case kX64Shr32: | 46 case kX64Shr32: |
| 51 case kX64Sar: | 47 case kX64Sar: |
| 52 case kX64Sar32: | 48 case kX64Sar32: |
| (...skipping 73 matching lines...) |
| 126 case kX64Lea32: | 122 case kX64Lea32: |
| 127 case kX64Lea: | 123 case kX64Lea: |
| 128 case kX64Dec32: | 124 case kX64Dec32: |
| 129 case kX64Inc32: | 125 case kX64Inc32: |
| 130 case kX64Int32x4Create: | 126 case kX64Int32x4Create: |
| 131 case kX64Int32x4ExtractLane: | 127 case kX64Int32x4ExtractLane: |
| 132 return (instr->addressing_mode() == kMode_None) | 128 return (instr->addressing_mode() == kMode_None) |
| 133 ? kNoOpcodeFlags | 129 ? kNoOpcodeFlags |
| 134 : kIsLoadOperation | kHasSideEffect; | 130 : kIsLoadOperation | kHasSideEffect; |
| 135 | 131 |
| | 132 case kX64Idiv: |
| | 133 case kX64Idiv32: |
| | 134 case kX64Udiv: |
| | 135 case kX64Udiv32: |
| | 136 return (instr->addressing_mode() == kMode_None) |
| | 137 ? kMayNeedDeoptCheck |
| | 138 : kMayNeedDeoptCheck | kIsLoadOperation | kHasSideEffect; |
| | 139 |
| 136 case kX64Movsxbl: | 140 case kX64Movsxbl: |
| 137 case kX64Movzxbl: | 141 case kX64Movzxbl: |
| 138 case kX64Movsxbq: | 142 case kX64Movsxbq: |
| 139 case kX64Movzxbq: | 143 case kX64Movzxbq: |
| 140 case kX64Movsxwl: | 144 case kX64Movsxwl: |
| 141 case kX64Movzxwl: | 145 case kX64Movzxwl: |
| 142 case kX64Movsxwq: | 146 case kX64Movsxwq: |
| 143 case kX64Movzxwq: | 147 case kX64Movzxwq: |
| 144 case kX64Movsxlq: | 148 case kX64Movsxlq: |
| 145 DCHECK(instr->InputCount() >= 1); | 149 DCHECK(instr->InputCount() >= 1); |
| (...skipping 44 matching lines...) |
| 190 | 194 |
| 191 | 195 |
| 192 int InstructionScheduler::GetInstructionLatency(const Instruction* instr) { | 196 int InstructionScheduler::GetInstructionLatency(const Instruction* instr) { |
| 193 // TODO(all): Add instruction cost modeling. | 197 // TODO(all): Add instruction cost modeling. |
| 194 return 1; | 198 return 1; |
| 195 } | 199 } |
| 196 | 200 |
| 197 } // namespace compiler | 201 } // namespace compiler |
| 198 } // namespace internal | 202 } // namespace internal |
| 199 } // namespace v8 | 203 } // namespace v8 |
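The added kX64Idiv, kX64Idiv32, kX64Udiv and kX64Udiv32 cases above move the x64 integer divisions out of the plain-arithmetic group and tag them with the new kMayNeedDeoptCheck flag: these instructions can fault (division by zero, and INT_MIN / -1 for the signed forms), so they may sit behind an eager deoptimization check that the scheduler must not reorder them across. For context, below is a minimal sketch of the kind of flag bitset these return statements produce; kNoOpcodeFlags, kHasSideEffect, kIsLoadOperation and kMayNeedDeoptCheck are taken from the diff itself, while the enum name, the kIsBlockTerminator entry and the concrete bit values are illustrative assumptions rather than a copy of instruction-scheduler.h.

// Illustrative sketch only, not the actual V8 declaration: a scheduler
// opcode-flag bitset of the shape this diff relies on. Flag names that appear
// in the diff are real; the enum name, kIsBlockTerminator and the exact bit
// values are assumptions.
enum ArchOpcodeFlags {
  kNoOpcodeFlags = 0,
  kIsBlockTerminator = 1 << 0,  // Ends a basic block (jumps, returns).
  kHasSideEffect = 1 << 1,      // Stores, calls and other visible effects.
  kIsLoadOperation = 1 << 2,    // Reads from memory.
  kMayNeedDeoptCheck = 1 << 3   // May sit behind an eager deopt check (e.g. a
                                // zero-divisor check before idiv/udiv), so the
                                // scheduler must keep it after that check.
};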