| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/base/bits.h" | 5 #include "src/base/bits.h" |
| 6 #include "src/compiler/instruction-selector-impl.h" | 6 #include "src/compiler/instruction-selector-impl.h" |
| 7 #include "src/compiler/node-matchers.h" | 7 #include "src/compiler/node-matchers.h" |
| 8 | 8 |
| 9 namespace v8 { | 9 namespace v8 { |
| 10 namespace internal { | 10 namespace internal { |
| 11 namespace compiler { | 11 namespace compiler { |
| 12 | 12 |
| 13 #define TRACE_UNIMPL() \ | 13 #define TRACE_UNIMPL() \ |
| 14 PrintF("UNIMPLEMENTED instr_sel: %s at line %d\n", __FUNCTION__, __LINE__) | 14 PrintF("UNIMPLEMENTED instr_sel: %s at line %d\n", __FUNCTION__, __LINE__) |
| 15 | 15 |
| 16 #define TRACE() PrintF("instr_sel: %s at line %d\n", __FUNCTION__, __LINE__) | 16 #define TRACE() PrintF("instr_sel: %s at line %d\n", __FUNCTION__, __LINE__) |
| 17 | 17 |
| 18 | 18 |
| 19 // Adds Mips-specific methods for generating InstructionOperands. | 19 // Adds Mips-specific methods for generating InstructionOperands. |
| 20 class MipsOperandGenerator FINAL : public OperandGenerator { | 20 class Mips64OperandGenerator FINAL : public OperandGenerator { |
| 21 public: | 21 public: |
| 22 explicit MipsOperandGenerator(InstructionSelector* selector) | 22 explicit Mips64OperandGenerator(InstructionSelector* selector) |
| 23 : OperandGenerator(selector) {} | 23 : OperandGenerator(selector) {} |
| 24 | 24 |
| 25 InstructionOperand* UseOperand(Node* node, InstructionCode opcode) { | 25 InstructionOperand* UseOperand(Node* node, InstructionCode opcode) { |
| 26 if (CanBeImmediate(node, opcode)) { | 26 if (CanBeImmediate(node, opcode)) { |
| 27 return UseImmediate(node); | 27 return UseImmediate(node); |
| 28 } | 28 } |
| 29 return UseRegister(node); | 29 return UseRegister(node); |
| 30 } | 30 } |
| 31 | 31 |
| 32 bool CanBeImmediate(Node* node, InstructionCode opcode) { | 32 bool CanBeImmediate(Node* node, InstructionCode opcode) { |
| 33 Int32Matcher m(node); | 33 int64_t value; |
| 34 if (!m.HasValue()) return false; | 34 if (node->opcode() == IrOpcode::kInt32Constant) |
| 35 int32_t value = m.Value(); | 35 value = OpParameter<int32_t>(node); |
| 36 else if (node->opcode() == IrOpcode::kInt64Constant) |
| 37 value = OpParameter<int64_t>(node); |
| 38 else |
| 39 return false; |
| 36 switch (ArchOpcodeField::decode(opcode)) { | 40 switch (ArchOpcodeField::decode(opcode)) { |
| 37 case kMipsShl: | 41 case kMips64Shl: |
| 38 case kMipsSar: | 42 case kMips64Sar: |
| 39 case kMipsShr: | 43 case kMips64Shr: |
| 40 return is_uint5(value); | 44 return is_uint5(value); |
| 41 case kMipsXor: | 45 case kMips64Dshl: |
| 46 case kMips64Dsar: |
| 47 case kMips64Dshr: |
| 48 return is_uint6(value); |
| 49 case kMips64Xor: |
| 42 return is_uint16(value); | 50 return is_uint16(value); |
| 43 case kMipsLdc1: | 51 case kMips64Ldc1: |
| 44 case kMipsSdc1: | 52 case kMips64Sdc1: |
| 45 return is_int16(value + kIntSize); | 53 return is_int16(value + kIntSize); |
| 46 default: | 54 default: |
| 47 return is_int16(value); | 55 return is_int16(value); |
| 48 } | 56 } |
| 49 } | 57 } |
| 50 | 58 |
| 59 |
| 60 bool CanBeImmediate(Node* node, InstructionCode opcode, |
| 61 FlagsContinuation* cont) { |
| 62 int64_t value; |
| 63 if (node->opcode() == IrOpcode::kInt32Constant) |
| 64 value = OpParameter<int32_t>(node); |
| 65 else if (node->opcode() == IrOpcode::kInt64Constant) |
| 66 value = OpParameter<int64_t>(node); |
| 67 else |
| 68 return false; |
| 69 switch (ArchOpcodeField::decode(opcode)) { |
| 70 case kMips64Cmp32: |
| 71 switch (cont->condition()) { |
| 72 case kUnsignedLessThan: |
| 73 case kUnsignedGreaterThanOrEqual: |
| 74 case kUnsignedLessThanOrEqual: |
| 75 case kUnsignedGreaterThan: |
| 76 // Immediate operands for unsigned 32-bit compare operations |
| 77 // should not be sign-extended. |
| 78 return is_uint15(value); |
| 79 default: |
| 80 return false; |
| 81 } |
| 82 default: |
| 83 return is_int16(value); |
| 84 } |
| 85 } |
| 86 |
| 87 |
| 51 private: | 88 private: |
| 52 bool ImmediateFitsAddrMode1Instruction(int32_t imm) const { | 89 bool ImmediateFitsAddrMode1Instruction(int32_t imm) const { |
| 53 TRACE_UNIMPL(); | 90 TRACE_UNIMPL(); |
| 54 return false; | 91 return false; |
| 55 } | 92 } |
| 56 }; | 93 }; |
| 57 | 94 |
| 58 | 95 |
| 96 static void VisitRR(InstructionSelector* selector, ArchOpcode opcode, |
| 97 Node* node) { |
| 98 Mips64OperandGenerator g(selector); |
| 99 selector->Emit(opcode, g.DefineAsRegister(node), |
| 100 g.UseRegister(node->InputAt(0))); |
| 101 } |
| 102 |
| 103 |
| 59 static void VisitRRR(InstructionSelector* selector, ArchOpcode opcode, | 104 static void VisitRRR(InstructionSelector* selector, ArchOpcode opcode, |
| 60 Node* node) { | 105 Node* node) { |
| 61 MipsOperandGenerator g(selector); | 106 Mips64OperandGenerator g(selector); |
| 62 selector->Emit(opcode, g.DefineAsRegister(node), | 107 selector->Emit(opcode, g.DefineAsRegister(node), |
| 63 g.UseRegister(node->InputAt(0)), | 108 g.UseRegister(node->InputAt(0)), |
| 64 g.UseRegister(node->InputAt(1))); | 109 g.UseRegister(node->InputAt(1))); |
| 65 } | 110 } |
| 66 | 111 |
| 67 | 112 |
| 68 static void VisitRRO(InstructionSelector* selector, ArchOpcode opcode, | 113 static void VisitRRO(InstructionSelector* selector, ArchOpcode opcode, |
| 69 Node* node) { | 114 Node* node) { |
| 70 MipsOperandGenerator g(selector); | 115 Mips64OperandGenerator g(selector); |
| 71 selector->Emit(opcode, g.DefineAsRegister(node), | 116 selector->Emit(opcode, g.DefineAsRegister(node), |
| 72 g.UseRegister(node->InputAt(0)), | 117 g.UseRegister(node->InputAt(0)), |
| 73 g.UseOperand(node->InputAt(1), opcode)); | 118 g.UseOperand(node->InputAt(1), opcode)); |
| 74 } | 119 } |
| 75 | 120 |
| 76 | 121 |
| 77 static void VisitBinop(InstructionSelector* selector, Node* node, | 122 static void VisitBinop(InstructionSelector* selector, Node* node, |
| 78 InstructionCode opcode, FlagsContinuation* cont) { | 123 InstructionCode opcode, FlagsContinuation* cont) { |
| 79 MipsOperandGenerator g(selector); | 124 Mips64OperandGenerator g(selector); |
| 80 Int32BinopMatcher m(node); | 125 Int32BinopMatcher m(node); |
| 81 InstructionOperand* inputs[4]; | 126 InstructionOperand* inputs[4]; |
| 82 size_t input_count = 0; | 127 size_t input_count = 0; |
| 83 InstructionOperand* outputs[2]; | 128 InstructionOperand* outputs[2]; |
| 84 size_t output_count = 0; | 129 size_t output_count = 0; |
| 85 | 130 |
| 86 inputs[input_count++] = g.UseRegister(m.left().node()); | 131 inputs[input_count++] = g.UseRegister(m.left().node()); |
| 87 inputs[input_count++] = g.UseOperand(m.right().node(), opcode); | 132 inputs[input_count++] = g.UseOperand(m.right().node(), opcode); |
| 88 | 133 |
| 89 if (cont->IsBranch()) { | 134 if (cont->IsBranch()) { |
| (...skipping 20 matching lines...) |
| 110 static void VisitBinop(InstructionSelector* selector, Node* node, | 155 static void VisitBinop(InstructionSelector* selector, Node* node, |
| 111 InstructionCode opcode) { | 156 InstructionCode opcode) { |
| 112 FlagsContinuation cont; | 157 FlagsContinuation cont; |
| 113 VisitBinop(selector, node, opcode, &cont); | 158 VisitBinop(selector, node, opcode, &cont); |
| 114 } | 159 } |
| 115 | 160 |
| 116 | 161 |
| 117 void InstructionSelector::VisitLoad(Node* node) { | 162 void InstructionSelector::VisitLoad(Node* node) { |
| 118 MachineType rep = RepresentationOf(OpParameter<LoadRepresentation>(node)); | 163 MachineType rep = RepresentationOf(OpParameter<LoadRepresentation>(node)); |
| 119 MachineType typ = TypeOf(OpParameter<LoadRepresentation>(node)); | 164 MachineType typ = TypeOf(OpParameter<LoadRepresentation>(node)); |
| 120 MipsOperandGenerator g(this); | 165 Mips64OperandGenerator g(this); |
| 121 Node* base = node->InputAt(0); | 166 Node* base = node->InputAt(0); |
| 122 Node* index = node->InputAt(1); | 167 Node* index = node->InputAt(1); |
| 123 | 168 |
| 124 ArchOpcode opcode; | 169 ArchOpcode opcode; |
| 125 switch (rep) { | 170 switch (rep) { |
| 126 case kRepFloat32: | 171 case kRepFloat32: |
| 127 opcode = kMipsLwc1; | 172 opcode = kMips64Lwc1; |
| 128 break; | 173 break; |
| 129 case kRepFloat64: | 174 case kRepFloat64: |
| 130 opcode = kMipsLdc1; | 175 opcode = kMips64Ldc1; |
| 131 break; | 176 break; |
| 132 case kRepBit: // Fall through. | 177 case kRepBit: // Fall through. |
| 133 case kRepWord8: | 178 case kRepWord8: |
| 134 opcode = typ == kTypeUint32 ? kMipsLbu : kMipsLb; | 179 opcode = typ == kTypeUint32 ? kMips64Lbu : kMips64Lb; |
| 135 break; | 180 break; |
| 136 case kRepWord16: | 181 case kRepWord16: |
| 137 opcode = typ == kTypeUint32 ? kMipsLhu : kMipsLh; | 182 opcode = typ == kTypeUint32 ? kMips64Lhu : kMips64Lh; |
| 183 break; |
| 184 case kRepWord32: |
| 185 opcode = kMips64Lw; |
| 138 break; | 186 break; |
| 139 case kRepTagged: // Fall through. | 187 case kRepTagged: // Fall through. |
| 140 case kRepWord32: | 188 case kRepWord64: |
| 141 opcode = kMipsLw; | 189 opcode = kMips64Ld; |
| 142 break; | 190 break; |
| 143 default: | 191 default: |
| 144 UNREACHABLE(); | 192 UNREACHABLE(); |
| 145 return; | 193 return; |
| 146 } | 194 } |
| 147 | 195 |
| 148 if (g.CanBeImmediate(index, opcode)) { | 196 if (g.CanBeImmediate(index, opcode)) { |
| 149 Emit(opcode | AddressingModeField::encode(kMode_MRI), | 197 Emit(opcode | AddressingModeField::encode(kMode_MRI), |
| 150 g.DefineAsRegister(node), g.UseRegister(base), g.UseImmediate(index)); | 198 g.DefineAsRegister(node), g.UseRegister(base), g.UseImmediate(index)); |
| 151 } else { | 199 } else { |
| 152 InstructionOperand* addr_reg = g.TempRegister(); | 200 InstructionOperand* addr_reg = g.TempRegister(); |
| 153 Emit(kMipsAdd | AddressingModeField::encode(kMode_None), addr_reg, | 201 Emit(kMips64Dadd | AddressingModeField::encode(kMode_None), addr_reg, |
| 154 g.UseRegister(index), g.UseRegister(base)); | 202 g.UseRegister(index), g.UseRegister(base)); |
| 155 // Emit desired load opcode, using temp addr_reg. | 203 // Emit desired load opcode, using temp addr_reg. |
| 156 Emit(opcode | AddressingModeField::encode(kMode_MRI), | 204 Emit(opcode | AddressingModeField::encode(kMode_MRI), |
| 157 g.DefineAsRegister(node), addr_reg, g.TempImmediate(0)); | 205 g.DefineAsRegister(node), addr_reg, g.TempImmediate(0)); |
| 158 } | 206 } |
| 159 } | 207 } |
| 160 | 208 |
| 161 | 209 |
| 162 void InstructionSelector::VisitStore(Node* node) { | 210 void InstructionSelector::VisitStore(Node* node) { |
| 163 MipsOperandGenerator g(this); | 211 Mips64OperandGenerator g(this); |
| 164 Node* base = node->InputAt(0); | 212 Node* base = node->InputAt(0); |
| 165 Node* index = node->InputAt(1); | 213 Node* index = node->InputAt(1); |
| 166 Node* value = node->InputAt(2); | 214 Node* value = node->InputAt(2); |
| 167 | 215 |
| 168 StoreRepresentation store_rep = OpParameter<StoreRepresentation>(node); | 216 StoreRepresentation store_rep = OpParameter<StoreRepresentation>(node); |
| 169 MachineType rep = RepresentationOf(store_rep.machine_type()); | 217 MachineType rep = RepresentationOf(store_rep.machine_type()); |
| 170 if (store_rep.write_barrier_kind() == kFullWriteBarrier) { | 218 if (store_rep.write_barrier_kind() == kFullWriteBarrier) { |
| 171 DCHECK(rep == kRepTagged); | 219 DCHECK(rep == kRepTagged); |
| 172 // TODO(dcarney): refactor RecordWrite function to take temp registers | 220 // TODO(dcarney): refactor RecordWrite function to take temp registers |
| 173 // and pass them here instead of using fixed regs | 221 // and pass them here instead of using fixed regs |
| 174 // TODO(dcarney): handle immediate indices. | 222 // TODO(dcarney): handle immediate indices. |
| 175 InstructionOperand* temps[] = {g.TempRegister(t1), g.TempRegister(t2)}; | 223 InstructionOperand* temps[] = {g.TempRegister(t1), g.TempRegister(t2)}; |
| 176 Emit(kMipsStoreWriteBarrier, NULL, g.UseFixed(base, t0), | 224 Emit(kMips64StoreWriteBarrier, NULL, g.UseFixed(base, t0), |
| 177 g.UseFixed(index, t1), g.UseFixed(value, t2), arraysize(temps), temps); | 225 g.UseFixed(index, t1), g.UseFixed(value, t2), arraysize(temps), temps); |
| 178 return; | 226 return; |
| 179 } | 227 } |
| 180 DCHECK_EQ(kNoWriteBarrier, store_rep.write_barrier_kind()); | 228 DCHECK_EQ(kNoWriteBarrier, store_rep.write_barrier_kind()); |
| 181 | 229 |
| 182 ArchOpcode opcode; | 230 ArchOpcode opcode; |
| 183 switch (rep) { | 231 switch (rep) { |
| 184 case kRepFloat32: | 232 case kRepFloat32: |
| 185 opcode = kMipsSwc1; | 233 opcode = kMips64Swc1; |
| 186 break; | 234 break; |
| 187 case kRepFloat64: | 235 case kRepFloat64: |
| 188 opcode = kMipsSdc1; | 236 opcode = kMips64Sdc1; |
| 189 break; | 237 break; |
| 190 case kRepBit: // Fall through. | 238 case kRepBit: // Fall through. |
| 191 case kRepWord8: | 239 case kRepWord8: |
| 192 opcode = kMipsSb; | 240 opcode = kMips64Sb; |
| 193 break; | 241 break; |
| 194 case kRepWord16: | 242 case kRepWord16: |
| 195 opcode = kMipsSh; | 243 opcode = kMips64Sh; |
| 244 break; |
| 245 case kRepWord32: |
| 246 opcode = kMips64Sw; |
| 196 break; | 247 break; |
| 197 case kRepTagged: // Fall through. | 248 case kRepTagged: // Fall through. |
| 198 case kRepWord32: | 249 case kRepWord64: |
| 199 opcode = kMipsSw; | 250 opcode = kMips64Sd; |
| 200 break; | 251 break; |
| 201 default: | 252 default: |
| 202 UNREACHABLE(); | 253 UNREACHABLE(); |
| 203 return; | 254 return; |
| 204 } | 255 } |
| 205 | 256 |
| 206 if (g.CanBeImmediate(index, opcode)) { | 257 if (g.CanBeImmediate(index, opcode)) { |
| 207 Emit(opcode | AddressingModeField::encode(kMode_MRI), NULL, | 258 Emit(opcode | AddressingModeField::encode(kMode_MRI), NULL, |
| 208 g.UseRegister(base), g.UseImmediate(index), g.UseRegister(value)); | 259 g.UseRegister(base), g.UseImmediate(index), g.UseRegister(value)); |
| 209 } else { | 260 } else { |
| 210 InstructionOperand* addr_reg = g.TempRegister(); | 261 InstructionOperand* addr_reg = g.TempRegister(); |
| 211 Emit(kMipsAdd | AddressingModeField::encode(kMode_None), addr_reg, | 262 Emit(kMips64Dadd | AddressingModeField::encode(kMode_None), addr_reg, |
| 212 g.UseRegister(index), g.UseRegister(base)); | 263 g.UseRegister(index), g.UseRegister(base)); |
| 213 // Emit desired store opcode, using temp addr_reg. | 264 // Emit desired store opcode, using temp addr_reg. |
| 214 Emit(opcode | AddressingModeField::encode(kMode_MRI), NULL, addr_reg, | 265 Emit(opcode | AddressingModeField::encode(kMode_MRI), NULL, addr_reg, |
| 215 g.TempImmediate(0), g.UseRegister(value)); | 266 g.TempImmediate(0), g.UseRegister(value)); |
| 216 } | 267 } |
| 217 } | 268 } |
| 218 | 269 |
| 219 | 270 |
| 220 void InstructionSelector::VisitWord32And(Node* node) { | 271 void InstructionSelector::VisitWord32And(Node* node) { |
| 221 VisitBinop(this, node, kMipsAnd); | 272 VisitBinop(this, node, kMips64And); |
| 273 } |
| 274 |
| 275 |
| 276 void InstructionSelector::VisitWord64And(Node* node) { |
| 277 VisitBinop(this, node, kMips64And); |
| 222 } | 278 } |
| 223 | 279 |
| 224 | 280 |
| 225 void InstructionSelector::VisitWord32Or(Node* node) { | 281 void InstructionSelector::VisitWord32Or(Node* node) { |
| 226 VisitBinop(this, node, kMipsOr); | 282 VisitBinop(this, node, kMips64Or); |
| 283 } |
| 284 |
| 285 |
| 286 void InstructionSelector::VisitWord64Or(Node* node) { |
| 287 VisitBinop(this, node, kMips64Or); |
| 227 } | 288 } |
| 228 | 289 |
| 229 | 290 |
| 230 void InstructionSelector::VisitWord32Xor(Node* node) { | 291 void InstructionSelector::VisitWord32Xor(Node* node) { |
| 231 VisitBinop(this, node, kMipsXor); | 292 VisitBinop(this, node, kMips64Xor); |
| 293 } |
| 294 |
| 295 |
| 296 void InstructionSelector::VisitWord64Xor(Node* node) { |
| 297 VisitBinop(this, node, kMips64Xor); |
| 232 } | 298 } |
| 233 | 299 |
| 234 | 300 |
| 235 void InstructionSelector::VisitWord32Shl(Node* node) { | 301 void InstructionSelector::VisitWord32Shl(Node* node) { |
| 236 VisitRRO(this, kMipsShl, node); | 302 VisitRRO(this, kMips64Shl, node); |
| 237 } | 303 } |
| 238 | 304 |
| 239 | 305 |
| 240 void InstructionSelector::VisitWord32Shr(Node* node) { | 306 void InstructionSelector::VisitWord32Shr(Node* node) { |
| 241 VisitRRO(this, kMipsShr, node); | 307 VisitRRO(this, kMips64Shr, node); |
| 242 } | 308 } |
| 243 | 309 |
| 244 | 310 |
| 245 void InstructionSelector::VisitWord32Sar(Node* node) { | 311 void InstructionSelector::VisitWord32Sar(Node* node) { |
| 246 VisitRRO(this, kMipsSar, node); | 312 VisitRRO(this, kMips64Sar, node); |
| 313 } |
| 314 |
| 315 |
| 316 void InstructionSelector::VisitWord64Shl(Node* node) { |
| 317 VisitRRO(this, kMips64Dshl, node); |
| 318 } |
| 319 |
| 320 |
| 321 void InstructionSelector::VisitWord64Shr(Node* node) { |
| 322 VisitRRO(this, kMips64Dshr, node); |
| 323 } |
| 324 |
| 325 |
| 326 void InstructionSelector::VisitWord64Sar(Node* node) { |
| 327 VisitRRO(this, kMips64Dsar, node); |
| 247 } | 328 } |
| 248 | 329 |
| 249 | 330 |
| 250 void InstructionSelector::VisitWord32Ror(Node* node) { | 331 void InstructionSelector::VisitWord32Ror(Node* node) { |
| 251 VisitRRO(this, kMipsRor, node); | 332 VisitRRO(this, kMips64Ror, node); |
| 333 } |
| 334 |
| 335 |
| 336 void InstructionSelector::VisitWord64Ror(Node* node) { |
| 337 VisitRRO(this, kMips64Dror, node); |
| 252 } | 338 } |
| 253 | 339 |
| 254 | 340 |
| 255 void InstructionSelector::VisitInt32Add(Node* node) { | 341 void InstructionSelector::VisitInt32Add(Node* node) { |
| 256 MipsOperandGenerator g(this); | 342 Mips64OperandGenerator g(this); |
| 257 | |
| 258 // TODO(plind): Consider multiply & add optimization from arm port. | 343 // TODO(plind): Consider multiply & add optimization from arm port. |
| 259 VisitBinop(this, node, kMipsAdd); | 344 VisitBinop(this, node, kMips64Add); |
| 345 } |
| 346 |
| 347 |
| 348 void InstructionSelector::VisitInt64Add(Node* node) { |
| 349 Mips64OperandGenerator g(this); |
| 350 // TODO(plind): Consider multiply & add optimization from arm port. |
| 351 VisitBinop(this, node, kMips64Dadd); |
| 260 } | 352 } |
| 261 | 353 |
| 262 | 354 |
| 263 void InstructionSelector::VisitInt32Sub(Node* node) { | 355 void InstructionSelector::VisitInt32Sub(Node* node) { |
| 264 VisitBinop(this, node, kMipsSub); | 356 VisitBinop(this, node, kMips64Sub); |
| 357 } |
| 358 |
| 359 |
| 360 void InstructionSelector::VisitInt64Sub(Node* node) { |
| 361 VisitBinop(this, node, kMips64Dsub); |
| 265 } | 362 } |
| 266 | 363 |
| 267 | 364 |
| 268 void InstructionSelector::VisitInt32Mul(Node* node) { | 365 void InstructionSelector::VisitInt32Mul(Node* node) { |
| 269 MipsOperandGenerator g(this); | 366 Mips64OperandGenerator g(this); |
| 270 Int32BinopMatcher m(node); | 367 Int32BinopMatcher m(node); |
| 271 if (m.right().HasValue() && m.right().Value() > 0) { | 368 if (m.right().HasValue() && m.right().Value() > 0) { |
| 272 int32_t value = m.right().Value(); | 369 int32_t value = m.right().Value(); |
| 273 if (base::bits::IsPowerOfTwo32(value)) { | 370 if (base::bits::IsPowerOfTwo32(value)) { |
| 274 Emit(kMipsShl | AddressingModeField::encode(kMode_None), | 371 Emit(kMips64Shl | AddressingModeField::encode(kMode_None), |
| 275 g.DefineAsRegister(node), g.UseRegister(m.left().node()), | 372 g.DefineAsRegister(node), g.UseRegister(m.left().node()), |
| 276 g.TempImmediate(WhichPowerOf2(value))); | 373 g.TempImmediate(WhichPowerOf2(value))); |
| 277 return; | 374 return; |
| 278 } | 375 } |
| 279 if (base::bits::IsPowerOfTwo32(value - 1)) { | 376 if (base::bits::IsPowerOfTwo32(value - 1)) { |
| 280 InstructionOperand* temp = g.TempRegister(); | 377 InstructionOperand* temp = g.TempRegister(); |
| 281 Emit(kMipsShl | AddressingModeField::encode(kMode_None), temp, | 378 Emit(kMips64Shl | AddressingModeField::encode(kMode_None), temp, |
| 282 g.UseRegister(m.left().node()), | 379 g.UseRegister(m.left().node()), |
| 283 g.TempImmediate(WhichPowerOf2(value - 1))); | 380 g.TempImmediate(WhichPowerOf2(value - 1))); |
| 284 Emit(kMipsAdd | AddressingModeField::encode(kMode_None), | 381 Emit(kMips64Add | AddressingModeField::encode(kMode_None), |
| 285 g.DefineAsRegister(node), g.UseRegister(m.left().node()), temp); | 382 g.DefineAsRegister(node), g.UseRegister(m.left().node()), temp); |
| 286 return; | 383 return; |
| 287 } | 384 } |
| 288 if (base::bits::IsPowerOfTwo32(value + 1)) { | 385 if (base::bits::IsPowerOfTwo32(value + 1)) { |
| 289 InstructionOperand* temp = g.TempRegister(); | 386 InstructionOperand* temp = g.TempRegister(); |
| 290 Emit(kMipsShl | AddressingModeField::encode(kMode_None), temp, | 387 Emit(kMips64Shl | AddressingModeField::encode(kMode_None), temp, |
| 291 g.UseRegister(m.left().node()), | 388 g.UseRegister(m.left().node()), |
| 292 g.TempImmediate(WhichPowerOf2(value + 1))); | 389 g.TempImmediate(WhichPowerOf2(value + 1))); |
| 293 Emit(kMipsSub | AddressingModeField::encode(kMode_None), | 390 Emit(kMips64Sub | AddressingModeField::encode(kMode_None), |
| 294 g.DefineAsRegister(node), temp, g.UseRegister(m.left().node())); | 391 g.DefineAsRegister(node), temp, g.UseRegister(m.left().node())); |
| 295 return; | 392 return; |
| 296 } | 393 } |
| 297 } | 394 } |
| 298 Emit(kMipsMul, g.DefineAsRegister(node), g.UseRegister(m.left().node()), | 395 Emit(kMips64Mul, g.DefineAsRegister(node), g.UseRegister(m.left().node()), |
| 299 g.UseRegister(m.right().node())); | 396 g.UseRegister(m.right().node())); |
| 300 } | 397 } |
| 301 | 398 |
| 302 | 399 |
| 303 void InstructionSelector::VisitInt32MulHigh(Node* node) { | 400 void InstructionSelector::VisitInt32MulHigh(Node* node) { |
| 304 MipsOperandGenerator g(this); | 401 Mips64OperandGenerator g(this); |
| 305 Emit(kMipsMulHigh, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)), | 402 Emit(kMips64MulHigh, g.DefineAsRegister(node), |
| 403 g.UseRegister(node->InputAt(0)), g.UseRegister(node->InputAt(1))); |
| 404 } |
| 405 |
| 406 |
| 407 void InstructionSelector::VisitUint32MulHigh(Node* node) { |
| 408 Mips64OperandGenerator g(this); |
| 409 InstructionOperand* const dmul_operand = g.TempRegister(); |
| 410 Emit(kMips64MulHighU, dmul_operand, g.UseRegister(node->InputAt(0)), |
| 306 g.UseRegister(node->InputAt(1))); | 411 g.UseRegister(node->InputAt(1))); |
| 307 } | 412 Emit(kMips64Ext, g.DefineAsRegister(node), dmul_operand, g.TempImmediate(0), |
| 308 | 413 g.TempImmediate(32)); |
| 309 | 414 } |
| 310 void InstructionSelector::VisitUint32MulHigh(Node* node) { | 415 |
| 311 MipsOperandGenerator g(this); | 416 |
| 312 Emit(kMipsMulHighU, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)), | 417 void InstructionSelector::VisitInt64Mul(Node* node) { |
| 313 g.UseRegister(node->InputAt(1))); | 418 Mips64OperandGenerator g(this); |
| 419 Int64BinopMatcher m(node); |
| 420 // TODO(dusmil): Add optimization for shifts larger than 32. |
| 421 if (m.right().HasValue() && m.right().Value() > 0) { |
| 422 int64_t value = m.right().Value(); |
| 423 if (base::bits::IsPowerOfTwo32(value)) { |
| 424 Emit(kMips64Dshl | AddressingModeField::encode(kMode_None), |
| 425 g.DefineAsRegister(node), g.UseRegister(m.left().node()), |
| 426 g.TempImmediate(WhichPowerOf2(value))); |
| 427 return; |
| 428 } |
| 429 if (base::bits::IsPowerOfTwo32(value - 1)) { |
| 430 InstructionOperand* temp = g.TempRegister(); |
| 431 Emit(kMips64Dshl | AddressingModeField::encode(kMode_None), temp, |
| 432 g.UseRegister(m.left().node()), |
| 433 g.TempImmediate(WhichPowerOf2(value - 1))); |
| 434 Emit(kMips64Dadd | AddressingModeField::encode(kMode_None), |
| 435 g.DefineAsRegister(node), g.UseRegister(m.left().node()), temp); |
| 436 return; |
| 437 } |
| 438 if (base::bits::IsPowerOfTwo32(value + 1)) { |
| 439 InstructionOperand* temp = g.TempRegister(); |
| 440 Emit(kMips64Dshl | AddressingModeField::encode(kMode_None), temp, |
| 441 g.UseRegister(m.left().node()), |
| 442 g.TempImmediate(WhichPowerOf2(value + 1))); |
| 443 Emit(kMips64Dsub | AddressingModeField::encode(kMode_None), |
| 444 g.DefineAsRegister(node), temp, g.UseRegister(m.left().node())); |
| 445 return; |
| 446 } |
| 447 } |
| 448 Emit(kMips64Dmul, g.DefineAsRegister(node), g.UseRegister(m.left().node()), |
| 449 g.UseRegister(m.right().node())); |
| 314 } | 450 } |
| 315 | 451 |
| 316 | 452 |
| 317 void InstructionSelector::VisitInt32Div(Node* node) { | 453 void InstructionSelector::VisitInt32Div(Node* node) { |
| 318 MipsOperandGenerator g(this); | 454 Mips64OperandGenerator g(this); |
| 319 Int32BinopMatcher m(node); | 455 Int32BinopMatcher m(node); |
| 320 Emit(kMipsDiv, g.DefineAsRegister(node), g.UseRegister(m.left().node()), | 456 Emit(kMips64Div, g.DefineAsRegister(node), g.UseRegister(m.left().node()), |
| 321 g.UseRegister(m.right().node())); | 457 g.UseRegister(m.right().node())); |
| 322 } | 458 } |
| 323 | 459 |
| 324 | 460 |
| 325 void InstructionSelector::VisitUint32Div(Node* node) { | 461 void InstructionSelector::VisitUint32Div(Node* node) { |
| 326 MipsOperandGenerator g(this); | 462 Mips64OperandGenerator g(this); |
| 327 Int32BinopMatcher m(node); | 463 Int32BinopMatcher m(node); |
| 328 Emit(kMipsDivU, g.DefineAsRegister(node), g.UseRegister(m.left().node()), | 464 Emit(kMips64DivU, g.DefineAsRegister(node), g.UseRegister(m.left().node()), |
| 329 g.UseRegister(m.right().node())); | 465 g.UseRegister(m.right().node())); |
| 330 } | 466 } |
| 331 | 467 |
| 332 | 468 |
| 333 void InstructionSelector::VisitInt32Mod(Node* node) { | 469 void InstructionSelector::VisitInt32Mod(Node* node) { |
| 334 MipsOperandGenerator g(this); | 470 Mips64OperandGenerator g(this); |
| 335 Int32BinopMatcher m(node); | 471 Int32BinopMatcher m(node); |
| 336 Emit(kMipsMod, g.DefineAsRegister(node), g.UseRegister(m.left().node()), | 472 Emit(kMips64Mod, g.DefineAsRegister(node), g.UseRegister(m.left().node()), |
| 337 g.UseRegister(m.right().node())); | 473 g.UseRegister(m.right().node())); |
| 338 } | 474 } |
| 339 | 475 |
| 340 | 476 |
| 341 void InstructionSelector::VisitUint32Mod(Node* node) { | 477 void InstructionSelector::VisitUint32Mod(Node* node) { |
| 342 MipsOperandGenerator g(this); | 478 Mips64OperandGenerator g(this); |
| 343 Int32BinopMatcher m(node); | 479 Int32BinopMatcher m(node); |
| 344 Emit(kMipsModU, g.DefineAsRegister(node), g.UseRegister(m.left().node()), | 480 Emit(kMips64ModU, g.DefineAsRegister(node), g.UseRegister(m.left().node()), |
| 481 g.UseRegister(m.right().node())); |
| 482 } |
| 483 |
| 484 |
| 485 void InstructionSelector::VisitInt64Div(Node* node) { |
| 486 Mips64OperandGenerator g(this); |
| 487 Int64BinopMatcher m(node); |
| 488 Emit(kMips64Ddiv, g.DefineAsRegister(node), g.UseRegister(m.left().node()), |
| 489 g.UseRegister(m.right().node())); |
| 490 } |
| 491 |
| 492 |
| 493 void InstructionSelector::VisitUint64Div(Node* node) { |
| 494 Mips64OperandGenerator g(this); |
| 495 Int64BinopMatcher m(node); |
| 496 Emit(kMips64DdivU, g.DefineAsRegister(node), g.UseRegister(m.left().node()), |
| 497 g.UseRegister(m.right().node())); |
| 498 } |
| 499 |
| 500 |
| 501 void InstructionSelector::VisitInt64Mod(Node* node) { |
| 502 Mips64OperandGenerator g(this); |
| 503 Int64BinopMatcher m(node); |
| 504 Emit(kMips64Dmod, g.DefineAsRegister(node), g.UseRegister(m.left().node()), |
| 505 g.UseRegister(m.right().node())); |
| 506 } |
| 507 |
| 508 |
| 509 void InstructionSelector::VisitUint64Mod(Node* node) { |
| 510 Mips64OperandGenerator g(this); |
| 511 Int64BinopMatcher m(node); |
| 512 Emit(kMips64DmodU, g.DefineAsRegister(node), g.UseRegister(m.left().node()), |
| 345 g.UseRegister(m.right().node())); | 513 g.UseRegister(m.right().node())); |
| 346 } | 514 } |
| 347 | 515 |
| 348 | 516 |
| 349 void InstructionSelector::VisitChangeFloat32ToFloat64(Node* node) { | 517 void InstructionSelector::VisitChangeFloat32ToFloat64(Node* node) { |
| 350 MipsOperandGenerator g(this); | 518 Mips64OperandGenerator g(this); |
| 351 Emit(kMipsCvtDS, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0))); | 519 Emit(kMips64CvtDS, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0))); |
| 352 } | 520 } |
| 353 | 521 |
| 354 | 522 |
| 355 void InstructionSelector::VisitChangeInt32ToFloat64(Node* node) { | 523 void InstructionSelector::VisitChangeInt32ToFloat64(Node* node) { |
| 356 MipsOperandGenerator g(this); | 524 Mips64OperandGenerator g(this); |
| 357 Emit(kMipsCvtDW, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0))); | 525 Emit(kMips64CvtDW, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0))); |
| 358 } | 526 } |
| 359 | 527 |
| 360 | 528 |
| 361 void InstructionSelector::VisitChangeUint32ToFloat64(Node* node) { | 529 void InstructionSelector::VisitChangeUint32ToFloat64(Node* node) { |
| 362 MipsOperandGenerator g(this); | 530 Mips64OperandGenerator g(this); |
| 363 Emit(kMipsCvtDUw, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0))); | 531 Emit(kMips64CvtDUw, g.DefineAsRegister(node), |
| 532 g.UseRegister(node->InputAt(0))); |
| 364 } | 533 } |
| 365 | 534 |
| 366 | 535 |
| 367 void InstructionSelector::VisitChangeFloat64ToInt32(Node* node) { | 536 void InstructionSelector::VisitChangeFloat64ToInt32(Node* node) { |
| 368 MipsOperandGenerator g(this); | 537 Mips64OperandGenerator g(this); |
| 369 Emit(kMipsTruncWD, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0))); | 538 Emit(kMips64TruncWD, g.DefineAsRegister(node), |
| 539 g.UseRegister(node->InputAt(0))); |
| 370 } | 540 } |
| 371 | 541 |
| 372 | 542 |
| 373 void InstructionSelector::VisitChangeFloat64ToUint32(Node* node) { | 543 void InstructionSelector::VisitChangeFloat64ToUint32(Node* node) { |
| 374 MipsOperandGenerator g(this); | 544 Mips64OperandGenerator g(this); |
| 375 Emit(kMipsTruncUwD, g.DefineAsRegister(node), | 545 Emit(kMips64TruncUwD, g.DefineAsRegister(node), |
| 376 g.UseRegister(node->InputAt(0))); | 546 g.UseRegister(node->InputAt(0))); |
| 377 } | 547 } |
| 378 | 548 |
| 379 | 549 |
| 550 void InstructionSelector::VisitChangeInt32ToInt64(Node* node) { |
| 551 Mips64OperandGenerator g(this); |
| 552 Emit(kMips64Shl, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)), |
| 553 g.TempImmediate(0)); |
| 554 } |
| 555 |
| 556 |
| 557 void InstructionSelector::VisitChangeUint32ToUint64(Node* node) { |
| 558 Mips64OperandGenerator g(this); |
| 559 Emit(kMips64Dext, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)), |
| 560 g.TempImmediate(0), g.TempImmediate(32)); |
| 561 } |
| 562 |
| 563 |
| 564 void InstructionSelector::VisitTruncateInt64ToInt32(Node* node) { |
| 565 Mips64OperandGenerator g(this); |
| 566 Emit(kMips64Ext, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)), |
| 567 g.TempImmediate(0), g.TempImmediate(32)); |
| 568 } |
| 569 |
| 570 |
| 380 void InstructionSelector::VisitTruncateFloat64ToFloat32(Node* node) { | 571 void InstructionSelector::VisitTruncateFloat64ToFloat32(Node* node) { |
| 381 MipsOperandGenerator g(this); | 572 Mips64OperandGenerator g(this); |
| 382 Emit(kMipsCvtSD, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0))); | 573 Emit(kMips64CvtSD, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0))); |
| 383 } | 574 } |
| 384 | 575 |
| 385 | 576 |
| 386 void InstructionSelector::VisitFloat64Add(Node* node) { | 577 void InstructionSelector::VisitFloat64Add(Node* node) { |
| 387 VisitRRR(this, kMipsAddD, node); | 578 VisitRRR(this, kMips64AddD, node); |
| 388 } | 579 } |
| 389 | 580 |
| 390 | 581 |
| 391 void InstructionSelector::VisitFloat64Sub(Node* node) { | 582 void InstructionSelector::VisitFloat64Sub(Node* node) { |
| 392 VisitRRR(this, kMipsSubD, node); | 583 VisitRRR(this, kMips64SubD, node); |
| 393 } | 584 } |
| 394 | 585 |
| 395 | 586 |
| 396 void InstructionSelector::VisitFloat64Mul(Node* node) { | 587 void InstructionSelector::VisitFloat64Mul(Node* node) { |
| 397 VisitRRR(this, kMipsMulD, node); | 588 VisitRRR(this, kMips64MulD, node); |
| 398 } | 589 } |
| 399 | 590 |
| 400 | 591 |
| 401 void InstructionSelector::VisitFloat64Div(Node* node) { | 592 void InstructionSelector::VisitFloat64Div(Node* node) { |
| 402 VisitRRR(this, kMipsDivD, node); | 593 VisitRRR(this, kMips64DivD, node); |
| 403 } | 594 } |
| 404 | 595 |
| 405 | 596 |
| 406 void InstructionSelector::VisitFloat64Mod(Node* node) { | 597 void InstructionSelector::VisitFloat64Mod(Node* node) { |
| 407 MipsOperandGenerator g(this); | 598 Mips64OperandGenerator g(this); |
| 408 Emit(kMipsModD, g.DefineAsFixed(node, f0), g.UseFixed(node->InputAt(0), f12), | 599 Emit(kMips64ModD, g.DefineAsFixed(node, f0), |
| 600 g.UseFixed(node->InputAt(0), f12), |
| 409 g.UseFixed(node->InputAt(1), f14))->MarkAsCall(); | 601 g.UseFixed(node->InputAt(1), f14))->MarkAsCall(); |
| 410 } | 602 } |
| 411 | 603 |
| 412 | 604 |
| 413 void InstructionSelector::VisitFloat64Sqrt(Node* node) { | 605 void InstructionSelector::VisitFloat64Sqrt(Node* node) { |
| 414 MipsOperandGenerator g(this); | 606 Mips64OperandGenerator g(this); |
| 415 Emit(kMipsSqrtD, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0))); | 607 Emit(kMips64SqrtD, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0))); |
| 416 } | 608 } |
| 417 | 609 |
| 418 | 610 |
| 419 void InstructionSelector::VisitFloat64Floor(Node* node) { UNREACHABLE(); } | 611 void InstructionSelector::VisitFloat64Floor(Node* node) { |
| 420 | 612 VisitRR(this, kMips64FloorD, node); |
| 421 | 613 } |
| 422 void InstructionSelector::VisitFloat64Ceil(Node* node) { UNREACHABLE(); } | 614 |
| 615 |
| 616 void InstructionSelector::VisitFloat64Ceil(Node* node) { |
| 617 VisitRR(this, kMips64CeilD, node); |
| 618 } |
| 423 | 619 |
| 424 | 620 |
| 425 void InstructionSelector::VisitFloat64RoundTruncate(Node* node) { | 621 void InstructionSelector::VisitFloat64RoundTruncate(Node* node) { |
| 426 UNREACHABLE(); | 622 VisitRR(this, kMips64RoundTruncateD, node); |
| 427 } | 623 } |
| 428 | 624 |
| 429 | 625 |
| 430 void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) { | 626 void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) { |
| 431 UNREACHABLE(); | 627 UNREACHABLE(); |
| 432 } | 628 } |
| 433 | 629 |
| 434 | 630 |
| 435 void InstructionSelector::VisitCall(Node* node) { | 631 void InstructionSelector::VisitCall(Node* node) { |
| 436 MipsOperandGenerator g(this); | 632 Mips64OperandGenerator g(this); |
| 437 CallDescriptor* descriptor = OpParameter<CallDescriptor*>(node); | 633 CallDescriptor* descriptor = OpParameter<CallDescriptor*>(node); |
| 438 | 634 |
| 439 FrameStateDescriptor* frame_state_descriptor = NULL; | 635 FrameStateDescriptor* frame_state_descriptor = NULL; |
| 440 if (descriptor->NeedsFrameState()) { | 636 if (descriptor->NeedsFrameState()) { |
| 441 frame_state_descriptor = | 637 frame_state_descriptor = |
| 442 GetFrameStateDescriptor(node->InputAt(descriptor->InputCount())); | 638 GetFrameStateDescriptor(node->InputAt(descriptor->InputCount())); |
| 443 } | 639 } |
| 444 | 640 |
| 445 CallBuffer buffer(zone(), descriptor, frame_state_descriptor); | 641 CallBuffer buffer(zone(), descriptor, frame_state_descriptor); |
| 446 | 642 |
| 447 // Compute InstructionOperands for inputs and outputs. | 643 // Compute InstructionOperands for inputs and outputs. |
| 448 InitializeCallBuffer(node, &buffer, true, false); | 644 InitializeCallBuffer(node, &buffer, true, false); |
| 449 | 645 |
| 450 // TODO(dcarney): might be possible to use claim/poke instead | 646 // TODO(dcarney): might be possible to use claim/poke instead |
| 451 // Push any stack arguments. | 647 // Push any stack arguments. |
| 452 for (NodeVectorRIter input = buffer.pushed_nodes.rbegin(); | 648 for (NodeVectorRIter input = buffer.pushed_nodes.rbegin(); |
| 453 input != buffer.pushed_nodes.rend(); input++) { | 649 input != buffer.pushed_nodes.rend(); input++) { |
| 454 // TODO(plind): inefficient for MIPS, use MultiPush here. | 650 // TODO(plind): inefficient for MIPS, use MultiPush here. |
| 455 // - Also need to align the stack. See arm64. | 651 // - Also need to align the stack. See arm64. |
| 456 // - Maybe combine with arg slot stuff in DirectCEntry stub. | 652 // - Maybe combine with arg slot stuff in DirectCEntry stub. |
| 457 Emit(kMipsPush, NULL, g.UseRegister(*input)); | 653 Emit(kMips64Push, NULL, g.UseRegister(*input)); |
| 458 } | 654 } |
| 459 | 655 |
| 460 // Select the appropriate opcode based on the call type. | 656 // Select the appropriate opcode based on the call type. |
| 461 InstructionCode opcode; | 657 InstructionCode opcode; |
| 462 switch (descriptor->kind()) { | 658 switch (descriptor->kind()) { |
| 463 case CallDescriptor::kCallCodeObject: { | 659 case CallDescriptor::kCallCodeObject: { |
| 464 opcode = kArchCallCodeObject; | 660 opcode = kArchCallCodeObject; |
| 465 break; | 661 break; |
| 466 } | 662 } |
| 467 case CallDescriptor::kCallJSFunction: | 663 case CallDescriptor::kCallJSFunction: |
| 468 opcode = kArchCallJSFunction; | 664 opcode = kArchCallJSFunction; |
| 469 break; | 665 break; |
| 470 default: | 666 default: |
| 471 UNREACHABLE(); | 667 UNREACHABLE(); |
| 472 return; | 668 return; |
| 473 } | 669 } |
| 474 opcode |= MiscField::encode(descriptor->flags()); | 670 opcode |= MiscField::encode(descriptor->flags()); |
| 475 | 671 |
| 476 // Emit the call instruction. | 672 // Emit the call instruction. |
| 477 InstructionOperand** first_output = | |
| 478 buffer.outputs.size() > 0 ? &buffer.outputs.front() : NULL; | |
| 479 Instruction* call_instr = | 673 Instruction* call_instr = |
| 480 Emit(opcode, buffer.outputs.size(), first_output, | 674 Emit(opcode, buffer.outputs.size(), &buffer.outputs.front(), |
| 481 buffer.instruction_args.size(), &buffer.instruction_args.front()); | 675 buffer.instruction_args.size(), &buffer.instruction_args.front()); |
| 676 |
| 482 call_instr->MarkAsCall(); | 677 call_instr->MarkAsCall(); |
| 483 } | 678 } |
| 484 | 679 |
| 485 | 680 |
| 486 namespace { | 681 namespace { |
| 487 | 682 |
| 488 // Shared routine for multiple compare operations. | 683 // Shared routine for multiple compare operations. |
| 489 static void VisitCompare(InstructionSelector* selector, InstructionCode opcode, | 684 static void VisitCompare(InstructionSelector* selector, InstructionCode opcode, |
| 490 InstructionOperand* left, InstructionOperand* right, | 685 InstructionOperand* left, InstructionOperand* right, |
| 491 FlagsContinuation* cont) { | 686 FlagsContinuation* cont) { |
| 492 MipsOperandGenerator g(selector); | 687 Mips64OperandGenerator g(selector); |
| 493 opcode = cont->Encode(opcode); | 688 opcode = cont->Encode(opcode); |
| 494 if (cont->IsBranch()) { | 689 if (cont->IsBranch()) { |
| 495 selector->Emit(opcode, NULL, left, right, g.Label(cont->true_block()), | 690 selector->Emit(opcode, NULL, left, right, g.Label(cont->true_block()), |
| 496 g.Label(cont->false_block()))->MarkAsControl(); | 691 g.Label(cont->false_block()))->MarkAsControl(); |
| 497 } else { | 692 } else { |
| 498 DCHECK(cont->IsSet()); | 693 DCHECK(cont->IsSet()); |
| 499 // TODO(plind): Revisit and test this path. | |
| 500 selector->Emit(opcode, g.DefineAsRegister(cont->result()), left, right); | 694 selector->Emit(opcode, g.DefineAsRegister(cont->result()), left, right); |
| 501 } | 695 } |
| 502 } | 696 } |
| 503 | 697 |
| 504 | 698 |
| 505 // Shared routine for multiple float compare operations. | 699 // Shared routine for multiple float compare operations. |
| 506 void VisitFloat64Compare(InstructionSelector* selector, Node* node, | 700 void VisitFloat64Compare(InstructionSelector* selector, Node* node, |
| 507 FlagsContinuation* cont) { | 701 FlagsContinuation* cont) { |
| 508 MipsOperandGenerator g(selector); | 702 Mips64OperandGenerator g(selector); |
| 509 Node* left = node->InputAt(0); | 703 Node* left = node->InputAt(0); |
| 510 Node* right = node->InputAt(1); | 704 Node* right = node->InputAt(1); |
| 511 VisitCompare(selector, kMipsCmpD, g.UseRegister(left), g.UseRegister(right), | 705 VisitCompare(selector, kMips64CmpD, g.UseRegister(left), g.UseRegister(right), |
| 512 cont); | 706 cont); |
| 513 } | 707 } |
| 514 | 708 |
| 515 | 709 |
| 516 // Shared routine for multiple word compare operations. | 710 // Shared routine for multiple word compare operations. |
| 517 void VisitWordCompare(InstructionSelector* selector, Node* node, | 711 void VisitWordCompare(InstructionSelector* selector, Node* node, |
| 518 InstructionCode opcode, FlagsContinuation* cont, | 712 InstructionCode opcode, FlagsContinuation* cont, |
| 519 bool commutative) { | 713 bool commutative) { |
| 520 MipsOperandGenerator g(selector); | 714 Mips64OperandGenerator g(selector); |
| 521 Node* left = node->InputAt(0); | 715 Node* left = node->InputAt(0); |
| 522 Node* right = node->InputAt(1); | 716 Node* right = node->InputAt(1); |
| 523 | 717 |
| 524 // Match immediates on left or right side of comparison. | 718 // Match immediates on left or right side of comparison. |
| 525 if (g.CanBeImmediate(right, opcode)) { | 719 if (g.CanBeImmediate(right, opcode, cont)) { |
| 526 VisitCompare(selector, opcode, g.UseRegister(left), g.UseImmediate(right), | 720 VisitCompare(selector, opcode, g.UseRegister(left), g.UseImmediate(right), |
| 527 cont); | 721 cont); |
| 528 } else if (g.CanBeImmediate(left, opcode)) { | 722 } else if (g.CanBeImmediate(left, opcode, cont)) { |
| 529 if (!commutative) cont->Commute(); | 723 if (!commutative) cont->Commute(); |
| 530 VisitCompare(selector, opcode, g.UseRegister(right), g.UseImmediate(left), | 724 VisitCompare(selector, opcode, g.UseRegister(right), g.UseImmediate(left), |
| 531 cont); | 725 cont); |
| 532 } else { | 726 } else { |
| 533 VisitCompare(selector, opcode, g.UseRegister(left), g.UseRegister(right), | 727 VisitCompare(selector, opcode, g.UseRegister(left), g.UseRegister(right), |
| 534 cont); | 728 cont); |
| 535 } | 729 } |
| 536 } | 730 } |
| 537 | 731 |
| 538 | 732 |
| 539 void VisitWordCompare(InstructionSelector* selector, Node* node, | 733 void VisitWord32Compare(InstructionSelector* selector, Node* node, |
| 540 FlagsContinuation* cont) { | 734 FlagsContinuation* cont) { |
| 541 VisitWordCompare(selector, node, kMipsCmp, cont, false); | 735 VisitWordCompare(selector, node, kMips64Cmp32, cont, false); |
| 736 } |
| 737 |
| 738 |
| 739 void VisitWord64Compare(InstructionSelector* selector, Node* node, |
| 740 FlagsContinuation* cont) { |
| 741 VisitWordCompare(selector, node, kMips64Cmp, cont, false); |
| 542 } | 742 } |
| 543 | 743 |
| 544 } // namespace | 744 } // namespace |
| 545 | 745 |
| 546 | 746 |
| 747 void EmitWordCompareZero(InstructionSelector* selector, InstructionCode opcode, |
| 748 Node* value, FlagsContinuation* cont) { |
| 749 Mips64OperandGenerator g(selector); |
| 750 opcode = cont->Encode(opcode); |
| 751 InstructionOperand* const value_operand = g.UseRegister(value); |
| 752 if (cont->IsBranch()) { |
| 753 selector->Emit(opcode, nullptr, value_operand, g.TempImmediate(0), |
| 754 g.Label(cont->true_block()), |
| 755 g.Label(cont->false_block()))->MarkAsControl(); |
| 756 } else { |
| 757 selector->Emit(opcode, g.DefineAsRegister(cont->result()), value_operand, |
| 758 g.TempImmediate(0)); |
| 759 } |
| 760 } |
| 761 |
| 762 |
| 547 // Shared routine for word comparisons against zero. | 763 // Shared routine for word comparisons against zero. |
| 548 void VisitWordCompareZero(InstructionSelector* selector, Node* user, | 764 void VisitWordCompareZero(InstructionSelector* selector, Node* user, |
| 549 Node* value, FlagsContinuation* cont) { | 765 Node* value, FlagsContinuation* cont) { |
| 766 // Initially set comparison against 0 to be 64-bit variant for branches that |
| 767 // cannot combine. |
| 768 InstructionCode opcode = kMips64Cmp; |
| 550 while (selector->CanCover(user, value)) { | 769 while (selector->CanCover(user, value)) { |
| 770 if (user->opcode() == IrOpcode::kWord32Equal) { |
| 771 opcode = kMips64Cmp32; |
| 772 } |
| 551 switch (value->opcode()) { | 773 switch (value->opcode()) { |
| 552 case IrOpcode::kWord32Equal: { | 774 case IrOpcode::kWord32Equal: { |
| 553 // Combine with comparisons against 0 by simply inverting the | 775 // Combine with comparisons against 0 by simply inverting the |
| 554 // continuation. | 776 // continuation. |
| 555 Int32BinopMatcher m(value); | 777 Int32BinopMatcher m(value); |
| 556 if (m.right().Is(0)) { | 778 if (m.right().Is(0)) { |
| 557 user = value; | 779 user = value; |
| 558 value = m.left().node(); | 780 value = m.left().node(); |
| 559 cont->Negate(); | 781 cont->Negate(); |
| 782 opcode = kMips64Cmp32; |
| 783 continue; |
| 784 } |
| 785 cont->OverwriteAndNegateIfEqual(kEqual); |
| 786 return VisitWord32Compare(selector, value, cont); |
| 787 } |
| 788 case IrOpcode::kInt32LessThan: |
| 789 cont->OverwriteAndNegateIfEqual(kSignedLessThan); |
| 790 return VisitWord32Compare(selector, value, cont); |
| 791 case IrOpcode::kInt32LessThanOrEqual: |
| 792 cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual); |
| 793 return VisitWord32Compare(selector, value, cont); |
| 794 case IrOpcode::kUint32LessThan: |
| 795 cont->OverwriteAndNegateIfEqual(kUnsignedLessThan); |
| 796 return VisitWord32Compare(selector, value, cont); |
| 797 case IrOpcode::kUint32LessThanOrEqual: |
| 798 cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual); |
| 799 return VisitWord32Compare(selector, value, cont); |
| 800 case IrOpcode::kWord64Equal: { |
| 801 // Combine with comparisons against 0 by simply inverting the |
| 802 // continuation. |
| 803 Int64BinopMatcher m(value); |
| 804 if (m.right().Is(0)) { |
| 805 user = value; |
| 806 value = m.left().node(); |
| 807 cont->Negate(); |
| 560 continue; | 808 continue; |
| 561 } | 809 } |
| 562 cont->OverwriteAndNegateIfEqual(kEqual); | 810 cont->OverwriteAndNegateIfEqual(kEqual); |
| 563 return VisitWordCompare(selector, value, cont); | 811 return VisitWord64Compare(selector, value, cont); |
| 564 } | 812 } |
| 565 case IrOpcode::kInt32LessThan: | 813 case IrOpcode::kInt64LessThan: |
| 566 cont->OverwriteAndNegateIfEqual(kSignedLessThan); | 814 cont->OverwriteAndNegateIfEqual(kSignedLessThan); |
| 567 return VisitWordCompare(selector, value, cont); | 815 return VisitWord64Compare(selector, value, cont); |
| 568 case IrOpcode::kInt32LessThanOrEqual: | 816 case IrOpcode::kInt64LessThanOrEqual: |
| 569 cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual); | 817 cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual); |
| 570 return VisitWordCompare(selector, value, cont); | 818 return VisitWord64Compare(selector, value, cont); |
| 571 case IrOpcode::kUint32LessThan: | 819 case IrOpcode::kUint64LessThan: |
| 572 cont->OverwriteAndNegateIfEqual(kUnsignedLessThan); | 820 cont->OverwriteAndNegateIfEqual(kUnsignedLessThan); |
| 573 return VisitWordCompare(selector, value, cont); | 821 return VisitWord64Compare(selector, value, cont); |
| 574 case IrOpcode::kUint32LessThanOrEqual: | |
| 575 cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual); | |
| 576 return VisitWordCompare(selector, value, cont); | |
| 577 case IrOpcode::kFloat64Equal: | 822 case IrOpcode::kFloat64Equal: |
| 578 cont->OverwriteAndNegateIfEqual(kUnorderedEqual); | 823 cont->OverwriteAndNegateIfEqual(kUnorderedEqual); |
| 579 return VisitFloat64Compare(selector, value, cont); | 824 return VisitFloat64Compare(selector, value, cont); |
| 580 case IrOpcode::kFloat64LessThan: | 825 case IrOpcode::kFloat64LessThan: |
| 581 cont->OverwriteAndNegateIfEqual(kUnorderedLessThan); | 826 cont->OverwriteAndNegateIfEqual(kUnorderedLessThan); |
| 582 return VisitFloat64Compare(selector, value, cont); | 827 return VisitFloat64Compare(selector, value, cont); |
| 583 case IrOpcode::kFloat64LessThanOrEqual: | 828 case IrOpcode::kFloat64LessThanOrEqual: |
| 584 cont->OverwriteAndNegateIfEqual(kUnorderedLessThanOrEqual); | 829 cont->OverwriteAndNegateIfEqual(kUnorderedLessThanOrEqual); |
| 585 return VisitFloat64Compare(selector, value, cont); | 830 return VisitFloat64Compare(selector, value, cont); |
| 586 case IrOpcode::kProjection: | 831 case IrOpcode::kProjection: |
| 587 // Check if this is the overflow output projection of an | 832 // Check if this is the overflow output projection of an |
| 588 // <Operation>WithOverflow node. | 833 // <Operation>WithOverflow node. |
| 589 if (OpParameter<size_t>(value) == 1u) { | 834 if (OpParameter<size_t>(value) == 1u) { |
| 590 // We cannot combine the <Operation>WithOverflow with this branch | 835 // We cannot combine the <Operation>WithOverflow with this branch |
| 591 // unless the 0th projection (the use of the actual value of the | 836 // unless the 0th projection (the use of the actual value of the |
| 592 // <Operation> is either NULL, which means there's no use of the | 837 // <Operation> is either NULL, which means there's no use of the |
| 593 // actual value, or was already defined, which means it is scheduled | 838 // actual value, or was already defined, which means it is scheduled |
| 594 // *AFTER* this branch). | 839 // *AFTER* this branch). |
| 595 Node* const node = value->InputAt(0); | 840 Node* node = value->InputAt(0); |
| 596 Node* const result = node->FindProjection(0); | 841 Node* result = node->FindProjection(0); |
| 597 if (!result || selector->IsDefined(result)) { | 842 if (result == NULL || selector->IsDefined(result)) { |
| 598 switch (node->opcode()) { | 843 switch (node->opcode()) { |
| 599 case IrOpcode::kInt32AddWithOverflow: | 844 case IrOpcode::kInt32AddWithOverflow: |
| 600 cont->OverwriteAndNegateIfEqual(kOverflow); | 845 cont->OverwriteAndNegateIfEqual(kOverflow); |
| 601 return VisitBinop(selector, node, kMipsAddOvf, cont); | 846 return VisitBinop(selector, node, kMips64Dadd, cont); |
| 602 case IrOpcode::kInt32SubWithOverflow: | 847 case IrOpcode::kInt32SubWithOverflow: |
| 603 cont->OverwriteAndNegateIfEqual(kOverflow); | 848 cont->OverwriteAndNegateIfEqual(kOverflow); |
| 604 return VisitBinop(selector, node, kMipsSubOvf, cont); | 849 return VisitBinop(selector, node, kMips64Dsub, cont); |
| 605 default: | 850 default: |
| 606 break; | 851 break; |
| 607 } | 852 } |
| 608 } | 853 } |
| 609 } | 854 } |
| 610 break; | 855 break; |
| 611 case IrOpcode::kWord32And: | 856 case IrOpcode::kWord32And: |
| 612 return VisitWordCompare(selector, value, kMipsTst, cont, true); | 857 return VisitWordCompare(selector, value, kMips64Tst32, cont, true); |
| 858 case IrOpcode::kWord64And: |
| 859 return VisitWordCompare(selector, value, kMips64Tst, cont, true); |
| 613 default: | 860 default: |
| 614 break; | 861 break; |
| 615 } | 862 } |
| 616 break; | 863 break; |
| 617 } | 864 } |
| 618 | 865 |
| 619 // Continuation could not be combined with a compare, emit compare against 0. | 866 // Continuation could not be combined with a compare, emit compare against 0. |
| 620 MipsOperandGenerator g(selector); | 867 EmitWordCompareZero(selector, opcode, value, cont); |
| 621 InstructionCode const opcode = cont->Encode(kMipsCmp); | |
| 622 InstructionOperand* const value_operand = g.UseRegister(value); | |
| 623 if (cont->IsBranch()) { | |
| 624 selector->Emit(opcode, nullptr, value_operand, g.TempImmediate(0), | |
| 625 g.Label(cont->true_block()), | |
| 626 g.Label(cont->false_block()))->MarkAsControl(); | |
| 627 } else { | |
| 628 selector->Emit(opcode, g.DefineAsRegister(cont->result()), value_operand, | |
| 629 g.TempImmediate(0)); | |
| 630 } | |
| 631 } | 868 } |
| 632 | 869 |
| 633 | 870 |
| 634 void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch, | 871 void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch, |
| 635 BasicBlock* fbranch) { | 872 BasicBlock* fbranch) { |
| 636 FlagsContinuation cont(kNotEqual, tbranch, fbranch); | 873 FlagsContinuation cont(kNotEqual, tbranch, fbranch); |
| 637 // If we can fall through to the true block, invert the branch. | 874 // If we can fall through to the true block, invert the branch. |
| 638 if (IsNextInAssemblyOrder(tbranch)) { | 875 if (IsNextInAssemblyOrder(tbranch)) { |
| 639 cont.Negate(); | 876 cont.Negate(); |
| 640 cont.SwapBlocks(); | 877 cont.SwapBlocks(); |
| 641 } | 878 } |
| 879 |
| 642 VisitWordCompareZero(this, branch, branch->InputAt(0), &cont); | 880 VisitWordCompareZero(this, branch, branch->InputAt(0), &cont); |
| 643 } | 881 } |
| 644 | 882 |
| 645 | 883 |
| 646 void InstructionSelector::VisitWord32Equal(Node* const node) { | 884 void InstructionSelector::VisitWord32Equal(Node* const node) { |
| 647 FlagsContinuation cont(kEqual, node); | 885 FlagsContinuation cont(kEqual, node); |
| 648 Int32BinopMatcher m(node); | 886 Int32BinopMatcher m(node); |
| 649 if (m.right().Is(0)) { | 887 if (m.right().Is(0)) { |
| 650 return VisitWordCompareZero(this, m.node(), m.left().node(), &cont); | 888 return VisitWordCompareZero(this, m.node(), m.left().node(), &cont); |
| 651 } | 889 } |
| 652 VisitWordCompare(this, node, &cont); | 890 |
| 891 VisitWord32Compare(this, node, &cont); |
| 653 } | 892 } |
| 654 | 893 |
| 655 | 894 |
| 656 void InstructionSelector::VisitInt32LessThan(Node* node) { | 895 void InstructionSelector::VisitInt32LessThan(Node* node) { |
| 657 FlagsContinuation cont(kSignedLessThan, node); | 896 FlagsContinuation cont(kSignedLessThan, node); |
| 658 VisitWordCompare(this, node, &cont); | 897 VisitWord32Compare(this, node, &cont); |
| 659 } | 898 } |
| 660 | 899 |
| 661 | 900 |
| 662 void InstructionSelector::VisitInt32LessThanOrEqual(Node* node) { | 901 void InstructionSelector::VisitInt32LessThanOrEqual(Node* node) { |
| 663 FlagsContinuation cont(kSignedLessThanOrEqual, node); | 902 FlagsContinuation cont(kSignedLessThanOrEqual, node); |
| 664 VisitWordCompare(this, node, &cont); | 903 VisitWord32Compare(this, node, &cont); |
| 665 } | 904 } |
| 666 | 905 |
| 667 | 906 |
| 668 void InstructionSelector::VisitUint32LessThan(Node* node) { | 907 void InstructionSelector::VisitUint32LessThan(Node* node) { |
| 669 FlagsContinuation cont(kUnsignedLessThan, node); | 908 FlagsContinuation cont(kUnsignedLessThan, node); |
| 670 VisitWordCompare(this, node, &cont); | 909 VisitWord32Compare(this, node, &cont); |
| 671 } | 910 } |
| 672 | 911 |
| 673 | 912 |
| 674 void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) { | 913 void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) { |
| 675 FlagsContinuation cont(kUnsignedLessThanOrEqual, node); | 914 FlagsContinuation cont(kUnsignedLessThanOrEqual, node); |
| 676 VisitWordCompare(this, node, &cont); | 915 VisitWord32Compare(this, node, &cont); |
| 677 } | 916 } |
| 678 | 917 |
| 679 | 918 |
| 680 void InstructionSelector::VisitInt32AddWithOverflow(Node* node) { | 919 void InstructionSelector::VisitInt32AddWithOverflow(Node* node) { |
| 681 if (Node* ovf = node->FindProjection(1)) { | 920 if (Node* ovf = node->FindProjection(1)) { |
| 682 FlagsContinuation cont(kOverflow, ovf); | 921 FlagsContinuation cont(kOverflow, ovf); |
| 683 return VisitBinop(this, node, kMipsAddOvf, &cont); | 922 return VisitBinop(this, node, kMips64Dadd, &cont); |
| 684 } | 923 } |
| 685 FlagsContinuation cont; | 924 FlagsContinuation cont; |
| 686 VisitBinop(this, node, kMipsAddOvf, &cont); | 925 VisitBinop(this, node, kMips64Dadd, &cont); |
| 687 } | 926 } |
| 688 | 927 |
| 689 | 928 |
| 690 void InstructionSelector::VisitInt32SubWithOverflow(Node* node) { | 929 void InstructionSelector::VisitInt32SubWithOverflow(Node* node) { |
| 691 if (Node* ovf = node->FindProjection(1)) { | 930 if (Node* ovf = node->FindProjection(1)) { |
| 692 FlagsContinuation cont(kOverflow, ovf); | 931 FlagsContinuation cont(kOverflow, ovf); |
| 693 return VisitBinop(this, node, kMipsSubOvf, &cont); | 932 return VisitBinop(this, node, kMips64Dsub, &cont); |
| 694 } | 933 } |
| 695 FlagsContinuation cont; | 934 FlagsContinuation cont; |
| 696 VisitBinop(this, node, kMipsSubOvf, &cont); | 935 VisitBinop(this, node, kMips64Dsub, &cont); |
| 697 } | 936 } |
| 698 | 937 |
| 699 | 938 |
| 939 void InstructionSelector::VisitWord64Equal(Node* const node) { |
| 940 FlagsContinuation cont(kEqual, node); |
| 941 Int64BinopMatcher m(node); |
| 942 if (m.right().Is(0)) { |
| 943 return VisitWordCompareZero(this, m.node(), m.left().node(), &cont); |
| 944 } |
| 945 |
| 946 VisitWord64Compare(this, node, &cont); |
| 947 } |
| 948 |
| 949 |
| 950 void InstructionSelector::VisitInt64LessThan(Node* node) { |
| 951 FlagsContinuation cont(kSignedLessThan, node); |
| 952 VisitWord64Compare(this, node, &cont); |
| 953 } |
| 954 |
| 955 |
| 956 void InstructionSelector::VisitInt64LessThanOrEqual(Node* node) { |
| 957 FlagsContinuation cont(kSignedLessThanOrEqual, node); |
| 958 VisitWord64Compare(this, node, &cont); |
| 959 } |
| 960 |
| 961 |
| 962 void InstructionSelector::VisitUint64LessThan(Node* node) { |
| 963 FlagsContinuation cont(kUnsignedLessThan, node); |
| 964 VisitWord64Compare(this, node, &cont); |
| 965 } |
| 966 |
| 967 |
| 700 void InstructionSelector::VisitFloat64Equal(Node* node) { | 968 void InstructionSelector::VisitFloat64Equal(Node* node) { |
| 701 FlagsContinuation cont(kUnorderedEqual, node); | 969 FlagsContinuation cont(kUnorderedEqual, node); |
| 702 VisitFloat64Compare(this, node, &cont); | 970 VisitFloat64Compare(this, node, &cont); |
| 703 } | 971 } |
| 704 | 972 |
| 705 | 973 |
| 706 void InstructionSelector::VisitFloat64LessThan(Node* node) { | 974 void InstructionSelector::VisitFloat64LessThan(Node* node) { |
| 707 FlagsContinuation cont(kUnorderedLessThan, node); | 975 FlagsContinuation cont(kUnorderedLessThan, node); |
| 708 VisitFloat64Compare(this, node, &cont); | 976 VisitFloat64Compare(this, node, &cont); |
| 709 } | 977 } |
| 710 | 978 |
| 711 | 979 |
| 712 void InstructionSelector::VisitFloat64LessThanOrEqual(Node* node) { | 980 void InstructionSelector::VisitFloat64LessThanOrEqual(Node* node) { |
| 713 FlagsContinuation cont(kUnorderedLessThanOrEqual, node); | 981 FlagsContinuation cont(kUnorderedLessThanOrEqual, node); |
| 714 VisitFloat64Compare(this, node, &cont); | 982 VisitFloat64Compare(this, node, &cont); |
| 715 } | 983 } |
| 716 | 984 |
| 717 | 985 |
| 718 // static | 986 // static |
| 719 MachineOperatorBuilder::Flags | 987 MachineOperatorBuilder::Flags |
| 720 InstructionSelector::SupportedMachineOperatorFlags() { | 988 InstructionSelector::SupportedMachineOperatorFlags() { |
| 721 return MachineOperatorBuilder::kNoFlags; | 989 return MachineOperatorBuilder::kNoFlags; |
| 722 } | 990 } |
| 723 | 991 |
| 724 } // namespace compiler | 992 } // namespace compiler |
| 725 } // namespace internal | 993 } // namespace internal |
| 726 } // namespace v8 | 994 } // namespace v8 |