| OLD | NEW |
| 1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/base/adapters.h" | 5 #include "src/base/adapters.h" |
| 6 #include "src/compiler/instruction-selector-impl.h" | 6 #include "src/compiler/instruction-selector-impl.h" |
| 7 #include "src/compiler/node-matchers.h" | 7 #include "src/compiler/node-matchers.h" |
| 8 #include "src/compiler/node-properties.h" | 8 #include "src/compiler/node-properties.h" |
| 9 #include "src/s390/frames-s390.h" | 9 #include "src/s390/frames-s390.h" |
| 10 | 10 |
| 11 namespace v8 { | 11 namespace v8 { |
| 12 namespace internal { | 12 namespace internal { |
| 13 namespace compiler { | 13 namespace compiler { |
| 14 | 14 |
| 15 enum ImmediateMode { | 15 enum ImmediateMode { |
| 16 kInt16Imm, | |
| 17 kInt16Imm_Unsigned, | |
| 18 kInt16Imm_Negate, | |
| 19 kInt16Imm_4ByteAligned, | |
| 20 kShift32Imm, | 16 kShift32Imm, |
| 21 kShift64Imm, | 17 kShift64Imm, |
| 18 kInt32Imm, |
| 19 kInt32Imm_Negate, |
| 20 kUint32Imm, |
| 21 kInt20Imm, |
| 22 kNoImmediate | 22 kNoImmediate |
| 23 }; | 23 }; |
| 24 | 24 |
| 25 // Adds S390-specific methods for generating operands. | 25 // Adds S390-specific methods for generating operands. |
| 26 class S390OperandGenerator final : public OperandGenerator { | 26 class S390OperandGenerator final : public OperandGenerator { |
| 27 public: | 27 public: |
| 28 explicit S390OperandGenerator(InstructionSelector* selector) | 28 explicit S390OperandGenerator(InstructionSelector* selector) |
| 29 : OperandGenerator(selector) {} | 29 : OperandGenerator(selector) {} |
| 30 | 30 |
| 31 InstructionOperand UseOperand(Node* node, ImmediateMode mode) { | 31 InstructionOperand UseOperand(Node* node, ImmediateMode mode) { |
| 32 if (CanBeImmediate(node, mode)) { | 32 if (CanBeImmediate(node, mode)) { |
| 33 return UseImmediate(node); | 33 return UseImmediate(node); |
| 34 } | 34 } |
| 35 return UseRegister(node); | 35 return UseRegister(node); |
| 36 } | 36 } |
| 37 | 37 |
| 38 bool CanBeImmediate(Node* node, ImmediateMode mode) { | 38 bool CanBeImmediate(Node* node, ImmediateMode mode) { |
| 39 int64_t value; | 39 int64_t value; |
| 40 if (node->opcode() == IrOpcode::kInt32Constant) | 40 if (node->opcode() == IrOpcode::kInt32Constant) |
| 41 value = OpParameter<int32_t>(node); | 41 value = OpParameter<int32_t>(node); |
| 42 else if (node->opcode() == IrOpcode::kInt64Constant) | 42 else if (node->opcode() == IrOpcode::kInt64Constant) |
| 43 value = OpParameter<int64_t>(node); | 43 value = OpParameter<int64_t>(node); |
| 44 else | 44 else |
| 45 return false; | 45 return false; |
| 46 return CanBeImmediate(value, mode); | 46 return CanBeImmediate(value, mode); |
| 47 } | 47 } |
| 48 | 48 |
| 49 bool CanBeImmediate(int64_t value, ImmediateMode mode) { | 49 bool CanBeImmediate(int64_t value, ImmediateMode mode) { |
| 50 switch (mode) { | 50 switch (mode) { |
| 51 case kInt16Imm: | |
| 52 return is_int16(value); | |
| 53 case kInt16Imm_Unsigned: | |
| 54 return is_uint16(value); | |
| 55 case kInt16Imm_Negate: | |
| 56 return is_int16(-value); | |
| 57 case kInt16Imm_4ByteAligned: | |
| 58 return is_int16(value) && !(value & 3); | |
| 59 case kShift32Imm: | 51 case kShift32Imm: |
| 60 return 0 <= value && value < 32; | 52 return 0 <= value && value < 32; |
| 61 case kShift64Imm: | 53 case kShift64Imm: |
| 62 return 0 <= value && value < 64; | 54 return 0 <= value && value < 64; |
| 55 case kInt32Imm: |
| 56 return is_int32(value); |
| 57 case kInt32Imm_Negate: |
| 58 return is_int32(-value); |
| 59 case kUint32Imm: |
| 60 return is_uint32(value); |
| 61 case kInt20Imm: |
| 62 return is_int20(value); |
| 63 case kNoImmediate: | 63 case kNoImmediate: |
| 64 return false; | 64 return false; |
| 65 } | 65 } |
| 66 return false; | 66 return false; |
| 67 } | 67 } |
| 68 |
| 69 AddressingMode GenerateMemoryOperandInputs(Node* index, Node* base, |
| 70 Node* displacement, |
| 71 DisplacementMode displacement_mode, |
| 72 InstructionOperand inputs[], |
| 73 size_t* input_count) { |
| 74 AddressingMode mode = kMode_MRI; |
| 75 if (base != nullptr) { |
| 76 inputs[(*input_count)++] = UseRegister(base); |
| 77 if (index != nullptr) { |
| 78 inputs[(*input_count)++] = UseRegister(index); |
| 79 if (displacement != nullptr) { |
| 80 inputs[(*input_count)++] = displacement_mode |
| 81 ? UseNegatedImmediate(displacement) |
| 82 : UseImmediate(displacement); |
| 83 mode = kMode_MRRI; |
| 84 } else { |
| 85 mode = kMode_MRR; |
| 86 } |
| 87 } else { |
| 88 if (displacement == nullptr) { |
| 89 mode = kMode_MR; |
| 90 } else { |
| 91 inputs[(*input_count)++] = displacement_mode == kNegativeDisplacement |
| 92 ? UseNegatedImmediate(displacement) |
| 93 : UseImmediate(displacement); |
| 94 mode = kMode_MRI; |
| 95 } |
| 96 } |
| 97 } else { |
| 98 DCHECK_NOT_NULL(index); |
| 99 inputs[(*input_count)++] = UseRegister(index); |
| 100 if (displacement != nullptr) { |
| 101 inputs[(*input_count)++] = displacement_mode == kNegativeDisplacement |
| 102 ? UseNegatedImmediate(displacement) |
| 103 : UseImmediate(displacement); |
| 104 mode = kMode_MRI; |
| 105 } else { |
| 106 mode = kMode_MR; |
| 107 } |
| 108 } |
| 109 return mode; |
| 110 } |
| 111 |
| 112 AddressingMode GetEffectiveAddressMemoryOperand(Node* operand, |
| 113 InstructionOperand inputs[], |
| 114 size_t* input_count) { |
| 115 #if V8_TARGET_ARCH_S390X |
| 116 BaseWithIndexAndDisplacement64Matcher m(operand, |
| 117 AddressOption::kAllowInputSwap); |
| 118 #else |
| 119 BaseWithIndexAndDisplacement32Matcher m(operand, |
| 120 AddressOption::kAllowInputSwap); |
| 121 #endif |
| 122 DCHECK(m.matches()); |
| 123 if ((m.displacement() == nullptr || |
| 124 CanBeImmediate(m.displacement(), kInt20Imm))) { |
| 125 DCHECK(m.scale() == 0); |
| 126 return GenerateMemoryOperandInputs(m.index(), m.base(), m.displacement(), |
| 127 m.displacement_mode(), inputs, |
| 128 input_count); |
| 129 } else { |
| 130 inputs[(*input_count)++] = UseRegister(operand->InputAt(0)); |
| 131 inputs[(*input_count)++] = UseRegister(operand->InputAt(1)); |
| 132 return kMode_MRR; |
| 133 } |
| 134 } |
| 68 }; | 135 }; |
| 69 | 136 |
| 70 namespace { | 137 namespace { |
| 71 | 138 |
| 72 void VisitRR(InstructionSelector* selector, ArchOpcode opcode, Node* node) { | 139 void VisitRR(InstructionSelector* selector, ArchOpcode opcode, Node* node) { |
| 73 S390OperandGenerator g(selector); | 140 S390OperandGenerator g(selector); |
| 74 selector->Emit(opcode, g.DefineAsRegister(node), | 141 selector->Emit(opcode, g.DefineAsRegister(node), |
| 75 g.UseRegister(node->InputAt(0))); | 142 g.UseRegister(node->InputAt(0))); |
| 76 } | 143 } |
| 77 | 144 |
| (...skipping 82 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 160 ImmediateMode operand_mode) { | 227 ImmediateMode operand_mode) { |
| 161 FlagsContinuation cont; | 228 FlagsContinuation cont; |
| 162 VisitBinop<Matcher>(selector, node, opcode, operand_mode, &cont); | 229 VisitBinop<Matcher>(selector, node, opcode, operand_mode, &cont); |
| 163 } | 230 } |
| 164 | 231 |
| 165 } // namespace | 232 } // namespace |
| 166 | 233 |
| 167 void InstructionSelector::VisitLoad(Node* node) { | 234 void InstructionSelector::VisitLoad(Node* node) { |
| 168 LoadRepresentation load_rep = LoadRepresentationOf(node->op()); | 235 LoadRepresentation load_rep = LoadRepresentationOf(node->op()); |
| 169 S390OperandGenerator g(this); | 236 S390OperandGenerator g(this); |
| 170 Node* base = node->InputAt(0); | |
| 171 Node* offset = node->InputAt(1); | |
| 172 ArchOpcode opcode = kArchNop; | 237 ArchOpcode opcode = kArchNop; |
| 173 ImmediateMode mode = kInt16Imm; | |
| 174 switch (load_rep.representation()) { | 238 switch (load_rep.representation()) { |
| 175 case MachineRepresentation::kFloat32: | 239 case MachineRepresentation::kFloat32: |
| 176 opcode = kS390_LoadFloat32; | 240 opcode = kS390_LoadFloat32; |
| 177 break; | 241 break; |
| 178 case MachineRepresentation::kFloat64: | 242 case MachineRepresentation::kFloat64: |
| 179 opcode = kS390_LoadDouble; | 243 opcode = kS390_LoadDouble; |
| 180 break; | 244 break; |
| 181 case MachineRepresentation::kBit: // Fall through. | 245 case MachineRepresentation::kBit: // Fall through. |
| 182 case MachineRepresentation::kWord8: | 246 case MachineRepresentation::kWord8: |
| 183 opcode = load_rep.IsSigned() ? kS390_LoadWordS8 : kS390_LoadWordU8; | 247 opcode = load_rep.IsSigned() ? kS390_LoadWordS8 : kS390_LoadWordU8; |
| 184 break; | 248 break; |
| 185 case MachineRepresentation::kWord16: | 249 case MachineRepresentation::kWord16: |
| 186 opcode = load_rep.IsSigned() ? kS390_LoadWordS16 : kS390_LoadWordU16; | 250 opcode = load_rep.IsSigned() ? kS390_LoadWordS16 : kS390_LoadWordU16; |
| 187 break; | 251 break; |
| 188 #if !V8_TARGET_ARCH_S390X | 252 #if !V8_TARGET_ARCH_S390X |
| 189 case MachineRepresentation::kTagged: // Fall through. | 253 case MachineRepresentation::kTagged: // Fall through. |
| 190 #endif | 254 #endif |
| 191 case MachineRepresentation::kWord32: | 255 case MachineRepresentation::kWord32: |
| 192 opcode = kS390_LoadWordU32; | 256 opcode = kS390_LoadWordU32; |
| 193 break; | 257 break; |
| 194 #if V8_TARGET_ARCH_S390X | 258 #if V8_TARGET_ARCH_S390X |
| 195 case MachineRepresentation::kTagged: // Fall through. | 259 case MachineRepresentation::kTagged: // Fall through. |
| 196 case MachineRepresentation::kWord64: | 260 case MachineRepresentation::kWord64: |
| 197 opcode = kS390_LoadWord64; | 261 opcode = kS390_LoadWord64; |
| 198 mode = kInt16Imm_4ByteAligned; | |
| 199 break; | 262 break; |
| 200 #else | 263 #else |
| 201 case MachineRepresentation::kWord64: // Fall through. | 264 case MachineRepresentation::kWord64: // Fall through. |
| 202 #endif | 265 #endif |
| 203 case MachineRepresentation::kSimd128: // Fall through. | 266 case MachineRepresentation::kSimd128: // Fall through. |
| 204 case MachineRepresentation::kNone: | 267 case MachineRepresentation::kNone: |
| 205 UNREACHABLE(); | 268 UNREACHABLE(); |
| 206 return; | 269 return; |
| 207 } | 270 } |
| 208 if (g.CanBeImmediate(offset, mode)) { | 271 InstructionOperand outputs[1]; |
| 209 Emit(opcode | AddressingModeField::encode(kMode_MRI), | 272 outputs[0] = g.DefineAsRegister(node); |
| 210 g.DefineAsRegister(node), g.UseRegister(base), g.UseImmediate(offset)); | 273 InstructionOperand inputs[3]; |
| 211 } else if (g.CanBeImmediate(base, mode)) { | 274 size_t input_count = 0; |
| 212 Emit(opcode | AddressingModeField::encode(kMode_MRI), | 275 AddressingMode mode = |
| 213 g.DefineAsRegister(node), g.UseRegister(offset), g.UseImmediate(base)); | 276 g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count); |
| 214 } else { | 277 InstructionCode code = opcode | AddressingModeField::encode(mode); |
| 215 Emit(opcode | AddressingModeField::encode(kMode_MRR), | 278 Emit(code, 1, outputs, input_count, inputs); |
| 216 g.DefineAsRegister(node), g.UseRegister(base), g.UseRegister(offset)); | |
| 217 } | |
| 218 } | 279 } |
| 219 | 280 |
| 220 void InstructionSelector::VisitStore(Node* node) { | 281 void InstructionSelector::VisitStore(Node* node) { |
| 221 S390OperandGenerator g(this); | 282 S390OperandGenerator g(this); |
| 222 Node* base = node->InputAt(0); | 283 Node* base = node->InputAt(0); |
| 223 Node* offset = node->InputAt(1); | 284 Node* offset = node->InputAt(1); |
| 224 Node* value = node->InputAt(2); | 285 Node* value = node->InputAt(2); |
| 225 | 286 |
| 226 StoreRepresentation store_rep = StoreRepresentationOf(node->op()); | 287 StoreRepresentation store_rep = StoreRepresentationOf(node->op()); |
| 227 WriteBarrierKind write_barrier_kind = store_rep.write_barrier_kind(); | 288 WriteBarrierKind write_barrier_kind = store_rep.write_barrier_kind(); |
| 228 MachineRepresentation rep = store_rep.representation(); | 289 MachineRepresentation rep = store_rep.representation(); |
| 229 | 290 |
| 230 if (write_barrier_kind != kNoWriteBarrier) { | 291 if (write_barrier_kind != kNoWriteBarrier) { |
| 231 DCHECK_EQ(MachineRepresentation::kTagged, rep); | 292 DCHECK_EQ(MachineRepresentation::kTagged, rep); |
| 232 AddressingMode addressing_mode; | 293 AddressingMode addressing_mode; |
| 233 InstructionOperand inputs[3]; | 294 InstructionOperand inputs[3]; |
| 234 size_t input_count = 0; | 295 size_t input_count = 0; |
| 235 inputs[input_count++] = g.UseUniqueRegister(base); | 296 inputs[input_count++] = g.UseUniqueRegister(base); |
| 236 // OutOfLineRecordWrite uses the offset in an 'AddP' instruction as well as | 297 // OutOfLineRecordWrite uses the offset in an 'AddP' instruction as well as |
| 237 // for the store itself, so we must check compatibility with both. | 298 // for the store itself, so we must check compatibility with both. |
| 238 if (g.CanBeImmediate(offset, kInt16Imm) | 299 if (g.CanBeImmediate(offset, kInt20Imm)) { |
| 239 #if V8_TARGET_ARCH_S390X | |
| 240 && g.CanBeImmediate(offset, kInt16Imm_4ByteAligned) | |
| 241 #endif | |
| 242 ) { | |
| 243 inputs[input_count++] = g.UseImmediate(offset); | 300 inputs[input_count++] = g.UseImmediate(offset); |
| 244 addressing_mode = kMode_MRI; | 301 addressing_mode = kMode_MRI; |
| 245 } else { | 302 } else { |
| 246 inputs[input_count++] = g.UseUniqueRegister(offset); | 303 inputs[input_count++] = g.UseUniqueRegister(offset); |
| 247 addressing_mode = kMode_MRR; | 304 addressing_mode = kMode_MRR; |
| 248 } | 305 } |
| 249 inputs[input_count++] = g.UseUniqueRegister(value); | 306 inputs[input_count++] = g.UseUniqueRegister(value); |
| 250 RecordWriteMode record_write_mode = RecordWriteMode::kValueIsAny; | 307 RecordWriteMode record_write_mode = RecordWriteMode::kValueIsAny; |
| 251 switch (write_barrier_kind) { | 308 switch (write_barrier_kind) { |
| 252 case kNoWriteBarrier: | 309 case kNoWriteBarrier: |
| (...skipping 10 matching lines...) Expand all Loading... |
| 263 break; | 320 break; |
| 264 } | 321 } |
| 265 InstructionOperand temps[] = {g.TempRegister(), g.TempRegister()}; | 322 InstructionOperand temps[] = {g.TempRegister(), g.TempRegister()}; |
| 266 size_t const temp_count = arraysize(temps); | 323 size_t const temp_count = arraysize(temps); |
| 267 InstructionCode code = kArchStoreWithWriteBarrier; | 324 InstructionCode code = kArchStoreWithWriteBarrier; |
| 268 code |= AddressingModeField::encode(addressing_mode); | 325 code |= AddressingModeField::encode(addressing_mode); |
| 269 code |= MiscField::encode(static_cast<int>(record_write_mode)); | 326 code |= MiscField::encode(static_cast<int>(record_write_mode)); |
| 270 Emit(code, 0, nullptr, input_count, inputs, temp_count, temps); | 327 Emit(code, 0, nullptr, input_count, inputs, temp_count, temps); |
| 271 } else { | 328 } else { |
| 272 ArchOpcode opcode = kArchNop; | 329 ArchOpcode opcode = kArchNop; |
| 273 ImmediateMode mode = kInt16Imm; | |
| 274 NodeMatcher m(value); | 330 NodeMatcher m(value); |
| 275 switch (rep) { | 331 switch (rep) { |
| 276 case MachineRepresentation::kFloat32: | 332 case MachineRepresentation::kFloat32: |
| 277 opcode = kS390_StoreFloat32; | 333 opcode = kS390_StoreFloat32; |
| 278 break; | 334 break; |
| 279 case MachineRepresentation::kFloat64: | 335 case MachineRepresentation::kFloat64: |
| 280 opcode = kS390_StoreDouble; | 336 opcode = kS390_StoreDouble; |
| 281 break; | 337 break; |
| 282 case MachineRepresentation::kBit: // Fall through. | 338 case MachineRepresentation::kBit: // Fall through. |
| 283 case MachineRepresentation::kWord8: | 339 case MachineRepresentation::kWord8: |
| 284 opcode = kS390_StoreWord8; | 340 opcode = kS390_StoreWord8; |
| 285 break; | 341 break; |
| 286 case MachineRepresentation::kWord16: | 342 case MachineRepresentation::kWord16: |
| 287 opcode = kS390_StoreWord16; | 343 opcode = kS390_StoreWord16; |
| 288 break; | 344 break; |
| 289 #if !V8_TARGET_ARCH_S390X | 345 #if !V8_TARGET_ARCH_S390X |
| 290 case MachineRepresentation::kTagged: // Fall through. | 346 case MachineRepresentation::kTagged: // Fall through. |
| 291 #endif | 347 #endif |
| 292 case MachineRepresentation::kWord32: | 348 case MachineRepresentation::kWord32: |
| 293 opcode = kS390_StoreWord32; | 349 opcode = kS390_StoreWord32; |
| 294 if (m.IsWord32ReverseBytes()) { | 350 if (m.IsWord32ReverseBytes()) { |
| 295 opcode = kS390_StoreReverse32; | 351 opcode = kS390_StoreReverse32; |
| 296 value = value->InputAt(0); | 352 value = value->InputAt(0); |
| 297 } | 353 } |
| 298 break; | 354 break; |
| 299 #if V8_TARGET_ARCH_S390X | 355 #if V8_TARGET_ARCH_S390X |
| 300 case MachineRepresentation::kTagged: // Fall through. | 356 case MachineRepresentation::kTagged: // Fall through. |
| 301 case MachineRepresentation::kWord64: | 357 case MachineRepresentation::kWord64: |
| 302 opcode = kS390_StoreWord64; | 358 opcode = kS390_StoreWord64; |
| 303 mode = kInt16Imm_4ByteAligned; | |
| 304 if (m.IsWord64ReverseBytes()) { | 359 if (m.IsWord64ReverseBytes()) { |
| 305 opcode = kS390_StoreReverse64; | 360 opcode = kS390_StoreReverse64; |
| 306 value = value->InputAt(0); | 361 value = value->InputAt(0); |
| 307 } | 362 } |
| 308 break; | 363 break; |
| 309 #else | 364 #else |
| 310 case MachineRepresentation::kWord64: // Fall through. | 365 case MachineRepresentation::kWord64: // Fall through. |
| 311 #endif | 366 #endif |
| 312 case MachineRepresentation::kSimd128: // Fall through. | 367 case MachineRepresentation::kSimd128: // Fall through. |
| 313 case MachineRepresentation::kNone: | 368 case MachineRepresentation::kNone: |
| 314 UNREACHABLE(); | 369 UNREACHABLE(); |
| 315 return; | 370 return; |
| 316 } | 371 } |
| 317 if (g.CanBeImmediate(offset, mode)) { | 372 InstructionOperand inputs[4]; |
| 318 Emit(opcode | AddressingModeField::encode(kMode_MRI), g.NoOutput(), | 373 size_t input_count = 0; |
| 319 g.UseRegister(base), g.UseImmediate(offset), g.UseRegister(value)); | 374 AddressingMode addressing_mode = |
| 320 } else if (g.CanBeImmediate(base, mode)) { | 375 g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count); |
| 321 Emit(opcode | AddressingModeField::encode(kMode_MRI), g.NoOutput(), | 376 InstructionCode code = |
| 322 g.UseRegister(offset), g.UseImmediate(base), g.UseRegister(value)); | 377 opcode | AddressingModeField::encode(addressing_mode); |
| 323 } else { | 378 InstructionOperand value_operand = g.UseRegister(value); |
| 324 Emit(opcode | AddressingModeField::encode(kMode_MRR), g.NoOutput(), | 379 inputs[input_count++] = value_operand; |
| 325 g.UseRegister(base), g.UseRegister(offset), g.UseRegister(value)); | 380 Emit(code, 0, static_cast<InstructionOperand*>(nullptr), input_count, |
| 326 } | 381 inputs); |
| 327 } | 382 } |
| 328 } | 383 } |
| 329 | 384 |
| 330 // Architecture supports unaligned access, therefore VisitLoad is used instead | 385 // Architecture supports unaligned access, therefore VisitLoad is used instead |
| 331 void InstructionSelector::VisitUnalignedLoad(Node* node) { UNREACHABLE(); } | 386 void InstructionSelector::VisitUnalignedLoad(Node* node) { UNREACHABLE(); } |
| 332 | 387 |
| 333 // Architecture supports unaligned access, therefore VisitStore is used instead | 388 // Architecture supports unaligned access, therefore VisitStore is used instead |
| 334 void InstructionSelector::VisitUnalignedStore(Node* node) { UNREACHABLE(); } | 389 void InstructionSelector::VisitUnalignedStore(Node* node) { UNREACHABLE(); } |
| 335 | 390 |
| 336 void InstructionSelector::VisitCheckedLoad(Node* node) { | 391 void InstructionSelector::VisitCheckedLoad(Node* node) { |
| (...skipping 30 matching lines...) Expand all Loading... |
| 367 case MachineRepresentation::kWord64: // Fall through. | 422 case MachineRepresentation::kWord64: // Fall through. |
| 368 #endif | 423 #endif |
| 369 case MachineRepresentation::kSimd128: // Fall through. | 424 case MachineRepresentation::kSimd128: // Fall through. |
| 370 case MachineRepresentation::kNone: | 425 case MachineRepresentation::kNone: |
| 371 UNREACHABLE(); | 426 UNREACHABLE(); |
| 372 return; | 427 return; |
| 373 } | 428 } |
| 374 AddressingMode addressingMode = kMode_MRR; | 429 AddressingMode addressingMode = kMode_MRR; |
| 375 Emit(opcode | AddressingModeField::encode(addressingMode), | 430 Emit(opcode | AddressingModeField::encode(addressingMode), |
| 376 g.DefineAsRegister(node), g.UseRegister(base), g.UseRegister(offset), | 431 g.DefineAsRegister(node), g.UseRegister(base), g.UseRegister(offset), |
| 377 g.UseOperand(length, kInt16Imm_Unsigned)); | 432 g.UseOperand(length, kUint32Imm)); |
| 378 } | 433 } |
| 379 | 434 |
| 380 void InstructionSelector::VisitCheckedStore(Node* node) { | 435 void InstructionSelector::VisitCheckedStore(Node* node) { |
| 381 MachineRepresentation rep = CheckedStoreRepresentationOf(node->op()); | 436 MachineRepresentation rep = CheckedStoreRepresentationOf(node->op()); |
| 382 S390OperandGenerator g(this); | 437 S390OperandGenerator g(this); |
| 383 Node* const base = node->InputAt(0); | 438 Node* const base = node->InputAt(0); |
| 384 Node* const offset = node->InputAt(1); | 439 Node* const offset = node->InputAt(1); |
| 385 Node* const length = node->InputAt(2); | 440 Node* const length = node->InputAt(2); |
| 386 Node* const value = node->InputAt(3); | 441 Node* const value = node->InputAt(3); |
| 387 ArchOpcode opcode = kArchNop; | 442 ArchOpcode opcode = kArchNop; |
| (...skipping 24 matching lines...) Expand all Loading... |
| 412 case MachineRepresentation::kWord64: // Fall through. | 467 case MachineRepresentation::kWord64: // Fall through. |
| 413 #endif | 468 #endif |
| 414 case MachineRepresentation::kSimd128: // Fall through. | 469 case MachineRepresentation::kSimd128: // Fall through. |
| 415 case MachineRepresentation::kNone: | 470 case MachineRepresentation::kNone: |
| 416 UNREACHABLE(); | 471 UNREACHABLE(); |
| 417 return; | 472 return; |
| 418 } | 473 } |
| 419 AddressingMode addressingMode = kMode_MRR; | 474 AddressingMode addressingMode = kMode_MRR; |
| 420 Emit(opcode | AddressingModeField::encode(addressingMode), g.NoOutput(), | 475 Emit(opcode | AddressingModeField::encode(addressingMode), g.NoOutput(), |
| 421 g.UseRegister(base), g.UseRegister(offset), | 476 g.UseRegister(base), g.UseRegister(offset), |
| 422 g.UseOperand(length, kInt16Imm_Unsigned), g.UseRegister(value)); | 477 g.UseOperand(length, kUint32Imm), g.UseRegister(value)); |
| 423 } | 478 } |
| 424 | 479 |
| 425 static inline bool IsContiguousMask32(uint32_t value, int* mb, int* me) { | 480 static inline bool IsContiguousMask32(uint32_t value, int* mb, int* me) { |
| 426 int mask_width = base::bits::CountPopulation32(value); | 481 int mask_width = base::bits::CountPopulation32(value); |
| 427 int mask_msb = base::bits::CountLeadingZeros32(value); | 482 int mask_msb = base::bits::CountLeadingZeros32(value); |
| 428 int mask_lsb = base::bits::CountTrailingZeros32(value); | 483 int mask_lsb = base::bits::CountTrailingZeros32(value); |
| 429 if ((mask_width == 0) || (mask_msb + mask_width + mask_lsb != 32)) | 484 if ((mask_width == 0) || (mask_msb + mask_width + mask_lsb != 32)) |
| 430 return false; | 485 return false; |
| 431 *mb = mask_lsb + mask_width - 1; | 486 *mb = mask_lsb + mask_width - 1; |
| 432 *me = mask_lsb; | 487 *me = mask_lsb; |
| (...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 470 } | 525 } |
| 471 } | 526 } |
| 472 } | 527 } |
| 473 if (mb >= me) { | 528 if (mb >= me) { |
| 474 Emit(kS390_RotLeftAndMask32, g.DefineAsRegister(node), | 529 Emit(kS390_RotLeftAndMask32, g.DefineAsRegister(node), |
| 475 g.UseRegister(left), g.TempImmediate(sh), g.TempImmediate(mb), | 530 g.UseRegister(left), g.TempImmediate(sh), g.TempImmediate(mb), |
| 476 g.TempImmediate(me)); | 531 g.TempImmediate(me)); |
| 477 return; | 532 return; |
| 478 } | 533 } |
| 479 } | 534 } |
| 480 VisitBinop<Int32BinopMatcher>(this, node, kS390_And32, kInt16Imm_Unsigned); | 535 VisitBinop<Int32BinopMatcher>(this, node, kS390_And32, kUint32Imm); |
| 481 } | 536 } |
| 482 | 537 |
| 483 #if V8_TARGET_ARCH_S390X | 538 #if V8_TARGET_ARCH_S390X |
| 484 void InstructionSelector::VisitWord64And(Node* node) { | 539 void InstructionSelector::VisitWord64And(Node* node) { |
| 485 S390OperandGenerator g(this); | 540 S390OperandGenerator g(this); |
| 486 Int64BinopMatcher m(node); | 541 Int64BinopMatcher m(node); |
| 487 int mb = 0; | 542 int mb = 0; |
| 488 int me = 0; | 543 int me = 0; |
| 489 if (m.right().HasValue() && IsContiguousMask64(m.right().Value(), &mb, &me)) { | 544 if (m.right().HasValue() && IsContiguousMask64(m.right().Value(), &mb, &me)) { |
| 490 int sh = 0; | 545 int sh = 0; |
| (...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 522 opcode = kS390_RotLeftAndClear64; | 577 opcode = kS390_RotLeftAndClear64; |
| 523 mask = mb; | 578 mask = mb; |
| 524 } | 579 } |
| 525 if (match) { | 580 if (match) { |
| 526 Emit(opcode, g.DefineAsRegister(node), g.UseRegister(left), | 581 Emit(opcode, g.DefineAsRegister(node), g.UseRegister(left), |
| 527 g.TempImmediate(sh), g.TempImmediate(mask)); | 582 g.TempImmediate(sh), g.TempImmediate(mask)); |
| 528 return; | 583 return; |
| 529 } | 584 } |
| 530 } | 585 } |
| 531 } | 586 } |
| 532 VisitBinop<Int64BinopMatcher>(this, node, kS390_And64, kInt16Imm_Unsigned); | 587 VisitBinop<Int64BinopMatcher>(this, node, kS390_And64, kUint32Imm); |
| 533 } | 588 } |
| 534 #endif | 589 #endif |
| 535 | 590 |
| 536 void InstructionSelector::VisitWord32Or(Node* node) { | 591 void InstructionSelector::VisitWord32Or(Node* node) { |
| 537 Int32BinopMatcher m(node); | 592 Int32BinopMatcher m(node); |
| 538 VisitBinop<Int32BinopMatcher>(this, node, kS390_Or32, kInt16Imm_Unsigned); | 593 VisitBinop<Int32BinopMatcher>(this, node, kS390_Or32, kUint32Imm); |
| 539 } | 594 } |
| 540 | 595 |
| 541 #if V8_TARGET_ARCH_S390X | 596 #if V8_TARGET_ARCH_S390X |
| 542 void InstructionSelector::VisitWord64Or(Node* node) { | 597 void InstructionSelector::VisitWord64Or(Node* node) { |
| 543 Int64BinopMatcher m(node); | 598 Int64BinopMatcher m(node); |
| 544 VisitBinop<Int64BinopMatcher>(this, node, kS390_Or64, kInt16Imm_Unsigned); | 599 VisitBinop<Int64BinopMatcher>(this, node, kS390_Or64, kUint32Imm); |
| 545 } | 600 } |
| 546 #endif | 601 #endif |
| 547 | 602 |
| 548 void InstructionSelector::VisitWord32Xor(Node* node) { | 603 void InstructionSelector::VisitWord32Xor(Node* node) { |
| 549 S390OperandGenerator g(this); | 604 S390OperandGenerator g(this); |
| 550 Int32BinopMatcher m(node); | 605 Int32BinopMatcher m(node); |
| 551 if (m.right().Is(-1)) { | 606 if (m.right().Is(-1)) { |
| 552 Emit(kS390_Not32, g.DefineAsRegister(node), g.UseRegister(m.left().node())); | 607 Emit(kS390_Not32, g.DefineAsRegister(node), g.UseRegister(m.left().node())); |
| 553 } else { | 608 } else { |
| 554 VisitBinop<Int32BinopMatcher>(this, node, kS390_Xor32, kInt16Imm_Unsigned); | 609 VisitBinop<Int32BinopMatcher>(this, node, kS390_Xor32, kUint32Imm); |
| 555 } | 610 } |
| 556 } | 611 } |
| 557 | 612 |
| 558 #if V8_TARGET_ARCH_S390X | 613 #if V8_TARGET_ARCH_S390X |
| 559 void InstructionSelector::VisitWord64Xor(Node* node) { | 614 void InstructionSelector::VisitWord64Xor(Node* node) { |
| 560 S390OperandGenerator g(this); | 615 S390OperandGenerator g(this); |
| 561 Int64BinopMatcher m(node); | 616 Int64BinopMatcher m(node); |
| 562 if (m.right().Is(-1)) { | 617 if (m.right().Is(-1)) { |
| 563 Emit(kS390_Not64, g.DefineAsRegister(node), g.UseRegister(m.left().node())); | 618 Emit(kS390_Not64, g.DefineAsRegister(node), g.UseRegister(m.left().node())); |
| 564 } else { | 619 } else { |
| 565 VisitBinop<Int64BinopMatcher>(this, node, kS390_Xor64, kInt16Imm_Unsigned); | 620 VisitBinop<Int64BinopMatcher>(this, node, kS390_Xor64, kUint32Imm); |
| 566 } | 621 } |
| 567 } | 622 } |
| 568 #endif | 623 #endif |
| 569 | 624 |
| 570 void InstructionSelector::VisitWord32Shl(Node* node) { | 625 void InstructionSelector::VisitWord32Shl(Node* node) { |
| 571 S390OperandGenerator g(this); | 626 S390OperandGenerator g(this); |
| 572 Int32BinopMatcher m(node); | 627 Int32BinopMatcher m(node); |
| 573 if (m.left().IsWord32And() && m.right().IsInRange(0, 31)) { | 628 if (m.left().IsWord32And() && m.right().IsInRange(0, 31)) { |
| 574 Int32BinopMatcher mleft(m.left().node()); | 629 Int32BinopMatcher mleft(m.left().node()); |
| 575 int sh = m.right().Value(); | 630 int sh = m.right().Value(); |
| (...skipping 291 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 867 g.DefineAsRegister(node), g.UseRegister(base), | 922 g.DefineAsRegister(node), g.UseRegister(base), |
| 868 g.UseRegister(offset)); | 923 g.UseRegister(offset)); |
| 869 return; | 924 return; |
| 870 } | 925 } |
| 871 } | 926 } |
| 872 Emit(kS390_LoadReverse32RR, g.DefineAsRegister(node), | 927 Emit(kS390_LoadReverse32RR, g.DefineAsRegister(node), |
| 873 g.UseRegister(node->InputAt(0))); | 928 g.UseRegister(node->InputAt(0))); |
| 874 } | 929 } |
| 875 | 930 |
| 876 void InstructionSelector::VisitInt32Add(Node* node) { | 931 void InstructionSelector::VisitInt32Add(Node* node) { |
| 877 VisitBinop<Int32BinopMatcher>(this, node, kS390_Add32, kInt16Imm); | 932 VisitBinop<Int32BinopMatcher>(this, node, kS390_Add32, kInt32Imm); |
| 878 } | 933 } |
| 879 | 934 |
| 880 #if V8_TARGET_ARCH_S390X | 935 #if V8_TARGET_ARCH_S390X |
| 881 void InstructionSelector::VisitInt64Add(Node* node) { | 936 void InstructionSelector::VisitInt64Add(Node* node) { |
| 882 VisitBinop<Int64BinopMatcher>(this, node, kS390_Add64, kInt16Imm); | 937 VisitBinop<Int64BinopMatcher>(this, node, kS390_Add64, kInt32Imm); |
| 883 } | 938 } |
| 884 #endif | 939 #endif |
| 885 | 940 |
| 886 void InstructionSelector::VisitInt32Sub(Node* node) { | 941 void InstructionSelector::VisitInt32Sub(Node* node) { |
| 887 S390OperandGenerator g(this); | 942 S390OperandGenerator g(this); |
| 888 Int32BinopMatcher m(node); | 943 Int32BinopMatcher m(node); |
| 889 if (m.left().Is(0)) { | 944 if (m.left().Is(0)) { |
| 890 Emit(kS390_Neg32, g.DefineAsRegister(node), | 945 Emit(kS390_Neg32, g.DefineAsRegister(node), |
| 891 g.UseRegister(m.right().node())); | 946 g.UseRegister(m.right().node())); |
| 892 } else { | 947 } else { |
| 893 VisitBinop<Int32BinopMatcher>(this, node, kS390_Sub32, kInt16Imm_Negate); | 948 VisitBinop<Int32BinopMatcher>(this, node, kS390_Sub32, kInt32Imm_Negate); |
| 894 } | 949 } |
| 895 } | 950 } |
| 896 | 951 |
| 897 #if V8_TARGET_ARCH_S390X | 952 #if V8_TARGET_ARCH_S390X |
| 898 void InstructionSelector::VisitInt64Sub(Node* node) { | 953 void InstructionSelector::VisitInt64Sub(Node* node) { |
| 899 S390OperandGenerator g(this); | 954 S390OperandGenerator g(this); |
| 900 Int64BinopMatcher m(node); | 955 Int64BinopMatcher m(node); |
| 901 if (m.left().Is(0)) { | 956 if (m.left().Is(0)) { |
| 902 Emit(kS390_Neg64, g.DefineAsRegister(node), | 957 Emit(kS390_Neg64, g.DefineAsRegister(node), |
| 903 g.UseRegister(m.right().node())); | 958 g.UseRegister(m.right().node())); |
| 904 } else { | 959 } else { |
| 905 VisitBinop<Int64BinopMatcher>(this, node, kS390_Sub64, kInt16Imm_Negate); | 960 VisitBinop<Int64BinopMatcher>(this, node, kS390_Sub64, kInt32Imm_Negate); |
| 906 } | 961 } |
| 907 } | 962 } |
| 908 #endif | 963 #endif |
| 909 | 964 |
| 910 namespace { | 965 namespace { |
| 911 | 966 |
| 912 void VisitCompare(InstructionSelector* selector, InstructionCode opcode, | 967 void VisitCompare(InstructionSelector* selector, InstructionCode opcode, |
| 913 InstructionOperand left, InstructionOperand right, | 968 InstructionOperand left, InstructionOperand right, |
| 914 FlagsContinuation* cont); | 969 FlagsContinuation* cont); |
| 915 void EmitInt32MulWithOverflow(InstructionSelector* selector, Node* node, | 970 void EmitInt32MulWithOverflow(InstructionSelector* selector, Node* node, |
| (...skipping 331 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1247 VisitRR(this, kS390_NegFloat, node); | 1302 VisitRR(this, kS390_NegFloat, node); |
| 1248 } | 1303 } |
| 1249 | 1304 |
| 1250 void InstructionSelector::VisitFloat64Neg(Node* node) { | 1305 void InstructionSelector::VisitFloat64Neg(Node* node) { |
| 1251 VisitRR(this, kS390_NegDouble, node); | 1306 VisitRR(this, kS390_NegDouble, node); |
| 1252 } | 1307 } |
| 1253 | 1308 |
| 1254 void InstructionSelector::VisitInt32AddWithOverflow(Node* node) { | 1309 void InstructionSelector::VisitInt32AddWithOverflow(Node* node) { |
| 1255 if (Node* ovf = NodeProperties::FindProjection(node, 1)) { | 1310 if (Node* ovf = NodeProperties::FindProjection(node, 1)) { |
| 1256 FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf); | 1311 FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf); |
| 1257 return VisitBinop<Int32BinopMatcher>(this, node, kS390_Add32, kInt16Imm, | 1312 return VisitBinop<Int32BinopMatcher>(this, node, kS390_Add32, kInt32Imm, |
| 1258 &cont); | 1313 &cont); |
| 1259 } | 1314 } |
| 1260 FlagsContinuation cont; | 1315 FlagsContinuation cont; |
| 1261 VisitBinop<Int32BinopMatcher>(this, node, kS390_Add32, kInt16Imm, &cont); | 1316 VisitBinop<Int32BinopMatcher>(this, node, kS390_Add32, kInt32Imm, &cont); |
| 1262 } | 1317 } |
| 1263 | 1318 |
| 1264 void InstructionSelector::VisitInt32SubWithOverflow(Node* node) { | 1319 void InstructionSelector::VisitInt32SubWithOverflow(Node* node) { |
| 1265 if (Node* ovf = NodeProperties::FindProjection(node, 1)) { | 1320 if (Node* ovf = NodeProperties::FindProjection(node, 1)) { |
| 1266 FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf); | 1321 FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf); |
| 1267 return VisitBinop<Int32BinopMatcher>(this, node, kS390_Sub32, | 1322 return VisitBinop<Int32BinopMatcher>(this, node, kS390_Sub32, |
| 1268 kInt16Imm_Negate, &cont); | 1323 kInt32Imm_Negate, &cont); |
| 1269 } | 1324 } |
| 1270 FlagsContinuation cont; | 1325 FlagsContinuation cont; |
| 1271 VisitBinop<Int32BinopMatcher>(this, node, kS390_Sub32, kInt16Imm_Negate, | 1326 VisitBinop<Int32BinopMatcher>(this, node, kS390_Sub32, kInt32Imm_Negate, |
| 1272 &cont); | 1327 &cont); |
| 1273 } | 1328 } |
| 1274 | 1329 |
| 1275 #if V8_TARGET_ARCH_S390X | 1330 #if V8_TARGET_ARCH_S390X |
| 1276 void InstructionSelector::VisitInt64AddWithOverflow(Node* node) { | 1331 void InstructionSelector::VisitInt64AddWithOverflow(Node* node) { |
| 1277 if (Node* ovf = NodeProperties::FindProjection(node, 1)) { | 1332 if (Node* ovf = NodeProperties::FindProjection(node, 1)) { |
| 1278 FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf); | 1333 FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf); |
| 1279 return VisitBinop<Int64BinopMatcher>(this, node, kS390_Add64, kInt16Imm, | 1334 return VisitBinop<Int64BinopMatcher>(this, node, kS390_Add64, kInt32Imm, |
| 1280 &cont); | 1335 &cont); |
| 1281 } | 1336 } |
| 1282 FlagsContinuation cont; | 1337 FlagsContinuation cont; |
| 1283 VisitBinop<Int64BinopMatcher>(this, node, kS390_Add64, kInt16Imm, &cont); | 1338 VisitBinop<Int64BinopMatcher>(this, node, kS390_Add64, kInt32Imm, &cont); |
| 1284 } | 1339 } |
| 1285 | 1340 |
| 1286 void InstructionSelector::VisitInt64SubWithOverflow(Node* node) { | 1341 void InstructionSelector::VisitInt64SubWithOverflow(Node* node) { |
| 1287 if (Node* ovf = NodeProperties::FindProjection(node, 1)) { | 1342 if (Node* ovf = NodeProperties::FindProjection(node, 1)) { |
| 1288 FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf); | 1343 FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf); |
| 1289 return VisitBinop<Int64BinopMatcher>(this, node, kS390_Sub64, | 1344 return VisitBinop<Int64BinopMatcher>(this, node, kS390_Sub64, |
| 1290 kInt16Imm_Negate, &cont); | 1345 kInt32Imm_Negate, &cont); |
| 1291 } | 1346 } |
| 1292 FlagsContinuation cont; | 1347 FlagsContinuation cont; |
| 1293 VisitBinop<Int64BinopMatcher>(this, node, kS390_Sub64, kInt16Imm_Negate, | 1348 VisitBinop<Int64BinopMatcher>(this, node, kS390_Sub64, kInt32Imm_Negate, |
| 1294 &cont); | 1349 &cont); |
| 1295 } | 1350 } |
| 1296 #endif | 1351 #endif |
| 1297 | 1352 |
| 1298 static bool CompareLogical(FlagsContinuation* cont) { | 1353 static bool CompareLogical(FlagsContinuation* cont) { |
| 1299 switch (cont->condition()) { | 1354 switch (cont->condition()) { |
| 1300 case kUnsignedLessThan: | 1355 case kUnsignedLessThan: |
| 1301 case kUnsignedGreaterThanOrEqual: | 1356 case kUnsignedGreaterThanOrEqual: |
| 1302 case kUnsignedLessThanOrEqual: | 1357 case kUnsignedLessThanOrEqual: |
| 1303 case kUnsignedGreaterThan: | 1358 case kUnsignedGreaterThan: |
| (...skipping 42 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1346 VisitCompare(selector, opcode, g.UseRegister(right), g.UseImmediate(left), | 1401 VisitCompare(selector, opcode, g.UseRegister(right), g.UseImmediate(left), |
| 1347 cont); | 1402 cont); |
| 1348 } else { | 1403 } else { |
| 1349 VisitCompare(selector, opcode, g.UseRegister(left), g.UseRegister(right), | 1404 VisitCompare(selector, opcode, g.UseRegister(left), g.UseRegister(right), |
| 1350 cont); | 1405 cont); |
| 1351 } | 1406 } |
| 1352 } | 1407 } |
| 1353 | 1408 |
| 1354 void VisitWord32Compare(InstructionSelector* selector, Node* node, | 1409 void VisitWord32Compare(InstructionSelector* selector, Node* node, |
| 1355 FlagsContinuation* cont) { | 1410 FlagsContinuation* cont) { |
| 1356 ImmediateMode mode = (CompareLogical(cont) ? kInt16Imm_Unsigned : kInt16Imm); | 1411 ImmediateMode mode = (CompareLogical(cont) ? kUint32Imm : kInt32Imm); |
| 1357 VisitWordCompare(selector, node, kS390_Cmp32, cont, false, mode); | 1412 VisitWordCompare(selector, node, kS390_Cmp32, cont, false, mode); |
| 1358 } | 1413 } |
| 1359 | 1414 |
| 1360 #if V8_TARGET_ARCH_S390X | 1415 #if V8_TARGET_ARCH_S390X |
| 1361 void VisitWord64Compare(InstructionSelector* selector, Node* node, | 1416 void VisitWord64Compare(InstructionSelector* selector, Node* node, |
| 1362 FlagsContinuation* cont) { | 1417 FlagsContinuation* cont) { |
| 1363 ImmediateMode mode = (CompareLogical(cont) ? kInt16Imm_Unsigned : kInt16Imm); | 1418 ImmediateMode mode = (CompareLogical(cont) ? kUint32Imm : kInt32Imm); |
| 1364 VisitWordCompare(selector, node, kS390_Cmp64, cont, false, mode); | 1419 VisitWordCompare(selector, node, kS390_Cmp64, cont, false, mode); |
| 1365 } | 1420 } |
| 1366 #endif | 1421 #endif |
| 1367 | 1422 |
| 1368 // Shared routine for multiple float32 compare operations. | 1423 // Shared routine for multiple float32 compare operations. |
| 1369 void VisitFloat32Compare(InstructionSelector* selector, Node* node, | 1424 void VisitFloat32Compare(InstructionSelector* selector, Node* node, |
| 1370 FlagsContinuation* cont) { | 1425 FlagsContinuation* cont) { |
| 1371 S390OperandGenerator g(selector); | 1426 S390OperandGenerator g(selector); |
| 1372 Node* left = node->InputAt(0); | 1427 Node* left = node->InputAt(0); |
| 1373 Node* right = node->InputAt(1); | 1428 Node* right = node->InputAt(1); |
| (...skipping 86 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1460 // <Operation> is either nullptr, which means there's no use of the | 1515 // <Operation> is either nullptr, which means there's no use of the |
| 1461 // actual value, or was already defined, which means it is scheduled | 1516 // actual value, or was already defined, which means it is scheduled |
| 1462 // *AFTER* this branch). | 1517 // *AFTER* this branch). |
| 1463 Node* const node = value->InputAt(0); | 1518 Node* const node = value->InputAt(0); |
| 1464 Node* const result = NodeProperties::FindProjection(node, 0); | 1519 Node* const result = NodeProperties::FindProjection(node, 0); |
| 1465 if (result == nullptr || selector->IsDefined(result)) { | 1520 if (result == nullptr || selector->IsDefined(result)) { |
| 1466 switch (node->opcode()) { | 1521 switch (node->opcode()) { |
| 1467 case IrOpcode::kInt32AddWithOverflow: | 1522 case IrOpcode::kInt32AddWithOverflow: |
| 1468 cont->OverwriteAndNegateIfEqual(kOverflow); | 1523 cont->OverwriteAndNegateIfEqual(kOverflow); |
| 1469 return VisitBinop<Int32BinopMatcher>( | 1524 return VisitBinop<Int32BinopMatcher>( |
| 1470 selector, node, kS390_Add32, kInt16Imm, cont); | 1525 selector, node, kS390_Add32, kInt32Imm, cont); |
| 1471 case IrOpcode::kInt32SubWithOverflow: | 1526 case IrOpcode::kInt32SubWithOverflow: |
| 1472 cont->OverwriteAndNegateIfEqual(kOverflow); | 1527 cont->OverwriteAndNegateIfEqual(kOverflow); |
| 1473 return VisitBinop<Int32BinopMatcher>( | 1528 return VisitBinop<Int32BinopMatcher>( |
| 1474 selector, node, kS390_Sub32, kInt16Imm_Negate, cont); | 1529 selector, node, kS390_Sub32, kInt32Imm_Negate, cont); |
| 1475 case IrOpcode::kInt32MulWithOverflow: | 1530 case IrOpcode::kInt32MulWithOverflow: |
| 1476 cont->OverwriteAndNegateIfEqual(kNotEqual); | 1531 cont->OverwriteAndNegateIfEqual(kNotEqual); |
| 1477 return EmitInt32MulWithOverflow(selector, node, cont); | 1532 return EmitInt32MulWithOverflow(selector, node, cont); |
| 1478 #if V8_TARGET_ARCH_S390X | 1533 #if V8_TARGET_ARCH_S390X |
| 1479 case IrOpcode::kInt64AddWithOverflow: | 1534 case IrOpcode::kInt64AddWithOverflow: |
| 1480 cont->OverwriteAndNegateIfEqual(kOverflow); | 1535 cont->OverwriteAndNegateIfEqual(kOverflow); |
| 1481 return VisitBinop<Int64BinopMatcher>( | 1536 return VisitBinop<Int64BinopMatcher>( |
| 1482 selector, node, kS390_Add64, kInt16Imm, cont); | 1537 selector, node, kS390_Add64, kInt32Imm, cont); |
| 1483 case IrOpcode::kInt64SubWithOverflow: | 1538 case IrOpcode::kInt64SubWithOverflow: |
| 1484 cont->OverwriteAndNegateIfEqual(kOverflow); | 1539 cont->OverwriteAndNegateIfEqual(kOverflow); |
| 1485 return VisitBinop<Int64BinopMatcher>( | 1540 return VisitBinop<Int64BinopMatcher>( |
| 1486 selector, node, kS390_Sub64, kInt16Imm_Negate, cont); | 1541 selector, node, kS390_Sub64, kInt32Imm_Negate, cont); |
| 1487 #endif | 1542 #endif |
| 1488 default: | 1543 default: |
| 1489 break; | 1544 break; |
| 1490 } | 1545 } |
| 1491 } | 1546 } |
| 1492 } | 1547 } |
| 1493 break; | 1548 break; |
| 1494 case IrOpcode::kInt32Sub: | 1549 case IrOpcode::kInt32Sub: |
| 1495 return VisitWord32Compare(selector, value, cont); | 1550 return VisitWord32Compare(selector, value, cont); |
| 1496 case IrOpcode::kWord32And: | 1551 case IrOpcode::kWord32And: |
| 1497 return VisitWordCompare(selector, value, kS390_Tst32, cont, true, | 1552 return VisitWordCompare(selector, value, kS390_Tst32, cont, true, |
| 1498 kInt16Imm_Unsigned); | 1553 kUint32Imm); |
| 1499 // TODO(mbrandy): Handle? | 1554 // TODO(mbrandy): Handle? |
| 1500 // case IrOpcode::kInt32Add: | 1555 // case IrOpcode::kInt32Add: |
| 1501 // case IrOpcode::kWord32Or: | 1556 // case IrOpcode::kWord32Or: |
| 1502 // case IrOpcode::kWord32Xor: | 1557 // case IrOpcode::kWord32Xor: |
| 1503 // case IrOpcode::kWord32Sar: | 1558 // case IrOpcode::kWord32Sar: |
| 1504 // case IrOpcode::kWord32Shl: | 1559 // case IrOpcode::kWord32Shl: |
| 1505 // case IrOpcode::kWord32Shr: | 1560 // case IrOpcode::kWord32Shr: |
| 1506 // case IrOpcode::kWord32Ror: | 1561 // case IrOpcode::kWord32Ror: |
| 1507 #if V8_TARGET_ARCH_S390X | 1562 #if V8_TARGET_ARCH_S390X |
| 1508 case IrOpcode::kInt64Sub: | 1563 case IrOpcode::kInt64Sub: |
| 1509 return VisitWord64Compare(selector, value, cont); | 1564 return VisitWord64Compare(selector, value, cont); |
| 1510 case IrOpcode::kWord64And: | 1565 case IrOpcode::kWord64And: |
| 1511 return VisitWordCompare(selector, value, kS390_Tst64, cont, true, | 1566 return VisitWordCompare(selector, value, kS390_Tst64, cont, true, |
| 1512 kInt16Imm_Unsigned); | 1567 kUint32Imm); |
| 1513 // TODO(mbrandy): Handle? | 1568 // TODO(mbrandy): Handle? |
| 1514 // case IrOpcode::kInt64Add: | 1569 // case IrOpcode::kInt64Add: |
| 1515 // case IrOpcode::kWord64Or: | 1570 // case IrOpcode::kWord64Or: |
| 1516 // case IrOpcode::kWord64Xor: | 1571 // case IrOpcode::kWord64Xor: |
| 1517 // case IrOpcode::kWord64Sar: | 1572 // case IrOpcode::kWord64Sar: |
| 1518 // case IrOpcode::kWord64Shl: | 1573 // case IrOpcode::kWord64Shl: |
| 1519 // case IrOpcode::kWord64Shr: | 1574 // case IrOpcode::kWord64Shr: |
| 1520 // case IrOpcode::kWord64Ror: | 1575 // case IrOpcode::kWord64Ror: |
| 1521 #endif | 1576 #endif |
| 1522 default: | 1577 default: |
| (...skipping 331 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1854 // static | 1909 // static |
| 1855 MachineOperatorBuilder::AlignmentRequirements | 1910 MachineOperatorBuilder::AlignmentRequirements |
| 1856 InstructionSelector::AlignmentRequirements() { | 1911 InstructionSelector::AlignmentRequirements() { |
| 1857 return MachineOperatorBuilder::AlignmentRequirements:: | 1912 return MachineOperatorBuilder::AlignmentRequirements:: |
| 1858 FullUnalignedAccessSupport(); | 1913 FullUnalignedAccessSupport(); |
| 1859 } | 1914 } |
| 1860 | 1915 |
| 1861 } // namespace compiler | 1916 } // namespace compiler |
| 1862 } // namespace internal | 1917 } // namespace internal |
| 1863 } // namespace v8 | 1918 } // namespace v8 |
| OLD | NEW |