| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/compiler/code-generator.h" | 5 #include "src/compiler/code-generator.h" |
| 6 | 6 |
| 7 #include "src/ast/scopes.h" | 7 #include "src/ast/scopes.h" |
| 8 #include "src/compiler/code-generator-impl.h" | 8 #include "src/compiler/code-generator-impl.h" |
| 9 #include "src/compiler/gap-resolver.h" | 9 #include "src/compiler/gap-resolver.h" |
| 10 #include "src/compiler/node-matchers.h" | 10 #include "src/compiler/node-matchers.h" |
| 11 #include "src/compiler/osr.h" | 11 #include "src/compiler/osr.h" |
| 12 #include "src/ppc/macro-assembler-ppc.h" | 12 #include "src/s390/macro-assembler-s390.h" |
| 13 | 13 |
| 14 namespace v8 { | 14 namespace v8 { |
| 15 namespace internal { | 15 namespace internal { |
| 16 namespace compiler { | 16 namespace compiler { |
| 17 | 17 |
| 18 #define __ masm()-> | 18 #define __ masm()-> |
| 19 | 19 |
| 20 #define kScratchReg ip |
| 20 | 21 |
| 21 #define kScratchReg r11 | 22 // Adds S390-specific methods to convert InstructionOperands. |
| 22 | 23 class S390OperandConverter final : public InstructionOperandConverter { |
| 23 | |
| 24 // Adds PPC-specific methods to convert InstructionOperands. | |
| 25 class PPCOperandConverter final : public InstructionOperandConverter { | |
| 26 public: | 24 public: |
| 27 PPCOperandConverter(CodeGenerator* gen, Instruction* instr) | 25 S390OperandConverter(CodeGenerator* gen, Instruction* instr) |
| 28 : InstructionOperandConverter(gen, instr) {} | 26 : InstructionOperandConverter(gen, instr) {} |
| 29 | 27 |
| 30 size_t OutputCount() { return instr_->OutputCount(); } | 28 size_t OutputCount() { return instr_->OutputCount(); } |
| 31 | 29 |
| 32 RCBit OutputRCBit() const { | |
| 33 switch (instr_->flags_mode()) { | |
| 34 case kFlags_branch: | |
| 35 case kFlags_deoptimize: | |
| 36 case kFlags_set: | |
| 37 return SetRC; | |
| 38 case kFlags_none: | |
| 39 return LeaveRC; | |
| 40 } | |
| 41 UNREACHABLE(); | |
| 42 return LeaveRC; | |
| 43 } | |
| 44 | |
| 45 bool CompareLogical() const { | 30 bool CompareLogical() const { |
| 46 switch (instr_->flags_condition()) { | 31 switch (instr_->flags_condition()) { |
| 47 case kUnsignedLessThan: | 32 case kUnsignedLessThan: |
| 48 case kUnsignedGreaterThanOrEqual: | 33 case kUnsignedGreaterThanOrEqual: |
| 49 case kUnsignedLessThanOrEqual: | 34 case kUnsignedLessThanOrEqual: |
| 50 case kUnsignedGreaterThan: | 35 case kUnsignedGreaterThan: |
| 51 return true; | 36 return true; |
| 52 default: | 37 default: |
| 53 return false; | 38 return false; |
| 54 } | 39 } |
| 55 UNREACHABLE(); | 40 UNREACHABLE(); |
| 56 return false; | 41 return false; |
| 57 } | 42 } |
| 58 | 43 |
| 59 Operand InputImmediate(size_t index) { | 44 Operand InputImmediate(size_t index) { |
| 60 Constant constant = ToConstant(instr_->InputAt(index)); | 45 Constant constant = ToConstant(instr_->InputAt(index)); |
| 61 switch (constant.type()) { | 46 switch (constant.type()) { |
| 62 case Constant::kInt32: | 47 case Constant::kInt32: |
| 63 return Operand(constant.ToInt32()); | 48 return Operand(constant.ToInt32()); |
| 64 case Constant::kFloat32: | 49 case Constant::kFloat32: |
| 65 return Operand( | 50 return Operand( |
| 66 isolate()->factory()->NewNumber(constant.ToFloat32(), TENURED)); | 51 isolate()->factory()->NewNumber(constant.ToFloat32(), TENURED)); |
| 67 case Constant::kFloat64: | 52 case Constant::kFloat64: |
| 68 return Operand( | 53 return Operand( |
| 69 isolate()->factory()->NewNumber(constant.ToFloat64(), TENURED)); | 54 isolate()->factory()->NewNumber(constant.ToFloat64(), TENURED)); |
| 70 case Constant::kInt64: | 55 case Constant::kInt64: |
| 71 #if V8_TARGET_ARCH_PPC64 | 56 #if V8_TARGET_ARCH_S390X |
| 72 return Operand(constant.ToInt64()); | 57 return Operand(constant.ToInt64()); |
| 73 #endif | 58 #endif |
| 74 case Constant::kExternalReference: | 59 case Constant::kExternalReference: |
| 75 case Constant::kHeapObject: | 60 case Constant::kHeapObject: |
| 76 case Constant::kRpoNumber: | 61 case Constant::kRpoNumber: |
| 77 break; | 62 break; |
| 78 } | 63 } |
| 79 UNREACHABLE(); | 64 UNREACHABLE(); |
| 80 return Operand::Zero(); | 65 return Operand::Zero(); |
| 81 } | 66 } |
| (...skipping 21 matching lines...) |
| 103 | 88 |
| 104 MemOperand ToMemOperand(InstructionOperand* op) const { | 89 MemOperand ToMemOperand(InstructionOperand* op) const { |
| 105 DCHECK_NOT_NULL(op); | 90 DCHECK_NOT_NULL(op); |
| 106 DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot()); | 91 DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot()); |
| 107 FrameOffset offset = frame_access_state()->GetFrameOffset( | 92 FrameOffset offset = frame_access_state()->GetFrameOffset( |
| 108 AllocatedOperand::cast(op)->index()); | 93 AllocatedOperand::cast(op)->index()); |
| 109 return MemOperand(offset.from_stack_pointer() ? sp : fp, offset.offset()); | 94 return MemOperand(offset.from_stack_pointer() ? sp : fp, offset.offset()); |
| 110 } | 95 } |
| 111 }; | 96 }; |
| 112 | 97 |
| 113 | 98 static inline bool HasRegisterInput(Instruction* instr, int index) { |
| 114 static inline bool HasRegisterInput(Instruction* instr, size_t index) { | |
| 115 return instr->InputAt(index)->IsRegister(); | 99 return instr->InputAt(index)->IsRegister(); |
| 116 } | 100 } |
| 117 | 101 |
| 118 | |
| 119 namespace { | 102 namespace { |
| 120 | 103 |
| 121 class OutOfLineLoadNAN32 final : public OutOfLineCode { | 104 class OutOfLineLoadNAN32 final : public OutOfLineCode { |
| 122 public: | 105 public: |
| 123 OutOfLineLoadNAN32(CodeGenerator* gen, DoubleRegister result) | 106 OutOfLineLoadNAN32(CodeGenerator* gen, DoubleRegister result) |
| 124 : OutOfLineCode(gen), result_(result) {} | 107 : OutOfLineCode(gen), result_(result) {} |
| 125 | 108 |
| 126 void Generate() final { | 109 void Generate() final { |
| 127 __ LoadDoubleLiteral(result_, std::numeric_limits<float>::quiet_NaN(), | 110 __ LoadDoubleLiteral(result_, std::numeric_limits<float>::quiet_NaN(), |
| 128 kScratchReg); | 111 kScratchReg); |
| 129 } | 112 } |
| 130 | 113 |
| 131 private: | 114 private: |
| 132 DoubleRegister const result_; | 115 DoubleRegister const result_; |
| 133 }; | 116 }; |
| 134 | 117 |
| 135 | |
| 136 class OutOfLineLoadNAN64 final : public OutOfLineCode { | 118 class OutOfLineLoadNAN64 final : public OutOfLineCode { |
| 137 public: | 119 public: |
| 138 OutOfLineLoadNAN64(CodeGenerator* gen, DoubleRegister result) | 120 OutOfLineLoadNAN64(CodeGenerator* gen, DoubleRegister result) |
| 139 : OutOfLineCode(gen), result_(result) {} | 121 : OutOfLineCode(gen), result_(result) {} |
| 140 | 122 |
| 141 void Generate() final { | 123 void Generate() final { |
| 142 __ LoadDoubleLiteral(result_, std::numeric_limits<double>::quiet_NaN(), | 124 __ LoadDoubleLiteral(result_, std::numeric_limits<double>::quiet_NaN(), |
| 143 kScratchReg); | 125 kScratchReg); |
| 144 } | 126 } |
| 145 | 127 |
| 146 private: | 128 private: |
| 147 DoubleRegister const result_; | 129 DoubleRegister const result_; |
| 148 }; | 130 }; |
| 149 | 131 |
| 150 | |
| 151 class OutOfLineLoadZero final : public OutOfLineCode { | 132 class OutOfLineLoadZero final : public OutOfLineCode { |
| 152 public: | 133 public: |
| 153 OutOfLineLoadZero(CodeGenerator* gen, Register result) | 134 OutOfLineLoadZero(CodeGenerator* gen, Register result) |
| 154 : OutOfLineCode(gen), result_(result) {} | 135 : OutOfLineCode(gen), result_(result) {} |
| 155 | 136 |
| 156 void Generate() final { __ li(result_, Operand::Zero()); } | 137 void Generate() final { __ LoadImmP(result_, Operand::Zero()); } |
| 157 | 138 |
| 158 private: | 139 private: |
| 159 Register const result_; | 140 Register const result_; |
| 160 }; | 141 }; |
| 161 | 142 |
| 162 | |
| 163 class OutOfLineRecordWrite final : public OutOfLineCode { | 143 class OutOfLineRecordWrite final : public OutOfLineCode { |
| 164 public: | 144 public: |
| 165 OutOfLineRecordWrite(CodeGenerator* gen, Register object, Register offset, | 145 OutOfLineRecordWrite(CodeGenerator* gen, Register object, Register offset, |
| 166 Register value, Register scratch0, Register scratch1, | 146 Register value, Register scratch0, Register scratch1, |
| 167 RecordWriteMode mode) | 147 RecordWriteMode mode) |
| 168 : OutOfLineCode(gen), | 148 : OutOfLineCode(gen), |
| 169 object_(object), | 149 object_(object), |
| 170 offset_(offset), | 150 offset_(offset), |
| 171 offset_immediate_(0), | 151 offset_immediate_(0), |
| 172 value_(value), | 152 value_(value), |
| (...skipping 19 matching lines...) |
| 192 } | 172 } |
| 193 __ CheckPageFlag(value_, scratch0_, | 173 __ CheckPageFlag(value_, scratch0_, |
| 194 MemoryChunk::kPointersToHereAreInterestingMask, eq, | 174 MemoryChunk::kPointersToHereAreInterestingMask, eq, |
| 195 exit()); | 175 exit()); |
| 196 RememberedSetAction const remembered_set_action = | 176 RememberedSetAction const remembered_set_action = |
| 197 mode_ > RecordWriteMode::kValueIsMap ? EMIT_REMEMBERED_SET | 177 mode_ > RecordWriteMode::kValueIsMap ? EMIT_REMEMBERED_SET |
| 198 : OMIT_REMEMBERED_SET; | 178 : OMIT_REMEMBERED_SET; |
| 199 SaveFPRegsMode const save_fp_mode = | 179 SaveFPRegsMode const save_fp_mode = |
| 200 frame()->DidAllocateDoubleRegisters() ? kSaveFPRegs : kDontSaveFPRegs; | 180 frame()->DidAllocateDoubleRegisters() ? kSaveFPRegs : kDontSaveFPRegs; |
| 201 if (!frame()->needs_frame()) { | 181 if (!frame()->needs_frame()) { |
| 202 // We need to save and restore lr if the frame was elided. | 182 // We need to save and restore r14 if the frame was elided. |
| 203 __ mflr(scratch1_); | 183 __ Push(r14); |
| 204 __ Push(scratch1_); | |
| 205 } | 184 } |
| 206 RecordWriteStub stub(isolate(), object_, scratch0_, scratch1_, | 185 RecordWriteStub stub(isolate(), object_, scratch0_, scratch1_, |
| 207 remembered_set_action, save_fp_mode); | 186 remembered_set_action, save_fp_mode); |
| 208 if (offset_.is(no_reg)) { | 187 if (offset_.is(no_reg)) { |
| 209 __ addi(scratch1_, object_, Operand(offset_immediate_)); | 188 __ AddP(scratch1_, object_, Operand(offset_immediate_)); |
| 210 } else { | 189 } else { |
| 211 DCHECK_EQ(0, offset_immediate_); | 190 DCHECK_EQ(0, offset_immediate_); |
| 212 __ add(scratch1_, object_, offset_); | 191 __ AddP(scratch1_, object_, offset_); |
| 213 } | 192 } |
| 214 __ CallStub(&stub); | 193 __ CallStub(&stub); |
| 215 if (!frame()->needs_frame()) { | 194 if (!frame()->needs_frame()) { |
| 216 // We need to save and restore lr if the frame was elided. | 195 // We need to save and restore r14 if the frame was elided. |
| 217 __ Pop(scratch1_); | 196 __ Pop(r14); |
| 218 __ mtlr(scratch1_); | |
| 219 } | 197 } |
| 220 } | 198 } |
| 221 | 199 |
| 222 private: | 200 private: |
| 223 Register const object_; | 201 Register const object_; |
| 224 Register const offset_; | 202 Register const offset_; |
| 225 int32_t const offset_immediate_; // Valid if offset_.is(no_reg). | 203 int32_t const offset_immediate_; // Valid if offset_.is(no_reg). |
| 226 Register const value_; | 204 Register const value_; |
| 227 Register const scratch0_; | 205 Register const scratch0_; |
| 228 Register const scratch1_; | 206 Register const scratch1_; |
| 229 RecordWriteMode const mode_; | 207 RecordWriteMode const mode_; |
| 230 }; | 208 }; |
| 231 | 209 |
| 232 | |
| 233 Condition FlagsConditionToCondition(FlagsCondition condition, ArchOpcode op) { | 210 Condition FlagsConditionToCondition(FlagsCondition condition, ArchOpcode op) { |
| 234 switch (condition) { | 211 switch (condition) { |
| 235 case kEqual: | 212 case kEqual: |
| 236 return eq; | 213 return eq; |
| 237 case kNotEqual: | 214 case kNotEqual: |
| 238 return ne; | 215 return ne; |
| 239 case kSignedLessThan: | 216 case kSignedLessThan: |
| 240 case kUnsignedLessThan: | 217 case kUnsignedLessThan: |
| 241 return lt; | 218 return lt; |
| 242 case kSignedGreaterThanOrEqual: | 219 case kSignedGreaterThanOrEqual: |
| 243 case kUnsignedGreaterThanOrEqual: | 220 case kUnsignedGreaterThanOrEqual: |
| 244 return ge; | 221 return ge; |
| 245 case kSignedLessThanOrEqual: | 222 case kSignedLessThanOrEqual: |
| 246 case kUnsignedLessThanOrEqual: | 223 case kUnsignedLessThanOrEqual: |
| 247 return le; | 224 return le; |
| 248 case kSignedGreaterThan: | 225 case kSignedGreaterThan: |
| 249 case kUnsignedGreaterThan: | 226 case kUnsignedGreaterThan: |
| 250 return gt; | 227 return gt; |
| 251 case kOverflow: | 228 case kOverflow: |
| 252 // Overflow checked for add/sub only. | 229 // Overflow checked for AddP/SubP only. |
| 253 switch (op) { | 230 switch (op) { |
| 254 #if V8_TARGET_ARCH_PPC64 | 231 #if V8_TARGET_ARCH_S390X |
| 255 case kPPC_Add: | 232 case kS390_Add: |
| 256 case kPPC_Sub: | 233 case kS390_Sub: |
| 257 return lt; | 234 return lt; |
| 258 #endif | 235 #endif |
| 259 case kPPC_AddWithOverflow32: | 236 case kS390_AddWithOverflow32: |
| 260 case kPPC_SubWithOverflow32: | 237 case kS390_SubWithOverflow32: |
| 261 #if V8_TARGET_ARCH_PPC64 | 238 #if V8_TARGET_ARCH_S390X |
| 262 return ne; | 239 return ne; |
| 263 #else | 240 #else |
| 264 return lt; | 241 return lt; |
| 265 #endif | 242 #endif |
| 266 default: | 243 default: |
| 267 break; | 244 break; |
| 268 } | 245 } |
| 269 break; | 246 break; |
| 270 case kNotOverflow: | 247 case kNotOverflow: |
| 271 switch (op) { | 248 switch (op) { |
| 272 #if V8_TARGET_ARCH_PPC64 | 249 #if V8_TARGET_ARCH_S390X |
| 273 case kPPC_Add: | 250 case kS390_Add: |
| 274 case kPPC_Sub: | 251 case kS390_Sub: |
| 275 return ge; | 252 return ge; |
| 276 #endif | 253 #endif |
| 277 case kPPC_AddWithOverflow32: | 254 case kS390_AddWithOverflow32: |
| 278 case kPPC_SubWithOverflow32: | 255 case kS390_SubWithOverflow32: |
| 279 #if V8_TARGET_ARCH_PPC64 | 256 #if V8_TARGET_ARCH_S390X |
| 280 return eq; | 257 return eq; |
| 281 #else | 258 #else |
| 282 return ge; | 259 return ge; |
| 283 #endif | 260 #endif |
| 284 default: | 261 default: |
| 285 break; | 262 break; |
| 286 } | 263 } |
| 287 break; | 264 break; |
| 288 default: | 265 default: |
| 289 break; | 266 break; |
| 290 } | 267 } |
| 291 UNREACHABLE(); | 268 UNREACHABLE(); |
| 292 return kNoCondition; | 269 return kNoCondition; |
| 293 } | 270 } |
| 294 | 271 |
| 295 } // namespace | 272 } // namespace |
| 296 | 273 |
| 297 #define ASSEMBLE_FLOAT_UNOP_RC(asm_instr) \ | 274 #define ASSEMBLE_FLOAT_UNOP(asm_instr) \ |
| 275 do { \ |
| 276 __ asm_instr(i.OutputDoubleRegister(), i.InputDoubleRegister(0)); \ |
| 277 } while (0) |
| 278 |
| 279 #define ASSEMBLE_FLOAT_BINOP(asm_instr) \ |
| 298 do { \ | 280 do { \ |
| 299 __ asm_instr(i.OutputDoubleRegister(), i.InputDoubleRegister(0), \ | 281 __ asm_instr(i.OutputDoubleRegister(), i.InputDoubleRegister(0), \ |
| 300 i.OutputRCBit()); \ | 282 i.InputDoubleRegister(1)); \ |
| 301 } while (0) | 283 } while (0) |
| 302 | 284 |
| 303 | |
| 304 #define ASSEMBLE_FLOAT_BINOP_RC(asm_instr) \ | |
| 305 do { \ | |
| 306 __ asm_instr(i.OutputDoubleRegister(), i.InputDoubleRegister(0), \ | |
| 307 i.InputDoubleRegister(1), i.OutputRCBit()); \ | |
| 308 } while (0) | |
| 309 | |
| 310 | |
| 311 #define ASSEMBLE_BINOP(asm_instr_reg, asm_instr_imm) \ | 285 #define ASSEMBLE_BINOP(asm_instr_reg, asm_instr_imm) \ |
| 312 do { \ | 286 do { \ |
| 313 if (HasRegisterInput(instr, 1)) { \ | 287 if (HasRegisterInput(instr, 1)) { \ |
| 314 __ asm_instr_reg(i.OutputRegister(), i.InputRegister(0), \ | 288 __ asm_instr_reg(i.OutputRegister(), i.InputRegister(0), \ |
| 315 i.InputRegister(1)); \ | 289 i.InputRegister(1)); \ |
| 316 } else { \ | 290 } else { \ |
| 317 __ asm_instr_imm(i.OutputRegister(), i.InputRegister(0), \ | 291 __ asm_instr_imm(i.OutputRegister(), i.InputRegister(0), \ |
| 318 i.InputImmediate(1)); \ | 292 i.InputImmediate(1)); \ |
| 319 } \ | 293 } \ |
| 320 } while (0) | 294 } while (0) |
| 321 | 295 |
| 322 | 296 #define ASSEMBLE_BINOP_INT(asm_instr_reg, asm_instr_imm) \ |
| 323 #define ASSEMBLE_BINOP_RC(asm_instr_reg, asm_instr_imm) \ | |
| 324 do { \ | 297 do { \ |
| 325 if (HasRegisterInput(instr, 1)) { \ | 298 if (HasRegisterInput(instr, 1)) { \ |
| 326 __ asm_instr_reg(i.OutputRegister(), i.InputRegister(0), \ | 299 __ asm_instr_reg(i.OutputRegister(), i.InputRegister(0), \ |
| 327 i.InputRegister(1), i.OutputRCBit()); \ | 300 i.InputRegister(1)); \ |
| 328 } else { \ | 301 } else { \ |
| 329 __ asm_instr_imm(i.OutputRegister(), i.InputRegister(0), \ | 302 __ asm_instr_imm(i.OutputRegister(), i.InputRegister(0), \ |
| 330 i.InputImmediate(1), i.OutputRCBit()); \ | 303 i.InputInt32(1)); \ |
| 331 } \ | 304 } \ |
| 332 } while (0) | 305 } while (0) |
| 333 | 306 |
| 334 | |
| 335 #define ASSEMBLE_BINOP_INT_RC(asm_instr_reg, asm_instr_imm) \ | |
| 336 do { \ | |
| 337 if (HasRegisterInput(instr, 1)) { \ | |
| 338 __ asm_instr_reg(i.OutputRegister(), i.InputRegister(0), \ | |
| 339 i.InputRegister(1), i.OutputRCBit()); \ | |
| 340 } else { \ | |
| 341 __ asm_instr_imm(i.OutputRegister(), i.InputRegister(0), \ | |
| 342 i.InputInt32(1), i.OutputRCBit()); \ | |
| 343 } \ | |
| 344 } while (0) | |
| 345 | |
| 346 | |
| 347 #define ASSEMBLE_ADD_WITH_OVERFLOW() \ | 307 #define ASSEMBLE_ADD_WITH_OVERFLOW() \ |
| 348 do { \ | 308 do { \ |
| 349 if (HasRegisterInput(instr, 1)) { \ | 309 if (HasRegisterInput(instr, 1)) { \ |
| 350 __ AddAndCheckForOverflow(i.OutputRegister(), i.InputRegister(0), \ | 310 __ AddAndCheckForOverflow(i.OutputRegister(), i.InputRegister(0), \ |
| 351 i.InputRegister(1), kScratchReg, r0); \ | 311 i.InputRegister(1), kScratchReg, r0); \ |
| 352 } else { \ | 312 } else { \ |
| 353 __ AddAndCheckForOverflow(i.OutputRegister(), i.InputRegister(0), \ | 313 __ AddAndCheckForOverflow(i.OutputRegister(), i.InputRegister(0), \ |
| 354 i.InputInt32(1), kScratchReg, r0); \ | 314 i.InputInt32(1), kScratchReg, r0); \ |
| 355 } \ | 315 } \ |
| 356 } while (0) | 316 } while (0) |
| 357 | 317 |
| 358 | |
| 359 #define ASSEMBLE_SUB_WITH_OVERFLOW() \ | 318 #define ASSEMBLE_SUB_WITH_OVERFLOW() \ |
| 360 do { \ | 319 do { \ |
| 361 if (HasRegisterInput(instr, 1)) { \ | 320 if (HasRegisterInput(instr, 1)) { \ |
| 362 __ SubAndCheckForOverflow(i.OutputRegister(), i.InputRegister(0), \ | 321 __ SubAndCheckForOverflow(i.OutputRegister(), i.InputRegister(0), \ |
| 363 i.InputRegister(1), kScratchReg, r0); \ | 322 i.InputRegister(1), kScratchReg, r0); \ |
| 364 } else { \ | 323 } else { \ |
| 365 __ AddAndCheckForOverflow(i.OutputRegister(), i.InputRegister(0), \ | 324 __ AddAndCheckForOverflow(i.OutputRegister(), i.InputRegister(0), \ |
| 366 -i.InputInt32(1), kScratchReg, r0); \ | 325 -i.InputInt32(1), kScratchReg, r0); \ |
| 367 } \ | 326 } \ |
| 368 } while (0) | 327 } while (0) |
| 369 | 328 |
| 370 | 329 #if V8_TARGET_ARCH_S390X |
| 371 #if V8_TARGET_ARCH_PPC64 | 330 #define ASSEMBLE_ADD_WITH_OVERFLOW32() \ |
| 372 #define ASSEMBLE_ADD_WITH_OVERFLOW32() \ | 331 do { \ |
| 373 do { \ | 332 ASSEMBLE_BINOP(AddP, AddP); \ |
| 374 ASSEMBLE_BINOP(add, addi); \ | 333 __ TestIfInt32(i.OutputRegister(), r0); \ |
| 375 __ TestIfInt32(i.OutputRegister(), r0, cr0); \ | |
| 376 } while (0) | 334 } while (0) |
| 377 | 335 |
| 378 | 336 #define ASSEMBLE_SUB_WITH_OVERFLOW32() \ |
| 379 #define ASSEMBLE_SUB_WITH_OVERFLOW32() \ | 337 do { \ |
| 380 do { \ | 338 ASSEMBLE_BINOP(SubP, SubP); \ |
| 381 ASSEMBLE_BINOP(sub, subi); \ | 339 __ TestIfInt32(i.OutputRegister(), r0); \ |
| 382 __ TestIfInt32(i.OutputRegister(), r0, cr0); \ | |
| 383 } while (0) | 340 } while (0) |
| 384 #else | 341 #else |
| 385 #define ASSEMBLE_ADD_WITH_OVERFLOW32 ASSEMBLE_ADD_WITH_OVERFLOW | 342 #define ASSEMBLE_ADD_WITH_OVERFLOW32 ASSEMBLE_ADD_WITH_OVERFLOW |
| 386 #define ASSEMBLE_SUB_WITH_OVERFLOW32 ASSEMBLE_SUB_WITH_OVERFLOW | 343 #define ASSEMBLE_SUB_WITH_OVERFLOW32 ASSEMBLE_SUB_WITH_OVERFLOW |
| 387 #endif | 344 #endif |
| 388 | 345 |
| 389 | 346 #define ASSEMBLE_COMPARE(cmp_instr, cmpl_instr) \ |
| 390 #define ASSEMBLE_COMPARE(cmp_instr, cmpl_instr) \ | 347 do { \ |
| 391 do { \ | 348 if (HasRegisterInput(instr, 1)) { \ |
| 392 const CRegister cr = cr0; \ | 349 if (i.CompareLogical()) { \ |
| 393 if (HasRegisterInput(instr, 1)) { \ | 350 __ cmpl_instr(i.InputRegister(0), i.InputRegister(1)); \ |
| 394 if (i.CompareLogical()) { \ | 351 } else { \ |
| 395 __ cmpl_instr(i.InputRegister(0), i.InputRegister(1), cr); \ | 352 __ cmp_instr(i.InputRegister(0), i.InputRegister(1)); \ |
| 396 } else { \ | 353 } \ |
| 397 __ cmp_instr(i.InputRegister(0), i.InputRegister(1), cr); \ | 354 } else { \ |
| 398 } \ | 355 if (i.CompareLogical()) { \ |
| 399 } else { \ | 356 __ cmpl_instr(i.InputRegister(0), i.InputImmediate(1)); \ |
| 400 if (i.CompareLogical()) { \ | 357 } else { \ |
| 401 __ cmpl_instr##i(i.InputRegister(0), i.InputImmediate(1), cr); \ | 358 __ cmp_instr(i.InputRegister(0), i.InputImmediate(1)); \ |
| 402 } else { \ | 359 } \ |
| 403 __ cmp_instr##i(i.InputRegister(0), i.InputImmediate(1), cr); \ | 360 } \ |
| 404 } \ | |
| 405 } \ | |
| 406 DCHECK_EQ(SetRC, i.OutputRCBit()); \ | |
| 407 } while (0) | 361 } while (0) |
| 408 | 362 |
| 409 | 363 #define ASSEMBLE_FLOAT_COMPARE(cmp_instr) \ |
| 410 #define ASSEMBLE_FLOAT_COMPARE(cmp_instr) \ | 364 do { \ |
| 411 do { \ | 365 __ cmp_instr(i.InputDoubleRegister(0), i.InputDoubleRegister(1)); \ |
| 412 const CRegister cr = cr0; \ | |
| 413 __ cmp_instr(i.InputDoubleRegister(0), i.InputDoubleRegister(1), cr); \ | |
| 414 DCHECK_EQ(SetRC, i.OutputRCBit()); \ | |
| 415 } while (0) | 366 } while (0) |
| 416 | 367 |
| 417 | 368 // Divide instruction dr will implicitly use register pair |
| 418 #define ASSEMBLE_MODULO(div_instr, mul_instr) \ | 369 // r0 & r1 below. |
| 419 do { \ | 370 // R0:R1 = R1 / divisor - R0 remainder |
| 420 const Register scratch = kScratchReg; \ | 371 // Copy remainder to output reg |
| 421 __ div_instr(scratch, i.InputRegister(0), i.InputRegister(1)); \ | 372 #define ASSEMBLE_MODULO(div_instr, shift_instr) \ |
| 422 __ mul_instr(scratch, scratch, i.InputRegister(1)); \ | 373 do { \ |
| 423 __ sub(i.OutputRegister(), i.InputRegister(0), scratch, LeaveOE, \ | 374 __ LoadRR(r0, i.InputRegister(0)); \ |
| 424 i.OutputRCBit()); \ | 375 __ shift_instr(r0, Operand(32)); \ |
| 376 __ div_instr(r0, i.InputRegister(1)); \ |
| 377 __ ltr(i.OutputRegister(), r0); \ |
| 425 } while (0) | 378 } while (0) |
| 426 | 379 |
| 427 | |
| 428 #define ASSEMBLE_FLOAT_MODULO() \ | 380 #define ASSEMBLE_FLOAT_MODULO() \ |
| 429 do { \ | 381 do { \ |
| 430 FrameScope scope(masm(), StackFrame::MANUAL); \ | 382 FrameScope scope(masm(), StackFrame::MANUAL); \ |
| 431 __ PrepareCallCFunction(0, 2, kScratchReg); \ | 383 __ PrepareCallCFunction(0, 2, kScratchReg); \ |
| 432 __ MovToFloatParameters(i.InputDoubleRegister(0), \ | 384 __ MovToFloatParameters(i.InputDoubleRegister(0), \ |
| 433 i.InputDoubleRegister(1)); \ | 385 i.InputDoubleRegister(1)); \ |
| 434 __ CallCFunction(ExternalReference::mod_two_doubles_operation(isolate()), \ | 386 __ CallCFunction(ExternalReference::mod_two_doubles_operation(isolate()), \ |
| 435 0, 2); \ | 387 0, 2); \ |
| 436 __ MovFromFloatResult(i.OutputDoubleRegister()); \ | 388 __ MovFromFloatResult(i.OutputDoubleRegister()); \ |
| 437 DCHECK_EQ(LeaveRC, i.OutputRCBit()); \ | |
| 438 } while (0) | 389 } while (0) |
| 439 | 390 |
| 440 | 391 #define ASSEMBLE_FLOAT_MAX(double_scratch_reg, general_scratch_reg) \ |
| 441 #define ASSEMBLE_FLOAT_MAX(scratch_reg) \ | 392 do { \ |
| 442 do { \ | 393 Label ge, done; \ |
| 443 __ fsub(scratch_reg, i.InputDoubleRegister(0), i.InputDoubleRegister(1)); \ | 394 __ cdbr(i.InputDoubleRegister(0), i.InputDoubleRegister(1)); \ |
| 444 __ fsel(i.OutputDoubleRegister(), scratch_reg, i.InputDoubleRegister(0), \ | 395 __ bge(&ge, Label::kNear); \ |
| 445 i.InputDoubleRegister(1)); \ | 396 __ Move(i.OutputDoubleRegister(), i.InputDoubleRegister(1)); \ |
| 397 __ b(&done, Label::kNear); \ |
| 398 __ bind(&ge); \ |
| 399 __ Move(i.OutputDoubleRegister(), i.InputDoubleRegister(0)); \ |
| 400 __ bind(&done); \ |
| 446 } while (0) | 401 } while (0) |
| 447 | 402 |
| 448 | 403 #define ASSEMBLE_FLOAT_MIN(double_scratch_reg, general_scratch_reg) \ |
| 449 #define ASSEMBLE_FLOAT_MIN(scratch_reg) \ | 404 do { \ |
| 450 do { \ | 405 Label ge, done; \ |
| 451 __ fsub(scratch_reg, i.InputDoubleRegister(0), i.InputDoubleRegister(1)); \ | 406 __ cdbr(i.InputDoubleRegister(0), i.InputDoubleRegister(1)); \ |
| 452 __ fsel(i.OutputDoubleRegister(), scratch_reg, i.InputDoubleRegister(1), \ | 407 __ bge(&ge, Label::kNear); \ |
| 453 i.InputDoubleRegister(0)); \ | 408 __ Move(i.OutputDoubleRegister(), i.InputDoubleRegister(0)); \ |
| 409 __ b(&done, Label::kNear); \ |
| 410 __ bind(&ge); \ |
| 411 __ Move(i.OutputDoubleRegister(), i.InputDoubleRegister(1)); \ |
| 412 __ bind(&done); \ |
| 454 } while (0) | 413 } while (0) |
| 455 | 414 |
| 456 | 415 // Only MRI mode is available for these instructions |
| 457 #define ASSEMBLE_LOAD_FLOAT(asm_instr, asm_instrx) \ | 416 #define ASSEMBLE_LOAD_FLOAT(asm_instr) \ |
| 458 do { \ | 417 do { \ |
| 459 DoubleRegister result = i.OutputDoubleRegister(); \ | 418 DoubleRegister result = i.OutputDoubleRegister(); \ |
| 460 AddressingMode mode = kMode_None; \ | 419 AddressingMode mode = kMode_None; \ |
| 461 MemOperand operand = i.MemoryOperand(&mode); \ | 420 MemOperand operand = i.MemoryOperand(&mode); \ |
| 462 if (mode == kMode_MRI) { \ | 421 __ asm_instr(result, operand); \ |
| 463 __ asm_instr(result, operand); \ | |
| 464 } else { \ | |
| 465 __ asm_instrx(result, operand); \ | |
| 466 } \ | |
| 467 DCHECK_EQ(LeaveRC, i.OutputRCBit()); \ | |
| 468 } while (0) | 422 } while (0) |
| 469 | 423 |
| 470 | 424 #define ASSEMBLE_LOAD_INTEGER(asm_instr) \ |
| 471 #define ASSEMBLE_LOAD_INTEGER(asm_instr, asm_instrx) \ | 425 do { \ |
| 472 do { \ | 426 Register result = i.OutputRegister(); \ |
| 473 Register result = i.OutputRegister(); \ | 427 AddressingMode mode = kMode_None; \ |
| 474 AddressingMode mode = kMode_None; \ | 428 MemOperand operand = i.MemoryOperand(&mode); \ |
| 475 MemOperand operand = i.MemoryOperand(&mode); \ | 429 __ asm_instr(result, operand); \ |
| 476 if (mode == kMode_MRI) { \ | |
| 477 __ asm_instr(result, operand); \ | |
| 478 } else { \ | |
| 479 __ asm_instrx(result, operand); \ | |
| 480 } \ | |
| 481 DCHECK_EQ(LeaveRC, i.OutputRCBit()); \ | |
| 482 } while (0) | 430 } while (0) |
| 483 | 431 |
| 484 | |
| 485 #define ASSEMBLE_STORE_FLOAT32() \ | 432 #define ASSEMBLE_STORE_FLOAT32() \ |
| 486 do { \ | 433 do { \ |
| 487 size_t index = 0; \ | 434 size_t index = 0; \ |
| 488 AddressingMode mode = kMode_None; \ | 435 AddressingMode mode = kMode_None; \ |
| 489 MemOperand operand = i.MemoryOperand(&mode, &index); \ | 436 MemOperand operand = i.MemoryOperand(&mode, &index); \ |
| 490 DoubleRegister value = i.InputDoubleRegister(index); \ | 437 DoubleRegister value = i.InputDoubleRegister(index); \ |
| 491 __ frsp(kScratchDoubleReg, value); \ | 438 __ StoreFloat32(value, operand); \ |
| 492 if (mode == kMode_MRI) { \ | |
| 493 __ stfs(kScratchDoubleReg, operand); \ | |
| 494 } else { \ | |
| 495 __ stfsx(kScratchDoubleReg, operand); \ | |
| 496 } \ | |
| 497 DCHECK_EQ(LeaveRC, i.OutputRCBit()); \ | |
| 498 } while (0) | 439 } while (0) |
| 499 | 440 |
| 500 | |
| 501 #define ASSEMBLE_STORE_DOUBLE() \ | 441 #define ASSEMBLE_STORE_DOUBLE() \ |
| 502 do { \ | 442 do { \ |
| 503 size_t index = 0; \ | 443 size_t index = 0; \ |
| 504 AddressingMode mode = kMode_None; \ | 444 AddressingMode mode = kMode_None; \ |
| 505 MemOperand operand = i.MemoryOperand(&mode, &index); \ | 445 MemOperand operand = i.MemoryOperand(&mode, &index); \ |
| 506 DoubleRegister value = i.InputDoubleRegister(index); \ | 446 DoubleRegister value = i.InputDoubleRegister(index); \ |
| 507 if (mode == kMode_MRI) { \ | 447 __ StoreDouble(value, operand); \ |
| 508 __ stfd(value, operand); \ | |
| 509 } else { \ | |
| 510 __ stfdx(value, operand); \ | |
| 511 } \ | |
| 512 DCHECK_EQ(LeaveRC, i.OutputRCBit()); \ | |
| 513 } while (0) | 448 } while (0) |
| 514 | 449 |
| 515 | 450 #define ASSEMBLE_STORE_INTEGER(asm_instr) \ |
| 516 #define ASSEMBLE_STORE_INTEGER(asm_instr, asm_instrx) \ | |
| 517 do { \ | 451 do { \ |
| 518 size_t index = 0; \ | 452 size_t index = 0; \ |
| 519 AddressingMode mode = kMode_None; \ | 453 AddressingMode mode = kMode_None; \ |
| 520 MemOperand operand = i.MemoryOperand(&mode, &index); \ | 454 MemOperand operand = i.MemoryOperand(&mode, &index); \ |
| 521 Register value = i.InputRegister(index); \ | 455 Register value = i.InputRegister(index); \ |
| 522 if (mode == kMode_MRI) { \ | 456 __ asm_instr(value, operand); \ |
| 523 __ asm_instr(value, operand); \ | |
| 524 } else { \ | |
| 525 __ asm_instrx(value, operand); \ | |
| 526 } \ | |
| 527 DCHECK_EQ(LeaveRC, i.OutputRCBit()); \ | |
| 528 } while (0) | 457 } while (0) |
| 529 | 458 |
| 530 | |
| 531 // TODO(mbrandy): fix paths that produce garbage in offset's upper 32-bits. | 459 // TODO(mbrandy): fix paths that produce garbage in offset's upper 32-bits. |
| 532 #define ASSEMBLE_CHECKED_LOAD_FLOAT(asm_instr, asm_instrx, width) \ | 460 #define ASSEMBLE_CHECKED_LOAD_FLOAT(asm_instr, width) \ |
| 533 do { \ | 461 do { \ |
| 534 DoubleRegister result = i.OutputDoubleRegister(); \ | 462 DoubleRegister result = i.OutputDoubleRegister(); \ |
| 535 size_t index = 0; \ | 463 size_t index = 0; \ |
| 536 AddressingMode mode = kMode_None; \ | 464 AddressingMode mode = kMode_None; \ |
| 537 MemOperand operand = i.MemoryOperand(&mode, index); \ | 465 MemOperand operand = i.MemoryOperand(&mode, index); \ |
| 538 DCHECK_EQ(kMode_MRR, mode); \ | |
| 539 Register offset = operand.rb(); \ | 466 Register offset = operand.rb(); \ |
| 540 __ extsw(offset, offset); \ | 467 __ lgfr(offset, offset); \ |
| 541 if (HasRegisterInput(instr, 2)) { \ | 468 if (HasRegisterInput(instr, 2)) { \ |
| 542 __ cmplw(offset, i.InputRegister(2)); \ | 469 __ CmpLogical32(offset, i.InputRegister(2)); \ |
| 543 } else { \ | 470 } else { \ |
| 544 __ cmplwi(offset, i.InputImmediate(2)); \ | 471 __ CmpLogical32(offset, i.InputImmediate(2)); \ |
| 545 } \ | 472 } \ |
| 546 auto ool = new (zone()) OutOfLineLoadNAN##width(this, result); \ | 473 auto ool = new (zone()) OutOfLineLoadNAN##width(this, result); \ |
| 547 __ bge(ool->entry()); \ | 474 __ bge(ool->entry()); \ |
| 548 if (mode == kMode_MRI) { \ | 475 __ asm_instr(result, operand); \ |
| 549 __ asm_instr(result, operand); \ | |
| 550 } else { \ | |
| 551 __ asm_instrx(result, operand); \ | |
| 552 } \ | |
| 553 __ bind(ool->exit()); \ | 476 __ bind(ool->exit()); \ |
| 554 DCHECK_EQ(LeaveRC, i.OutputRCBit()); \ | |
| 555 } while (0) | 477 } while (0) |
| 556 | 478 |
| 557 | |
| 558 // TODO(mbrandy): fix paths that produce garbage in offset's upper 32-bits. | 479 // TODO(mbrandy): fix paths that produce garbage in offset's upper 32-bits. |
| 559 #define ASSEMBLE_CHECKED_LOAD_INTEGER(asm_instr, asm_instrx) \ | 480 #define ASSEMBLE_CHECKED_LOAD_INTEGER(asm_instr) \ |
| 560 do { \ | 481 do { \ |
| 561 Register result = i.OutputRegister(); \ | 482 Register result = i.OutputRegister(); \ |
| 562 size_t index = 0; \ | 483 size_t index = 0; \ |
| 563 AddressingMode mode = kMode_None; \ | 484 AddressingMode mode = kMode_None; \ |
| 564 MemOperand operand = i.MemoryOperand(&mode, index); \ | 485 MemOperand operand = i.MemoryOperand(&mode, index); \ |
| 565 DCHECK_EQ(kMode_MRR, mode); \ | |
| 566 Register offset = operand.rb(); \ | 486 Register offset = operand.rb(); \ |
| 567 __ extsw(offset, offset); \ | 487 __ lgfr(offset, offset); \ |
| 568 if (HasRegisterInput(instr, 2)) { \ | 488 if (HasRegisterInput(instr, 2)) { \ |
| 569 __ cmplw(offset, i.InputRegister(2)); \ | 489 __ CmpLogical32(offset, i.InputRegister(2)); \ |
| 570 } else { \ | 490 } else { \ |
| 571 __ cmplwi(offset, i.InputImmediate(2)); \ | 491 __ CmpLogical32(offset, i.InputImmediate(2)); \ |
| 572 } \ | 492 } \ |
| 573 auto ool = new (zone()) OutOfLineLoadZero(this, result); \ | 493 auto ool = new (zone()) OutOfLineLoadZero(this, result); \ |
| 574 __ bge(ool->entry()); \ | 494 __ bge(ool->entry()); \ |
| 575 if (mode == kMode_MRI) { \ | 495 __ asm_instr(result, operand); \ |
| 576 __ asm_instr(result, operand); \ | |
| 577 } else { \ | |
| 578 __ asm_instrx(result, operand); \ | |
| 579 } \ | |
| 580 __ bind(ool->exit()); \ | 496 __ bind(ool->exit()); \ |
| 581 DCHECK_EQ(LeaveRC, i.OutputRCBit()); \ | |
| 582 } while (0) | 497 } while (0) |
| 583 | 498 |
| 584 | |
| 585 // TODO(mbrandy): fix paths that produce garbage in offset's upper 32-bits. | 499 // TODO(mbrandy): fix paths that produce garbage in offset's upper 32-bits. |
| 586 #define ASSEMBLE_CHECKED_STORE_FLOAT32() \ | 500 #define ASSEMBLE_CHECKED_STORE_FLOAT32() \ |
| 587 do { \ | 501 do { \ |
| 588 Label done; \ | 502 Label done; \ |
| 589 size_t index = 0; \ | 503 size_t index = 0; \ |
| 590 AddressingMode mode = kMode_None; \ | 504 AddressingMode mode = kMode_None; \ |
| 591 MemOperand operand = i.MemoryOperand(&mode, index); \ | 505 MemOperand operand = i.MemoryOperand(&mode, index); \ |
| 592 DCHECK_EQ(kMode_MRR, mode); \ | |
| 593 Register offset = operand.rb(); \ | 506 Register offset = operand.rb(); \ |
| 594 __ extsw(offset, offset); \ | 507 __ lgfr(offset, offset); \ |
| 595 if (HasRegisterInput(instr, 2)) { \ | 508 if (HasRegisterInput(instr, 2)) { \ |
| 596 __ cmplw(offset, i.InputRegister(2)); \ | 509 __ CmpLogical32(offset, i.InputRegister(2)); \ |
| 597 } else { \ | 510 } else { \ |
| 598 __ cmplwi(offset, i.InputImmediate(2)); \ | 511 __ CmpLogical32(offset, i.InputImmediate(2)); \ |
| 599 } \ | 512 } \ |
| 600 __ bge(&done); \ | 513 __ bge(&done); \ |
| 601 DoubleRegister value = i.InputDoubleRegister(3); \ | 514 DoubleRegister value = i.InputDoubleRegister(3); \ |
| 602 __ frsp(kScratchDoubleReg, value); \ | 515 __ StoreFloat32(value, operand); \ |
| 603 if (mode == kMode_MRI) { \ | |
| 604 __ stfs(kScratchDoubleReg, operand); \ | |
| 605 } else { \ | |
| 606 __ stfsx(kScratchDoubleReg, operand); \ | |
| 607 } \ | |
| 608 __ bind(&done); \ | 516 __ bind(&done); \ |
| 609 DCHECK_EQ(LeaveRC, i.OutputRCBit()); \ | |
| 610 } while (0) | 517 } while (0) |
| 611 | 518 |
| 612 | |
| 613 // TODO(mbrandy): fix paths that produce garbage in offset's upper 32-bits. | 519 // TODO(mbrandy): fix paths that produce garbage in offset's upper 32-bits. |
| 614 #define ASSEMBLE_CHECKED_STORE_DOUBLE() \ | 520 #define ASSEMBLE_CHECKED_STORE_DOUBLE() \ |
| 615 do { \ | 521 do { \ |
| 616 Label done; \ | 522 Label done; \ |
| 617 size_t index = 0; \ | 523 size_t index = 0; \ |
| 618 AddressingMode mode = kMode_None; \ | 524 AddressingMode mode = kMode_None; \ |
| 619 MemOperand operand = i.MemoryOperand(&mode, index); \ | 525 MemOperand operand = i.MemoryOperand(&mode, index); \ |
| 620 DCHECK_EQ(kMode_MRR, mode); \ | 526 DCHECK_EQ(kMode_MRR, mode); \ |
| 621 Register offset = operand.rb(); \ | 527 Register offset = operand.rb(); \ |
| 622 __ extsw(offset, offset); \ | 528 __ lgfr(offset, offset); \ |
| 623 if (HasRegisterInput(instr, 2)) { \ | 529 if (HasRegisterInput(instr, 2)) { \ |
| 624 __ cmplw(offset, i.InputRegister(2)); \ | 530 __ CmpLogical32(offset, i.InputRegister(2)); \ |
| 625 } else { \ | 531 } else { \ |
| 626 __ cmplwi(offset, i.InputImmediate(2)); \ | 532 __ CmpLogical32(offset, i.InputImmediate(2)); \ |
| 627 } \ | 533 } \ |
| 628 __ bge(&done); \ | 534 __ bge(&done); \ |
| 629 DoubleRegister value = i.InputDoubleRegister(3); \ | 535 DoubleRegister value = i.InputDoubleRegister(3); \ |
| 630 if (mode == kMode_MRI) { \ | 536 __ StoreDouble(value, operand); \ |
| 631 __ stfd(value, operand); \ | |
| 632 } else { \ | |
| 633 __ stfdx(value, operand); \ | |
| 634 } \ | |
| 635 __ bind(&done); \ | 537 __ bind(&done); \ |
| 636 DCHECK_EQ(LeaveRC, i.OutputRCBit()); \ | |
| 637 } while (0) | 538 } while (0) |
| 638 | 539 |
| 639 | |
| 640 // TODO(mbrandy): fix paths that produce garbage in offset's upper 32-bits. | 540 // TODO(mbrandy): fix paths that produce garbage in offset's upper 32-bits. |
| 641 #define ASSEMBLE_CHECKED_STORE_INTEGER(asm_instr, asm_instrx) \ | 541 #define ASSEMBLE_CHECKED_STORE_INTEGER(asm_instr) \ |
| 642 do { \ | 542 do { \ |
| 643 Label done; \ | 543 Label done; \ |
| 644 size_t index = 0; \ | 544 size_t index = 0; \ |
| 645 AddressingMode mode = kMode_None; \ | 545 AddressingMode mode = kMode_None; \ |
| 646 MemOperand operand = i.MemoryOperand(&mode, index); \ | 546 MemOperand operand = i.MemoryOperand(&mode, index); \ |
| 647 DCHECK_EQ(kMode_MRR, mode); \ | 547 Register offset = operand.rb(); \ |
| 648 Register offset = operand.rb(); \ | 548 __ lgfr(offset, offset); \ |
| 649 __ extsw(offset, offset); \ | 549 if (HasRegisterInput(instr, 2)) { \ |
| 650 if (HasRegisterInput(instr, 2)) { \ | 550 __ CmpLogical32(offset, i.InputRegister(2)); \ |
| 651 __ cmplw(offset, i.InputRegister(2)); \ | 551 } else { \ |
| 652 } else { \ | 552 __ CmpLogical32(offset, i.InputImmediate(2)); \ |
| 653 __ cmplwi(offset, i.InputImmediate(2)); \ | 553 } \ |
| 654 } \ | 554 __ bge(&done); \ |
| 655 __ bge(&done); \ | 555 Register value = i.InputRegister(3); \ |
| 656 Register value = i.InputRegister(3); \ | 556 __ asm_instr(value, operand); \ |
| 657 if (mode == kMode_MRI) { \ | 557 __ bind(&done); \ |
| 658 __ asm_instr(value, operand); \ | |
| 659 } else { \ | |
| 660 __ asm_instrx(value, operand); \ | |
| 661 } \ | |
| 662 __ bind(&done); \ | |
| 663 DCHECK_EQ(LeaveRC, i.OutputRCBit()); \ | |
| 664 } while (0) | 558 } while (0) |
| 665 | 559 |
| 666 | |
| 667 void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) { | 560 void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) { |
| 668 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); | 561 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); |
| 669 if (sp_slot_delta > 0) { | 562 if (sp_slot_delta > 0) { |
| 670 __ Add(sp, sp, sp_slot_delta * kPointerSize, r0); | 563 __ AddP(sp, sp, Operand(sp_slot_delta * kPointerSize)); |
| 671 } | 564 } |
| 672 frame_access_state()->SetFrameAccessToDefault(); | 565 frame_access_state()->SetFrameAccessToDefault(); |
| 673 } | 566 } |
| 674 | 567 |
| 675 | |
| 676 void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) { | 568 void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) { |
| 677 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); | 569 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); |
| 678 if (sp_slot_delta < 0) { | 570 if (sp_slot_delta < 0) { |
| 679 __ Add(sp, sp, sp_slot_delta * kPointerSize, r0); | 571 __ AddP(sp, sp, Operand(sp_slot_delta * kPointerSize)); |
| 680 frame_access_state()->IncreaseSPDelta(-sp_slot_delta); | 572 frame_access_state()->IncreaseSPDelta(-sp_slot_delta); |
| 681 } | 573 } |
| 682 if (frame()->needs_frame()) { | 574 if (frame()->needs_frame()) { |
| 683 __ RestoreFrameStateForTailCall(); | 575 __ RestoreFrameStateForTailCall(); |
| 684 } | 576 } |
| 685 frame_access_state()->SetFrameAccessToSP(); | 577 frame_access_state()->SetFrameAccessToSP(); |
| 686 } | 578 } |
| 687 | 579 |
| 688 | |
| 689 // Assembles an instruction after register allocation, producing machine code. | 580 // Assembles an instruction after register allocation, producing machine code. |
| 690 void CodeGenerator::AssembleArchInstruction(Instruction* instr) { | 581 void CodeGenerator::AssembleArchInstruction(Instruction* instr) { |
| 691 PPCOperandConverter i(this, instr); | 582 S390OperandConverter i(this, instr); |
| 692 ArchOpcode opcode = ArchOpcodeField::decode(instr->opcode()); | 583 ArchOpcode opcode = ArchOpcodeField::decode(instr->opcode()); |
| 693 | 584 |
| 694 switch (opcode) { | 585 switch (opcode) { |
| 695 case kArchCallCodeObject: { | 586 case kArchCallCodeObject: { |
| 696 v8::internal::Assembler::BlockTrampolinePoolScope block_trampoline_pool( | |
| 697 masm()); | |
| 698 EnsureSpaceForLazyDeopt(); | 587 EnsureSpaceForLazyDeopt(); |
| 699 if (HasRegisterInput(instr, 0)) { | 588 if (HasRegisterInput(instr, 0)) { |
| 700 __ addi(ip, i.InputRegister(0), | 589 __ AddP(ip, i.InputRegister(0), |
| 701 Operand(Code::kHeaderSize - kHeapObjectTag)); | 590 Operand(Code::kHeaderSize - kHeapObjectTag)); |
| 702 __ Call(ip); | 591 __ Call(ip); |
| 703 } else { | 592 } else { |
| 704 __ Call(Handle<Code>::cast(i.InputHeapObject(0)), | 593 __ Call(Handle<Code>::cast(i.InputHeapObject(0)), |
| 705 RelocInfo::CODE_TARGET); | 594 RelocInfo::CODE_TARGET); |
| 706 } | 595 } |
| 707 RecordCallPosition(instr); | 596 RecordCallPosition(instr); |
| 708 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | |
| 709 frame_access_state()->ClearSPDelta(); | 597 frame_access_state()->ClearSPDelta(); |
| 710 break; | 598 break; |
| 711 } | 599 } |
| 712 case kArchTailCallCodeObject: { | 600 case kArchTailCallCodeObject: { |
| 713 int stack_param_delta = i.InputInt32(instr->InputCount() - 1); | 601 int stack_param_delta = i.InputInt32(instr->InputCount() - 1); |
| 714 AssembleDeconstructActivationRecord(stack_param_delta); | 602 AssembleDeconstructActivationRecord(stack_param_delta); |
| 715 if (HasRegisterInput(instr, 0)) { | 603 if (HasRegisterInput(instr, 0)) { |
| 716 __ addi(ip, i.InputRegister(0), | 604 __ AddP(ip, i.InputRegister(0), |
| 717 Operand(Code::kHeaderSize - kHeapObjectTag)); | 605 Operand(Code::kHeaderSize - kHeapObjectTag)); |
| 718 __ Jump(ip); | 606 __ Jump(ip); |
| 719 } else { | 607 } else { |
| 720 // We cannot use the constant pool to load the target since | 608 // We cannot use the constant pool to load the target since |
| 721 // we've already restored the caller's frame. | 609 // we've already restored the caller's frame. |
| 722 ConstantPoolUnavailableScope constant_pool_unavailable(masm()); | 610 ConstantPoolUnavailableScope constant_pool_unavailable(masm()); |
| 723 __ Jump(Handle<Code>::cast(i.InputHeapObject(0)), | 611 __ Jump(Handle<Code>::cast(i.InputHeapObject(0)), |
| 724 RelocInfo::CODE_TARGET); | 612 RelocInfo::CODE_TARGET); |
| 725 } | 613 } |
| 726 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | |
| 727 frame_access_state()->ClearSPDelta(); | 614 frame_access_state()->ClearSPDelta(); |
| 728 break; | 615 break; |
| 729 } | 616 } |
| 730 case kArchCallJSFunction: { | 617 case kArchCallJSFunction: { |
| 731 v8::internal::Assembler::BlockTrampolinePoolScope block_trampoline_pool( | |
| 732 masm()); | |
| 733 EnsureSpaceForLazyDeopt(); | 618 EnsureSpaceForLazyDeopt(); |
| 734 Register func = i.InputRegister(0); | 619 Register func = i.InputRegister(0); |
| 735 if (FLAG_debug_code) { | 620 if (FLAG_debug_code) { |
| 736 // Check the function's context matches the context argument. | 621 // Check the function's context matches the context argument. |
| 737 __ LoadP(kScratchReg, | 622 __ LoadP(kScratchReg, |
| 738 FieldMemOperand(func, JSFunction::kContextOffset)); | 623 FieldMemOperand(func, JSFunction::kContextOffset)); |
| 739 __ cmp(cp, kScratchReg); | 624 __ CmpP(cp, kScratchReg); |
| 740 __ Assert(eq, kWrongFunctionContext); | 625 __ Assert(eq, kWrongFunctionContext); |
| 741 } | 626 } |
| 742 __ LoadP(ip, FieldMemOperand(func, JSFunction::kCodeEntryOffset)); | 627 __ LoadP(ip, FieldMemOperand(func, JSFunction::kCodeEntryOffset)); |
| 743 __ Call(ip); | 628 __ Call(ip); |
| 744 RecordCallPosition(instr); | 629 RecordCallPosition(instr); |
| 745 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | |
| 746 frame_access_state()->ClearSPDelta(); | 630 frame_access_state()->ClearSPDelta(); |
| 747 break; | 631 break; |
| 748 } | 632 } |
| 749 case kArchTailCallJSFunction: { | 633 case kArchTailCallJSFunction: { |
| 750 Register func = i.InputRegister(0); | 634 Register func = i.InputRegister(0); |
| 751 if (FLAG_debug_code) { | 635 if (FLAG_debug_code) { |
| 752 // Check the function's context matches the context argument. | 636 // Check the function's context matches the context argument. |
| 753 __ LoadP(kScratchReg, | 637 __ LoadP(kScratchReg, |
| 754 FieldMemOperand(func, JSFunction::kContextOffset)); | 638 FieldMemOperand(func, JSFunction::kContextOffset)); |
| 755 __ cmp(cp, kScratchReg); | 639 __ CmpP(cp, kScratchReg); |
| 756 __ Assert(eq, kWrongFunctionContext); | 640 __ Assert(eq, kWrongFunctionContext); |
| 757 } | 641 } |
| 758 int stack_param_delta = i.InputInt32(instr->InputCount() - 1); | 642 int stack_param_delta = i.InputInt32(instr->InputCount() - 1); |
| 759 AssembleDeconstructActivationRecord(stack_param_delta); | 643 AssembleDeconstructActivationRecord(stack_param_delta); |
| 760 __ LoadP(ip, FieldMemOperand(func, JSFunction::kCodeEntryOffset)); | 644 __ LoadP(ip, FieldMemOperand(func, JSFunction::kCodeEntryOffset)); |
| 761 __ Jump(ip); | 645 __ Jump(ip); |
| 762 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | |
| 763 frame_access_state()->ClearSPDelta(); | 646 frame_access_state()->ClearSPDelta(); |
| 764 break; | 647 break; |
| 765 } | 648 } |
| 766 case kArchPrepareCallCFunction: { | 649 case kArchPrepareCallCFunction: { |
| 767 int const num_parameters = MiscField::decode(instr->opcode()); | 650 int const num_parameters = MiscField::decode(instr->opcode()); |
| 768 __ PrepareCallCFunction(num_parameters, kScratchReg); | 651 __ PrepareCallCFunction(num_parameters, kScratchReg); |
| 769 // Frame alignment requires using FP-relative frame addressing. | 652 // Frame alignment requires using FP-relative frame addressing. |
| 770 frame_access_state()->SetFrameAccessToFP(); | 653 frame_access_state()->SetFrameAccessToFP(); |
| 771 break; | 654 break; |
| 772 } | 655 } |
| 773 case kArchPrepareTailCall: | 656 case kArchPrepareTailCall: |
| 774 AssemblePrepareTailCall(i.InputInt32(instr->InputCount() - 1)); | 657 AssemblePrepareTailCall(i.InputInt32(instr->InputCount() - 1)); |
| 775 break; | 658 break; |
| 776 case kArchCallCFunction: { | 659 case kArchCallCFunction: { |
| 777 int const num_parameters = MiscField::decode(instr->opcode()); | 660 int const num_parameters = MiscField::decode(instr->opcode()); |
| 778 if (instr->InputAt(0)->IsImmediate()) { | 661 if (instr->InputAt(0)->IsImmediate()) { |
| 779 ExternalReference ref = i.InputExternalReference(0); | 662 ExternalReference ref = i.InputExternalReference(0); |
| 780 __ CallCFunction(ref, num_parameters); | 663 __ CallCFunction(ref, num_parameters); |
| 781 } else { | 664 } else { |
| 782 Register func = i.InputRegister(0); | 665 Register func = i.InputRegister(0); |
| 783 __ CallCFunction(func, num_parameters); | 666 __ CallCFunction(func, num_parameters); |
| 784 } | 667 } |
| 785 frame_access_state()->SetFrameAccessToDefault(); | 668 frame_access_state()->SetFrameAccessToDefault(); |
| 786 frame_access_state()->ClearSPDelta(); | 669 frame_access_state()->ClearSPDelta(); |
| 787 break; | 670 break; |
| 788 } | 671 } |
| 789 case kArchJmp: | 672 case kArchJmp: |
| 790 AssembleArchJump(i.InputRpo(0)); | 673 AssembleArchJump(i.InputRpo(0)); |
| 791 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | |
| 792 break; | 674 break; |
| 793 case kArchLookupSwitch: | 675 case kArchLookupSwitch: |
| 794 AssembleArchLookupSwitch(instr); | 676 AssembleArchLookupSwitch(instr); |
| 795 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | |
| 796 break; | 677 break; |
| 797 case kArchTableSwitch: | 678 case kArchTableSwitch: |
| 798 AssembleArchTableSwitch(instr); | 679 AssembleArchTableSwitch(instr); |
| 799 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | |
| 800 break; | 680 break; |
| 801 case kArchNop: | 681 case kArchNop: |
| 802 case kArchThrowTerminator: | 682 case kArchThrowTerminator: |
| 803 // don't emit code for nops. | 683 // don't emit code for nops. |
| 804 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | |
| 805 break; | 684 break; |
| 806 case kArchDeoptimize: { | 685 case kArchDeoptimize: { |
| 807 int deopt_state_id = | 686 int deopt_state_id = |
| 808 BuildTranslation(instr, -1, 0, OutputFrameStateCombine::Ignore()); | 687 BuildTranslation(instr, -1, 0, OutputFrameStateCombine::Ignore()); |
| 809 Deoptimizer::BailoutType bailout_type = | 688 Deoptimizer::BailoutType bailout_type = |
| 810 Deoptimizer::BailoutType(MiscField::decode(instr->opcode())); | 689 Deoptimizer::BailoutType(MiscField::decode(instr->opcode())); |
| 811 AssembleDeoptimizerCall(deopt_state_id, bailout_type); | 690 AssembleDeoptimizerCall(deopt_state_id, bailout_type); |
| 812 break; | 691 break; |
| 813 } | 692 } |
| 814 case kArchRet: | 693 case kArchRet: |
| 815 AssembleReturn(); | 694 AssembleReturn(); |
| 816 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | |
| 817 break; | 695 break; |
| 818 case kArchStackPointer: | 696 case kArchStackPointer: |
| 819 __ mr(i.OutputRegister(), sp); | 697 __ LoadRR(i.OutputRegister(), sp); |
| 820 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | |
| 821 break; | 698 break; |
| 822 case kArchFramePointer: | 699 case kArchFramePointer: |
| 823 __ mr(i.OutputRegister(), fp); | 700 __ LoadRR(i.OutputRegister(), fp); |
| 824 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | |
| 825 break; | 701 break; |
| 826 case kArchParentFramePointer: | 702 case kArchParentFramePointer: |
| 827 if (frame_access_state()->frame()->needs_frame()) { | 703 if (frame_access_state()->frame()->needs_frame()) { |
| 828 __ LoadP(i.OutputRegister(), MemOperand(fp, 0)); | 704 __ LoadP(i.OutputRegister(), MemOperand(fp, 0)); |
| 829 } else { | 705 } else { |
| 830 __ mr(i.OutputRegister(), fp); | 706 __ LoadRR(i.OutputRegister(), fp); |
| 831 } | 707 } |
| 832 break; | 708 break; |
| 833 case kArchTruncateDoubleToI: | 709 case kArchTruncateDoubleToI: |
| 834 // TODO(mbrandy): move slow call to stub out of line. | 710 // TODO(mbrandy): move slow call to stub out of line. |
| 835 __ TruncateDoubleToI(i.OutputRegister(), i.InputDoubleRegister(0)); | 711 __ TruncateDoubleToI(i.OutputRegister(), i.InputDoubleRegister(0)); |
| 836 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | |
| 837 break; | 712 break; |
| 838 case kArchStoreWithWriteBarrier: { | 713 case kArchStoreWithWriteBarrier: { |
| 839 RecordWriteMode mode = | 714 RecordWriteMode mode = |
| 840 static_cast<RecordWriteMode>(MiscField::decode(instr->opcode())); | 715 static_cast<RecordWriteMode>(MiscField::decode(instr->opcode())); |
| 841 Register object = i.InputRegister(0); | 716 Register object = i.InputRegister(0); |
| 842 Register value = i.InputRegister(2); | 717 Register value = i.InputRegister(2); |
| 843 Register scratch0 = i.TempRegister(0); | 718 Register scratch0 = i.TempRegister(0); |
| 844 Register scratch1 = i.TempRegister(1); | 719 Register scratch1 = i.TempRegister(1); |
| 845 OutOfLineRecordWrite* ool; | 720 OutOfLineRecordWrite* ool; |
| 846 | 721 |
| 847 AddressingMode addressing_mode = | 722 AddressingMode addressing_mode = |
| 848 AddressingModeField::decode(instr->opcode()); | 723 AddressingModeField::decode(instr->opcode()); |
| 849 if (addressing_mode == kMode_MRI) { | 724 if (addressing_mode == kMode_MRI) { |
| 850 int32_t offset = i.InputInt32(1); | 725 int32_t offset = i.InputInt32(1); |
| 851 ool = new (zone()) OutOfLineRecordWrite(this, object, offset, value, | 726 ool = new (zone()) OutOfLineRecordWrite(this, object, offset, value, |
| 852 scratch0, scratch1, mode); | 727 scratch0, scratch1, mode); |
| 853 __ StoreP(value, MemOperand(object, offset)); | 728 __ StoreP(value, MemOperand(object, offset)); |
| 854 } else { | 729 } else { |
| 855 DCHECK_EQ(kMode_MRR, addressing_mode); | 730 DCHECK_EQ(kMode_MRR, addressing_mode); |
| 856 Register offset(i.InputRegister(1)); | 731 Register offset(i.InputRegister(1)); |
| 857 ool = new (zone()) OutOfLineRecordWrite(this, object, offset, value, | 732 ool = new (zone()) OutOfLineRecordWrite(this, object, offset, value, |
| 858 scratch0, scratch1, mode); | 733 scratch0, scratch1, mode); |
| 859 __ StorePX(value, MemOperand(object, offset)); | 734 __ StoreP(value, MemOperand(object, offset)); |
| 860 } | 735 } |
| 861 __ CheckPageFlag(object, scratch0, | 736 __ CheckPageFlag(object, scratch0, |
| 862 MemoryChunk::kPointersFromHereAreInterestingMask, ne, | 737 MemoryChunk::kPointersFromHereAreInterestingMask, ne, |
| 863 ool->entry()); | 738 ool->entry()); |
| 864 __ bind(ool->exit()); | 739 __ bind(ool->exit()); |
| 865 break; | 740 break; |
| 866 } | 741 } |
| 867 case kArchStackSlot: { | 742 case kArchStackSlot: { |
| 868 FrameOffset offset = | 743 FrameOffset offset = |
| 869 frame_access_state()->GetFrameOffset(i.InputInt32(0)); | 744 frame_access_state()->GetFrameOffset(i.InputInt32(0)); |
| 870 __ addi(i.OutputRegister(), offset.from_stack_pointer() ? sp : fp, | 745 __ AddP(i.OutputRegister(), offset.from_stack_pointer() ? sp : fp, |
| 871 Operand(offset.offset())); | 746 Operand(offset.offset())); |
| 872 break; | 747 break; |
| 873 } | 748 } |
| 874 case kPPC_And: | 749 case kS390_And: |
| 750 ASSEMBLE_BINOP(AndP, AndP); |
| 751 break; |
| 752 case kS390_AndComplement: |
| 753 __ NotP(i.InputRegister(1)); |
| 754 __ AndP(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1)); |
| 755 break; |
| 756 case kS390_Or: |
| 757 ASSEMBLE_BINOP(OrP, OrP); |
| 758 break; |
| 759 case kS390_OrComplement: |
| 760 __ NotP(i.InputRegister(1)); |
| 761 __ OrP(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1)); |
| 762 break; |
| 763 case kS390_Xor: |
| 764 ASSEMBLE_BINOP(XorP, XorP); |
| 765 break; |
| 766 case kS390_ShiftLeft32: |
| 875 if (HasRegisterInput(instr, 1)) { | 767 if (HasRegisterInput(instr, 1)) { |
| 876 __ and_(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1), | 768 if (i.OutputRegister().is(i.InputRegister(1))) { |
| 877 i.OutputRCBit()); | 769 __ LoadRR(kScratchReg, i.InputRegister(1)); |
| 878 } else { | 770 __ ShiftLeft(i.OutputRegister(), i.InputRegister(0), kScratchReg); |
| 879 __ andi(i.OutputRegister(), i.InputRegister(0), i.InputImmediate(1)); | 771 } else { |
| 880 } | 772 ASSEMBLE_BINOP(ShiftLeft, ShiftLeft); |
| 881 break; | 773 } |
| 882 case kPPC_AndComplement: | 774 } else { |
| 883 __ andc(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1), | 775 ASSEMBLE_BINOP(ShiftLeft, ShiftLeft); |
| 884 i.OutputRCBit()); | 776 } |
| 885 break; | 777 #if V8_TARGET_ARCH_S390X |
| 886 case kPPC_Or: | 778 __ lgfr(i.OutputRegister(0), i.OutputRegister(0)); |
| 779 #endif |
| 780 break; |
| 781 #if V8_TARGET_ARCH_S390X |
| 782 case kS390_ShiftLeft64: |
| 783 ASSEMBLE_BINOP(sllg, sllg); |
| 784 break; |
| 785 #endif |
| 786 case kS390_ShiftRight32: |
| 887 if (HasRegisterInput(instr, 1)) { | 787 if (HasRegisterInput(instr, 1)) { |
| 888 __ orx(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1), | 788 if (i.OutputRegister().is(i.InputRegister(1))) { |
| 889 i.OutputRCBit()); | 789 __ LoadRR(kScratchReg, i.InputRegister(1)); |
| 890 } else { | 790 __ ShiftRight(i.OutputRegister(), i.InputRegister(0), kScratchReg); |
| 891 __ ori(i.OutputRegister(), i.InputRegister(0), i.InputImmediate(1)); | 791 } else { |
| 892 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | 792 ASSEMBLE_BINOP(ShiftRight, ShiftRight); |
| 893 } | 793 } |
| 894 break; | 794 } else { |
| 895 case kPPC_OrComplement: | 795 ASSEMBLE_BINOP(ShiftRight, ShiftRight); |
| 896 __ orc(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1), | 796 } |
| 897 i.OutputRCBit()); | 797 #if V8_TARGET_ARCH_S390X |
| 898 break; | 798 __ lgfr(i.OutputRegister(0), i.OutputRegister(0)); |
| 899 case kPPC_Xor: | 799 #endif |
| 800 break; |
| 801 #if V8_TARGET_ARCH_S390X |
| 802 case kS390_ShiftRight64: |
| 803 ASSEMBLE_BINOP(srlg, srlg); |
| 804 break; |
| 805 #endif |
| 806 case kS390_ShiftRightAlg32: |
| 900 if (HasRegisterInput(instr, 1)) { | 807 if (HasRegisterInput(instr, 1)) { |
| 901 __ xor_(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1), | 808 if (i.OutputRegister().is(i.InputRegister(1))) { |
| 902 i.OutputRCBit()); | 809 __ LoadRR(kScratchReg, i.InputRegister(1)); |
| 903 } else { | 810 __ ShiftRightArith(i.OutputRegister(), i.InputRegister(0), |
| 904 __ xori(i.OutputRegister(), i.InputRegister(0), i.InputImmediate(1)); | 811 kScratchReg); |
| 905 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | 812 } else { |
| 906 } | 813 ASSEMBLE_BINOP(ShiftRightArith, ShiftRightArith); |
| 907 break; | 814 } |
| 908 case kPPC_ShiftLeft32: | 815 } else { |
| 909 ASSEMBLE_BINOP_RC(slw, slwi); | 816 ASSEMBLE_BINOP(ShiftRightArith, ShiftRightArith); |
| 910 break; | 817 } |
| 911 #if V8_TARGET_ARCH_PPC64 | 818 break; |
| 912 case kPPC_ShiftLeft64: | 819 #if V8_TARGET_ARCH_S390X |
| 913 ASSEMBLE_BINOP_RC(sld, sldi); | 820 case kS390_ShiftRightAlg64: |
| 914 break; | 821 ASSEMBLE_BINOP(srag, srag); |
| 915 #endif | 822 break; |
| 916 case kPPC_ShiftRight32: | 823 #endif |
| 917 ASSEMBLE_BINOP_RC(srw, srwi); | 824 case kS390_RotRight32: |
| 918 break; | |
| 919 #if V8_TARGET_ARCH_PPC64 | |
| 920 case kPPC_ShiftRight64: | |
| 921 ASSEMBLE_BINOP_RC(srd, srdi); | |
| 922 break; | |
| 923 #endif | |
| 924 case kPPC_ShiftRightAlg32: | |
| 925 ASSEMBLE_BINOP_INT_RC(sraw, srawi); | |
| 926 break; | |
| 927 #if V8_TARGET_ARCH_PPC64 | |
| 928 case kPPC_ShiftRightAlg64: | |
| 929 ASSEMBLE_BINOP_INT_RC(srad, sradi); | |
| 930 break; | |
| 931 #endif | |
| 932 case kPPC_RotRight32: | |
| 933 if (HasRegisterInput(instr, 1)) { | 825 if (HasRegisterInput(instr, 1)) { |
| 934 __ subfic(kScratchReg, i.InputRegister(1), Operand(32)); | 826 __ LoadComplementRR(kScratchReg, i.InputRegister(1)); |
| 935 __ rotlw(i.OutputRegister(), i.InputRegister(0), kScratchReg, | 827 __ rll(i.OutputRegister(), i.InputRegister(0), kScratchReg); |
| 936 i.OutputRCBit()); | 828 } else { |
| 937 } else { | 829 __ rll(i.OutputRegister(), i.InputRegister(0), |
| 938 int sh = i.InputInt32(1); | 830 Operand(32 - i.InputInt32(1))); |
| 939 __ rotrwi(i.OutputRegister(), i.InputRegister(0), sh, i.OutputRCBit()); | 831 } |
| 940 } | 832 break; |
| 941 break; | 833 #if V8_TARGET_ARCH_S390X |
| 942 #if V8_TARGET_ARCH_PPC64 | 834 case kS390_RotRight64: |
| 943 case kPPC_RotRight64: | |
| 944 if (HasRegisterInput(instr, 1)) { | 835 if (HasRegisterInput(instr, 1)) { |
| 945 __ subfic(kScratchReg, i.InputRegister(1), Operand(64)); | 836 __ LoadComplementRR(kScratchReg, i.InputRegister(1)); |
| 946 __ rotld(i.OutputRegister(), i.InputRegister(0), kScratchReg, | 837 __ rll(i.OutputRegister(), i.InputRegister(0), kScratchReg, |
| 947 i.OutputRCBit()); | 838 Operand(32)); |
| 948 } else { | 839 __ lgfr(i.OutputRegister(), i.OutputRegister()); |
| 949 int sh = i.InputInt32(1); | 840 } else { |
| 950 __ rotrdi(i.OutputRegister(), i.InputRegister(0), sh, i.OutputRCBit()); | 841 UNIMPLEMENTED(); // Not implemented for now |
| 951 } | 842 } |
| 952 break; | 843 break; |
| 953 #endif | 844 #endif |
| 954 case kPPC_Not: | 845 case kS390_Not: |
| 955 __ notx(i.OutputRegister(), i.InputRegister(0), i.OutputRCBit()); | 846 __ LoadRR(i.OutputRegister(), i.InputRegister(0)); |
| 956 break; | 847 __ NotP(i.OutputRegister()); |
| 957 case kPPC_RotLeftAndMask32: | 848 break; |
| 958 __ rlwinm(i.OutputRegister(), i.InputRegister(0), i.InputInt32(1), | 849 case kS390_RotLeftAndMask32: |
| 959 31 - i.InputInt32(2), 31 - i.InputInt32(3), i.OutputRCBit()); | 850 if (CpuFeatures::IsSupported(GENERAL_INSTR_EXT)) { |
| 960 break; | 851 int shiftAmount = i.InputInt32(1); |
| 961 #if V8_TARGET_ARCH_PPC64 | 852 int endBit = 63 - i.InputInt32(3); |
| 962 case kPPC_RotLeftAndClear64: | 853 int startBit = 63 - i.InputInt32(2); |
| 963 __ rldic(i.OutputRegister(), i.InputRegister(0), i.InputInt32(1), | 854 __ rll(i.OutputRegister(), i.InputRegister(0), Operand(shiftAmount)); |
| 964 63 - i.InputInt32(2), i.OutputRCBit()); | 855 __ risbg(i.OutputRegister(), i.OutputRegister(), Operand(startBit), |
| 965 break; | 856 Operand(endBit), Operand::Zero(), true); |
| 966 case kPPC_RotLeftAndClearLeft64: | 857 } else { |
| 967 __ rldicl(i.OutputRegister(), i.InputRegister(0), i.InputInt32(1), | 858 UNIMPLEMENTED(); |
| 968 63 - i.InputInt32(2), i.OutputRCBit()); | 859 } |
| 969 break; | 860 break; |
| 970 case kPPC_RotLeftAndClearRight64: | 861 #if V8_TARGET_ARCH_S390X |
| 971 __ rldicr(i.OutputRegister(), i.InputRegister(0), i.InputInt32(1), | 862 case kS390_RotLeftAndClear64: |
| 972 63 - i.InputInt32(2), i.OutputRCBit()); | 863 UNIMPLEMENTED(); // Find correct instruction |
| 973 break; | 864 break; |
| 974 #endif | 865 case kS390_RotLeftAndClearLeft64: |
| 975 case kPPC_Add: | 866 if (CpuFeatures::IsSupported(GENERAL_INSTR_EXT)) { |
| 976 #if V8_TARGET_ARCH_PPC64 | 867 int shiftAmount = i.InputInt32(1); |
| 868 int endBit = 63; |
| 869 int startBit = 63 - i.InputInt32(2); |
| 870 __ risbg(i.OutputRegister(), i.InputRegister(0), Operand(startBit), |
| 871 Operand(endBit), Operand(shiftAmount), true); |
| 872 } else { |
| 873 UNIMPLEMENTED(); |
| 874 } |
| 875 break; |
| 876 case kS390_RotLeftAndClearRight64: |
| 877 if (CpuFeatures::IsSupported(GENERAL_INSTR_EXT)) { |
| 878 int shiftAmount = i.InputInt32(1); |
| 879 int endBit = 63 - i.InputInt32(2); |
| 880 int startBit = 0; |
| 881 __ risbg(i.OutputRegister(), i.InputRegister(0), Operand(startBit), |
| 882 Operand(endBit), Operand(shiftAmount), true); |
| 883 } else { |
| 884 UNIMPLEMENTED(); |
| 885 } |
| 886 break; |
| 887 #endif |
| 888 case kS390_Add: |
| 889 #if V8_TARGET_ARCH_S390X |
| 977 if (FlagsModeField::decode(instr->opcode()) != kFlags_none) { | 890 if (FlagsModeField::decode(instr->opcode()) != kFlags_none) { |
| 978 ASSEMBLE_ADD_WITH_OVERFLOW(); | 891 ASSEMBLE_ADD_WITH_OVERFLOW(); |
| 979 } else { | 892 } else { |
| 980 #endif | 893 #endif |
| 981 if (HasRegisterInput(instr, 1)) { | 894 ASSEMBLE_BINOP(AddP, AddP); |
| 982 __ add(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1), | 895 #if V8_TARGET_ARCH_S390X |
| 983 LeaveOE, i.OutputRCBit()); | 896 } |
| 984 } else { | 897 #endif |
| 985 __ addi(i.OutputRegister(), i.InputRegister(0), i.InputImmediate(1)); | 898 break; |
| 986 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | 899 case kS390_AddWithOverflow32: |
| 987 } | |
| 988 #if V8_TARGET_ARCH_PPC64 | |
| 989 } | |
| 990 #endif | |
| 991 break; | |
| 992 case kPPC_AddWithOverflow32: | |
| 993 ASSEMBLE_ADD_WITH_OVERFLOW32(); | 900 ASSEMBLE_ADD_WITH_OVERFLOW32(); |
| 994 break; | 901 break; |
| 995 case kPPC_AddDouble: | 902 case kS390_AddFloat: |
| 996 ASSEMBLE_FLOAT_BINOP_RC(fadd); | 903 // Ensure we don't clobber right/InputReg(1) |
| 997 break; | 904 if (i.OutputDoubleRegister().is(i.InputDoubleRegister(1))) { |
| 998 case kPPC_Sub: | 905 ASSEMBLE_FLOAT_UNOP(aebr); |
| 999 #if V8_TARGET_ARCH_PPC64 | 906 } else { |
| 907 if (!i.OutputDoubleRegister().is(i.InputDoubleRegister(0))) |
| 908 __ ldr(i.OutputDoubleRegister(), i.InputDoubleRegister(0)); |
| 909 __ aebr(i.OutputDoubleRegister(), i.InputDoubleRegister(1)); |
| 910 } |
| 911 break; |
| 912 case kS390_AddDouble: |
| 913 // Ensure we don't clobber right/InputReg(1) |
| 914 if (i.OutputDoubleRegister().is(i.InputDoubleRegister(1))) { |
| 915 ASSEMBLE_FLOAT_UNOP(adbr); |
| 916 } else { |
| 917 if (!i.OutputDoubleRegister().is(i.InputDoubleRegister(0))) |
| 918 __ ldr(i.OutputDoubleRegister(), i.InputDoubleRegister(0)); |
| 919 __ adbr(i.OutputDoubleRegister(), i.InputDoubleRegister(1)); |
| 920 } |
| 921 break; |
| 922 case kS390_Sub: |
| 923 #if V8_TARGET_ARCH_S390X |
| 1000 if (FlagsModeField::decode(instr->opcode()) != kFlags_none) { | 924 if (FlagsModeField::decode(instr->opcode()) != kFlags_none) { |
| 1001 ASSEMBLE_SUB_WITH_OVERFLOW(); | 925 ASSEMBLE_SUB_WITH_OVERFLOW(); |
| 1002 } else { | 926 } else { |
| 1003 #endif | 927 #endif |
| 1004 if (HasRegisterInput(instr, 1)) { | 928 ASSEMBLE_BINOP(SubP, SubP); |
| 1005 __ sub(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1), | 929 #if V8_TARGET_ARCH_S390X |
| 1006 LeaveOE, i.OutputRCBit()); | 930 } |
| 1007 } else { | 931 #endif |
| 1008 __ subi(i.OutputRegister(), i.InputRegister(0), i.InputImmediate(1)); | 932 break; |
| 1009 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | 933 case kS390_SubWithOverflow32: |
| 934 ASSEMBLE_SUB_WITH_OVERFLOW32(); |
| 935 break; |
| 936 case kS390_SubFloat: |
| 937 // OutputDoubleReg() = i.InputDoubleRegister(0) - i.InputDoubleRegister(1) |
| 938 if (i.OutputDoubleRegister().is(i.InputDoubleRegister(1))) { |
| 939 __ ldr(kScratchDoubleReg, i.InputDoubleRegister(1)); |
| 940 __ ldr(i.OutputDoubleRegister(), i.InputDoubleRegister(0)); |
| 941 __ sebr(i.OutputDoubleRegister(), kScratchDoubleReg); |
| 942 } else { |
| 943 if (!i.OutputDoubleRegister().is(i.InputDoubleRegister(0))) { |
| 944 __ ldr(i.OutputDoubleRegister(), i.InputDoubleRegister(0)); |
| 1010 } | 945 } |
| 1011 #if V8_TARGET_ARCH_PPC64 | 946 __ sebr(i.OutputDoubleRegister(), i.InputDoubleRegister(1)); |
| 1012 } | 947 } |
| 1013 #endif | 948 break; |
| 1014 break; | 949 case kS390_SubDouble: |
| 1015 case kPPC_SubWithOverflow32: | 950 // OutputDoubleReg() = i.InputDoubleRegister(0) - i.InputDoubleRegister(1) |
| 1016 ASSEMBLE_SUB_WITH_OVERFLOW32(); | 951 if (i.OutputDoubleRegister().is(i.InputDoubleRegister(1))) { |
| 1017 break; | 952 __ ldr(kScratchDoubleReg, i.InputDoubleRegister(1)); |
| 1018 case kPPC_SubDouble: | 953 __ ldr(i.OutputDoubleRegister(), i.InputDoubleRegister(0)); |
| 1019 ASSEMBLE_FLOAT_BINOP_RC(fsub); | 954 __ sdbr(i.OutputDoubleRegister(), kScratchDoubleReg); |
| 1020 break; | 955 } else { |
| 1021 case kPPC_Mul32: | 956 if (!i.OutputDoubleRegister().is(i.InputDoubleRegister(0))) { |
| 1022 __ mullw(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1), | 957 __ ldr(i.OutputDoubleRegister(), i.InputDoubleRegister(0)); |
| 1023 LeaveOE, i.OutputRCBit()); | 958 } |
| 1024 break; | 959 __ sdbr(i.OutputDoubleRegister(), i.InputDoubleRegister(1)); |
| 1025 #if V8_TARGET_ARCH_PPC64 | 960 } |
| 1026 case kPPC_Mul64: | 961 break; |
| 1027 __ mulld(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1), | 962 case kS390_Mul32: |
| 1028 LeaveOE, i.OutputRCBit()); | 963 #if V8_TARGET_ARCH_S390X |
| 1029 break; | 964 case kS390_Mul64: |
| 1030 #endif | 965 #endif |
| 1031 case kPPC_MulHigh32: | 966 __ Mul(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1)); |
| 1032 __ mulhw(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1), | 967 break; |
| 1033 i.OutputRCBit()); | 968 case kS390_MulHigh32: |
| 1034 break; | 969 __ LoadRR(r1, i.InputRegister(0)); |
| 1035 case kPPC_MulHighU32: | 970 __ mr_z(r0, i.InputRegister(1)); |
| 1036 __ mulhwu(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1), | 971 __ LoadRR(i.OutputRegister(), r0); |
| 1037 i.OutputRCBit()); | 972 break; |
| 1038 break; | 973 case kS390_MulHighU32: |
| 1039 case kPPC_MulDouble: | 974 __ LoadRR(r1, i.InputRegister(0)); |
| 1040 ASSEMBLE_FLOAT_BINOP_RC(fmul); | 975 __ mlr(r0, i.InputRegister(1)); |
| 1041 break; | 976 __ LoadRR(i.OutputRegister(), r0); |
| 1042 case kPPC_Div32: | 977 break; |
| 1043 __ divw(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1)); | 978 case kS390_MulFloat: |
| 1044 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | 979 // Ensure we don't clobber right |
| 1045 break; | 980 if (i.OutputDoubleRegister().is(i.InputDoubleRegister(1))) { |
| 1046 #if V8_TARGET_ARCH_PPC64 | 981 ASSEMBLE_FLOAT_UNOP(meebr); |
| 1047 case kPPC_Div64: | 982 } else { |
| 1048 __ divd(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1)); | 983 if (!i.OutputDoubleRegister().is(i.InputDoubleRegister(0))) |
| 1049 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | 984 __ ldr(i.OutputDoubleRegister(), i.InputDoubleRegister(0)); |
| 1050 break; | 985 __ meebr(i.OutputDoubleRegister(), i.InputDoubleRegister(1)); |
| 1051 #endif | 986 } |
| 1052 case kPPC_DivU32: | 987 break; |
| 1053 __ divwu(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1)); | 988 case kS390_MulDouble: |
| 1054 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | 989 // Ensure we don't clobber right |
| 1055 break; | 990 if (i.OutputDoubleRegister().is(i.InputDoubleRegister(1))) { |
| 1056 #if V8_TARGET_ARCH_PPC64 | 991 ASSEMBLE_FLOAT_UNOP(mdbr); |
| 1057 case kPPC_DivU64: | 992 } else { |
| 1058 __ divdu(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1)); | 993 if (!i.OutputDoubleRegister().is(i.InputDoubleRegister(0))) |
| 1059 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | 994 __ ldr(i.OutputDoubleRegister(), i.InputDoubleRegister(0)); |
| 1060 break; | 995 __ mdbr(i.OutputDoubleRegister(), i.InputDoubleRegister(1)); |
| 1061 #endif | 996 } |
| 1062 case kPPC_DivDouble: | 997 break; |
| 1063 ASSEMBLE_FLOAT_BINOP_RC(fdiv); | 998 #if V8_TARGET_ARCH_S390X |
| 1064 break; | 999 case kS390_Div64: |
| 1065 case kPPC_Mod32: | 1000 #endif |
| 1066 ASSEMBLE_MODULO(divw, mullw); | 1001 case kS390_Div32: |
| 1067 break; | 1002 __ LoadRR(r0, i.InputRegister(0)); |
| 1068 #if V8_TARGET_ARCH_PPC64 | 1003 __ srda(r0, Operand(32)); |
| 1069 case kPPC_Mod64: | 1004 __ dr(r0, i.InputRegister(1)); |
| 1070 ASSEMBLE_MODULO(divd, mulld); | 1005 __ ltr(i.OutputRegister(), r1); |
| 1071 break; | 1006 break; |
| 1072 #endif | 1007 #if V8_TARGET_ARCH_S390X |
| 1073 case kPPC_ModU32: | 1008 case kS390_DivU64: |
| 1074 ASSEMBLE_MODULO(divwu, mullw); | 1009 __ LoadRR(r1, i.InputRegister(0)); |
| 1075 break; | 1010 __ LoadImmP(r0, Operand::Zero()); |
| 1076 #if V8_TARGET_ARCH_PPC64 | 1011 __ dlgr(r0, i.InputRegister(1)); // R0:R1 = R1 / divisor - |
| 1077 case kPPC_ModU64: | 1012 __ ltgr(i.OutputRegister(), r1); // Copy remainder to output reg |
| 1078 ASSEMBLE_MODULO(divdu, mulld); | 1013 break; |
| 1079 break; | 1014 #endif |
| 1080 #endif | 1015 case kS390_DivU32: |
| 1081 case kPPC_ModDouble: | 1016 __ LoadRR(r0, i.InputRegister(0)); |
| 1082 // TODO(bmeurer): We should really get rid of this special instruction, | 1017 __ srdl(r0, Operand(32)); |
| 1083 // and generate a CallAddress instruction instead. | 1018 __ dlr(r0, i.InputRegister(1)); // R0:R1 = R1 / divisor - |
| 1019 __ ltr(i.OutputRegister(), r1); // Copy remainder to output reg |
| 1020 break; |
| 1021 |
| 1022 case kS390_DivFloat: |
| 1023       // OutputDoubleReg() = i.InputDoubleRegister(0) / i.InputDoubleRegister(1) |
| 1024 if (i.OutputDoubleRegister().is(i.InputDoubleRegister(1))) { |
| 1025 __ ldr(kScratchDoubleReg, i.InputDoubleRegister(1)); |
| 1026 __ ldr(i.OutputDoubleRegister(), i.InputDoubleRegister(0)); |
| 1027 __ debr(i.OutputDoubleRegister(), kScratchDoubleReg); |
| 1028 } else { |
| 1029 if (!i.OutputDoubleRegister().is(i.InputDoubleRegister(0))) |
| 1030 __ ldr(i.OutputDoubleRegister(), i.InputDoubleRegister(0)); |
| 1031 __ debr(i.OutputDoubleRegister(), i.InputDoubleRegister(1)); |
| 1032 } |
| 1033 break; |
| 1034 case kS390_DivDouble: |
| 1035       // OutputDoubleReg() = i.InputDoubleRegister(0) / i.InputDoubleRegister(1) |
| 1036 if (i.OutputDoubleRegister().is(i.InputDoubleRegister(1))) { |
| 1037 __ ldr(kScratchDoubleReg, i.InputDoubleRegister(1)); |
| 1038 __ ldr(i.OutputDoubleRegister(), i.InputDoubleRegister(0)); |
| 1039 __ ddbr(i.OutputDoubleRegister(), kScratchDoubleReg); |
| 1040 } else { |
| 1041 if (!i.OutputDoubleRegister().is(i.InputDoubleRegister(0))) |
| 1042 __ ldr(i.OutputDoubleRegister(), i.InputDoubleRegister(0)); |
| 1043 __ ddbr(i.OutputDoubleRegister(), i.InputDoubleRegister(1)); |
| 1044 } |
| 1045 break; |
| 1046 case kS390_Mod32: |
| 1047 ASSEMBLE_MODULO(dr, srda); |
| 1048 break; |
| 1049 case kS390_ModU32: |
| 1050 ASSEMBLE_MODULO(dlr, srdl); |
| 1051 break; |
| 1052 #if V8_TARGET_ARCH_S390X |
| 1053 case kS390_Mod64: |
| 1054 ASSEMBLE_MODULO(dr, srda); |
| 1055 break; |
| 1056 case kS390_ModU64: |
| 1057 ASSEMBLE_MODULO(dlr, srdl); |
| 1058 break; |
| 1059 #endif |
| 1060 case kS390_AbsFloat: |
| 1061 __ lpebr(i.OutputDoubleRegister(), i.InputDoubleRegister(0)); |
| 1062 break; |
| 1063 case kS390_SqrtFloat: |
| 1064 ASSEMBLE_FLOAT_UNOP(sqebr); |
| 1065 break; |
| 1066 case kS390_FloorFloat: |
| 1067 // ASSEMBLE_FLOAT_UNOP_RC(frim); |
| 1068 __ FloatFloor32(i.OutputDoubleRegister(), i.InputDoubleRegister(0), |
| 1069 kScratchReg); |
| 1070 break; |
| 1071 case kS390_CeilFloat: |
| 1072 __ FloatCeiling32(i.OutputDoubleRegister(), i.InputDoubleRegister(0), |
| 1073 kScratchReg, kScratchDoubleReg); |
| 1074 break; |
| 1075 case kS390_TruncateFloat: |
| 1076 __ fiebra(i.OutputDoubleRegister(), i.InputDoubleRegister(0), |
| 1077 v8::internal::Assembler::FIDBRA_ROUND_TOWARD_0); |
| 1078 break; |
| 1079 // Double operations |
| 1080 case kS390_ModDouble: |
| 1084 ASSEMBLE_FLOAT_MODULO(); | 1081 ASSEMBLE_FLOAT_MODULO(); |
| 1085 break; | 1082 break; |
| 1086 case kPPC_Neg: | 1083 case kS390_Neg: |
| 1087 __ neg(i.OutputRegister(), i.InputRegister(0), LeaveOE, i.OutputRCBit()); | 1084 __ LoadComplementRR(i.OutputRegister(), i.InputRegister(0)); |
| 1088 break; | 1085 break; |
| 1089 case kPPC_MaxDouble: | 1086 case kS390_MaxDouble: |
| 1090 ASSEMBLE_FLOAT_MAX(kScratchDoubleReg); | 1087 ASSEMBLE_FLOAT_MAX(kScratchDoubleReg, kScratchReg); |
| 1091 break; | 1088 break; |
| 1092 case kPPC_MinDouble: | 1089 case kS390_MinDouble: |
| 1093 ASSEMBLE_FLOAT_MIN(kScratchDoubleReg); | 1090 ASSEMBLE_FLOAT_MIN(kScratchDoubleReg, kScratchReg); |
| 1094 break; | 1091 break; |
| 1095 case kPPC_AbsDouble: | 1092 case kS390_AbsDouble: |
| 1096 ASSEMBLE_FLOAT_UNOP_RC(fabs); | 1093 __ lpdbr(i.OutputDoubleRegister(), i.InputDoubleRegister(0)); |
| 1097 break; | 1094 break; |
| 1098 case kPPC_SqrtDouble: | 1095 case kS390_SqrtDouble: |
| 1099 ASSEMBLE_FLOAT_UNOP_RC(fsqrt); | 1096 ASSEMBLE_FLOAT_UNOP(sqdbr); |
| 1100 break; | 1097 break; |
| 1101 case kPPC_FloorDouble: | 1098 case kS390_FloorDouble: |
| 1102 ASSEMBLE_FLOAT_UNOP_RC(frim); | 1099 __ FloatFloor64(i.OutputDoubleRegister(), i.InputDoubleRegister(0), |
| 1103 break; | 1100 kScratchReg); |
| 1104 case kPPC_CeilDouble: | 1101 break; |
| 1105 ASSEMBLE_FLOAT_UNOP_RC(frip); | 1102 case kS390_CeilDouble: |
| 1106 break; | 1103 __ FloatCeiling64(i.OutputDoubleRegister(), i.InputDoubleRegister(0), |
| 1107 case kPPC_TruncateDouble: | 1104 kScratchReg, kScratchDoubleReg); |
| 1108 ASSEMBLE_FLOAT_UNOP_RC(friz); | 1105 break; |
| 1109 break; | 1106 case kS390_TruncateDouble: |
| 1110 case kPPC_RoundDouble: | 1107 __ fidbra(i.OutputDoubleRegister(), i.InputDoubleRegister(0), |
| 1111 ASSEMBLE_FLOAT_UNOP_RC(frin); | 1108 v8::internal::Assembler::FIDBRA_ROUND_TOWARD_0); |
| 1112 break; | 1109 break; |
| 1113 case kPPC_NegDouble: | 1110 case kS390_RoundDouble: |
| 1114 ASSEMBLE_FLOAT_UNOP_RC(fneg); | 1111 __ fidbra(i.OutputDoubleRegister(), i.InputDoubleRegister(0), |
| 1115 break; | 1112 v8::internal::Assembler::FIDBRA_ROUND_TO_NEAREST_AWAY_FROM_0); |
| 1116 case kPPC_Cntlz32: | 1113 break; |
| 1117 __ cntlzw_(i.OutputRegister(), i.InputRegister(0)); | 1114 case kS390_NegDouble: |
| 1118 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | 1115 ASSEMBLE_FLOAT_UNOP(lcdbr); |
| 1119 break; | 1116 break; |
| 1120 #if V8_TARGET_ARCH_PPC64 | 1117 case kS390_Cntlz32: { |
| 1121 case kPPC_Cntlz64: | 1118 __ llgfr(i.OutputRegister(), i.InputRegister(0)); |
| 1122 __ cntlzd_(i.OutputRegister(), i.InputRegister(0)); | 1119 __ flogr(r0, i.OutputRegister()); |
| 1123 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | 1120 __ LoadRR(i.OutputRegister(), r0); |
| 1124 break; | 1121 __ SubP(i.OutputRegister(), Operand(32)); |
| 1125 #endif | 1122 } break; |
| 1126 case kPPC_Popcnt32: | 1123 #if V8_TARGET_ARCH_S390X |
| 1127 __ popcntw(i.OutputRegister(), i.InputRegister(0)); | 1124 case kS390_Cntlz64: { |
| 1128 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | 1125 __ flogr(r0, i.InputRegister(0)); |
| 1129 break; | 1126 __ LoadRR(i.OutputRegister(), r0); |
| 1130 #if V8_TARGET_ARCH_PPC64 | 1127 } break; |
| 1131 case kPPC_Popcnt64: | 1128 #endif |
| 1132 __ popcntd(i.OutputRegister(), i.InputRegister(0)); | 1129 case kS390_Popcnt32: |
| 1133 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | 1130 __ Popcnt32(i.OutputRegister(), i.InputRegister(0)); |
| 1134 break; | 1131 break; |
| 1135 #endif | 1132 #if V8_TARGET_ARCH_S390X |
| 1136 case kPPC_Cmp32: | 1133 case kS390_Popcnt64: |
| 1137 ASSEMBLE_COMPARE(cmpw, cmplw); | 1134 __ Popcnt64(i.OutputRegister(), i.InputRegister(0)); |
| 1138 break; | 1135 break; |
| 1139 #if V8_TARGET_ARCH_PPC64 | 1136 #endif |
| 1140 case kPPC_Cmp64: | 1137 case kS390_Cmp32: |
| 1141 ASSEMBLE_COMPARE(cmp, cmpl); | 1138 ASSEMBLE_COMPARE(Cmp32, CmpLogical32); |
| 1142 break; | 1139 break; |
| 1143 #endif | 1140 #if V8_TARGET_ARCH_S390X |
| 1144 case kPPC_CmpDouble: | 1141 case kS390_Cmp64: |
| 1145 ASSEMBLE_FLOAT_COMPARE(fcmpu); | 1142 ASSEMBLE_COMPARE(CmpP, CmpLogicalP); |
| 1146 break; | 1143 break; |
| 1147 case kPPC_Tst32: | 1144 #endif |
| 1145 case kS390_CmpFloat: |
| 1146 __ cebr(i.InputDoubleRegister(0), i.InputDoubleRegister(1)); |
| 1147 break; |
| 1148 case kS390_CmpDouble: |
| 1149 __ cdbr(i.InputDoubleRegister(0), i.InputDoubleRegister(1)); |
| 1150 break; |
| 1151 case kS390_Tst32: |
| 1148 if (HasRegisterInput(instr, 1)) { | 1152 if (HasRegisterInput(instr, 1)) { |
| 1149 __ and_(r0, i.InputRegister(0), i.InputRegister(1), i.OutputRCBit()); | 1153 __ AndP(r0, i.InputRegister(0), i.InputRegister(1)); |
| 1150 } else { | 1154 } else { |
| 1151 __ andi(r0, i.InputRegister(0), i.InputImmediate(1)); | 1155 __ AndP(r0, i.InputRegister(0), i.InputImmediate(1)); |
| 1152 } | 1156 } |
| 1153 #if V8_TARGET_ARCH_PPC64 | 1157 #if V8_TARGET_ARCH_S390X |
| 1154 __ extsw(r0, r0, i.OutputRCBit()); | 1158 // TODO(john.yan): use ltgfr here. |
| 1155 #endif | 1159 __ lgfr(r0, r0); |
| 1156 DCHECK_EQ(SetRC, i.OutputRCBit()); | 1160 __ LoadAndTestP(r0, r0); |
| 1157 break; | 1161 #endif |
| 1158 #if V8_TARGET_ARCH_PPC64 | 1162 break; |
| 1159 case kPPC_Tst64: | 1163 #if V8_TARGET_ARCH_S390X |
| 1164 case kS390_Tst64: |
| 1160 if (HasRegisterInput(instr, 1)) { | 1165 if (HasRegisterInput(instr, 1)) { |
| 1161 __ and_(r0, i.InputRegister(0), i.InputRegister(1), i.OutputRCBit()); | 1166 __ AndP(r0, i.InputRegister(0), i.InputRegister(1)); |
| 1162 } else { | 1167 } else { |
| 1163 __ andi(r0, i.InputRegister(0), i.InputImmediate(1)); | 1168 __ AndP(r0, i.InputRegister(0), i.InputImmediate(1)); |
| 1164 } | 1169 } |
| 1165 DCHECK_EQ(SetRC, i.OutputRCBit()); | 1170 break; |
| 1166 break; | 1171 #endif |
| 1167 #endif | 1172 case kS390_Push: |
| 1168 case kPPC_Push: | |
| 1169 if (instr->InputAt(0)->IsDoubleRegister()) { | 1173 if (instr->InputAt(0)->IsDoubleRegister()) { |
| 1170 __ stfdu(i.InputDoubleRegister(0), MemOperand(sp, -kDoubleSize)); | 1174 __ StoreDouble(i.InputDoubleRegister(0), MemOperand(sp, -kDoubleSize)); |
| 1175 __ lay(sp, MemOperand(sp, -kDoubleSize)); |
| 1171 frame_access_state()->IncreaseSPDelta(kDoubleSize / kPointerSize); | 1176 frame_access_state()->IncreaseSPDelta(kDoubleSize / kPointerSize); |
| 1172 } else { | 1177 } else { |
| 1173 __ Push(i.InputRegister(0)); | 1178 __ Push(i.InputRegister(0)); |
| 1174 frame_access_state()->IncreaseSPDelta(1); | 1179 frame_access_state()->IncreaseSPDelta(1); |
| 1175 } | 1180 } |
| 1176 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | 1181 break; |
| 1177 break; | 1182 case kS390_PushFrame: { |
| 1178 case kPPC_PushFrame: { | |
| 1179 int num_slots = i.InputInt32(1); | 1183 int num_slots = i.InputInt32(1); |
| 1180 if (instr->InputAt(0)->IsDoubleRegister()) { | 1184 if (instr->InputAt(0)->IsDoubleRegister()) { |
| 1181 __ stfdu(i.InputDoubleRegister(0), | 1185 __ StoreDouble(i.InputDoubleRegister(0), |
| 1182 MemOperand(sp, -num_slots * kPointerSize)); | 1186 MemOperand(sp, -num_slots * kPointerSize)); |
| 1183 } else { | 1187 } else { |
| 1184 __ StorePU(i.InputRegister(0), | 1188 __ StoreP(i.InputRegister(0), |
| 1185 MemOperand(sp, -num_slots * kPointerSize)); | 1189 MemOperand(sp, -num_slots * kPointerSize)); |
| 1186 } | 1190 } |
| 1187 break; | 1191 __ lay(sp, MemOperand(sp, -num_slots * kPointerSize)); |
| 1188 } | 1192 break; |
| 1189 case kPPC_StoreToStackSlot: { | 1193 } |
| 1194 case kS390_StoreToStackSlot: { |
| 1190 int slot = i.InputInt32(1); | 1195 int slot = i.InputInt32(1); |
| 1191 if (instr->InputAt(0)->IsDoubleRegister()) { | 1196 if (instr->InputAt(0)->IsDoubleRegister()) { |
| 1192 __ stfd(i.InputDoubleRegister(0), MemOperand(sp, slot * kPointerSize)); | 1197 __ StoreDouble(i.InputDoubleRegister(0), |
| 1198 MemOperand(sp, slot * kPointerSize)); |
| 1193 } else { | 1199 } else { |
| 1194 __ StoreP(i.InputRegister(0), MemOperand(sp, slot * kPointerSize)); | 1200 __ StoreP(i.InputRegister(0), MemOperand(sp, slot * kPointerSize)); |
| 1195 } | 1201 } |
| 1196 break; | 1202 break; |
| 1197 } | 1203 } |
| 1198 case kPPC_ExtendSignWord8: | 1204 case kS390_ExtendSignWord8: |
| 1199 __ extsb(i.OutputRegister(), i.InputRegister(0)); | 1205 #if V8_TARGET_ARCH_S390X |
| 1200 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | 1206 __ lgbr(i.OutputRegister(), i.InputRegister(0)); |
| 1201 break; | 1207 #else |
| 1202 case kPPC_ExtendSignWord16: | 1208 __ lbr(i.OutputRegister(), i.InputRegister(0)); |
| 1203 __ extsh(i.OutputRegister(), i.InputRegister(0)); | 1209 #endif |
| 1204 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | 1210 break; |
| 1205 break; | 1211 case kS390_ExtendSignWord16: |
| 1206 #if V8_TARGET_ARCH_PPC64 | 1212 #if V8_TARGET_ARCH_S390X |
| 1207 case kPPC_ExtendSignWord32: | 1213 __ lghr(i.OutputRegister(), i.InputRegister(0)); |
| 1208 __ extsw(i.OutputRegister(), i.InputRegister(0)); | 1214 #else |
| 1209 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | 1215 __ lhr(i.OutputRegister(), i.InputRegister(0)); |
| 1210 break; | 1216 #endif |
| 1211 case kPPC_Uint32ToUint64: | 1217 break; |
| 1218 #if V8_TARGET_ARCH_S390X |
| 1219 case kS390_ExtendSignWord32: |
| 1220 __ lgfr(i.OutputRegister(), i.InputRegister(0)); |
| 1221 break; |
| 1222 case kS390_Uint32ToUint64: |
| 1212 // Zero extend | 1223 // Zero extend |
| 1213 __ clrldi(i.OutputRegister(), i.InputRegister(0), Operand(32)); | 1224 __ llgfr(i.OutputRegister(), i.InputRegister(0)); |
| 1214 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | 1225 break; |
| 1215 break; | 1226 case kS390_Int64ToInt32: |
| 1216 case kPPC_Int64ToInt32: | 1227 // sign extend |
| 1217 __ extsw(i.OutputRegister(), i.InputRegister(0)); | 1228 __ lgfr(i.OutputRegister(), i.InputRegister(0)); |
| 1218 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | 1229 break; |
| 1219 break; | 1230 case kS390_Int64ToFloat32: |
| 1220 case kPPC_Int64ToFloat32: | |
| 1221 __ ConvertInt64ToFloat(i.InputRegister(0), i.OutputDoubleRegister()); | 1231 __ ConvertInt64ToFloat(i.InputRegister(0), i.OutputDoubleRegister()); |
| 1222 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | 1232 break; |
| 1223 break; | 1233 case kS390_Int64ToDouble: |
| 1224 case kPPC_Int64ToDouble: | |
| 1225 __ ConvertInt64ToDouble(i.InputRegister(0), i.OutputDoubleRegister()); | 1234 __ ConvertInt64ToDouble(i.InputRegister(0), i.OutputDoubleRegister()); |
| 1226 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | 1235 break; |
| 1227 break; | 1236 case kS390_Uint64ToFloat32: |
| 1228 case kPPC_Uint64ToFloat32: | |
| 1229 __ ConvertUnsignedInt64ToFloat(i.InputRegister(0), | 1237 __ ConvertUnsignedInt64ToFloat(i.InputRegister(0), |
| 1230 i.OutputDoubleRegister()); | 1238 i.OutputDoubleRegister()); |
| 1231 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | 1239 break; |
| 1232 break; | 1240 case kS390_Uint64ToDouble: |
| 1233 case kPPC_Uint64ToDouble: | |
| 1234 __ ConvertUnsignedInt64ToDouble(i.InputRegister(0), | 1241 __ ConvertUnsignedInt64ToDouble(i.InputRegister(0), |
| 1235 i.OutputDoubleRegister()); | 1242 i.OutputDoubleRegister()); |
| 1236 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | 1243 break; |
| 1237 break; | 1244 #endif |
| 1238 #endif | 1245 case kS390_Int32ToFloat32: |
| 1239 case kPPC_Int32ToFloat32: | |
| 1240 __ ConvertIntToFloat(i.InputRegister(0), i.OutputDoubleRegister()); | 1246 __ ConvertIntToFloat(i.InputRegister(0), i.OutputDoubleRegister()); |
| 1241 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | 1247 break; |
| 1242 break; | 1248 case kS390_Int32ToDouble: |
| 1243 case kPPC_Int32ToDouble: | |
| 1244 __ ConvertIntToDouble(i.InputRegister(0), i.OutputDoubleRegister()); | 1249 __ ConvertIntToDouble(i.InputRegister(0), i.OutputDoubleRegister()); |
| 1245 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | 1250 break; |
| 1246 break; | 1251 case kS390_Uint32ToFloat32: |
| 1247 case kPPC_Uint32ToFloat32: | |
| 1248 __ ConvertUnsignedIntToFloat(i.InputRegister(0), | 1252 __ ConvertUnsignedIntToFloat(i.InputRegister(0), |
| 1249 i.OutputDoubleRegister()); | 1253 i.OutputDoubleRegister()); |
| 1250 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | 1254 break; |
| 1251 break; | 1255 case kS390_Uint32ToDouble: |
| 1252 case kPPC_Uint32ToDouble: | |
| 1253 __ ConvertUnsignedIntToDouble(i.InputRegister(0), | 1256 __ ConvertUnsignedIntToDouble(i.InputRegister(0), |
| 1254 i.OutputDoubleRegister()); | 1257 i.OutputDoubleRegister()); |
| 1255 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | 1258 break; |
| 1256 break; | 1259 case kS390_DoubleToInt32: |
| 1257 case kPPC_DoubleToInt32: | 1260 case kS390_DoubleToUint32: |
| 1258 case kPPC_DoubleToUint32: | 1261 case kS390_DoubleToInt64: { |
| 1259 case kPPC_DoubleToInt64: { | 1262 #if V8_TARGET_ARCH_S390X |
| 1260 #if V8_TARGET_ARCH_PPC64 | |
| 1261 bool check_conversion = | 1263 bool check_conversion = |
| 1262 (opcode == kPPC_DoubleToInt64 && i.OutputCount() > 1); | 1264 (opcode == kS390_DoubleToInt64 && i.OutputCount() > 1); |
| 1265 #endif |
| 1266 __ ConvertDoubleToInt64(i.InputDoubleRegister(0), |
| 1267 #if !V8_TARGET_ARCH_S390X |
| 1268 kScratchReg, |
| 1269 #endif |
| 1270 i.OutputRegister(0), kScratchDoubleReg); |
| 1271 #if V8_TARGET_ARCH_S390X |
| 1263 if (check_conversion) { | 1272 if (check_conversion) { |
| 1264 __ mtfsb0(VXCVI); // clear FPSCR:VXCVI bit | 1273 Label conversion_done; |
| 1265 } | 1274 __ LoadImmP(i.OutputRegister(1), Operand::Zero()); |
| 1266 #endif | 1275 __ b(Condition(1), &conversion_done); // special case |
| 1267 __ ConvertDoubleToInt64(i.InputDoubleRegister(0), | 1276 __ LoadImmP(i.OutputRegister(1), Operand(1)); |
| 1268 #if !V8_TARGET_ARCH_PPC64 | 1277 __ bind(&conversion_done); |
| 1269 kScratchReg, | 1278 } |
| 1270 #endif | 1279 #endif |
| 1271 i.OutputRegister(0), kScratchDoubleReg); | 1280 break; |
| 1272 #if V8_TARGET_ARCH_PPC64 | 1281 } |
| 1282 case kS390_Float32ToInt32: { |
| 1283 bool check_conversion = (i.OutputCount() > 1); |
| 1284 __ ConvertFloat32ToInt32(i.InputDoubleRegister(0), i.OutputRegister(0), |
| 1285 kScratchDoubleReg); |
| 1273 if (check_conversion) { | 1286 if (check_conversion) { |
| 1274 // Set 2nd output to zero if conversion fails. | 1287 Label conversion_done; |
| 1275 CRegister cr = cr7; | 1288 __ LoadImmP(i.OutputRegister(1), Operand::Zero()); |
| 1276 int crbit = v8::internal::Assembler::encode_crbit( | 1289 __ b(Condition(1), &conversion_done); // special case |
| 1277 cr, static_cast<CRBit>(VXCVI % CRWIDTH)); | 1290 __ LoadImmP(i.OutputRegister(1), Operand(1)); |
| 1278 __ mcrfs(cr, VXCVI); // extract FPSCR field containing VXCVI into cr7 | 1291 __ bind(&conversion_done); |
| 1279 if (CpuFeatures::IsSupported(ISELECT)) { | 1292 } |
| 1280 __ li(i.OutputRegister(1), Operand(1)); | 1293 break; |
| 1281 __ isel(i.OutputRegister(1), r0, i.OutputRegister(1), crbit); | 1294 } |
| 1282 } else { | 1295 case kS390_Float32ToUint32: { |
| 1283 __ li(i.OutputRegister(1), Operand::Zero()); | |
| 1284 __ bc(v8::internal::Assembler::kInstrSize * 2, BT, crbit); | |
| 1285 __ li(i.OutputRegister(1), Operand(1)); | |
| 1286 } | |
| 1287 } | |
| 1288 #endif | |
| 1289 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | |
| 1290 break; | |
| 1291 } | |
| 1292 #if V8_TARGET_ARCH_PPC64 | |
| 1293 case kPPC_DoubleToUint64: { | |
| 1294 bool check_conversion = (i.OutputCount() > 1); | 1296 bool check_conversion = (i.OutputCount() > 1); |
| 1297 __ ConvertFloat32ToUnsignedInt32(i.InputDoubleRegister(0), |
| 1298 i.OutputRegister(0), kScratchDoubleReg); |
| 1295 if (check_conversion) { | 1299 if (check_conversion) { |
| 1296 __ mtfsb0(VXCVI); // clear FPSCR:VXCVI bit | 1300 Label conversion_done; |
| 1297 } | 1301 __ LoadImmP(i.OutputRegister(1), Operand::Zero()); |
| 1302 __ b(Condition(1), &conversion_done); // special case |
| 1303 __ LoadImmP(i.OutputRegister(1), Operand(1)); |
| 1304 __ bind(&conversion_done); |
| 1305 } |
| 1306 break; |
| 1307 } |
| 1308 #if V8_TARGET_ARCH_S390X |
| 1309 case kS390_Float32ToUint64: { |
| 1310 bool check_conversion = (i.OutputCount() > 1); |
| 1311 __ ConvertFloat32ToUnsignedInt64(i.InputDoubleRegister(0), |
| 1312 i.OutputRegister(0), kScratchDoubleReg); |
| 1313 if (check_conversion) { |
| 1314 Label conversion_done; |
| 1315 __ LoadImmP(i.OutputRegister(1), Operand::Zero()); |
| 1316 __ b(Condition(1), &conversion_done); // special case |
| 1317 __ LoadImmP(i.OutputRegister(1), Operand(1)); |
| 1318 __ bind(&conversion_done); |
| 1319 } |
| 1320 break; |
| 1321 } |
| 1322 #endif |
| 1323 case kS390_Float32ToInt64: { |
| 1324 #if V8_TARGET_ARCH_S390X |
| 1325 bool check_conversion = |
| 1326 (opcode == kS390_Float32ToInt64 && i.OutputCount() > 1); |
| 1327 #endif |
| 1328 __ ConvertFloat32ToInt64(i.InputDoubleRegister(0), |
| 1329 #if !V8_TARGET_ARCH_S390X |
| 1330 kScratchReg, |
| 1331 #endif |
| 1332 i.OutputRegister(0), kScratchDoubleReg); |
| 1333 #if V8_TARGET_ARCH_S390X |
| 1334 if (check_conversion) { |
| 1335 Label conversion_done; |
| 1336 __ LoadImmP(i.OutputRegister(1), Operand::Zero()); |
| 1337 __ b(Condition(1), &conversion_done); // special case |
| 1338 __ LoadImmP(i.OutputRegister(1), Operand(1)); |
| 1339 __ bind(&conversion_done); |
| 1340 } |
| 1341 #endif |
| 1342 break; |
| 1343 } |
| 1344 #if V8_TARGET_ARCH_S390X |
| 1345 case kS390_DoubleToUint64: { |
| 1346 bool check_conversion = (i.OutputCount() > 1); |
| 1298 __ ConvertDoubleToUnsignedInt64(i.InputDoubleRegister(0), | 1347 __ ConvertDoubleToUnsignedInt64(i.InputDoubleRegister(0), |
| 1299 i.OutputRegister(0), kScratchDoubleReg); | 1348 i.OutputRegister(0), kScratchDoubleReg); |
| 1300 if (check_conversion) { | 1349 if (check_conversion) { |
| 1301 // Set 2nd output to zero if conversion fails. | 1350 Label conversion_done; |
| 1302 CRegister cr = cr7; | 1351 __ LoadImmP(i.OutputRegister(1), Operand::Zero()); |
| 1303 int crbit = v8::internal::Assembler::encode_crbit( | 1352 __ b(Condition(1), &conversion_done); // special case |
| 1304 cr, static_cast<CRBit>(VXCVI % CRWIDTH)); | 1353 __ LoadImmP(i.OutputRegister(1), Operand(1)); |
| 1305 __ mcrfs(cr, VXCVI); // extract FPSCR field containing VXCVI into cr7 | 1354 __ bind(&conversion_done); |
| 1306 if (CpuFeatures::IsSupported(ISELECT)) { | 1355 } |
| 1307 __ li(i.OutputRegister(1), Operand(1)); | 1356 break; |
| 1308 __ isel(i.OutputRegister(1), r0, i.OutputRegister(1), crbit); | 1357 } |
| 1309 } else { | 1358 #endif |
| 1310 __ li(i.OutputRegister(1), Operand::Zero()); | 1359 case kS390_DoubleToFloat32: |
| 1311 __ bc(v8::internal::Assembler::kInstrSize * 2, BT, crbit); | 1360 __ ledbr(i.OutputDoubleRegister(), i.InputDoubleRegister(0)); |
| 1312 __ li(i.OutputRegister(1), Operand(1)); | 1361 break; |
| 1313 } | 1362 case kS390_Float32ToDouble: |
| 1314 } | 1363 __ ldebr(i.OutputDoubleRegister(), i.InputDoubleRegister(0)); |
| 1315 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | 1364 break; |
| 1316 break; | 1365 case kS390_DoubleExtractLowWord32: |
| 1317 } | 1366       // TODO(john.yan): this can cause a problem when interrupting, |
| 1318 #endif | 1367 // use freg->greg instruction |
| 1319 case kPPC_DoubleToFloat32: | 1368 __ stdy(i.InputDoubleRegister(0), MemOperand(sp, -kDoubleSize)); |
| 1320 ASSEMBLE_FLOAT_UNOP_RC(frsp); | 1369 __ LoadlW(i.OutputRegister(), |
| 1321 break; | 1370 MemOperand(sp, -kDoubleSize + Register::kMantissaOffset)); |
| 1322 case kPPC_Float32ToDouble: | 1371 break; |
| 1323 // Nothing to do. | 1372 case kS390_DoubleExtractHighWord32: |
| 1324 __ Move(i.OutputDoubleRegister(), i.InputDoubleRegister(0)); | 1373       // TODO(john.yan): this can cause a problem when interrupting, |
| 1325 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | 1374 // use freg->greg instruction |
| 1326 break; | 1375 __ stdy(i.InputDoubleRegister(0), MemOperand(sp, -kDoubleSize)); |
| 1327 case kPPC_DoubleExtractLowWord32: | 1376 __ LoadlW(i.OutputRegister(), |
| 1328 __ MovDoubleLowToInt(i.OutputRegister(), i.InputDoubleRegister(0)); | 1377 MemOperand(sp, -kDoubleSize + Register::kExponentOffset)); |
| 1329 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | 1378 break; |
| 1330 break; | 1379 case kS390_DoubleInsertLowWord32: |
| 1331 case kPPC_DoubleExtractHighWord32: | 1380 __ InsertDoubleLow(i.OutputDoubleRegister(), i.InputRegister(1)); |
| 1332 __ MovDoubleHighToInt(i.OutputRegister(), i.InputDoubleRegister(0)); | 1381 break; |
| 1333 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | 1382 case kS390_DoubleInsertHighWord32: |
| 1334 break; | 1383 __ InsertDoubleHigh(i.OutputDoubleRegister(), i.InputRegister(1)); |
| 1335 case kPPC_DoubleInsertLowWord32: | 1384 break; |
| 1336 __ InsertDoubleLow(i.OutputDoubleRegister(), i.InputRegister(1), r0); | 1385 case kS390_DoubleConstruct: |
| 1337 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | 1386       // TODO(john.yan): this can cause a problem when interrupting, |
| 1338 break; | 1387 // use greg->freg instruction |
| 1339 case kPPC_DoubleInsertHighWord32: | 1388 #if V8_TARGET_LITTLE_ENDIAN |
| 1340 __ InsertDoubleHigh(i.OutputDoubleRegister(), i.InputRegister(1), r0); | 1389 __ StoreW(i.InputRegister(0), MemOperand(sp, -kDoubleSize / 2)); |
| 1341 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | 1390 __ StoreW(i.InputRegister(1), MemOperand(sp, -kDoubleSize)); |
| 1342 break; | |
| 1343 case kPPC_DoubleConstruct: | |
| 1344 #if V8_TARGET_ARCH_PPC64 | |
| 1345 __ MovInt64ComponentsToDouble(i.OutputDoubleRegister(), | |
| 1346 i.InputRegister(0), i.InputRegister(1), r0); | |
| 1347 #else | 1391 #else |
| 1348 __ MovInt64ToDouble(i.OutputDoubleRegister(), i.InputRegister(0), | 1392 __ StoreW(i.InputRegister(1), MemOperand(sp, -kDoubleSize / 2)); |
| 1349 i.InputRegister(1)); | 1393 __ StoreW(i.InputRegister(0), MemOperand(sp, -kDoubleSize)); |
| 1350 #endif | 1394 #endif |
| 1351 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | 1395 __ ldy(i.OutputDoubleRegister(), MemOperand(sp, -kDoubleSize)); |
| 1352 break; | 1396 break; |
| 1353 case kPPC_BitcastFloat32ToInt32: | 1397 case kS390_LoadWordS8: |
| 1398 ASSEMBLE_LOAD_INTEGER(LoadlB); |
| 1399 #if V8_TARGET_ARCH_S390X |
| 1400 __ lgbr(i.OutputRegister(), i.OutputRegister()); |
| 1401 #else |
| 1402 __ lbr(i.OutputRegister(), i.OutputRegister()); |
| 1403 #endif |
| 1404 break; |
| 1405 case kS390_BitcastFloat32ToInt32: |
| 1354 __ MovFloatToInt(i.OutputRegister(), i.InputDoubleRegister(0)); | 1406 __ MovFloatToInt(i.OutputRegister(), i.InputDoubleRegister(0)); |
| 1355 break; | 1407 break; |
| 1356 case kPPC_BitcastInt32ToFloat32: | 1408 case kS390_BitcastInt32ToFloat32: |
| 1357 __ MovIntToFloat(i.OutputDoubleRegister(), i.InputRegister(0)); | 1409 __ MovIntToFloat(i.OutputDoubleRegister(), i.InputRegister(0)); |
| 1358 break; | 1410 break; |
| 1359 #if V8_TARGET_ARCH_PPC64 | 1411 #if V8_TARGET_ARCH_S390X |
| 1360 case kPPC_BitcastDoubleToInt64: | 1412 case kS390_BitcastDoubleToInt64: |
| 1361 __ MovDoubleToInt64(i.OutputRegister(), i.InputDoubleRegister(0)); | 1413 __ MovDoubleToInt64(i.OutputRegister(), i.InputDoubleRegister(0)); |
| 1362 break; | 1414 break; |
| 1363 case kPPC_BitcastInt64ToDouble: | 1415 case kS390_BitcastInt64ToDouble: |
| 1364 __ MovInt64ToDouble(i.OutputDoubleRegister(), i.InputRegister(0)); | 1416 __ MovInt64ToDouble(i.OutputDoubleRegister(), i.InputRegister(0)); |
| 1365 break; | 1417 break; |
| 1366 #endif | 1418 #endif |
| 1367 case kPPC_LoadWordU8: | 1419 case kS390_LoadWordU8: |
| 1368 ASSEMBLE_LOAD_INTEGER(lbz, lbzx); | 1420 ASSEMBLE_LOAD_INTEGER(LoadlB); |
| 1369 break; | 1421 break; |
| 1370 case kPPC_LoadWordS8: | 1422 case kS390_LoadWordU16: |
| 1371 ASSEMBLE_LOAD_INTEGER(lbz, lbzx); | 1423 ASSEMBLE_LOAD_INTEGER(LoadLogicalHalfWordP); |
| 1372 __ extsb(i.OutputRegister(), i.OutputRegister()); | 1424 break; |
| 1373 break; | 1425 case kS390_LoadWordS16: |
| 1374 case kPPC_LoadWordU16: | 1426 ASSEMBLE_LOAD_INTEGER(LoadHalfWordP); |
| 1375 ASSEMBLE_LOAD_INTEGER(lhz, lhzx); | 1427 break; |
| 1376 break; | 1428 case kS390_LoadWordS32: |
| 1377 case kPPC_LoadWordS16: | 1429 ASSEMBLE_LOAD_INTEGER(LoadW); |
| 1378 ASSEMBLE_LOAD_INTEGER(lha, lhax); | 1430 break; |
| 1379 break; | 1431 #if V8_TARGET_ARCH_S390X |
| 1380 case kPPC_LoadWordS32: | 1432 case kS390_LoadWord64: |
| 1381 ASSEMBLE_LOAD_INTEGER(lwa, lwax); | 1433 ASSEMBLE_LOAD_INTEGER(lg); |
| 1382 break; | 1434 break; |
| 1383 #if V8_TARGET_ARCH_PPC64 | 1435 #endif |
| 1384 case kPPC_LoadWord64: | 1436 case kS390_LoadFloat32: |
| 1385 ASSEMBLE_LOAD_INTEGER(ld, ldx); | 1437 ASSEMBLE_LOAD_FLOAT(LoadFloat32); |
| 1386 break; | 1438 break; |
| 1387 #endif | 1439 case kS390_LoadDouble: |
| 1388 case kPPC_LoadFloat32: | 1440 ASSEMBLE_LOAD_FLOAT(LoadDouble); |
| 1389 ASSEMBLE_LOAD_FLOAT(lfs, lfsx); | 1441 break; |
| 1390 break; | 1442 case kS390_StoreWord8: |
| 1391 case kPPC_LoadDouble: | 1443 ASSEMBLE_STORE_INTEGER(StoreByte); |
| 1392 ASSEMBLE_LOAD_FLOAT(lfd, lfdx); | 1444 break; |
| 1393 break; | 1445 case kS390_StoreWord16: |
| 1394 case kPPC_StoreWord8: | 1446 ASSEMBLE_STORE_INTEGER(StoreHalfWord); |
| 1395 ASSEMBLE_STORE_INTEGER(stb, stbx); | 1447 break; |
| 1396 break; | 1448 case kS390_StoreWord32: |
| 1397 case kPPC_StoreWord16: | 1449 ASSEMBLE_STORE_INTEGER(StoreW); |
| 1398 ASSEMBLE_STORE_INTEGER(sth, sthx); | 1450 break; |
| 1399 break; | 1451 #if V8_TARGET_ARCH_S390X |
| 1400 case kPPC_StoreWord32: | 1452 case kS390_StoreWord64: |
| 1401 ASSEMBLE_STORE_INTEGER(stw, stwx); | 1453 ASSEMBLE_STORE_INTEGER(StoreP); |
| 1402 break; | 1454 break; |
| 1403 #if V8_TARGET_ARCH_PPC64 | 1455 #endif |
| 1404 case kPPC_StoreWord64: | 1456 case kS390_StoreFloat32: |
| 1405 ASSEMBLE_STORE_INTEGER(std, stdx); | |
| 1406 break; | |
| 1407 #endif | |
| 1408 case kPPC_StoreFloat32: | |
| 1409 ASSEMBLE_STORE_FLOAT32(); | 1457 ASSEMBLE_STORE_FLOAT32(); |
| 1410 break; | 1458 break; |
| 1411 case kPPC_StoreDouble: | 1459 case kS390_StoreDouble: |
| 1412 ASSEMBLE_STORE_DOUBLE(); | 1460 ASSEMBLE_STORE_DOUBLE(); |
| 1413 break; | 1461 break; |
| 1414 case kCheckedLoadInt8: | 1462 case kCheckedLoadInt8: |
| 1415 ASSEMBLE_CHECKED_LOAD_INTEGER(lbz, lbzx); | 1463 ASSEMBLE_CHECKED_LOAD_INTEGER(LoadlB); |
| 1416 __ extsb(i.OutputRegister(), i.OutputRegister()); | 1464 #if V8_TARGET_ARCH_S390X |
| 1465 __ lgbr(i.OutputRegister(), i.OutputRegister()); |
| 1466 #else |
| 1467 __ lbr(i.OutputRegister(), i.OutputRegister()); |
| 1468 #endif |
| 1417 break; | 1469 break; |
| 1418 case kCheckedLoadUint8: | 1470 case kCheckedLoadUint8: |
| 1419 ASSEMBLE_CHECKED_LOAD_INTEGER(lbz, lbzx); | 1471 ASSEMBLE_CHECKED_LOAD_INTEGER(LoadlB); |
| 1420 break; | 1472 break; |
| 1421 case kCheckedLoadInt16: | 1473 case kCheckedLoadInt16: |
| 1422 ASSEMBLE_CHECKED_LOAD_INTEGER(lha, lhax); | 1474 ASSEMBLE_CHECKED_LOAD_INTEGER(LoadHalfWordP); |
| 1423 break; | 1475 break; |
| 1424 case kCheckedLoadUint16: | 1476 case kCheckedLoadUint16: |
| 1425 ASSEMBLE_CHECKED_LOAD_INTEGER(lhz, lhzx); | 1477 ASSEMBLE_CHECKED_LOAD_INTEGER(LoadLogicalHalfWordP); |
| 1426 break; | 1478 break; |
| 1427 case kCheckedLoadWord32: | 1479 case kCheckedLoadWord32: |
| 1428 ASSEMBLE_CHECKED_LOAD_INTEGER(lwa, lwax); | 1480 ASSEMBLE_CHECKED_LOAD_INTEGER(LoadW); |
| 1429 break; | 1481 break; |
| 1430 case kCheckedLoadWord64: | 1482 case kCheckedLoadWord64: |
| 1431 #if V8_TARGET_ARCH_PPC64 | 1483 #if V8_TARGET_ARCH_S390X |
| 1432 ASSEMBLE_CHECKED_LOAD_INTEGER(ld, ldx); | 1484 ASSEMBLE_CHECKED_LOAD_INTEGER(LoadP); |
| 1433 #else | 1485 #else |
| 1434 UNREACHABLE(); | 1486 UNREACHABLE(); |
| 1435 #endif | 1487 #endif |
| 1436 break; | 1488 break; |
| 1437 case kCheckedLoadFloat32: | 1489 case kCheckedLoadFloat32: |
| 1438 ASSEMBLE_CHECKED_LOAD_FLOAT(lfs, lfsx, 32); | 1490 ASSEMBLE_CHECKED_LOAD_FLOAT(LoadFloat32, 32); |
| 1439 break; | 1491 break; |
| 1440 case kCheckedLoadFloat64: | 1492 case kCheckedLoadFloat64: |
| 1441 ASSEMBLE_CHECKED_LOAD_FLOAT(lfd, lfdx, 64); | 1493 ASSEMBLE_CHECKED_LOAD_FLOAT(LoadDouble, 64); |
| 1442 break; | 1494 break; |
| 1443 case kCheckedStoreWord8: | 1495 case kCheckedStoreWord8: |
| 1444 ASSEMBLE_CHECKED_STORE_INTEGER(stb, stbx); | 1496 ASSEMBLE_CHECKED_STORE_INTEGER(StoreByte); |
| 1445 break; | 1497 break; |
| 1446 case kCheckedStoreWord16: | 1498 case kCheckedStoreWord16: |
| 1447 ASSEMBLE_CHECKED_STORE_INTEGER(sth, sthx); | 1499 ASSEMBLE_CHECKED_STORE_INTEGER(StoreHalfWord); |
| 1448 break; | 1500 break; |
| 1449 case kCheckedStoreWord32: | 1501 case kCheckedStoreWord32: |
| 1450 ASSEMBLE_CHECKED_STORE_INTEGER(stw, stwx); | 1502 ASSEMBLE_CHECKED_STORE_INTEGER(StoreW); |
| 1451 break; | 1503 break; |
| 1452 case kCheckedStoreWord64: | 1504 case kCheckedStoreWord64: |
| 1453 #if V8_TARGET_ARCH_PPC64 | 1505 #if V8_TARGET_ARCH_S390X |
| 1454 ASSEMBLE_CHECKED_STORE_INTEGER(std, stdx); | 1506 ASSEMBLE_CHECKED_STORE_INTEGER(StoreP); |
| 1455 #else | 1507 #else |
| 1456 UNREACHABLE(); | 1508 UNREACHABLE(); |
| 1457 #endif | 1509 #endif |
| 1458 break; | 1510 break; |
| 1459 case kCheckedStoreFloat32: | 1511 case kCheckedStoreFloat32: |
| 1460 ASSEMBLE_CHECKED_STORE_FLOAT32(); | 1512 ASSEMBLE_CHECKED_STORE_FLOAT32(); |
| 1461 break; | 1513 break; |
| 1462 case kCheckedStoreFloat64: | 1514 case kCheckedStoreFloat64: |
| 1463 ASSEMBLE_CHECKED_STORE_DOUBLE(); | 1515 ASSEMBLE_CHECKED_STORE_DOUBLE(); |
| 1464 break; | 1516 break; |
| 1465 default: | 1517 default: |
| 1466 UNREACHABLE(); | 1518 UNREACHABLE(); |
| 1467 break; | 1519 break; |
| 1468 } | 1520 } |
| 1469 } // NOLINT(readability/fn_size) | 1521 } // NOLINT(readability/fn_size) |
| 1470 | 1522 |
| 1471 | |
| 1472 // Assembles branches after an instruction. | 1523 // Assembles branches after an instruction. |
| 1473 void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) { | 1524 void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) { |
| 1474 PPCOperandConverter i(this, instr); | 1525 S390OperandConverter i(this, instr); |
| 1475 Label* tlabel = branch->true_label; | 1526 Label* tlabel = branch->true_label; |
| 1476 Label* flabel = branch->false_label; | 1527 Label* flabel = branch->false_label; |
| 1477 ArchOpcode op = instr->arch_opcode(); | 1528 ArchOpcode op = instr->arch_opcode(); |
| 1478 FlagsCondition condition = branch->condition; | 1529 FlagsCondition condition = branch->condition; |
| 1479 CRegister cr = cr0; | |
| 1480 | 1530 |
| 1481 Condition cond = FlagsConditionToCondition(condition, op); | 1531 Condition cond = FlagsConditionToCondition(condition, op); |
| 1482 if (op == kPPC_CmpDouble) { | 1532 if (op == kS390_CmpDouble) { |
| 1483 // check for unordered if necessary | 1533 // check for unordered if necessary |
| 1484 if (cond == le) { | 1534 // Branching to flabel/tlabel according to what's expected by tests |
| 1485 __ bunordered(flabel, cr); | 1535 if (cond == le || cond == eq || cond == lt) { |
| 1486 // Unnecessary for eq/lt since only FU bit will be set. | 1536 __ bunordered(flabel); |
| 1487 } else if (cond == gt) { | 1537 } else if (cond == gt || cond == ne || cond == ge) { |
| 1488 __ bunordered(tlabel, cr); | 1538 __ bunordered(tlabel); |
| 1489 // Unnecessary for ne/ge since only FU bit will be set. | |
| 1490 } | 1539 } |
| 1491 } | 1540 } |
| 1492 __ b(cond, tlabel, cr); | 1541 __ b(cond, tlabel); |
| 1493 if (!branch->fallthru) __ b(flabel); // no fallthru to flabel. | 1542 if (!branch->fallthru) __ b(flabel); // no fallthru to flabel. |
| 1494 } | 1543 } |
| 1495 | 1544 |
| 1496 | |
| 1497 void CodeGenerator::AssembleArchJump(RpoNumber target) { | 1545 void CodeGenerator::AssembleArchJump(RpoNumber target) { |
| 1498 if (!IsNextInAssemblyOrder(target)) __ b(GetLabel(target)); | 1546 if (!IsNextInAssemblyOrder(target)) __ b(GetLabel(target)); |
| 1499 } | 1547 } |
| 1500 | 1548 |
| 1501 | |
| 1502 // Assembles boolean materializations after an instruction. | 1549 // Assembles boolean materializations after an instruction. |
| 1503 void CodeGenerator::AssembleArchBoolean(Instruction* instr, | 1550 void CodeGenerator::AssembleArchBoolean(Instruction* instr, |
| 1504 FlagsCondition condition) { | 1551 FlagsCondition condition) { |
| 1505 PPCOperandConverter i(this, instr); | 1552 S390OperandConverter i(this, instr); |
| 1506 Label done; | 1553 Label done; |
| 1507 ArchOpcode op = instr->arch_opcode(); | 1554 ArchOpcode op = instr->arch_opcode(); |
| 1508   CRegister cr = cr0; | 1555   bool check_unordered = (op == kS390_CmpDouble || op == kS390_CmpFloat); |
| 1509 int reg_value = -1; | 1556 |
| 1557 // Overflow checked for add/sub only. |
| 1558 DCHECK((condition != kOverflow && condition != kNotOverflow) || |
| 1559 (op == kS390_AddWithOverflow32 || op == kS390_SubWithOverflow32)); |
| 1510 | 1560 |
| 1511 // Materialize a full 32-bit 1 or 0 value. The result register is always the | 1561 // Materialize a full 32-bit 1 or 0 value. The result register is always the |
| 1512 // last output of the instruction. | 1562 // last output of the instruction. |
| 1513 DCHECK_NE(0u, instr->OutputCount()); | 1563 DCHECK_NE(0u, instr->OutputCount()); |
| 1514 Register reg = i.OutputRegister(instr->OutputCount() - 1); | 1564 Register reg = i.OutputRegister(instr->OutputCount() - 1); |
| 1515 | |
| 1516 Condition cond = FlagsConditionToCondition(condition, op); | 1565 Condition cond = FlagsConditionToCondition(condition, op); |
| 1517 if (op == kPPC_CmpDouble) { | 1566 switch (cond) { |
| 1518 // check for unordered if necessary | 1567 case ne: |
| 1519 if (cond == le) { | 1568 case ge: |
| 1520 reg_value = 0; | 1569 case gt: |
| 1521 __ li(reg, Operand::Zero()); | 1570 if (check_unordered) { |
| 1522 __ bunordered(&done, cr); | 1571 __ LoadImmP(reg, Operand(1)); |
| 1523 } else if (cond == gt) { | 1572 __ LoadImmP(kScratchReg, Operand::Zero()); |
| 1524 reg_value = 1; | 1573 __ bunordered(&done); |
| 1525 __ li(reg, Operand(1)); | 1574 Label cond_true; |
| 1526 __ bunordered(&done, cr); | 1575 __ b(cond, &cond_true, Label::kNear); |
| 1527 } | 1576 __ LoadRR(reg, kScratchReg); |
| 1528 // Unnecessary for eq/lt & ne/ge since only FU bit will be set. | 1577 __ bind(&cond_true); |
| 1529 } | 1578 } else { |
| 1530 | 1579 Label cond_true, done_here; |
| 1531 if (CpuFeatures::IsSupported(ISELECT)) { | 1580 __ LoadImmP(reg, Operand(1)); |
| 1532 switch (cond) { | 1581 __ b(cond, &cond_true, Label::kNear); |
| 1533 case eq: | 1582 __ LoadImmP(reg, Operand::Zero()); |
| 1534 case lt: | 1583 __ bind(&cond_true); |
| 1535 case gt: | 1584 } |
| 1536 if (reg_value != 1) __ li(reg, Operand(1)); | 1585 break; |
| 1537 __ li(kScratchReg, Operand::Zero()); | 1586 case eq: |
| 1538 __ isel(cond, reg, reg, kScratchReg, cr); | 1587 case lt: |
| 1539 break; | 1588 case le: |
| 1540 case ne: | 1589 if (check_unordered) { |
| 1541 case ge: | 1590 __ LoadImmP(reg, Operand::Zero()); |
| 1542 case le: | 1591 __ LoadImmP(kScratchReg, Operand(1)); |
| 1543 if (reg_value != 1) __ li(reg, Operand(1)); | 1592 __ bunordered(&done); |
| 1544 // r0 implies logical zero in this form | 1593 Label cond_false; |
| 1545 __ isel(NegateCondition(cond), reg, r0, reg, cr); | 1594 __ b(NegateCondition(cond), &cond_false, Label::kNear); |
| 1546 break; | 1595 __ LoadRR(reg, kScratchReg); |
| 1596 __ bind(&cond_false); |
| 1597 } else { |
| 1598 __ LoadImmP(reg, Operand::Zero()); |
| 1599 Label cond_false; |
| 1600 __ b(NegateCondition(cond), &cond_false, Label::kNear); |
| 1601 __ LoadImmP(reg, Operand(1)); |
| 1602 __ bind(&cond_false); |
| 1603 } |
| 1604 break; |
| 1547 default: | 1605 default: |
| 1548 UNREACHABLE(); | 1606 UNREACHABLE(); |
| 1549 break; | 1607 break; |
| 1550 } | |
| 1551 } else { | |
| 1552 if (reg_value != 0) __ li(reg, Operand::Zero()); | |
| 1553 __ b(NegateCondition(cond), &done, cr); | |
| 1554 __ li(reg, Operand(1)); | |
| 1555 } | 1608 } |
| 1556 __ bind(&done); | 1609 __ bind(&done); |
| 1557 } | 1610 } |
| 1558 | 1611 |
| 1559 | |
| 1560 void CodeGenerator::AssembleArchLookupSwitch(Instruction* instr) { | 1612 void CodeGenerator::AssembleArchLookupSwitch(Instruction* instr) { |
| 1561 PPCOperandConverter i(this, instr); | 1613 S390OperandConverter i(this, instr); |
| 1562 Register input = i.InputRegister(0); | 1614 Register input = i.InputRegister(0); |
| 1563 for (size_t index = 2; index < instr->InputCount(); index += 2) { | 1615 for (size_t index = 2; index < instr->InputCount(); index += 2) { |
| 1564 __ Cmpi(input, Operand(i.InputInt32(index + 0)), r0); | 1616 __ CmpP(input, Operand(i.InputInt32(index + 0))); |
| 1565 __ beq(GetLabel(i.InputRpo(index + 1))); | 1617 __ beq(GetLabel(i.InputRpo(index + 1))); |
| 1566 } | 1618 } |
| 1567 AssembleArchJump(i.InputRpo(1)); | 1619 AssembleArchJump(i.InputRpo(1)); |
| 1568 } | 1620 } |
| 1569 | 1621 |
| 1570 | |
| 1571 void CodeGenerator::AssembleArchTableSwitch(Instruction* instr) { | 1622 void CodeGenerator::AssembleArchTableSwitch(Instruction* instr) { |
| 1572 PPCOperandConverter i(this, instr); | 1623 S390OperandConverter i(this, instr); |
| 1573 Register input = i.InputRegister(0); | 1624 Register input = i.InputRegister(0); |
| 1574 int32_t const case_count = static_cast<int32_t>(instr->InputCount() - 2); | 1625 int32_t const case_count = static_cast<int32_t>(instr->InputCount() - 2); |
| 1575 Label** cases = zone()->NewArray<Label*>(case_count); | 1626 Label** cases = zone()->NewArray<Label*>(case_count); |
| 1576 for (int32_t index = 0; index < case_count; ++index) { | 1627 for (int32_t index = 0; index < case_count; ++index) { |
| 1577 cases[index] = GetLabel(i.InputRpo(index + 2)); | 1628 cases[index] = GetLabel(i.InputRpo(index + 2)); |
| 1578 } | 1629 } |
| 1579 Label* const table = AddJumpTable(cases, case_count); | 1630 Label* const table = AddJumpTable(cases, case_count); |
| 1580 __ Cmpli(input, Operand(case_count), r0); | 1631 __ CmpLogicalP(input, Operand(case_count)); |
| 1581 __ bge(GetLabel(i.InputRpo(1))); | 1632 __ bge(GetLabel(i.InputRpo(1))); |
| 1582 __ mov_label_addr(kScratchReg, table); | 1633 __ larl(kScratchReg, table); |
| 1583 __ ShiftLeftImm(r0, input, Operand(kPointerSizeLog2)); | 1634 __ ShiftLeftP(r1, input, Operand(kPointerSizeLog2)); |
| 1584 __ LoadPX(kScratchReg, MemOperand(kScratchReg, r0)); | 1635 __ LoadP(kScratchReg, MemOperand(kScratchReg, r1)); |
| 1585 __ Jump(kScratchReg); | 1636 __ Jump(kScratchReg); |
| 1586 } | 1637 } |
| 1587 | 1638 |
| 1588 | |
| 1589 void CodeGenerator::AssembleDeoptimizerCall( | 1639 void CodeGenerator::AssembleDeoptimizerCall( |
| 1590 int deoptimization_id, Deoptimizer::BailoutType bailout_type) { | 1640 int deoptimization_id, Deoptimizer::BailoutType bailout_type) { |
| 1591 Address deopt_entry = Deoptimizer::GetDeoptimizationEntry( | 1641 Address deopt_entry = Deoptimizer::GetDeoptimizationEntry( |
| 1592 isolate(), deoptimization_id, bailout_type); | 1642 isolate(), deoptimization_id, bailout_type); |
| 1593 // TODO(turbofan): We should be able to generate better code by sharing the | 1643 // TODO(turbofan): We should be able to generate better code by sharing the |
| 1594 // actual final call site and just bl'ing to it here, similar to what we do | 1644 // actual final call site and just bl'ing to it here, similar to what we do |
| 1595 // in the lithium backend. | 1645 // in the lithium backend. |
| 1596 __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY); | 1646 __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY); |
| 1597 } | 1647 } |
| 1598 | 1648 |
| 1599 | |
| 1600 void CodeGenerator::AssemblePrologue() { | 1649 void CodeGenerator::AssemblePrologue() { |
| 1601 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); | 1650 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); |
| 1651 |
| 1602 if (descriptor->IsCFunctionCall()) { | 1652 if (descriptor->IsCFunctionCall()) { |
| 1603 __ function_descriptor(); | 1653 __ Push(r14, fp); |
| 1604 __ mflr(r0); | 1654 __ LoadRR(fp, sp); |
| 1605 if (FLAG_enable_embedded_constant_pool) { | |
| 1606 __ Push(r0, fp, kConstantPoolRegister); | |
| 1607 // Adjust FP to point to saved FP. | |
| 1608 __ subi(fp, sp, Operand(StandardFrameConstants::kConstantPoolOffset)); | |
| 1609 } else { | |
| 1610 __ Push(r0, fp); | |
| 1611 __ mr(fp, sp); | |
| 1612 } | |
| 1613 } else if (descriptor->IsJSFunctionCall()) { | 1655 } else if (descriptor->IsJSFunctionCall()) { |
| 1614 __ Prologue(this->info()->GeneratePreagedPrologue(), ip); | 1656 __ Prologue(this->info()->GeneratePreagedPrologue(), ip); |
| 1615 } else if (frame()->needs_frame()) { | 1657 } else if (frame()->needs_frame()) { |
| 1616 if (!ABI_CALL_VIA_IP && info()->output_code_kind() == Code::WASM_FUNCTION) { | 1658 if (!ABI_CALL_VIA_IP && info()->output_code_kind() == Code::WASM_FUNCTION) { |
| 1617 // TODO(mbrandy): Restrict only to the wasm wrapper case. | 1659 // TODO(mbrandy): Restrict only to the wasm wrapper case. |
| 1618 __ StubPrologue(); | 1660 __ StubPrologue(); |
| 1619 } else { | 1661 } else { |
| 1620 __ StubPrologue(ip); | 1662 __ StubPrologue(ip); |
| 1621 } | 1663 } |
| 1622 } else { | 1664 } else { |
| (...skipping 13 matching lines...) |
| 1636 if (FLAG_code_comments) __ RecordComment("-- OSR entrypoint --"); | 1678 if (FLAG_code_comments) __ RecordComment("-- OSR entrypoint --"); |
| 1637 osr_pc_offset_ = __ pc_offset(); | 1679 osr_pc_offset_ = __ pc_offset(); |
| 1638 stack_shrink_slots -= OsrHelper(info()).UnoptimizedFrameSlots(); | 1680 stack_shrink_slots -= OsrHelper(info()).UnoptimizedFrameSlots(); |
| 1639 } | 1681 } |
| 1640 | 1682 |
| 1641 const RegList double_saves = descriptor->CalleeSavedFPRegisters(); | 1683 const RegList double_saves = descriptor->CalleeSavedFPRegisters(); |
| 1642 if (double_saves != 0) { | 1684 if (double_saves != 0) { |
| 1643 stack_shrink_slots += frame()->AlignSavedCalleeRegisterSlots(); | 1685 stack_shrink_slots += frame()->AlignSavedCalleeRegisterSlots(); |
| 1644 } | 1686 } |
| 1645 if (stack_shrink_slots > 0) { | 1687 if (stack_shrink_slots > 0) { |
| 1646 __ Add(sp, sp, -stack_shrink_slots * kPointerSize, r0); | 1688 __ lay(sp, MemOperand(sp, -stack_shrink_slots * kPointerSize)); |
| 1647 } | 1689 } |
| 1648 | 1690 |
| 1649 // Save callee-saved Double registers. | 1691 // Save callee-saved Double registers. |
| 1650 if (double_saves != 0) { | 1692 if (double_saves != 0) { |
| 1651 __ MultiPushDoubles(double_saves); | 1693 __ MultiPushDoubles(double_saves); |
| 1652 DCHECK(kNumCalleeSavedDoubles == | 1694 DCHECK(kNumCalleeSavedDoubles == |
| 1653 base::bits::CountPopulation32(double_saves)); | 1695 base::bits::CountPopulation32(double_saves)); |
| 1654 frame()->AllocateSavedCalleeRegisterSlots(kNumCalleeSavedDoubles * | 1696 frame()->AllocateSavedCalleeRegisterSlots(kNumCalleeSavedDoubles * |
| 1655 (kDoubleSize / kPointerSize)); | 1697 (kDoubleSize / kPointerSize)); |
| 1656 } | 1698 } |
| 1657 | 1699 |
| 1658 // Save callee-saved registers. | 1700 // Save callee-saved registers. |
| 1659 const RegList saves = | 1701 const RegList saves = descriptor->CalleeSavedRegisters(); |
| 1660 FLAG_enable_embedded_constant_pool | |
| 1661 ? descriptor->CalleeSavedRegisters() & ~kConstantPoolRegister.bit() | |
| 1662 : descriptor->CalleeSavedRegisters(); | |
| 1663 if (saves != 0) { | 1702 if (saves != 0) { |
| 1664 __ MultiPush(saves); | 1703 __ MultiPush(saves); |
| 1665 // register save area does not include the fp or constant pool pointer. | 1704 // register save area does not include the fp or constant pool pointer. |
| 1666 const int num_saves = | 1705 const int num_saves = |
| 1667 kNumCalleeSaved - 1 - (FLAG_enable_embedded_constant_pool ? 1 : 0); | 1706 kNumCalleeSaved - 1 - (FLAG_enable_embedded_constant_pool ? 1 : 0); |
| 1668 DCHECK(num_saves == base::bits::CountPopulation32(saves)); | 1707 DCHECK(num_saves == base::bits::CountPopulation32(saves)); |
| 1669 frame()->AllocateSavedCalleeRegisterSlots(num_saves); | 1708 frame()->AllocateSavedCalleeRegisterSlots(num_saves); |
| 1670 } | 1709 } |
| 1671 } | 1710 } |
| 1672 | 1711 |
| 1673 | |
| 1674 void CodeGenerator::AssembleReturn() { | 1712 void CodeGenerator::AssembleReturn() { |
| 1675 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); | 1713 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); |
| 1676 int pop_count = static_cast<int>(descriptor->StackParameterCount()); | 1714 int pop_count = static_cast<int>(descriptor->StackParameterCount()); |
| 1677 | 1715 |
| 1678 // Restore registers. | 1716 // Restore registers. |
| 1679 const RegList saves = | 1717 const RegList saves = descriptor->CalleeSavedRegisters(); |
| 1680 FLAG_enable_embedded_constant_pool | |
| 1681 ? descriptor->CalleeSavedRegisters() & ~kConstantPoolRegister.bit() | |
| 1682 : descriptor->CalleeSavedRegisters(); | |
| 1683 if (saves != 0) { | 1718 if (saves != 0) { |
| 1684 __ MultiPop(saves); | 1719 __ MultiPop(saves); |
| 1685 } | 1720 } |
| 1686 | 1721 |
| 1687 // Restore double registers. | 1722 // Restore double registers. |
| 1688 const RegList double_saves = descriptor->CalleeSavedFPRegisters(); | 1723 const RegList double_saves = descriptor->CalleeSavedFPRegisters(); |
| 1689 if (double_saves != 0) { | 1724 if (double_saves != 0) { |
| 1690 __ MultiPopDoubles(double_saves); | 1725 __ MultiPopDoubles(double_saves); |
| 1691 } | 1726 } |
| 1692 | 1727 |
| 1693 if (descriptor->IsCFunctionCall()) { | 1728 if (descriptor->IsCFunctionCall()) { |
| 1694 __ LeaveFrame(StackFrame::MANUAL, pop_count * kPointerSize); | 1729 __ LeaveFrame(StackFrame::MANUAL, pop_count * kPointerSize); |
| 1695 } else if (frame()->needs_frame()) { | 1730 } else if (frame()->needs_frame()) { |
| 1696 // Canonicalize JSFunction return sites for now. | 1731 // Canonicalize JSFunction return sites for now. |
| 1697 if (return_label_.is_bound()) { | 1732 if (return_label_.is_bound()) { |
| 1698 __ b(&return_label_); | 1733 __ b(&return_label_); |
| 1699 return; | 1734 return; |
| 1700 } else { | 1735 } else { |
| 1701 __ bind(&return_label_); | 1736 __ bind(&return_label_); |
| 1702 __ LeaveFrame(StackFrame::MANUAL, pop_count * kPointerSize); | 1737 __ LeaveFrame(StackFrame::MANUAL, pop_count * kPointerSize); |
| 1703 } | 1738 } |
| 1704 } else { | 1739 } else { |
| 1705 __ Drop(pop_count); | 1740 __ Drop(pop_count); |
| 1706 } | 1741 } |
| 1707 __ Ret(); | 1742 __ Ret(); |
| 1708 } | 1743 } |
| 1709 | 1744 |
| 1710 | |
| 1711 void CodeGenerator::AssembleMove(InstructionOperand* source, | 1745 void CodeGenerator::AssembleMove(InstructionOperand* source, |
| 1712 InstructionOperand* destination) { | 1746 InstructionOperand* destination) { |
| 1713 PPCOperandConverter g(this, nullptr); | 1747 S390OperandConverter g(this, nullptr); |
| 1714 // Dispatch on the source and destination operand kinds. Not all | 1748 // Dispatch on the source and destination operand kinds. Not all |
| 1715 // combinations are possible. | 1749 // combinations are possible. |
| 1716 if (source->IsRegister()) { | 1750 if (source->IsRegister()) { |
| 1717 DCHECK(destination->IsRegister() || destination->IsStackSlot()); | 1751 DCHECK(destination->IsRegister() || destination->IsStackSlot()); |
| 1718 Register src = g.ToRegister(source); | 1752 Register src = g.ToRegister(source); |
| 1719 if (destination->IsRegister()) { | 1753 if (destination->IsRegister()) { |
| 1720 __ Move(g.ToRegister(destination), src); | 1754 __ Move(g.ToRegister(destination), src); |
| 1721 } else { | 1755 } else { |
| 1722 __ StoreP(src, g.ToMemOperand(destination), r0); | 1756 __ StoreP(src, g.ToMemOperand(destination)); |
| 1723 } | 1757 } |
| 1724 } else if (source->IsStackSlot()) { | 1758 } else if (source->IsStackSlot()) { |
| 1725 DCHECK(destination->IsRegister() || destination->IsStackSlot()); | 1759 DCHECK(destination->IsRegister() || destination->IsStackSlot()); |
| 1726 MemOperand src = g.ToMemOperand(source); | 1760 MemOperand src = g.ToMemOperand(source); |
| 1727 if (destination->IsRegister()) { | 1761 if (destination->IsRegister()) { |
| 1728 __ LoadP(g.ToRegister(destination), src, r0); | 1762 __ LoadP(g.ToRegister(destination), src); |
| 1729 } else { | 1763 } else { |
| 1730 Register temp = kScratchReg; | 1764 Register temp = kScratchReg; |
| 1731 __ LoadP(temp, src, r0); | 1765 __ LoadP(temp, src, r0); |
| 1732 __ StoreP(temp, g.ToMemOperand(destination), r0); | 1766 __ StoreP(temp, g.ToMemOperand(destination)); |
| 1733 } | 1767 } |
| 1734 } else if (source->IsConstant()) { | 1768 } else if (source->IsConstant()) { |
| 1735 Constant src = g.ToConstant(source); | 1769 Constant src = g.ToConstant(source); |
| 1736 if (destination->IsRegister() || destination->IsStackSlot()) { | 1770 if (destination->IsRegister() || destination->IsStackSlot()) { |
| 1737 Register dst = | 1771 Register dst = |
| 1738 destination->IsRegister() ? g.ToRegister(destination) : kScratchReg; | 1772 destination->IsRegister() ? g.ToRegister(destination) : kScratchReg; |
| 1739 switch (src.type()) { | 1773 switch (src.type()) { |
| 1740 case Constant::kInt32: | 1774 case Constant::kInt32: |
| 1741 __ mov(dst, Operand(src.ToInt32())); | 1775 __ mov(dst, Operand(src.ToInt32())); |
| 1742 break; | 1776 break; |
| (...skipping 18 matching lines...) |
| 1761 if (IsMaterializableFromFrame(src_object, &offset)) { | 1795 if (IsMaterializableFromFrame(src_object, &offset)) { |
| 1762 __ LoadP(dst, MemOperand(fp, offset)); | 1796 __ LoadP(dst, MemOperand(fp, offset)); |
| 1763 } else if (IsMaterializableFromRoot(src_object, &index)) { | 1797 } else if (IsMaterializableFromRoot(src_object, &index)) { |
| 1764 __ LoadRoot(dst, index); | 1798 __ LoadRoot(dst, index); |
| 1765 } else { | 1799 } else { |
| 1766 __ Move(dst, src_object); | 1800 __ Move(dst, src_object); |
| 1767 } | 1801 } |
| 1768 break; | 1802 break; |
| 1769 } | 1803 } |
| 1770 case Constant::kRpoNumber: | 1804 case Constant::kRpoNumber: |
| 1771 UNREACHABLE(); // TODO(dcarney): loading RPO constants on PPC. | 1805 UNREACHABLE(); // TODO(dcarney): loading RPO constants on S390. |
| 1772 break; | 1806 break; |
| 1773 } | 1807 } |
| 1774 if (destination->IsStackSlot()) { | 1808 if (destination->IsStackSlot()) { |
| 1775 __ StoreP(dst, g.ToMemOperand(destination), r0); | 1809 __ StoreP(dst, g.ToMemOperand(destination), r0); |
| 1776 } | 1810 } |
| 1777 } else { | 1811 } else { |
| 1778 DoubleRegister dst = destination->IsDoubleRegister() | 1812 DoubleRegister dst = destination->IsDoubleRegister() |
| 1779 ? g.ToDoubleRegister(destination) | 1813 ? g.ToDoubleRegister(destination) |
| 1780 : kScratchDoubleReg; | 1814 : kScratchDoubleReg; |
| 1781 double value = (src.type() == Constant::kFloat32) ? src.ToFloat32() | 1815 double value = (src.type() == Constant::kFloat32) ? src.ToFloat32() |
| 1782 : src.ToFloat64(); | 1816 : src.ToFloat64(); |
| 1783 __ LoadDoubleLiteral(dst, value, kScratchReg); | 1817 if (src.type() == Constant::kFloat32) { |
| 1818 __ LoadFloat32Literal(dst, src.ToFloat32(), kScratchReg); |
| 1819 } else { |
| 1820 __ LoadDoubleLiteral(dst, value, kScratchReg); |
| 1821 } |
| 1822 |
| 1784 if (destination->IsDoubleStackSlot()) { | 1823 if (destination->IsDoubleStackSlot()) { |
| 1785 __ StoreDouble(dst, g.ToMemOperand(destination), r0); | 1824 __ StoreDouble(dst, g.ToMemOperand(destination)); |
| 1786 } | 1825 } |
| 1787 } | 1826 } |
| 1788 } else if (source->IsDoubleRegister()) { | 1827 } else if (source->IsDoubleRegister()) { |
| 1789 DoubleRegister src = g.ToDoubleRegister(source); | 1828 DoubleRegister src = g.ToDoubleRegister(source); |
| 1790 if (destination->IsDoubleRegister()) { | 1829 if (destination->IsDoubleRegister()) { |
| 1791 DoubleRegister dst = g.ToDoubleRegister(destination); | 1830 DoubleRegister dst = g.ToDoubleRegister(destination); |
| 1792 __ Move(dst, src); | 1831 __ Move(dst, src); |
| 1793 } else { | 1832 } else { |
| 1794 DCHECK(destination->IsDoubleStackSlot()); | 1833 DCHECK(destination->IsDoubleStackSlot()); |
| 1795 __ StoreDouble(src, g.ToMemOperand(destination), r0); | 1834 __ StoreDouble(src, g.ToMemOperand(destination)); |
| 1796 } | 1835 } |
| 1797 } else if (source->IsDoubleStackSlot()) { | 1836 } else if (source->IsDoubleStackSlot()) { |
| 1798 DCHECK(destination->IsDoubleRegister() || destination->IsDoubleStackSlot()); | 1837 DCHECK(destination->IsDoubleRegister() || destination->IsDoubleStackSlot()); |
| 1799 MemOperand src = g.ToMemOperand(source); | 1838 MemOperand src = g.ToMemOperand(source); |
| 1800 if (destination->IsDoubleRegister()) { | 1839 if (destination->IsDoubleRegister()) { |
| 1801 __ LoadDouble(g.ToDoubleRegister(destination), src, r0); | 1840 __ LoadDouble(g.ToDoubleRegister(destination), src); |
| 1802 } else { | 1841 } else { |
| 1803 DoubleRegister temp = kScratchDoubleReg; | 1842 DoubleRegister temp = kScratchDoubleReg; |
| 1804 __ LoadDouble(temp, src, r0); | 1843 __ LoadDouble(temp, src); |
| 1805 __ StoreDouble(temp, g.ToMemOperand(destination), r0); | 1844 __ StoreDouble(temp, g.ToMemOperand(destination)); |
| 1806 } | 1845 } |
| 1807 } else { | 1846 } else { |
| 1808 UNREACHABLE(); | 1847 UNREACHABLE(); |
| 1809 } | 1848 } |
| 1810 } | 1849 } |
| 1811 | 1850 |
| 1812 | |
| 1813 void CodeGenerator::AssembleSwap(InstructionOperand* source, | 1851 void CodeGenerator::AssembleSwap(InstructionOperand* source, |
| 1814 InstructionOperand* destination) { | 1852 InstructionOperand* destination) { |
| 1815 PPCOperandConverter g(this, nullptr); | 1853 S390OperandConverter g(this, nullptr); |
| 1816 // Dispatch on the source and destination operand kinds. Not all | 1854 // Dispatch on the source and destination operand kinds. Not all |
| 1817 // combinations are possible. | 1855 // combinations are possible. |
| 1818 if (source->IsRegister()) { | 1856 if (source->IsRegister()) { |
| 1819 // Register-register. | 1857 // Register-register. |
| 1820 Register temp = kScratchReg; | 1858 Register temp = kScratchReg; |
| 1821 Register src = g.ToRegister(source); | 1859 Register src = g.ToRegister(source); |
| 1822 if (destination->IsRegister()) { | 1860 if (destination->IsRegister()) { |
| 1823 Register dst = g.ToRegister(destination); | 1861 Register dst = g.ToRegister(destination); |
| 1824 __ mr(temp, src); | 1862 __ LoadRR(temp, src); |
| 1825 __ mr(src, dst); | 1863 __ LoadRR(src, dst); |
| 1826 __ mr(dst, temp); | 1864 __ LoadRR(dst, temp); |
| 1827 } else { | 1865 } else { |
| 1828 DCHECK(destination->IsStackSlot()); | 1866 DCHECK(destination->IsStackSlot()); |
| 1829 MemOperand dst = g.ToMemOperand(destination); | 1867 MemOperand dst = g.ToMemOperand(destination); |
| 1830 __ mr(temp, src); | 1868 __ LoadRR(temp, src); |
| 1831 __ LoadP(src, dst); | 1869 __ LoadP(src, dst); |
| 1832 __ StoreP(temp, dst); | 1870 __ StoreP(temp, dst); |
| 1833 } | 1871 } |
| 1834 #if V8_TARGET_ARCH_PPC64 | 1872 #if V8_TARGET_ARCH_S390X |
| 1835 } else if (source->IsStackSlot() || source->IsDoubleStackSlot()) { | 1873 } else if (source->IsStackSlot() || source->IsDoubleStackSlot()) { |
| 1836 #else | 1874 #else |
| 1837 } else if (source->IsStackSlot()) { | 1875 } else if (source->IsStackSlot()) { |
| 1838 DCHECK(destination->IsStackSlot()); | 1876 DCHECK(destination->IsStackSlot()); |
| 1839 #endif | 1877 #endif |
| 1840 Register temp_0 = kScratchReg; | 1878 Register temp_0 = kScratchReg; |
| 1841 Register temp_1 = r0; | 1879 Register temp_1 = r0; |
| 1842 MemOperand src = g.ToMemOperand(source); | 1880 MemOperand src = g.ToMemOperand(source); |
| 1843 MemOperand dst = g.ToMemOperand(destination); | 1881 MemOperand dst = g.ToMemOperand(destination); |
| 1844 __ LoadP(temp_0, src); | 1882 __ LoadP(temp_0, src); |
| 1845 __ LoadP(temp_1, dst); | 1883 __ LoadP(temp_1, dst); |
| 1846 __ StoreP(temp_0, dst); | 1884 __ StoreP(temp_0, dst); |
| 1847 __ StoreP(temp_1, src); | 1885 __ StoreP(temp_1, src); |
| 1848 } else if (source->IsDoubleRegister()) { | 1886 } else if (source->IsDoubleRegister()) { |
| 1849 DoubleRegister temp = kScratchDoubleReg; | 1887 DoubleRegister temp = kScratchDoubleReg; |
| 1850 DoubleRegister src = g.ToDoubleRegister(source); | 1888 DoubleRegister src = g.ToDoubleRegister(source); |
| 1851 if (destination->IsDoubleRegister()) { | 1889 if (destination->IsDoubleRegister()) { |
| 1852 DoubleRegister dst = g.ToDoubleRegister(destination); | 1890 DoubleRegister dst = g.ToDoubleRegister(destination); |
| 1853 __ fmr(temp, src); | 1891 __ ldr(temp, src); |
| 1854 __ fmr(src, dst); | 1892 __ ldr(src, dst); |
| 1855 __ fmr(dst, temp); | 1893 __ ldr(dst, temp); |
| 1856 } else { | 1894 } else { |
| 1857 DCHECK(destination->IsDoubleStackSlot()); | 1895 DCHECK(destination->IsDoubleStackSlot()); |
| 1858 MemOperand dst = g.ToMemOperand(destination); | 1896 MemOperand dst = g.ToMemOperand(destination); |
| 1859 __ fmr(temp, src); | 1897 __ ldr(temp, src); |
| 1860 __ lfd(src, dst); | 1898 __ LoadDouble(src, dst); |
| 1861 __ stfd(temp, dst); | 1899 __ StoreDouble(temp, dst); |
| 1862 } | 1900 } |
| 1863 #if !V8_TARGET_ARCH_PPC64 | 1901 #if !V8_TARGET_ARCH_S390X |
| 1864 } else if (source->IsDoubleStackSlot()) { | 1902 } else if (source->IsDoubleStackSlot()) { |
| 1865 DCHECK(destination->IsDoubleStackSlot()); | 1903 DCHECK(destination->IsDoubleStackSlot()); |
| 1866 DoubleRegister temp_0 = kScratchDoubleReg; | 1904 DoubleRegister temp_0 = kScratchDoubleReg; |
| 1867 DoubleRegister temp_1 = d0; | 1905 DoubleRegister temp_1 = d0; |
| 1868 MemOperand src = g.ToMemOperand(source); | 1906 MemOperand src = g.ToMemOperand(source); |
| 1869 MemOperand dst = g.ToMemOperand(destination); | 1907 MemOperand dst = g.ToMemOperand(destination); |
| 1870 __ lfd(temp_0, src); | 1908 // TODO(joransiu): MVC opportunity |
| 1871 __ lfd(temp_1, dst); | 1909 __ LoadDouble(temp_0, src); |
| 1872 __ stfd(temp_0, dst); | 1910 __ LoadDouble(temp_1, dst); |
| 1873 __ stfd(temp_1, src); | 1911 __ StoreDouble(temp_0, dst); |
| 1912 __ StoreDouble(temp_1, src); |
| 1874 #endif | 1913 #endif |
| 1875 } else { | 1914 } else { |
| 1876 // No other combinations are possible. | 1915 // No other combinations are possible. |
| 1877 UNREACHABLE(); | 1916 UNREACHABLE(); |
| 1878 } | 1917 } |
| 1879 } | 1918 } |
| 1880 | 1919 |
| 1881 | |
| 1882 void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) { | 1920 void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) { |
| 1883 for (size_t index = 0; index < target_count; ++index) { | 1921 for (size_t index = 0; index < target_count; ++index) { |
| 1884 __ emit_label_addr(targets[index]); | 1922 __ emit_label_addr(targets[index]); |
| 1885 } | 1923 } |
| 1886 } | 1924 } |
| 1887 | 1925 |
| 1888 | |
| 1889 void CodeGenerator::AddNopForSmiCodeInlining() { | 1926 void CodeGenerator::AddNopForSmiCodeInlining() { |
| 1890 // We do not insert nops for inlined Smi code. | 1927 // We do not insert nops for inlined Smi code. |
| 1891 } | 1928 } |
| 1892 | 1929 |
| 1893 | |
| 1894 void CodeGenerator::EnsureSpaceForLazyDeopt() { | 1930 void CodeGenerator::EnsureSpaceForLazyDeopt() { |
| 1895 if (!info()->ShouldEnsureSpaceForLazyDeopt()) { | 1931 if (!info()->ShouldEnsureSpaceForLazyDeopt()) { |
| 1896 return; | 1932 return; |
| 1897 } | 1933 } |
| 1898 | 1934 |
| 1899 int space_needed = Deoptimizer::patch_size(); | 1935 int space_needed = Deoptimizer::patch_size(); |
| 1900 // Ensure that we have enough space after the previous lazy-bailout | 1936 // Ensure that we have enough space after the previous lazy-bailout |
| 1901 // instruction for patching the code here. | 1937 // instruction for patching the code here. |
| 1902 int current_pc = masm()->pc_offset(); | 1938 int current_pc = masm()->pc_offset(); |
| 1903 if (current_pc < last_lazy_deopt_pc_ + space_needed) { | 1939 if (current_pc < last_lazy_deopt_pc_ + space_needed) { |
| 1904 // Block trampoline pool emission for duration of padding. | |
| 1905 v8::internal::Assembler::BlockTrampolinePoolScope block_trampoline_pool( | |
| 1906 masm()); | |
| 1907 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc; | 1940 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc; |
| 1908 DCHECK_EQ(0, padding_size % v8::internal::Assembler::kInstrSize); | 1941 DCHECK_EQ(0, padding_size % 2); |
| 1909 while (padding_size > 0) { | 1942 while (padding_size > 0) { |
| 1910 __ nop(); | 1943 __ nop(); |
| 1911 padding_size -= v8::internal::Assembler::kInstrSize; | 1944 padding_size -= 2; |
| 1912 } | 1945 } |
| 1913 } | 1946 } |
| 1914 } | 1947 } |
| 1915 | 1948 |
| 1916 #undef __ | 1949 #undef __ |
| 1917 | 1950 |
| 1918 } // namespace compiler | 1951 } // namespace compiler |
| 1919 } // namespace internal | 1952 } // namespace internal |
| 1920 } // namespace v8 | 1953 } // namespace v8 |