OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include <assert.h> // For assert | 5 #include <assert.h> // For assert |
6 #include <limits.h> // For LONG_MIN, LONG_MAX. | 6 #include <limits.h> // For LONG_MIN, LONG_MAX. |
7 | 7 |
8 #if V8_TARGET_ARCH_S390 | 8 #if V8_TARGET_ARCH_S390 |
9 | 9 |
10 #include "src/base/bits.h" | 10 #include "src/base/bits.h" |
(...skipping 3356 matching lines...)
3367 | 3367 |
3368 void MacroAssembler::Div32(Register dst, Register src1, | 3368 void MacroAssembler::Div32(Register dst, Register src1, |
3369 const MemOperand& src2) { | 3369 const MemOperand& src2) { |
3370 Generate_Div32(dsgf); | 3370 Generate_Div32(dsgf); |
3371 } | 3371 } |
3372 | 3372 |
3373 void MacroAssembler::Div32(Register dst, Register src1, Register src2) { | 3373 void MacroAssembler::Div32(Register dst, Register src1, Register src2) { |
3374 Generate_Div32(dsgfr); | 3374 Generate_Div32(dsgfr); |
3375 } | 3375 } |
3376 | 3376 |
3377 void MacroAssembler::Div32(Register dst, Register src1, const Operand& src2) { | |
3378 USE(dst); | |
3379 USE(src1); | |
3380 USE(src2); | |
3381 UNREACHABLE(); | |
3382 } | |
3383 | |
3384 #undef Generate_Div32 | 3377 #undef Generate_Div32 |
3385 | 3378 |
3386 #define Generate_DivU32(instr) \ | 3379 #define Generate_DivU32(instr) \ |
3387 { \ | 3380 { \ |
3388 lr(r0, src1); \ | 3381 lr(r0, src1); \ |
3389 srdl(r0, Operand(32)); \ | 3382 srdl(r0, Operand(32)); \ |
3390 instr(r0, src2); \ | 3383 instr(r0, src2); \ |
3391 LoadlW(dst, r1); \ | 3384 LoadlW(dst, r1); \ |
3392 } | 3385 } |
3393 | 3386 |
3394 void MacroAssembler::DivU32(Register dst, Register src1, | 3387 void MacroAssembler::DivU32(Register dst, Register src1, |
3395 const MemOperand& src2) { | 3388 const MemOperand& src2) { |
3396 Generate_DivU32(dl); | 3389 Generate_DivU32(dl); |
3397 } | 3390 } |
3398 | 3391 |
3399 void MacroAssembler::DivU32(Register dst, Register src1, Register src2) { | 3392 void MacroAssembler::DivU32(Register dst, Register src1, Register src2) { |
3400 Generate_DivU32(dlr); | 3393 Generate_DivU32(dlr); |
3401 } | 3394 } |
3402 | 3395 |
3403 void MacroAssembler::DivU32(Register dst, Register src1, const Operand& src2) { | |
3404 USE(dst); | |
3405 USE(src1); | |
3406 USE(src2); | |
3407 UNREACHABLE(); | |
3408 } | |
3409 | |
3410 #undef Generate_DivU32 | 3396 #undef Generate_DivU32 |
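[Reviewer note, not part of the diff] Generate_DivU32 above and Generate_ModU32 further down set up the unsigned 32-bit dividend the same way: lr(r0, src1) places the value in the even register of the r0:r1 pair, and srdl(r0, Operand(32)) shifts that pair logically right by 32 bits, leaving the dividend zero-extended in r1 with r0 cleared, which is what dl/dlr expect. A minimal host-side sketch of that setup (reference model only, not the MacroAssembler API; Pair32 and SetupUnsignedDividend are illustrative names):

#include <cstdint>

// Models lr(r0, src1); srdl(r0, Operand(32)): the 64-bit even/odd pair
// is shifted logically right by 32, so r0 ends up 0 and r1 holds src1.
struct Pair32 { uint32_t r0, r1; };

Pair32 SetupUnsignedDividend(uint32_t src1) {
  uint64_t pair = static_cast<uint64_t>(src1) << 32;  // src1 sits in r0
  pair >>= 32;                                        // srdl by 32 bits
  return Pair32{static_cast<uint32_t>(pair >> 32),    // r0 (even half)
                static_cast<uint32_t>(pair)};         // r1 (odd half)
}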
3411 | 3397 |
| 3398 #define Generate_Div64(instr) \ |
| 3399 { \ |
| 3400 lgr(r1, src1); \ |
| 3401 instr(r0, src2); \ |
| 3402 lgr(dst, r1); \ |
| 3403 } |
| 3404 |
| 3405 void MacroAssembler::Div64(Register dst, Register src1, |
| 3406 const MemOperand& src2) { |
| 3407 Generate_Div64(dsg); |
| 3408 } |
| 3409 |
| 3410 void MacroAssembler::Div64(Register dst, Register src1, Register src2) { |
| 3411 Generate_Div64(dsgr); |
| 3412 } |
| 3413 |
| 3414 #undef Generate_Div64 |
| 3415 |
| 3416 #define Generate_DivU64(instr) \ |
| 3417 { \ |
| 3418 lgr(r1, src1); \ |
| 3419 lghi(r0, Operand::Zero()); \ |
| 3420 instr(r0, src2); \ |
| 3421 lgr(dst, r1); \ |
| 3422 } |
| 3423 |
| 3424 void MacroAssembler::DivU64(Register dst, Register src1, |
| 3425 const MemOperand& src2) { |
| 3426 Generate_DivU64(dlg); |
| 3427 } |
| 3428 |
| 3429 void MacroAssembler::DivU64(Register dst, Register src1, Register src2) { |
| 3430 Generate_DivU64(dlgr); |
| 3431 } |
| 3432 |
| 3433 #undef Generate_DivU64 |
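[Reviewer note, not part of the diff] The new 64-bit divide helpers follow the even/odd register-pair convention: the dividend goes into r1 (with r0 zeroed first for the unsigned dlg/dlgr forms, which treat r0:r1 as a 128-bit dividend), and after dsg/dsgr or dlg/dlgr the quotient is in r1 and the remainder in r0, which is why Div64/DivU64 copy r1 into dst. A hedged reference model of what the emitted sequences compute, in plain host arithmetic (Div64Model, DivU64Model and the structs are illustrative names, not the MacroAssembler API):

#include <cstdint>
#include <cassert>

// Reference model only: "r0" holds the remainder, "r1" the quotient,
// mirroring the register pair used by dsgr/dlgr.
struct DivPair { int64_t r0, r1; };
struct UDivPair { uint64_t r0, r1; };

DivPair Div64Model(int64_t src1, int64_t src2) {
  // lgr(r1, src1); dsgr(r0, src2); lgr(dst, r1);
  assert(src2 != 0);
  return DivPair{src1 % src2, src1 / src2};
}

UDivPair DivU64Model(uint64_t src1, uint64_t src2) {
  // lgr(r1, src1); lghi(r0, Operand::Zero()); dlgr(r0, src2); lgr(dst, r1);
  assert(src2 != 0);
  return UDivPair{src1 % src2, src1 / src2};
}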
| 3434 |
3412 #define Generate_Mod32(instr) \ | 3435 #define Generate_Mod32(instr) \ |
3413 { \ | 3436 { \ |
3414 lgfr(r1, src1); \ | 3437 lgfr(r1, src1); \ |
3415 instr(r0, src2); \ | 3438 instr(r0, src2); \ |
3416 LoadlW(dst, r0); \ | 3439 LoadlW(dst, r0); \ |
3417 } | 3440 } |
3418 | 3441 |
3419 void MacroAssembler::Mod32(Register dst, Register src1, | 3442 void MacroAssembler::Mod32(Register dst, Register src1, |
3420 const MemOperand& src2) { | 3443 const MemOperand& src2) { |
3421 Generate_Mod32(dsgf); | 3444 Generate_Mod32(dsgf); |
3422 } | 3445 } |
3423 | 3446 |
3424 void MacroAssembler::Mod32(Register dst, Register src1, Register src2) { | 3447 void MacroAssembler::Mod32(Register dst, Register src1, Register src2) { |
3425 Generate_Mod32(dsgfr); | 3448 Generate_Mod32(dsgfr); |
3426 } | 3449 } |
3427 | 3450 |
3428 void MacroAssembler::Mod32(Register dst, Register src1, const Operand& src2) { | |
3429 USE(dst); | |
3430 USE(src1); | |
3431 USE(src2); | |
3432 UNREACHABLE(); | |
3433 } | |
3434 | |
3435 #undef Generate_Mod32 | 3451 #undef Generate_Mod32 |
3436 | 3452 |
3437 #define Generate_ModU32(instr) \ | 3453 #define Generate_ModU32(instr) \ |
3438 { \ | 3454 { \ |
3439 lr(r0, src1); \ | 3455 lr(r0, src1); \ |
3440 srdl(r0, Operand(32)); \ | 3456 srdl(r0, Operand(32)); \ |
3441 instr(r0, src2); \ | 3457 instr(r0, src2); \ |
3442 LoadlW(dst, r0); \ | 3458 LoadlW(dst, r0); \ |
3443 } | 3459 } |
3444 | 3460 |
3445 void MacroAssembler::ModU32(Register dst, Register src1, | 3461 void MacroAssembler::ModU32(Register dst, Register src1, |
3446 const MemOperand& src2) { | 3462 const MemOperand& src2) { |
3447 Generate_ModU32(dl); | 3463 Generate_ModU32(dl); |
3448 } | 3464 } |
3449 | 3465 |
3450 void MacroAssembler::ModU32(Register dst, Register src1, Register src2) { | 3466 void MacroAssembler::ModU32(Register dst, Register src1, Register src2) { |
3451 Generate_ModU32(dlr); | 3467 Generate_ModU32(dlr); |
3452 } | 3468 } |
3453 | 3469 |
3454 void MacroAssembler::ModU32(Register dst, Register src1, const Operand& src2) { | |
3455 USE(dst); | |
3456 USE(src1); | |
3457 USE(src2); | |
3458 UNREACHABLE(); | |
3459 } | |
3460 | |
3461 #undef Generate_ModU32 | 3470 #undef Generate_ModU32 |
3462 | 3471 |
| 3472 #define Generate_Mod64(instr) \ |
| 3473 { \ |
| 3474 lgr(r1, src1); \ |
| 3475 instr(r0, src2); \ |
| 3476 lgr(dst, r0); \ |
| 3477 } |
| 3478 |
| 3479 void MacroAssembler::Mod64(Register dst, Register src1, |
| 3480 const MemOperand& src2) { |
| 3481 Generate_Mod64(dsg); |
| 3482 } |
| 3483 |
| 3484 void MacroAssembler::Mod64(Register dst, Register src1, Register src2) { |
| 3485 Generate_Mod64(dsgr); |
| 3486 } |
| 3487 |
| 3488 #undef Generate_Mod64 |
| 3489 |
| 3490 #define Generate_ModU64(instr) \ |
| 3491 { \ |
| 3492 lgr(r1, src1); \ |
| 3493 lghi(r0, Operand::Zero()); \ |
| 3494 instr(r0, src2); \ |
| 3495 lgr(dst, r0); \ |
| 3496 } |
| 3497 |
| 3498 void MacroAssembler::ModU64(Register dst, Register src1, |
| 3499 const MemOperand& src2) { |
| 3500 Generate_ModU64(dlg); |
| 3501 } |
| 3502 |
| 3503 void MacroAssembler::ModU64(Register dst, Register src1, Register src2) { |
| 3504 Generate_ModU64(dlgr); |
| 3505 } |
| 3506 |
| 3507 #undef Generate_ModU64 |
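[Reviewer note, not part of the diff] Mod64/ModU64 emit exactly the same sequences as Div64/DivU64 and differ only in taking the result from r0 (the remainder) instead of r1. The signed divide truncates the quotient toward zero, so the remainder carries the sign of the dividend; C++ integer division behaves the same way, which the short sketch below relies on (Mod64Model/ModU64Model are illustrative names):

#include <cstdint>
#include <cassert>

// Reference model: the remainder left in r0 by the divide sequences above.
int64_t Mod64Model(int64_t src1, int64_t src2) {
  assert(src2 != 0);
  return src1 % src2;   // e.g. Mod64Model(-7, 2) == -1, sign of the dividend
}

uint64_t ModU64Model(uint64_t src1, uint64_t src2) {
  assert(src2 != 0);
  return src1 % src2;
}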
| 3508 |
3463 void MacroAssembler::MulP(Register dst, const Operand& opnd) { | 3509 void MacroAssembler::MulP(Register dst, const Operand& opnd) { |
3464 #if V8_TARGET_ARCH_S390X | 3510 #if V8_TARGET_ARCH_S390X |
3465 msgfi(dst, opnd); | 3511 msgfi(dst, opnd); |
3466 #else | 3512 #else |
3467 msfi(dst, opnd); | 3513 msfi(dst, opnd); |
3468 #endif | 3514 #endif |
3469 } | 3515 } |
3470 | 3516 |
3471 void MacroAssembler::MulP(Register dst, Register src) { | 3517 void MacroAssembler::MulP(Register dst, Register src) { |
3472 #if V8_TARGET_ARCH_S390X | 3518 #if V8_TARGET_ARCH_S390X |
(...skipping 1520 matching lines...)
4993 | 5039 |
4994 // Convert Double precision (64-bit) to Single Precision (32-bit) | 5040 // Convert Double precision (64-bit) to Single Precision (32-bit) |
4995 // and store resulting Float32 to memory | 5041 // and store resulting Float32 to memory |
4996 void MacroAssembler::StoreDoubleAsFloat32(DoubleRegister src, | 5042 void MacroAssembler::StoreDoubleAsFloat32(DoubleRegister src, |
4997 const MemOperand& mem, | 5043 const MemOperand& mem, |
4998 DoubleRegister scratch) { | 5044 DoubleRegister scratch) { |
4999 ledbr(scratch, src); | 5045 ledbr(scratch, src); |
5000 StoreFloat32(scratch, mem); | 5046 StoreFloat32(scratch, mem); |
5001 } | 5047 } |
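[Reviewer note, not part of the diff] StoreDoubleAsFloat32 rounds through ledbr before storing; under the default round-to-nearest-even mode this is the same narrowing a host-side cast performs, which the one-line model below assumes (RoundToFloat32Model is an illustrative name):

// Assumes the default BFP rounding mode (round to nearest, ties to even).
inline float RoundToFloat32Model(double value) {
  return static_cast<float>(value);
}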
5002 | 5048 |
| 5049 void MacroAssembler::AddFloat32(DoubleRegister dst, const MemOperand& opnd, |
| 5050 DoubleRegister scratch) { |
| 5051 if (is_uint12(opnd.offset())) { |
| 5052 aeb(dst, opnd); |
| 5053 } else { |
| 5054 ley(scratch, opnd); |
| 5055 aebr(dst, scratch); |
| 5056 } |
| 5057 } |
| 5058 |
| 5059 void MacroAssembler::AddFloat64(DoubleRegister dst, const MemOperand& opnd, |
| 5060 DoubleRegister scratch) { |
| 5061 if (is_uint12(opnd.offset())) { |
| 5062 adb(dst, opnd); |
| 5063 } else { |
| 5064 ldy(scratch, opnd); |
| 5065 adbr(dst, scratch); |
| 5066 } |
| 5067 } |
| 5068 |
| 5069 void MacroAssembler::SubFloat32(DoubleRegister dst, const MemOperand& opnd, |
| 5070 DoubleRegister scratch) { |
| 5071 if (is_uint12(opnd.offset())) { |
| 5072 seb(dst, opnd); |
| 5073 } else { |
| 5074 ley(scratch, opnd); |
| 5075 sebr(dst, scratch); |
| 5076 } |
| 5077 } |
| 5078 |
| 5079 void MacroAssembler::SubFloat64(DoubleRegister dst, const MemOperand& opnd, |
| 5080 DoubleRegister scratch) { |
| 5081 if (is_uint12(opnd.offset())) { |
| 5082 sdb(dst, opnd); |
| 5083 } else { |
| 5084 ldy(scratch, opnd); |
| 5085 sdbr(dst, scratch); |
| 5086 } |
| 5087 } |
| 5088 |
| 5089 void MacroAssembler::MulFloat32(DoubleRegister dst, const MemOperand& opnd, |
| 5090 DoubleRegister scratch) { |
| 5091 if (is_uint12(opnd.offset())) { |
| 5092 meeb(dst, opnd); |
| 5093 } else { |
| 5094 ley(scratch, opnd); |
| 5095 meebr(dst, scratch); |
| 5096 } |
| 5097 } |
| 5098 |
| 5099 void MacroAssembler::MulFloat64(DoubleRegister dst, const MemOperand& opnd, |
| 5100 DoubleRegister scratch) { |
| 5101 if (is_uint12(opnd.offset())) { |
| 5102 mdb(dst, opnd); |
| 5103 } else { |
| 5104 ldy(scratch, opnd); |
| 5105 mdbr(dst, scratch); |
| 5106 } |
| 5107 } |
| 5108 |
| 5109 void MacroAssembler::DivFloat32(DoubleRegister dst, const MemOperand& opnd, |
| 5110 DoubleRegister scratch) { |
| 5111 if (is_uint12(opnd.offset())) { |
| 5112 deb(dst, opnd); |
| 5113 } else { |
| 5114 ley(scratch, opnd); |
| 5115 debr(dst, scratch); |
| 5116 } |
| 5117 } |
| 5118 |
| 5119 void MacroAssembler::DivFloat64(DoubleRegister dst, const MemOperand& opnd, |
| 5120 DoubleRegister scratch) { |
| 5121 if (is_uint12(opnd.offset())) { |
| 5122 ddb(dst, opnd); |
| 5123 } else { |
| 5124 ldy(scratch, opnd); |
| 5125 ddbr(dst, scratch); |
| 5126 } |
| 5127 } |
| 5128 |
| 5129 void MacroAssembler::LoadFloat32ToDouble(DoubleRegister dst, |
| 5130 const MemOperand& opnd, |
| 5131 DoubleRegister scratch) { |
| 5132 if (is_uint12(opnd.offset())) { |
| 5133 ldeb(dst, opnd); |
| 5134 } else { |
| 5135 ley(scratch, opnd); |
| 5136 ldebr(dst, scratch); |
| 5137 } |
| 5138 } |
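[Reviewer note, not part of the diff] Each of the new floating-point memory-operand helpers picks its encoding the same way: if the displacement fits the 12-bit unsigned field of the short-displacement form (aeb, adb, meeb, ...), that instruction is used directly; otherwise the operand is first loaded through the long-displacement ley/ldy into the caller-supplied scratch register and the register-register form (aebr, adbr, ...) finishes the operation. A small sketch of that selection, where fits_uint12 and SelectFloatOpForm are stand-ins for illustration (V8's actual predicate is is_uint12):

#include <cstdint>

// Stand-in for is_uint12: a non-negative displacement below 4096
// fits the 12-bit field of the short-displacement encodings.
inline bool fits_uint12(int64_t displacement) {
  return displacement >= 0 && displacement < (1 << 12);
}

enum class Form { kShortDisplacement, kLoadToScratchThenRR };

// Short form when the displacement fits; otherwise load via scratch
// (ley/ldy) and use the register-register form.
inline Form SelectFloatOpForm(int64_t displacement) {
  return fits_uint12(displacement) ? Form::kShortDisplacement
                                   : Form::kLoadToScratchThenRR;
}

Reusing the caller-supplied scratch register keeps each helper to a single extra load in the long-displacement case instead of duplicating every operation for both encodings.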
| 5139 |
5003 // Variable length depending on whether offset fits into immediate field | 5140 // Variable length depending on whether offset fits into immediate field |
5004 // MemOperand of RX or RXY format | 5141 // MemOperand of RX or RXY format |
5005 void MacroAssembler::StoreW(Register src, const MemOperand& mem, | 5142 void MacroAssembler::StoreW(Register src, const MemOperand& mem, |
5006 Register scratch) { | 5143 Register scratch) { |
5007 Register base = mem.rb(); | 5144 Register base = mem.rb(); |
5008 int offset = mem.offset(); | 5145 int offset = mem.offset(); |
5009 | 5146 |
5010 bool use_RXform = false; | 5147 bool use_RXform = false; |
5011 bool use_RXYform = false; | 5148 bool use_RXYform = false; |
5012 | 5149 |
(...skipping 323 matching lines...)
5336 } | 5473 } |
5337 if (mag.shift > 0) ShiftRightArith(result, result, Operand(mag.shift)); | 5474 if (mag.shift > 0) ShiftRightArith(result, result, Operand(mag.shift)); |
5338 ExtractBit(r0, dividend, 31); | 5475 ExtractBit(r0, dividend, 31); |
5339 AddP(result, r0); | 5476 AddP(result, r0); |
5340 } | 5477 } |
5341 | 5478 |
5342 } // namespace internal | 5479 } // namespace internal |
5343 } // namespace v8 | 5480 } // namespace v8 |
5344 | 5481 |
5345 #endif // V8_TARGET_ARCH_S390 | 5482 #endif // V8_TARGET_ARCH_S390 |