OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include <assert.h> // For assert | 5 #include <assert.h> // For assert |
6 #include <limits.h> // For LONG_MIN, LONG_MAX. | 6 #include <limits.h> // For LONG_MIN, LONG_MAX. |
7 | 7 |
8 #if V8_TARGET_ARCH_S390 | 8 #if V8_TARGET_ARCH_S390 |
9 | 9 |
10 #include "src/base/bits.h" | 10 #include "src/base/bits.h" |
(...skipping 4311 matching lines...)
4322 uint32_t lo_32 = 0; | 4322 uint32_t lo_32 = 0; |
4323 | 4323 |
4324 // Load the 64-bit value into a GPR, then transfer it to FPR via LDGR | 4324 // Load the 64-bit value into a GPR, then transfer it to FPR via LDGR |
4325 iihf(scratch, Operand(hi_32)); | 4325 iihf(scratch, Operand(hi_32)); |
4326 iilf(scratch, Operand(lo_32)); | 4326 iilf(scratch, Operand(lo_32)); |
4327 ldgr(result, scratch); | 4327 ldgr(result, scratch); |
4328 } | 4328 } |
4329 | 4329 |
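Editor's note: the hunk above is the unchanged tail of a 64-bit literal load into an FPR: the literal's bit pattern is split into 32-bit halves, assembled in a scratch GPR with iihf/iilf (insert-immediate into the high and low word), then transferred to the FPR with ldgr. A minimal host-side sketch of the same split, using std::memcpy as a stand-in for V8's bit_cast:

#include <cstdint>
#include <cstring>

// Split a double's IEEE-754 bit pattern into the 32-bit halves that
// iihf (high) and iilf (low) would load into the scratch register.
static void SplitDoubleBits(double value, uint32_t* hi_32, uint32_t* lo_32) {
  uint64_t bits;
  std::memcpy(&bits, &value, sizeof bits);  // stand-in for V8's bit_cast
  *hi_32 = static_cast<uint32_t>(bits >> 32);
  *lo_32 = static_cast<uint32_t>(bits & 0xffffffffu);
}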
4330 void MacroAssembler::CmpSmiLiteral(Register src1, Smi* smi, Register scratch) { | 4330 void MacroAssembler::CmpSmiLiteral(Register src1, Smi* smi, Register scratch) { |
4331 #if V8_TARGET_ARCH_S390X | 4331 #if V8_TARGET_ARCH_S390X |
4332 LoadSmiLiteral(scratch, smi); | 4332 if (CpuFeatures::IsSupported(DISTINCT_OPS)) { |
4333 cgr(src1, scratch); | 4333 cih(src1, Operand(reinterpret_cast<intptr_t>(smi) >> 32)); |
| 4334 } else { |
| 4335 LoadSmiLiteral(scratch, smi); |
| 4336 cgr(src1, scratch); |
| 4337 } |
4334 #else | 4338 #else |
4335 // CFI takes 32-bit immediate. | 4339 // CFI takes 32-bit immediate. |
4336 cfi(src1, Operand(smi)); | 4340 cfi(src1, Operand(smi)); |
4337 #endif | 4341 #endif |
4338 } | 4342 } |
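Editor's note: the new DISTINCT_OPS path relies on the fact that, on 64-bit S390 (assuming V8's usual kSmiShift of 32), a Smi keeps its 32-bit payload in the upper half of the word while the lower half is zero. reinterpret_cast<intptr_t>(smi) >> 32 therefore recovers the payload, and a single compare-immediate-high (cih here, clih in the logical variant below) can replace materializing the full 64-bit literal in a scratch register. A hedged sketch of that encoding on plain integers; TagSmi/UntagSmi are illustrative helpers, not V8's API:

#include <cstdint>

// Illustrative 64-bit Smi encoding (assumed kSmiShift == 32): the value
// occupies the upper 32 bits, the lower 32 bits stay zero.
static intptr_t TagSmi(int32_t value) {
  return static_cast<intptr_t>(value) << 32;
}

static int32_t UntagSmi(intptr_t smi) {
  return static_cast<int32_t>(smi >> 32);  // matches the >> 32 in the hunk
}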
4339 | 4343 |
4340 void MacroAssembler::CmpLogicalSmiLiteral(Register src1, Smi* smi, | 4344 void MacroAssembler::CmpLogicalSmiLiteral(Register src1, Smi* smi, |
4341 Register scratch) { | 4345 Register scratch) { |
4342 #if V8_TARGET_ARCH_S390X | 4346 #if V8_TARGET_ARCH_S390X |
4343 LoadSmiLiteral(scratch, smi); | 4347 if (CpuFeatures::IsSupported(DISTINCT_OPS)) { |
4344 clgr(src1, scratch); | 4348 clih(src1, Operand(reinterpret_cast<intptr_t>(smi) >> 32)); |
| 4349 } else { |
| 4350 LoadSmiLiteral(scratch, smi); |
| 4351 clgr(src1, scratch); |
| 4352 } |
4345 #else | 4353 #else |
4346 // CLFI takes 32-bit immediate | 4354 // CLFI takes 32-bit immediate |
4347 clfi(src1, Operand(smi)); | 4355 clfi(src1, Operand(smi)); |
4348 #endif | 4356 #endif |
4349 } | 4357 } |
4350 | 4358 |
4351 void MacroAssembler::AddSmiLiteral(Register dst, Register src, Smi* smi, | 4359 void MacroAssembler::AddSmiLiteral(Register dst, Register src, Smi* smi, |
4352 Register scratch) { | 4360 Register scratch) { |
4353 #if V8_TARGET_ARCH_S390X | 4361 #if V8_TARGET_ARCH_S390X |
4354 LoadSmiLiteral(scratch, smi); | 4362 if (CpuFeatures::IsSupported(DISTINCT_OPS)) { |
4355 AddP(dst, src, scratch); | 4363 if (!dst.is(src)) LoadRR(dst, src); |
| 4364 aih(dst, Operand(reinterpret_cast<intptr_t>(smi) >> 32)); |
| 4365 } else { |
| 4366 LoadSmiLiteral(scratch, smi); |
| 4367 AddP(dst, src, scratch); |
| 4368 } |
4356 #else | 4369 #else |
4357 AddP(dst, src, Operand(reinterpret_cast<intptr_t>(smi))); | 4370 AddP(dst, src, Operand(reinterpret_cast<intptr_t>(smi))); |
4358 #endif | 4371 #endif |
4359 } | 4372 } |
4360 | 4373 |
4361 void MacroAssembler::SubSmiLiteral(Register dst, Register src, Smi* smi, | 4374 void MacroAssembler::SubSmiLiteral(Register dst, Register src, Smi* smi, |
4362 Register scratch) { | 4375 Register scratch) { |
4363 #if V8_TARGET_ARCH_S390X | 4376 #if V8_TARGET_ARCH_S390X |
4364 LoadSmiLiteral(scratch, smi); | 4377 if (CpuFeatures::IsSupported(DISTINCT_OPS)) { |
4365 SubP(dst, src, scratch); | 4378 if (!dst.is(src)) LoadRR(dst, src); |
| 4379 aih(dst, Operand((-reinterpret_cast<intptr_t>(smi)) >> 32)); |
| 4380 } else { |
| 4381 LoadSmiLiteral(scratch, smi); |
| 4382 SubP(dst, src, scratch); |
| 4383 } |
4366 #else | 4384 #else |
4367 AddP(dst, src, Operand(-(reinterpret_cast<intptr_t>(smi)))); | 4385 AddP(dst, src, Operand(-(reinterpret_cast<intptr_t>(smi)))); |
4368 #endif | 4386 #endif |
4369 } | 4387 } |
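Editor's note: in the Add/Sub hunks, aih (add-immediate to the high word) handles both directions. With the payload in the high word and the low word zero, adding (-smi) >> 32 is the same as subtracting the literal, and no carry can propagate out of the low half. A small check of that identity on plain integers, assuming the arithmetic right shift that the target code also relies on:

#include <cassert>
#include <cstdint>

int main() {
  int32_t value = 42;
  intptr_t smi = static_cast<intptr_t>(value) << 32;  // tagged literal
  // Negating the tagged value and taking its high word yields -value,
  // which is what the SubSmiLiteral path feeds to aih.
  assert(static_cast<int32_t>((-smi) >> 32) == -value);
  return 0;
}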
4370 | 4388 |
4371 void MacroAssembler::AndSmiLiteral(Register dst, Register src, Smi* smi) { | 4389 void MacroAssembler::AndSmiLiteral(Register dst, Register src, Smi* smi) { |
4372 if (!dst.is(src)) LoadRR(dst, src); | 4390 if (!dst.is(src)) LoadRR(dst, src); |
4373 #if V8_TARGET_ARCH_S390X | 4391 #if V8_TARGET_ARCH_S390X |
4374 DCHECK((reinterpret_cast<intptr_t>(smi) & 0xffffffff) == 0); | 4392 DCHECK((reinterpret_cast<intptr_t>(smi) & 0xffffffff) == 0); |
4375 int value = static_cast<int>(reinterpret_cast<intptr_t>(smi) >> 32); | 4393 int value = static_cast<int>(reinterpret_cast<intptr_t>(smi) >> 32); |
(...skipping 704 matching lines...)
5080 } | 5098 } |
5081 if (mag.shift > 0) ShiftRightArith(result, result, Operand(mag.shift)); | 5099 if (mag.shift > 0) ShiftRightArith(result, result, Operand(mag.shift)); |
5082 ExtractBit(r0, dividend, 31); | 5100 ExtractBit(r0, dividend, 31); |
5083 AddP(result, r0); | 5101 AddP(result, r0); |
5084 } | 5102 } |
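Editor's note: the visible tail here is the standard fix-up of a magic-number signed division: after the multiply-high and arithmetic shift, the dividend's sign bit is added so the quotient truncates toward zero rather than toward negative infinity. A hedged stand-alone illustration for the divisor 3 (magic constant 0x55555556, shift 0); the constants are examples only, not the values V8 computes for this function:

#include <cassert>
#include <cstdint>

// Signed division by 3 via multiply-high, mirroring the emitted tail:
// multiply, arithmetic shift (0 for this divisor), add the sign bit.
static int32_t DivBy3(int32_t n) {
  int32_t q = static_cast<int32_t>(
      (static_cast<int64_t>(n) * 0x55555556LL) >> 32);  // multiply-high
  q += static_cast<uint32_t>(n) >> 31;  // the ExtractBit/AddP pair above
  return q;
}

int main() {
  assert(DivBy3(7) == 2);
  assert(DivBy3(-7) == -2);  // truncates toward zero
  return 0;
}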
5085 | 5103 |
5086 } // namespace internal | 5104 } // namespace internal |
5087 } // namespace v8 | 5105 } // namespace v8 |
5088 | 5106 |
5089 #endif // V8_TARGET_ARCH_S390 | 5107 #endif // V8_TARGET_ARCH_S390 |