| Index: src/x64/macro-assembler-x64.cc
|
| diff --git a/src/x64/macro-assembler-x64.cc b/src/x64/macro-assembler-x64.cc
|
| index 13d7ddaa685bf92e16a4adab1faa1dc17fee4aae..57aa1818fc8dda1be3dee613ad1185548b58832c 100644
|
| --- a/src/x64/macro-assembler-x64.cc
|
| +++ b/src/x64/macro-assembler-x64.cc
|
| @@ -983,6 +983,19 @@ void MacroAssembler::Set(const Operand& dst, int64_t x) {
|
| }
|
|
|
|
|
| +// ----------------------------------------------------------------------------
|
| +// Smi tagging, untagging and tag detection.
|
| +
|
| +
|
| +static inline Immediate SmiToImmediate(Smi* src) {
|
| + // Only used for 31-bit smis; a 32-bit smi cannot fit in an Immediate.
|
| + if (kSmiValueSize == 32) {
|
| + UNREACHABLE();
|
| + }
|
| + return Immediate(static_cast<int32_t>(reinterpret_cast<intptr_t>(src)));
|
| +}
|
| +
|
| +
|
| bool MacroAssembler::IsUnsafeInt(const int x) {
|
| static const int kMaxBits = 17;
|
| return !is_intn(x, kMaxBits);
|
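| With kSmiValueSize == 31 the whole tagged value, sign included, lives in the low 32 bits of the register (value << 1, sign-extended), which is why SmiToImmediate can hand the raw smi bits to a 32-bit Immediate. A standalone sketch of that layout (illustrative C++ with hypothetical names, not part of the patch):
|
| #include <cstdint>
|
| int64_t TagSmi31(int32_t value) {
|
| // Shift into bits 1..31 (tag bit 0 stays 0), then sign-extend to 64 bits.
|
| return static_cast<int32_t>(static_cast<uint32_t>(value) << 1);
|
| }
|
| int32_t UntagSmi31(int64_t smi) {
|
| // An arithmetic right shift by the tag size restores value and sign.
|
| return static_cast<int32_t>(smi) >> 1;
|
| }
|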
| @@ -991,32 +1004,51 @@ bool MacroAssembler::IsUnsafeInt(const int x) {
|
|
|
| void MacroAssembler::SafeMove(Register dst, Smi* src) {
|
| ASSERT(!dst.is(kScratchRegister));
|
| - ASSERT(kSmiValueSize == 32); // JIT cookie can be converted to Smi.
|
| - if (IsUnsafeInt(src->value()) && jit_cookie() != 0) {
|
| - Move(dst, Smi::FromInt(src->value() ^ jit_cookie()));
|
| - Move(kScratchRegister, Smi::FromInt(jit_cookie()));
|
| - xor_(dst, kScratchRegister);
|
| + if (kSmiValueSize == 32) {
|
| + if (IsUnsafeInt(src->value()) && jit_cookie() != 0) {
|
| + Move(dst, Smi::FromInt(src->value() ^ jit_cookie()));
|
| + Move(kScratchRegister, Smi::FromInt(jit_cookie()));
|
| + xor_(dst, kScratchRegister);
|
| + } else {
|
| + Move(dst, src);
|
| + }
|
| } else {
|
| - Move(dst, src);
|
| + ASSERT(kSmiValueSize == 31);
|
| + if (IsUnsafeInt(src->value()) && jit_cookie() != 0) {
|
| + movq(dst, Immediate(static_cast<int>(reinterpret_cast<intptr_t>(src)) ^
|
| + jit_cookie()));
|
| + movq(kScratchRegister, Immediate(jit_cookie()));
|
| + xor_(dst, kScratchRegister);
|
| + } else {
|
| + Move(dst, src);
|
| + }
|
| }
|
| }
|
|
|
|
|
| void MacroAssembler::SafePush(Smi* src) {
|
| - ASSERT(kSmiValueSize == 32); // JIT cookie can be converted to Smi.
|
| - if (IsUnsafeInt(src->value()) && jit_cookie() != 0) {
|
| - Push(Smi::FromInt(src->value() ^ jit_cookie()));
|
| - Move(kScratchRegister, Smi::FromInt(jit_cookie()));
|
| - xor_(Operand(rsp, 0), kScratchRegister);
|
| + if (kSmiValueSize == 32) {
|
| + if (IsUnsafeInt(src->value()) && jit_cookie() != 0) {
|
| + Push(Smi::FromInt(src->value() ^ jit_cookie()));
|
| + Move(kScratchRegister, Smi::FromInt(jit_cookie()));
|
| + xor_(Operand(rsp, 0), kScratchRegister);
|
| + } else {
|
| + Push(src);
|
| + }
|
| } else {
|
| - Push(src);
|
| + ASSERT(kSmiValueSize == 31);
|
| + if (IsUnsafeInt(src->value()) && jit_cookie() != 0) {
|
| + push(Immediate(static_cast<int>(reinterpret_cast<intptr_t>(src)) ^
|
| + jit_cookie()));
|
| + movq(kScratchRegister, Immediate(jit_cookie()));
|
| + xor_(Operand(rsp, 0), kScratchRegister);
|
| + } else {
|
| + Push(src);
|
| + }
|
| }
|
| }
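|
| The cookie masking above defends against JIT spraying: when the untagged value has enough controllable bits to encode attacker-chosen instructions (IsUnsafeInt), the constant is emitted XOR-masked and unmasked again at run time, so the chosen bit pattern never appears verbatim in the code stream. A sketch of the idea (illustrative, hypothetical helper names):
|
| #include <cstdint>
|
| uint32_t MaskForCodeStream(uint32_t raw, uint32_t jit_cookie) {
|
| return raw ^ jit_cookie; // what gets baked into the generated code
|
| }
|
| uint32_t UnmaskAtRuntime(uint32_t masked, uint32_t jit_cookie) {
|
| return masked ^ jit_cookie; // the xor_ emitted after the move or push
|
| }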
|
|
|
|
|
| -// ----------------------------------------------------------------------------
|
| -// Smi tagging, untagging and tag detection.
|
| -
|
| Register MacroAssembler::GetSmiConstant(Smi* source) {
|
| int value = source->value();
|
| if (value == 0) {
|
| @@ -1096,7 +1128,13 @@ void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
|
| if (!dst.is(src)) {
|
| movl(dst, src);
|
| }
|
| - shl(dst, Immediate(kSmiShift));
|
| + if (kSmiValueSize == 32) {
|
| + shl(dst, Immediate(kSmiShift));
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + shll(dst, Immediate(kSmiShift));
|
| + movsxlq(dst, dst);
|
| + }
|
| }
|
|
|
|
|
| @@ -1112,8 +1150,14 @@ void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) {
|
| }
|
| bind(&ok);
|
| }
|
| - ASSERT(kSmiShift % kBitsPerByte == 0);
|
| - movl(Operand(dst, kSmiShift / kBitsPerByte), src);
|
| + if (kSmiValueSize == 32) {
|
| + ASSERT(kSmiShift % kBitsPerByte == 0);
|
| + movl(Operand(dst, kSmiShift / kBitsPerByte), src);
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + Integer32ToSmi(kScratchRegister, src);
|
| + movq(dst, kScratchRegister);
|
| + }
|
| }
|
|
|
|
|
| @@ -1125,7 +1169,7 @@ void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
|
| } else {
|
| leal(dst, Operand(src, constant));
|
| }
|
| - shl(dst, Immediate(kSmiShift));
|
| + Integer32ToSmi(dst, dst);
|
| }
|
|
|
|
|
| @@ -1134,12 +1178,23 @@ void MacroAssembler::SmiToInteger32(Register dst, Register src) {
|
| if (!dst.is(src)) {
|
| movq(dst, src);
|
| }
|
| - shr(dst, Immediate(kSmiShift));
|
| + if (kSmiValueSize == 32) {
|
| + shr(dst, Immediate(kSmiShift));
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + sarl(dst, Immediate(kSmiShift));
|
| + }
|
| }
|
|
|
|
|
| void MacroAssembler::SmiToInteger32(Register dst, const Operand& src) {
|
| - movl(dst, Operand(src, kSmiShift / kBitsPerByte));
|
| + if (kSmiValueSize == 32) {
|
| + movl(dst, Operand(src, kSmiShift / kBitsPerByte));
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + movl(dst, src);
|
| + sarl(dst, Immediate(kSmiShift));
|
| + }
|
| }
|
|
|
|
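| Untagging switches from the 32-bit-smi shr to an arithmetic sarl because, in the 31-bit layout, the sign bit sits inside the low 32 bits and must be propagated. A worked pair of values under that layout (illustrative):
|
| // smi(5) = 0x0000000A: sarl by 1 -> 0x00000005
|
| // smi(-1) = 0xFFFFFFFE: sarl by 1 -> 0xFFFFFFFF (-1), where a logical
|
| // shrl would instead produce 0x7FFFFFFF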
|
| @@ -1153,20 +1208,36 @@ void MacroAssembler::SmiToInteger64(Register dst, Register src) {
|
|
|
|
|
| void MacroAssembler::SmiToInteger64(Register dst, const Operand& src) {
|
| - movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte));
|
| + if (kSmiValueSize == 32) {
|
| + movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte));
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + movq(dst, src);
|
| + SmiToInteger64(dst, dst);
|
| + }
|
| }
|
|
|
|
|
| void MacroAssembler::SmiTest(Register src) {
|
| AssertSmi(src);
|
| - testq(src, src);
|
| + if (kSmiValueSize == 32) {
|
| + testq(src, src);
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + testl(src, src);
|
| + }
|
| }
|
|
|
|
|
| void MacroAssembler::SmiCompare(Register smi1, Register smi2) {
|
| AssertSmi(smi1);
|
| AssertSmi(smi2);
|
| - cmpq(smi1, smi2);
|
| + if (kSmiValueSize == 32) {
|
| + cmpq(smi1, smi2);
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + cmpl(smi1, smi2);
|
| + }
|
| }
|
|
|
|
|
| @@ -1179,10 +1250,20 @@ void MacroAssembler::SmiCompare(Register dst, Smi* src) {
|
| void MacroAssembler::Cmp(Register dst, Smi* src) {
|
| ASSERT(!dst.is(kScratchRegister));
|
| if (src->value() == 0) {
|
| - testq(dst, dst);
|
| + if (kSmiValueSize == 32) {
|
| + testq(dst, dst);
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + testl(dst, dst);
|
| + }
|
| } else {
|
| Register constant_reg = GetSmiConstant(src);
|
| - cmpq(dst, constant_reg);
|
| + if (kSmiValueSize == 32) {
|
| + cmpq(dst, constant_reg);
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + cmpl(dst, constant_reg);
|
| + }
|
| }
|
| }
|
|
|
| @@ -1190,33 +1271,59 @@ void MacroAssembler::Cmp(Register dst, Smi* src) {
|
| void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
|
| AssertSmi(dst);
|
| AssertSmi(src);
|
| - cmpq(dst, src);
|
| + if (kSmiValueSize == 32) {
|
| + cmpq(dst, src);
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + cmpl(dst, src);
|
| + }
|
| }
|
|
|
|
|
| void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
|
| AssertSmi(dst);
|
| AssertSmi(src);
|
| - cmpq(dst, src);
|
| + if (kSmiValueSize == 32) {
|
| + cmpq(dst, src);
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + cmpl(dst, src);
|
| + }
|
| }
|
|
|
|
|
| void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
|
| AssertSmi(dst);
|
| - cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
|
| + if (kSmiValueSize == 32) {
|
| + cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + cmpl(dst, SmiToImmediate(src));
|
| + }
|
| }
|
|
|
|
|
| void MacroAssembler::Cmp(const Operand& dst, Smi* src) {
|
| - // The Operand cannot use the smi register.
|
| - Register smi_reg = GetSmiConstant(src);
|
| - ASSERT(!dst.AddressUsesRegister(smi_reg));
|
| - cmpq(dst, smi_reg);
|
| + if (kSmiValueSize == 32) {
|
| + // The Operand cannot use the smi register.
|
| + Register smi_reg = GetSmiConstant(src);
|
| + ASSERT(!dst.AddressUsesRegister(smi_reg));
|
| + cmpq(dst, smi_reg);
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + cmpl(dst, SmiToImmediate(src));
|
| + }
|
| }
|
|
|
|
|
| void MacroAssembler::SmiCompareInteger32(const Operand& dst, Register src) {
|
| - cmpl(Operand(dst, kSmiShift / kBitsPerByte), src);
|
| + if (kSmiValueSize == 32) {
|
| + cmpl(Operand(dst, kSmiShift / kBitsPerByte), src);
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + SmiToInteger32(kScratchRegister, dst);
|
| + cmpl(kScratchRegister, src);
|
| + }
|
| }
|
|
|
|
|
| @@ -1245,7 +1352,12 @@ void MacroAssembler::PositiveSmiDivPowerOfTwoToInteger32(Register dst,
|
| int power) {
|
| ASSERT((0 <= power) && (power < 32));
|
| if (dst.is(src)) {
|
| - shr(dst, Immediate(power + kSmiShift));
|
| + if (kSmiValueSize == 32) {
|
| + shr(dst, Immediate(power + kSmiShift));
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + shrl(dst, Immediate(power + kSmiShift));
|
| + }
|
| } else {
|
| UNIMPLEMENTED(); // Not used.
|
| }
|
| @@ -1299,8 +1411,15 @@ Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
|
| return CheckSmi(first);
|
| }
|
| STATIC_ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3);
|
| - leal(kScratchRegister, Operand(first, second, times_1, 0));
|
| - testb(kScratchRegister, Immediate(0x03));
|
| + if (kSmiValueSize == 32) {
|
| + leal(kScratchRegister, Operand(first, second, times_1, 0));
|
| + testb(kScratchRegister, Immediate(0x03));
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + movl(kScratchRegister, first);
|
| + orl(kScratchRegister, second);
|
| + testb(kScratchRegister, Immediate(kSmiTagMask));
|
| + }
|
| return zero;
|
| }
|
|
|
| @@ -1340,22 +1459,39 @@ Condition MacroAssembler::CheckEitherSmi(Register first,
|
| Condition MacroAssembler::CheckIsMinSmi(Register src) {
|
| ASSERT(!src.is(kScratchRegister));
|
| // If we overflow by subtracting one, it's the minimal smi value.
|
| - cmpq(src, kSmiConstantRegister);
|
| + if (kSmiValueSize == 32) {
|
| + cmpq(src, kSmiConstantRegister);
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + cmpl(src, kSmiConstantRegister);
|
| + }
|
| return overflow;
|
| }
|
|
|
|
|
| Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) {
|
| - // A 32-bit integer value can always be converted to a smi.
|
| - return always;
|
| + if (kSmiValueSize == 32) {
|
| + // A 32-bit integer value can always be converted to a smi.
|
| + return always;
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + cmpl(src, Immediate(0xc0000000));
|
| + return positive;
|
| + }
|
| }
|
|
|
|
|
| Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {
|
| - // An unsigned 32-bit integer value is valid as long as the high bit
|
| - // is not set.
|
| - testl(src, src);
|
| - return positive;
|
| + if (kSmiValueSize == 32) {
|
| + // An unsigned 32-bit integer value is valid as long as the high bit
|
| + // is not set.
|
| + testl(src, src);
|
| + return positive;
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + testl(src, Immediate(0xc0000000));
|
| + return zero;
|
| + }
|
| }
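|
| Both predicates above encode the same range facts: a signed 32-bit value fits a 31-bit smi iff it lies in [-2^30, 2^30), which the single cmpl against 0xc0000000 plus the "positive" condition tests, and an unsigned value fits iff its top two bits are clear. Equivalent C++ checks (illustrative, assuming the patch's layout):
|
| #include <cstdint>
|
| bool IsValidSmi31(int32_t v) {
|
| return v >= -(INT32_C(1) << 30) && v < (INT32_C(1) << 30);
|
| }
|
| bool IsValidUintSmi31(uint32_t v) {
|
| return (v & 0xC0000000u) == 0; // mirrors testl(src, Immediate(0xc0000000))
|
| }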
|
|
|
|
|
| @@ -1388,6 +1524,14 @@ void MacroAssembler::JumpIfNotValidSmiValue(Register src,
|
| }
|
|
|
|
|
| +void MacroAssembler::JumpIfValidSmiValue(Register src,
|
| + Label* on_valid,
|
| + Label::Distance near_jump) {
|
| + Condition is_valid = CheckInteger32ValidSmiValue(src);
|
| + j(is_valid, on_valid, near_jump);
|
| +}
|
| +
|
| +
|
| void MacroAssembler::JumpIfUIntNotValidSmiValue(Register src,
|
| Label* on_invalid,
|
| Label::Distance near_jump) {
|
| @@ -1396,6 +1540,14 @@ void MacroAssembler::JumpIfUIntNotValidSmiValue(Register src,
|
| }
|
|
|
|
|
| +void MacroAssembler::JumpIfUIntValidSmiValue(Register src,
|
| + Label* on_valid,
|
| + Label::Distance near_jump) {
|
| + Condition is_valid = CheckUInteger32ValidSmiValue(src);
|
| + j(is_valid, on_valid, near_jump);
|
| +}
|
| +
|
| +
|
| void MacroAssembler::JumpIfSmi(Register src,
|
| Label* on_smi,
|
| Label::Distance near_jump) {
|
| @@ -1461,11 +1613,19 @@ void MacroAssembler::SmiTryAddConstant(Register dst,
|
| JumpIfNotSmi(src, on_not_smi_result, near_jump);
|
| Register tmp = (dst.is(src) ? kScratchRegister : dst);
|
| LoadSmiConstant(tmp, constant);
|
| - addq(tmp, src);
|
| + if (kSmiValueSize == 32) {
|
| + addq(tmp, src);
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + addl(tmp, src);
|
| + }
|
| j(overflow, on_not_smi_result, near_jump);
|
| if (dst.is(src)) {
|
| movq(dst, tmp);
|
| }
|
| + if (kSmiValueSize == 31) {
|
| + movsxlq(dst, dst);
|
| + }
|
| }
|
|
|
|
|
| @@ -1520,7 +1680,13 @@ void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
|
|
|
| void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) {
|
| if (constant->value() != 0) {
|
| - addl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(constant->value()));
|
| + if (kSmiValueSize == 32) {
|
| + addl(Operand(dst, kSmiShift / kBitsPerByte),
|
| + Immediate(constant->value()));
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + addq(dst, SmiToImmediate(constant));
|
| + }
|
| }
|
| }
|
|
|
| @@ -1538,13 +1704,31 @@ void MacroAssembler::SmiAddConstant(Register dst,
|
| ASSERT(!dst.is(kScratchRegister));
|
|
|
| LoadSmiConstant(kScratchRegister, constant);
|
| - addq(kScratchRegister, src);
|
| + if (kSmiValueSize == 32) {
|
| + addq(kScratchRegister, src);
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + addl(kScratchRegister, src);
|
| + }
|
| j(overflow, on_not_smi_result, near_jump);
|
| - movq(dst, kScratchRegister);
|
| + if (kSmiValueSize == 32) {
|
| + movq(dst, kScratchRegister);
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + movsxlq(dst, kScratchRegister);
|
| + }
|
| } else {
|
| LoadSmiConstant(dst, constant);
|
| - addq(dst, src);
|
| + if (kSmiValueSize == 32) {
|
| + addq(dst, src);
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + addl(dst, src);
|
| + }
|
| j(overflow, on_not_smi_result, near_jump);
|
| + if (kSmiValueSize == 31) {
|
| + movsxlq(dst, dst);
|
| + }
|
| }
|
| }
|
|
|
| @@ -1560,10 +1744,16 @@ void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
|
| subq(dst, constant_reg);
|
| } else {
|
| if (constant->value() == Smi::kMinValue) {
|
| - LoadSmiConstant(dst, constant);
|
| - // Adding and subtracting the min-value gives the same result, it only
|
| - // differs on the overflow bit, which we don't check here.
|
| - addq(dst, src);
|
| + if (kSmiValueSize == 32) {
|
| + LoadSmiConstant(dst, constant);
|
| + // Adding and subtracting the min-value gives the same result; it only
|
| + // differs in the overflow bit, which we don't check here.
|
| + addq(dst, src);
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + movq(dst, src);
|
| + subq(dst, SmiToImmediate(constant));
|
| + }
|
| } else {
|
| // Subtract by adding the negation.
|
| LoadSmiConstant(dst, Smi::FromInt(-constant->value()));
|
| @@ -1587,32 +1777,63 @@ void MacroAssembler::SmiSubConstant(Register dst,
|
| if (constant->value() == Smi::kMinValue) {
|
| // Subtracting min-value from any non-negative value will overflow.
|
| // We test the non-negativeness before doing the subtraction.
|
| - testq(src, src);
|
| + if (kSmiValueSize == 32) {
|
| + testq(src, src);
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + testl(src, src);
|
| + }
|
| j(not_sign, on_not_smi_result, near_jump);
|
| LoadSmiConstant(kScratchRegister, constant);
|
| subq(dst, kScratchRegister);
|
| } else {
|
| // Subtract by adding the negation.
|
| LoadSmiConstant(kScratchRegister, Smi::FromInt(-constant->value()));
|
| - addq(kScratchRegister, dst);
|
| + if (kSmiValueSize == 32) {
|
| + addq(kScratchRegister, dst);
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + addl(kScratchRegister, dst);
|
| + }
|
| j(overflow, on_not_smi_result, near_jump);
|
| - movq(dst, kScratchRegister);
|
| + if (kSmiValueSize == 32) {
|
| + movq(dst, kScratchRegister);
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + movsxlq(dst, kScratchRegister);
|
| + }
|
| }
|
| } else {
|
| if (constant->value() == Smi::kMinValue) {
|
| // Subtracting min-value from any non-negative value will overflow.
|
| // We test the non-negativeness before doing the subtraction.
|
| - testq(src, src);
|
| - j(not_sign, on_not_smi_result, near_jump);
|
| - LoadSmiConstant(dst, constant);
|
| - // Adding and subtracting the min-value gives the same result, it only
|
| - // differs on the overflow bit, which we don't check here.
|
| - addq(dst, src);
|
| + if (kSmiValueSize == 32) {
|
| + testq(src, src);
|
| + j(not_sign, on_not_smi_result, near_jump);
|
| + LoadSmiConstant(dst, constant);
|
| + // Adding and subtracting the min-value gives the same result; it only
|
| + // differs in the overflow bit, which we don't check here.
|
| + addq(dst, src);
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + testl(src, src);
|
| + j(not_sign, on_not_smi_result, near_jump);
|
| + movq(dst, src);
|
| + subq(dst, SmiToImmediate(constant));
|
| + }
|
| } else {
|
| // Subtract by adding the negation.
|
| LoadSmiConstant(dst, Smi::FromInt(-(constant->value())));
|
| - addq(dst, src);
|
| + if (kSmiValueSize == 32) {
|
| + addq(dst, src);
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + addl(dst, src);
|
| + }
|
| j(overflow, on_not_smi_result, near_jump);
|
| + if (kSmiValueSize == 31) {
|
| + movsxlq(dst, dst);
|
| + }
|
| }
|
| }
|
| }
|
| @@ -1627,13 +1848,23 @@ void MacroAssembler::SmiNeg(Register dst,
|
| movq(kScratchRegister, src);
|
| neg(dst); // Low 32 bits are retained as zero by negation.
|
| // Test if result is zero or Smi::kMinValue.
|
| + if (kSmiValueSize == 32) {
|
| cmpq(dst, kScratchRegister);
|
| - j(not_equal, on_smi_result, near_jump);
|
| - movq(src, kScratchRegister);
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + cmpl(dst, kScratchRegister);
|
| + }
|
| + j(not_equal, on_smi_result, near_jump);
|
| + movq(src, kScratchRegister);
|
| } else {
|
| movq(dst, src);
|
| neg(dst);
|
| - cmpq(dst, src);
|
| + if (kSmiValueSize == 32) {
|
| + cmpq(dst, src);
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + cmpl(dst, src);
|
| + }
|
| // If the result is zero or Smi::kMinValue, negation failed to create a smi.
|
| j(not_equal, on_smi_result, near_jump);
|
| }
|
| @@ -1649,13 +1880,31 @@ void MacroAssembler::SmiAdd(Register dst,
|
| ASSERT(!dst.is(src2));
|
| if (dst.is(src1)) {
|
| movq(kScratchRegister, src1);
|
| - addq(kScratchRegister, src2);
|
| + if (kSmiValueSize == 32) {
|
| + addq(kScratchRegister, src2);
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + addl(kScratchRegister, src2);
|
| + }
|
| j(overflow, on_not_smi_result, near_jump);
|
| - movq(dst, kScratchRegister);
|
| + if (kSmiValueSize == 32) {
|
| + movq(dst, kScratchRegister);
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + movsxlq(dst, kScratchRegister);
|
| + }
|
| } else {
|
| movq(dst, src1);
|
| - addq(dst, src2);
|
| + if (kSmiValueSize == 32) {
|
| + addq(dst, src2);
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + addl(dst, src2);
|
| + }
|
| j(overflow, on_not_smi_result, near_jump);
|
| + if (kSmiValueSize == 31) {
|
| + movsxlq(dst, dst);
|
| + }
|
| }
|
| }
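|
| The sequence above is the patch's recurring 31-bit arithmetic shape: operate on the low 32 bits so the CPU's overflow flag signals smi-range overflow directly, bail out on overflow, then movsxlq to restore the canonical sign-extended form. Because a 31-bit smi is value << 1, adding two tagged values adds the untagged values and keeps the tag bit clear, so no untag/retag is needed. A C++ sketch of the addl/j(overflow)/movsxlq shape (illustrative, GCC/Clang builtin):
|
| #include <cstdint>
|
| bool SmiAdd31(int32_t smi_a, int32_t smi_b, int64_t* out) {
|
| int32_t sum;
|
| if (__builtin_add_overflow(smi_a, smi_b, &sum)) return false; // j(overflow)
|
| *out = sum; // movsxlq: implicit sign extension to 64 bits
|
| return true;
|
| }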
|
|
|
| @@ -1668,16 +1917,34 @@ void MacroAssembler::SmiAdd(Register dst,
|
| ASSERT_NOT_NULL(on_not_smi_result);
|
| if (dst.is(src1)) {
|
| movq(kScratchRegister, src1);
|
| - addq(kScratchRegister, src2);
|
| + if (kSmiValueSize == 32) {
|
| + addq(kScratchRegister, src2);
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + addl(kScratchRegister, src2);
|
| + }
|
| j(overflow, on_not_smi_result, near_jump);
|
| - movq(dst, kScratchRegister);
|
| + if (kSmiValueSize == 32) {
|
| + movq(dst, kScratchRegister);
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + movsxlq(dst, kScratchRegister);
|
| + }
|
| } else {
|
| ASSERT(!src2.AddressUsesRegister(dst));
|
| movq(dst, src1);
|
| - addq(dst, src2);
|
| + if (kSmiValueSize == 32) {
|
| + addq(dst, src2);
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + addl(dst, src2);
|
| + }
|
| j(overflow, on_not_smi_result, near_jump);
|
| + if (kSmiValueSize == 31) {
|
| + movsxlq(dst, dst);
|
| }
|
| }
|
| +}
|
|
|
|
|
| void MacroAssembler::SmiAdd(Register dst,
|
| @@ -1707,13 +1974,27 @@ void MacroAssembler::SmiSub(Register dst,
|
| ASSERT_NOT_NULL(on_not_smi_result);
|
| ASSERT(!dst.is(src2));
|
| if (dst.is(src1)) {
|
| - cmpq(dst, src2);
|
| + if (kSmiValueSize == 32) {
|
| + cmpq(dst, src2);
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + cmpl(dst, src2);
|
| + }
|
| j(overflow, on_not_smi_result, near_jump);
|
| subq(dst, src2);
|
| } else {
|
| - movq(dst, src1);
|
| - subq(dst, src2);
|
| + if (kSmiValueSize == 32) {
|
| + movq(dst, src1);
|
| + subq(dst, src2);
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + movl(dst, src1);
|
| + subl(dst, src2);
|
| + }
|
| j(overflow, on_not_smi_result, near_jump);
|
| + if (kSmiValueSize == 31) {
|
| + movsxlq(dst, dst);
|
| + }
|
| }
|
| }
|
|
|
| @@ -1737,14 +2018,34 @@ void MacroAssembler::SmiSub(Register dst,
|
| Label::Distance near_jump) {
|
| ASSERT_NOT_NULL(on_not_smi_result);
|
| if (dst.is(src1)) {
|
| - movq(kScratchRegister, src2);
|
| - cmpq(src1, kScratchRegister);
|
| + if (kSmiValueSize == 32) {
|
| + movq(kScratchRegister, src2);
|
| + cmpq(src1, kScratchRegister);
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + movsxlq(kScratchRegister, src2); // Load src2 as a canonical smi.
|
| + cmpl(src1, kScratchRegister);
|
| + }
|
| j(overflow, on_not_smi_result, near_jump);
|
| - subq(src1, kScratchRegister);
|
| + if (kSmiValueSize == 32) {
|
| + subq(src1, kScratchRegister);
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + subq(src1, kScratchRegister); // Perform the subtraction the cmpl checked.
|
| + }
|
| } else {
|
| - movq(dst, src1);
|
| - subq(dst, src2);
|
| + if (kSmiValueSize == 32) {
|
| + movq(dst, src1);
|
| + subq(dst, src2);
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + movl(dst, src1);
|
| + subl(dst, src2);
|
| + }
|
| j(overflow, on_not_smi_result, near_jump);
|
| + if (kSmiValueSize == 31) {
|
| + movsxlq(dst, dst);
|
| + }
|
| }
|
| }
|
|
|
| @@ -1775,8 +2076,14 @@ void MacroAssembler::SmiMul(Register dst,
|
| if (dst.is(src1)) {
|
| Label failure, zero_correct_result;
|
| movq(kScratchRegister, src1); // Create backup for later testing.
|
| - SmiToInteger64(dst, src1);
|
| - imul(dst, src2);
|
| + if (kSmiValueSize == 32) {
|
| + SmiToInteger64(dst, src1);
|
| + imul(dst, src2);
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + SmiToInteger32(dst, src1);
|
| + imull(dst, src2);
|
| + }
|
| j(overflow, &failure, Label::kNear);
|
|
|
| // Check for negative zero result. If product is zero, and one
|
| @@ -1798,9 +2105,18 @@ void MacroAssembler::SmiMul(Register dst,
|
| Set(dst, 0);
|
|
|
| bind(&correct_result);
|
| + if (kSmiValueSize == 31) {
|
| + movsxlq(dst, dst);
|
| + }
|
| } else {
|
| - SmiToInteger64(dst, src1);
|
| - imul(dst, src2);
|
| + if (kSmiValueSize == 32) {
|
| + SmiToInteger64(dst, src1);
|
| + imul(dst, src2);
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + SmiToInteger32(dst, src1);
|
| + imull(dst, src2);
|
| + }
|
| j(overflow, on_not_smi_result, near_jump);
|
| // Check for negative zero result. If product is zero, and one
|
| // argument is negative, go to slow case.
|
| @@ -1813,6 +2129,9 @@ void MacroAssembler::SmiMul(Register dst,
|
| xor_(kScratchRegister, src2);
|
| j(negative, on_not_smi_result, near_jump);
|
| bind(&correct_result);
|
| + if (kSmiValueSize == 31) {
|
| + movsxlq(dst, dst);
|
| + }
|
| }
|
| }
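|
| SmiMul untags only one operand before the multiply: since a 31-bit smi is value << 1, multiplying untagged a by the still-tagged smi(b) yields (a * b) << 1, which is already the tagged product, and imull's overflow flag catches products outside the smi range. Sketch (illustrative):
|
| #include <cstdint>
|
| bool SmiMul31(int32_t smi_a, int32_t smi_b, int32_t* out_smi) {
|
| int32_t a = smi_a >> 1; // SmiToInteger32
|
| int32_t product;
|
| if (__builtin_mul_overflow(a, smi_b, &product)) return false; // j(overflow)
|
| *out_smi = product; // a * (b << 1) == (a * b) << 1, already tagged
|
| return true;
|
| }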
|
|
|
| @@ -1845,7 +2164,12 @@ void MacroAssembler::SmiDiv(Register dst,
|
| // We overshoot a little and go to slow case if we divide min-value
|
| // by any negative value, not just -1.
|
| Label safe_div;
|
| - testl(rax, Immediate(0x7fffffff));
|
| + if (kSmiValueSize == 32) {
|
| + testl(rax, Immediate(0x7fffffff));
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + testl(rax, Immediate(0x3fffffff));
|
| + }
|
| j(not_zero, &safe_div, Label::kNear);
|
| testq(src2, src2);
|
| if (src1.is(rax)) {
|
| @@ -1939,8 +2263,13 @@ void MacroAssembler::SmiMod(Register dst,
|
| void MacroAssembler::SmiNot(Register dst, Register src) {
|
| ASSERT(!dst.is(kScratchRegister));
|
| ASSERT(!src.is(kScratchRegister));
|
| + if (kSmiValueSize == 32) {
|
| // Set tag and padding bits before negating, so that they are zero afterwards.
|
| - movl(kScratchRegister, Immediate(~0));
|
| + movl(kScratchRegister, Immediate(~0));
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + movl(kScratchRegister, Immediate(1));
|
| + }
|
| if (dst.is(src)) {
|
| xor_(dst, kScratchRegister);
|
| } else {
|
| @@ -2032,12 +2361,22 @@ void MacroAssembler::SmiShiftArithmeticRightConstant(Register dst,
|
|
|
| void MacroAssembler::SmiShiftLeftConstant(Register dst,
|
| Register src,
|
| - int shift_value) {
|
| + int shift_value,
|
| + Label* on_not_smi_result) {
|
| if (!dst.is(src)) {
|
| movq(dst, src);
|
| }
|
| +
|
| if (shift_value > 0) {
|
| - shl(dst, Immediate(shift_value));
|
| + if (kSmiValueSize == 32) {
|
| + shl(dst, Immediate(shift_value));
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + SmiToInteger32(dst, dst);
|
| + shll(dst, Immediate(shift_value));
|
| + JumpIfNotValidSmiValue(dst, on_not_smi_result);
|
| + Integer32ToSmi(dst, dst);
|
| + }
|
| }
|
| }
|
|
|
| @@ -2051,27 +2390,74 @@ void MacroAssembler::SmiShiftLogicalRightConstant(
|
| } else {
|
| movq(dst, src);
|
| if (shift_value == 0) {
|
| - testq(dst, dst);
|
| + if (kSmiValueSize == 32) {
|
| + testq(dst, dst);
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + testl(dst, dst);
|
| + }
|
| j(negative, on_not_smi_result, near_jump);
|
| }
|
| - shr(dst, Immediate(shift_value + kSmiShift));
|
| - shl(dst, Immediate(kSmiShift));
|
| + if (kSmiValueSize == 32) {
|
| + shr(dst, Immediate(shift_value + kSmiShift));
|
| + shl(dst, Immediate(kSmiShift));
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + SmiToInteger32(dst, dst);
|
| + shrl(dst, Immediate(shift_value));
|
| + JumpIfUIntNotValidSmiValue(dst, on_not_smi_result, near_jump);
|
| + shll(dst, Immediate(kSmiShift));
|
| + }
|
| }
|
| }
|
|
|
|
|
| void MacroAssembler::SmiShiftLeft(Register dst,
|
| Register src1,
|
| - Register src2) {
|
| - ASSERT(!dst.is(rcx));
|
| - // Untag shift amount.
|
| - if (!dst.is(src1)) {
|
| - movq(dst, src1);
|
| + Register src2,
|
| + Label* on_not_smi_result) {
|
| + if (kSmiValueSize == 32) {
|
| + ASSERT(!dst.is(rcx));
|
| + // Untag shift amount.
|
| + if (!dst.is(src1)) {
|
| + movq(dst, src1);
|
| + }
|
| + SmiToInteger32(rcx, src2);
|
| + // Shift amount is specified by the lower 5 bits, not six as for the shl opcode.
|
| + and_(rcx, Immediate(0x1f));
|
| + shl_cl(dst);
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + ASSERT(!dst.is(kScratchRegister));
|
| + ASSERT(!src1.is(kScratchRegister));
|
| + ASSERT(!src2.is(kScratchRegister));
|
| + ASSERT(!dst.is(rcx));
|
| + Label result_ok;
|
| +
|
| + if (src1.is(rcx) || src2.is(rcx)) {
|
| + movq(kScratchRegister, rcx);
|
| + }
|
| + // Untag shift amount.
|
| + if (!dst.is(src1)) {
|
| + movq(dst, src1);
|
| + }
|
| + SmiToInteger32(dst, dst);
|
| + SmiToInteger32(rcx, src2);
|
| + // Shift amount is specified by the lower 5 bits, not six as for the shl opcode.
|
| + andl(rcx, Immediate(0x1f));
|
| + shll_cl(dst);
|
| + JumpIfValidSmiValue(dst, &result_ok, Label::kNear);
|
| + if (src1.is(rcx) || src2.is(rcx)) {
|
| + if (src1.is(rcx)) {
|
| + movq(src1, kScratchRegister);
|
| + } else {
|
| + movq(src2, kScratchRegister);
|
| + }
|
| + }
|
| + jmp(on_not_smi_result);
|
| + bind(&result_ok);
|
| + Integer32ToSmi(dst, dst);
|
| }
|
| - SmiToInteger32(rcx, src2);
|
| - // Shift amount specified by lower 5 bits, not six as the shl opcode.
|
| - and_(rcx, Immediate(0x1f));
|
| - shl_cl(dst);
|
| }
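|
| The new on_not_smi_result path exists because left shift could never fail with 32-bit smis: JS << truncates to int32, and every int32 is a valid 32-bit smi. With 31-bit smis the truncated result can still fall outside [-2^30, 2^30), so the shifted value is range-checked and rcx is restored before bailing. A sketch of the success/failure split (illustrative):
|
| #include <cstdint>
|
| bool SmiShl31(int32_t value, int32_t amount, int32_t* out_smi) {
|
| uint32_t shifted = static_cast<uint32_t>(value) << (amount & 0x1f);
|
| int32_t r = static_cast<int32_t>(shifted); // JS ToInt32 semantics
|
| if (r < -(INT32_C(1) << 30) || r >= (INT32_C(1) << 30)) return false;
|
| *out_smi = static_cast<int32_t>(static_cast<uint32_t>(r) << 1); // retag
|
| return true;
|
| }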
|
|
|
|
|
| @@ -2080,36 +2466,68 @@ void MacroAssembler::SmiShiftLogicalRight(Register dst,
|
| Register src2,
|
| Label* on_not_smi_result,
|
| Label::Distance near_jump) {
|
| - ASSERT(!dst.is(kScratchRegister));
|
| - ASSERT(!src1.is(kScratchRegister));
|
| - ASSERT(!src2.is(kScratchRegister));
|
| - ASSERT(!dst.is(rcx));
|
| - // dst and src1 can be the same, because the one case that bails out
|
| - // is a shift by 0, which leaves dst, and therefore src1, unchanged.
|
| - if (src1.is(rcx) || src2.is(rcx)) {
|
| - movq(kScratchRegister, rcx);
|
| - }
|
| - if (!dst.is(src1)) {
|
| - movq(dst, src1);
|
| - }
|
| - SmiToInteger32(rcx, src2);
|
| - orl(rcx, Immediate(kSmiShift));
|
| - shr_cl(dst); // Shift is rcx modulo 0x1f + 32.
|
| - shl(dst, Immediate(kSmiShift));
|
| - testq(dst, dst);
|
| - if (src1.is(rcx) || src2.is(rcx)) {
|
| - Label positive_result;
|
| - j(positive, &positive_result, Label::kNear);
|
| - if (src1.is(rcx)) {
|
| - movq(src1, kScratchRegister);
|
| + if (kSmiValueSize == 32) {
|
| + ASSERT(!dst.is(kScratchRegister));
|
| + ASSERT(!src1.is(kScratchRegister));
|
| + ASSERT(!src2.is(kScratchRegister));
|
| + ASSERT(!dst.is(rcx));
|
| + // dst and src1 can be the same, because the one case that bails out
|
| + // is a shift by 0, which leaves dst, and therefore src1, unchanged.
|
| + if (src1.is(rcx) || src2.is(rcx)) {
|
| + movq(kScratchRegister, rcx);
|
| + }
|
| + if (!dst.is(src1)) {
|
| + movq(dst, src1);
|
| + }
|
| + SmiToInteger32(rcx, src2);
|
| + orl(rcx, Immediate(kSmiShift));
|
| + shr_cl(dst); // Shift amount is (rcx & 0x1f) + 32.
|
| + shl(dst, Immediate(kSmiShift));
|
| + testq(dst, dst);
|
| + if (src1.is(rcx) || src2.is(rcx)) {
|
| + Label positive_result;
|
| + j(positive, &positive_result, Label::kNear);
|
| + if (src1.is(rcx)) {
|
| + movq(src1, kScratchRegister);
|
| + } else {
|
| + movq(src2, kScratchRegister);
|
| + }
|
| + jmp(on_not_smi_result, near_jump);
|
| + bind(&positive_result);
|
| } else {
|
| - movq(src2, kScratchRegister);
|
| + // src2 was zero and src1 negative.
|
| + j(negative, on_not_smi_result, near_jump);
|
| }
|
| - jmp(on_not_smi_result, near_jump);
|
| - bind(&positive_result);
|
| } else {
|
| - // src2 was zero and src1 negative.
|
| - j(negative, on_not_smi_result, near_jump);
|
| + ASSERT(kSmiValueSize == 31);
|
| + ASSERT(!dst.is(kScratchRegister));
|
| + ASSERT(!src1.is(kScratchRegister));
|
| + ASSERT(!src2.is(kScratchRegister));
|
| + ASSERT(!dst.is(rcx));
|
| + Label result_ok;
|
| +
|
| + // dst and src1 can be the same, because the one case that bails out
|
| + // is a shift by 0, which leaves dst, and therefore src1, unchanged.
|
| + if (src1.is(rcx) || src2.is(rcx)) {
|
| + movq(kScratchRegister, rcx);
|
| + }
|
| + if (!dst.is(src1)) {
|
| + movq(dst, src1);
|
| + }
|
| + SmiToInteger32(rcx, src2);
|
| + SmiToInteger32(dst, dst);
|
| + shrl_cl(dst);
|
| + JumpIfUIntValidSmiValue(dst, &result_ok, Label::kNear);
|
| + if (src1.is(rcx) || src2.is(rcx)) {
|
| + if (src1.is(rcx)) {
|
| + movq(src1, kScratchRegister);
|
| + } else {
|
| + movq(src2, kScratchRegister);
|
| + }
|
| + }
|
| + jmp(on_not_smi_result);
|
| + bind(&result_ok);
|
| + Integer32ToSmi(dst, dst);
|
| }
|
| }
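|
| Logical right shift has the opposite wrinkle: JS >>> produces a uint32, so even a shift by zero of a negative smi must bail out, and the result is representable only when its top two bits are clear. Equivalent check (illustrative):
|
| #include <cstdint>
|
| bool SmiShr31(int32_t smi_a, int32_t amount, int32_t* out_smi) {
|
| uint32_t r = static_cast<uint32_t>(smi_a >> 1) >> (amount & 0x1f);
|
| if ((r & 0xC0000000u) != 0) return false; // not a valid unsigned smi value
|
| *out_smi = static_cast<int32_t>(r << 1); // Integer32ToSmi
|
| return true;
|
| }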
|
|
|
| @@ -2130,9 +2548,15 @@ void MacroAssembler::SmiShiftArithmeticRight(Register dst,
|
| movq(dst, src1);
|
| }
|
| SmiToInteger32(rcx, src2);
|
| - orl(rcx, Immediate(kSmiShift));
|
| - sar_cl(dst); // Shift 32 + original rcx & 0x1f.
|
| - shl(dst, Immediate(kSmiShift));
|
| + if (kSmiValueSize == 32) {
|
| + orl(rcx, Immediate(kSmiShift));
|
| + sar_cl(dst); // Shift amount is 32 + (original rcx & 0x1f).
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + SmiToInteger32(dst, dst);
|
| + sarl_cl(dst);
|
| + }
|
| + Integer32ToSmi(dst, dst);
|
| if (src1.is(rcx)) {
|
| movq(src1, kScratchRegister);
|
| } else if (src2.is(rcx)) {
|
| @@ -2189,14 +2613,24 @@ SmiIndex MacroAssembler::SmiToIndex(Register dst,
|
| if (!dst.is(src)) {
|
| movq(dst, src);
|
| }
|
| - if (shift < kSmiShift) {
|
| - sar(dst, Immediate(kSmiShift - shift));
|
| + if (kSmiValueSize == 32) {
|
| + if (shift < kSmiShift) {
|
| + sar(dst, Immediate(kSmiShift - shift));
|
| + } else {
|
| + shl(dst, Immediate(shift - kSmiShift));
|
| + }
|
| + return SmiIndex(dst, times_1);
|
| } else {
|
| - shl(dst, Immediate(shift - kSmiShift));
|
| + ASSERT(kSmiValueSize == 31);
|
| + if (shift == times_1) {
|
| + sar(dst, Immediate(kSmiShift));
|
| + return SmiIndex(dst, times_1);
|
| + }
|
| + return SmiIndex(dst, static_cast<ScaleFactor>(shift - 1));
|
| }
|
| - return SmiIndex(dst, times_1);
|
| }
|
|
|
| +
|
| SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
|
| Register src,
|
| int shift) {
|
| @@ -2206,21 +2640,57 @@ SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
|
| movq(dst, src);
|
| }
|
| neg(dst);
|
| - if (shift < kSmiShift) {
|
| - sar(dst, Immediate(kSmiShift - shift));
|
| + if (kSmiValueSize == 32) {
|
| + if (shift < kSmiShift) {
|
| + sar(dst, Immediate(kSmiShift - shift));
|
| + } else {
|
| + shl(dst, Immediate(shift - kSmiShift));
|
| + }
|
| + return SmiIndex(dst, times_1);
|
| } else {
|
| - shl(dst, Immediate(shift - kSmiShift));
|
| + ASSERT(kSmiValueSize == 31);
|
| + if (shift == times_1) {
|
| + sar(dst, Immediate(kSmiShift));
|
| + return SmiIndex(dst, times_1);
|
| + }
|
| + return SmiIndex(dst, static_cast<ScaleFactor>(shift - 1));
|
| }
|
| - return SmiIndex(dst, times_1);
|
| }
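|
| The SmiIndex rewrite is where the 31-bit tag pays for itself: a smi already equals value * 2, so an index scaled by 2^shift can be formed as smi * 2^(shift - 1), skipping the untagging shift whenever shift > 0 (illustrative identity):
|
| // value << shift == (value << 1) << (shift - 1) == smi << (shift - 1)
|
| // e.g. an access scaled by 8 uses SmiIndex(smi_reg, times_4) instead of
|
| // untagging and then scaling with times_8.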
|
|
|
|
|
| void MacroAssembler::AddSmiField(Register dst, const Operand& src) {
|
| - ASSERT_EQ(0, kSmiShift % kBitsPerByte);
|
| - addl(dst, Operand(src, kSmiShift / kBitsPerByte));
|
| + if (kSmiValueSize == 32) {
|
| + ASSERT_EQ(0, kSmiShift % kBitsPerByte);
|
| + addl(dst, Operand(src, kSmiShift / kBitsPerByte));
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + SmiToInteger32(kScratchRegister, src);
|
| + addl(dst, kScratchRegister);
|
| + }
|
| +}
|
| +
|
| +
|
| +void MacroAssembler::Test(const Operand& src, Smi* source) {
|
| + if (kSmiValueSize == 32) {
|
| + testl(Operand(src, kIntSize), Immediate(source->value()));
|
| + } else {
|
| + ASSERT(kSmiValueSize == 31);
|
| + testl(src, SmiToImmediate(source));
|
| + }
|
| +}
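|
| Test reads at offset kIntSize in the 32-bit case because the payload occupies the high half of the 8-byte field, so the low four bytes can be skipped; in the 31-bit layout the whole tagged value sits in the low 32 bits, hence the full-operand testl against a tagged immediate. Byte picture (illustrative, little-endian, v = payload bytes, s = sign extension):
|
| // kSmiValueSize == 32: [ 00 00 00 00 | v0 v1 v2 v3 ] -> test bytes 4..7
|
| // kSmiValueSize == 31: [ v0 v1 v2 v3 | s s s s ] -> test bytes 0..3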
|
| +
|
| +
|
| +void MacroAssembler::TestBit(const Operand& src, int bits) {
|
| + int byte_offset = bits / kBitsPerByte;
|
| + int bit_in_byte = bits & (kBitsPerByte - 1);
|
| + testb(Operand(src, byte_offset), Immediate(1 << bit_in_byte));
|
| }
|
|
|
|
|
| +// End of smi tagging, untagging and tag detection.
|
| +// ----------------------------------------------------------------------------
|
| +
|
| +
|
| void MacroAssembler::JumpIfNotString(Register object,
|
| Register object_map,
|
| Label* not_string,
|
| @@ -2478,18 +2948,6 @@ void MacroAssembler::Drop(int stack_elements) {
|
| }
|
|
|
|
|
| -void MacroAssembler::Test(const Operand& src, Smi* source) {
|
| - testl(Operand(src, kIntSize), Immediate(source->value()));
|
| -}
|
| -
|
| -
|
| -void MacroAssembler::TestBit(const Operand& src, int bits) {
|
| - int byte_offset = bits / kBitsPerByte;
|
| - int bit_in_byte = bits & (kBitsPerByte - 1);
|
| - testb(Operand(src, byte_offset), Immediate(1 << bit_in_byte));
|
| -}
|
| -
|
| -
|
| void MacroAssembler::Jump(ExternalReference ext) {
|
| LoadAddress(kScratchRegister, ext);
|
| jmp(kScratchRegister);
|
|
|