Index: src/x64/macro-assembler-x64.cc
diff --git a/src/x64/macro-assembler-x64.cc b/src/x64/macro-assembler-x64.cc
index caf9b204e902a4eb81abf8e88b63350a4f7c16cd..7ea97d40da46455bc3dfbf8ffebb3cabe93e1843 100644
--- a/src/x64/macro-assembler-x64.cc
+++ b/src/x64/macro-assembler-x64.cc
@@ -276,10 +276,10 @@ void MacroAssembler::InNewSpace(Register object,
     // and the running system.
     if (scratch.is(object)) {
       Move(kScratchRegister, ExternalReference::new_space_mask(isolate()));
-      and_(scratch, kScratchRegister);
+      andp(scratch, kScratchRegister);
     } else {
       Move(scratch, ExternalReference::new_space_mask(isolate()));
-      and_(scratch, object);
+      andp(scratch, object);
     }
     Move(kScratchRegister, ExternalReference::new_space_start(isolate()));
     cmpp(scratch, kScratchRegister);
@@ -295,7 +295,7 @@ void MacroAssembler::InNewSpace(Register object,
     } else {
       leap(scratch, Operand(object, kScratchRegister, times_1, 0));
     }
-    and_(scratch,
+    andp(scratch,
          Immediate(static_cast<int32_t>(isolate()->heap()->NewSpaceMask())));
     j(cc, branch, distance);
   }
@@ -576,7 +576,7 @@ void MacroAssembler::IndexFromHash(Register hash, Register index) {
   // the slow case, converting the key to a smi is always valid.
   // key: string key
   // hash: key's hash field, including its array index value.
-  and_(hash, Immediate(String::kArrayIndexValueMask));
+  andp(hash, Immediate(String::kArrayIndexValueMask));
   shr(hash, Immediate(String::kHashShift));
   // Here we actually clobber the key which will be used if calling into
   // runtime later. However as the new key is the numeric value of a string key
@@ -1002,7 +1002,7 @@ void MacroAssembler::SafeMove(Register dst, Smi* src) {
   if (IsUnsafeInt(src->value()) && jit_cookie() != 0) {
     Move(dst, Smi::FromInt(src->value() ^ jit_cookie()));
     Move(kScratchRegister, Smi::FromInt(jit_cookie()));
-    xor_(dst, kScratchRegister);
+    xorq(dst, kScratchRegister);
   } else {
     Move(dst, src);
   }
@@ -1014,7 +1014,7 @@ void MacroAssembler::SafePush(Smi* src) {
   if (IsUnsafeInt(src->value()) && jit_cookie() != 0) {
     Push(Smi::FromInt(src->value() ^ jit_cookie()));
     Move(kScratchRegister, Smi::FromInt(jit_cookie()));
-    xor_(Operand(rsp, 0), kScratchRegister);
+    xorq(Operand(rsp, 0), kScratchRegister);
   } else {
     Push(src);
   }
@@ -1255,12 +1255,12 @@ void MacroAssembler::SmiOrIfSmis(Register dst, Register src1, Register src2,
     ASSERT(!src1.is(kScratchRegister));
     ASSERT(!src2.is(kScratchRegister));
     movp(kScratchRegister, src1);
-    or_(kScratchRegister, src2);
+    orp(kScratchRegister, src2);
     JumpIfNotSmi(kScratchRegister, on_not_smis, near_jump);
     movp(dst, kScratchRegister);
   } else {
     movp(dst, src1);
-    or_(dst, src2);
+    orp(dst, src2);
     JumpIfNotSmi(dst, on_not_smis, near_jump);
   }
 }
@@ -1307,7 +1307,7 @@ Condition MacroAssembler::CheckBothNonNegativeSmi(Register first,
     return CheckNonNegativeSmi(first);
   }
   movp(kScratchRegister, first);
-  or_(kScratchRegister, second);
+  orp(kScratchRegister, second);
   rol(kScratchRegister, Immediate(1));
   testl(kScratchRegister, Immediate(3));
   return zero;
@@ -1799,7 +1799,7 @@ void MacroAssembler::SmiMul(Register dst,
     j(not_zero, &correct_result, Label::kNear);
 
     movp(dst, kScratchRegister);
-    xor_(dst, src2);
+    xorp(dst, src2);
     // Result was positive zero.
     j(positive, &zero_correct_result, Label::kNear);
 
@@ -1823,7 +1823,7 @@ void MacroAssembler::SmiMul(Register dst,
     // One of src1 and src2 is zero, the check whether the other is
     // negative.
     movp(kScratchRegister, src1);
-    xor_(kScratchRegister, src2);
+    xorp(kScratchRegister, src2);
     j(negative, on_not_smi_result, near_jump);
     bind(&correct_result);
   }
@@ -1955,11 +1955,11 @@ void MacroAssembler::SmiNot(Register dst, Register src) {
   // Set tag and padding bits before negating, so that they are zero afterwards.
   movl(kScratchRegister, Immediate(~0));
   if (dst.is(src)) {
-    xor_(dst, kScratchRegister);
+    xorp(dst, kScratchRegister);
   } else {
     leap(dst, Operand(src, kScratchRegister, times_1, 0));
   }
-  not_(dst);
+  notp(dst);
 }
 
 
@@ -1968,7 +1968,7 @@ void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
   if (!dst.is(src1)) {
     movp(dst, src1);
   }
-  and_(dst, src2);
+  andp(dst, src2);
 }
 
 
@@ -1978,10 +1978,10 @@ void MacroAssembler::SmiAndConstant(Register dst, Register src, Smi* constant) {
   } else if (dst.is(src)) {
     ASSERT(!dst.is(kScratchRegister));
     Register constant_reg = GetSmiConstant(constant);
-    and_(dst, constant_reg);
+    andp(dst, constant_reg);
   } else {
     LoadSmiConstant(dst, constant);
-    and_(dst, src);
+    andp(dst, src);
   }
 }
 
@@ -1991,7 +1991,7 @@ void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) {
     ASSERT(!src1.is(src2));
     movp(dst, src1);
   }
-  or_(dst, src2);
+  orp(dst, src2);
 }
 
 
@@ -1999,10 +1999,10 @@ void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) {
   if (dst.is(src)) {
     ASSERT(!dst.is(kScratchRegister));
     Register constant_reg = GetSmiConstant(constant);
-    or_(dst, constant_reg);
+    orp(dst, constant_reg);
   } else {
     LoadSmiConstant(dst, constant);
-    or_(dst, src);
+    orp(dst, src);
   }
 }
 
@@ -2012,7 +2012,7 @@ void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) {
     ASSERT(!src1.is(src2));
     movp(dst, src1);
   }
-  xor_(dst, src2);
+  xorp(dst, src2);
 }
 
 
@@ -2020,10 +2020,10 @@ void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) {
   if (dst.is(src)) {
     ASSERT(!dst.is(kScratchRegister));
     Register constant_reg = GetSmiConstant(constant);
-    xor_(dst, constant_reg);
+    xorp(dst, constant_reg);
   } else {
     LoadSmiConstant(dst, constant);
-    xor_(dst, src);
+    xorp(dst, src);
   }
 }
 
@@ -2083,7 +2083,7 @@ void MacroAssembler::SmiShiftLeft(Register dst,
   }
   SmiToInteger32(rcx, src2);
   // Shift amount specified by lower 5 bits, not six as the shl opcode.
-  and_(rcx, Immediate(0x1f));
+  andq(rcx, Immediate(0x1f));
   shl_cl(dst);
 }
 
@@ -2172,7 +2172,7 @@ void MacroAssembler::SelectNonSmi(Register dst,
   STATIC_ASSERT(kSmiTag == 0);
   ASSERT_EQ(0, Smi::FromInt(0));
   movl(kScratchRegister, Immediate(kSmiTagMask));
-  and_(kScratchRegister, src1);
+  andp(kScratchRegister, src1);
   testl(kScratchRegister, src2);
   // If non-zero then both are smis.
   j(not_zero, on_not_smis, near_jump);
@@ -2183,10 +2183,10 @@ void MacroAssembler::SelectNonSmi(Register dst,
   subp(kScratchRegister, Immediate(1));
   // If src1 is a smi, then scratch register all 1s, else it is all 0s.
   movp(dst, src1);
-  xor_(dst, src2);
-  and_(dst, kScratchRegister);
+  xorp(dst, src2);
+  andp(dst, kScratchRegister);
   // If src1 is a smi, dst holds src1 ^ src2, else it is zero.
-  xor_(dst, src1);
+  xorp(dst, src1);
   // If src1 is a smi, dst is src2, else it is src1, i.e., the non-smi.
 }
 
@@ -2263,7 +2263,7 @@ void MacroAssembler::PopInt64AsTwoSmis(Register dst, Register scratch) {
   shr(dst, Immediate(kSmiShift));
   // High bits.
   shl(dst, Immediate(64 - kSmiShift));
-  or_(dst, scratch);
+  orp(dst, scratch);
 }
 
 
@@ -2309,8 +2309,8 @@ void MacroAssembler::LookupNumberStringCache(Register object,
 
   STATIC_ASSERT(8 == kDoubleSize);
   movl(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4));
-  xor_(scratch, FieldOperand(object, HeapNumber::kValueOffset));
-  and_(scratch, mask);
+  xorp(scratch, FieldOperand(object, HeapNumber::kValueOffset));
+  andp(scratch, mask);
   // Each entry in string cache consists of two pointer sized fields,
   // but times_twice_pointer_size (multiplication by 16) scale factor
   // is not supported by addrmode on x64 platform.
@@ -2333,7 +2333,7 @@ void MacroAssembler::LookupNumberStringCache(Register object,
 
   bind(&is_smi);
   SmiToInteger32(scratch, object);
-  and_(scratch, mask);
+  andp(scratch, mask);
   // Each entry in string cache consists of two pointer sized fields,
   // but times_twice_pointer_size (multiplication by 16) scale factor
   // is not supported by addrmode on x64 platform.
@@ -3341,7 +3341,7 @@ void MacroAssembler::EnumLength(Register dst, Register map) {
   STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
   movp(dst, FieldOperand(map, Map::kBitField3Offset));
   Move(kScratchRegister, Smi::FromInt(Map::EnumLengthBits::kMask));
-  and_(dst, kScratchRegister);
+  andp(dst, kScratchRegister);
 }
 
 
@@ -3842,7 +3842,7 @@ void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space,
   if (kFrameAlignment > 0) {
     ASSERT(IsPowerOf2(kFrameAlignment));
     ASSERT(is_int8(kFrameAlignment));
-    and_(rsp, Immediate(-kFrameAlignment));
+    andp(rsp, Immediate(-kFrameAlignment));
   }
 
   // Patch the saved entry sp.
@@ -4068,7 +4068,7 @@ void MacroAssembler::LoadFromNumberDictionary(Label* miss,
     if (i > 0) {
       addl(r2, Immediate(SeededNumberDictionary::GetProbeOffset(i)));
     }
-    and_(r2, r1);
+    andp(r2, r1);
 
     // Scale the index by multiplying by the entry size.
     ASSERT(SeededNumberDictionary::kEntrySize == 3);
@@ -4293,7 +4293,7 @@ void MacroAssembler::UndoAllocationInNewSpace(Register object) {
       ExternalReference::new_space_allocation_top_address(isolate());
 
   // Make sure the object has no tag before resetting top.
-  and_(object, Immediate(~kHeapObjectTagMask));
+  andp(object, Immediate(~kHeapObjectTagMask));
   Operand top_operand = ExternalOperand(new_space_allocation_top);
 #ifdef DEBUG
   cmpp(object, top_operand);
@@ -4329,7 +4329,7 @@ void MacroAssembler::AllocateTwoByteString(Register result,
   // scratch1 = length * 2 + kObjectAlignmentMask.
   leap(scratch1, Operand(length, length, times_1, kObjectAlignmentMask +
                 kHeaderAlignment));
-  and_(scratch1, Immediate(~kObjectAlignmentMask));
+  andp(scratch1, Immediate(~kObjectAlignmentMask));
   if (kHeaderAlignment > 0) {
     subp(scratch1, Immediate(kHeaderAlignment));
   }
@@ -4367,7 +4367,7 @@ void MacroAssembler::AllocateAsciiString(Register result,
   movl(scratch1, length);
   ASSERT(kCharSize == 1);
   addp(scratch1, Immediate(kObjectAlignmentMask + kHeaderAlignment));
-  and_(scratch1, Immediate(~kObjectAlignmentMask));
+  andp(scratch1, Immediate(~kObjectAlignmentMask));
   if (kHeaderAlignment > 0) {
     subp(scratch1, Immediate(kHeaderAlignment));
   }
@@ -4720,7 +4720,7 @@ void MacroAssembler::PrepareCallCFunction(int num_arguments) {
   int argument_slots_on_stack =
       ArgumentStackSlotsForCFunctionCall(num_arguments);
   subp(rsp, Immediate((argument_slots_on_stack + 1) * kRegisterSize));
-  and_(rsp, Immediate(-frame_alignment));
+  andp(rsp, Immediate(-frame_alignment));
   movp(Operand(rsp, argument_slots_on_stack * kRegisterSize), kScratchRegister);
 }
 
@@ -4789,10 +4789,10 @@ void MacroAssembler::CheckPageFlag(
     Label::Distance condition_met_distance) {
   ASSERT(cc == zero || cc == not_zero);
   if (scratch.is(object)) {
-    and_(scratch, Immediate(~Page::kPageAlignmentMask));
+    andp(scratch, Immediate(~Page::kPageAlignmentMask));
   } else {
     movp(scratch, Immediate(~Page::kPageAlignmentMask));
-    and_(scratch, object);
+    andp(scratch, object);
   }
   if (mask < (1 << kBitsPerByte)) {
     testb(Operand(scratch, MemoryChunk::kFlagsOffset),
@@ -4811,7 +4811,7 @@ void MacroAssembler::CheckMapDeprecated(Handle<Map> map,
     Move(scratch, map);
     movp(scratch, FieldOperand(scratch, Map::kBitField3Offset));
     SmiToInteger32(scratch, scratch);
-    and_(scratch, Immediate(Map::Deprecated::kMask));
+    andp(scratch, Immediate(Map::Deprecated::kMask));
     j(not_zero, if_deprecated);
   }
 }
@@ -4833,7 +4833,7 @@ void MacroAssembler::JumpIfBlack(Register object,
   // rcx = mask | (mask << 1).
   leap(rcx, Operand(mask_scratch, mask_scratch, times_2, 0));
   // Note that we are using a 4-byte aligned 8-byte load.
-  and_(rcx, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
+  andp(rcx, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
   cmpp(mask_scratch, rcx);
   j(equal, on_black, on_black_distance);
 }
@@ -4868,19 +4868,19 @@ void MacroAssembler::GetMarkBits(Register addr_reg,
   ASSERT(!AreAliased(addr_reg, bitmap_reg, mask_reg, rcx));
   movp(bitmap_reg, addr_reg);
   // Sign extended 32 bit immediate.
-  and_(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
+  andp(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
   movp(rcx, addr_reg);
   int shift =
       Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2;
   shrl(rcx, Immediate(shift));
-  and_(rcx,
+  andp(rcx,
        Immediate((Page::kPageAlignmentMask >> shift) &
                  ~(Bitmap::kBytesPerCell - 1)));
 
   addp(bitmap_reg, rcx);
   movp(rcx, addr_reg);
   shrl(rcx, Immediate(kPointerSizeLog2));
-  and_(rcx, Immediate((1 << Bitmap::kBitsPerCellLog2) - 1));
+  andp(rcx, Immediate((1 << Bitmap::kBitsPerCellLog2) - 1));
   movl(mask_reg, Immediate(1));
   shl_cl(mask_reg);
 }
@@ -4961,21 +4961,21 @@ void MacroAssembler::EnsureNotWhite(
   bind(&not_external);
   // Sequential string, either ASCII or UC16.
   ASSERT(kOneByteStringTag == 0x04);
-  and_(length, Immediate(kStringEncodingMask));
-  xor_(length, Immediate(kStringEncodingMask));
+  andp(length, Immediate(kStringEncodingMask));
+  xorp(length, Immediate(kStringEncodingMask));
   addp(length, Immediate(0x04));
   // Value now either 4 (if ASCII) or 8 (if UC16), i.e. char-size shifted by 2.
   imulp(length, FieldOperand(value, String::kLengthOffset));
   shr(length, Immediate(2 + kSmiTagSize + kSmiShiftSize));
   addp(length, Immediate(SeqString::kHeaderSize + kObjectAlignmentMask));
-  and_(length, Immediate(~kObjectAlignmentMask));
+  andp(length, Immediate(~kObjectAlignmentMask));
 
   bind(&is_data_object);
   // Value is a data object, and it is white. Mark it black. Since we know
   // that the object is white we can make it black by flipping one bit.
-  or_(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);
+  orp(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);
 
-  and_(bitmap_scratch, Immediate(~Page::kPageAlignmentMask));
+  andp(bitmap_scratch, Immediate(~Page::kPageAlignmentMask));
   addl(Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset), length);
 
   bind(&done);
@@ -5064,7 +5064,7 @@ void MacroAssembler::JumpIfDictionaryInPrototypeChain(
   bind(&loop_again);
   movp(current, FieldOperand(current, HeapObject::kMapOffset));
   movp(scratch1, FieldOperand(current, Map::kBitField2Offset));
-  and_(scratch1, Immediate(Map::kElementsKindMask));
+  andp(scratch1, Immediate(Map::kElementsKindMask));
   shr(scratch1, Immediate(Map::kElementsKindShift));
   cmpp(scratch1, Immediate(DICTIONARY_ELEMENTS));
   j(equal, found);
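
The rename above is mechanical, but the suffixes carry meaning. In this macro assembler's naming scheme, an "l" suffix selects a 32-bit operand, "q" a 64-bit operand, and "p" a pointer-sized operand that resolves to the "l" form on x32 and the "q" form on x64. The width-ambiguous names (and_, or_, xor_, not_) are replaced here so every call site states its operand width: most sites become pointer-sized (andp, orp, xorp, notp), matching the movp/cmpp/leap calls already visible in the context lines, while a few stay explicitly 64-bit (xorq for the jit-cookie masking in SafeMove/SafePush, andq for the shift-count mask in SmiShiftLeft). Below is a minimal sketch of the convention only; Assembler, Register, and emit_and() are hypothetical stand-ins, not V8's actual declarations or dispatch machinery.

// Illustrative stand-ins; real register encodings and instruction emission
// are elided.
struct Register { int code; };

class Assembler {
 public:
  // 4 on an x32 build, 8 on a standard x64 build.
  static const int kPointerSize = sizeof(void*);

  void andl(Register dst, Register src) { emit_and(dst, src, 4); }  // 32-bit
  void andq(Register dst, Register src) { emit_and(dst, src, 8); }  // 64-bit

  // Pointer-size variant: the operand width is fixed once by the build
  // target instead of being restated at every call site.
  void andp(Register dst, Register src) {
    if (kPointerSize == 8) {
      andq(dst, src);
    } else {
      andl(dst, src);
    }
  }

 private:
  void emit_and(Register dst, Register src, int operand_size) {
    // A real assembler would emit REX/opcode/ModRM bytes here; a 64-bit
    // operand size is what adds the REX.W prefix to the encoding.
    (void)dst;
    (void)src;
    (void)operand_size;
  }
};

Spelling each site as andp/andq/andl keeps the x32-versus-x64 width decision in one place while making the intent of every masked operation explicit at the point of use, which is what this patch does for the and/or/xor/not family.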
|