Index: src/x64/code-stubs-x64.cc
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index fe333e938fb7e91bfe012d7fa69786b08b1dd36d..b070d054f165d30548fb36bfb9f5918dc1ffbf77 100644
--- a/src/x64/code-stubs-x64.cc
+++ b/src/x64/code-stubs-x64.cc
@@ -1040,7 +1040,7 @@ void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
   const int kParameterMapHeaderSize =
       FixedArray::kHeaderSize + 2 * kPointerSize;
   Label no_parameter_map;
-  __ xor_(r8, r8);
+  __ xorp(r8, r8);
   __ testp(rbx, rbx);
   __ j(zero, &no_parameter_map, Label::kNear);
   __ leap(r8, Operand(rbx, times_pointer_size, kParameterMapHeaderSize));
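
Every hunk in this patch makes the same mechanical substitution: the suffix-less integer emitters (xor_, not_, and_, or_) become their pointer-width counterparts (xorp, notp, andp, orp). A minimal sketch of what the operand-size suffix convention expresses, using plain functions with hypothetical names rather than V8's actual emitter signatures:

#include <cstdint>

// Sketch only: 'l' forms act on 32-bit operands, 'q' forms on 64-bit
// operands, and 'p' forms on the platform's pointer width, so stub
// code that manipulates tagged pointers reads the same on 32- and
// 64-bit targets.
inline uint32_t  xorl(uint32_t a, uint32_t b)   { return a ^ b; }
inline uint64_t  xorq(uint64_t a, uint64_t b)   { return a ^ b; }
inline uintptr_t xorp(uintptr_t a, uintptr_t b) { return a ^ b; }

In the hunk above, xorp(r8, r8) is the usual xor-zeroing idiom, now at explicit pointer width.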
@@ -1839,7 +1839,7 @@ void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
   __ JumpIfNotBothSmi(rax, rdx, &non_smi);
   __ subp(rdx, rax);
   __ j(no_overflow, &smi_done);
-  __ not_(rdx);  // Correct sign in case of overflow. rdx cannot be 0 here.
+  __ notp(rdx);  // Correct sign in case of overflow. rdx cannot be 0 here.
   __ bind(&smi_done);
   __ movp(rax, rdx);
   __ ret(0);
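
The NOT on overflow is a classic trick. The stub orders two smis by computing rdx - rax; when that subtraction overflows, the result is nonzero but has the wrong sign, and bitwise NOT flips the sign bit while keeping the value nonzero, which is all a three-way comparison result needs. A sketch of the same logic in plain C++, assuming smi-range inputs (the function name is illustrative):

#include <cstdint>

int64_t CompareSmis(int64_t left, int64_t right) {
  // Subtract as unsigned to sidestep signed-overflow UB in C++.
  uint64_t diff = static_cast<uint64_t>(left) - static_cast<uint64_t>(right);
  // Signed overflow occurred iff the operands had opposite signs and
  // the raw difference disagrees in sign with the left operand.
  bool overflow = ((left ^ right) < 0) &&
                  ((left ^ static_cast<int64_t>(diff)) < 0);
  if (overflow) diff = ~diff;  // flip the sign; never 0 for smi inputs
  return static_cast<int64_t>(diff);  // <0, ==0, >0 orders left vs. right
}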
@@ -3119,7 +3119,7 @@ void StringHelper::GenerateCopyCharactersREP(MacroAssembler* masm,
 
   // Find number of bytes left.
   __ movl(count, kScratchRegister);
-  __ and_(count, Immediate(kPointerSize - 1));
+  __ andp(count, Immediate(kPointerSize - 1));
 
   // Check if there are more bytes to copy.
   __ bind(&last_bytes);
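
The AND computes the tail of the copy: kPointerSize is a power of two, so count & (kPointerSize - 1) is count % kPointerSize without a division, i.e. the bytes left over after rep movsq has moved whole words. A small sketch (the helper name is illustrative):

#include <cstdint>

constexpr int kPointerSize = 8;  // x64

// Bytes remaining after copying in pointer-sized chunks.
inline int TailBytes(int byte_count) {
  return byte_count & (kPointerSize - 1);  // same as byte_count % 8
}

// Example: TailBytes(29) == 5 -- three full 8-byte words go through
// rep movsq, and the last five bytes are copied individually.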
@@ -3848,7 +3848,7 @@ void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
   __ subp(rdx, rax);
   __ j(no_overflow, &done, Label::kNear);
   // Correct sign of result in case of overflow.
-  __ not_(rdx);
+  __ notp(rdx);
   __ bind(&done);
   __ movp(rax, rdx);
 }
@@ -3957,7 +3957,7 @@ void ICCompareStub::GenerateInternalizedStrings(MacroAssembler* masm) {
   __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
   __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
   STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
-  __ or_(tmp1, tmp2);
+  __ orp(tmp1, tmp2);
   __ testb(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
   __ j(not_zero, &miss, Label::kNear);
 
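The OR-then-test works because both tags being checked are zero (the STATIC_ASSERT pins kInternalizedTag == 0 and kStringTag == 0): any offending bit in either instance-type byte survives the OR, so a single testb validates both operands at once. A sketch; the mask values mirror V8's layout of the era but should be read as assumptions:

#include <cstdint>

constexpr uint8_t kIsNotStringMask = 0x80;        // assumed value
constexpr uint8_t kIsNotInternalizedMask = 0x40;  // assumed value

// True iff both instance types describe internalized strings.
inline bool BothInternalizedStrings(uint8_t type1, uint8_t type2) {
  // "Good" bits are zero, so OR preserves any bad bit from either side.
  return ((type1 | type2) & (kIsNotStringMask | kIsNotInternalizedMask)) == 0;
}

The same combine-then-test idea appears in the two GenerateStrings hunks below.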
@@ -4047,7 +4047,7 @@ void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
   __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
   __ movp(tmp3, tmp1);
   STATIC_ASSERT(kNotStringTag != 0);
-  __ or_(tmp3, tmp2);
+  __ orp(tmp3, tmp2);
   __ testb(tmp3, Immediate(kIsNotStringMask));
   __ j(not_zero, &miss);
 
@@ -4069,7 +4069,7 @@ void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
   if (equality) {
     Label do_compare;
     STATIC_ASSERT(kInternalizedTag == 0);
-    __ or_(tmp1, tmp2);
+    __ orp(tmp1, tmp2);
     __ testb(tmp1, Immediate(kIsNotInternalizedMask));
     __ j(not_zero, &do_compare, Label::kNear);
     // Make sure rax is non-zero. At this point input operands are
@@ -4193,7 +4193,7 @@ void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
     // Capacity is smi 2^n.
     __ SmiToInteger32(index, FieldOperand(properties, kCapacityOffset));
     __ decl(index);
-    __ and_(index,
+    __ andp(index,
             Immediate(name->Hash() + NameDictionary::GetProbeOffset(i)));
 
     // Scale the index by multiplying by the entry size.
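
Capacity is a power of two, so capacity - 1 acts as a bit mask and each inline probe lands at (hash + probe offset) & (capacity - 1). A sketch of the index computation, assuming GetProbeOffset yields the triangular sequence i * (i + 1) / 2:

#include <cstdint>

// One probe of an open-addressed table with power-of-two capacity.
inline uint32_t ProbeIndex(uint32_t hash, uint32_t capacity, uint32_t i) {
  uint32_t probe_offset = (i + i * i) >> 1;  // 0, 1, 3, 6, 10, ...
  return (hash + probe_offset) & (capacity - 1);
}

Triangular offsets visit every slot of a power-of-two table, so the stub can do a fixed number of inline probes and leave the rest to the runtime.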
@@ -4264,7 +4264,7 @@ void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
     if (i > 0) {
       __ addl(r1, Immediate(NameDictionary::GetProbeOffset(i)));
     }
-    __ and_(r1, r0);
+    __ andp(r1, r0);
 
     // Scale the index by multiplying by the entry size.
     ASSERT(NameDictionary::kEntrySize == 3);
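
The scaling step multiplies the probe index by the entry size. A sketch of the arithmetic; the three-field entry layout (key, value, details) is stated here as an assumption:

#include <cstdint>

// index * 3 without a multiply: the assembler gets this for free as
// lea(r, Operand(r, r, times_2, 0)), i.e. r + 2 * r.
inline uint32_t EntryToIndex(uint32_t entry) {
  return entry + entry * 2;  // NameDictionary::kEntrySize == 3
}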
@@ -4325,7 +4325,7 @@ void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
     if (i > 0) {
       __ addl(scratch, Immediate(NameDictionary::GetProbeOffset(i)));
     }
-    __ and_(scratch, Operand(rsp, 0));
+    __ andp(scratch, Operand(rsp, 0));
 
     // Scale the index by multiplying by the entry size.
     ASSERT(NameDictionary::kEntrySize == 3);
@@ -4504,7 +4504,7 @@ void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
   Label need_incremental_pop_object;
 
   __ movp(regs_.scratch0(), Immediate(~Page::kPageAlignmentMask));
-  __ and_(regs_.scratch0(), regs_.object());
+  __ andp(regs_.scratch0(), regs_.object());
   __ movp(regs_.scratch1(),
           Operand(regs_.scratch0(),
                   MemoryChunk::kWriteBarrierCounterOffset));
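
Heap pages are aligned to a power-of-two size, so ANDing an object's address with ~kPageAlignmentMask rounds it down to the start of its page, where the MemoryChunk header (including the write-barrier counter loaded above) lives. A sketch, with the 1 MB page size stated as an assumption:

#include <cstdint>

constexpr uintptr_t kPageSizeBits = 20;  // assumed: 1 MB pages
constexpr uintptr_t kPageAlignmentMask = (uintptr_t{1} << kPageSizeBits) - 1;

// Start of the page (hence the MemoryChunk header) for any address in it.
inline uintptr_t PageStart(uintptr_t object_address) {
  return object_address & ~kPageAlignmentMask;
}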
@@ -4942,7 +4942,7 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) {
   __ movp(rdx, FieldOperand(rbx, AllocationSite::kTransitionInfoOffset));
   __ SmiToInteger32(rdx, rdx);
   STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
-  __ and_(rdx, Immediate(AllocationSite::ElementsKindBits::kMask));
+  __ andp(rdx, Immediate(AllocationSite::ElementsKindBits::kMask));
   GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
 
   __ bind(&no_info);
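
Because ElementsKindBits starts at bit 0 of the transition info (the STATIC_ASSERT guarantees kShift == 0), masking alone decodes the field and no shift is needed. A sketch with an assumed field width:

#include <cstdint>

constexpr uint32_t kElementsKindBitsMask = (1u << 5) - 1;  // assumed 5-bit field

// BitField decode when the field sits at bit 0: mask only.
inline uint32_t DecodeElementsKind(uint32_t transition_info) {
  return transition_info & kElementsKindBitsMask;
}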
@@ -5016,7 +5016,7 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
   // Load the map's "bit field 2" into |result|. We only need the first byte,
   // but the following masking takes care of that anyway.
   __ movzxbp(rcx, FieldOperand(rcx, Map::kBitField2Offset));
   // Retrieve elements_kind from bit field 2.
-  __ and_(rcx, Immediate(Map::kElementsKindMask));
+  __ andp(rcx, Immediate(Map::kElementsKindMask));
   __ shr(rcx, Immediate(Map::kElementsKindShift));
   if (FLAG_debug_code) {
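
In contrast to the previous hunk, the elements kind does not start at bit 0 of bit field 2, so here the stub masks the field in place and then shifts it down; the mask therefore has to cover the field at its shifted position. A sketch with assumed shift and width:

#include <cstdint>

constexpr uint32_t kElementsKindShift = 3;  // assumed
constexpr uint32_t kElementsKindMask =
    0x1fu << kElementsKindShift;  // in-place mask over an assumed 5-bit field

inline uint32_t ElementsKindFromBitField2(uint32_t bit_field2) {
  return (bit_field2 & kElementsKindMask) >> kElementsKindShift;
}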