Index: src/x64/macro-assembler-x64.cc
diff --git a/src/x64/macro-assembler-x64.cc b/src/x64/macro-assembler-x64.cc
index 9c9b1620e586ce5872bbc46de2eaf515266716a0..13d7ddaa685bf92e16a4adab1faa1dc17fee4aae 100644
--- a/src/x64/macro-assembler-x64.cc
+++ b/src/x64/macro-assembler-x64.cc
@@ -155,7 +155,7 @@ int MacroAssembler::LoadAddressSize(ExternalReference source) {
}
}
// Size of movq(destination, src);
- return Assembler::kMoveAddressIntoScratchRegisterInstructionLength;
+ return 10;
}
@@ -449,8 +449,8 @@ void MacroAssembler::RecordWrite(Register object,
}
-void MacroAssembler::Assert(Condition cc, BailoutReason reason) {
- if (emit_debug_code()) Check(cc, reason);
+void MacroAssembler::Assert(Condition cc, const char* msg) {
+ if (emit_debug_code()) Check(cc, msg);
}
@@ -466,16 +466,16 @@ void MacroAssembler::AssertFastElements(Register elements) {
CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
Heap::kFixedCOWArrayMapRootIndex);
j(equal, &ok, Label::kNear);
- Abort(kJSObjectWithFastElementsMapHasSlowElements);
+ Abort("JSObject with fast elements map has slow elements");
bind(&ok);
}
}
-void MacroAssembler::Check(Condition cc, BailoutReason reason) {
+void MacroAssembler::Check(Condition cc, const char* msg) {
Label L;
j(cc, &L, Label::kNear);
- Abort(reason);
+ Abort(msg);
// Control will not return here.
bind(&L);
}
@@ -508,13 +508,12 @@ void MacroAssembler::NegativeZeroTest(Register result,
}
-void MacroAssembler::Abort(BailoutReason reason) {
+void MacroAssembler::Abort(const char* msg) {
// We want to pass the msg string like a smi to avoid GC
// problems, however msg is not guaranteed to be aligned
// properly. Instead, we pass an aligned pointer that is
// a proper v8 smi, but also pass the alignment difference
// from the real pointer as a smi.
- const char* msg = GetBailoutReason(reason);
intptr_t p1 = reinterpret_cast<intptr_t>(msg);
intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
// Note: p0 might not be a valid Smi _value_, but it has a valid Smi tag.
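// ---------------------------------------------------------------------------
// Editor's aside (illustrative sketch, not part of this patch): the p0/p1
// computation above round-trips an arbitrarily aligned char pointer through
// two GC-safe values. The constants below are simplified stand-ins for V8's
// kSmiTag/kSmiTagMask; on x64 the smi tag occupies the low bit and is 0.
#include <cstdint>
#include <cstdio>

const intptr_t kSmiTag = 0;
const intptr_t kSmiTagMask = 1;

int main() {
  const char* msg = "Smi addition overflow";

  // Encode: p0 is msg with its tag bit cleared, so it carries a valid smi
  // tag even if it is not a meaningful smi value; the alignment difference
  // p1 - p0 is a tiny integer that can be passed as a genuine smi alongside.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  intptr_t delta = p1 - p0;

  // Decode: the receiving side adds the difference back to recover msg.
  const char* recovered = reinterpret_cast<const char*>(p0 + delta);
  std::printf("%s\n", recovered);  // Prints "Smi addition overflow".
  return 0;
}
// ---------------------------------------------------------------------------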
@@ -839,7 +838,7 @@ void MacroAssembler::CallApiFunctionAndReturn(Address function_address,
CompareRoot(return_value, Heap::kNullValueRootIndex);
j(equal, &ok, Label::kNear);
- Abort(kAPICallReturnedInvalidObject);
+ Abort("API call returned invalid object");
bind(&ok);
#endif
@@ -1039,7 +1038,7 @@ void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) {
RelocInfo::NONE64);
cmpq(dst, kSmiConstantRegister);
if (allow_stub_calls()) {
- Assert(equal, kUninitializedKSmiConstantRegister);
+ Assert(equal, "Uninitialized kSmiConstantRegister");
} else {
Label ok;
j(equal, &ok, Label::kNear);
@@ -1107,7 +1106,7 @@ void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) {
Label ok;
j(zero, &ok, Label::kNear);
if (allow_stub_calls()) {
- Abort(kInteger32ToSmiFieldWritingToNonSmiLocation);
+ Abort("Integer32ToSmiField writing to non-smi location");
} else {
int3();
}
@@ -1690,12 +1689,12 @@ void MacroAssembler::SmiAdd(Register dst,
if (emit_debug_code()) {
movq(kScratchRegister, src1);
addq(kScratchRegister, src2);
- Check(no_overflow, kSmiAdditionOverflow);
+ Check(no_overflow, "Smi addition overflow");
}
lea(dst, Operand(src1, src2, times_1, 0));
} else {
addq(dst, src2);
- Assert(no_overflow, kSmiAdditionOverflow);
+ Assert(no_overflow, "Smi addition overflow");
}
}
@@ -1727,7 +1726,7 @@ void MacroAssembler::SmiSub(Register dst, Register src1, Register src2) {
movq(dst, src1);
}
subq(dst, src2);
- Assert(no_overflow, kSmiSubtractionOverflow);
+ Assert(no_overflow, "Smi subtraction overflow");
}
@@ -1759,7 +1758,7 @@ void MacroAssembler::SmiSub(Register dst,
movq(dst, src1);
}
subq(dst, src2);
- Assert(no_overflow, kSmiSubtractionOverflow);
+ Assert(no_overflow, "Smi subtraction overflow");
}
@@ -2156,7 +2155,7 @@ void MacroAssembler::SelectNonSmi(Register dst,
#ifdef DEBUG
if (allow_stub_calls()) { // Check contains a stub call.
Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2));
- Check(not_both_smis, kBothRegistersWereSmisInSelectNonSmi);
+ Check(not_both_smis, "Both registers were smis in SelectNonSmi.");
}
#endif
STATIC_ASSERT(kSmiTag == 0);
@@ -2511,8 +2510,8 @@ void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
int MacroAssembler::CallSize(ExternalReference ext) {
// Opcode for call kScratchRegister is: Rex.B FF D4 (three bytes).
- return LoadAddressSize(ext) +
- Assembler::kCallScratchRegisterInstructionLength;
+ const int kCallInstructionSize = 3;
+ return LoadAddressSize(ext) + kCallInstructionSize;
}
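// ---------------------------------------------------------------------------
// Editor's aside (illustrative sketch, not part of this patch): the literal
// sizes hard-coded in this revert fall out of the x64 encodings involved.
// LoadAddressSize returns 10 for the full-immediate case because
// movq reg, imm64 is a REX.W prefix (1 byte) + opcode B8+rd (1 byte) + the
// 64-bit immediate (8 bytes); the indirect call through the scratch register
// is 3 bytes, per the comment in CallSize. The constant names below are
// stand-ins for the deleted Assembler constants.
#include <cassert>

int main() {
  const int kMoveAddressIntoScratchRegisterInstructionLength = 1 + 1 + 8;
  const int kCallScratchRegisterInstructionLength = 3;
  assert(kMoveAddressIntoScratchRegisterInstructionLength == 10);
  // Worst-case CallSize(ext), when the address needs the full imm64 form:
  assert(kMoveAddressIntoScratchRegisterInstructionLength +
         kCallScratchRegisterInstructionLength == 13);
  return 0;
}
// ---------------------------------------------------------------------------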
@@ -2799,9 +2798,9 @@ void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
if (is_uint16(bytes_dropped)) {
ret(bytes_dropped);
} else {
- PopReturnAddressTo(scratch);
+ pop(scratch);
addq(rsp, Immediate(bytes_dropped));
- PushReturnAddressFrom(scratch);
+ push(scratch);
ret(0);
}
}
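// ---------------------------------------------------------------------------
// Editor's aside (illustrative sketch, not part of this patch): Ret() needs
// two paths because ret imm16 encodes only a 16-bit byte count; any larger
// adjustment must pop the return address, bump rsp explicitly, and re-push
// it before returning. A stand-in for the is_uint16 guard used above:
#include <cassert>
#include <cstdint>

static bool is_uint16(int64_t x) { return x == static_cast<uint16_t>(x); }

int main() {
  assert(is_uint16(0xffff));    // Largest count ret imm16 can drop directly.
  assert(!is_uint16(0x10000));  // Forces the pop/addq/push/ret(0) fallback.
  return 0;
}
// ---------------------------------------------------------------------------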
@@ -2985,7 +2984,7 @@ void MacroAssembler::LoadUint32(XMMRegister dst,
XMMRegister scratch) {
if (FLAG_debug_code) {
cmpq(src, Immediate(0xffffffff));
- Assert(below_equal, kInputGPRIsExpectedToHaveUpper32Cleared);
+ Assert(below_equal, "input GPR is expected to have upper32 cleared");
}
cvtqsi2sd(dst, src);
}
@@ -3034,7 +3033,7 @@ void MacroAssembler::AssertNumber(Register object) {
j(is_smi, &ok, Label::kNear);
Cmp(FieldOperand(object, HeapObject::kMapOffset),
isolate()->factory()->heap_number_map());
- Check(equal, kOperandIsNotANumber);
+ Check(equal, "Operand is not a number");
bind(&ok);
}
}
@@ -3043,7 +3042,7 @@ void MacroAssembler::AssertNumber(Register object) {
void MacroAssembler::AssertNotSmi(Register object) {
if (emit_debug_code()) {
Condition is_smi = CheckSmi(object);
- Check(NegateCondition(is_smi), kOperandIsASmi);
+ Check(NegateCondition(is_smi), "Operand is a smi");
}
}
@@ -3051,7 +3050,7 @@ void MacroAssembler::AssertNotSmi(Register object) {
void MacroAssembler::AssertSmi(Register object) {
if (emit_debug_code()) {
Condition is_smi = CheckSmi(object);
- Check(is_smi, kOperandIsNotASmi);
+ Check(is_smi, "Operand is not a smi");
}
}
@@ -3059,7 +3058,7 @@ void MacroAssembler::AssertSmi(Register object) {
void MacroAssembler::AssertSmi(const Operand& object) {
if (emit_debug_code()) {
Condition is_smi = CheckSmi(object);
- Check(is_smi, kOperandIsNotASmi);
+ Check(is_smi, "Operand is not a smi");
}
}
@@ -3069,7 +3068,7 @@ void MacroAssembler::AssertZeroExtended(Register int32_register) {
ASSERT(!int32_register.is(kScratchRegister));
movq(kScratchRegister, 0x100000000l, RelocInfo::NONE64);
cmpq(kScratchRegister, int32_register);
- Check(above_equal, k32BitValueInRegisterIsNotZeroExtended);
+ Check(above_equal, "32 bit value in register is not zero-extended");
}
}
@@ -3077,12 +3076,12 @@ void MacroAssembler::AssertZeroExtended(Register int32_register) {
void MacroAssembler::AssertString(Register object) {
if (emit_debug_code()) {
testb(object, Immediate(kSmiTagMask));
- Check(not_equal, kOperandIsASmiAndNotAString);
+ Check(not_equal, "Operand is a smi and not a string");
push(object);
movq(object, FieldOperand(object, HeapObject::kMapOffset));
CmpInstanceType(object, FIRST_NONSTRING_TYPE);
pop(object);
- Check(below, kOperandIsNotAString);
+ Check(below, "Operand is not a string");
}
}
@@ -3090,24 +3089,24 @@ void MacroAssembler::AssertString(Register object) {
void MacroAssembler::AssertName(Register object) {
if (emit_debug_code()) {
testb(object, Immediate(kSmiTagMask));
- Check(not_equal, kOperandIsASmiAndNotAName);
+ Check(not_equal, "Operand is a smi and not a name");
push(object);
movq(object, FieldOperand(object, HeapObject::kMapOffset));
CmpInstanceType(object, LAST_NAME_TYPE);
pop(object);
- Check(below_equal, kOperandIsNotAName);
+ Check(below_equal, "Operand is not a name");
}
}
void MacroAssembler::AssertRootValue(Register src,
Heap::RootListIndex root_value_index,
- BailoutReason reason) {
+ const char* message) {
if (emit_debug_code()) {
ASSERT(!src.is(kScratchRegister));
LoadRoot(kScratchRegister, root_value_index);
cmpq(src, kScratchRegister);
- Check(equal, reason);
+ Check(equal, message);
}
}
@@ -3458,7 +3457,7 @@ void MacroAssembler::EnterFrame(StackFrame::Type type) {
isolate()->factory()->undefined_value(),
RelocInfo::EMBEDDED_OBJECT);
cmpq(Operand(rsp, 0), kScratchRegister);
- Check(not_equal, kCodeObjectNotProperlyPatched);
+ Check(not_equal, "code object not properly patched");
}
}
@@ -3467,7 +3466,7 @@ void MacroAssembler::LeaveFrame(StackFrame::Type type) {
if (emit_debug_code()) {
Move(kScratchRegister, Smi::FromInt(type));
cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
- Check(equal, kStackFrameTypesMustMatch);
+ Check(equal, "stack frame types must match");
}
movq(rsp, rbp);
pop(rbp);
@@ -3568,7 +3567,8 @@ void MacroAssembler::LeaveExitFrame(bool save_doubles) {
// from the caller stack.
lea(rsp, Operand(r15, 1 * kPointerSize));
- PushReturnAddressFrom(rcx);
+ // Push the return address to get ready to return.
+ push(rcx);
LeaveExitFrameEpilogue();
}
@@ -3612,7 +3612,7 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
// When generating debug code, make sure the lexical context is set.
if (emit_debug_code()) {
cmpq(scratch, Immediate(0));
- Check(not_equal, kWeShouldNotHaveAnEmptyLexicalContext);
+ Check(not_equal, "we should not have an empty lexical context");
}
// Load the native context of the current context.
int offset =
@@ -3624,7 +3624,7 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
if (emit_debug_code()) {
Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
isolate()->factory()->native_context_map());
- Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
+ Check(equal, "JSGlobalObject::native_context should be a native context.");
}
// Check if both contexts are the same.
@@ -3643,12 +3643,12 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
movq(holder_reg,
FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
CompareRoot(holder_reg, Heap::kNullValueRootIndex);
- Check(not_equal, kJSGlobalProxyContextShouldNotBeNull);
+ Check(not_equal, "JSGlobalProxy::context() should not be null.");
// Read the first word and compare to native_context_map(),
movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
CompareRoot(holder_reg, Heap::kNativeContextMapRootIndex);
- Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
+ Check(equal, "JSGlobalObject::native_context should be a native context.");
pop(holder_reg);
}
@@ -3794,7 +3794,7 @@ void MacroAssembler::LoadAllocationTopHelper(Register result,
// Assert that result actually contains top on entry.
Operand top_operand = ExternalOperand(allocation_top);
cmpq(result, top_operand);
- Check(equal, kUnexpectedAllocationTop);
+ Check(equal, "Unexpected allocation top");
#endif
return;
}
@@ -3815,7 +3815,7 @@ void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
AllocationFlags flags) {
if (emit_debug_code()) {
testq(result_end, Immediate(kObjectAlignmentMask));
- Check(zero, kUnalignedAllocationInNewSpace);
+ Check(zero, "Unaligned allocation in new space");
}
ExternalReference allocation_top =
@@ -3862,7 +3862,7 @@ void MacroAssembler::Allocate(int object_size,
// always safe because the limit of the heap is always aligned.
if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) {
testq(result, Immediate(kDoubleAlignmentMask));
- Check(zero, kAllocationIsNotDoubleAligned);
+ Check(zero, "Allocation is not double aligned");
}
// Calculate new top and bail out if new space is exhausted.
@@ -3941,7 +3941,7 @@ void MacroAssembler::Allocate(Register object_size,
// always safe because the limit of the heap is always aligned.
if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) {
testq(result, Immediate(kDoubleAlignmentMask));
- Check(zero, kAllocationIsNotDoubleAligned);
+ Check(zero, "Allocation is not double aligned");
}
// Calculate new top and bail out if new space is exhausted.
@@ -3975,7 +3975,7 @@ void MacroAssembler::UndoAllocationInNewSpace(Register object) {
Operand top_operand = ExternalOperand(new_space_allocation_top);
#ifdef DEBUG
cmpq(object, top_operand);
- Check(below, kUndoAllocationOfNonAllocatedMemory);
+ Check(below, "Undo allocation of non allocated memory");
#endif
movq(top_operand, object);
}
@@ -4165,7 +4165,7 @@ void MacroAssembler::CopyBytes(Register destination,
ASSERT(min_length >= 0);
if (emit_debug_code()) {
cmpl(length, Immediate(min_length));
- Assert(greater_equal, kInvalidMinLength);
+ Assert(greater_equal, "Invalid min_length");
}
Label loop, done, short_string, short_loop;
@@ -4249,7 +4249,7 @@ void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
if (emit_debug_code()) {
CompareRoot(FieldOperand(dst, HeapObject::kMapOffset),
Heap::kWithContextMapRootIndex);
- Check(not_equal, kVariableResolvedToWithContext);
+ Check(not_equal, "Variable resolved to with context.");
}
}
@@ -4340,7 +4340,7 @@ void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
jmp(&ok);
bind(&fail);
- Abort(kGlobalFunctionsMustHaveInitialMap);
+ Abort("Global functions must have initial map");
bind(&ok);
}
}