Index: src/arm/macro-assembler-arm.cc
diff --git a/src/arm/macro-assembler-arm.cc b/src/arm/macro-assembler-arm.cc
index b9728ed04fa14aab43229acbaba69d3181165c49..cd124610f974b1c30b44335c0a27e9e3277a0932 100644
--- a/src/arm/macro-assembler-arm.cc
+++ b/src/arm/macro-assembler-arm.cc
@@ -489,7 +489,7 @@ void MacroAssembler::RecordWrite(Register object,
   if (emit_debug_code()) {
     ldr(ip, MemOperand(address));
     cmp(ip, value);
-    Check(eq, kWrongAddressOrValuePassedToRecordWrite);
+    Check(eq, "Wrong address or value passed to RecordWrite");
   }

   Label done;
@@ -1490,7 +1490,7 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
   // In debug mode, make sure the lexical context is set.
 #ifdef DEBUG
   cmp(scratch, Operand::Zero());
-  Check(ne, kWeShouldNotHaveAnEmptyLexicalContext);
+  Check(ne, "we should not have an empty lexical context");
 #endif

   // Load the native context of the current context.
@@ -1508,7 +1508,7 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
     ldr(holder_reg, FieldMemOperand(scratch, HeapObject::kMapOffset));
     LoadRoot(ip, Heap::kNativeContextMapRootIndex);
     cmp(holder_reg, ip);
-    Check(eq, kJSGlobalObjectNativeContextShouldBeANativeContext);
+    Check(eq, "JSGlobalObject::native_context should be a native context.");
     pop(holder_reg);  // Restore holder.
   }

@@ -1525,12 +1525,12 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
     mov(holder_reg, ip);  // Move ip to its holding place.
     LoadRoot(ip, Heap::kNullValueRootIndex);
     cmp(holder_reg, ip);
-    Check(ne, kJSGlobalProxyContextShouldNotBeNull);
+    Check(ne, "JSGlobalProxy::context() should not be null.");

     ldr(holder_reg, FieldMemOperand(holder_reg, HeapObject::kMapOffset));
     LoadRoot(ip, Heap::kNativeContextMapRootIndex);
     cmp(holder_reg, ip);
-    Check(eq, kJSGlobalObjectNativeContextShouldBeANativeContext);
+    Check(eq, "JSGlobalObject::native_context should be a native context.");
     // Restore ip is not needed. ip is reloaded below.
     pop(holder_reg);  // Restore holder.
     // Restore ip to holder's context.
@@ -1727,7 +1727,7 @@ void MacroAssembler::Allocate(int object_size,
     // respect to register content between debug and release mode.
     ldr(ip, MemOperand(topaddr));
     cmp(result, ip);
-    Check(eq, kUnexpectedAllocationTop);
+    Check(eq, "Unexpected allocation top");
   }
   // Load allocation limit into ip. Result already contains allocation top.
   ldr(ip, MemOperand(topaddr, limit - top));
@@ -1825,7 +1825,7 @@ void MacroAssembler::Allocate(Register object_size,
     // respect to register content between debug and release mode.
     ldr(ip, MemOperand(topaddr));
     cmp(result, ip);
-    Check(eq, kUnexpectedAllocationTop);
+    Check(eq, "Unexpected allocation top");
   }
   // Load allocation limit into ip. Result already contains allocation top.
   ldr(ip, MemOperand(topaddr, limit - top));
@@ -1859,7 +1859,7 @@ void MacroAssembler::Allocate(Register object_size,
   // Update allocation top. result temporarily holds the new top.
   if (emit_debug_code()) {
     tst(scratch2, Operand(kObjectAlignmentMask));
-    Check(eq, kUnalignedAllocationInNewSpace);
+    Check(eq, "Unaligned allocation in new space");
   }
   str(scratch2, MemOperand(topaddr));

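The tst/Check pair in the hunk above is an alignment test: tst ANDs the updated allocation top with kObjectAlignmentMask, and Check(eq, ...) fires unless every masked bit is zero. A one-line C++ equivalent, with the mask left as a parameter since its value is V8-internal:

    #include <cstdint>

    // Hypothetical helper, not in the patch: mirrors "tst scratch2, #mask"
    // followed by Check(eq, "Unaligned allocation in new space").
    bool IsObjectAligned(uintptr_t new_top, uintptr_t object_alignment_mask) {
      return (new_top & object_alignment_mask) == 0;  // all low bits clear
    }
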
@@ -1882,7 +1882,7 @@ void MacroAssembler::UndoAllocationInNewSpace(Register object,
   mov(scratch, Operand(new_space_allocation_top));
   ldr(scratch, MemOperand(scratch));
   cmp(object, scratch);
-  Check(lt, kUndoAllocationOfNonAllocatedMemory);
+  Check(lt, "Undo allocation of non allocated memory");
 #endif
   // Write the address of the object to un-allocate as the current top.
   mov(scratch, Operand(new_space_allocation_top));
@@ -2131,7 +2131,7 @@ void MacroAssembler::StoreNumberToDoubleElements(
   if (emit_debug_code()) {
     vmrs(ip);
     tst(ip, Operand(kVFPDefaultNaNModeControlBit));
-    Assert(ne, kDefaultNaNModeNotSet);
+    Assert(ne, "Default NaN mode not set");
   }
   VFPCanonicalizeNaN(double_scratch);
   b(&store);
@@ -2381,7 +2381,7 @@ void MacroAssembler::CallApiFunctionAndReturn(ExternalReference function,
   if (emit_debug_code()) {
     ldr(r1, MemOperand(r7, kLevelOffset));
     cmp(r1, r6);
-    Check(eq, kUnexpectedLevelAfterReturnFromApiCall);
+    Check(eq, "Unexpected level after return from api call");
   }
   sub(r6, r6, Operand(1));
   str(r6, MemOperand(r7, kLevelOffset));
@@ -2782,9 +2782,9 @@ void MacroAssembler::DecrementCounter(StatsCounter* counter, int value,
 }


-void MacroAssembler::Assert(Condition cond, BailoutReason reason) {
+void MacroAssembler::Assert(Condition cond, const char* msg) {
   if (emit_debug_code())
-    Check(cond, reason);
+    Check(cond, msg);
 }


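Taken together with the Check() and Abort() hunks that follow, this gives the three tiers of the failure API: Assert() only materializes when emit_debug_code() is true (the --debug-code flag), Check() is always emitted and branches around an Abort(), and Abort() never returns. A minimal C++ sketch of the same contract; note that in the real MacroAssembler the emit_debug_code() decision is made at code-generation time, not at run time as here:

    #include <cstdio>
    #include <cstdlib>

    static bool emit_debug_code = false;  // stand-in for the --debug-code flag

    void Abort(const char* msg) {             // unconditional bailout
      std::fprintf(stderr, "abort: %s\n", msg);
      std::abort();                           // "will not return here"
    }

    void Check(bool cond, const char* msg) {  // present even in release code
      if (!cond) Abort(msg);                  // b(cond, &L); Abort(msg); bind(&L);
    }

    void Assert(bool cond, const char* msg) { // emitted only for debug code
      if (emit_debug_code) Check(cond, msg);
    }
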
@@ -2803,23 +2803,23 @@ void MacroAssembler::AssertFastElements(Register elements) {
     LoadRoot(ip, Heap::kFixedCOWArrayMapRootIndex);
     cmp(elements, ip);
     b(eq, &ok);
-    Abort(kJSObjectWithFastElementsMapHasSlowElements);
+    Abort("JSObject with fast elements map has slow elements");
     bind(&ok);
     pop(elements);
   }
 }


-void MacroAssembler::Check(Condition cond, BailoutReason reason) {
+void MacroAssembler::Check(Condition cond, const char* msg) {
   Label L;
   b(cond, &L);
-  Abort(reason);
+  Abort(msg);
   // will not return here
   bind(&L);
 }


-void MacroAssembler::Abort(BailoutReason reason) {
+void MacroAssembler::Abort(const char* msg) {
   Label abort_start;
   bind(&abort_start);
   // We want to pass the msg string like a smi to avoid GC
@@ -2827,7 +2827,6 @@ void MacroAssembler::Abort(BailoutReason reason) {
   // properly. Instead, we pass an aligned pointer that is
   // a proper v8 smi, but also pass the alignment difference
   // from the real pointer as a smi.
-  const char* msg = GetBailoutReason(reason);
   intptr_t p1 = reinterpret_cast<intptr_t>(msg);
   intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
   ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
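The comment block above is the whole story behind the p0/p1 pair: the message pointer must cross into the runtime without the GC treating it as a heap pointer, so Abort() passes an aligned base that parses as a smi plus the alignment difference (itself a smi). A standalone sketch of that encoding, assuming the 32-bit smi convention kSmiTag == 0 and kSmiTagMask == 1:

    #include <cassert>
    #include <cstdint>
    #include <utility>

    // Hypothetical helpers, not in the patch.
    std::pair<intptr_t, intptr_t> EncodeMsg(const char* msg) {
      const intptr_t kSmiTagMask = 1;  // low bit set => heap pointer, clear => smi
      const intptr_t kSmiTag = 0;
      intptr_t p1 = reinterpret_cast<intptr_t>(msg);
      intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;  // clear the tag bit
      assert((p0 & kSmiTagMask) == kSmiTag);        // p0 now looks like a smi
      return {p0, p1 - p0};  // delta is 0 or 1; the real code passes it Smi-tagged
    }

    // The runtime reverses it to recover the original string.
    const char* DecodeMsg(intptr_t p0, intptr_t delta) {
      return reinterpret_cast<const char*>(p0 + delta);
    }
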
@@ -2970,7 +2969,7 @@ void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
     CheckMap(map, scratch, Heap::kMetaMapRootIndex, &fail, DO_SMI_CHECK);
     b(&ok);
     bind(&fail);
-    Abort(kGlobalFunctionsMustHaveInitialMap);
+    Abort("Global functions must have initial map");
     bind(&ok);
   }
 }
@@ -3039,7 +3038,7 @@ void MacroAssembler::AssertNotSmi(Register object) {
   if (emit_debug_code()) {
     STATIC_ASSERT(kSmiTag == 0);
     tst(object, Operand(kSmiTagMask));
-    Check(ne, kOperandIsASmi);
+    Check(ne, "Operand is a smi");
   }
 }

@@ -3048,7 +3047,7 @@ void MacroAssembler::AssertSmi(Register object) {
   if (emit_debug_code()) {
     STATIC_ASSERT(kSmiTag == 0);
     tst(object, Operand(kSmiTagMask));
-    Check(eq, kOperandIsNotSmi);
+    Check(eq, "Operand is not smi");
   }
 }

@@ -3057,12 +3056,12 @@ void MacroAssembler::AssertString(Register object) {
   if (emit_debug_code()) {
     STATIC_ASSERT(kSmiTag == 0);
     tst(object, Operand(kSmiTagMask));
-    Check(ne, kOperandIsASmiAndNotAString);
+    Check(ne, "Operand is a smi and not a string");
     push(object);
     ldr(object, FieldMemOperand(object, HeapObject::kMapOffset));
     CompareInstanceType(object, object, FIRST_NONSTRING_TYPE);
     pop(object);
-    Check(lo, kOperandIsNotAString);
+    Check(lo, "Operand is not a string");
   }
 }

@@ -3071,12 +3070,12 @@ void MacroAssembler::AssertName(Register object) {
   if (emit_debug_code()) {
     STATIC_ASSERT(kSmiTag == 0);
     tst(object, Operand(kSmiTagMask));
-    Check(ne, kOperandIsASmiAndNotAName);
+    Check(ne, "Operand is a smi and not a name");
     push(object);
     ldr(object, FieldMemOperand(object, HeapObject::kMapOffset));
     CompareInstanceType(object, object, LAST_NAME_TYPE);
     pop(object);
-    Check(le, kOperandIsNotAName);
+    Check(le, "Operand is not a name");
   }
 }

@@ -3085,7 +3084,7 @@ void MacroAssembler::AssertName(Register object) {
 void MacroAssembler::AssertIsRoot(Register reg, Heap::RootListIndex index) {
   if (emit_debug_code()) {
     CompareRoot(reg, index);
-    Check(eq, kHeapNumberMapRegisterClobbered);
+    Check(eq, "HeapNumberMap register clobbered.");
   }
 }

@@ -3231,7 +3230,7 @@ void MacroAssembler::CopyBytes(Register src,
   bind(&word_loop);
   if (emit_debug_code()) {
     tst(src, Operand(kPointerSize - 1));
-    Assert(eq, kExpectingAlignmentForCopyBytes);
+    Assert(eq, "Expecting alignment for CopyBytes");
   }
   cmp(length, Operand(kPointerSize));
   b(lt, &byte_loop);
@@ -3495,7 +3494,7 @@ void MacroAssembler::GetRelocatedValueLocation(Register ldr_location,
   // Check that the instruction is a ldr reg, [pc + offset] .
   and_(result, result, Operand(kLdrPCPattern));
   cmp(result, Operand(kLdrPCPattern));
-  Check(eq, kTheInstructionToPatchShouldBeALoadFromPc);
+  Check(eq, "The instruction to patch should be a load from pc.");
   // Result was clobbered. Restore it.
   ldr(result, MemOperand(ldr_location));
 }
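The final hunk's sanity check is a bit-mask test: an instruction word qualifies as ldr reg, [pc + offset] when all bits of kLdrPCPattern are set in it, which is what and_ followed by cmp against the same pattern establishes. In outline, with the pattern value (V8-internal) left as a parameter:

    #include <cstdint>

    // Hypothetical helper, not in the patch.
    bool IsLdrFromPc(uint32_t instr, uint32_t ldr_pc_pattern) {
      return (instr & ldr_pc_pattern) == ldr_pc_pattern;  // all pattern bits set
    }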