Index: src/ia32/macro-assembler-ia32.cc
diff --git a/src/ia32/macro-assembler-ia32.cc b/src/ia32/macro-assembler-ia32.cc
index 8b1be3cf17262f7c7c5d9de2d8a755af7982b937..2ab5a259321cd2cd8095fcd43c14a1886d40e62d 100644
--- a/src/ia32/macro-assembler-ia32.cc
+++ b/src/ia32/macro-assembler-ia32.cc
@@ -678,7 +678,7 @@ void MacroAssembler::AssertNumber(Register object) {
JumpIfSmi(object, &ok);
cmp(FieldOperand(object, HeapObject::kMapOffset),
isolate()->factory()->heap_number_map());
- Check(equal, kOperandNotANumber);
+ Check(equal, "Operand not a number");
bind(&ok);
}
}
@@ -687,7 +687,7 @@ void MacroAssembler::AssertNumber(Register object) {
void MacroAssembler::AssertSmi(Register object) {
if (emit_debug_code()) {
test(object, Immediate(kSmiTagMask));
- Check(equal, kOperandIsNotASmi);
+ Check(equal, "Operand is not a smi");
}
}
@@ -695,12 +695,12 @@ void MacroAssembler::AssertSmi(Register object) {
void MacroAssembler::AssertString(Register object) {
if (emit_debug_code()) {
test(object, Immediate(kSmiTagMask));
- Check(not_equal, kOperandIsASmiAndNotAString);
+ Check(not_equal, "Operand is a smi and not a string");
push(object);
mov(object, FieldOperand(object, HeapObject::kMapOffset));
CmpInstanceType(object, FIRST_NONSTRING_TYPE);
pop(object);
- Check(below, kOperandIsNotAString);
+ Check(below, "Operand is not a string");
}
}
@@ -708,12 +708,12 @@ void MacroAssembler::AssertString(Register object) {
void MacroAssembler::AssertName(Register object) {
if (emit_debug_code()) {
test(object, Immediate(kSmiTagMask));
- Check(not_equal, kOperandIsASmiAndNotAName);
+ Check(not_equal, "Operand is a smi and not a name");
push(object);
mov(object, FieldOperand(object, HeapObject::kMapOffset));
CmpInstanceType(object, LAST_NAME_TYPE);
pop(object);
- Check(below_equal, kOperandIsNotAName);
+ Check(below_equal, "Operand is not a name");
}
}
@@ -721,7 +721,7 @@ void MacroAssembler::AssertName(Register object) {
void MacroAssembler::AssertNotSmi(Register object) {
if (emit_debug_code()) {
test(object, Immediate(kSmiTagMask));
- Check(not_equal, kOperandIsASmi);
+ Check(not_equal, "Operand is a smi");
}
}
@@ -734,7 +734,7 @@ void MacroAssembler::EnterFrame(StackFrame::Type type) {
push(Immediate(CodeObject()));
if (emit_debug_code()) {
cmp(Operand(esp, 0), Immediate(isolate()->factory()->undefined_value()));
- Check(not_equal, kCodeObjectNotProperlyPatched);
+ Check(not_equal, "code object not properly patched");
}
}
@@ -743,7 +743,7 @@ void MacroAssembler::LeaveFrame(StackFrame::Type type) {
if (emit_debug_code()) {
cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
Immediate(Smi::FromInt(type)));
- Check(equal, kStackFrameTypesMustMatch);
+ Check(equal, "stack frame types must match");
}
leave();
}
@@ -1024,7 +1024,7 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
// When generating debug code, make sure the lexical context is set.
if (emit_debug_code()) {
cmp(scratch1, Immediate(0));
- Check(not_equal, kWeShouldNotHaveAnEmptyLexicalContext);
+ Check(not_equal, "we should not have an empty lexical context");
}
// Load the native context of the current context.
int offset =
@@ -1037,7 +1037,7 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
// Read the first word and compare to native_context_map.
cmp(FieldOperand(scratch1, HeapObject::kMapOffset),
isolate()->factory()->native_context_map());
- Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
+ Check(equal, "JSGlobalObject::native_context should be a native context.");
}
// Check if both contexts are the same.
@@ -1056,12 +1056,12 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
// Check the context is a native context.
if (emit_debug_code()) {
cmp(scratch2, isolate()->factory()->null_value());
- Check(not_equal, kJSGlobalProxyContextShouldNotBeNull);
+ Check(not_equal, "JSGlobalProxy::context() should not be null.");
// Read the first word and compare to native_context_map(),
cmp(FieldOperand(scratch2, HeapObject::kMapOffset),
isolate()->factory()->native_context_map());
- Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
+ Check(equal, "JSGlobalObject::native_context should be a native context.");
}
int token_offset = Context::kHeaderSize +
@@ -1206,7 +1206,7 @@ void MacroAssembler::LoadAllocationTopHelper(Register result,
#ifdef DEBUG
// Assert that result actually contains top on entry.
cmp(result, Operand::StaticVariable(allocation_top));
- Check(equal, kUnexpectedAllocationTop);
+ Check(equal, "Unexpected allocation top");
#endif
return;
}
@@ -1226,7 +1226,7 @@ void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
AllocationFlags flags) {
if (emit_debug_code()) {
test(result_end, Immediate(kObjectAlignmentMask));
- Check(zero, kUnalignedAllocationInNewSpace);
+ Check(zero, "Unaligned allocation in new space");
}
ExternalReference allocation_top =
@@ -1458,7 +1458,7 @@ void MacroAssembler::UndoAllocationInNewSpace(Register object) {
and_(object, Immediate(~kHeapObjectTagMask));
#ifdef DEBUG
cmp(object, Operand::StaticVariable(new_space_allocation_top));
- Check(below, kUndoAllocationOfNonAllocatedMemory);
+ Check(below, "Undo allocation of non allocated memory");
#endif
mov(Operand::StaticVariable(new_space_allocation_top), object);
}
@@ -2062,7 +2062,7 @@ void MacroAssembler::CallApiFunctionAndReturn(Address function_address,
// previous handle scope.
mov(Operand::StaticVariable(next_address), ebx);
sub(Operand::StaticVariable(level_address), Immediate(1));
- Assert(above_equal, kInvalidHandleScopeLevel);
+ Assert(above_equal, "Invalid HandleScope level");
cmp(edi, Operand::StaticVariable(limit_address));
j(not_equal, &delete_allocated_handles);
bind(&leave_exit_frame);
@@ -2104,7 +2104,7 @@ void MacroAssembler::CallApiFunctionAndReturn(Address function_address,
cmp(return_value, isolate()->factory()->null_value());
j(equal, &ok, Label::kNear);
- Abort(kAPICallReturnedInvalidObject);
+ Abort("API call returned invalid object");
bind(&ok);
#endif
@@ -2390,7 +2390,7 @@ void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
if (emit_debug_code()) {
cmp(FieldOperand(dst, HeapObject::kMapOffset),
isolate()->factory()->with_context_map());
- Check(not_equal, kVariableResolvedToWithContext);
+ Check(not_equal, "Variable resolved to with context.");
}
}
@@ -2477,7 +2477,7 @@ void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
jmp(&ok);
bind(&fail);
- Abort(kGlobalFunctionsMustHaveInitialMap);
+ Abort("Global functions must have initial map");
bind(&ok);
}
}
@@ -2578,7 +2578,7 @@ void MacroAssembler::VerifyX87StackDepth(uint32_t depth) {
and_(eax, kTopMask);
shr(eax, 11);
cmp(eax, Immediate(tos));
- Check(equal, kUnexpectedFPUStackDepthAfterInstruction);
+ Check(equal, "Unexpected FPU stack depth after instruction");
fnclex();
pop(eax);
}
@@ -2661,8 +2661,8 @@ void MacroAssembler::DecrementCounter(Condition cc,
}
-void MacroAssembler::Assert(Condition cc, BailoutReason reason) {
- if (emit_debug_code()) Check(cc, reason);
+void MacroAssembler::Assert(Condition cc, const char* msg) {
+ if (emit_debug_code()) Check(cc, msg);
}
@@ -2679,16 +2679,16 @@ void MacroAssembler::AssertFastElements(Register elements) {
cmp(FieldOperand(elements, HeapObject::kMapOffset),
Immediate(factory->fixed_cow_array_map()));
j(equal, &ok);
- Abort(kJSObjectWithFastElementsMapHasSlowElements);
+ Abort("JSObject with fast elements map has slow elements");
bind(&ok);
}
}
-void MacroAssembler::Check(Condition cc, BailoutReason reason) {
+void MacroAssembler::Check(Condition cc, const char* msg) {
Label L;
j(cc, &L);
- Abort(reason);
+ Abort(msg);
// will not return here
bind(&L);
}
@@ -2709,13 +2709,12 @@ void MacroAssembler::CheckStackAlignment() {
}
-void MacroAssembler::Abort(BailoutReason reason) {
+void MacroAssembler::Abort(const char* msg) {
// We want to pass the msg string like a smi to avoid GC
// problems, however msg is not guaranteed to be aligned
// properly. Instead, we pass an aligned pointer that is
// a proper v8 smi, but also pass the alignment difference
// from the real pointer as a smi.
- const char* msg = GetBailoutReason(reason);
intptr_t p1 = reinterpret_cast<intptr_t>(msg);
intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
@@ -3119,7 +3118,7 @@ void MacroAssembler::EnsureNotWhite(
if (emit_debug_code()) {
mov(length, Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset));
cmp(length, Operand(bitmap_scratch, MemoryChunk::kSizeOffset));
- Check(less_equal, kLiveBytesCountOverflowChunkSize);
+ Check(less_equal, "Live Bytes Count overflow chunk size");
}
bind(&done);