Chromium Code Reviews

Unified Diff: src/mips64/code-stubs-mips64.cc

Issue 430503007: Rename ASSERT* to DCHECK*. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: REBASE and fixes Created 6 years, 4 months ago
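This patch set mechanically renames the ASSERT*/ASSERT_EQ-style macros in the MIPS64 code stubs to the Chromium-style DCHECK*/DCHECK_EQ names; the checked conditions themselves are unchanged. As a rough illustration of the semantics such a macro provides (debug-only check, compiled out of release builds), here is a minimal sketch. It is not V8's actual definition, which lives in the project's own headers and adds more diagnostics; the DEBUG guard and the message format below are assumptions for illustration only.

    // Simplified, illustrative stand-in for a DCHECK-style macro:
    // fatal on failure in debug builds, a no-op in release builds.
    #include <cstdio>
    #include <cstdlib>

    #ifdef DEBUG
    #define DCHECK(condition)                                              \
      do {                                                                 \
        if (!(condition)) {                                                \
          std::fprintf(stderr, "Debug check failed: %s\n", #condition);    \
          std::abort();                                                    \
        }                                                                  \
      } while (false)
    #else
    #define DCHECK(condition) ((void)0)
    #endif

    int main() {
      int param_count = 0;
      DCHECK(param_count == 0);  // Holds here; in release builds the check vanishes.
      return 0;
    }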
Index: src/mips64/code-stubs-mips64.cc
diff --git a/src/mips64/code-stubs-mips64.cc b/src/mips64/code-stubs-mips64.cc
index 599ca16232da77f822ddea9aa05a4266ea44a845..970792aafa9baff7c9f2a4b2d8aedd2309d2fe61 100644
--- a/src/mips64/code-stubs-mips64.cc
+++ b/src/mips64/code-stubs-mips64.cc
@@ -357,7 +357,7 @@ void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
{
// Call the runtime system in a fresh internal frame.
FrameScope scope(masm, StackFrame::INTERNAL);
- ASSERT((param_count == 0) ||
+ DCHECK((param_count == 0) ||
a0.is(descriptor->GetEnvironmentParameterRegister(param_count - 1)));
// Push arguments, adjust sp.
__ Dsubu(sp, sp, Operand(param_count * kPointerSize));
@@ -645,7 +645,7 @@ void WriteInt32ToHeapNumberStub::Generate(MacroAssembler* masm) {
// but it just ends up combining harmlessly with the last digit of the
// exponent that happens to be 1. The sign bit is 0 so we shift 10 to get
// the most significant 1 to hit the last bit of the 12 bit sign and exponent.
- ASSERT(((1 << HeapNumber::kExponentShift) & non_smi_exponent) != 0);
+ DCHECK(((1 << HeapNumber::kExponentShift) & non_smi_exponent) != 0);
const int shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 2;
__ srl(at, the_int_, shift_distance);
__ or_(scratch_, scratch_, at);
@@ -706,7 +706,7 @@ static void EmitIdenticalObjectComparison(MacroAssembler* masm,
__ Branch(&return_equal, ne, t0, Operand(ODDBALL_TYPE));
__ LoadRoot(a6, Heap::kUndefinedValueRootIndex);
__ Branch(&return_equal, ne, a0, Operand(a6));
- ASSERT(is_int16(GREATER) && is_int16(LESS));
+ DCHECK(is_int16(GREATER) && is_int16(LESS));
__ Ret(USE_DELAY_SLOT);
if (cc == le) {
// undefined <= undefined should fail.
@@ -720,7 +720,7 @@ static void EmitIdenticalObjectComparison(MacroAssembler* masm,
}
__ bind(&return_equal);
- ASSERT(is_int16(GREATER) && is_int16(LESS));
+ DCHECK(is_int16(GREATER) && is_int16(LESS));
__ Ret(USE_DELAY_SLOT);
if (cc == less) {
__ li(v0, Operand(GREATER)); // Things aren't less than themselves.
@@ -758,7 +758,7 @@ static void EmitIdenticalObjectComparison(MacroAssembler* masm,
if (cc != eq) {
// All-zero means Infinity means equal.
__ Ret(eq, v0, Operand(zero_reg));
- ASSERT(is_int16(GREATER) && is_int16(LESS));
+ DCHECK(is_int16(GREATER) && is_int16(LESS));
__ Ret(USE_DELAY_SLOT);
if (cc == le) {
__ li(v0, Operand(GREATER)); // NaN <= NaN should fail.
@@ -779,7 +779,7 @@ static void EmitSmiNonsmiComparison(MacroAssembler* masm,
Label* both_loaded_as_doubles,
Label* slow,
bool strict) {
- ASSERT((lhs.is(a0) && rhs.is(a1)) ||
+ DCHECK((lhs.is(a0) && rhs.is(a1)) ||
(lhs.is(a1) && rhs.is(a0)));
Label lhs_is_smi;
@@ -896,7 +896,7 @@ static void EmitCheckForInternalizedStringsOrObjects(MacroAssembler* masm,
Register rhs,
Label* possible_strings,
Label* not_both_strings) {
- ASSERT((lhs.is(a0) && rhs.is(a1)) ||
+ DCHECK((lhs.is(a0) && rhs.is(a1)) ||
(lhs.is(a1) && rhs.is(a0)));
// a2 is object type of rhs.
@@ -988,7 +988,7 @@ void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
// If either is a Smi (we know that not both are), then they can only
// be strictly equal if the other is a HeapNumber.
STATIC_ASSERT(kSmiTag == 0);
- ASSERT_EQ(0, Smi::FromInt(0));
+ DCHECK_EQ(0, Smi::FromInt(0));
__ And(a6, lhs, Operand(rhs));
__ JumpIfNotSmi(a6, &not_smis, a4);
// One operand is a smi. EmitSmiNonsmiComparison generates code that can:
@@ -1044,7 +1044,7 @@ void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
__ bind(&nan);
// NaN comparisons always fail.
// Load whatever we need in v0 to make the comparison fail.
- ASSERT(is_int16(GREATER) && is_int16(LESS));
+ DCHECK(is_int16(GREATER) && is_int16(LESS));
__ Ret(USE_DELAY_SLOT);
if (cc == lt || cc == le) {
__ li(v0, Operand(GREATER));
@@ -1126,7 +1126,7 @@ void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
if (cc == lt || cc == le) {
ncr = GREATER;
} else {
- ASSERT(cc == gt || cc == ge); // Remaining cases.
+ DCHECK(cc == gt || cc == ge); // Remaining cases.
ncr = LESS;
}
__ li(a0, Operand(Smi::FromInt(ncr)));
@@ -1377,7 +1377,7 @@ void MathPowStub::Generate(MacroAssembler* masm) {
heapnumber, scratch, scratch2, heapnumbermap, &call_runtime);
__ sdc1(double_result,
FieldMemOperand(heapnumber, HeapNumber::kValueOffset));
- ASSERT(heapnumber.is(v0));
+ DCHECK(heapnumber.is(v0));
__ IncrementCounter(counters->math_pow(), 1, scratch, scratch2);
__ DropAndRet(2);
} else {
@@ -1524,7 +1524,7 @@ void CEntryStub::Generate(MacroAssembler* masm) {
// Set up sp in the delay slot.
masm->daddiu(sp, sp, -kCArgsSlotsSize);
// Make sure the stored 'ra' points to this position.
- ASSERT_EQ(kNumInstructionsToJump,
+ DCHECK_EQ(kNumInstructionsToJump,
masm->InstructionsGeneratedSince(&find_ra));
}
@@ -1779,9 +1779,9 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
// in the safepoint slot for register a4.
void InstanceofStub::Generate(MacroAssembler* masm) {
// Call site inlining and patching implies arguments in registers.
- ASSERT(HasArgsInRegisters() || !HasCallSiteInlineCheck());
+ DCHECK(HasArgsInRegisters() || !HasCallSiteInlineCheck());
// ReturnTrueFalse is only implemented for inlined call sites.
- ASSERT(!ReturnTrueFalseObject() || HasCallSiteInlineCheck());
+ DCHECK(!ReturnTrueFalseObject() || HasCallSiteInlineCheck());
// Fixed register usage throughout the stub:
const Register object = a0; // Object (lhs).
@@ -1831,7 +1831,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
__ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
__ StoreRoot(map, Heap::kInstanceofCacheMapRootIndex);
} else {
- ASSERT(HasArgsInRegisters());
+ DCHECK(HasArgsInRegisters());
// Patch the (relocated) inlined map check.
// The offset was stored in a4 safepoint slot.
@@ -1861,7 +1861,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
__ Branch(&loop);
__ bind(&is_instance);
- ASSERT(Smi::FromInt(0) == 0);
+ DCHECK(Smi::FromInt(0) == 0);
if (!HasCallSiteInlineCheck()) {
__ mov(v0, zero_reg);
__ StoreRoot(v0, Heap::kInstanceofCacheAnswerRootIndex);
@@ -1873,7 +1873,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
__ PatchRelocatedValue(inline_site, scratch, v0);
if (!ReturnTrueFalseObject()) {
- ASSERT_EQ(Smi::FromInt(0), 0);
+ DCHECK_EQ(Smi::FromInt(0), 0);
__ mov(v0, zero_reg);
}
}
@@ -2093,7 +2093,7 @@ void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
FixedArray::kHeaderSize + 2 * kPointerSize;
// If there are no mapped parameters, we do not need the parameter_map.
Label param_map_size;
- ASSERT_EQ(0, Smi::FromInt(0));
+ DCHECK_EQ(0, Smi::FromInt(0));
__ Branch(USE_DELAY_SLOT, &param_map_size, eq, a1, Operand(zero_reg));
__ mov(t1, zero_reg); // In delay slot: param map size = 0 when a1 == 0.
__ SmiScale(t1, a1, kPointerSizeLog2);
@@ -2597,7 +2597,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
__ li(a4, Operand(
ExternalReference::address_of_static_offsets_vector(isolate())));
} else { // O32.
- ASSERT(kMipsAbi == kO32);
+ DCHECK(kMipsAbi == kO32);
// Argument 9: Pass current isolate address.
// CFunctionArgumentOperand handles MIPS stack argument slots.
@@ -2848,9 +2848,9 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
// a3 : slot in feedback vector (Smi)
Label initialize, done, miss, megamorphic, not_array_function;
- ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()),
+ DCHECK_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()),
masm->isolate()->heap()->megamorphic_symbol());
- ASSERT_EQ(*TypeFeedbackInfo::UninitializedSentinel(masm->isolate()),
+ DCHECK_EQ(*TypeFeedbackInfo::UninitializedSentinel(masm->isolate()),
masm->isolate()->heap()->uninitialized_symbol());
// Load the cache state into a4.
@@ -3125,9 +3125,9 @@ void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
Label got_char_code;
Label sliced_string;
- ASSERT(!a4.is(index_));
- ASSERT(!a4.is(result_));
- ASSERT(!a4.is(object_));
+ DCHECK(!a4.is(index_));
+ DCHECK(!a4.is(result_));
+ DCHECK(!a4.is(object_));
// If the receiver is a smi trigger the non-string case.
__ JumpIfSmi(object_, receiver_not_string_);
@@ -3326,7 +3326,7 @@ void StringCharCodeAtGenerator::GenerateSlow(
if (index_flags_ == STRING_INDEX_IS_NUMBER) {
__ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
} else {
- ASSERT(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
+ DCHECK(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
// NumberToSmi discards numbers that are not exact integers.
__ CallRuntime(Runtime::kNumberToSmi, 1);
}
@@ -3369,11 +3369,11 @@ void StringCharCodeAtGenerator::GenerateSlow(
void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
// Fast case of Heap::LookupSingleCharacterStringFromCode.
- ASSERT(!a4.is(result_));
- ASSERT(!a4.is(code_));
+ DCHECK(!a4.is(result_));
+ DCHECK(!a4.is(code_));
STATIC_ASSERT(kSmiTag == 0);
- ASSERT(IsPowerOf2(String::kMaxOneByteCharCode + 1));
+ DCHECK(IsPowerOf2(String::kMaxOneByteCharCode + 1));
__ And(a4,
code_,
Operand(kSmiTagMask |
@@ -3758,7 +3758,7 @@ void StringCompareStub::GenerateFlatAsciiStringEquals(MacroAssembler* masm,
__ bind(&check_zero_length);
STATIC_ASSERT(kSmiTag == 0);
__ Branch(&compare_chars, ne, length, Operand(zero_reg));
- ASSERT(is_int16((intptr_t)Smi::FromInt(EQUAL)));
+ DCHECK(is_int16((intptr_t)Smi::FromInt(EQUAL)));
__ Ret(USE_DELAY_SLOT);
__ li(v0, Operand(Smi::FromInt(EQUAL)));
@@ -3801,7 +3801,7 @@ void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
// Compare lengths - strings up to min-length are equal.
__ bind(&compare_lengths);
- ASSERT(Smi::FromInt(EQUAL) == static_cast<Smi*>(0));
+ DCHECK(Smi::FromInt(EQUAL) == static_cast<Smi*>(0));
// Use length_delta as result if it's zero.
__ mov(scratch2, length_delta);
__ mov(scratch4, zero_reg);
@@ -3917,7 +3917,7 @@ void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
- ASSERT(state_ == CompareIC::SMI);
+ DCHECK(state_ == CompareIC::SMI);
Label miss;
__ Or(a2, a1, a0);
__ JumpIfNotSmi(a2, &miss);
@@ -3940,7 +3940,7 @@ void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
void ICCompareStub::GenerateNumbers(MacroAssembler* masm) {
- ASSERT(state_ == CompareIC::NUMBER);
+ DCHECK(state_ == CompareIC::NUMBER);
Label generic_stub;
Label unordered, maybe_undefined1, maybe_undefined2;
@@ -3993,7 +3993,7 @@ void ICCompareStub::GenerateNumbers(MacroAssembler* masm) {
__ BranchF(&fpu_lt, NULL, lt, f0, f2);
// Otherwise it's greater, so just fall thru, and return.
- ASSERT(is_int16(GREATER) && is_int16(EQUAL) && is_int16(LESS));
+ DCHECK(is_int16(GREATER) && is_int16(EQUAL) && is_int16(LESS));
__ Ret(USE_DELAY_SLOT);
__ li(v0, Operand(GREATER));
@@ -4033,7 +4033,7 @@ void ICCompareStub::GenerateNumbers(MacroAssembler* masm) {
void ICCompareStub::GenerateInternalizedStrings(MacroAssembler* masm) {
- ASSERT(state_ == CompareIC::INTERNALIZED_STRING);
+ DCHECK(state_ == CompareIC::INTERNALIZED_STRING);
Label miss;
// Registers containing left and right operands respectively.
@@ -4057,13 +4057,13 @@ void ICCompareStub::GenerateInternalizedStrings(MacroAssembler* masm) {
// Make sure a0 is non-zero. At this point input operands are
// guaranteed to be non-zero.
- ASSERT(right.is(a0));
+ DCHECK(right.is(a0));
STATIC_ASSERT(EQUAL == 0);
STATIC_ASSERT(kSmiTag == 0);
__ mov(v0, right);
// Internalized strings are compared by identity.
__ Ret(ne, left, Operand(right));
- ASSERT(is_int16(EQUAL));
+ DCHECK(is_int16(EQUAL));
__ Ret(USE_DELAY_SLOT);
__ li(v0, Operand(Smi::FromInt(EQUAL)));
@@ -4073,8 +4073,8 @@ void ICCompareStub::GenerateInternalizedStrings(MacroAssembler* masm) {
void ICCompareStub::GenerateUniqueNames(MacroAssembler* masm) {
- ASSERT(state_ == CompareIC::UNIQUE_NAME);
- ASSERT(GetCondition() == eq);
+ DCHECK(state_ == CompareIC::UNIQUE_NAME);
+ DCHECK(GetCondition() == eq);
Label miss;
// Registers containing left and right operands respectively.
@@ -4104,7 +4104,7 @@ void ICCompareStub::GenerateUniqueNames(MacroAssembler* masm) {
__ Branch(&done, ne, left, Operand(right));
// Make sure a0 is non-zero. At this point input operands are
// guaranteed to be non-zero.
- ASSERT(right.is(a0));
+ DCHECK(right.is(a0));
STATIC_ASSERT(EQUAL == 0);
STATIC_ASSERT(kSmiTag == 0);
__ li(v0, Operand(Smi::FromInt(EQUAL)));
@@ -4117,7 +4117,7 @@ void ICCompareStub::GenerateUniqueNames(MacroAssembler* masm) {
void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
- ASSERT(state_ == CompareIC::STRING);
+ DCHECK(state_ == CompareIC::STRING);
Label miss;
bool equality = Token::IsEqualityOp(op_);
@@ -4160,7 +4160,7 @@ void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
// because we already know they are not identical. We know they are both
// strings.
if (equality) {
- ASSERT(GetCondition() == eq);
+ DCHECK(GetCondition() == eq);
STATIC_ASSERT(kInternalizedTag == 0);
__ Or(tmp3, tmp1, Operand(tmp2));
__ And(tmp5, tmp3, Operand(kIsNotInternalizedMask));
@@ -4168,7 +4168,7 @@ void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
__ Branch(&is_symbol, ne, tmp5, Operand(zero_reg));
// Make sure a0 is non-zero. At this point input operands are
// guaranteed to be non-zero.
- ASSERT(right.is(a0));
+ DCHECK(right.is(a0));
__ Ret(USE_DELAY_SLOT);
__ mov(v0, a0); // In the delay slot.
__ bind(&is_symbol);
@@ -4203,7 +4203,7 @@ void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
- ASSERT(state_ == CompareIC::OBJECT);
+ DCHECK(state_ == CompareIC::OBJECT);
Label miss;
__ And(a2, a1, Operand(a0));
__ JumpIfSmi(a2, &miss);
@@ -4213,7 +4213,7 @@ void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
__ GetObjectType(a1, a2, a2);
__ Branch(&miss, ne, a2, Operand(JS_OBJECT_TYPE));
- ASSERT(GetCondition() == eq);
+ DCHECK(GetCondition() == eq);
__ Ret(USE_DELAY_SLOT);
__ dsubu(v0, a0, a1);
@@ -4302,7 +4302,7 @@ void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
Register properties,
Handle<Name> name,
Register scratch0) {
- ASSERT(name->IsUniqueName());
+ DCHECK(name->IsUniqueName());
// If names of slots in range from 1 to kProbes - 1 for the hash value are
// not equal to the name and kProbes-th slot is not used (its name is the
// undefined value), it guarantees the hash table doesn't contain the
@@ -4319,20 +4319,20 @@ void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
Operand(name->Hash() + NameDictionary::GetProbeOffset(i)));
// Scale the index by multiplying by the entry size.
- ASSERT(NameDictionary::kEntrySize == 3);
+ DCHECK(NameDictionary::kEntrySize == 3);
__ dsll(at, index, 1);
__ Daddu(index, index, at); // index *= 3.
Register entity_name = scratch0;
// Having undefined at this place means the name is not contained.
- ASSERT_EQ(kSmiTagSize, 1);
+ DCHECK_EQ(kSmiTagSize, 1);
Register tmp = properties;
__ dsll(scratch0, index, kPointerSizeLog2);
__ Daddu(tmp, properties, scratch0);
__ ld(entity_name, FieldMemOperand(tmp, kElementsStartOffset));
- ASSERT(!tmp.is(entity_name));
+ DCHECK(!tmp.is(entity_name));
__ LoadRoot(tmp, Heap::kUndefinedValueRootIndex);
__ Branch(done, eq, entity_name, Operand(tmp));
@@ -4385,10 +4385,10 @@ void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
Register name,
Register scratch1,
Register scratch2) {
- ASSERT(!elements.is(scratch1));
- ASSERT(!elements.is(scratch2));
- ASSERT(!name.is(scratch1));
- ASSERT(!name.is(scratch2));
+ DCHECK(!elements.is(scratch1));
+ DCHECK(!elements.is(scratch2));
+ DCHECK(!name.is(scratch1));
+ DCHECK(!name.is(scratch2));
__ AssertName(name);
@@ -4407,7 +4407,7 @@ void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
// Add the probe offset (i + i * i) left shifted to avoid right shifting
// the hash in a separate instruction. The value hash + i + i * i is right
// shifted in the following and instruction.
- ASSERT(NameDictionary::GetProbeOffset(i) <
+ DCHECK(NameDictionary::GetProbeOffset(i) <
1 << (32 - Name::kHashFieldOffset));
__ Daddu(scratch2, scratch2, Operand(
NameDictionary::GetProbeOffset(i) << Name::kHashShift));
@@ -4416,7 +4416,7 @@ void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
__ And(scratch2, scratch1, scratch2);
// Scale the index by multiplying by the element size.
- ASSERT(NameDictionary::kEntrySize == 3);
+ DCHECK(NameDictionary::kEntrySize == 3);
// scratch2 = scratch2 * 3.
__ dsll(at, scratch2, 1);
@@ -4436,7 +4436,7 @@ void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
__ MultiPush(spill_mask);
if (name.is(a0)) {
- ASSERT(!elements.is(a1));
+ DCHECK(!elements.is(a1));
__ Move(a1, name);
__ Move(a0, elements);
} else {
@@ -4492,7 +4492,7 @@ void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
// Add the probe offset (i + i * i) left shifted to avoid right shifting
// the hash in a separate instruction. The value hash + i + i * i is right
// shifted in the following and instruction.
- ASSERT(NameDictionary::GetProbeOffset(i) <
+ DCHECK(NameDictionary::GetProbeOffset(i) <
1 << (32 - Name::kHashFieldOffset));
__ Daddu(index, hash, Operand(
NameDictionary::GetProbeOffset(i) << Name::kHashShift));
@@ -4503,14 +4503,14 @@ void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
__ And(index, mask, index);
// Scale the index by multiplying by the entry size.
- ASSERT(NameDictionary::kEntrySize == 3);
+ DCHECK(NameDictionary::kEntrySize == 3);
// index *= 3.
__ mov(at, index);
__ dsll(index, index, 1);
__ Daddu(index, index, at);
- ASSERT_EQ(kSmiTagSize, 1);
+ DCHECK_EQ(kSmiTagSize, 1);
__ dsll(index, index, kPointerSizeLog2);
__ Daddu(index, index, dictionary);
__ ld(entry_key, FieldMemOperand(index, kElementsStartOffset));
@@ -4647,8 +4647,8 @@ void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
__ PrepareCallCFunction(argument_count, regs_.scratch0());
Register address =
a0.is(regs_.address()) ? regs_.scratch0() : regs_.address();
- ASSERT(!address.is(regs_.object()));
- ASSERT(!address.is(a0));
+ DCHECK(!address.is(regs_.object()));
+ DCHECK(!address.is(a0));
__ Move(address, regs_.address());
__ Move(a0, regs_.object());
__ Move(a1, address);
@@ -4869,7 +4869,7 @@ void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
int frame_alignment = masm->ActivationFrameAlignment();
if (frame_alignment > kPointerSize) {
__ mov(s5, sp);
- ASSERT(IsPowerOf2(frame_alignment));
+ DCHECK(IsPowerOf2(frame_alignment));
__ And(sp, sp, Operand(-frame_alignment));
}
@@ -4937,12 +4937,12 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
// sp[0] - last argument
Label normal_sequence;
if (mode == DONT_OVERRIDE) {
- ASSERT(FAST_SMI_ELEMENTS == 0);
- ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
- ASSERT(FAST_ELEMENTS == 2);
- ASSERT(FAST_HOLEY_ELEMENTS == 3);
- ASSERT(FAST_DOUBLE_ELEMENTS == 4);
- ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5);
+ DCHECK(FAST_SMI_ELEMENTS == 0);
+ DCHECK(FAST_HOLEY_SMI_ELEMENTS == 1);
+ DCHECK(FAST_ELEMENTS == 2);
+ DCHECK(FAST_HOLEY_ELEMENTS == 3);
+ DCHECK(FAST_DOUBLE_ELEMENTS == 4);
+ DCHECK(FAST_HOLEY_DOUBLE_ELEMENTS == 5);
// is the low bit set? If so, we are holey and that is good.
__ And(at, a3, Operand(1));
@@ -5249,7 +5249,7 @@ void CallApiFunctionStub::Generate(MacroAssembler* masm) {
FrameScope frame_scope(masm, StackFrame::MANUAL);
__ EnterExitFrame(false, kApiStackSpace);
- ASSERT(!api_function_address.is(a0) && !scratch.is(a0));
+ DCHECK(!api_function_address.is(a0) && !scratch.is(a0));
// a0 = FunctionCallbackInfo&
// Arguments is after the return address.
__ Daddu(a0, sp, Operand(1 * kPointerSize));