Index: src/mips/code-stubs-mips.cc
diff --git a/src/mips/code-stubs-mips.cc b/src/mips/code-stubs-mips.cc
index 4074f892f21de631ac31804565d0a9eabbdffcb9..2e8e6074b57028c6648d2190dba9b750c2c2b59f 100644
--- a/src/mips/code-stubs-mips.cc
+++ b/src/mips/code-stubs-mips.cc
@@ -357,7 +357,7 @@ void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
   {
     // Call the runtime system in a fresh internal frame.
     FrameScope scope(masm, StackFrame::INTERNAL);
-    ASSERT(param_count == 0 ||
+    DCHECK(param_count == 0 ||
            a0.is(descriptor->GetEnvironmentParameterRegister(
                param_count - 1)));
     // Push arguments, adjust sp.
@@ -650,7 +650,7 @@ void WriteInt32ToHeapNumberStub::Generate(MacroAssembler* masm) {
   // but it just ends up combining harmlessly with the last digit of the
   // exponent that happens to be 1. The sign bit is 0 so we shift 10 to get
   // the most significant 1 to hit the last bit of the 12 bit sign and exponent.
-  ASSERT(((1 << HeapNumber::kExponentShift) & non_smi_exponent) != 0);
+  DCHECK(((1 << HeapNumber::kExponentShift) & non_smi_exponent) != 0);
   const int shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 2;
   __ srl(at, the_int_, shift_distance);
   __ or_(scratch_, scratch_, at);
@@ -711,7 +711,7 @@ static void EmitIdenticalObjectComparison(MacroAssembler* masm,
       __ Branch(&return_equal, ne, t4, Operand(ODDBALL_TYPE));
       __ LoadRoot(t2, Heap::kUndefinedValueRootIndex);
       __ Branch(&return_equal, ne, a0, Operand(t2));
-      ASSERT(is_int16(GREATER) && is_int16(LESS));
+      DCHECK(is_int16(GREATER) && is_int16(LESS));
       __ Ret(USE_DELAY_SLOT);
       if (cc == le) {
         // undefined <= undefined should fail.
@@ -725,7 +725,7 @@ static void EmitIdenticalObjectComparison(MacroAssembler* masm,
   }

   __ bind(&return_equal);
-  ASSERT(is_int16(GREATER) && is_int16(LESS));
+  DCHECK(is_int16(GREATER) && is_int16(LESS));
   __ Ret(USE_DELAY_SLOT);
   if (cc == less) {
     __ li(v0, Operand(GREATER));  // Things aren't less than themselves.
@@ -764,7 +764,7 @@ static void EmitIdenticalObjectComparison(MacroAssembler* masm,
     if (cc != eq) {
       // All-zero means Infinity means equal.
       __ Ret(eq, v0, Operand(zero_reg));
-      ASSERT(is_int16(GREATER) && is_int16(LESS));
+      DCHECK(is_int16(GREATER) && is_int16(LESS));
       __ Ret(USE_DELAY_SLOT);
       if (cc == le) {
         __ li(v0, Operand(GREATER));  // NaN <= NaN should fail.
@@ -785,7 +785,7 @@ static void EmitSmiNonsmiComparison(MacroAssembler* masm,
                                     Label* both_loaded_as_doubles,
                                     Label* slow,
                                     bool strict) {
-  ASSERT((lhs.is(a0) && rhs.is(a1)) ||
+  DCHECK((lhs.is(a0) && rhs.is(a1)) ||
          (lhs.is(a1) && rhs.is(a0)));

   Label lhs_is_smi;
@@ -903,7 +903,7 @@ static void EmitCheckForInternalizedStringsOrObjects(MacroAssembler* masm,
                                                      Register rhs,
                                                      Label* possible_strings,
                                                      Label* not_both_strings) {
-  ASSERT((lhs.is(a0) && rhs.is(a1)) ||
+  DCHECK((lhs.is(a0) && rhs.is(a1)) ||
          (lhs.is(a1) && rhs.is(a0)));

   // a2 is object type of rhs.
@@ -994,7 +994,7 @@ void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
   // If either is a Smi (we know that not both are), then they can only
   // be strictly equal if the other is a HeapNumber.
   STATIC_ASSERT(kSmiTag == 0);
-  ASSERT_EQ(0, Smi::FromInt(0));
+  DCHECK_EQ(0, Smi::FromInt(0));
   __ And(t2, lhs, Operand(rhs));
   __ JumpIfNotSmi(t2, &not_smis, t0);
   // One operand is a smi. EmitSmiNonsmiComparison generates code that can:
@@ -1038,7 +1038,7 @@ void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
   __ bind(&nan);
   // NaN comparisons always fail.
   // Load whatever we need in v0 to make the comparison fail.
-  ASSERT(is_int16(GREATER) && is_int16(LESS));
+  DCHECK(is_int16(GREATER) && is_int16(LESS));
   __ Ret(USE_DELAY_SLOT);
   if (cc == lt || cc == le) {
     __ li(v0, Operand(GREATER));
@@ -1120,7 +1120,7 @@ void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
     if (cc == lt || cc == le) {
       ncr = GREATER;
     } else {
-      ASSERT(cc == gt || cc == ge);  // Remaining cases.
+      DCHECK(cc == gt || cc == ge);  // Remaining cases.
       ncr = LESS;
     }
     __ li(a0, Operand(Smi::FromInt(ncr)));
@@ -1371,7 +1371,7 @@ void MathPowStub::Generate(MacroAssembler* masm) {
        heapnumber, scratch, scratch2, heapnumbermap, &call_runtime);
     __ sdc1(double_result,
             FieldMemOperand(heapnumber, HeapNumber::kValueOffset));
-    ASSERT(heapnumber.is(v0));
+    DCHECK(heapnumber.is(v0));
     __ IncrementCounter(counters->math_pow(), 1, scratch, scratch2);
     __ DropAndRet(2);
   } else {
@@ -1518,7 +1518,7 @@ void CEntryStub::Generate(MacroAssembler* masm) {
     // Set up sp in the delay slot.
     masm->addiu(sp, sp, -kCArgsSlotsSize);
     // Make sure the stored 'ra' points to this position.
-    ASSERT_EQ(kNumInstructionsToJump,
+    DCHECK_EQ(kNumInstructionsToJump,
               masm->InstructionsGeneratedSince(&find_ra));
   }

@@ -1769,9 +1769,9 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
 // in the safepoint slot for register t0.
 void InstanceofStub::Generate(MacroAssembler* masm) {
   // Call site inlining and patching implies arguments in registers.
-  ASSERT(HasArgsInRegisters() || !HasCallSiteInlineCheck());
+  DCHECK(HasArgsInRegisters() || !HasCallSiteInlineCheck());
   // ReturnTrueFalse is only implemented for inlined call sites.
-  ASSERT(!ReturnTrueFalseObject() || HasCallSiteInlineCheck());
+  DCHECK(!ReturnTrueFalseObject() || HasCallSiteInlineCheck());

   // Fixed register usage throughout the stub:
   const Register object = a0;  // Object (lhs).
@@ -1821,7 +1821,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
     __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
     __ StoreRoot(map, Heap::kInstanceofCacheMapRootIndex);
   } else {
-    ASSERT(HasArgsInRegisters());
+    DCHECK(HasArgsInRegisters());
     // Patch the (relocated) inlined map check.

     // The offset was stored in t0 safepoint slot.
@@ -1851,7 +1851,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
   __ Branch(&loop);

   __ bind(&is_instance);
-  ASSERT(Smi::FromInt(0) == 0);
+  DCHECK(Smi::FromInt(0) == 0);
   if (!HasCallSiteInlineCheck()) {
     __ mov(v0, zero_reg);
     __ StoreRoot(v0, Heap::kInstanceofCacheAnswerRootIndex);
@@ -1863,7 +1863,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
     __ PatchRelocatedValue(inline_site, scratch, v0);

     if (!ReturnTrueFalseObject()) {
-      ASSERT_EQ(Smi::FromInt(0), 0);
+      DCHECK_EQ(Smi::FromInt(0), 0);
       __ mov(v0, zero_reg);
     }
   }
@@ -2084,7 +2084,7 @@ void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
       FixedArray::kHeaderSize + 2 * kPointerSize;
   // If there are no mapped parameters, we do not need the parameter_map.
   Label param_map_size;
-  ASSERT_EQ(0, Smi::FromInt(0));
+  DCHECK_EQ(0, Smi::FromInt(0));
   __ Branch(USE_DELAY_SLOT, &param_map_size, eq, a1, Operand(zero_reg));
   __ mov(t5, zero_reg);  // In delay slot: param map size = 0 when a1 == 0.
   __ sll(t5, a1, 1);
@@ -2805,9 +2805,9 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
   // a3 : slot in feedback vector (Smi)
   Label initialize, done, miss, megamorphic, not_array_function;

-  ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()),
+  DCHECK_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()),
             masm->isolate()->heap()->megamorphic_symbol());
-  ASSERT_EQ(*TypeFeedbackInfo::UninitializedSentinel(masm->isolate()),
+  DCHECK_EQ(*TypeFeedbackInfo::UninitializedSentinel(masm->isolate()),
             masm->isolate()->heap()->uninitialized_symbol());

   // Load the cache state into t0.
@@ -3226,9 +3226,9 @@ void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
   Label got_char_code;
   Label sliced_string;

-  ASSERT(!t0.is(index_));
-  ASSERT(!t0.is(result_));
-  ASSERT(!t0.is(object_));
+  DCHECK(!t0.is(index_));
+  DCHECK(!t0.is(result_));
+  DCHECK(!t0.is(object_));

   // If the receiver is a smi trigger the non-string case.
   __ JumpIfSmi(object_, receiver_not_string_);
@@ -3281,7 +3281,7 @@ void StringCharCodeAtGenerator::GenerateSlow(
   if (index_flags_ == STRING_INDEX_IS_NUMBER) {
     __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
   } else {
-    ASSERT(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
+    DCHECK(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
     // NumberToSmi discards numbers that are not exact integers.
     __ CallRuntime(Runtime::kNumberToSmi, 1);
   }
@@ -3324,12 +3324,12 @@ void StringCharCodeAtGenerator::GenerateSlow(
 void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
   // Fast case of Heap::LookupSingleCharacterStringFromCode.

-  ASSERT(!t0.is(result_));
-  ASSERT(!t0.is(code_));
+  DCHECK(!t0.is(result_));
+  DCHECK(!t0.is(code_));

   STATIC_ASSERT(kSmiTag == 0);
   STATIC_ASSERT(kSmiShiftSize == 0);
-  ASSERT(IsPowerOf2(String::kMaxOneByteCharCode + 1));
+  DCHECK(IsPowerOf2(String::kMaxOneByteCharCode + 1));
   __ And(t0,
          code_,
          Operand(kSmiTagMask |
@@ -3701,7 +3701,7 @@ void StringCompareStub::GenerateFlatAsciiStringEquals(MacroAssembler* masm,
   __ lw(scratch2, FieldMemOperand(right, String::kLengthOffset));
   __ Branch(&check_zero_length, eq, length, Operand(scratch2));
   __ bind(&strings_not_equal);
-  ASSERT(is_int16(NOT_EQUAL));
+  DCHECK(is_int16(NOT_EQUAL));
   __ Ret(USE_DELAY_SLOT);
   __ li(v0, Operand(Smi::FromInt(NOT_EQUAL)));

@@ -3710,7 +3710,7 @@ void StringCompareStub::GenerateFlatAsciiStringEquals(MacroAssembler* masm,
   __ bind(&check_zero_length);
   STATIC_ASSERT(kSmiTag == 0);
   __ Branch(&compare_chars, ne, length, Operand(zero_reg));
-  ASSERT(is_int16(EQUAL));
+  DCHECK(is_int16(EQUAL));
   __ Ret(USE_DELAY_SLOT);
   __ li(v0, Operand(Smi::FromInt(EQUAL)));

@@ -3753,7 +3753,7 @@ void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,

   // Compare lengths - strings up to min-length are equal.
   __ bind(&compare_lengths);
-  ASSERT(Smi::FromInt(EQUAL) == static_cast<Smi*>(0));
+  DCHECK(Smi::FromInt(EQUAL) == static_cast<Smi*>(0));
   // Use length_delta as result if it's zero.
   __ mov(scratch2, length_delta);
   __ mov(scratch4, zero_reg);
@@ -3869,7 +3869,7 @@ void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {


 void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
-  ASSERT(state_ == CompareIC::SMI);
+  DCHECK(state_ == CompareIC::SMI);
   Label miss;
   __ Or(a2, a1, a0);
   __ JumpIfNotSmi(a2, &miss);
@@ -3892,7 +3892,7 @@ void ICCompareStub::GenerateSmis(MacroAssembler* masm) {


 void ICCompareStub::GenerateNumbers(MacroAssembler* masm) {
-  ASSERT(state_ == CompareIC::NUMBER);
+  DCHECK(state_ == CompareIC::NUMBER);

   Label generic_stub;
   Label unordered, maybe_undefined1, maybe_undefined2;
@@ -3945,7 +3945,7 @@ void ICCompareStub::GenerateNumbers(MacroAssembler* masm) {
   __ BranchF(&fpu_lt, NULL, lt, f0, f2);

   // Otherwise it's greater, so just fall thru, and return.
-  ASSERT(is_int16(GREATER) && is_int16(EQUAL) && is_int16(LESS));
+  DCHECK(is_int16(GREATER) && is_int16(EQUAL) && is_int16(LESS));
   __ Ret(USE_DELAY_SLOT);
   __ li(v0, Operand(GREATER));

@@ -3985,7 +3985,7 @@ void ICCompareStub::GenerateNumbers(MacroAssembler* masm) {


 void ICCompareStub::GenerateInternalizedStrings(MacroAssembler* masm) {
-  ASSERT(state_ == CompareIC::INTERNALIZED_STRING);
+  DCHECK(state_ == CompareIC::INTERNALIZED_STRING);
   Label miss;

   // Registers containing left and right operands respectively.
@@ -4009,13 +4009,13 @@ void ICCompareStub::GenerateInternalizedStrings(MacroAssembler* masm) {

   // Make sure a0 is non-zero. At this point input operands are
   // guaranteed to be non-zero.
-  ASSERT(right.is(a0));
+  DCHECK(right.is(a0));
   STATIC_ASSERT(EQUAL == 0);
   STATIC_ASSERT(kSmiTag == 0);
   __ mov(v0, right);
   // Internalized strings are compared by identity.
   __ Ret(ne, left, Operand(right));
-  ASSERT(is_int16(EQUAL));
+  DCHECK(is_int16(EQUAL));
   __ Ret(USE_DELAY_SLOT);
   __ li(v0, Operand(Smi::FromInt(EQUAL)));

@@ -4025,8 +4025,8 @@ void ICCompareStub::GenerateInternalizedStrings(MacroAssembler* masm) {


 void ICCompareStub::GenerateUniqueNames(MacroAssembler* masm) {
-  ASSERT(state_ == CompareIC::UNIQUE_NAME);
-  ASSERT(GetCondition() == eq);
+  DCHECK(state_ == CompareIC::UNIQUE_NAME);
+  DCHECK(GetCondition() == eq);
   Label miss;

   // Registers containing left and right operands respectively.
@@ -4056,7 +4056,7 @@ void ICCompareStub::GenerateUniqueNames(MacroAssembler* masm) {
   __ Branch(&done, ne, left, Operand(right));
   // Make sure a0 is non-zero. At this point input operands are
   // guaranteed to be non-zero.
-  ASSERT(right.is(a0));
+  DCHECK(right.is(a0));
   STATIC_ASSERT(EQUAL == 0);
   STATIC_ASSERT(kSmiTag == 0);
   __ li(v0, Operand(Smi::FromInt(EQUAL)));
@@ -4069,7 +4069,7 @@ void ICCompareStub::GenerateUniqueNames(MacroAssembler* masm) {


 void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
-  ASSERT(state_ == CompareIC::STRING);
+  DCHECK(state_ == CompareIC::STRING);
   Label miss;

   bool equality = Token::IsEqualityOp(op_);
@@ -4112,7 +4112,7 @@ void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
   // because we already know they are not identical. We know they are both
   // strings.
   if (equality) {
-    ASSERT(GetCondition() == eq);
+    DCHECK(GetCondition() == eq);
     STATIC_ASSERT(kInternalizedTag == 0);
     __ Or(tmp3, tmp1, Operand(tmp2));
     __ And(tmp5, tmp3, Operand(kIsNotInternalizedMask));
@@ -4120,7 +4120,7 @@ void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
     __ Branch(&is_symbol, ne, tmp5, Operand(zero_reg));
     // Make sure a0 is non-zero. At this point input operands are
     // guaranteed to be non-zero.
-    ASSERT(right.is(a0));
+    DCHECK(right.is(a0));
     __ Ret(USE_DELAY_SLOT);
     __ mov(v0, a0);  // In the delay slot.
     __ bind(&is_symbol);
@@ -4155,7 +4155,7 @@ void ICCompareStub::GenerateStrings(MacroAssembler* masm) {


 void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
-  ASSERT(state_ == CompareIC::OBJECT);
+  DCHECK(state_ == CompareIC::OBJECT);
   Label miss;
   __ And(a2, a1, Operand(a0));
   __ JumpIfSmi(a2, &miss);
@@ -4165,7 +4165,7 @@ void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
   __ GetObjectType(a1, a2, a2);
   __ Branch(&miss, ne, a2, Operand(JS_OBJECT_TYPE));

-  ASSERT(GetCondition() == eq);
+  DCHECK(GetCondition() == eq);
   __ Ret(USE_DELAY_SLOT);
   __ subu(v0, a0, a1);

@@ -4254,7 +4254,7 @@ void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
                                                       Register properties,
                                                       Handle<Name> name,
                                                       Register scratch0) {
-  ASSERT(name->IsUniqueName());
+  DCHECK(name->IsUniqueName());
   // If names of slots in range from 1 to kProbes - 1 for the hash value are
   // not equal to the name and kProbes-th slot is not used (its name is the
   // undefined value), it guarantees the hash table doesn't contain the
@@ -4271,19 +4271,19 @@ void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
         Smi::FromInt(name->Hash() + NameDictionary::GetProbeOffset(i))));

     // Scale the index by multiplying by the entry size.
-    ASSERT(NameDictionary::kEntrySize == 3);
+    DCHECK(NameDictionary::kEntrySize == 3);
     __ sll(at, index, 1);
     __ Addu(index, index, at);

     Register entity_name = scratch0;
     // Having undefined at this place means the name is not contained.
-    ASSERT_EQ(kSmiTagSize, 1);
+    DCHECK_EQ(kSmiTagSize, 1);
     Register tmp = properties;
     __ sll(scratch0, index, 1);
     __ Addu(tmp, properties, scratch0);
     __ lw(entity_name, FieldMemOperand(tmp, kElementsStartOffset));

-    ASSERT(!tmp.is(entity_name));
+    DCHECK(!tmp.is(entity_name));
     __ LoadRoot(tmp, Heap::kUndefinedValueRootIndex);
     __ Branch(done, eq, entity_name, Operand(tmp));

@@ -4336,10 +4336,10 @@ void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
                                                       Register name,
                                                       Register scratch1,
                                                       Register scratch2) {
-  ASSERT(!elements.is(scratch1));
-  ASSERT(!elements.is(scratch2));
-  ASSERT(!name.is(scratch1));
-  ASSERT(!name.is(scratch2));
+  DCHECK(!elements.is(scratch1));
+  DCHECK(!elements.is(scratch2));
+  DCHECK(!name.is(scratch1));
+  DCHECK(!name.is(scratch2));

   __ AssertName(name);

@@ -4358,7 +4358,7 @@ void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
       // Add the probe offset (i + i * i) left shifted to avoid right shifting
       // the hash in a separate instruction. The value hash + i + i * i is right
      // shifted in the following and instruction.
-      ASSERT(NameDictionary::GetProbeOffset(i) <
+      DCHECK(NameDictionary::GetProbeOffset(i) <
              1 << (32 - Name::kHashFieldOffset));
       __ Addu(scratch2, scratch2, Operand(
           NameDictionary::GetProbeOffset(i) << Name::kHashShift));
@@ -4367,7 +4367,7 @@ void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
     __ And(scratch2, scratch1, scratch2);

     // Scale the index by multiplying by the element size.
-    ASSERT(NameDictionary::kEntrySize == 3);
+    DCHECK(NameDictionary::kEntrySize == 3);
     // scratch2 = scratch2 * 3.

     __ sll(at, scratch2, 1);
@@ -4387,7 +4387,7 @@ void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,

   __ MultiPush(spill_mask);
   if (name.is(a0)) {
-    ASSERT(!elements.is(a1));
+    DCHECK(!elements.is(a1));
     __ Move(a1, name);
     __ Move(a0, elements);
   } else {
@@ -4443,7 +4443,7 @@ void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
      // Add the probe offset (i + i * i) left shifted to avoid right shifting
      // the hash in a separate instruction. The value hash + i + i * i is right
      // shifted in the following and instruction.
-      ASSERT(NameDictionary::GetProbeOffset(i) <
+      DCHECK(NameDictionary::GetProbeOffset(i) <
              1 << (32 - Name::kHashFieldOffset));
       __ Addu(index, hash, Operand(
           NameDictionary::GetProbeOffset(i) << Name::kHashShift));
@@ -4454,14 +4454,14 @@ void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
     __ And(index, mask, index);

     // Scale the index by multiplying by the entry size.
-    ASSERT(NameDictionary::kEntrySize == 3);
+    DCHECK(NameDictionary::kEntrySize == 3);
     // index *= 3.
     __ mov(at, index);
     __ sll(index, index, 1);
     __ Addu(index, index, at);


-    ASSERT_EQ(kSmiTagSize, 1);
+    DCHECK_EQ(kSmiTagSize, 1);
     __ sll(index, index, 2);
     __ Addu(index, index, dictionary);
     __ lw(entry_key, FieldMemOperand(index, kElementsStartOffset));
@@ -4598,8 +4598,8 @@ void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
   __ PrepareCallCFunction(argument_count, regs_.scratch0());
   Register address =
       a0.is(regs_.address()) ? regs_.scratch0() : regs_.address();
-  ASSERT(!address.is(regs_.object()));
-  ASSERT(!address.is(a0));
+  DCHECK(!address.is(regs_.object()));
+  DCHECK(!address.is(a0));
   __ Move(address, regs_.address());
   __ Move(a0, regs_.object());
   __ Move(a1, address);
@@ -4820,7 +4820,7 @@ void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
   int frame_alignment = masm->ActivationFrameAlignment();
   if (frame_alignment > kPointerSize) {
     __ mov(s5, sp);
-    ASSERT(IsPowerOf2(frame_alignment));
+    DCHECK(IsPowerOf2(frame_alignment));
     __ And(sp, sp, Operand(-frame_alignment));
   }
   __ Subu(sp, sp, kCArgsSlotsSize);
@@ -4887,12 +4887,12 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
   // sp[0] - last argument
   Label normal_sequence;
   if (mode == DONT_OVERRIDE) {
-    ASSERT(FAST_SMI_ELEMENTS == 0);
-    ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
-    ASSERT(FAST_ELEMENTS == 2);
-    ASSERT(FAST_HOLEY_ELEMENTS == 3);
-    ASSERT(FAST_DOUBLE_ELEMENTS == 4);
-    ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5);
+    DCHECK(FAST_SMI_ELEMENTS == 0);
+    DCHECK(FAST_HOLEY_SMI_ELEMENTS == 1);
+    DCHECK(FAST_ELEMENTS == 2);
+    DCHECK(FAST_HOLEY_ELEMENTS == 3);
+    DCHECK(FAST_DOUBLE_ELEMENTS == 4);
+    DCHECK(FAST_HOLEY_DOUBLE_ELEMENTS == 5);

     // is the low bit set? If so, we are holey and that is good.
     __ And(at, a3, Operand(1));
@@ -5200,7 +5200,7 @@ void CallApiFunctionStub::Generate(MacroAssembler* masm) {
   FrameScope frame_scope(masm, StackFrame::MANUAL);
   __ EnterExitFrame(false, kApiStackSpace);

-  ASSERT(!api_function_address.is(a0) && !scratch.is(a0));
+  DCHECK(!api_function_address.is(a0) && !scratch.is(a0));
   // a0 = FunctionCallbackInfo&
   // Arguments is after the return address.
   __ Addu(a0, sp, Operand(1 * kPointerSize));
|