Chromium Code Reviews

Unified Diff: src/arm/codegen-arm.cc

Issue 3017018: Change asserts to STATIC_ASSERT if they can be checked at compilation time. ... (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: '' Created 10 years, 5 months ago
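
Every hunk below is the same mechanical change: an assertion whose condition is a compile-time constant is moved from the run-time ASSERT macro to STATIC_ASSERT, so a violation breaks the build instead of only firing when the code path executes in a debug build. A minimal sketch of the distinction, assuming nothing about V8's actual macro bodies (the SKETCH_* names below are stand-ins, not V8 identifiers):

// Sketch only -- V8's own macro definitions differ in detail.
#include <cstdlib>

// Run-time assert: evaluated when (and only when) the statement executes.
#define SKETCH_ASSERT(cond) \
  do { if (!(cond)) std::abort(); } while (false)

// Compile-time assert: an array of negative size is ill-formed, so a false
// constant expression stops compilation (C++11 would use static_assert).
#define SKETCH_STATIC_ASSERT(cond) \
  typedef char StaticAssertSketch[(cond) ? 1 : -1]

const int kSmiTag = 0;                  // value taken from the asserts in this patch
SKETCH_STATIC_ASSERT(kSmiTag == 0);     // verified by the compiler, no run-time cost

void Example(int runtime_value) {
  SKETCH_ASSERT(runtime_value == 0);    // caught only when this statement actually executes
}
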
Index: src/arm/codegen-arm.cc
===================================================================
--- src/arm/codegen-arm.cc (revision 5120)
+++ src/arm/codegen-arm.cc (working copy)
@@ -815,7 +815,7 @@
// Check they are both small and positive.
__ tst(scratch, Operand(kSmiTagMask | 0xc0000000));
ASSERT(rhs.is(r0) || lhs.is(r0)); // r0 is free now.
- ASSERT_EQ(0, kSmiTag);
+ STATIC_ASSERT(kSmiTag == 0);
if (op == Token::ADD) {
__ add(r0, lhs, Operand(rhs), LeaveCC, eq);
} else {
@@ -863,7 +863,7 @@
__ and_(r0, lhs, Operand(rhs), LeaveCC, cond);
} else {
ASSERT(op == Token::BIT_XOR);
- ASSERT_EQ(0, kSmiTag);
+ STATIC_ASSERT(kSmiTag == 0);
__ eor(r0, lhs, Operand(rhs), LeaveCC, cond);
}
if (cond != al) {
@@ -1520,8 +1520,8 @@
// JS_FUNCTION_TYPE is the last instance type and it is right
// after LAST_JS_OBJECT_TYPE, we do not have to check the upper
// bound.
- ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
- ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
+ STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
+ STATIC_ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
__ CompareObjectType(receiver_reg, r2, r3, FIRST_JS_OBJECT_TYPE);
__ b(lt, &build_args);
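
The two STATIC_ASSERTs just above encode the range argument in the comment: JS_FUNCTION_TYPE sits immediately after LAST_JS_OBJECT_TYPE and is also the last instance type overall, so a single lower-bound compare against FIRST_JS_OBJECT_TYPE suffices. An illustration with made-up enumerator values (the real InstanceType enum lives in objects.h; the _SKETCH names are hypothetical):

// Illustration only; the numeric values are invented.
enum InstanceTypeSketch {
  FIRST_JS_OBJECT_TYPE_SKETCH = 0xa0,
  LAST_JS_OBJECT_TYPE_SKETCH  = 0xae,
  JS_FUNCTION_TYPE_SKETCH     = LAST_JS_OBJECT_TYPE_SKETCH + 1,  // asserted above
  LAST_TYPE_SKETCH            = JS_FUNCTION_TYPE_SKETCH          // asserted above
};

// With that ordering, type >= FIRST_JS_OBJECT_TYPE already implies
// type <= LAST_TYPE, so the generated code only branches away on "lt".
inline bool IsJSObjectOrFunction(int type) {
  return type >= FIRST_JS_OBJECT_TYPE_SKETCH;
}
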
@@ -2610,7 +2610,7 @@
// The next handler address is on top of the frame. Unlink from
// the handler list and drop the rest of this handler from the
// frame.
- ASSERT(StackHandlerConstants::kNextOffset == 0);
+ STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
frame_->EmitPop(r1);
__ mov(r3, Operand(handler_address));
__ str(r1, MemOperand(r3));
@@ -2636,7 +2636,7 @@
__ ldr(sp, MemOperand(r3));
frame_->Forget(frame_->height() - handler_height);
- ASSERT(StackHandlerConstants::kNextOffset == 0);
+ STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
frame_->EmitPop(r1);
__ str(r1, MemOperand(r3));
frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
@@ -2723,7 +2723,7 @@
// chain and set the state on the frame to FALLING.
if (has_valid_frame()) {
// The next handler address is on top of the frame.
- ASSERT(StackHandlerConstants::kNextOffset == 0);
+ STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
frame_->EmitPop(r1);
__ mov(r3, Operand(handler_address));
__ str(r1, MemOperand(r3));
@@ -2762,7 +2762,7 @@
// Unlink this handler and drop it from the frame. The next
// handler address is currently on top of the frame.
- ASSERT(StackHandlerConstants::kNextOffset == 0);
+ STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
frame_->EmitPop(r1);
__ str(r1, MemOperand(r3));
frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
@@ -4181,8 +4181,8 @@
// As long as JS_FUNCTION_TYPE is the last instance type and it is
// right after LAST_JS_OBJECT_TYPE, we can avoid checking for
// LAST_JS_OBJECT_TYPE.
- ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
- ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
+ STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
+ STATIC_ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
__ cmp(r1, Operand(JS_FUNCTION_TYPE));
function.Branch(eq);
@@ -5128,7 +5128,7 @@
const int kFingerOffset =
FixedArray::OffsetOfElementAt(JSFunctionResultCache::kFingerIndex);
- ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
+ STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
__ ldr(r0, FieldMemOperand(r1, kFingerOffset));
// r0 now holds finger offset as a smi.
__ add(r3, r1, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
@@ -6940,7 +6940,7 @@
// Move sign bit from source to destination. This works because the sign bit
// in the exponent word of the double has the same position and polarity as
// the 2's complement sign bit in a Smi.
- ASSERT(HeapNumber::kSignMask == 0x80000000u);
+ STATIC_ASSERT(HeapNumber::kSignMask == 0x80000000u);
__ and_(exponent, source_, Operand(HeapNumber::kSignMask), SetCC);
// Subtract from 0 if source was negative.
__ rsb(source_, source_, Operand(0), LeaveCC, ne);
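
The comment in this hunk relies on bit 31 being the sign bit in both representations: a two's-complement int32 and the upper (exponent) word of an IEEE-754 double. A standalone illustration of that extraction, not the stub itself (kSignMask's value is quoted from the assert above; everything else is made up):

#include <assert.h>
#include <stdint.h>

int main() {
  const uint32_t kSignMask = 0x80000000u;  // HeapNumber::kSignMask, per the assert
  int32_t source = -42;                    // any int32 except 0x80000000, which the
                                           // next hunk special-cases as max_negative_int

  // "Move sign bit from source to destination": bit 31 of the int32 is the same
  // bit the double's high word uses for its sign.
  uint32_t sign = (uint32_t)source & kSignMask;

  // "Subtract from 0 if source was negative": the mantissa is built from the magnitude.
  uint32_t magnitude = sign ? (uint32_t)(-source) : (uint32_t)source;

  assert(sign == kSignMask);
  assert(magnitude == 42u);
  return 0;
}
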
@@ -6993,7 +6993,7 @@
// the_int_ has the answer which is a signed int32 but not a Smi.
// We test for the special value that has a different exponent. This test
// has the neat side effect of setting the flags according to the sign.
- ASSERT(HeapNumber::kSignMask == 0x80000000u);
+ STATIC_ASSERT(HeapNumber::kSignMask == 0x80000000u);
__ cmp(the_int_, Operand(0x80000000u));
__ b(eq, &max_negative_int);
// Set up the correct exponent in scratch_. All non-Smi int32s have the same.
@@ -7338,7 +7338,7 @@
// If either operand is a JSObject or an oddball value, then they are
// not equal since their pointers are different.
// There is no test for undetectability in strict equality.
- ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
+ STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
Label first_non_object;
// Get the type of the first operand into r2 and compare it with
// FIRST_JS_OBJECT_TYPE.
@@ -7364,8 +7364,8 @@
// Now that we have the types we might as well check for symbol-symbol.
// Ensure that no non-strings have the symbol bit set.
- ASSERT(kNotStringTag + kIsSymbolMask > LAST_TYPE);
- ASSERT(kSymbolTag != 0);
+ STATIC_ASSERT(LAST_TYPE < kNotStringTag + kIsSymbolMask);
+ STATIC_ASSERT(kSymbolTag != 0);
__ and_(r2, r2, Operand(r3));
__ tst(r2, Operand(kIsSymbolMask));
__ b(ne, &return_not_equal);
@@ -7416,7 +7416,7 @@
// r2 is object type of rhs.
// Ensure that no non-strings have the symbol bit set.
Label object_test;
- ASSERT(kSymbolTag != 0);
+ STATIC_ASSERT(kSymbolTag != 0);
__ tst(r2, Operand(kIsNotStringMask));
__ b(ne, &object_test);
__ tst(r2, Operand(kIsSymbolMask));
@@ -7487,7 +7487,7 @@
not_found,
true);
- ASSERT_EQ(8, kDoubleSize);
+ STATIC_ASSERT(8 == kDoubleSize);
__ add(scratch1,
object,
Operand(HeapNumber::kValueOffset - kHeapObjectTag));
@@ -7586,7 +7586,7 @@
// If either is a Smi (we know that not both are), then they can only
// be strictly equal if the other is a HeapNumber.
- ASSERT_EQ(0, kSmiTag);
+ STATIC_ASSERT(kSmiTag == 0);
ASSERT_EQ(0, Smi::FromInt(0));
__ and_(r2, lhs_, Operand(rhs_));
__ tst(r2, Operand(kSmiTagMask));
@@ -8589,7 +8589,7 @@
Label not_smi;
// Fast path.
if (ShouldGenerateSmiCode()) {
- ASSERT(kSmiTag == 0); // Adjust code below.
+ STATIC_ASSERT(kSmiTag == 0); // Adjust code below.
__ tst(smi_test_reg, Operand(kSmiTagMask));
__ b(ne, &not_smi);
__ add(r0, r1, Operand(r0), SetCC); // Add y optimistically.
@@ -8605,7 +8605,7 @@
Label not_smi;
// Fast path.
if (ShouldGenerateSmiCode()) {
- ASSERT(kSmiTag == 0); // Adjust code below.
+ STATIC_ASSERT(kSmiTag == 0); // Adjust code below.
__ tst(smi_test_reg, Operand(kSmiTagMask));
__ b(ne, &not_smi);
if (lhs.is(r1)) {
@@ -8627,7 +8627,7 @@
case Token::MUL: {
Label not_smi, slow;
if (ShouldGenerateSmiCode()) {
- ASSERT(kSmiTag == 0); // adjust code below
+ STATIC_ASSERT(kSmiTag == 0); // adjust code below
__ tst(smi_test_reg, Operand(kSmiTagMask));
Register scratch2 = smi_test_reg;
smi_test_reg = no_reg;
@@ -8763,7 +8763,7 @@
Label slow;
Label not_power_of_2;
ASSERT(!ShouldGenerateSmiCode());
- ASSERT(kSmiTag == 0); // Adjust code below.
+ STATIC_ASSERT(kSmiTag == 0); // Adjust code below.
// Check for two positive smis.
__ orr(smi_test_reg, lhs, Operand(rhs));
__ tst(smi_test_reg, Operand(0x80000000u | kSmiTagMask));
@@ -8823,7 +8823,7 @@
case Token::SHR:
case Token::SHL: {
Label slow;
- ASSERT(kSmiTag == 0); // adjust code below
+ STATIC_ASSERT(kSmiTag == 0); // adjust code below
__ tst(smi_test_reg, Operand(kSmiTagMask));
__ b(ne, &slow);
Register scratch2 = smi_test_reg;
@@ -9137,17 +9137,17 @@
// r0 holds the exception.
// Adjust this code if not the case.
- ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
// Drop the sp to the top of the handler.
__ mov(r3, Operand(ExternalReference(Top::k_handler_address)));
__ ldr(sp, MemOperand(r3));
// Restore the next handler and frame pointer, discard handler state.
- ASSERT(StackHandlerConstants::kNextOffset == 0);
+ STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
__ pop(r2);
__ str(r2, MemOperand(r3));
- ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize);
__ ldm(ia_w, sp, r3.bit() | fp.bit()); // r3: discarded state.
// Before returning we restore the context from the frame pointer if
@@ -9163,7 +9163,7 @@
__ mov(lr, Operand(pc));
}
#endif
- ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
__ pop(pc);
}
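
The offsets asserted in the two hunks above pin down the handler frame layout this unwinding code depends on: the next-handler link at offset 0, the saved fp two words up, the saved pc three words up, and a total size of four words; the word at offset 1 is the handler state that the ldm pops into a scratch register and discards ("r3: discarded state"). A sketch of that layout, as an illustration rather than a V8 declaration (kPointerSize is 4 on 32-bit ARM):

// Illustration of the asserted layout; not a struct that exists in V8.
struct StackHandlerSketch {
  void* next;   // 0 * kPointerSize  (StackHandlerConstants::kNextOffset == 0)
  void* state;  // 1 * kPointerSize  (popped and discarded during unwinding)
  void* fp;     // 2 * kPointerSize  (StackHandlerConstants::kFPOffset)
  void* pc;     // 3 * kPointerSize  (StackHandlerConstants::kPCOffset)
};              // sizeof == 4 * kPointerSize (StackHandlerConstants::kSize)
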
@@ -9171,7 +9171,7 @@
void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm,
UncatchableExceptionType type) {
// Adjust this code if not the case.
- ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
// Drop sp to the top stack handler.
__ mov(r3, Operand(ExternalReference(Top::k_handler_address)));
@@ -9192,7 +9192,7 @@
__ bind(&done);
// Set the top handler address to next handler past the current ENTRY handler.
- ASSERT(StackHandlerConstants::kNextOffset == 0);
+ STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
__ pop(r2);
__ str(r2, MemOperand(r3));
@@ -9216,7 +9216,7 @@
// lr
// Discard handler state (r2 is not used) and restore frame pointer.
- ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize);
__ ldm(ia_w, sp, r2.bit() | fp.bit()); // r2: discarded state.
// Before returning we restore the context from the frame pointer if
// not NULL. The frame pointer is NULL in the exception handler of a
@@ -9231,7 +9231,7 @@
__ mov(lr, Operand(pc));
}
#endif
- ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
__ pop(pc);
}
@@ -9326,7 +9326,7 @@
// check for failure result
Label failure_returned;
- ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0);
+ STATIC_ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0);
// Lower 2 bits of r2 are 0 iff r0 has failure tag.
__ add(r2, r0, Operand(1));
__ tst(r2, Operand(kFailureTagMask));
@@ -9341,7 +9341,7 @@
// check if we should retry or throw exception
Label retry;
__ bind(&failure_returned);
- ASSERT(Failure::RETRY_AFTER_GC == 0);
+ STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0);
__ tst(r0, Operand(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
__ b(eq, &retry);
@@ -9744,12 +9744,12 @@
}
// Setup the callee in-object property.
- ASSERT(Heap::arguments_callee_index == 0);
+ STATIC_ASSERT(Heap::arguments_callee_index == 0);
__ ldr(r3, MemOperand(sp, 2 * kPointerSize));
__ str(r3, FieldMemOperand(r0, JSObject::kHeaderSize));
// Get the length (smi tagged) and set that as an in-object property too.
- ASSERT(Heap::arguments_length_index == 1);
+ STATIC_ASSERT(Heap::arguments_length_index == 1);
__ ldr(r1, MemOperand(sp, 0 * kPointerSize));
__ str(r1, FieldMemOperand(r0, JSObject::kHeaderSize + kPointerSize));
@@ -9841,7 +9841,7 @@
// Check that the first argument is a JSRegExp object.
__ ldr(r0, MemOperand(sp, kJSRegExpOffset));
- ASSERT_EQ(0, kSmiTag);
+ STATIC_ASSERT(kSmiTag == 0);
__ tst(r0, Operand(kSmiTagMask));
__ b(eq, &runtime);
__ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
@@ -9868,8 +9868,8 @@
FieldMemOperand(regexp_data, JSRegExp::kIrregexpCaptureCountOffset));
// Calculate number of capture registers (number_of_captures + 1) * 2. This
// uses the assumption that smis are 2 * their untagged value.
- ASSERT_EQ(0, kSmiTag);
- ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
+ STATIC_ASSERT(kSmiTag == 0);
+ STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
__ add(r2, r2, Operand(2)); // r2 was a smi.
// Check that the static offsets vector buffer is large enough.
__ cmp(r2, Operand(OffsetsVector::kStaticOffsetsVectorSize));
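
The add of the constant 2 in this hunk works because, as the two STATIC_ASSERTs state, kSmiTag == 0 and kSmiTagSize + kSmiShiftSize == 1, so a tagged smi is exactly twice its untagged value and smi + 2 equals (number_of_captures + 1) * 2. A minimal standalone illustration (not V8 code; the helper names are invented):

#include <assert.h>
#include <stdint.h>

// With kSmiTag == 0 and a one-bit tag, tagging is a left shift by one.
int32_t SmiTag(int32_t n)   { return n << 1; }
int32_t SmiUntag(int32_t s) { return s >> 1; }  // arithmetic shift right un-smi-tags

int main() {
  int32_t number_of_captures = 3;            // hypothetical capture count
  int32_t smi = SmiTag(number_of_captures);  // what the stub loads from the JSRegExp data
  int32_t capture_registers = smi + 2;       // the "__ add(r2, r2, Operand(2))" step
  assert(capture_registers == (number_of_captures + 1) * 2);
  assert(SmiUntag(smi) == number_of_captures);
  return 0;
}
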
@@ -9930,7 +9930,7 @@
__ ldrb(r0, FieldMemOperand(r0, Map::kInstanceTypeOffset));
// First check for flat string.
__ tst(r0, Operand(kIsNotStringMask | kStringRepresentationMask));
- ASSERT_EQ(0, kStringTag | kSeqStringTag);
+ STATIC_ASSERT((kStringTag | kSeqStringTag) == 0);
__ b(eq, &seq_string);
// subject: Subject string
@@ -9940,8 +9940,8 @@
// string. In that case the subject string is just the first part of the cons
// string. Also in this case the first part of the cons string is known to be
// a sequential string or an external string.
- ASSERT(kExternalStringTag !=0);
- ASSERT_EQ(0, kConsStringTag & kExternalStringTag);
+ STATIC_ASSERT(kExternalStringTag !=0);
+ STATIC_ASSERT((kConsStringTag & kExternalStringTag) == 0);
__ tst(r0, Operand(kIsNotStringMask | kExternalStringTag));
__ b(ne, &runtime);
__ ldr(r0, FieldMemOperand(subject, ConsString::kSecondOffset));
@@ -9952,7 +9952,7 @@
__ ldr(r0, FieldMemOperand(subject, HeapObject::kMapOffset));
__ ldrb(r0, FieldMemOperand(r0, Map::kInstanceTypeOffset));
// Is first part a flat string?
- ASSERT_EQ(0, kSeqStringTag);
+ STATIC_ASSERT(kSeqStringTag == 0);
__ tst(r0, Operand(kStringRepresentationMask));
__ b(nz, &runtime);
@@ -9960,8 +9960,8 @@
// subject: Subject string
// regexp_data: RegExp data (FixedArray)
// r0: Instance type of subject string
- ASSERT_EQ(4, kAsciiStringTag);
- ASSERT_EQ(0, kTwoByteStringTag);
+ STATIC_ASSERT(4 == kAsciiStringTag);
+ STATIC_ASSERT(kTwoByteStringTag == 0);
// Find the code object based on the assumptions above.
__ and_(r0, r0, Operand(kStringEncodingMask));
__ mov(r3, Operand(r0, ASR, 2), SetCC);
@@ -10015,7 +10015,7 @@
// calculate the shift of the index (0 for ASCII and 1 for two byte).
__ ldr(r0, FieldMemOperand(subject, String::kLengthOffset));
__ mov(r0, Operand(r0, ASR, kSmiTagSize));
- ASSERT_EQ(SeqAsciiString::kHeaderSize, SeqTwoByteString::kHeaderSize);
+ STATIC_ASSERT(SeqAsciiString::kHeaderSize == SeqTwoByteString::kHeaderSize);
__ add(r9, subject, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
__ eor(r3, r3, Operand(1));
// Argument 4 (r3): End of string data
@@ -10070,8 +10070,8 @@
__ ldr(r1,
FieldMemOperand(regexp_data, JSRegExp::kIrregexpCaptureCountOffset));
// Calculate number of capture registers (number_of_captures + 1) * 2.
- ASSERT_EQ(0, kSmiTag);
- ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
+ STATIC_ASSERT(kSmiTag == 0);
+ STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
__ add(r1, r1, Operand(2)); // r1 was a smi.
// r1: number of capture registers
@@ -10283,7 +10283,7 @@
__ b(ls, index_out_of_range_);
// We need special handling for non-flat strings.
- ASSERT(kSeqStringTag == 0);
+ STATIC_ASSERT(kSeqStringTag == 0);
__ tst(result_, Operand(kStringRepresentationMask));
__ b(eq, &flat_string);
@@ -10305,13 +10305,13 @@
__ ldr(result_, FieldMemOperand(object_, HeapObject::kMapOffset));
__ ldrb(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset));
// If the first cons component is also non-flat, then go to runtime.
- ASSERT(kSeqStringTag == 0);
+ STATIC_ASSERT(kSeqStringTag == 0);
__ tst(result_, Operand(kStringRepresentationMask));
__ b(nz, &call_runtime_);
// Check for 1-byte or 2-byte string.
__ bind(&flat_string);
- ASSERT(kAsciiStringTag != 0);
+ STATIC_ASSERT(kAsciiStringTag != 0);
__ tst(result_, Operand(kStringEncodingMask));
__ b(nz, &ascii_string);
@@ -10319,7 +10319,7 @@
// Load the 2-byte character code into the result register. We can
// add without shifting since the smi tag size is the log2 of the
// number of bytes in a two-byte character.
- ASSERT(kSmiTag == 0 && kSmiTagSize == 1 && kSmiShiftSize == 0);
+ STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1 && kSmiShiftSize == 0);
__ add(scratch_, object_, Operand(scratch_));
__ ldrh(result_, FieldMemOperand(scratch_, SeqTwoByteString::kHeaderSize));
__ jmp(&got_char_code);
@@ -10396,8 +10396,8 @@
void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
// Fast case of Heap::LookupSingleCharacterStringFromCode.
- ASSERT(kSmiTag == 0);
- ASSERT(kSmiShiftSize == 0);
+ STATIC_ASSERT(kSmiTag == 0);
+ STATIC_ASSERT(kSmiShiftSize == 0);
ASSERT(IsPowerOf2(String::kMaxAsciiCharCode + 1));
__ tst(code_,
Operand(kSmiTagMask |
@@ -10406,7 +10406,7 @@
__ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
// At this point code register contains smi tagged ascii char code.
- ASSERT(kSmiTag == 0);
+ STATIC_ASSERT(kSmiTag == 0);
__ add(result_, result_, Operand(code_, LSL, kPointerSizeLog2 - kSmiTagSize));
__ ldr(result_, FieldMemOperand(result_, FixedArray::kHeaderSize));
__ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
@@ -10511,7 +10511,7 @@
// Ensure that reading an entire aligned word containing the last character
// of a string will not read outside the allocated area (because we pad up
// to kObjectAlignment).
- ASSERT(kObjectAlignment >= kReadAlignment);
+ STATIC_ASSERT(kObjectAlignment >= kReadAlignment);
// Assumes word reads and writes are little endian.
// Nothing to do for zero characters.
Label done;
@@ -10715,7 +10715,7 @@
__ and_(candidate, candidate, Operand(mask));
// Load the entry from the symbol table.
- ASSERT_EQ(1, SymbolTable::kEntrySize);
+ STATIC_ASSERT(SymbolTable::kEntrySize == 1);
__ ldr(candidate,
MemOperand(first_symbol_table_element,
candidate,
@@ -10815,8 +10815,8 @@
// Check bounds and smi-ness.
__ ldr(r7, MemOperand(sp, kToOffset));
__ ldr(r6, MemOperand(sp, kFromOffset));
- ASSERT_EQ(0, kSmiTag);
- ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
+ STATIC_ASSERT(kSmiTag == 0);
+ STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
// I.e., arithmetic shift right by one un-smi-tags.
__ mov(r2, Operand(r7, ASR, 1), SetCC);
__ mov(r3, Operand(r6, ASR, 1), SetCC, cc);
@@ -10839,7 +10839,7 @@
// Make sure first argument is a sequential (or flat) string.
__ ldr(r5, MemOperand(sp, kStringOffset));
- ASSERT_EQ(0, kSmiTag);
+ STATIC_ASSERT(kSmiTag == 0);
__ tst(r5, Operand(kSmiTagMask));
__ b(eq, &runtime);
Condition is_string = masm->IsObjectStringType(r5, r1);
@@ -10853,8 +10853,8 @@
// r7: to (smi)
Label seq_string;
__ and_(r4, r1, Operand(kStringRepresentationMask));
- ASSERT(kSeqStringTag < kConsStringTag);
- ASSERT(kExternalStringTag > kConsStringTag);
+ STATIC_ASSERT(kSeqStringTag < kConsStringTag);
+ STATIC_ASSERT(kConsStringTag < kExternalStringTag);
__ cmp(r4, Operand(kConsStringTag));
__ b(gt, &runtime); // External strings go to runtime.
__ b(lt, &seq_string); // Sequential strings are handled directly.
@@ -10866,7 +10866,7 @@
__ ldr(r4, FieldMemOperand(r5, HeapObject::kMapOffset));
__ ldrb(r1, FieldMemOperand(r4, Map::kInstanceTypeOffset));
__ tst(r1, Operand(kStringRepresentationMask));
- ASSERT_EQ(0, kSeqStringTag);
+ STATIC_ASSERT(kSeqStringTag == 0);
__ b(ne, &runtime); // Cons and External strings go to runtime.
// Definitely a sequential string.
@@ -10890,7 +10890,7 @@
// Check for flat ascii string.
Label non_ascii_flat;
__ tst(r1, Operand(kStringEncodingMask));
- ASSERT_EQ(0, kTwoByteStringTag);
+ STATIC_ASSERT(kTwoByteStringTag == 0);
__ b(eq, &non_ascii_flat);
Label result_longer_than_two;
@@ -10939,7 +10939,7 @@
// r1: first character of result string.
// r2: result string length.
// r5: first character of sub string to copy.
- ASSERT_EQ(0, SeqAsciiString::kHeaderSize & kObjectAlignmentMask);
+ STATIC_ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
StringHelper::GenerateCopyCharactersLong(masm, r1, r5, r2, r3, r4, r6, r7, r9,
COPY_ASCII | DEST_ALWAYS_ALIGNED);
__ IncrementCounter(&Counters::sub_string_native, 1, r3, r4);
@@ -10970,7 +10970,7 @@
// r1: first character of result.
// r2: result length.
// r5: first character of string to copy.
- ASSERT_EQ(0, SeqTwoByteString::kHeaderSize & kObjectAlignmentMask);
+ STATIC_ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
StringHelper::GenerateCopyCharactersLong(masm, r1, r5, r2, r3, r4, r6, r7, r9,
DEST_ALWAYS_ALIGNED);
__ IncrementCounter(&Counters::sub_string_native, 1, r3, r4);
@@ -10998,7 +10998,7 @@
Register length_delta = scratch3;
__ mov(scratch1, scratch2, LeaveCC, gt);
Register min_length = scratch1;
- ASSERT(kSmiTag == 0);
+ STATIC_ASSERT(kSmiTag == 0);
__ tst(min_length, Operand(min_length));
__ b(eq, &compare_lengths);
@@ -11054,8 +11054,8 @@
Label not_same;
__ cmp(r0, r1);
__ b(ne, &not_same);
- ASSERT_EQ(0, EQUAL);
- ASSERT_EQ(0, kSmiTag);
+ STATIC_ASSERT(EQUAL == 0);
+ STATIC_ASSERT(kSmiTag == 0);
__ mov(r0, Operand(Smi::FromInt(EQUAL)));
__ IncrementCounter(&Counters::string_compare_native, 1, r1, r2);
__ add(sp, sp, Operand(2 * kPointerSize));
@@ -11090,14 +11090,14 @@
// Make sure that both arguments are strings if not known in advance.
if (string_check_) {
- ASSERT_EQ(0, kSmiTag);
+ STATIC_ASSERT(kSmiTag == 0);
__ JumpIfEitherSmi(r0, r1, &string_add_runtime);
// Load instance types.
__ ldr(r4, FieldMemOperand(r0, HeapObject::kMapOffset));
__ ldr(r5, FieldMemOperand(r1, HeapObject::kMapOffset));
__ ldrb(r4, FieldMemOperand(r4, Map::kInstanceTypeOffset));
__ ldrb(r5, FieldMemOperand(r5, Map::kInstanceTypeOffset));
- ASSERT_EQ(0, kStringTag);
+ STATIC_ASSERT(kStringTag == 0);
// If either is not a string, go to runtime.
__ tst(r4, Operand(kIsNotStringMask));
__ tst(r5, Operand(kIsNotStringMask), eq);
@@ -11114,10 +11114,10 @@
// Check if either of the strings are empty. In that case return the other.
__ ldr(r2, FieldMemOperand(r0, String::kLengthOffset));
__ ldr(r3, FieldMemOperand(r1, String::kLengthOffset));
- ASSERT(kSmiTag == 0);
+ STATIC_ASSERT(kSmiTag == 0);
__ cmp(r2, Operand(Smi::FromInt(0))); // Test if first string is empty.
__ mov(r0, Operand(r1), LeaveCC, eq); // If first is empty, return second.
- ASSERT(kSmiTag == 0);
+ STATIC_ASSERT(kSmiTag == 0);
// Else test if second string is empty.
__ cmp(r3, Operand(Smi::FromInt(0)), ne);
__ b(ne, &strings_not_empty); // If either string was empty, return r0.
@@ -11141,7 +11141,7 @@
// Look at the length of the result of adding the two strings.
Label string_add_flat_result, longer_than_two;
// Adding two lengths can't overflow.
- ASSERT(String::kMaxLength * 2 > String::kMaxLength);
+ STATIC_ASSERT(String::kMaxLength < String::kMaxLength * 2);
__ add(r6, r2, Operand(r3));
// Use the runtime system when adding two one character strings, as it
// contains optimizations for this specific case using the symbol table.
@@ -11189,7 +11189,7 @@
__ cmp(r6, Operand(String::kMinNonFlatLength));
__ b(lt, &string_add_flat_result);
// Handle exceptionally long strings in the runtime system.
- ASSERT((String::kMaxLength & 0x80000000) == 0);
+ STATIC_ASSERT((String::kMaxLength & 0x80000000) == 0);
ASSERT(IsPowerOf2(String::kMaxLength + 1));
// kMaxLength + 1 is representable as shifted literal, kMaxLength is not.
__ cmp(r6, Operand(String::kMaxLength + 1));
@@ -11204,7 +11204,7 @@
__ ldrb(r5, FieldMemOperand(r5, Map::kInstanceTypeOffset));
}
Label non_ascii, allocated, ascii_data;
- ASSERT_EQ(0, kTwoByteStringTag);
+ STATIC_ASSERT(kTwoByteStringTag == 0);
__ tst(r4, Operand(kStringEncodingMask));
__ tst(r5, Operand(kStringEncodingMask), ne);
__ b(eq, &non_ascii);
@@ -11230,7 +11230,7 @@
__ tst(r5, Operand(kAsciiDataHintMask), ne);
__ b(ne, &ascii_data);
__ eor(r4, r4, Operand(r5));
- ASSERT(kAsciiStringTag != 0 && kAsciiDataHintTag != 0);
+ STATIC_ASSERT(kAsciiStringTag != 0 && kAsciiDataHintTag != 0);
__ and_(r4, r4, Operand(kAsciiStringTag | kAsciiDataHintTag));
__ cmp(r4, Operand(kAsciiStringTag | kAsciiDataHintTag));
__ b(eq, &ascii_data);
@@ -11256,7 +11256,7 @@
__ ldrb(r5, FieldMemOperand(r5, Map::kInstanceTypeOffset));
}
// Check that both strings are sequential.
- ASSERT_EQ(0, kSeqStringTag);
+ STATIC_ASSERT(kSeqStringTag == 0);
__ tst(r4, Operand(kStringRepresentationMask));
__ tst(r5, Operand(kStringRepresentationMask), eq);
__ b(ne, &string_add_runtime);