Chromium Code Reviews

Unified Diff: src/mips/code-stubs-mips.cc

Issue 7830036: Optimize isFinite and isNaN. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Made the change general by putting it in the NUMBER_IS_FINITE macro. Created 9 years, 3 months ago
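
Background (a sketch for context, not part of the patch): the trick behind this optimization is that x - x evaluates to 0 for every finite number and to NaN for NaN and +/-Infinity, so finiteness reduces to a single subtraction and comparison, and isNaN reduces to the self-inequality test. The JavaScript below is an illustrative approximation only; the function names are hypothetical and the actual NUMBER_IS_FINITE macro text is not shown in this diff.

// Illustrative sketch only -- not the actual NUMBER_IS_FINITE macro.
// A finite number, and only a finite number, satisfies x - x === 0:
// NaN - NaN and Infinity - Infinity both yield NaN and fail the check.
function NumberIsFiniteSketch(x) {
  return typeof x === 'number' && x - x === 0;
}

// NaN is the only JavaScript value that is not equal to itself.
function NumberIsNaNSketch(x) {
  return typeof x === 'number' && x !== x;
}

// Examples: NumberIsFiniteSketch(42) === true,
//           NumberIsFiniteSketch(Infinity) === false,
//           NumberIsNaNSketch(NaN) === true.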
Index: src/mips/code-stubs-mips.cc
diff --git a/src/mips/code-stubs-mips.cc b/src/mips/code-stubs-mips.cc
index ef0bf77c865ce98411ebd2e8161d12309ba38cc4..7ca78f6ad72ab5abd65825bb0e1a8a2e5645bb90 100644
--- a/src/mips/code-stubs-mips.cc
+++ b/src/mips/code-stubs-mips.cc
@@ -3538,7 +3538,7 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
const int kNumInstructionsToJump = 6;
masm->Addu(ra, ra, kNumInstructionsToJump * kPointerSize);
masm->sw(ra, MemOperand(sp)); // This spot was reserved in EnterExitFrame.
- masm->Subu(sp, sp, kCArgsSlotsSize);
+ masm->Subu(sp, sp, StandardFrameConstants::kCArgsSlotsSize);
// Stack is still aligned.
// Call the C routine.
@@ -3551,7 +3551,7 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
}
// Restore stack (remove arg slots).
- __ Addu(sp, sp, kCArgsSlotsSize);
+ __ Addu(sp, sp, StandardFrameConstants::kCArgsSlotsSize);
if (always_allocate) {
// It's okay to clobber a2 and a3 here. v0 & v1 contain result.
@@ -3707,7 +3707,8 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
offset_to_argv += kNumCalleeSavedFPU * kDoubleSize;
}
- __ lw(s0, MemOperand(sp, offset_to_argv + kCArgsSlotsSize));
+ __ lw(s0, MemOperand(sp, offset_to_argv +
+ StandardFrameConstants::kCArgsSlotsSize));
// We build an EntryFrame.
__ li(t3, Operand(-1)); // Push a bad frame pointer to fail if it is used.
@@ -5642,6 +5643,11 @@ void SubStringStub::Generate(MacroAssembler* masm) {
Register to = t2;
Register from = t3;
+ if (FLAG_string_slices) {
+ __ nop(); // Jumping as the first instruction would crash code generation.
+ __ jmp(&sub_string_runtime);
+ }
+
// Check bounds and smi-ness.
__ lw(to, MemOperand(sp, kToOffset));
__ lw(from, MemOperand(sp, kFromOffset));
@@ -5665,8 +5671,7 @@ void SubStringStub::Generate(MacroAssembler* masm) {
// Special handling of sub-strings of length 1 and 2. One character strings
// are handled in the runtime system (looked up in the single character
- // cache). Two character strings are looked for in the symbol cache in
- // generated code.
+ // cache). Two character strings are looked for in the symbol cache.
__ Branch(&sub_string_runtime, lt, a2, Operand(2));
// Both to and from are smis.
@@ -5678,32 +5683,19 @@ void SubStringStub::Generate(MacroAssembler* masm) {
// t5: to index (untagged smi)
// Make sure first argument is a sequential (or flat) string.
- __ lw(v0, MemOperand(sp, kStringOffset));
- __ Branch(&sub_string_runtime, eq, v0, Operand(kSmiTagMask));
+ __ lw(t1, MemOperand(sp, kStringOffset));
+ __ Branch(&sub_string_runtime, eq, t1, Operand(kSmiTagMask));
- __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
+ __ lw(a1, FieldMemOperand(t1, HeapObject::kMapOffset));
__ lbu(a1, FieldMemOperand(a1, Map::kInstanceTypeOffset));
- __ And(t4, v0, Operand(kIsNotStringMask));
+ __ And(t4, a1, Operand(kIsNotStringMask));
__ Branch(&sub_string_runtime, ne, t4, Operand(zero_reg));
- // Short-cut for the case of trivial substring.
- Label return_v0;
- // v0: original string
- // a2: result string length
- __ lw(t0, FieldMemOperand(v0, String::kLengthOffset));
- __ sra(t0, t0, 1);
- __ Branch(&return_v0, eq, a2, Operand(t0));
-
- Label create_slice;
- if (FLAG_string_slices) {
- __ Branch(&create_slice, ge, a2, Operand(SlicedString::kMinLength));
- }
-
- // v0: original string
// a1: instance type
// a2: result string length
// a3: from index (untagged smi)
+ // t1: string
// t2: (a.k.a. to): to (smi)
// t3: (a.k.a. from): from offset (smi)
// t5: to index (untagged smi)
@@ -5712,9 +5704,8 @@ void SubStringStub::Generate(MacroAssembler* masm) {
__ And(t0, a1, Operand(kStringRepresentationMask));
STATIC_ASSERT(kSeqStringTag < kConsStringTag);
STATIC_ASSERT(kConsStringTag < kExternalStringTag);
- STATIC_ASSERT(kConsStringTag < kSlicedStringTag);
- // Slices and external strings go to runtime.
+ // External strings go to runtime.
__ Branch(&sub_string_runtime, gt, t0, Operand(kConsStringTag));
// Sequential strings are handled directly.
@@ -5723,32 +5714,32 @@ void SubStringStub::Generate(MacroAssembler* masm) {
// Cons string. Try to recurse (once) on the first substring.
// (This adds a little more generality than necessary to handle flattened
// cons strings, but not much).
- __ lw(v0, FieldMemOperand(v0, ConsString::kFirstOffset));
- __ lw(t0, FieldMemOperand(v0, HeapObject::kMapOffset));
+ __ lw(t1, FieldMemOperand(t1, ConsString::kFirstOffset));
+ __ lw(t0, FieldMemOperand(t1, HeapObject::kMapOffset));
__ lbu(a1, FieldMemOperand(t0, Map::kInstanceTypeOffset));
STATIC_ASSERT(kSeqStringTag == 0);
- // Cons, slices and external strings go to runtime.
+ // Cons and external strings go to runtime.
__ Branch(&sub_string_runtime, ne, a1, Operand(kStringRepresentationMask));
// Definitely a sequential string.
__ bind(&seq_string);
- // v0: original string
// a1: instance type
// a2: result string length
// a3: from index (untagged smi)
+ // t1: string
// t2: (a.k.a. to): to (smi)
// t3: (a.k.a. from): from offset (smi)
// t5: to index (untagged smi)
- __ lw(t0, FieldMemOperand(v0, String::kLengthOffset));
+ __ lw(t0, FieldMemOperand(t1, String::kLengthOffset));
__ Branch(&sub_string_runtime, lt, t0, Operand(to)); // Fail if to > length.
to = no_reg;
- // v0: original string or left hand side of the original cons string.
// a1: instance type
// a2: result string length
// a3: from index (untagged smi)
+ // t1: string
// t3: (a.k.a. from): from offset (smi)
// t5: to index (untagged smi)
@@ -5764,147 +5755,84 @@ void SubStringStub::Generate(MacroAssembler* masm) {
// Sub string of length 2 requested.
// Get the two characters forming the sub string.
- __ Addu(v0, v0, Operand(a3));
- __ lbu(a3, FieldMemOperand(v0, SeqAsciiString::kHeaderSize));
- __ lbu(t0, FieldMemOperand(v0, SeqAsciiString::kHeaderSize + 1));
+ __ Addu(t1, t1, Operand(a3));
+ __ lbu(a3, FieldMemOperand(t1, SeqAsciiString::kHeaderSize));
+ __ lbu(t0, FieldMemOperand(t1, SeqAsciiString::kHeaderSize + 1));
// Try to lookup two character string in symbol table.
Label make_two_character_string;
StringHelper::GenerateTwoCharacterSymbolTableProbe(
masm, a3, t0, a1, t1, t2, t3, t4, &make_two_character_string);
Counters* counters = masm->isolate()->counters();
- __ jmp(&return_v0);
+ __ IncrementCounter(counters->sub_string_native(), 1, a3, t0);
+ __ Addu(sp, sp, Operand(3 * kPointerSize));
+ __ Ret();
+
// a2: result string length.
// a3: two characters combined into halfword in little endian byte order.
__ bind(&make_two_character_string);
__ AllocateAsciiString(v0, a2, t0, t1, t4, &sub_string_runtime);
__ sh(a3, FieldMemOperand(v0, SeqAsciiString::kHeaderSize));
- __ jmp(&return_v0);
+ __ IncrementCounter(counters->sub_string_native(), 1, a3, t0);
+ __ Addu(sp, sp, Operand(3 * kPointerSize));
+ __ Ret();
__ bind(&result_longer_than_two);
- // Locate 'from' character of string.
- __ Addu(t1, v0, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
- __ sra(t4, from, 1);
- __ Addu(t1, t1, t4);
-
// Allocate the result.
__ AllocateAsciiString(v0, a2, t4, t0, a1, &sub_string_runtime);
- // v0: result string
- // a2: result string length
+ // v0: result string.
+ // a2: result string length.
// a3: from index (untagged smi)
- // t1: first character of substring to copy
+ // t1: string.
// t3: (a.k.a. from): from offset (smi)
// Locate first character of result.
__ Addu(a1, v0, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
+ // Locate 'from' character of string.
+ __ Addu(t1, t1, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
+ __ Addu(t1, t1, Operand(a3));
- // v0: result string
- // a1: first character of result string
- // a2: result string length
- // t1: first character of substring to copy
+ // v0: result string.
+ // a1: first character of result string.
+ // a2: result string length.
+ // t1: first character of sub string to copy.
STATIC_ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
StringHelper::GenerateCopyCharactersLong(
masm, a1, t1, a2, a3, t0, t2, t3, t4, COPY_ASCII | DEST_ALWAYS_ALIGNED);
- __ jmp(&return_v0);
+ __ IncrementCounter(counters->sub_string_native(), 1, a3, t0);
+ __ Addu(sp, sp, Operand(3 * kPointerSize));
+ __ Ret();
__ bind(&non_ascii_flat);
- // a2: result string length
- // t1: string
+ // a2: result string length.
+ // t1: string.
// t3: (a.k.a. from): from offset (smi)
// Check for flat two byte string.
- // Locate 'from' character of string.
- __ Addu(t1, v0, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
- // As "from" is a smi it is 2 times the value which matches the size of a two
- // byte character.
- STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
- __ Addu(t1, t1, Operand(from));
-
// Allocate the result.
__ AllocateTwoByteString(v0, a2, a1, a3, t0, &sub_string_runtime);
- // v0: result string
- // a2: result string length
- // t1: first character of substring to copy
+ // v0: result string.
+ // a2: result string length.
+ // t1: string.
// Locate first character of result.
__ Addu(a1, v0, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
-
+ // Locate 'from' character of string.
+ __ Addu(t1, t1, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
+ // As "from" is a smi it is 2 times the value which matches the size of a two
+ // byte character.
+ __ Addu(t1, t1, Operand(from));
from = no_reg;
// v0: result string.
// a1: first character of result.
// a2: result length.
- // t1: first character of substring to copy.
+ // t1: first character of string to copy.
STATIC_ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
StringHelper::GenerateCopyCharactersLong(
masm, a1, t1, a2, a3, t0, t2, t3, t4, DEST_ALWAYS_ALIGNED);
- __ jmp(&return_v0);
-
- if (FLAG_string_slices) {
- __ bind(&create_slice);
- // v0: original string
- // a1: instance type
- // a2: length
- // a3: from index (untagged smi)
- // t2 (a.k.a. to): to (smi)
- // t3 (a.k.a. from): from offset (smi)
- Label allocate_slice, sliced_string, seq_string;
- STATIC_ASSERT(kSeqStringTag == 0);
- __ And(t4, a1, Operand(kStringRepresentationMask));
- __ Branch(&seq_string, eq, t4, Operand(zero_reg));
- STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
- STATIC_ASSERT(kIsIndirectStringMask != 0);
- __ And(t4, a1, Operand(kIsIndirectStringMask));
- // External string. Jump to runtime.
- __ Branch(&sub_string_runtime, eq, t4, Operand(zero_reg));
-
- __ And(t4, a1, Operand(kSlicedNotConsMask));
- __ Branch(&sliced_string, ne, t4, Operand(zero_reg));
- // Cons string. Check whether it is flat, then fetch first part.
- __ lw(t1, FieldMemOperand(v0, ConsString::kSecondOffset));
- __ LoadRoot(t5, Heap::kEmptyStringRootIndex);
- __ Branch(&sub_string_runtime, ne, t1, Operand(t5));
- __ lw(t1, FieldMemOperand(v0, ConsString::kFirstOffset));
- __ jmp(&allocate_slice);
-
- __ bind(&sliced_string);
- // Sliced string. Fetch parent and correct start index by offset.
- __ lw(t1, FieldMemOperand(v0, SlicedString::kOffsetOffset));
- __ addu(t3, t3, t1);
- __ lw(t1, FieldMemOperand(v0, SlicedString::kParentOffset));
- __ jmp(&allocate_slice);
-
- __ bind(&seq_string);
- // Sequential string. Just move string to the right register.
- __ mov(t1, v0);
-
- __ bind(&allocate_slice);
- // a1: instance type of original string
- // a2: length
- // t1: underlying subject string
- // t3 (a.k.a. from): from offset (smi)
- // Allocate new sliced string. At this point we do not reload the instance
- // type including the string encoding because we simply rely on the info
- // provided by the original string. It does not matter if the original
- // string's encoding is wrong because we always have to recheck encoding of
- // the newly created string's parent anyways due to externalized strings.
- Label two_byte_slice, set_slice_header;
- STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
- STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
- __ And(t4, a1, Operand(kStringEncodingMask));
- __ Branch(&two_byte_slice, eq, t4, Operand(zero_reg));
- __ AllocateAsciiSlicedString(v0, a2, a3, t0, &sub_string_runtime);
- __ jmp(&set_slice_header);
- __ bind(&two_byte_slice);
- __ AllocateTwoByteSlicedString(v0, a2, a3, t0, &sub_string_runtime);
- __ bind(&set_slice_header);
- __ sw(t3, FieldMemOperand(v0, SlicedString::kOffsetOffset));
- __ sw(t1, FieldMemOperand(v0, SlicedString::kParentOffset));
- }
-
- __ bind(&return_v0);
__ IncrementCounter(counters->sub_string_native(), 1, a3, t0);
__ Addu(sp, sp, Operand(3 * kPointerSize));
__ Ret();
