| Index: src/mips/code-stubs-mips.cc
|
| diff --git a/src/mips/code-stubs-mips.cc b/src/mips/code-stubs-mips.cc
|
| index 7ab85dcec89c81ed4fa136a661aff1a2ea227bc4..68cb356d002b781f73056230ec664c2f887d9c91 100644
|
| --- a/src/mips/code-stubs-mips.cc
|
| +++ b/src/mips/code-stubs-mips.cc
|
| @@ -2268,6 +2268,229 @@
|
| __ bind(&done);
|
| }
|
|
|
| +
|
| +void SubStringStub::Generate(MacroAssembler* masm) {
|
| + Label runtime;
|
| + // Stack frame on entry.
|
| + // ra: return address
|
| + // sp[0]: to
|
| + // sp[4]: from
|
| + // sp[8]: string
|
| +
|
| + // This stub is called from the native-call %_SubString(...), so
|
| + // nothing can be assumed about the arguments. It is tested that:
|
| + // "string" is a sequential string,
|
| + // both "from" and "to" are smis, and
|
| + // 0 <= from <= to <= string.length.
|
| + // If any of these assumptions fail, we call the runtime system.
|
| +
|
| + const int kToOffset = 0 * kPointerSize;
|
| + const int kFromOffset = 1 * kPointerSize;
|
| + const int kStringOffset = 2 * kPointerSize;
|
| +
|
| + __ lw(a2, MemOperand(sp, kToOffset));
|
| + __ lw(a3, MemOperand(sp, kFromOffset));
|
| + STATIC_ASSERT(kFromOffset == kToOffset + 4);
|
| + STATIC_ASSERT(kSmiTag == 0);
|
| + STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
|
| +
|
| + // Utilize delay slots. SmiUntag doesn't emit a jump, everything else is
|
| + // safe in this case.
|
| + __ UntagAndJumpIfNotSmi(a2, a2, &runtime);
|
| + __ UntagAndJumpIfNotSmi(a3, a3, &runtime);
|
| + // Both a2 and a3 are untagged integers.
|
| +
|
| + __ Branch(&runtime, lt, a3, Operand(zero_reg)); // From < 0.
|
| +
|
| + __ Branch(&runtime, gt, a3, Operand(a2)); // Fail if from > to.
|
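| +  // a2 = to - from: the length of the requested substring.
|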
| + __ Subu(a2, a2, a3);
|
| +
|
| + // Make sure first argument is a string.
|
| + __ lw(v0, MemOperand(sp, kStringOffset));
|
| + __ JumpIfSmi(v0, &runtime);
|
| + __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
|
| + __ lbu(a1, FieldMemOperand(a1, Map::kInstanceTypeOffset));
|
| + __ And(t0, a1, Operand(kIsNotStringMask));
|
| +
|
| + __ Branch(&runtime, ne, t0, Operand(zero_reg));
|
| +
|
| + Label single_char;
|
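| +  // Substrings of length one are created with StringCharAtGenerator below.
|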
| + __ Branch(&single_char, eq, a2, Operand(1));
|
| +
|
| + // Short-cut for the case of trivial substring.
|
| + Label return_v0;
|
| + // v0: original string
|
| + // a2: result string length
|
| + __ lw(t0, FieldMemOperand(v0, String::kLengthOffset));
|
| +  __ sra(t0, t0, 1); // Untag string length.
|
| + // Return original string.
|
| + __ Branch(&return_v0, eq, a2, Operand(t0));
|
| + // Longer than original string's length or negative: unsafe arguments.
|
| + __ Branch(&runtime, hi, a2, Operand(t0));
|
| + // Shorter than original string's length: an actual substring.
|
| +
|
| + // Deal with different string types: update the index if necessary
|
| + // and put the underlying string into t1.
|
| + // v0: original string
|
| + // a1: instance type
|
| + // a2: length
|
| + // a3: from index (untagged)
|
| + Label underlying_unpacked, sliced_string, seq_or_external_string;
|
| + // If the string is not indirect, it can only be sequential or external.
|
| + STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
|
| + STATIC_ASSERT(kIsIndirectStringMask != 0);
|
| + __ And(t0, a1, Operand(kIsIndirectStringMask));
|
| + __ Branch(USE_DELAY_SLOT, &seq_or_external_string, eq, t0, Operand(zero_reg));
|
| + // t0 is used as a scratch register and can be overwritten in either case.
|
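| +  // The And below is emitted into the branch delay slot and executes on
|
| +  // both paths.
|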
| + __ And(t0, a1, Operand(kSlicedNotConsMask));
|
| + __ Branch(&sliced_string, ne, t0, Operand(zero_reg));
|
| + // Cons string. Check whether it is flat, then fetch first part.
|
| + __ lw(t1, FieldMemOperand(v0, ConsString::kSecondOffset));
|
| + __ LoadRoot(t0, Heap::kempty_stringRootIndex);
|
| + __ Branch(&runtime, ne, t1, Operand(t0));
|
| + __ lw(t1, FieldMemOperand(v0, ConsString::kFirstOffset));
|
| + // Update instance type.
|
| + __ lw(a1, FieldMemOperand(t1, HeapObject::kMapOffset));
|
| + __ lbu(a1, FieldMemOperand(a1, Map::kInstanceTypeOffset));
|
| + __ jmp(&underlying_unpacked);
|
| +
|
| + __ bind(&sliced_string);
|
| + // Sliced string. Fetch parent and correct start index by offset.
|
| + __ lw(t1, FieldMemOperand(v0, SlicedString::kParentOffset));
|
| + __ lw(t0, FieldMemOperand(v0, SlicedString::kOffsetOffset));
|
| +  __ sra(t0, t0, 1); // Untag offset.
|
| +  __ Addu(a3, a3, t0); // Add offset to index.
|
| + // Update instance type.
|
| + __ lw(a1, FieldMemOperand(t1, HeapObject::kMapOffset));
|
| + __ lbu(a1, FieldMemOperand(a1, Map::kInstanceTypeOffset));
|
| + __ jmp(&underlying_unpacked);
|
| +
|
| + __ bind(&seq_or_external_string);
|
| + // Sequential or external string. Just move string to the expected register.
|
| + __ mov(t1, v0);
|
| +
|
| + __ bind(&underlying_unpacked);
|
| +
|
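| +  // When string slices are enabled, a sufficiently long result is returned
|
| +  // as a SlicedString over the underlying string instead of copying it.
|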
| + if (FLAG_string_slices) {
|
| + Label copy_routine;
|
| + // t1: underlying subject string
|
| + // a1: instance type of underlying subject string
|
| + // a2: length
|
| + // a3: adjusted start index (untagged)
|
| + // Short slice. Copy instead of slicing.
|
| + __ Branch(©_routine, lt, a2, Operand(SlicedString::kMinLength));
|
| + // Allocate new sliced string. At this point we do not reload the instance
|
| + // type including the string encoding because we simply rely on the info
|
| + // provided by the original string. It does not matter if the original
|
| + // string's encoding is wrong because we always have to recheck encoding of
|
| + // the newly created string's parent anyways due to externalized strings.
|
| + Label two_byte_slice, set_slice_header;
|
| + STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
|
| + STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
|
| + __ And(t0, a1, Operand(kStringEncodingMask));
|
| + __ Branch(&two_byte_slice, eq, t0, Operand(zero_reg));
|
| + __ AllocateOneByteSlicedString(v0, a2, t2, t3, &runtime);
|
| + __ jmp(&set_slice_header);
|
| + __ bind(&two_byte_slice);
|
| + __ AllocateTwoByteSlicedString(v0, a2, t2, t3, &runtime);
|
| + __ bind(&set_slice_header);
|
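| +  // A SlicedString stores its parent string and the start offset as a smi.
|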
| + __ sll(a3, a3, 1);
|
| + __ sw(t1, FieldMemOperand(v0, SlicedString::kParentOffset));
|
| + __ sw(a3, FieldMemOperand(v0, SlicedString::kOffsetOffset));
|
| + __ jmp(&return_v0);
|
| +
|
| + __ bind(©_routine);
|
| + }
|
| +
|
| + // t1: underlying subject string
|
| + // a1: instance type of underlying subject string
|
| + // a2: length
|
| + // a3: adjusted start index (untagged)
|
| + Label two_byte_sequential, sequential_string, allocate_result;
|
| + STATIC_ASSERT(kExternalStringTag != 0);
|
| + STATIC_ASSERT(kSeqStringTag == 0);
|
| + __ And(t0, a1, Operand(kExternalStringTag));
|
| + __ Branch(&sequential_string, eq, t0, Operand(zero_reg));
|
| +
|
| + // Handle external string.
|
| + // Rule out short external strings.
|
| + STATIC_ASSERT(kShortExternalStringTag != 0);
|
| + __ And(t0, a1, Operand(kShortExternalStringTag));
|
| + __ Branch(&runtime, ne, t0, Operand(zero_reg));
|
| + __ lw(t1, FieldMemOperand(t1, ExternalString::kResourceDataOffset));
|
| + // t1 already points to the first character of underlying string.
|
| + __ jmp(&allocate_result);
|
| +
|
| + __ bind(&sequential_string);
|
| + // Locate first character of underlying subject string.
|
| + STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
|
| + __ Addu(t1, t1, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
|
| +
|
| + __ bind(&allocate_result);
|
| +  // Check the string encoding and allocate the result.
|
| + STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
|
| + __ And(t0, a1, Operand(kStringEncodingMask));
|
| + __ Branch(&two_byte_sequential, eq, t0, Operand(zero_reg));
|
| +
|
| +  // Allocate and copy the resulting one-byte string.
|
| + __ AllocateOneByteString(v0, a2, t0, t2, t3, &runtime);
|
| +
|
| + // Locate first character of substring to copy.
|
| + __ Addu(t1, t1, a3);
|
| +
|
| + // Locate first character of result.
|
| + __ Addu(a1, v0, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
|
| +
|
| + // v0: result string
|
| + // a1: first character of result string
|
| + // a2: result string length
|
| + // t1: first character of substring to copy
|
| + STATIC_ASSERT((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
|
| + StringHelper::GenerateCopyCharacters(
|
| + masm, a1, t1, a2, a3, String::ONE_BYTE_ENCODING);
|
| + __ jmp(&return_v0);
|
| +
|
| + // Allocate and copy the resulting two-byte string.
|
| + __ bind(&two_byte_sequential);
|
| + __ AllocateTwoByteString(v0, a2, t0, t2, t3, &runtime);
|
| +
|
| + // Locate first character of substring to copy.
|
| + STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
|
| +  __ Lsa(t1, t1, a3, 1); // t1 += a3 * 2 (two bytes per character).
|
| + // Locate first character of result.
|
| + __ Addu(a1, v0, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
|
| +
|
| + // v0: result string.
|
| + // a1: first character of result.
|
| + // a2: result length.
|
| + // t1: first character of substring to copy.
|
| + STATIC_ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
|
| + StringHelper::GenerateCopyCharacters(
|
| + masm, a1, t1, a2, a3, String::TWO_BYTE_ENCODING);
|
| +
|
| + __ bind(&return_v0);
|
| + Counters* counters = isolate()->counters();
|
| + __ IncrementCounter(counters->sub_string_native(), 1, a3, t0);
|
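| +  // Drop the three arguments and return the result in v0.
|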
| + __ DropAndRet(3);
|
| +
|
| +  // Just jump to runtime to create the substring.
|
| + __ bind(&runtime);
|
| + __ TailCallRuntime(Runtime::kSubString);
|
| +
|
| + __ bind(&single_char);
|
| + // v0: original string
|
| + // a1: instance type
|
| + // a2: length
|
| + // a3: from index (untagged)
|
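| +  // StringCharAtGenerator expects a smi index, so re-tag a3.
|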
| + __ SmiTag(a3, a3);
|
| + StringCharAtGenerator generator(v0, a3, a2, v0, &runtime, &runtime, &runtime,
|
| + RECEIVER_IS_STRING);
|
| + generator.GenerateFast(masm);
|
| + __ DropAndRet(3);
|
| + generator.SkipSlow(masm, &runtime);
|
| +}
|
| +
|
| +
|
| void ToStringStub::Generate(MacroAssembler* masm) {
|
| // The ToString stub takes on argument in a0.
|
| Label is_number;
|
|
|