| Index: src/arm/code-stubs-arm.cc
|
| diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc
|
| index ff754ec095cdd389a8a12882b4b4029b6984a4da..f45c75b67d5d36d255b7a3caf9b5f089756cf422 100644
|
| --- a/src/arm/code-stubs-arm.cc
|
| +++ b/src/arm/code-stubs-arm.cc
|
| @@ -38,6 +38,16 @@ namespace v8 {
|
| namespace internal {
|
|
|
|
|
| +void ToNumberStub::InitializeInterfaceDescriptor(
|
| + Isolate* isolate,
|
| + CodeStubInterfaceDescriptor* descriptor) {
|
| + static Register registers[] = { r0 };
|
| + descriptor->register_param_count_ = 1;
|
| + descriptor->register_params_ = registers;
|
| + descriptor->deoptimization_handler_ = NULL;
|
| +}
|
| +
|
| +
|
| void FastCloneShallowArrayStub::InitializeInterfaceDescriptor(
|
| Isolate* isolate,
|
| CodeStubInterfaceDescriptor* descriptor) {
|
| @@ -258,6 +268,17 @@ void StoreGlobalStub::InitializeInterfaceDescriptor(
|
| }
|
|
|
|
|
| +void ElementsTransitionAndStoreStub::InitializeInterfaceDescriptor(
|
| + Isolate* isolate,
|
| + CodeStubInterfaceDescriptor* descriptor) {
|
| + static Register registers[] = { r0, r3, r1, r2 };
|
| + descriptor->register_param_count_ = 4;
|
| + descriptor->register_params_ = registers;
|
| + descriptor->deoptimization_handler_ =
|
| + FUNCTION_ADDR(ElementsTransitionAndStoreIC_Miss);
|
| +}
|
| +
|
| +
|
| #define __ ACCESS_MASM(masm)
|
|
|
|
|
| @@ -275,17 +296,6 @@ static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
|
| Register rhs);
|
|
|
|
|
| -// Check if the operand is a heap number.
|
| -static void EmitCheckForHeapNumber(MacroAssembler* masm, Register operand,
|
| - Register scratch1, Register scratch2,
|
| - Label* not_a_heap_number) {
|
| - __ ldr(scratch1, FieldMemOperand(operand, HeapObject::kMapOffset));
|
| - __ LoadRoot(scratch2, Heap::kHeapNumberMapRootIndex);
|
| - __ cmp(scratch1, scratch2);
|
| - __ b(ne, not_a_heap_number);
|
| -}
|
| -
|
| -
|
| void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
|
| // Update the static counter each time a new code stub is generated.
|
| Isolate* isolate = masm->isolate();
|
| @@ -310,22 +320,6 @@ void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
|
| }
|
|
|
|
|
| -void ToNumberStub::Generate(MacroAssembler* masm) {
|
| - // The ToNumber stub takes one argument in eax.
|
| - Label check_heap_number, call_builtin;
|
| - __ JumpIfNotSmi(r0, &check_heap_number);
|
| - __ Ret();
|
| -
|
| - __ bind(&check_heap_number);
|
| - EmitCheckForHeapNumber(masm, r0, r1, ip, &call_builtin);
|
| - __ Ret();
|
| -
|
| - __ bind(&call_builtin);
|
| - __ push(r0);
|
| - __ InvokeBuiltin(Builtins::TO_NUMBER, JUMP_FUNCTION);
|
| -}
|
| -
|
| -
|
| void FastNewClosureStub::Generate(MacroAssembler* masm) {
|
| // Create a new closure from the given function info in new
|
| // space. Set the context to the current context in cp.
|
| @@ -925,17 +919,10 @@ static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
|
|
|
| // Now that we have the types we might as well check for
|
| // internalized-internalized.
|
| - Label not_internalized;
|
| - STATIC_ASSERT(kInternalizedTag != 0);
|
| - __ and_(r2, r2, Operand(kIsNotStringMask | kIsInternalizedMask));
|
| - __ cmp(r2, Operand(kInternalizedTag | kStringTag));
|
| - __ b(ne, &not_internalized); // r2 (rhs) is not an internalized string
|
| -
|
| - __ and_(r3, r3, Operand(kIsNotStringMask | kIsInternalizedMask));
|
| - __ cmp(r3, Operand(kInternalizedTag | kStringTag));
|
| - __ b(eq, &return_not_equal); // both rhs and lhs are internalized strings
|
| -
|
| - __ bind(&not_internalized);
|
| + STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
|
| + __ orr(r2, r2, Operand(r3));
|
| + __ tst(r2, Operand(kIsNotStringMask | kIsNotInternalizedMask));
|
| + __ b(eq, &return_not_equal);
|
| }
|
|
|
|
|
| @@ -976,15 +963,15 @@ static void EmitCheckForInternalizedStringsOrObjects(MacroAssembler* masm,
|
|
|
| // r2 is object type of rhs.
|
| Label object_test;
|
| - STATIC_ASSERT(kInternalizedTag != 0);
|
| + STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
|
| __ tst(r2, Operand(kIsNotStringMask));
|
| __ b(ne, &object_test);
|
| - __ tst(r2, Operand(kIsInternalizedMask));
|
| - __ b(eq, possible_strings);
|
| + __ tst(r2, Operand(kIsNotInternalizedMask));
|
| + __ b(ne, possible_strings);
|
| __ CompareObjectType(lhs, r3, r3, FIRST_NONSTRING_TYPE);
|
| __ b(ge, not_both_strings);
|
| - __ tst(r3, Operand(kIsInternalizedMask));
|
| - __ b(eq, possible_strings);
|
| + __ tst(r3, Operand(kIsNotInternalizedMask));
|
| + __ b(ne, possible_strings);
|
|
|
| // Both are internalized. We already checked they weren't the same pointer
|
| // so they are not equal.
|
| @@ -1015,7 +1002,6 @@ void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm,
|
| Register scratch1,
|
| Register scratch2,
|
| Register scratch3,
|
| - bool object_is_smi,
|
| Label* not_found) {
|
| // Use of registers. Register result is used as a temporary.
|
| Register number_string_cache = result;
|
| @@ -1038,40 +1024,38 @@ void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm,
|
| Isolate* isolate = masm->isolate();
|
| Label is_smi;
|
| Label load_result_from_cache;
|
| - if (!object_is_smi) {
|
| - __ JumpIfSmi(object, &is_smi);
|
| - __ CheckMap(object,
|
| - scratch1,
|
| - Heap::kHeapNumberMapRootIndex,
|
| - not_found,
|
| - DONT_DO_SMI_CHECK);
|
| + __ JumpIfSmi(object, &is_smi);
|
| + __ CheckMap(object,
|
| + scratch1,
|
| + Heap::kHeapNumberMapRootIndex,
|
| + not_found,
|
| + DONT_DO_SMI_CHECK);
|
|
|
| - STATIC_ASSERT(8 == kDoubleSize);
|
| - __ add(scratch1,
|
| - object,
|
| - Operand(HeapNumber::kValueOffset - kHeapObjectTag));
|
| - __ ldm(ia, scratch1, scratch1.bit() | scratch2.bit());
|
| - __ eor(scratch1, scratch1, Operand(scratch2));
|
| - __ and_(scratch1, scratch1, Operand(mask));
|
| -
|
| - // Calculate address of entry in string cache: each entry consists
|
| - // of two pointer sized fields.
|
| - __ add(scratch1,
|
| - number_string_cache,
|
| - Operand(scratch1, LSL, kPointerSizeLog2 + 1));
|
| -
|
| - Register probe = mask;
|
| - __ ldr(probe,
|
| - FieldMemOperand(scratch1, FixedArray::kHeaderSize));
|
| - __ JumpIfSmi(probe, not_found);
|
| - __ sub(scratch2, object, Operand(kHeapObjectTag));
|
| - __ vldr(d0, scratch2, HeapNumber::kValueOffset);
|
| - __ sub(probe, probe, Operand(kHeapObjectTag));
|
| - __ vldr(d1, probe, HeapNumber::kValueOffset);
|
| - __ VFPCompareAndSetFlags(d0, d1);
|
| - __ b(ne, not_found); // The cache did not contain this value.
|
| - __ b(&load_result_from_cache);
|
| - }
|
| + STATIC_ASSERT(8 == kDoubleSize);
|
| + __ add(scratch1,
|
| + object,
|
| + Operand(HeapNumber::kValueOffset - kHeapObjectTag));
|
| + __ ldm(ia, scratch1, scratch1.bit() | scratch2.bit());
|
| + __ eor(scratch1, scratch1, Operand(scratch2));
|
| + __ and_(scratch1, scratch1, Operand(mask));
|
| +
|
| + // Calculate address of entry in string cache: each entry consists
|
| + // of two pointer sized fields.
|
| + __ add(scratch1,
|
| + number_string_cache,
|
| + Operand(scratch1, LSL, kPointerSizeLog2 + 1));
|
| +
|
| + Register probe = mask;
|
| + __ ldr(probe,
|
| + FieldMemOperand(scratch1, FixedArray::kHeaderSize));
|
| + __ JumpIfSmi(probe, not_found);
|
| + __ sub(scratch2, object, Operand(kHeapObjectTag));
|
| + __ vldr(d0, scratch2, HeapNumber::kValueOffset);
|
| + __ sub(probe, probe, Operand(kHeapObjectTag));
|
| + __ vldr(d1, probe, HeapNumber::kValueOffset);
|
| + __ VFPCompareAndSetFlags(d0, d1);
|
| + __ b(ne, not_found); // The cache did not contain this value.
|
| + __ b(&load_result_from_cache);
|
|
|
| __ bind(&is_smi);
|
| Register scratch = scratch1;
|
| @@ -1083,7 +1067,6 @@ void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm,
|
| Operand(scratch, LSL, kPointerSizeLog2 + 1));
|
|
|
| // Check if the entry is the smi we are looking for.
|
| - Register probe = mask;
|
| __ ldr(probe, FieldMemOperand(scratch, FixedArray::kHeaderSize));
|
| __ cmp(object, probe);
|
| __ b(ne, not_found);
|
| @@ -1105,7 +1088,7 @@ void NumberToStringStub::Generate(MacroAssembler* masm) {
|
| __ ldr(r1, MemOperand(sp, 0));
|
|
|
| // Generate code to lookup number in the number string cache.
|
| - GenerateLookupNumberStringCache(masm, r1, r0, r2, r3, r4, false, &runtime);
|
| + GenerateLookupNumberStringCache(masm, r1, r0, r2, r3, r4, &runtime);
|
| __ add(sp, sp, Operand(1 * kPointerSize));
|
| __ Ret();
|
|
|
| @@ -1897,8 +1880,8 @@ void BinaryOpStub::GenerateBothStringStub(MacroAssembler* masm) {
|
| __ CompareObjectType(right, r2, r2, FIRST_NONSTRING_TYPE);
|
| __ b(ge, &call_runtime);
|
|
|
| - StringAddStub string_add_stub((StringAddFlags)
|
| - (ERECT_FRAME | NO_STRING_CHECK_IN_STUB));
|
| + StringAddStub string_add_stub(
|
| + (StringAddFlags)(STRING_ADD_CHECK_NONE | STRING_ADD_ERECT_FRAME));
|
| GenerateRegisterArgsPush(masm);
|
| __ TailCallStub(&string_add_stub);
|
|
|
| @@ -2256,8 +2239,8 @@ void BinaryOpStub::GenerateAddStrings(MacroAssembler* masm) {
|
| __ CompareObjectType(left, r2, r2, FIRST_NONSTRING_TYPE);
|
| __ b(ge, &left_not_string);
|
|
|
| - StringAddStub string_add_left_stub((StringAddFlags)
|
| - (ERECT_FRAME | NO_STRING_CHECK_LEFT_IN_STUB));
|
| + StringAddStub string_add_left_stub(
|
| + (StringAddFlags)(STRING_ADD_CHECK_RIGHT | STRING_ADD_ERECT_FRAME));
|
| GenerateRegisterArgsPush(masm);
|
| __ TailCallStub(&string_add_left_stub);
|
|
|
| @@ -2267,8 +2250,8 @@ void BinaryOpStub::GenerateAddStrings(MacroAssembler* masm) {
|
| __ CompareObjectType(right, r2, r2, FIRST_NONSTRING_TYPE);
|
| __ b(ge, &call_runtime);
|
|
|
| - StringAddStub string_add_right_stub((StringAddFlags)
|
| - (ERECT_FRAME | NO_STRING_CHECK_RIGHT_IN_STUB));
|
| + StringAddStub string_add_right_stub(
|
| + (StringAddFlags)(STRING_ADD_CHECK_LEFT | STRING_ADD_ERECT_FRAME));
|
| GenerateRegisterArgsPush(masm);
|
| __ TailCallStub(&string_add_right_stub);
|
|
|
| @@ -5494,7 +5477,11 @@ void StringAddStub::Generate(MacroAssembler* masm) {
|
| __ ldr(r1, MemOperand(sp, 0 * kPointerSize)); // Second argument.
|
|
|
| // Make sure that both arguments are strings if not known in advance.
|
| - if ((flags_ & NO_STRING_ADD_FLAGS) != 0) {
|
| + // Otherwise, at least one of the arguments is definitely a string,
|
| + // and we convert the one that is not known to be a string.
|
| + if ((flags_ & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_BOTH) {
|
| + ASSERT((flags_ & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT);
|
| + ASSERT((flags_ & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT);
|
| __ JumpIfEitherSmi(r0, r1, &call_runtime);
|
| // Load instance types.
|
| __ ldr(r4, FieldMemOperand(r0, HeapObject::kMapOffset));
|
| @@ -5506,20 +5493,16 @@ void StringAddStub::Generate(MacroAssembler* masm) {
|
| __ tst(r4, Operand(kIsNotStringMask));
|
| __ tst(r5, Operand(kIsNotStringMask), eq);
|
| __ b(ne, &call_runtime);
|
| - } else {
|
| - // Here at least one of the arguments is definitely a string.
|
| - // We convert the one that is not known to be a string.
|
| - if ((flags_ & NO_STRING_CHECK_LEFT_IN_STUB) == 0) {
|
| - ASSERT((flags_ & NO_STRING_CHECK_RIGHT_IN_STUB) != 0);
|
| - GenerateConvertArgument(
|
| - masm, 1 * kPointerSize, r0, r2, r3, r4, r5, &call_builtin);
|
| - builtin_id = Builtins::STRING_ADD_RIGHT;
|
| - } else if ((flags_ & NO_STRING_CHECK_RIGHT_IN_STUB) == 0) {
|
| - ASSERT((flags_ & NO_STRING_CHECK_LEFT_IN_STUB) != 0);
|
| - GenerateConvertArgument(
|
| - masm, 0 * kPointerSize, r1, r2, r3, r4, r5, &call_builtin);
|
| - builtin_id = Builtins::STRING_ADD_LEFT;
|
| - }
|
| + } else if ((flags_ & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) {
|
| + ASSERT((flags_ & STRING_ADD_CHECK_RIGHT) == 0);
|
| + GenerateConvertArgument(
|
| + masm, 1 * kPointerSize, r0, r2, r3, r4, r5, &call_builtin);
|
| + builtin_id = Builtins::STRING_ADD_RIGHT;
|
| + } else if ((flags_ & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) {
|
| + ASSERT((flags_ & STRING_ADD_CHECK_LEFT) == 0);
|
| + GenerateConvertArgument(
|
| + masm, 0 * kPointerSize, r1, r2, r3, r4, r5, &call_builtin);
|
| + builtin_id = Builtins::STRING_ADD_LEFT;
|
| }
|
|
|
| // Both arguments are strings.
|
| @@ -5567,7 +5550,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
|
| __ b(ne, &longer_than_two);
|
|
|
| // Check that both strings are non-external ASCII strings.
|
| - if (flags_ != NO_STRING_ADD_FLAGS) {
|
| + if ((flags_ & STRING_ADD_CHECK_BOTH) != STRING_ADD_CHECK_BOTH) {
|
| __ ldr(r4, FieldMemOperand(r0, HeapObject::kMapOffset));
|
| __ ldr(r5, FieldMemOperand(r1, HeapObject::kMapOffset));
|
| __ ldrb(r4, FieldMemOperand(r4, Map::kInstanceTypeOffset));
|
| @@ -5615,7 +5598,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
|
|
|
| // If result is not supposed to be flat, allocate a cons string object.
|
| // If both strings are ASCII the result is an ASCII cons string.
|
| - if (flags_ != NO_STRING_ADD_FLAGS) {
|
| + if ((flags_ & STRING_ADD_CHECK_BOTH) != STRING_ADD_CHECK_BOTH) {
|
| __ ldr(r4, FieldMemOperand(r0, HeapObject::kMapOffset));
|
| __ ldr(r5, FieldMemOperand(r1, HeapObject::kMapOffset));
|
| __ ldrb(r4, FieldMemOperand(r4, Map::kInstanceTypeOffset));
|
| @@ -5698,7 +5681,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
|
| // r6: sum of lengths.
|
| Label first_prepared, second_prepared;
|
| __ bind(&string_add_flat_result);
|
| - if (flags_ != NO_STRING_ADD_FLAGS) {
|
| + if ((flags_ & STRING_ADD_CHECK_BOTH) != STRING_ADD_CHECK_BOTH) {
|
| __ ldr(r4, FieldMemOperand(r0, HeapObject::kMapOffset));
|
| __ ldr(r5, FieldMemOperand(r1, HeapObject::kMapOffset));
|
| __ ldrb(r4, FieldMemOperand(r4, Map::kInstanceTypeOffset));
|
| @@ -5786,7 +5769,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
|
|
|
| // Just jump to runtime to add the two strings.
|
| __ bind(&call_runtime);
|
| - if ((flags_ & ERECT_FRAME) != 0) {
|
| + if ((flags_ & STRING_ADD_ERECT_FRAME) != 0) {
|
| GenerateRegisterArgsPop(masm);
|
| // Build a frame
|
| {
|
| @@ -5801,7 +5784,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
|
|
|
| if (call_builtin.is_linked()) {
|
| __ bind(&call_builtin);
|
| - if ((flags_ & ERECT_FRAME) != 0) {
|
| + if ((flags_ & STRING_ADD_ERECT_FRAME) != 0) {
|
| GenerateRegisterArgsPop(masm);
|
| // Build a frame
|
| {
|
| @@ -5853,7 +5836,6 @@ void StringAddStub::GenerateConvertArgument(MacroAssembler* masm,
|
| scratch2,
|
| scratch3,
|
| scratch4,
|
| - false,
|
| &not_cached);
|
| __ mov(arg, scratch1);
|
| __ str(arg, MemOperand(sp, stack_offset));
|
| @@ -5994,14 +5976,9 @@ void ICCompareStub::GenerateInternalizedStrings(MacroAssembler* masm) {
|
| __ ldr(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
|
| __ ldrb(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset));
|
| __ ldrb(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset));
|
| - STATIC_ASSERT(kInternalizedTag != 0);
|
| -
|
| - __ and_(tmp1, tmp1, Operand(kIsNotStringMask | kIsInternalizedMask));
|
| - __ cmp(tmp1, Operand(kInternalizedTag | kStringTag));
|
| - __ b(ne, &miss);
|
| -
|
| - __ and_(tmp2, tmp2, Operand(kIsNotStringMask | kIsInternalizedMask));
|
| - __ cmp(tmp2, Operand(kInternalizedTag | kStringTag));
|
| + STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
|
| + __ orr(tmp1, tmp1, Operand(tmp2));
|
| + __ tst(tmp1, Operand(kIsNotStringMask | kIsNotInternalizedMask));
|
| __ b(ne, &miss);
|
|
|
| // Internalized strings are compared by identity.
|
| @@ -6035,7 +6012,6 @@ void ICCompareStub::GenerateUniqueNames(MacroAssembler* masm) {
|
|
|
| // Check that both operands are unique names. This leaves the instance
|
| // types loaded in tmp1 and tmp2.
|
| - STATIC_ASSERT(kInternalizedTag != 0);
|
| __ ldr(tmp1, FieldMemOperand(left, HeapObject::kMapOffset));
|
| __ ldr(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
|
| __ ldrb(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset));
|
| @@ -6101,13 +6077,13 @@ void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
|
| // strings.
|
| if (equality) {
|
| ASSERT(GetCondition() == eq);
|
| - STATIC_ASSERT(kInternalizedTag != 0);
|
| - __ and_(tmp3, tmp1, Operand(tmp2));
|
| - __ tst(tmp3, Operand(kIsInternalizedMask));
|
| + STATIC_ASSERT(kInternalizedTag == 0);
|
| + __ orr(tmp3, tmp1, Operand(tmp2));
|
| + __ tst(tmp3, Operand(kIsNotInternalizedMask));
|
| // Make sure r0 is non-zero. At this point input operands are
|
| // guaranteed to be non-zero.
|
| ASSERT(right.is(r0));
|
| - __ Ret(ne);
|
| + __ Ret(eq);
|
| }
|
|
|
| // Check that both strings are sequential ASCII.
|
|
|