| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 1351 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1362 Label is_smi; | 1362 Label is_smi; |
| 1363 Label load_result_from_cache; | 1363 Label load_result_from_cache; |
| 1364 if (!object_is_smi) { | 1364 if (!object_is_smi) { |
| 1365 __ JumpIfSmi(object, &is_smi); | 1365 __ JumpIfSmi(object, &is_smi); |
| 1366 if (CpuFeatures::IsSupported(VFP3)) { | 1366 if (CpuFeatures::IsSupported(VFP3)) { |
| 1367 CpuFeatures::Scope scope(VFP3); | 1367 CpuFeatures::Scope scope(VFP3); |
| 1368 __ CheckMap(object, | 1368 __ CheckMap(object, |
| 1369 scratch1, | 1369 scratch1, |
| 1370 Heap::kHeapNumberMapRootIndex, | 1370 Heap::kHeapNumberMapRootIndex, |
| 1371 not_found, | 1371 not_found, |
| 1372 true); | 1372 DONT_DO_SMI_CHECK); |
| 1373 | 1373 |
| 1374 STATIC_ASSERT(8 == kDoubleSize); | 1374 STATIC_ASSERT(8 == kDoubleSize); |
| 1375 __ add(scratch1, | 1375 __ add(scratch1, |
| 1376 object, | 1376 object, |
| 1377 Operand(HeapNumber::kValueOffset - kHeapObjectTag)); | 1377 Operand(HeapNumber::kValueOffset - kHeapObjectTag)); |
| 1378 __ ldm(ia, scratch1, scratch1.bit() | scratch2.bit()); | 1378 __ ldm(ia, scratch1, scratch1.bit() | scratch2.bit()); |
| 1379 __ eor(scratch1, scratch1, Operand(scratch2)); | 1379 __ eor(scratch1, scratch1, Operand(scratch2)); |
| 1380 __ and_(scratch1, scratch1, Operand(mask)); | 1380 __ and_(scratch1, scratch1, Operand(mask)); |
| 1381 | 1381 |
| 1382 // Calculate address of entry in string cache: each entry consists | 1382 // Calculate address of entry in string cache: each entry consists |
| (...skipping 178 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1561 EmitCheckForSymbolsOrObjects(masm, lhs_, rhs_, &flat_string_check, &slow); | 1561 EmitCheckForSymbolsOrObjects(masm, lhs_, rhs_, &flat_string_check, &slow); |
| 1562 } | 1562 } |
| 1563 | 1563 |
| 1564 // Check for both being sequential ASCII strings, and inline if that is the | 1564 // Check for both being sequential ASCII strings, and inline if that is the |
| 1565 // case. | 1565 // case. |
| 1566 __ bind(&flat_string_check); | 1566 __ bind(&flat_string_check); |
| 1567 | 1567 |
| 1568 __ JumpIfNonSmisNotBothSequentialAsciiStrings(lhs_, rhs_, r2, r3, &slow); | 1568 __ JumpIfNonSmisNotBothSequentialAsciiStrings(lhs_, rhs_, r2, r3, &slow); |
| 1569 | 1569 |
| 1570 __ IncrementCounter(isolate->counters()->string_compare_native(), 1, r2, r3); | 1570 __ IncrementCounter(isolate->counters()->string_compare_native(), 1, r2, r3); |
| 1571 StringCompareStub::GenerateCompareFlatAsciiStrings(masm, | 1571 if (cc_ == eq) { |
| 1572 StringCompareStub::GenerateFlatAsciiStringEquals(masm, |
| 1572 lhs_, | 1573 lhs_, |
| 1573 rhs_, | 1574 rhs_, |
| 1574 r2, | 1575 r2, |
| 1575 r3, | 1576 r3, |
| 1576 r4, | 1577 r4); |
| 1577 r5); | 1578 } else { |
| 1579 StringCompareStub::GenerateCompareFlatAsciiStrings(masm, |
| 1580 lhs_, |
| 1581 rhs_, |
| 1582 r2, |
| 1583 r3, |
| 1584 r4, |
| 1585 r5); |
| 1586 } |
| 1578 // Never falls through to here. | 1587 // Never falls through to here. |
| 1579 | 1588 |
| 1580 __ bind(&slow); | 1589 __ bind(&slow); |
| 1581 | 1590 |
| 1582 __ Push(lhs_, rhs_); | 1591 __ Push(lhs_, rhs_); |
| 1583 // Figure out which native to call and setup the arguments. | 1592 // Figure out which native to call and setup the arguments. |
| 1584 Builtins::JavaScript native; | 1593 Builtins::JavaScript native; |
| 1585 if (cc_ == eq) { | 1594 if (cc_ == eq) { |
| 1586 native = strict_ ? Builtins::STRICT_EQUALS : Builtins::EQUALS; | 1595 native = strict_ ? Builtins::STRICT_EQUALS : Builtins::EQUALS; |
| 1587 } else { | 1596 } else { |
| (...skipping 12 matching lines...) Expand all Loading... |
| 1600 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) | 1609 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) |
| 1601 // tagged as a small integer. | 1610 // tagged as a small integer. |
| 1602 __ InvokeBuiltin(native, JUMP_FUNCTION); | 1611 __ InvokeBuiltin(native, JUMP_FUNCTION); |
| 1603 } | 1612 } |
| 1604 | 1613 |
| 1605 | 1614 |
| 1606 // This stub does not handle the inlined cases (Smis, Booleans, undefined). | 1615 // This stub does not handle the inlined cases (Smis, Booleans, undefined). |
| 1607 // The stub returns zero for false, and a non-zero value for true. | 1616 // The stub returns zero for false, and a non-zero value for true. |
| 1608 void ToBooleanStub::Generate(MacroAssembler* masm) { | 1617 void ToBooleanStub::Generate(MacroAssembler* masm) { |
| 1609 // This stub uses VFP3 instructions. | 1618 // This stub uses VFP3 instructions. |
| 1610 ASSERT(CpuFeatures::IsEnabled(VFP3)); | 1619 CpuFeatures::Scope scope(VFP3); |
| 1611 | 1620 |
| 1612 Label false_result; | 1621 Label false_result; |
| 1613 Label not_heap_number; | 1622 Label not_heap_number; |
| 1614 Register scratch = r9.is(tos_) ? r7 : r9; | 1623 Register scratch = r9.is(tos_) ? r7 : r9; |
| 1615 | 1624 |
| 1625 // undefined -> false |
| 1626 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); |
| 1627 __ cmp(tos_, ip); |
| 1628 __ b(eq, &false_result); |
| 1629 |
| 1630 // Boolean -> its value |
| 1631 __ LoadRoot(ip, Heap::kFalseValueRootIndex); |
| 1632 __ cmp(tos_, ip); |
| 1633 __ b(eq, &false_result); |
| 1634 __ LoadRoot(ip, Heap::kTrueValueRootIndex); |
| 1635 __ cmp(tos_, ip); |
| 1636 // "tos_" is a register and contains a non-zero value. Hence we implicitly |
| 1637 // return true if the equal condition is satisfied. |
| 1638 __ Ret(eq); |
| 1639 |
| 1640 // Smis: 0 -> false, all other -> true |
| 1641 __ tst(tos_, tos_); |
| 1642 __ b(eq, &false_result); |
| 1643 __ tst(tos_, Operand(kSmiTagMask)); |
| 1644 // "tos_" is a register and contains a non-zero value. Hence we implicitly |
| 1645 // return true if the not equal condition is satisfied. |
| 1646 __ Ret(eq); |
| 1647 |
| 1648 // 'null' -> false |
| 1616 __ LoadRoot(ip, Heap::kNullValueRootIndex); | 1649 __ LoadRoot(ip, Heap::kNullValueRootIndex); |
| 1617 __ cmp(tos_, ip); | 1650 __ cmp(tos_, ip); |
| 1618 __ b(eq, &false_result); | 1651 __ b(eq, &false_result); |
| 1619 | 1652 |
| 1620 // HeapNumber => false iff +0, -0, or NaN. | 1653 // HeapNumber => false iff +0, -0, or NaN. |
| 1621 __ ldr(scratch, FieldMemOperand(tos_, HeapObject::kMapOffset)); | 1654 __ ldr(scratch, FieldMemOperand(tos_, HeapObject::kMapOffset)); |
| 1622 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex); | 1655 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex); |
| 1623 __ cmp(scratch, ip); | 1656 __ cmp(scratch, ip); |
| 1624 __ b(¬_heap_number, ne); | 1657 __ b(¬_heap_number, ne); |
| 1625 | 1658 __ vldr(d1, FieldMemOperand(tos_, HeapNumber::kValueOffset)); |
| 1626 __ sub(ip, tos_, Operand(kHeapObjectTag)); | |
| 1627 __ vldr(d1, ip, HeapNumber::kValueOffset); | |
| 1628 __ VFPCompareAndSetFlags(d1, 0.0); | 1659 __ VFPCompareAndSetFlags(d1, 0.0); |
| 1629 // "tos_" is a register, and contains a non zero value by default. | 1660 // "tos_" is a register, and contains a non zero value by default. |
| 1630 // Hence we only need to overwrite "tos_" with zero to return false for | 1661 // Hence we only need to overwrite "tos_" with zero to return false for |
| 1631 // FP_ZERO or FP_NAN cases. Otherwise, by default it returns true. | 1662 // FP_ZERO or FP_NAN cases. Otherwise, by default it returns true. |
| 1632 __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, eq); // for FP_ZERO | 1663 __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, eq); // for FP_ZERO |
| 1633 __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, vs); // for FP_NAN | 1664 __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, vs); // for FP_NAN |
| 1634 __ Ret(); | 1665 __ Ret(); |
| 1635 | 1666 |
| 1636 __ bind(¬_heap_number); | 1667 __ bind(¬_heap_number); |
| 1637 | 1668 |
| 1638 // Check if the value is 'null'. | |
| 1639 // 'null' => false. | |
| 1640 __ LoadRoot(ip, Heap::kNullValueRootIndex); | |
| 1641 __ cmp(tos_, ip); | |
| 1642 __ b(&false_result, eq); | |
| 1643 | |
| 1644 // It can be an undetectable object. | 1669 // It can be an undetectable object. |
| 1645 // Undetectable => false. | 1670 // Undetectable => false. |
| 1646 __ ldr(ip, FieldMemOperand(tos_, HeapObject::kMapOffset)); | 1671 __ ldr(ip, FieldMemOperand(tos_, HeapObject::kMapOffset)); |
| 1647 __ ldrb(scratch, FieldMemOperand(ip, Map::kBitFieldOffset)); | 1672 __ ldrb(scratch, FieldMemOperand(ip, Map::kBitFieldOffset)); |
| 1648 __ and_(scratch, scratch, Operand(1 << Map::kIsUndetectable)); | 1673 __ and_(scratch, scratch, Operand(1 << Map::kIsUndetectable)); |
| 1649 __ cmp(scratch, Operand(1 << Map::kIsUndetectable)); | 1674 __ cmp(scratch, Operand(1 << Map::kIsUndetectable)); |
| 1650 __ b(&false_result, eq); | 1675 __ b(&false_result, eq); |
| 1651 | 1676 |
| 1652 // JavaScript object => true. | 1677 // JavaScript object => true. |
| 1653 __ ldr(scratch, FieldMemOperand(tos_, HeapObject::kMapOffset)); | 1678 __ ldr(scratch, FieldMemOperand(tos_, HeapObject::kMapOffset)); |
| (...skipping 161 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1815 case Token::BIT_NOT: | 1840 case Token::BIT_NOT: |
| 1816 GenerateHeapNumberStubBitNot(masm); | 1841 GenerateHeapNumberStubBitNot(masm); |
| 1817 break; | 1842 break; |
| 1818 default: | 1843 default: |
| 1819 UNREACHABLE(); | 1844 UNREACHABLE(); |
| 1820 } | 1845 } |
| 1821 } | 1846 } |
| 1822 | 1847 |
| 1823 | 1848 |
| 1824 void TypeRecordingUnaryOpStub::GenerateHeapNumberStubSub(MacroAssembler* masm) { | 1849 void TypeRecordingUnaryOpStub::GenerateHeapNumberStubSub(MacroAssembler* masm) { |
| 1825 Label non_smi, slow; | 1850 Label non_smi, slow, call_builtin; |
| 1826 GenerateSmiCodeSub(masm, &non_smi, &slow); | 1851 GenerateSmiCodeSub(masm, &non_smi, &call_builtin); |
| 1827 __ bind(&non_smi); | 1852 __ bind(&non_smi); |
| 1828 GenerateHeapNumberCodeSub(masm, &slow); | 1853 GenerateHeapNumberCodeSub(masm, &slow); |
| 1829 __ bind(&slow); | 1854 __ bind(&slow); |
| 1830 GenerateTypeTransition(masm); | 1855 GenerateTypeTransition(masm); |
| 1856 __ bind(&call_builtin); |
| 1857 GenerateGenericCodeFallback(masm); |
| 1831 } | 1858 } |
| 1832 | 1859 |
| 1833 | 1860 |
| 1834 void TypeRecordingUnaryOpStub::GenerateHeapNumberStubBitNot( | 1861 void TypeRecordingUnaryOpStub::GenerateHeapNumberStubBitNot( |
| 1835 MacroAssembler* masm) { | 1862 MacroAssembler* masm) { |
| 1836 Label non_smi, slow; | 1863 Label non_smi, slow; |
| 1837 GenerateSmiCodeBitNot(masm, &non_smi); | 1864 GenerateSmiCodeBitNot(masm, &non_smi); |
| 1838 __ bind(&non_smi); | 1865 __ bind(&non_smi); |
| 1839 GenerateHeapNumberCodeBitNot(masm, &slow); | 1866 GenerateHeapNumberCodeBitNot(masm, &slow); |
| 1840 __ bind(&slow); | 1867 __ bind(&slow); |
| (...skipping 1164 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3005 // of the double into r2, r3. | 3032 // of the double into r2, r3. |
| 3006 __ IntegerToDoubleConversionWithVFP3(r0, r3, r2); | 3033 __ IntegerToDoubleConversionWithVFP3(r0, r3, r2); |
| 3007 __ b(&loaded); | 3034 __ b(&loaded); |
| 3008 | 3035 |
| 3009 __ bind(&input_not_smi); | 3036 __ bind(&input_not_smi); |
| 3010 // Check if input is a HeapNumber. | 3037 // Check if input is a HeapNumber. |
| 3011 __ CheckMap(r0, | 3038 __ CheckMap(r0, |
| 3012 r1, | 3039 r1, |
| 3013 Heap::kHeapNumberMapRootIndex, | 3040 Heap::kHeapNumberMapRootIndex, |
| 3014 &calculate, | 3041 &calculate, |
| 3015 true); | 3042 DONT_DO_SMI_CHECK); |
| 3016 // Input is a HeapNumber. Load it to a double register and store the | 3043 // Input is a HeapNumber. Load it to a double register and store the |
| 3017 // low and high words into r2, r3. | 3044 // low and high words into r2, r3. |
| 3018 __ vldr(d0, FieldMemOperand(r0, HeapNumber::kValueOffset)); | 3045 __ vldr(d0, FieldMemOperand(r0, HeapNumber::kValueOffset)); |
| 3019 __ vmov(r2, r3, d0); | 3046 __ vmov(r2, r3, d0); |
| 3020 } else { | 3047 } else { |
| 3021 // Input is untagged double in d2. Output goes to d2. | 3048 // Input is untagged double in d2. Output goes to d2. |
| 3022 __ vmov(r2, r3, d2); | 3049 __ vmov(r2, r3, d2); |
| 3023 } | 3050 } |
| 3024 __ bind(&loaded); | 3051 __ bind(&loaded); |
| 3025 // r2 = low 32 bits of double value | 3052 // r2 = low 32 bits of double value |
| (...skipping 522 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3548 __ mov(r5, | 3575 __ mov(r5, |
| 3549 Operand(ExternalReference(Isolate::k_c_entry_fp_address, isolate))); | 3576 Operand(ExternalReference(Isolate::k_c_entry_fp_address, isolate))); |
| 3550 __ ldr(r5, MemOperand(r5)); | 3577 __ ldr(r5, MemOperand(r5)); |
| 3551 __ Push(r8, r7, r6, r5); | 3578 __ Push(r8, r7, r6, r5); |
| 3552 | 3579 |
| 3553 // Setup frame pointer for the frame to be pushed. | 3580 // Setup frame pointer for the frame to be pushed. |
| 3554 __ add(fp, sp, Operand(-EntryFrameConstants::kCallerFPOffset)); | 3581 __ add(fp, sp, Operand(-EntryFrameConstants::kCallerFPOffset)); |
| 3555 | 3582 |
| 3556 #ifdef ENABLE_LOGGING_AND_PROFILING | 3583 #ifdef ENABLE_LOGGING_AND_PROFILING |
| 3557 // If this is the outermost JS call, set js_entry_sp value. | 3584 // If this is the outermost JS call, set js_entry_sp value. |
| 3585 Label non_outermost_js; |
| 3558 ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address, isolate); | 3586 ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address, isolate); |
| 3559 __ mov(r5, Operand(ExternalReference(js_entry_sp))); | 3587 __ mov(r5, Operand(ExternalReference(js_entry_sp))); |
| 3560 __ ldr(r6, MemOperand(r5)); | 3588 __ ldr(r6, MemOperand(r5)); |
| 3561 __ cmp(r6, Operand(0, RelocInfo::NONE)); | 3589 __ cmp(r6, Operand(0)); |
| 3562 __ str(fp, MemOperand(r5), eq); | 3590 __ b(ne, &non_outermost_js); |
| 3591 __ str(fp, MemOperand(r5)); |
| 3592 __ mov(ip, Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME))); |
| 3593 Label cont; |
| 3594 __ b(&cont); |
| 3595 __ bind(&non_outermost_js); |
| 3596 __ mov(ip, Operand(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME))); |
| 3597 __ bind(&cont); |
| 3598 __ push(ip); |
| 3563 #endif | 3599 #endif |
| 3564 | 3600 |
| 3565 // Call a faked try-block that does the invoke. | 3601 // Call a faked try-block that does the invoke. |
| 3566 __ bl(&invoke); | 3602 __ bl(&invoke); |
| 3567 | 3603 |
| 3568 // Caught exception: Store result (exception) in the pending | 3604 // Caught exception: Store result (exception) in the pending |
| 3569 // exception field in the JSEnv and return a failure sentinel. | 3605 // exception field in the JSEnv and return a failure sentinel. |
| 3570 // Coming in here the fp will be invalid because the PushTryHandler below | 3606 // Coming in here the fp will be invalid because the PushTryHandler below |
| 3571 // sets it to 0 to signal the existence of the JSEntry frame. | 3607 // sets it to 0 to signal the existence of the JSEntry frame. |
| 3572 __ mov(ip, Operand(ExternalReference(Isolate::k_pending_exception_address, | 3608 __ mov(ip, Operand(ExternalReference(Isolate::k_pending_exception_address, |
| (...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3610 __ mov(ip, Operand(entry)); | 3646 __ mov(ip, Operand(entry)); |
| 3611 } | 3647 } |
| 3612 __ ldr(ip, MemOperand(ip)); // deref address | 3648 __ ldr(ip, MemOperand(ip)); // deref address |
| 3613 | 3649 |
| 3614 // Branch and link to JSEntryTrampoline. We don't use the double underscore | 3650 // Branch and link to JSEntryTrampoline. We don't use the double underscore |
| 3615 // macro for the add instruction because we don't want the coverage tool | 3651 // macro for the add instruction because we don't want the coverage tool |
| 3616 // inserting instructions here after we read the pc. | 3652 // inserting instructions here after we read the pc. |
| 3617 __ mov(lr, Operand(pc)); | 3653 __ mov(lr, Operand(pc)); |
| 3618 masm->add(pc, ip, Operand(Code::kHeaderSize - kHeapObjectTag)); | 3654 masm->add(pc, ip, Operand(Code::kHeaderSize - kHeapObjectTag)); |
| 3619 | 3655 |
| 3620 // Unlink this frame from the handler chain. When reading the | 3656 // Unlink this frame from the handler chain. |
| 3621 // address of the next handler, there is no need to use the address | 3657 __ PopTryHandler(); |
| 3622 // displacement since the current stack pointer (sp) points directly | |
| 3623 // to the stack handler. | |
| 3624 __ ldr(r3, MemOperand(sp, StackHandlerConstants::kNextOffset)); | |
| 3625 __ mov(ip, Operand(ExternalReference(Isolate::k_handler_address, isolate))); | |
| 3626 __ str(r3, MemOperand(ip)); | |
| 3627 // No need to restore registers | |
| 3628 __ add(sp, sp, Operand(StackHandlerConstants::kSize)); | |
| 3629 | 3658 |
| 3659 __ bind(&exit); // r0 holds result |
| 3630 #ifdef ENABLE_LOGGING_AND_PROFILING | 3660 #ifdef ENABLE_LOGGING_AND_PROFILING |
| 3631 // If current FP value is the same as js_entry_sp value, it means that | 3661 // Check if the current stack frame is marked as the outermost JS frame. |
| 3632 // the current function is the outermost. | 3662 Label non_outermost_js_2; |
| 3663 __ pop(r5); |
| 3664 __ cmp(r5, Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME))); |
| 3665 __ b(ne, &non_outermost_js_2); |
| 3666 __ mov(r6, Operand(0)); |
| 3633 __ mov(r5, Operand(ExternalReference(js_entry_sp))); | 3667 __ mov(r5, Operand(ExternalReference(js_entry_sp))); |
| 3634 __ ldr(r6, MemOperand(r5)); | 3668 __ str(r6, MemOperand(r5)); |
| 3635 __ cmp(fp, Operand(r6)); | 3669 __ bind(&non_outermost_js_2); |
| 3636 __ mov(r6, Operand(0, RelocInfo::NONE), LeaveCC, eq); | |
| 3637 __ str(r6, MemOperand(r5), eq); | |
| 3638 #endif | 3670 #endif |
| 3639 | 3671 |
| 3640 __ bind(&exit); // r0 holds result | |
| 3641 // Restore the top frame descriptors from the stack. | 3672 // Restore the top frame descriptors from the stack. |
| 3642 __ pop(r3); | 3673 __ pop(r3); |
| 3643 __ mov(ip, | 3674 __ mov(ip, |
| 3644 Operand(ExternalReference(Isolate::k_c_entry_fp_address, isolate))); | 3675 Operand(ExternalReference(Isolate::k_c_entry_fp_address, isolate))); |
| 3645 __ str(r3, MemOperand(ip)); | 3676 __ str(r3, MemOperand(ip)); |
| 3646 | 3677 |
| 3647 // Reset the stack to the callee saved registers. | 3678 // Reset the stack to the callee saved registers. |
| 3648 __ add(sp, sp, Operand(-EntryFrameConstants::kCallerFPOffset)); | 3679 __ add(sp, sp, Operand(-EntryFrameConstants::kCallerFPOffset)); |
| 3649 | 3680 |
| 3650 // Restore callee-saved registers and return. | 3681 // Restore callee-saved registers and return. |
| (...skipping 1026 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4677 MacroAssembler* masm, const RuntimeCallHelper& call_helper) { | 4708 MacroAssembler* masm, const RuntimeCallHelper& call_helper) { |
| 4678 __ Abort("Unexpected fallthrough to CharCodeAt slow case"); | 4709 __ Abort("Unexpected fallthrough to CharCodeAt slow case"); |
| 4679 | 4710 |
| 4680 // Index is not a smi. | 4711 // Index is not a smi. |
| 4681 __ bind(&index_not_smi_); | 4712 __ bind(&index_not_smi_); |
| 4682 // If index is a heap number, try converting it to an integer. | 4713 // If index is a heap number, try converting it to an integer. |
| 4683 __ CheckMap(index_, | 4714 __ CheckMap(index_, |
| 4684 scratch_, | 4715 scratch_, |
| 4685 Heap::kHeapNumberMapRootIndex, | 4716 Heap::kHeapNumberMapRootIndex, |
| 4686 index_not_number_, | 4717 index_not_number_, |
| 4687 true); | 4718 DONT_DO_SMI_CHECK); |
| 4688 call_helper.BeforeCall(masm); | 4719 call_helper.BeforeCall(masm); |
| 4689 __ Push(object_, index_); | 4720 __ Push(object_, index_); |
| 4690 __ push(index_); // Consumed by runtime conversion function. | 4721 __ push(index_); // Consumed by runtime conversion function. |
| 4691 if (index_flags_ == STRING_INDEX_IS_NUMBER) { | 4722 if (index_flags_ == STRING_INDEX_IS_NUMBER) { |
| 4692 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1); | 4723 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1); |
| 4693 } else { | 4724 } else { |
| 4694 ASSERT(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX); | 4725 ASSERT(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX); |
| 4695 // NumberToSmi discards numbers that are not exact integers. | 4726 // NumberToSmi discards numbers that are not exact integers. |
| 4696 __ CallRuntime(Runtime::kNumberToSmi, 1); | 4727 __ CallRuntime(Runtime::kNumberToSmi, 1); |
| 4697 } | 4728 } |
| (...skipping 691 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5389 __ IncrementCounter(counters->sub_string_native(), 1, r3, r4); | 5420 __ IncrementCounter(counters->sub_string_native(), 1, r3, r4); |
| 5390 __ add(sp, sp, Operand(3 * kPointerSize)); | 5421 __ add(sp, sp, Operand(3 * kPointerSize)); |
| 5391 __ Ret(); | 5422 __ Ret(); |
| 5392 | 5423 |
| 5393 // Just jump to runtime to create the sub string. | 5424 // Just jump to runtime to create the sub string. |
| 5394 __ bind(&runtime); | 5425 __ bind(&runtime); |
| 5395 __ TailCallRuntime(Runtime::kSubString, 3, 1); | 5426 __ TailCallRuntime(Runtime::kSubString, 3, 1); |
| 5396 } | 5427 } |
| 5397 | 5428 |
| 5398 | 5429 |
| 5430 void StringCompareStub::GenerateFlatAsciiStringEquals(MacroAssembler* masm, |
| 5431 Register left, |
| 5432 Register right, |
| 5433 Register scratch1, |
| 5434 Register scratch2, |
| 5435 Register scratch3) { |
| 5436 Register length = scratch1; |
| 5437 |
| 5438 // Compare lengths. |
| 5439 Label strings_not_equal, check_zero_length; |
| 5440 __ ldr(length, FieldMemOperand(left, String::kLengthOffset)); |
| 5441 __ ldr(scratch2, FieldMemOperand(right, String::kLengthOffset)); |
| 5442 __ cmp(length, scratch2); |
| 5443 __ b(eq, &check_zero_length); |
| 5444 __ bind(&strings_not_equal); |
| 5445 __ mov(r0, Operand(Smi::FromInt(NOT_EQUAL))); |
| 5446 __ Ret(); |
| 5447 |
| 5448 // Check if the length is zero. |
| 5449 Label compare_chars; |
| 5450 __ bind(&check_zero_length); |
| 5451 STATIC_ASSERT(kSmiTag == 0); |
| 5452 __ tst(length, Operand(length)); |
| 5453 __ b(ne, &compare_chars); |
| 5454 __ mov(r0, Operand(Smi::FromInt(EQUAL))); |
| 5455 __ Ret(); |
| 5456 |
| 5457 // Compare characters. |
| 5458 __ bind(&compare_chars); |
| 5459 GenerateAsciiCharsCompareLoop(masm, |
| 5460 left, right, length, scratch2, scratch3, |
| 5461 &strings_not_equal); |
| 5462 |
| 5463 // Characters are equal. |
| 5464 __ mov(r0, Operand(Smi::FromInt(EQUAL))); |
| 5465 __ Ret(); |
| 5466 } |
| 5467 |
| 5468 |
| 5399 void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm, | 5469 void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm, |
| 5400 Register left, | 5470 Register left, |
| 5401 Register right, | 5471 Register right, |
| 5402 Register scratch1, | 5472 Register scratch1, |
| 5403 Register scratch2, | 5473 Register scratch2, |
| 5404 Register scratch3, | 5474 Register scratch3, |
| 5405 Register scratch4) { | 5475 Register scratch4) { |
| 5406 Label compare_lengths; | 5476 Label result_not_equal, compare_lengths; |
| 5407 // Find minimum length and length difference. | 5477 // Find minimum length and length difference. |
| 5408 __ ldr(scratch1, FieldMemOperand(left, String::kLengthOffset)); | 5478 __ ldr(scratch1, FieldMemOperand(left, String::kLengthOffset)); |
| 5409 __ ldr(scratch2, FieldMemOperand(right, String::kLengthOffset)); | 5479 __ ldr(scratch2, FieldMemOperand(right, String::kLengthOffset)); |
| 5410 __ sub(scratch3, scratch1, Operand(scratch2), SetCC); | 5480 __ sub(scratch3, scratch1, Operand(scratch2), SetCC); |
| 5411 Register length_delta = scratch3; | 5481 Register length_delta = scratch3; |
| 5412 __ mov(scratch1, scratch2, LeaveCC, gt); | 5482 __ mov(scratch1, scratch2, LeaveCC, gt); |
| 5413 Register min_length = scratch1; | 5483 Register min_length = scratch1; |
| 5414 STATIC_ASSERT(kSmiTag == 0); | 5484 STATIC_ASSERT(kSmiTag == 0); |
| 5415 __ tst(min_length, Operand(min_length)); | 5485 __ tst(min_length, Operand(min_length)); |
| 5416 __ b(eq, &compare_lengths); | 5486 __ b(eq, &compare_lengths); |
| 5417 | 5487 |
| 5418 // Untag smi. | 5488 // Compare loop. |
| 5419 __ mov(min_length, Operand(min_length, ASR, kSmiTagSize)); | 5489 GenerateAsciiCharsCompareLoop(masm, |
| 5490 left, right, min_length, scratch2, scratch4, |
| 5491 &result_not_equal); |
| 5420 | 5492 |
| 5421 // Setup registers so that we only need to increment one register | 5493 // Compare lengths - strings up to min-length are equal. |
| 5422 // in the loop. | |
| 5423 __ add(scratch2, min_length, | |
| 5424 Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); | |
| 5425 __ add(left, left, Operand(scratch2)); | |
| 5426 __ add(right, right, Operand(scratch2)); | |
| 5427 // Registers left and right points to the min_length character of strings. | |
| 5428 __ rsb(min_length, min_length, Operand(-1)); | |
| 5429 Register index = min_length; | |
| 5430 // Index starts at -min_length. | |
| 5431 | |
| 5432 { | |
| 5433 // Compare loop. | |
| 5434 Label loop; | |
| 5435 __ bind(&loop); | |
| 5436 // Compare characters. | |
| 5437 __ add(index, index, Operand(1), SetCC); | |
| 5438 __ ldrb(scratch2, MemOperand(left, index), ne); | |
| 5439 __ ldrb(scratch4, MemOperand(right, index), ne); | |
| 5440 // Skip to compare lengths with eq condition true. | |
| 5441 __ b(eq, &compare_lengths); | |
| 5442 __ cmp(scratch2, scratch4); | |
| 5443 __ b(eq, &loop); | |
| 5444 // Fallthrough with eq condition false. | |
| 5445 } | |
| 5446 // Compare lengths - strings up to min-length are equal. | |
| 5447 __ bind(&compare_lengths); | 5494 __ bind(&compare_lengths); |
| 5448 ASSERT(Smi::FromInt(EQUAL) == static_cast<Smi*>(0)); | 5495 ASSERT(Smi::FromInt(EQUAL) == static_cast<Smi*>(0)); |
| 5449 // Use zero length_delta as result. | 5496 // Use length_delta as result if it's zero. |
| 5450 __ mov(r0, Operand(length_delta), SetCC, eq); | 5497 __ mov(r0, Operand(length_delta), SetCC); |
| 5451 // Fall through to here if characters compare not-equal. | 5498 __ bind(&result_not_equal); |
| 5499 // Conditionally update the result based either on length_delta or |
| 5500 // the last comparison performed in the loop above. |
| 5452 __ mov(r0, Operand(Smi::FromInt(GREATER)), LeaveCC, gt); | 5501 __ mov(r0, Operand(Smi::FromInt(GREATER)), LeaveCC, gt); |
| 5453 __ mov(r0, Operand(Smi::FromInt(LESS)), LeaveCC, lt); | 5502 __ mov(r0, Operand(Smi::FromInt(LESS)), LeaveCC, lt); |
| 5454 __ Ret(); | 5503 __ Ret(); |
| 5455 } | 5504 } |
| 5456 | 5505 |
| 5457 | 5506 |
| 5507 void StringCompareStub::GenerateAsciiCharsCompareLoop( |
| 5508 MacroAssembler* masm, |
| 5509 Register left, |
| 5510 Register right, |
| 5511 Register length, |
| 5512 Register scratch1, |
| 5513 Register scratch2, |
| 5514 Label* chars_not_equal) { |
| 5515 // Change index to run from -length to -1 by adding length to string |
| 5516 // start. This means that loop ends when index reaches zero, which |
| 5517 // doesn't need an additional compare. |
| 5518 __ SmiUntag(length); |
| 5519 __ add(scratch1, length, |
| 5520 Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); |
| 5521 __ add(left, left, Operand(scratch1)); |
| 5522 __ add(right, right, Operand(scratch1)); |
| 5523 __ rsb(length, length, Operand(0)); |
| 5524 Register index = length; // index = -length; |
| 5525 |
| 5526 // Compare loop. |
| 5527 Label loop; |
| 5528 __ bind(&loop); |
| 5529 __ ldrb(scratch1, MemOperand(left, index)); |
| 5530 __ ldrb(scratch2, MemOperand(right, index)); |
| 5531 __ cmp(scratch1, scratch2); |
| 5532 __ b(ne, chars_not_equal); |
| 5533 __ add(index, index, Operand(1), SetCC); |
| 5534 __ b(ne, &loop); |
| 5535 } |
| 5536 |
| 5537 |
| 5458 void StringCompareStub::Generate(MacroAssembler* masm) { | 5538 void StringCompareStub::Generate(MacroAssembler* masm) { |
| 5459 Label runtime; | 5539 Label runtime; |
| 5460 | 5540 |
| 5461 Counters* counters = masm->isolate()->counters(); | 5541 Counters* counters = masm->isolate()->counters(); |
| 5462 | 5542 |
| 5463 // Stack frame on entry. | 5543 // Stack frame on entry. |
| 5464 // sp[0]: right string | 5544 // sp[0]: right string |
| 5465 // sp[4]: left string | 5545 // sp[4]: left string |
| 5466 __ Ldrd(r0 , r1, MemOperand(sp)); // Load right in r0, left in r1. | 5546 __ Ldrd(r0 , r1, MemOperand(sp)); // Load right in r0, left in r1. |
| 5467 | 5547 |
| (...skipping 432 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5900 | 5980 |
| 5901 CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS, r1, r0); | 5981 CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS, r1, r0); |
| 5902 __ bind(&generic_stub); | 5982 __ bind(&generic_stub); |
| 5903 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); | 5983 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); |
| 5904 | 5984 |
| 5905 __ bind(&miss); | 5985 __ bind(&miss); |
| 5906 GenerateMiss(masm); | 5986 GenerateMiss(masm); |
| 5907 } | 5987 } |
| 5908 | 5988 |
| 5909 | 5989 |
| 5990 void ICCompareStub::GenerateSymbols(MacroAssembler* masm) { |
| 5991 ASSERT(state_ == CompareIC::SYMBOLS); |
| 5992 Label miss; |
| 5993 |
| 5994 // Registers containing left and right operands respectively. |
| 5995 Register left = r1; |
| 5996 Register right = r0; |
| 5997 Register tmp1 = r2; |
| 5998 Register tmp2 = r3; |
| 5999 |
| 6000 // Check that both operands are heap objects. |
| 6001 __ JumpIfEitherSmi(left, right, &miss); |
| 6002 |
| 6003 // Check that both operands are symbols. |
| 6004 __ ldr(tmp1, FieldMemOperand(left, HeapObject::kMapOffset)); |
| 6005 __ ldr(tmp2, FieldMemOperand(right, HeapObject::kMapOffset)); |
| 6006 __ ldrb(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset)); |
| 6007 __ ldrb(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset)); |
| 6008 STATIC_ASSERT(kSymbolTag != 0); |
| 6009 __ and_(tmp1, tmp1, Operand(tmp2)); |
| 6010 __ tst(tmp1, Operand(kIsSymbolMask)); |
| 6011 __ b(eq, &miss); |
| 6012 |
| 6013 // Symbols are compared by identity. |
| 6014 __ cmp(left, right); |
| 6015 // Make sure r0 is non-zero. At this point input operands are |
| 6016 // guaranteed to be non-zero. |
| 6017 ASSERT(right.is(r0)); |
| 6018 STATIC_ASSERT(EQUAL == 0); |
| 6019 STATIC_ASSERT(kSmiTag == 0); |
| 6020 __ mov(r0, Operand(Smi::FromInt(EQUAL)), LeaveCC, eq); |
| 6021 __ Ret(); |
| 6022 |
| 6023 __ bind(&miss); |
| 6024 GenerateMiss(masm); |
| 6025 } |
| 6026 |
| 6027 |
| 5910 void ICCompareStub::GenerateStrings(MacroAssembler* masm) { | 6028 void ICCompareStub::GenerateStrings(MacroAssembler* masm) { |
| 5911 ASSERT(state_ == CompareIC::STRINGS); | 6029 ASSERT(state_ == CompareIC::STRINGS); |
| 5912 Label miss; | 6030 Label miss; |
| 5913 | 6031 |
| 5914 // Registers containing left and right operands respectively. | 6032 // Registers containing left and right operands respectively. |
| 5915 Register left = r1; | 6033 Register left = r1; |
| 5916 Register right = r0; | 6034 Register right = r0; |
| 5917 Register tmp1 = r2; | 6035 Register tmp1 = r2; |
| 5918 Register tmp2 = r3; | 6036 Register tmp2 = r3; |
| 5919 Register tmp3 = r4; | 6037 Register tmp3 = r4; |
| (...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5952 // guaranteed to be non-zero. | 6070 // guaranteed to be non-zero. |
| 5953 ASSERT(right.is(r0)); | 6071 ASSERT(right.is(r0)); |
| 5954 __ Ret(ne); | 6072 __ Ret(ne); |
| 5955 | 6073 |
| 5956 // Check that both strings are sequential ASCII. | 6074 // Check that both strings are sequential ASCII. |
| 5957 Label runtime; | 6075 Label runtime; |
| 5958 __ JumpIfBothInstanceTypesAreNotSequentialAscii(tmp1, tmp2, tmp3, tmp4, | 6076 __ JumpIfBothInstanceTypesAreNotSequentialAscii(tmp1, tmp2, tmp3, tmp4, |
| 5959 &runtime); | 6077 &runtime); |
| 5960 | 6078 |
| 5961 // Compare flat ASCII strings. Returns when done. | 6079 // Compare flat ASCII strings. Returns when done. |
| 5962 StringCompareStub::GenerateCompareFlatAsciiStrings( | 6080 StringCompareStub::GenerateFlatAsciiStringEquals( |
| 5963 masm, left, right, tmp1, tmp2, tmp3, tmp4); | 6081 masm, left, right, tmp1, tmp2, tmp3); |
| 5964 | 6082 |
| 5965 // Handle more complex cases in runtime. | 6083 // Handle more complex cases in runtime. |
| 5966 __ bind(&runtime); | 6084 __ bind(&runtime); |
| 5967 __ Push(left, right); | 6085 __ Push(left, right); |
| 5968 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); | 6086 __ TailCallRuntime(Runtime::kStringEquals, 2, 1); |
| 5969 | 6087 |
| 5970 __ bind(&miss); | 6088 __ bind(&miss); |
| 5971 GenerateMiss(masm); | 6089 GenerateMiss(masm); |
| 5972 } | 6090 } |
| 5973 | 6091 |
| 5974 | 6092 |
| 5975 void ICCompareStub::GenerateObjects(MacroAssembler* masm) { | 6093 void ICCompareStub::GenerateObjects(MacroAssembler* masm) { |
| 5976 ASSERT(state_ == CompareIC::OBJECTS); | 6094 ASSERT(state_ == CompareIC::OBJECTS); |
| 5977 Label miss; | 6095 Label miss; |
| 5978 __ and_(r2, r1, Operand(r0)); | 6096 __ and_(r2, r1, Operand(r0)); |
| (...skipping 56 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
// Emits a call to the C++ function in |target| routed through this
// stub's code object: lr is pre-loaded with the stub's own entry so the
// callee returns into the stub rather than directly to the caller.
void DirectCEntryStub::GenerateCall(MacroAssembler* masm,
                                    Register target) {
  __ mov(lr, Operand(reinterpret_cast<intptr_t>(GetCode().location()),
                     RelocInfo::CODE_TARGET));
  // Push return address (accessible to GC through exit frame pc).
  __ str(pc, MemOperand(sp, 0));
  __ Jump(target);  // Call the C++ function.
}
| 6043 | 6161 |
| 6044 | 6162 |
// Probes the string dictionary in |properties| to prove the ABSENCE of
// |name|.  Jumps to |done| when the property is provably not present and
// to |miss| when it is (or might be).  Only kInlinedProbes probes are
// emitted inline; the remaining probing is delegated to a NEGATIVE_LOOKUP
// stub call.  Returns a failure object if the stub code could not be
// allocated.  Clobbers |scratch0| and temporarily reuses |properties|.
MaybeObject* StringDictionaryLookupStub::GenerateNegativeLookup(
    MacroAssembler* masm,
    Label* miss,
    Label* done,
    Register receiver,
    Register properties,
    String* name,
    Register scratch0) {
  // If names of slots in range from 1 to kProbes - 1 for the hash value are
  // not equal to the name and kProbes-th slot is not used (its name is the
  // undefined value), it guarantees the hash table doesn't contain the
  // property. It's true even if some slots represent deleted properties
  // (their names are the null value).
  for (int i = 0; i < kInlinedProbes; i++) {
    // scratch0 points to properties hash.
    // Compute the masked index: (hash + i + i * i) & mask.
    Register index = scratch0;
    // Capacity is smi 2^n, so capacity - 1 is the smi-tagged mask.
    __ ldr(index, FieldMemOperand(properties, kCapacityOffset));
    __ sub(index, index, Operand(1));
    // The hash is known at code-generation time, so the probe offset can
    // be folded into the immediate.
    __ and_(index, index, Operand(
        Smi::FromInt(name->Hash() + StringDictionary::GetProbeOffset(i))));

    // Scale the index by multiplying by the entry size.
    ASSERT(StringDictionary::kEntrySize == 3);
    __ add(index, index, Operand(index, LSL, 1));  // index *= 3.

    Register entity_name = scratch0;
    // Having undefined at this place means the name is not contained.
    ASSERT_EQ(kSmiTagSize, 1);
    // |properties| is reused as a temporary here and restored from the
    // receiver at the bottom of the loop body.
    Register tmp = properties;
    __ add(tmp, properties, Operand(index, LSL, 1));
    __ ldr(entity_name, FieldMemOperand(tmp, kElementsStartOffset));

    ASSERT(!tmp.is(entity_name));
    __ LoadRoot(tmp, Heap::kUndefinedValueRootIndex);
    __ cmp(entity_name, tmp);
    // Undefined slot terminates the probe sequence: name is absent.
    __ b(eq, done);

    if (i != kInlinedProbes - 1) {
      // Stop if found the property.
      __ cmp(entity_name, Operand(Handle<String>(name)));
      __ b(eq, miss);

      // Check if the entry name is not a symbol: a non-symbol key means
      // the absence proof cannot be completed inline.
      __ ldr(entity_name, FieldMemOperand(entity_name, HeapObject::kMapOffset));
      __ ldrb(entity_name,
              FieldMemOperand(entity_name, Map::kInstanceTypeOffset));
      __ tst(entity_name, Operand(kIsSymbolMask));
      __ b(eq, miss);

      // Restore the properties register, which was clobbered via |tmp|.
      __ ldr(properties,
             FieldMemOperand(receiver, JSObject::kPropertiesOffset));
    }
  }

  // Preserve every register the stub call below may clobber.
  const int spill_mask =
      (lr.bit() | r6.bit() | r5.bit() | r4.bit() | r3.bit() |
       r2.bit() | r1.bit() | r0.bit());

  __ stm(db_w, sp, spill_mask);
  // The stub expects the dictionary in r0 and the key in r1.
  __ ldr(r0, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
  __ mov(r1, Operand(Handle<String>(name)));
  StringDictionaryLookupStub stub(NEGATIVE_LOOKUP);
  MaybeObject* result = masm->TryCallStub(&stub);
  if (result->IsFailure()) return result;
  // The stub leaves non-zero in r0 if the name was found.  Set the flags
  // before restoring registers; ldm does not affect the condition flags.
  __ tst(r0, Operand(r0));
  __ ldm(ia_w, sp, spill_mask);

  __ b(eq, done);
  __ b(ne, miss);
  return result;
}
| 6237 |
| 6238 |
// Probe the string dictionary in the |elements| register. Jump to the
// |done| label if a property with the given name is found. Jump to
// the |miss| label otherwise.
// If lookup was successful |scratch2| will be equal to elements + 4 * index.
// Only kInlinedProbes probes are emitted inline; further probing is done
// by a POSITIVE_LOOKUP stub call.
void StringDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
                                                        Label* miss,
                                                        Label* done,
                                                        Register elements,
                                                        Register name,
                                                        Register scratch1,
                                                        Register scratch2) {
  // Assert that name contains a string.
  if (FLAG_debug_code) __ AbortIfNotString(name);

  // Compute the capacity mask (capacity is a smi power of two).
  __ ldr(scratch1, FieldMemOperand(elements, kCapacityOffset));
  __ mov(scratch1, Operand(scratch1, ASR, kSmiTagSize));  // convert smi to int
  __ sub(scratch1, scratch1, Operand(1));

  // Generate an unrolled loop that performs a few probes before
  // giving up. Measurements done on Gmail indicate that 2 probes
  // cover ~93% of loads from dictionaries.
  for (int i = 0; i < kInlinedProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    __ ldr(scratch2, FieldMemOperand(name, String::kHashFieldOffset));
    if (i > 0) {
      // Add the probe offset (i + i * i) left shifted to avoid right shifting
      // the hash in a separate instruction. The value hash + i + i * i is right
      // shifted in the following and instruction.
      ASSERT(StringDictionary::GetProbeOffset(i) <
             1 << (32 - String::kHashFieldOffset));
      __ add(scratch2, scratch2, Operand(
          StringDictionary::GetProbeOffset(i) << String::kHashShift));
    }
    __ and_(scratch2, scratch1, Operand(scratch2, LSR, String::kHashShift));

    // Scale the index by multiplying by the element size.
    ASSERT(StringDictionary::kEntrySize == 3);
    // scratch2 = scratch2 * 3.
    __ add(scratch2, scratch2, Operand(scratch2, LSL, 1));

    // Check if the key is identical to the name.
    __ add(scratch2, elements, Operand(scratch2, LSL, 2));
    __ ldr(ip, FieldMemOperand(scratch2, kElementsStartOffset));
    __ cmp(name, Operand(ip));
    __ b(eq, done);
  }

  // Preserve registers around the stub call, excluding the scratch
  // registers, which carry results back to the caller.
  const int spill_mask =
      (lr.bit() | r6.bit() | r5.bit() | r4.bit() |
       r3.bit() | r2.bit() | r1.bit() | r0.bit()) &
      ~(scratch1.bit() | scratch2.bit());

  __ stm(db_w, sp, spill_mask);
  // The stub expects the dictionary in r0 and the key in r1.
  __ Move(r0, elements);
  __ Move(r1, name);
  StringDictionaryLookupStub stub(POSITIVE_LOOKUP);
  __ CallStub(&stub);
  // Non-zero r0 means the key was found.  Set the flags before restoring
  // the spilled registers (ldm does not affect the condition flags), and
  // copy the entry address the stub computed in r2 into scratch2.
  __ tst(r0, Operand(r0));
  __ mov(scratch2, Operand(r2));
  __ ldm(ia_w, sp, spill_mask);

  __ b(ne, done);
  __ b(eq, miss);
}
| 6304 |
| 6305 |
// Out-of-line continuation of the dictionary probing started by
// GenerateNegativeLookup / GeneratePositiveLookup: performs probes
// kInlinedProbes through kTotalProbes - 1.
void StringDictionaryLookupStub::Generate(MacroAssembler* masm) {
  // Registers:
  //  result (r0): on exit, zero if the lookup failed, non-zero otherwise.
  //  dictionary (r0): StringDictionary to probe (aliases result).
  //  key (r1): the string being looked up.
  //  index (r2): on a successful lookup holds the address of the probed
  //      entry (dictionary + 4 * scaled index); may alias result.
  // Returns:
  //  result is zero if lookup failed, non zero otherwise.

  Register result = r0;
  Register dictionary = r0;
  Register key = r1;
  Register index = r2;
  Register mask = r3;
  Register hash = r4;
  Register undefined = r5;
  Register entry_key = r6;

  Label in_dictionary, maybe_in_dictionary, not_in_dictionary;

  // mask = capacity - 1; capacity is a smi power of two.
  __ ldr(mask, FieldMemOperand(dictionary, kCapacityOffset));
  __ mov(mask, Operand(mask, ASR, kSmiTagSize));
  __ sub(mask, mask, Operand(1));

  __ ldr(hash, FieldMemOperand(key, String::kHashFieldOffset));

  // Cache the undefined root: seeing it in a slot ends the probing.
  __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex);

  // Probes 0..kInlinedProbes-1 were already emitted inline by the caller.
  for (int i = kInlinedProbes; i < kTotalProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    // Capacity is smi 2^n.
    if (i > 0) {
      // Add the probe offset (i + i * i) left shifted to avoid right shifting
      // the hash in a separate instruction. The value hash + i + i * i is right
      // shifted in the following and instruction.
      ASSERT(StringDictionary::GetProbeOffset(i) <
             1 << (32 - String::kHashFieldOffset));
      __ add(index, hash, Operand(
          StringDictionary::GetProbeOffset(i) << String::kHashShift));
    } else {
      __ mov(index, Operand(hash));
    }
    __ and_(index, mask, Operand(index, LSR, String::kHashShift));

    // Scale the index by multiplying by the entry size.
    ASSERT(StringDictionary::kEntrySize == 3);
    __ add(index, index, Operand(index, LSL, 1));  // index *= 3.

    ASSERT_EQ(kSmiTagSize, 1);
    __ add(index, dictionary, Operand(index, LSL, 2));
    __ ldr(entry_key, FieldMemOperand(index, kElementsStartOffset));

    // Having undefined at this place means the name is not contained.
    __ cmp(entry_key, Operand(undefined));
    __ b(eq, &not_in_dictionary);

    // Stop if found the property.
    __ cmp(entry_key, Operand(key));
    __ b(eq, &in_dictionary);

    if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) {
      // Check if the entry name is not a symbol: a non-symbol key means a
      // negative lookup cannot be decided by probing alone.
      __ ldr(entry_key, FieldMemOperand(entry_key, HeapObject::kMapOffset));
      __ ldrb(entry_key,
              FieldMemOperand(entry_key, Map::kInstanceTypeOffset));
      __ tst(entry_key, Operand(kIsSymbolMask));
      __ b(eq, &maybe_in_dictionary);
    }
  }

  __ bind(&maybe_in_dictionary);
  // If we are doing negative lookup then probing failure should be
  // treated as a lookup success. For positive lookup probing failure
  // should be treated as lookup failure.
  if (mode_ == POSITIVE_LOOKUP) {
    __ mov(result, Operand(0));
    __ Ret();
  }
  // For NEGATIVE_LOOKUP this falls through to in_dictionary below.

  __ bind(&in_dictionary);
  __ mov(result, Operand(1));
  __ Ret();

  __ bind(&not_in_dictionary);
  __ mov(result, Operand(0));
  __ Ret();
}
| 6394 |
| 6395 |
| 6045 #undef __ | 6396 #undef __ |
| 6046 | 6397 |
| 6047 } } // namespace v8::internal | 6398 } } // namespace v8::internal |
| 6048 | 6399 |
| 6049 #endif // V8_TARGET_ARCH_ARM | 6400 #endif // V8_TARGET_ARCH_ARM |
| OLD | NEW |