| OLD | NEW |
| 1 // Copyright 2006-2009 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2009 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 859 matching lines...) |
| 870 ASSERT(cell->value()->IsTheHole()); | 870 ASSERT(cell->value()->IsTheHole()); |
| 871 __ mov(scratch, Operand(Handle<Object>(cell))); | 871 __ mov(scratch, Operand(Handle<Object>(cell))); |
| 872 __ ldr(scratch, | 872 __ ldr(scratch, |
| 873 FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset)); | 873 FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset)); |
| 874 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); | 874 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); |
| 875 __ cmp(scratch, ip); | 875 __ cmp(scratch, ip); |
| 876 __ b(ne, miss); | 876 __ b(ne, miss); |
| 877 return cell; | 877 return cell; |
| 878 } | 878 } |
| 879 | 879 |
| 880 // Calls GenerateCheckPropertyCell for each global object in the prototype chain |
| 881 // from object to (but not including) holder. |
| 882 MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCells( |
| 883 MacroAssembler* masm, |
| 884 JSObject* object, |
| 885 JSObject* holder, |
| 886 String* name, |
| 887 Register scratch, |
| 888 Label* miss) { |
| 889 JSObject* current = object; |
| 890 while (current != holder) { |
| 891 if (current->IsGlobalObject()) { |
| 892 // Returns a cell or a failure. |
| 893 MaybeObject* result = GenerateCheckPropertyCell( |
| 894 masm, |
| 895 GlobalObject::cast(current), |
| 896 name, |
| 897 scratch, |
| 898 miss); |
| 899 if (result->IsFailure()) return result; |
| 900 } |
| 901 ASSERT(current->IsJSObject()); |
| 902 current = JSObject::cast(current->GetPrototype()); |
| 903 } |
| 904 return NULL; |
| 905 } |
| 906 |
| 907 |
| 880 | 908 |
| 881 #undef __ | 909 #undef __ |
| 882 #define __ ACCESS_MASM(masm()) | 910 #define __ ACCESS_MASM(masm()) |
| 883 | 911 |
| 884 | 912 |
| 885 Register StubCompiler::CheckPrototypes(JSObject* object, | 913 Register StubCompiler::CheckPrototypes(JSObject* object, |
| 886 Register object_reg, | 914 Register object_reg, |
| 887 JSObject* holder, | 915 JSObject* holder, |
| 888 Register holder_reg, | 916 Register holder_reg, |
| 889 Register scratch1, | 917 Register scratch1, |
| (...skipping 17 matching lines...) |
| 907 // Check the maps in the prototype chain. | 935 // Check the maps in the prototype chain. |
| 908 // Traverse the prototype chain from the object and do map checks. | 936 // Traverse the prototype chain from the object and do map checks. |
| 909 JSObject* current = object; | 937 JSObject* current = object; |
| 910 while (current != holder) { | 938 while (current != holder) { |
| 911 depth++; | 939 depth++; |
| 912 | 940 |
| 913 // Only global objects and objects that do not require access | 941 // Only global objects and objects that do not require access |
| 914 // checks are allowed in stubs. | 942 // checks are allowed in stubs. |
| 915 ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded()); | 943 ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded()); |
| 916 | 944 |
| 945 ASSERT(current->GetPrototype()->IsJSObject()); |
| 917 JSObject* prototype = JSObject::cast(current->GetPrototype()); | 946 JSObject* prototype = JSObject::cast(current->GetPrototype()); |
| 918 if (!current->HasFastProperties() && | 947 if (!current->HasFastProperties() && |
| 919 !current->IsJSGlobalObject() && | 948 !current->IsJSGlobalObject() && |
| 920 !current->IsJSGlobalProxy()) { | 949 !current->IsJSGlobalProxy()) { |
| 921 if (!name->IsSymbol()) { | 950 if (!name->IsSymbol()) { |
| 922 MaybeObject* lookup_result = HEAP->LookupSymbol(name); | 951 MaybeObject* maybe_lookup_result = HEAP->LookupSymbol(name); |
| 923 if (lookup_result->IsFailure()) { | 952 Object* lookup_result = NULL; // Initialization to please compiler. |
| 924 set_failure(Failure::cast(lookup_result)); | 953 if (!maybe_lookup_result->ToObject(&lookup_result)) { |
| 954 set_failure(Failure::cast(maybe_lookup_result)); |
| 925 return reg; | 955 return reg; |
| 926 } else { | |
| 927 name = String::cast(lookup_result->ToObjectUnchecked()); | |
| 928 } | 956 } |
| 957 name = String::cast(lookup_result); |
| 929 } | 958 } |
| 930 ASSERT(current->property_dictionary()->FindEntry(name) == | 959 ASSERT(current->property_dictionary()->FindEntry(name) == |
| 931 StringDictionary::kNotFound); | 960 StringDictionary::kNotFound); |
| 932 | 961 |
| 933 GenerateDictionaryNegativeLookup(masm(), | 962 GenerateDictionaryNegativeLookup(masm(), |
| 934 miss, | 963 miss, |
| 935 reg, | 964 reg, |
| 936 name, | 965 name, |
| 937 scratch1, | 966 scratch1, |
| 938 scratch2); | 967 scratch2); |
| 939 __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset)); | 968 __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset)); |
| 940 reg = holder_reg; // from now the object is in holder_reg | 969 reg = holder_reg; // from now the object is in holder_reg |
| 941 __ ldr(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset)); | 970 __ ldr(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset)); |
| 942 } else { | 971 } else if (HEAP->InNewSpace(prototype)) { |
| 943 // Get the map of the current object. | 972 // Get the map of the current object. |
| 944 __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset)); | 973 __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset)); |
| 945 __ cmp(scratch1, Operand(Handle<Map>(current->map()))); | 974 __ cmp(scratch1, Operand(Handle<Map>(current->map()))); |
| 946 | 975 |
| 947 // Branch on the result of the map check. | 976 // Branch on the result of the map check. |
| 948 __ b(ne, miss); | 977 __ b(ne, miss); |
| 949 | 978 |
| 950 // Check access rights to the global object. This has to happen | 979 // Check access rights to the global object. This has to happen |
| 951 // after the map check so that we know that the object is | 980 // after the map check so that we know that the object is |
| 952 // actually a global object. | 981 // actually a global object. |
| 953 if (current->IsJSGlobalProxy()) { | 982 if (current->IsJSGlobalProxy()) { |
| 954 __ CheckAccessGlobalProxy(reg, scratch1, miss); | 983 __ CheckAccessGlobalProxy(reg, scratch1, miss); |
| 955 // Restore scratch register to be the map of the object. In the | 984 // Restore scratch register to be the map of the object. In the |
| 956 // new space case below, we load the prototype from the map in | 985 // new space case below, we load the prototype from the map in |
| 957 // the scratch register. | 986 // the scratch register. |
| 958 __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset)); | 987 __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset)); |
| 959 } | 988 } |
| 960 | 989 |
| 961 reg = holder_reg; // from now the object is in holder_reg | 990 reg = holder_reg; // from now the object is in holder_reg |
| 962 if (HEAP->InNewSpace(prototype)) { | 991 // The prototype is in new space; we cannot store a reference |
| 963 // The prototype is in new space; we cannot store a reference | 992 // to it in the code. Load it from the map. |
| 964 // to it in the code. Load it from the map. | 993 __ ldr(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset)); |
| 965 __ ldr(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset)); | 994 } else { |
| 966 } else { | 995 // Check the map of the current object. |
| 967 // The prototype is in old space; load it directly. | 996 __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset)); |
| 968 __ mov(reg, Operand(Handle<JSObject>(prototype))); | 997 __ cmp(scratch1, Operand(Handle<Map>(current->map()))); |
| 998 // Branch on the result of the map check. |
| 999 __ b(ne, miss); |
| 1000 // Check access rights to the global object. This has to happen |
| 1001 // after the map check so that we know that the object is |
| 1002 // actually a global object. |
| 1003 if (current->IsJSGlobalProxy()) { |
| 1004 __ CheckAccessGlobalProxy(reg, scratch1, miss); |
| 969 } | 1005 } |
| 1006 // The prototype is in old space; load it directly. |
| 1007 reg = holder_reg; // from now the object is in holder_reg |
| 1008 __ mov(reg, Operand(Handle<JSObject>(prototype))); |
| 970 } | 1009 } |
| 971 | 1010 |
| 972 if (save_at_depth == depth) { | 1011 if (save_at_depth == depth) { |
| 973 __ str(reg, MemOperand(sp)); | 1012 __ str(reg, MemOperand(sp)); |
| 974 } | 1013 } |
| 975 | 1014 |
| 976 // Go to the next object in the prototype chain. | 1015 // Go to the next object in the prototype chain. |
| 977 current = prototype; | 1016 current = prototype; |
| 978 } | 1017 } |
| 979 | 1018 |
| 980 // Check the holder map. | 1019 // Check the holder map. |
| 981 __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset)); | 1020 __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset)); |
| 982 __ cmp(scratch1, Operand(Handle<Map>(current->map()))); | 1021 __ cmp(scratch1, Operand(Handle<Map>(current->map()))); |
| 983 __ b(ne, miss); | 1022 __ b(ne, miss); |
| 984 | 1023 |
| 985 // Log the check depth. | 1024 // Log the check depth. |
| 986 LOG(IntEvent("check-maps-depth", depth + 1)); | 1025 LOG(IntEvent("check-maps-depth", depth + 1)); |
| 987 | 1026 |
| 988 // Perform security check for access to the global object and return | 1027 // Perform security check for access to the global object. |
| 989 // the holder register. | 1028 ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded()); |
| 990 ASSERT(current == holder); | 1029 if (holder->IsJSGlobalProxy()) { |
| 991 ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded()); | |
| 992 if (current->IsJSGlobalProxy()) { | |
| 993 __ CheckAccessGlobalProxy(reg, scratch1, miss); | 1030 __ CheckAccessGlobalProxy(reg, scratch1, miss); |
| 994 } | 1031 }; |
| 995 | 1032 |
| 996 // If we've skipped any global objects, it's not enough to verify | 1033 // If we've skipped any global objects, it's not enough to verify |
| 997 // that their maps haven't changed. We also need to check that the | 1034 // that their maps haven't changed. We also need to check that the |
| 998 // property cell for the property is still empty. | 1035 // property cell for the property is still empty. |
| 999 current = object; | 1036 MaybeObject* result = GenerateCheckPropertyCells(masm(), |
| 1000 while (current != holder) { | 1037 object, |
| 1001 if (current->IsGlobalObject()) { | 1038 holder, |
| 1002 MaybeObject* cell = GenerateCheckPropertyCell(masm(), | 1039 name, |
| 1003 GlobalObject::cast(current), | 1040 scratch1, |
| 1004 name, | 1041 miss); |
| 1005 scratch1, | 1042 if (result->IsFailure()) set_failure(Failure::cast(result)); |
| 1006 miss); | |
| 1007 if (cell->IsFailure()) { | |
| 1008 set_failure(Failure::cast(cell)); | |
| 1009 return reg; | |
| 1010 } | |
| 1011 } | |
| 1012 current = JSObject::cast(current->GetPrototype()); | |
| 1013 } | |
| 1014 | 1043 |
| 1015 // Return the register containing the holder. | 1044 // Return the register containing the holder. |
| 1016 return reg; | 1045 return reg; |
| 1017 } | 1046 } |
| 1018 | 1047 |
| 1019 | 1048 |
| 1020 void StubCompiler::GenerateLoadField(JSObject* object, | 1049 void StubCompiler::GenerateLoadField(JSObject* object, |
| 1021 JSObject* holder, | 1050 JSObject* holder, |
| 1022 Register receiver, | 1051 Register receiver, |
| 1023 Register scratch1, | 1052 Register scratch1, |
| (...skipping 624 matching lines...) |
| 1648 scratch, | 1677 scratch, |
| 1649 result, | 1678 result, |
| 1650 &miss, // When not a string. | 1679 &miss, // When not a string. |
| 1651 &miss, // When not a number. | 1680 &miss, // When not a number. |
| 1652 &index_out_of_range, | 1681 &index_out_of_range, |
| 1653 STRING_INDEX_IS_NUMBER); | 1682 STRING_INDEX_IS_NUMBER); |
| 1654 char_code_at_generator.GenerateFast(masm()); | 1683 char_code_at_generator.GenerateFast(masm()); |
| 1655 __ Drop(argc + 1); | 1684 __ Drop(argc + 1); |
| 1656 __ Ret(); | 1685 __ Ret(); |
| 1657 | 1686 |
| 1658 ICRuntimeCallHelper call_helper; | 1687 StubRuntimeCallHelper call_helper; |
| 1659 char_code_at_generator.GenerateSlow(masm(), call_helper); | 1688 char_code_at_generator.GenerateSlow(masm(), call_helper); |
| 1660 | 1689 |
| 1661 __ bind(&index_out_of_range); | 1690 __ bind(&index_out_of_range); |
| 1662 __ LoadRoot(r0, Heap::kNanValueRootIndex); | 1691 __ LoadRoot(r0, Heap::kNanValueRootIndex); |
| 1663 __ Drop(argc + 1); | 1692 __ Drop(argc + 1); |
| 1664 __ Ret(); | 1693 __ Ret(); |
| 1665 | 1694 |
| 1666 __ bind(&miss); | 1695 __ bind(&miss); |
| 1667 Object* obj; | 1696 Object* obj; |
| 1668 { MaybeObject* maybe_obj = GenerateMissBranch(); | 1697 { MaybeObject* maybe_obj = GenerateMissBranch(); |
| (...skipping 56 matching lines...) |
| 1725 scratch2, | 1754 scratch2, |
| 1726 result, | 1755 result, |
| 1727 &miss, // When not a string. | 1756 &miss, // When not a string. |
| 1728 &miss, // When not a number. | 1757 &miss, // When not a number. |
| 1729 &index_out_of_range, | 1758 &index_out_of_range, |
| 1730 STRING_INDEX_IS_NUMBER); | 1759 STRING_INDEX_IS_NUMBER); |
| 1731 char_at_generator.GenerateFast(masm()); | 1760 char_at_generator.GenerateFast(masm()); |
| 1732 __ Drop(argc + 1); | 1761 __ Drop(argc + 1); |
| 1733 __ Ret(); | 1762 __ Ret(); |
| 1734 | 1763 |
| 1735 ICRuntimeCallHelper call_helper; | 1764 StubRuntimeCallHelper call_helper; |
| 1736 char_at_generator.GenerateSlow(masm(), call_helper); | 1765 char_at_generator.GenerateSlow(masm(), call_helper); |
| 1737 | 1766 |
| 1738 __ bind(&index_out_of_range); | 1767 __ bind(&index_out_of_range); |
| 1739 __ LoadRoot(r0, Heap::kEmptyStringRootIndex); | 1768 __ LoadRoot(r0, Heap::kEmptyStringRootIndex); |
| 1740 __ Drop(argc + 1); | 1769 __ Drop(argc + 1); |
| 1741 __ Ret(); | 1770 __ Ret(); |
| 1742 | 1771 |
| 1743 __ bind(&miss); | 1772 __ bind(&miss); |
| 1744 Object* obj; | 1773 Object* obj; |
| 1745 { MaybeObject* maybe_obj = GenerateMissBranch(); | 1774 { MaybeObject* maybe_obj = GenerateMissBranch(); |
| (...skipping 54 matching lines...) |
| 1800 __ b(ne, &slow); | 1829 __ b(ne, &slow); |
| 1801 | 1830 |
| 1802 // Convert the smi code to uint16. | 1831 // Convert the smi code to uint16. |
| 1803 __ and_(code, code, Operand(Smi::FromInt(0xffff))); | 1832 __ and_(code, code, Operand(Smi::FromInt(0xffff))); |
| 1804 | 1833 |
| 1805 StringCharFromCodeGenerator char_from_code_generator(code, r0); | 1834 StringCharFromCodeGenerator char_from_code_generator(code, r0); |
| 1806 char_from_code_generator.GenerateFast(masm()); | 1835 char_from_code_generator.GenerateFast(masm()); |
| 1807 __ Drop(argc + 1); | 1836 __ Drop(argc + 1); |
| 1808 __ Ret(); | 1837 __ Ret(); |
| 1809 | 1838 |
| 1810 ICRuntimeCallHelper call_helper; | 1839 StubRuntimeCallHelper call_helper; |
| 1811 char_from_code_generator.GenerateSlow(masm(), call_helper); | 1840 char_from_code_generator.GenerateSlow(masm(), call_helper); |
| 1812 | 1841 |
| 1813 // Tail call the full function. We do not have to patch the receiver | 1842 // Tail call the full function. We do not have to patch the receiver |
| 1814 // because the function makes no use of it. | 1843 // because the function makes no use of it. |
| 1815 __ bind(&slow); | 1844 __ bind(&slow); |
| 1816 __ InvokeFunction(function, arguments(), JUMP_FUNCTION); | 1845 __ InvokeFunction(function, arguments(), JUMP_FUNCTION); |
| 1817 | 1846 |
| 1818 __ bind(&miss); | 1847 __ bind(&miss); |
| 1819 // r2: function name. | 1848 // r2: function name. |
| 1820 Object* obj; | 1849 Object* obj; |
| (...skipping 507 matching lines...) |
| 2328 } | 2357 } |
| 2329 | 2358 |
| 2330 // Setup the context (function already in r1). | 2359 // Setup the context (function already in r1). |
| 2331 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); | 2360 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); |
| 2332 | 2361 |
| 2333 // Jump to the cached code (tail call). | 2362 // Jump to the cached code (tail call). |
| 2334 __ IncrementCounter(COUNTERS->call_global_inline(), 1, r3, r4); | 2363 __ IncrementCounter(COUNTERS->call_global_inline(), 1, r3, r4); |
| 2335 ASSERT(function->is_compiled()); | 2364 ASSERT(function->is_compiled()); |
| 2336 Handle<Code> code(function->code()); | 2365 Handle<Code> code(function->code()); |
| 2337 ParameterCount expected(function->shared()->formal_parameter_count()); | 2366 ParameterCount expected(function->shared()->formal_parameter_count()); |
| 2338 __ InvokeCode(code, expected, arguments(), | 2367 if (V8::UseCrankshaft()) { |
| 2339 RelocInfo::CODE_TARGET, JUMP_FUNCTION); | 2368 // TODO(kasperl): For now, we always call indirectly through the |
| 2369 // code field in the function to allow recompilation to take effect |
| 2370 // without changing any of the call sites. |
| 2371 __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset)); |
| 2372 __ InvokeCode(r3, expected, arguments(), JUMP_FUNCTION); |
| 2373 } else { |
| 2374 __ InvokeCode(code, expected, arguments(), |
| 2375 RelocInfo::CODE_TARGET, JUMP_FUNCTION); |
| 2376 } |
| 2340 | 2377 |
| 2341 // Handle call cache miss. | 2378 // Handle call cache miss. |
| 2342 __ bind(&miss); | 2379 __ bind(&miss); |
| 2343 __ IncrementCounter(COUNTERS->call_global_inline_miss(), 1, r1, r3); | 2380 __ IncrementCounter(COUNTERS->call_global_inline_miss(), 1, r1, r3); |
| 2344 Object* obj; | 2381 Object* obj; |
| 2345 { MaybeObject* maybe_obj = GenerateMissBranch(); | 2382 { MaybeObject* maybe_obj = GenerateMissBranch(); |
| 2346 if (!maybe_obj->ToObject(&obj)) return maybe_obj; | 2383 if (!maybe_obj->ToObject(&obj)) return maybe_obj; |
| 2347 } | 2384 } |
| 2348 | 2385 |
| 2349 // Return the generated code. | 2386 // Return the generated code. |
| (...skipping 516 matching lines...) |
| 2866 | 2903 |
| 2867 GenerateLoadFunctionPrototype(masm(), r1, r2, r3, &miss); | 2904 GenerateLoadFunctionPrototype(masm(), r1, r2, r3, &miss); |
| 2868 __ bind(&miss); | 2905 __ bind(&miss); |
| 2869 __ DecrementCounter(COUNTERS->keyed_load_function_prototype(), 1, r2, r3); | 2906 __ DecrementCounter(COUNTERS->keyed_load_function_prototype(), 1, r2, r3); |
| 2870 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); | 2907 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); |
| 2871 | 2908 |
| 2872 return GetCode(CALLBACKS, name); | 2909 return GetCode(CALLBACKS, name); |
| 2873 } | 2910 } |
| 2874 | 2911 |
| 2875 | 2912 |
| 2913 MaybeObject* KeyedLoadStubCompiler::CompileLoadSpecialized(JSObject* receiver) { |
| 2914 // ----------- S t a t e ------------- |
| 2915 // -- lr : return address |
| 2916 // -- r0 : key |
| 2917 // -- r1 : receiver |
| 2918 // ----------------------------------- |
| 2919 Label miss; |
| 2920 |
| 2921 // Check that the receiver isn't a smi. |
| 2922 __ tst(r1, Operand(kSmiTagMask)); |
| 2923 __ b(eq, &miss); |
| 2924 |
| 2925 // Check that the map matches. |
| 2926 __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset)); |
| 2927 __ cmp(r2, Operand(Handle<Map>(receiver->map()))); |
| 2928 __ b(ne, &miss); |
| 2929 |
| 2930 // Check that the key is a smi. |
| 2931 __ tst(r0, Operand(kSmiTagMask)); |
| 2932 __ b(ne, &miss); |
| 2933 |
| 2934 // Get the elements array. |
| 2935 __ ldr(r2, FieldMemOperand(r1, JSObject::kElementsOffset)); |
| 2936 __ AssertFastElements(r2); |
| 2937 |
| 2938 // Check that the key is within bounds. |
| 2939 __ ldr(r3, FieldMemOperand(r2, FixedArray::kLengthOffset)); |
| 2940 __ cmp(r0, Operand(r3)); |
| 2941 __ b(hs, &miss); |
| 2942 |
| 2943 // Load the result and make sure it's not the hole. |
| 2944 __ add(r3, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
| 2945 ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2); |
| 2946 __ ldr(r4, |
| 2947 MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize)); |
| 2948 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); |
| 2949 __ cmp(r4, ip); |
| 2950 __ b(eq, &miss); |
| 2951 __ mov(r0, r4); |
| 2952 __ Ret(); |
| 2953 |
| 2954 __ bind(&miss); |
| 2955 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); |
| 2956 |
| 2957 // Return the generated code. |
| 2958 return GetCode(NORMAL, NULL); |
| 2959 } |
| 2960 |
| 2961 |
| 2876 MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object, | 2962 MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object, |
| 2877 int index, | 2963 int index, |
| 2878 Map* transition, | 2964 Map* transition, |
| 2879 String* name) { | 2965 String* name) { |
| 2880 // ----------- S t a t e ------------- | 2966 // ----------- S t a t e ------------- |
| 2881 // -- r0 : value | 2967 // -- r0 : value |
| 2882 // -- r1 : key | 2968 // -- r1 : name |
| 2883 // -- r2 : receiver | 2969 // -- r2 : receiver |
| 2884 // -- lr : return address | 2970 // -- lr : return address |
| 2885 // ----------------------------------- | 2971 // ----------------------------------- |
| 2886 Label miss; | 2972 Label miss; |
| 2887 | 2973 |
| 2888 __ IncrementCounter(COUNTERS->keyed_store_field(), 1, r3, r4); | 2974 __ IncrementCounter(COUNTERS->keyed_store_field(), 1, r3, r4); |
| 2889 | 2975 |
| 2890 // Check that the name has not changed. | 2976 // Check that the name has not changed. |
| 2891 __ cmp(r1, Operand(Handle<String>(name))); | 2977 __ cmp(r1, Operand(Handle<String>(name))); |
| 2892 __ b(ne, &miss); | 2978 __ b(ne, &miss); |
| (...skipping 12 matching lines...) |
| 2905 Handle<Code> ic(Isolate::Current()->builtins()->builtin( | 2991 Handle<Code> ic(Isolate::Current()->builtins()->builtin( |
| 2906 Builtins::KeyedStoreIC_Miss)); | 2992 Builtins::KeyedStoreIC_Miss)); |
| 2907 | 2993 |
| 2908 __ Jump(ic, RelocInfo::CODE_TARGET); | 2994 __ Jump(ic, RelocInfo::CODE_TARGET); |
| 2909 | 2995 |
| 2910 // Return the generated code. | 2996 // Return the generated code. |
| 2911 return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name); | 2997 return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name); |
| 2912 } | 2998 } |
| 2913 | 2999 |
| 2914 | 3000 |
| 3001 MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized( |
| 3002 JSObject* receiver) { |
| 3003 // ----------- S t a t e ------------- |
| 3004 // -- r0 : value |
| 3005 // -- r1 : key |
| 3006 // -- r2 : receiver |
| 3007 // -- lr : return address |
| 3008 // -- r3 : scratch |
| 3009 // -- r4 : scratch (elements) |
| 3010 // ----------------------------------- |
| 3011 Label miss; |
| 3012 |
| 3013 Register value_reg = r0; |
| 3014 Register key_reg = r1; |
| 3015 Register receiver_reg = r2; |
| 3016 Register scratch = r3; |
| 3017 Register elements_reg = r4; |
| 3018 |
| 3019 // Check that the receiver isn't a smi. |
| 3020 __ tst(receiver_reg, Operand(kSmiTagMask)); |
| 3021 __ b(eq, &miss); |
| 3022 |
| 3023 // Check that the map matches. |
| 3024 __ ldr(scratch, FieldMemOperand(receiver_reg, HeapObject::kMapOffset)); |
| 3025 __ cmp(scratch, Operand(Handle<Map>(receiver->map()))); |
| 3026 __ b(ne, &miss); |
| 3027 |
| 3028 // Check that the key is a smi. |
| 3029 __ tst(key_reg, Operand(kSmiTagMask)); |
| 3030 __ b(ne, &miss); |
| 3031 |
| 3032 // Get the elements array and make sure it is a fast element array, not 'cow'. |
| 3033 __ ldr(elements_reg, |
| 3034 FieldMemOperand(receiver_reg, JSObject::kElementsOffset)); |
| 3035 __ ldr(scratch, FieldMemOperand(elements_reg, HeapObject::kMapOffset)); |
| 3036 __ cmp(scratch, Operand(Handle<Map>(FACTORY->fixed_array_map()))); |
| 3037 __ b(ne, &miss); |
| 3038 |
| 3039 // Check that the key is within bounds. |
| 3040 if (receiver->IsJSArray()) { |
| 3041 __ ldr(scratch, FieldMemOperand(receiver_reg, JSArray::kLengthOffset)); |
| 3042 } else { |
| 3043 __ ldr(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset)); |
| 3044 } |
| 3045 // Compare smis. |
| 3046 __ cmp(key_reg, scratch); |
| 3047 __ b(hs, &miss); |
| 3048 |
| 3049 __ add(scratch, |
| 3050 elements_reg, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
| 3051 ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2); |
| 3052 __ str(value_reg, |
| 3053 MemOperand(scratch, key_reg, LSL, kPointerSizeLog2 - kSmiTagSize)); |
| 3054 __ RecordWrite(scratch, |
| 3055 Operand(key_reg, LSL, kPointerSizeLog2 - kSmiTagSize), |
| 3056 receiver_reg , elements_reg); |
| 3057 |
| 3058 // value_reg (r0) is preserved. |
| 3059 // Done. |
| 3060 __ Ret(); |
| 3061 |
| 3062 __ bind(&miss); |
| 3063 Handle<Code> ic( |
| 3064 Isolate::Current()->builtins()->builtin(Builtins::KeyedStoreIC_Miss)); |
| 3065 __ Jump(ic, RelocInfo::CODE_TARGET); |
| 3066 |
| 3067 // Return the generated code. |
| 3068 return GetCode(NORMAL, NULL); |
| 3069 } |
| 3070 |
| 3071 |
| 2915 MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) { | 3072 MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) { |
| 2916 // ----------- S t a t e ------------- | 3073 // ----------- S t a t e ------------- |
| 2917 // -- r0 : argc | 3074 // -- r0 : argc |
| 2918 // -- r1 : constructor | 3075 // -- r1 : constructor |
| 2919 // -- lr : return address | 3076 // -- lr : return address |
| 2920 // -- [sp] : last argument | 3077 // -- [sp] : last argument |
| 2921 // ----------------------------------- | 3078 // ----------------------------------- |
| 2922 Label generic_stub_call; | 3079 Label generic_stub_call; |
| 2923 | 3080 |
| 2924 // Use r7 for holding undefined which is used in several places below. | 3081 // Use r7 for holding undefined which is used in several places below. |
| (...skipping 130 matching lines...) |
| 3055 // Return the generated code. | 3212 // Return the generated code. |
| 3056 return GetCode(); | 3213 return GetCode(); |
| 3057 } | 3214 } |
| 3058 | 3215 |
| 3059 | 3216 |
| 3060 #undef __ | 3217 #undef __ |
| 3061 | 3218 |
| 3062 } } // namespace v8::internal | 3219 } } // namespace v8::internal |
| 3063 | 3220 |
| 3064 #endif // V8_TARGET_ARCH_ARM | 3221 #endif // V8_TARGET_ARCH_ARM |