| OLD | NEW |
| 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 940 matching lines...) |
| 951 // Find the end of the inlined code for handling the load. | 951 // Find the end of the inlined code for handling the load. |
| 952 int b_offset = | 952 int b_offset = |
| 953 Assembler::GetBranchOffset(instr_after_nop) + Assembler::kPcLoadDelta; | 953 Assembler::GetBranchOffset(instr_after_nop) + Assembler::kPcLoadDelta; |
| 954 ASSERT(b_offset < 0); // Jumping back from deferred code. | 954 ASSERT(b_offset < 0); // Jumping back from deferred code. |
| 955 *inline_end_address = address_after_nop + b_offset; | 955 *inline_end_address = address_after_nop + b_offset; |
| 956 | 956 |
| 957 return true; | 957 return true; |
| 958 } | 958 } |
| 959 | 959 |
| 960 | 960 |
| 961 void LoadIC::ClearInlinedVersion(Address address) { | |
| 962 // Reset the map check of the inlined in-object property load (if present) to | |
| 963 // guarantee failure by holding an invalid map (the null value). The offset | |
| 964 // can be patched to anything. | |
| 965 PatchInlinedLoad(address, Heap::null_value(), 0); | |
| 966 } | |
| 967 | |
| 968 | |
| 969 bool LoadIC::PatchInlinedLoad(Address address, Object* map, int offset) { | 961 bool LoadIC::PatchInlinedLoad(Address address, Object* map, int offset) { |
| 970 // Find the end of the inlined code for handling the load if this is an | 962 // Find the end of the inlined code for handling the load if this is an |
| 971 // inlined IC call site. | 963 // inlined IC call site. |
| 972 Address inline_end_address; | 964 Address inline_end_address; |
| 973 if (!IsInlinedICSite(address, &inline_end_address)) return false; | 965 if (!IsInlinedICSite(address, &inline_end_address)) return false; |
| 974 | 966 |
| 975 // Patch the offset of the property load instruction (ldr r0, [r1, #+XXX]). | 967 // Patch the offset of the property load instruction (ldr r0, [r1, #+XXX]). |
| 976 // The immediate must be representable in 12 bits. | 968 // The immediate must be representable in 12 bits. |
| 977 ASSERT((JSObject::kMaxInstanceSize - JSObject::kHeaderSize) < (1 << 12)); | 969 ASSERT((JSObject::kMaxInstanceSize - JSObject::kHeaderSize) < (1 << 12)); |
| 978 Address ldr_property_instr_address = | 970 Address ldr_property_instr_address = |
| (...skipping 10 matching lines...) |
| 989 | 981 |
| 990 // Patch the map check. | 982 // Patch the map check. |
| 991 Address ldr_map_instr_address = | 983 Address ldr_map_instr_address = |
| 992 inline_end_address - 4 * Assembler::kInstrSize; | 984 inline_end_address - 4 * Assembler::kInstrSize; |
| 993 Assembler::set_target_address_at(ldr_map_instr_address, | 985 Assembler::set_target_address_at(ldr_map_instr_address, |
| 994 reinterpret_cast<Address>(map)); | 986 reinterpret_cast<Address>(map)); |
| 995 return true; | 987 return true; |
| 996 } | 988 } |
| 997 | 989 |
| 998 | 990 |
| 999 void KeyedLoadIC::ClearInlinedVersion(Address address) { | 991 bool StoreIC::PatchInlinedStore(Address address, Object* map, int offset) { |
| 1000 // Reset the map check of the inlined keyed load (if present) to | 992 // TODO(787): Implement inline stores on arm. |
| 1001 // guarantee failure by holding an invalid map (the null value). | 993 return false; |
| 1002 PatchInlinedLoad(address, Heap::null_value()); | |
| 1003 } | 994 } |
| 1004 | 995 |
| 1005 | 996 |
| 1006 bool KeyedLoadIC::PatchInlinedLoad(Address address, Object* map) { | 997 bool KeyedLoadIC::PatchInlinedLoad(Address address, Object* map) { |
| 1007 Address inline_end_address; | 998 Address inline_end_address; |
| 1008 if (!IsInlinedICSite(address, &inline_end_address)) return false; | 999 if (!IsInlinedICSite(address, &inline_end_address)) return false; |
| 1009 | 1000 |
| 1010 // Patch the map check. | 1001 // Patch the map check. |
| 1011 Address ldr_map_instr_address = | 1002 Address ldr_map_instr_address = |
| 1012 inline_end_address - | 1003 inline_end_address - |
| 1013 (CodeGenerator::GetInlinedKeyedLoadInstructionsAfterPatch() * | 1004 (CodeGenerator::GetInlinedKeyedLoadInstructionsAfterPatch() * |
| 1014 Assembler::kInstrSize); | 1005 Assembler::kInstrSize); |
| 1015 Assembler::set_target_address_at(ldr_map_instr_address, | 1006 Assembler::set_target_address_at(ldr_map_instr_address, |
| 1016 reinterpret_cast<Address>(map)); | 1007 reinterpret_cast<Address>(map)); |
| 1017 return true; | 1008 return true; |
| 1018 } | 1009 } |
| 1019 | 1010 |
| 1020 | 1011 |
| 1021 void KeyedStoreIC::ClearInlinedVersion(Address address) { | |
| 1022 // Insert null as the elements map to check for. This will make | |
| 1023 // sure that the elements fast-case map check fails so that control | |
| 1024 // flows to the IC instead of the inlined version. | |
| 1025 PatchInlinedStore(address, Heap::null_value()); | |
| 1026 } | |
| 1027 | |
| 1028 | |
| 1029 void KeyedStoreIC::RestoreInlinedVersion(Address address) { | |
| 1030 // Restore the fast-case elements map check so that the inlined | |
| 1031 // version can be used again. | |
| 1032 PatchInlinedStore(address, Heap::fixed_array_map()); | |
| 1033 } | |
| 1034 | |
| 1035 | |
| 1036 bool KeyedStoreIC::PatchInlinedStore(Address address, Object* map) { | 1012 bool KeyedStoreIC::PatchInlinedStore(Address address, Object* map) { |
| 1037 // Find the end of the inlined code for handling the store if this is an | 1013 // Find the end of the inlined code for handling the store if this is an |
| 1038 // inlined IC call site. | 1014 // inlined IC call site. |
| 1039 Address inline_end_address; | 1015 Address inline_end_address; |
| 1040 if (!IsInlinedICSite(address, &inline_end_address)) return false; | 1016 if (!IsInlinedICSite(address, &inline_end_address)) return false; |
| 1041 | 1017 |
| 1042 // Patch the map check. | 1018 // Patch the map check. |
| 1043 Address ldr_map_instr_address = | 1019 Address ldr_map_instr_address = |
| 1044 inline_end_address - | 1020 inline_end_address - |
| 1045 (CodeGenerator::kInlinedKeyedStoreInstructionsAfterPatch * | 1021 (CodeGenerator::kInlinedKeyedStoreInstructionsAfterPatch * |
| (...skipping 1192 matching lines...) |
| 2238 GenerateMiss(masm); | 2214 GenerateMiss(masm); |
| 2239 } | 2215 } |
| 2240 | 2216 |
| 2241 | 2217 |
| 2242 #undef __ | 2218 #undef __ |
| 2243 | 2219 |
| 2244 | 2220 |
| 2245 } } // namespace v8::internal | 2221 } } // namespace v8::internal |
| 2246 | 2222 |
| 2247 #endif // V8_TARGET_ARCH_ARM | 2223 #endif // V8_TARGET_ARCH_ARM |
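
For readers following the address arithmetic in the patched functions above, here is a minimal standalone sketch of the pattern shared by KeyedLoadIC::PatchInlinedLoad and KeyedStoreIC::PatchInlinedStore: back up a fixed number of instructions from the end of the inlined code to reach the map-check ldr, then rewrite the constant it loads. The buffer, kInstrSize value, instruction count, and WriteTargetAddress helper are illustrative stand-ins for Assembler::kInstrSize and Assembler::set_target_address_at, not the real V8 implementations.

#include <cstdint>
#include <cstdio>
#include <cstring>

// Illustrative stand-ins; the real constants live in the ARM Assembler.
static const int kInstrSize = 4;               // ARM instructions are 4 bytes.
static const int kInstructionsAfterPatch = 4;  // Map-check ldr sits 4 instructions before the end (LoadIC case above).

typedef uint8_t* Address;

// Stand-in for Assembler::set_target_address_at: overwrite the constant
// that the instruction at 'instr_address' loads (modeled here as an inline word).
static void WriteTargetAddress(Address instr_address, Address target) {
  memcpy(instr_address, &target, sizeof(target));
}

// Mirrors the shape of the PatchInlined* functions: compute the address of
// the map-check load relative to the end of the inlined code and patch it.
static void PatchMapCheck(Address inline_end_address, Address new_map) {
  Address ldr_map_instr_address =
      inline_end_address - kInstructionsAfterPatch * kInstrSize;
  WriteTargetAddress(ldr_map_instr_address, new_map);
}

int main() {
  uint8_t code[64] = {0};                   // Fake code buffer standing in for generated code.
  Address inline_end = code + sizeof(code);
  uint8_t fake_map;                         // Stand-in for a Map object.

  PatchMapCheck(inline_end, &fake_map);     // Install a matching map: inline fast case is taken.
  PatchMapCheck(inline_end, NULL);          // Install a map that can never match: check fails, IC stub runs.

  printf("patched slot at offset %ld\n",
         (long)((inline_end - kInstructionsAfterPatch * kInstrSize) - code));
  return 0;
}

As the removed ClearInlinedVersion/RestoreInlinedVersion comments describe, installing a map value that can never match (the null value in the real code) guarantees the inline map check fails so control flows to the IC stub, while restoring the real map (e.g. Heap::fixed_array_map() for keyed stores) re-enables the inlined fast case.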