OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 908 matching lines...)
919 | 919 |
920 __ mov(r3, r0); | 920 __ mov(r3, r0); |
921 __ Push(r3, r2); | 921 __ Push(r3, r2); |
922 | 922 |
923 // Perform tail call to the entry. | 923 // Perform tail call to the entry. |
924 ExternalReference ref = | 924 ExternalReference ref = |
925 ExternalReference(IC_Utility(kLoadIC_Miss), isolate); | 925 ExternalReference(IC_Utility(kLoadIC_Miss), isolate); |
926 __ TailCallExternalReference(ref, 2, 1); | 926 __ TailCallExternalReference(ref, 2, 1); |
927 } | 927 } |
928 | 928 |
929 // Returns the code marker, or 0 if the code is not marked. |
930 static inline int InlinedICSiteMarker(Address address, | |
931 Address* inline_end_address) { | |
932 if (V8::UseCrankshaft()) return 0; |
933 | |
934 // If the instruction after the call site is not the pseudo instruction nop1 | |
935 // then this is not related to an inlined in-object property load. The nop1 | |
936 // instruction is located just after the call to the IC in the deferred code | |
937 // handling the miss in the inlined code. After the nop1 instruction there is | |
938 // a branch instruction for jumping back from the deferred code. | |
939 Address address_after_call = address + Assembler::kCallTargetAddressOffset; | |
940 Instr instr_after_call = Assembler::instr_at(address_after_call); | |
941 int code_marker = MacroAssembler::GetCodeMarker(instr_after_call); | |
942 | |
943 // A non-positive result means the code is not marked. |
944 if (code_marker <= 0) return 0; | |
945 | |
946 Address address_after_nop = address_after_call + Assembler::kInstrSize; | |
947 Instr instr_after_nop = Assembler::instr_at(address_after_nop); | |
948 // There may be some reg-reg move and frame merging code to skip over before |
949 // the branch back from the deferred code (e.g. DeferredReferenceGetKeyedValue) |
950 // to the inlined code. |
951 while (!Assembler::IsBranch(instr_after_nop)) { | |
952 address_after_nop += Assembler::kInstrSize; | |
953 instr_after_nop = Assembler::instr_at(address_after_nop); | |
954 } | |
955 | |
956 // Find the end of the inlined code for handling the load. | |
957 int b_offset = | |
958 Assembler::GetBranchOffset(instr_after_nop) + Assembler::kPcLoadDelta; | |
959 ASSERT(b_offset < 0); // Jumping back from deferred code. | |
960 *inline_end_address = address_after_nop + b_offset; | |
961 | |
962 return code_marker; | |
963 } | |
964 | |
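For orientation, the instruction sequence that the InlinedICSiteMarker() helper above expects at a marked call site can be sketched as follows. This is a hand-written illustration of the layout described in its comments, not assembler output; the exact encodings of the call sequence and the marker nop are not shown.

  // Deferred code emitted for the miss case of an inlined property access:
  //   <call to the IC stub>         ; 'address' points here; kCallTargetAddressOffset
  //                                 ;   skips past the call sequence
  //   nop <code marker>             ; pseudo nop identifying the kind of inlined site
  //   <reg-reg moves, frame merge>  ; zero or more instructions, skipped by the loop
  //   b <back into inlined code>    ; backward branch; its target is inline_end_address

The scan walks forward one instruction at a time from the marker until it reaches the branch, then adds the (negative) branch offset plus kPcLoadDelta to obtain the branch target, which is the end of the inlined sequence.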
965 | |
966 bool LoadIC::PatchInlinedLoad(Address address, Object* map, int offset) { | |
967 if (V8::UseCrankshaft()) return false; | |
968 | |
969 // Find the end of the inlined code for handling the load if this is an | |
970 // inlined IC call site. | |
971 Address inline_end_address = 0; | |
972 if (InlinedICSiteMarker(address, &inline_end_address) | |
973 != Assembler::PROPERTY_ACCESS_INLINED) { | |
974 return false; | |
975 } | |
976 | |
977 // Patch the offset of the property load instruction (ldr r0, [r1, #+XXX]). | |
978 // The immediate must be representable in 12 bits. | |
979 ASSERT((JSObject::kMaxInstanceSize - JSObject::kHeaderSize) < (1 << 12)); | |
980 Address ldr_property_instr_address = | |
981 inline_end_address - Assembler::kInstrSize; | |
982 ASSERT(Assembler::IsLdrRegisterImmediate( | |
983 Assembler::instr_at(ldr_property_instr_address))); | |
984 Instr ldr_property_instr = Assembler::instr_at(ldr_property_instr_address); | |
985 ldr_property_instr = Assembler::SetLdrRegisterImmediateOffset( | |
986 ldr_property_instr, offset - kHeapObjectTag); | |
987 Assembler::instr_at_put(ldr_property_instr_address, ldr_property_instr); | |
988 | |
989 // Indicate that code has changed. | |
990 CPU::FlushICache(ldr_property_instr_address, 1 * Assembler::kInstrSize); | |
991 | |
992 // Patch the map check. | |
993 // For PROPERTY_ACCESS_INLINED, the load map instruction is generated | |
994 // 4 instructions before the end of the inlined code. | |
995 // See codegen-arm.cc CodeGenerator::EmitNamedLoad. |
996 int ldr_map_offset = -4; | |
997 Address ldr_map_instr_address = | |
998 inline_end_address + ldr_map_offset * Assembler::kInstrSize; | |
999 Assembler::set_target_address_at(ldr_map_instr_address, | |
1000 reinterpret_cast<Address>(map)); | |
1001 return true; | |
1002 } | |
1003 | |
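The 12-bit limit asserted above comes from the ARM LDR (immediate) encoding, and Assembler::SetLdrRegisterImmediateOffset() is the helper that rewrites that field. A minimal sketch of what such a rewrite involves, assuming a positive offset that already fits; the helper name below is made up for illustration, and the real assembler helper also validates encodability and handles the U bit.

  #include <stdint.h>

  // LDR (immediate) keeps its unsigned 12-bit offset in bits [11:0] of the
  // instruction word, so patching the offset is a mask-and-or on those bits.
  static uint32_t SetLdrImmediateOffsetSketch(uint32_t instr, uint32_t offset) {
    return (instr & ~0xFFFu) | (offset & 0xFFFu);  // assumes offset < 4096
  }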
1004 | |
1005 bool LoadIC::PatchInlinedContextualLoad(Address address, | |
1006 Object* map, | |
1007 Object* cell, | |
1008 bool is_dont_delete) { | |
1009 // Find the end of the inlined code for handling the contextual load if | |
1010 // this is an inlined IC call site. |
1011 Address inline_end_address = 0; | |
1012 int marker = InlinedICSiteMarker(address, &inline_end_address); | |
1013 if (!((marker == Assembler::PROPERTY_ACCESS_INLINED_CONTEXT) || | |
1014 (marker == Assembler::PROPERTY_ACCESS_INLINED_CONTEXT_DONT_DELETE))) { | |
1015 return false; | |
1016 } | |
1017 // On ARM we don't rely on the is_dont_delete argument as the hint is already | |
1018 // embedded in the code marker. | |
1019 bool marker_is_dont_delete = | |
1020 marker == Assembler::PROPERTY_ACCESS_INLINED_CONTEXT_DONT_DELETE; | |
1021 | |
1022 // These are the offsets from the end of the inlined code. | |
1023 // See codegen-arm.cc CodeGenerator::EmitNamedLoad. |
1024 int ldr_map_offset = marker_is_dont_delete ? -5 : -8; |
1025 int ldr_cell_offset = marker_is_dont_delete ? -2 : -5; |
1026 if (FLAG_debug_code && marker_is_dont_delete) { | |
1027 // Three extra instructions were generated to check for the_hole_value. | |
1028 ldr_map_offset -= 3; | |
1029 ldr_cell_offset -= 3; | |
1030 } | |
1031 Address ldr_map_instr_address = | |
1032 inline_end_address + ldr_map_offset * Assembler::kInstrSize; | |
1033 Address ldr_cell_instr_address = | |
1034 inline_end_address + ldr_cell_offset * Assembler::kInstrSize; | |
1035 | |
1036 // Patch the map check. | |
1037 Assembler::set_target_address_at(ldr_map_instr_address, | |
1038 reinterpret_cast<Address>(map)); | |
1039 // Patch the cell address. | |
1040 Assembler::set_target_address_at(ldr_cell_instr_address, | |
1041 reinterpret_cast<Address>(cell)); | |
1042 | |
1043 return true; | |
1044 } | |
1045 | |
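As a worked instance of the offset table above: with the dont_delete marker and without --debug-code, the two patch targets sit at fixed distances from the end of the inlined sequence,

  // ldr_map_instr_address  = inline_end_address - 5 * Assembler::kInstrSize
  // ldr_cell_instr_address = inline_end_address - 2 * Assembler::kInstrSize

and each moves back a further 3 * Assembler::kInstrSize when the extra hole-value check is compiled in.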
1046 | |
1047 bool StoreIC::PatchInlinedStore(Address address, Object* map, int offset) { | |
1048 if (V8::UseCrankshaft()) return false; | |
1049 | |
1050 // Find the end of the inlined code for the store if there is an | |
1051 // inlined version of the store. | |
1052 Address inline_end_address = 0; | |
1053 if (InlinedICSiteMarker(address, &inline_end_address) | |
1054 != Assembler::PROPERTY_ACCESS_INLINED) { | |
1055 return false; | |
1056 } | |
1057 | |
1058 // Compute the address of the map load instruction. | |
1059 Address ldr_map_instr_address = | |
1060 inline_end_address - | |
1061 (CodeGenerator::GetInlinedNamedStoreInstructionsAfterPatch() * | |
1062 Assembler::kInstrSize); | |
1063 | |
1064 // Update the offsets if initializing the inlined store. No reason | |
1065 // to update the offsets when clearing the inlined version because | |
1066 // it will bail out in the map check. | |
1067 if (map != HEAP->null_value()) { | |
1068 // Patch the offset in the actual store instruction. | |
1069 Address str_property_instr_address = | |
1070 ldr_map_instr_address + 3 * Assembler::kInstrSize; | |
1071 Instr str_property_instr = Assembler::instr_at(str_property_instr_address); | |
1072 ASSERT(Assembler::IsStrRegisterImmediate(str_property_instr)); | |
1073 str_property_instr = Assembler::SetStrRegisterImmediateOffset( | |
1074 str_property_instr, offset - kHeapObjectTag); | |
1075 Assembler::instr_at_put(str_property_instr_address, str_property_instr); | |
1076 | |
1077 // Patch the offset in the add instruction that is part of the | |
1078 // write barrier. | |
1079 Address add_offset_instr_address = | |
1080 str_property_instr_address + Assembler::kInstrSize; | |
1081 Instr add_offset_instr = Assembler::instr_at(add_offset_instr_address); | |
1082 ASSERT(Assembler::IsAddRegisterImmediate(add_offset_instr)); | |
1083 add_offset_instr = Assembler::SetAddRegisterImmediateOffset( | |
1084 add_offset_instr, offset - kHeapObjectTag); | |
1085 Assembler::instr_at_put(add_offset_instr_address, add_offset_instr); | |
1086 | |
1087 // Indicate that code has changed. | |
1088 CPU::FlushICache(str_property_instr_address, 2 * Assembler::kInstrSize); | |
1089 } | |
1090 | |
1091 // Patch the map check. | |
1092 Assembler::set_target_address_at(ldr_map_instr_address, | |
1093 reinterpret_cast<Address>(map)); | |
1094 | |
1095 return true; | |
1096 } | |
1097 | |
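The map != HEAP->null_value() test above is what separates initializing an inlined store site from clearing one. A hypothetical caller that disables a site would only need to break the map check; this is a sketch only, the helper name is invented, and the actual clearing path lives in the platform-independent IC code.

  static void ClearInlinedStoreSite(Address address) {
    // Re-pointing the embedded map word at the null value makes the inlined map
    // check fail for every receiver, so execution always falls through to the IC
    // stub and the stale offset fields in the store site never matter again.
    StoreIC::PatchInlinedStore(address, HEAP->null_value(), 0);
  }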
1098 | |
1099 bool KeyedLoadIC::PatchInlinedLoad(Address address, Object* map) { | |
1100 if (V8::UseCrankshaft()) return false; | |
1101 | |
1102 Address inline_end_address = 0; | |
1103 if (InlinedICSiteMarker(address, &inline_end_address) | |
1104 != Assembler::PROPERTY_ACCESS_INLINED) { | |
1105 return false; | |
1106 } | |
1107 | |
1108 // Patch the map check. | |
1109 Address ldr_map_instr_address = | |
1110 inline_end_address - | |
1111 (CodeGenerator::GetInlinedKeyedLoadInstructionsAfterPatch() * | |
1112 Assembler::kInstrSize); | |
1113 Assembler::set_target_address_at(ldr_map_instr_address, | |
1114 reinterpret_cast<Address>(map)); | |
1115 return true; | |
1116 } | |
1117 | |
1118 | |
1119 bool KeyedStoreIC::PatchInlinedStore(Address address, Object* map) { | |
1120 if (V8::UseCrankshaft()) return false; | |
1121 | |
1122 // Find the end of the inlined code for handling the store if this is an | |
1123 // inlined IC call site. | |
1124 Address inline_end_address = 0; | |
1125 if (InlinedICSiteMarker(address, &inline_end_address) | |
1126 != Assembler::PROPERTY_ACCESS_INLINED) { | |
1127 return false; | |
1128 } | |
1129 | |
1130 // Patch the map check. | |
1131 Address ldr_map_instr_address = | |
1132 inline_end_address - | |
1133 (CodeGenerator::kInlinedKeyedStoreInstructionsAfterPatch * | |
1134 Assembler::kInstrSize); | |
1135 Assembler::set_target_address_at(ldr_map_instr_address, | |
1136 reinterpret_cast<Address>(map)); | |
1137 return true; | |
1138 } | |
1139 | |
1140 | 929 |
1141 Object* KeyedLoadIC_Miss(Arguments args); | 930 Object* KeyedLoadIC_Miss(Arguments args); |
1142 | 931 |
1143 | 932 |
1144 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) { | 933 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) { |
1145 // ---------- S t a t e -------------- | 934 // ---------- S t a t e -------------- |
1146 // -- lr : return address | 935 // -- lr : return address |
1147 // -- r0 : key | 936 // -- r0 : key |
1148 // -- r1 : receiver | 937 // -- r1 : receiver |
1149 // ----------------------------------- | 938 // ----------------------------------- |
(...skipping 634 matching lines...)
1784 Register reg = Assembler::GetRn(instr_at_patch); | 1573 Register reg = Assembler::GetRn(instr_at_patch); |
1785 patcher.masm()->tst(reg, Operand(kSmiTagMask)); | 1574 patcher.masm()->tst(reg, Operand(kSmiTagMask)); |
1786 patcher.EmitCondition(eq); | 1575 patcher.EmitCondition(eq); |
1787 } | 1576 } |
1788 } | 1577 } |
1789 | 1578 |
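The patched-in tst reg, #kSmiTagMask sets the Z flag exactly when the value's tag bits are zero, so the eq condition re-emitted alongside it holds precisely when the value is a smi. In C++ terms the test being wired in is the standard smi tag check, assuming the usual kSmiTag == 0 / kSmiTagMask == 1 scheme; the helper name is made up for illustration.

  static inline bool HasSmiTag(intptr_t value) {
    // tst reg, #kSmiTagMask sets the Z flag exactly when this returns true.
    return (value & kSmiTagMask) == kSmiTag;
  }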
1790 | 1579 |
1791 } } // namespace v8::internal | 1580 } } // namespace v8::internal |
1792 | 1581 |
1793 #endif // V8_TARGET_ARCH_ARM | 1582 #endif // V8_TARGET_ARCH_ARM |