| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 62 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 73 enum ObjectToDoubleFlags { | 73 enum ObjectToDoubleFlags { |
| 74 // No special flags. | 74 // No special flags. |
| 75 NO_OBJECT_TO_DOUBLE_FLAGS = 0, | 75 NO_OBJECT_TO_DOUBLE_FLAGS = 0, |
| 76 // Object is known to be a non smi. | 76 // Object is known to be a non smi. |
| 77 OBJECT_NOT_SMI = 1 << 0, | 77 OBJECT_NOT_SMI = 1 << 0, |
| 78 // Don't load NaNs or infinities, branch to the non number case instead. | 78 // Don't load NaNs or infinities, branch to the non number case instead. |
| 79 AVOID_NANS_AND_INFINITIES = 1 << 1 | 79 AVOID_NANS_AND_INFINITIES = 1 << 1 |
| 80 }; | 80 }; |
| 81 | 81 |
| 82 | 82 |
| 83 enum RememberedSetAction { EMIT_REMEMBERED_SET, OMIT_REMEMBERED_SET }; |
| 84 enum SmiCheck { INLINE_SMI_CHECK, OMIT_SMI_CHECK }; |
| 85 enum LinkRegisterStatus { kLRHasNotBeenSaved, kLRHasBeenSaved }; |
| 86 |
| 87 |
| 88 bool AreAliased(Register r1, Register r2, Register r3, Register r4); |
| 89 |
| 90 |
| 83 // MacroAssembler implements a collection of frequently used macros. | 91 // MacroAssembler implements a collection of frequently used macros. |
| 84 class MacroAssembler: public Assembler { | 92 class MacroAssembler: public Assembler { |
| 85 public: | 93 public: |
| 86 // The isolate parameter can be NULL if the macro assembler should | 94 // The isolate parameter can be NULL if the macro assembler should |
| 87 // not use isolate-dependent functionality. In this case, it's the | 95 // not use isolate-dependent functionality. In this case, it's the |
| 88 // responsibility of the caller to never invoke such function on the | 96 // responsibility of the caller to never invoke such function on the |
| 89 // macro assembler. | 97 // macro assembler. |
| 90 MacroAssembler(Isolate* isolate, void* buffer, int size); | 98 MacroAssembler(Isolate* isolate, void* buffer, int size); |
| 91 | 99 |
| 92 // Jump, Call, and Ret pseudo instructions implementing inter-working. | 100 // Jump, Call, and Ret pseudo instructions implementing inter-working. |
| (...skipping 58 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 151 | 159 |
| 152 // Load an object from the root table. | 160 // Load an object from the root table. |
| 153 void LoadRoot(Register destination, | 161 void LoadRoot(Register destination, |
| 154 Heap::RootListIndex index, | 162 Heap::RootListIndex index, |
| 155 Condition cond = al); | 163 Condition cond = al); |
| 156 // Store an object to the root table. | 164 // Store an object to the root table. |
| 157 void StoreRoot(Register source, | 165 void StoreRoot(Register source, |
| 158 Heap::RootListIndex index, | 166 Heap::RootListIndex index, |
| 159 Condition cond = al); | 167 Condition cond = al); |
| 160 | 168 |
| 169 // --------------------------------------------------------------------------- |
| 170 // GC Support |
| 161 | 171 |
| 162 // Check if object is in new space. | 172 void IncrementalMarkingRecordWriteHelper(Register object, |
| 163 // scratch can be object itself, but it will be clobbered. | 173 Register value, |
| 164 void InNewSpace(Register object, | 174 Register address); |
| 165 Register scratch, | |
| 166 Condition cond, // eq for new space, ne otherwise | |
| 167 Label* branch); | |
| 168 | 175 |
| 176 enum RememberedSetFinalAction { |
| 177 kReturnAtEnd, |
| 178 kFallThroughAtEnd |
| 179 }; |
| 169 | 180 |
| 170 // For the page containing |object| mark the region covering [address] | 181 // Record in the remembered set the fact that we have a pointer to new space |
| 171 // dirty. The object address must be in the first 8K of an allocated page. | 182 // at the address pointed to by the addr register. Only works if addr is not |
| 172 void RecordWriteHelper(Register object, | 183 // in new space. |
| 173 Register address, | 184 void RememberedSetHelper(Register addr, |
| 174 Register scratch); | 185 Register scratch, |
| 186 SaveFPRegsMode save_fp, |
| 187 RememberedSetFinalAction and_then); |
| 175 | 188 |
| 176 // For the page containing |object| mark the region covering | 189 void CheckPageFlag(Register object, |
| 177 // [object+offset] dirty. The object address must be in the first 8K | 190 Register scratch, |
| 178 // of an allocated page. The 'scratch' registers are used in the | 191 int mask, |
| 179 // implementation and all 3 registers are clobbered by the | 192 Condition cc, |
| 180 // operation, as well as the ip register. RecordWrite updates the | 193 Label* condition_met); |
| 181 // write barrier even when storing smis. | 194 |
| 182 void RecordWrite(Register object, | 195 // Check if object is in new space. Jumps if the object is not in new space. |
| 183 Operand offset, | 196 // The register scratch can be object itself, but it will be clobbered. |
| 197 void JumpIfNotInNewSpace(Register object, |
| 198 Register scratch, |
| 199 Label* branch) { |
| 200 InNewSpace(object, scratch, ne, branch); |
| 201 } |
| 202 |
| 203 // Check if object is in new space. Jumps if the object is in new space. |
| 204 // The register scratch can be object itself, but it will be clobbered. |
| 205 void JumpIfInNewSpace(Register object, |
| 206 Register scratch, |
| 207 Label* branch) { |
| 208 InNewSpace(object, scratch, eq, branch); |
| 209 } |
| 210 |
| 211 // Check if an object has a given incremental marking color. |
| 212 void HasColor(Register object, |
| 213 Register scratch0, |
| 214 Register scratch1, |
| 215 Label* has_color, |
| 216 int first_bit, |
| 217 int second_bit); |
| 218 |
| 219 void JumpIfBlack(Register object, |
| 184 Register scratch0, | 220 Register scratch0, |
| 185 Register scratch1); | 221 Register scratch1, |
| 222 Label* on_black); |
| 186 | 223 |
| 187 // For the page containing |object| mark the region covering | 224 // Checks the color of an object. If the object is already grey or black |
| 188 // [address] dirty. The object address must be in the first 8K of an | 225 // then we just fall through, since it is already live. If it is white and |
| 189 // allocated page. All 3 registers are clobbered by the operation, | 226 // we can determine that it doesn't need to be scanned, then we just mark it |
| 190 // as well as the ip register. RecordWrite updates the write barrier | 227 // black and fall through. For the rest we jump to the label so the |
| 191 // even when storing smis. | 228 // incremental marker can fix its assumptions. |
| 192 void RecordWrite(Register object, | 229 void EnsureNotWhite(Register object, |
| 193 Register address, | 230 Register scratch1, |
| 194 Register scratch); | 231 Register scratch2, |
| 232 Register scratch3, |
| 233 Label* object_is_white_and_not_data); |
| 234 |
| 235 // Detects conservatively whether an object is data-only, i.e. it does not |
| 236 // need to be scanned by the garbage collector. |
| 237 void JumpIfDataObject(Register value, |
| 238 Register scratch, |
| 239 Label* not_data_object); |
| 240 |
| 241 // Notify the garbage collector that we wrote a pointer into an object. |
| 242 // |object| is the object being stored into, |value| is the object being |
| 243 // stored. value and scratch registers are clobbered by the operation. |
| 244 // The offset is the offset from the start of the object, not the offset from |
| 245 // the tagged HeapObject pointer. For use with FieldOperand(reg, off). |
| 246 void RecordWriteField( |
| 247 Register object, |
| 248 int offset, |
| 249 Register value, |
| 250 Register scratch, |
| 251 LinkRegisterStatus lr_status, |
| 252 SaveFPRegsMode save_fp, |
| 253 RememberedSetAction remembered_set_action = EMIT_REMEMBERED_SET, |
| 254 SmiCheck smi_check = INLINE_SMI_CHECK); |
| 255 |
| 256 // As above, but the offset has the tag presubtracted. For use with |
| 257 // MemOperand(reg, off). |
| 258 inline void RecordWriteContextSlot( |
| 259 Register context, |
| 260 int offset, |
| 261 Register value, |
| 262 Register scratch, |
| 263 LinkRegisterStatus lr_status, |
| 264 SaveFPRegsMode save_fp, |
| 265 RememberedSetAction remembered_set_action = EMIT_REMEMBERED_SET, |
| 266 SmiCheck smi_check = INLINE_SMI_CHECK) { |
| 267 RecordWriteField(context, |
| 268 offset + kHeapObjectTag, |
| 269 value, |
| 270 scratch, |
| 271 lr_status, |
| 272 save_fp, |
| 273 remembered_set_action, |
| 274 smi_check); |
| 275 } |
| 276 |
| 277 // For a given |object| notify the garbage collector that the slot |address| |
| 278 // has been written. |value| is the object being stored. The value and |
| 279 // address registers are clobbered by the operation. |
| 280 void RecordWrite( |
| 281 Register object, |
| 282 Register address, |
| 283 Register value, |
| 284 LinkRegisterStatus lr_status, |
| 285 SaveFPRegsMode save_fp, |
| 286 RememberedSetAction remembered_set_action = EMIT_REMEMBERED_SET, |
| 287 SmiCheck smi_check = INLINE_SMI_CHECK); |
| 195 | 288 |
| 196 // Push a handle. | 289 // Push a handle. |
| 197 void Push(Handle<Object> handle); | 290 void Push(Handle<Object> handle); |
| 198 | 291 |
| 199 // Push two registers. Pushes leftmost register first (to highest address). | 292 // Push two registers. Pushes leftmost register first (to highest address). |
| 200 void Push(Register src1, Register src2, Condition cond = al) { | 293 void Push(Register src1, Register src2, Condition cond = al) { |
| 201 ASSERT(!src1.is(src2)); | 294 ASSERT(!src1.is(src2)); |
| 202 if (src1.code() > src2.code()) { | 295 if (src1.code() > src2.code()) { |
| 203 stm(db_w, sp, src1.bit() | src2.bit(), cond); | 296 stm(db_w, sp, src1.bit() | src2.bit(), cond); |
| 204 } else { | 297 } else { |
| (...skipping 859 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1064 InvokeFlag flag, | 1157 InvokeFlag flag, |
| 1065 const CallWrapper& call_wrapper, | 1158 const CallWrapper& call_wrapper, |
| 1066 CallKind call_kind); | 1159 CallKind call_kind); |
| 1067 | 1160 |
| 1068 void InitializeNewString(Register string, | 1161 void InitializeNewString(Register string, |
| 1069 Register length, | 1162 Register length, |
| 1070 Heap::RootListIndex map_index, | 1163 Heap::RootListIndex map_index, |
| 1071 Register scratch1, | 1164 Register scratch1, |
| 1072 Register scratch2); | 1165 Register scratch2); |
| 1073 | 1166 |
| 1167 // Helper for implementing JumpIfNotInNewSpace and JumpIfInNewSpace. |
| 1168 void InNewSpace(Register object, |
| 1169 Register scratch, |
| 1170 Condition cond, // eq for new space, ne otherwise. |
| 1171 Label* branch); |
| 1172 |
| 1173 // Helper for finding the mark bits for an address. Afterwards, the |
| 1174 // bitmap register points at the word with the mark bits and the mask |
| 1175 // the position of the first bit. Leaves addr_reg unchanged. |
| 1176 inline void GetMarkBits(Register addr_reg, |
| 1177 Register bitmap_reg, |
| 1178 Register mask_reg); |
| 1179 |
| 1074 // Compute memory operands for safepoint stack slots. | 1180 // Compute memory operands for safepoint stack slots. |
| 1075 static int SafepointRegisterStackIndex(int reg_code); | 1181 static int SafepointRegisterStackIndex(int reg_code); |
| 1076 MemOperand SafepointRegisterSlot(Register reg); | 1182 MemOperand SafepointRegisterSlot(Register reg); |
| 1077 MemOperand SafepointRegistersAndDoublesSlot(Register reg); | 1183 MemOperand SafepointRegistersAndDoublesSlot(Register reg); |
| 1078 | 1184 |
| 1079 bool generating_stub_; | 1185 bool generating_stub_; |
| 1080 bool allow_stub_calls_; | 1186 bool allow_stub_calls_; |
| 1081 bool has_frame_; | 1187 bool has_frame_; |
| 1082 // This handle will be patched with the code object on installation. | 1188 // This handle will be patched with the code object on installation. |
| 1083 Handle<Object> code_object_; | 1189 Handle<Object> code_object_; |
| (...skipping 56 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1140 #define __FILE_LINE__ __FILE__ ":" CODE_COVERAGE_TOSTRING(__LINE__) | 1246 #define __FILE_LINE__ __FILE__ ":" CODE_COVERAGE_TOSTRING(__LINE__) |
| 1141 #define ACCESS_MASM(masm) masm->stop(__FILE_LINE__); masm-> | 1247 #define ACCESS_MASM(masm) masm->stop(__FILE_LINE__); masm-> |
| 1142 #else | 1248 #else |
| 1143 #define ACCESS_MASM(masm) masm-> | 1249 #define ACCESS_MASM(masm) masm-> |
| 1144 #endif | 1250 #endif |
| 1145 | 1251 |
| 1146 | 1252 |
| 1147 } } // namespace v8::internal | 1253 } } // namespace v8::internal |
| 1148 | 1254 |
| 1149 #endif // V8_ARM_MACRO_ASSEMBLER_ARM_H_ | 1255 #endif // V8_ARM_MACRO_ASSEMBLER_ARM_H_ |
| OLD | NEW |