OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 74 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
85 AVOID_NANS_AND_INFINITIES = 1 << 1 | 85 AVOID_NANS_AND_INFINITIES = 1 << 1 |
86 }; | 86 }; |
87 | 87 |
88 // Allow programmer to use Branch Delay Slot of Branches, Jumps, Calls. | 88 // Allow programmer to use Branch Delay Slot of Branches, Jumps, Calls. |
89 enum BranchDelaySlot { | 89 enum BranchDelaySlot { |
90 USE_DELAY_SLOT, | 90 USE_DELAY_SLOT, |
91 PROTECT | 91 PROTECT |
92 }; | 92 }; |
93 | 93 |
94 | 94 |
| 95 enum RememberedSetAction { EMIT_REMEMBERED_SET, OMIT_REMEMBERED_SET }; |
| 96 enum SmiCheck { INLINE_SMI_CHECK, OMIT_SMI_CHECK }; |
| 97 enum RAStatus { kRAHasNotBeenSaved, kRAHasBeenSaved }; |
| 98 |
| 99 bool AreAliased(Register r1, Register r2, Register r3, Register r4); |
| 100 |
| 101 |
95 // ----------------------------------------------------------------------------- | 102 // ----------------------------------------------------------------------------- |
96 // Static helper functions. | 103 // Static helper functions. |
97 | 104 |
98 static MemOperand ContextOperand(Register context, int index) { | 105 static MemOperand ContextOperand(Register context, int index) { |
99 return MemOperand(context, Context::SlotOffset(index)); | 106 return MemOperand(context, Context::SlotOffset(index)); |
100 } | 107 } |
101 | 108 |
102 | 109 |
103 static inline MemOperand GlobalObjectOperand() { | 110 static inline MemOperand GlobalObjectOperand() { |
104 return ContextOperand(cp, Context::GLOBAL_INDEX); | 111 return ContextOperand(cp, Context::GLOBAL_INDEX); |
(...skipping 128 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
233 | 240 |
234 // Jump unconditionally to given label. | 241 // Jump unconditionally to given label. |
235 // We NEED a nop in the branch delay slot, as it is used by v8, for example in | 242 // We NEED a nop in the branch delay slot, as it is used by v8, for example in |
236 // CodeGenerator::ProcessDeferred(). | 243 // CodeGenerator::ProcessDeferred(). |
237 // Currently the branch delay slot is filled by the MacroAssembler. | 244 // Currently the branch delay slot is filled by the MacroAssembler. |
238 // Rather, use b(Label) for code generation. | 245 // Rather, use b(Label) for code generation. |
239 void jmp(Label* L) { | 246 void jmp(Label* L) { |
240 Branch(L); | 247 Branch(L); |
241 } | 248 } |
242 | 249 |
| 250 |
243 // Load an object from the root table. | 251 // Load an object from the root table. |
244 void LoadRoot(Register destination, | 252 void LoadRoot(Register destination, |
245 Heap::RootListIndex index); | 253 Heap::RootListIndex index); |
246 void LoadRoot(Register destination, | 254 void LoadRoot(Register destination, |
247 Heap::RootListIndex index, | 255 Heap::RootListIndex index, |
248 Condition cond, Register src1, const Operand& src2); | 256 Condition cond, Register src1, const Operand& src2); |
249 | 257 |
250 // Store an object to the root table. | 258 // Store an object to the root table. |
251 void StoreRoot(Register source, | 259 void StoreRoot(Register source, |
252 Heap::RootListIndex index); | 260 Heap::RootListIndex index); |
253 void StoreRoot(Register source, | 261 void StoreRoot(Register source, |
254 Heap::RootListIndex index, | 262 Heap::RootListIndex index, |
255 Condition cond, Register src1, const Operand& src2); | 263 Condition cond, Register src1, const Operand& src2); |
256 | 264 |
257 | 265 |
258 // Check if object is in new space. | 266 // --------------------------------------------------------------------------- |
259 // scratch can be object itself, but it will be clobbered. | 267 // GC Support |
260 void InNewSpace(Register object, | 268 |
261 Register scratch, | 269 void IncrementalMarkingRecordWriteHelper(Register object, |
262 Condition cc, // eq for new space, ne otherwise. | 270 Register value, |
263 Label* branch); | 271 Register address); |
| 272 |
| 273 enum RememberedSetFinalAction { |
| 274 kReturnAtEnd, |
| 275 kFallThroughAtEnd |
| 276 }; |
264 | 277 |
265 | 278 |
266 // For the page containing |object| mark the region covering [address] | 279 // Record in the remembered set the fact that we have a pointer to new space |
267 // dirty. The object address must be in the first 8K of an allocated page. | 280 // at the address pointed to by the addr register. Only works if addr is not |
268 void RecordWriteHelper(Register object, | 281 // in new space. |
269 Register address, | 282 void RememberedSetHelper(Register addr, |
270 Register scratch); | 283 Register scratch, |
| 284 SaveFPRegsMode save_fp, |
| 285 RememberedSetFinalAction and_then); |
271 | 286 |
272 // For the page containing |object| mark the region covering | 287 void CheckPageFlag(Register object, |
273 // [object+offset] dirty. The object address must be in the first 8K | 288 Register scratch, |
274 // of an allocated page. The 'scratch' registers are used in the | 289 int mask, |
275 // implementation and all 3 registers are clobbered by the | 290 Condition cc, |
276 // operation, as well as the 'at' register. RecordWrite updates the | 291 Label* condition_met); |
277 // write barrier even when storing smis. | 292 |
278 void RecordWrite(Register object, | 293 // Check if object is in new space. Jumps if the object is not in new space. |
279 Operand offset, | 294 // The register scratch can be object itself, but it will be clobbered. |
| 295 void JumpIfNotInNewSpace(Register object, |
| 296 Register scratch, |
| 297 Label* branch) { |
| 298 InNewSpace(object, scratch, ne, branch); |
| 299 } |
| 300 |
| 301 // Check if object is in new space. Jumps if the object is in new space. |
| 302 // The register scratch can be object itself, but it will be clobbered. |
| 303 void JumpIfInNewSpace(Register object, |
| 304 Register scratch, |
| 305 Label* branch) { |
| 306 InNewSpace(object, scratch, eq, branch); |
| 307 } |
| 308 |
| 309 // Check if an object has a given incremental marking color. |
| 310 void HasColor(Register object, |
| 311 Register scratch0, |
| 312 Register scratch1, |
| 313 Label* has_color, |
| 314 int first_bit, |
| 315 int second_bit); |
| 316 |
| 317 void JumpIfBlack(Register object, |
280 Register scratch0, | 318 Register scratch0, |
281 Register scratch1); | 319 Register scratch1, |
| 320 Label* on_black); |
282 | 321 |
283 // For the page containing |object| mark the region covering | 322 // Checks the color of an object. If the object is already grey or black |
284 // [address] dirty. The object address must be in the first 8K of an | 323 // then we just fall through, since it is already live. If it is white and |
285 // allocated page. All 3 registers are clobbered by the operation, | 324 // we can determine that it doesn't need to be scanned, then we just mark it |
286 // as well as the ip register. RecordWrite updates the write barrier | 325 // black and fall through. For the rest we jump to the label so the |
287 // even when storing smis. | 326 // incremental marker can fix its assumptions. |
288 void RecordWrite(Register object, | 327 void EnsureNotWhite(Register object, |
289 Register address, | 328 Register scratch1, |
290 Register scratch); | 329 Register scratch2, |
| 330 Register scratch3, |
| 331 Label* object_is_white_and_not_data); |
| 332 |
| 333 // Detects conservatively whether an object is data-only, i.e. it does not need to |
| 334 // be scanned by the garbage collector. |
| 335 void JumpIfDataObject(Register value, |
| 336 Register scratch, |
| 337 Label* not_data_object); |
| 338 |
| 339 // Notify the garbage collector that we wrote a pointer into an object. |
| 340 // |object| is the object being stored into, |value| is the object being |
| 341 // stored. value and scratch registers are clobbered by the operation. |
| 342 // The offset is the offset from the start of the object, not the offset from |
| 343 // the tagged HeapObject pointer. For use with FieldOperand(reg, off). |
| 344 void RecordWriteField( |
| 345 Register object, |
| 346 int offset, |
| 347 Register value, |
| 348 Register scratch, |
| 349 RAStatus ra_status, |
| 350 SaveFPRegsMode save_fp, |
| 351 RememberedSetAction remembered_set_action = EMIT_REMEMBERED_SET, |
| 352 SmiCheck smi_check = INLINE_SMI_CHECK); |
| 353 |
| 354 // As above, but the offset has the tag presubtracted. For use with |
| 355 // MemOperand(reg, off). |
| 356 inline void RecordWriteContextSlot( |
| 357 Register context, |
| 358 int offset, |
| 359 Register value, |
| 360 Register scratch, |
| 361 RAStatus ra_status, |
| 362 SaveFPRegsMode save_fp, |
| 363 RememberedSetAction remembered_set_action = EMIT_REMEMBERED_SET, |
| 364 SmiCheck smi_check = INLINE_SMI_CHECK) { |
| 365 RecordWriteField(context, |
| 366 offset + kHeapObjectTag, |
| 367 value, |
| 368 scratch, |
| 369 ra_status, |
| 370 save_fp, |
| 371 remembered_set_action, |
| 372 smi_check); |
| 373 } |
| 374 |
| 375 // For a given |object| notify the garbage collector that the slot |address| |
| 376 // has been written. |value| is the object being stored. The value and |
| 377 // address registers are clobbered by the operation. |
| 378 void RecordWrite( |
| 379 Register object, |
| 380 Register address, |
| 381 Register value, |
| 382 RAStatus ra_status, |
| 383 SaveFPRegsMode save_fp, |
| 384 RememberedSetAction remembered_set_action = EMIT_REMEMBERED_SET, |
| 385 SmiCheck smi_check = INLINE_SMI_CHECK); |
291 | 386 |
292 | 387 |
293 // --------------------------------------------------------------------------- | 388 // --------------------------------------------------------------------------- |
294 // Inline caching support. | 389 // Inline caching support. |
295 | 390 |
296 // Generate code for checking access rights - used for security checks | 391 // Generate code for checking access rights - used for security checks |
297 // on access to global objects across environments. The holder register | 392 // on access to global objects across environments. The holder register |
298 // is left untouched, whereas both scratch registers are clobbered. | 393 // is left untouched, whereas both scratch registers are clobbered. |
299 void CheckAccessGlobalProxy(Register holder_reg, | 394 void CheckAccessGlobalProxy(Register holder_reg, |
300 Register scratch, | 395 Register scratch, |
(...skipping 958 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1259 // Get the code for the given builtin. Returns if able to resolve | 1354 // Get the code for the given builtin. Returns if able to resolve |
1260 // the function in the 'resolved' flag. | 1355 // the function in the 'resolved' flag. |
1261 Handle<Code> ResolveBuiltin(Builtins::JavaScript id, bool* resolved); | 1356 Handle<Code> ResolveBuiltin(Builtins::JavaScript id, bool* resolved); |
1262 | 1357 |
1263 void InitializeNewString(Register string, | 1358 void InitializeNewString(Register string, |
1264 Register length, | 1359 Register length, |
1265 Heap::RootListIndex map_index, | 1360 Heap::RootListIndex map_index, |
1266 Register scratch1, | 1361 Register scratch1, |
1267 Register scratch2); | 1362 Register scratch2); |
1268 | 1363 |
| 1364 // Helper for implementing JumpIfNotInNewSpace and JumpIfInNewSpace. |
| 1365 void InNewSpace(Register object, |
| 1366 Register scratch, |
| 1367 Condition cond, // eq for new space, ne otherwise. |
| 1368 Label* branch); |
| 1369 |
| 1370 // Helper for finding the mark bits for an address. Afterwards, the |
| 1371 // bitmap register points at the word with the mark bits and the mask |
| 1372 // the position of the first bit. Leaves addr_reg unchanged. |
| 1373 inline void GetMarkBits(Register addr_reg, |
| 1374 Register bitmap_reg, |
| 1375 Register mask_reg); |
| 1376 |
1269 // Compute memory operands for safepoint stack slots. | 1377 // Compute memory operands for safepoint stack slots. |
1270 static int SafepointRegisterStackIndex(int reg_code); | 1378 static int SafepointRegisterStackIndex(int reg_code); |
1271 MemOperand SafepointRegisterSlot(Register reg); | 1379 MemOperand SafepointRegisterSlot(Register reg); |
1272 MemOperand SafepointRegistersAndDoublesSlot(Register reg); | 1380 MemOperand SafepointRegistersAndDoublesSlot(Register reg); |
1273 | 1381 |
1274 bool generating_stub_; | 1382 bool generating_stub_; |
1275 bool allow_stub_calls_; | 1383 bool allow_stub_calls_; |
1276 bool has_frame_; | 1384 bool has_frame_; |
1277 // This handle will be patched with the code object on installation. | 1385 // This handle will be patched with the code object on installation. |
1278 Handle<Object> code_object_; | 1386 Handle<Object> code_object_; |
(...skipping 41 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1320 #define CODE_COVERAGE_TOSTRING(x) CODE_COVERAGE_STRINGIFY(x) | 1428 #define CODE_COVERAGE_TOSTRING(x) CODE_COVERAGE_STRINGIFY(x) |
1321 #define __FILE_LINE__ __FILE__ ":" CODE_COVERAGE_TOSTRING(__LINE__) | 1429 #define __FILE_LINE__ __FILE__ ":" CODE_COVERAGE_TOSTRING(__LINE__) |
1322 #define ACCESS_MASM(masm) masm->stop(__FILE_LINE__); masm-> | 1430 #define ACCESS_MASM(masm) masm->stop(__FILE_LINE__); masm-> |
1323 #else | 1431 #else |
1324 #define ACCESS_MASM(masm) masm-> | 1432 #define ACCESS_MASM(masm) masm-> |
1325 #endif | 1433 #endif |
1326 | 1434 |
1327 } } // namespace v8::internal | 1435 } } // namespace v8::internal |
1328 | 1436 |
1329 #endif // V8_MIPS_MACRO_ASSEMBLER_MIPS_H_ | 1437 #endif // V8_MIPS_MACRO_ASSEMBLER_MIPS_H_ |
OLD | NEW |