OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 234 matching lines...)
245 SaveFPRegsMode fp_mode, | 245 SaveFPRegsMode fp_mode, |
246 RememberedSetAction remembered_set_action, | 246 RememberedSetAction remembered_set_action, |
247 SmiCheck smi_check) { | 247 SmiCheck smi_check) { |
248 ASSERT(!AreAliased(object, address, value, t8)); | 248 ASSERT(!AreAliased(object, address, value, t8)); |
249 ASSERT(!AreAliased(object, address, value, t9)); | 249 ASSERT(!AreAliased(object, address, value, t9)); |
250 // The compiled code assumes that record write doesn't change the | 250 // The compiled code assumes that record write doesn't change the |
251 // context register, so we check that none of the clobbered | 251 // context register, so we check that none of the clobbered |
252 // registers are cp. | 252 // registers are cp. |
253 ASSERT(!address.is(cp) && !value.is(cp)); | 253 ASSERT(!address.is(cp) && !value.is(cp)); |
254 | 254 |
| 255 if (emit_debug_code()) { |
| 256 lw(at, MemOperand(address)); |
| 257 Assert( |
| 258 eq, "Wrong address or value passed to RecordWrite", at, Operand(value)); |
| 259 } |
| 260 |
255 Label done; | 261 Label done; |
256 | 262 |
257 if (smi_check == INLINE_SMI_CHECK) { | 263 if (smi_check == INLINE_SMI_CHECK) { |
258 ASSERT_EQ(0, kSmiTag); | 264 ASSERT_EQ(0, kSmiTag); |
259 JumpIfSmi(value, &done); | 265 JumpIfSmi(value, &done); |
260 } | 266 } |
261 | 267 |
262 CheckPageFlag(value, | 268 CheckPageFlag(value, |
263 value, // Used as scratch. | 269 value, // Used as scratch. |
264 MemoryChunk::kPointersToHereAreInterestingMask, | 270 MemoryChunk::kPointersToHereAreInterestingMask, |
(...skipping 25 matching lines...)
290 } | 296 } |
291 } | 297 } |
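The new emit_debug_code() block above (new lines 255-259) asserts that the slot at address already holds value, i.e. that the caller performs the store before invoking the write barrier. A minimal call-site sketch of that contract, in the style of RecordWriteField; the helper name and the exact trailing RecordWrite arguments are assumptions for this V8 vintage, not part of this CL:

  // Hypothetical helper: store the pointer first, then hand the slot address
  // to RecordWrite, so the debug check (*address == value) holds on entry.
  void StoreFieldWithBarrier(MacroAssembler* masm,
                             Register object, Register value,
                             Register address_scratch, int offset,
                             RAStatus ra_status, SaveFPRegsMode fp_mode) {
    masm->sw(value, FieldMemOperand(object, offset));                       // do the store
    masm->Addu(address_scratch, object, Operand(offset - kHeapObjectTag));  // slot address
    masm->RecordWrite(object, address_scratch, value, ra_status, fp_mode,
                      EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
  }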
292 | 298 |
293 | 299 |
294 void MacroAssembler::RememberedSetHelper(Register object, // For debug tests. | 300 void MacroAssembler::RememberedSetHelper(Register object, // For debug tests. |
295 Register address, | 301 Register address, |
296 Register scratch, | 302 Register scratch, |
297 SaveFPRegsMode fp_mode, | 303 SaveFPRegsMode fp_mode, |
298 RememberedSetFinalAction and_then) { | 304 RememberedSetFinalAction and_then) { |
299 Label done; | 305 Label done; |
300 if (FLAG_debug_code) { | 306 if (emit_debug_code()) { |
301 Label ok; | 307 Label ok; |
302 JumpIfNotInNewSpace(object, scratch, &ok); | 308 JumpIfNotInNewSpace(object, scratch, &ok); |
303 stop("Remembered set pointer is in new space"); | 309 stop("Remembered set pointer is in new space"); |
304 bind(&ok); | 310 bind(&ok); |
305 } | 311 } |
306 // Load store buffer top. | 312 // Load store buffer top. |
307 ExternalReference store_buffer = | 313 ExternalReference store_buffer = |
308 ExternalReference::store_buffer_top(isolate()); | 314 ExternalReference::store_buffer_top(isolate()); |
309 li(t8, Operand(store_buffer)); | 315 li(t8, Operand(store_buffer)); |
310 lw(scratch, MemOperand(t8)); | 316 lw(scratch, MemOperand(t8)); |
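As elsewhere in this CL, the guard above switches from the global FLAG_debug_code to the assembler-local emit_debug_code() predicate. A simplified sketch of the idea behind that predicate (member and method names are recalled from the Assembler of this era and should be treated as assumptions, not a quote of the real declaration):

  // Sketch only: each assembler carries its own debug-code bit, seeded from
  // the --debug-code flag in the real constructor but overridable per
  // instance, e.g. for code generated while building the snapshot.
  class AssemblerSketch {
   public:
    explicit AssemblerSketch(bool debug_code_flag)
        : emit_debug_code_(debug_code_flag) {}
    bool emit_debug_code() const { return emit_debug_code_; }
    void set_emit_debug_code(bool value) { emit_debug_code_ = value; }
   private:
    bool emit_debug_code_;
  };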
(...skipping 4520 matching lines...)
4831 ASSERT(strcmp(Marking::kImpossibleBitPattern, "01") == 0); | 4837 ASSERT(strcmp(Marking::kImpossibleBitPattern, "01") == 0); |
4832 | 4838 |
4833 Label done; | 4839 Label done; |
4834 | 4840 |
4835 // Since both black and grey have a 1 in the first position and white does | 4841 // Since both black and grey have a 1 in the first position and white does |
4836 // not have a 1 there we only need to check one bit. | 4842 // not have a 1 there we only need to check one bit. |
4837 lw(load_scratch, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize)); | 4843 lw(load_scratch, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize)); |
4838 And(t8, mask_scratch, load_scratch); | 4844 And(t8, mask_scratch, load_scratch); |
4839 Branch(&done, ne, t8, Operand(zero_reg)); | 4845 Branch(&done, ne, t8, Operand(zero_reg)); |
4840 | 4846 |
4841 if (FLAG_debug_code) { | 4847 if (emit_debug_code()) { |
4842 // Check for impossible bit pattern. | 4848 // Check for impossible bit pattern. |
4843 Label ok; | 4849 Label ok; |
4844 // sll may overflow, making the check conservative. | 4850 // sll may overflow, making the check conservative. |
4845 sll(t8, mask_scratch, 1); | 4851 sll(t8, mask_scratch, 1); |
4846 And(t8, load_scratch, t8); | 4852 And(t8, load_scratch, t8); |
4847 Branch(&ok, eq, t8, Operand(zero_reg)); | 4853 Branch(&ok, eq, t8, Operand(zero_reg)); |
4848 stop("Impossible marking bit pattern"); | 4854 stop("Impossible marking bit pattern"); |
4849 bind(&ok); | 4855 bind(&ok); |
4850 } | 4856 } |
4851 | 4857 |
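The one-bit fast path above relies on the mark-bit colour encoding that the surrounding ASSERTs spell out. A small standalone sketch of that encoding (the bit patterns come from the asserted strings; the helper names are illustrative, not V8's Marking API verbatim):

  // Two consecutive mark bits encode an object's colour:
  //   white = 00, black = 10, grey = 11, impossible = 01
  // Both non-white colours have the first bit set, so testing that single bit
  // (the And/Branch pair above) is enough to take the early exit; the debug
  // block then rules out the impossible 01 combination.
  bool IsWhiteBits(bool first_bit)                     { return !first_bit; }
  bool IsBlackOrGreyBits(bool first_bit)               { return first_bit; }
  bool IsImpossibleBits(bool first_bit, bool next_bit) { return !first_bit && next_bit; }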
(...skipping 187 matching lines...)
5039 opcode == BGTZL); | 5045 opcode == BGTZL); |
5040 opcode = (cond == eq) ? BEQ : BNE; | 5046 opcode = (cond == eq) ? BEQ : BNE; |
5041 instr = (instr & ~kOpcodeMask) | opcode; | 5047 instr = (instr & ~kOpcodeMask) | opcode; |
5042 masm_.emit(instr); | 5048 masm_.emit(instr); |
5043 } | 5049 } |
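The patching step above rewrites only the opcode field of an I-type branch, turning a branch-likely encoding (BEQL, BNEL, BLEZL, BGTZL) into plain BEQ or BNE while leaving rs, rt and the 16-bit offset untouched. A self-contained sketch of that bit manipulation; the opcode values are the architectural MIPS encodings, and the pre-shifted constant style follows constants-mips.h, so treat the exact names as assumptions:

  #include <stdint.h>

  // A MIPS I-type branch keeps its opcode in bits 31..26; masking those out
  // and OR-ing in a new opcode changes the branch kind only.
  const uint32_t kOpcodeShift = 26;
  const uint32_t kOpcodeMask  = 0x3Fu << kOpcodeShift;
  const uint32_t kBeq         = 0x04u << kOpcodeShift;  // beq  rs, rt, offset
  const uint32_t kBeql        = 0x14u << kOpcodeShift;  // beql rs, rt, offset (likely)

  uint32_t ChangeLikelyBranchToBeq(uint32_t instr) {
    return (instr & ~kOpcodeMask) | kBeq;               // same registers, same offset
  }

For example, feeding it a beql encoding (kBeql with any rs, rt and offset bits) yields the corresponding beq with identical operands, which is exactly what the emit(instr) above writes back.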
5044 | 5050 |
5045 | 5051 |
5046 } } // namespace v8::internal | 5052 } } // namespace v8::internal |
5047 | 5053 |
5048 #endif // V8_TARGET_ARCH_MIPS | 5054 #endif // V8_TARGET_ARCH_MIPS |