OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include <limits.h> // For LONG_MIN, LONG_MAX. | 5 #include <limits.h> // For LONG_MIN, LONG_MAX. |
6 | 6 |
7 #include "src/v8.h" | 7 #include "src/v8.h" |
8 | 8 |
9 #if V8_TARGET_ARCH_MIPS | 9 #if V8_TARGET_ARCH_MIPS |
10 | 10 |
(...skipping 176 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
187 | 187 |
188 | 188 |
189 void MacroAssembler::RecordWriteField( | 189 void MacroAssembler::RecordWriteField( |
190 Register object, | 190 Register object, |
191 int offset, | 191 int offset, |
192 Register value, | 192 Register value, |
193 Register dst, | 193 Register dst, |
194 RAStatus ra_status, | 194 RAStatus ra_status, |
195 SaveFPRegsMode save_fp, | 195 SaveFPRegsMode save_fp, |
196 RememberedSetAction remembered_set_action, | 196 RememberedSetAction remembered_set_action, |
197 SmiCheck smi_check) { | 197 SmiCheck smi_check, |
| 198 PointersToHereCheck pointers_to_here_check_for_value) { |
198 ASSERT(!AreAliased(value, dst, t8, object)); | 199 ASSERT(!AreAliased(value, dst, t8, object)); |
199 // First, check if a write barrier is even needed. The tests below | 200 // First, check if a write barrier is even needed. The tests below |
200 // catch stores of Smis. | 201 // catch stores of Smis. |
201 Label done; | 202 Label done; |
202 | 203 |
203 // Skip barrier if writing a smi. | 204 // Skip barrier if writing a smi. |
204 if (smi_check == INLINE_SMI_CHECK) { | 205 if (smi_check == INLINE_SMI_CHECK) { |
205 JumpIfSmi(value, &done); | 206 JumpIfSmi(value, &done); |
206 } | 207 } |
207 | 208 |
208 // Although the object register is tagged, the offset is relative to the start | 209 // Although the object register is tagged, the offset is relative to the start |
209 // of the object, so offset must be a multiple of kPointerSize. | 210 // of the object, so offset must be a multiple of kPointerSize. |
210 ASSERT(IsAligned(offset, kPointerSize)); | 211 ASSERT(IsAligned(offset, kPointerSize)); |
211 | 212 |
212 Addu(dst, object, Operand(offset - kHeapObjectTag)); | 213 Addu(dst, object, Operand(offset - kHeapObjectTag)); |
213 if (emit_debug_code()) { | 214 if (emit_debug_code()) { |
214 Label ok; | 215 Label ok; |
215 And(t8, dst, Operand((1 << kPointerSizeLog2) - 1)); | 216 And(t8, dst, Operand((1 << kPointerSizeLog2) - 1)); |
216 Branch(&ok, eq, t8, Operand(zero_reg)); | 217 Branch(&ok, eq, t8, Operand(zero_reg)); |
217 stop("Unaligned cell in write barrier"); | 218 stop("Unaligned cell in write barrier"); |
218 bind(&ok); | 219 bind(&ok); |
219 } | 220 } |
220 | 221 |
221 RecordWrite(object, | 222 RecordWrite(object, |
222 dst, | 223 dst, |
223 value, | 224 value, |
224 ra_status, | 225 ra_status, |
225 save_fp, | 226 save_fp, |
226 remembered_set_action, | 227 remembered_set_action, |
227 OMIT_SMI_CHECK); | 228 OMIT_SMI_CHECK, |
| 229 pointers_to_here_check_for_value); |
228 | 230 |
229 bind(&done); | 231 bind(&done); |
230 | 232 |
231 // Clobber clobbered input registers when running with the debug-code flag | 233 // Clobber clobbered input registers when running with the debug-code flag |
232 // turned on to provoke errors. | 234 // turned on to provoke errors. |
233 if (emit_debug_code()) { | 235 if (emit_debug_code()) { |
234 li(value, Operand(BitCast<int32_t>(kZapValue + 4))); | 236 li(value, Operand(BitCast<int32_t>(kZapValue + 4))); |
235 li(dst, Operand(BitCast<int32_t>(kZapValue + 8))); | 237 li(dst, Operand(BitCast<int32_t>(kZapValue + 8))); |
236 } | 238 } |
237 } | 239 } |
238 | 240 |
239 | 241 |
| 242 // Will clobber 4 registers: object, map, dst, ip. The |
| 243 // register 'object' contains a heap object pointer. |
| 244 void MacroAssembler::RecordWriteForMap(Register object, |
| 245 Register map, |
| 246 Register dst, |
| 247 RAStatus ra_status, |
| 248 SaveFPRegsMode fp_mode) { |
| 249 if (emit_debug_code()) { |
| 250 ASSERT(!dst.is(at)); |
| 251 lw(dst, FieldMemOperand(map, HeapObject::kMapOffset)); |
| 252 Check(eq, |
| 253 kWrongAddressOrValuePassedToRecordWrite, |
| 254 dst, |
| 255 Operand(isolate()->factory()->meta_map())); |
| 256 } |
| 257 |
| 258 if (!FLAG_incremental_marking) { |
| 259 return; |
| 260 } |
| 261 |
| 262 // Count number of write barriers in generated code. |
| 263 isolate()->counters()->write_barriers_static()->Increment(); |
| 264 // TODO(mstarzinger): Dynamic counter missing. |
| 265 |
| 266 if (emit_debug_code()) { |
| 267 lw(at, FieldMemOperand(object, HeapObject::kMapOffset)); |
| 268 Check(eq, |
| 269 kWrongAddressOrValuePassedToRecordWrite, |
| 270 map, |
| 271 Operand(at)); |
| 272 } |
| 273 |
| 274 Label done; |
| 275 |
| 276 // A single check of the map's pages interesting flag suffices, since it is |
| 277 // only set during incremental collection, and then it's also guaranteed that |
| 278 // the from object's page's interesting flag is also set. This optimization |
| 279 // relies on the fact that maps can never be in new space. |
| 280 CheckPageFlag(map, |
| 281 map, // Used as scratch. |
| 282 MemoryChunk::kPointersToHereAreInterestingMask, |
| 283 eq, |
| 284 &done); |
| 285 |
| 286 Addu(dst, object, Operand(HeapObject::kMapOffset - kHeapObjectTag)); |
| 287 if (emit_debug_code()) { |
| 288 Label ok; |
| 289 And(at, dst, Operand((1 << kPointerSizeLog2) - 1)); |
| 290 Branch(&ok, eq, at, Operand(zero_reg)); |
| 291 stop("Unaligned cell in write barrier"); |
| 292 bind(&ok); |
| 293 } |
| 294 |
| 295 // Record the actual write. |
| 296 if (ra_status == kRAHasNotBeenSaved) { |
| 297 push(ra); |
| 298 } |
| 299 RecordWriteStub stub(isolate(), object, map, dst, OMIT_REMEMBERED_SET, |
| 300 fp_mode); |
| 301 CallStub(&stub); |
| 302 if (ra_status == kRAHasNotBeenSaved) { |
| 303 pop(ra); |
| 304 } |
| 305 |
| 306 bind(&done); |
| 307 |
| 308 // Clobber clobbered registers when running with the debug-code flag |
| 309 // turned on to provoke errors. |
| 310 if (emit_debug_code()) { |
| 311 li(dst, Operand(BitCast<int32_t>(kZapValue + 12))); |
| 312 li(map, Operand(BitCast<int32_t>(kZapValue + 16))); |
| 313 } |
| 314 } |
| 315 |
| 316 |
240 // Will clobber 4 registers: object, address, scratch, ip. The | 317 // Will clobber 4 registers: object, address, scratch, ip. The |
241 // register 'object' contains a heap object pointer. The heap object | 318 // register 'object' contains a heap object pointer. The heap object |
242 // tag is shifted away. | 319 // tag is shifted away. |
243 void MacroAssembler::RecordWrite(Register object, | 320 void MacroAssembler::RecordWrite( |
244 Register address, | 321 Register object, |
245 Register value, | 322 Register address, |
246 RAStatus ra_status, | 323 Register value, |
247 SaveFPRegsMode fp_mode, | 324 RAStatus ra_status, |
248 RememberedSetAction remembered_set_action, | 325 SaveFPRegsMode fp_mode, |
249 SmiCheck smi_check) { | 326 RememberedSetAction remembered_set_action, |
| 327 SmiCheck smi_check, |
| 328 PointersToHereCheck pointers_to_here_check_for_value) { |
250 ASSERT(!AreAliased(object, address, value, t8)); | 329 ASSERT(!AreAliased(object, address, value, t8)); |
251 ASSERT(!AreAliased(object, address, value, t9)); | 330 ASSERT(!AreAliased(object, address, value, t9)); |
252 | 331 |
253 if (emit_debug_code()) { | 332 if (emit_debug_code()) { |
254 lw(at, MemOperand(address)); | 333 lw(at, MemOperand(address)); |
255 Assert( | 334 Assert( |
256 eq, kWrongAddressOrValuePassedToRecordWrite, at, Operand(value)); | 335 eq, kWrongAddressOrValuePassedToRecordWrite, at, Operand(value)); |
257 } | 336 } |
258 | 337 |
| 338 if (remembered_set_action == OMIT_REMEMBERED_SET && |
| 339 !FLAG_incremental_marking) { |
| 340 return; |
| 341 } |
| 342 |
259 // Count number of write barriers in generated code. | 343 // Count number of write barriers in generated code. |
260 isolate()->counters()->write_barriers_static()->Increment(); | 344 isolate()->counters()->write_barriers_static()->Increment(); |
261 // TODO(mstarzinger): Dynamic counter missing. | 345 // TODO(mstarzinger): Dynamic counter missing. |
262 | 346 |
263 // First, check if a write barrier is even needed. The tests below | 347 // First, check if a write barrier is even needed. The tests below |
264 // catch stores of smis and stores into the young generation. | 348 // catch stores of smis and stores into the young generation. |
265 Label done; | 349 Label done; |
266 | 350 |
267 if (smi_check == INLINE_SMI_CHECK) { | 351 if (smi_check == INLINE_SMI_CHECK) { |
268 ASSERT_EQ(0, kSmiTag); | 352 ASSERT_EQ(0, kSmiTag); |
269 JumpIfSmi(value, &done); | 353 JumpIfSmi(value, &done); |
270 } | 354 } |
271 | 355 |
272 CheckPageFlag(value, | 356 if (pointers_to_here_check_for_value != kPointersToHereAreAlwaysInteresting) { |
273 value, // Used as scratch. | 357 CheckPageFlag(value, |
274 MemoryChunk::kPointersToHereAreInterestingMask, | 358 value, // Used as scratch. |
275 eq, | 359 MemoryChunk::kPointersToHereAreInterestingMask, |
276 &done); | 360 eq, |
| 361 &done); |
| 362 } |
277 CheckPageFlag(object, | 363 CheckPageFlag(object, |
278 value, // Used as scratch. | 364 value, // Used as scratch. |
279 MemoryChunk::kPointersFromHereAreInterestingMask, | 365 MemoryChunk::kPointersFromHereAreInterestingMask, |
280 eq, | 366 eq, |
281 &done); | 367 &done); |
282 | 368 |
283 // Record the actual write. | 369 // Record the actual write. |
284 if (ra_status == kRAHasNotBeenSaved) { | 370 if (ra_status == kRAHasNotBeenSaved) { |
285 push(ra); | 371 push(ra); |
286 } | 372 } |
(...skipping 5383 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5670 } | 5756 } |
5671 if (ms.shift() > 0) sra(result, result, ms.shift()); | 5757 if (ms.shift() > 0) sra(result, result, ms.shift()); |
5672 srl(at, dividend, 31); | 5758 srl(at, dividend, 31); |
5673 Addu(result, result, Operand(at)); | 5759 Addu(result, result, Operand(at)); |
5674 } | 5760 } |
5675 | 5761 |
5676 | 5762 |
5677 } } // namespace v8::internal | 5763 } } // namespace v8::internal |
5678 | 5764 |
5679 #endif // V8_TARGET_ARCH_MIPS | 5765 #endif // V8_TARGET_ARCH_MIPS |
OLD | NEW |