OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_X64 | 7 #if V8_TARGET_ARCH_X64 |
8 | 8 |
9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
(...skipping 387 matching lines...)
398 cmpp(map, FieldOperand(object, HeapObject::kMapOffset)); | 398 cmpp(map, FieldOperand(object, HeapObject::kMapOffset)); |
399 if (map.is(kScratchRegister)) popq(map); | 399 if (map.is(kScratchRegister)) popq(map); |
400 j(equal, &ok, Label::kNear); | 400 j(equal, &ok, Label::kNear); |
401 int3(); | 401 int3(); |
402 bind(&ok); | 402 bind(&ok); |
403 } | 403 } |
404 | 404 |
405 // Compute the address. | 405 // Compute the address. |
406 leap(dst, FieldOperand(object, HeapObject::kMapOffset)); | 406 leap(dst, FieldOperand(object, HeapObject::kMapOffset)); |
407 | 407 |
408 // Count number of write barriers in generated code. | |
409 isolate()->counters()->write_barriers_static()->Increment(); | |
410 IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1); | |
411 | |
412 // First, check if a write barrier is even needed. The tests below | 408 // First, check if a write barrier is even needed. The tests below |
413 // catch stores of smis and stores into the young generation. | 409 // catch stores of smis and stores into the young generation. |
414 Label done; | 410 Label done; |
415 | 411 |
416 // A single check of the map's pages interesting flag suffices, since it is | 412 // A single check of the map's pages interesting flag suffices, since it is |
417 // only set during incremental collection, and then it's also guaranteed that | 413 // only set during incremental collection, and then it's also guaranteed that |
418 // the from object's page's interesting flag is also set. This optimization | 414 // the from object's page's interesting flag is also set. This optimization |
419 // relies on the fact that maps can never be in new space. | 415 // relies on the fact that maps can never be in new space. |
420 CheckPageFlag(map, | 416 CheckPageFlag(map, |
421 map, // Used as scratch. | 417 map, // Used as scratch. |
422 MemoryChunk::kPointersToHereAreInterestingMask, | 418 MemoryChunk::kPointersToHereAreInterestingMask, |
423 zero, | 419 zero, |
424 &done, | 420 &done, |
425 Label::kNear); | 421 Label::kNear); |
426 | 422 |
427 RecordWriteStub stub(isolate(), object, map, dst, OMIT_REMEMBERED_SET, | 423 RecordWriteStub stub(isolate(), object, map, dst, OMIT_REMEMBERED_SET, |
428 fp_mode); | 424 fp_mode); |
429 CallStub(&stub); | 425 CallStub(&stub); |
430 | 426 |
431 bind(&done); | 427 bind(&done); |
432 | 428 |
| 429 // Count number of write barriers in generated code. |
| 430 isolate()->counters()->write_barriers_static()->Increment(); |
| 431 IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1); |
| 432 |
433 // Clobber clobbered registers when running with the debug-code flag | 433 // Clobber clobbered registers when running with the debug-code flag |
434 // turned on to provoke errors. | 434 // turned on to provoke errors. |
435 if (emit_debug_code()) { | 435 if (emit_debug_code()) { |
436 Move(dst, kZapValue, Assembler::RelocInfoNone()); | 436 Move(dst, kZapValue, Assembler::RelocInfoNone()); |
437 Move(map, kZapValue, Assembler::RelocInfoNone()); | 437 Move(map, kZapValue, Assembler::RelocInfoNone()); |
438 } | 438 } |
439 } | 439 } |
440 | 440 |
441 | 441 |
442 void MacroAssembler::RecordWrite( | 442 void MacroAssembler::RecordWrite( |
(...skipping 15 matching lines...)
458 } | 458 } |
459 | 459 |
460 if (emit_debug_code()) { | 460 if (emit_debug_code()) { |
461 Label ok; | 461 Label ok; |
462 cmpp(value, Operand(address, 0)); | 462 cmpp(value, Operand(address, 0)); |
463 j(equal, &ok, Label::kNear); | 463 j(equal, &ok, Label::kNear); |
464 int3(); | 464 int3(); |
465 bind(&ok); | 465 bind(&ok); |
466 } | 466 } |
467 | 467 |
468 // Count number of write barriers in generated code. | |
469 isolate()->counters()->write_barriers_static()->Increment(); | |
470 IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1); | |
471 | |
472 // First, check if a write barrier is even needed. The tests below | 468 // First, check if a write barrier is even needed. The tests below |
473 // catch stores of smis and stores into the young generation. | 469 // catch stores of smis and stores into the young generation. |
474 Label done; | 470 Label done; |
475 | 471 |
476 if (smi_check == INLINE_SMI_CHECK) { | 472 if (smi_check == INLINE_SMI_CHECK) { |
477 // Skip barrier if writing a smi. | 473 // Skip barrier if writing a smi. |
478 JumpIfSmi(value, &done); | 474 JumpIfSmi(value, &done); |
479 } | 475 } |
480 | 476 |
481 if (pointers_to_here_check_for_value != kPointersToHereAreAlwaysInteresting) { | 477 if (pointers_to_here_check_for_value != kPointersToHereAreAlwaysInteresting) { |
(...skipping 11 matching lines...)
493 zero, | 489 zero, |
494 &done, | 490 &done, |
495 Label::kNear); | 491 Label::kNear); |
496 | 492 |
497 RecordWriteStub stub(isolate(), object, value, address, remembered_set_action, | 493 RecordWriteStub stub(isolate(), object, value, address, remembered_set_action, |
498 fp_mode); | 494 fp_mode); |
499 CallStub(&stub); | 495 CallStub(&stub); |
500 | 496 |
501 bind(&done); | 497 bind(&done); |
502 | 498 |
| 499 // Count number of write barriers in generated code. |
| 500 isolate()->counters()->write_barriers_static()->Increment(); |
| 501 IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1); |
| 502 |
503 // Clobber clobbered registers when running with the debug-code flag | 503 // Clobber clobbered registers when running with the debug-code flag |
504 // turned on to provoke errors. | 504 // turned on to provoke errors. |
505 if (emit_debug_code()) { | 505 if (emit_debug_code()) { |
506 Move(address, kZapValue, Assembler::RelocInfoNone()); | 506 Move(address, kZapValue, Assembler::RelocInfoNone()); |
507 Move(value, kZapValue, Assembler::RelocInfoNone()); | 507 Move(value, kZapValue, Assembler::RelocInfoNone()); |
508 } | 508 } |
509 } | 509 } |
510 | 510 |
511 | 511 |
512 void MacroAssembler::Assert(Condition cc, BailoutReason reason) { | 512 void MacroAssembler::Assert(Condition cc, BailoutReason reason) { |
(...skipping 4863 matching lines...)
5376 if (ms.shift() > 0) sarl(rdx, Immediate(ms.shift())); | 5376 if (ms.shift() > 0) sarl(rdx, Immediate(ms.shift())); |
5377 movl(rax, dividend); | 5377 movl(rax, dividend); |
5378 shrl(rax, Immediate(31)); | 5378 shrl(rax, Immediate(31)); |
5379 addl(rdx, rax); | 5379 addl(rdx, rax); |
5380 } | 5380 } |
5381 | 5381 |
5382 | 5382 |
5383 } } // namespace v8::internal | 5383 } } // namespace v8::internal |
5384 | 5384 |
5385 #endif // V8_TARGET_ARCH_X64 | 5385 #endif // V8_TARGET_ARCH_X64 |
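
The comments kept in both versions of RecordWriteForMap and RecordWrite above explain why the stub call can usually be skipped: stores of smis and stores of values that do not live on an "interesting" page need no barrier, so JumpIfSmi and CheckPageFlag run before control can reach CallStub(&stub). As a reading aid only, here is a minimal standalone C++ sketch of that filtering decision; it is not V8 code, and the Page and TaggedValue types, the tag convention, and the flag value are assumptions invented for the illustration.

#include <cstdint>
#include <cstdio>

// Assumed constants for illustration; the real values live in V8's headers.
constexpr uintptr_t kSmiTagMask = 1;  // assumed: heap pointers have bit 0 set, smis do not
constexpr uint32_t kPointersToHereAreInterestingMask = 1u << 0;

struct Page { uint32_t flags; };  // stand-in for V8's per-page MemoryChunk header

struct TaggedValue {  // a tagged word plus the page it lives on (null for smis)
  uintptr_t bits;
  const Page* page;
};

// Mirrors the shape of the emitted fast path: the smi test and the page-flag
// test each branch to the done label, and only if both fail does control
// fall through to the RecordWriteStub call.
bool NeedsWriteBarrierStub(const TaggedValue& value) {
  if ((value.bits & kSmiTagMask) == 0) return false;  // smi store: no barrier
  // Only pages whose "pointers to here are interesting" flag is set
  // (e.g. young-generation pages, or pages visited by incremental marking)
  // require the slow-path stub.
  return (value.page->flags & kPointersToHereAreInterestingMask) != 0;
}

int main() {
  Page quiet_page{0};
  Page interesting_page{kPointersToHereAreInterestingMask};
  TaggedValue smi{42u << 1, nullptr};            // even word => smi under the assumed tag
  TaggedValue old_object{0x1001, &quiet_page};
  TaggedValue young_object{0x2001, &interesting_page};
  std::printf("%d %d %d\n", NeedsWriteBarrierStub(smi),
              NeedsWriteBarrierStub(old_object),
              NeedsWriteBarrierStub(young_object));
  return 0;
}

In the generated code, JumpIfSmi and CheckPageFlag play the role of the two early returns in this sketch; the counter increments that this CL relocates sit after the done label in the new version, so they execute on both the skipped and the stub-calling paths.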