| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 6566 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 6577 __ mov(result, Operand(0)); | 6577 __ mov(result, Operand(0)); |
| 6578 __ Ret(); | 6578 __ Ret(); |
| 6579 } | 6579 } |
| 6580 | 6580 |
| 6581 | 6581 |
| 6582 // Takes the input in 3 registers: address_ value_ and object_. A pointer to | 6582 // Takes the input in 3 registers: address_ value_ and object_. A pointer to |
| 6583 // the value has just been written into the object, now this stub makes sure | 6583 // the value has just been written into the object, now this stub makes sure |
| 6584 // we keep the GC informed. The word in the object where the value has been | 6584 // we keep the GC informed. The word in the object where the value has been |
| 6585 // written is in the address register. | 6585 // written is in the address register. |
void RecordWriteStub::Generate(MacroAssembler* masm) {
  Label skip_to_incremental_noncompacting;
  Label skip_to_incremental_compacting;

  // The first two instructions are generated with labels so as to get the
  // offset fixed up correctly by the bind(Label*) call.  We patch it back and
  // forth between a compare instruction (a nop in this position) and the
  // real branch when we start and stop incremental heap marking.
  // See RecordWriteStub::Patch for details.
  __ b(&skip_to_incremental_noncompacting);
  __ b(&skip_to_incremental_compacting);

  // Fast path: this is what runs once the two branches above have been
  // patched into nops (the stub's initial STORE_BUFFER_ONLY mode, see the
  // PatchBranchIntoNop calls at the end).  Just update the remembered set,
  // if requested, and return.
  if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
    // kReturnAtEnd: the helper emits the return itself.
    __ RememberedSetHelper(
        address_, value_, save_fp_regs_mode_, MacroAssembler::kReturnAtEnd);
  }
  __ Ret();

  // Slow paths, reachable only while the two patchable branches are active
  // (i.e. while incremental marking is running).
  __ bind(&skip_to_incremental_noncompacting);
  GenerateIncremental(masm, INCREMENTAL);

  __ bind(&skip_to_incremental_compacting);
  GenerateIncremental(masm, INCREMENTAL_COMPACTION);

  // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
  // Will be checked in IncrementalMarking::ActivateGeneratedStub.
  // NOTE(review): the offsets are asserted below 1 << 12 — presumably the
  // range RecordWriteStub::Patch can re-encode when flipping nop <-> branch;
  // confirm against the Patch implementation.
  ASSERT(Assembler::GetBranchOffset(masm->instr_at(0)) < (1 << 12));
  ASSERT(Assembler::GetBranchOffset(masm->instr_at(4)) < (1 << 12));
  PatchBranchIntoNop(masm, 0);
  PatchBranchIntoNop(masm, Assembler::kInstrSize);
}
| 6593 | 6617 |
| 6594 | 6618 |
// Incremental-marking slow path of the record-write stub.  Emitted twice by
// Generate, once per Mode (INCREMENTAL and INCREMENTAL_COMPACTION).
void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
  // Preserve the registers the stub is about to clobber.
  regs_.Save(masm);

  if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
    Label dont_need_remembered_set;

    // Load the value that was just written through the address register and
    // skip the remembered-set update when it does not point into new space.
    __ ldr(regs_.scratch0(), MemOperand(regs_.address(), 0));
    __ JumpIfNotInNewSpace(regs_.scratch0(),
                           regs_.scratch0(),
                           &dont_need_remembered_set);

    // NOTE(review): presumably pages flagged SCAN_ON_SCAVENGE are scanned
    // wholesale and need no per-slot remembered-set entries — confirm against
    // MemoryChunk documentation.
    __ CheckPageFlag(regs_.object(),
                     regs_.scratch0(),
                     MemoryChunk::SCAN_ON_SCAVENGE,
                     ne,
                     &dont_need_remembered_set);

    // First notify the incremental marker if necessary, then update the
    // remembered set.
    CheckNeedsToInformIncrementalMarker(
        masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker);
    InformIncrementalMarker(masm, mode);
    regs_.Restore(masm);
    // kReturnAtEnd: the helper emits the return for this path.
    __ RememberedSetHelper(
        address_, value_, save_fp_regs_mode_, MacroAssembler::kReturnAtEnd);

    __ bind(&dont_need_remembered_set);
  }

  // No remembered-set update needed on this path: only inform the
  // incremental marker, then restore registers and return.
  CheckNeedsToInformIncrementalMarker(
      masm, kReturnOnNoNeedToInformIncrementalMarker);
  InformIncrementalMarker(masm, mode);
  regs_.Restore(masm);
  __ Ret();
}
| 6654 |
| 6655 |
// Emits a call into the C++ runtime to record the write for the incremental
// marker.  Arguments passed: r0 = object, r1 = slot address (compaction) or
// stored value (plain marking), r2 = isolate.
void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm, Mode mode) {
  // C call clobbers caller-saved registers; preserve them around the call.
  regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_);
  int argument_count = 3;
  __ PrepareCallCFunction(argument_count, regs_.scratch0());
  // r0 carries the object argument below; if the address happens to live in
  // r0 it must be relocated to scratch0 first so it is not clobbered.
  Register address =
      r0.is(regs_.address()) ? regs_.scratch0() : regs_.address();
  ASSERT(!address.is(regs_.object()));
  ASSERT(!address.is(r0));
  __ Move(address, regs_.address());
  __ Move(r0, regs_.object());
  if (mode == INCREMENTAL_COMPACTION) {
    // Second argument: the slot address itself.
    __ Move(r1, address);
  } else {
    ASSERT(mode == INCREMENTAL);
    // Second argument: the value stored in the slot.
    __ ldr(r1, MemOperand(address, 0));
  }
  __ mov(r2, Operand(ExternalReference::isolate_address()));

  // TODO(gc): Create a fast version of this C function that does not duplicate
  // the checks done in the stub.
  if (mode == INCREMENTAL_COMPACTION) {
    __ CallCFunction(
        ExternalReference::incremental_evacuation_record_write_function(
            masm->isolate()),
        argument_count);
  } else {
    ASSERT(mode == INCREMENTAL);
    __ CallCFunction(
        ExternalReference::incremental_marking_record_write_function(
            masm->isolate()),
        argument_count);
  }
  regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);
}
| 6690 |
| 6691 |
// Emits the "does the marker care?" check.  If the object is not black the
// emitted code terminates this path (remembered-set update or plain return,
// per on_no_need); if it is black, the emitted code falls through so the
// caller can emit the InformIncrementalMarker call next.
void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
    MacroAssembler* masm,
    OnNoNeedToInformIncrementalMarker on_no_need) {
  Label on_black;

  // Let's look at the color of the object:  If it is not black we don't have
  // to inform the incremental marker.
  __ JumpIfBlack(regs_.object(), regs_.scratch0(), regs_.scratch1(), &on_black);

  // Not black: restore registers and finish this path as the caller asked.
  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    // kReturnAtEnd: the helper emits the return itself.
    __ RememberedSetHelper(
        address_, value_, save_fp_regs_mode_, MacroAssembler::kReturnAtEnd);
  } else {
    __ Ret();
  }

  __ bind(&on_black);

  // TODO(gc): Add call to EnsureNotWhite here.

  // Fall through when we need to inform the incremental marker.
}
| 6715 |
| 6716 |
| 6595 #undef __ | 6717 #undef __ |
| 6596 | 6718 |
| 6597 } } // namespace v8::internal | 6719 } } // namespace v8::internal |
| 6598 | 6720 |
| 6599 #endif // V8_TARGET_ARCH_ARM | 6721 #endif // V8_TARGET_ARCH_ARM |
| OLD | NEW |