OLD | NEW |
1 // Copyright 2009 the V8 project authors. All rights reserved. | 1 // Copyright 2009 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 87 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
98 // Compute number of region covering addr. See Page::GetRegionNumberForAddress | 98 // Compute number of region covering addr. See Page::GetRegionNumberForAddress |
99 // method for more details. | 99 // method for more details. |
100 shrl(addr, Immediate(Page::kRegionSizeLog2)); | 100 shrl(addr, Immediate(Page::kRegionSizeLog2)); |
101 andl(addr, Immediate(Page::kPageAlignmentMask >> Page::kRegionSizeLog2)); | 101 andl(addr, Immediate(Page::kPageAlignmentMask >> Page::kRegionSizeLog2)); |
102 | 102 |
103 // Set dirty mark for region. | 103 // Set dirty mark for region. |
104 bts(Operand(object, Page::kDirtyFlagOffset), addr); | 104 bts(Operand(object, Page::kDirtyFlagOffset), addr); |
105 } | 105 } |
106 | 106 |
107 | 107 |
108 // For page containing |object| mark region covering [object+offset] dirty. | |
109 // object is the object being stored into, value is the object being stored. | |
110 // If offset is zero, then the index register contains the array index into | |
111 // the elements array represented a zero extended int32. Otherwise it can be | |
112 // used as a scratch register. | |
113 // All registers are clobbered by the operation. | |
114 void MacroAssembler::RecordWrite(Register object, | 108 void MacroAssembler::RecordWrite(Register object, |
115 int offset, | 109 int offset, |
116 Register value, | 110 Register value, |
117 Register index) { | 111 Register index) { |
118 // The compiled code assumes that record write doesn't change the | 112 // The compiled code assumes that record write doesn't change the |
119 // context register, so we check that none of the clobbered | 113 // context register, so we check that none of the clobbered |
120 // registers are rsi. | 114 // registers are rsi. |
121 ASSERT(!object.is(rsi) && !value.is(rsi) && !index.is(rsi)); | 115 ASSERT(!object.is(rsi) && !value.is(rsi) && !index.is(rsi)); |
122 | 116 |
123 // First, check if a write barrier is even needed. The tests below | 117 // First, check if a write barrier is even needed. The tests below |
(...skipping 10 matching lines...) Expand all Loading... |
134 // avoid having the fast case for smis leave the registers | 128 // avoid having the fast case for smis leave the registers |
135 // unchanged. | 129 // unchanged. |
136 if (FLAG_debug_code) { | 130 if (FLAG_debug_code) { |
137 movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE); | 131 movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE); |
138 movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE); | 132 movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE); |
139 movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE); | 133 movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE); |
140 } | 134 } |
141 } | 135 } |
142 | 136 |
143 | 137 |
| 138 void MacroAssembler::RecordWrite(Register object, |
| 139 Register address, |
| 140 Register value) { |
| 141 // The compiled code assumes that record write doesn't change the |
| 142 // context register, so we check that none of the clobbered |
| 143 // registers are rsi. |
| 144 ASSERT(!object.is(rsi) && !value.is(rsi) && !address.is(rsi)); |
| 145 |
| 146 // First, check if a write barrier is even needed. The tests below |
| 147 // catch stores of Smis and stores into young gen. |
| 148 Label done; |
| 149 JumpIfSmi(value, &done); |
| 150 |
| 151 InNewSpace(object, value, equal, &done); |
| 152 |
| 153 RecordWriteHelper(object, address, value); |
| 154 |
| 155 bind(&done); |
| 156 |
| 157 // Clobber all input registers when running with the debug-code flag |
| 158 // turned on to provoke errors. |
| 159 if (FLAG_debug_code) { |
| 160 movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE); |
| 161 movq(address, BitCast<int64_t>(kZapValue), RelocInfo::NONE); |
| 162 movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE); |
| 163 } |
| 164 } |
| 165 |
| 166 |
144 void MacroAssembler::RecordWriteNonSmi(Register object, | 167 void MacroAssembler::RecordWriteNonSmi(Register object, |
145 int offset, | 168 int offset, |
146 Register scratch, | 169 Register scratch, |
147 Register index) { | 170 Register index) { |
148 Label done; | 171 Label done; |
149 | 172 |
150 if (FLAG_debug_code) { | 173 if (FLAG_debug_code) { |
151 Label okay; | 174 Label okay; |
152 JumpIfNotSmi(object, &okay); | 175 JumpIfNotSmi(object, &okay); |
153 Abort("MacroAssembler::RecordWriteNonSmi cannot deal with smis"); | 176 Abort("MacroAssembler::RecordWriteNonSmi cannot deal with smis"); |
(...skipping 2601 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2755 CPU::FlushICache(address_, size_); | 2778 CPU::FlushICache(address_, size_); |
2756 | 2779 |
2757 // Check that the code was patched as expected. | 2780 // Check that the code was patched as expected. |
2758 ASSERT(masm_.pc_ == address_ + size_); | 2781 ASSERT(masm_.pc_ == address_ + size_); |
2759 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); | 2782 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); |
2760 } | 2783 } |
2761 | 2784 |
2762 } } // namespace v8::internal | 2785 } } // namespace v8::internal |
2763 | 2786 |
2764 #endif // V8_TARGET_ARCH_X64 | 2787 #endif // V8_TARGET_ARCH_X64 |
OLD | NEW |