OLD | NEW |
1 // Copyright 2009 the V8 project authors. All rights reserved. | 1 // Copyright 2009 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 160 matching lines...)
171 // Set the remembered set bit for [object+offset]. | 171 // Set the remembered set bit for [object+offset]. |
172 // object is the object being stored into, value is the object being stored. | 172 // object is the object being stored into, value is the object being stored. |
173 // If offset is zero, then the smi_index register contains the array index into | 173 // If offset is zero, then the smi_index register contains the array index into |
174 // the elements array represented as a smi. Otherwise it can be used as a | 174 // the elements array represented as a smi. Otherwise it can be used as a |
175 // scratch register. | 175 // scratch register. |
176 // All registers are clobbered by the operation. | 176 // All registers are clobbered by the operation. |
177 void MacroAssembler::RecordWrite(Register object, | 177 void MacroAssembler::RecordWrite(Register object, |
178 int offset, | 178 int offset, |
179 Register value, | 179 Register value, |
180 Register smi_index) { | 180 Register smi_index) { |
| 181 // The compiled code assumes that record write doesn't change the |
| 182 // context register, so we check that none of the clobbered |
| 183 // registers are rsi. |
| 184 ASSERT(!object.is(rsi) && !value.is(rsi) && !smi_index.is(rsi)); |
| 185 |
181 // First, check if a remembered set write is even needed. The tests below | 186 // First, check if a remembered set write is even needed. The tests below |
182 // catch stores of Smis and stores into young gen (which does not have space | 187 // catch stores of Smis and stores into young gen (which does not have space |
183 // for the remembered set bits). | 188 // for the remembered set bits). |
184 Label done; | 189 Label done; |
185 JumpIfSmi(value, &done); | 190 JumpIfSmi(value, &done); |
186 | 191 |
187 RecordWriteNonSmi(object, offset, value, smi_index); | 192 RecordWriteNonSmi(object, offset, value, smi_index); |
188 bind(&done); | 193 bind(&done); |
189 } | 194 } |
190 | 195 |
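Note on the new assertion (an editor's sketch, not part of the patch): generated code keeps the current context in rsi and expects it to survive the write barrier, which is what the added ASSERT documents. A minimal hypothetical caller illustrating that contract might look like the following; the register choices, the field offset, and the StoreWithWriteBarrier name are assumptions for illustration, not actual V8 call sites.

#define __ ACCESS_MASM(masm)

// Hypothetical: store a value into an object field and emit the write
// barrier. rbx = object, rax = value, rcx = scratch; rsi holds the context.
static void StoreWithWriteBarrier(MacroAssembler* masm) {
  __ movq(FieldOperand(rbx, JSObject::kPropertiesOffset), rax);
  // RecordWrite may clobber rbx, rax and rcx, but must leave rsi intact,
  // which is exactly what the new ASSERT in RecordWrite checks.
  __ RecordWrite(rbx, JSObject::kPropertiesOffset, rax, rcx);
  // Still safe to read through the context register afterwards.
  __ movq(rdx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
}

#undef __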
(...skipping 2344 matching lines...)
2535 CodePatcher::~CodePatcher() { | 2540 CodePatcher::~CodePatcher() { |
2536 // Indicate that code has changed. | 2541 // Indicate that code has changed. |
2537 CPU::FlushICache(address_, size_); | 2542 CPU::FlushICache(address_, size_); |
2538 | 2543 |
2539 // Check that the code was patched as expected. | 2544 // Check that the code was patched as expected. |
2540 ASSERT(masm_.pc_ == address_ + size_); | 2545 ASSERT(masm_.pc_ == address_ + size_); |
2541 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); | 2546 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); |
2542 } | 2547 } |
2543 | 2548 |
2544 } } // namespace v8::internal | 2549 } } // namespace v8::internal |
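A brief usage sketch of the CodePatcher contract (hedged; the PatchWithNops helper and the choice of one-byte nops are illustrative assumptions, not code from this file): the constructor points a MacroAssembler at the target address, the caller emits exactly size bytes, and the destructor shown above flushes the instruction cache and asserts that the region was filled exactly.

// Hypothetical illustration of how CodePatcher is typically used.
static void PatchWithNops(byte* address, int size) {
  CodePatcher patcher(address, size);
  for (int i = 0; i < size; i++) {
    patcher.masm()->nop();  // one-byte nop on x64
  }
  // ~CodePatcher runs here: it calls CPU::FlushICache(address, size) and
  // the ASSERTs verify that exactly `size` bytes of code were emitted.
}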