| OLD | NEW |
| 1 // Copyright 2006-2009 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2009 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 80 matching lines...) |
| 91 } else { | 91 } else { |
| 92 int32_t new_space_start = reinterpret_cast<int32_t>( | 92 int32_t new_space_start = reinterpret_cast<int32_t>( |
| 93 ExternalReference::new_space_start().address()); | 93 ExternalReference::new_space_start().address()); |
| 94 lea(scratch, Operand(object, -new_space_start)); | 94 lea(scratch, Operand(object, -new_space_start)); |
| 95 and_(scratch, Heap::NewSpaceMask()); | 95 and_(scratch, Heap::NewSpaceMask()); |
| 96 j(cc, branch); | 96 j(cc, branch); |
| 97 } | 97 } |
| 98 } | 98 } |
| 99 | 99 |
| 100 | 100 |
| 101 void MacroAssembler::RecordWrite(Register object, int offset, | 101 void MacroAssembler::RecordWrite(Register object, |
| 102 Register value, Register scratch) { | 102 int offset, |
| | 103 Register value, |
| | 104 Register scratch) { |
| 103 // The compiled code assumes that record write doesn't change the | 105 // The compiled code assumes that record write doesn't change the |
| 104 // context register, so we check that none of the clobbered | 106 // context register, so we check that none of the clobbered |
| 105 // registers are esi. | 107 // registers are esi. |
| 106 ASSERT(!object.is(esi) && !value.is(esi) && !scratch.is(esi)); | 108 ASSERT(!object.is(esi) && !value.is(esi) && !scratch.is(esi)); |
| 107 | 109 |
| 108 // First, check if a write barrier is even needed. The tests below | 110 // First, check if a write barrier is even needed. The tests below |
| 109 // catch stores of Smis and stores into young gen. | 111 // catch stores of Smis and stores into young gen. |
| 110 Label done; | 112 Label done; |
| 111 | 113 |
| 112 // Skip barrier if writing a smi. | 114 // Skip barrier if writing a smi. |
| (...skipping 1486 matching lines...) |
| 1599 | 1601 |
| 1600 // Check that the code was patched as expected. | 1602 // Check that the code was patched as expected. |
| 1601 ASSERT(masm_.pc_ == address_ + size_); | 1603 ASSERT(masm_.pc_ == address_ + size_); |
| 1602 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); | 1604 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); |
| 1603 } | 1605 } |
| 1604 | 1606 |
| 1605 | 1607 |
| 1606 } } // namespace v8::internal | 1608 } } // namespace v8::internal |
| 1607 | 1609 |
| 1608 #endif // V8_TARGET_ARCH_IA32 | 1610 #endif // V8_TARGET_ARCH_IA32 |
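For readers skimming the diff: the InNewSpace fragment above (the lea/and_/j sequence at NEW lines 94-96) decides whether `object` lies in the young generation by subtracting `new_space_start` and masking off the in-region offset bits, and RecordWrite uses that result to skip the write barrier for stores into young-generation objects. Below is a minimal C++ sketch of the same test, assuming the mask is the complement of (region size - 1); the constant names and the 8 MB size are illustrative, not taken from this CL (the real values come from Heap::NewSpaceMask() and ExternalReference::new_space_start()).

```cpp
#include <cstdint>

// Illustrative, assumed constants; V8 supplies the real mask and region start.
constexpr std::uint32_t kNewSpaceSize = 8u * 1024 * 1024;
constexpr std::uint32_t kNewSpaceMask = ~(kNewSpaceSize - 1);

// Equivalent of the lea/and_/j sequence: subtract the region start, then mask.
// A zero result means the address falls inside the new-space region, in which
// case RecordWrite can take the early exit and skip the barrier.
inline bool InNewSpace(std::uint32_t addr, std::uint32_t new_space_start) {
  return ((addr - new_space_start) & kNewSpaceMask) == 0;
}
```

Using lea for the subtraction keeps `object` intact and folds the displacement into a single instruction, which is why the generated sequence writes only into `scratch` before the conditional jump.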