| OLD | NEW |
| 1 // Copyright 2009 the V8 project authors. All rights reserved. | 1 // Copyright 2009 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 208 matching lines...) | (...skipping 208 matching lines...) |
| 219 | 219 |
| 220 // Test that the object address is not in the new space. We cannot | 220 // Test that the object address is not in the new space. We cannot |
| 221 // set remembered set bits in the new space. | 221 // set remembered set bits in the new space. |
| 222 movq(scratch, object); | 222 movq(scratch, object); |
| 223 ASSERT(is_int32(static_cast<int64_t>(Heap::NewSpaceMask()))); | 223 ASSERT(is_int32(static_cast<int64_t>(Heap::NewSpaceMask()))); |
| 224 and_(scratch, Immediate(static_cast<int32_t>(Heap::NewSpaceMask()))); | 224 and_(scratch, Immediate(static_cast<int32_t>(Heap::NewSpaceMask()))); |
| 225 movq(kScratchRegister, ExternalReference::new_space_start()); | 225 movq(kScratchRegister, ExternalReference::new_space_start()); |
| 226 cmpq(scratch, kScratchRegister); | 226 cmpq(scratch, kScratchRegister); |
| 227 j(equal, &done); | 227 j(equal, &done); |
| 228 | 228 |
| 229 if ((offset > 0) && (offset < Page::kMaxHeapObjectSize)) { | 229 // The offset is relative to a tagged or untagged HeapObject pointer, |
| | 230 // so either offset or offset + kHeapObjectTag must be a |
| | 231 // multiple of kPointerSize. |
| | 232 ASSERT(IsAligned(offset, kPointerSize) || |
| | 233 IsAligned(offset + kHeapObjectTag, kPointerSize)); |
| | 234 |
| | 235 // We use optimized write barrier code if the word being written to is not in |
| | 236 // a large object page, or is in the first "page" of a large object page. |
| | 237 // We make sure that an offset is inside the right limits whether it is |
| | 238 // tagged or untagged. |
| | 239 if ((offset > 0) && (offset < Page::kMaxHeapObjectSize - kHeapObjectTag)) { |
| 230 // Compute the bit offset in the remembered set, leave it in 'value'. | 240 // Compute the bit offset in the remembered set, leave it in 'value'. |
| 231 lea(scratch, Operand(object, offset)); | 241 lea(scratch, Operand(object, offset)); |
| 232 ASSERT(is_int32(Page::kPageAlignmentMask)); | 242 ASSERT(is_int32(Page::kPageAlignmentMask)); |
| 233 and_(scratch, Immediate(static_cast<int32_t>(Page::kPageAlignmentMask))); | 243 and_(scratch, Immediate(static_cast<int32_t>(Page::kPageAlignmentMask))); |
| 234 shr(scratch, Immediate(kObjectAlignmentBits)); | 244 shr(scratch, Immediate(kObjectAlignmentBits)); |
| 235 | 245 |
| 236 // Compute the page address from the heap object pointer, leave it in | 246 // Compute the page address from the heap object pointer, leave it in |
| 237 // 'object' (immediate value is sign extended). | 247 // 'object' (immediate value is sign extended). |
| 238 and_(object, Immediate(~Page::kPageAlignmentMask)); | 248 and_(object, Immediate(~Page::kPageAlignmentMask)); |
| 239 | 249 |
| (...skipping 2397 matching lines...) | (...skipping 2397 matching lines...) |
| 2637 CodePatcher::~CodePatcher() { | 2647 CodePatcher::~CodePatcher() { |
| 2638 // Indicate that code has changed. | 2648 // Indicate that code has changed. |
| 2639 CPU::FlushICache(address_, size_); | 2649 CPU::FlushICache(address_, size_); |
| 2640 | 2650 |
| 2641 // Check that the code was patched as expected. | 2651 // Check that the code was patched as expected. |
| 2642 ASSERT(masm_.pc_ == address_ + size_); | 2652 ASSERT(masm_.pc_ == address_ + size_); |
| 2643 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); | 2653 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); |
| 2644 } | 2654 } |
| 2645 | 2655 |
| 2646 } } // namespace v8::internal | 2656 } } // namespace v8::internal |
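
For clarity, here is a minimal standalone sketch (plain C++, not V8 source; the constants below are illustrative stand-ins for the real values in the V8 headers) of the alignment/limit check and the remembered-set bit computation that the patched lines above perform:

    #include <assert.h>
    #include <stdint.h>

    // Illustrative stand-ins for the V8 constants referenced in the hunk.
    const intptr_t kHeapObjectTag = 1;
    const intptr_t kPointerSize = 8;          // x64 pointer size
    const intptr_t kObjectAlignmentBits = 3;  // objects are 8-byte aligned
    const intptr_t kPageSize = 1 << 13;       // hypothetical page size
    const intptr_t kPageAlignmentMask = kPageSize - 1;
    const intptr_t kMaxHeapObjectSize = kPageSize;

    // Mirrors the lea/and/shr sequence: returns the remembered-set bit index
    // for a store at object + offset.
    intptr_t RememberedSetBitOffset(intptr_t object, intptr_t offset) {
      // Either offset or offset + kHeapObjectTag is pointer-aligned, so the
      // target word is aligned whether 'object' still carries the heap-object
      // tag or has already been untagged.
      assert(offset % kPointerSize == 0 ||
             (offset + kHeapObjectTag) % kPointerSize == 0);
      // The optimized path only applies to offsets inside the first page;
      // subtracting the tag keeps a tagged pointer from stepping past the limit.
      assert(offset > 0 && offset < kMaxHeapObjectSize - kHeapObjectTag);
      intptr_t address = object + offset;               // lea(scratch, ...)
      intptr_t in_page = address & kPageAlignmentMask;  // and_(scratch, ...)
      return in_page >> kObjectAlignmentBits;           // shr(scratch, ...)
    }

The page address itself is then simply address & ~kPageAlignmentMask, which is what the subsequent and_(object, Immediate(~Page::kPageAlignmentMask)) computes in the context lines.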