OLD | NEW |
1 // Copyright 2009 the V8 project authors. All rights reserved. | 1 // Copyright 2009 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 57 matching lines...)
68 | 68 |
69 void MacroAssembler::StackLimitCheck(Label* on_stack_overflow) { | 69 void MacroAssembler::StackLimitCheck(Label* on_stack_overflow) { |
70 CompareRoot(rsp, Heap::kStackLimitRootIndex); | 70 CompareRoot(rsp, Heap::kStackLimitRootIndex); |
71 j(below, on_stack_overflow); | 71 j(below, on_stack_overflow); |
72 } | 72 } |
73 | 73 |
74 | 74 |
75 void MacroAssembler::RecordWriteHelper(Register object, | 75 void MacroAssembler::RecordWriteHelper(Register object, |
76 Register addr, | 76 Register addr, |
77 Register scratch) { | 77 Register scratch) { |
| 78 if (FLAG_debug_code) { |
| 79 // Check that the object is not in new space. |
| 80 Label not_in_new_space; |
| 81 InNewSpace(object, scratch, not_equal, &not_in_new_space);
| 82 Abort("new-space object passed to RecordWriteHelper"); |
| 83 bind(&not_in_new_space);
| 84 } |
| 85 |
78 Label fast; | 86 Label fast; |
79 | 87 |
80 // Compute the page start address from the heap object pointer, and reuse | 88 // Compute the page start address from the heap object pointer, and reuse |
81 // the 'object' register for it. | 89 // the 'object' register for it. |
82 ASSERT(is_int32(~Page::kPageAlignmentMask)); | 90 ASSERT(is_int32(~Page::kPageAlignmentMask)); |
83 and_(object, | 91 and_(object, |
84 Immediate(static_cast<int32_t>(~Page::kPageAlignmentMask))); | 92 Immediate(static_cast<int32_t>(~Page::kPageAlignmentMask))); |
85 Register page_start = object; | 93 Register page_start = object; |
86 | 94 |
87 // Compute the bit addr in the remembered set/index of the pointer in the | 95 // Compute the bit addr in the remembered set/index of the pointer in the |
(...skipping 62 matching lines...)
150 int offset, | 158 int offset, |
151 Register value, | 159 Register value, |
152 Register smi_index) { | 160 Register smi_index) { |
153 // The compiled code assumes that record write doesn't change the | 161 // The compiled code assumes that record write doesn't change the |
154 // context register, so we check that none of the clobbered | 162 // context register, so we check that none of the clobbered |
155 // registers are rsi. | 163 // registers are rsi. |
156 ASSERT(!object.is(rsi) && !value.is(rsi) && !smi_index.is(rsi)); | 164 ASSERT(!object.is(rsi) && !value.is(rsi) && !smi_index.is(rsi)); |
157 | 165 |
158 // First, check if a remembered set write is even needed. The tests below | 166 // First, check if a remembered set write is even needed. The tests below |
159 // catch stores of Smis and stores into young gen (which does not have space | 167 // catch stores of Smis and stores into young gen (which does not have space |
160 // for the remembered set bits. | 168 // for the remembered set bits). |
161 Label done; | 169 Label done; |
162 JumpIfSmi(value, &done); | 170 JumpIfSmi(value, &done); |
163 | 171 |
164 RecordWriteNonSmi(object, offset, value, smi_index); | 172 RecordWriteNonSmi(object, offset, value, smi_index); |
165 bind(&done); | 173 bind(&done); |
166 | 174 |
167 // Clobber all input registers when running with the debug-code flag | 175 // Clobber all input registers when running with the debug-code flag |
168 // turned on to provoke errors. This clobbering repeats the | 176 // turned on to provoke errors. This clobbering repeats the |
169 // clobbering done inside RecordWriteNonSmi but it's necessary to | 177 // clobbering done inside RecordWriteNonSmi but it's necessary to |
170 // avoid having the fast case for smis leave the registers | 178 // avoid having the fast case for smis leave the registers |
(...skipping 2501 matching lines...)
2672 CodePatcher::~CodePatcher() { | 2680 CodePatcher::~CodePatcher() { |
2673 // Indicate that code has changed. | 2681 // Indicate that code has changed. |
2674 CPU::FlushICache(address_, size_); | 2682 CPU::FlushICache(address_, size_); |
2675 | 2683 |
2676 // Check that the code was patched as expected. | 2684 // Check that the code was patched as expected. |
2677 ASSERT(masm_.pc_ == address_ + size_); | 2685 ASSERT(masm_.pc_ == address_ + size_); |
2678 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); | 2686 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); |
2679 } | 2687 } |
2680 | 2688 |
2681 } } // namespace v8::internal | 2689 } } // namespace v8::internal |