OLD | NEW |
1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/compiler/code-assembler.h" | 5 #include "src/compiler/code-assembler.h" |
6 | 6 |
7 #include <ostream> | 7 #include <ostream> |
8 | 8 |
9 #include "src/code-factory.h" | 9 #include "src/code-factory.h" |
10 #include "src/compiler/graph.h" | 10 #include "src/compiler/graph.h" |
(...skipping 101 matching lines...)
112 } | 112 } |
113 | 113 |
114 Node* CodeAssembler::ExternalConstant(ExternalReference address) { | 114 Node* CodeAssembler::ExternalConstant(ExternalReference address) { |
115 return raw_assembler_->ExternalConstant(address); | 115 return raw_assembler_->ExternalConstant(address); |
116 } | 116 } |
117 | 117 |
118 Node* CodeAssembler::Float64Constant(double value) { | 118 Node* CodeAssembler::Float64Constant(double value) { |
119 return raw_assembler_->Float64Constant(value); | 119 return raw_assembler_->Float64Constant(value); |
120 } | 120 } |
121 | 121 |
122 Node* CodeAssembler::BooleanMapConstant() { | |
123 return HeapConstant(isolate()->factory()->boolean_map()); | |
124 } | |
125 | |
126 Node* CodeAssembler::EmptyStringConstant() { | |
127 return LoadRoot(Heap::kempty_stringRootIndex); | |
128 } | |
129 | |
130 Node* CodeAssembler::HeapNumberMapConstant() { | |
131 return HeapConstant(isolate()->factory()->heap_number_map()); | |
132 } | |
133 | |
134 Node* CodeAssembler::NaNConstant() { | 122 Node* CodeAssembler::NaNConstant() { |
135 return LoadRoot(Heap::kNanValueRootIndex); | 123 return LoadRoot(Heap::kNanValueRootIndex); |
136 } | 124 } |
137 | 125 |
138 Node* CodeAssembler::NoContextConstant() { | |
139 return SmiConstant(Smi::FromInt(0)); | |
140 } | |
141 | |
142 Node* CodeAssembler::NullConstant() { | |
143 return LoadRoot(Heap::kNullValueRootIndex); | |
144 } | |
145 | |
146 Node* CodeAssembler::UndefinedConstant() { | |
147 return LoadRoot(Heap::kUndefinedValueRootIndex); | |
148 } | |
149 | |
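Note: the helpers deleted above are all one-line wrappers over primitives that survive in CodeAssembler: immutable roots go through LoadRoot, maps come from the isolate's factory via HeapConstant, and the "no context" sentinel is Smi zero. Callers that still need these values can inline the equivalents; a sketch, assuming an `assembler` and `isolate` in scope (illustrative, not part of this CL):

    Node* undefined = assembler->LoadRoot(Heap::kUndefinedValueRootIndex);
    Node* null_value = assembler->LoadRoot(Heap::kNullValueRootIndex);
    Node* empty_string = assembler->LoadRoot(Heap::kempty_stringRootIndex);
    Node* no_context = assembler->SmiConstant(Smi::FromInt(0));
    Node* boolean_map =
        assembler->HeapConstant(isolate->factory()->boolean_map());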
150 Node* CodeAssembler::Parameter(int value) { | 126 Node* CodeAssembler::Parameter(int value) { |
151 return raw_assembler_->Parameter(value); | 127 return raw_assembler_->Parameter(value); |
152 } | 128 } |
153 | 129 |
154 void CodeAssembler::Return(Node* value) { | 130 void CodeAssembler::Return(Node* value) { |
155 return raw_assembler_->Return(value); | 131 return raw_assembler_->Return(value); |
156 } | 132 } |
157 | 133 |
158 void CodeAssembler::Bind(CodeAssembler::Label* label) { return label->Bind(); } | 134 void CodeAssembler::Bind(CodeAssembler::Label* label) { return label->Bind(); } |
159 | 135 |
160 Node* CodeAssembler::LoadFramePointer() { | 136 Node* CodeAssembler::LoadFramePointer() { |
161 return raw_assembler_->LoadFramePointer(); | 137 return raw_assembler_->LoadFramePointer(); |
162 } | 138 } |
163 | 139 |
164 Node* CodeAssembler::LoadParentFramePointer() { | 140 Node* CodeAssembler::LoadParentFramePointer() { |
165 return raw_assembler_->LoadParentFramePointer(); | 141 return raw_assembler_->LoadParentFramePointer(); |
166 } | 142 } |
167 | 143 |
168 Node* CodeAssembler::LoadStackPointer() { | 144 Node* CodeAssembler::LoadStackPointer() { |
169 return raw_assembler_->LoadStackPointer(); | 145 return raw_assembler_->LoadStackPointer(); |
170 } | 146 } |
171 | 147 |
172 Node* CodeAssembler::SmiShiftBitsConstant() { | 148 Node* CodeAssembler::SmiShiftBitsConstant() { |
173 return IntPtrConstant(kSmiShiftSize + kSmiTagSize); | 149 return IntPtrConstant(kSmiShiftSize + kSmiTagSize); |
174 } | 150 } |
175 | 151 |
176 Node* CodeAssembler::SmiTag(Node* value) { | |
177 return raw_assembler_->WordShl(value, SmiShiftBitsConstant()); | |
178 } | |
179 | |
180 Node* CodeAssembler::SmiUntag(Node* value) { | |
181 return raw_assembler_->WordSar(value, SmiShiftBitsConstant()); | |
182 } | |
183 | |
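Note: SmiTag and SmiUntag leave this file with the CL, but the arithmetic is worth spelling out since SmiShiftBitsConstant stays behind: tagging shifts the payload left by kSmiShiftSize + kSmiTagSize, and untagging shifts right arithmetically so negative values survive the round trip. A host-side sketch of the same arithmetic, assuming a 32-bit configuration where the combined shift is 1 bit (the real code emits graph nodes, it does not compute on the host):

    #include <cstdint>

    // kSmiShiftSize + kSmiTagSize; 1 on 32-bit targets (assumed here).
    constexpr int kSmiShiftBits = 1;
    constexpr intptr_t SmiTag(intptr_t value) { return value << kSmiShiftBits; }
    // Arithmetic right shift, so a negative payload keeps its sign.
    constexpr intptr_t SmiUntag(intptr_t tagged) { return tagged >> kSmiShiftBits; }
    static_assert(SmiUntag(SmiTag(21)) == 21, "tag/untag round-trips");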
184 #define DEFINE_CODE_ASSEMBLER_BINARY_OP(name) \ | 152 #define DEFINE_CODE_ASSEMBLER_BINARY_OP(name) \ |
185 Node* CodeAssembler::name(Node* a, Node* b) { \ | 153 Node* CodeAssembler::name(Node* a, Node* b) { \ |
186 return raw_assembler_->name(a, b); \ | 154 return raw_assembler_->name(a, b); \ |
187 } | 155 } |
188 CODE_ASSEMBLER_BINARY_OP_LIST(DEFINE_CODE_ASSEMBLER_BINARY_OP) | 156 CODE_ASSEMBLER_BINARY_OP_LIST(DEFINE_CODE_ASSEMBLER_BINARY_OP) |
189 #undef DEFINE_CODE_ASSEMBLER_BINARY_OP | 157 #undef DEFINE_CODE_ASSEMBLER_BINARY_OP |
190 | 158 |
191 Node* CodeAssembler::WordShl(Node* value, int shift) { | 159 Node* CodeAssembler::WordShl(Node* value, int shift) { |
192 return raw_assembler_->WordShl(value, IntPtrConstant(shift)); | 160 return raw_assembler_->WordShl(value, IntPtrConstant(shift)); |
193 } | 161 } |
(...skipping 12 matching lines...)
206 value = raw_assembler_->ChangeUint32ToUint64(value); | 174 value = raw_assembler_->ChangeUint32ToUint64(value); |
207 } | 175 } |
208 return value; | 176 return value; |
209 } | 177 } |
210 | 178 |
211 #define DEFINE_CODE_ASSEMBLER_UNARY_OP(name) \ | 179 #define DEFINE_CODE_ASSEMBLER_UNARY_OP(name) \ |
212 Node* CodeAssembler::name(Node* a) { return raw_assembler_->name(a); } | 180 Node* CodeAssembler::name(Node* a) { return raw_assembler_->name(a); } |
213 CODE_ASSEMBLER_UNARY_OP_LIST(DEFINE_CODE_ASSEMBLER_UNARY_OP) | 181 CODE_ASSEMBLER_UNARY_OP_LIST(DEFINE_CODE_ASSEMBLER_UNARY_OP) |
214 #undef DEFINE_CODE_ASSEMBLER_UNARY_OP | 182 #undef DEFINE_CODE_ASSEMBLER_UNARY_OP |
215 | 183 |
| 184 Node* CodeAssembler::Load(MachineType rep, Node* base) { |
| 185 return raw_assembler_->Load(rep, base); |
| 186 } |
| 187 |
| 188 Node* CodeAssembler::Load(MachineType rep, Node* base, Node* index) { |
| 189 return raw_assembler_->Load(rep, base, index); |
| 190 } |
| 191 |
| 192 Node* CodeAssembler::AtomicLoad(MachineType rep, Node* base, Node* index) { |
| 193 return raw_assembler_->AtomicLoad(rep, base, index); |
| 194 } |
| 195 |
216 Node* CodeAssembler::LoadRoot(Heap::RootListIndex root_index) { | 196 Node* CodeAssembler::LoadRoot(Heap::RootListIndex root_index) { |
217 if (isolate()->heap()->RootCanBeTreatedAsConstant(root_index)) { | 197 if (isolate()->heap()->RootCanBeTreatedAsConstant(root_index)) { |
218 Handle<Object> root = isolate()->heap()->root_handle(root_index); | 198 Handle<Object> root = isolate()->heap()->root_handle(root_index); |
219 if (root->IsSmi()) { | 199 if (root->IsSmi()) { |
220 return SmiConstant(Smi::cast(*root)); | 200 return SmiConstant(Smi::cast(*root)); |
221 } else { | 201 } else { |
222 return HeapConstant(Handle<HeapObject>::cast(root)); | 202 return HeapConstant(Handle<HeapObject>::cast(root)); |
223 } | 203 } |
224 } | 204 } |
225 | 205 |
226 compiler::Node* roots_array_start = | 206 compiler::Node* roots_array_start = |
227 ExternalConstant(ExternalReference::roots_array_start(isolate())); | 207 ExternalConstant(ExternalReference::roots_array_start(isolate())); |
228 USE(roots_array_start); | 208 USE(roots_array_start); |
229 | 209 |
230 // TODO(danno): Implement thee root-access case where the root is not constant | 210 // TODO(danno): Implement the root-access case where the root is not constant |
231 // and must be loaded from the root array. | 211 // and must be loaded from the root array. |
232 UNIMPLEMENTED(); | 212 UNIMPLEMENTED(); |
233 return nullptr; | 213 return nullptr; |
234 } | 214 } |
235 | 215 |
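Note: the TODO is the interesting half of LoadRoot: mutable roots cannot be embedded as compile-time constants and have to be fetched from the roots array at runtime, which is why roots_array_start is already computed (and parked behind USE). A hedged sketch of the missing branch, built only from primitives visible in this file; the fixed-offset addressing is an assumption about the roots array layout, not the eventual implementation:

    // Hypothetical fallback for non-constant roots: a tagged load at
    // roots_array_start + root_index * kPointerSize.
    return Load(MachineType::AnyTagged(), roots_array_start,
                IntPtrConstant(root_index * kPointerSize));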
236 Node* CodeAssembler::AllocateRawUnaligned(Node* size_in_bytes, | |
237 AllocationFlags flags, | |
238 Node* top_address, | |
239 Node* limit_address) { | |
240 Node* top = Load(MachineType::Pointer(), top_address); | |
241 Node* limit = Load(MachineType::Pointer(), limit_address); | |
242 | |
243 // If there's not enough space, call the runtime. | |
244 RawMachineLabel runtime_call(RawMachineLabel::kDeferred), no_runtime_call, | |
245 merge_runtime; | |
246 raw_assembler_->Branch( | |
247 raw_assembler_->IntPtrLessThan(IntPtrSub(limit, top), size_in_bytes), | |
248 &runtime_call, &no_runtime_call); | |
249 | |
250 raw_assembler_->Bind(&runtime_call); | |
251 // AllocateInTargetSpace does not use the context. | |
252 Node* context = IntPtrConstant(0); | |
253 Node* runtime_flags = SmiTag(Int32Constant( | |
254 AllocateDoubleAlignFlag::encode(false) | | |
255 AllocateTargetSpace::encode(flags & kPretenured | |
256 ? AllocationSpace::OLD_SPACE | |
257 : AllocationSpace::NEW_SPACE))); | |
258 Node* runtime_result = CallRuntime(Runtime::kAllocateInTargetSpace, context, | |
259 SmiTag(size_in_bytes), runtime_flags); | |
260 raw_assembler_->Goto(&merge_runtime); | |
261 | |
262 // When there is enough space, return `top' and bump it up. | |
263 raw_assembler_->Bind(&no_runtime_call); | |
264 Node* no_runtime_result = top; | |
265 StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address, | |
266 IntPtrAdd(top, size_in_bytes)); | |
267 no_runtime_result = | |
268 IntPtrAdd(no_runtime_result, IntPtrConstant(kHeapObjectTag)); | |
269 raw_assembler_->Goto(&merge_runtime); | |
270 | |
271 raw_assembler_->Bind(&merge_runtime); | |
272 return raw_assembler_->Phi(MachineType::PointerRepresentation(), | |
273 runtime_result, no_runtime_result); | |
274 } | |
275 | |
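Note: the deleted AllocateRawUnaligned is a textbook bump-pointer fast path: if limit - top is smaller than the request, branch to a deferred runtime call; otherwise the old top becomes the new object, top advances by the size, and kHeapObjectTag is added to the result, with a Phi merging the two outcomes. The control flow the graph encodes, as ordinary C++ (a host-side sketch; CallRuntimeAllocate is a hypothetical stand-in for the Runtime::kAllocateInTargetSpace call):

    #include <cstddef>
    #include <cstdint>

    constexpr uintptr_t kHeapObjectTag = 1;      // V8 tags heap pointers with 1
    uintptr_t CallRuntimeAllocate(size_t size);  // slow path, defined elsewhere

    uintptr_t AllocateUnaligned(uintptr_t* top, uintptr_t limit, size_t size) {
      if (limit - *top < size) {
        return CallRuntimeAllocate(size);  // deferred: linear space exhausted
      }
      uintptr_t result = *top;         // old top is the new object's address
      *top += size;                    // bump the allocation pointer
      return result + kHeapObjectTag;  // hand back a tagged HeapObject pointer
    }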
276 Node* CodeAssembler::AllocateRawAligned(Node* size_in_bytes, | |
277 AllocationFlags flags, | |
278 Node* top_address, | |
279 Node* limit_address) { | |
280 Node* top = Load(MachineType::Pointer(), top_address); | |
281 Node* limit = Load(MachineType::Pointer(), limit_address); | |
282 Node* adjusted_size = size_in_bytes; | |
283 if (flags & kDoubleAlignment) { | |
284 // TODO(epertoso): Simd128 alignment. | |
285 RawMachineLabel aligned, not_aligned, merge; | |
286 raw_assembler_->Branch(WordAnd(top, IntPtrConstant(kDoubleAlignmentMask)), | |
287 &not_aligned, &aligned); |
288 | |
289 raw_assembler_->Bind(&not_aligned); |
290 Node* not_aligned_size = | |
291 IntPtrAdd(size_in_bytes, IntPtrConstant(kPointerSize)); | |
292 raw_assembler_->Goto(&merge); | |
293 | |
294 raw_assembler_->Bind(&aligned); | |
295 raw_assembler_->Goto(&merge); | |
296 | |
297 raw_assembler_->Bind(&merge); | |
298 adjusted_size = raw_assembler_->Phi(MachineType::PointerRepresentation(), | |
299 not_aligned_size, adjusted_size); | |
300 } | |
301 | |
302 Node* address = AllocateRawUnaligned(adjusted_size, kNone, top, limit); | |
303 | |
304 RawMachineLabel needs_filler, doesnt_need_filler, merge_address; | |
305 raw_assembler_->Branch( | |
306 raw_assembler_->IntPtrEqual(adjusted_size, size_in_bytes), | |
307 &doesnt_need_filler, &needs_filler); | |
308 | |
309 raw_assembler_->Bind(&needs_filler); | |
310 // Store a filler and increase the address by kPointerSize. | |
311 // TODO(epertoso): this code assumes that we only align to kDoubleSize. Change | |
312 // it when Simd128 alignment is supported. | |
313 StoreNoWriteBarrier(MachineType::PointerRepresentation(), top, | |
314 LoadRoot(Heap::kOnePointerFillerMapRootIndex)); | |
315 Node* address_with_filler = IntPtrAdd(address, IntPtrConstant(kPointerSize)); | |
316 raw_assembler_->Goto(&merge_address); | |
317 | |
318 raw_assembler_->Bind(&doesnt_need_filler); | |
319 Node* address_without_filler = address; | |
320 raw_assembler_->Goto(&merge_address); | |
321 | |
322 raw_assembler_->Bind(&merge_address); | |
323 address = raw_assembler_->Phi(MachineType::PointerRepresentation(), | |
324 address_with_filler, address_without_filler); | |
325 // Update the top. | |
326 StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address, | |
327 IntPtrAdd(top, adjusted_size)); | |
328 return address; | |
329 } | |
330 | |
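Note: AllocateRawAligned layers double alignment on top of that: when top is misaligned it requests kPointerSize extra bytes, and if the padding was actually taken it writes a one-pointer filler map at the raw start (so the GC can walk over the gap) and advances the returned address past it. Continuing the sketch above (AllocateUnaligned and kHeapObjectTag as before; WriteOnePointerFiller is hypothetical), under the CL's own stated assumption that alignment is only ever to kDoubleSize:

    constexpr size_t kPointerSize = 4;             // 32-bit host, assumed
    constexpr uintptr_t kDoubleAlignmentMask = 7;  // 8-byte double alignment
    void WriteOnePointerFiller(uintptr_t raw);     // marks the gap for the GC

    uintptr_t AllocateAligned(uintptr_t* top, uintptr_t limit, size_t size) {
      bool misaligned = (*top & kDoubleAlignmentMask) != 0;
      size_t adjusted = misaligned ? size + kPointerSize : size;
      uintptr_t address = AllocateUnaligned(top, limit, adjusted);
      if (misaligned) {
        WriteOnePointerFiller(address - kHeapObjectTag);  // filler at raw start
        address += kPointerSize;                          // now double-aligned
      }
      return address;
    }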
331 Node* CodeAssembler::Allocate(int size_in_bytes, AllocationFlags flags) { | |
332 bool const new_space = !(flags & kPretenured); | |
333 Node* top_address = ExternalConstant( | |
334 new_space | |
335 ? ExternalReference::new_space_allocation_top_address(isolate()) | |
336 : ExternalReference::old_space_allocation_top_address(isolate())); | |
337 Node* limit_address = ExternalConstant( | |
338 new_space | |
339 ? ExternalReference::new_space_allocation_limit_address(isolate()) | |
340 : ExternalReference::old_space_allocation_limit_address(isolate())); | |
341 | |
342 #ifdef V8_HOST_ARCH_32_BIT | |
343 if (flags & kDoubleAlignment) { | |
344 return AllocateRawAligned(IntPtrConstant(size_in_bytes), flags, top_address, | |
345 limit_address); | |
346 } | |
347 #endif | |
348 | |
349 return AllocateRawUnaligned(IntPtrConstant(size_in_bytes), flags, top_address, | |
350 limit_address); | |
351 } | |
352 | |
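Note: Allocate itself just selects which top/limit pair to bump: kPretenured routes the request to old space, anything else to new space, and the aligned path is only compiled on 32-bit hosts, where the word-sized bump pointer cannot guarantee the 8-byte alignment doubles need. A hypothetical call site (names assumed, not from this file):

    // 24 bytes in new space, double-aligned where the host requires it.
    Node* object = assembler->Allocate(24, CodeAssembler::kDoubleAlignment);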
353 Node* CodeAssembler::InnerAllocate(Node* previous, int offset) { | |
354 return IntPtrAdd(previous, IntPtrConstant(offset)); | |
355 } | |
356 | |
357 Node* CodeAssembler::Load(MachineType rep, Node* base) { | |
358 return raw_assembler_->Load(rep, base); | |
359 } | |
360 | |
361 Node* CodeAssembler::Load(MachineType rep, Node* base, Node* index) { | |
362 return raw_assembler_->Load(rep, base, index); | |
363 } | |
364 | |
365 Node* CodeAssembler::AtomicLoad(MachineType rep, Node* base, Node* index) { | |
366 return raw_assembler_->AtomicLoad(rep, base, index); | |
367 } | |
368 | |
369 Node* CodeAssembler::Store(MachineRepresentation rep, Node* base, Node* value) { | 216 Node* CodeAssembler::Store(MachineRepresentation rep, Node* base, Node* value) { |
370 return raw_assembler_->Store(rep, base, value, kFullWriteBarrier); | 217 return raw_assembler_->Store(rep, base, value, kFullWriteBarrier); |
371 } | 218 } |
372 | 219 |
373 Node* CodeAssembler::Store(MachineRepresentation rep, Node* base, Node* index, | 220 Node* CodeAssembler::Store(MachineRepresentation rep, Node* base, Node* index, |
374 Node* value) { | 221 Node* value) { |
375 return raw_assembler_->Store(rep, base, index, value, kFullWriteBarrier); | 222 return raw_assembler_->Store(rep, base, index, value, kFullWriteBarrier); |
376 } | 223 } |
377 | 224 |
378 Node* CodeAssembler::StoreNoWriteBarrier(MachineRepresentation rep, Node* base, | 225 Node* CodeAssembler::StoreNoWriteBarrier(MachineRepresentation rep, Node* base, |
(...skipping 462 matching lines...)
841 } | 688 } |
842 } | 689 } |
843 } | 690 } |
844 | 691 |
845 bound_ = true; | 692 bound_ = true; |
846 } | 693 } |
847 | 694 |
848 } // namespace compiler | 695 } // namespace compiler |
849 } // namespace internal | 696 } // namespace internal |
850 } // namespace v8 | 697 } // namespace v8 |