Chromium Code Reviews

Side by Side Diff: src/compiler/code-stub-assembler.cc

Issue 1735803003: [turbofan] Adds an Allocate macro to the CodeStubAssembler. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 4 years, 9 months ago
1 // Copyright 2015 the V8 project authors. All rights reserved. 1 // Copyright 2015 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/compiler/code-stub-assembler.h" 5 #include "src/compiler/code-stub-assembler.h"
6 6
7 #include <ostream> 7 #include <ostream>
8 8
9 #include "src/code-factory.h" 9 #include "src/code-factory.h"
10 #include "src/compiler/graph.h" 10 #include "src/compiler/graph.h"
(...skipping 179 matching lines...)
190 compiler::Node* roots_array_start = 190 compiler::Node* roots_array_start =
191 ExternalConstant(ExternalReference::roots_array_start(isolate())); 191 ExternalConstant(ExternalReference::roots_array_start(isolate()));
192 USE(roots_array_start); 192 USE(roots_array_start);
193 193
194 // TODO(danno): Implement the root-access case where the root is not constant 194 // TODO(danno): Implement the root-access case where the root is not constant
195 // and must be loaded from the root array. 195 // and must be loaded from the root array.
196 UNIMPLEMENTED(); 196 UNIMPLEMENTED();
197 return nullptr; 197 return nullptr;
198 } 198 }
199 199
200 Node* CodeStubAssembler::AllocateRawUnaligned(Node* size_in_bytes,
201 AllocationFlags flags,
202 Node* top_address,
203 Node* limit_address) {
204 Node* top = Load(MachineType::Pointer(), top_address);
205 Node* limit = Load(MachineType::Pointer(), limit_address);
206
207 // If there's not enough space, call the runtime.
208 RawMachineLabel runtime_call, no_runtime_call, merge_runtime;
209 raw_assembler_->Branch(
210 raw_assembler_->IntPtrLessThan(IntPtrSub(limit, top), size_in_bytes),
211 &runtime_call, &no_runtime_call);
212
213 raw_assembler_->Bind(&runtime_call);
214 // AllocateInTargetSpace does not use the context.
215 Node* context = IntPtrConstant(0);
216 Node* runtime_flags = SmiTag(Int32Constant(
217 AllocateDoubleAlignFlag::encode(false) |
218 AllocateTargetSpace::encode(flags & kPretenured
219 ? AllocationSpace::OLD_SPACE
220 : AllocationSpace::NEW_SPACE)));
221 Node* runtime_result = CallRuntime(Runtime::kAllocateInTargetSpace, context,
222 SmiTag(size_in_bytes), runtime_flags);
223 if (!(flags & kTagObject)) {
224 runtime_result = IntPtrSub(runtime_result, IntPtrConstant(1));
225 }
226 raw_assembler_->Goto(&merge_runtime);
227
228 // When there is enough space, return `top' and bump it up.
229 raw_assembler_->Bind(&no_runtime_call);
230 Node* no_runtime_result = top;
231 StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address,
232 IntPtrAdd(top, size_in_bytes));
233 if (flags & kTagObject) {
234 no_runtime_result = IntPtrAdd(no_runtime_result, IntPtrConstant(1));
235 }
236 raw_assembler_->Goto(&merge_runtime);
237
238 raw_assembler_->Bind(&merge_runtime);
239 return raw_assembler_->Phi(MachineType::PointerRepresentation(),
240 runtime_result, no_runtime_result);
241 }
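
For reference, a minimal plain-C++ sketch (not V8 code) of the control flow AllocateRawUnaligned emits: bump-pointer allocation with a runtime fallback. The RuntimeAllocateSketch helper, the Sketch function names and the tag value of 1 are illustrative stand-ins, not part of the patch.

#include <cstddef>
#include <cstdint>
#include <cstdlib>

// Stand-in for Runtime::kAllocateInTargetSpace: returns a tagged pointer
// (heap-object tag assumed to be 1).
static uintptr_t RuntimeAllocateSketch(size_t size_in_bytes) {
  return reinterpret_cast<uintptr_t>(std::malloc(size_in_bytes)) + 1;
}

// Mirrors AllocateRawUnaligned: bump-pointer allocation with a runtime fallback.
uintptr_t AllocateRawUnalignedSketch(uintptr_t* top_address,
                                     uintptr_t* limit_address,
                                     size_t size_in_bytes, bool tag_object) {
  uintptr_t top = *top_address;
  uintptr_t limit = *limit_address;
  if (static_cast<intptr_t>(limit - top) <
      static_cast<intptr_t>(size_in_bytes)) {
    // Not enough room before the limit: fall back to the runtime, which
    // hands back a tagged pointer; strip the tag unless the caller wants it.
    uintptr_t result = RuntimeAllocateSketch(size_in_bytes);
    return tag_object ? result : result - 1;
  }
  // Enough room: the object starts at the current top; bump top past it.
  *top_address = top + size_in_bytes;
  return tag_object ? top + 1 : top;
}
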
242
243 Node* CodeStubAssembler::AllocateRawAligned(Node* size_in_bytes,
244 AllocationFlags flags,
245 Node* top_address,
246 Node* limit_address) {
247 Node* top = Load(MachineType::Pointer(), top_address);
248 Node* limit = Load(MachineType::Pointer(), limit_address);
249 Node* adjusted_size = size_in_bytes;
250 if (flags & kDoubleAlignment) {
251 // TODO(epertoso): Simd128 alignment.
252 RawMachineLabel aligned, not_aligned, merge;
253 raw_assembler_->Branch(WordAnd(top, IntPtrConstant(kDoubleAlignmentMask)),
254 &not_aligned, &aligned);
255
256 raw_assembler_->Bind(&not_aligned);
257 Node* not_aligned_size =
258 IntPtrAdd(size_in_bytes, IntPtrConstant(kPointerSize));
259 raw_assembler_->Goto(&merge);
260
261 raw_assembler_->Bind(&aligned);
262 raw_assembler_->Goto(&merge);
263
264 raw_assembler_->Bind(&merge);
265 adjusted_size = raw_assembler_->Phi(MachineType::PointerRepresentation(),
266 not_aligned_size, adjusted_size);
267 }
268
269 // Call AllocateRawUnaligned with the potentially alignment-adjusted size, but
270 // don't let it tag the result for us.
 271 Node* address = AllocateRawUnaligned(
 272 adjusted_size, flags & ~kTagObject, top_address, limit_address);
273
274 RawMachineLabel needs_filler, doesnt_need_filler, merge_address;
275 raw_assembler_->Branch(
276 raw_assembler_->IntPtrEqual(adjusted_size, size_in_bytes),
277 &doesnt_need_filler, &needs_filler);
278
279 raw_assembler_->Bind(&needs_filler);
280 // Store a filler and increase the address by kPointerSize.
281 // TODO(epertoso): this code assumes that we only align to kDoubleSize. Change
282 // it when Simd128 alignment is supported.
283 StoreNoWriteBarrier(MachineType::PointerRepresentation(), top,
284 LoadRoot(Heap::kOnePointerFillerMapRootIndex));
285 Node* address_with_filler = IntPtrAdd(address, IntPtrConstant(kPointerSize));
286 raw_assembler_->Goto(&merge_address);
287
288 raw_assembler_->Bind(&doesnt_need_filler);
289 Node* address_without_filler = address;
290 raw_assembler_->Goto(&merge_address);
291
292 raw_assembler_->Bind(&merge_address);
293 address = raw_assembler_->Phi(MachineType::PointerRepresentation(),
294 address_with_filler, address_without_filler);
295 // Update the top.
296 StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address,
297 IntPtrAdd(address, size_in_bytes));
298 if (flags & kTagObject) {
299 address = IntPtrAdd(address, IntPtrConstant(1));
300 }
301 return address;
302 }
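
Building on the sketch above, a plain-C++ rendering of what AllocateRawAligned is meant to do on a 32-bit target: pad the request by one word when the current top is misaligned, allocate untagged, then plug the hole with a filler word so the object itself ends up 8-byte aligned. kWordSize, kDoubleAlignMask and kFillerWord are stand-ins for kPointerSize, kDoubleAlignmentMask and the one-pointer filler map; none of these names are in the patch.

// Stand-ins for kPointerSize, kDoubleAlignmentMask and the one-pointer filler map.
constexpr size_t kWordSize = sizeof(void*);
constexpr uintptr_t kDoubleAlignMask = 8 - 1;
constexpr uintptr_t kFillerWord = 0xF1;

// Mirrors AllocateRawAligned: if top is not 8-byte aligned, request one extra
// word, allocate untagged, write a filler word at the start of the block and
// hand out the word after it, so the object itself is aligned.
uintptr_t AllocateRawAlignedSketch(uintptr_t* top_address,
                                   uintptr_t* limit_address,
                                   size_t size_in_bytes, bool tag_object) {
  size_t adjusted_size = size_in_bytes;
  if (*top_address & kDoubleAlignMask) {
    adjusted_size += kWordSize;  // room for the filler in front of the object
  }
  uintptr_t address = AllocateRawUnalignedSketch(top_address, limit_address,
                                                 adjusted_size,
                                                 /*tag_object=*/false);
  if (adjusted_size != size_in_bytes) {
    // The unaligned call already bumped top by the padded size, so only the
    // front of the block needs patching: filler first, object right after it.
    *reinterpret_cast<uintptr_t*>(address) = kFillerWord;
    address += kWordSize;
  }
  return tag_object ? address + 1 : address;
}
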
303
304 Node* CodeStubAssembler::Allocate(int size_in_bytes, AllocationFlags flags) {
305 bool const new_space = !(flags & kPretenured);
306 Node* top_address = ExternalConstant(
307 new_space
308 ? ExternalReference::new_space_allocation_top_address(isolate())
309 : ExternalReference::old_space_allocation_top_address(isolate()));
310 Node* limit_address = ExternalConstant(
311 new_space
312 ? ExternalReference::new_space_allocation_limit_address(isolate())
313 : ExternalReference::old_space_allocation_limit_address(isolate()));
314
315 #ifdef V8_HOST_ARCH_32_BIT
316 if (flags & kDoubleAlignment) {
317 return AllocateRawAligned(IntPtrConstant(size_in_bytes), flags, top_address,
318 limit_address);
319 }
320 #endif
321
322 return AllocateRawUnaligned(IntPtrConstant(size_in_bytes), flags, top_address,
323 limit_address);
324 }
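
Finally, the dispatch in Allocate, again as a plain-C++ sketch building on the two helpers above: choose the new- or old-space top/limit addresses, then take the aligned path only when double alignment is requested on a 32-bit host (the analogue of the V8_HOST_ARCH_32_BIT #ifdef).

// Mirrors Allocate: pick the new- or old-space top/limit, then take the
// aligned path only for a double-aligned request on a 32-bit host.
uintptr_t AllocateSketch(size_t size_in_bytes, bool pretenured,
                         bool double_align, bool tag_object,
                         uintptr_t* new_top, uintptr_t* new_limit,
                         uintptr_t* old_top, uintptr_t* old_limit) {
  uintptr_t* top_address = pretenured ? old_top : new_top;
  uintptr_t* limit_address = pretenured ? old_limit : new_limit;
  if (sizeof(void*) == 4 && double_align) {
    return AllocateRawAlignedSketch(top_address, limit_address, size_in_bytes,
                                    tag_object);
  }
  return AllocateRawUnalignedSketch(top_address, limit_address, size_in_bytes,
                                    tag_object);
}

A call site in a stub, once this lands, might look roughly like Allocate(FixedArray::SizeFor(length), kTagObject); the exact flag set is whatever the final version of the patch exposes.
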
325
200 Node* CodeStubAssembler::Load(MachineType rep, Node* base) { 326 Node* CodeStubAssembler::Load(MachineType rep, Node* base) {
201 return raw_assembler_->Load(rep, base); 327 return raw_assembler_->Load(rep, base);
202 } 328 }
203 329
204 Node* CodeStubAssembler::Load(MachineType rep, Node* base, Node* index) { 330 Node* CodeStubAssembler::Load(MachineType rep, Node* base, Node* index) {
205 return raw_assembler_->Load(rep, base, index); 331 return raw_assembler_->Load(rep, base, index);
206 } 332 }
207 333
208 Node* CodeStubAssembler::Store(MachineRepresentation rep, Node* base, 334 Node* CodeStubAssembler::Store(MachineRepresentation rep, Node* base,
209 Node* value) { 335 Node* value) {
(...skipping 396 matching lines...)
606 } 732 }
607 } 733 }
608 } 734 }
609 735
610 bound_ = true; 736 bound_ = true;
611 } 737 }
612 738
613 } // namespace compiler 739 } // namespace compiler
614 } // namespace internal 740 } // namespace internal
615 } // namespace v8 741 } // namespace v8
