OLD | NEW |
1 // Copyright 2016 the V8 project authors. All rights reserved. | 1 // Copyright 2016 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/code-stub-assembler.h" | 5 #include "src/code-stub-assembler.h" |
6 #include "src/code-factory.h" | 6 #include "src/code-factory.h" |
7 | 7 |
8 namespace v8 { | 8 namespace v8 { |
9 namespace internal { | 9 namespace internal { |
10 | 10 |
(...skipping 325 matching lines...) |
336 Node* runtime_result = CallRuntime(Runtime::kAllocateInTargetSpace, context, | 336 Node* runtime_result = CallRuntime(Runtime::kAllocateInTargetSpace, context, |
337 SmiTag(size_in_bytes), runtime_flags); | 337 SmiTag(size_in_bytes), runtime_flags); |
338 result.Bind(runtime_result); | 338 result.Bind(runtime_result); |
339 Goto(&merge_runtime); | 339 Goto(&merge_runtime); |
340 | 340 |
341 // When there is enough space, return `top' and bump it up. | 341 // When there is enough space, return `top' and bump it up. |
342 Bind(&no_runtime_call); | 342 Bind(&no_runtime_call); |
343 Node* no_runtime_result = top; | 343 Node* no_runtime_result = top; |
344 StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address, | 344 StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address, |
345 IntPtrAdd(top, size_in_bytes)); | 345 IntPtrAdd(top, size_in_bytes)); |
346 no_runtime_result = | 346 no_runtime_result = BitcastWordToTagged( |
347 IntPtrAdd(no_runtime_result, IntPtrConstant(kHeapObjectTag)); | 347 IntPtrAdd(no_runtime_result, IntPtrConstant(kHeapObjectTag))); |
348 result.Bind(no_runtime_result); | 348 result.Bind(no_runtime_result); |
349 Goto(&merge_runtime); | 349 Goto(&merge_runtime); |
350 | 350 |
351 Bind(&merge_runtime); | 351 Bind(&merge_runtime); |
352 return result.value(); | 352 return result.value(); |
353 } | 353 } |
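Note on the BitcastWordToTagged change in the hunk above: the fast path computes the result with untagged word arithmetic (old top plus kHeapObjectTag), and the added bitcast only tells the assembler that this word is now a tagged HeapObject pointer. A minimal sketch in plain C++ (not CodeStubAssembler code), assuming the usual V8 tagging scheme where kHeapObjectTag is 1:

#include <cstdint>

// V8 marks heap pointers by setting the low bit; Smis keep it clear.
constexpr intptr_t kHeapObjectTag = 1;

// The fast-path result is just the previous allocation top plus the tag bit.
// BitcastWordToTagged does not change this value; it only changes its machine
// representation from Word to Tagged so later nodes treat it as a heap pointer.
intptr_t TagAllocationResult(intptr_t old_top) {
  return old_top + kHeapObjectTag;
}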
354 | 354 |
355 Node* CodeStubAssembler::AllocateRawAligned(Node* size_in_bytes, | 355 Node* CodeStubAssembler::AllocateRawAligned(Node* size_in_bytes, |
356 AllocationFlags flags, | 356 AllocationFlags flags, |
357 Node* top_address, | 357 Node* top_address, |
(...skipping 27 matching lines...) |
385 merge_address(this, &address); | 385 merge_address(this, &address); |
386 Branch(IntPtrEqual(adjusted_size.value(), size_in_bytes), &doesnt_need_filler, | 386 Branch(IntPtrEqual(adjusted_size.value(), size_in_bytes), &doesnt_need_filler, |
387 &needs_filler); | 387 &needs_filler); |
388 | 388 |
389 Bind(&needs_filler); | 389 Bind(&needs_filler); |
390 // Store a filler and increase the address by kPointerSize. | 390 // Store a filler and increase the address by kPointerSize. |
391 // TODO(epertoso): this code assumes that we only align to kDoubleSize. Change | 391 // TODO(epertoso): this code assumes that we only align to kDoubleSize. Change |
392 // it when Simd128 alignment is supported. | 392 // it when Simd128 alignment is supported. |
393 StoreNoWriteBarrier(MachineType::PointerRepresentation(), top, | 393 StoreNoWriteBarrier(MachineType::PointerRepresentation(), top, |
394 LoadRoot(Heap::kOnePointerFillerMapRootIndex)); | 394 LoadRoot(Heap::kOnePointerFillerMapRootIndex)); |
395 address.Bind(IntPtrAdd(address.value(), IntPtrConstant(kPointerSize))); | 395 address.Bind(BitcastWordToTagged( |
| 396 IntPtrAdd(address.value(), IntPtrConstant(kPointerSize)))); |
396 Goto(&merge_address); | 397 Goto(&merge_address); |
397 | 398 |
398 Bind(&doesnt_need_filler); | 399 Bind(&doesnt_need_filler); |
399 Goto(&merge_address); | 400 Goto(&merge_address); |
400 | 401 |
401 Bind(&merge_address); | 402 Bind(&merge_address); |
402 // Update the top. | 403 // Update the top. |
403 StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address, | 404 StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address, |
404 IntPtrAdd(top, adjusted_size.value())); | 405 IntPtrAdd(top, adjusted_size.value())); |
405 return address.value(); | 406 return address.value(); |
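For the aligned path, the filler logic above can be summarized as follows. This is a rough sketch in ordinary C++ (not assembler code), assuming, as the TODO notes, that kDoubleSize is the only supported alignment: when the adjusted size equals the requested size no padding is needed; otherwise a one-pointer filler is written at the old top and the returned address moves up by kPointerSize.

#include <cstdint>

// Illustrative constants for a 32-bit build; on a 64-bit build kPointerSize
// already equals kDoubleSize and the filler branch is never taken.
constexpr intptr_t kPointerSize = 4;
constexpr intptr_t kDoubleSize = 8;

// write_filler stands in for storing the one-pointer filler map
// (Heap::kOnePointerFillerMapRootIndex) at the given address.
intptr_t AlignAllocation(intptr_t top, void (*write_filler)(intptr_t)) {
  if (top % kDoubleSize != 0) {
    write_filler(top);    // one-word filler object keeps the heap iterable
    top += kPointerSize;  // the real allocation starts after the filler
  }
  return top;
}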
(...skipping 15 matching lines...) |
421 return AllocateRawAligned(IntPtrConstant(size_in_bytes), flags, top_address, | 422 return AllocateRawAligned(IntPtrConstant(size_in_bytes), flags, top_address, |
422 limit_address); | 423 limit_address); |
423 } | 424 } |
424 #endif | 425 #endif |
425 | 426 |
426 return AllocateRawUnaligned(IntPtrConstant(size_in_bytes), flags, top_address, | 427 return AllocateRawUnaligned(IntPtrConstant(size_in_bytes), flags, top_address, |
427 limit_address); | 428 limit_address); |
428 } | 429 } |
429 | 430 |
430 Node* CodeStubAssembler::InnerAllocate(Node* previous, int offset) { | 431 Node* CodeStubAssembler::InnerAllocate(Node* previous, int offset) { |
431 return IntPtrAdd(previous, IntPtrConstant(offset)); | 432 return BitcastWordToTagged(IntPtrAdd(previous, IntPtrConstant(offset))); |
432 } | 433 } |
433 | 434 |
434 Node* CodeStubAssembler::LoadBufferObject(Node* buffer, int offset, | 435 Node* CodeStubAssembler::LoadBufferObject(Node* buffer, int offset, |
435 MachineType rep) { | 436 MachineType rep) { |
436 return Load(rep, buffer, IntPtrConstant(offset)); | 437 return Load(rep, buffer, IntPtrConstant(offset)); |
437 } | 438 } |
438 | 439 |
439 Node* CodeStubAssembler::LoadObjectField(Node* object, int offset, | 440 Node* CodeStubAssembler::LoadObjectField(Node* object, int offset, |
440 MachineType rep) { | 441 MachineType rep) { |
441 return Load(rep, object, IntPtrConstant(offset - kHeapObjectTag)); | 442 return Load(rep, object, IntPtrConstant(offset - kHeapObjectTag)); |
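LoadObjectField folds the tag subtraction into the constant offset: since the incoming object pointer carries kHeapObjectTag, the untagged field address is object + offset - kHeapObjectTag. A small illustration in plain C++ (the names are only for this sketch):

#include <cstdint>

constexpr intptr_t kHeapObjectTag = 1;

// Field offsets are given relative to the untagged start of the object, so
// the load above compensates for the tag bit once, in the constant offset.
intptr_t FieldAddress(intptr_t tagged_object, int field_offset) {
  return tagged_object + field_offset - kHeapObjectTag;
}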
(...skipping 681 matching lines...) |
1123 } | 1124 } |
1124 | 1125 |
1125 Node* CodeStubAssembler::BitFieldDecode(Node* word32, uint32_t shift, | 1126 Node* CodeStubAssembler::BitFieldDecode(Node* word32, uint32_t shift, |
1126 uint32_t mask) { | 1127 uint32_t mask) { |
1127 return Word32Shr(Word32And(word32, Int32Constant(mask)), | 1128 return Word32Shr(Word32And(word32, Int32Constant(mask)), |
1128 Int32Constant(shift)); | 1129 Int32Constant(shift)); |
1129 } | 1130 } |
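BitFieldDecode is a plain mask-and-shift. A minimal standalone sketch of the same computation, with a hypothetical field layout chosen only for illustration:

#include <cstdint>

// Same computation as the assembler nodes above: (word32 & mask) >> shift.
uint32_t BitFieldDecode(uint32_t word32, uint32_t shift, uint32_t mask) {
  return (word32 & mask) >> shift;
}

// Example: a 2-bit field stored at bits 2..3 (mask 0x0C, shift 2).
// BitFieldDecode(0b1110, 2, 0x0C) == 0b11.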
1130 | 1131 |
1131 } // namespace internal | 1132 } // namespace internal |
1132 } // namespace v8 | 1133 } // namespace v8 |