Chromium Code Reviews

Index: src/code-stub-assembler.cc
diff --git a/src/code-stub-assembler.cc b/src/code-stub-assembler.cc
index d06e0a2313aa3279630ecec75aef04f6d7d2099a..21cb9edeace2ae53b065038bab66ae7a6cabc4dc 100644
--- a/src/code-stub-assembler.cc
+++ b/src/code-stub-assembler.cc
@@ -680,10 +680,8 @@ void CodeStubAssembler::BranchIfFastJSArray(
   BranchIfPrototypesHaveNoElements(map, if_true, if_false);
 }
-Node* CodeStubAssembler::AllocateRawUnaligned(Node* size_in_bytes,
-                                              AllocationFlags flags,
-                                              Node* top_address,
-                                              Node* limit_address) {
+Node* CodeStubAssembler::AllocateRaw(Node* size_in_bytes, AllocationFlags flags,
+                                     Node* top_address, Node* limit_address) {
   Node* top = Load(MachineType::Pointer(), top_address);
   Node* limit = Load(MachineType::Pointer(), limit_address);
@@ -697,7 +695,7 @@ Node* CodeStubAssembler::AllocateRawUnaligned(Node* size_in_bytes,
     GotoIf(IsRegularHeapObjectSize(size_in_bytes), &next);
     Node* runtime_flags = SmiConstant(
-        Smi::FromInt(AllocateDoubleAlignFlag::encode(false) |
+        Smi::FromInt(AllocateDoubleAlignFlag::encode(flags & kDoubleAlignment) |
Camillo Bruni (2017/03/30 12:08:23):
  please add
  bool needs_double_alignment = flags […]

ivica.bogosavljevic (2017/03/30 13:47:57):
  Acknowledged.
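A minimal sketch of what the requested change could look like, assuming the truncated suggestion continues as the `flags & kDoubleAlignment` expression from the `+` line above; this illustrates the comment, it is not the patch that followed:

    // Hypothetical application of the review suggestion: give the flag test
    // a name so the bool conversion at the encode() call site is explicit.
    bool needs_double_alignment = (flags & kDoubleAlignment) != 0;
    Node* runtime_flags = SmiConstant(
        Smi::FromInt(AllocateDoubleAlignFlag::encode(needs_double_alignment) |
                     AllocateTargetSpace::encode(AllocationSpace::LO_SPACE)));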
                     AllocateTargetSpace::encode(AllocationSpace::LO_SPACE)));
     Node* const runtime_result =
         CallRuntime(Runtime::kAllocateInTargetSpace, NoContextConstant(),
@@ -708,7 +706,25 @@ Node* CodeStubAssembler::AllocateRawUnaligned(Node* size_in_bytes,
     Bind(&next);
   }
-  Node* new_top = IntPtrAdd(top, size_in_bytes);
+  Variable adjusted_size(this, MachineType::PointerRepresentation(),
+                         size_in_bytes);
+
+  if (flags & kDoubleAlignment) {
+    Label not_aligned(this), done_alignment(this, &adjusted_size);
+
+    Branch(WordAnd(top, IntPtrConstant(kDoubleAlignmentMask)), &not_aligned,
+           &done_alignment);
+
+    Bind(&not_aligned);
+    Node* not_aligned_size =
+        IntPtrAdd(size_in_bytes, IntPtrConstant(kPointerSize));
+    adjusted_size.Bind(not_aligned_size);
+    Goto(&done_alignment);
+
+    Bind(&done_alignment);
+  }
+
+  Node* new_top = IntPtrAdd(top, adjusted_size.value());
+
   Branch(UintPtrGreaterThanOrEqual(new_top, limit), &runtime_call,
          &no_runtime_call);
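For readers new to the alignment trick, the same size adjustment can be written as ordinary C++, outside the CSA graph. The constant values are assumptions for a 32-bit target (the only case this CL cares about, per the discussion further down); the diff itself relies on V8's definitions:

    #include <cstdint>

    // Plain-C++ sketch of the adjustment above: if the allocation top is not
    // 8-byte aligned, reserve one extra 4-byte word so the object payload can
    // be placed on an 8-byte boundary.
    std::uintptr_t AdjustedSize(std::uintptr_t top, std::uintptr_t size_in_bytes) {
      const std::uintptr_t kDoubleAlignmentMask = 7;  // assumed: 8-byte alignment - 1
      const std::uintptr_t kPointerSize = 4;          // assumed: 32-bit target
      if ((top & kDoubleAlignmentMask) != 0) {
        return size_in_bytes + kPointerSize;  // room for a one-word filler
      }
      return size_in_bytes;
    }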
@@ -716,7 +732,7 @@ Node* CodeStubAssembler::AllocateRawUnaligned(Node* size_in_bytes,
   Node* runtime_result;
   if (flags & kPretenured) {
     Node* runtime_flags = SmiConstant(
-        Smi::FromInt(AllocateDoubleAlignFlag::encode(false) |
+        Smi::FromInt(AllocateDoubleAlignFlag::encode(flags & kDoubleAlignment) |
                      AllocateTargetSpace::encode(AllocationSpace::OLD_SPACE)));
     runtime_result =
         CallRuntime(Runtime::kAllocateInTargetSpace, NoContextConstant(),
@@ -733,8 +749,29 @@ Node* CodeStubAssembler::AllocateRawUnaligned(Node* size_in_bytes,
   Node* no_runtime_result = top;
   StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address,
                       new_top);
+
+  Variable address(this, MachineType::PointerRepresentation(),
+                   no_runtime_result);
+
+  if (flags & kDoubleAlignment) {
+    Label needs_filler(this), done_filling(this, &address);
+    Branch(IntPtrEqual(adjusted_size.value(), size_in_bytes), &done_filling,
+           &needs_filler);
+
+    Bind(&needs_filler);
+    // Store a filler and increase the address by kPointerSize.
+    StoreNoWriteBarrier(MachineType::PointerRepresentation(), top,
+                        LoadRoot(Heap::kOnePointerFillerMapRootIndex));
+    address.Bind(IntPtrAdd(no_runtime_result, IntPtrConstant(kPointerSize)));
+
+    Goto(&done_filling);
+
+    Bind(&done_filling);
+  }
+
   no_runtime_result = BitcastWordToTagged(
-      IntPtrAdd(no_runtime_result, IntPtrConstant(kHeapObjectTag)));
+      IntPtrAdd(address.value(), IntPtrConstant(kHeapObjectTag)));
+
   result.Bind(no_runtime_result);
   Goto(&merge_runtime);
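The filler bookkeeping above can likewise be summarized in ordinary C++. Here `filler_map` and the raw-pointer store are illustrative stand-ins for `LoadRoot(Heap::kOnePointerFillerMapRootIndex)` and `StoreNoWriteBarrier`, not V8 API:

    #include <cstdint>

    // Plain-C++ sketch of the filler step: when the region was padded, stamp
    // the word at the old top with the one-pointer filler map (so the heap
    // stays iterable by the GC) and start the object one word later.
    std::uintptr_t PlaceObject(std::uintptr_t top, bool was_padded,
                               std::uintptr_t filler_map) {
      const std::uintptr_t kPointerSize = 4;  // assumed: 32-bit target
      std::uintptr_t address = top;
      if (was_padded) {
        *reinterpret_cast<std::uintptr_t*>(top) = filler_map;
        address += kPointerSize;
      }
      return address;  // the +kHeapObjectTag tagging happens afterwards
    }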
@@ -742,48 +779,20 @@ Node* CodeStubAssembler::AllocateRawUnaligned(Node* size_in_bytes,
   return result.value();
 }
+Node* CodeStubAssembler::AllocateRawUnaligned(Node* size_in_bytes,
+                                              AllocationFlags flags,
+                                              Node* top_address,
+                                              Node* limit_address) {
Camillo Bruni (2017/03/30 12:08:23):
  CHECK((flags & kDoubleAlignment) == 0);
  For consi[…]

ivica.bogosavljevic (2017/03/30 13:47:57):
  This is true but for 32bit architectures only. On […]
+  return AllocateRaw(size_in_bytes, flags & ~kDoubleAlignment, top_address,

Camillo Bruni (2017/03/30 12:08:23):
  ... and then just use flags here directly.

+                     limit_address);
+}
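Taking both of Camillo's comments together, the wrapper could carry the CHECK and then forward flags unmodified. Per ivica's reply the assertion only holds on 32-bit targets, so a sketch might guard it with V8's target-architecture macro; treat the exact guard as an assumption, and this as an illustration of the suggestion, not a later patch set:

    Node* CodeStubAssembler::AllocateRawUnaligned(Node* size_in_bytes,
                                                  AllocationFlags flags,
                                                  Node* top_address,
                                                  Node* limit_address) {
    #if V8_TARGET_ARCH_32_BIT
      // Callers of the unaligned variant must not request double alignment;
      // on 64-bit targets allocations are already suitably aligned.
      CHECK((flags & kDoubleAlignment) == 0);
    #endif
      // ... and then flags can be passed through directly.
      return AllocateRaw(size_in_bytes, flags, top_address, limit_address);
    }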
+
 Node* CodeStubAssembler::AllocateRawAligned(Node* size_in_bytes,

Camillo Bruni (2017/03/30 12:08:23):
  Let's make this fully obvious: can you rename this […]

ivica.bogosavljevic (2017/03/30 13:47:57):
  Acknowledged.

                                             AllocationFlags flags,
                                             Node* top_address,
                                             Node* limit_address) {
-  Node* top = Load(MachineType::Pointer(), top_address);
-  Variable adjusted_size(this, MachineType::PointerRepresentation(),
-                         size_in_bytes);
-  if (flags & kDoubleAlignment) {
-    Label not_aligned(this), done_alignment(this, &adjusted_size);
-    Branch(WordAnd(top, IntPtrConstant(kDoubleAlignmentMask)), &not_aligned,
-           &done_alignment);
-
-    Bind(&not_aligned);
-    Node* not_aligned_size =
-        IntPtrAdd(size_in_bytes, IntPtrConstant(kPointerSize));
-    adjusted_size.Bind(not_aligned_size);
-    Goto(&done_alignment);
-
-    Bind(&done_alignment);
-  }
-
-  Variable address(this, MachineRepresentation::kTagged,
-                   AllocateRawUnaligned(adjusted_size.value(), kNone,
-                                        top_address, limit_address));
-
-  Label needs_filler(this), done_filling(this, &address);
-  Branch(IntPtrEqual(adjusted_size.value(), size_in_bytes), &done_filling,
-         &needs_filler);
-
-  Bind(&needs_filler);
-  // Store a filler and increase the address by kPointerSize.
-  StoreNoWriteBarrier(MachineType::PointerRepresentation(), top,
-                      LoadRoot(Heap::kOnePointerFillerMapRootIndex));
-  address.Bind(BitcastWordToTagged(
-      IntPtrAdd(address.value(), IntPtrConstant(kPointerSize))));
-  Goto(&done_filling);
-
-  Bind(&done_filling);
-  // Update the top.
-  StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address,
-                      IntPtrAdd(top, adjusted_size.value()));
-  return address.value();
+  return AllocateRaw(size_in_bytes, flags | kDoubleAlignment, top_address,
+                     limit_address);
 }
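After the refactoring, the two public entry points are thin wrappers around AllocateRaw that differ only in the kDoubleAlignment bit. Hypothetical call sites, with invented variable names, just to show the intended split:

    // Both wrappers delegate to AllocateRaw; only the alignment bit differs.
    Node* plain = AllocateRawUnaligned(size, kNone, top_addr, limit_addr);
    Node* aligned = AllocateRawAligned(size, kNone, top_addr, limit_addr);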
 Node* CodeStubAssembler::AllocateInNewSpace(Node* size_in_bytes,