Index: src/heap/spaces.cc
diff --git a/src/heap/spaces.cc b/src/heap/spaces.cc
index 33307a149d59d8ac5e6076b3950aa89a7418bc7c..8069e51ce19b7b304926556216d53c5ea8f0818a 100644
--- a/src/heap/spaces.cc
+++ b/src/heap/spaces.cc
@@ -1463,28 +1463,33 @@
   Address old_top = allocation_info_.top();
   Address high = to_space_.page_high();
   if (allocation_info_.limit() < high) {
-    int alignment_size = Heap::GetFillToAlign(old_top, alignment);
-    int aligned_size_in_bytes = size_in_bytes + alignment_size;
-
     // Either the limit has been lowered because linear allocation was disabled
     // or because incremental marking wants to get a chance to do a step. Set
     // the new limit accordingly.
-    Address new_top = old_top + aligned_size_in_bytes;
+    int aligned_size = size_in_bytes;
+    aligned_size += (alignment != kWordAligned) ? kPointerSize : 0;
+    Address new_top = old_top + aligned_size;
     int bytes_allocated = static_cast<int>(new_top - top_on_previous_step_);
     heap()->incremental_marking()->Step(bytes_allocated,
                                         IncrementalMarking::GC_VIA_STACK_GUARD);
-    UpdateInlineAllocationLimit(aligned_size_in_bytes);
+    UpdateInlineAllocationLimit(aligned_size);
     top_on_previous_step_ = new_top;
-    if (alignment == kWordAligned) return AllocateRawUnaligned(size_in_bytes);
-    return AllocateRawAligned(size_in_bytes, alignment);
+    if (alignment == kDoubleAligned)
+      return AllocateRawAligned(size_in_bytes, kDoubleAligned);
+    else if (alignment == kDoubleUnaligned)
+      return AllocateRawAligned(size_in_bytes, kDoubleUnaligned);
+    return AllocateRawUnaligned(size_in_bytes);
   } else if (AddFreshPage()) {
     // Switched to new page. Try allocating again.
     int bytes_allocated = static_cast<int>(old_top - top_on_previous_step_);
     heap()->incremental_marking()->Step(bytes_allocated,
                                         IncrementalMarking::GC_VIA_STACK_GUARD);
     top_on_previous_step_ = to_space_.page_low();
-    if (alignment == kWordAligned) return AllocateRawUnaligned(size_in_bytes);
-    return AllocateRawAligned(size_in_bytes, alignment);
+    if (alignment == kDoubleAligned)
+      return AllocateRawAligned(size_in_bytes, kDoubleAligned);
+    else if (alignment == kDoubleUnaligned)
+      return AllocateRawAligned(size_in_bytes, kDoubleUnaligned);
+    return AllocateRawUnaligned(size_in_bytes);
   } else {
     return AllocationResult::Retry();
   }
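The substantive change above is in how the slow path sizes its reservation: the old code asked Heap::GetFillToAlign for the exact filler needed at old_top, while the new code conservatively reserves one extra word whenever any non-word alignment is requested, whether or not the current top actually needs filler. The sketch below contrasts the two computations. It is a minimal illustration, not V8's real helpers: the constants, the GetFillToAlign body, and main are assumptions modeled on a 32-bit build where kPointerSize is 4 and doubles want 8-byte alignment.

```cpp
// Illustrative sketch only; not V8's actual definitions.
#include <cstdint>
#include <cstdio>

const int kPointerSize = 4;      // assumed: 32-bit build
const int kDoubleAlignment = 8;  // assumed: doubles want 8-byte alignment

enum AllocationAlignment { kWordAligned, kDoubleAligned, kDoubleUnaligned };

// Exact fill, in the spirit of Heap::GetFillToAlign: how many filler bytes
// must precede the object so it lands on (or, for kDoubleUnaligned, off)
// a double boundary at this particular address.
int GetFillToAlign(uintptr_t address, AllocationAlignment alignment) {
  bool on_double_boundary = (address & (kDoubleAlignment - 1)) == 0;
  if (alignment == kDoubleAligned && !on_double_boundary) return kPointerSize;
  if (alignment == kDoubleUnaligned && on_double_boundary) return kPointerSize;
  return 0;
}

// Conservative reservation, matching the patched code: always leave room
// for one word of filler when any non-word alignment was requested.
int ConservativeSize(int size_in_bytes, AllocationAlignment alignment) {
  return size_in_bytes + ((alignment != kWordAligned) ? kPointerSize : 0);
}

int main() {
  uintptr_t top = 0x1000;  // a double-aligned top: exact fill is 0 here
  printf("exact:        %d\n", 16 + GetFillToAlign(top, kDoubleAligned));  // 16
  printf("conservative: %d\n", ConservativeSize(16, kDoubleAligned));      // 20
  return 0;
}
```

The trade-off this illustrates: the conservative size may overestimate by one word, which is harmless for setting the inline allocation limit and the incremental-marking step, and it avoids recomputing the fill against old_top before the actual allocation call decides the final placement.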