Chromium Code Reviews

Unified Diff: src/heap/spaces-inl.h

Issue 1141523002: Implement unaligned allocate and allocate heap numbers in runtime double unaligned. (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 7 months ago
Index: src/heap/spaces-inl.h
diff --git a/src/heap/spaces-inl.h b/src/heap/spaces-inl.h
index 78fda3c1e1562b99b99a9aa267b5b3ae79614868..70548881302a6bfcfa50cde440dd8f8cce3339c5 100644
--- a/src/heap/spaces-inl.h
+++ b/src/heap/spaces-inl.h
@@ -250,11 +250,17 @@ HeapObject* PagedSpace::AllocateLinearly(int size_in_bytes) {
 }


-HeapObject* PagedSpace::AllocateLinearlyDoubleAlign(int size_in_bytes) {
+HeapObject* PagedSpace::AllocateLinearlyAligned(int size_in_bytes,
+                                                AllocationAlignment alignment) {
   Address current_top = allocation_info_.top();
   int alignment_size = 0;

-  if ((OffsetFrom(current_top) & kDoubleAlignmentMask) != 0) {
+  if (alignment == kDoubleAligned &&
+      (OffsetFrom(current_top) & kDoubleAlignmentMask) != 0) {
+    alignment_size = kPointerSize;
+    size_in_bytes += alignment_size;
+  } else if (alignment == kDoubleUnaligned &&
+             (OffsetFrom(current_top) & kDoubleAlignmentMask) == 0) {
     alignment_size = kPointerSize;
     size_in_bytes += alignment_size;
   }
@@ -262,9 +268,10 @@ HeapObject* PagedSpace::AllocateLinearlyDoubleAlign(int size_in_bytes) {
   if (new_top > allocation_info_.limit()) return NULL;

   allocation_info_.set_top(new_top);
-  if (alignment_size > 0)
-    return heap()->EnsureDoubleAligned(HeapObject::FromAddress(current_top),
-                                       size_in_bytes);
+  if (alignment_size > 0) {
+    return heap()->EnsureAligned(HeapObject::FromAddress(current_top),
+                                 size_in_bytes, alignment);
+  }
   return HeapObject::FromAddress(current_top);
 }
@@ -293,9 +300,10 @@ AllocationResult PagedSpace::AllocateRaw(int size_in_bytes) {
 }

 // Raw allocation.
-AllocationResult PagedSpace::AllocateRawDoubleAligned(int size_in_bytes) {
+AllocationResult PagedSpace::AllocateRawAligned(int size_in_bytes,
+                                                AllocationAlignment alignment) {
   DCHECK(identity() == OLD_SPACE);
-  HeapObject* object = AllocateLinearlyDoubleAlign(size_in_bytes);
+  HeapObject* object = AllocateLinearlyAligned(size_in_bytes, alignment);
   int aligned_size_in_bytes = size_in_bytes + kPointerSize;

   if (object == NULL) {
@@ -304,7 +312,7 @@ AllocationResult PagedSpace::AllocateRawDoubleAligned(int size_in_bytes) {
       object = SlowAllocateRaw(aligned_size_in_bytes);
     }
     if (object != NULL) {
-      object = heap()->EnsureDoubleAligned(object, aligned_size_in_bytes);
+      object = heap()->EnsureAligned(object, aligned_size_in_bytes, alignment);
     }
   }
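On this slow path the reservation is always size_in_bytes + kPointerSize, and heap()->EnsureAligned() is left to decide where the spare word goes. A hedged sketch of that contract, with stand-in names (WriteOneWordFiller, EnsureAlignedSketch) and 32-bit constants that are assumptions for illustration, not V8 code:

// The caller over-allocated by one word; the spare word becomes a filler
// either in front of the object (shifting it up) or at the end.
#include <cstdint>

enum AllocationAlignment { kWordAligned, kDoubleAligned, kDoubleUnaligned };

const int kPointerSize = 4;
const uintptr_t kDoubleAlignmentMask = 7;

// Stand-in for writing a one-word filler object; a real heap records a
// filler map here so the GC can walk over the gap.
void WriteOneWordFiller(uintptr_t /*addr*/) {}

uintptr_t EnsureAlignedSketch(uintptr_t start, int size_with_pad,
                              AllocationAlignment alignment) {
  bool pad_in_front =
      (alignment == kDoubleAligned && (start & kDoubleAlignmentMask) != 0) ||
      (alignment == kDoubleUnaligned && (start & kDoubleAlignmentMask) == 0);
  if (pad_in_front) {
    WriteOneWordFiller(start);      // filler first, object starts one word up
    return start + kPointerSize;
  }
  WriteOneWordFiller(start + size_with_pad - kPointerSize);  // spare word last
  return start;
}

int main() {
  // A kDoubleAligned request whose chunk starts at 0x1004 shifts up to 0x1008.
  return EnsureAlignedSketch(0x1004, 12, kDoubleAligned) == 0x1008 ? 0 : 1;
}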
@@ -321,20 +329,25 @@ AllocationResult PagedSpace::AllocateRawDoubleAligned(int size_in_bytes) {
 // NewSpace


-AllocationResult NewSpace::AllocateRawDoubleAligned(int size_in_bytes) {
+AllocationResult NewSpace::AllocateRawAligned(int size_in_bytes,
+                                              AllocationAlignment alignment) {
   Address old_top = allocation_info_.top();
   int alignment_size = 0;
   int aligned_size_in_bytes = 0;

-  // If double alignment is required and top pointer is not aligned, we allocate
-  // additional memory to take care of the alignment.
+  // If the requested alignment does not match the current top pointer, we
+  // allocate an extra word and fix up the alignment with a filler below.
+  if (alignment == kDoubleAligned &&
+      (OffsetFrom(old_top) & kDoubleAlignmentMask) != 0) {
+    alignment_size += kPointerSize;
+  } else if (alignment == kDoubleUnaligned &&
+             (OffsetFrom(old_top) & kDoubleAlignmentMask) == 0) {
     alignment_size += kPointerSize;
   }
   aligned_size_in_bytes = size_in_bytes + alignment_size;

   if (allocation_info_.limit() - old_top < aligned_size_in_bytes) {
-    return SlowAllocateRaw(size_in_bytes, true);
+    return SlowAllocateRaw(size_in_bytes, alignment);
   }

   HeapObject* obj = HeapObject::FromAddress(old_top);
@@ -342,12 +355,17 @@ AllocationResult NewSpace::AllocateRawDoubleAligned(int size_in_bytes) {
   DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);

   if (alignment_size > 0) {
-    obj = heap()->EnsureDoubleAligned(obj, aligned_size_in_bytes);
+    obj = heap()->PrecedeWithFiller(obj);
   }

   // The slow path above ultimately goes through AllocateRaw, so this suffices.
   MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj->address(), size_in_bytes);

+  DCHECK((alignment == kDoubleAligned &&
+          (OffsetFrom(obj) & kDoubleAlignmentMask) == 0) ||
+         (alignment == kDoubleUnaligned &&
+          (OffsetFrom(obj) & kDoubleAlignmentMask) != 0));
+
   return obj;
 }
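In new space the linear allocation top makes a front filler sufficient, hence PrecedeWithFiller above, and the closing DCHECK states the invariant the caller relies on. The payoff of kDoubleUnaligned is visible in that invariant: on a 32-bit build a heap number's one-word map header plus an unaligned object start leaves the 8-byte double payload aligned. A small arithmetic check, with layout constants assumed for illustration:

#include <cassert>
#include <cstdint>

const int kPointerSize = 4;                // assumption: 32-bit build
const uintptr_t kDoubleAlignmentMask = 7;  // assumption: 8-byte doubles

int main() {
  // Assumed layout: a HeapNumber is a one-word map header followed by the
  // 8-byte double, so an object starting at 4 (mod 8) puts the payload
  // at 0 (mod 8).
  uintptr_t object = 0x1004;                      // kDoubleUnaligned placement
  uintptr_t value_field = object + kPointerSize;  // where the double lives
  assert((object & kDoubleAlignmentMask) != 0);   // invariant DCHECKed above
  assert((value_field & kDoubleAlignmentMask) == 0);
  return 0;
}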
@@ -356,7 +372,7 @@ AllocationResult NewSpace::AllocateRaw(int size_in_bytes) {
   Address old_top = allocation_info_.top();

   if (allocation_info_.limit() - old_top < size_in_bytes) {
-    return SlowAllocateRaw(size_in_bytes, false);
+    return SlowAllocateRaw(size_in_bytes, kWordAligned);
   }

   HeapObject* obj = HeapObject::FromAddress(old_top);