
Diff: src/heap/spaces-inl.h

Issue 1150593003: Clean up aligned allocation code in preparation for SIMD alignments. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Handle filler after object. Created 5 years, 7 months ago
// Copyright 2011 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_SPACES_INL_H_
#define V8_HEAP_SPACES_INL_H_

#include "src/heap/spaces.h"
#include "src/heap-profiler.h"
#include "src/isolate.h"

(...skipping 232 matching lines...)
HeapObject* PagedSpace::AllocateLinearly(int size_in_bytes) {
  Address current_top = allocation_info_.top();
  Address new_top = current_top + size_in_bytes;
  if (new_top > allocation_info_.limit()) return NULL;

  allocation_info_.set_top(new_top);
  return HeapObject::FromAddress(current_top);
}

-HeapObject* PagedSpace::AllocateLinearlyAligned(int size_in_bytes,
+HeapObject* PagedSpace::AllocateLinearlyAligned(int* size_in_bytes,
                                                 AllocationAlignment alignment) {
   Address current_top = allocation_info_.top();
-  int alignment_size = 0;
-
-  if (alignment == kDoubleAligned &&
-      (OffsetFrom(current_top) & kDoubleAlignmentMask) != 0) {
-    alignment_size = kPointerSize;
-    size_in_bytes += alignment_size;
-  } else if (alignment == kDoubleUnaligned &&
-             (OffsetFrom(current_top) & kDoubleAlignmentMask) == 0) {
-    alignment_size = kPointerSize;
-    size_in_bytes += alignment_size;
-  }
-  Address new_top = current_top + size_in_bytes;
+  int filler_size = Heap::GetMisalignment(current_top, alignment);
+
+  Address new_top = current_top + filler_size + *size_in_bytes;
   if (new_top > allocation_info_.limit()) return NULL;

   allocation_info_.set_top(new_top);
-  if (alignment_size > 0) {
-    return heap()->EnsureAligned(HeapObject::FromAddress(current_top),
-                                 size_in_bytes, alignment);
+  if (filler_size > 0) {
+    *size_in_bytes += filler_size;
+    return heap()->PrecedeWithFiller(HeapObject::FromAddress(current_top),
+                                     filler_size);
   }
+
   return HeapObject::FromAddress(current_top);
 }

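Note: the inline alignment checks removed above are folded into the Heap::GetMisalignment helper introduced by this patch. As a rough guide to what that helper is assumed to return, here is a minimal standalone sketch mirroring the removed branches (kDoubleAligned pads a misaligned top by one pointer so the object starts on an 8-byte boundary; kDoubleUnaligned pads an already aligned top so the double payload, not the header, lands on the boundary). The constants are the usual 32-bit V8 values; the real implementation may differ.

// Minimal sketch, assuming 32-bit constants (4-byte pointers, 8-byte doubles).
#include <cstdint>

enum AllocationAlignment { kWordAligned, kDoubleAligned, kDoubleUnaligned };

const int kPointerSize = 4;
const uintptr_t kDoubleAlignmentMask = 8 - 1;

// Returns how many filler bytes must precede an object allocated at `address`
// for the requested alignment to hold; mirrors the removed inline checks.
int GetMisalignment(uintptr_t address, AllocationAlignment alignment) {
  if (alignment == kDoubleAligned && (address & kDoubleAlignmentMask) != 0) {
    return kPointerSize;
  }
  if (alignment == kDoubleUnaligned && (address & kDoubleAlignmentMask) == 0) {
    return kPointerSize;
  }
  return 0;
}
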
// Raw allocation.
AllocationResult PagedSpace::AllocateRawUnaligned(int size_in_bytes) {
  HeapObject* object = AllocateLinearly(size_in_bytes);

  if (object == NULL) {
    object = free_list_.Allocate(size_in_bytes);
(...skipping 11 matching lines...)
  }

  return AllocationResult::Retry(identity());
}

 // Raw allocation.
 AllocationResult PagedSpace::AllocateRawAligned(int size_in_bytes,
                                                 AllocationAlignment alignment) {
   DCHECK(identity() == OLD_SPACE);
-  HeapObject* object = AllocateLinearlyAligned(size_in_bytes, alignment);
-  int aligned_size_in_bytes = size_in_bytes + kPointerSize;
+  int allocation_size = size_in_bytes;
+  HeapObject* object = AllocateLinearlyAligned(&allocation_size, alignment);

   if (object == NULL) {
-    object = free_list_.Allocate(aligned_size_in_bytes);
+    // We don't know the actual misalignment until the object is allocated, so
+    // add the worst case filler size to the allocation request.
+    allocation_size = size_in_bytes + Heap::GetMaximumMisalignment(alignment);
+    object = free_list_.Allocate(allocation_size);
     if (object == NULL) {
-      object = SlowAllocateRaw(aligned_size_in_bytes);
+      object = SlowAllocateRaw(allocation_size);
     }
-    if (object != NULL) {
-      object = heap()->EnsureAligned(object, aligned_size_in_bytes, alignment);
+    if (object != NULL && allocation_size > size_in_bytes) {
+      int filler_size = Heap::GetMisalignment(object->address(), alignment);
+      if (filler_size) {
+        object = heap()->PrecedeWithFiller(object, filler_size);
+      } else {
+        // object is aligned, fill the extra space at the end of the allocation.
+        DCHECK(allocation_size > size_in_bytes);
+        heap()->CreateFillerObjectAt(object->address() + size_in_bytes,
+                                     allocation_size - size_in_bytes);
+      }
     }
   }

   if (object != NULL) {
-    MSAN_ALLOCATED_UNINITIALIZED_MEMORY(object->address(), size_in_bytes);
+    MSAN_ALLOCATED_UNINITIALIZED_MEMORY(object->address(), allocation_size);
     return object;
   }

   return AllocationResult::Retry(identity());
 }

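Note: when the bump-pointer fast path fails, the free list cannot be asked for an aligned block directly, so the request is padded by the worst-case filler and the slack is filled on whichever side needs it, before the object if the block is misaligned, after it otherwise. Below is a hypothetical worked example of that bookkeeping only; the addresses and the assumed values (kPointerSize = 4, 8-byte double alignment, GetMaximumMisalignment(kDoubleAligned) equal to one pointer) are illustrative, not taken from the patch.

// Hypothetical worked example; addresses and sizes are made up.
#include <cstdint>
#include <cstdio>
#include <initializer_list>

int main() {
  const int kPointerSize = 4;
  const int size_in_bytes = 12;               // requested object size
  const int max_misalignment = kPointerSize;  // assumed worst-case filler
  const int allocation_size = size_in_bytes + max_misalignment;

  std::printf("reserve %d bytes for a %d-byte double-aligned object\n",
              allocation_size, size_in_bytes);

  // Two possible start addresses a free-list allocation might return.
  for (uintptr_t base : {uintptr_t{0x1000}, uintptr_t{0x1004}}) {
    int filler_size = (base & 7) != 0 ? kPointerSize : 0;
    uintptr_t object = base + filler_size;    // aligned object start
    if (filler_size > 0) {
      // Misaligned block: a one-word filler precedes the object.
      std::printf("base %#zx -> filler at %#zx, object at %#zx\n",
                  (size_t)base, (size_t)base, (size_t)object);
    } else {
      // Already aligned: the unused word is filled after the object instead.
      std::printf("base %#zx -> object at %#zx, filler at %#zx\n",
                  (size_t)base, (size_t)object,
                  (size_t)(object + size_in_bytes));
    }
  }
  return 0;
}
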
AllocationResult PagedSpace::AllocateRaw(int size_in_bytes,
                                         AllocationAlignment alignment) {
#ifdef V8_HOST_ARCH_32_BIT
  return alignment == kDoubleAligned
             ? AllocateRawAligned(size_in_bytes, kDoubleAligned)
             : AllocateRawUnaligned(size_in_bytes);
#else
  return AllocateRawUnaligned(size_in_bytes);
#endif
}


// -----------------------------------------------------------------------------
// NewSpace

 AllocationResult NewSpace::AllocateRawAligned(int size_in_bytes,
                                               AllocationAlignment alignment) {
   Address old_top = allocation_info_.top();
-  int alignment_size = 0;
-  int aligned_size_in_bytes = 0;
-
-  // If double alignment is required and top pointer is not aligned, we allocate
-  // additional memory to take care of the alignment.
-  if (alignment == kDoubleAligned &&
-      (OffsetFrom(old_top) & kDoubleAlignmentMask) != 0) {
-    alignment_size += kPointerSize;
-  } else if (alignment == kDoubleUnaligned &&
-             (OffsetFrom(old_top) & kDoubleAlignmentMask) == 0) {
-    alignment_size += kPointerSize;
-  }
-  aligned_size_in_bytes = size_in_bytes + alignment_size;
+  int alignment_size = Heap::GetMisalignment(old_top, alignment);
+  int aligned_size_in_bytes = size_in_bytes + alignment_size;

   if (allocation_info_.limit() - old_top < aligned_size_in_bytes) {
     return SlowAllocateRaw(size_in_bytes, alignment);
   }

   HeapObject* obj = HeapObject::FromAddress(old_top);
   allocation_info_.set_top(allocation_info_.top() + aligned_size_in_bytes);
   DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);

   if (alignment_size > 0) {
-    obj = heap()->PrecedeWithFiller(obj);
+    obj = heap()->PrecedeWithFiller(obj, alignment_size);
   }

   // The slow path above ultimately goes through AllocateRaw, so this suffices.
   MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj->address(), size_in_bytes);

-  DCHECK((kDoubleAligned && (OffsetFrom(obj) & kDoubleAlignmentMask) == 0) ||
-         (kDoubleUnaligned && (OffsetFrom(obj) & kDoubleAlignmentMask) != 0));
-
   return obj;
 }

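Note: both spaces now pass the filler size to PrecedeWithFiller explicitly. A minimal sketch of what that two-argument helper is assumed to do, using only calls that already appear in this diff (CreateFillerObjectAt, HeapObject::FromAddress); this is not the literal Heap implementation, only the behavior the call sites above rely on.

// Minimal sketch, assuming the two-argument form used by this patch: write a
// filler object of filler_size bytes at the original address, then return the
// object shifted past it so the payload starts at the aligned address.
HeapObject* Heap::PrecedeWithFiller(HeapObject* object, int filler_size) {
  CreateFillerObjectAt(object->address(), filler_size);
  return HeapObject::FromAddress(object->address() + filler_size);
}
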
AllocationResult NewSpace::AllocateRawUnaligned(int size_in_bytes) {
  Address old_top = allocation_info_.top();

  if (allocation_info_.limit() - old_top < size_in_bytes) {
    return SlowAllocateRaw(size_in_bytes, kWordAligned);
  }

(...skipping 28 matching lines...)

intptr_t LargeObjectSpace::Available() {
  return ObjectSizeFor(heap()->isolate()->memory_allocator()->Available());
}

}
}  // namespace v8::internal

#endif  // V8_HEAP_SPACES_INL_H_