Chromium Code Reviews

Unified Diff: src/heap/spaces-inl.h

Issue 1150593003: Clean up aligned allocation code in preparation for SIMD alignments. (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Fix DoubleAlignForDeserialization, fix test when top is misaligned. Created 5 years, 6 months ago
 // Copyright 2011 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
 #ifndef V8_HEAP_SPACES_INL_H_
 #define V8_HEAP_SPACES_INL_H_
 
 #include "src/heap/spaces.h"
 #include "src/heap-profiler.h"
 #include "src/isolate.h"
(...skipping 232 matching lines...)
 HeapObject* PagedSpace::AllocateLinearly(int size_in_bytes) {
   Address current_top = allocation_info_.top();
   Address new_top = current_top + size_in_bytes;
   if (new_top > allocation_info_.limit()) return NULL;
 
   allocation_info_.set_top(new_top);
   return HeapObject::FromAddress(current_top);
 }
 
 
-HeapObject* PagedSpace::AllocateLinearlyAligned(int size_in_bytes,
+HeapObject* PagedSpace::AllocateLinearlyAligned(int* size_in_bytes,
                                                 AllocationAlignment alignment) {
   Address current_top = allocation_info_.top();
-  int alignment_size = 0;
+  int filler_size = Heap::GetFillToAlign(current_top, alignment);
 
-  if (alignment == kDoubleAligned &&
-      (OffsetFrom(current_top) & kDoubleAlignmentMask) != 0) {
-    alignment_size = kPointerSize;
-    size_in_bytes += alignment_size;
-  } else if (alignment == kDoubleUnaligned &&
-             (OffsetFrom(current_top) & kDoubleAlignmentMask) == 0) {
-    alignment_size = kPointerSize;
-    size_in_bytes += alignment_size;
-  }
-  Address new_top = current_top + size_in_bytes;
+  Address new_top = current_top + filler_size + *size_in_bytes;
   if (new_top > allocation_info_.limit()) return NULL;
 
   allocation_info_.set_top(new_top);
-  if (alignment_size > 0) {
-    return heap()->EnsureAligned(HeapObject::FromAddress(current_top),
-                                 size_in_bytes, alignment);
+  if (filler_size > 0) {
+    *size_in_bytes += filler_size;
+    return heap()->PrecedeWithFiller(HeapObject::FromAddress(current_top),
+                                     filler_size);
   }
+
   return HeapObject::FromAddress(current_top);
 }
 
 
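The new Heap::GetFillToAlign call centralizes the filler computation that the deleted if/else chain did inline. Below is a minimal standalone sketch of that computation, reconstructed from the old branches; the constants are assumptions mirroring V8's kPointerSize and kDoubleAlignmentMask on a 32-bit host, and the enum mirrors AllocationAlignment.

#include <cstdint>

enum AllocationAlignment { kWordAligned, kDoubleAligned, kDoubleUnaligned };

const int kPointerSize = 4;                // word size on a 32-bit host
const uintptr_t kDoubleAlignmentMask = 7;  // kDoubleSize - 1

// Reconstruction of the deleted inline logic: how many bytes of filler must
// precede an object allocated at `top` to satisfy `alignment`.
int GetFillToAlign(uintptr_t top, AllocationAlignment alignment) {
  if (alignment == kDoubleAligned && (top & kDoubleAlignmentMask) != 0)
    return kPointerSize;  // pad up to the next 8-byte boundary
  if (alignment == kDoubleUnaligned && (top & kDoubleAlignmentMask) == 0)
    return kPointerSize;  // pad off the 8-byte boundary
  return 0;               // already correctly (un)aligned
}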
 // Raw allocation.
 AllocationResult PagedSpace::AllocateRawUnaligned(int size_in_bytes) {
   HeapObject* object = AllocateLinearly(size_in_bytes);
 
   if (object == NULL) {
     object = free_list_.Allocate(size_in_bytes);
(...skipping 11 matching lines...)
   }
 
   return AllocationResult::Retry(identity());
 }
 
 
 // Raw allocation.
 AllocationResult PagedSpace::AllocateRawAligned(int size_in_bytes,
                                                 AllocationAlignment alignment) {
   DCHECK(identity() == OLD_SPACE);
-  HeapObject* object = AllocateLinearlyAligned(size_in_bytes, alignment);
-  int aligned_size_in_bytes = size_in_bytes + kPointerSize;
+  int allocation_size = size_in_bytes;
+  HeapObject* object = AllocateLinearlyAligned(&allocation_size, alignment);
 
   if (object == NULL) {
-    object = free_list_.Allocate(aligned_size_in_bytes);
+    // We don't know exactly how much filler we need to align until space is
+    // allocated, so assume the worst case.
+    int filler_size = Heap::GetMaximumFillToAlign(alignment);
+    allocation_size += filler_size;
+    object = free_list_.Allocate(allocation_size);
     if (object == NULL) {
-      object = SlowAllocateRaw(aligned_size_in_bytes);
+      object = SlowAllocateRaw(allocation_size);
     }
-    if (object != NULL) {
-      object = heap()->EnsureAligned(object, aligned_size_in_bytes, alignment);
+    if (object != NULL && filler_size != 0) {
+      object = heap()->AlignWithFiller(object, size_in_bytes, allocation_size,
+                                       alignment);
     }
   }
 
   if (object != NULL) {
-    MSAN_ALLOCATED_UNINITIALIZED_MEMORY(object->address(), size_in_bytes);
+    MSAN_ALLOCATED_UNINITIALIZED_MEMORY(object->address(), allocation_size);
     return object;
   }
 
   return AllocationResult::Retry(identity());
 }
 
 
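When the bump-pointer path fails, the free-list allocation must reserve the worst-case filler up front, because the block's address (and hence the exact filler) is unknown until the block is handed back. Here is a hedged sketch of the carve-up AlignWithFiller then performs on the oversized block; FillerAt is a hypothetical stand-in for V8's filler-object machinery, and GetFillToAlign is the sketch above.

// Hypothetical stand-in: in V8 this would create a filler object at the
// given address so the heap stays iterable over the unused bytes.
void FillerAt(uintptr_t address, int size_in_bytes) { /* ... */ }

// Sketch: given a worst-case block of allocation_size bytes, place the
// object_size-byte object at the requested alignment and turn the slack on
// both sides into filler objects.
uintptr_t AlignWithFillerSketch(uintptr_t block, int object_size,
                                int allocation_size,
                                AllocationAlignment alignment) {
  int pre = GetFillToAlign(block, alignment);      // exact leading filler
  if (pre > 0) FillerAt(block, pre);
  int post = allocation_size - object_size - pre;  // leftover worst-case slack
  if (post > 0) FillerAt(block + pre + object_size, post);
  return block + pre;                              // aligned object start
}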
 AllocationResult PagedSpace::AllocateRaw(int size_in_bytes,
                                          AllocationAlignment alignment) {
 #ifdef V8_HOST_ARCH_32_BIT
   return alignment == kDoubleAligned
              ? AllocateRawAligned(size_in_bytes, kDoubleAligned)
              : AllocateRawUnaligned(size_in_bytes);
 #else
   return AllocateRawUnaligned(size_in_bytes);
 #endif
 }
 
 
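The #ifdef dispatch above encodes a host-width fact: allocation tops are always word-aligned, so on a 64-bit host every allocation is already 8-byte aligned and kDoubleAligned costs nothing, while on a 32-bit host a top can sit at offset 4 within an 8-byte granule. A small worked illustration, under the same assumed constants as the sketches above:

#include <cstdint>
#include <cstdio>

// Filler needed to double-align a word-aligned top, given the host word size.
int FillToDoubleAlign(uintptr_t top, int word_size) {
  const uintptr_t kDoubleAlignmentMask = 7;  // assumed: kDoubleSize - 1
  return (top & kDoubleAlignmentMask) != 0 ? word_size : 0;
}

int main() {
  printf("%d\n", FillToDoubleAlign(0x1000, 4));  // 0: 32-bit top, already aligned
  printf("%d\n", FillToDoubleAlign(0x1004, 4));  // 4: 32-bit top, one word of filler
  printf("%d\n", FillToDoubleAlign(0x2008, 8));  // 0: 64-bit tops are multiples of 8
  return 0;
}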
 // -----------------------------------------------------------------------------
 // NewSpace
 
 
 AllocationResult NewSpace::AllocateRawAligned(int size_in_bytes,
                                               AllocationAlignment alignment) {
   Address old_top = allocation_info_.top();
-  int alignment_size = 0;
-  int aligned_size_in_bytes = 0;
-
-  // If double alignment is required and top pointer is not aligned, we allocate
-  // additional memory to take care of the alignment.
-  if (alignment == kDoubleAligned &&
-      (OffsetFrom(old_top) & kDoubleAlignmentMask) != 0) {
-    alignment_size += kPointerSize;
-  } else if (alignment == kDoubleUnaligned &&
-             (OffsetFrom(old_top) & kDoubleAlignmentMask) == 0) {
-    alignment_size += kPointerSize;
-  }
-  aligned_size_in_bytes = size_in_bytes + alignment_size;
+  int filler_size = Heap::GetFillToAlign(old_top, alignment);
+  int aligned_size_in_bytes = size_in_bytes + filler_size;
 
   if (allocation_info_.limit() - old_top < aligned_size_in_bytes) {
     return SlowAllocateRaw(size_in_bytes, alignment);
   }
 
   HeapObject* obj = HeapObject::FromAddress(old_top);
   allocation_info_.set_top(allocation_info_.top() + aligned_size_in_bytes);
   DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);
 
-  if (alignment_size > 0) {
-    obj = heap()->PrecedeWithFiller(obj);
+  if (filler_size > 0) {
+    obj = heap()->PrecedeWithFiller(obj, filler_size);
   }
 
   // The slow path above ultimately goes through AllocateRaw, so this suffices.
   MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj->address(), size_in_bytes);
 
-  DCHECK((kDoubleAligned && (OffsetFrom(obj) & kDoubleAlignmentMask) == 0) ||
-         (kDoubleUnaligned && (OffsetFrom(obj) & kDoubleAlignmentMask) != 0));
-
   return obj;
 }
 
 
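Because NewSpace allocation is a pure pointer bump, the exact filler is known up front from the current top, so the fast path reserves size_in_bytes + filler_size in one bump and then prepends the filler. A worked example of the arithmetic, again under the assumed 32-bit constants from the sketches above:

#include <cassert>
#include <cstdint>

int main() {
  const uintptr_t kDoubleAlignmentMask = 7;
  const int kPointerSize = 4;

  uintptr_t old_top = 0x1004;  // word-aligned but not double-aligned
  int size_in_bytes = 16;

  // Exact filler is computable before bumping (unlike the free-list path).
  int filler_size =
      (old_top & kDoubleAlignmentMask) != 0 ? kPointerSize : 0;  // == 4
  int aligned_size_in_bytes = size_in_bytes + filler_size;       // == 20

  uintptr_t new_top = old_top + aligned_size_in_bytes;  // single pointer bump
  uintptr_t object = old_top + filler_size;             // object follows filler

  assert((object & kDoubleAlignmentMask) == 0);  // 0x1008: double-aligned
  assert(new_top == 0x1018);
  return 0;
}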
 AllocationResult NewSpace::AllocateRawUnaligned(int size_in_bytes) {
   Address old_top = allocation_info_.top();
 
   if (allocation_info_.limit() - old_top < size_in_bytes) {
     return SlowAllocateRaw(size_in_bytes, kWordAligned);
   }
(...skipping 28 matching lines...)
 
 
 intptr_t LargeObjectSpace::Available() {
   return ObjectSizeFor(heap()->isolate()->memory_allocator()->Available());
 }
 
 }
 }  // namespace v8::internal
 
 #endif  // V8_HEAP_SPACES_INL_H_