Chromium Code Reviews

Unified Diff: src/heap/spaces-inl.h

Issue 1138643005: Clean-up aligned allocation logic. (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 7 months ago
 // Copyright 2011 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #ifndef V8_HEAP_SPACES_INL_H_
 #define V8_HEAP_SPACES_INL_H_

 #include "src/heap/spaces.h"
 #include "src/heap-profiler.h"
 #include "src/isolate.h"

(...skipping 259 matching lines...)
   allocation_info_.set_top(new_top);
   if (alignment_size > 0) {
     return heap()->EnsureAligned(HeapObject::FromAddress(current_top),
                                  size_in_bytes, alignment);
   }
   return HeapObject::FromAddress(current_top);
 }


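The tail of PagedSpace::AllocateRawAligned above pads the bump pointer whenever the current top does not sit on a double boundary. A minimal self-contained sketch of that padding computation, assuming V8's 32-bit constants (kPointerSize == 4, kDoubleAlignment == 8); AlignmentFillSize is a hypothetical name for illustration, not a V8 helper:

#include <cassert>
#include <cstdint>

constexpr uintptr_t kDoubleAlignment = 8;  // assumed: alignment doubles require
constexpr uintptr_t kDoubleAlignmentMask = kDoubleAlignment - 1;
constexpr int kPointerSize = 4;            // assumed: 32-bit word size

// Bytes of filler needed so an object placed at `top` starts on an 8-byte
// boundary. On a word-aligned 32-bit heap, top mod 8 is either 0 or 4, so
// the fill is either nothing or exactly one pointer-sized word.
int AlignmentFillSize(uintptr_t top) {
  return (top & kDoubleAlignmentMask) == 0 ? 0 : kPointerSize;
}

int main() {
  assert(AlignmentFillSize(0x1000) == 0);  // already double-aligned
  assert(AlignmentFillSize(0x1004) == 4);  // one filler word restores alignment
}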
 // Raw allocation.
-AllocationResult PagedSpace::AllocateRaw(int size_in_bytes) {
+AllocationResult PagedSpace::AllocateRawUnaligned(int size_in_bytes) {
   HeapObject* object = AllocateLinearly(size_in_bytes);

   if (object == NULL) {
     object = free_list_.Allocate(size_in_bytes);
     if (object == NULL) {
       object = SlowAllocateRaw(size_in_bytes);
     }
   }

   if (object != NULL) {
(...skipping 27 matching lines...)

   if (object != NULL) {
     MSAN_ALLOCATED_UNINITIALIZED_MEMORY(object->address(), size_in_bytes);
     return object;
   }

   return AllocationResult::Retry(identity());
 }


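Note the shape of the renamed unaligned paged path above before the new overload below: it tries the linear bump-pointer area first, falls back to the free list, and only then takes the slow path that can grow the space or trigger a collection. A self-contained sketch of that three-step fallback (every name here is a stand-in, not V8's):

#include <cstdio>

struct FakeSpace {
  int linear_budget = 1;    // bump-pointer area has room for one object
  int freelist_budget = 1;  // free list can satisfy one more request

  const char* AllocateLinearly() {
    return linear_budget-- > 0 ? "linear" : nullptr;
  }
  const char* FreeListAllocate() {
    return freelist_budget-- > 0 ? "free list" : nullptr;
  }
  const char* SlowAllocateRaw() { return "slow path"; }

  // Same fallback order as the diff: linear, then free list, then slow.
  const char* AllocateRawUnaligned() {
    const char* object = AllocateLinearly();
    if (object == nullptr) object = FreeListAllocate();
    if (object == nullptr) object = SlowAllocateRaw();
    return object;
  }
};

int main() {
  FakeSpace space;
  for (int i = 0; i < 3; i++) {
    std::printf("%s\n", space.AllocateRawUnaligned());  // linear, free list, slow path
  }
}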
+AllocationResult PagedSpace::AllocateRaw(int size_in_bytes,
+                                         AllocationAlignment alignment) {
+#ifdef V8_HOST_ARCH_32_BIT
+  return alignment == kDoubleAligned
+             ? AllocateRawAligned(size_in_bytes, kDoubleAligned)
+             : AllocateRawUnaligned(size_in_bytes);
+#else
+  return AllocateRawUnaligned(size_in_bytes);
+#endif
+}
+
+
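The added overload makes alignment a compile-time concern: only V8_HOST_ARCH_32_BIT builds can route a kDoubleAligned request to the aligned path, because 64-bit word alignment already satisfies doubles. A compilable sketch of the same dispatch pattern with stub bodies (Space and the string returns are stand-ins, not V8 code):

#include <cstdio>

enum AllocationAlignment { kWordAligned, kDoubleAligned, kDoubleUnaligned };

struct Space {
  const char* AllocateRawAligned(int, AllocationAlignment) { return "aligned path"; }
  const char* AllocateRawUnaligned(int) { return "unaligned path"; }

  // Mirrors the overload added in this CL: alignment matters on 32-bit only.
  const char* AllocateRaw(int size, AllocationAlignment alignment) {
#ifdef V8_HOST_ARCH_32_BIT
    return alignment == kDoubleAligned
               ? AllocateRawAligned(size, kDoubleAligned)
               : AllocateRawUnaligned(size);
#else
    (void)alignment;  // 64-bit: word alignment already satisfies doubles
    return AllocateRawUnaligned(size);
#endif
  }
};

int main() {
  Space space;
  std::printf("%s\n", space.AllocateRaw(16, kDoubleAligned));
}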
 // -----------------------------------------------------------------------------
 // NewSpace


 AllocationResult NewSpace::AllocateRawAligned(int size_in_bytes,
                                               AllocationAlignment alignment) {
   Address old_top = allocation_info_.top();
   int alignment_size = 0;
   int aligned_size_in_bytes = 0;

(...skipping 23 matching lines...)
   // The slow path above ultimately goes through AllocateRaw, so this suffices.
   MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj->address(), size_in_bytes);

   DCHECK((kDoubleAligned && (OffsetFrom(obj) & kDoubleAlignmentMask) == 0) ||
          (kDoubleUnaligned && (OffsetFrom(obj) & kDoubleAlignmentMask) != 0));

   return obj;
 }


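One observation on the DCHECK above: with the usual enum definition (kWordAligned = 0, kDoubleAligned = 1, kDoubleUnaligned = 2) both named constants are nonzero, so each conjunct's first operand is always true and the check accepts any address. The invariant presumably intended is per-alignment, sketched below with an assumed kDoubleAlignment of 8; SatisfiesAlignment is illustrative, not V8 code:

#include <cassert>
#include <cstdint>

enum AllocationAlignment { kWordAligned, kDoubleAligned, kDoubleUnaligned };

constexpr uintptr_t kDoubleAlignmentMask = 8 - 1;  // assumed: kDoubleAlignment == 8

// A kDoubleAligned result must land on an 8-byte boundary; a kDoubleUnaligned
// result (used when the double payload, not the header, must be aligned)
// deliberately lands off it; kWordAligned imposes no extra constraint.
bool SatisfiesAlignment(uintptr_t address, AllocationAlignment alignment) {
  bool on_boundary = (address & kDoubleAlignmentMask) == 0;
  if (alignment == kDoubleAligned) return on_boundary;
  if (alignment == kDoubleUnaligned) return !on_boundary;
  return true;
}

int main() {
  assert(SatisfiesAlignment(0x1008, kDoubleAligned));
  assert(SatisfiesAlignment(0x100c, kDoubleUnaligned));
}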
-AllocationResult NewSpace::AllocateRaw(int size_in_bytes) {
+AllocationResult NewSpace::AllocateRawUnaligned(int size_in_bytes) {
   Address old_top = allocation_info_.top();

   if (allocation_info_.limit() - old_top < size_in_bytes) {
     return SlowAllocateRaw(size_in_bytes, kWordAligned);
   }

   HeapObject* obj = HeapObject::FromAddress(old_top);
   allocation_info_.set_top(allocation_info_.top() + size_in_bytes);
   DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);

   // The slow path above ultimately goes through AllocateRaw, so this suffices.
   MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj->address(), size_in_bytes);

   return obj;
 }


+AllocationResult NewSpace::AllocateRaw(int size_in_bytes,
+                                       AllocationAlignment alignment) {
+#ifdef V8_HOST_ARCH_32_BIT
+  return alignment == kDoubleAligned
+             ? AllocateRawAligned(size_in_bytes, kDoubleAligned)
+             : AllocateRawUnaligned(size_in_bytes);
+#else
+  return AllocateRawUnaligned(size_in_bytes);
+#endif
+}
+
+
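Both dispatchers share the same reasoning for the 64-bit branch: when the word size is 8 bytes, every word-aligned address is already double-aligned, so the unaligned path is always safe to take. A two-line check of that claim (the constants are assumptions mirroring a 64-bit configuration, not V8's declarations):

#include <cstdint>

constexpr uintptr_t kPointerSize = 8;      // assumed: 64-bit word size
constexpr uintptr_t kDoubleAlignment = 8;  // alignment doubles require

static_assert(kPointerSize % kDoubleAlignment == 0,
              "word-aligned allocations already satisfy double alignment");

int main() { return 0; }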
 LargePage* LargePage::Initialize(Heap* heap, MemoryChunk* chunk) {
   heap->incremental_marking()->SetOldSpacePageFlags(chunk);
   return static_cast<LargePage*>(chunk);
 }


 intptr_t LargeObjectSpace::Available() {
   return ObjectSizeFor(heap()->isolate()->memory_allocator()->Available());
 }

 }
 }  // namespace v8::internal

 #endif  // V8_HEAP_SPACES_INL_H_