Chromium Code Reviews

Unified Diff: src/heap/spaces-inl.h

Issue 1265443003: remove recursion from NewSpace::AllocateRaw* (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: address comments from the code review (created 5 years, 4 months ago)
--- a/src/heap/spaces-inl.h
+++ b/src/heap/spaces-inl.h
 // Copyright 2011 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #ifndef V8_HEAP_SPACES_INL_H_
 #define V8_HEAP_SPACES_INL_H_

 #include "src/heap/spaces.h"
 #include "src/heap-profiler.h"
 #include "src/isolate.h"
(...skipping 327 matching lines...)
 #endif
 }


 // -----------------------------------------------------------------------------
 // NewSpace


 AllocationResult NewSpace::AllocateRawAligned(int size_in_bytes,
                                               AllocationAlignment alignment) {
-  Address old_top = allocation_info_.top();
-  int filler_size = Heap::GetFillToAlign(old_top, alignment);
+  Address top = allocation_info_.top();
+  int filler_size = Heap::GetFillToAlign(top, alignment);
   int aligned_size_in_bytes = size_in_bytes + filler_size;

-  if (allocation_info_.limit() - old_top < aligned_size_in_bytes) {
-    return SlowAllocateRaw(size_in_bytes, alignment);
+  if (allocation_info_.limit() - top < aligned_size_in_bytes) {
+    // See if we can create room.
+    if (!EnsureAllocation(size_in_bytes, alignment)) {
+      return AllocationResult::Retry();
+    }
+
+    top = allocation_info_.top();
+    filler_size = Heap::GetFillToAlign(top, alignment);
+    aligned_size_in_bytes = size_in_bytes + filler_size;
   }

-  HeapObject* obj = HeapObject::FromAddress(old_top);
-  allocation_info_.set_top(allocation_info_.top() + aligned_size_in_bytes);
+  HeapObject* obj = HeapObject::FromAddress(top);
+  allocation_info_.set_top(top + aligned_size_in_bytes);
   DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);

   if (filler_size > 0) {
     obj = heap()->PrecedeWithFiller(obj, filler_size);
   }

-  // The slow path above ultimately goes through AllocateRaw, so this suffices.
   MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj->address(), size_in_bytes);

   return obj;
 }
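This hunk is the heart of the change. Previously the slow path returned SlowAllocateRaw(...), which, as the deleted comment notes, ultimately re-entered AllocateRaw, so the fast and slow paths were mutually recursive. Now EnsureAllocation only tries to make room and reports success or failure: on success the function re-reads top and recomputes filler_size (the fill needed depends on the new top address) before completing the bump allocation inline, and on failure it returns AllocationResult::Retry(). A minimal, self-contained sketch of that shape, with hypothetical names (Arena, ensure_room) standing in for NewSpace and EnsureAllocation:

#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <cstdlib>

struct Arena {
  std::uint8_t* top = nullptr;
  std::uint8_t* limit = nullptr;

  // Stand-in for EnsureAllocation: make room but allocate nothing.
  // A real implementation would collect garbage or grow a semispace;
  // this sketch just grabs a fresh 4 KiB block (and leaks old ones).
  bool ensure_room(std::size_t size) {
    if (size > 4096) return false;
    auto* block = static_cast<std::uint8_t*>(std::malloc(4096));
    if (block == nullptr) return false;
    top = block;
    limit = block + 4096;
    return true;
  }

  // Mirrors the patched fast path: if the region is too small, grow it
  // once and fall through to the same bump-pointer code. No recursion.
  void* allocate(std::size_t size) {
    if (static_cast<std::size_t>(limit - top) < size) {
      if (!ensure_room(size)) return nullptr;  // ~ AllocationResult::Retry()
      // ensure_room may have moved the allocation area, so the bump below
      // uses the updated top, just as the patch re-reads allocation_info_.
    }
    void* result = top;
    top += size;
    return result;
  }
};

int main() {
  Arena arena;
  void* p = arena.allocate(64);  // first call takes the slow path once
  std::printf("allocated at %p\n", p);
  return 0;
}

Keeping the slow path allocation-free means the bump pointer only ever advances in one place, which is exactly what removes the recursion.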


 AllocationResult NewSpace::AllocateRawUnaligned(int size_in_bytes) {
-  Address old_top = allocation_info_.top();
-
-  if (allocation_info_.limit() - old_top < size_in_bytes) {
-    return SlowAllocateRaw(size_in_bytes, kWordAligned);
+  Address top = allocation_info_.top();
+  if (allocation_info_.limit() - top < size_in_bytes) {
+    // See if we can create room.
+    if (!EnsureAllocation(size_in_bytes, kWordAligned)) {
+      return AllocationResult::Retry();
+    }
+
+    top = allocation_info_.top();
   }

-  HeapObject* obj = HeapObject::FromAddress(old_top);
-  allocation_info_.set_top(allocation_info_.top() + size_in_bytes);
+  HeapObject* obj = HeapObject::FromAddress(top);
+  allocation_info_.set_top(top + size_in_bytes);
   DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);

-  // The slow path above ultimately goes through AllocateRaw, so this suffices.
   MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj->address(), size_in_bytes);

   return obj;
 }
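The unaligned variant follows the same pattern, minus the filler bookkeeping. AllocationResult::Retry() pushes out-of-memory handling up to the caller instead of retrying inside the allocator. The sketch below shows only the caller-side shape; RawResult, allocate_raw, and allocate_with_retry are invented for illustration and are not V8's Heap API, where the retry/GC loop lives inside the heap:

#include <cstddef>
#include <cstdio>

// Invented stand-in for AllocationResult: a null object means "retry".
struct RawResult {
  void* object;
  bool is_retry() const { return object == nullptr; }
  static RawResult Retry() { return RawResult{nullptr}; }
};

// Invented allocator in the spirit of the patched AllocateRawUnaligned:
// a fixed pool that signals Retry instead of recursing when full.
RawResult allocate_raw(std::size_t size) {
  static unsigned char pool[1024];
  static std::size_t used = 0;
  if (size > sizeof(pool) - used) return RawResult::Retry();
  void* p = pool + used;
  used += size;
  return RawResult{p};
}

// The caller owns the retry policy: reclaim memory between attempts,
// then give up. (In V8 a garbage collection would run at the comment.)
void* allocate_with_retry(std::size_t size, int max_attempts = 2) {
  for (int attempt = 0; attempt < max_attempts; ++attempt) {
    RawResult r = allocate_raw(size);
    if (!r.is_retry()) return r.object;
    // collect_garbage();  // hypothetical; would free pool space here
  }
  return nullptr;  // genuinely out of memory
}

int main() {
  void* a = allocate_with_retry(100);
  void* b = allocate_with_retry(2000);  // larger than the pool: fails
  std::printf("a=%p b=%p\n", a, b);
  return 0;
}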


 AllocationResult NewSpace::AllocateRaw(int size_in_bytes,
                                        AllocationAlignment alignment) {
 #ifdef V8_HOST_ARCH_32_BIT
   return alignment == kDoubleAligned
(...skipping 12 matching lines...)


 intptr_t LargeObjectSpace::Available() {
   return ObjectSizeFor(heap()->isolate()->memory_allocator()->Available());
 }

 }
 }  // namespace v8::internal

 #endif  // V8_HEAP_SPACES_INL_H_
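One detail worth noting in the aligned path: the amount of filler depends on the address being aligned. On 32-bit hosts (the V8_HOST_ARCH_32_BIT branch above) the new-space top is only guaranteed word alignment, so a kDoubleAligned request may need one word of fill before the object, and after EnsureAllocation moves top the fill must be recomputed. A sketch of that computation, assuming 8-byte double alignment; get_fill_to_align is a hypothetical stand-in for Heap::GetFillToAlign:

#include <cstdint>
#include <cstdio>

constexpr std::uintptr_t kDoubleAlignment = 8;

// Bytes of filler needed so that address + fill is 8-byte aligned.
int get_fill_to_align(std::uintptr_t address) {
  std::uintptr_t misalignment = address & (kDoubleAlignment - 1);
  return misalignment == 0
             ? 0
             : static_cast<int>(kDoubleAlignment - misalignment);
}

int main() {
  std::printf("%d\n", get_fill_to_align(0x1000));  // 0: already aligned
  std::printf("%d\n", get_fill_to_align(0x1004));  // 4: one 32-bit word
  return 0;
}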
