Chromium Code Reviews

Unified Diff: src/heap/spaces-inl.h

Issue 1265443003: remove recursion from NewSpace::AllocateRaw* (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: EnsureAllocation now guarantees that bump-pointer will succeed (created 5 years, 4 months ago)
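The shape of the change, for orientation: the old fast paths bailed to SlowAllocateRaw, which ultimately called back into AllocateRaw (the recursion this issue removes). The new code instead calls EnsureAllocation, which either makes enough room that limit - top >= size holds or fails (surfaced as AllocationResult::Retry()), after which the bump-pointer step cannot fail. A self-contained sketch of that contract, assuming a malloc-backed refill and illustrative names (Space, kBlockSize) that are not V8's real API:

#include <cstdint>
#include <cstdio>
#include <cstdlib>

// Toy stand-in for NewSpace's linear allocation area; not V8's real types.
struct Space {
  std::uintptr_t top = 0;
  std::uintptr_t limit = 0;

  // Models the EnsureAllocation contract from this patch set: when it
  // returns true, limit - top >= size, so a bump allocation must succeed.
  bool EnsureAllocation(std::size_t size) {
    if (limit - top >= size) return true;
    // V8 would advance to a fresh page or trigger a GC here; the sketch
    // just grabs a new block (and leaks the old one).
    const std::size_t kBlockSize = size > 4096 ? size : 4096;
    void* block = std::malloc(kBlockSize);
    if (block == nullptr) return false;  // Maps to Retry() in V8.
    top = reinterpret_cast<std::uintptr_t>(block);
    limit = top + kBlockSize;
    return true;
  }

  // Non-recursive allocation: at most one EnsureAllocation call, then a
  // plain bump of top; no call back into AllocateRaw.
  void* AllocateRaw(std::size_t size) {
    if (limit - top < size) {
      if (!EnsureAllocation(size)) return nullptr;
      // top and limit may have moved; the bump below re-reads them.
    }
    void* result = reinterpret_cast<void*>(top);
    top += size;
    return result;
  }
};

int main() {
  Space space;
  std::printf("a=%p b=%p\n", space.AllocateRaw(64), space.AllocateRaw(64));
}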
 // Copyright 2011 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #ifndef V8_HEAP_SPACES_INL_H_
 #define V8_HEAP_SPACES_INL_H_

 #include "src/heap/spaces.h"
 #include "src/heap-profiler.h"
 #include "src/isolate.h"
(...skipping 327 matching lines...)
 #endif
 }


 // -----------------------------------------------------------------------------
 // NewSpace


 AllocationResult NewSpace::AllocateRawAligned(int size_in_bytes,
                                               AllocationAlignment alignment) {
-  Address old_top = allocation_info_.top();
-  int filler_size = Heap::GetFillToAlign(old_top, alignment);
+  Address top = allocation_info_.top();
+  int filler_size = Heap::GetFillToAlign(top, alignment);
   int aligned_size_in_bytes = size_in_bytes + filler_size;

-  if (allocation_info_.limit() - old_top < aligned_size_in_bytes) {
-    return SlowAllocateRaw(size_in_bytes, alignment);
+  if (allocation_info_.limit() - top < aligned_size_in_bytes) {
+    // See if we can create room.
+    if (!EnsureAllocation(size_in_bytes, alignment)) {
+      return AllocationResult::Retry();
+    }
+
+    top = allocation_info_.top();
Hannes Payer (out of office) 2015/07/31 07:50:52: We got a new top here, we have to recalculate filler_size.
ofrobots 2015/07/31 13:47:38: Done.
   }

-  HeapObject* obj = HeapObject::FromAddress(old_top);
-  allocation_info_.set_top(allocation_info_.top() + aligned_size_in_bytes);
+  HeapObject* obj = HeapObject::FromAddress(top);
+  allocation_info_.set_top(top + aligned_size_in_bytes);
   DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);

   if (filler_size > 0) {
     obj = heap()->PrecedeWithFiller(obj, filler_size);
   }

-  // The slow path above ultimately goes through AllocateRaw, so this suffices.
   MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj->address(), size_in_bytes);

   return obj;
 }


 AllocationResult NewSpace::AllocateRawUnaligned(int size_in_bytes) {
-  Address old_top = allocation_info_.top();
-
-  if (allocation_info_.limit() - old_top < size_in_bytes) {
-    return SlowAllocateRaw(size_in_bytes, kWordAligned);
+  Address top = allocation_info_.top();
+  if (allocation_info_.limit() - top < size_in_bytes) {
+    // See if we can create room.
+    if (!EnsureAllocation(size_in_bytes, kWordAligned)) {
+      return AllocationResult::Retry();
+    }
+
+    top = allocation_info_.top();
   }

-  HeapObject* obj = HeapObject::FromAddress(old_top);
-  allocation_info_.set_top(allocation_info_.top() + size_in_bytes);
+  HeapObject* obj = HeapObject::FromAddress(top);
+  allocation_info_.set_top(top + size_in_bytes);
   DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);

-  // The slow path above ultimately goes through AllocateRaw, so this suffices.
   MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj->address(), size_in_bytes);

   return obj;
 }


 AllocationResult NewSpace::AllocateRaw(int size_in_bytes,
                                        AllocationAlignment alignment) {
 #ifdef V8_HOST_ARCH_32_BIT
   return alignment == kDoubleAligned
(...skipping 12 matching lines...)


 intptr_t LargeObjectSpace::Available() {
   return ObjectSizeFor(heap()->isolate()->memory_allocator()->Available());
 }

 }
 }  // namespace v8::internal

 #endif  // V8_HEAP_SPACES_INL_H_
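On the review comment above: the fill needed to align an object depends on the value of top, so a filler_size computed before EnsureAllocation runs can be wrong for the new top it installs; both filler_size and aligned_size_in_bytes have to be recomputed after top is re-read. A small standalone illustration, using a simplified stand-in for Heap::GetFillToAlign hard-coded to 8-byte (double) alignment rather than V8's real signature:

#include <cstdint>
#include <cstdio>

// Simplified model: bytes of padding so that (address + fill) % 8 == 0.
static int GetFillToAlign(std::uintptr_t address) {
  return static_cast<int>((8 - (address & 7)) & 7);
}

int main() {
  std::uintptr_t old_top = 0x1004;  // needs 4 bytes of fill
  std::uintptr_t new_top = 0x2000;  // already 8-byte aligned, needs 0
  std::printf("fill at old top: %d, fill at new top: %d\n",
              GetFillToAlign(old_top), GetFillToAlign(new_top));
  // Reusing the stale filler_size at new_top would misplace the object
  // and over-bump top, which is what the reviewer flagged.
}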
