Chromium Code Reviews

Side by Side Diff: src/heap/spaces-inl.h

Issue 1159123002: Revert of Clean up aligned allocation code in preparation for SIMD alignments. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 6 months ago
 // Copyright 2011 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #ifndef V8_HEAP_SPACES_INL_H_
 #define V8_HEAP_SPACES_INL_H_

 #include "src/heap/spaces.h"
 #include "src/heap-profiler.h"
 #include "src/isolate.h"
(...skipping 232 matching lines...)
 HeapObject* PagedSpace::AllocateLinearly(int size_in_bytes) {
   Address current_top = allocation_info_.top();
   Address new_top = current_top + size_in_bytes;
   if (new_top > allocation_info_.limit()) return NULL;

   allocation_info_.set_top(new_top);
   return HeapObject::FromAddress(current_top);
 }

-HeapObject* PagedSpace::AllocateLinearlyAligned(int* size_in_bytes,
-                                                AllocationAlignment alignment) {
-  Address current_top = allocation_info_.top();
-  int filler_size = Heap::GetFillToAlign(current_top, alignment);
-
-  Address new_top = current_top + filler_size + *size_in_bytes;
-  if (new_top > allocation_info_.limit()) return NULL;
-
-  allocation_info_.set_top(new_top);
-  if (filler_size > 0) {
-    *size_in_bytes += filler_size;
-    return heap()->PrecedeWithFiller(HeapObject::FromAddress(current_top),
-                                     filler_size);
-  }
-
-  return HeapObject::FromAddress(current_top);
-}
+HeapObject* PagedSpace::AllocateLinearlyAligned(int size_in_bytes,
+                                                AllocationAlignment alignment) {
+  Address current_top = allocation_info_.top();
+  int alignment_size = 0;
+
+  if (alignment == kDoubleAligned &&
+      (OffsetFrom(current_top) & kDoubleAlignmentMask) != 0) {
+    alignment_size = kPointerSize;
+    size_in_bytes += alignment_size;
+  } else if (alignment == kDoubleUnaligned &&
+             (OffsetFrom(current_top) & kDoubleAlignmentMask) == 0) {
+    alignment_size = kPointerSize;
+    size_in_bytes += alignment_size;
+  }
+  Address new_top = current_top + size_in_bytes;
+  if (new_top > allocation_info_.limit()) return NULL;
+
+  allocation_info_.set_top(new_top);
+  if (alignment_size > 0) {
+    return heap()->EnsureAligned(HeapObject::FromAddress(current_top),
+                                 size_in_bytes, alignment);
+  }
+  return HeapObject::FromAddress(current_top);
+}
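For readers skimming the revert, the alignment check being reinstated here boils down to the following standalone sketch. It is illustrative only: the constants and the Alignment enum are stand-ins for V8's kPointerSize, kDoubleAlignmentMask, kDoubleAligned and kDoubleUnaligned, assuming a 32-bit heap where pointers are 4 bytes and doubles need 8-byte alignment.

#include <cstdint>

namespace alignment_sketch {  // illustrative stand-ins, not V8 definitions

constexpr uintptr_t kPointerSize = 4;          // 32-bit heap assumption
constexpr uintptr_t kDoubleAlignmentMask = 7;  // 8-byte alignment mask
enum Alignment { kWordAligned, kDoubleAligned, kDoubleUnaligned };

// Number of filler bytes needed in front of an object allocated at |top| so
// that the object ends up with the requested alignment; this mirrors the
// if/else-if chain reinstated above.
uintptr_t AlignmentSize(uintptr_t top, Alignment alignment) {
  if (alignment == kDoubleAligned && (top & kDoubleAlignmentMask) != 0)
    return kPointerSize;  // top is not 8-byte aligned: pad by one word.
  if (alignment == kDoubleUnaligned && (top & kDoubleAlignmentMask) == 0)
    return kPointerSize;  // top is 8-byte aligned but must not be: pad by one word.
  return 0;  // already in the requested state, no filler needed.
}

}  // namespace alignment_sketch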


 // Raw allocation.
 AllocationResult PagedSpace::AllocateRawUnaligned(int size_in_bytes) {
   HeapObject* object = AllocateLinearly(size_in_bytes);

   if (object == NULL) {
     object = free_list_.Allocate(size_in_bytes);
(...skipping 11 matching lines...)
   }

   return AllocationResult::Retry(identity());
 }

 // Raw allocation.
 AllocationResult PagedSpace::AllocateRawAligned(int size_in_bytes,
                                                 AllocationAlignment alignment) {
   DCHECK(identity() == OLD_SPACE);
-  int allocation_size = size_in_bytes;
-  HeapObject* object = AllocateLinearlyAligned(&allocation_size, alignment);
+  HeapObject* object = AllocateLinearlyAligned(size_in_bytes, alignment);
+  int aligned_size_in_bytes = size_in_bytes + kPointerSize;

   if (object == NULL) {
-    // We don't know exactly how much filler we need to align until space is
-    // allocated, so assume the worst case.
-    int filler_size = Heap::GetMaximumFillToAlign(alignment);
-    allocation_size += filler_size;
-    object = free_list_.Allocate(allocation_size);
+    object = free_list_.Allocate(aligned_size_in_bytes);
     if (object == NULL) {
-      object = SlowAllocateRaw(allocation_size);
+      object = SlowAllocateRaw(aligned_size_in_bytes);
     }
-    if (object != NULL && filler_size != 0) {
-      object = heap()->AlignWithFiller(object, size_in_bytes, allocation_size,
-                                       alignment);
+    if (object != NULL) {
+      object = heap()->EnsureAligned(object, aligned_size_in_bytes, alignment);
     }
   }

   if (object != NULL) {
-    MSAN_ALLOCATED_UNINITIALIZED_MEMORY(object->address(), allocation_size);
+    MSAN_ALLOCATED_UNINITIALIZED_MEMORY(object->address(), size_in_bytes);
     return object;
   }

   return AllocationResult::Retry(identity());
 }
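The behavioural difference in the free-list fallback is how much the request is padded: the removed (left-hand) code asks for the worst-case filler via Heap::GetMaximumFillToAlign, while the reinstated code always adds one kPointerSize. A small sketch of the worst-case padding idea, continuing the alignment_sketch helpers above; MaximumFillToAlign here is an illustrative stand-in, not the actual V8 implementation.

namespace alignment_sketch {

// Worst-case filler for a free-list or slow-path request, where the final
// address is not yet known, so the exact filler cannot be computed up front.
uintptr_t MaximumFillToAlign(Alignment alignment) {
  return alignment == kWordAligned ? 0 : kPointerSize;
}

// Both versions therefore over-allocate by at most one pointer-size word and
// later fill the unused word with a filler object.
uintptr_t PaddedRequestSize(uintptr_t size_in_bytes, Alignment alignment) {
  return size_in_bytes + MaximumFillToAlign(alignment);
}

}  // namespace alignment_sketch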


 AllocationResult PagedSpace::AllocateRaw(int size_in_bytes,
                                          AllocationAlignment alignment) {
 #ifdef V8_HOST_ARCH_32_BIT
   return alignment == kDoubleAligned
              ? AllocateRawAligned(size_in_bytes, kDoubleAligned)
              : AllocateRawUnaligned(size_in_bytes);
 #else
   return AllocateRawUnaligned(size_in_bytes);
 #endif
 }

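The #ifdef reflects that the aligned path is only needed where the allocation granularity is smaller than a double: on 64-bit hosts every word-aligned allocation is already 8-byte aligned, so kDoubleAligned requests can take the unaligned fast path. A one-line illustration of that property (an assumption about what the #ifdef encodes, not V8 code):

// Illustrative only: a double-alignment fixup is needed exactly when the
// pointer size is smaller than the double size, i.e. on 32-bit heaps.
constexpr bool kNeedsDoubleAlignmentFixup = sizeof(void*) < sizeof(double);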
 // -----------------------------------------------------------------------------
 // NewSpace


 AllocationResult NewSpace::AllocateRawAligned(int size_in_bytes,
                                               AllocationAlignment alignment) {
   Address old_top = allocation_info_.top();
-  int filler_size = Heap::GetFillToAlign(old_top, alignment);
-  int aligned_size_in_bytes = size_in_bytes + filler_size;
+  int alignment_size = 0;
+  int aligned_size_in_bytes = 0;
+
+  // If double alignment is required and top pointer is not aligned, we allocate
+  // additional memory to take care of the alignment.
+  if (alignment == kDoubleAligned &&
+      (OffsetFrom(old_top) & kDoubleAlignmentMask) != 0) {
+    alignment_size += kPointerSize;
+  } else if (alignment == kDoubleUnaligned &&
+             (OffsetFrom(old_top) & kDoubleAlignmentMask) == 0) {
+    alignment_size += kPointerSize;
+  }
+  aligned_size_in_bytes = size_in_bytes + alignment_size;

   if (allocation_info_.limit() - old_top < aligned_size_in_bytes) {
     return SlowAllocateRaw(size_in_bytes, alignment);
   }

   HeapObject* obj = HeapObject::FromAddress(old_top);
   allocation_info_.set_top(allocation_info_.top() + aligned_size_in_bytes);
   DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);

-  if (filler_size > 0) {
-    obj = heap()->PrecedeWithFiller(obj, filler_size);
+  if (alignment_size > 0) {
+    obj = heap()->PrecedeWithFiller(obj);
   }

   // The slow path above ultimately goes through AllocateRaw, so this suffices.
   MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj->address(), size_in_bytes);

+  DCHECK((kDoubleAligned && (OffsetFrom(obj) & kDoubleAlignmentMask) == 0) ||
+         (kDoubleUnaligned && (OffsetFrom(obj) & kDoubleAlignmentMask) != 0));
+
   return obj;
 }
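Taken together, the fast path this revert restores is a plain bump-pointer allocation with an optional one-word filler in front of the object. A toy model of that flow, reusing the alignment_sketch helpers from the sketch above (illustrative only; the real code also writes a filler map into the padding via PrecedeWithFiller and falls back to SlowAllocateRaw on failure):

namespace alignment_sketch {

struct BumpAllocator {
  uintptr_t top;
  uintptr_t limit;

  // Returns the object address, or 0 when the caller should take the slow
  // path; mirrors the structure of the restored NewSpace fast path.
  uintptr_t AllocateAligned(uintptr_t size_in_bytes, Alignment alignment) {
    uintptr_t filler = AlignmentSize(top, alignment);
    uintptr_t total = size_in_bytes + filler;
    if (limit - top < total) return 0;  // not enough linear space left
    uintptr_t object = top + filler;    // object starts after the filler word
    top += total;                       // bump the allocation pointer
    return object;                      // now has the requested alignment
  }
};

}  // namespace alignment_sketch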


 AllocationResult NewSpace::AllocateRawUnaligned(int size_in_bytes) {
   Address old_top = allocation_info_.top();

   if (allocation_info_.limit() - old_top < size_in_bytes) {
     return SlowAllocateRaw(size_in_bytes, kWordAligned);
   }
(...skipping 28 matching lines...)


 intptr_t LargeObjectSpace::Available() {
   return ObjectSizeFor(heap()->isolate()->memory_allocator()->Available());
 }

 }
 }  // namespace v8::internal

 #endif  // V8_HEAP_SPACES_INL_H_