Chromium Code Reviews

Diff: src/heap/spaces-inl.h

Issue 1150593003: Clean up aligned allocation code in preparation for SIMD alignments. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Fix compile. Created 5 years, 7 months ago
 // Copyright 2011 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #ifndef V8_HEAP_SPACES_INL_H_
 #define V8_HEAP_SPACES_INL_H_

 #include "src/heap/spaces.h"
 #include "src/heap-profiler.h"
 #include "src/isolate.h"
(...skipping 232 matching lines...)
 HeapObject* PagedSpace::AllocateLinearly(int size_in_bytes) {
   Address current_top = allocation_info_.top();
   Address new_top = current_top + size_in_bytes;
   if (new_top > allocation_info_.limit()) return NULL;

   allocation_info_.set_top(new_top);
   return HeapObject::FromAddress(current_top);
 }


-HeapObject* PagedSpace::AllocateLinearlyAligned(int size_in_bytes,
+HeapObject* PagedSpace::AllocateLinearlyAligned(int* size_in_bytes,
                                                 AllocationAlignment alignment) {
   Address current_top = allocation_info_.top();
-  int alignment_size = 0;
+  int alignment_size = Heap::GetAlignmentSize(current_top, alignment);

-  if (alignment == kDoubleAligned &&
-      (OffsetFrom(current_top) & kDoubleAlignmentMask) != 0) {
-    alignment_size = kPointerSize;
-    size_in_bytes += alignment_size;
-  } else if (alignment == kDoubleUnaligned &&
-             (OffsetFrom(current_top) & kDoubleAlignmentMask) == 0) {
-    alignment_size = kPointerSize;
-    size_in_bytes += alignment_size;
-  }
-  Address new_top = current_top + size_in_bytes;
+  Address new_top = current_top + alignment_size + *size_in_bytes;
   if (new_top > allocation_info_.limit()) return NULL;

   allocation_info_.set_top(new_top);
   if (alignment_size > 0) {
-    return heap()->EnsureAligned(HeapObject::FromAddress(current_top),
-                                 size_in_bytes, alignment);
+    *size_in_bytes += alignment_size;
+    return heap()->PrecedeWithFiller(HeapObject::FromAddress(current_top),
+                                     alignment_size);
   }
+
   return HeapObject::FromAddress(current_top);
 }

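Heap::GetAlignmentSize is introduced elsewhere in this patch and is not shown in this file. Judging from the inlined branch it replaces on the left-hand side, it presumably returns the filler size needed to put the allocation on (or off) a double boundary. A minimal standalone sketch of that assumed logic, using stand-in types and constants rather than V8's own definitions:

#include <cstdint>

// Stand-ins for illustration only; the real definitions live in V8 headers.
enum AllocationAlignment { kWordAligned, kDoubleAligned, kDoubleUnaligned };
const int kPointerSize = sizeof(void*);
const uintptr_t kDoubleAlignmentMask = sizeof(double) - 1;

// Assumed behaviour of Heap::GetAlignmentSize(address, alignment): the number
// of filler bytes needed so the object lands double-aligned (kDoubleAligned)
// or deliberately misaligned (kDoubleUnaligned); zero if it is already placed
// as requested.
static int GetAlignmentSize(uintptr_t address, AllocationAlignment alignment) {
  bool double_aligned = (address & kDoubleAlignmentMask) == 0;
  if (alignment == kDoubleAligned && !double_aligned) return kPointerSize;
  if (alignment == kDoubleUnaligned && double_aligned) return kPointerSize;
  return 0;
}

Under that assumption, the rewritten AllocateLinearlyAligned needs only one call plus an addition, and the caller learns how much padding it paid for through the int* size_in_bytes in/out parameter.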
 // Raw allocation.
 AllocationResult PagedSpace::AllocateRawUnaligned(int size_in_bytes) {
   HeapObject* object = AllocateLinearly(size_in_bytes);

   if (object == NULL) {
     object = free_list_.Allocate(size_in_bytes);
(...skipping 11 matching lines...)
   }

   return AllocationResult::Retry(identity());
 }


 // Raw allocation.
 AllocationResult PagedSpace::AllocateRawAligned(int size_in_bytes,
                                                 AllocationAlignment alignment) {
   DCHECK(identity() == OLD_SPACE);
-  HeapObject* object = AllocateLinearlyAligned(size_in_bytes, alignment);
-  int aligned_size_in_bytes = size_in_bytes + kPointerSize;
+  int aligned_size_in_bytes = size_in_bytes;
+  HeapObject* object =
+      AllocateLinearlyAligned(&aligned_size_in_bytes, alignment);

   if (object == NULL) {
     object = free_list_.Allocate(aligned_size_in_bytes);
Hannes Payer (out of office) 2015/05/21 10:38:17 I don't see how alignment works for free-list allo
bbudge 2015/05/21 10:55:45 Thanks for pointing this out. I'll have to modify
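For context on the exchange above: AllocateLinearlyAligned only bumps *size_in_bytes when the bump-pointer allocation succeeds, so on this fallback path aligned_size_in_bytes still equals size_in_bytes, and a block handed out by free_list_.Allocate() can start at any word boundary anyway, which appears to be the gap the comment points at. One possible shape of an alignment-aware fallback is sketched below; the helper names are hypothetical and this is not the fix that eventually landed, only an illustration of the concern:

#include <cstdint>

// Stand-in types and constants for illustration only.
using Address = uintptr_t;
const int kPointerSize = sizeof(void*);
const uintptr_t kDoubleAlignmentMask = sizeof(double) - 1;

// Hypothetical helpers: reserve worst-case slack before asking the free list,
// then size the filler from the address that actually came back rather than
// from the bump-pointer top inspected earlier.
int WorstCaseAlignedSize(int size_in_bytes) {
  return size_in_bytes + kPointerSize;  // one word of slack always reserved
}

int FillerSizeFor(Address block, bool want_double_aligned) {
  bool is_double_aligned = (block & kDoubleAlignmentMask) == 0;
  return (want_double_aligned != is_double_aligned) ? kPointerSize : 0;
}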
     if (object == NULL) {
       object = SlowAllocateRaw(aligned_size_in_bytes);
     }
     if (object != NULL) {
-      object = heap()->EnsureAligned(object, aligned_size_in_bytes, alignment);
+      object = heap()->PrecedeWithFiller(object,
+                                         aligned_size_in_bytes - size_in_bytes);
     }
   }

   if (object != NULL) {
     MSAN_ALLOCATED_UNINITIALIZED_MEMORY(object->address(), size_in_bytes);
     return object;
   }

   return AllocationResult::Retry(identity());
 }
(...skipping 11 matching lines...)
 }


 // -----------------------------------------------------------------------------
 // NewSpace


 AllocationResult NewSpace::AllocateRawAligned(int size_in_bytes,
                                               AllocationAlignment alignment) {
   Address old_top = allocation_info_.top();
-  int alignment_size = 0;
-  int aligned_size_in_bytes = 0;
-
-  // If double alignment is required and top pointer is not aligned, we allocate
-  // additional memory to take care of the alignment.
-  if (alignment == kDoubleAligned &&
-      (OffsetFrom(old_top) & kDoubleAlignmentMask) != 0) {
-    alignment_size += kPointerSize;
-  } else if (alignment == kDoubleUnaligned &&
-             (OffsetFrom(old_top) & kDoubleAlignmentMask) == 0) {
-    alignment_size += kPointerSize;
-  }
-  aligned_size_in_bytes = size_in_bytes + alignment_size;
+  int alignment_size = Heap::GetAlignmentSize(old_top, alignment);
+  int aligned_size_in_bytes = size_in_bytes + alignment_size;

   if (allocation_info_.limit() - old_top < aligned_size_in_bytes) {
     return SlowAllocateRaw(size_in_bytes, alignment);
   }

   HeapObject* obj = HeapObject::FromAddress(old_top);
   allocation_info_.set_top(allocation_info_.top() + aligned_size_in_bytes);
   DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);

   if (alignment_size > 0) {
-    obj = heap()->PrecedeWithFiller(obj);
+    obj = heap()->PrecedeWithFiller(obj, alignment_size);
   }

   // The slow path above ultimately goes through AllocateRaw, so this suffices.
   MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj->address(), size_in_bytes);

-  DCHECK((kDoubleAligned && (OffsetFrom(obj) & kDoubleAlignmentMask) == 0) ||
-         (kDoubleUnaligned && (OffsetFrom(obj) & kDoubleAlignmentMask) != 0));
bbudge 2015/05/20 15:39:38 I eliminated this since it can never assert.
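The deleted check tested the enum constants themselves rather than the requested alignment: kDoubleAligned and kDoubleUnaligned are both non-zero, so the condition reduces to (aligned) || (not aligned) and can indeed never fail. A post-condition that could actually fire would key off the alignment argument, along these lines (a sketch reusing the surrounding names, not part of the patch):

// Sketch only; relies on obj, alignment, OffsetFrom and kDoubleAlignmentMask
// as declared in NewSpace::AllocateRawAligned above.
DCHECK(alignment != kDoubleAligned ||
       (OffsetFrom(obj) & kDoubleAlignmentMask) == 0);
DCHECK(alignment != kDoubleUnaligned ||
       (OffsetFrom(obj) & kDoubleAlignmentMask) != 0);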
-
   return obj;
 }

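Both the PagedSpace and NewSpace paths above now pass an explicit byte count to PrecedeWithFiller, whose definition is outside this file. The contract the callers appear to rely on, sketched with stand-in types (an assumption, not the V8 declaration):

#include <cstdint>

using Address = uintptr_t;

// Assumed contract of Heap::PrecedeWithFiller(object, filler_size): write a
// filler (free-space) object over the first filler_size bytes so the heap
// stays iterable, and return the real object starting just past the filler.
Address PrecedeWithFiller(Address object, int filler_size,
                          void (*create_filler_at)(Address, int)) {
  create_filler_at(object, filler_size);  // stand-in for the heap's filler writer
  return object + filler_size;
}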
 AllocationResult NewSpace::AllocateRawUnaligned(int size_in_bytes) {
   Address old_top = allocation_info_.top();

   if (allocation_info_.limit() - old_top < size_in_bytes) {
     return SlowAllocateRaw(size_in_bytes, kWordAligned);
   }
(...skipping 28 matching lines...)


 intptr_t LargeObjectSpace::Available() {
   return ObjectSizeFor(heap()->isolate()->memory_allocator()->Available());
 }

 }
 }  // namespace v8::internal

 #endif  // V8_HEAP_SPACES_INL_H_