Chromium Code Reviews

Diff: src/heap/spaces-inl.h

Issue 1141523002: Implement unaligned allocate and allocate heap numbers in runtime double unaligned. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 7 months ago
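This patch set generalizes the double-aligned allocation paths so they take an AllocationAlignment argument (kWordAligned, kDoubleAligned, or kDoubleUnaligned) instead of a bool, and renames the helpers accordingly (EnsureDoubleAligned becomes EnsureAligned, AllocateLinearlyDoubleAlign becomes AllocateLinearlyAligned, and so on). The sketch below summarizes the padding rule the new paths apply; it is a standalone illustration rather than code from the CL, and it assumes a 32-bit heap where kPointerSize is 4 and kDoubleAlignmentMask is 7.

```cpp
// Illustrative sketch only (not code from this CL): how much filler the new
// aligned allocation paths insert in front of an object, given the current
// top pointer and the requested alignment.
#include <cstdint>

enum AllocationAlignment { kWordAligned, kDoubleAligned, kDoubleUnaligned };

int AlignmentPadding(uintptr_t top, AllocationAlignment alignment) {
  const int kPointerSize = 4;                // assumed 32-bit tagged pointer
  const uintptr_t kDoubleAlignmentMask = 7;  // 8-byte double alignment
  bool top_is_double_aligned = (top & kDoubleAlignmentMask) == 0;
  if (alignment == kDoubleAligned && !top_is_double_aligned) {
    return kPointerSize;  // pad so the object starts on an 8-byte boundary
  }
  if (alignment == kDoubleUnaligned && top_is_double_aligned) {
    return kPointerSize;  // pad so the object deliberately starts off-boundary
  }
  return 0;  // kWordAligned, or top already has the requested parity
}
```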
 // Copyright 2011 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #ifndef V8_HEAP_SPACES_INL_H_
 #define V8_HEAP_SPACES_INL_H_

 #include "src/heap/spaces.h"
 #include "src/heap-profiler.h"
 #include "src/isolate.h"
(...skipping 232 matching lines...)
 HeapObject* PagedSpace::AllocateLinearly(int size_in_bytes) {
   Address current_top = allocation_info_.top();
   Address new_top = current_top + size_in_bytes;
   if (new_top > allocation_info_.limit()) return NULL;

   allocation_info_.set_top(new_top);
   return HeapObject::FromAddress(current_top);
 }

-HeapObject* PagedSpace::AllocateLinearlyDoubleAlign(int size_in_bytes) {
+HeapObject* PagedSpace::AllocateLinearlyAligned(int size_in_bytes,
+                                                AllocationAlignment alignment) {
   Address current_top = allocation_info_.top();
   int alignment_size = 0;

-  if ((OffsetFrom(current_top) & kDoubleAlignmentMask) != 0) {
+  if (alignment == kDoubleAligned &&
+      (OffsetFrom(current_top) & kDoubleAlignmentMask) != 0) {
+    alignment_size = kPointerSize;
+    size_in_bytes += alignment_size;
+  } else if (alignment == kDoubleUnaligned &&
+             (OffsetFrom(current_top) & kDoubleAlignmentMask) == 0) {
     alignment_size = kPointerSize;
     size_in_bytes += alignment_size;
   }
   Address new_top = current_top + size_in_bytes;
   if (new_top > allocation_info_.limit()) return NULL;

   allocation_info_.set_top(new_top);
-  if (alignment_size > 0)
-    return heap()->EnsureDoubleAligned(HeapObject::FromAddress(current_top),
-                                       size_in_bytes);
+  if (alignment_size > 0) {
+    return heap()->EnsureAligned(HeapObject::FromAddress(current_top),
+                                 size_in_bytes, alignment);
+  }
   return HeapObject::FromAddress(current_top);
 }

 // Raw allocation.
 AllocationResult PagedSpace::AllocateRaw(int size_in_bytes) {
   HeapObject* object = AllocateLinearly(size_in_bytes);

   if (object == NULL) {
     object = free_list_.Allocate(size_in_bytes);
     if (object == NULL) {
       object = SlowAllocateRaw(size_in_bytes);
     }
   }

   if (object != NULL) {
     if (identity() == CODE_SPACE) {
       SkipList::Update(object->address(), size_in_bytes);
     }
     MSAN_ALLOCATED_UNINITIALIZED_MEMORY(object->address(), size_in_bytes);
     return object;
   }

   return AllocationResult::Retry(identity());
 }

 // Raw allocation.
-AllocationResult PagedSpace::AllocateRawDoubleAligned(int size_in_bytes) {
+AllocationResult PagedSpace::AllocateRawAligned(int size_in_bytes,
+                                                AllocationAlignment alignment) {
   DCHECK(identity() == OLD_SPACE);
-  HeapObject* object = AllocateLinearlyDoubleAlign(size_in_bytes);
+  HeapObject* object = AllocateLinearlyAligned(size_in_bytes, alignment);
   int aligned_size_in_bytes = size_in_bytes + kPointerSize;

   if (object == NULL) {
     object = free_list_.Allocate(aligned_size_in_bytes);
     if (object == NULL) {
       object = SlowAllocateRaw(aligned_size_in_bytes);
     }
     if (object != NULL) {
-      object = heap()->EnsureDoubleAligned(object, aligned_size_in_bytes);
+      object = heap()->EnsureAligned(object, aligned_size_in_bytes, alignment);
     }
   }

   if (object != NULL) {
     MSAN_ALLOCATED_UNINITIALIZED_MEMORY(object->address(), size_in_bytes);
     return object;
   }

   return AllocationResult::Retry(identity());
 }

 // -----------------------------------------------------------------------------
 // NewSpace


-AllocationResult NewSpace::AllocateRawDoubleAligned(int size_in_bytes) {
+AllocationResult NewSpace::AllocateRawAligned(int size_in_bytes,
+                                              AllocationAlignment alignment) {
   Address old_top = allocation_info_.top();
   int alignment_size = 0;
   int aligned_size_in_bytes = 0;

   // If double alignment is required and top pointer is not aligned, we allocate
   // additional memory to take care of the alignment.
-  if ((OffsetFrom(old_top) & kDoubleAlignmentMask) != 0) {
+  if (alignment == kDoubleAligned &&
+      (OffsetFrom(old_top) & kDoubleAlignmentMask) != 0) {
+    alignment_size += kPointerSize;
+  } else if (alignment == kDoubleUnaligned &&
+             (OffsetFrom(old_top) & kDoubleAlignmentMask) == 0) {
     alignment_size += kPointerSize;
   }
   aligned_size_in_bytes = size_in_bytes + alignment_size;

   if (allocation_info_.limit() - old_top < aligned_size_in_bytes) {
-    return SlowAllocateRaw(size_in_bytes, true);
+    return SlowAllocateRaw(size_in_bytes, alignment);
   }

   HeapObject* obj = HeapObject::FromAddress(old_top);
   allocation_info_.set_top(allocation_info_.top() + aligned_size_in_bytes);
   DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);

   if (alignment_size > 0) {
-    obj = heap()->EnsureDoubleAligned(obj, aligned_size_in_bytes);
+    obj = heap()->PrecedeWithFiller(obj);
   }

   // The slow path above ultimately goes through AllocateRaw, so this suffices.
   MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj->address(), size_in_bytes);

+  DCHECK((kDoubleAligned && (OffsetFrom(obj) & kDoubleAlignmentMask) == 0) ||
+         (kDoubleUnaligned && (OffsetFrom(obj) & kDoubleAlignmentMask) != 0));
+
   return obj;
 }


 AllocationResult NewSpace::AllocateRaw(int size_in_bytes) {
   Address old_top = allocation_info_.top();

   if (allocation_info_.limit() - old_top < size_in_bytes) {
-    return SlowAllocateRaw(size_in_bytes, false);
+    return SlowAllocateRaw(size_in_bytes, kWordAligned);
   }

   HeapObject* obj = HeapObject::FromAddress(old_top);
   allocation_info_.set_top(allocation_info_.top() + size_in_bytes);
   DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);

   // The slow path above ultimately goes through AllocateRaw, so this suffices.
   MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj->address(), size_in_bytes);

   return obj;
 }

 LargePage* LargePage::Initialize(Heap* heap, MemoryChunk* chunk) {
   heap->incremental_marking()->SetOldSpacePageFlags(chunk);
   return static_cast<LargePage*>(chunk);
 }


 intptr_t LargeObjectSpace::Available() {
   return ObjectSizeFor(heap()->isolate()->memory_allocator()->Available());
 }

 }
 }  // namespace v8::internal

 #endif  // V8_HEAP_SPACES_INL_H_
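As context for the issue title ("allocate heap numbers in runtime double unaligned"): the point of kDoubleUnaligned is presumably that on 32-bit targets a heap number's double payload sits one tagged word after the object start, so an object start deliberately placed off the 8-byte boundary puts the payload itself on an 8-byte boundary. The sketch below is illustrative only and not taken from this CL; the constants mimic a 32-bit heap with kPointerSize of 4.

```cpp
// Illustrative only (not code from this CL): with a 4-byte map word, an
// object start of the form 8n + 4 places the 8-byte double payload of a
// heap number at 8n + 8, i.e. on a double-aligned address.
#include <cassert>
#include <cstdint>

int main() {
  const uintptr_t kPointerSize = 4;          // assumed 32-bit tagged pointer
  const uintptr_t kDoubleAlignmentMask = 7;  // 8-byte double alignment
  uintptr_t object_start = 0x1000 + kPointerSize;       // "double unaligned" start
  uintptr_t value_field = object_start + kPointerSize;  // map word, then double value
  assert((object_start & kDoubleAlignmentMask) != 0);   // object start is off-boundary
  assert((value_field & kDoubleAlignmentMask) == 0);    // payload is double-aligned
  return 0;
}
```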