OLD | NEW |
1 // Copyright 2006-2008 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 422 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
433 // Raw allocation. | 433 // Raw allocation. |
434 Object* PagedSpace::AllocateRaw(int size_in_bytes) { | 434 Object* PagedSpace::AllocateRaw(int size_in_bytes) { |
435 ASSERT(HasBeenSetup()); | 435 ASSERT(HasBeenSetup()); |
436 ASSERT_OBJECT_SIZE(size_in_bytes); | 436 ASSERT_OBJECT_SIZE(size_in_bytes); |
437 HeapObject* object = AllocateLinearly(&allocation_info_, size_in_bytes); | 437 HeapObject* object = AllocateLinearly(&allocation_info_, size_in_bytes); |
438 if (object != NULL) return object; | 438 if (object != NULL) return object; |
439 | 439 |
440 object = SlowAllocateRaw(size_in_bytes); | 440 object = SlowAllocateRaw(size_in_bytes); |
441 if (object != NULL) return object; | 441 if (object != NULL) return object; |
442 | 442 |
443 return Failure::RetryAfterGC(size_in_bytes, identity()); | 443 return Failure::RetryAfterGC(identity()); |
444 } | 444 } |
445 | 445 |
446 | 446 |
447 // Reallocating (and promoting) objects during a compacting collection. | 447 // Reallocating (and promoting) objects during a compacting collection. |
448 Object* PagedSpace::MCAllocateRaw(int size_in_bytes) { | 448 Object* PagedSpace::MCAllocateRaw(int size_in_bytes) { |
449 ASSERT(HasBeenSetup()); | 449 ASSERT(HasBeenSetup()); |
450 ASSERT_OBJECT_SIZE(size_in_bytes); | 450 ASSERT_OBJECT_SIZE(size_in_bytes); |
451 HeapObject* object = AllocateLinearly(&mc_forwarding_info_, size_in_bytes); | 451 HeapObject* object = AllocateLinearly(&mc_forwarding_info_, size_in_bytes); |
452 if (object != NULL) return object; | 452 if (object != NULL) return object; |
453 | 453 |
454 object = SlowMCAllocateRaw(size_in_bytes); | 454 object = SlowMCAllocateRaw(size_in_bytes); |
455 if (object != NULL) return object; | 455 if (object != NULL) return object; |
456 | 456 |
457 return Failure::RetryAfterGC(size_in_bytes, identity()); | 457 return Failure::RetryAfterGC(identity()); |
458 } | 458 } |
459 | 459 |
460 | 460 |
461 // ----------------------------------------------------------------------------- | 461 // ----------------------------------------------------------------------------- |
462 // LargeObjectChunk | 462 // LargeObjectChunk |
463 | 463 |
464 HeapObject* LargeObjectChunk::GetObject() { | 464 HeapObject* LargeObjectChunk::GetObject() { |
465 // Round the chunk address up to the nearest page-aligned address | 465 // Round the chunk address up to the nearest page-aligned address |
466 // and return the heap object in that page. | 466 // and return the heap object in that page. |
467 Page* page = Page::FromAddress(RoundUp(address(), Page::kPageSize)); | 467 Page* page = Page::FromAddress(RoundUp(address(), Page::kPageSize)); |
468 return HeapObject::FromAddress(page->ObjectAreaStart()); | 468 return HeapObject::FromAddress(page->ObjectAreaStart()); |
469 } | 469 } |
470 | 470 |
471 | 471 |
472 // ----------------------------------------------------------------------------- | 472 // ----------------------------------------------------------------------------- |
473 // NewSpace | 473 // NewSpace |
474 | 474 |
475 Object* NewSpace::AllocateRawInternal(int size_in_bytes, | 475 Object* NewSpace::AllocateRawInternal(int size_in_bytes, |
476 AllocationInfo* alloc_info) { | 476 AllocationInfo* alloc_info) { |
477 Address new_top = alloc_info->top + size_in_bytes; | 477 Address new_top = alloc_info->top + size_in_bytes; |
478 if (new_top > alloc_info->limit) return Failure::RetryAfterGC(size_in_bytes); | 478 if (new_top > alloc_info->limit) return Failure::RetryAfterGC(); |
479 | 479 |
480 Object* obj = HeapObject::FromAddress(alloc_info->top); | 480 Object* obj = HeapObject::FromAddress(alloc_info->top); |
481 alloc_info->top = new_top; | 481 alloc_info->top = new_top; |
482 #ifdef DEBUG | 482 #ifdef DEBUG |
483 SemiSpace* space = | 483 SemiSpace* space = |
484 (alloc_info == &allocation_info_) ? &to_space_ : &from_space_; | 484 (alloc_info == &allocation_info_) ? &to_space_ : &from_space_; |
485 ASSERT(space->low() <= alloc_info->top | 485 ASSERT(space->low() <= alloc_info->top |
486 && alloc_info->top <= space->high() | 486 && alloc_info->top <= space->high() |
487 && alloc_info->limit == space->high()); | 487 && alloc_info->limit == space->high()); |
488 #endif | 488 #endif |
489 return obj; | 489 return obj; |
490 } | 490 } |
491 | 491 |
492 | 492 |
493 bool FreeListNode::IsFreeListNode(HeapObject* object) { | 493 bool FreeListNode::IsFreeListNode(HeapObject* object) { |
494 return object->map() == Heap::raw_unchecked_byte_array_map() | 494 return object->map() == Heap::raw_unchecked_byte_array_map() |
495 || object->map() == Heap::raw_unchecked_one_pointer_filler_map() | 495 || object->map() == Heap::raw_unchecked_one_pointer_filler_map() |
496 || object->map() == Heap::raw_unchecked_two_pointer_filler_map(); | 496 || object->map() == Heap::raw_unchecked_two_pointer_filler_map(); |
497 } | 497 } |
498 | 498 |
499 } } // namespace v8::internal | 499 } } // namespace v8::internal |
500 | 500 |
501 #endif // V8_SPACES_INL_H_ | 501 #endif // V8_SPACES_INL_H_ |
OLD | NEW |