| OLD | NEW |
| 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 412 matching lines...) |
| 423 if (new_top > alloc_info->limit) return NULL; | 423 if (new_top > alloc_info->limit) return NULL; |
| 424 | 424 |
| 425 alloc_info->top = new_top; | 425 alloc_info->top = new_top; |
| 426 ASSERT(alloc_info->VerifyPagedAllocation()); | 426 ASSERT(alloc_info->VerifyPagedAllocation()); |
| 427 accounting_stats_.AllocateBytes(size_in_bytes); | 427 accounting_stats_.AllocateBytes(size_in_bytes); |
| 428 return HeapObject::FromAddress(current_top); | 428 return HeapObject::FromAddress(current_top); |
| 429 } | 429 } |
| 430 | 430 |
| 431 | 431 |
| 432 // Raw allocation. | 432 // Raw allocation. |
| 433 Object* PagedSpace::AllocateRaw(int size_in_bytes) { | 433 MaybeObject* PagedSpace::AllocateRaw(int size_in_bytes) { |
| 434 ASSERT(HasBeenSetup()); | 434 ASSERT(HasBeenSetup()); |
| 435 ASSERT_OBJECT_SIZE(size_in_bytes); | 435 ASSERT_OBJECT_SIZE(size_in_bytes); |
| 436 HeapObject* object = AllocateLinearly(&allocation_info_, size_in_bytes); | 436 HeapObject* object = AllocateLinearly(&allocation_info_, size_in_bytes); |
| 437 if (object != NULL) return object; | 437 if (object != NULL) return object; |
| 438 | 438 |
| 439 object = SlowAllocateRaw(size_in_bytes); | 439 object = SlowAllocateRaw(size_in_bytes); |
| 440 if (object != NULL) return object; | 440 if (object != NULL) return object; |
| 441 | 441 |
| 442 return Failure::RetryAfterGC(identity()); | 442 return Failure::RetryAfterGC(identity()); |
| 443 } | 443 } |
| 444 | 444 |
| 445 | 445 |
| 446 // Reallocating (and promoting) objects during a compacting collection. | 446 // Reallocating (and promoting) objects during a compacting collection. |
| 447 Object* PagedSpace::MCAllocateRaw(int size_in_bytes) { | 447 MaybeObject* PagedSpace::MCAllocateRaw(int size_in_bytes) { |
| 448 ASSERT(HasBeenSetup()); | 448 ASSERT(HasBeenSetup()); |
| 449 ASSERT_OBJECT_SIZE(size_in_bytes); | 449 ASSERT_OBJECT_SIZE(size_in_bytes); |
| 450 HeapObject* object = AllocateLinearly(&mc_forwarding_info_, size_in_bytes); | 450 HeapObject* object = AllocateLinearly(&mc_forwarding_info_, size_in_bytes); |
| 451 if (object != NULL) return object; | 451 if (object != NULL) return object; |
| 452 | 452 |
| 453 object = SlowMCAllocateRaw(size_in_bytes); | 453 object = SlowMCAllocateRaw(size_in_bytes); |
| 454 if (object != NULL) return object; | 454 if (object != NULL) return object; |
| 455 | 455 |
| 456 return Failure::RetryAfterGC(identity()); | 456 return Failure::RetryAfterGC(identity()); |
| 457 } | 457 } |
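Editorial aside on the two hunks above: with AllocateRaw and MCAllocateRaw now returning MaybeObject*, every caller has to unwrap the result and propagate a retry failure instead of receiving a bare Object*. A minimal sketch of what a call site looks like under this contract; the ToObject helper and the wrapper function here are assumptions based on the MaybeObject convention, not part of this patch:

MaybeObject* AllocateExample(PagedSpace* space, int size_in_bytes) {
  Object* result;
  { MaybeObject* maybe_result = space->AllocateRaw(size_in_bytes);
    // On failure, maybe_result is a Failure carrying the space identity
    // (see Failure::RetryAfterGC(identity()) above), so the caller knows
    // which space must be collected before the allocation is retried.
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }
  // result is now known to be a successfully allocated object; Object*
  // converts to MaybeObject* on return since Object derives from MaybeObject.
  return result;
}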
| 458 | 458 |
| 459 | 459 |
| 460 // ----------------------------------------------------------------------------- | 460 // ----------------------------------------------------------------------------- |
| 461 // LargeObjectChunk | 461 // LargeObjectChunk |
| 462 | 462 |
| 463 HeapObject* LargeObjectChunk::GetObject() { | 463 HeapObject* LargeObjectChunk::GetObject() { |
| 464 // Round the chunk address up to the nearest page-aligned address | 464 // Round the chunk address up to the nearest page-aligned address |
| 465 // and return the heap object in that page. | 465 // and return the heap object in that page. |
| 466 Page* page = Page::FromAddress(RoundUp(address(), Page::kPageSize)); | 466 Page* page = Page::FromAddress(RoundUp(address(), Page::kPageSize)); |
| 467 return HeapObject::FromAddress(page->ObjectAreaStart()); | 467 return HeapObject::FromAddress(page->ObjectAreaStart()); |
| 468 } | 468 } |
| 469 | 469 |
| 470 | 470 |
| 471 // ----------------------------------------------------------------------------- | 471 // ----------------------------------------------------------------------------- |
| 472 // NewSpace | 472 // NewSpace |
| 473 | 473 |
| 474 Object* NewSpace::AllocateRawInternal(int size_in_bytes, | 474 MaybeObject* NewSpace::AllocateRawInternal(int size_in_bytes, |
| 475 AllocationInfo* alloc_info) { | 475 AllocationInfo* alloc_info) { |
| 476 Address new_top = alloc_info->top + size_in_bytes; | 476 Address new_top = alloc_info->top + size_in_bytes; |
| 477 if (new_top > alloc_info->limit) return Failure::RetryAfterGC(); | 477 if (new_top > alloc_info->limit) return Failure::RetryAfterGC(); |
| 478 | 478 |
| 479 Object* obj = HeapObject::FromAddress(alloc_info->top); | 479 Object* obj = HeapObject::FromAddress(alloc_info->top); |
| 480 alloc_info->top = new_top; | 480 alloc_info->top = new_top; |
| 481 #ifdef DEBUG | 481 #ifdef DEBUG |
| 482 SemiSpace* space = | 482 SemiSpace* space = |
| 483 (alloc_info == &allocation_info_) ? &to_space_ : &from_space_; | 483 (alloc_info == &allocation_info_) ? &to_space_ : &from_space_; |
| 484 ASSERT(space->low() <= alloc_info->top | 484 ASSERT(space->low() <= alloc_info->top |
| 485 && alloc_info->top <= space->high() | 485 && alloc_info->top <= space->high() |
| 486 && alloc_info->limit == space->high()); | 486 && alloc_info->limit == space->high()); |
| 487 #endif | 487 #endif |
| 488 return obj; | 488 return obj; |
| 489 } | 489 } |
| 490 | 490 |
| 491 | 491 |
| 492 bool FreeListNode::IsFreeListNode(HeapObject* object) { | 492 bool FreeListNode::IsFreeListNode(HeapObject* object) { |
| 493 return object->map() == Heap::raw_unchecked_byte_array_map() | 493 return object->map() == Heap::raw_unchecked_byte_array_map() |
| 494 || object->map() == Heap::raw_unchecked_one_pointer_filler_map() | 494 || object->map() == Heap::raw_unchecked_one_pointer_filler_map() |
| 495 || object->map() == Heap::raw_unchecked_two_pointer_filler_map(); | 495 || object->map() == Heap::raw_unchecked_two_pointer_filler_map(); |
| 496 } | 496 } |
| 497 | 497 |
| 498 } } // namespace v8::internal | 498 } } // namespace v8::internal |
| 499 | 499 |
| 500 #endif // V8_SPACES_INL_H_ | 500 #endif // V8_SPACES_INL_H_ |
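Closing note for readers skimming this file: AllocateLinearly (line 423 above) and NewSpace::AllocateRawInternal are both bump-pointer allocators; the fast path is a compare and an add, and failure falls through to a slow path or a GC retry. A self-contained sketch of the scheme, with purely illustrative names rather than V8's:

#include <cstddef>
#include <cstdint>

// Illustrative bump-pointer allocator: 'top' advances toward 'limit'; when a
// request would cross 'limit', return nullptr so the caller can take a slow
// path (grow the space, sweep, or trigger a GC) and then retry.
struct BumpAllocator {
  std::uintptr_t top;
  std::uintptr_t limit;

  void* Allocate(std::size_t size_in_bytes) {
    std::uintptr_t new_top = top + size_in_bytes;
    if (new_top > limit) return nullptr;  // out of linear space
    void* result = reinterpret_cast<void*>(top);
    top = new_top;                        // bump the pointer
    return result;
  }
};

Because the success path is a branch plus an add, both PagedSpace and NewSpace route every allocation through it before touching the slower free-list or collection machinery.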