| OLD | NEW |
| 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 265 matching lines...) |
| 276 } | 276 } |
| 277 | 277 |
| 278 | 278 |
| 279 // Try linear allocation in the page of alloc_info's allocation top. Does | 279 // Try linear allocation in the page of alloc_info's allocation top. Does |
| 280 // not contain slow case logic (e.g., move to the next page or try free list | 280 // not contain slow case logic (e.g., move to the next page or try free list |
| 281 // allocation) so it can be used by all the allocation functions and for all | 281 // allocation) so it can be used by all the allocation functions and for all |
| 282 // the paged spaces. | 282 // the paged spaces. |
| 283 HeapObject* PagedSpace::AllocateLinearly(AllocationInfo* alloc_info, | 283 HeapObject* PagedSpace::AllocateLinearly(AllocationInfo* alloc_info, |
| 284 int size_in_bytes) { | 284 int size_in_bytes) { |
| 285 Address current_top = alloc_info->top; | 285 Address current_top = alloc_info->top; |
| 286 //// TODO(MIPS.6) |
| 287 //#ifdef V8_TARGET_ARCH_MIPS |
| 288 // Address new_top = current_top + RoundUp(size_in_bytes,8); |
| 289 //#else |
| 286 Address new_top = current_top + size_in_bytes; | 290 Address new_top = current_top + size_in_bytes; |
| 291 //#endif |
| 292 |
| 287 if (new_top > alloc_info->limit) return NULL; | 293 if (new_top > alloc_info->limit) return NULL; |
| 288 | 294 |
| 289 alloc_info->top = new_top; | 295 alloc_info->top = new_top; |
| 290 ASSERT(alloc_info->VerifyPagedAllocation()); | 296 ASSERT(alloc_info->VerifyPagedAllocation()); |
| 291 accounting_stats_.AllocateBytes(size_in_bytes); | 297 accounting_stats_.AllocateBytes(size_in_bytes); |
| 292 return HeapObject::FromAddress(current_top); | 298 return HeapObject::FromAddress(current_top); |
| 293 } | 299 } |
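[Note: The fast path above is a plain bump-pointer allocator, and the commented-out MIPS.6 block would pad every request to 8 bytes because MIPS requires 8-byte alignment for doubles. A minimal standalone sketch of that scheme, using a bare char* region and a local RoundUp helper instead of V8's Page/AllocationInfo machinery; all names here are illustrative, not V8's:

#include <cstddef>

// Rounds x up to a multiple of m; m must be a power of two.
static inline size_t RoundUp(size_t x, size_t m) {
  return (x + m - 1) & ~(m - 1);
}

struct BumpRegion {
  char* top;
  char* limit;

  // Returns the old top on success, nullptr when the region is full
  // (a real caller's slow path would then move to the next page or
  // fall back to a free list, as the comment above describes).
  void* Allocate(size_t size_in_bytes, bool align_to_8) {
    // align_to_8 mirrors the commented-out MIPS.6 RoundUp(size, 8).
    size_t size = align_to_8 ? RoundUp(size_in_bytes, 8) : size_in_bytes;
    char* new_top = top + size;
    if (new_top > limit) return nullptr;
    char* result = top;
    top = new_top;
    return result;
  }
};
]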
| 294 | 300 |
| 295 | 301 |
| 296 // Raw allocation. | 302 // Raw allocation. |
| (...skipping 41 matching lines...) |
| 338 int LargeObjectSpace::ExtraRSetBytesFor(int object_size) { | 344 int LargeObjectSpace::ExtraRSetBytesFor(int object_size) { |
| 339 int extra_rset_bits = | 345 int extra_rset_bits = |
| 340 RoundUp((object_size - Page::kObjectAreaSize) / kPointerSize, | 346 RoundUp((object_size - Page::kObjectAreaSize) / kPointerSize, |
| 341 kBitsPerInt); | 347 kBitsPerInt); |
| 342 return extra_rset_bits / kBitsPerByte; | 348 return extra_rset_bits / kBitsPerByte; |
| 343 } | 349 } |
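[Note: ExtraRSetBytesFor sizes the extra remembered-set bitmap for a large object: one bit per pointer-sized slot beyond the first page's object area, rounded up to a whole number of ints. A worked example of that arithmetic, assuming the typical 32-bit constants of this era (kPointerSize = 4, kBitsPerInt = 32, kBitsPerByte = 8) and a placeholder value for Page::kObjectAreaSize, not V8's actual constant:

#include <cstdio>

int main() {
  const int kPointerSize    = 4;         // assumed 32-bit target
  const int kBitsPerInt     = 32;
  const int kBitsPerByte    = 8;
  const int kObjectAreaSize = 8 * 1024;  // placeholder for Page::kObjectAreaSize

  int object_size = kObjectAreaSize + 1000;                    // 1000 bytes past the area
  int slots = (object_size - kObjectAreaSize) / kPointerSize;  // 250 pointer slots
  // RoundUp(slots, kBitsPerInt) expanded inline: 250 -> 256 bits.
  int bits = ((slots + kBitsPerInt - 1) / kBitsPerInt) * kBitsPerInt;
  printf("%d extra rset bytes\n", bits / kBitsPerByte);        // prints 32
  return 0;
}
]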
| 344 | 350 |
| 345 | 351 |
| 346 Object* NewSpace::AllocateRawInternal(int size_in_bytes, | 352 Object* NewSpace::AllocateRawInternal(int size_in_bytes, |
| 347 AllocationInfo* alloc_info) { | 353 AllocationInfo* alloc_info) { |
| 348 Address new_top = alloc_info->top + size_in_bytes; | 354 |
| 355 Address current_top = alloc_info->top; |
| 356 //// TODO(MIPS.6) |
| 357 //#ifdef V8_TARGET_ARCH_MIPS |
| 358 // Address new_top = current_top + RoundUp(size_in_bytes,8); |
| 359 //#else |
| 360 Address new_top = current_top + size_in_bytes; |
| 361 //#endif |
| 362 |
| 349 if (new_top > alloc_info->limit) return Failure::RetryAfterGC(size_in_bytes); | 363 if (new_top > alloc_info->limit) return Failure::RetryAfterGC(size_in_bytes); |
| 350 | 364 |
| 351 Object* obj = HeapObject::FromAddress(alloc_info->top); | 365 Object* obj = HeapObject::FromAddress(alloc_info->top); |
| 352 alloc_info->top = new_top; | 366 alloc_info->top = new_top; |
| 353 #ifdef DEBUG | 367 #ifdef DEBUG |
| 354 SemiSpace* space = | 368 SemiSpace* space = |
| 355 (alloc_info == &allocation_info_) ? &to_space_ : &from_space_; | 369 (alloc_info == &allocation_info_) ? &to_space_ : &from_space_; |
| 356 ASSERT(space->low() <= alloc_info->top | 370 ASSERT(space->low() <= alloc_info->top |
| 357 && alloc_info->top <= space->high() | 371 && alloc_info->top <= space->high() |
| 358 && alloc_info->limit == space->high()); | 372 && alloc_info->limit == space->high()); |
| 359 #endif | 373 #endif |
| 360 return obj; | 374 return obj; |
| 361 } | 375 } |
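[Note: Unlike the paged-space path, which signals exhaustion with NULL, the new-space path returns Failure::RetryAfterGC(size_in_bytes) so the caller can collect and retry; the same MIPS.6 rounding is stubbed out here too. A minimal sketch of that success-or-retry convention, with a plain tagged struct standing in for V8's Object*/Failure encoding; all names are hypothetical:

#include <cstddef>

struct AllocResult {
  void* object;        // non-null on success
  size_t retry_bytes;  // nonzero means "retry after GC with this many bytes"
};

struct NewSpaceSketch {
  char* top;
  char* limit;

  AllocResult AllocateRaw(size_t size_in_bytes) {
    char* new_top = top + size_in_bytes;
    // Out of linear space: report how much was requested so the caller
    // can trigger a scavenge and retry, as RetryAfterGC does above.
    if (new_top > limit) return AllocResult{nullptr, size_in_bytes};
    AllocResult r{top, 0};
    top = new_top;
    return r;
  }
};
]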
| 362 | 376 |
| 363 | 377 |
| 364 bool FreeListNode::IsFreeListNode(HeapObject* object) { | 378 bool FreeListNode::IsFreeListNode(HeapObject* object) { |
| 365 return object->map() == Heap::raw_unchecked_byte_array_map() | 379 return object->map() == Heap::raw_unchecked_byte_array_map() |
| 366 || object->map() == Heap::raw_unchecked_one_pointer_filler_map() | 380 || object->map() == Heap::raw_unchecked_one_pointer_filler_map() |
| 367 || object->map() == Heap::raw_unchecked_two_pointer_filler_map(); | 381 || object->map() == Heap::raw_unchecked_two_pointer_filler_map(); |
| 368 } | 382 } |
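[Note: IsFreeListNode identifies free-list fillers purely by which sentinel map the object's header points at; no other per-object tag is needed. A toy sketch of that map-pointer type test, with an enum standing in for V8's real Map objects; names are illustrative:

enum class MapKind { kByteArray, kOnePointerFiller, kTwoPointerFiller, kOther };

struct HeapObjectSketch {
  MapKind map;  // stand-in for the map pointer in the object header
};

bool IsFreeListNode(const HeapObjectSketch& object) {
  return object.map == MapKind::kByteArray
      || object.map == MapKind::kOnePointerFiller
      || object.map == MapKind::kTwoPointerFiller;
}
]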
| 369 | 383 |
| 370 } } // namespace v8::internal | 384 } } // namespace v8::internal |
| 371 | 385 |
| 372 #endif // V8_SPACES_INL_H_ | 386 #endif // V8_SPACES_INL_H_ |