| OLD | NEW | 
|---|---|
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. | 
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without | 
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are | 
| 4 // met: | 4 // met: | 
| 5 // | 5 // | 
| 6 //     * Redistributions of source code must retain the above copyright | 6 //     * Redistributions of source code must retain the above copyright | 
| 7 //       notice, this list of conditions and the following disclaimer. | 7 //       notice, this list of conditions and the following disclaimer. | 
| 8 //     * Redistributions in binary form must reproduce the above | 8 //     * Redistributions in binary form must reproduce the above | 
| 9 //       copyright notice, this list of conditions and the following | 9 //       copyright notice, this list of conditions and the following | 
| 10 //       disclaimer in the documentation and/or other materials provided | 10 //       disclaimer in the documentation and/or other materials provided | 
| (...skipping 275 matching lines...) | |
| 286     } | 286     } | 
| 287     return object; | 287     return object; | 
| 288   } | 288   } | 
| 289 | 289 | 
| 290   return Failure::RetryAfterGC(identity()); | 290   return Failure::RetryAfterGC(identity()); | 
| 291 } | 291 } | 
| 292 | 292 | 
| 293 | 293 | 
| 294 // ----------------------------------------------------------------------------- | 294 // ----------------------------------------------------------------------------- | 
| 295 // NewSpace | 295 // NewSpace | 
| 296 MaybeObject* NewSpace::AllocateRawInternal(int size_in_bytes) { | 296 | 
|  | 297 | 
|  | 298 MaybeObject* NewSpace::AllocateRaw(int size_in_bytes) { | 
| 297   Address old_top = allocation_info_.top; | 299   Address old_top = allocation_info_.top; | 
| 298   if (allocation_info_.limit - old_top < size_in_bytes) { | 300   if (allocation_info_.limit - old_top < size_in_bytes) { | 
| 299     Address new_top = old_top + size_in_bytes; | 301     return SlowAllocateRaw(size_in_bytes); | 
| 300     Address high = to_space_.page_high(); |  | 
| 301     if (allocation_info_.limit < high) { |  | 
| 302       // Incremental marking has lowered the limit to get a |  | 
| 303       // chance to do a step. |  | 
| 304       allocation_info_.limit = Min( |  | 
| 305           allocation_info_.limit + inline_allocation_limit_step_, |  | 
| 306           high); |  | 
| 307       int bytes_allocated = static_cast<int>(new_top - top_on_previous_step_); |  | 
| 308       heap()->incremental_marking()->Step(bytes_allocated); |  | 
| 309       top_on_previous_step_ = new_top; |  | 
| 310       return AllocateRawInternal(size_in_bytes); |  | 
| 311     } else if (AddFreshPage()) { |  | 
| 312       // Switched to new page. Try allocating again. |  | 
| 313       int bytes_allocated = static_cast<int>(old_top - top_on_previous_step_); |  | 
| 314       heap()->incremental_marking()->Step(bytes_allocated); |  | 
| 315       top_on_previous_step_ = to_space_.page_low(); |  | 
| 316       return AllocateRawInternal(size_in_bytes); |  | 
| 317     } else { |  | 
| 318       return Failure::RetryAfterGC(); |  | 
| 319     } |  | 
| 320   } | 302   } | 
| 321 | 303 | 
| 322   Object* obj = HeapObject::FromAddress(allocation_info_.top); | 304   Object* obj = HeapObject::FromAddress(allocation_info_.top); | 
| 323   allocation_info_.top += size_in_bytes; | 305   allocation_info_.top += size_in_bytes; | 
| 324   ASSERT_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_); | 306   ASSERT_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_); | 
| 325 | 307 | 
| 326   return obj; | 308   return obj; | 
| 327 } | 309 } | 
| 328 | 310 | 
| 329 | 311 | 
| (...skipping 29 matching lines...) | |
| 359   Map* map = object->map(); | 341   Map* map = object->map(); | 
| 360   Heap* heap = object->GetHeap(); | 342   Heap* heap = object->GetHeap(); | 
| 361   return map == heap->raw_unchecked_free_space_map() | 343   return map == heap->raw_unchecked_free_space_map() | 
| 362       || map == heap->raw_unchecked_one_pointer_filler_map() | 344       || map == heap->raw_unchecked_one_pointer_filler_map() | 
| 363       || map == heap->raw_unchecked_two_pointer_filler_map(); | 345       || map == heap->raw_unchecked_two_pointer_filler_map(); | 
| 364 } | 346 } | 
| 365 | 347 | 
| 366 } }  // namespace v8::internal | 348 } }  // namespace v8::internal | 
| 367 | 349 | 
| 368 #endif  // V8_SPACES_INL_H_ | 350 #endif  // V8_SPACES_INL_H_ | 
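The NEW side keeps only the bump-pointer fast path inline and routes the overflow case through `SlowAllocateRaw` (new line 301), whose body is not part of this hunk and presumably moves out of the inline header into spaces.cc. A minimal sketch of that slow path, assuming the logic deleted from `AllocateRawInternal` above is carried over essentially unchanged, with the recursive retries now going through the renamed `AllocateRaw`:

```cpp
// Hypothetical reconstruction of the out-of-line slow path; the actual
// body is not shown in this diff.
MaybeObject* NewSpace::SlowAllocateRaw(int size_in_bytes) {
  Address old_top = allocation_info_.top;
  Address new_top = old_top + size_in_bytes;
  Address high = to_space_.page_high();
  if (allocation_info_.limit < high) {
    // Incremental marking lowered the limit to get a chance to do a
    // step. Raise it again (no further than the page end), account for
    // the bytes allocated since the last step, and retry the fast path.
    allocation_info_.limit = Min(
        allocation_info_.limit + inline_allocation_limit_step_,
        high);
    int bytes_allocated = static_cast<int>(new_top - top_on_previous_step_);
    heap()->incremental_marking()->Step(bytes_allocated);
    top_on_previous_step_ = new_top;
    return AllocateRaw(size_in_bytes);
  } else if (AddFreshPage()) {
    // Switched to a new page. Do a marking step for the bytes allocated
    // on the old page, then retry the allocation on the fresh one.
    int bytes_allocated = static_cast<int>(old_top - top_on_previous_step_);
    heap()->incremental_marking()->Step(bytes_allocated);
    top_on_previous_step_ = to_space_.page_low();
    return AllocateRaw(size_in_bytes);
  } else {
    return Failure::RetryAfterGC();
  }
}
```

The likely point of the split is that the inlined `AllocateRaw` stays small enough to inline at every call site (a size check plus a pointer bump), while the incremental-marking step and page-switch handling live out of line on the rare path.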