OLD | NEW |
---|---|
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 246 matching lines...) | |
257 ASSERT(page->owner() == owner()); | 257 ASSERT(page->owner() == owner()); |
258 set_prev_chunk(page); | 258 set_prev_chunk(page); |
259 } | 259 } |
260 | 260 |
261 | 261 |
262 // Try linear allocation in the page of alloc_info's allocation top. Does | 262 // Try linear allocation in the page of alloc_info's allocation top. Does |
263 // not contain slow case logic (e.g. move to the next page or try free list | 263 // not contain slow case logic (e.g. move to the next page or try free list |
264 // allocation) so it can be used by all the allocation functions and for all | 264 // allocation) so it can be used by all the allocation functions and for all |
265 // the paged spaces. | 265 // the paged spaces. |
266 HeapObject* PagedSpace::AllocateLinearly(int size_in_bytes) { | 266 HeapObject* PagedSpace::AllocateLinearly(int size_in_bytes) { |
267 Address current_top = allocation_info_.top; | 267 Address current_top = allocation_info_.top(); |
268 Address new_top = current_top + size_in_bytes; | 268 Address new_top = current_top + size_in_bytes; |
269 if (new_top > allocation_info_.limit) return NULL; | 269 if (new_top > allocation_info_.limit()) return NULL; |
270 | 270 |
271 allocation_info_.top = new_top; | 271 allocation_info_.set_top(new_top); |
272 return HeapObject::FromAddress(current_top); | 272 return HeapObject::FromAddress(current_top); |
273 } | 273 } |
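(For reference: the change above replaces direct reads and writes of allocation_info_.top / allocation_info_.limit with top() / limit() / set_top() accessors. A minimal sketch of such an accessor-based holder, assuming only the names this patch uses; the real v8::internal::AllocationInfo may differ in members and checking:

    #include <cstdint>

    typedef uint8_t* Address;  // stand-in for v8::internal::Address

    class AllocationInfoSketch {
     public:
      Address top() const { return top_; }      // current bump pointer
      Address limit() const { return limit_; }  // end of the linear area
      void set_top(Address top) { top_ = top; }

     private:
      Address top_;    // next free address in the current page
      Address limit_;  // first address past the usable area
    };

With accessors like these, AllocateLinearly above stays a plain bump-pointer allocation: read top(), bail out with NULL if top + size crosses limit(), otherwise set_top(top + size) and return the old top as a HeapObject.)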
274 | 274 |
275 | 275 |
276 // Raw allocation. | 276 // Raw allocation. |
277 MaybeObject* PagedSpace::AllocateRaw(int size_in_bytes, | 277 MaybeObject* PagedSpace::AllocateRaw(int size_in_bytes, |
278 AllocationType event) { | 278 AllocationType event) { |
279 HeapProfiler* profiler = heap()->isolate()->heap_profiler(); | 279 HeapProfiler* profiler = heap()->isolate()->heap_profiler(); |
280 | 280 |
281 HeapObject* object = AllocateLinearly(size_in_bytes); | 281 HeapObject* object = AllocateLinearly(size_in_bytes); |
(...skipping 35 matching lines...) | |
317 | 317 |
318 return Failure::RetryAfterGC(identity()); | 318 return Failure::RetryAfterGC(identity()); |
319 } | 319 } |
320 | 320 |
321 | 321 |
322 // ----------------------------------------------------------------------------- | 322 // ----------------------------------------------------------------------------- |
323 // NewSpace | 323 // NewSpace |
324 | 324 |
325 | 325 |
326 MaybeObject* NewSpace::AllocateRaw(int size_in_bytes) { | 326 MaybeObject* NewSpace::AllocateRaw(int size_in_bytes) { |
327 Address old_top = allocation_info_.top; | 327 Address old_top = allocation_info_.top(); |
328 #ifdef DEBUG | 328 #ifdef DEBUG |
329 // If we are stressing compaction we waste some memory in new space | 329 // If we are stressing compaction we waste some memory in new space |
330 // in order to get more frequent GCs. | 330 // in order to get more frequent GCs. |
331 if (FLAG_stress_compaction && !heap()->linear_allocation()) { | 331 if (FLAG_stress_compaction && !heap()->linear_allocation()) { |
332 if (allocation_info_.limit - old_top >= size_in_bytes * 4) { | 332 if (allocation_info_.limit() - old_top >= size_in_bytes * 4) { |
333 int filler_size = size_in_bytes * 4; | 333 int filler_size = size_in_bytes * 4; |
334 for (int i = 0; i < filler_size; i += kPointerSize) { | 334 for (int i = 0; i < filler_size; i += kPointerSize) { |
335 *(reinterpret_cast<Object**>(old_top + i)) = | 335 *(reinterpret_cast<Object**>(old_top + i)) = |
336 heap()->one_pointer_filler_map(); | 336 heap()->one_pointer_filler_map(); |
337 } | 337 } |
338 old_top += filler_size; | 338 old_top += filler_size; |
339 allocation_info_.top += filler_size; | 339 Address new_top = allocation_info_.top() + filler_size; |
Michael Starzinger
2013/10/25 08:34:56
nit: Only one white-space after assignment operator.
Hannes Payer (out of office)
2013/11/20 09:58:47
Done.
| |
340 allocation_info_.set_top(new_top); | |
Michael Starzinger
2013/10/25 08:34:56
nit: Maybe inline calculation instead of local variable.
Hannes Payer (out of office)
2013/11/20 09:58:47
Done.
| |
340 } | 341 } |
341 } | 342 } |
342 #endif | 343 #endif |
343 | 344 |
344 if (allocation_info_.limit - old_top < size_in_bytes) { | 345 if (allocation_info_.limit() - old_top < size_in_bytes) { |
345 return SlowAllocateRaw(size_in_bytes); | 346 return SlowAllocateRaw(size_in_bytes); |
346 } | 347 } |
347 | 348 |
348 HeapObject* obj = HeapObject::FromAddress(old_top); | 349 HeapObject* obj = HeapObject::FromAddress(old_top); |
349 allocation_info_.top += size_in_bytes; | 350 Address new_top = allocation_info_.top() + size_in_bytes; |
351 allocation_info_.set_top(new_top); | |
Michael Starzinger
2013/10/25 08:34:56
nit: Maybe inline calculation instead of local variable.
Hannes Payer (out of office)
2013/11/20 09:58:47
Done.
| |
350 ASSERT_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_); | 352 ASSERT_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_); |
351 | 353 |
352 HeapProfiler* profiler = heap()->isolate()->heap_profiler(); | 354 HeapProfiler* profiler = heap()->isolate()->heap_profiler(); |
353 if (profiler != NULL && profiler->is_tracking_allocations()) { | 355 if (profiler != NULL && profiler->is_tracking_allocations()) { |
354 profiler->NewObjectEvent(obj->address(), size_in_bytes); | 356 profiler->NewObjectEvent(obj->address(), size_in_bytes); |
355 } | 357 } |
356 | 358 |
357 return obj; | 359 return obj; |
358 } | 360 } |
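(The two "inline calculation instead of local variable" nits above were addressed in a later patch set; presumably the bump then reads as a single expression, e.g.

    allocation_info_.set_top(allocation_info_.top() + size_in_bytes);

instead of going through the new_top local shown here.)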
359 | 361 |
(...skipping 13 matching lines...) | |
373 Map* map = object->map(); | 375 Map* map = object->map(); |
374 Heap* heap = object->GetHeap(); | 376 Heap* heap = object->GetHeap(); |
375 return map == heap->raw_unchecked_free_space_map() | 377 return map == heap->raw_unchecked_free_space_map() |
376 || map == heap->raw_unchecked_one_pointer_filler_map() | 378 || map == heap->raw_unchecked_one_pointer_filler_map() |
377 || map == heap->raw_unchecked_two_pointer_filler_map(); | 379 || map == heap->raw_unchecked_two_pointer_filler_map(); |
378 } | 380 } |
379 | 381 |
380 } } // namespace v8::internal | 382 } } // namespace v8::internal |
381 | 383 |
382 #endif // V8_SPACES_INL_H_ | 384 #endif // V8_SPACES_INL_H_ |