OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 10 matching lines...) |
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
27 | 27 |
28 #ifndef V8_SPACES_INL_H_ | 28 #ifndef V8_SPACES_INL_H_ |
29 #define V8_SPACES_INL_H_ | 29 #define V8_SPACES_INL_H_ |
30 | 30 |
| 31 #include "heap-profiler.h" |
31 #include "isolate.h" | 32 #include "isolate.h" |
32 #include "spaces.h" | 33 #include "spaces.h" |
33 #include "v8memory.h" | 34 #include "v8memory.h" |
34 | 35 |
35 namespace v8 { | 36 namespace v8 { |
36 namespace internal { | 37 namespace internal { |
37 | 38 |
38 | 39 |
39 // ----------------------------------------------------------------------------- | 40 // ----------------------------------------------------------------------------- |
40 // Bitmap | 41 // Bitmap |
(...skipping 225 matching lines...) |
266 Address current_top = allocation_info_.top; | 267 Address current_top = allocation_info_.top; |
267 Address new_top = current_top + size_in_bytes; | 268 Address new_top = current_top + size_in_bytes; |
268 if (new_top > allocation_info_.limit) return NULL; | 269 if (new_top > allocation_info_.limit) return NULL; |
269 | 270 |
270 allocation_info_.top = new_top; | 271 allocation_info_.top = new_top; |
271 return HeapObject::FromAddress(current_top); | 272 return HeapObject::FromAddress(current_top); |
272 } | 273 } |
273 | 274 |
274 | 275 |
275 // Raw allocation. | 276 // Raw allocation. |
276 MaybeObject* PagedSpace::AllocateRaw(int size_in_bytes) { | 277 HeapObject* PagedSpace::AllocateRawHelper(int size_in_bytes) { |
277 HeapObject* object = AllocateLinearly(size_in_bytes); | 278 HeapObject* object = AllocateLinearly(size_in_bytes); |
278 if (object != NULL) { | 279 if (object != NULL) { |
279 if (identity() == CODE_SPACE) { | 280 if (identity() == CODE_SPACE) { |
280 SkipList::Update(object->address(), size_in_bytes); | 281 SkipList::Update(object->address(), size_in_bytes); |
281 } | 282 } |
282 return object; | 283 return object; |
283 } | 284 } |
284 | 285 |
285 ASSERT(!heap()->linear_allocation() || | 286 ASSERT(!heap()->linear_allocation() || |
286 (anchor_.next_chunk() == &anchor_ && | 287 (anchor_.next_chunk() == &anchor_ && |
287 anchor_.prev_chunk() == &anchor_)); | 288 anchor_.prev_chunk() == &anchor_)); |
288 | 289 |
289 object = free_list_.Allocate(size_in_bytes); | 290 object = free_list_.Allocate(size_in_bytes); |
290 if (object != NULL) { | 291 if (object != NULL) { |
291 if (identity() == CODE_SPACE) { | 292 if (identity() == CODE_SPACE) { |
292 SkipList::Update(object->address(), size_in_bytes); | 293 SkipList::Update(object->address(), size_in_bytes); |
293 } | 294 } |
294 return object; | 295 return object; |
295 } | 296 } |
296 | 297 |
297 object = SlowAllocateRaw(size_in_bytes); | 298 object = SlowAllocateRaw(size_in_bytes); |
298 if (object != NULL) { | 299 if (object != NULL) { |
299 if (identity() == CODE_SPACE) { | 300 if (identity() == CODE_SPACE) { |
300 SkipList::Update(object->address(), size_in_bytes); | 301 SkipList::Update(object->address(), size_in_bytes); |
301 } | 302 } |
302 return object; | 303 return object; |
303 } | 304 } |
304 | 305 |
| 306 return NULL; |
| 307 } |
| 308 |
| 309 |
| 310 MaybeObject* PagedSpace::AllocateRaw(int size_in_bytes, |
| 311 AllocationType event) { |
| 312 HeapObject* object = AllocateRawHelper(size_in_bytes); |
| 313 if (object != NULL) { |
| 314 if (event == NEW_OBJECT) { |
| 315 HeapProfiler* profiler = heap()->isolate()->heap_profiler(); |
| 316 if (profiler->is_tracking_allocations()) { |
| 317 profiler->NewObjectEvent(object->address(), size_in_bytes); |
| 318 } |
| 319 } |
| 320 return object; |
| 321 } |
305 return Failure::RetryAfterGC(identity()); | 322 return Failure::RetryAfterGC(identity()); |
306 } | 323 } |
307 | 324 |
308 | 325 |
309 // ----------------------------------------------------------------------------- | 326 // ----------------------------------------------------------------------------- |
310 // NewSpace | 327 // NewSpace |
311 | 328 |
312 | 329 |
313 MaybeObject* NewSpace::AllocateRaw(int size_in_bytes) { | 330 MaybeObject* NewSpace::AllocateRaw(int size_in_bytes) { |
314 Address old_top = allocation_info_.top; | 331 Address old_top = allocation_info_.top; |
(...skipping 10 matching lines...) |
325 old_top += filler_size; | 342 old_top += filler_size; |
326 allocation_info_.top += filler_size; | 343 allocation_info_.top += filler_size; |
327 } | 344 } |
328 } | 345 } |
329 #endif | 346 #endif |
330 | 347 |
331 if (allocation_info_.limit - old_top < size_in_bytes) { | 348 if (allocation_info_.limit - old_top < size_in_bytes) { |
332 return SlowAllocateRaw(size_in_bytes); | 349 return SlowAllocateRaw(size_in_bytes); |
333 } | 350 } |
334 | 351 |
335 Object* obj = HeapObject::FromAddress(old_top); | 352 HeapObject* obj = HeapObject::FromAddress(old_top); |
336 allocation_info_.top += size_in_bytes; | 353 allocation_info_.top += size_in_bytes; |
337 ASSERT_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_); | 354 ASSERT_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_); |
338 | 355 |
| 356 HeapProfiler* profiler = heap()->isolate()->heap_profiler(); |
| 357 if (profiler->is_tracking_allocations()) { |
| 358 profiler->NewObjectEvent(obj->address(), size_in_bytes); |
| 359 } |
| 360 |
339 return obj; | 361 return obj; |
340 } | 362 } |
341 | 363 |
342 | 364 |
343 LargePage* LargePage::Initialize(Heap* heap, MemoryChunk* chunk) { | 365 LargePage* LargePage::Initialize(Heap* heap, MemoryChunk* chunk) { |
344 heap->incremental_marking()->SetOldSpacePageFlags(chunk); | 366 heap->incremental_marking()->SetOldSpacePageFlags(chunk); |
345 return static_cast<LargePage*>(chunk); | 367 return static_cast<LargePage*>(chunk); |
346 } | 368 } |
347 | 369 |
348 | 370 |
349 intptr_t LargeObjectSpace::Available() { | 371 intptr_t LargeObjectSpace::Available() { |
350 return ObjectSizeFor(heap()->isolate()->memory_allocator()->Available()); | 372 return ObjectSizeFor(heap()->isolate()->memory_allocator()->Available()); |
351 } | 373 } |
352 | 374 |
353 | 375 |
354 bool FreeListNode::IsFreeListNode(HeapObject* object) { | 376 bool FreeListNode::IsFreeListNode(HeapObject* object) { |
355 Map* map = object->map(); | 377 Map* map = object->map(); |
356 Heap* heap = object->GetHeap(); | 378 Heap* heap = object->GetHeap(); |
357 return map == heap->raw_unchecked_free_space_map() | 379 return map == heap->raw_unchecked_free_space_map() |
358 || map == heap->raw_unchecked_one_pointer_filler_map() | 380 || map == heap->raw_unchecked_one_pointer_filler_map() |
359 || map == heap->raw_unchecked_two_pointer_filler_map(); | 381 || map == heap->raw_unchecked_two_pointer_filler_map(); |
360 } | 382 } |
361 | 383 |
362 } } // namespace v8::internal | 384 } } // namespace v8::internal |
363 | 385 |
364 #endif // V8_SPACES_INL_H_ | 386 #endif // V8_SPACES_INL_H_ |
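
Editor's note: the substantive change in this patch is twofold. PagedSpace::AllocateRaw is split into an AllocateRawHelper that tries each allocation path in order (linear bump allocation, then the free list, then SlowAllocateRaw) and a thin AllocateRaw wrapper that reports successful allocations to the HeapProfiler; NewSpace::AllocateRaw gains the same profiler hook inline. Below is a minimal, self-contained sketch of that wrapper pattern, not V8 code: Arena, Profiler, and kCapacity are invented for illustration, and only the structure (helper tries the paths, wrapper reports success exactly once) mirrors the patch.

    // Sketch of the fast-path/slow-path allocation chain with a single
    // profiler hook in the wrapper. All names here are hypothetical.
    #include <cstddef>
    #include <cstdio>

    struct Profiler {
      bool tracking = false;
      void NewObjectEvent(void* address, std::size_t size) {
        std::printf("allocated %zu bytes at %p\n", size, address);
      }
    };

    struct Arena {
      static const std::size_t kCapacity = 1 << 16;
      char buffer[kCapacity];
      std::size_t top = 0;
      Profiler profiler;

      // Fast path: bump-pointer allocation. The limit check is written as
      // "remaining space < size" (compare NewSpace::AllocateRaw above),
      // which cannot overflow past the end of the buffer.
      void* AllocateLinearly(std::size_t size) {
        if (kCapacity - top < size) return nullptr;
        void* result = buffer + top;
        top += size;
        return result;
      }

      // Helper: try each path in turn, nullptr on total failure. The real
      // helper also consults a free list and a slow path that acquires new
      // pages; this sketch has only the linear path.
      void* AllocateRawHelper(std::size_t size) {
        return AllocateLinearly(size);
      }

      // Wrapper: success and failure each reach exactly one exit, so the
      // profiler notification lives in one place instead of being repeated
      // on every fallback branch.
      void* AllocateRaw(std::size_t size) {
        void* object = AllocateRawHelper(size);
        if (object != nullptr && profiler.tracking) {
          profiler.NewObjectEvent(object, size);
        }
        return object;  // the real code returns Failure::RetryAfterGC here
      }
    };

    int main() {
      Arena arena;
      arena.profiler.tracking = true;
      arena.AllocateRaw(64);
      arena.AllocateRaw(128);
      return 0;
    }

Keeping the profiler call out of AllocateRawHelper is what lets the three fallback branches in the patch stay identical; only the success/failure boundary of the whole operation is instrumented.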