OLD | NEW |
1 /* | 1 /* |
2 * Copyright (C) 2013 Google Inc. All rights reserved. | 2 * Copyright (C) 2013 Google Inc. All rights reserved. |
3 * | 3 * |
4 * Redistribution and use in source and binary forms, with or without | 4 * Redistribution and use in source and binary forms, with or without |
5 * modification, are permitted provided that the following conditions are | 5 * modification, are permitted provided that the following conditions are |
6 * met: | 6 * met: |
7 * | 7 * |
8 * * Redistributions of source code must retain the above copyright | 8 * * Redistributions of source code must retain the above copyright |
9 * notice, this list of conditions and the following disclaimer. | 9 * notice, this list of conditions and the following disclaimer. |
10 * * Redistributions in binary form must reproduce the above | 10 * * Redistributions in binary form must reproduce the above |
(...skipping 333 matching lines...)
344 // The allocation size calculation can overflow for large sizes. | 344 // The allocation size calculation can overflow for large sizes. |
345 RELEASE_ASSERT(allocationSize > size); | 345 RELEASE_ASSERT(allocationSize > size); |
346 // Align size with allocation granularity. | 346 // Align size with allocation granularity. |
347 allocationSize = (allocationSize + allocationMask) & ~allocationMask; | 347 allocationSize = (allocationSize + allocationMask) & ~allocationMask; |
348 return allocationSize; | 348 return allocationSize; |
349 } | 349 } |
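Note: the rounding step above uses the standard power-of-two alignment idiom: adding the mask and then clearing the low bits rounds allocationSize up to the next multiple of the allocation granularity. A minimal standalone sketch of that idiom follows; the granularity value of 8 is only an assumption for illustration, not necessarily Blink's allocationGranularity.

    #include <cassert>
    #include <cstddef>

    // Round size up to the next multiple of a power-of-two granularity.
    static size_t roundUp(size_t size, size_t granularity /* assumed power of two */)
    {
        const size_t mask = granularity - 1;
        return (size + mask) & ~mask;
    }

    int main()
    {
        assert(roundUp(13, 8) == 16); // 13 rounds up to the next multiple of 8
        assert(roundUp(16, 8) == 16); // already aligned, unchanged
        return 0;
    }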
350 static Address allocateOnArenaIndex(ThreadState*, size_t, int arenaIndex, size_t gcInfoIndex, const char* typeName); | 350 static Address allocateOnArenaIndex(ThreadState*, size_t, int arenaIndex, size_t gcInfoIndex, const char* typeName); |
351 template<typename T> static Address allocate(size_t, bool eagerlySweep = false); | 351 template<typename T> static Address allocate(size_t, bool eagerlySweep = false); |
352 template<typename T> static Address reallocate(void* previous, size_t); | 352 template<typename T> static Address reallocate(void* previous, size_t); |
353 | 353 |
354 static const char* gcReasonString(BlinkGC::GCReason); | |
355 static void collectGarbage(BlinkGC::StackState, BlinkGC::GCType, BlinkGC::GCReason); |
356 static void collectGarbageForTerminatingThread(ThreadState*); | |
357 static void collectAllGarbage(); | |
358 | |
359 void processMarkingStack(Visitor*); | 354 void processMarkingStack(Visitor*); |
360 void postMarkingProcessing(Visitor*); | 355 void postMarkingProcessing(Visitor*); |
361 void globalWeakProcessing(Visitor*); | 356 void globalWeakProcessing(Visitor*); |
362 | 357 |
363 void preGC(); | 358 void preGC(); |
364 void postGC(BlinkGC::GCType); | 359 void postGC(BlinkGC::GCType); |
365 | 360 |
366 // Conservatively checks whether an address is a pointer in any of the | 361 // Conservatively checks whether an address is a pointer in any of the |
367 // thread heaps. If so marks the object pointed to as live. | 362 // thread heaps. If so marks the object pointed to as live. |
368 Address checkAndMarkPointer(Visitor*, Address); | 363 Address checkAndMarkPointer(Visitor*, Address); |
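Note: for readers unfamiliar with conservative scanning, the general shape of such a check is sketched below. This is not Blink's implementation; HeapPageSketch and its fields are placeholders standing in for the real heap page data structures and marking machinery.

    #include <cstdint>

    using Address = uint8_t*;

    // Hypothetical page descriptor: knows its address range and records a mark.
    struct HeapPageSketch {
        Address begin = nullptr;
        Address end = nullptr;
        bool marked = false;
        bool contains(Address addr) const { return addr >= begin && addr < end; }
    };

    // Treat an arbitrary word as a potential pointer; if it lands inside a known
    // heap page, conservatively mark the object it points into as live.
    inline Address checkAndMarkPointerSketch(HeapPageSketch& page, Address possiblePointer)
    {
        if (page.contains(possiblePointer)) {
            page.marked = true;
            return possiblePointer;
        }
        return nullptr;
    }

    int main()
    {
        uint8_t storage[64];
        HeapPageSketch page;
        page.begin = storage;
        page.end = storage + sizeof(storage);
        checkAndMarkPointerSketch(page, storage + 8); // address inside the page: gets marked
        return page.marked ? 0 : 1;
    }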
(...skipping 255 matching lines...)
624 void VisitorHelper<Derived>::handleWeakCell(Visitor* self, void* object) | 619 void VisitorHelper<Derived>::handleWeakCell(Visitor* self, void* object) |
625 { | 620 { |
626 T** cell = reinterpret_cast<T**>(object); | 621 T** cell = reinterpret_cast<T**>(object); |
627 if (*cell && !ObjectAliveTrait<T>::isHeapObjectAlive(*cell)) | 622 if (*cell && !ObjectAliveTrait<T>::isHeapObjectAlive(*cell)) |
628 *cell = nullptr; | 623 *cell = nullptr; |
629 } | 624 } |
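Note: handleWeakCell above is the standard weak-reference sweep step: if the referenced object did not survive marking, the cell is cleared rather than left dangling. A standalone analogue, with a simple liveness flag standing in for ObjectAliveTrait, looks like this:

    #include <cassert>

    struct Node { bool alive = true; }; // stand-in for a garbage-collected object

    // Clear *cell when the object it points to is no longer alive.
    static void handleWeakCellSketch(Node** cell)
    {
        if (*cell && !(*cell)->alive)
            *cell = nullptr;
    }

    int main()
    {
        Node dead;
        dead.alive = false;
        Node* cell = &dead;
        handleWeakCellSketch(&cell);
        assert(cell == nullptr); // weak reference was cleared, not left dangling
        return 0;
    }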
630 | 625 |
631 } // namespace blink | 626 } // namespace blink |
632 | 627 |
633 #endif // Heap_h | 628 #endif // Heap_h |