OLD | NEW |
1 /* | 1 /* |
2 * Copyright (C) 2013 Google Inc. All rights reserved. | 2 * Copyright (C) 2013 Google Inc. All rights reserved. |
3 * | 3 * |
4 * Redistribution and use in source and binary forms, with or without | 4 * Redistribution and use in source and binary forms, with or without |
5 * modification, are permitted provided that the following conditions are | 5 * modification, are permitted provided that the following conditions are |
6 * met: | 6 * met: |
7 * | 7 * |
8 * * Redistributions of source code must retain the above copyright | 8 * * Redistributions of source code must retain the above copyright |
9 * notice, this list of conditions and the following disclaimer. | 9 * notice, this list of conditions and the following disclaimer. |
10 * * Redistributions in binary form must reproduce the above | 10 * * Redistributions in binary form must reproduce the above |
(...skipping 333 matching lines...)
344 // The allocation size calculation can overflow for large sizes. | 344 // The allocation size calculation can overflow for large sizes. |
345 RELEASE_ASSERT(allocationSize > size); | 345 RELEASE_ASSERT(allocationSize > size); |
346 // Align size with allocation granularity. | 346 // Align size with allocation granularity. |
347 allocationSize = (allocationSize + allocationMask) & ~allocationMask; | 347 allocationSize = (allocationSize + allocationMask) & ~allocationMask; |
348 return allocationSize; | 348 return allocationSize; |
349 } | 349 } |
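The alignment step above is the standard power-of-two rounding trick: adding the mask and then clearing the low bits rounds allocationSize up to the next multiple of the allocation granularity. A minimal standalone sketch of the same idea, assuming an illustrative 8-byte granularity (the names kAllocationGranularity and roundUpToGranularity are hypothetical, not the constants defined in this header):

    #include <cstddef>

    static const size_t kAllocationGranularity = 8;                  // assumed example value
    static const size_t kAllocationMask = kAllocationGranularity - 1;

    // Rounds size up to the next multiple of the power-of-two granularity.
    inline size_t roundUpToGranularity(size_t size)
    {
        return (size + kAllocationMask) & ~kAllocationMask;
    }

    // e.g. roundUpToGranularity(13) == 16, roundUpToGranularity(16) == 16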
350 static Address allocateOnArenaIndex(ThreadState*, size_t, int arenaIndex, size_t gcInfoIndex, const char* typeName); | 350 static Address allocateOnArenaIndex(ThreadState*, size_t, int arenaIndex, size_t gcInfoIndex, const char* typeName); |
351 template<typename T> static Address allocate(size_t, bool eagerlySweep = false); | 351 template<typename T> static Address allocate(size_t, bool eagerlySweep = false); |
352 template<typename T> static Address reallocate(void* previous, size_t); | 352 template<typename T> static Address reallocate(void* previous, size_t); |
353 | 353 |
| 354 static const char* gcReasonString(BlinkGC::GCReason); |
| 355 static void collectGarbage(BlinkGC::StackState, BlinkGC::GCType, BlinkGC::GCReason); |
| 356 static void collectGarbageForTerminatingThread(ThreadState*); |
| 357 static void collectAllGarbage(); |
| 358 |
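The block added above moves the GC entry points (gcReasonString, collectGarbage, collectGarbageForTerminatingThread, collectAllGarbage) into this header. A hypothetical call site, assuming these are static members of the Heap class declared here and that the BlinkGC enum values named below exist; this is an illustrative sketch, not code from this change:

    void forceFullGCExample()   // hypothetical helper, not part of this CL
    {
        // Precise GC: no conservative stack scan, sweep immediately, forced reason.
        Heap::collectGarbage(BlinkGC::NoHeapPointersOnStack,
                             BlinkGC::GCWithSweep,
                             BlinkGC::ForcedGC);

        // Or repeat collections until no further memory is reclaimed.
        Heap::collectAllGarbage();
    }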
354 void processMarkingStack(Visitor*); | 359 void processMarkingStack(Visitor*); |
355 void postMarkingProcessing(Visitor*); | 360 void postMarkingProcessing(Visitor*); |
356 void globalWeakProcessing(Visitor*); | 361 void globalWeakProcessing(Visitor*); |
357 | 362 |
358 void preGC(); | 363 void preGC(); |
359 void postGC(BlinkGC::GCType); | 364 void postGC(BlinkGC::GCType); |
360 | 365 |
361 // Conservatively checks whether an address is a pointer in any of the | 366 // Conservatively checks whether an address is a pointer in any of the |
362 // thread heaps. If so marks the object pointed to as live. | 367 // thread heaps. If so marks the object pointed to as live. |
363 Address checkAndMarkPointer(Visitor*, Address); | 368 Address checkAndMarkPointer(Visitor*, Address); |
(...skipping 255 matching lines...)
619 void VisitorHelper<Derived>::handleWeakCell(Visitor* self, void* object) | 624 void VisitorHelper<Derived>::handleWeakCell(Visitor* self, void* object) |
620 { | 625 { |
621 T** cell = reinterpret_cast<T**>(object); | 626 T** cell = reinterpret_cast<T**>(object); |
622 if (*cell && !ObjectAliveTrait<T>::isHeapObjectAlive(*cell)) | 627 if (*cell && !ObjectAliveTrait<T>::isHeapObjectAlive(*cell)) |
623 *cell = nullptr; | 628 *cell = nullptr; |
624 } | 629 } |
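handleWeakCell above is the weak-reference clearing idiom: if the referent did not survive marking, the cell is nulled out so later reads observe nullptr instead of a dangling pointer. A generic standalone sketch of the same idiom, with a hypothetical liveness predicate passed in instead of ObjectAliveTrait:

    // Clears *cell when the pointed-to object is no longer alive.
    // IsAlive is any callable returning true for objects that survived marking.
    template <typename T, typename IsAlive>
    void clearIfDead(T** cell, IsAlive isHeapObjectAlive)
    {
        if (*cell && !isHeapObjectAlive(*cell))
            *cell = nullptr;
    }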
625 | 630 |
626 } // namespace blink | 631 } // namespace blink |
627 | 632 |
628 #endif // Heap_h | 633 #endif // Heap_h |