OLD | NEW |
1 /* | 1 /* |
2 * Copyright (C) 2013 Google Inc. All rights reserved. | 2 * Copyright (C) 2013 Google Inc. All rights reserved. |
3 * | 3 * |
4 * Redistribution and use in source and binary forms, with or without | 4 * Redistribution and use in source and binary forms, with or without |
5 * modification, are permitted provided that the following conditions are | 5 * modification, are permitted provided that the following conditions are |
6 * met: | 6 * met: |
7 * | 7 * |
8 * * Redistributions of source code must retain the above copyright | 8 * * Redistributions of source code must retain the above copyright |
9 * notice, this list of conditions and the following disclaimer. | 9 * notice, this list of conditions and the following disclaimer. |
10 * * Redistributions in binary form must reproduce the above | 10 * * Redistributions in binary form must reproduce the above |
(...skipping 153 matching lines...)
164 size_t m_objectSizeAtLastGC; | 164 size_t m_objectSizeAtLastGC; |
165 size_t m_markedObjectSize; | 165 size_t m_markedObjectSize; |
166 size_t m_markedObjectSizeAtLastCompleteSweep; | 166 size_t m_markedObjectSizeAtLastCompleteSweep; |
167 size_t m_wrapperCount; | 167 size_t m_wrapperCount; |
168 size_t m_wrapperCountAtLastGC; | 168 size_t m_wrapperCountAtLastGC; |
169 size_t m_collectedWrapperCount; | 169 size_t m_collectedWrapperCount; |
170 size_t m_partitionAllocSizeAtLastGC; | 170 size_t m_partitionAllocSizeAtLastGC; |
171 double m_estimatedMarkingTimePerByte; | 171 double m_estimatedMarkingTimePerByte; |
172 }; | 172 }; |
173 | 173 |
174 class PLATFORM_EXPORT ThreadHeap { | 174 class PLATFORM_EXPORT Heap { |
175 STATIC_ONLY(ThreadHeap); | 175 STATIC_ONLY(Heap); |
176 public: | 176 public: |
177 static void init(); | 177 static void init(); |
178 static void shutdown(); | 178 static void shutdown(); |
179 | 179 |
180 #if ENABLE(ASSERT) | 180 #if ENABLE(ASSERT) |
181 static BasePage* findPageFromAddress(Address); | 181 static BasePage* findPageFromAddress(Address); |
182 static BasePage* findPageFromAddress(const void* pointer) { return findPageFromAddress(reinterpret_cast<Address>(const_cast<void*>(pointer))); } | 182 static BasePage* findPageFromAddress(const void* pointer) { return findPageFromAddress(reinterpret_cast<Address>(const_cast<void*>(pointer))); } |
183 #endif | 183 #endif |
184 | 184 |
185 template<typename T> | 185 template<typename T> |
(...skipping 41 matching lines...)
227 template<typename T> | 227 template<typename T> |
228 NO_LAZY_SWEEP_SANITIZE_ADDRESS | 228 NO_LAZY_SWEEP_SANITIZE_ADDRESS |
229 static bool willObjectBeLazilySwept(const T* objectPointer) | 229 static bool willObjectBeLazilySwept(const T* objectPointer) |
230 { | 230 { |
231 static_assert(IsGarbageCollectedType<T>::value, "only objects deriving from GarbageCollected can be used."); | 231 static_assert(IsGarbageCollectedType<T>::value, "only objects deriving from GarbageCollected can be used."); |
232 BasePage* page = pageFromObject(objectPointer); | 232 BasePage* page = pageFromObject(objectPointer); |
233 if (page->hasBeenSwept()) | 233 if (page->hasBeenSwept()) |
234 return false; | 234 return false; |
235 ASSERT(page->arena()->getThreadState()->isSweepingInProgress()); | 235 ASSERT(page->arena()->getThreadState()->isSweepingInProgress()); |
236 | 236 |
237 return !ThreadHeap::isHeapObjectAlive(const_cast<T*>(objectPointer)); | 237 return !Heap::isHeapObjectAlive(const_cast<T*>(objectPointer)); |
238 } | 238 } |
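
willObjectBeLazilySwept() exists for eagerly-finalized objects: during their prompt finalization they may need to know whether some other heap object is dead and merely waiting for lazy sweeping, in which case calling into it is unsafe. A minimal sketch of that pattern, using the Heap name from the new side of this patch; Registry and Observer are hypothetical names, not part of this file:

    // Hypothetical eager finalizer; skips peers that this GC cycle
    // has already condemned to lazy sweeping.
    Registry::~Registry()
    {
        for (Observer* observer : m_observers) {
            if (Heap::willObjectBeLazilySwept(observer))
                continue; // dead, will be swept lazily; do not touch
            observer->registryDestroyed();
        }
    }
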
239 | 239 |
240 // Push a trace callback on the marking stack. | 240 // Push a trace callback on the marking stack. |
241 static void pushTraceCallback(void* containerObject, TraceCallback); | 241 static void pushTraceCallback(void* containerObject, TraceCallback); |
242 | 242 |
243 // Push a trace callback on the post-marking callback stack. These | 243 // Push a trace callback on the post-marking callback stack. These |
244 // callbacks are called after normal marking (including ephemeron | 244 // callbacks are called after normal marking (including ephemeron |
245 // iteration). | 245 // iteration). |
246 static void pushPostMarkingCallback(void*, TraceCallback); | 246 static void pushPostMarkingCallback(void*, TraceCallback); |
247 | 247 |
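
For context on the two push functions above: the marker later pops these callbacks and invokes them to trace a container's contents; post-marking callbacks run only after ephemeron iteration has settled. A rough usage sketch, assuming the conventional Blink TraceCallback signature of void (*)(Visitor*, void*); the backing-store names are illustrative only:

    // Hypothetical deferred tracing of a container backing store.
    static void traceBacking(Visitor* visitor, void* self)
    {
        static_cast<MyBackingStore*>(self)->trace(visitor);
    }

    // Queued during marking; invoked when the marker drains its stack.
    Heap::pushTraceCallback(backing, traceBacking);
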
(...skipping 156 matching lines...)
404 public: | 404 public: |
405 using GarbageCollectedType = T; | 405 using GarbageCollectedType = T; |
406 | 406 |
407 void* operator new(size_t size) | 407 void* operator new(size_t size) |
408 { | 408 { |
409 return allocateObject(size, IsEagerlyFinalizedType<T>::value); | 409 return allocateObject(size, IsEagerlyFinalizedType<T>::value); |
410 } | 410 } |
411 | 411 |
412 static void* allocateObject(size_t size, bool eagerlySweep) | 412 static void* allocateObject(size_t size, bool eagerlySweep) |
413 { | 413 { |
414 return ThreadHeap::allocate<T>(size, eagerlySweep); | 414 return Heap::allocate<T>(size, eagerlySweep); |
415 } | 415 } |
416 | 416 |
417 void operator delete(void* p) | 417 void operator delete(void* p) |
418 { | 418 { |
419 ASSERT_NOT_REACHED(); | 419 ASSERT_NOT_REACHED(); |
420 } | 420 } |
421 | 421 |
422 protected: | 422 protected: |
423 GarbageCollected() | 423 GarbageCollected() |
424 { | 424 { |
(...skipping 12 matching lines...)
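
The operator new defined above is what routes a plain `new` of a GarbageCollected-derived type into the Oilpan heap. A minimal declaration sketch, assuming the DEFINE_INLINE_TRACE() helper from this era of Blink; IntWrapper is illustrative:

    class IntWrapper : public GarbageCollected<IntWrapper> {
    public:
        static IntWrapper* create(int x) { return new IntWrapper(x); }
        DEFINE_INLINE_TRACE() { } // no Members to trace
    private:
        explicit IntWrapper(int x) : m_x(x) { }
        int m_x;
    };

    // 'new IntWrapper(...)' calls GarbageCollected::operator new, which
    // forwards to Heap::allocate<IntWrapper>(size, eagerlySweep == false here).
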
437 // | 437 // |
438 // An exception to the use of sized arenas is made for class types that | 438 // An exception to the use of sized arenas is made for class types that |
439 // require prompt finalization after a garbage collection. That is, their | 439 // require prompt finalization after a garbage collection. That is, their |
440 // instances have to be finalized early and cannot be delayed until lazy | 440 // instances have to be finalized early and cannot be delayed until lazy |
441 // sweeping kicks in for their heap and page. The EAGERLY_FINALIZE() | 441 // sweeping kicks in for their heap and page. The EAGERLY_FINALIZE() |
442 // macro is used to declare a class (and its derived classes) as being | 442 // macro is used to declare a class (and its derived classes) as being |
443 // in need of eager finalization. Must be defined with 'public' visibility | 443 // in need of eager finalization. Must be defined with 'public' visibility |
444 // for a class. | 444 // for a class. |
445 // | 445 // |
446 | 446 |
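
A hedged sketch of how a class opts in, using the EAGERLY_FINALIZE() and DECLARE_EAGER_FINALIZATION_OPERATOR_NEW() macros defined below; SocketWrapper, its GarbageCollectedFinalized base, and closePlatformSocket() are illustrative assumptions, not part of this file:

    class SocketWrapper : public GarbageCollectedFinalized<SocketWrapper> {
    public:
        EAGERLY_FINALIZE();
        DECLARE_EAGER_FINALIZATION_OPERATOR_NEW();
        ~SocketWrapper() { closePlatformSocket(); } // must run promptly after GC
        DEFINE_INLINE_TRACE() { }
    };
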
447 inline int ThreadHeap::arenaIndexForObjectSize(size_t size) | 447 inline int Heap::arenaIndexForObjectSize(size_t size) |
448 { | 448 { |
449 if (size < 64) { | 449 if (size < 64) { |
450 if (size < 32) | 450 if (size < 32) |
451 return BlinkGC::NormalPage1ArenaIndex; | 451 return BlinkGC::NormalPage1ArenaIndex; |
452 return BlinkGC::NormalPage2ArenaIndex; | 452 return BlinkGC::NormalPage2ArenaIndex; |
453 } | 453 } |
454 if (size < 128) | 454 if (size < 128) |
455 return BlinkGC::NormalPage3ArenaIndex; | 455 return BlinkGC::NormalPage3ArenaIndex; |
456 return BlinkGC::NormalPage4ArenaIndex; | 456 return BlinkGC::NormalPage4ArenaIndex; |
457 } | 457 } |
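
In other words, normal-page allocations are bucketed by payload size (large objects are routed to the large-object arena elsewhere). Worked examples of the mapping above:

    // arenaIndexForObjectSize(24)  -> NormalPage1ArenaIndex  (24 < 32)
    // arenaIndexForObjectSize(48)  -> NormalPage2ArenaIndex  (32 <= 48 < 64)
    // arenaIndexForObjectSize(96)  -> NormalPage3ArenaIndex  (64 <= 96 < 128)
    // arenaIndexForObjectSize(256) -> NormalPage4ArenaIndex  (128 <= 256)
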
458 | 458 |
459 inline bool ThreadHeap::isNormalArenaIndex(int index) | 459 inline bool Heap::isNormalArenaIndex(int index) |
460 { | 460 { |
461 return index >= BlinkGC::NormalPage1ArenaIndex && index <= BlinkGC::NormalPage4ArenaIndex; | 461 return index >= BlinkGC::NormalPage1ArenaIndex && index <= BlinkGC::NormalPage4ArenaIndex; |
462 } | 462 } |
463 | 463 |
464 #define DECLARE_EAGER_FINALIZATION_OPERATOR_NEW() \ | 464 #define DECLARE_EAGER_FINALIZATION_OPERATOR_NEW() \ |
465 public: \ | 465 public: \ |
466 GC_PLUGIN_IGNORE("491488") \ | 466 GC_PLUGIN_IGNORE("491488") \ |
467 void* operator new(size_t size) \ | 467 void* operator new(size_t size) \ |
468 { \ | 468 { \ |
469 return allocateObject(size, true); \ | 469 return allocateObject(size, true); \ |
(...skipping 19 matching lines...)
489 }; | 489 }; |
490 #define EAGERLY_FINALIZE() \ | 490 #define EAGERLY_FINALIZE() \ |
491 private: \ | 491 private: \ |
492 VerifyEagerFinalization m_verifyEagerFinalization; \ | 492 VerifyEagerFinalization m_verifyEagerFinalization; \ |
493 public: \ | 493 public: \ |
494 typedef int IsEagerlyFinalizedMarker | 494 typedef int IsEagerlyFinalizedMarker |
495 #else | 495 #else |
496 #define EAGERLY_FINALIZE() typedef int IsEagerlyFinalizedMarker | 496 #define EAGERLY_FINALIZE() typedef int IsEagerlyFinalizedMarker |
497 #endif | 497 #endif |
498 | 498 |
499 inline Address ThreadHeap::allocateOnArenaIndex(ThreadState* state, size_t size, int arenaIndex, size_t gcInfoIndex, const char* typeName) | 499 inline Address Heap::allocateOnArenaIndex(ThreadState* state, size_t size, int arenaIndex, size_t gcInfoIndex, const char* typeName) |
500 { | 500 { |
501 ASSERT(state->isAllocationAllowed()); | 501 ASSERT(state->isAllocationAllowed()); |
502 ASSERT(arenaIndex != BlinkGC::LargeObjectArenaIndex); | 502 ASSERT(arenaIndex != BlinkGC::LargeObjectArenaIndex); |
503 NormalPageArena* arena = static_cast<NormalPageArena*>(state->arena(arenaIndex)); | 503 NormalPageArena* arena = static_cast<NormalPageArena*>(state->arena(arenaIndex)); |
504 Address address = arena->allocateObject(allocationSizeFromSize(size), gcInfoIndex); | 504 Address address = arena->allocateObject(allocationSizeFromSize(size), gcInfoIndex); |
505 HeapAllocHooks::allocationHookIfEnabled(address, size, typeName); | 505 HeapAllocHooks::allocationHookIfEnabled(address, size, typeName); |
506 return address; | 506 return address; |
507 } | 507 } |
508 | 508 |
509 template<typename T> | 509 template<typename T> |
510 Address ThreadHeap::allocate(size_t size, bool eagerlySweep) | 510 Address Heap::allocate(size_t size, bool eagerlySweep) |
511 { | 511 { |
512 ThreadState* state = ThreadStateFor<ThreadingTrait<T>::Affinity>::state(); | 512 ThreadState* state = ThreadStateFor<ThreadingTrait<T>::Affinity>::state(); |
513 const char* typeName = WTF_HEAP_PROFILER_TYPE_NAME(T); | 513 const char* typeName = WTF_HEAP_PROFILER_TYPE_NAME(T); |
514 return ThreadHeap::allocateOnArenaIndex(state, size, eagerlySweep ? BlinkGC::EagerSweepArenaIndex : ThreadHeap::arenaIndexForObjectSize(size), GCInfoTrait<T>::index(), typeName); | 514 return Heap::allocateOnArenaIndex(state, size, eagerlySweep ? BlinkGC::EagerSweepArenaIndex : Heap::arenaIndexForObjectSize(size), GCInfoTrait<T>::index(), typeName); |
515 } | 515 } |
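
Putting the pieces together, the allocation fast path for a garbage-collected T (names per the new side of this patch) is roughly:

    // new T(...)
    //   -> GarbageCollected<T>::operator new(size)
    //   -> Heap::allocate<T>(size, IsEagerlyFinalizedType<T>::value)
    //   -> Heap::allocateOnArenaIndex(state, size, arenaIndex,
    //                                 GCInfoTrait<T>::index(), typeName)
    //   -> NormalPageArena::allocateObject(allocationSizeFromSize(size),
    //                                      gcInfoIndex)
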
516 | 516 |
517 template<typename T> | 517 template<typename T> |
518 Address ThreadHeap::reallocate(void* previous, size_t size) | 518 Address Heap::reallocate(void* previous, size_t size) |
519 { | 519 { |
520 // Not intended to be a full C realloc() substitute; | 520 // Not intended to be a full C realloc() substitute; |
521 // realloc(nullptr, size) is not a supported alias for malloc(size). | 521 // realloc(nullptr, size) is not a supported alias for malloc(size). |
522 | 522 |
523 // TODO(sof): promptly free the previous object. | 523 // TODO(sof): promptly free the previous object. |
524 if (!size) { | 524 if (!size) { |
525 // If the new size is 0 this is considered equivalent to free(previous). | 525 // If the new size is 0 this is considered equivalent to free(previous). |
526 return nullptr; | 526 return nullptr; |
527 } | 527 } |
528 | 528 |
529 ThreadState* state = ThreadStateFor<ThreadingTrait<T>::Affinity>::state(); | 529 ThreadState* state = ThreadStateFor<ThreadingTrait<T>::Affinity>::state(); |
530 HeapObjectHeader* previousHeader = HeapObjectHeader::fromPayload(previous); | 530 HeapObjectHeader* previousHeader = HeapObjectHeader::fromPayload(previous); |
531 BasePage* page = pageFromObject(previousHeader); | 531 BasePage* page = pageFromObject(previousHeader); |
532 ASSERT(page); | 532 ASSERT(page); |
533 int arenaIndex = page->arena()->arenaIndex(); | 533 int arenaIndex = page->arena()->arenaIndex(); |
534 // Recompute the effective heap index if previous allocation | 534 // Recompute the effective heap index if previous allocation |
535 // was on the normal arenas or a large object. | 535 // was on the normal arenas or a large object. |
536 if (isNormalArenaIndex(arenaIndex) || arenaIndex == BlinkGC::LargeObjectArenaIndex) | 536 if (isNormalArenaIndex(arenaIndex) || arenaIndex == BlinkGC::LargeObjectArenaIndex) |
537 arenaIndex = arenaIndexForObjectSize(size); | 537 arenaIndex = arenaIndexForObjectSize(size); |
538 | 538 |
539 // TODO(haraken): We don't support reallocate() for finalizable objects. | 539 // TODO(haraken): We don't support reallocate() for finalizable objects. |
540 ASSERT(!ThreadHeap::gcInfo(previousHeader->gcInfoIndex())->hasFinalizer()); | 540 ASSERT(!Heap::gcInfo(previousHeader->gcInfoIndex())->hasFinalizer()); |
541 ASSERT(previousHeader->gcInfoIndex() == GCInfoTrait<T>::index()); | 541 ASSERT(previousHeader->gcInfoIndex() == GCInfoTrait<T>::index()); |
542 const char* typeName = WTF_HEAP_PROFILER_TYPE_NAME(T); | 542 const char* typeName = WTF_HEAP_PROFILER_TYPE_NAME(T); |
543 HeapAllocHooks::freeHookIfEnabled(static_cast<Address>(previous)); | 543 HeapAllocHooks::freeHookIfEnabled(static_cast<Address>(previous)); |
544 Address address = ThreadHeap::allocateOnArenaIndex(state, size, arenaIndex, GCInfoTrait<T>::index(), typeName); | 544 Address address = Heap::allocateOnArenaIndex(state, size, arenaIndex, GCInfoTrait<T>::index(), typeName); |
545 size_t copySize = previousHeader->payloadSize(); | 545 size_t copySize = previousHeader->payloadSize(); |
546 if (copySize > size) | 546 if (copySize > size) |
547 copySize = size; | 547 copySize = size; |
548 memcpy(address, previous, copySize); | 548 memcpy(address, previous, copySize); |
549 return address; | 549 return address; |
550 } | 550 } |
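
A hedged usage sketch of reallocate(), within the constraints spelled out above (non-null previous pointer, non-finalizable T); the Buffer type and sizes are illustrative:

    // Grow a non-finalizable backing allocation to 128 bytes.
    Address grown = Heap::reallocate<Buffer>(oldBuffer, 128);
    // Contents are copied up to min(old payload size, new size).
    // Passing size == 0 instead returns nullptr, i.e. free(previous)
    // semantics (modulo the prompt-free TODO above).
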
551 | 551 |
552 template<typename Derived> | 552 template<typename Derived> |
553 template<typename T> | 553 template<typename T> |
554 void VisitorHelper<Derived>::handleWeakCell(Visitor* self, void* object) | 554 void VisitorHelper<Derived>::handleWeakCell(Visitor* self, void* object) |
555 { | 555 { |
556 T** cell = reinterpret_cast<T**>(object); | 556 T** cell = reinterpret_cast<T**>(object); |
557 if (*cell && !ObjectAliveTrait<T>::isHeapObjectAlive(*cell)) | 557 if (*cell && !ObjectAliveTrait<T>::isHeapObjectAlive(*cell)) |
558 *cell = nullptr; | 558 *cell = nullptr; |
559 } | 559 } |
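
handleWeakCell() is the weak-reference fixup run during weak processing: a registered cell that still points at an object left unmarked by this GC cycle is cleared, while a cell pointing at a marked object is left untouched. Schematically:

    // *cell != nullptr and object unmarked  =>  *cell = nullptr
    // *cell != nullptr and object marked    =>  *cell unchanged
    // *cell == nullptr                      =>  no-op
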
560 | 560 |
561 } // namespace blink | 561 } // namespace blink |
562 | 562 |
563 #endif // Heap_h | 563 #endif // Heap_h |