OLD | NEW |
1 /* | 1 /* |
2 * Copyright (C) 2013 Google Inc. All rights reserved. | 2 * Copyright (C) 2013 Google Inc. All rights reserved. |
3 * | 3 * |
4 * Redistribution and use in source and binary forms, with or without | 4 * Redistribution and use in source and binary forms, with or without |
5 * modification, are permitted provided that the following conditions are | 5 * modification, are permitted provided that the following conditions are |
6 * met: | 6 * met: |
7 * | 7 * |
8 * * Redistributions of source code must retain the above copyright | 8 * * Redistributions of source code must retain the above copyright |
9 * notice, this list of conditions and the following disclaimer. | 9 * notice, this list of conditions and the following disclaimer. |
10 * * Redistributions in binary form must reproduce the above | 10 * * Redistributions in binary form must reproduce the above |
(...skipping 981 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
992 } | 992 } |
993 | 993 |
    // Heap-wide allocation statistics. These counters are updated through the
    // project's atomic helpers (atomicAdd/atomicSubtract) and read with
    // acquireLoad, so they may be touched concurrently from multiple threads.
    // NOTE(review): deltas are cast size_t -> long for the atomic helpers;
    // assumes a single delta always fits in a signed long — confirm on LLP64.
    static void increaseAllocatedObjectSize(size_t delta) { atomicAdd(&s_allocatedObjectSize, static_cast<long>(delta)); }
    static void decreaseAllocatedObjectSize(size_t delta) { atomicSubtract(&s_allocatedObjectSize, static_cast<long>(delta)); }
    // Bytes of heap objects allocated since the counter was last reset.
    static size_t allocatedObjectSize() { return acquireLoad(&s_allocatedObjectSize); }
    // Bytes of objects registered as marked (live) during garbage collection.
    static void increaseMarkedObjectSize(size_t delta) { atomicAdd(&s_markedObjectSize, static_cast<long>(delta)); }
    static size_t markedObjectSize() { return acquireLoad(&s_markedObjectSize); }
    // Bytes of backing space the heap currently has allocated; increased and
    // decreased as page memory is acquired and released.
    static void increaseAllocatedSpace(size_t delta) { atomicAdd(&s_allocatedSpace, static_cast<long>(delta)); }
    static void decreaseAllocatedSpace(size_t delta) { atomicSubtract(&s_allocatedSpace, static_cast<long>(delta)); }
    static size_t allocatedSpace() { return acquireLoad(&s_allocatedSpace); }

    // Estimate of how long the next marking phase will take; defined out of
    // line. NOTE(review): units and heuristic are not visible in this header.
    static double estimatedMarkingTime();
    // On object allocation, register the object's externally allocated memory.
    // Updated atomically; the inline definition (further down in this header)
    // may request an urgent GC when external allocations since the last GC
    // grow too large.
    static inline void increaseExternallyAllocatedBytes(size_t);
    static size_t externallyAllocatedBytes() { return acquireLoad(&s_externallyAllocatedBytes); }

    // On object tracing, register the object's externally allocated memory (as still live.)
    static void increaseExternallyAllocatedBytesAlive(size_t delta)
    {
        // Non-atomic update; the ASSERT records that this only runs while a
        // GC is in progress.
        ASSERT(ThreadState::current()->isInGC());
        s_externallyAllocatedBytesAlive += delta;
    }
    // NOTE(review): read without acquireLoad, unlike the counters above —
    // presumably safe because writes happen only inside GC; confirm.
    static size_t externallyAllocatedBytesAlive() { return s_externallyAllocatedBytesAlive; }

    // Flag requesting that a GC be scheduled urgently. Written with release
    // semantics and read with acquire semantics so it can be checked from
    // any thread.
    static void requestUrgentGC();
    static void clearUrgentGC() { releaseStore(&s_requestedUrgentGC, 0); }
    static bool isUrgentGCRequested() { return acquireLoad(&s_requestedUrgentGC); }
| 1020 |
1004 private: | 1021 private: |
    // A RegionTree is a simple binary search tree of PageMemoryRegions sorted
    // by base addresses.
    class RegionTree {
    public:
        // Takes a non-owning pointer to the region; child links start empty.
        explicit RegionTree(PageMemoryRegion* region) : m_region(region), m_left(nullptr), m_right(nullptr) { }
        // The tree owns its child nodes: deleting a node recursively deletes
        // both subtrees.
        ~RegionTree()
        {
            delete m_left;
            delete m_right;
        }
        // Defined out of line. Presumably returns the region covering the
        // given address (null otherwise) — confirm against Heap.cpp.
        PageMemoryRegion* lookup(Address);
        // Insert/remove nodes of the tree rooted at *root (second argument).
        static void add(RegionTree*, RegionTree**);
        static void remove(PageMemoryRegion*, RegionTree**);
    private:
        PageMemoryRegion* m_region;
        RegionTree* m_left;
        RegionTree* m_right;
    };
1023 | 1040 |
    // Reset counters that track live and allocated-since-last-GC sizes.
    static void resetHeapCounters();

    static Visitor* s_markingVisitor;
    static CallbackStack* s_markingStack;
    static CallbackStack* s_postMarkingCallbackStack;
    static CallbackStack* s_weakCallbackStack;
    static CallbackStack* s_ephemeronStack;
    static HeapDoesNotContainCache* s_heapDoesNotContainCache;
    static bool s_shutdownCalled;
    static bool s_lastGCWasConservative;
    static FreePagePool* s_freePagePool;
    static OrphanedPagePool* s_orphanedPagePool;
    static RegionTree* s_regionTree;
    // Statistics counters; see the public increase/decrease accessors above
    // for how each is updated (atomically, except where noted there).
    static size_t s_allocatedSpace;
    static size_t s_allocatedObjectSize;
    static size_t s_markedObjectSize;
    static size_t s_externallyAllocatedBytes;
    static size_t s_externallyAllocatedBytesAlive;
    // Urgent-GC request flag; accessed with release/acquire atomics.
    static unsigned s_requestedUrgentGC;

    friend class ThreadState;
1042 }; | 1063 }; |
1043 | 1064 |
1044 template<typename T> | 1065 template<typename T> |
1045 struct HeapIndexTrait { | 1066 struct HeapIndexTrait { |
1046 static int index() { return NormalPageHeapIndex; }; | 1067 static int index() { return NormalPageHeapIndex; }; |
1047 }; | 1068 }; |
1048 | 1069 |
1049 // FIXME: The forward declaration is layering violation. | 1070 // FIXME: The forward declaration is layering violation. |
1050 #define DEFINE_TYPED_HEAP_TRAIT(Type) \ | 1071 #define DEFINE_TYPED_HEAP_TRAIT(Type) \ |
(...skipping 447 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1498 // FIXME: We don't support reallocate() for finalizable objects. | 1519 // FIXME: We don't support reallocate() for finalizable objects. |
1499 ASSERT(!Heap::gcInfo(previousHeader->gcInfoIndex())->hasFinalizer()); | 1520 ASSERT(!Heap::gcInfo(previousHeader->gcInfoIndex())->hasFinalizer()); |
1500 ASSERT(previousHeader->gcInfoIndex() == GCInfoTrait<T>::index()); | 1521 ASSERT(previousHeader->gcInfoIndex() == GCInfoTrait<T>::index()); |
1501 size_t copySize = previousHeader->payloadSize(); | 1522 size_t copySize = previousHeader->payloadSize(); |
1502 if (copySize > size) | 1523 if (copySize > size) |
1503 copySize = size; | 1524 copySize = size; |
1504 memcpy(address, previous, copySize); | 1525 memcpy(address, previous, copySize); |
1505 return address; | 1526 return address; |
1506 } | 1527 } |
1507 | 1528 |
| 1529 void Heap::increaseExternallyAllocatedBytes(size_t delta) |
| 1530 { |
| 1531 // Flag GC urgency on a 50% increase in external allocation |
| 1532 // since the last GC, but not for less than 100M. |
| 1533 // |
| 1534 // FIXME: consider other, 'better' policies (e.g., have the count of |
| 1535 // heap objects with external allocations be taken into |
| 1536 // account, ...) The overall goal here is to trigger a |
| 1537 // GC such that it considerably lessens memory pressure |
| 1538 // for a renderer process, when absolutely needed. |
| 1539 size_t externalBytesAllocatedSinceLastGC = atomicAdd(&s_externallyAllocatedB
ytes, static_cast<long>(delta)); |
| 1540 if (LIKELY(externalBytesAllocatedSinceLastGC < 100 * 1024 * 1024)) |
| 1541 return; |
| 1542 |
| 1543 if (UNLIKELY(isUrgentGCRequested())) |
| 1544 return; |
| 1545 |
| 1546 size_t externalBytesAliveAtLastGC = externallyAllocatedBytesAlive(); |
| 1547 if (UNLIKELY(externalBytesAllocatedSinceLastGC > externalBytesAliveAtLastGC
/ 2)) |
| 1548 Heap::requestUrgentGC(); |
| 1549 } |
| 1550 |
1508 class HeapAllocatorQuantizer { | 1551 class HeapAllocatorQuantizer { |
1509 public: | 1552 public: |
1510 template<typename T> | 1553 template<typename T> |
1511 static size_t quantizedSize(size_t count) | 1554 static size_t quantizedSize(size_t count) |
1512 { | 1555 { |
1513 RELEASE_ASSERT(count <= kMaxUnquantizedAllocation / sizeof(T)); | 1556 RELEASE_ASSERT(count <= kMaxUnquantizedAllocation / sizeof(T)); |
1514 return BaseHeap::roundedAllocationSize(count * sizeof(T)); | 1557 return BaseHeap::roundedAllocationSize(count * sizeof(T)); |
1515 } | 1558 } |
1516 static const size_t kMaxUnquantizedAllocation = maxHeapObjectSize; | 1559 static const size_t kMaxUnquantizedAllocation = maxHeapObjectSize; |
1517 }; | 1560 }; |
(...skipping 1007 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
// GCInfoTrait for the Heap* collection aliases forwards to the trait of the
// underlying WTF collection instantiated with HeapAllocator.
template<typename T, size_t inlineCapacity>
struct GCInfoTrait<HeapVector<T, inlineCapacity>> : public GCInfoTrait<Vector<T, inlineCapacity, HeapAllocator>> { };
template<typename T, size_t inlineCapacity>
struct GCInfoTrait<HeapDeque<T, inlineCapacity>> : public GCInfoTrait<Deque<T, inlineCapacity, HeapAllocator>> { };
template<typename T, typename U, typename V>
struct GCInfoTrait<HeapHashCountedSet<T, U, V>> : public GCInfoTrait<HashCountedSet<T, U, V, HeapAllocator>> { };
2531 | 2574 |
2532 } // namespace blink | 2575 } // namespace blink |
2533 | 2576 |
2534 #endif // Heap_h | 2577 #endif // Heap_h |
OLD | NEW |