Chromium Code Reviews

Diff: third_party/WebKit/Source/wtf/Vector.h

Issue 2531973002: Simple BlinkGC heap compaction. (Closed)
Patch Set: Clear unused pages before decommitting (created 4 years ago)
 /*
  * Copyright (C) 2005, 2006, 2007, 2008 Apple Inc. All rights reserved.
  *
  * This library is free software; you can redistribute it and/or
  * modify it under the terms of the GNU Library General Public
  * License as published by the Free Software Foundation; either
  * version 2 of the License, or (at your option) any later version.
  *
  * This library is distributed in the hope that it will be useful,
  * but WITHOUT ANY WARRANTY; without even the implied warranty of
(...skipping 474 matching lines...)
 
   using Base::clearUnusedSlots;
   using Base::checkUnusedSlots;
 
   bool hasOutOfLineBuffer() const {
     // When inlineCapacity is 0 we have an out of line buffer if we have a
     // buffer.
     return buffer();
   }
 
+  T** bufferSlot() { return &m_buffer; }
+
  protected:
   using Base::m_size;
 
  private:
   using Base::m_buffer;
   using Base::m_capacity;
 };
 
 template <typename T, size_t inlineCapacity, typename Allocator>
 class VectorBuffer : protected VectorBufferBase<T, true, Allocator> {
(...skipping 259 matching lines...)
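The bufferSlot() accessor added above returns the address of m_buffer itself (a T**), not the buffer it points to. Below is a minimal, self-contained sketch of why that matters for heap compaction; ToyBuffer, SlotRegistry, registerSlot(), and compact() are hypothetical names chosen for illustration and are not Blink's Allocator/ThreadHeap API. The idea is that a compaction pass can relocate a backing store and then patch every registered slot so the owning vector keeps pointing at live memory.

#include <cstddef>
#include <cstdlib>
#include <cstring>
#include <utility>
#include <vector>

// Toy stand-in for a vector's buffer fields. bufferSlot() mirrors the
// accessor added in this patch: it exposes the address of the pointer.
struct ToyBuffer {
  int* m_buffer = nullptr;
  std::size_t m_capacity = 0;

  int** bufferSlot() { return &m_buffer; }
};

// Hypothetical registry of slots that refer to movable backing stores.
class SlotRegistry {
 public:
  void registerSlot(int** slot, std::size_t sizeInBytes) {
    m_slots.emplace_back(slot, sizeInBytes);
  }

  // Relocate every registered backing store and patch the owning slot in
  // place, which is what keeps the owner's pointer valid after compaction.
  void compact() {
    for (auto& entry : m_slots) {
      int** slot = entry.first;
      std::size_t size = entry.second;
      int* newStore = static_cast<int*>(std::malloc(size));
      std::memcpy(newStore, *slot, size);
      std::free(*slot);
      *slot = newStore;
    }
  }

 private:
  std::vector<std::pair<int**, std::size_t>> m_slots;
};

int main() {
  ToyBuffer vec;
  vec.m_capacity = 4;
  vec.m_buffer = static_cast<int*>(std::malloc(vec.m_capacity * sizeof(int)));
  for (std::size_t i = 0; i < vec.m_capacity; ++i)
    vec.m_buffer[i] = static_cast<int>(i);

  SlotRegistry registry;
  registry.registerSlot(vec.bufferSlot(), vec.m_capacity * sizeof(int));
  registry.compact();  // vec.m_buffer now points at the relocated store.

  const bool ok = (vec.m_buffer[3] == 3);
  std::free(vec.m_buffer);
  return ok ? 0 : 1;
}

Registering the slot (a T**) rather than the buffer (a T*) is what lets the mover update the owner without knowing anything about Vector's internals.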
     Allocator::leaveGCForbiddenScope();
   }
 
   using Base::buffer;
   using Base::capacity;
 
   bool hasOutOfLineBuffer() const {
     return buffer() && buffer() != inlineBuffer();
   }
 
+  T** bufferSlot() { return &m_buffer; }
+
  protected:
   using Base::m_size;
 
  private:
   using Base::m_buffer;
   using Base::m_capacity;
 
   static const size_t m_inlineBufferSize = inlineCapacity * sizeof(T);
   T* inlineBuffer() { return reinterpret_cast_ptr<T*>(m_inlineBuffer.buffer); }
   const T* inlineBuffer() const {
(...skipping 812 matching lines...)
   if (!buffer())
     return;
   if (this->hasOutOfLineBuffer()) {
     // This is a performance optimization for the case where the buffer has
     // already been traced elsewhere. This can happen if the conservative
     // scanning traced an on-stack (false-positive or real) pointer to the
     // HeapVector, and then visitor->trace() traces the HeapVector.
     if (Allocator::isHeapObjectAlive(buffer()))
       return;
     Allocator::markNoTracing(visitor, buffer());
+    Allocator::registerBackingStoreReference(visitor, Base::bufferSlot());
   }
   const T* bufferBegin = buffer();
   const T* bufferEnd = buffer() + size();
   if (IsTraceableInCollectionTrait<VectorTraits<T>>::value) {
     for (const T* bufferEntry = bufferBegin; bufferEntry != bufferEnd;
          bufferEntry++)
       Allocator::template trace<VisitorDispatcher, T, VectorTraits<T>>(
           visitor, *const_cast<T*>(bufferEntry));
     checkUnusedSlots(buffer() + size(), buffer() + capacity());
   }
 }
 
 }  // namespace WTF
 
 using WTF::Vector;
 
 #endif  // WTF_Vector_h
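The new Allocator::registerBackingStoreReference(visitor, Base::bufferSlot()) call in trace() above is the point where that slot is handed to the collector. The sketch below shows only the shape of that trace-time interaction; ToyCompactionContext, registerSlot(), relocated(), and traceBackingStore() are hypothetical names, not Blink's Visitor or Allocator interfaces.

#include <unordered_set>

// Hypothetical compaction bookkeeping owned by the collector. A call like
// Allocator::registerBackingStoreReference(visitor, slot) would, conceptually,
// end up in something like registerSlot().
class ToyCompactionContext {
 public:
  void registerSlot(void** slot) { m_slots.insert(slot); }

  // After a backing store has been moved from |from| to |to|, rewrite every
  // registered slot that still points at the old address.
  void relocated(void* from, void* to) {
    for (void** slot : m_slots) {
      if (*slot == from)
        *slot = to;
    }
  }

 private:
  std::unordered_set<void**> m_slots;
};

// Trace-time usage, mirroring the pattern in Vector<T>::trace(): only an
// out-of-line backing store is registered, via the address of m_buffer.
template <typename VectorLike>
void traceBackingStore(ToyCompactionContext& context, VectorLike& vector) {
  if (!vector.hasOutOfLineBuffer())
    return;
  // The real code also marks the backing store (markNoTracing) at this point.
  context.registerSlot(reinterpret_cast<void**>(vector.bufferSlot()));
}

Note that in the real trace() the registration sits behind the hasOutOfLineBuffer() check: an inline buffer is embedded in the Vector object itself, so there is no separately movable backing store to register.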
