| OLD | NEW |
| (Empty) | |
| 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. |
| 4 |
| 5 #ifndef HeapAllocator_h |
| 6 #define HeapAllocator_h |
| 7 |
| 8 #include "platform/heap/Heap.h" |
| 9 #include "wtf/Assertions.h" |
| 10 #include "wtf/Atomics.h" |
| 11 #include "wtf/Deque.h" |
| 12 #include "wtf/HashCountedSet.h" |
| 13 #include "wtf/HashMap.h" |
| 14 #include "wtf/HashSet.h" |
| 15 #include "wtf/HashTable.h" |
| 16 #include "wtf/LinkedHashSet.h" |
| 17 #include "wtf/ListHashSet.h" |
| 18 #include "wtf/TypeTraits.h" |
| 19 #include "wtf/Vector.h" |
| 20 |
| 21 namespace blink { |
| 22 |
| 23 class HeapAllocatorQuantizer { |
| 24 public: |
| 25 template<typename T> |
| 26 static size_t quantizedSize(size_t count) |
| 27 { |
| 28 RELEASE_ASSERT(count <= kMaxUnquantizedAllocation / sizeof(T)); |
| 29 return Heap::roundedAllocationSize(count * sizeof(T)); |
| 30 } |
| 31 static const size_t kMaxUnquantizedAllocation = maxHeapObjectSize; |
| 32 }; |
| 33 |
// Hash traits for entry types whose presence in a collection is weak:
// entries may vanish from hash sets and maps when their referents die.
template<typename T> struct WeakHandlingHashTraits : WTF::SimpleClassHashTraits<T> {
    // We want to treat the object as a weak object in the sense that it can
    // disappear from hash sets and hash maps.
    static const WTF::WeakHandlingFlag weakHandlingFlag = WTF::WeakHandlingInCollections;
    // Normally whether or not an object needs tracing is inferred
    // automatically from the presence of the trace method, but we don't
    // necessarily have a trace method, and we may not need one because T
    // can perhaps only be allocated inside collections, never as independent
    // objects. Explicitly mark this as needing tracing and it will be traced
    // in collections using the traceInCollection method, which it must have.
    template<typename U = void> struct NeedsTracingLazily {
        static const bool value = true;
    };
    // The traceInCollection method traces differently depending on whether we
    // are strongifying the trace operation. We strongify the trace operation
    // when there are active iterators on the object. In this case all
    // WeakMembers are marked like strong members so that elements do not
    // suddenly disappear during iteration. Returns true if weak pointers to
    // dead objects were found: In this case any strong pointers were not yet
    // traced and the entry should be removed from the collection.
    template<typename VisitorDispatcher>
    static bool traceInCollection(VisitorDispatcher visitor, T& t, WTF::ShouldWeakPointersBeMarkedStrongly strongify)
    {
        return t.traceInCollection(visitor, strongify);
    }
};
| 60 |
// Forward declaration; the primary template and a no-op specialization are
// defined further down. Dispatches tracing of collection backings on whether
// entries need tracing and how weak references are handled.
template<bool needsTracing, WTF::WeakHandlingFlag weakHandlingFlag, WTF::ShouldWeakPointersBeMarkedStrongly strongify, typename T, typename Traits> struct CollectionBackingTraceTrait;
| 62 |
// This is a static-only class used as a trait on collections to make them heap
// allocated. However see also HeapListHashSetAllocator.
class HeapAllocator {
public:
    using Quantizer = HeapAllocatorQuantizer;
    using Visitor = blink::Visitor;
    static const bool isGarbageCollected = true;

    // Allocates backing storage for an on-heap vector of T from the current
    // thread's vector-backing heap.
    template <typename T>
    static T* allocateVectorBacking(size_t size)
    {
        ThreadState* state = ThreadStateFor<ThreadingTrait<T>::Affinity>::state();
        ASSERT(state->isAllocationAllowed());
        size_t gcInfoIndex = GCInfoTrait<HeapVectorBacking<T, VectorTraits<T>>>::index();
        NormalPageHeap* heap = static_cast<NormalPageHeap*>(state->vectorBackingHeap(gcInfoIndex));
        return reinterpret_cast<T*>(heap->allocateObject(Heap::allocationSizeFromSize(size), gcInfoIndex));
    }
    // NOTE(review): this body is currently byte-identical to
    // allocateVectorBacking above; presumably the "expanded" variant is meant
    // to pick an allocation that can grow in place — confirm intent upstream.
    template <typename T>
    static T* allocateExpandedVectorBacking(size_t size)
    {
        ThreadState* state = ThreadStateFor<ThreadingTrait<T>::Affinity>::state();
        ASSERT(state->isAllocationAllowed());
        size_t gcInfoIndex = GCInfoTrait<HeapVectorBacking<T, VectorTraits<T>>>::index();
        NormalPageHeap* heap = static_cast<NormalPageHeap*>(state->vectorBackingHeap(gcInfoIndex));
        return reinterpret_cast<T*>(heap->allocateObject(Heap::allocationSizeFromSize(size), gcInfoIndex));
    }
    PLATFORM_EXPORT static void freeVectorBacking(void*);
    PLATFORM_EXPORT static bool expandVectorBacking(void*, size_t);
    static inline bool shrinkVectorBacking(void* address, size_t quantizedCurrentSize, size_t quantizedShrunkSize)
    {
        // Returns always true, so the inlining in turn enables call site simplifications.
        backingShrink(address, quantizedCurrentSize, quantizedShrunkSize);
        return true;
    }
    // Allocates backing storage for a vector with an inline buffer; goes
    // through the dedicated InlineVectorHeapIndex heap.
    template <typename T>
    static T* allocateInlineVectorBacking(size_t size)
    {
        size_t gcInfoIndex = GCInfoTrait<HeapVectorBacking<T, VectorTraits<T>>>::index();
        ThreadState* state = ThreadStateFor<ThreadingTrait<T>::Affinity>::state();
        return reinterpret_cast<T*>(Heap::allocateOnHeapIndex(state, size, InlineVectorHeapIndex, gcInfoIndex));
    }
    PLATFORM_EXPORT static void freeInlineVectorBacking(void*);
    PLATFORM_EXPORT static bool expandInlineVectorBacking(void*, size_t);
    static inline bool shrinkInlineVectorBacking(void* address, size_t quantizedCurrentSize, size_t quantizedShrunkSize)
    {
        backingShrink(address, quantizedCurrentSize, quantizedShrunkSize);
        return true;
    }

    // Allocates backing storage for a hash table from the HashTableHeapIndex
    // heap. T is the bucket type, HashTable the owning table type.
    template <typename T, typename HashTable>
    static T* allocateHashTableBacking(size_t size)
    {
        size_t gcInfoIndex = GCInfoTrait<HeapHashTableBacking<HashTable>>::index();
        ThreadState* state = ThreadStateFor<ThreadingTrait<T>::Affinity>::state();
        return reinterpret_cast<T*>(Heap::allocateOnHeapIndex(state, size, HashTableHeapIndex, gcInfoIndex));
    }
    // Same as allocateHashTableBacking; no separate zeroing is done here —
    // presumably heap allocations are already zero-filled. TODO: confirm.
    template <typename T, typename HashTable>
    static T* allocateZeroedHashTableBacking(size_t size)
    {
        return allocateHashTableBacking<T, HashTable>(size);
    }
    PLATFORM_EXPORT static void freeHashTableBacking(void* address);
    PLATFORM_EXPORT static bool expandHashTableBacking(void*, size_t);

    // Generic allocation entry point used by the collection implementations.
    template <typename Return, typename Metadata>
    static Return malloc(size_t size)
    {
        return reinterpret_cast<Return>(Heap::allocate<Metadata>(size));
    }
    // Deliberate no-op: memory is reclaimed by the garbage collector, not by
    // explicit free (consistent with isGarbageCollected above).
    static void free(void* address) { }
    // Array new/delete are not supported for heap-allocated collections.
    template<typename T>
    static void* newArray(size_t bytes)
    {
        ASSERT_NOT_REACHED();
        return 0;
    }

    static void deleteArray(void* ptr)
    {
        ASSERT_NOT_REACHED();
    }

    static bool isAllocationAllowed()
    {
        return ThreadState::current()->isAllocationAllowed();
    }

    // Marks |t| live without tracing through it.
    template<typename VisitorDispatcher>
    static void markNoTracing(VisitorDispatcher visitor, const void* t) { visitor->markNoTracing(t); }

    // Traces a single collection entry, dispatching on whether the entry
    // needs tracing and how weakness is handled (see
    // CollectionBackingTraceTrait below).
    template<typename VisitorDispatcher, typename T, typename Traits>
    static void trace(VisitorDispatcher visitor, T& t)
    {
        CollectionBackingTraceTrait<WTF::ShouldBeTraced<Traits>::value, Traits::weakHandlingFlag, WTF::WeakPointersActWeak, T, Traits>::trace(visitor, t);
    }

    template<typename VisitorDispatcher>
    static void registerDelayedMarkNoTracing(VisitorDispatcher visitor, const void* object)
    {
        visitor->registerDelayedMarkNoTracing(object);
    }

    // Registers a weak-members callback with the visitor; |closure| is the
    // identity the callback is registered under.
    template<typename VisitorDispatcher>
    static void registerWeakMembers(VisitorDispatcher visitor, const void* closure, const void* object, WeakPointerCallback callback)
    {
        visitor->registerWeakMembers(closure, object, callback);
    }

    // Registers ephemeron-style iteration callbacks for a weak table.
    template<typename VisitorDispatcher>
    static void registerWeakTable(VisitorDispatcher visitor, const void* closure, EphemeronCallback iterationCallback, EphemeronCallback iterationDoneCallback)
    {
        visitor->registerWeakTable(closure, iterationCallback, iterationDoneCallback);
    }

#if ENABLE(ASSERT)
    // Debug-only query: has a weak table been registered under |closure|?
    template<typename VisitorDispatcher>
    static bool weakTableRegistered(VisitorDispatcher visitor, const void* closure)
    {
        return visitor->weakTableRegistered(closure);
    }
#endif

    // Allocator-protocol type mappings used by the WTF collections: results
    // and "other" references are plain pointers.
    template<typename T>
    struct ResultType {
        using Type = T*;
    };

    template<typename T>
    struct OtherType {
        using Type = T*;
    };

    template<typename T>
    static T& getOther(T* other)
    {
        return *other;
    }

    // No-allocation scopes are tracked only in assert-enabled builds.
    static void enterNoAllocationScope()
    {
#if ENABLE(ASSERT)
        ThreadState::current()->enterNoAllocationScope();
#endif
    }

    static void leaveNoAllocationScope()
    {
#if ENABLE(ASSERT)
        ThreadState::current()->leaveNoAllocationScope();
#endif
    }

    static void enterGCForbiddenScope()
    {
        ThreadState::current()->enterGCForbiddenScope();
    }

    static void leaveGCForbiddenScope()
    {
        ThreadState::current()->leaveGCForbiddenScope();
    }

private:
    static void backingFree(void*);
    static bool backingExpand(void*, size_t);
    PLATFORM_EXPORT static void backingShrink(void*, size_t quantizedCurrentSize, size_t quantizedShrunkSize);

    // The WTF collections call the private backing helpers directly.
    template<typename T, size_t u, typename V> friend class WTF::Vector;
    template<typename T, typename U, typename V, typename W> friend class WTF::HashSet;
    template<typename T, typename U, typename V, typename W, typename X, typename Y> friend class WTF::HashMap;
};
| 234 |
// Traces the value stored inside a ListHashSet node.
template<typename VisitorDispatcher, typename Value>
static void traceListHashSetValue(VisitorDispatcher visitor, Value& value)
{
    // We use the default hash traits for the value in the node, because
    // ListHashSet does not let you specify any specific ones.
    // We don't allow ListHashSet of WeakMember, so we set that one false
    // (there's an assert elsewhere), but we have to specify some value for the
    // strongify template argument, so we specify WTF::WeakPointersActWeak,
    // arbitrarily.
    CollectionBackingTraceTrait<WTF::ShouldBeTraced<WTF::HashTraits<Value>>::value, WTF::NoWeakHandlingInCollections, WTF::WeakPointersActWeak, Value, WTF::HashTraits<Value>>::trace(visitor, value);
}
| 246 |
// The inline capacity is just a dummy template argument to match the off-heap
// allocator.
// This inherits from the static-only HeapAllocator trait class, but we do
// declare pointers to instances. These pointers are always null, and no
// objects are instantiated.
template<typename ValueArg, size_t inlineCapacity>
class HeapListHashSetAllocator : public HeapAllocator {
public:
    using TableAllocator = HeapAllocator;
    using Node = WTF::ListHashSetNode<ValueArg, HeapListHashSetAllocator>;

    class AllocatorProvider {
    public:
        // For the heap allocation we don't need an actual allocator object, so
        // we just return null.
        HeapListHashSetAllocator* get() const { return 0; }

        // No allocator object is needed.
        void createAllocatorIfNeeded() { }
        void releaseAllocator() { }

        // There is no allocator object in the HeapListHashSet (unlike in the
        // regular ListHashSet) so there is nothing to swap.
        void swap(AllocatorProvider& other) { }
    };

    // Deliberate no-op; node memory is expected to be reclaimed by the
    // garbage collector (HeapAllocator::free is likewise empty).
    void deallocate(void* dummy) { }

    // This is not a static method even though it could be, because it needs to
    // match the one that the (off-heap) ListHashSetAllocator has. The 'this'
    // pointer will always be null.
    void* allocateNode()
    {
        // Consider using a LinkedHashSet instead if this compile-time assert fails:
        static_assert(!WTF::IsWeak<ValueArg>::value, "weak pointers in a ListHashSet will result in null entries in the set");

        return malloc<void*, Node>(sizeof(Node));
    }

    // Traces the value held by |node| via traceListHashSetValue above.
    template<typename VisitorDispatcher>
    static void traceValue(VisitorDispatcher visitor, Node* node)
    {
        traceListHashSetValue(visitor, node->m_value);
    }
};
| 292 |
// Backing-store marker type for on-heap vectors. finalize() runs element
// destructors when the backing is swept.
template<typename T, typename Traits = WTF::VectorTraits<T>> class HeapVectorBacking {
public:
    static void finalize(void* pointer);
    void finalizeGarbageCollectedObject() { finalize(this); }
};
| 298 |
// Destructs the elements of a vector backing. Only instantiated for element
// types requiring destruction (enforced by the static_assert below).
template<typename T, typename Traits>
void HeapVectorBacking<T, Traits>::finalize(void* pointer)
{
    static_assert(Traits::needsDestruction, "Only vector buffers with items requiring destruction should be finalized");
    // See the comment in HeapVectorBacking::trace.
    static_assert(Traits::canInitializeWithMemset || WTF::IsPolymorphic<T>::value, "HeapVectorBacking doesn't support objects that cannot be initialized with memset or don't have a vtable");

    ASSERT(!WTF::IsTriviallyDestructible<T>::value);
    HeapObjectHeader* header = HeapObjectHeader::fromPayload(pointer);
    // Use the payload size as recorded by the heap to determine how many
    // elements to finalize.
    size_t length = header->payloadSize() / sizeof(T);
    T* buffer = reinterpret_cast<T*>(pointer);
#ifdef ANNOTATE_CONTIGUOUS_CONTAINER
    // As commented above, HeapVectorBacking calls finalizers for unused slots
    // (which are already zeroed out).
    ANNOTATE_CHANGE_SIZE(buffer, length, 0, length);
#endif
    if (WTF::IsPolymorphic<T>::value) {
        // Skip slots whose vtable pointer was never set (unused zeroed
        // slots); running a destructor on them would be unsafe.
        for (unsigned i = 0; i < length; ++i) {
            if (blink::vTableInitialized(&buffer[i]))
                buffer[i].~T();
        }
    } else {
        for (unsigned i = 0; i < length; ++i) {
            buffer[i].~T();
        }
    }
}
| 328 |
// CollectionBackingTraceTrait. Do nothing for things in collections that don't
// need tracing, or call TraceInCollectionTrait for those that do.

// Specialization for things that don't need marking and have no weak pointers.
// We do nothing, even if WTF::WeakPointersActStrong.
template<WTF::ShouldWeakPointersBeMarkedStrongly strongify, typename T, typename Traits>
struct CollectionBackingTraceTrait<false, WTF::NoWeakHandlingInCollections, strongify, T, Traits> {
    // Returns false: no dead weak pointers can be found in untraced entries.
    template<typename VisitorDispatcher>
    static bool trace(VisitorDispatcher, T&) { return false; }
};
| 339 |
// Catch-all overload: non-Member entry types need no garbage-collection check.
template<typename T>
static void verifyGarbageCollectedIfMember(T*)
{
}
| 344 |
// Member<T> overload: statically verifies that T is a garbage-collected type.
template<typename T>
static void verifyGarbageCollectedIfMember(Member<T>* t)
{
    STATIC_ASSERT_IS_GARBAGE_COLLECTED(T, "non garbage collected object in member");
}
| 350 |
// Specialization for things that either need marking or have weak pointers or
// both.
template<bool needsTracing, WTF::WeakHandlingFlag weakHandlingFlag, WTF::ShouldWeakPointersBeMarkedStrongly strongify, typename T, typename Traits>
struct CollectionBackingTraceTrait {
    // Returns true if dead weak pointers were found (entry should be removed).
    template<typename VisitorDispatcher>
    static bool trace(VisitorDispatcher visitor, T& t)
    {
        // Compile-time check that Member entries point at garbage-collected
        // types; a no-op for non-Member T.
        verifyGarbageCollectedIfMember(reinterpret_cast<T*>(0));
        return WTF::TraceInCollectionTrait<weakHandlingFlag, strongify, T, Traits>::trace(visitor, t);
    }
};
| 362 |
// Backing-store marker type for on-heap hash tables. finalize() destructs
// live buckets when the backing is swept.
template<typename Table> class HeapHashTableBacking {
public:
    static void finalize(void* pointer);
    void finalizeGarbageCollectedObject() { finalize(this); }
};
| 368 |
// Destructs every live bucket in a hash table backing; empty and deleted
// buckets hold no constructed value and are skipped.
template<typename Table>
void HeapHashTableBacking<Table>::finalize(void* pointer)
{
    using Value = typename Table::ValueType;
    ASSERT(!WTF::IsTriviallyDestructible<Value>::value);
    HeapObjectHeader* header = HeapObjectHeader::fromPayload(pointer);
    // Use the payload size as recorded by the heap to determine how many
    // elements to finalize.
    size_t length = header->payloadSize() / sizeof(Value);
    Value* table = reinterpret_cast<Value*>(pointer);
    for (unsigned i = 0; i < length; ++i) {
        if (!Table::isEmptyOrDeletedBucket(table[i]))
            table[i].~Value();
    }
}
| 384 |
| 385 // FIXME: These should just be template aliases: |
| 386 // |
| 387 // template<typename T, size_t inlineCapacity = 0> |
| 388 // using HeapVector = Vector<T, inlineCapacity, HeapAllocator>; |
| 389 // |
| 390 // as soon as all the compilers we care about support that. |
| 391 // MSVC supports it only in MSVC 2013. |
| 392 template< |
| 393 typename KeyArg, |
| 394 typename MappedArg, |
| 395 typename HashArg = typename DefaultHash<KeyArg>::Hash, |
| 396 typename KeyTraitsArg = HashTraits<KeyArg>, |
| 397 typename MappedTraitsArg = HashTraits<MappedArg>> |
| 398 class HeapHashMap : public HashMap<KeyArg, MappedArg, HashArg, KeyTraitsArg, Map
pedTraitsArg, HeapAllocator> { }; |
| 399 |
| 400 template< |
| 401 typename ValueArg, |
| 402 typename HashArg = typename DefaultHash<ValueArg>::Hash, |
| 403 typename TraitsArg = HashTraits<ValueArg>> |
| 404 class HeapHashSet : public HashSet<ValueArg, HashArg, TraitsArg, HeapAllocator>
{ }; |
| 405 |
| 406 template< |
| 407 typename ValueArg, |
| 408 typename HashArg = typename DefaultHash<ValueArg>::Hash, |
| 409 typename TraitsArg = HashTraits<ValueArg>> |
| 410 class HeapLinkedHashSet : public LinkedHashSet<ValueArg, HashArg, TraitsArg, Hea
pAllocator> { }; |
| 411 |
| 412 template< |
| 413 typename ValueArg, |
| 414 size_t inlineCapacity = 0, // The inlineCapacity is just a dummy to match Li
stHashSet (off-heap). |
| 415 typename HashArg = typename DefaultHash<ValueArg>::Hash> |
| 416 class HeapListHashSet : public ListHashSet<ValueArg, inlineCapacity, HashArg, He
apListHashSetAllocator<ValueArg, inlineCapacity>> { }; |
| 417 |
| 418 template< |
| 419 typename Value, |
| 420 typename HashFunctions = typename DefaultHash<Value>::Hash, |
| 421 typename Traits = HashTraits<Value>> |
| 422 class HeapHashCountedSet : public HashCountedSet<Value, HashFunctions, Traits, H
eapAllocator> { }; |
| 423 |
| 424 template<typename T, size_t inlineCapacity = 0> |
| 425 class HeapVector : public Vector<T, inlineCapacity, HeapAllocator> { |
| 426 public: |
| 427 HeapVector() { } |
| 428 |
| 429 explicit HeapVector(size_t size) : Vector<T, inlineCapacity, HeapAllocator>(
size) |
| 430 { |
| 431 } |
| 432 |
| 433 HeapVector(size_t size, const T& val) : Vector<T, inlineCapacity, HeapAlloca
tor>(size, val) |
| 434 { |
| 435 } |
| 436 |
| 437 template<size_t otherCapacity> |
| 438 HeapVector(const HeapVector<T, otherCapacity>& other) |
| 439 : Vector<T, inlineCapacity, HeapAllocator>(other) |
| 440 { |
| 441 } |
| 442 |
| 443 template<typename U> |
| 444 void append(const U* data, size_t dataSize) |
| 445 { |
| 446 Vector<T, inlineCapacity, HeapAllocator>::append(data, dataSize); |
| 447 } |
| 448 |
| 449 template<typename U> |
| 450 void append(const U& other) |
| 451 { |
| 452 Vector<T, inlineCapacity, HeapAllocator>::append(other); |
| 453 } |
| 454 |
| 455 template<typename U, size_t otherCapacity> |
| 456 void appendVector(const HeapVector<U, otherCapacity>& other) |
| 457 { |
| 458 const Vector<U, otherCapacity, HeapAllocator>& otherVector = other; |
| 459 Vector<T, inlineCapacity, HeapAllocator>::appendVector(otherVector); |
| 460 } |
| 461 }; |
| 462 |
| 463 template<typename T, size_t inlineCapacity = 0> |
| 464 class HeapDeque : public Deque<T, inlineCapacity, HeapAllocator> { |
| 465 public: |
| 466 HeapDeque() { } |
| 467 |
| 468 explicit HeapDeque(size_t size) : Deque<T, inlineCapacity, HeapAllocator>(si
ze) |
| 469 { |
| 470 } |
| 471 |
| 472 HeapDeque(size_t size, const T& val) : Deque<T, inlineCapacity, HeapAllocato
r>(size, val) |
| 473 { |
| 474 } |
| 475 |
| 476 // FIXME: Doesn't work if there is an inline buffer, due to crbug.com/360572 |
| 477 HeapDeque<T, 0>& operator=(const HeapDeque& other) |
| 478 { |
| 479 HeapDeque<T> copy(other); |
| 480 swap(copy); |
| 481 return *this; |
| 482 } |
| 483 |
| 484 // FIXME: Doesn't work if there is an inline buffer, due to crbug.com/360572 |
| 485 void swap(HeapDeque& other) |
| 486 { |
| 487 Deque<T, inlineCapacity, HeapAllocator>::swap(other); |
| 488 } |
| 489 |
| 490 template<size_t otherCapacity> |
| 491 HeapDeque(const HeapDeque<T, otherCapacity>& other) |
| 492 : Deque<T, inlineCapacity, HeapAllocator>(other) |
| 493 { |
| 494 } |
| 495 |
| 496 template<typename U> |
| 497 void append(const U& other) |
| 498 { |
| 499 Deque<T, inlineCapacity, HeapAllocator>::append(other); |
| 500 } |
| 501 }; |
| 502 |
// ADL swap overloads for the heap collection wrappers; each forwards to the
// underlying collection's member swap.
template<typename T, size_t i>
inline void swap(HeapVector<T, i>& a, HeapVector<T, i>& b) { a.swap(b); }
template<typename T, size_t i>
inline void swap(HeapDeque<T, i>& a, HeapDeque<T, i>& b) { a.swap(b); }
template<typename T, typename U, typename V>
inline void swap(HeapHashSet<T, U, V>& a, HeapHashSet<T, U, V>& b) { a.swap(b); }
template<typename T, typename U, typename V, typename W, typename X>
inline void swap(HeapHashMap<T, U, V, W, X>& a, HeapHashMap<T, U, V, W, X>& b) { a.swap(b); }
template<typename T, size_t i, typename U>
inline void swap(HeapListHashSet<T, i, U>& a, HeapListHashSet<T, i, U>& b) { a.swap(b); }
template<typename T, typename U, typename V>
inline void swap(HeapLinkedHashSet<T, U, V>& a, HeapLinkedHashSet<T, U, V>& b) { a.swap(b); }
template<typename T, typename U, typename V>
inline void swap(HeapHashCountedSet<T, U, V>& a, HeapHashCountedSet<T, U, V>& b) { a.swap(b); }
| 517 |
| 518 } // namespace blink |
| 519 |
| 520 #endif |
| OLD | NEW |