Chromium Code Reviews

Side by Side Diff: third_party/WebKit/Source/wtf/Vector.h

Issue 2585673002: Replace ASSERT, ENABLE(ASSERT), and ASSERT_NOT_REACHED in wtf (Closed)
Patch Set: Fix an ASan issue with LinkedHashSetNodeBase::unlink (created 4 years ago)
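For orientation: this CL is a mechanical migration from WTF's assertion macros to the Chromium base-logging ones. A minimal sketch of the mapping as it appears in this file, assuming the standard definitions from base/logging.h (the comparison forms log both operand values on failure, which plain ASSERT never did):

    #include "base/logging.h"  // DCHECK, DCHECK_EQ, DCHECK_IS_ON in Chromium

    void sketch(const int* p, size_t a, size_t b) {
      DCHECK(p);        // was: ASSERT(p)
      DCHECK_EQ(a, b);  // was: ASSERT(a == b); _NE/_LT/_LE/_GT/_GE also exist
    #if DCHECK_IS_ON()  // was: #if ENABLE(ASSERT)
      // debug-only verification code lives behind this guard
    #endif
    }

RELEASE_ASSERT sites in this file are deliberately left alone; only the debug-only macros are migrated.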
1 /* 1 /*
2 * Copyright (C) 2005, 2006, 2007, 2008 Apple Inc. All rights reserved. 2 * Copyright (C) 2005, 2006, 2007, 2008 Apple Inc. All rights reserved.
3 * 3 *
4 * This library is free software; you can redistribute it and/or 4 * This library is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Library General Public 5 * modify it under the terms of the GNU Library General Public
6 * License as published by the Free Software Foundation; either 6 * License as published by the Free Software Foundation; either
7 * version 2 of the License, or (at your option) any later version. 7 * version 2 of the License, or (at your option) any later version.
8 * 8 *
9 * This library is distributed in the hope that it will be useful, 9 * This library is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of 10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
(...skipping 64 matching lines...)
75 } 75 }
76 }; 76 };
77 77
78 template <bool unusedSlotsMustBeZeroed, typename T> 78 template <bool unusedSlotsMustBeZeroed, typename T>
79 struct VectorUnusedSlotClearer; 79 struct VectorUnusedSlotClearer;
80 80
81 template <typename T> 81 template <typename T>
82 struct VectorUnusedSlotClearer<false, T> { 82 struct VectorUnusedSlotClearer<false, T> {
83 STATIC_ONLY(VectorUnusedSlotClearer); 83 STATIC_ONLY(VectorUnusedSlotClearer);
84 static void clear(T*, T*) {} 84 static void clear(T*, T*) {}
85 #if ENABLE(ASSERT) 85 #if DCHECK_IS_ON()
86 static void checkCleared(const T*, const T*) {} 86 static void checkCleared(const T*, const T*) {}
87 #endif 87 #endif
88 }; 88 };
89 89
90 template <typename T> 90 template <typename T>
91 struct VectorUnusedSlotClearer<true, T> { 91 struct VectorUnusedSlotClearer<true, T> {
92 STATIC_ONLY(VectorUnusedSlotClearer); 92 STATIC_ONLY(VectorUnusedSlotClearer);
93 static void clear(T* begin, T* end) { 93 static void clear(T* begin, T* end) {
94 memset(reinterpret_cast<void*>(begin), 0, sizeof(T) * (end - begin)); 94 memset(reinterpret_cast<void*>(begin), 0, sizeof(T) * (end - begin));
95 } 95 }
96 96
97 #if ENABLE(ASSERT) 97 #if DCHECK_IS_ON()
98 static void checkCleared(const T* begin, const T* end) { 98 static void checkCleared(const T* begin, const T* end) {
99 const unsigned char* unusedArea = 99 const unsigned char* unusedArea =
100 reinterpret_cast<const unsigned char*>(begin); 100 reinterpret_cast<const unsigned char*>(begin);
101 const unsigned char* endAddress = 101 const unsigned char* endAddress =
102 reinterpret_cast<const unsigned char*>(end); 102 reinterpret_cast<const unsigned char*>(end);
103 ASSERT(endAddress >= unusedArea); 103 DCHECK_GE(endAddress, unusedArea);
104 for (int i = 0; i < endAddress - unusedArea; ++i) 104 for (int i = 0; i < endAddress - unusedArea; ++i)
105 ASSERT(!unusedArea[i]); 105 DCHECK(!unusedArea[i]);
106 } 106 }
107 #endif 107 #endif
108 }; 108 };
109 109
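The two VectorUnusedSlotClearer specializations above select, at compile time, whether a garbage-collected vector must zero its dead slots so the marking visitor never reads stale data. A self-contained sketch of the same dispatch technique, with simplified names and no Blink dependencies:

    #include <cstring>

    template <bool mustZero, typename T>
    struct UnusedSlotClearer;  // primary template intentionally left undefined

    template <typename T>
    struct UnusedSlotClearer<false, T> {
      static void clear(T*, T*) {}  // nothing to do for this element type
    };

    template <typename T>
    struct UnusedSlotClearer<true, T> {
      static void clear(T* begin, T* end) {
        // Zero the dead range so a conservative heap scan sees no stale pointers.
        std::memset(static_cast<void*>(begin), 0, sizeof(T) * (end - begin));
      }
    };

Leaving the primary template undefined turns a wrong instantiation into a compile error instead of silently doing nothing.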
110 template <bool canInitializeWithMemset, typename T> 110 template <bool canInitializeWithMemset, typename T>
111 struct VectorInitializer; 111 struct VectorInitializer;
112 112
113 template <typename T> 113 template <typename T>
114 struct VectorInitializer<false, T> { 114 struct VectorInitializer<false, T> {
115 STATIC_ONLY(VectorInitializer); 115 STATIC_ONLY(VectorInitializer);
(...skipping 122 matching lines...)
238 } 238 }
239 }; 239 };
240 240
241 template <bool canCompareWithMemcmp, typename T> 241 template <bool canCompareWithMemcmp, typename T>
242 struct VectorComparer; 242 struct VectorComparer;
243 243
244 template <typename T> 244 template <typename T>
245 struct VectorComparer<false, T> { 245 struct VectorComparer<false, T> {
246 STATIC_ONLY(VectorComparer); 246 STATIC_ONLY(VectorComparer);
247 static bool compare(const T* a, const T* b, size_t size) { 247 static bool compare(const T* a, const T* b, size_t size) {
248 ASSERT(a); 248 DCHECK(a);
249 ASSERT(b); 249 DCHECK(b);
250 return std::equal(a, a + size, b); 250 return std::equal(a, a + size, b);
251 } 251 }
252 }; 252 };
253 253
254 template <typename T> 254 template <typename T>
255 struct VectorComparer<true, T> { 255 struct VectorComparer<true, T> {
256 STATIC_ONLY(VectorComparer); 256 STATIC_ONLY(VectorComparer);
257 static bool compare(const T* a, const T* b, size_t size) { 257 static bool compare(const T* a, const T* b, size_t size) {
258 ASSERT(a); 258 DCHECK(a);
259 ASSERT(b); 259 DCHECK(b);
260 return memcmp(a, b, sizeof(T) * size) == 0; 260 return memcmp(a, b, sizeof(T) * size) == 0;
261 } 261 }
262 }; 262 };
263 263
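VectorComparer makes the memcmp fast path an explicit per-type opt-in (VectorTraits<T>::canCompareWithMemcmp) rather than inferring it, because bytewise equality is only sound when the object representation fully determines equality. A standalone sketch of the idea, with a crude stand-in for the trait:

    #include <algorithm>
    #include <cstddef>
    #include <cstring>
    #include <type_traits>

    // Crude stand-in; WTF uses an explicit opt-in trait, not this heuristic.
    template <typename T>
    constexpr bool canCompareWithMemcmp = std::is_integral<T>::value;

    template <typename T>
    bool rangesEqual(const T* a, const T* b, std::size_t size) {
      if (canCompareWithMemcmp<T>)
        return std::memcmp(a, b, sizeof(T) * size) == 0;  // one library call
      return std::equal(a, a + size, b);                  // element-wise operator==
    }

The real code splits the two branches into separate specializations, so the std::equal path need not even compile for memcmp-able element types.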
264 template <typename T> 264 template <typename T>
265 struct VectorElementComparer { 265 struct VectorElementComparer {
266 STATIC_ONLY(VectorElementComparer); 266 STATIC_ONLY(VectorElementComparer);
267 template <typename U> 267 template <typename U>
268 static bool compareElement(const T& left, const U& right) { 268 static bool compareElement(const T& left, const U& right) {
269 return left == right; 269 return left == right;
(...skipping 57 matching lines...)
327 } 327 }
328 }; 328 };
329 329
330 template <typename T, bool hasInlineCapacity, typename Allocator> 330 template <typename T, bool hasInlineCapacity, typename Allocator>
331 class VectorBufferBase { 331 class VectorBufferBase {
332 WTF_MAKE_NONCOPYABLE(VectorBufferBase); 332 WTF_MAKE_NONCOPYABLE(VectorBufferBase);
333 DISALLOW_NEW(); 333 DISALLOW_NEW();
334 334
335 public: 335 public:
336 void allocateBuffer(size_t newCapacity) { 336 void allocateBuffer(size_t newCapacity) {
337 ASSERT(newCapacity); 337 DCHECK(newCapacity);
338 size_t sizeToAllocate = allocationSize(newCapacity); 338 size_t sizeToAllocate = allocationSize(newCapacity);
339 if (hasInlineCapacity) 339 if (hasInlineCapacity)
340 m_buffer = 340 m_buffer =
341 Allocator::template allocateInlineVectorBacking<T>(sizeToAllocate); 341 Allocator::template allocateInlineVectorBacking<T>(sizeToAllocate);
342 else 342 else
343 m_buffer = Allocator::template allocateVectorBacking<T>(sizeToAllocate); 343 m_buffer = Allocator::template allocateVectorBacking<T>(sizeToAllocate);
344 m_capacity = sizeToAllocate / sizeof(T); 344 m_capacity = sizeToAllocate / sizeof(T);
345 } 345 }
346 346
347 void allocateExpandedBuffer(size_t newCapacity) { 347 void allocateExpandedBuffer(size_t newCapacity) {
348 ASSERT(newCapacity); 348 DCHECK(newCapacity);
349 size_t sizeToAllocate = allocationSize(newCapacity); 349 size_t sizeToAllocate = allocationSize(newCapacity);
350 if (hasInlineCapacity) 350 if (hasInlineCapacity)
351 m_buffer = 351 m_buffer =
352 Allocator::template allocateInlineVectorBacking<T>(sizeToAllocate); 352 Allocator::template allocateInlineVectorBacking<T>(sizeToAllocate);
353 else 353 else
354 m_buffer = 354 m_buffer =
355 Allocator::template allocateExpandedVectorBacking<T>(sizeToAllocate); 355 Allocator::template allocateExpandedVectorBacking<T>(sizeToAllocate);
356 m_capacity = sizeToAllocate / sizeof(T); 356 m_capacity = sizeToAllocate / sizeof(T);
357 } 357 }
358 358
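Note the recurring m_capacity = sizeToAllocate / sizeof(T): capacity is recomputed from the byte size handed to the allocator rather than from newCapacity, so any rounding done inside allocationSize surfaces as usable slots. With hypothetical numbers: requesting 100 elements of 12 bytes from an allocator whose bucket rounds the request up to 1280 bytes yields m_capacity = 1280 / 12 = 106, six slots more than asked for.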
(...skipping 10 matching lines...)
369 // finalizing, we clear out the unused slots so that the visitor or the 369 // finalizing, we clear out the unused slots so that the visitor or the
370 // finalizer does not cause a problem when visiting the unused slots. 370 // finalizer does not cause a problem when visiting the unused slots.
371 VectorUnusedSlotClearer< 371 VectorUnusedSlotClearer<
372 Allocator::isGarbageCollected && 372 Allocator::isGarbageCollected &&
373 (VectorTraits<T>::needsDestruction || 373 (VectorTraits<T>::needsDestruction ||
374 IsTraceableInCollectionTrait<VectorTraits<T>>::value), 374 IsTraceableInCollectionTrait<VectorTraits<T>>::value),
375 T>::clear(from, to); 375 T>::clear(from, to);
376 } 376 }
377 377
378 void checkUnusedSlots(const T* from, const T* to) { 378 void checkUnusedSlots(const T* from, const T* to) {
379 #if ENABLE(ASSERT) && !defined(ANNOTATE_CONTIGUOUS_CONTAINER) 379 #if DCHECK_IS_ON() && !defined(ANNOTATE_CONTIGUOUS_CONTAINER)
380 VectorUnusedSlotClearer< 380 VectorUnusedSlotClearer<
381 Allocator::isGarbageCollected && 381 Allocator::isGarbageCollected &&
382 (VectorTraits<T>::needsDestruction || 382 (VectorTraits<T>::needsDestruction ||
383 IsTraceableInCollectionTrait<VectorTraits<T>>::value), 383 IsTraceableInCollectionTrait<VectorTraits<T>>::value),
384 T>::checkCleared(from, to); 384 T>::checkCleared(from, to);
385 #endif 385 #endif
386 } 386 }
387 387
388 // |end| is exclusive, a la STL. 388 // |end| is exclusive, a la STL.
389 struct OffsetRange final { 389 struct OffsetRange final {
390 OffsetRange() : begin(0), end(0) {} 390 OffsetRange() : begin(0), end(0) {}
391 explicit OffsetRange(size_t begin, size_t end) : begin(begin), end(end) { 391 explicit OffsetRange(size_t begin, size_t end) : begin(begin), end(end) {
392 ASSERT(begin <= end); 392 DCHECK_LE(begin, end);
393 } 393 }
394 bool empty() const { return begin == end; } 394 bool empty() const { return begin == end; }
395 size_t begin; 395 size_t begin;
396 size_t end; 396 size_t end;
397 }; 397 };
398 398
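OffsetRange records a hole in an inline buffer as a half-open [begin, end) pair, matching STL iterator conventions; the constructor's DCHECK_LE rejects inverted ranges in debug builds. A short usage sketch, treating the struct as if it were standalone and using made-up values:

    OffsetRange hole(2, 5);               // covers slots 2, 3, 4
    bool a = hole.empty();                // false
    bool b = OffsetRange(3, 3).empty();   // true: begin == end
    // OffsetRange(5, 2) would trip DCHECK_LE(begin, end) when DCHECKs are on.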
399 protected: 399 protected:
400 VectorBufferBase() : m_buffer(nullptr), m_capacity(0) {} 400 VectorBufferBase() : m_buffer(nullptr), m_capacity(0) {}
401 401
402 VectorBufferBase(T* buffer, size_t capacity) 402 VectorBufferBase(T* buffer, size_t capacity)
(...skipping 39 matching lines...)
442 bool expandBuffer(size_t newCapacity) { 442 bool expandBuffer(size_t newCapacity) {
443 size_t sizeToAllocate = allocationSize(newCapacity); 443 size_t sizeToAllocate = allocationSize(newCapacity);
444 if (Allocator::expandVectorBacking(m_buffer, sizeToAllocate)) { 444 if (Allocator::expandVectorBacking(m_buffer, sizeToAllocate)) {
445 m_capacity = sizeToAllocate / sizeof(T); 445 m_capacity = sizeToAllocate / sizeof(T);
446 return true; 446 return true;
447 } 447 }
448 return false; 448 return false;
449 } 449 }
450 450
451 inline bool shrinkBuffer(size_t newCapacity) { 451 inline bool shrinkBuffer(size_t newCapacity) {
452 ASSERT(newCapacity < capacity()); 452 DCHECK_LT(newCapacity, capacity());
453 size_t sizeToAllocate = allocationSize(newCapacity); 453 size_t sizeToAllocate = allocationSize(newCapacity);
454 if (Allocator::shrinkVectorBacking(m_buffer, allocationSize(capacity()), 454 if (Allocator::shrinkVectorBacking(m_buffer, allocationSize(capacity()),
455 sizeToAllocate)) { 455 sizeToAllocate)) {
456 m_capacity = sizeToAllocate / sizeof(T); 456 m_capacity = sizeToAllocate / sizeof(T);
457 return true; 457 return true;
458 } 458 }
459 return false; 459 return false;
460 } 460 }
461 461
462 void resetBufferPointer() { 462 void resetBufferPointer() {
(...skipping 65 matching lines...)
528 NEVER_INLINE void reallyDeallocateBuffer(T* bufferToDeallocate) { 528 NEVER_INLINE void reallyDeallocateBuffer(T* bufferToDeallocate) {
529 Allocator::freeInlineVectorBacking(bufferToDeallocate); 529 Allocator::freeInlineVectorBacking(bufferToDeallocate);
530 } 530 }
531 531
532 void deallocateBuffer(T* bufferToDeallocate) { 532 void deallocateBuffer(T* bufferToDeallocate) {
533 if (UNLIKELY(bufferToDeallocate != inlineBuffer())) 533 if (UNLIKELY(bufferToDeallocate != inlineBuffer()))
534 reallyDeallocateBuffer(bufferToDeallocate); 534 reallyDeallocateBuffer(bufferToDeallocate);
535 } 535 }
536 536
537 bool expandBuffer(size_t newCapacity) { 537 bool expandBuffer(size_t newCapacity) {
538 ASSERT(newCapacity > inlineCapacity); 538 DCHECK_GT(newCapacity, inlineCapacity);
539 if (m_buffer == inlineBuffer()) 539 if (m_buffer == inlineBuffer())
540 return false; 540 return false;
541 541
542 size_t sizeToAllocate = allocationSize(newCapacity); 542 size_t sizeToAllocate = allocationSize(newCapacity);
543 if (Allocator::expandInlineVectorBacking(m_buffer, sizeToAllocate)) { 543 if (Allocator::expandInlineVectorBacking(m_buffer, sizeToAllocate)) {
544 m_capacity = sizeToAllocate / sizeof(T); 544 m_capacity = sizeToAllocate / sizeof(T);
545 return true; 545 return true;
546 } 546 }
547 return false; 547 return false;
548 } 548 }
549 549
550 inline bool shrinkBuffer(size_t newCapacity) { 550 inline bool shrinkBuffer(size_t newCapacity) {
551 ASSERT(newCapacity < capacity()); 551 DCHECK_LT(newCapacity, capacity());
552 if (newCapacity <= inlineCapacity) { 552 if (newCapacity <= inlineCapacity) {
553 // We need to switch to inlineBuffer. Vector::shrinkCapacity will 553 // We need to switch to inlineBuffer. Vector::shrinkCapacity will
554 // handle it. 554 // handle it.
555 return false; 555 return false;
556 } 556 }
557 ASSERT(m_buffer != inlineBuffer()); 557 DCHECK_NE(m_buffer, inlineBuffer());
558 size_t newSize = allocationSize(newCapacity); 558 size_t newSize = allocationSize(newCapacity);
559 if (!Allocator::shrinkInlineVectorBacking( 559 if (!Allocator::shrinkInlineVectorBacking(
560 m_buffer, allocationSize(capacity()), newSize)) 560 m_buffer, allocationSize(capacity()), newSize))
561 return false; 561 return false;
562 m_capacity = newSize / sizeof(T); 562 m_capacity = newSize / sizeof(T);
563 return true; 563 return true;
564 } 564 }
565 565
566 void resetBufferPointer() { 566 void resetBufferPointer() {
567 m_buffer = inlineBuffer(); 567 m_buffer = inlineBuffer();
568 m_capacity = inlineCapacity; 568 m_capacity = inlineCapacity;
569 } 569 }
570 570
571 void allocateBuffer(size_t newCapacity) { 571 void allocateBuffer(size_t newCapacity) {
572 // FIXME: This should ASSERT(!m_buffer) to catch misuse/leaks. 572 // FIXME: This should DCHECK(!m_buffer) to catch misuse/leaks.
573 if (newCapacity > inlineCapacity) 573 if (newCapacity > inlineCapacity)
574 Base::allocateBuffer(newCapacity); 574 Base::allocateBuffer(newCapacity);
575 else 575 else
576 resetBufferPointer(); 576 resetBufferPointer();
577 } 577 }
578 578
579 void allocateExpandedBuffer(size_t newCapacity) { 579 void allocateExpandedBuffer(size_t newCapacity) {
580 if (newCapacity > inlineCapacity) 580 if (newCapacity > inlineCapacity)
581 Base::allocateExpandedBuffer(newCapacity); 581 Base::allocateExpandedBuffer(newCapacity);
582 else 582 else
(...skipping 52 matching lines...)
635 // buffer's inline buffer. Elements in an out-of-line buffer won't move, 635 // buffer's inline buffer. Elements in an out-of-line buffer won't move,
636 // because we can just swap pointers of out-of-line buffers. 636 // because we can just swap pointers of out-of-line buffers.
637 T* thisSourceBegin = nullptr; 637 T* thisSourceBegin = nullptr;
638 size_t thisSourceSize = 0; 638 size_t thisSourceSize = 0;
639 T* thisDestinationBegin = nullptr; 639 T* thisDestinationBegin = nullptr;
640 if (buffer() == inlineBuffer()) { 640 if (buffer() == inlineBuffer()) {
641 thisSourceBegin = buffer(); 641 thisSourceBegin = buffer();
642 thisSourceSize = m_size; 642 thisSourceSize = m_size;
643 thisDestinationBegin = other.inlineBuffer(); 643 thisDestinationBegin = other.inlineBuffer();
644 if (!thisHole.empty()) { // Sanity check. 644 if (!thisHole.empty()) { // Sanity check.
645 ASSERT(thisHole.begin < thisHole.end); 645 DCHECK_LT(thisHole.begin, thisHole.end);
646 ASSERT(thisHole.end <= thisSourceSize); 646 DCHECK_LE(thisHole.end, thisSourceSize);
647 } 647 }
648 } else { 648 } else {
649 // We don't need the hole information for an out-of-line buffer. 649 // We don't need the hole information for an out-of-line buffer.
650 thisHole.begin = thisHole.end = 0; 650 thisHole.begin = thisHole.end = 0;
651 } 651 }
652 T* otherSourceBegin = nullptr; 652 T* otherSourceBegin = nullptr;
653 size_t otherSourceSize = 0; 653 size_t otherSourceSize = 0;
654 T* otherDestinationBegin = nullptr; 654 T* otherDestinationBegin = nullptr;
655 if (other.buffer() == other.inlineBuffer()) { 655 if (other.buffer() == other.inlineBuffer()) {
656 otherSourceBegin = other.buffer(); 656 otherSourceBegin = other.buffer();
657 otherSourceSize = other.m_size; 657 otherSourceSize = other.m_size;
658 otherDestinationBegin = inlineBuffer(); 658 otherDestinationBegin = inlineBuffer();
659 if (!otherHole.empty()) { 659 if (!otherHole.empty()) {
660 ASSERT(otherHole.begin < otherHole.end); 660 DCHECK_LT(otherHole.begin, otherHole.end);
661 ASSERT(otherHole.end <= otherSourceSize); 661 DCHECK_LE(otherHole.end, otherSourceSize);
662 } 662 }
663 } else { 663 } else {
664 otherHole.begin = otherHole.end = 0; 664 otherHole.begin = otherHole.end = 0;
665 } 665 }
666 666
667 // Next, we mutate members and do other bookkeeping. We do pointer swapping 667 // Next, we mutate members and do other bookkeeping. We do pointer swapping
668 // (for out-of-line buffers) here if we can. From now on, don't assume 668 // (for out-of-line buffers) here if we can. From now on, don't assume
669 // buffer() or capacity() maintains their original values. 669 // buffer() or capacity() maintains their original values.
670 std::swap(m_capacity, other.m_capacity); 670 std::swap(m_capacity, other.m_capacity);
671 if (thisSourceBegin && 671 if (thisSourceBegin &&
672 !otherSourceBegin) { // Our buffer is inline, theirs is not. 672 !otherSourceBegin) { // Our buffer is inline, theirs is not.
673 ASSERT(buffer() == inlineBuffer()); 673 DCHECK_EQ(buffer(), inlineBuffer());
674 ASSERT(other.buffer() != other.inlineBuffer()); 674 DCHECK_NE(other.buffer(), other.inlineBuffer());
675 ANNOTATE_DELETE_BUFFER(m_buffer, inlineCapacity, m_size); 675 ANNOTATE_DELETE_BUFFER(m_buffer, inlineCapacity, m_size);
676 m_buffer = other.buffer(); 676 m_buffer = other.buffer();
677 other.m_buffer = other.inlineBuffer(); 677 other.m_buffer = other.inlineBuffer();
678 std::swap(m_size, other.m_size); 678 std::swap(m_size, other.m_size);
679 ANNOTATE_NEW_BUFFER(other.m_buffer, inlineCapacity, other.m_size); 679 ANNOTATE_NEW_BUFFER(other.m_buffer, inlineCapacity, other.m_size);
680 } else if (!thisSourceBegin && 680 } else if (!thisSourceBegin &&
681 otherSourceBegin) { // Their buffer is inline, ours is not. 681 otherSourceBegin) { // Their buffer is inline, ours is not.
682 ASSERT(buffer() != inlineBuffer()); 682 DCHECK_NE(buffer(), inlineBuffer());
683 ASSERT(other.buffer() == other.inlineBuffer()); 683 DCHECK_EQ(other.buffer(), other.inlineBuffer());
684 ANNOTATE_DELETE_BUFFER(other.m_buffer, inlineCapacity, other.m_size); 684 ANNOTATE_DELETE_BUFFER(other.m_buffer, inlineCapacity, other.m_size);
685 other.m_buffer = buffer(); 685 other.m_buffer = buffer();
686 m_buffer = inlineBuffer(); 686 m_buffer = inlineBuffer();
687 std::swap(m_size, other.m_size); 687 std::swap(m_size, other.m_size);
688 ANNOTATE_NEW_BUFFER(m_buffer, inlineCapacity, m_size); 688 ANNOTATE_NEW_BUFFER(m_buffer, inlineCapacity, m_size);
689 } else { // Both buffers are inline. 689 } else { // Both buffers are inline.
690 ASSERT(thisSourceBegin && otherSourceBegin); 690 DCHECK(thisSourceBegin);
691 ASSERT(buffer() == inlineBuffer()); 691 DCHECK(otherSourceBegin);
692 ASSERT(other.buffer() == other.inlineBuffer()); 692 DCHECK_EQ(buffer(), inlineBuffer());
693 DCHECK_EQ(other.buffer(), other.inlineBuffer());
693 ANNOTATE_CHANGE_SIZE(m_buffer, inlineCapacity, m_size, other.m_size); 694 ANNOTATE_CHANGE_SIZE(m_buffer, inlineCapacity, m_size, other.m_size);
694 ANNOTATE_CHANGE_SIZE(other.m_buffer, inlineCapacity, other.m_size, 695 ANNOTATE_CHANGE_SIZE(other.m_buffer, inlineCapacity, other.m_size,
695 m_size); 696 m_size);
696 std::swap(m_size, other.m_size); 697 std::swap(m_size, other.m_size);
697 } 698 }
698 699
699 // We are ready to move elements. We determine an action for each "section", 700 // We are ready to move elements. We determine an action for each "section",
700 // which is a contiguous range such that all elements in the range are 701 // which is a contiguous range such that all elements in the range are
701 // treated similarly. 702 // treated similarly.
702 size_t sectionBegin = 0; 703 size_t sectionBegin = 0;
703 while (sectionBegin < inlineCapacity) { 704 while (sectionBegin < inlineCapacity) {
704 // To determine the end of this section, we list up all the boundaries 705 // To determine the end of this section, we list up all the boundaries
705 // where the "occupiedness" may change. 706 // where the "occupiedness" may change.
706 size_t sectionEnd = inlineCapacity; 707 size_t sectionEnd = inlineCapacity;
707 if (thisSourceBegin && sectionBegin < thisSourceSize) 708 if (thisSourceBegin && sectionBegin < thisSourceSize)
708 sectionEnd = std::min(sectionEnd, thisSourceSize); 709 sectionEnd = std::min(sectionEnd, thisSourceSize);
709 if (!thisHole.empty() && sectionBegin < thisHole.begin) 710 if (!thisHole.empty() && sectionBegin < thisHole.begin)
710 sectionEnd = std::min(sectionEnd, thisHole.begin); 711 sectionEnd = std::min(sectionEnd, thisHole.begin);
711 if (!thisHole.empty() && sectionBegin < thisHole.end) 712 if (!thisHole.empty() && sectionBegin < thisHole.end)
712 sectionEnd = std::min(sectionEnd, thisHole.end); 713 sectionEnd = std::min(sectionEnd, thisHole.end);
713 if (otherSourceBegin && sectionBegin < otherSourceSize) 714 if (otherSourceBegin && sectionBegin < otherSourceSize)
714 sectionEnd = std::min(sectionEnd, otherSourceSize); 715 sectionEnd = std::min(sectionEnd, otherSourceSize);
715 if (!otherHole.empty() && sectionBegin < otherHole.begin) 716 if (!otherHole.empty() && sectionBegin < otherHole.begin)
716 sectionEnd = std::min(sectionEnd, otherHole.begin); 717 sectionEnd = std::min(sectionEnd, otherHole.begin);
717 if (!otherHole.empty() && sectionBegin < otherHole.end) 718 if (!otherHole.empty() && sectionBegin < otherHole.end)
718 sectionEnd = std::min(sectionEnd, otherHole.end); 719 sectionEnd = std::min(sectionEnd, otherHole.end);
719 720
720 ASSERT(sectionBegin < sectionEnd); 721 DCHECK_LT(sectionBegin, sectionEnd);
721 722
722 // Is the |sectionBegin|-th element of |thisSource| occupied? 723 // Is the |sectionBegin|-th element of |thisSource| occupied?
723 bool thisOccupied = false; 724 bool thisOccupied = false;
724 if (thisSourceBegin && sectionBegin < thisSourceSize) { 725 if (thisSourceBegin && sectionBegin < thisSourceSize) {
725 // Yes, it's occupied, unless the position is in a hole. 726 // Yes, it's occupied, unless the position is in a hole.
726 if (thisHole.empty() || sectionBegin < thisHole.begin || 727 if (thisHole.empty() || sectionBegin < thisHole.begin ||
727 sectionBegin >= thisHole.end) 728 sectionBegin >= thisHole.end)
728 thisOccupied = true; 729 thisOccupied = true;
729 } 730 }
730 bool otherOccupied = false; 731 bool otherOccupied = false;
731 if (otherSourceBegin && sectionBegin < otherSourceSize) { 732 if (otherSourceBegin && sectionBegin < otherSourceSize) {
732 if (otherHole.empty() || sectionBegin < otherHole.begin || 733 if (otherHole.empty() || sectionBegin < otherHole.begin ||
733 sectionBegin >= otherHole.end) 734 sectionBegin >= otherHole.end)
734 otherOccupied = true; 735 otherOccupied = true;
735 } 736 }
736 737
737 if (thisOccupied && otherOccupied) { 738 if (thisOccupied && otherOccupied) {
738 // Both occupied; swap them. In this case, one's destination must be the 739 // Both occupied; swap them. In this case, one's destination must be the
739 // other's source (i.e. both ranges are in inline buffers). 740 // other's source (i.e. both ranges are in inline buffers).
740 ASSERT(thisDestinationBegin == otherSourceBegin); 741 DCHECK_EQ(thisDestinationBegin, otherSourceBegin);
741 ASSERT(otherDestinationBegin == thisSourceBegin); 742 DCHECK_EQ(otherDestinationBegin, thisSourceBegin);
742 TypeOperations::swap(thisSourceBegin + sectionBegin, 743 TypeOperations::swap(thisSourceBegin + sectionBegin,
743 thisSourceBegin + sectionEnd, 744 thisSourceBegin + sectionEnd,
744 otherSourceBegin + sectionBegin); 745 otherSourceBegin + sectionBegin);
745 } else if (thisOccupied) { 746 } else if (thisOccupied) {
746 // Move from ours to theirs. 747 // Move from ours to theirs.
747 TypeOperations::move(thisSourceBegin + sectionBegin, 748 TypeOperations::move(thisSourceBegin + sectionBegin,
748 thisSourceBegin + sectionEnd, 749 thisSourceBegin + sectionEnd,
749 thisDestinationBegin + sectionBegin); 750 thisDestinationBegin + sectionBegin);
750 Base::clearUnusedSlots(thisSourceBegin + sectionBegin, 751 Base::clearUnusedSlots(thisSourceBegin + sectionBegin,
751 thisSourceBegin + sectionEnd); 752 thisSourceBegin + sectionEnd);
(...skipping 217 matching lines...)
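The section walk above advances through [0, inlineCapacity) in maximal runs over which each side's occupied/hole status is constant, then handles each run with one bulk swap or move. A self-contained model of just the planning step, ignoring the actual element moves and assuming both buffers are inline (Range, occupied, and walkSections are illustrative names):

    #include <algorithm>
    #include <cstddef>
    #include <cstdio>
    #include <initializer_list>

    struct Range { std::size_t begin = 0, end = 0; };  // half-open hole

    static bool occupied(std::size_t size, Range hole, std::size_t i) {
      if (i >= size) return false;
      return hole.begin == hole.end || i < hole.begin || i >= hole.end;
    }

    static void walkSections(std::size_t inlineCapacity,
                             std::size_t thisSize, Range thisHole,
                             std::size_t otherSize, Range otherHole) {
      std::size_t sectionBegin = 0;
      while (sectionBegin < inlineCapacity) {
        // Occupancy can only change at one of these six boundaries.
        std::size_t sectionEnd = inlineCapacity;
        for (std::size_t b : {thisSize, thisHole.begin, thisHole.end,
                              otherSize, otherHole.begin, otherHole.end})
          if (sectionBegin < b) sectionEnd = std::min(sectionEnd, b);
        bool ours = occupied(thisSize, thisHole, sectionBegin);
        bool theirs = occupied(otherSize, otherHole, sectionBegin);
        std::printf("[%zu, %zu): %s\n", sectionBegin, sectionEnd,
                    ours && theirs ? "swap" : ours ? "move this -> other"
                                  : theirs ? "move other -> this" : "skip");
        sectionBegin = sectionEnd;
      }
    }

For example, walkSections(8, 5, {1, 3}, 6, {0, 0}) plans [0,1) swap, [1,3) move other -> this, [3,5) swap, [5,6) move other -> this, [6,8) skip: the same per-run decisions the real loop makes with TypeOperations::swap and TypeOperations::move.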
969 void prepend(const U*, size_t); 970 void prepend(const U*, size_t);
970 template <typename U> 971 template <typename U>
971 void prepend(U&&); 972 void prepend(U&&);
972 template <typename U, size_t c, typename V> 973 template <typename U, size_t c, typename V>
973 void prependVector(const Vector<U, c, V>&); 974 void prependVector(const Vector<U, c, V>&);
974 975
975 void remove(size_t position); 976 void remove(size_t position);
976 void remove(size_t position, size_t length); 977 void remove(size_t position, size_t length);
977 978
978 void pop_back() { 979 void pop_back() {
979 ASSERT(!isEmpty()); 980 DCHECK(!isEmpty());
980 shrink(size() - 1); 981 shrink(size() - 1);
981 } 982 }
982 983
983 Vector(size_t size, const T& val) : Base(size) { 984 Vector(size_t size, const T& val) : Base(size) {
984 ANNOTATE_NEW_BUFFER(begin(), capacity(), size); 985 ANNOTATE_NEW_BUFFER(begin(), capacity(), size);
985 m_size = size; 986 m_size = size;
986 TypeOperations::uninitializedFill(begin(), end(), val); 987 TypeOperations::uninitializedFill(begin(), end(), val);
987 } 988 }
988 989
989 void fill(const T&, size_t); 990 void fill(const T&, size_t);
(...skipping 65 matching lines...) Expand 10 before | Expand all | Expand 10 after
1055 Vector<T, inlineCapacity, Allocator>& Vector<T, inlineCapacity, Allocator>:: 1056 Vector<T, inlineCapacity, Allocator>& Vector<T, inlineCapacity, Allocator>::
1056 operator=(const Vector<T, inlineCapacity, Allocator>& other) { 1057 operator=(const Vector<T, inlineCapacity, Allocator>& other) {
1057 if (UNLIKELY(&other == this)) 1058 if (UNLIKELY(&other == this))
1058 return *this; 1059 return *this;
1059 1060
1060 if (size() > other.size()) { 1061 if (size() > other.size()) {
1061 shrink(other.size()); 1062 shrink(other.size());
1062 } else if (other.size() > capacity()) { 1063 } else if (other.size() > capacity()) {
1063 clear(); 1064 clear();
1064 reserveCapacity(other.size()); 1065 reserveCapacity(other.size());
1065 ASSERT(begin()); 1066 DCHECK(begin());
1066 } 1067 }
1067 1068
1068 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, other.size()); 1069 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, other.size());
1069 std::copy(other.begin(), other.begin() + size(), begin()); 1070 std::copy(other.begin(), other.begin() + size(), begin());
1070 TypeOperations::uninitializedCopy(other.begin() + size(), other.end(), end()); 1071 TypeOperations::uninitializedCopy(other.begin() + size(), other.end(), end());
1071 m_size = other.size(); 1072 m_size = other.size();
1072 1073
1073 return *this; 1074 return *this;
1074 } 1075 }
1075 1076
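The assignment above splits into three regimes: shrink when the source is shorter; clear then reserve when the source exceeds capacity (destroying everything first means the reallocation has nothing to move); otherwise copy-assign over the live prefix and copy-construct into the raw tail. A minimal self-contained analogue over a raw buffer (C++17, no inline capacity, no ASAN annotations; MiniVec is an illustrative name):

    #include <algorithm>
    #include <cstddef>
    #include <memory>

    template <typename T>
    struct MiniVec {
      std::allocator<T> alloc;
      T* buf = nullptr;
      std::size_t size = 0, cap = 0;

      void assignFrom(const MiniVec& other) {
        if (&other == this) return;                    // self-assignment guard
        if (size > other.size) {                       // 1. source is shorter
          std::destroy(buf + other.size, buf + size);
          size = other.size;
        } else if (other.size > cap) {                 // 2. source exceeds capacity
          std::destroy(buf, buf + size);
          if (buf) alloc.deallocate(buf, cap);
          size = 0;
          cap = other.size;
          buf = alloc.allocate(cap);
        }
        std::copy(other.buf, other.buf + size, buf);   // over already-live slots
        std::uninitialized_copy(other.buf + size,      // construct into raw slots
                                other.buf + other.size, buf + size);
        size = other.size;
      }
    };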
1076 inline bool typelessPointersAreEqual(const void* a, const void* b) { 1077 inline bool typelessPointersAreEqual(const void* a, const void* b) {
1077 return a == b; 1078 return a == b;
1078 } 1079 }
1079 1080
1080 template <typename T, size_t inlineCapacity, typename Allocator> 1081 template <typename T, size_t inlineCapacity, typename Allocator>
1081 template <size_t otherCapacity> 1082 template <size_t otherCapacity>
1082 Vector<T, inlineCapacity, Allocator>& Vector<T, inlineCapacity, Allocator>:: 1083 Vector<T, inlineCapacity, Allocator>& Vector<T, inlineCapacity, Allocator>::
1083 operator=(const Vector<T, otherCapacity, Allocator>& other) { 1084 operator=(const Vector<T, otherCapacity, Allocator>& other) {
1084 // If the inline capacities match, we should call the more specific 1085 // If the inline capacities match, we should call the more specific
1085 // template. If the inline capacities don't match, the two objects 1086 // template. If the inline capacities don't match, the two objects
1086 // shouldn't be allocated the same address. 1087 // shouldn't be allocated the same address.
1087 ASSERT(!typelessPointersAreEqual(&other, this)); 1088 DCHECK(!typelessPointersAreEqual(&other, this));
1088 1089
1089 if (size() > other.size()) { 1090 if (size() > other.size()) {
1090 shrink(other.size()); 1091 shrink(other.size());
1091 } else if (other.size() > capacity()) { 1092 } else if (other.size() > capacity()) {
1092 clear(); 1093 clear();
1093 reserveCapacity(other.size()); 1094 reserveCapacity(other.size());
1094 ASSERT(begin()); 1095 DCHECK(begin());
1095 } 1096 }
1096 1097
1097 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, other.size()); 1098 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, other.size());
1098 std::copy(other.begin(), other.begin() + size(), begin()); 1099 std::copy(other.begin(), other.begin() + size(), begin());
1099 TypeOperations::uninitializedCopy(other.begin() + size(), other.end(), end()); 1100 TypeOperations::uninitializedCopy(other.begin() + size(), other.end(), end());
1100 m_size = other.size(); 1101 m_size = other.size();
1101 1102
1102 return *this; 1103 return *this;
1103 } 1104 }
1104 1105
(...skipping 72 matching lines...) Expand 10 before | Expand all | Expand 10 after
1177 return kNotFound; 1178 return kNotFound;
1178 } 1179 }
1179 1180
1180 template <typename T, size_t inlineCapacity, typename Allocator> 1181 template <typename T, size_t inlineCapacity, typename Allocator>
1181 void Vector<T, inlineCapacity, Allocator>::fill(const T& val, size_t newSize) { 1182 void Vector<T, inlineCapacity, Allocator>::fill(const T& val, size_t newSize) {
1182 if (size() > newSize) { 1183 if (size() > newSize) {
1183 shrink(newSize); 1184 shrink(newSize);
1184 } else if (newSize > capacity()) { 1185 } else if (newSize > capacity()) {
1185 clear(); 1186 clear();
1186 reserveCapacity(newSize); 1187 reserveCapacity(newSize);
1187 ASSERT(begin()); 1188 DCHECK(begin());
1188 } 1189 }
1189 1190
1190 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, newSize); 1191 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, newSize);
1191 std::fill(begin(), end(), val); 1192 std::fill(begin(), end(), val);
1192 TypeOperations::uninitializedFill(end(), begin() + newSize, val); 1193 TypeOperations::uninitializedFill(end(), begin() + newSize, val);
1193 m_size = newSize; 1194 m_size = newSize;
1194 } 1195 }
1195 1196
1196 template <typename T, size_t inlineCapacity, typename Allocator> 1197 template <typename T, size_t inlineCapacity, typename Allocator>
1197 template <typename Iterator> 1198 template <typename Iterator>
(...skipping 63 matching lines...)
1261 expandCapacity(size); 1262 expandCapacity(size);
1262 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, size); 1263 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, size);
1263 TypeOperations::initialize(end(), begin() + size); 1264 TypeOperations::initialize(end(), begin() + size);
1264 } 1265 }
1265 1266
1266 m_size = size; 1267 m_size = size;
1267 } 1268 }
1268 1269
1269 template <typename T, size_t inlineCapacity, typename Allocator> 1270 template <typename T, size_t inlineCapacity, typename Allocator>
1270 void Vector<T, inlineCapacity, Allocator>::shrink(size_t size) { 1271 void Vector<T, inlineCapacity, Allocator>::shrink(size_t size) {
1271 ASSERT(size <= m_size); 1272 DCHECK_LE(size, m_size);
1272 TypeOperations::destruct(begin() + size, end()); 1273 TypeOperations::destruct(begin() + size, end());
1273 clearUnusedSlots(begin() + size, end()); 1274 clearUnusedSlots(begin() + size, end());
1274 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, size); 1275 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, size);
1275 m_size = size; 1276 m_size = size;
1276 } 1277 }
1277 1278
1278 template <typename T, size_t inlineCapacity, typename Allocator> 1279 template <typename T, size_t inlineCapacity, typename Allocator>
1279 void Vector<T, inlineCapacity, Allocator>::grow(size_t size) { 1280 void Vector<T, inlineCapacity, Allocator>::grow(size_t size) {
1280 ASSERT(size >= m_size); 1281 DCHECK_GE(size, m_size);
1281 if (size > capacity()) 1282 if (size > capacity())
1282 expandCapacity(size); 1283 expandCapacity(size);
1283 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, size); 1284 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, size);
1284 TypeOperations::initialize(end(), begin() + size); 1285 TypeOperations::initialize(end(), begin() + size);
1285 m_size = size; 1286 m_size = size;
1286 } 1287 }
1287 1288
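grow and shrink only construct or destroy the tail; neither path touches capacity (shrinkCapacity handles that separately). A usage sketch, assuming a plain WTF::Vector<int>:

    Vector<int> v;
    v.grow(4);    // size() == 4; new slots are zeroed for memset-initializable T
    v.shrink(1);  // size() == 1; capacity is left unchanged
    // v.shrink(5) here would trip DCHECK_LE(size, m_size) in a debug build.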
1288 template <typename T, size_t inlineCapacity, typename Allocator> 1289 template <typename T, size_t inlineCapacity, typename Allocator>
1289 void Vector<T, inlineCapacity, Allocator>::reserveCapacity(size_t newCapacity) { 1290 void Vector<T, inlineCapacity, Allocator>::reserveCapacity(size_t newCapacity) {
1290 if (UNLIKELY(newCapacity <= capacity())) 1291 if (UNLIKELY(newCapacity <= capacity()))
(...skipping 18 matching lines...)
1309 ANNOTATE_NEW_BUFFER(begin(), capacity(), m_size); 1310 ANNOTATE_NEW_BUFFER(begin(), capacity(), m_size);
1310 TypeOperations::move(oldBuffer, oldEnd, begin()); 1311 TypeOperations::move(oldBuffer, oldEnd, begin());
1311 clearUnusedSlots(oldBuffer, oldEnd); 1312 clearUnusedSlots(oldBuffer, oldEnd);
1312 ANNOTATE_DELETE_BUFFER(oldBuffer, oldCapacity, m_size); 1313 ANNOTATE_DELETE_BUFFER(oldBuffer, oldCapacity, m_size);
1313 Base::deallocateBuffer(oldBuffer); 1314 Base::deallocateBuffer(oldBuffer);
1314 } 1315 }
1315 1316
1316 template <typename T, size_t inlineCapacity, typename Allocator> 1317 template <typename T, size_t inlineCapacity, typename Allocator>
1317 inline void Vector<T, inlineCapacity, Allocator>::reserveInitialCapacity( 1318 inline void Vector<T, inlineCapacity, Allocator>::reserveInitialCapacity(
1318 size_t initialCapacity) { 1319 size_t initialCapacity) {
1319 ASSERT(!m_size); 1320 DCHECK(!m_size);
1320 ASSERT(capacity() == INLINE_CAPACITY); 1321 DCHECK(capacity() == INLINE_CAPACITY);
1321 if (initialCapacity > INLINE_CAPACITY) { 1322 if (initialCapacity > INLINE_CAPACITY) {
1322 ANNOTATE_DELETE_BUFFER(begin(), capacity(), m_size); 1323 ANNOTATE_DELETE_BUFFER(begin(), capacity(), m_size);
1323 Base::allocateBuffer(initialCapacity); 1324 Base::allocateBuffer(initialCapacity);
1324 ANNOTATE_NEW_BUFFER(begin(), capacity(), m_size); 1325 ANNOTATE_NEW_BUFFER(begin(), capacity(), m_size);
1325 } 1326 }
1326 } 1327 }
1327 1328
1328 template <typename T, size_t inlineCapacity, typename Allocator> 1329 template <typename T, size_t inlineCapacity, typename Allocator>
1329 void Vector<T, inlineCapacity, Allocator>::shrinkCapacity(size_t newCapacity) { 1330 void Vector<T, inlineCapacity, Allocator>::shrinkCapacity(size_t newCapacity) {
1330 if (newCapacity >= capacity()) 1331 if (newCapacity >= capacity())
(...skipping 34 matching lines...)
1365 } 1366 }
1366 1367
1367 // Templatizing these is better than just letting the conversion happen 1368 // Templatizing these is better than just letting the conversion happen
1368 // implicitly, because for instance it allows a PassRefPtr to be appended to a 1369 // implicitly, because for instance it allows a PassRefPtr to be appended to a
1369 // RefPtr vector without refcount thrash. 1370 // RefPtr vector without refcount thrash.
1370 1371
1371 template <typename T, size_t inlineCapacity, typename Allocator> 1372 template <typename T, size_t inlineCapacity, typename Allocator>
1372 template <typename U> 1373 template <typename U>
1373 void Vector<T, inlineCapacity, Allocator>::append(const U* data, 1374 void Vector<T, inlineCapacity, Allocator>::append(const U* data,
1374 size_t dataSize) { 1375 size_t dataSize) {
1375 ASSERT(Allocator::isAllocationAllowed()); 1376 DCHECK(Allocator::isAllocationAllowed());
1376 size_t newSize = m_size + dataSize; 1377 size_t newSize = m_size + dataSize;
1377 if (newSize > capacity()) { 1378 if (newSize > capacity()) {
1378 data = expandCapacity(newSize, data); 1379 data = expandCapacity(newSize, data);
1379 ASSERT(begin()); 1380 DCHECK(begin());
1380 } 1381 }
1381 RELEASE_ASSERT(newSize >= m_size); 1382 RELEASE_ASSERT(newSize >= m_size);
1382 T* dest = end(); 1383 T* dest = end();
1383 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, newSize); 1384 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, newSize);
1384 VectorCopier<VectorTraits<T>::canCopyWithMemcpy, T>::uninitializedCopy( 1385 VectorCopier<VectorTraits<T>::canCopyWithMemcpy, T>::uninitializedCopy(
1385 data, &data[dataSize], dest); 1386 data, &data[dataSize], dest);
1386 m_size = newSize; 1387 m_size = newSize;
1387 } 1388 }
1388 1389
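Note that RELEASE_ASSERT(newSize >= m_size) is deliberately not migrated to a DCHECK: m_size + dataSize can wrap around size_t, and that overflow must be caught even in release builds. A bulk-append usage sketch:

    const int raw[3] = {1, 2, 3};
    Vector<int> v;
    v.append(raw, 3);  // copies all three elements, reallocating at most once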
1389 template <typename T, size_t inlineCapacity, typename Allocator> 1390 template <typename T, size_t inlineCapacity, typename Allocator>
1390 template <typename U> 1391 template <typename U>
1391 ALWAYS_INLINE void Vector<T, inlineCapacity, Allocator>::push_back(U&& val) { 1392 ALWAYS_INLINE void Vector<T, inlineCapacity, Allocator>::push_back(U&& val) {
1392 return append(std::forward<U>(val)); 1393 return append(std::forward<U>(val));
1393 } 1394 }
1394 1395
1395 template <typename T, size_t inlineCapacity, typename Allocator> 1396 template <typename T, size_t inlineCapacity, typename Allocator>
1396 template <typename U> 1397 template <typename U>
1397 ALWAYS_INLINE void Vector<T, inlineCapacity, Allocator>::append(U&& val) { 1398 ALWAYS_INLINE void Vector<T, inlineCapacity, Allocator>::append(U&& val) {
1398 ASSERT(Allocator::isAllocationAllowed()); 1399 DCHECK(Allocator::isAllocationAllowed());
1399 if (LIKELY(size() != capacity())) { 1400 if (LIKELY(size() != capacity())) {
1400 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, m_size + 1); 1401 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, m_size + 1);
1401 new (NotNull, end()) T(std::forward<U>(val)); 1402 new (NotNull, end()) T(std::forward<U>(val));
1402 ++m_size; 1403 ++m_size;
1403 return; 1404 return;
1404 } 1405 }
1405 1406
1406 appendSlowCase(std::forward<U>(val)); 1407 appendSlowCase(std::forward<U>(val));
1407 } 1408 }
1408 1409
(...skipping 11 matching lines...)
1420 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, m_size + 1); 1421 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, m_size + 1);
1421 T* t = new (NotNull, end()) T(std::forward<Args>(args)...); 1422 T* t = new (NotNull, end()) T(std::forward<Args>(args)...);
1422 ++m_size; 1423 ++m_size;
1423 return *t; 1424 return *t;
1424 } 1425 }
1425 1426
1426 template <typename T, size_t inlineCapacity, typename Allocator> 1427 template <typename T, size_t inlineCapacity, typename Allocator>
1427 template <typename U> 1428 template <typename U>
1428 NEVER_INLINE void Vector<T, inlineCapacity, Allocator>::appendSlowCase( 1429 NEVER_INLINE void Vector<T, inlineCapacity, Allocator>::appendSlowCase(
1429 U&& val) { 1430 U&& val) {
1430 ASSERT(size() == capacity()); 1431 DCHECK_EQ(size(), capacity());
1431 1432
1432 typename std::remove_reference<U>::type* ptr = &val; 1433 typename std::remove_reference<U>::type* ptr = &val;
1433 ptr = expandCapacity(size() + 1, ptr); 1434 ptr = expandCapacity(size() + 1, ptr);
1434 ASSERT(begin()); 1435 DCHECK(begin());
1435 1436
1436 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, m_size + 1); 1437 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, m_size + 1);
1437 new (NotNull, end()) T(std::forward<U>(*ptr)); 1438 new (NotNull, end()) T(std::forward<U>(*ptr));
1438 ++m_size; 1439 ++m_size;
1439 } 1440 }
1440 1441
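appendSlowCase routes &val back through expandCapacity because the value being appended may alias an element of this very vector; reallocation would invalidate the pointer, so expandCapacity returns the relocated address. A sketch of the case this protects against (assuming a non-trivial element type such as WTF::String):

    Vector<String> v;
    v.append(String("x"));
    v.append(v[0]);  // v[0] may live in the buffer being replaced; the slow
                     // path re-derives the source pointer after the move.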
1441 // This version of append saves a branch in the case where you know that the 1442 // This version of append saves a branch in the case where you know that the
1442 // vector's capacity is large enough for the append to succeed. 1443 // vector's capacity is large enough for the append to succeed.
1443 1444
1444 template <typename T, size_t inlineCapacity, typename Allocator> 1445 template <typename T, size_t inlineCapacity, typename Allocator>
1445 template <typename U> 1446 template <typename U>
1446 ALWAYS_INLINE void Vector<T, inlineCapacity, Allocator>::uncheckedAppend( 1447 ALWAYS_INLINE void Vector<T, inlineCapacity, Allocator>::uncheckedAppend(
1447 U&& val) { 1448 U&& val) {
1448 #ifdef ANNOTATE_CONTIGUOUS_CONTAINER 1449 #ifdef ANNOTATE_CONTIGUOUS_CONTAINER
1449 // Vectors in ASAN builds don't have inlineCapacity. 1450 // Vectors in ASAN builds don't have inlineCapacity.
1450 append(std::forward<U>(val)); 1451 append(std::forward<U>(val));
1451 #else 1452 #else
1452 ASSERT(size() < capacity()); 1453 DCHECK_LT(size(), capacity());
1453 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, m_size + 1); 1454 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, m_size + 1);
1454 new (NotNull, end()) T(std::forward<U>(val)); 1455 new (NotNull, end()) T(std::forward<U>(val));
1455 ++m_size; 1456 ++m_size;
1456 #endif 1457 #endif
1457 } 1458 }
1458 1459
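uncheckedAppend is for callers that have already reserved room; skipping the capacity branch saves a compare on hot paths. Under ASAN container annotations the inline-capacity tricks are disabled (per the comment above), so it simply forwards to append. Usage sketch:

    Vector<int> v;
    v.reserveCapacity(3);
    v.uncheckedAppend(1);  // fine: size() < capacity()
    v.uncheckedAppend(2);
    v.uncheckedAppend(3);
    // One more uncheckedAppend would trip DCHECK_LT(size(), capacity()).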
1459 template <typename T, size_t inlineCapacity, typename Allocator> 1460 template <typename T, size_t inlineCapacity, typename Allocator>
1460 template <typename U, size_t otherCapacity, typename OtherAllocator> 1461 template <typename U, size_t otherCapacity, typename OtherAllocator>
1461 inline void Vector<T, inlineCapacity, Allocator>::appendVector( 1462 inline void Vector<T, inlineCapacity, Allocator>::appendVector(
1462 const Vector<U, otherCapacity, OtherAllocator>& val) { 1463 const Vector<U, otherCapacity, OtherAllocator>& val) {
1463 append(val.begin(), val.size()); 1464 append(val.begin(), val.size());
1464 } 1465 }
1465 1466
1466 template <typename T, size_t inlineCapacity, typename Allocator> 1467 template <typename T, size_t inlineCapacity, typename Allocator>
1467 template <typename U> 1468 template <typename U>
1468 void Vector<T, inlineCapacity, Allocator>::insert(size_t position, 1469 void Vector<T, inlineCapacity, Allocator>::insert(size_t position,
1469 const U* data, 1470 const U* data,
1470 size_t dataSize) { 1471 size_t dataSize) {
1471 ASSERT(Allocator::isAllocationAllowed()); 1472 DCHECK(Allocator::isAllocationAllowed());
1472 RELEASE_ASSERT(position <= size()); 1473 RELEASE_ASSERT(position <= size());
1473 size_t newSize = m_size + dataSize; 1474 size_t newSize = m_size + dataSize;
1474 if (newSize > capacity()) { 1475 if (newSize > capacity()) {
1475 data = expandCapacity(newSize, data); 1476 data = expandCapacity(newSize, data);
1476 ASSERT(begin()); 1477 DCHECK(begin());
1477 } 1478 }
1478 RELEASE_ASSERT(newSize >= m_size); 1479 RELEASE_ASSERT(newSize >= m_size);
1479 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, newSize); 1480 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, newSize);
1480 T* spot = begin() + position; 1481 T* spot = begin() + position;
1481 TypeOperations::moveOverlapping(spot, end(), spot + dataSize); 1482 TypeOperations::moveOverlapping(spot, end(), spot + dataSize);
1482 VectorCopier<VectorTraits<T>::canCopyWithMemcpy, T>::uninitializedCopy( 1483 VectorCopier<VectorTraits<T>::canCopyWithMemcpy, T>::uninitializedCopy(
1483 data, &data[dataSize], spot); 1484 data, &data[dataSize], spot);
1484 m_size = newSize; 1485 m_size = newSize;
1485 } 1486 }
1486 1487
1487 template <typename T, size_t inlineCapacity, typename Allocator> 1488 template <typename T, size_t inlineCapacity, typename Allocator>
1488 template <typename U> 1489 template <typename U>
1489 inline void Vector<T, inlineCapacity, Allocator>::insert(size_t position, 1490 inline void Vector<T, inlineCapacity, Allocator>::insert(size_t position,
1490 U&& val) { 1491 U&& val) {
1491 ASSERT(Allocator::isAllocationAllowed()); 1492 DCHECK(Allocator::isAllocationAllowed());
1492 RELEASE_ASSERT(position <= size()); 1493 RELEASE_ASSERT(position <= size());
1493 typename std::remove_reference<U>::type* data = &val; 1494 typename std::remove_reference<U>::type* data = &val;
1494 if (size() == capacity()) { 1495 if (size() == capacity()) {
1495 data = expandCapacity(size() + 1, data); 1496 data = expandCapacity(size() + 1, data);
1496 ASSERT(begin()); 1497 DCHECK(begin());
1497 } 1498 }
1498 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, m_size + 1); 1499 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, m_size + 1);
1499 T* spot = begin() + position; 1500 T* spot = begin() + position;
1500 TypeOperations::moveOverlapping(spot, end(), spot + 1); 1501 TypeOperations::moveOverlapping(spot, end(), spot + 1);
1501 new (NotNull, spot) T(std::forward<U>(*data)); 1502 new (NotNull, spot) T(std::forward<U>(*data));
1502 ++m_size; 1503 ++m_size;
1503 } 1504 }
1504 1505
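insert shifts the tail with moveOverlapping before constructing into the gap, so its cost is linear in the number of elements after position. The position check stays a RELEASE_ASSERT because an out-of-range write would be memory-unsafe even in release builds. Usage sketch:

    Vector<int> v;
    v.append(1);
    v.append(3);
    v.insert(1, 2);  // v now holds 1, 2, 3
    // v.insert(10, 4) would fail RELEASE_ASSERT(position <= size()).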
1505 template <typename T, size_t inlineCapacity, typename Allocator> 1506 template <typename T, size_t inlineCapacity, typename Allocator>
1506 template <typename U, size_t c, typename OtherAllocator> 1507 template <typename U, size_t c, typename OtherAllocator>
(...skipping 82 matching lines...)
1589 inline bool operator!=(const Vector<T, inlineCapacityA, Allocator>& a, 1590 inline bool operator!=(const Vector<T, inlineCapacityA, Allocator>& a,
1590 const Vector<T, inlineCapacityB, Allocator>& b) { 1591 const Vector<T, inlineCapacityB, Allocator>& b) {
1591 return !(a == b); 1592 return !(a == b);
1592 } 1593 }
1593 1594
1594 // This is only called if the allocator is a HeapAllocator. It is used when 1595 // This is only called if the allocator is a HeapAllocator. It is used when
1595 // visiting during a tracing GC. 1596 // visiting during a tracing GC.
1596 template <typename T, size_t inlineCapacity, typename Allocator> 1597 template <typename T, size_t inlineCapacity, typename Allocator>
1597 template <typename VisitorDispatcher> 1598 template <typename VisitorDispatcher>
1598 void Vector<T, inlineCapacity, Allocator>::trace(VisitorDispatcher visitor) { 1599 void Vector<T, inlineCapacity, Allocator>::trace(VisitorDispatcher visitor) {
1599 ASSERT(Allocator::isGarbageCollected); // Garbage collector must be enabled. 1600 DCHECK(Allocator::isGarbageCollected) << "Garbage collector must be enabled.";
1600 if (!buffer()) 1601 if (!buffer())
1601 return; 1602 return;
1602 if (this->hasOutOfLineBuffer()) { 1603 if (this->hasOutOfLineBuffer()) {
1603 // This is a performance optimization for a case where the buffer has 1604 // This is a performance optimization for a case where the buffer has
1604 // been already traced by somewhere. This can happen if the conservative 1605 // been already traced by somewhere. This can happen if the conservative
1605 // scanning traced an on-stack (false-positive or real) pointer to the 1606 // scanning traced an on-stack (false-positive or real) pointer to the
1606 // HeapVector, and then visitor->trace() traces the HeapVector. 1607 // HeapVector, and then visitor->trace() traces the HeapVector.
1607 if (Allocator::isHeapObjectAlive(buffer())) 1608 if (Allocator::isHeapObjectAlive(buffer()))
1608 return; 1609 return;
1609 Allocator::markNoTracing(visitor, buffer()); 1610 Allocator::markNoTracing(visitor, buffer());
1610 Allocator::registerBackingStoreReference(visitor, Base::bufferSlot()); 1611 Allocator::registerBackingStoreReference(visitor, Base::bufferSlot());
1611 } 1612 }
1612 const T* bufferBegin = buffer(); 1613 const T* bufferBegin = buffer();
1613 const T* bufferEnd = buffer() + size(); 1614 const T* bufferEnd = buffer() + size();
1614 if (IsTraceableInCollectionTrait<VectorTraits<T>>::value) { 1615 if (IsTraceableInCollectionTrait<VectorTraits<T>>::value) {
1615 for (const T* bufferEntry = bufferBegin; bufferEntry != bufferEnd; 1616 for (const T* bufferEntry = bufferBegin; bufferEntry != bufferEnd;
1616 bufferEntry++) 1617 bufferEntry++)
1617 Allocator::template trace<VisitorDispatcher, T, VectorTraits<T>>( 1618 Allocator::template trace<VisitorDispatcher, T, VectorTraits<T>>(
1618 visitor, *const_cast<T*>(bufferEntry)); 1619 visitor, *const_cast<T*>(bufferEntry));
1619 checkUnusedSlots(buffer() + size(), buffer() + capacity()); 1620 checkUnusedSlots(buffer() + size(), buffer() + capacity());
1620 } 1621 }
1621 } 1622 }
1622 1623
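A small behavioral upgrade visible in trace(): unlike ASSERT, DCHECK accepts a streamed message, so what used to be a trailing comment now travels with the failure report. The pattern:

    DCHECK(Allocator::isGarbageCollected)
        << "Garbage collector must be enabled.";
    // With DCHECKs on, failure logs the condition text plus the streamed
    // explanation before crashing; with DCHECKs off, both compile away.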
1623 } // namespace WTF 1624 } // namespace WTF
1624 1625
1625 using WTF::Vector; 1626 using WTF::Vector;
1626 1627
1627 #endif // WTF_Vector_h 1628 #endif // WTF_Vector_h