| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (C) 2005, 2006, 2007, 2008 Apple Inc. All rights reserved. | 2 * Copyright (C) 2005, 2006, 2007, 2008 Apple Inc. All rights reserved. |
| 3 * | 3 * |
| 4 * This library is free software; you can redistribute it and/or | 4 * This library is free software; you can redistribute it and/or |
| 5 * modify it under the terms of the GNU Library General Public | 5 * modify it under the terms of the GNU Library General Public |
| 6 * License as published by the Free Software Foundation; either | 6 * License as published by the Free Software Foundation; either |
| 7 * version 2 of the License, or (at your option) any later version. | 7 * version 2 of the License, or (at your option) any later version. |
| 8 * | 8 * |
| 9 * This library is distributed in the hope that it will be useful, | 9 * This library is distributed in the hope that it will be useful, |
| 10 * but WITHOUT ANY WARRANTY; without even the implied warranty of | 10 * but WITHOUT ANY WARRANTY; without even the implied warranty of |
| (...skipping 42 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 53 #endif | 53 #endif |
| 54 | 54 |
| 55 template <typename T, size_t inlineBuffer, typename Allocator> | 55 template <typename T, size_t inlineBuffer, typename Allocator> |
| 56 class Deque; | 56 class Deque; |
| 57 | 57 |
| 58 template <bool needsDestruction, typename T> | 58 template <bool needsDestruction, typename T> |
| 59 struct VectorDestructor; | 59 struct VectorDestructor; |
| 60 | 60 |
| 61 template <typename T> | 61 template <typename T> |
| 62 struct VectorDestructor<false, T> { | 62 struct VectorDestructor<false, T> { |
| 63 STATIC_ONLY(VectorDestructor); | 63 STATIC_ONLY(VectorDestructor); |
| 64 static void destruct(T*, T*) {} | 64 static void destruct(T*, T*) {} |
| 65 }; | 65 }; |
| 66 | 66 |
| 67 template <typename T> | 67 template <typename T> |
| 68 struct VectorDestructor<true, T> { | 68 struct VectorDestructor<true, T> { |
| 69 STATIC_ONLY(VectorDestructor); | 69 STATIC_ONLY(VectorDestructor); |
| 70 static void destruct(T* begin, T* end) | 70 static void destruct(T* begin, T* end) { |
| 71 { | 71 for (T* cur = begin; cur != end; ++cur) |
| 72 for (T* cur = begin; cur != end; ++cur) | 72 cur->~T(); |
| 73 cur->~T(); | 73 } |
| 74 } | |
| 75 }; | 74 }; |
| 76 | 75 |
| 77 template <bool unusedSlotsMustBeZeroed, typename T> | 76 template <bool unusedSlotsMustBeZeroed, typename T> |
| 78 struct VectorUnusedSlotClearer; | 77 struct VectorUnusedSlotClearer; |
| 79 | 78 |
| 80 template <typename T> | 79 template <typename T> |
| 81 struct VectorUnusedSlotClearer<false, T> { | 80 struct VectorUnusedSlotClearer<false, T> { |
| 82 STATIC_ONLY(VectorUnusedSlotClearer); | 81 STATIC_ONLY(VectorUnusedSlotClearer); |
| 83 static void clear(T*, T*) {} | 82 static void clear(T*, T*) {} |
| 84 #if ENABLE(ASSERT) | 83 #if ENABLE(ASSERT) |
| 85 static void checkCleared(const T*, const T*) {} | 84 static void checkCleared(const T*, const T*) {} |
| 86 #endif | 85 #endif |
| 87 }; | 86 }; |
| 88 | 87 |
| 89 template <typename T> | 88 template <typename T> |
| 90 struct VectorUnusedSlotClearer<true, T> { | 89 struct VectorUnusedSlotClearer<true, T> { |
| 91 STATIC_ONLY(VectorUnusedSlotClearer); | 90 STATIC_ONLY(VectorUnusedSlotClearer); |
| 92 static void clear(T* begin, T* end) | 91 static void clear(T* begin, T* end) { |
| 93 { | 92 memset(reinterpret_cast<void*>(begin), 0, sizeof(T) * (end - begin)); |
| 94 memset(reinterpret_cast<void*>(begin), 0, sizeof(T) * (end - begin)); | 93 } |
| 95 } | |
| 96 | 94 |
| 97 #if ENABLE(ASSERT) | 95 #if ENABLE(ASSERT) |
| 98 static void checkCleared(const T* begin, const T* end) | 96 static void checkCleared(const T* begin, const T* end) { |
| 99 { | 97 const unsigned char* unusedArea = |
| 100 const unsigned char* unusedArea = reinterpret_cast<const unsigned char*>
(begin); | 98 reinterpret_cast<const unsigned char*>(begin); |
| 101 const unsigned char* endAddress = reinterpret_cast<const unsigned char*>
(end); | 99 const unsigned char* endAddress = |
| 102 ASSERT(endAddress >= unusedArea); | 100 reinterpret_cast<const unsigned char*>(end); |
| 103 for (int i = 0; i < endAddress - unusedArea; ++i) | 101 ASSERT(endAddress >= unusedArea); |
| 104 ASSERT(!unusedArea[i]); | 102 for (int i = 0; i < endAddress - unusedArea; ++i) |
| 105 } | 103 ASSERT(!unusedArea[i]); |
| 104 } |
| 106 #endif | 105 #endif |
| 107 }; | 106 }; |
| 108 | 107 |
| 109 template <bool canInitializeWithMemset, typename T> | 108 template <bool canInitializeWithMemset, typename T> |
| 110 struct VectorInitializer; | 109 struct VectorInitializer; |
| 111 | 110 |
| 112 template <typename T> | 111 template <typename T> |
| 113 struct VectorInitializer<false, T> { | 112 struct VectorInitializer<false, T> { |
| 114 STATIC_ONLY(VectorInitializer); | 113 STATIC_ONLY(VectorInitializer); |
| 115 static void initialize(T* begin, T* end) | 114 static void initialize(T* begin, T* end) { |
| 116 { | 115 for (T* cur = begin; cur != end; ++cur) |
| 117 for (T* cur = begin; cur != end; ++cur) | 116 new (NotNull, cur) T; |
| 118 new (NotNull, cur) T; | 117 } |
| 119 } | |
| 120 }; | 118 }; |
| 121 | 119 |
| 122 template <typename T> | 120 template <typename T> |
| 123 struct VectorInitializer<true, T> { | 121 struct VectorInitializer<true, T> { |
| 124 STATIC_ONLY(VectorInitializer); | 122 STATIC_ONLY(VectorInitializer); |
| 125 static void initialize(T* begin, T* end) | 123 static void initialize(T* begin, T* end) { |
| 126 { | 124 memset(begin, 0, |
| 127 memset(begin, 0, reinterpret_cast<char*>(end) - reinterpret_cast<char*>(
begin)); | 125 reinterpret_cast<char*>(end) - reinterpret_cast<char*>(begin)); |
| 128 } | 126 } |
| 129 }; | 127 }; |
| 130 | 128 |
| 131 template <bool canMoveWithMemcpy, typename T> | 129 template <bool canMoveWithMemcpy, typename T> |
| 132 struct VectorMover; | 130 struct VectorMover; |
| 133 | 131 |
| 134 template <typename T> | 132 template <typename T> |
| 135 struct VectorMover<false, T> { | 133 struct VectorMover<false, T> { |
| 136 STATIC_ONLY(VectorMover); | 134 STATIC_ONLY(VectorMover); |
| 137 static void move(T* src, T* srcEnd, T* dst) | 135 static void move(T* src, T* srcEnd, T* dst) { |
| 138 { | 136 while (src != srcEnd) { |
| 139 while (src != srcEnd) { | 137 new (NotNull, dst) T(std::move(*src)); |
| 140 new (NotNull, dst) T(std::move(*src)); | 138 src->~T(); |
| 141 src->~T(); | 139 ++dst; |
| 142 ++dst; | 140 ++src; |
| 143 ++src; | |
| 144 } | |
| 145 } | 141 } |
| 146 static void moveOverlapping(T* src, T* srcEnd, T* dst) | 142 } |
| 147 { | 143 static void moveOverlapping(T* src, T* srcEnd, T* dst) { |
| 148 if (src > dst) { | 144 if (src > dst) { |
| 149 move(src, srcEnd, dst); | 145 move(src, srcEnd, dst); |
| 150 } else { | 146 } else { |
| 151 T* dstEnd = dst + (srcEnd - src); | 147 T* dstEnd = dst + (srcEnd - src); |
| 152 while (src != srcEnd) { | 148 while (src != srcEnd) { |
| 153 --srcEnd; | 149 --srcEnd; |
| 154 --dstEnd; | 150 --dstEnd; |
| 155 new (NotNull, dstEnd) T(std::move(*srcEnd)); | 151 new (NotNull, dstEnd) T(std::move(*srcEnd)); |
| 156 srcEnd->~T(); | 152 srcEnd->~T(); |
| 157 } | 153 } |
| 158 } | |
| 159 } | 154 } |
| 160 static void swap(T* src, T* srcEnd, T* dst) | 155 } |
| 161 { | 156 static void swap(T* src, T* srcEnd, T* dst) { |
| 162 std::swap_ranges(src, srcEnd, dst); | 157 std::swap_ranges(src, srcEnd, dst); |
| 163 } | 158 } |
| 164 }; | 159 }; |
| 165 | 160 |
| 166 template <typename T> | 161 template <typename T> |
| 167 struct VectorMover<true, T> { | 162 struct VectorMover<true, T> { |
| 168 STATIC_ONLY(VectorMover); | 163 STATIC_ONLY(VectorMover); |
| 169 static void move(const T* src, const T* srcEnd, T* dst) | 164 static void move(const T* src, const T* srcEnd, T* dst) { |
| 170 { | 165 if (LIKELY(dst && src)) |
| 171 if (LIKELY(dst && src)) | 166 memcpy(dst, src, reinterpret_cast<const char*>(srcEnd) - |
| 172 memcpy(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret
_cast<const char*>(src)); | 167 reinterpret_cast<const char*>(src)); |
| 173 } | 168 } |
| 174 static void moveOverlapping(const T* src, const T* srcEnd, T* dst) | 169 static void moveOverlapping(const T* src, const T* srcEnd, T* dst) { |
| 175 { | 170 if (LIKELY(dst && src)) |
| 176 if (LIKELY(dst && src)) | 171 memmove(dst, src, reinterpret_cast<const char*>(srcEnd) - |
| 177 memmove(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpre
t_cast<const char*>(src)); | 172 reinterpret_cast<const char*>(src)); |
| 178 } | 173 } |
| 179 static void swap(T* src, T* srcEnd, T* dst) | 174 static void swap(T* src, T* srcEnd, T* dst) { |
| 180 { | 175 std::swap_ranges(reinterpret_cast<char*>(src), |
| 181 std::swap_ranges(reinterpret_cast<char*>(src), reinterpret_cast<char*>(s
rcEnd), reinterpret_cast<char*>(dst)); | 176 reinterpret_cast<char*>(srcEnd), |
| 182 } | 177 reinterpret_cast<char*>(dst)); |
| 178 } |
| 183 }; | 179 }; |
| 184 | 180 |
| 185 template <bool canCopyWithMemcpy, typename T> | 181 template <bool canCopyWithMemcpy, typename T> |
| 186 struct VectorCopier; | 182 struct VectorCopier; |
| 187 | 183 |
| 188 template <typename T> | 184 template <typename T> |
| 189 struct VectorCopier<false, T> { | 185 struct VectorCopier<false, T> { |
| 190 STATIC_ONLY(VectorCopier); | 186 STATIC_ONLY(VectorCopier); |
| 191 template <typename U> | 187 template <typename U> |
| 192 static void uninitializedCopy(const U* src, const U* srcEnd, T* dst) | 188 static void uninitializedCopy(const U* src, const U* srcEnd, T* dst) { |
| 193 { | 189 while (src != srcEnd) { |
| 194 while (src != srcEnd) { | 190 new (NotNull, dst) T(*src); |
| 195 new (NotNull, dst) T(*src); | 191 ++dst; |
| 196 ++dst; | 192 ++src; |
| 197 ++src; | |
| 198 } | |
| 199 } | 193 } |
| 194 } |
| 200 }; | 195 }; |
| 201 | 196 |
| 202 template <typename T> | 197 template <typename T> |
| 203 struct VectorCopier<true, T> { | 198 struct VectorCopier<true, T> { |
| 204 STATIC_ONLY(VectorCopier); | 199 STATIC_ONLY(VectorCopier); |
| 205 static void uninitializedCopy(const T* src, const T* srcEnd, T* dst) | 200 static void uninitializedCopy(const T* src, const T* srcEnd, T* dst) { |
| 206 { | 201 if (LIKELY(dst && src)) |
| 207 if (LIKELY(dst && src)) | 202 memcpy(dst, src, reinterpret_cast<const char*>(srcEnd) - |
| 208 memcpy(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret
_cast<const char*>(src)); | 203 reinterpret_cast<const char*>(src)); |
| 209 } | 204 } |
| 210 template <typename U> | 205 template <typename U> |
| 211 static void uninitializedCopy(const U* src, const U* srcEnd, T* dst) | 206 static void uninitializedCopy(const U* src, const U* srcEnd, T* dst) { |
| 212 { | 207 VectorCopier<false, T>::uninitializedCopy(src, srcEnd, dst); |
| 213 VectorCopier<false, T>::uninitializedCopy(src, srcEnd, dst); | 208 } |
| 214 } | |
| 215 }; | 209 }; |
| 216 | 210 |
| 217 template <bool canFillWithMemset, typename T> | 211 template <bool canFillWithMemset, typename T> |
| 218 struct VectorFiller; | 212 struct VectorFiller; |
| 219 | 213 |
| 220 template <typename T> | 214 template <typename T> |
| 221 struct VectorFiller<false, T> { | 215 struct VectorFiller<false, T> { |
| 222 STATIC_ONLY(VectorFiller); | 216 STATIC_ONLY(VectorFiller); |
| 223 static void uninitializedFill(T* dst, T* dstEnd, const T& val) | 217 static void uninitializedFill(T* dst, T* dstEnd, const T& val) { |
| 224 { | 218 while (dst != dstEnd) { |
| 225 while (dst != dstEnd) { | 219 new (NotNull, dst) T(val); |
| 226 new (NotNull, dst) T(val); | 220 ++dst; |
| 227 ++dst; | |
| 228 } | |
| 229 } | 221 } |
| 222 } |
| 230 }; | 223 }; |
| 231 | 224 |
| 232 template <typename T> | 225 template <typename T> |
| 233 struct VectorFiller<true, T> { | 226 struct VectorFiller<true, T> { |
| 234 STATIC_ONLY(VectorFiller); | 227 STATIC_ONLY(VectorFiller); |
| 235 static void uninitializedFill(T* dst, T* dstEnd, const T& val) | 228 static void uninitializedFill(T* dst, T* dstEnd, const T& val) { |
| 236 { | 229 static_assert(sizeof(T) == sizeof(char), "size of type should be one"); |
| 237 static_assert(sizeof(T) == sizeof(char), "size of type should be one"); | |
| 238 #if COMPILER(GCC) && defined(_FORTIFY_SOURCE) | 230 #if COMPILER(GCC) && defined(_FORTIFY_SOURCE) |
| 239 if (!__builtin_constant_p(dstEnd - dst) || (!(dstEnd - dst))) | 231 if (!__builtin_constant_p(dstEnd - dst) || (!(dstEnd - dst))) |
| 240 memset(dst, val, dstEnd - dst); | 232 memset(dst, val, dstEnd - dst); |
| 241 #else | 233 #else |
| 242 memset(dst, val, dstEnd - dst); | 234 memset(dst, val, dstEnd - dst); |
| 243 #endif | 235 #endif |
| 244 } | 236 } |
| 245 }; | 237 }; |
| 246 | 238 |
| 247 template <bool canCompareWithMemcmp, typename T> | 239 template <bool canCompareWithMemcmp, typename T> |
| 248 struct VectorComparer; | 240 struct VectorComparer; |
| 249 | 241 |
| 250 template <typename T> | 242 template <typename T> |
| 251 struct VectorComparer<false, T> { | 243 struct VectorComparer<false, T> { |
| 252 STATIC_ONLY(VectorComparer); | 244 STATIC_ONLY(VectorComparer); |
| 253 static bool compare(const T* a, const T* b, size_t size) | 245 static bool compare(const T* a, const T* b, size_t size) { |
| 254 { | 246 ASSERT(a); |
| 255 ASSERT(a); | 247 ASSERT(b); |
| 256 ASSERT(b); | 248 return std::equal(a, a + size, b); |
| 257 return std::equal(a, a + size, b); | 249 } |
| 258 } | |
| 259 }; | 250 }; |
| 260 | 251 |
| 261 template <typename T> | 252 template <typename T> |
| 262 struct VectorComparer<true, T> { | 253 struct VectorComparer<true, T> { |
| 263 STATIC_ONLY(VectorComparer); | 254 STATIC_ONLY(VectorComparer); |
| 264 static bool compare(const T* a, const T* b, size_t size) | 255 static bool compare(const T* a, const T* b, size_t size) { |
| 265 { | 256 ASSERT(a); |
| 266 ASSERT(a); | 257 ASSERT(b); |
| 267 ASSERT(b); | 258 return memcmp(a, b, sizeof(T) * size) == 0; |
| 268 return memcmp(a, b, sizeof(T) * size) == 0; | 259 } |
| 269 } | |
| 270 }; | 260 }; |
| 271 | 261 |
| 272 template <typename T> | 262 template <typename T> |
| 273 struct VectorTypeOperations { | 263 struct VectorTypeOperations { |
| 274 STATIC_ONLY(VectorTypeOperations); | 264 STATIC_ONLY(VectorTypeOperations); |
| 275 static void destruct(T* begin, T* end) | 265 static void destruct(T* begin, T* end) { |
| 276 { | 266 VectorDestructor<VectorTraits<T>::needsDestruction, T>::destruct(begin, |
| 277 VectorDestructor<VectorTraits<T>::needsDestruction, T>::destruct(begin,
end); | 267 end); |
| 278 } | 268 } |
| 279 | 269 |
| 280 static void initialize(T* begin, T* end) | 270 static void initialize(T* begin, T* end) { |
| 281 { | 271 VectorInitializer<VectorTraits<T>::canInitializeWithMemset, T>::initialize( |
| 282 VectorInitializer<VectorTraits<T>::canInitializeWithMemset, T>::initiali
ze(begin, end); | 272 begin, end); |
| 283 } | 273 } |
| 284 | 274 |
| 285 static void move(T* src, T* srcEnd, T* dst) | 275 static void move(T* src, T* srcEnd, T* dst) { |
| 286 { | 276 VectorMover<VectorTraits<T>::canMoveWithMemcpy, T>::move(src, srcEnd, dst); |
| 287 VectorMover<VectorTraits<T>::canMoveWithMemcpy, T>::move(src, srcEnd, ds
t); | 277 } |
| 288 } | 278 |
| 289 | 279 static void moveOverlapping(T* src, T* srcEnd, T* dst) { |
| 290 static void moveOverlapping(T* src, T* srcEnd, T* dst) | 280 VectorMover<VectorTraits<T>::canMoveWithMemcpy, T>::moveOverlapping( |
| 291 { | 281 src, srcEnd, dst); |
| 292 VectorMover<VectorTraits<T>::canMoveWithMemcpy, T>::moveOverlapping(src,
srcEnd, dst); | 282 } |
| 293 } | 283 |
| 294 | 284 static void swap(T* src, T* srcEnd, T* dst) { |
| 295 static void swap(T* src, T* srcEnd, T* dst) | 285 VectorMover<VectorTraits<T>::canMoveWithMemcpy, T>::swap(src, srcEnd, dst); |
| 296 { | 286 } |
| 297 VectorMover<VectorTraits<T>::canMoveWithMemcpy, T>::swap(src, srcEnd, ds
t); | 287 |
| 298 } | 288 static void uninitializedCopy(const T* src, const T* srcEnd, T* dst) { |
| 299 | 289 VectorCopier<VectorTraits<T>::canCopyWithMemcpy, T>::uninitializedCopy( |
| 300 static void uninitializedCopy(const T* src, const T* srcEnd, T* dst) | 290 src, srcEnd, dst); |
| 301 { | 291 } |
| 302 VectorCopier<VectorTraits<T>::canCopyWithMemcpy, T>::uninitializedCopy(s
rc, srcEnd, dst); | 292 |
| 303 } | 293 static void uninitializedFill(T* dst, T* dstEnd, const T& val) { |
| 304 | 294 VectorFiller<VectorTraits<T>::canFillWithMemset, T>::uninitializedFill( |
| 305 static void uninitializedFill(T* dst, T* dstEnd, const T& val) | 295 dst, dstEnd, val); |
| 306 { | 296 } |
| 307 VectorFiller<VectorTraits<T>::canFillWithMemset, T>::uninitializedFill(d
st, dstEnd, val); | 297 |
| 308 } | 298 static bool compare(const T* a, const T* b, size_t size) { |
| 309 | 299 return VectorComparer<VectorTraits<T>::canCompareWithMemcmp, T>::compare( |
| 310 static bool compare(const T* a, const T* b, size_t size) | 300 a, b, size); |
| 311 { | 301 } |
| 312 return VectorComparer<VectorTraits<T>::canCompareWithMemcmp, T>::compare
(a, b, size); | |
| 313 } | |
| 314 }; | 302 }; |
| 315 | 303 |
| 316 template <typename T, bool hasInlineCapacity, typename Allocator> | 304 template <typename T, bool hasInlineCapacity, typename Allocator> |
| 317 class VectorBufferBase { | 305 class VectorBufferBase { |
| 318 WTF_MAKE_NONCOPYABLE(VectorBufferBase); | 306 WTF_MAKE_NONCOPYABLE(VectorBufferBase); |
| 319 DISALLOW_NEW(); | 307 DISALLOW_NEW(); |
| 320 public: | 308 |
| 321 void allocateBuffer(size_t newCapacity) | 309 public: |
| 322 { | 310 void allocateBuffer(size_t newCapacity) { |
| 323 ASSERT(newCapacity); | 311 ASSERT(newCapacity); |
| 324 size_t sizeToAllocate = allocationSize(newCapacity); | 312 size_t sizeToAllocate = allocationSize(newCapacity); |
| 325 if (hasInlineCapacity) | 313 if (hasInlineCapacity) |
| 326 m_buffer = Allocator::template allocateInlineVectorBacking<T>(sizeTo
Allocate); | 314 m_buffer = |
| 327 else | 315 Allocator::template allocateInlineVectorBacking<T>(sizeToAllocate); |
| 328 m_buffer = Allocator::template allocateVectorBacking<T>(sizeToAlloca
te); | 316 else |
| 329 m_capacity = sizeToAllocate / sizeof(T); | 317 m_buffer = Allocator::template allocateVectorBacking<T>(sizeToAllocate); |
| 318 m_capacity = sizeToAllocate / sizeof(T); |
| 319 } |
| 320 |
| 321 void allocateExpandedBuffer(size_t newCapacity) { |
| 322 ASSERT(newCapacity); |
| 323 size_t sizeToAllocate = allocationSize(newCapacity); |
| 324 if (hasInlineCapacity) |
| 325 m_buffer = |
| 326 Allocator::template allocateInlineVectorBacking<T>(sizeToAllocate); |
| 327 else |
| 328 m_buffer = |
| 329 Allocator::template allocateExpandedVectorBacking<T>(sizeToAllocate); |
| 330 m_capacity = sizeToAllocate / sizeof(T); |
| 331 } |
| 332 |
| 333 size_t allocationSize(size_t capacity) const { |
| 334 return Allocator::template quantizedSize<T>(capacity); |
| 335 } |
| 336 |
| 337 T* buffer() { return m_buffer; } |
| 338 const T* buffer() const { return m_buffer; } |
| 339 size_t capacity() const { return m_capacity; } |
| 340 |
| 341 void clearUnusedSlots(T* from, T* to) { |
| 342 // If the vector backing is garbage-collected and needs tracing or |
| 343 // finalizing, we clear out the unused slots so that the visitor or the |
| 344 // finalizer does not cause a problem when visiting the unused slots. |
| 345 VectorUnusedSlotClearer<Allocator::isGarbageCollected && |
| 346 (VectorTraits<T>::needsDestruction || |
| 347 NeedsTracingTrait<VectorTraits<T>>::value), |
| 348 T>::clear(from, to); |
| 349 } |
| 350 |
| 351 void checkUnusedSlots(const T* from, const T* to) { |
| 352 #if ENABLE(ASSERT) && !defined(ANNOTATE_CONTIGUOUS_CONTAINER) |
| 353 VectorUnusedSlotClearer<Allocator::isGarbageCollected && |
| 354 (VectorTraits<T>::needsDestruction || |
| 355 NeedsTracingTrait<VectorTraits<T>>::value), |
| 356 T>::checkCleared(from, to); |
| 357 #endif |
| 358 } |
| 359 |
| 360 protected: |
| 361 VectorBufferBase() : m_buffer(nullptr), m_capacity(0) {} |
| 362 |
| 363 VectorBufferBase(T* buffer, size_t capacity) |
| 364 : m_buffer(buffer), m_capacity(capacity) {} |
| 365 |
| 366 T* m_buffer; |
| 367 unsigned m_capacity; |
| 368 unsigned m_size; |
| 369 }; |
| 370 |
| 371 template <typename T, |
| 372 size_t inlineCapacity, |
| 373 typename Allocator = PartitionAllocator> |
| 374 class VectorBuffer; |
| 375 |
| 376 template <typename T, typename Allocator> |
| 377 class VectorBuffer<T, 0, Allocator> |
| 378 : protected VectorBufferBase<T, false, Allocator> { |
| 379 private: |
| 380 typedef VectorBufferBase<T, false, Allocator> Base; |
| 381 |
| 382 public: |
| 383 VectorBuffer() {} |
| 384 |
| 385 VectorBuffer(size_t capacity) { |
| 386 // Calling malloc(0) might take a lock and may actually do an allocation |
| 387 // on some systems. |
| 388 if (capacity) |
| 389 allocateBuffer(capacity); |
| 390 } |
| 391 |
| 392 void destruct() { |
| 393 deallocateBuffer(m_buffer); |
| 394 m_buffer = nullptr; |
| 395 } |
| 396 |
| 397 void deallocateBuffer(T* bufferToDeallocate) { |
| 398 Allocator::freeVectorBacking(bufferToDeallocate); |
| 399 } |
| 400 |
| 401 bool expandBuffer(size_t newCapacity) { |
| 402 size_t sizeToAllocate = allocationSize(newCapacity); |
| 403 if (Allocator::expandVectorBacking(m_buffer, sizeToAllocate)) { |
| 404 m_capacity = sizeToAllocate / sizeof(T); |
| 405 return true; |
| 330 } | 406 } |
| 331 | 407 return false; |
| 332 void allocateExpandedBuffer(size_t newCapacity) | 408 } |
| 333 { | 409 |
| 334 ASSERT(newCapacity); | 410 inline bool shrinkBuffer(size_t newCapacity) { |
| 335 size_t sizeToAllocate = allocationSize(newCapacity); | 411 ASSERT(newCapacity < capacity()); |
| 336 if (hasInlineCapacity) | 412 size_t sizeToAllocate = allocationSize(newCapacity); |
| 337 m_buffer = Allocator::template allocateInlineVectorBacking<T>(sizeTo
Allocate); | 413 if (Allocator::shrinkVectorBacking(m_buffer, allocationSize(capacity()), |
| 338 else | 414 sizeToAllocate)) { |
| 339 m_buffer = Allocator::template allocateExpandedVectorBacking<T>(size
ToAllocate); | 415 m_capacity = sizeToAllocate / sizeof(T); |
| 340 m_capacity = sizeToAllocate / sizeof(T); | 416 return true; |
| 341 } | 417 } |
| 342 | 418 return false; |
| 343 size_t allocationSize(size_t capacity) const | 419 } |
| 344 { | 420 |
| 345 return Allocator::template quantizedSize<T>(capacity); | 421 void resetBufferPointer() { |
| 422 m_buffer = nullptr; |
| 423 m_capacity = 0; |
| 424 } |
| 425 |
| 426 void swapVectorBuffer(VectorBuffer<T, 0, Allocator>& other) { |
| 427 std::swap(m_buffer, other.m_buffer); |
| 428 std::swap(m_capacity, other.m_capacity); |
| 429 } |
| 430 |
| 431 using Base::allocateBuffer; |
| 432 using Base::allocationSize; |
| 433 |
| 434 using Base::buffer; |
| 435 using Base::capacity; |
| 436 |
| 437 using Base::clearUnusedSlots; |
| 438 using Base::checkUnusedSlots; |
| 439 |
| 440 bool hasOutOfLineBuffer() const { |
| 441 // When inlineCapacity is 0 we have an out of line buffer if we have a |
| 442 // buffer. |
| 443 return buffer(); |
| 444 } |
| 445 |
| 446 protected: |
| 447 using Base::m_size; |
| 448 |
| 449 private: |
| 450 using Base::m_buffer; |
| 451 using Base::m_capacity; |
| 452 }; |
| 453 |
| 454 template <typename T, size_t inlineCapacity, typename Allocator> |
| 455 class VectorBuffer : protected VectorBufferBase<T, true, Allocator> { |
| 456 WTF_MAKE_NONCOPYABLE(VectorBuffer); |
| 457 typedef VectorBufferBase<T, true, Allocator> Base; |
| 458 |
| 459 public: |
| 460 VectorBuffer() : Base(inlineBuffer(), inlineCapacity) {} |
| 461 |
| 462 VectorBuffer(size_t capacity) : Base(inlineBuffer(), inlineCapacity) { |
| 463 if (capacity > inlineCapacity) |
| 464 Base::allocateBuffer(capacity); |
| 465 } |
| 466 |
| 467 void destruct() { |
| 468 deallocateBuffer(m_buffer); |
| 469 m_buffer = nullptr; |
| 470 } |
| 471 |
| 472 NEVER_INLINE void reallyDeallocateBuffer(T* bufferToDeallocate) { |
| 473 Allocator::freeInlineVectorBacking(bufferToDeallocate); |
| 474 } |
| 475 |
| 476 void deallocateBuffer(T* bufferToDeallocate) { |
| 477 if (UNLIKELY(bufferToDeallocate != inlineBuffer())) |
| 478 reallyDeallocateBuffer(bufferToDeallocate); |
| 479 } |
| 480 |
| 481 bool expandBuffer(size_t newCapacity) { |
| 482 ASSERT(newCapacity > inlineCapacity); |
| 483 if (m_buffer == inlineBuffer()) |
| 484 return false; |
| 485 |
| 486 size_t sizeToAllocate = allocationSize(newCapacity); |
| 487 if (Allocator::expandInlineVectorBacking(m_buffer, sizeToAllocate)) { |
| 488 m_capacity = sizeToAllocate / sizeof(T); |
| 489 return true; |
| 346 } | 490 } |
| 347 | 491 return false; |
| 348 T* buffer() { return m_buffer; } | 492 } |
| 349 const T* buffer() const { return m_buffer; } | 493 |
| 350 size_t capacity() const { return m_capacity; } | 494 inline bool shrinkBuffer(size_t newCapacity) { |
| 351 | 495 ASSERT(newCapacity < capacity()); |
| 352 void clearUnusedSlots(T* from, T* to) | 496 if (newCapacity <= inlineCapacity) { |
| 353 { | 497 // We need to switch to inlineBuffer. Vector::shrinkCapacity will |
| 354 // If the vector backing is garbage-collected and needs tracing or | 498 // handle it. |
| 355 // finalizing, we clear out the unused slots so that the visitor or the | 499 return false; |
| 356 // finalizer does not cause a problem when visiting the unused slots. | |
| 357 VectorUnusedSlotClearer<Allocator::isGarbageCollected && (VectorTraits<T
>::needsDestruction || NeedsTracingTrait<VectorTraits<T>>::value), T>::clear(fro
m, to); | |
| 358 } | 500 } |
| 359 | 501 ASSERT(m_buffer != inlineBuffer()); |
| 360 void checkUnusedSlots(const T* from, const T* to) | 502 size_t newSize = allocationSize(newCapacity); |
| 361 { | 503 if (!Allocator::shrinkInlineVectorBacking( |
| 362 #if ENABLE(ASSERT) && !defined(ANNOTATE_CONTIGUOUS_CONTAINER) | 504 m_buffer, allocationSize(capacity()), newSize)) |
| 363 VectorUnusedSlotClearer<Allocator::isGarbageCollected && (VectorTraits<T
>::needsDestruction || NeedsTracingTrait<VectorTraits<T>>::value), T>::checkClea
red(from, to); | 505 return false; |
| 506 m_capacity = newSize / sizeof(T); |
| 507 return true; |
| 508 } |
| 509 |
| 510 void resetBufferPointer() { |
| 511 m_buffer = inlineBuffer(); |
| 512 m_capacity = inlineCapacity; |
| 513 } |
| 514 |
| 515 void allocateBuffer(size_t newCapacity) { |
| 516 // FIXME: This should ASSERT(!m_buffer) to catch misuse/leaks. |
| 517 if (newCapacity > inlineCapacity) |
| 518 Base::allocateBuffer(newCapacity); |
| 519 else |
| 520 resetBufferPointer(); |
| 521 } |
| 522 |
| 523 void allocateExpandedBuffer(size_t newCapacity) { |
| 524 if (newCapacity > inlineCapacity) |
| 525 Base::allocateExpandedBuffer(newCapacity); |
| 526 else |
| 527 resetBufferPointer(); |
| 528 } |
| 529 |
| 530 size_t allocationSize(size_t capacity) const { |
| 531 if (capacity <= inlineCapacity) |
| 532 return m_inlineBufferSize; |
| 533 return Base::allocationSize(capacity); |
| 534 } |
| 535 |
| 536 void swapVectorBuffer(VectorBuffer<T, inlineCapacity, Allocator>& other) { |
| 537 typedef VectorTypeOperations<T> TypeOperations; |
| 538 |
| 539 if (buffer() == inlineBuffer() && other.buffer() == other.inlineBuffer()) { |
| 540 ASSERT(m_capacity == other.m_capacity); |
| 541 if (m_size > other.m_size) { |
| 542 ANNOTATE_CHANGE_SIZE(other.inlineBuffer(), inlineCapacity, other.m_size, |
| 543 m_size); |
| 544 TypeOperations::swap(inlineBuffer(), inlineBuffer() + other.m_size, |
| 545 other.inlineBuffer()); |
| 546 TypeOperations::move(inlineBuffer() + other.m_size, |
| 547 inlineBuffer() + m_size, |
| 548 other.inlineBuffer() + other.m_size); |
| 549 Base::clearUnusedSlots(inlineBuffer() + other.m_size, |
| 550 inlineBuffer() + m_size); |
| 551 ANNOTATE_CHANGE_SIZE(inlineBuffer(), inlineCapacity, m_size, |
| 552 other.m_size); |
| 553 } else { |
| 554 ANNOTATE_CHANGE_SIZE(inlineBuffer(), inlineCapacity, m_size, |
| 555 other.m_size); |
| 556 TypeOperations::swap(inlineBuffer(), inlineBuffer() + m_size, |
| 557 other.inlineBuffer()); |
| 558 TypeOperations::move(other.inlineBuffer() + m_size, |
| 559 other.inlineBuffer() + other.m_size, |
| 560 inlineBuffer() + m_size); |
| 561 Base::clearUnusedSlots(other.inlineBuffer() + m_size, |
| 562 other.inlineBuffer() + other.m_size); |
| 563 ANNOTATE_CHANGE_SIZE(other.inlineBuffer(), inlineCapacity, other.m_size, |
| 564 m_size); |
| 565 } |
| 566 } else if (buffer() == inlineBuffer()) { |
| 567 ANNOTATE_DELETE_BUFFER(m_buffer, inlineCapacity, m_size); |
| 568 m_buffer = other.m_buffer; |
| 569 other.m_buffer = other.inlineBuffer(); |
| 570 ANNOTATE_NEW_BUFFER(other.m_buffer, inlineCapacity, m_size); |
| 571 TypeOperations::move(inlineBuffer(), inlineBuffer() + m_size, |
| 572 other.inlineBuffer()); |
| 573 Base::clearUnusedSlots(inlineBuffer(), inlineBuffer() + m_size); |
| 574 std::swap(m_capacity, other.m_capacity); |
| 575 } else if (other.buffer() == other.inlineBuffer()) { |
| 576 ANNOTATE_DELETE_BUFFER(other.m_buffer, inlineCapacity, other.m_size); |
| 577 other.m_buffer = m_buffer; |
| 578 m_buffer = inlineBuffer(); |
| 579 ANNOTATE_NEW_BUFFER(m_buffer, inlineCapacity, other.m_size); |
| 580 TypeOperations::move(other.inlineBuffer(), |
| 581 other.inlineBuffer() + other.m_size, inlineBuffer()); |
| 582 Base::clearUnusedSlots(other.inlineBuffer(), |
| 583 other.inlineBuffer() + other.m_size); |
| 584 std::swap(m_capacity, other.m_capacity); |
| 585 } else { |
| 586 std::swap(m_buffer, other.m_buffer); |
| 587 std::swap(m_capacity, other.m_capacity); |
| 588 } |
| 589 } |
| 590 |
| 591 using Base::buffer; |
| 592 using Base::capacity; |
| 593 |
| 594 bool hasOutOfLineBuffer() const { |
| 595 return buffer() && buffer() != inlineBuffer(); |
| 596 } |
| 597 |
| 598 protected: |
| 599 using Base::m_size; |
| 600 |
| 601 private: |
| 602 using Base::m_buffer; |
| 603 using Base::m_capacity; |
| 604 |
| 605 static const size_t m_inlineBufferSize = inlineCapacity * sizeof(T); |
| 606 T* inlineBuffer() { return reinterpret_cast_ptr<T*>(m_inlineBuffer.buffer); } |
| 607 const T* inlineBuffer() const { |
| 608 return reinterpret_cast_ptr<const T*>(m_inlineBuffer.buffer); |
| 609 } |
| 610 |
| 611 AlignedBuffer<m_inlineBufferSize, WTF_ALIGN_OF(T)> m_inlineBuffer; |
| 612 template <typename U, size_t inlineBuffer, typename V> |
| 613 friend class Deque; |
| 614 }; |
| 615 |
| 616 template < |
| 617 typename T, |
| 618 size_t inlineCapacity = 0, |
| 619 typename Allocator = |
| 620 PartitionAllocator> // Heap-allocated vectors with no inlineCapacity ne
ver need a destructor. |
| 621 class Vector |
| 622 : private VectorBuffer<T, INLINE_CAPACITY, Allocator>, |
| 623 public ConditionalDestructor<Vector<T, INLINE_CAPACITY, Allocator>, |
| 624 (INLINE_CAPACITY == 0) && |
| 625 Allocator::isGarbageCollected> { |
| 626 WTF_USE_ALLOCATOR(Vector, Allocator); |
| 627 typedef VectorBuffer<T, INLINE_CAPACITY, Allocator> Base; |
| 628 typedef VectorTypeOperations<T> TypeOperations; |
| 629 |
| 630 public: |
| 631 typedef T ValueType; |
| 632 typedef T value_type; |
| 633 |
| 634 typedef T* iterator; |
| 635 typedef const T* const_iterator; |
| 636 typedef std::reverse_iterator<iterator> reverse_iterator; |
| 637 typedef std::reverse_iterator<const_iterator> const_reverse_iterator; |
| 638 |
| 639 Vector() { |
| 640 static_assert(!std::is_polymorphic<T>::value || |
| 641 !VectorTraits<T>::canInitializeWithMemset, |
| 642 "Cannot initialize with memset if there is a vtable"); |
| 643 #if ENABLE(OILPAN) |
| 644 static_assert(Allocator::isGarbageCollected || |
| 645 !AllowsOnlyPlacementNew<T>::value || |
| 646 !NeedsTracing<T>::value, |
| 647 "Cannot put DISALLOW_NEW_EXCEPT_PLACEMENT_NEW objects that " |
| 648 "have trace methods into an off-heap Vector"); |
| 364 #endif | 649 #endif |
| 650 static_assert(Allocator::isGarbageCollected || |
| 651 !IsPointerToGarbageCollectedType<T>::value, |
| 652 "Cannot put raw pointers to garbage-collected classes into " |
| 653 "an off-heap Vector. Use HeapVector<Member<T>> instead."); |
| 654 |
| 655 ANNOTATE_NEW_BUFFER(begin(), capacity(), 0); |
| 656 m_size = 0; |
| 657 } |
| 658 |
| 659 explicit Vector(size_t size) : Base(size) { |
| 660 static_assert(!std::is_polymorphic<T>::value || |
| 661 !VectorTraits<T>::canInitializeWithMemset, |
| 662 "Cannot initialize with memset if there is a vtable"); |
| 663 #if ENABLE(OILPAN) |
| 664 static_assert(Allocator::isGarbageCollected || |
| 665 !AllowsOnlyPlacementNew<T>::value || |
| 666 !NeedsTracing<T>::value, |
| 667 "Cannot put DISALLOW_NEW_EXCEPT_PLACEMENT_NEW objects that " |
| 668 "have trace methods into an off-heap Vector"); |
| 669 #endif |
| 670 static_assert(Allocator::isGarbageCollected || |
| 671 !IsPointerToGarbageCollectedType<T>::value, |
| 672 "Cannot put raw pointers to garbage-collected classes into " |
| 673 "an off-heap Vector. Use HeapVector<Member<T>> instead."); |
| 674 |
| 675 ANNOTATE_NEW_BUFFER(begin(), capacity(), size); |
| 676 m_size = size; |
| 677 TypeOperations::initialize(begin(), end()); |
| 678 } |
| 679 |
| 680 // Off-GC-heap vectors: Destructor should be called. |
| 681 // On-GC-heap vectors: Destructor should be called for inline buffers (if |
| 682 // any) but destructor shouldn't be called for vector backing since it is |
| 683 // managed by the traced GC heap. |
| 684 void finalize() { |
| 685 if (!INLINE_CAPACITY) { |
| 686 if (LIKELY(!Base::buffer())) |
| 687 return; |
| 365 } | 688 } |
| 366 | 689 ANNOTATE_DELETE_BUFFER(begin(), capacity(), m_size); |
| 367 protected: | 690 if (LIKELY(m_size) && |
| 368 VectorBufferBase() | 691 !(Allocator::isGarbageCollected && this->hasOutOfLineBuffer())) { |
| 369 : m_buffer(nullptr) | 692 TypeOperations::destruct(begin(), end()); |
| 370 , m_capacity(0) | 693 m_size = 0; // Partial protection against use-after-free. |
| 371 { | |
| 372 } | 694 } |
| 373 | 695 |
| 374 VectorBufferBase(T* buffer, size_t capacity) | 696 Base::destruct(); |
| 375 : m_buffer(buffer) | 697 } |
| 376 , m_capacity(capacity) | 698 |
| 377 { | 699 void finalizeGarbageCollectedObject() { finalize(); } |
| 378 } | 700 |
| 379 | 701 Vector(const Vector&); |
| 380 T* m_buffer; | 702 template <size_t otherCapacity> |
| 381 unsigned m_capacity; | 703 explicit Vector(const Vector<T, otherCapacity, Allocator>&); |
| 382 unsigned m_size; | 704 |
| 705 Vector& operator=(const Vector&); |
| 706 template <size_t otherCapacity> |
| 707 Vector& operator=(const Vector<T, otherCapacity, Allocator>&); |
| 708 |
| 709 Vector(Vector&&); |
| 710 Vector& operator=(Vector&&); |
| 711 |
| 712 size_t size() const { return m_size; } |
| 713 size_t capacity() const { return Base::capacity(); } |
| 714 bool isEmpty() const { return !size(); } |
| 715 |
| 716 T& at(size_t i) { |
| 717 RELEASE_ASSERT(i < size()); |
| 718 return Base::buffer()[i]; |
| 719 } |
| 720 const T& at(size_t i) const { |
| 721 RELEASE_ASSERT(i < size()); |
| 722 return Base::buffer()[i]; |
| 723 } |
| 724 |
| 725 T& operator[](size_t i) { return at(i); } |
| 726 const T& operator[](size_t i) const { return at(i); } |
| 727 |
| 728 T* data() { return Base::buffer(); } |
| 729 const T* data() const { return Base::buffer(); } |
| 730 |
| 731 iterator begin() { return data(); } |
| 732 iterator end() { return begin() + m_size; } |
| 733 const_iterator begin() const { return data(); } |
| 734 const_iterator end() const { return begin() + m_size; } |
| 735 |
| 736 reverse_iterator rbegin() { return reverse_iterator(end()); } |
| 737 reverse_iterator rend() { return reverse_iterator(begin()); } |
| 738 const_reverse_iterator rbegin() const { |
| 739 return const_reverse_iterator(end()); |
| 740 } |
| 741 const_reverse_iterator rend() const { |
| 742 return const_reverse_iterator(begin()); |
| 743 } |
| 744 |
| 745 T& first() { return at(0); } |
| 746 const T& first() const { return at(0); } |
| 747 T& last() { return at(size() - 1); } |
| 748 const T& last() const { return at(size() - 1); } |
| 749 |
| 750 template <typename U> |
| 751 bool contains(const U&) const; |
| 752 template <typename U> |
| 753 size_t find(const U&) const; |
| 754 template <typename U> |
| 755 size_t reverseFind(const U&) const; |
| 756 |
| 757 void shrink(size_t); |
| 758 void grow(size_t); |
| 759 void resize(size_t); |
| 760 void reserveCapacity(size_t newCapacity); |
| 761 void reserveInitialCapacity(size_t initialCapacity); |
| 762 void shrinkToFit() { shrinkCapacity(size()); } |
| 763 void shrinkToReasonableCapacity() { |
| 764 if (size() * 2 < capacity()) |
| 765 shrinkCapacity(size() + size() / 4 + 1); |
| 766 } |
| 767 |
| 768 void clear() { shrinkCapacity(0); } |
| 769 |
| 770 template <typename U> |
| 771 void append(const U*, size_t); |
| 772 template <typename U> |
| 773 void append(U&&); |
| 774 template <typename U> |
| 775 void uncheckedAppend(U&& val); |
| 776 template <typename U, size_t otherCapacity, typename V> |
| 777 void appendVector(const Vector<U, otherCapacity, V>&); |
| 778 |
| 779 template <typename U> |
| 780 void insert(size_t position, const U*, size_t); |
| 781 template <typename U> |
| 782 void insert(size_t position, U&&); |
| 783 template <typename U, size_t c, typename V> |
| 784 void insert(size_t position, const Vector<U, c, V>&); |
| 785 |
| 786 template <typename U> |
| 787 void prepend(const U*, size_t); |
| 788 template <typename U> |
| 789 void prepend(U&&); |
| 790 template <typename U, size_t c, typename V> |
| 791 void prepend(const Vector<U, c, V>&); |
| 792 |
| 793 void remove(size_t position); |
| 794 void remove(size_t position, size_t length); |
| 795 |
| 796 void removeLast() { |
| 797 ASSERT(!isEmpty()); |
| 798 shrink(size() - 1); |
| 799 } |
| 800 |
| 801 Vector(size_t size, const T& val) : Base(size) { |
| 802 ANNOTATE_NEW_BUFFER(begin(), capacity(), size); |
| 803 m_size = size; |
| 804 TypeOperations::uninitializedFill(begin(), end(), val); |
| 805 } |
| 806 |
| 807 void fill(const T&, size_t); |
| 808 void fill(const T& val) { fill(val, size()); } |
| 809 |
| 810 template <typename Iterator> |
| 811 void appendRange(Iterator start, Iterator end); |
| 812 |
| 813 void swap(Vector& other) { |
| 814 Base::swapVectorBuffer(other); |
| 815 std::swap(m_size, other.m_size); |
| 816 } |
| 817 |
| 818 void reverse(); |
| 819 |
| 820 template <typename VisitorDispatcher> |
| 821 void trace(VisitorDispatcher); |
| 822 |
| 823 protected: |
| 824 using Base::checkUnusedSlots; |
| 825 using Base::clearUnusedSlots; |
| 826 |
| 827 private: |
| 828 void expandCapacity(size_t newMinCapacity); |
| 829 T* expandCapacity(size_t newMinCapacity, T*); |
| 830 T* expandCapacity(size_t newMinCapacity, const T* data) { |
| 831 return expandCapacity(newMinCapacity, const_cast<T*>(data)); |
| 832 } |
| 833 |
| 834 template <typename U> |
| 835 U* expandCapacity(size_t newMinCapacity, U*); |
| 836 void shrinkCapacity(size_t newCapacity); |
| 837 template <typename U> |
| 838 void appendSlowCase(U&&); |
| 839 |
| 840 using Base::m_size; |
| 841 using Base::buffer; |
| 842 using Base::capacity; |
| 843 using Base::swapVectorBuffer; |
| 844 using Base::allocateBuffer; |
| 845 using Base::allocationSize; |
| 383 }; | 846 }; |
| 384 | 847 |
| 385 template <typename T, size_t inlineCapacity, typename Allocator = PartitionAlloc
ator> | |
| 386 class VectorBuffer; | |
| 387 | |
| 388 template <typename T, typename Allocator> | |
| 389 class VectorBuffer<T, 0, Allocator> : protected VectorBufferBase<T, false, Alloc
ator> { | |
| 390 private: | |
| 391 typedef VectorBufferBase<T, false, Allocator> Base; | |
| 392 public: | |
| 393 VectorBuffer() | |
| 394 { | |
| 395 } | |
| 396 | |
| 397 VectorBuffer(size_t capacity) | |
| 398 { | |
| 399 // Calling malloc(0) might take a lock and may actually do an allocation | |
| 400 // on some systems. | |
| 401 if (capacity) | |
| 402 allocateBuffer(capacity); | |
| 403 } | |
| 404 | |
| 405 void destruct() | |
| 406 { | |
| 407 deallocateBuffer(m_buffer); | |
| 408 m_buffer = nullptr; | |
| 409 } | |
| 410 | |
| 411 void deallocateBuffer(T* bufferToDeallocate) | |
| 412 { | |
| 413 Allocator::freeVectorBacking(bufferToDeallocate); | |
| 414 } | |
| 415 | |
| 416 bool expandBuffer(size_t newCapacity) | |
| 417 { | |
| 418 size_t sizeToAllocate = allocationSize(newCapacity); | |
| 419 if (Allocator::expandVectorBacking(m_buffer, sizeToAllocate)) { | |
| 420 m_capacity = sizeToAllocate / sizeof(T); | |
| 421 return true; | |
| 422 } | |
| 423 return false; | |
| 424 } | |
| 425 | |
| 426 inline bool shrinkBuffer(size_t newCapacity) | |
| 427 { | |
| 428 ASSERT(newCapacity < capacity()); | |
| 429 size_t sizeToAllocate = allocationSize(newCapacity); | |
| 430 if (Allocator::shrinkVectorBacking(m_buffer, allocationSize(capacity()),
sizeToAllocate)) { | |
| 431 m_capacity = sizeToAllocate / sizeof(T); | |
| 432 return true; | |
| 433 } | |
| 434 return false; | |
| 435 } | |
| 436 | |
| 437 void resetBufferPointer() | |
| 438 { | |
| 439 m_buffer = nullptr; | |
| 440 m_capacity = 0; | |
| 441 } | |
| 442 | |
| 443 void swapVectorBuffer(VectorBuffer<T, 0, Allocator>& other) | |
| 444 { | |
| 445 std::swap(m_buffer, other.m_buffer); | |
| 446 std::swap(m_capacity, other.m_capacity); | |
| 447 } | |
| 448 | |
| 449 using Base::allocateBuffer; | |
| 450 using Base::allocationSize; | |
| 451 | |
| 452 using Base::buffer; | |
| 453 using Base::capacity; | |
| 454 | |
| 455 using Base::clearUnusedSlots; | |
| 456 using Base::checkUnusedSlots; | |
| 457 | |
| 458 bool hasOutOfLineBuffer() const | |
| 459 { | |
| 460 // When inlineCapacity is 0 we have an out of line buffer if we have a | |
| 461 // buffer. | |
| 462 return buffer(); | |
| 463 } | |
| 464 | |
| 465 protected: | |
| 466 using Base::m_size; | |
| 467 | |
| 468 private: | |
| 469 using Base::m_buffer; | |
| 470 using Base::m_capacity; | |
| 471 }; | |
| 472 | |
| 473 template <typename T, size_t inlineCapacity, typename Allocator> | |
| 474 class VectorBuffer : protected VectorBufferBase<T, true, Allocator> { | |
| 475 WTF_MAKE_NONCOPYABLE(VectorBuffer); | |
| 476 typedef VectorBufferBase<T, true, Allocator> Base; | |
| 477 | |
| 478 public: | |
| 479 VectorBuffer() | |
| 480 : Base(inlineBuffer(), inlineCapacity) | |
| 481 { | |
| 482 } | |
| 483 | |
| 484 VectorBuffer(size_t capacity) | |
| 485 : Base(inlineBuffer(), inlineCapacity) | |
| 486 { | |
| 487 if (capacity > inlineCapacity) | |
| 488 Base::allocateBuffer(capacity); | |
| 489 } | |
| 490 | |
| 491 void destruct() | |
| 492 { | |
| 493 deallocateBuffer(m_buffer); | |
| 494 m_buffer = nullptr; | |
| 495 } | |
| 496 | |
| 497 NEVER_INLINE void reallyDeallocateBuffer(T* bufferToDeallocate) | |
| 498 { | |
| 499 Allocator::freeInlineVectorBacking(bufferToDeallocate); | |
| 500 } | |
| 501 | |
| 502 void deallocateBuffer(T* bufferToDeallocate) | |
| 503 { | |
| 504 if (UNLIKELY(bufferToDeallocate != inlineBuffer())) | |
| 505 reallyDeallocateBuffer(bufferToDeallocate); | |
| 506 } | |
| 507 | |
| 508 bool expandBuffer(size_t newCapacity) | |
| 509 { | |
| 510 ASSERT(newCapacity > inlineCapacity); | |
| 511 if (m_buffer == inlineBuffer()) | |
| 512 return false; | |
| 513 | |
| 514 size_t sizeToAllocate = allocationSize(newCapacity); | |
| 515 if (Allocator::expandInlineVectorBacking(m_buffer, sizeToAllocate)) { | |
| 516 m_capacity = sizeToAllocate / sizeof(T); | |
| 517 return true; | |
| 518 } | |
| 519 return false; | |
| 520 } | |
| 521 | |
| 522 inline bool shrinkBuffer(size_t newCapacity) | |
| 523 { | |
| 524 ASSERT(newCapacity < capacity()); | |
| 525 if (newCapacity <= inlineCapacity) { | |
| 526 // We need to switch to inlineBuffer. Vector::shrinkCapacity will | |
| 527 // handle it. | |
| 528 return false; | |
| 529 } | |
| 530 ASSERT(m_buffer != inlineBuffer()); | |
| 531 size_t newSize = allocationSize(newCapacity); | |
| 532 if (!Allocator::shrinkInlineVectorBacking(m_buffer, allocationSize(capac
ity()), newSize)) | |
| 533 return false; | |
| 534 m_capacity = newSize / sizeof(T); | |
| 535 return true; | |
| 536 } | |
| 537 | |
| 538 void resetBufferPointer() | |
| 539 { | |
| 540 m_buffer = inlineBuffer(); | |
| 541 m_capacity = inlineCapacity; | |
| 542 } | |
| 543 | |
| 544 void allocateBuffer(size_t newCapacity) | |
| 545 { | |
| 546 // FIXME: This should ASSERT(!m_buffer) to catch misuse/leaks. | |
| 547 if (newCapacity > inlineCapacity) | |
| 548 Base::allocateBuffer(newCapacity); | |
| 549 else | |
| 550 resetBufferPointer(); | |
| 551 } | |
| 552 | |
| 553 void allocateExpandedBuffer(size_t newCapacity) | |
| 554 { | |
| 555 if (newCapacity > inlineCapacity) | |
| 556 Base::allocateExpandedBuffer(newCapacity); | |
| 557 else | |
| 558 resetBufferPointer(); | |
| 559 } | |
| 560 | |
| 561 size_t allocationSize(size_t capacity) const | |
| 562 { | |
| 563 if (capacity <= inlineCapacity) | |
| 564 return m_inlineBufferSize; | |
| 565 return Base::allocationSize(capacity); | |
| 566 } | |
| 567 | |
| 568 void swapVectorBuffer(VectorBuffer<T, inlineCapacity, Allocator>& other) | |
| 569 { | |
| 570 typedef VectorTypeOperations<T> TypeOperations; | |
| 571 | |
| 572 if (buffer() == inlineBuffer() && other.buffer() == other.inlineBuffer()
) { | |
| 573 ASSERT(m_capacity == other.m_capacity); | |
| 574 if (m_size > other.m_size) { | |
| 575 ANNOTATE_CHANGE_SIZE(other.inlineBuffer(), inlineCapacity, other
.m_size, m_size); | |
| 576 TypeOperations::swap(inlineBuffer(), inlineBuffer() + other.m_si
ze, other.inlineBuffer()); | |
| 577 TypeOperations::move(inlineBuffer() + other.m_size, inlineBuffer
() + m_size, other.inlineBuffer() + other.m_size); | |
| 578 Base::clearUnusedSlots(inlineBuffer() + other.m_size, inlineBuff
er() + m_size); | |
| 579 ANNOTATE_CHANGE_SIZE(inlineBuffer(), inlineCapacity, m_size, oth
er.m_size); | |
| 580 } else { | |
| 581 ANNOTATE_CHANGE_SIZE(inlineBuffer(), inlineCapacity, m_size, oth
er.m_size); | |
| 582 TypeOperations::swap(inlineBuffer(), inlineBuffer() + m_size, ot
her.inlineBuffer()); | |
| 583 TypeOperations::move(other.inlineBuffer() + m_size, other.inline
Buffer() + other.m_size, inlineBuffer() + m_size); | |
| 584 Base::clearUnusedSlots(other.inlineBuffer() + m_size, other.inli
neBuffer() + other.m_size); | |
| 585 ANNOTATE_CHANGE_SIZE(other.inlineBuffer(), inlineCapacity, other
.m_size, m_size); | |
| 586 } | |
| 587 } else if (buffer() == inlineBuffer()) { | |
| 588 ANNOTATE_DELETE_BUFFER(m_buffer, inlineCapacity, m_size); | |
| 589 m_buffer = other.m_buffer; | |
| 590 other.m_buffer = other.inlineBuffer(); | |
| 591 ANNOTATE_NEW_BUFFER(other.m_buffer, inlineCapacity, m_size); | |
| 592 TypeOperations::move(inlineBuffer(), inlineBuffer() + m_size, other.
inlineBuffer()); | |
| 593 Base::clearUnusedSlots(inlineBuffer(), inlineBuffer() + m_size); | |
| 594 std::swap(m_capacity, other.m_capacity); | |
| 595 } else if (other.buffer() == other.inlineBuffer()) { | |
| 596 ANNOTATE_DELETE_BUFFER(other.m_buffer, inlineCapacity, other.m_size)
; | |
| 597 other.m_buffer = m_buffer; | |
| 598 m_buffer = inlineBuffer(); | |
| 599 ANNOTATE_NEW_BUFFER(m_buffer, inlineCapacity, other.m_size); | |
| 600 TypeOperations::move(other.inlineBuffer(), other.inlineBuffer() + ot
her.m_size, inlineBuffer()); | |
| 601 Base::clearUnusedSlots(other.inlineBuffer(), other.inlineBuffer() +
other.m_size); | |
| 602 std::swap(m_capacity, other.m_capacity); | |
| 603 } else { | |
| 604 std::swap(m_buffer, other.m_buffer); | |
| 605 std::swap(m_capacity, other.m_capacity); | |
| 606 } | |
| 607 } | |
| 608 | |
| 609 using Base::buffer; | |
| 610 using Base::capacity; | |
| 611 | |
| 612 bool hasOutOfLineBuffer() const | |
| 613 { | |
| 614 return buffer() && buffer() != inlineBuffer(); | |
| 615 } | |
| 616 | |
| 617 protected: | |
| 618 using Base::m_size; | |
| 619 | |
| 620 private: | |
| 621 using Base::m_buffer; | |
| 622 using Base::m_capacity; | |
| 623 | |
| 624 static const size_t m_inlineBufferSize = inlineCapacity * sizeof(T); | |
| 625 T* inlineBuffer() { return reinterpret_cast_ptr<T*>(m_inlineBuffer.buffer);
} | |
| 626 const T* inlineBuffer() const { return reinterpret_cast_ptr<const T*>(m_inli
neBuffer.buffer); } | |
| 627 | |
| 628 AlignedBuffer<m_inlineBufferSize, WTF_ALIGN_OF(T)> m_inlineBuffer; | |
| 629 template <typename U, size_t inlineBuffer, typename V> | |
| 630 friend class Deque; | |
| 631 }; | |
| 632 | |
| 633 template <typename T, size_t inlineCapacity = 0, typename Allocator = PartitionA
llocator> // Heap-allocated vectors with no inlineCapacity never need a destruct
or. | |
| 634 class Vector : private VectorBuffer<T, INLINE_CAPACITY, Allocator>, public Condi
tionalDestructor<Vector<T, INLINE_CAPACITY, Allocator>, (INLINE_CAPACITY == 0) &
& Allocator::isGarbageCollected> { | |
| 635 WTF_USE_ALLOCATOR(Vector, Allocator); | |
| 636 typedef VectorBuffer<T, INLINE_CAPACITY, Allocator> Base; | |
| 637 typedef VectorTypeOperations<T> TypeOperations; | |
| 638 | |
| 639 public: | |
| 640 typedef T ValueType; | |
| 641 typedef T value_type; | |
| 642 | |
| 643 typedef T* iterator; | |
| 644 typedef const T* const_iterator; | |
| 645 typedef std::reverse_iterator<iterator> reverse_iterator; | |
| 646 typedef std::reverse_iterator<const_iterator> const_reverse_iterator; | |
| 647 | |
| 648 Vector() | |
| 649 { | |
| 650 static_assert(!std::is_polymorphic<T>::value || !VectorTraits<T>::canIni
tializeWithMemset, "Cannot initialize with memset if there is a vtable"); | |
| 651 #if ENABLE(OILPAN) | |
| 652 static_assert(Allocator::isGarbageCollected || !AllowsOnlyPlacementNew<T
>::value || !NeedsTracing<T>::value, "Cannot put DISALLOW_NEW_EXCEPT_PLACEMENT_N
EW objects that have trace methods into an off-heap Vector"); | |
| 653 #endif | |
| 654 static_assert(Allocator::isGarbageCollected || !IsPointerToGarbageCollec
tedType<T>::value, "Cannot put raw pointers to garbage-collected classes into an
off-heap Vector. Use HeapVector<Member<T>> instead."); | |
| 655 | |
| 656 ANNOTATE_NEW_BUFFER(begin(), capacity(), 0); | |
| 657 m_size = 0; | |
| 658 } | |
| 659 | |
| 660 explicit Vector(size_t size) | |
| 661 : Base(size) | |
| 662 { | |
| 663 static_assert(!std::is_polymorphic<T>::value || !VectorTraits<T>::canIni
tializeWithMemset, "Cannot initialize with memset if there is a vtable"); | |
| 664 #if ENABLE(OILPAN) | |
| 665 static_assert(Allocator::isGarbageCollected || !AllowsOnlyPlacementNew<T
>::value || !NeedsTracing<T>::value, "Cannot put DISALLOW_NEW_EXCEPT_PLACEMENT_N
EW objects that have trace methods into an off-heap Vector"); | |
| 666 #endif | |
| 667 static_assert(Allocator::isGarbageCollected || !IsPointerToGarbageCollec
tedType<T>::value, "Cannot put raw pointers to garbage-collected classes into an
off-heap Vector. Use HeapVector<Member<T>> instead."); | |
| 668 | |
| 669 ANNOTATE_NEW_BUFFER(begin(), capacity(), size); | |
| 670 m_size = size; | |
| 671 TypeOperations::initialize(begin(), end()); | |
| 672 } | |
| 673 | |
| 674 // Off-GC-heap vectors: Destructor should be called. | |
| 675 // On-GC-heap vectors: Destructor should be called for inline buffers (if | |
| 676 // any) but destructor shouldn't be called for vector backing since it is | |
| 677 // managed by the traced GC heap. | |
| 678 void finalize() | |
| 679 { | |
| 680 if (!INLINE_CAPACITY) { | |
| 681 if (LIKELY(!Base::buffer())) | |
| 682 return; | |
| 683 } | |
| 684 ANNOTATE_DELETE_BUFFER(begin(), capacity(), m_size); | |
| 685 if (LIKELY(m_size) && !(Allocator::isGarbageCollected && this->hasOutOfL
ineBuffer())) { | |
| 686 TypeOperations::destruct(begin(), end()); | |
| 687 m_size = 0; // Partial protection against use-after-free. | |
| 688 } | |
| 689 | |
| 690 Base::destruct(); | |
| 691 } | |
| 692 | |
| 693 void finalizeGarbageCollectedObject() | |
| 694 { | |
| 695 finalize(); | |
| 696 } | |
| 697 | |
| 698 Vector(const Vector&); | |
| 699 template <size_t otherCapacity> | |
| 700 explicit Vector(const Vector<T, otherCapacity, Allocator>&); | |
| 701 | |
| 702 Vector& operator=(const Vector&); | |
| 703 template <size_t otherCapacity> | |
| 704 Vector& operator=(const Vector<T, otherCapacity, Allocator>&); | |
| 705 | |
| 706 Vector(Vector&&); | |
| 707 Vector& operator=(Vector&&); | |
| 708 | |
| 709 size_t size() const { return m_size; } | |
| 710 size_t capacity() const { return Base::capacity(); } | |
| 711 bool isEmpty() const { return !size(); } | |
| 712 | |
| 713 T& at(size_t i) | |
| 714 { | |
| 715 RELEASE_ASSERT(i < size()); | |
| 716 return Base::buffer()[i]; | |
| 717 } | |
| 718 const T& at(size_t i) const | |
| 719 { | |
| 720 RELEASE_ASSERT(i < size()); | |
| 721 return Base::buffer()[i]; | |
| 722 } | |
| 723 | |
| 724 T& operator[](size_t i) { return at(i); } | |
| 725 const T& operator[](size_t i) const { return at(i); } | |
| 726 | |
| 727 T* data() { return Base::buffer(); } | |
| 728 const T* data() const { return Base::buffer(); } | |
| 729 | |
| 730 iterator begin() { return data(); } | |
| 731 iterator end() { return begin() + m_size; } | |
| 732 const_iterator begin() const { return data(); } | |
| 733 const_iterator end() const { return begin() + m_size; } | |
| 734 | |
| 735 reverse_iterator rbegin() { return reverse_iterator(end()); } | |
| 736 reverse_iterator rend() { return reverse_iterator(begin()); } | |
| 737 const_reverse_iterator rbegin() const { return const_reverse_iterator(end())
; } | |
| 738 const_reverse_iterator rend() const { return const_reverse_iterator(begin())
; } | |
| 739 | |
| 740 T& first() { return at(0); } | |
| 741 const T& first() const { return at(0); } | |
| 742 T& last() { return at(size() - 1); } | |
| 743 const T& last() const { return at(size() - 1); } | |
| 744 | |
| 745 template <typename U> bool contains(const U&) const; | |
| 746 template <typename U> size_t find(const U&) const; | |
| 747 template <typename U> size_t reverseFind(const U&) const; | |
| 748 | |
| 749 void shrink(size_t); | |
| 750 void grow(size_t); | |
| 751 void resize(size_t); | |
| 752 void reserveCapacity(size_t newCapacity); | |
| 753 void reserveInitialCapacity(size_t initialCapacity); | |
| 754 void shrinkToFit() { shrinkCapacity(size()); } | |
| 755 void shrinkToReasonableCapacity() | |
| 756 { | |
| 757 if (size() * 2 < capacity()) | |
| 758 shrinkCapacity(size() + size() / 4 + 1); | |
| 759 } | |
| 760 | |
| 761 void clear() { shrinkCapacity(0); } | |
| 762 | |
| 763 template <typename U> void append(const U*, size_t); | |
| 764 template <typename U> void append(U&&); | |
| 765 template <typename U> void uncheckedAppend(U&& val); | |
| 766 template <typename U, size_t otherCapacity, typename V> void appendVector(co
nst Vector<U, otherCapacity, V>&); | |
| 767 | |
| 768 template <typename U> void insert(size_t position, const U*, size_t); | |
| 769 template <typename U> void insert(size_t position, U&&); | |
| 770 template <typename U, size_t c, typename V> void insert(size_t position, con
st Vector<U, c, V>&); | |
| 771 | |
| 772 template <typename U> void prepend(const U*, size_t); | |
| 773 template <typename U> void prepend(U&&); | |
| 774 template <typename U, size_t c, typename V> void prepend(const Vector<U, c,
V>&); | |
| 775 | |
| 776 void remove(size_t position); | |
| 777 void remove(size_t position, size_t length); | |
| 778 | |
| 779 void removeLast() | |
| 780 { | |
| 781 ASSERT(!isEmpty()); | |
| 782 shrink(size() - 1); | |
| 783 } | |
| 784 | |
| 785 Vector(size_t size, const T& val) | |
| 786 : Base(size) | |
| 787 { | |
| 788 ANNOTATE_NEW_BUFFER(begin(), capacity(), size); | |
| 789 m_size = size; | |
| 790 TypeOperations::uninitializedFill(begin(), end(), val); | |
| 791 } | |
| 792 | |
| 793 void fill(const T&, size_t); | |
| 794 void fill(const T& val) { fill(val, size()); } | |
| 795 | |
| 796 template <typename Iterator> void appendRange(Iterator start, Iterator end); | |
| 797 | |
| 798 void swap(Vector& other) | |
| 799 { | |
| 800 Base::swapVectorBuffer(other); | |
| 801 std::swap(m_size, other.m_size); | |
| 802 } | |
| 803 | |
| 804 void reverse(); | |
| 805 | |
| 806 template <typename VisitorDispatcher> void trace(VisitorDispatcher); | |
| 807 | |
| 808 protected: | |
| 809 using Base::checkUnusedSlots; | |
| 810 using Base::clearUnusedSlots; | |
| 811 | |
| 812 private: | |
| 813 void expandCapacity(size_t newMinCapacity); | |
| 814 T* expandCapacity(size_t newMinCapacity, T*); | |
| 815 T* expandCapacity(size_t newMinCapacity, const T* data) | |
| 816 { | |
| 817 return expandCapacity(newMinCapacity, const_cast<T*>(data)); | |
| 818 } | |
| 819 | |
| 820 template <typename U> U* expandCapacity(size_t newMinCapacity, U*); | |
| 821 void shrinkCapacity(size_t newCapacity); | |
| 822 template <typename U> void appendSlowCase(U&&); | |
| 823 | |
| 824 using Base::m_size; | |
| 825 using Base::buffer; | |
| 826 using Base::capacity; | |
| 827 using Base::swapVectorBuffer; | |
| 828 using Base::allocateBuffer; | |
| 829 using Base::allocationSize; | |
| 830 }; | |
| 831 | |
| 832 template <typename T, size_t inlineCapacity, typename Allocator> | 848 template <typename T, size_t inlineCapacity, typename Allocator> |
| 833 Vector<T, inlineCapacity, Allocator>::Vector(const Vector& other) | 849 Vector<T, inlineCapacity, Allocator>::Vector(const Vector& other) |
| 834 : Base(other.capacity()) | 850 : Base(other.capacity()) { |
| 835 { | 851 ANNOTATE_NEW_BUFFER(begin(), capacity(), other.size()); |
| 836 ANNOTATE_NEW_BUFFER(begin(), capacity(), other.size()); | 852 m_size = other.size(); |
| 837 m_size = other.size(); | 853 TypeOperations::uninitializedCopy(other.begin(), other.end(), begin()); |
| 838 TypeOperations::uninitializedCopy(other.begin(), other.end(), begin()); | |
| 839 } | 854 } |
| 840 | 855 |
| 841 template <typename T, size_t inlineCapacity, typename Allocator> | 856 template <typename T, size_t inlineCapacity, typename Allocator> |
| 842 template <size_t otherCapacity> | 857 template <size_t otherCapacity> |
| 843 Vector<T, inlineCapacity, Allocator>::Vector(const Vector<T, otherCapacity, Allo
cator>& other) | 858 Vector<T, inlineCapacity, Allocator>::Vector( |
| 844 : Base(other.capacity()) | 859 const Vector<T, otherCapacity, Allocator>& other) |
| 845 { | 860 : Base(other.capacity()) { |
| 846 ANNOTATE_NEW_BUFFER(begin(), capacity(), other.size()); | 861 ANNOTATE_NEW_BUFFER(begin(), capacity(), other.size()); |
| 847 m_size = other.size(); | 862 m_size = other.size(); |
| 848 TypeOperations::uninitializedCopy(other.begin(), other.end(), begin()); | 863 TypeOperations::uninitializedCopy(other.begin(), other.end(), begin()); |
| 849 } | 864 } |
| 850 | 865 |
| 851 template <typename T, size_t inlineCapacity, typename Allocator> | 866 template <typename T, size_t inlineCapacity, typename Allocator> |
| 852 Vector<T, inlineCapacity, Allocator>& Vector<T, inlineCapacity, Allocator>::oper
ator=(const Vector<T, inlineCapacity, Allocator>& other) | 867 Vector<T, inlineCapacity, Allocator>& Vector<T, inlineCapacity, Allocator>:: |
| 853 { | 868 operator=(const Vector<T, inlineCapacity, Allocator>& other) { |
| 854 if (UNLIKELY(&other == this)) | 869 if (UNLIKELY(&other == this)) |
| 855 return *this; | |
| 856 | |
| 857 if (size() > other.size()) { | |
| 858 shrink(other.size()); | |
| 859 } else if (other.size() > capacity()) { | |
| 860 clear(); | |
| 861 reserveCapacity(other.size()); | |
| 862 ASSERT(begin()); | |
| 863 } | |
| 864 | |
| 865 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, other.size()); | |
| 866 std::copy(other.begin(), other.begin() + size(), begin()); | |
| 867 TypeOperations::uninitializedCopy(other.begin() + size(), other.end(), end()
); | |
| 868 m_size = other.size(); | |
| 869 | |
| 870 return *this; | 870 return *this; |
| 871 } | 871 |
| 872 | 872 if (size() > other.size()) { |
| 873 inline bool typelessPointersAreEqual(const void* a, const void* b) { return a ==
b; } | 873 shrink(other.size()); |
| 874 } else if (other.size() > capacity()) { |
| 875 clear(); |
| 876 reserveCapacity(other.size()); |
| 877 ASSERT(begin()); |
| 878 } |
| 879 |
| 880 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, other.size()); |
| 881 std::copy(other.begin(), other.begin() + size(), begin()); |
| 882 TypeOperations::uninitializedCopy(other.begin() + size(), other.end(), end()); |
| 883 m_size = other.size(); |
| 884 |
| 885 return *this; |
| 886 } |
| 887 |
| 888 inline bool typelessPointersAreEqual(const void* a, const void* b) { |
| 889 return a == b; |
| 890 } |
| 874 | 891 |
| 875 template <typename T, size_t inlineCapacity, typename Allocator> | 892 template <typename T, size_t inlineCapacity, typename Allocator> |
| 876 template <size_t otherCapacity> | 893 template <size_t otherCapacity> |
| 877 Vector<T, inlineCapacity, Allocator>& Vector<T, inlineCapacity, Allocator>::oper
ator=(const Vector<T, otherCapacity, Allocator>& other) | 894 Vector<T, inlineCapacity, Allocator>& Vector<T, inlineCapacity, Allocator>:: |
| 878 { | 895 operator=(const Vector<T, otherCapacity, Allocator>& other) { |
| 879 // If the inline capacities match, we should call the more specific | 896 // If the inline capacities match, we should call the more specific |
| 880 // template. If the inline capacities don't match, the two objects | 897 // template. If the inline capacities don't match, the two objects |
| 881 // shouldn't be allocated the same address. | 898 // shouldn't be allocated the same address. |
| 882 ASSERT(!typelessPointersAreEqual(&other, this)); | 899 ASSERT(!typelessPointersAreEqual(&other, this)); |
| 883 | 900 |
| 884 if (size() > other.size()) { | 901 if (size() > other.size()) { |
| 885 shrink(other.size()); | 902 shrink(other.size()); |
| 886 } else if (other.size() > capacity()) { | 903 } else if (other.size() > capacity()) { |
| 887 clear(); | 904 clear(); |
| 888 reserveCapacity(other.size()); | 905 reserveCapacity(other.size()); |
| 889 ASSERT(begin()); | 906 ASSERT(begin()); |
| 890 } | 907 } |
| 891 | 908 |
| 892 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, other.size()); | 909 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, other.size()); |
| 893 std::copy(other.begin(), other.begin() + size(), begin()); | 910 std::copy(other.begin(), other.begin() + size(), begin()); |
| 894 TypeOperations::uninitializedCopy(other.begin() + size(), other.end(), end()
); | 911 TypeOperations::uninitializedCopy(other.begin() + size(), other.end(), end()); |
| 895 m_size = other.size(); | 912 m_size = other.size(); |
| 896 | 913 |
| 897 return *this; | 914 return *this; |
| 898 } | 915 } |
| 899 | 916 |
| 900 template <typename T, size_t inlineCapacity, typename Allocator> | 917 template <typename T, size_t inlineCapacity, typename Allocator> |
| 901 Vector<T, inlineCapacity, Allocator>::Vector(Vector<T, inlineCapacity, Allocator
>&& other) | 918 Vector<T, inlineCapacity, Allocator>::Vector( |
| 902 { | 919 Vector<T, inlineCapacity, Allocator>&& other) { |
| 903 m_size = 0; | 920 m_size = 0; |
| 904 // It's a little weird to implement a move constructor using swap but this | 921 // It's a little weird to implement a move constructor using swap but this |
| 905 // way we don't have to add a move constructor to VectorBuffer. | 922 // way we don't have to add a move constructor to VectorBuffer. |
| 906 swap(other); | 923 swap(other); |
| 907 } | 924 } |
| 908 | 925 |
| 909 template <typename T, size_t inlineCapacity, typename Allocator> | 926 template <typename T, size_t inlineCapacity, typename Allocator> |
| 910 Vector<T, inlineCapacity, Allocator>& Vector<T, inlineCapacity, Allocator>::oper
ator=(Vector<T, inlineCapacity, Allocator>&& other) | 927 Vector<T, inlineCapacity, Allocator>& Vector<T, inlineCapacity, Allocator>:: |
| 911 { | 928 operator=(Vector<T, inlineCapacity, Allocator>&& other) { |
| 912 swap(other); | 929 swap(other); |
| 913 return *this; | 930 return *this; |
| 914 } | 931 } |
| 915 | 932 |
| 916 template <typename T, size_t inlineCapacity, typename Allocator> | 933 template <typename T, size_t inlineCapacity, typename Allocator> |
| 917 template <typename U> | 934 template <typename U> |
| 918 bool Vector<T, inlineCapacity, Allocator>::contains(const U& value) const | 935 bool Vector<T, inlineCapacity, Allocator>::contains(const U& value) const { |
| 919 { | 936 return find(value) != kNotFound; |
| 920 return find(value) != kNotFound; | |
| 921 } | 937 } |
| 922 | 938 |
| 923 template <typename T, size_t inlineCapacity, typename Allocator> | 939 template <typename T, size_t inlineCapacity, typename Allocator> |
| 924 template <typename U> | 940 template <typename U> |
| 925 size_t Vector<T, inlineCapacity, Allocator>::find(const U& value) const | 941 size_t Vector<T, inlineCapacity, Allocator>::find(const U& value) const { |
| 926 { | 942 const T* b = begin(); |
| 927 const T* b = begin(); | 943 const T* e = end(); |
| 928 const T* e = end(); | 944 for (const T* iter = b; iter < e; ++iter) { |
| 929 for (const T* iter = b; iter < e; ++iter) { | 945 if (*iter == value) |
| 930 if (*iter == value) | 946 return iter - b; |
| 931 return iter - b; | 947 } |
| 932 } | 948 return kNotFound; |
| 933 return kNotFound; | |
| 934 } | 949 } |
| 935 | 950 |
| 936 template <typename T, size_t inlineCapacity, typename Allocator> | 951 template <typename T, size_t inlineCapacity, typename Allocator> |
| 937 template <typename U> | 952 template <typename U> |
| 938 size_t Vector<T, inlineCapacity, Allocator>::reverseFind(const U& value) const | 953 size_t Vector<T, inlineCapacity, Allocator>::reverseFind(const U& value) const { |
| 939 { | 954 const T* b = begin(); |
| 940 const T* b = begin(); | 955 const T* iter = end(); |
| 941 const T* iter = end(); | 956 while (iter > b) { |
| 942 while (iter > b) { | 957 --iter; |
| 943 --iter; | 958 if (*iter == value) |
| 944 if (*iter == value) | 959 return iter - b; |
| 945 return iter - b; | 960 } |
| 946 } | 961 return kNotFound; |
| 947 return kNotFound; | 962 } |
| 948 } | 963 |
| 949 | 964 template <typename T, size_t inlineCapacity, typename Allocator> |
| 950 template <typename T, size_t inlineCapacity, typename Allocator> | 965 void Vector<T, inlineCapacity, Allocator>::fill(const T& val, size_t newSize) { |
| 951 void Vector<T, inlineCapacity, Allocator>::fill(const T& val, size_t newSize) | 966 if (size() > newSize) { |
| 952 { | 967 shrink(newSize); |
| 953 if (size() > newSize) { | 968 } else if (newSize > capacity()) { |
| 954 shrink(newSize); | 969 clear(); |
| 955 } else if (newSize > capacity()) { | 970 reserveCapacity(newSize); |
| 956 clear(); | 971 ASSERT(begin()); |
| 957 reserveCapacity(newSize); | 972 } |
| 958 ASSERT(begin()); | 973 |
| 959 } | 974 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, newSize); |
| 960 | 975 std::fill(begin(), end(), val); |
| 961 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, newSize); | 976 TypeOperations::uninitializedFill(end(), begin() + newSize, val); |
| 962 std::fill(begin(), end(), val); | 977 m_size = newSize; |
| 963 TypeOperations::uninitializedFill(end(), begin() + newSize, val); | |
| 964 m_size = newSize; | |
| 965 } | 978 } |
| 966 | 979 |
| 967 template <typename T, size_t inlineCapacity, typename Allocator> | 980 template <typename T, size_t inlineCapacity, typename Allocator> |
| 968 template <typename Iterator> | 981 template <typename Iterator> |
| 969 void Vector<T, inlineCapacity, Allocator>::appendRange(Iterator start, Iterator
end) | 982 void Vector<T, inlineCapacity, Allocator>::appendRange(Iterator start, |
| 970 { | 983 Iterator end) { |
| 971 for (Iterator it = start; it != end; ++it) | 984 for (Iterator it = start; it != end; ++it) |
| 972 append(*it); | 985 append(*it); |
| 973 } | 986 } |
| 974 | 987 |
| 975 template <typename T, size_t inlineCapacity, typename Allocator> | 988 template <typename T, size_t inlineCapacity, typename Allocator> |
| 976 void Vector<T, inlineCapacity, Allocator>::expandCapacity(size_t newMinCapacity) | 989 void Vector<T, inlineCapacity, Allocator>::expandCapacity( |
| 977 { | 990 size_t newMinCapacity) { |
| 978 size_t oldCapacity = capacity(); | 991 size_t oldCapacity = capacity(); |
| 979 size_t expandedCapacity = oldCapacity; | 992 size_t expandedCapacity = oldCapacity; |
| 980 // We use a more aggressive expansion strategy for Vectors with inline | 993 // We use a more aggressive expansion strategy for Vectors with inline |
| 981 // storage. This is because they are more likely to be on the stack, so the | 994 // storage. This is because they are more likely to be on the stack, so the |
| 982 // risk of heap bloat is minimized. Furthermore, exceeding the inline | 995 // risk of heap bloat is minimized. Furthermore, exceeding the inline |
| 983 // capacity limit is not supposed to happen in the common case and may | 996 // capacity limit is not supposed to happen in the common case and may |
| 984 // indicate a pathological condition or microbenchmark. | 997 // indicate a pathological condition or microbenchmark. |
| 985 if (INLINE_CAPACITY) { | 998 if (INLINE_CAPACITY) { |
| 986 expandedCapacity *= 2; | 999 expandedCapacity *= 2; |
| 987 // Check for integer overflow, which could happen in the 32-bit build. | 1000 // Check for integer overflow, which could happen in the 32-bit build. |
| 988 RELEASE_ASSERT(expandedCapacity > oldCapacity); | 1001 RELEASE_ASSERT(expandedCapacity > oldCapacity); |
| 989 } else { | 1002 } else { |
| 990 // This cannot integer overflow. | 1003 // This cannot integer overflow. |
| 991 // On 64-bit, the "expanded" integer is 32-bit, and any encroachment | 1004 // On 64-bit, the "expanded" integer is 32-bit, and any encroachment |
| 992 // above 2^32 will fail allocation in allocateBuffer(). On 32-bit, | 1005 // above 2^32 will fail allocation in allocateBuffer(). On 32-bit, |
| 993 // there's not enough address space to hold the old and new buffers. In | 1006 // there's not enough address space to hold the old and new buffers. In |
| 994 // addition, our underlying allocator is supposed to always fail on > | 1007 // addition, our underlying allocator is supposed to always fail on > |
| 995 // (2^31 - 1) allocations. | 1008 // (2^31 - 1) allocations. |
| 996 expandedCapacity += (expandedCapacity / 4) + 1; | 1009 expandedCapacity += (expandedCapacity / 4) + 1; |
| 997 } | 1010 } |
| 998 reserveCapacity(std::max(newMinCapacity, std::max(static_cast<size_t>(kIniti
alVectorSize), expandedCapacity))); | 1011 reserveCapacity(std::max( |
| 999 } | 1012 newMinCapacity, |
| 1000 | 1013 std::max(static_cast<size_t>(kInitialVectorSize), expandedCapacity))); |
| 1001 template <typename T, size_t inlineCapacity, typename Allocator> | 1014 } |
| 1002 T* Vector<T, inlineCapacity, Allocator>::expandCapacity(size_t newMinCapacity, T
* ptr) | 1015 |
| 1003 { | 1016 template <typename T, size_t inlineCapacity, typename Allocator> |
| 1004 if (ptr < begin() || ptr >= end()) { | 1017 T* Vector<T, inlineCapacity, Allocator>::expandCapacity(size_t newMinCapacity, |
| 1005 expandCapacity(newMinCapacity); | 1018 T* ptr) { |
| 1006 return ptr; | 1019 if (ptr < begin() || ptr >= end()) { |
| 1007 } | |
| 1008 size_t index = ptr - begin(); | |
| 1009 expandCapacity(newMinCapacity); | |
| 1010 return begin() + index; | |
| 1011 } | |
| 1012 | |
| 1013 template <typename T, size_t inlineCapacity, typename Allocator> | |
| 1014 template <typename U> | |
| 1015 inline U* Vector<T, inlineCapacity, Allocator>::expandCapacity(size_t newMinCapa
city, U* ptr) | |
| 1016 { | |
| 1017 expandCapacity(newMinCapacity); | 1020 expandCapacity(newMinCapacity); |
| 1018 return ptr; | 1021 return ptr; |
| 1019 } | 1022 } |
| 1020 | 1023 size_t index = ptr - begin(); |
| 1021 template <typename T, size_t inlineCapacity, typename Allocator> | 1024 expandCapacity(newMinCapacity); |
| 1022 inline void Vector<T, inlineCapacity, Allocator>::resize(size_t size) | 1025 return begin() + index; |
| 1023 { | 1026 } |
| 1024 if (size <= m_size) { | 1027 |
| 1025 TypeOperations::destruct(begin() + size, end()); | 1028 template <typename T, size_t inlineCapacity, typename Allocator> |
| 1026 clearUnusedSlots(begin() + size, end()); | 1029 template <typename U> |
| 1027 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, size); | 1030 inline U* Vector<T, inlineCapacity, Allocator>::expandCapacity( |
| 1028 } else { | 1031 size_t newMinCapacity, |
| 1029 if (size > capacity()) | 1032 U* ptr) { |
| 1030 expandCapacity(size); | 1033 expandCapacity(newMinCapacity); |
| 1031 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, size); | 1034 return ptr; |
| 1032 TypeOperations::initialize(end(), begin() + size); | 1035 } |
| 1033 } | 1036 |
| 1034 | 1037 template <typename T, size_t inlineCapacity, typename Allocator> |
| 1035 m_size = size; | 1038 inline void Vector<T, inlineCapacity, Allocator>::resize(size_t size) { |
| 1036 } | 1039 if (size <= m_size) { |
| 1037 | |
| 1038 template <typename T, size_t inlineCapacity, typename Allocator> | |
| 1039 void Vector<T, inlineCapacity, Allocator>::shrink(size_t size) | |
| 1040 { | |
| 1041 ASSERT(size <= m_size); | |
| 1042 TypeOperations::destruct(begin() + size, end()); | 1040 TypeOperations::destruct(begin() + size, end()); |
| 1043 clearUnusedSlots(begin() + size, end()); | 1041 clearUnusedSlots(begin() + size, end()); |
| 1044 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, size); | 1042 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, size); |
| 1045 m_size = size; | 1043 } else { |
| 1046 } | |
| 1047 | |
| 1048 template <typename T, size_t inlineCapacity, typename Allocator> | |
| 1049 void Vector<T, inlineCapacity, Allocator>::grow(size_t size) | |
| 1050 { | |
| 1051 ASSERT(size >= m_size); | |
| 1052 if (size > capacity()) | 1044 if (size > capacity()) |
| 1053 expandCapacity(size); | 1045 expandCapacity(size); |
| 1054 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, size); | 1046 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, size); |
| 1055 TypeOperations::initialize(end(), begin() + size); | 1047 TypeOperations::initialize(end(), begin() + size); |
| 1056 m_size = size; | 1048 } |
| 1057 } | 1049 |
| 1058 | 1050 m_size = size; |
| 1059 template <typename T, size_t inlineCapacity, typename Allocator> | 1051 } |
| 1060 void Vector<T, inlineCapacity, Allocator>::reserveCapacity(size_t newCapacity) | 1052 |
| 1061 { | 1053 template <typename T, size_t inlineCapacity, typename Allocator> |
| 1062 if (UNLIKELY(newCapacity <= capacity())) | 1054 void Vector<T, inlineCapacity, Allocator>::shrink(size_t size) { |
| 1063 return; | 1055 ASSERT(size <= m_size); |
| 1064 T* oldBuffer = begin(); | 1056 TypeOperations::destruct(begin() + size, end()); |
| 1065 if (!oldBuffer) { | 1057 clearUnusedSlots(begin() + size, end()); |
| 1066 Base::allocateBuffer(newCapacity); | 1058 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, size); |
| 1067 return; | 1059 m_size = size; |
| 1060 } |
| 1061 |
| 1062 template <typename T, size_t inlineCapacity, typename Allocator> |
| 1063 void Vector<T, inlineCapacity, Allocator>::grow(size_t size) { |
| 1064 ASSERT(size >= m_size); |
| 1065 if (size > capacity()) |
| 1066 expandCapacity(size); |
| 1067 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, size); |
| 1068 TypeOperations::initialize(end(), begin() + size); |
| 1069 m_size = size; |
| 1070 } |
| 1071 |
| 1072 template <typename T, size_t inlineCapacity, typename Allocator> |
| 1073 void Vector<T, inlineCapacity, Allocator>::reserveCapacity(size_t newCapacity) { |
| 1074 if (UNLIKELY(newCapacity <= capacity())) |
| 1075 return; |
| 1076 T* oldBuffer = begin(); |
| 1077 if (!oldBuffer) { |
| 1078 Base::allocateBuffer(newCapacity); |
| 1079 return; |
| 1080 } |
| 1081 #ifdef ANNOTATE_CONTIGUOUS_CONTAINER |
| 1082 size_t oldCapacity = capacity(); |
| 1083 #endif |
| 1084 // The Allocator::isGarbageCollected check is not needed. The check is just |
| 1085 // a static hint for a compiler to indicate that Base::expandBuffer returns |
| 1086 // false if Allocator is a PartitionAllocator. |
| 1087 if (Allocator::isGarbageCollected && Base::expandBuffer(newCapacity)) { |
| 1088 ANNOTATE_CHANGE_CAPACITY(begin(), oldCapacity, m_size, capacity()); |
| 1089 return; |
| 1090 } |
| 1091 T* oldEnd = end(); |
| 1092 Base::allocateExpandedBuffer(newCapacity); |
| 1093 ANNOTATE_NEW_BUFFER(begin(), capacity(), m_size); |
| 1094 TypeOperations::move(oldBuffer, oldEnd, begin()); |
| 1095 clearUnusedSlots(oldBuffer, oldEnd); |
| 1096 ANNOTATE_DELETE_BUFFER(oldBuffer, oldCapacity, m_size); |
| 1097 Base::deallocateBuffer(oldBuffer); |
| 1098 } |
| 1099 |
| 1100 template <typename T, size_t inlineCapacity, typename Allocator> |
| 1101 inline void Vector<T, inlineCapacity, Allocator>::reserveInitialCapacity( |
| 1102 size_t initialCapacity) { |
| 1103 ASSERT(!m_size); |
| 1104 ASSERT(capacity() == INLINE_CAPACITY); |
| 1105 if (initialCapacity > INLINE_CAPACITY) { |
| 1106 ANNOTATE_DELETE_BUFFER(begin(), capacity(), m_size); |
| 1107 Base::allocateBuffer(initialCapacity); |
| 1108 ANNOTATE_NEW_BUFFER(begin(), capacity(), m_size); |
| 1109 } |
| 1110 } |
| 1111 |
| 1112 template <typename T, size_t inlineCapacity, typename Allocator> |
| 1113 void Vector<T, inlineCapacity, Allocator>::shrinkCapacity(size_t newCapacity) { |
| 1114 if (newCapacity >= capacity()) |
| 1115 return; |
| 1116 |
| 1117 if (newCapacity < size()) |
| 1118 shrink(newCapacity); |
| 1119 |
| 1120 T* oldBuffer = begin(); |
| 1121 #ifdef ANNOTATE_CONTIGUOUS_CONTAINER |
| 1122 size_t oldCapacity = capacity(); |
| 1123 #endif |
| 1124 if (newCapacity > 0) { |
| 1125 if (Base::shrinkBuffer(newCapacity)) { |
| 1126 ANNOTATE_CHANGE_CAPACITY(begin(), oldCapacity, m_size, capacity()); |
| 1127 return; |
| 1068 } | 1128 } |
| 1129 |
| 1130 T* oldEnd = end(); |
| 1131 Base::allocateBuffer(newCapacity); |
| 1132 if (begin() != oldBuffer) { |
| 1133 ANNOTATE_NEW_BUFFER(begin(), capacity(), m_size); |
| 1134 TypeOperations::move(oldBuffer, oldEnd, begin()); |
| 1135 clearUnusedSlots(oldBuffer, oldEnd); |
| 1136 ANNOTATE_DELETE_BUFFER(oldBuffer, oldCapacity, m_size); |
| 1137 } |
| 1138 } else { |
| 1139 Base::resetBufferPointer(); |
| 1069 #ifdef ANNOTATE_CONTIGUOUS_CONTAINER | 1140 #ifdef ANNOTATE_CONTIGUOUS_CONTAINER |
| 1070 size_t oldCapacity = capacity(); | 1141 if (oldBuffer != begin()) { |
| 1142 ANNOTATE_NEW_BUFFER(begin(), capacity(), m_size); |
| 1143 ANNOTATE_DELETE_BUFFER(oldBuffer, oldCapacity, m_size); |
| 1144 } |
| 1071 #endif | 1145 #endif |
| 1072 // The Allocator::isGarbageCollected check is not needed. The check is just | 1146 } |
| 1073 // a static hint for a compiler to indicate that Base::expandBuffer returns | 1147 |
| 1074 // false if Allocator is a PartitionAllocator. | 1148 Base::deallocateBuffer(oldBuffer); |
| 1075 if (Allocator::isGarbageCollected && Base::expandBuffer(newCapacity)) { | |
| 1076 ANNOTATE_CHANGE_CAPACITY(begin(), oldCapacity, m_size, capacity()); | |
| 1077 return; | |
| 1078 } | |
| 1079 T* oldEnd = end(); | |
| 1080 Base::allocateExpandedBuffer(newCapacity); | |
| 1081 ANNOTATE_NEW_BUFFER(begin(), capacity(), m_size); | |
| 1082 TypeOperations::move(oldBuffer, oldEnd, begin()); | |
| 1083 clearUnusedSlots(oldBuffer, oldEnd); | |
| 1084 ANNOTATE_DELETE_BUFFER(oldBuffer, oldCapacity, m_size); | |
| 1085 Base::deallocateBuffer(oldBuffer); | |
| 1086 } | |
| 1087 | |
| 1088 template <typename T, size_t inlineCapacity, typename Allocator> | |
| 1089 inline void Vector<T, inlineCapacity, Allocator>::reserveInitialCapacity(size_t
initialCapacity) | |
| 1090 { | |
| 1091 ASSERT(!m_size); | |
| 1092 ASSERT(capacity() == INLINE_CAPACITY); | |
| 1093 if (initialCapacity > INLINE_CAPACITY) { | |
| 1094 ANNOTATE_DELETE_BUFFER(begin(), capacity(), m_size); | |
| 1095 Base::allocateBuffer(initialCapacity); | |
| 1096 ANNOTATE_NEW_BUFFER(begin(), capacity(), m_size); | |
| 1097 } | |
| 1098 } | |
| 1099 | |
| 1100 template <typename T, size_t inlineCapacity, typename Allocator> | |
| 1101 void Vector<T, inlineCapacity, Allocator>::shrinkCapacity(size_t newCapacity) | |
| 1102 { | |
| 1103 if (newCapacity >= capacity()) | |
| 1104 return; | |
| 1105 | |
| 1106 if (newCapacity < size()) | |
| 1107 shrink(newCapacity); | |
| 1108 | |
| 1109 T* oldBuffer = begin(); | |
| 1110 #ifdef ANNOTATE_CONTIGUOUS_CONTAINER | |
| 1111 size_t oldCapacity = capacity(); | |
| 1112 #endif | |
| 1113 if (newCapacity > 0) { | |
| 1114 if (Base::shrinkBuffer(newCapacity)) { | |
| 1115 ANNOTATE_CHANGE_CAPACITY(begin(), oldCapacity, m_size, capacity()); | |
| 1116 return; | |
| 1117 } | |
| 1118 | |
| 1119 T* oldEnd = end(); | |
| 1120 Base::allocateBuffer(newCapacity); | |
| 1121 if (begin() != oldBuffer) { | |
| 1122 ANNOTATE_NEW_BUFFER(begin(), capacity(), m_size); | |
| 1123 TypeOperations::move(oldBuffer, oldEnd, begin()); | |
| 1124 clearUnusedSlots(oldBuffer, oldEnd); | |
| 1125 ANNOTATE_DELETE_BUFFER(oldBuffer, oldCapacity, m_size); | |
| 1126 } | |
| 1127 } else { | |
| 1128 Base::resetBufferPointer(); | |
| 1129 #ifdef ANNOTATE_CONTIGUOUS_CONTAINER | |
| 1130 if (oldBuffer != begin()) { | |
| 1131 ANNOTATE_NEW_BUFFER(begin(), capacity(), m_size); | |
| 1132 ANNOTATE_DELETE_BUFFER(oldBuffer, oldCapacity, m_size); | |
| 1133 } | |
| 1134 #endif | |
| 1135 } | |
| 1136 | |
| 1137 Base::deallocateBuffer(oldBuffer); | |
| 1138 } | 1149 } |
| 1139 | 1150 |
| 1140 // Templatizing these is better than just letting the conversion happen | 1151 // Templatizing these is better than just letting the conversion happen |
| 1141 // implicitly, because for instance it allows a PassRefPtr to be appended to a | 1152 // implicitly, because for instance it allows a PassRefPtr to be appended to a |
| 1142 // RefPtr vector without refcount thrash. | 1153 // RefPtr vector without refcount thrash. |
| 1143 | 1154 |
| 1144 template <typename T, size_t inlineCapacity, typename Allocator> | 1155 template <typename T, size_t inlineCapacity, typename Allocator> |
| 1145 template <typename U> | 1156 template <typename U> |
| 1146 void Vector<T, inlineCapacity, Allocator>::append(const U* data, size_t dataSize
) | 1157 void Vector<T, inlineCapacity, Allocator>::append(const U* data, |
| 1147 { | 1158 size_t dataSize) { |
| 1148 ASSERT(Allocator::isAllocationAllowed()); | 1159 ASSERT(Allocator::isAllocationAllowed()); |
| 1149 size_t newSize = m_size + dataSize; | 1160 size_t newSize = m_size + dataSize; |
| 1150 if (newSize > capacity()) { | 1161 if (newSize > capacity()) { |
| 1151 data = expandCapacity(newSize, data); | 1162 data = expandCapacity(newSize, data); |
| 1152 ASSERT(begin()); | 1163 ASSERT(begin()); |
| 1153 } | 1164 } |
| 1154 RELEASE_ASSERT(newSize >= m_size); | 1165 RELEASE_ASSERT(newSize >= m_size); |
| 1155 T* dest = end(); | 1166 T* dest = end(); |
| 1156 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, newSize); | 1167 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, newSize); |
| 1157 VectorCopier<VectorTraits<T>::canCopyWithMemcpy, T>::uninitializedCopy(data,
&data[dataSize], dest); | 1168 VectorCopier<VectorTraits<T>::canCopyWithMemcpy, T>::uninitializedCopy( |
| 1158 m_size = newSize; | 1169 data, &data[dataSize], dest); |
| 1170 m_size = newSize; |
| 1159 } | 1171 } |
| 1160 | 1172 |
| 1161 template <typename T, size_t inlineCapacity, typename Allocator> | 1173 template <typename T, size_t inlineCapacity, typename Allocator> |
| 1162 template <typename U> | 1174 template <typename U> |
| 1163 ALWAYS_INLINE void Vector<T, inlineCapacity, Allocator>::append(U&& val) | 1175 ALWAYS_INLINE void Vector<T, inlineCapacity, Allocator>::append(U&& val) { |
| 1164 { | 1176 ASSERT(Allocator::isAllocationAllowed()); |
| 1165 ASSERT(Allocator::isAllocationAllowed()); | 1177 if (LIKELY(size() != capacity())) { |
| 1166 if (LIKELY(size() != capacity())) { | 1178 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, m_size + 1); |
| 1167 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, m_size + 1); | 1179 new (NotNull, end()) T(std::forward<U>(val)); |
| 1168 new (NotNull, end()) T(std::forward<U>(val)); | 1180 ++m_size; |
| 1169 ++m_size; | 1181 return; |
| 1170 return; | 1182 } |
| 1171 } | |
| 1172 | 1183 |
| 1173 appendSlowCase(std::forward<U>(val)); | 1184 appendSlowCase(std::forward<U>(val)); |
| 1174 } | 1185 } |
| 1175 | 1186 |
| 1176 template <typename T, size_t inlineCapacity, typename Allocator> | 1187 template <typename T, size_t inlineCapacity, typename Allocator> |
| 1177 template <typename U> | 1188 template <typename U> |
| 1178 NEVER_INLINE void Vector<T, inlineCapacity, Allocator>::appendSlowCase(U&& val) | 1189 NEVER_INLINE void Vector<T, inlineCapacity, Allocator>::appendSlowCase( |
| 1179 { | 1190 U&& val) { |
| 1180 ASSERT(size() == capacity()); | 1191 ASSERT(size() == capacity()); |
| 1181 | 1192 |
| 1182 typename std::remove_reference<U>::type* ptr = &val; | 1193 typename std::remove_reference<U>::type* ptr = &val; |
| 1183 ptr = expandCapacity(size() + 1, ptr); | 1194 ptr = expandCapacity(size() + 1, ptr); |
| 1184 ASSERT(begin()); | 1195 ASSERT(begin()); |
| 1185 | 1196 |
| 1186 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, m_size + 1); | 1197 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, m_size + 1); |
| 1187 new (NotNull, end()) T(std::forward<U>(*ptr)); | 1198 new (NotNull, end()) T(std::forward<U>(*ptr)); |
| 1188 ++m_size; | 1199 ++m_size; |
| 1189 } | 1200 } |
| 1190 | 1201 |
| 1191 // This version of append saves a branch in the case where you know that the | 1202 // This version of append saves a branch in the case where you know that the |
| 1192 // vector's capacity is large enough for the append to succeed. | 1203 // vector's capacity is large enough for the append to succeed. |
| 1193 | 1204 |
| 1194 template <typename T, size_t inlineCapacity, typename Allocator> | 1205 template <typename T, size_t inlineCapacity, typename Allocator> |
| 1195 template <typename U> | 1206 template <typename U> |
| 1196 ALWAYS_INLINE void Vector<T, inlineCapacity, Allocator>::uncheckedAppend(U&& val
) | 1207 ALWAYS_INLINE void Vector<T, inlineCapacity, Allocator>::uncheckedAppend( |
| 1197 { | 1208 U&& val) { |
| 1198 #ifdef ANNOTATE_CONTIGUOUS_CONTAINER | 1209 #ifdef ANNOTATE_CONTIGUOUS_CONTAINER |
| 1199 // Vectors in ASAN builds don't have inlineCapacity. | 1210 // Vectors in ASAN builds don't have inlineCapacity. |
| 1200 append(std::forward<U>(val)); | 1211 append(std::forward<U>(val)); |
| 1201 #else | 1212 #else |
| 1202 ASSERT(size() < capacity()); | 1213 ASSERT(size() < capacity()); |
| 1203 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, m_size + 1); | 1214 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, m_size + 1); |
| 1204 new (NotNull, end()) T(std::forward<U>(val)); | 1215 new (NotNull, end()) T(std::forward<U>(val)); |
| 1205 ++m_size; | 1216 ++m_size; |
| 1206 #endif | 1217 #endif |
| 1207 } | 1218 } |
| 1208 | 1219 |
| 1209 template <typename T, size_t inlineCapacity, typename Allocator> | 1220 template <typename T, size_t inlineCapacity, typename Allocator> |
| 1210 template <typename U, size_t otherCapacity, typename OtherAllocator> | 1221 template <typename U, size_t otherCapacity, typename OtherAllocator> |
| 1211 inline void Vector<T, inlineCapacity, Allocator>::appendVector(const Vector<U, o
therCapacity, OtherAllocator>& val) | 1222 inline void Vector<T, inlineCapacity, Allocator>::appendVector( |
| 1212 { | 1223 const Vector<U, otherCapacity, OtherAllocator>& val) { |
| 1213 append(val.begin(), val.size()); | 1224 append(val.begin(), val.size()); |
| 1214 } | 1225 } |
| 1215 | 1226 |
| 1216 template <typename T, size_t inlineCapacity, typename Allocator> | 1227 template <typename T, size_t inlineCapacity, typename Allocator> |
| 1217 template <typename U> | 1228 template <typename U> |
| 1218 void Vector<T, inlineCapacity, Allocator>::insert(size_t position, const U* data
, size_t dataSize) | 1229 void Vector<T, inlineCapacity, Allocator>::insert(size_t position, |
| 1219 { | 1230 const U* data, |
| 1220 ASSERT(Allocator::isAllocationAllowed()); | 1231 size_t dataSize) { |
| 1221 RELEASE_ASSERT(position <= size()); | 1232 ASSERT(Allocator::isAllocationAllowed()); |
| 1222 size_t newSize = m_size + dataSize; | 1233 RELEASE_ASSERT(position <= size()); |
| 1223 if (newSize > capacity()) { | 1234 size_t newSize = m_size + dataSize; |
| 1224 data = expandCapacity(newSize, data); | 1235 if (newSize > capacity()) { |
| 1225 ASSERT(begin()); | 1236 data = expandCapacity(newSize, data); |
| 1226 } | 1237 ASSERT(begin()); |
| 1227 RELEASE_ASSERT(newSize >= m_size); | 1238 } |
| 1228 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, newSize); | 1239 RELEASE_ASSERT(newSize >= m_size); |
| 1229 T* spot = begin() + position; | 1240 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, newSize); |
| 1230 TypeOperations::moveOverlapping(spot, end(), spot + dataSize); | 1241 T* spot = begin() + position; |
| 1231 VectorCopier<VectorTraits<T>::canCopyWithMemcpy, T>::uninitializedCopy(data,
&data[dataSize], spot); | 1242 TypeOperations::moveOverlapping(spot, end(), spot + dataSize); |
| 1232 m_size = newSize; | 1243 VectorCopier<VectorTraits<T>::canCopyWithMemcpy, T>::uninitializedCopy( |
| 1244 data, &data[dataSize], spot); |
| 1245 m_size = newSize; |
| 1233 } | 1246 } |
| 1234 | 1247 |
| 1235 template <typename T, size_t inlineCapacity, typename Allocator> | 1248 template <typename T, size_t inlineCapacity, typename Allocator> |
| 1236 template <typename U> | 1249 template <typename U> |
| 1237 inline void Vector<T, inlineCapacity, Allocator>::insert(size_t position, U&& va
l) | 1250 inline void Vector<T, inlineCapacity, Allocator>::insert(size_t position, |
| 1238 { | 1251 U&& val) { |
| 1239 ASSERT(Allocator::isAllocationAllowed()); | 1252 ASSERT(Allocator::isAllocationAllowed()); |
| 1240 RELEASE_ASSERT(position <= size()); | 1253 RELEASE_ASSERT(position <= size()); |
| 1241 typename std::remove_reference<U>::type* data = &val; | 1254 typename std::remove_reference<U>::type* data = &val; |
| 1242 if (size() == capacity()) { | 1255 if (size() == capacity()) { |
| 1243 data = expandCapacity(size() + 1, data); | 1256 data = expandCapacity(size() + 1, data); |
| 1244 ASSERT(begin()); | 1257 ASSERT(begin()); |
| 1245 } | 1258 } |
| 1246 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, m_size + 1); | 1259 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, m_size + 1); |
| 1247 T* spot = begin() + position; | 1260 T* spot = begin() + position; |
| 1248 TypeOperations::moveOverlapping(spot, end(), spot + 1); | 1261 TypeOperations::moveOverlapping(spot, end(), spot + 1); |
| 1249 new (NotNull, spot) T(std::forward<U>(*data)); | 1262 new (NotNull, spot) T(std::forward<U>(*data)); |
| 1250 ++m_size; | 1263 ++m_size; |
| 1251 } | 1264 } |
| 1252 | 1265 |
| 1253 template <typename T, size_t inlineCapacity, typename Allocator> | 1266 template <typename T, size_t inlineCapacity, typename Allocator> |
| 1254 template <typename U, size_t c, typename OtherAllocator> | 1267 template <typename U, size_t c, typename OtherAllocator> |
| 1255 inline void Vector<T, inlineCapacity, Allocator>::insert(size_t position, const
Vector<U, c, OtherAllocator>& val) | 1268 inline void Vector<T, inlineCapacity, Allocator>::insert( |
| 1256 { | 1269 size_t position, |
| 1257 insert(position, val.begin(), val.size()); | 1270 const Vector<U, c, OtherAllocator>& val) { |
| 1271 insert(position, val.begin(), val.size()); |
| 1258 } | 1272 } |
| 1259 | 1273 |
| 1260 template <typename T, size_t inlineCapacity, typename Allocator> | 1274 template <typename T, size_t inlineCapacity, typename Allocator> |
| 1261 template <typename U> | 1275 template <typename U> |
| 1262 void Vector<T, inlineCapacity, Allocator>::prepend(const U* data, size_t dataSiz
e) | 1276 void Vector<T, inlineCapacity, Allocator>::prepend(const U* data, |
| 1263 { | 1277 size_t dataSize) { |
| 1264 insert(0, data, dataSize); | 1278 insert(0, data, dataSize); |
| 1265 } | 1279 } |
| 1266 | 1280 |
| 1267 template <typename T, size_t inlineCapacity, typename Allocator> | 1281 template <typename T, size_t inlineCapacity, typename Allocator> |
| 1268 template <typename U> | 1282 template <typename U> |
| 1269 inline void Vector<T, inlineCapacity, Allocator>::prepend(U&& val) | 1283 inline void Vector<T, inlineCapacity, Allocator>::prepend(U&& val) { |
| 1270 { | 1284 insert(0, std::forward<U>(val)); |
| 1271 insert(0, std::forward<U>(val)); | |
| 1272 } | 1285 } |
| 1273 | 1286 |
| 1274 template <typename T, size_t inlineCapacity, typename Allocator> | 1287 template <typename T, size_t inlineCapacity, typename Allocator> |
| 1275 template <typename U, size_t c, typename V> | 1288 template <typename U, size_t c, typename V> |
| 1276 inline void Vector<T, inlineCapacity, Allocator>::prepend(const Vector<U, c, V>&
val) | 1289 inline void Vector<T, inlineCapacity, Allocator>::prepend( |
| 1277 { | 1290 const Vector<U, c, V>& val) { |
| 1278 insert(0, val.begin(), val.size()); | 1291 insert(0, val.begin(), val.size()); |
| 1279 } | 1292 } |
| 1280 | 1293 |
| 1281 template <typename T, size_t inlineCapacity, typename Allocator> | 1294 template <typename T, size_t inlineCapacity, typename Allocator> |
| 1282 inline void Vector<T, inlineCapacity, Allocator>::remove(size_t position) | 1295 inline void Vector<T, inlineCapacity, Allocator>::remove(size_t position) { |
| 1283 { | 1296 RELEASE_ASSERT(position < size()); |
| 1284 RELEASE_ASSERT(position < size()); | 1297 T* spot = begin() + position; |
| 1285 T* spot = begin() + position; | 1298 spot->~T(); |
| 1286 spot->~T(); | 1299 TypeOperations::moveOverlapping(spot + 1, end(), spot); |
| 1287 TypeOperations::moveOverlapping(spot + 1, end(), spot); | 1300 clearUnusedSlots(end() - 1, end()); |
| 1288 clearUnusedSlots(end() - 1, end()); | 1301 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, m_size - 1); |
| 1289 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, m_size - 1); | 1302 --m_size; |
| 1290 --m_size; | |
| 1291 } | 1303 } |
| 1292 | 1304 |
| 1293 template <typename T, size_t inlineCapacity, typename Allocator> | 1305 template <typename T, size_t inlineCapacity, typename Allocator> |
| 1294 inline void Vector<T, inlineCapacity, Allocator>::remove(size_t position, size_t
length) | 1306 inline void Vector<T, inlineCapacity, Allocator>::remove(size_t position, |
| 1295 { | 1307 size_t length) { |
| 1296 ASSERT_WITH_SECURITY_IMPLICATION(position <= size()); | 1308 ASSERT_WITH_SECURITY_IMPLICATION(position <= size()); |
| 1297 if (!length) | 1309 if (!length) |
| 1298 return; | 1310 return; |
| 1299 RELEASE_ASSERT(position + length <= size()); | 1311 RELEASE_ASSERT(position + length <= size()); |
| 1300 T* beginSpot = begin() + position; | 1312 T* beginSpot = begin() + position; |
| 1301 T* endSpot = beginSpot + length; | 1313 T* endSpot = beginSpot + length; |
| 1302 TypeOperations::destruct(beginSpot, endSpot); | 1314 TypeOperations::destruct(beginSpot, endSpot); |
| 1303 TypeOperations::moveOverlapping(endSpot, end(), beginSpot); | 1315 TypeOperations::moveOverlapping(endSpot, end(), beginSpot); |
| 1304 clearUnusedSlots(end() - length, end()); | 1316 clearUnusedSlots(end() - length, end()); |
| 1305 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, m_size - length); | 1317 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, m_size - length); |
| 1306 m_size -= length; | 1318 m_size -= length; |
| 1307 } | 1319 } |
| 1308 | 1320 |
| 1309 template <typename T, size_t inlineCapacity, typename Allocator> | 1321 template <typename T, size_t inlineCapacity, typename Allocator> |
| 1310 inline void Vector<T, inlineCapacity, Allocator>::reverse() | 1322 inline void Vector<T, inlineCapacity, Allocator>::reverse() { |
| 1311 { | 1323 for (size_t i = 0; i < m_size / 2; ++i) |
| 1312 for (size_t i = 0; i < m_size / 2; ++i) | 1324 std::swap(at(i), at(m_size - 1 - i)); |
| 1313 std::swap(at(i), at(m_size - 1 - i)); | |
| 1314 } | 1325 } |
| 1315 | 1326 |
| 1316 template <typename T, size_t inlineCapacity, typename Allocator> | 1327 template <typename T, size_t inlineCapacity, typename Allocator> |
| 1317 inline void swap(Vector<T, inlineCapacity, Allocator>& a, Vector<T, inlineCapaci
ty, Allocator>& b) | 1328 inline void swap(Vector<T, inlineCapacity, Allocator>& a, |
| 1318 { | 1329 Vector<T, inlineCapacity, Allocator>& b) { |
| 1319 a.swap(b); | 1330 a.swap(b); |
| 1320 } | 1331 } |
| 1321 | 1332 |
| 1322 template <typename T, size_t inlineCapacityA, size_t inlineCapacityB, typename A
llocator> | 1333 template <typename T, |
| 1323 bool operator==(const Vector<T, inlineCapacityA, Allocator>& a, const Vector<T,
inlineCapacityB, Allocator>& b) | 1334 size_t inlineCapacityA, |
| 1324 { | 1335 size_t inlineCapacityB, |
| 1325 if (a.size() != b.size()) | 1336 typename Allocator> |
| 1326 return false; | 1337 bool operator==(const Vector<T, inlineCapacityA, Allocator>& a, |
| 1327 if (a.isEmpty()) | 1338 const Vector<T, inlineCapacityB, Allocator>& b) { |
| 1328 return true; | 1339 if (a.size() != b.size()) |
| 1329 return VectorTypeOperations<T>::compare(a.data(), b.data(), a.size()); | 1340 return false; |
| 1341 if (a.isEmpty()) |
| 1342 return true; |
| 1343 return VectorTypeOperations<T>::compare(a.data(), b.data(), a.size()); |
| 1330 } | 1344 } |
| 1331 | 1345 |
| 1332 template <typename T, size_t inlineCapacityA, size_t inlineCapacityB, typename A
llocator> | 1346 template <typename T, |
| 1333 inline bool operator!=(const Vector<T, inlineCapacityA, Allocator>& a, const Vec
tor<T, inlineCapacityB, Allocator>& b) | 1347 size_t inlineCapacityA, |
| 1334 { | 1348 size_t inlineCapacityB, |
| 1335 return !(a == b); | 1349 typename Allocator> |
| 1350 inline bool operator!=(const Vector<T, inlineCapacityA, Allocator>& a, |
| 1351 const Vector<T, inlineCapacityB, Allocator>& b) { |
| 1352 return !(a == b); |
| 1336 } | 1353 } |
| 1337 | 1354 |
// This is only called if the allocator is a HeapAllocator. It is used when
// visiting during a tracing GC.
template <typename T, size_t inlineCapacity, typename Allocator>
template <typename VisitorDispatcher>
void Vector<T, inlineCapacity, Allocator>::trace(VisitorDispatcher visitor) {
  ASSERT(Allocator::isGarbageCollected);  // Garbage collector must be enabled.
  // A null buffer means there is nothing to mark or trace.
  if (!buffer())
    return;
  if (this->hasOutOfLineBuffer()) {
    // This is a performance optimization for a case where the buffer has
    // been already traced by somewhere. This can happen if the conservative
    // scanning traced an on-stack (false-positive or real) pointer to the
    // HeapVector, and then visitor->trace() traces the HeapVector.
    if (Allocator::isHeapObjectAlive(buffer()))
      return;
    // Mark the out-of-line backing store itself without tracing through it
    // here; the element-wise trace below handles the contents.
    Allocator::markNoTracing(visitor, buffer());
  }
  const T* bufferBegin = buffer();
  const T* bufferEnd = buffer() + size();
  // Only element types that themselves need tracing get the per-element walk.
  if (NeedsTracingTrait<VectorTraits<T>>::value) {
    for (const T* bufferEntry = bufferBegin; bufferEntry != bufferEnd;
         bufferEntry++)
      // const_cast: the trace hook takes a mutable reference even though this
      // loop iterates via const pointers into our own buffer.
      Allocator::template trace<VisitorDispatcher, T, VectorTraits<T>>(
          visitor, *const_cast<T*>(bufferEntry));
    // Sanity-check the slots between size() and capacity(); presumably a
    // debug-only verification that unused slots stayed cleared — TODO confirm.
    checkUnusedSlots(buffer() + size(), buffer() + capacity());
  }
}
| 1364 | 1382 |
#if !ENABLE(OILPAN)
// In non-Oilpan builds, report that Vector<T, N> does not need GC tracing.
// (With Oilpan enabled this specialization is compiled out; the tracing
// answer presumably comes from a default/other specialization — TODO confirm.)
template <typename T, size_t N>
struct NeedsTracing<Vector<T, N>> {
  STATIC_ONLY(NeedsTracing);
  static const bool value = false;
};
#endif
| 1372 | 1390 |
| 1373 } // namespace WTF | 1391 } // namespace WTF |
| 1374 | 1392 |
| 1375 using WTF::Vector; | 1393 using WTF::Vector; |
| 1376 | 1394 |
| 1377 #endif // WTF_Vector_h | 1395 #endif // WTF_Vector_h |
| OLD | NEW |