/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
2
/* vim: set ts=8 sts=2 et sw=2 tw=80: */
3
/* This Source Code Form is subject to the terms of the Mozilla Public
4
* License, v. 2.0. If a copy of the MPL was not distributed with this
5
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
6
7
/* A type/length-parametrized vector class. */
8
9
#ifndef mozilla_Vector_h
10
#define mozilla_Vector_h
11
12
#include "mozilla/Alignment.h"
13
#include "mozilla/AllocPolicy.h"
14
#include "mozilla/ArrayUtils.h" // for PointerRangeSize
15
#include "mozilla/Assertions.h"
16
#include "mozilla/Attributes.h"
17
#include "mozilla/MathAlgorithms.h"
18
#include "mozilla/MemoryReporting.h"
19
#include "mozilla/Move.h"
20
#include "mozilla/OperatorNewExtensions.h"
21
#include "mozilla/ReentrancyGuard.h"
22
#include "mozilla/TemplateLib.h"
23
#include "mozilla/TypeTraits.h"
24
25
#include <new> // for placement new
26
27
/* Silence dire "bugs in previous versions of MSVC have been fixed" warnings */
28
#ifdef _MSC_VER
29
# pragma warning(push)
30
# pragma warning(disable : 4345)
31
#endif
32
33
namespace mozilla {
34
35
template <typename T, size_t N, class AllocPolicy>
36
class Vector;
37
38
namespace detail {
39
40
/*
41
* Check that the given capacity wastes the minimal amount of space if
42
* allocated on the heap. This means that aCapacity*sizeof(T) is as close to a
43
* power-of-two as possible. growStorageBy() is responsible for ensuring this.
44
*/
45
template <typename T>
46
static bool CapacityHasExcessSpace(size_t aCapacity) {
47
size_t size = aCapacity * sizeof(T);
48
return RoundUpPow2(size) - size >= sizeof(T);
49
}
50
51
/*
52
* This template class provides a default implementation for vector operations
53
* when the element type is not known to be a POD, as judged by IsPod.
54
*/
55
template <typename T, size_t N, class AP, bool IsPod>
56
struct VectorImpl {
57
/*
58
* Constructs an object in the uninitialized memory at *aDst with aArgs.
59
*/
60
template <typename... Args>
61
MOZ_NONNULL(1)
62
static inline void new_(T* aDst, Args&&... aArgs) {
63
new (KnownNotNull, aDst) T(std::forward<Args>(aArgs)...);
64
}
65
66
/* Destroys constructed objects in the range [aBegin, aEnd). */
67
static inline void destroy(T* aBegin, T* aEnd) {
68
MOZ_ASSERT(aBegin <= aEnd);
69
for (T* p = aBegin; p < aEnd; ++p) {
70
p->~T();
71
}
72
}
73
74
/* Constructs objects in the uninitialized range [aBegin, aEnd). */
75
static inline void initialize(T* aBegin, T* aEnd) {
76
MOZ_ASSERT(aBegin <= aEnd);
77
for (T* p = aBegin; p < aEnd; ++p) {
78
new_(p);
79
}
80
}
81
82
/*
83
* Copy-constructs objects in the uninitialized range
84
* [aDst, aDst+(aSrcEnd-aSrcStart)) from the range [aSrcStart, aSrcEnd).
85
*/
86
template <typename U>
87
static inline void copyConstruct(T* aDst, const U* aSrcStart,
88
const U* aSrcEnd) {
89
MOZ_ASSERT(aSrcStart <= aSrcEnd);
90
for (const U* p = aSrcStart; p < aSrcEnd; ++p, ++aDst) {
91
new_(aDst, *p);
92
}
93
}
94
95
/*
96
* Move-constructs objects in the uninitialized range
97
* [aDst, aDst+(aSrcEnd-aSrcStart)) from the range [aSrcStart, aSrcEnd).
98
*/
99
template <typename U>
100
static inline void moveConstruct(T* aDst, U* aSrcStart, U* aSrcEnd) {
101
MOZ_ASSERT(aSrcStart <= aSrcEnd);
102
for (U* p = aSrcStart; p < aSrcEnd; ++p, ++aDst) {
103
new_(aDst, std::move(*p));
104
}
105
}
106
107
/*
108
* Copy-constructs objects in the uninitialized range [aDst, aDst+aN) from
109
* the same object aU.
110
*/
111
template <typename U>
112
static inline void copyConstructN(T* aDst, size_t aN, const U& aU) {
113
for (T* end = aDst + aN; aDst < end; ++aDst) {
114
new_(aDst, aU);
115
}
116
}
117
118
/*
119
* Grows the given buffer to have capacity aNewCap, preserving the objects
120
* constructed in the range [begin, end) and updating aV. Assumes that (1)
121
* aNewCap has not overflowed, and (2) multiplying aNewCap by sizeof(T) will
122
* not overflow.
123
*/
124
static inline MOZ_MUST_USE bool growTo(Vector<T, N, AP>& aV, size_t aNewCap) {
125
MOZ_ASSERT(!aV.usingInlineStorage());
126
MOZ_ASSERT(!CapacityHasExcessSpace<T>(aNewCap));
127
T* newbuf = aV.template pod_malloc<T>(aNewCap);
128
if (MOZ_UNLIKELY(!newbuf)) {
129
return false;
130
}
131
T* dst = newbuf;
132
T* src = aV.beginNoCheck();
133
for (; src < aV.endNoCheck(); ++dst, ++src) {
134
new_(dst, std::move(*src));
135
}
136
VectorImpl::destroy(aV.beginNoCheck(), aV.endNoCheck());
137
aV.free_(aV.mBegin, aV.mTail.mCapacity);
138
aV.mBegin = newbuf;
139
/* aV.mLength is unchanged. */
140
aV.mTail.mCapacity = aNewCap;
141
return true;
142
}
143
};
144
145
/*
 * This partial template specialization provides a default implementation for
 * vector operations when the element type is known to be a POD, as judged by
 * IsPod. PODs need no per-element construction/destruction, so buffers can
 * be grown and shrunk with pod_realloc and elements written by assignment.
 */
template <typename T, size_t N, class AP>
struct VectorImpl<T, N, AP, true> {
  /* "Constructs" the object at *aDst by plain assignment from a local. */
  template <typename... Args>
  MOZ_NONNULL(1)
  static inline void new_(T* aDst, Args&&... aArgs) {
    // Explicitly construct a local object instead of using a temporary since
    // T(args...) will be treated like a C-style cast in the unary case and
    // allow unsafe conversions. Both forms should be equivalent to an
    // optimizing compiler.
    T temp(std::forward<Args>(aArgs)...);
    *aDst = temp;
  }

  /* PODs are trivially destructible, so destruction is a no-op. */
  static inline void destroy(T*, T*) {}

  static inline void initialize(T* aBegin, T* aEnd) {
    /*
     * You would think that memset would be a big win (or even break even)
     * when we know T is a POD. But currently it's not. This is probably
     * because |append| tends to be given small ranges and memset requires
     * a function call that doesn't get inlined.
     *
     * memset(aBegin, 0, sizeof(T) * (aEnd - aBegin));
     */
    MOZ_ASSERT(aBegin <= aEnd);
    for (T* p = aBegin; p < aEnd; ++p) {
      new_(p);
    }
  }

  template <typename U>
  static inline void copyConstruct(T* aDst, const U* aSrcStart,
                                   const U* aSrcEnd) {
    /*
     * See above memset comment. Also, notice that copyConstruct is
     * currently templated (T != U), so memcpy won't work without
     * requiring T == U.
     *
     * memcpy(aDst, aSrcStart, sizeof(T) * (aSrcEnd - aSrcStart));
     */
    MOZ_ASSERT(aSrcStart <= aSrcEnd);
    for (const U* p = aSrcStart; p < aSrcEnd; ++p, ++aDst) {
      new_(aDst, *p);
    }
  }

  /* For PODs a move is just a copy; forward to copyConstruct. */
  template <typename U>
  static inline void moveConstruct(T* aDst, const U* aSrcStart,
                                   const U* aSrcEnd) {
    copyConstruct(aDst, aSrcStart, aSrcEnd);
  }

  /* Fills the uninitialized range [aDst, aDst+aN) with copies of aT. */
  static inline void copyConstructN(T* aDst, size_t aN, const T& aT) {
    for (T* end = aDst + aN; aDst < end; ++aDst) {
      new_(aDst, aT);
    }
  }

  /*
   * Grows the heap buffer to capacity aNewCap via pod_realloc, which may
   * relocate the bytes in place of an element-by-element move. Returns false
   * (leaving aV untouched) on allocation failure.
   */
  static inline MOZ_MUST_USE bool growTo(Vector<T, N, AP>& aV, size_t aNewCap) {
    MOZ_ASSERT(!aV.usingInlineStorage());
    MOZ_ASSERT(!CapacityHasExcessSpace<T>(aNewCap));
    T* newbuf =
        aV.template pod_realloc<T>(aV.mBegin, aV.mTail.mCapacity, aNewCap);
    if (MOZ_UNLIKELY(!newbuf)) {
      return false;
    }
    aV.mBegin = newbuf;
    /* aV.mLength is unchanged. */
    aV.mTail.mCapacity = aNewCap;
    return true;
  }

  /*
   * Best-effort shrink of the heap buffer to exactly aV.mLength elements.
   * An empty vector goes back to inline storage; otherwise pod_realloc is
   * used, and a failed (shrinking) realloc is simply ignored.
   */
  static inline void podResizeToFit(Vector<T, N, AP>& aV) {
    if (aV.usingInlineStorage() || aV.mLength == aV.mTail.mCapacity) {
      return;
    }
    if (!aV.mLength) {
      aV.free_(aV.mBegin, aV.mTail.mCapacity);
      aV.mBegin = aV.inlineStorage();
      aV.mTail.mCapacity = aV.kInlineCapacity;
#ifdef DEBUG
      aV.mTail.mReserved = 0;
#endif
      return;
    }
    T* newbuf =
        aV.template pod_realloc<T>(aV.mBegin, aV.mTail.mCapacity, aV.mLength);
    if (MOZ_UNLIKELY(!newbuf)) {
      return;
    }
    aV.mBegin = newbuf;
    aV.mTail.mCapacity = aV.mLength;
#ifdef DEBUG
    aV.mTail.mReserved = aV.mLength;
#endif
  }
};
247
248
// A struct for TestVector.cpp to access private internal fields.
249
// DO NOT DEFINE IN YOUR OWN CODE.
250
struct VectorTesting;
251
252
} // namespace detail
253
254
/*
255
* STL-like container providing a short-lived, dynamic buffer. Vector calls the
256
* constructors/destructors of all elements stored in its internal buffer, so
257
* non-PODs may be safely used. Additionally, Vector will store the first N
258
* elements in-place before resorting to dynamic allocation.
259
*
260
* T requirements:
261
* - default and copy constructible, assignable, destructible
262
* - operations do not throw
263
* MinInlineCapacity requirements:
264
* - any value, however, MinInlineCapacity is clamped to min/max values
265
* AllocPolicy:
266
* - see "Allocation policies" in AllocPolicy.h (defaults to
267
* mozilla::MallocAllocPolicy)
268
*
269
* Vector is not reentrant: T member functions called during Vector member
270
* functions must not call back into the same object!
271
*/
272
template <typename T, size_t MinInlineCapacity = 0,
273
class AllocPolicy = MallocAllocPolicy>
274
class MOZ_NON_PARAM Vector final : private AllocPolicy {
275
/* utilities */
276
277
static const bool kElemIsPod = IsPod<T>::value;
278
typedef detail::VectorImpl<T, MinInlineCapacity, AllocPolicy, kElemIsPod>
279
Impl;
280
friend struct detail::VectorImpl<T, MinInlineCapacity, AllocPolicy,
281
kElemIsPod>;
282
283
friend struct detail::VectorTesting;
284
285
MOZ_MUST_USE bool growStorageBy(size_t aIncr);
286
MOZ_MUST_USE bool convertToHeapStorage(size_t aNewCap);
287
MOZ_MUST_USE bool maybeCheckSimulatedOOM(size_t aRequestedSize);
288
289
/* magic constants */
290
291
/**
292
* The maximum space allocated for inline element storage.
293
*
294
* We reduce space by what the AllocPolicy base class and prior Vector member
295
* fields likely consume to attempt to play well with binary size classes.
296
*/
297
static constexpr size_t kMaxInlineBytes =
298
1024 -
299
(sizeof(AllocPolicy) + sizeof(T*) + sizeof(size_t) + sizeof(size_t));
300
301
/**
 * The number of T elements of inline capacity built into this Vector. This
 * is usually |MinInlineCapacity|, but it may be less (or zero!) for large T.
 *
 * We use a partially-specialized template (not explicit specialization, which
 * is only allowed at namespace scope) to compute this value. The benefit is
 * that |sizeof(T)| need not be computed, and |T| doesn't have to be fully
 * defined at the time |Vector<T>| appears, if no inline storage is requested.
 */
template <size_t MinimumInlineCapacity, size_t Dummy>
struct ComputeCapacity {
  // Clamp the request so the inline buffer never exceeds kMaxInlineBytes.
  static constexpr size_t value =
      tl::Min<MinimumInlineCapacity, kMaxInlineBytes / sizeof(T)>::value;
};

// No inline storage requested: the capacity is zero and, importantly,
// sizeof(T) is never evaluated, so T may be incomplete here.
template <size_t Dummy>
struct ComputeCapacity<0, Dummy> {
  static constexpr size_t value = 0;
};

/** The actual inline capacity in number of elements T. This may be zero! */
static constexpr size_t kInlineCapacity =
    ComputeCapacity<MinInlineCapacity, 0>::value;
324
325
/* member data */

/*
 * Pointer to the buffer, be it inline or heap-allocated. Only [mBegin,
 * mBegin + mLength) hold valid constructed T objects. The range [mBegin +
 * mLength, mBegin + mCapacity) holds uninitialized memory. The range
 * [mBegin + mLength, mBegin + mReserved) also holds uninitialized memory
 * previously allocated by a call to reserve().
 */
T* mBegin;

/* Number of elements in the vector. */
size_t mLength;

/*
 * Memory used to store capacity, reserved element count (debug builds only),
 * and inline storage. The simple "answer" is:
 *
 *   size_t mCapacity;
 *   #ifdef DEBUG
 *   size_t mReserved;
 *   #endif
 *   alignas(T) unsigned char mBytes[kInlineCapacity * sizeof(T)];
 *
 * but there are complications. First, C++ forbids zero-sized arrays that
 * might result. Second, we don't want zero capacity to affect Vector's size
 * (even empty classes take up a byte, unless they're base classes).
 *
 * Yet again, we eliminate the zero-sized array using partial specialization.
 * And we eliminate potential size hit by putting capacity/reserved in one
 * struct, then putting the array (if any) in a derived struct. If no array
 * is needed, the derived struct won't consume extra space.
 */
struct CapacityAndReserved {
  explicit CapacityAndReserved(size_t aCapacity, size_t aReserved)
      : mCapacity(aCapacity)
#ifdef DEBUG
        ,
        mReserved(aReserved)
#endif
  {
  }
  CapacityAndReserved() = default;

  /* Max number of elements storable in the vector without resizing. */
  size_t mCapacity;

#ifdef DEBUG
  /* Max elements of reserved or used space in this vector. */
  /* (Tracked in debug builds only; see reserved() below.) */
  size_t mReserved;
#endif
};
377
378
// Silence warnings about this struct possibly being padded due to the
// alignas() in it -- there's nothing we can do to avoid it.
#ifdef _MSC_VER
# pragma warning(push)
# pragma warning(disable : 4324)
#endif  // _MSC_VER

// CapacityAndReserved plus the inline element buffer. The buffer lives in
// this derived struct so a zero-capacity Vector pays no size cost for it.
template <size_t Capacity, size_t Dummy>
struct CRAndStorage : CapacityAndReserved {
  explicit CRAndStorage(size_t aCapacity, size_t aReserved)
      : CapacityAndReserved(aCapacity, aReserved) {}
  CRAndStorage() = default;

  alignas(T) unsigned char mBytes[Capacity * sizeof(T)];

  // GCC fails due to -Werror=strict-aliasing if |mBytes| is directly cast to
  // T*. Indirecting through this function addresses the problem.
  void* data() { return mBytes; }

  T* storage() { return static_cast<T*>(data()); }
};

// Zero inline capacity: no buffer member at all.
template <size_t Dummy>
struct CRAndStorage<0, Dummy> : CapacityAndReserved {
  explicit CRAndStorage(size_t aCapacity, size_t aReserved)
      : CapacityAndReserved(aCapacity, aReserved) {}
  CRAndStorage() = default;

  T* storage() {
    // If this returns |nullptr|, functions like |Vector::begin()| would too,
    // breaking callers that pass a vector's elements as pointer/length to
    // code that bounds its operation by length but (even just as a sanity
    // check) always wants a non-null pointer. Fake up an aligned, non-null
    // pointer to support these callers. (The pointer is never dereferenced:
    // capacity is zero, so no element is ever stored through it.)
    return reinterpret_cast<T*>(sizeof(T));
  }
};

/* Capacity, DEBUG-only reserved count, and the inline buffer (if any). */
CRAndStorage<kInlineCapacity, 0> mTail;

#ifdef _MSC_VER
# pragma warning(pop)
#endif  // _MSC_VER
421
422
#ifdef DEBUG
423
friend class ReentrancyGuard;
424
bool mEntered;
425
#endif
426
427
/* private accessors */

/* True when mBegin points at the inline buffer rather than the heap. */
bool usingInlineStorage() const {
  return mBegin == const_cast<Vector*>(this)->inlineStorage();
}

T* inlineStorage() { return mTail.storage(); }

/* begin()/end() without the reentrancy assertion performed by the public
 * accessors; for use inside mutating members. */
T* beginNoCheck() const { return mBegin; }

T* endNoCheck() { return mBegin + mLength; }

const T* endNoCheck() const { return mBegin + mLength; }

#ifdef DEBUG
/**
 * The amount of explicitly allocated space in this vector that is immediately
 * available to be filled by appending additional elements. This value is
 * always greater than or equal to |length()| -- the vector's actual elements
 * are implicitly reserved. This value is always less than or equal to
 * |capacity()|. It may be explicitly increased using the |reserve()| method.
 * (Only available in DEBUG builds, where mReserved is tracked.)
 */
size_t reserved() const {
  MOZ_ASSERT(mLength <= mTail.mReserved);
  MOZ_ASSERT(mTail.mReserved <= mTail.mCapacity);
  return mTail.mReserved;
}
#endif
455
456
/* Append operations guaranteed to succeed due to pre-reserved space. */
457
template <typename U>
458
void internalAppend(U&& aU);
459
template <typename U, size_t O, class BP>
460
void internalAppendAll(const Vector<U, O, BP>& aU);
461
void internalAppendN(const T& aT, size_t aN);
462
template <typename U>
463
void internalAppend(const U* aBegin, size_t aLength);
464
465
public:
466
static const size_t sMaxInlineStorage = MinInlineCapacity;
467
468
typedef T ElementType;
469
470
explicit Vector(AllocPolicy = AllocPolicy());
471
Vector(Vector&&); /* Move constructor. */
472
Vector& operator=(Vector&&); /* Move assignment. */
473
~Vector();
474
475
/* accessors */

/* AllocPolicy is a private base class; expose it by reference. */
const AllocPolicy& allocPolicy() const { return *this; }

AllocPolicy& allocPolicy() { return *this; }

enum { InlineLength = MinInlineCapacity };

size_t length() const { return mLength; }

bool empty() const { return mLength == 0; }

size_t capacity() const { return mTail.mCapacity; }

/* The MOZ_ASSERT(!mEntered) checks below catch reentrant access while a
 * ReentrancyGuard-protected member function is running (DEBUG only). */
T* begin() {
  MOZ_ASSERT(!mEntered);
  return mBegin;
}

const T* begin() const {
  MOZ_ASSERT(!mEntered);
  return mBegin;
}

T* end() {
  MOZ_ASSERT(!mEntered);
  return mBegin + mLength;
}

const T* end() const {
  MOZ_ASSERT(!mEntered);
  return mBegin + mLength;
}

T& operator[](size_t aIndex) {
  MOZ_ASSERT(!mEntered);
  MOZ_ASSERT(aIndex < mLength);
  return begin()[aIndex];
}

const T& operator[](size_t aIndex) const {
  MOZ_ASSERT(!mEntered);
  MOZ_ASSERT(aIndex < mLength);
  return begin()[aIndex];
}

/* Last element; the vector must be non-empty. */
T& back() {
  MOZ_ASSERT(!mEntered);
  MOZ_ASSERT(!empty());
  return *(end() - 1);
}

const T& back() const {
  MOZ_ASSERT(!mEntered);
  MOZ_ASSERT(!empty());
  return *(end() - 1);
}
532
533
/*
 * A one-pass, pop-from-the-front view over the vector's elements. Only
 * Vector (via all()) can construct one. The range does not own the elements;
 * it is invalidated by any operation that moves the vector's buffer.
 */
class Range {
  friend class Vector;
  T* mCur;
  T* mEnd;
  Range(T* aCur, T* aEnd) : mCur(aCur), mEnd(aEnd) {
    MOZ_ASSERT(aCur <= aEnd);
  }

 public:
  bool empty() const { return mCur == mEnd; }
  size_t remain() const { return PointerRangeSize(mCur, mEnd); }
  T& front() const {
    MOZ_ASSERT(!empty());
    return *mCur;
  }
  void popFront() {
    MOZ_ASSERT(!empty());
    ++mCur;
  }
  /* Returns a copy of the front element and advances past it. */
  T popCopyFront() {
    MOZ_ASSERT(!empty());
    return *mCur++;
  }
};

/* Read-only counterpart of Range. */
class ConstRange {
  friend class Vector;
  const T* mCur;
  const T* mEnd;
  ConstRange(const T* aCur, const T* aEnd) : mCur(aCur), mEnd(aEnd) {
    MOZ_ASSERT(aCur <= aEnd);
  }

 public:
  bool empty() const { return mCur == mEnd; }
  size_t remain() const { return PointerRangeSize(mCur, mEnd); }
  const T& front() const {
    MOZ_ASSERT(!empty());
    return *mCur;
  }
  void popFront() {
    MOZ_ASSERT(!empty());
    ++mCur;
  }
  T popCopyFront() {
    MOZ_ASSERT(!empty());
    return *mCur++;
  }
};

/* A Range/ConstRange covering every element currently in the vector. */
Range all() { return Range(begin(), end()); }
ConstRange all() const { return ConstRange(begin(), end()); }
585
586
/* mutators */
587
588
/**
589
* Reverse the order of the elements in the vector in place.
590
*/
591
void reverse();
592
593
/**
594
* Given that the vector is empty, grow the internal capacity to |aRequest|,
595
* keeping the length 0.
596
*/
597
MOZ_MUST_USE bool initCapacity(size_t aRequest);
598
599
/**
600
* Given that the vector is empty, grow the internal capacity and length to
601
* |aRequest| leaving the elements' memory completely uninitialized (with all
602
* the associated hazards and caveats). This avoids the usual allocation-size
603
* rounding that happens in resize and overhead of initialization for elements
604
* that are about to be overwritten.
605
*/
606
MOZ_MUST_USE bool initLengthUninitialized(size_t aRequest);
607
608
/**
609
* If reserve(aRequest) succeeds and |aRequest >= length()|, then appending
610
* |aRequest - length()| elements, in any sequence of append/appendAll calls,
611
* is guaranteed to succeed.
612
*
613
* A request to reserve an amount less than the current length does not affect
614
* reserved space.
615
*/
616
MOZ_MUST_USE bool reserve(size_t aRequest);
617
618
/**
619
* Destroy elements in the range [end() - aIncr, end()). Does not deallocate
620
* or unreserve storage for those elements.
621
*/
622
void shrinkBy(size_t aIncr);
623
624
/**
625
* Destroy elements in the range [aNewLength, end()). Does not deallocate
626
* or unreserve storage for those elements.
627
*/
628
void shrinkTo(size_t aNewLength);
629
630
/** Grow the vector by aIncr elements. */
631
MOZ_MUST_USE bool growBy(size_t aIncr);
632
633
/** Call shrinkBy or growBy based on whether newSize > length(). */
634
MOZ_MUST_USE bool resize(size_t aNewLength);
635
636
/**
637