Source code

Revision control

Other Tools

1
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
2
/* vim: set ts=8 sts=2 et sw=2 tw=80: */
3
/* This Source Code Form is subject to the terms of the Mozilla Public
4
* License, v. 2.0. If a copy of the MPL was not distributed with this
5
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
6
7
/* A type/length-parametrized vector class. */
8
9
#ifndef mozilla_Vector_h
10
#define mozilla_Vector_h
11
12
#include "mozilla/Alignment.h"
13
#include "mozilla/AllocPolicy.h"
14
#include "mozilla/ArrayUtils.h" // for PointerRangeSize
15
#include "mozilla/Assertions.h"
16
#include "mozilla/Attributes.h"
17
#include "mozilla/MathAlgorithms.h"
18
#include "mozilla/MemoryReporting.h"
19
#include "mozilla/Move.h"
20
#include "mozilla/OperatorNewExtensions.h"
21
#include "mozilla/ReentrancyGuard.h"
22
#include "mozilla/TemplateLib.h"
23
#include "mozilla/TypeTraits.h"
24
25
#include <new> // for placement new
26
27
namespace mozilla {
28
29
template <typename T, size_t N, class AllocPolicy>
30
class Vector;
31
32
namespace detail {
33
34
/*
35
* Check that the given capacity wastes the minimal amount of space if
36
* allocated on the heap. This means that aCapacity*sizeof(T) is as close to a
37
* power-of-two as possible. growStorageBy() is responsible for ensuring this.
38
*/
39
template <typename T>
40
static bool CapacityHasExcessSpace(size_t aCapacity) {
41
size_t size = aCapacity * sizeof(T);
42
return RoundUpPow2(size) - size >= sizeof(T);
43
}
44
45
/*
 * This template class provides a default implementation for vector operations
 * when the element type is not known to be a POD, as judged by IsPod.
 */
template <typename T, size_t N, class AP, bool IsPod>
struct VectorImpl {
  /*
   * Constructs an object in the uninitialized memory at *aDst with aArgs.
   */
  template <typename... Args>
  MOZ_NONNULL(1)
  static inline void new_(T* aDst, Args&&... aArgs) {
    // Placement-new with perfect forwarding; the KnownNotNull tag selects the
    // overload that skips the null check a plain placement new would do.
    new (KnownNotNull, aDst) T(std::forward<Args>(aArgs)...);
  }

  /* Destroys constructed objects in the range [aBegin, aEnd). */
  static inline void destroy(T* aBegin, T* aEnd) {
    MOZ_ASSERT(aBegin <= aEnd);
    for (T* p = aBegin; p < aEnd; ++p) {
      p->~T();
    }
  }

  /* Constructs objects in the uninitialized range [aBegin, aEnd). */
  static inline void initialize(T* aBegin, T* aEnd) {
    MOZ_ASSERT(aBegin <= aEnd);
    for (T* p = aBegin; p < aEnd; ++p) {
      new_(p);
    }
  }

  /*
   * Copy-constructs objects in the uninitialized range
   * [aDst, aDst+(aSrcEnd-aSrcStart)) from the range [aSrcStart, aSrcEnd).
   */
  template <typename U>
  static inline void copyConstruct(T* aDst, const U* aSrcStart,
                                   const U* aSrcEnd) {
    MOZ_ASSERT(aSrcStart <= aSrcEnd);
    for (const U* p = aSrcStart; p < aSrcEnd; ++p, ++aDst) {
      new_(aDst, *p);
    }
  }

  /*
   * Move-constructs objects in the uninitialized range
   * [aDst, aDst+(aSrcEnd-aSrcStart)) from the range [aSrcStart, aSrcEnd).
   * The source objects are left moved-from but still constructed; the caller
   * is responsible for destroying them (see growTo below).
   */
  template <typename U>
  static inline void moveConstruct(T* aDst, U* aSrcStart, U* aSrcEnd) {
    MOZ_ASSERT(aSrcStart <= aSrcEnd);
    for (U* p = aSrcStart; p < aSrcEnd; ++p, ++aDst) {
      new_(aDst, std::move(*p));
    }
  }

  /*
   * Copy-constructs objects in the uninitialized range [aDst, aDst+aN) from
   * the same object aU.
   */
  template <typename U>
  static inline void copyConstructN(T* aDst, size_t aN, const U& aU) {
    for (T* end = aDst + aN; aDst < end; ++aDst) {
      new_(aDst, aU);
    }
  }

  /*
   * Grows the given buffer to have capacity aNewCap, preserving the objects
   * constructed in the range [begin, end) and updating aV. Assumes that (1)
   * aNewCap has not overflowed, and (2) multiplying aNewCap by sizeof(T) will
   * not overflow.
   *
   * Unlike the POD specialization below (which uses pod_realloc), this path
   * allocates a fresh buffer and move-constructs every element into it, then
   * destroys the old elements and frees the old storage. Returns false, with
   * aV untouched, if the new allocation fails.
   */
  static inline MOZ_MUST_USE bool growTo(Vector<T, N, AP>& aV, size_t aNewCap) {
    MOZ_ASSERT(!aV.usingInlineStorage());
    MOZ_ASSERT(!CapacityHasExcessSpace<T>(aNewCap));
    T* newbuf = aV.template pod_malloc<T>(aNewCap);
    if (MOZ_UNLIKELY(!newbuf)) {
      return false;
    }
    T* dst = newbuf;
    T* src = aV.beginNoCheck();
    for (; src < aV.endNoCheck(); ++dst, ++src) {
      new_(dst, std::move(*src));
    }
    // The old elements are moved-from but still constructed: run their
    // destructors before releasing the old allocation.
    VectorImpl::destroy(aV.beginNoCheck(), aV.endNoCheck());
    aV.free_(aV.mBegin, aV.mTail.mCapacity);
    aV.mBegin = newbuf;
    /* aV.mLength is unchanged. */
    aV.mTail.mCapacity = aNewCap;
    return true;
  }
};
138
139
/*
 * This partial template specialization provides a default implementation for
 * vector operations when the element type is known to be a POD, as judged by
 * IsPod.
 */
template <typename T, size_t N, class AP>
struct VectorImpl<T, N, AP, true> {
  /* "Constructs" *aDst from aArgs by plain assignment; T is POD. */
  template <typename... Args>
  MOZ_NONNULL(1)
  static inline void new_(T* aDst, Args&&... aArgs) {
    // Explicitly construct a local object instead of using a temporary since
    // T(args...) will be treated like a C-style cast in the unary case and
    // allow unsafe conversions. Both forms should be equivalent to an
    // optimizing compiler.
    T temp(std::forward<Args>(aArgs)...);
    *aDst = temp;
  }

  /* PODs need no destruction; this is deliberately a no-op. */
  static inline void destroy(T*, T*) {}

  /* Value-initializes the uninitialized range [aBegin, aEnd). */
  static inline void initialize(T* aBegin, T* aEnd) {
    /*
     * You would think that memset would be a big win (or even break even)
     * when we know T is a POD. But currently it's not. This is probably
     * because |append| tends to be given small ranges and memset requires
     * a function call that doesn't get inlined.
     *
     * memset(aBegin, 0, sizeof(T) * (aEnd - aBegin));
     */
    MOZ_ASSERT(aBegin <= aEnd);
    for (T* p = aBegin; p < aEnd; ++p) {
      new_(p);
    }
  }

  /*
   * Copies [aSrcStart, aSrcEnd) into the uninitialized range starting at
   * aDst, element by element.
   */
  template <typename U>
  static inline void copyConstruct(T* aDst, const U* aSrcStart,
                                   const U* aSrcEnd) {
    /*
     * See above memset comment. Also, notice that copyConstruct is
     * currently templated (T != U), so memcpy won't work without
     * requiring T == U.
     *
     * memcpy(aDst, aSrcStart, sizeof(T) * (aSrcEnd - aSrcStart));
     */
    MOZ_ASSERT(aSrcStart <= aSrcEnd);
    for (const U* p = aSrcStart; p < aSrcEnd; ++p, ++aDst) {
      new_(aDst, *p);
    }
  }

  /* For PODs a "move" is just a copy; note the const source pointers. */
  template <typename U>
  static inline void moveConstruct(T* aDst, const U* aSrcStart,
                                   const U* aSrcEnd) {
    copyConstruct(aDst, aSrcStart, aSrcEnd);
  }

  /* Fills the uninitialized range [aDst, aDst+aN) with copies of aT. */
  static inline void copyConstructN(T* aDst, size_t aN, const T& aT) {
    for (T* end = aDst + aN; aDst < end; ++aDst) {
      new_(aDst, aT);
    }
  }

  /*
   * Grows aV's heap buffer to capacity aNewCap. PODs may be relocated
   * bytewise, so this delegates to the allocator's pod_realloc instead of
   * allocating and copying manually. Returns false, leaving aV untouched,
   * on allocation failure. Assumes aNewCap (and aNewCap * sizeof(T)) has
   * not overflowed.
   */
  static inline MOZ_MUST_USE bool growTo(Vector<T, N, AP>& aV, size_t aNewCap) {
    MOZ_ASSERT(!aV.usingInlineStorage());
    MOZ_ASSERT(!CapacityHasExcessSpace<T>(aNewCap));
    T* newbuf =
        aV.template pod_realloc<T>(aV.mBegin, aV.mTail.mCapacity, aNewCap);
    if (MOZ_UNLIKELY(!newbuf)) {
      return false;
    }
    aV.mBegin = newbuf;
    /* aV.mLength is unchanged. */
    aV.mTail.mCapacity = aNewCap;
    return true;
  }

  /*
   * Shrinks aV's heap allocation to exactly fit its length. No-op when the
   * vector uses inline storage or is already snug; an empty vector releases
   * its heap buffer and points back at the (possibly zero-capacity) inline
   * storage. A failed shrinking realloc is ignored -- the existing, larger
   * buffer is simply kept.
   */
  static inline void podResizeToFit(Vector<T, N, AP>& aV) {
    if (aV.usingInlineStorage() || aV.mLength == aV.mTail.mCapacity) {
      return;
    }
    if (!aV.mLength) {
      aV.free_(aV.mBegin, aV.mTail.mCapacity);
      aV.mBegin = aV.inlineStorage();
      aV.mTail.mCapacity = aV.kInlineCapacity;
#ifdef DEBUG
      aV.mTail.mReserved = 0;
#endif
      return;
    }
    T* newbuf =
        aV.template pod_realloc<T>(aV.mBegin, aV.mTail.mCapacity, aV.mLength);
    if (MOZ_UNLIKELY(!newbuf)) {
      return;  // shrink failed; keep the larger buffer
    }
    aV.mBegin = newbuf;
    aV.mTail.mCapacity = aV.mLength;
#ifdef DEBUG
    aV.mTail.mReserved = aV.mLength;
#endif
  }
};
241
242
// A struct for TestVector.cpp to access private internal fields.
243
// DO NOT DEFINE IN YOUR OWN CODE.
244
struct VectorTesting;
245
246
} // namespace detail
247
248
/*
249
* STL-like container providing a short-lived, dynamic buffer. Vector calls the
250
* constructors/destructors of all elements stored in its internal buffer, so
251
* non-PODs may be safely used. Additionally, Vector will store the first N
252
* elements in-place before resorting to dynamic allocation.
253
*
254
* T requirements:
255
* - default and copy constructible, assignable, destructible
256
* - operations do not throw
257
* MinInlineCapacity requirements:
258
* - any value, however, MinInlineCapacity is clamped to min/max values
259
* AllocPolicy:
260
* - see "Allocation policies" in AllocPolicy.h (defaults to
261
* mozilla::MallocAllocPolicy)
262
*
263
* Vector is not reentrant: T member functions called during Vector member
264
* functions must not call back into the same object!
265
*/
266
template <typename T, size_t MinInlineCapacity = 0,
267
class AllocPolicy = MallocAllocPolicy>
268
class MOZ_NON_PARAM Vector final : private AllocPolicy {
269
/* utilities */
270
271
static const bool kElemIsPod = IsPod<T>::value;
272
typedef detail::VectorImpl<T, MinInlineCapacity, AllocPolicy, kElemIsPod>
273
Impl;
274
friend struct detail::VectorImpl<T, MinInlineCapacity, AllocPolicy,
275
kElemIsPod>;
276
277
friend struct detail::VectorTesting;
278
279
MOZ_MUST_USE bool growStorageBy(size_t aIncr);
280
MOZ_MUST_USE bool convertToHeapStorage(size_t aNewCap);
281
MOZ_MUST_USE bool maybeCheckSimulatedOOM(size_t aRequestedSize);
282
283
/* magic constants */
284
285
/**
286
* The maximum space allocated for inline element storage.
287
*
288
* We reduce space by what the AllocPolicy base class and prior Vector member
289
* fields likely consume to attempt to play well with binary size classes.
290
*/
291
static constexpr size_t kMaxInlineBytes =
292
1024 -
293
(sizeof(AllocPolicy) + sizeof(T*) + sizeof(size_t) + sizeof(size_t));
294
295
/**
296
* The number of T elements of inline capacity built into this Vector. This
297
* is usually |MinInlineCapacity|, but it may be less (or zero!) for large T.
298
*
299
* We use a partially-specialized template (not explicit specialization, which
300
* is only allowed at namespace scope) to compute this value. The benefit is
301
* that |sizeof(T)| need not be computed, and |T| doesn't have to be fully
302
* defined at the time |Vector<T>| appears, if no inline storage is requested.
303
*/
304
template <size_t MinimumInlineCapacity, size_t Dummy>
305
struct ComputeCapacity {
306
static constexpr size_t value =
307
tl::Min<MinimumInlineCapacity, kMaxInlineBytes / sizeof(T)>::value;
308
};
309
310
template <size_t Dummy>
311
struct ComputeCapacity<0, Dummy> {
312
static constexpr size_t value = 0;
313
};
314
315
/** The actual inline capacity in number of elements T. This may be zero! */
316
static constexpr size_t kInlineCapacity =
317
ComputeCapacity<MinInlineCapacity, 0>::value;
318
319
/* member data */
320
321
/*
322
* Pointer to the buffer, be it inline or heap-allocated. Only [mBegin,
323
* mBegin + mLength) hold valid constructed T objects. The range [mBegin +
324
* mLength, mBegin + mCapacity) holds uninitialized memory. The range
325
* [mBegin + mLength, mBegin + mReserved) also holds uninitialized memory
326
* previously allocated by a call to reserve().
327
*/
328
T* mBegin;
329
330
/* Number of elements in the vector. */
331
size_t mLength;
332
333
/*
334
* Memory used to store capacity, reserved element count (debug builds only),
335
* and inline storage. The simple "answer" is:
336
*
337
* size_t mCapacity;
338
* #ifdef DEBUG
339
* size_t mReserved;
340
* #endif
341
* alignas(T) unsigned char mBytes[kInlineCapacity * sizeof(T)];
342
*
343
* but there are complications. First, C++ forbids zero-sized arrays that
344
* might result. Second, we don't want zero capacity to affect Vector's size
345
* (even empty classes take up a byte, unless they're base classes).
346
*
347
* Yet again, we eliminate the zero-sized array using partial specialization.
348
* And we eliminate potential size hit by putting capacity/reserved in one
349
* struct, then putting the array (if any) in a derived struct. If no array
350
* is needed, the derived struct won't consume extra space.
351
*/
352
struct CapacityAndReserved {
353
explicit CapacityAndReserved(size_t aCapacity, size_t aReserved)
354
: mCapacity(aCapacity)
355
#ifdef DEBUG
356
,
357
mReserved(aReserved)
358
#endif
359
{
360
}
361
CapacityAndReserved() = default;
362
363
/* Max number of elements storable in the vector without resizing. */
364
size_t mCapacity;
365
366
#ifdef DEBUG
367
/* Max elements of reserved or used space in this vector. */
368
size_t mReserved;
369
#endif
370
};
371
372
// Silence warnings about this struct possibly being padded dued to the
373
// alignas() in it -- there's nothing we can do to avoid it.
374
#ifdef _MSC_VER
375
# pragma warning(push)
376
# pragma warning(disable : 4324)
377
#endif // _MSC_VER
378
379
template <size_t Capacity, size_t Dummy>
380
struct CRAndStorage : CapacityAndReserved {
381
explicit CRAndStorage(size_t aCapacity, size_t aReserved)
382
: CapacityAndReserved(aCapacity, aReserved) {}
383
CRAndStorage() = default;
384
385
alignas(T) unsigned char mBytes[Capacity * sizeof(T)];
386
387
// GCC fails due to -Werror=strict-aliasing if |mBytes| is directly cast to
388
// T*. Indirecting through this function addresses the problem.
389
void* data() { return mBytes; }
390
391
T* storage() { return static_cast<T*>(data()); }
392
};
393
394
template <size_t Dummy>
395
struct CRAndStorage<0, Dummy> : CapacityAndReserved {
396
explicit CRAndStorage(size_t aCapacity, size_t aReserved)
397
: CapacityAndReserved(aCapacity, aReserved) {}
398
CRAndStorage() = default;
399
400
T* storage() {
401
// If this returns |nullptr|, functions like |Vector::begin()| would too,
402
// breaking callers that pass a vector's elements as pointer/length to
403
// code that bounds its operation by length but (even just as a sanity
404
// check) always wants a non-null pointer. Fake up an aligned, non-null
405
// pointer to support these callers.
406
return reinterpret_cast<T*>(sizeof(T));
407
}
408
};
409
410
CRAndStorage<kInlineCapacity, 0> mTail;
411
412
#ifdef _MSC_VER
413
# pragma warning(pop)
414
#endif // _MSC_VER
415
416
#ifdef DEBUG
417
friend class ReentrancyGuard;
418
bool mEntered;
419
#endif
420
421
/* private accessors */
422
423
bool usingInlineStorage() const {
424
return mBegin == const_cast<Vector*>(this)->inlineStorage();
425
}
426
427
T* inlineStorage() { return mTail.storage(); }
428
429
T* beginNoCheck() const { return mBegin; }
430
431
T* endNoCheck() { return mBegin + mLength; }
432
433
const T* endNoCheck() const { return mBegin + mLength; }
434
435
#ifdef DEBUG
436
/**
437
* The amount of explicitly allocated space in this vector that is immediately
438
* available to be filled by appending additional elements. This value is
439
* always greater than or equal to |length()| -- the vector's actual elements
440
* are implicitly reserved. This value is always less than or equal to
441
* |capacity()|. It may be explicitly increased using the |reserve()| method.
442
*/
443
size_t reserved() const {
444
MOZ_ASSERT(mLength <= mTail.mReserved);
445
MOZ_ASSERT(mTail.mReserved <= mTail.mCapacity);
446
return mTail.mReserved;
447
}
448
#endif
449
450
/* Append operations guaranteed to succeed due to pre-reserved space. */
451
template <typename U>
452
void internalAppend(U&& aU);
453
template <typename U, size_t O, class BP>
454
void internalAppendAll(const Vector<U, O, BP>& aU);
455
void internalAppendN(const T& aT, size_t aN);
456
template <typename U>
457
void internalAppend(const U* aBegin, size_t aLength);
458
459
public:
460
static const size_t sMaxInlineStorage = MinInlineCapacity;
461
462
typedef T ElementType;
463
464
explicit Vector(AllocPolicy = AllocPolicy());
465
Vector(Vector&&); /* Move constructor. */
466
Vector& operator=(Vector&&); /* Move assignment. */
467
~Vector();
468
469
/* accessors */
470
471
const AllocPolicy& allocPolicy() const { return *this; }
472
473
AllocPolicy& allocPolicy() { return *this; }
474
475
enum { InlineLength = MinInlineCapacity };
476
477
size_t length() const { return mLength; }
478
479
bool empty() const { return mLength == 0; }
480
481
size_t capacity() const { return mTail.mCapacity; }
482
483
T* begin() {
484
MOZ_ASSERT(!mEntered);
485
return mBegin;
486
}
487
488
const T* begin() const {
489
MOZ_ASSERT(!mEntered);
490
return mBegin;
491
}
492
493
T* end() {
494
MOZ_ASSERT(!mEntered);
495
return mBegin + mLength;
496
}
497
498
const T* end() const {
499
MOZ_ASSERT(!mEntered);
500
return mBegin + mLength;
501
}
502
503
T& operator[](size_t aIndex) {
504
MOZ_ASSERT(!mEntered);
505
MOZ_ASSERT(aIndex < mLength);
506
return begin()[aIndex];
507
}
508
509
const T& operator[](size_t aIndex) const {
510
MOZ_ASSERT(!mEntered);
511
MOZ_ASSERT(aIndex < mLength);
512
return begin()[aIndex];
513
}
514
515
T& back() {
516
MOZ_ASSERT(!mEntered);
517
MOZ_ASSERT(!empty());
518
return *(end() - 1);
519
}
520
521
const T& back() const {
522
MOZ_ASSERT(!mEntered);
523
MOZ_ASSERT(!empty());
524
return *(end() - 1);
525
}
526
527
class Range {
528
friend class Vector;
529
T* mCur;
530
T* mEnd;
531
Range(T* aCur, T* aEnd) : mCur(aCur), mEnd(aEnd) {
532
MOZ_ASSERT(aCur <= aEnd);
533
}
534
535
public:
536
bool empty() const { return mCur == mEnd; }
537
size_t remain() const { return PointerRangeSize(mCur, mEnd); }
538
T& front() const {
539
MOZ_ASSERT(!empty());
540
return *mCur;
541
}
542
void popFront() {
543
MOZ_ASSERT(!empty());
544
++mCur;
545
}
546
T popCopyFront() {
547
MOZ_ASSERT(!empty());
548
return *mCur++;
549
}
550
};
551
552
class ConstRange {
553
friend class Vector;
554
const T* mCur;
555
const T* mEnd;
556
ConstRange(const T* aCur, const T* aEnd) : mCur(aCur), mEnd(aEnd) {
557
MOZ_ASSERT(aCur <= aEnd);
558
}
559
560
public:
561
bool empty() const { return mCur == mEnd; }
562
size_t remain() const { return PointerRangeSize(mCur, mEnd); }
563
const T& front() const {
564
MOZ_ASSERT(!empty());
565
return *mCur;
566
}
567
void popFront() {
568
MOZ_ASSERT(!empty());
569
++mCur;
570
}
571
T popCopyFront() {
572
MOZ_ASSERT(!empty());
573
return *mCur++;
574
}
575
};
576
577
Range all() { return Range(begin(), end()); }
578
ConstRange all() const { return ConstRange(begin(), end()); }
579
580
/* mutators */
581
582
/**
583
* Reverse the order of the elements in the vector in place.
584
*/
585
void reverse();
586
587
/**
588
* Given that the vector is empty, grow the internal capacity to |aRequest|,
589
* keeping the length 0.
590
*/
591
MOZ_MUST_USE bool initCapacity(size_t aRequest);
592
593
/**
594
* Given that the vector is empty, grow the internal capacity and length to
595
* |aRequest| leaving the elements' memory completely uninitialized (with all
596
* the associated hazards and caveats). This avoids the usual allocation-size
597
* rounding that happens in resize and overhead of initialization for elements
598
* that are about to be overwritten.
599
*/
600
MOZ_MUST_USE bool initLengthUninitialized(size_t aRequest);
601
602
/**
603
* If reserve(aRequest) succeeds and |aRequest >= length()|, then appending
604
* |aRequest - length()| elements, in any sequence of append/appendAll calls,
605
* is guaranteed to succeed.
606
*
607
* A request to reserve an amount less than the current length does not affect
608
* reserved space.
609