
1
/* This Source Code Form is subject to the terms of the Mozilla Public
2
* License, v. 2.0. If a copy of the MPL was not distributed with this
3
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
4
5
//! The interning module provides a generic data structure
6
//! interning container. It is similar in concept to a
7
//! traditional string interning container, but it is
8
//! specialized to the WR thread model.
9
//!
10
//! There is an Interner structure, that lives in the
11
//! scene builder thread, and a DataStore structure
12
//! that lives in the frame builder thread.
13
//!
14
//! Hashing, interning and handle creation is done by
15
//! the interner structure during scene building.
16
//!
17
//! Delta changes for the interner are pushed during
18
//! a transaction to the frame builder. The frame builder
19
//! is then able to access the content of the interned
20
//! handles quickly, via array indexing.
21
//!
22
//! Epoch tracking ensures that the garbage collection
23
//! step which the interner uses to remove items is
24
//! only invoked on items that the frame builder thread
25
//! is no longer referencing.
26
//!
27
//! Items in the data store are stored in a traditional
28
//! free-list structure, for content access and memory
29
//! usage efficiency.
30
//!
31
//! The epoch is incremented each time a scene is
32
//! built. The most recently used scene epoch is
33
//! stored inside each handle. This is then used for
34
//! cache invalidation.
35
36
use crate::internal_types::FastHashMap;
37
use malloc_size_of::MallocSizeOf;
38
use crate::profiler::ResourceProfileCounter;
39
use std::fmt::Debug;
40
use std::hash::Hash;
41
use std::marker::PhantomData;
42
use std::{mem, ops, u64};
43
use std::sync::atomic::{AtomicUsize, Ordering};
44
use crate::util::VecHelper;
45
46
/// Scene-build counter used for cache invalidation. It is bumped once per
/// built scene and stamped into each `Handle` when the item is interned or
/// re-interned, so stale handles can be detected by the GC step.
#[cfg_attr(feature = "capture", derive(Serialize))]
#[cfg_attr(feature = "replay", derive(Deserialize))]
#[derive(Debug, Copy, Clone, MallocSizeOf, PartialEq)]
struct Epoch(u64);
50
51
/// A list of updates to be applied to the data store,
/// provided by the interning structure. This is the delta that flows
/// from the scene builder thread to the frame builder thread each
/// transaction.
#[cfg_attr(feature = "capture", derive(Serialize))]
#[cfg_attr(feature = "replay", derive(Deserialize))]
#[derive(MallocSizeOf)]
pub struct UpdateList<S> {
    /// Items to insert.
    pub insertions: Vec<Insertion<S>>,

    /// Items to remove.
    pub removals: Vec<Removal>,
}
63
64
/// A single pending insertion: write `value` into data-store slot `index`.
#[cfg_attr(feature = "capture", derive(Serialize))]
#[cfg_attr(feature = "replay", derive(Deserialize))]
#[derive(MallocSizeOf)]
pub struct Insertion<S> {
    /// Data-store slot (free-list index) to write into.
    pub index: usize,
    /// Globally unique id assigned when the key was interned.
    pub uid: ItemUid,
    /// The interned key; converted into store data via `Into` on apply.
    pub value: S,
}
72
73
/// A single pending removal: clear data-store slot `index`, which was
/// occupied by the item identified by `uid`.
#[cfg_attr(feature = "capture", derive(Serialize))]
#[cfg_attr(feature = "replay", derive(Deserialize))]
#[derive(MallocSizeOf)]
pub struct Removal {
    /// Data-store slot to invalidate (it returns to the free-list).
    pub index: usize,
    /// Globally unique id of the item being expired.
    pub uid: ItemUid,
}
80
81
impl<S> UpdateList<S> {
    /// Construct an update list with no pending insertions or removals.
    fn new() -> UpdateList<S> {
        Self {
            insertions: Vec::new(),
            removals: Vec::new(),
        }
    }

    /// Move the pending updates out of `self`, leaving it empty.
    /// The vectors left behind are pre-allocated by `VecHelper`
    /// (presumably sized from the capacity just consumed — see
    /// `crate::util::VecHelper`), so the next frame's pushes avoid
    /// regrowing from zero.
    fn take_and_preallocate(&mut self) -> UpdateList<S> {
        Self {
            insertions: self.insertions.take_and_preallocate(),
            removals: self.removals.take_and_preallocate(),
        }
    }
}
96
97
lazy_static! {
    /// Process-wide monotonically increasing counter used to mint
    /// unique `ItemUid` values across all interners.
    static ref NEXT_UID: AtomicUsize = AtomicUsize::new(0);
}
100
101
/// A globally unique identifier for an interned item, minted from the
/// `NEXT_UID` atomic counter. Unlike a `Handle`'s slot index, a uid is
/// never re-used.
#[cfg_attr(feature = "capture", derive(Serialize))]
#[cfg_attr(feature = "replay", derive(Deserialize))]
#[derive(Debug, Copy, Clone, Eq, Hash, MallocSizeOf, PartialEq)]
pub struct ItemUid {
    // Raw counter value; only exposed via `get_uid` for debugging.
    uid: usize,
}
108
109
impl ItemUid {
110
pub fn next_uid() -> ItemUid {
111
let uid = NEXT_UID.fetch_add(1, Ordering::Relaxed);
112
ItemUid { uid }
113
}
114
115
// Intended for debug usage only
116
pub fn get_uid(&self) -> usize {
117
self.uid
118
}
119
}
120
121
/// A handle to an interned item. Created by the `Interner` on the scene
/// builder thread; the frame builder thread uses it to index directly
/// into the `DataStore`.
#[cfg_attr(feature = "capture", derive(Serialize))]
#[cfg_attr(feature = "replay", derive(Deserialize))]
#[derive(Debug, MallocSizeOf)]
pub struct Handle<I> {
    // Slot in the data store / local data vectors.
    index: u32,
    // Most recent epoch in which this key was interned; used by the GC
    // step in `end_frame_and_get_pending_updates` to expire old entries.
    epoch: Epoch,
    // Globally unique id of the interned item (never re-used).
    uid: ItemUid,
    // Ties the handle to its `Internable` marker type without storing it.
    _marker: PhantomData<I>,
}
130
131
impl<I> Clone for Handle<I> {
132
fn clone(&self) -> Self {
133
Handle {
134
index: self.index,
135
epoch: self.epoch,
136
uid: self.uid,
137
_marker: self._marker,
138
}
139
}
140
}
141
142
// All fields are plain-old-data (`PhantomData<I>` included), so handles
// can be freely copied regardless of `I`.
impl<I> Copy for Handle<I> {}
144
impl<I> Handle<I> {
    /// The globally unique id of the item this handle refers to.
    pub fn uid(&self) -> ItemUid {
        self.uid
    }
}
149
150
/// Hook trait implemented by interning keys. `on_interned` is invoked
/// (debug builds only — see the `cfg(debug_assertions)` call site in
/// `Interner::intern`) right after a new key is interned, receiving the
/// freshly assigned uid. The default implementation does nothing.
pub trait InternDebug {
    fn on_interned(&self, _uid: ItemUid) {}
}
153
154
/// The data store lives in the frame builder thread. It
/// contains a free-list of items for fast access.
#[cfg_attr(feature = "capture", derive(Serialize))]
#[cfg_attr(feature = "replay", derive(Deserialize))]
#[derive(MallocSizeOf)]
pub struct DataStore<I: Internable> {
    // Slot-indexed storage; `None` marks a slot freed by the interner's
    // GC step, awaiting re-use.
    items: Vec<Option<I::StoreData>>,
}
162
163
impl<I: Internable> Default for DataStore<I> {
    /// An empty store with no allocated slots.
    fn default() -> Self {
        Self { items: Vec::new() }
    }
}
170
171
impl<I: Internable> DataStore<I> {
172
/// Apply any updates from the scene builder thread to
173
/// this data store.
174
pub fn apply_updates(
175
&mut self,
176
update_list: UpdateList<I::Key>,
177
profile_counter: &mut ResourceProfileCounter,
178
) {
179
for insertion in update_list.insertions {
180
self.items
181
.entry(insertion.index)
182
.set(Some(insertion.value.into()));
183
}
184
185
for removal in update_list.removals {
186
self.items[removal.index] = None;
187
}
188
189
let per_item_size = mem::size_of::<I::Key>() + mem::size_of::<I::StoreData>();
190
profile_counter.set(self.items.len(), per_item_size * self.items.len());
191
}
192
}
193
194
/// Retrieve an item from the store via handle
impl<I: Internable> ops::Index<Handle<I>> for DataStore<I> {
    type Output = I::StoreData;
    fn index(&self, handle: Handle<I>) -> &I::StoreData {
        // Panics if the slot was freed; epoch tracking is expected to
        // prevent the frame builder from holding such a handle.
        self.items[handle.index as usize].as_ref().expect("Bad datastore lookup")
    }
}
201
202
/// Retrieve a mutable item from the store via handle
impl<I: Internable> ops::IndexMut<Handle<I>> for DataStore<I> {
    fn index_mut(&mut self, handle: Handle<I>) -> &mut I::StoreData {
        // Panics if the slot was freed; see the immutable `Index` impl.
        self.items[handle.index as usize].as_mut().expect("Bad datastore lookup")
    }
}
209
210
/// The main interning data structure. This lives in the
/// scene builder thread, and handles hashing and interning
/// unique data structures. It also manages a free-list for
/// the items in the data store, which is synchronized via
/// an update list of additions / removals.
#[cfg_attr(feature = "capture", derive(Serialize))]
#[cfg_attr(feature = "replay", derive(Deserialize))]
#[derive(MallocSizeOf)]
pub struct Interner<I: Internable> {
    /// Uniquely map an interning key to a handle
    map: FastHashMap<I::Key, Handle<I>>,
    /// List of free slots in the data store for re-use.
    free_list: Vec<usize>,
    /// Pending list of updates that need to be applied.
    update_list: UpdateList<I::Key>,
    /// The current epoch for the interner.
    current_epoch: Epoch,
    /// The information associated with each interned
    /// item that can be accessed by the interner.
    local_data: Vec<I::InternData>,
}
231
232
impl<I: Internable> Default for Interner<I> {
233
fn default() -> Self {
234
Interner {
235
map: FastHashMap::default(),
236
free_list: Vec::new(),
237
update_list: UpdateList::new(),
238
current_epoch: Epoch(1),
239
local_data: Vec::new(),
240
}
241
}
242
}
243
244
impl<I: Internable> Interner<I> {
    /// Intern a data structure, and return a handle to
    /// that data. The handle can then be stored in the
    /// frame builder, and safely accessed via the data
    /// store that lives in the frame builder thread.
    /// The provided closure is invoked to build the
    /// local data about an interned structure if the
    /// key isn't already interned.
    pub fn intern<F>(
        &mut self,
        data: &I::Key,
        fun: F,
    ) -> Handle<I> where F: FnOnce() -> I::InternData {
        // Use get_mut rather than entry here to avoid
        // cloning the (sometimes large) key in the common
        // case, where the data already exists in the interner.
        if let Some(handle) = self.map.get_mut(data) {
            // Refresh the epoch so the GC step below keeps this
            // entry alive.
            handle.epoch = self.current_epoch;
            return *handle;
        }

        // We need to intern a new data item. First, find out
        // if there is a spare slot in the free-list that we
        // can use. Otherwise, append to the end of the list.
        let index = match self.free_list.pop() {
            Some(index) => index,
            None => self.local_data.len(),
        };

        let uid = ItemUid::next_uid();

        // Add a pending update to insert the new data.
        self.update_list.insertions.push(Insertion {
            index,
            uid,
            value: data.clone(),
        });

        // Generate a handle for access via the data store.
        let handle = Handle {
            index: index as u32,
            epoch: self.current_epoch,
            uid,
            _marker: PhantomData,
        };

        // Debug-build hook so key types can observe the uid they
        // were assigned (see `InternDebug`).
        #[cfg(debug_assertions)]
        data.on_interned(handle.uid);

        // Store this handle so the next time it is
        // interned, it gets re-used.
        self.map.insert(data.clone(), handle);

        // Create the local data for this item that is
        // being interned.
        self.local_data.entry(index).set(fun());

        handle
    }

    /// Retrieve the pending list of updates for an interner
    /// that need to be applied to the data store. Also run
    /// a GC step that removes old entries.
    pub fn end_frame_and_get_pending_updates(&mut self) -> UpdateList<I::Key> {
        let mut update_list = self.update_list.take_and_preallocate();

        let free_list = &mut self.free_list;
        let current_epoch = self.current_epoch.0;

        // First, run a GC step. Walk through the handles, and
        // if we find any that haven't been used for some time,
        // remove them. If this ever shows up in profiles, we
        // can make the GC step partial (scan only part of the
        // map each frame). It also might make sense in the
        // future to adjust how long items remain in the cache
        // based on the current size of the list.
        self.map.retain(|_, handle| {
            // Expire anything not re-interned in the last 10 epochs
            // (i.e. the last 10 scene builds).
            if handle.epoch.0 + 10 < current_epoch {
                // To expire an item:
                // - Add index to the free-list for re-use.
                // - Add an update to the data store to invalidate this slot.
                // - Remove from the hash map.
                free_list.push(handle.index as usize);
                update_list.removals.push(Removal {
                    index: handle.index as usize,
                    uid: handle.uid,
                });
                return false;
            }

            true
        });

        // Begin the next epoch
        self.current_epoch = Epoch(self.current_epoch.0 + 1);

        update_list
    }
}
343
344
/// Retrieve the local data for an item from the interner via handle
impl<I: Internable> ops::Index<Handle<I>> for Interner<I> {
    type Output = I::InternData;
    fn index(&self, handle: Handle<I>) -> &I::InternData {
        // Unlike the data store, local data slots are never cleared,
        // so this is a plain (panicking-on-out-of-range) index.
        &self.local_data[handle.index as usize]
    }
}
351
352
// The trick to make trait bounds configurable by features.
mod dummy {
    // When the "capture" feature is off, provide a no-op `Serialize`
    // implemented for every type, so the serialization bounds on
    // `Internable` are trivially satisfied.
    #[cfg(not(feature = "capture"))]
    pub trait Serialize {}
    #[cfg(not(feature = "capture"))]
    impl<T> Serialize for T {}
    // Likewise a no-op `Deserialize` when "replay" is off.
    #[cfg(not(feature = "replay"))]
    pub trait Deserialize<'a> {}
    #[cfg(not(feature = "replay"))]
    impl<'a, T> Deserialize<'a> for T {}
}
363
#[cfg(feature = "capture")]
364
use serde::Serialize as InternSerialize;
365
#[cfg(not(feature = "capture"))]
366
use self::dummy::Serialize as InternSerialize;
367
#[cfg(feature = "replay")]
368
use serde::Deserialize as InternDeserialize;
369
#[cfg(not(feature = "replay"))]
370
use self::dummy::Deserialize as InternDeserialize;
371
372
/// Implement `Internable` for a type that wants to participate in interning.
pub trait Internable: MallocSizeOf {
    /// The hashable key held in the interner's map; cloned into the
    /// update list when a new item is interned.
    type Key: Eq + Hash + Clone + Debug + MallocSizeOf + InternDebug + InternSerialize + for<'a> InternDeserialize<'a>;
    /// The frame-builder-side representation stored in `DataStore`,
    /// built from the key via `From` when updates are applied.
    type StoreData: From<Self::Key> + MallocSizeOf + InternSerialize + for<'a> InternDeserialize<'a>;
    /// The scene-builder-side data kept in the interner's `local_data`,
    /// built by the closure passed to `Interner::intern`.
    type InternData: MallocSizeOf + InternSerialize + for<'a> InternDeserialize<'a>;
}