/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at https://mozilla.org/MPL/2.0/. */

#![allow(unsafe_code)]

//! The rule tree.

use crate::applicable_declarations::ApplicableDeclarationList;
use crate::hash::{self, FxHashMap};
use crate::properties::{Importance, LonghandIdSet, PropertyDeclarationBlock};
use crate::shared_lock::{Locked, SharedRwLockReadGuard, StylesheetGuards};
use crate::stylesheets::{Origin, StyleRule};
use crate::thread_state;
use malloc_size_of::{MallocShallowSizeOf, MallocSizeOf, MallocSizeOfOps};
use parking_lot::RwLock;
use servo_arc::{Arc, ArcBorrow, ArcUnion, ArcUnionBorrow};
use smallvec::SmallVec;
use std::io::{self, Write};
use std::mem;
use std::ptr;
use std::sync::atomic::{AtomicPtr, AtomicUsize, Ordering};

/// The rule tree, the structure servo uses to preserve the results of selector
/// matching.
///
/// This is organized as a tree of rules. When a node matches a set of rules,
/// they're inserted in order in the tree, starting with the less specific one.
///
/// When a rule is inserted in the tree, other elements may share the path up to
/// a given rule. If that's the case, we don't duplicate child nodes, but share
/// them.
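///
/// For example (an illustrative sketch, not part of the original docs): if one
/// element matches rules [A, B] and another matches [A, B, C], both paths share
/// the chain root -> A -> B, and only the second element adds a child node for
/// C under the shared B node.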
///
/// When the rule node refcount drops to zero, it doesn't get freed. It gets
/// instead put into a free list, and it is potentially GC'd after a while in a
/// single-threaded fashion.
///
/// That way, a rule node that represents a likely-to-match-again rule (like a
/// :hover rule) can be reused if we haven't GC'd it yet.
///
/// See the discussion at https://github.com/servo/servo/pull/15562 and the IRC
/// to see a discussion about the different memory orderings used here.
#[derive(Debug)]
pub struct RuleTree {
    root: StrongRuleNode,
}

impl Drop for RuleTree {
    fn drop(&mut self) {
        // GC the rule tree.
        unsafe {
            self.gc();
        }

        // After the GC, the free list should be empty.
        debug_assert_eq!(
            self.root.get().next_free.load(Ordering::Relaxed),
            FREE_LIST_SENTINEL
        );

        // Remove the sentinel. This indicates that GCs will no longer occur.
        // Any further drops of StrongRuleNodes must occur on the main thread,
        // and will trigger synchronous dropping of the Rule nodes.
        self.root
            .get()
            .next_free
            .store(ptr::null_mut(), Ordering::Relaxed);
    }
}

impl MallocSizeOf for RuleTree {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut n = 0;
        let mut stack = SmallVec::<[_; 32]>::new();
        stack.push(self.root.downgrade());

        while let Some(node) = stack.pop() {
            n += unsafe { ops.malloc_size_of(node.ptr()) };
            let children = unsafe { (*node.ptr()).children.read() };
            // Also count the shallow size of the children container;
            // discarding this value would under-report the tree's size.
            n += children.shallow_size_of(ops);
            children.each(|c| stack.push(c.clone()));
        }

        n
    }
}

#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
struct ChildKey(CascadeLevel, ptr::NonNull<()>);

unsafe impl Send for ChildKey {}
unsafe impl Sync for ChildKey {}

/// A style source for the rule node. It can either be a CSS style rule or a
/// declaration block.
///
/// Note that, even though the declaration block from inside the style rule
/// could be enough to implement the rule tree, keeping the whole rule provides
/// more debuggability, and also the ability to show those selectors to
/// devtools.
#[derive(Clone, Debug)]
pub struct StyleSource(ArcUnion<Locked<StyleRule>, Locked<PropertyDeclarationBlock>>);

impl PartialEq for StyleSource {
    fn eq(&self, other: &Self) -> bool {
        ArcUnion::ptr_eq(&self.0, &other.0)
    }
}

impl StyleSource {
    /// Creates a StyleSource from a StyleRule.
    pub fn from_rule(rule: Arc<Locked<StyleRule>>) -> Self {
        StyleSource(ArcUnion::from_first(rule))
    }

    #[inline]
    fn key(&self) -> ptr::NonNull<()> {
        self.0.ptr()
    }

    /// Creates a StyleSource from a PropertyDeclarationBlock.
    pub fn from_declarations(decls: Arc<Locked<PropertyDeclarationBlock>>) -> Self {
        StyleSource(ArcUnion::from_second(decls))
    }

    fn dump<W: Write>(&self, guard: &SharedRwLockReadGuard, writer: &mut W) {
        if let Some(ref rule) = self.0.as_first() {
            let rule = rule.read_with(guard);
            let _ = write!(writer, "{:?}", rule.selectors);
        }

        let _ = write!(writer, " -> {:?}", self.read(guard).declarations());
    }

    /// Read the style source guard, thus obtaining read access to the
    /// underlying property declaration block.
    #[inline]
    pub fn read<'a>(&'a self, guard: &'a SharedRwLockReadGuard) -> &'a PropertyDeclarationBlock {
        let block: &Locked<PropertyDeclarationBlock> = match self.0.borrow() {
            ArcUnionBorrow::First(ref rule) => &rule.get().read_with(guard).block,
            ArcUnionBorrow::Second(ref block) => block.get(),
        };
        block.read_with(guard)
    }

    /// Returns the style rule if applicable, otherwise None.
    pub fn as_rule(&self) -> Option<ArcBorrow<Locked<StyleRule>>> {
        self.0.as_first()
    }

    /// Returns the declaration block if applicable, otherwise None.
    pub fn as_declarations(&self) -> Option<ArcBorrow<Locked<PropertyDeclarationBlock>>> {
        self.0.as_second()
    }
}

/// This value exists here so that a node that pushes itself onto the list can
/// know that it is in the free list by looking at its next pointer, and
/// comparing it with null.
///
/// The root node doesn't have a null pointer in the free list, but this value.
const FREE_LIST_SENTINEL: *mut RuleNode = 0x01 as *mut RuleNode;

/// A second sentinel value for the free list, indicating that it's locked (i.e.
/// another thread is currently adding an entry). We spin if we find this value.
const FREE_LIST_LOCKED: *mut RuleNode = 0x02 as *mut RuleNode;

/// A counter to track how many shadow root rules deep we are. This is used to
/// handle:
///
///
/// See the static functions for the meaning of different values.
#[derive(Clone, Copy, Debug, Eq, Hash, MallocSizeOf, Ord, PartialEq, PartialOrd)]
pub struct ShadowCascadeOrder(i8);

impl ShadowCascadeOrder {
    /// A level for the outermost shadow tree (the shadow tree we own, and the
    /// ones from the slots we're slotted in).
    #[inline]
    pub fn for_outermost_shadow_tree() -> Self {
        Self(-1)
    }

    /// A level for the element's tree.
    #[inline]
    fn for_same_tree() -> Self {
        Self(0)
    }

    /// A level for the innermost containing tree (the one closest to the
    /// element).
    #[inline]
    pub fn for_innermost_containing_tree() -> Self {
        Self(1)
    }

    /// Decrement the level, moving inwards. We should only move inwards if
    /// we're traversing slots.
    #[inline]
    pub fn dec(&mut self) {
        debug_assert!(self.0 < 0);
        self.0 = self.0.saturating_sub(1);
    }

    /// Increment the level, moving inwards. We should only move inwards if
    /// we're traversing slots.
    #[inline]
    pub fn inc(&mut self) {
        debug_assert_ne!(self.0, -1);
        self.0 = self.0.saturating_add(1);
    }
}

impl std::ops::Neg for ShadowCascadeOrder {
    type Output = Self;
    #[inline]
    fn neg(self) -> Self {
        Self(self.0.neg())
    }
}

impl RuleTree {
    /// Construct a new rule tree.
    pub fn new() -> Self {
        RuleTree {
            root: StrongRuleNode::new(Box::new(RuleNode::root())),
        }
    }

    /// Get the root rule node.
    pub fn root(&self) -> &StrongRuleNode {
        &self.root
    }

    fn dump<W: Write>(&self, guards: &StylesheetGuards, writer: &mut W) {
        let _ = writeln!(writer, " + RuleTree");
        self.root.get().dump(guards, writer, 0);
    }

    /// Dump the rule tree to stdout.
    pub fn dump_stdout(&self, guards: &StylesheetGuards) {
        let mut stdout = io::stdout();
        self.dump(guards, &mut stdout);
    }

    /// Inserts the given rules, which must be in proper order by specificity,
    /// and returns the corresponding rule node representing the last inserted
    /// one.
    ///
    /// !important rules are detected and inserted into the appropriate position
    /// in the rule tree. This allows selector matching to ignore importance,
    /// while still maintaining the appropriate cascade order in the rule tree.
    pub fn insert_ordered_rules_with_important<'a, I>(
        &self,
        iter: I,
        guards: &StylesheetGuards,
    ) -> StrongRuleNode
    where
        I: Iterator<Item = (StyleSource, CascadeLevel)>,
    {
        use self::CascadeLevel::*;
        let mut current = self.root.clone();

        let mut found_important = false;

        let mut important_author = SmallVec::<[(StyleSource, ShadowCascadeOrder); 4]>::new();

        let mut important_user = SmallVec::<[StyleSource; 4]>::new();
        let mut important_ua = SmallVec::<[StyleSource; 4]>::new();
        let mut transition = None;

        for (source, level) in iter {
            debug_assert!(!level.is_important(), "Important levels handled internally");
            let any_important = {
                let pdb = source.read(level.guard(guards));
                pdb.any_important()
            };

            if any_important {
                found_important = true;
                match level {
                    AuthorNormal {
                        shadow_cascade_order,
                    } => {
                        important_author.push((source.clone(), shadow_cascade_order));
                    },
                    UANormal => important_ua.push(source.clone()),
                    UserNormal => important_user.push(source.clone()),
                    _ => {},
                };
            }

            // We don't optimize out empty rules, even though we could.
            //
            // Inspector relies on every rule being inserted in the normal level
            // at least once, in order to return the rules with the correct
            // specificity order.
            //
            // TODO(emilio): If we want to apply these optimizations without
            // breaking inspector's expectations, we'd need to run
            // selector-matching again at the inspector's request. That may or
            // may not be a better trade-off.
            if matches!(level, Transitions) && found_important {
                // There can be at most one transition, and it will come at
                // the end of the iterator. Stash it and apply it after
                // !important rules.
                debug_assert!(transition.is_none());
                transition = Some(source);
            } else {
                current = current.ensure_child(self.root.downgrade(), source, level);
            }
        }

        // Early-return in the common case of no !important declarations.
        if !found_important {
            return current;
        }

        // Insert important declarations, in order of increasing importance,
        // followed by any transition rule.
        //
        // Inner shadow wins over same-tree, which wins over outer-shadow.
        //
        // We negate the shadow cascade order to preserve the right PartialOrd
        // behavior.
        if !important_author.is_empty() &&
            important_author.first().unwrap().1 != important_author.last().unwrap().1
        {
            // We only need to sort if the important rules come from
            // different trees, but we need this sort to be stable.
            //
            // FIXME(emilio): This could maybe be smarter, probably by chunking
            // the important rules while inserting, and iterating the outer
            // chunks in reverse order.
            //
            // That is, if we have rules with levels like: -1 -1 -1 0 0 0 1 1 1,
            // we're really only sorting the chunks, while keeping elements
            // inside the same chunk already sorted. Seems like we could try to
            // keep a SmallVec-of-SmallVecs with the chunks and just iterate the
            // outer in reverse.
            important_author.sort_by_key(|&(_, order)| -order);
        }

        for (source, shadow_cascade_order) in important_author.drain(..) {
            current = current.ensure_child(
                self.root.downgrade(),
                source,
                AuthorImportant {
                    shadow_cascade_order: -shadow_cascade_order,
                },
            );
        }

        for source in important_user.drain(..) {
            current = current.ensure_child(self.root.downgrade(), source, UserImportant);
        }

        for source in important_ua.drain(..) {
            current = current.ensure_child(self.root.downgrade(), source, UAImportant);
        }

        if let Some(source) = transition {
            current = current.ensure_child(self.root.downgrade(), source, Transitions);
        }

        current
    }

    /// Given a list of applicable declarations, insert the rules and return the
    /// corresponding rule node.
    pub fn compute_rule_node(
        &self,
        applicable_declarations: &mut ApplicableDeclarationList,
        guards: &StylesheetGuards,
    ) -> StrongRuleNode {
        self.insert_ordered_rules_with_important(
            applicable_declarations.drain(..).map(|d| d.for_rule_tree()),
            guards,
        )
    }

    /// Insert the given rules, which must be in proper order by specificity,
    /// and return the corresponding rule node representing the last inserted
    /// one.
    pub fn insert_ordered_rules<'a, I>(&self, iter: I) -> StrongRuleNode
    where
        I: Iterator<Item = (StyleSource, CascadeLevel)>,
    {
        self.insert_ordered_rules_from(self.root.clone(), iter)
    }

    fn insert_ordered_rules_from<'a, I>(&self, from: StrongRuleNode, iter: I) -> StrongRuleNode
    where
        I: Iterator<Item = (StyleSource, CascadeLevel)>,
    {
        let mut current = from;
        for (source, level) in iter {
            current = current.ensure_child(self.root.downgrade(), source, level);
        }
        current
    }

    /// This can only be called when no other thread is accessing this tree.
    pub unsafe fn gc(&self) {
        self.root.gc();
    }

    /// This can only be called when no other thread is accessing this tree.
    pub unsafe fn maybe_gc(&self) {
        #[cfg(debug_assertions)]
        self.maybe_dump_stats();

        self.root.maybe_gc();
    }

    #[cfg(debug_assertions)]
    fn maybe_dump_stats(&self) {
        use itertools::Itertools;
        use std::cell::Cell;
        use std::time::{Duration, Instant};

        if !log_enabled!(log::Level::Trace) {
            return;
        }

        const RULE_TREE_STATS_INTERVAL: Duration = Duration::from_secs(2);

        thread_local! {
            pub static LAST_STATS: Cell<Instant> = Cell::new(Instant::now());
        };

        let should_dump = LAST_STATS.with(|s| {
            let now = Instant::now();
            if now.duration_since(s.get()) < RULE_TREE_STATS_INTERVAL {
                return false;
            }
            s.set(now);
            true
        });

        if !should_dump {
            return;
        }

        let mut children_count = FxHashMap::default();

        let mut stack = SmallVec::<[_; 32]>::new();
        stack.push(self.root.clone());
        while let Some(node) = stack.pop() {
            let children = node.get().children.read();
            *children_count.entry(children.len()).or_insert(0) += 1;
            children.each(|c| stack.push(c.upgrade()));
        }

        trace!("Rule tree stats:");
        let counts = children_count.keys().sorted();
        for count in counts {
            trace!(" {} - {}", count, children_count[count]);
        }
    }

    /// Replaces a rule at a given level (if present) with another rule.
    ///
    /// Returns the resulting node that represents the new path, or None if
    /// the old path is still valid.
    pub fn update_rule_at_level(
        &self,
        level: CascadeLevel,
        pdb: Option<ArcBorrow<Locked<PropertyDeclarationBlock>>>,
        path: &StrongRuleNode,
        guards: &StylesheetGuards,
        important_rules_changed: &mut bool,
    ) -> Option<StrongRuleNode> {
        // TODO(emilio): Being smarter with lifetimes we could avoid a bit of
        // the refcount churn.
        let mut current = path.clone();
        *important_rules_changed = false;

        // First walk up until the first less-or-equally specific rule.
        let mut children = SmallVec::<[_; 10]>::new();
        while current.get().level > level {
            children.push((
                current.get().source.as_ref().unwrap().clone(),
                current.get().level,
            ));
            current = current.parent().unwrap().clone();
        }

        // Then remove the one at the level we want to replace, if any.
        //
        // NOTE: Here we assume that only one rule can be at the level we're
        // replacing.
        //
        // This is certainly true for HTML style attribute rules, animations and
        // transitions, but may not be so for SMIL animations, which we'd need
        // to special-case (it isn't hard; it's just a matter of removing the
        // `if` and the special cases, and replacing them with a `while` loop,
        // forgoing the optimizations).
        if current.get().level == level {
            *important_rules_changed |= level.is_important();

            let current_decls = current.get().source.as_ref().unwrap().as_declarations();

            // If the only rule at the level we're replacing is exactly the
            // same as `pdb`, we're done, and `path` is still valid.
            if let (Some(ref pdb), Some(ref current_decls)) = (pdb, current_decls) {
                // If the only rule at the level we're replacing is exactly the
                // same as `pdb`, we're done, and `path` is still valid.
                //
                // TODO(emilio): Another potential optimization is the one where
                // we can just replace the rule at that level for `pdb`, and
                // then we don't need to re-create the children, and `path` is
                // also equally valid. This is less likely, and would require an
                // in-place mutation of the source, which is, at best, fiddly,
                // so let's skip it for now.
                let is_here_already = ArcBorrow::ptr_eq(pdb, current_decls);
                if is_here_already {
                    debug!("Picking the fast path in rule replacement");
                    return None;
                }
            }

            if current_decls.is_some() {
                current = current.parent().unwrap().clone();
            }
        }

        // Insert the rule if it's relevant at this level in the cascade.
        //
        // These optimizations are likely to be important, because the levels
        // where replacements apply (style and animations) tend to trigger
        // pretty bad styling cases already.
        if let Some(pdb) = pdb {
            if level.is_important() {
                if pdb.read_with(level.guard(guards)).any_important() {
                    current = current.ensure_child(
                        self.root.downgrade(),
                        StyleSource::from_declarations(pdb.clone_arc()),
                        level,
                    );
                    *important_rules_changed = true;
                }
            } else {
                if pdb.read_with(level.guard(guards)).any_normal() {
                    current = current.ensure_child(
                        self.root.downgrade(),
                        StyleSource::from_declarations(pdb.clone_arc()),
                        level,
                    );
                }
            }
        }

        // Now the rule is in the relevant place, push the children as
        // necessary.
        let rule = self.insert_ordered_rules_from(current, children.drain(..).rev());
        Some(rule)
    }

    /// Returns a new rule node without the Transitions-level rule.
    pub fn remove_transition_rule_if_applicable(&self, path: &StrongRuleNode) -> StrongRuleNode {
        // Return a clone if there is no transition level.
        if path.cascade_level() != CascadeLevel::Transitions {
            return path.clone();
        }

        path.parent().unwrap().clone()
    }

    /// Returns a new rule node without rules from declarative animations.
    pub fn remove_animation_rules(&self, path: &StrongRuleNode) -> StrongRuleNode {
        // Return a clone if there are no animation rules.
        if !path.has_animation_or_transition_rules() {
            return path.clone();
        }

        let iter = path
            .self_and_ancestors()
            .take_while(|node| node.cascade_level() >= CascadeLevel::SMILOverride);
        let mut last = path;
        let mut children = SmallVec::<[_; 10]>::new();
        for node in iter {
            if !node.cascade_level().is_animation() {
                children.push((
                    node.get().source.as_ref().unwrap().clone(),
                    node.cascade_level(),
                ));
            }
            last = node;
        }

        let rule = self
            .insert_ordered_rules_from(last.parent().unwrap().clone(), children.drain(..).rev());
        rule
    }

    /// Returns a new rule node by adding animation rules at the transition
    /// level. The additional rules must be appropriate for the transition
    /// level of the cascade, which is the highest level of the cascade.
    /// (This is the case for one current caller, the cover rule used
    /// for CSS transitions.)
    pub fn add_animation_rules_at_transition_level(
        &self,
        path: &StrongRuleNode,
        pdb: Arc<Locked<PropertyDeclarationBlock>>,
        guards: &StylesheetGuards,
    ) -> StrongRuleNode {
        let mut dummy = false;
        self.update_rule_at_level(
            CascadeLevel::Transitions,
            Some(pdb.borrow_arc()),
            path,
            guards,
            &mut dummy,
        )
        .expect("Should return a valid rule node")
    }
}

/// The number of RuleNodes added to the free list before we will consider
/// doing a GC when calling maybe_gc(). (The value is copied from Gecko,
/// where it likely did not result from a rigorous performance analysis.)
const RULE_TREE_GC_INTERVAL: usize = 300;

/// The cascade level these rules are relevant at, as per [1][2][3].
///
/// Presentational hints for SVG and HTML are in the "author-level
/// zero-specificity" level, that is, right after user rules, and before author
/// rules.
///
/// The order of variants declared here is significant, and must be in
/// _ascending_ order of precedence.
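///
/// For example (illustrative only), the derived `PartialOrd` reflects this
/// ordering: `CascadeLevel::UserNormal < CascadeLevel::Animations <
/// CascadeLevel::Transitions`.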
///
/// See also [4] for the Shadow DOM bits. We rely on the invariant that rules
/// from outside the tree the element is in can't affect the element.
///
/// The opposite is not true: rules from an "inner" shadow tree (i.e., via
/// :host and ::slotted) may affect an element connected to the document or to
/// an "outer" shadow tree.
///
#[repr(u8)]
#[derive(Clone, Copy, Debug, Eq, Hash, MallocSizeOf, PartialEq, PartialOrd)]
pub enum CascadeLevel {
    /// Normal User-Agent rules.
    UANormal,
    /// User normal rules.
    UserNormal,
    /// Presentational hints.
    PresHints,
    /// Shadow DOM styles from author styles.
    AuthorNormal {
        /// The order in the shadow tree hierarchy. This number is relative to
        /// the tree of the element, and thus the only invariants that need to
        /// be preserved are:
        ///
        ///  * Zero is the same tree as the element that matched the rule. This
        ///    is important so that we can optimize style attribute insertions.
        ///
        ///  * The levels are ordered in accordance with
        shadow_cascade_order: ShadowCascadeOrder,
    },
    /// SVG SMIL animations.
    SMILOverride,
    /// CSS animations and script-generated animations.
    Animations,
    /// Author-supplied important rules.
    AuthorImportant {
        /// The order in the shadow tree hierarchy, inverted, so that PartialOrd
        /// does the right thing.
        shadow_cascade_order: ShadowCascadeOrder,
    },
    /// User important rules.
    UserImportant,
    /// User-agent important rules.
    UAImportant,
    /// Transitions
    Transitions,
}

impl CascadeLevel {
    /// Pack this cascade level in a single byte.
    ///
    /// We have 10 levels, which we can represent with 4 bits, plus an optional
    /// cascade order, which we clamp to three bits max and represent with a
    /// fourth bit for the sign.
    ///
    /// So this creates: SOOODDDD
    ///
    /// Where `S` is the sign of the order (one if negative, 0 otherwise), `O`
    /// is the absolute value of the order, and `D`s are the discriminant.
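    ///
    /// For example (an illustrative sketch, not part of the original docs):
    ///
    /// ```ignore
    /// let level = CascadeLevel::AuthorImportant {
    ///     shadow_cascade_order: ShadowCascadeOrder(-2),
    /// };
    /// let byte = level.to_byte_lossy();
    /// assert_eq!(byte, 0b1010_0110); // S = 1, OOO = 010 (|-2|), DDDD = 0110 (6)
    /// assert_eq!(CascadeLevel::from_byte(byte), level);
    /// ```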
    #[inline]
    pub fn to_byte_lossy(&self) -> u8 {
        let (discriminant, order) = match *self {
            Self::UANormal => (0, 0),
            Self::UserNormal => (1, 0),
            Self::PresHints => (2, 0),
            Self::AuthorNormal {
                shadow_cascade_order,
            } => (3, shadow_cascade_order.0),
            Self::SMILOverride => (4, 0),
            Self::Animations => (5, 0),
            Self::AuthorImportant {
                shadow_cascade_order,
            } => (6, shadow_cascade_order.0),
            Self::UserImportant => (7, 0),
            Self::UAImportant => (8, 0),
            Self::Transitions => (9, 0),
        };

        debug_assert_eq!(discriminant & 0xf, discriminant);
        if order == 0 {
            return discriminant;
        }

        let negative = order < 0;
        let value = std::cmp::min(order.abs() as u8, 0b111);
        (negative as u8) << 7 | value << 4 | discriminant
    }

    /// Convert back from the single-byte representation of the cascade level
    /// explained above.
    #[inline]
    pub fn from_byte(b: u8) -> Self {
        let order = {
            let abs = ((b & 0b01110000) >> 4) as i8;
            let negative = b & 0b10000000 != 0;
            if negative {
                -abs
            } else {
                abs
            }
        };
        let discriminant = b & 0xf;
        let level = match discriminant {
            0 => Self::UANormal,
            1 => Self::UserNormal,
            2 => Self::PresHints,
            3 => {
                return Self::AuthorNormal {
                    shadow_cascade_order: ShadowCascadeOrder(order),
                }
            },
            4 => Self::SMILOverride,
            5 => Self::Animations,
            6 => {
                return Self::AuthorImportant {
                    shadow_cascade_order: ShadowCascadeOrder(order),
                }
            },
            7 => Self::UserImportant,
            8 => Self::UAImportant,
            9 => Self::Transitions,
            _ => unreachable!("Didn't expect {} as a discriminant", discriminant),
        };
        debug_assert_eq!(order, 0, "Didn't expect an order value for {:?}", level);
        level
    }

    /// Select a lock guard for this level.
    pub fn guard<'a>(&self, guards: &'a StylesheetGuards<'a>) -> &'a SharedRwLockReadGuard<'a> {
        match *self {
            CascadeLevel::UANormal |
            CascadeLevel::UserNormal |
            CascadeLevel::UserImportant |
            CascadeLevel::UAImportant => guards.ua_or_user,
            _ => guards.author,
        }
    }

    /// Returns the cascade level for author important declarations from the
    /// same tree as the element.
    #[inline]
    pub fn same_tree_author_important() -> Self {
        CascadeLevel::AuthorImportant {
            shadow_cascade_order: ShadowCascadeOrder::for_same_tree(),
        }
    }

    /// Returns the cascade level for author normal declarations from the same
    /// tree as the element.
    #[inline]
    pub fn same_tree_author_normal() -> Self {
        CascadeLevel::AuthorNormal {
            shadow_cascade_order: ShadowCascadeOrder::for_same_tree(),
        }
    }

    /// Returns whether this cascade level represents important rules of some
    /// sort.
    #[inline]
    pub fn is_important(&self) -> bool {
        match *self {
            CascadeLevel::AuthorImportant { .. } |
            CascadeLevel::UserImportant |
            CascadeLevel::UAImportant => true,
            _ => false,
        }
    }

    /// Returns the importance relevant for this rule. Pretty similar to
    /// `is_important`.
    #[inline]
    pub fn importance(&self) -> Importance {
        if self.is_important() {
            Importance::Important
        } else {
            Importance::Normal
        }
    }

    /// Returns the cascade origin of the rule.
    #[inline]
    pub fn origin(&self) -> Origin {
        match *self {
            CascadeLevel::UAImportant | CascadeLevel::UANormal => Origin::UserAgent,
            CascadeLevel::UserImportant | CascadeLevel::UserNormal => Origin::User,
            CascadeLevel::PresHints |
            CascadeLevel::AuthorNormal { .. } |
            CascadeLevel::AuthorImportant { .. } |
            CascadeLevel::SMILOverride |
            CascadeLevel::Animations |
            CascadeLevel::Transitions => Origin::Author,
        }
    }

    /// Returns whether this cascade level represents animation rules.
    #[inline]
    pub fn is_animation(&self) -> bool {
        match *self {
            CascadeLevel::SMILOverride | CascadeLevel::Animations | CascadeLevel::Transitions => {
                true
            },
            _ => false,
        }
    }
}

/// The children of a single rule node.
///
/// We optimize the case of no kids and a single child, since they're by far the
/// most common case and it'd cause a bunch of bloat for no reason.
///
/// The children remove themselves when they go away, which means that it's ok
/// for us to store weak pointers to them.
enum RuleNodeChildren {
    /// There are no kids.
    Empty,
    /// There's just one kid. This is an extremely common case, so we don't
    /// bother allocating a map for it.
    One(WeakRuleNode),
    /// At least at one point in time there was more than one kid (that is to
    /// say, we don't bother re-allocating if children are removed dynamically).
    Map(Box<FxHashMap<ChildKey, WeakRuleNode>>),
}

impl MallocShallowSizeOf for RuleNodeChildren {
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        match *self {
            RuleNodeChildren::One(..) | RuleNodeChildren::Empty => 0,
            RuleNodeChildren::Map(ref m) => {
                // Want to account for both the box and the hashmap.
                m.shallow_size_of(ops) + (**m).shallow_size_of(ops)
            },
        }
    }
}

impl Default for RuleNodeChildren {
    fn default() -> Self {
        RuleNodeChildren::Empty
    }
}

impl RuleNodeChildren {
    /// Executes a given function for each of the children.
    fn each(&self, mut f: impl FnMut(&WeakRuleNode)) {
        match *self {
            RuleNodeChildren::Empty => {},
            RuleNodeChildren::One(ref child) => f(child),
            RuleNodeChildren::Map(ref map) => {
                for (_key, kid) in map.iter() {
                    f(kid)
                }
            },
        }
    }

    fn len(&self) -> usize {
        match *self {
            RuleNodeChildren::Empty => 0,
            RuleNodeChildren::One(..) => 1,
            RuleNodeChildren::Map(ref map) => map.len(),
        }
    }

    fn is_empty(&self) -> bool {
        self.len() == 0
    }

    fn get(&self, key: &ChildKey) -> Option<&WeakRuleNode> {
        match *self {
            RuleNodeChildren::Empty => return None,
            RuleNodeChildren::One(ref kid) => {
                // We're read-locked, so no need to do refcount stuff, since the
                // child is only removed from the main thread, _and_ it'd need
                // to write-lock us anyway.
                if unsafe { (*kid.ptr()).key() } == *key {
                    Some(kid)
                } else {
                    None
                }
            },
            RuleNodeChildren::Map(ref map) => map.get(&key),
        }
    }

    fn get_or_insert_with(
        &mut self,
        key: ChildKey,
        get_new_child: impl FnOnce() -> StrongRuleNode,
    ) -> StrongRuleNode {
        let existing_child_key = match *self {
            RuleNodeChildren::Empty => {
                let new = get_new_child();
                debug_assert_eq!(new.get().key(), key);
                *self = RuleNodeChildren::One(new.downgrade());
                return new;
            },
            RuleNodeChildren::One(ref weak) => unsafe {
                // We're necessarily locked, so it's fine to look at our
                // weak-child without refcount-traffic.
                let existing_child_key = (*weak.ptr()).key();
                if existing_child_key == key {
                    return weak.upgrade();
                }
                existing_child_key
            },
            RuleNodeChildren::Map(ref mut map) => {
                return match map.entry(key) {
                    hash::map::Entry::Occupied(ref occupied) => occupied.get().upgrade(),
                    hash::map::Entry::Vacant(vacant) => {
                        let new = get_new_child();

                        debug_assert_eq!(new.get().key(), key);
                        vacant.insert(new.downgrade());

                        new
                    },
                };
            },
        };

        let existing_child = match mem::replace(self, RuleNodeChildren::Empty) {
            RuleNodeChildren::One(o) => o,
            _ => unreachable!(),
        };
        // Two rule-nodes are still a not-totally-uncommon thing, so
        // avoid over-allocating entries.
        //
        // TODO(emilio): Maybe just inline two kids too?
        let mut children = Box::new(FxHashMap::with_capacity_and_hasher(2, Default::default()));
        children.insert(existing_child_key, existing_child);

        let new = get_new_child();
        debug_assert_eq!(new.get().key(), key);
        children.insert(key, new.downgrade());

        *self = RuleNodeChildren::Map(children);

        new
    }

    fn remove(&mut self, key: &ChildKey) -> Option<WeakRuleNode> {
        match *self {
            RuleNodeChildren::Empty => return None,
            RuleNodeChildren::One(ref one) => {
                if unsafe { (*one.ptr()).key() } != *key {
                    return None;
                }
            },
            RuleNodeChildren::Map(ref mut multiple) => {
                return multiple.remove(key);
            },
        }

        match mem::replace(self, RuleNodeChildren::Empty) {
            RuleNodeChildren::One(o) => Some(o),
            _ => unreachable!(),
        }
    }
}

/// A node in the rule tree.
pub struct RuleNode {
    /// The root node. Only the root has no root pointer, for obvious reasons.
    root: Option<WeakRuleNode>,

    /// The parent rule node. Only the root has no parent.
    parent: Option<StrongRuleNode>,

    /// The actual style source, either coming from a selector in a StyleRule,
    /// or a raw property declaration block (like the style attribute).
    ///
    /// None for the root node.
    source: Option<StyleSource>,

    /// The cascade level this rule is positioned at.
    level: CascadeLevel,

    refcount: AtomicUsize,

    /// Only used for the root, stores the number of free rule nodes that are
    /// around.
    free_count: AtomicUsize,

    /// The children of a given rule node. Children remove themselves from here
    /// when they go away.
    children: RwLock<RuleNodeChildren>,

    /// The next item in the rule tree free list, that starts on the root node.
    ///
    /// When this is set to null, that means that the rule tree has been torn
    /// down, and GCs will no longer occur. When this happens, StrongRuleNodes
    /// may only be dropped on the main thread, and teardown happens
    /// synchronously.
    next_free: AtomicPtr<RuleNode>,
}

unsafe impl Sync for RuleTree {}
unsafe impl Send for RuleTree {}

// On Gecko builds, hook into the leak checking machinery.
#[cfg(feature = "gecko_refcount_logging")]
mod gecko_leak_checking {
    use super::RuleNode;
    use std::mem::size_of;
    use std::os::raw::{c_char, c_void};

    extern "C" {
        fn NS_LogCtor(aPtr: *mut c_void, aTypeName: *const c_char, aSize: u32);
        fn NS_LogDtor(aPtr: *mut c_void, aTypeName: *const c_char, aSize: u32);
    }

    static NAME: &'static [u8] = b"RuleNode\0";

    /// Logs the creation of a heap-allocated object to Gecko's leak-checking machinery.
    pub fn log_ctor(ptr: *const RuleNode) {
        let s = NAME as *const [u8] as *const u8 as *const c_char;
        unsafe {
            NS_LogCtor(ptr as *mut c_void, s, size_of::<RuleNode>() as u32);
        }
    }

    /// Logs the destruction of a heap-allocated object to Gecko's leak-checking machinery.
    pub fn log_dtor(ptr: *const RuleNode) {
        let s = NAME as *const [u8] as *const u8 as *const c_char;
        unsafe {
            NS_LogDtor(ptr as *mut c_void, s, size_of::<RuleNode>() as u32);
        }
    }
}

#[inline(always)]
fn log_new(_ptr: *const RuleNode) {
    #[cfg(feature = "gecko_refcount_logging")]
    gecko_leak_checking::log_ctor(_ptr);
}

#[inline(always)]
fn log_drop(_ptr: *const RuleNode) {
    #[cfg(feature = "gecko_refcount_logging")]
    gecko_leak_checking::log_dtor(_ptr);
}

impl RuleNode {
    fn new(
        root: WeakRuleNode,
        parent: StrongRuleNode,
        source: StyleSource,
        level: CascadeLevel,
    ) -> Self {
        debug_assert!(root.upgrade().parent().is_none());
        RuleNode {
            root: Some(root),
            parent: Some(parent),
            source: Some(source),
            level: level,
            refcount: AtomicUsize::new(1),
            children: Default::default(),
            free_count: AtomicUsize::new(0),
            next_free: AtomicPtr::new(ptr::null_mut()),
        }
    }

    fn root() -> Self {
        RuleNode {
            root: None,
            parent: None,
            source: None,
            level: CascadeLevel::UANormal,
            refcount: AtomicUsize::new(1),
            free_count: AtomicUsize::new(0),
            children: Default::default(),
            next_free: AtomicPtr::new(FREE_LIST_SENTINEL),
        }
    }

    fn key(&self) -> ChildKey {
        ChildKey(
            self.level,
            self.source
                .as_ref()
                .expect("Called key() on the root node")
                .key(),
        )
    }

    fn is_root(&self) -> bool {
        self.parent.is_none()
    }

    fn free_count(&self) -> &AtomicUsize {
        debug_assert!(self.is_root());
        &self.free_count
    }

    /// Remove this rule node from the child list.
    ///
    /// This is expected to be called before freeing the node from the free
    /// list, on the main thread.
    unsafe fn remove_from_child_list(&self) {
        debug!(
            "Remove from child list: {:?}, parent: {:?}",
            self as *const RuleNode,
            self.parent.as_ref().map(|p| p.ptr())
        );

        if let Some(parent) = self.parent.as_ref() {
            let weak = parent.get().children.write().remove(&self.key());
            assert_eq!(weak.unwrap().ptr() as *const _, self as *const _);
        }
    }

    fn dump<W: Write>(&self, guards: &StylesheetGuards, writer: &mut W, indent: usize) {
        const INDENT_INCREMENT: usize = 4;

        for _ in 0..indent {
            let _ = write!(writer, " ");
        }

        let _ = writeln!(
            writer,
            " - {:?} (ref: {:?}, parent: {:?})",
            self as *const _,
            self.refcount.load(Ordering::Relaxed),
            self.parent.as_ref().map(|p| p.ptr())
        );

        for _ in 0..indent {
            let _ = write!(writer, " ");
        }

        if self.source.is_some() {
            self.source
                .as_ref()
                .unwrap()
                .dump(self.level.guard(guards), writer);
        } else {
            if indent != 0 {
                warn!("How has this happened?");
            }
            let _ = write!(writer, "(root)");
        }

        let _ = write!(writer, "\n");
        self.children.read().each(|child| {
            child
                .upgrade()
                .get()
                .dump(guards, writer, indent + INDENT_INCREMENT);
        });
    }
}

#[derive(Clone)]
struct WeakRuleNode {
    p: ptr::NonNull<RuleNode>,
}

/// A strong reference to a rule node.
#[derive(Debug, Eq, Hash, PartialEq)]
pub struct StrongRuleNode {
    p: ptr::NonNull<RuleNode>,
}

unsafe impl Send for StrongRuleNode {}
unsafe impl Sync for StrongRuleNode {}

#[cfg(feature = "servo")]
malloc_size_of_is_0!(StrongRuleNode);

impl StrongRuleNode {
    fn new(n: Box<RuleNode>) -> Self {
        debug_assert_eq!(n.parent.is_none(), !n.source.is_some());

        // TODO(emilio): Use into_raw_non_null when it's stable.
        let ptr = unsafe { ptr::NonNull::new_unchecked(Box::into_raw(n)) };
        log_new(ptr.as_ptr());

        debug!("Creating rule node: {:p}", ptr);

        StrongRuleNode::from_ptr(ptr)
    }

    fn from_ptr(p: ptr::NonNull<RuleNode>) -> Self {
        StrongRuleNode { p }
    }

    fn downgrade(&self) -> WeakRuleNode {
        WeakRuleNode::from_ptr(self.p)
    }

    /// Get the parent rule node of this rule node.
    pub fn parent(&self) -> Option<&StrongRuleNode> {
        self.get().parent.as_ref()
    }

    fn ensure_child(
        &self,
        root: WeakRuleNode,
        source: StyleSource,
        level: CascadeLevel,
    ) -> StrongRuleNode {
        use parking_lot::RwLockUpgradableReadGuard;

        debug_assert!(
            self.get().level <= level,
            "Should be ordered (instead {:?} > {:?}), from {:?} and {:?}",
            self.get().level,
            level,
            self.get().source,
            source,
        );

        let key = ChildKey(level, source.key());

        let read_guard = self.get().children.upgradable_read();
        if let Some(child) = read_guard.get(&key) {
            return child.upgrade();
        }

        RwLockUpgradableReadGuard::upgrade(read_guard).get_or_insert_with(key, move || {
            StrongRuleNode::new(Box::new(RuleNode::new(
                root,
                self.clone(),
                source.clone(),
                level,
            )))
        })
    }

    /// Raw pointer to the RuleNode
    #[inline]
    pub fn ptr(&self) -> *mut RuleNode {
        self.p.as_ptr()
    }

    fn get(&self) -> &RuleNode {
        if cfg!(debug_assertions) {
            let node = unsafe { &*self.p.as_ptr() };
            assert!(node.refcount.load(Ordering::Relaxed) > 0);
        }
        unsafe { &*self.p.as_ptr() }
    }

    /// Get the style source corresponding to this rule node. May return `None`
    /// if it's the root node, which means that the node hasn't matched any
    /// rules.
    pub fn style_source(&self) -> Option<&StyleSource> {
        self.get().source.as_ref()
    }

    /// The cascade level for this node
    pub fn cascade_level(&self) -> CascadeLevel {
        self.get().level
    }

    /// Get the importance that this rule node represents.
    pub fn importance(&self) -> Importance {
        self.get().level.importance()
    }

    /// Get an iterator for this rule node and its ancestors.
    pub fn self_and_ancestors(&self) -> SelfAndAncestors {
        SelfAndAncestors {
            current: Some(self),
        }
    }

    /// Returns whether this node has any child. Only intended for testing
    /// purposes, and called in a single-threaded fashion only.
    pub unsafe fn has_children_for_testing(&self) -> bool {
        !self.get().children.read().is_empty()
    }

    unsafe fn pop_from_free_list(&self) -> Option<WeakRuleNode> {
        // NB: This can run from the root node destructor, so we can't use
        // `get()`, since it asserts the refcount is bigger than zero.
        let me = &*self.p.as_ptr();

        debug_assert!(me.is_root());

        // FIXME(#14213): Apparently the layout data can be gone from script.
        //
        // That's... suspicious, but it's fine if it happens for the rule tree
        // case, so just don't crash in the case we're doing the final GC in
        // script.

        debug_assert!(
            !thread_state::get().is_worker() &&
                (thread_state::get().is_layout() || thread_state::get().is_script())
        );

        let current = me.next_free.load(Ordering::Relaxed);
        if current == FREE_LIST_SENTINEL {
            return None;
        }

        debug_assert!(
            !current.is_null(),
            "Multiple threads are operating on the free list at the \
             same time?"
        );
        debug_assert!(
            current != self.p.as_ptr(),
            "How did the root end up in the free list?"
        );

        let next = (*current)
            .next_free
            .swap(ptr::null_mut(), Ordering::Relaxed);

        debug_assert!(
            !next.is_null(),
            "How did a null pointer end up in the free list?"
        );

        me.next_free.store(next, Ordering::Relaxed);

        debug!(
            "Popping from free list: cur: {:?}, next: {:?}",
            current, next
        );

        Some(WeakRuleNode::from_ptr(ptr::NonNull::new_unchecked(current)))
    }

    unsafe fn assert_free_list_has_no_duplicates_or_null(&self) {
        assert!(cfg!(debug_assertions), "This is an expensive check!");
        use crate::hash::FxHashSet;

        let me = &*self.p.as_ptr();
        assert!(me.is_root());

        let mut current = self.p.as_ptr();
        let mut seen = FxHashSet::default();
        while current != FREE_LIST_SENTINEL {
            let next = (*current).next_free.load(Ordering::Relaxed);
            assert!(!next.is_null());
            assert!(!seen.contains(&next));
            seen.insert(next);

            current = next;
        }
    }

    unsafe fn gc(&self) {
        if cfg!(debug_assertions) {
            self.assert_free_list_has_no_duplicates_or_null();
        }

        // NB: This can run from the root node destructor, so we can't use
        // `get()`, since it asserts the refcount is bigger than zero.
        let me = &*self.p.as_ptr();

        debug_assert!(me.is_root(), "Can't call GC on a non-root node!");

        while let Some(weak) = self.pop_from_free_list() {
            let node = &*weak.p.as_ptr();
            if node.refcount.load(Ordering::Relaxed) != 0 {
                // Nothing to do, the node is still alive.
                continue;
            }

            debug!("GC'ing {:?}", weak.p.as_ptr());
            node.remove_from_child_list();
            log_drop(weak.p.as_ptr());
            let _ = Box::from_raw(weak.p.as_ptr());
        }

        me.free_count().store(0, Ordering::Relaxed);

        debug_assert_eq!(me.next_free.load(Ordering::Relaxed), FREE_LIST_SENTINEL);
    }

    unsafe fn maybe_gc(&self) {
        debug_assert!(self.get().is_root(), "Can't call GC on a non-root node!");
        if self.get().free_count().load(Ordering::Relaxed) > RULE_TREE_GC_INTERVAL {
            self.gc();
        }
    }

    /// Returns true if there is either an animation or a transition level rule.
    pub fn has_animation_or_transition_rules(&self) -> bool {
        self.self_and_ancestors()
            .take_while(|node| node.cascade_level() >= CascadeLevel::SMILOverride)
            .any(|node| node.cascade_level().is_animation())
    }

    /// Get a set of properties whose CascadeLevel is higher than Animations
    /// but not equal to Transitions.
    ///
    /// If there are any custom properties, we set the boolean value of the
    /// returned tuple to true.
    pub fn get_properties_overriding_animations(
        &self,
        guards: &StylesheetGuards,
    ) -> (LonghandIdSet, bool) {
        use crate::properties::PropertyDeclarationId;

        // We want to iterate over cascade levels that override the animations
        // level, i.e. !important levels and the transitions level.
        //
        // However, we actually want to skip the transitions level because
        // although it is higher in the cascade than animations, when both
        // transitions and animations are present for a given element and
        // property, transitions are suppressed so that they don't actually
        // override animations.
        let iter = self
            .self_and_ancestors()
            .skip_while(|node| node.cascade_level() == CascadeLevel::Transitions)
            .take_while(|node| node.cascade_level() > CascadeLevel::Animations);
        let mut result = (LonghandIdSet::new(), false);
        for node in iter {
            let style = node.style_source().unwrap();
            for (decl, important) in style
                .read(node.cascade_level().guard(guards))
                .declaration_importance_iter()
            {
                // Although we are only iterating over cascade levels that
                // override animations, in a given property declaration block we
                // can have a mixture of !important and non-!important
                // declarations but only the !important declarations actually
                // override animations.
                if important.important() {
                    match decl.id() {
                        PropertyDeclarationId::Longhand(id) => result.0.insert(id),
                        PropertyDeclarationId::Custom(_) => result.1 = true,
                    }
                }
            }
        }
        result
    }
}

/// An iterator over a rule node and its ancestors.
#[derive(Clone)]
pub struct SelfAndAncestors<'a> {
    current: Option<&'a StrongRuleNode>,
}

impl<'a> Iterator for SelfAndAncestors<'a> {
    type Item = &'a StrongRuleNode;

    fn next(&mut self) -> Option<Self::Item> {
        self.current.map(|node| {
            self.current = node.parent();
            node
        })
    }
}

impl Clone for StrongRuleNode {
    fn clone(&self) -> Self {
        debug!(
            "{:?}: {:?}+",
            self.ptr(),
            self.get().refcount.load(Ordering::Relaxed)
        );
        debug_assert!(self.get().refcount.load(Ordering::Relaxed) > 0);
        self.get().refcount.fetch_add(1, Ordering::Relaxed);
        StrongRuleNode::from_ptr(self.p)
    }
}

impl Drop for StrongRuleNode {
    #[cfg_attr(feature = "servo", allow(unused_mut))]
    fn drop(&mut self) {
        let node = unsafe { &*self.ptr() };

        debug!(
            "{:?}: {:?}-",
            self.ptr(),
            node.refcount.load(Ordering::Relaxed)
        );
        debug!(
            "Dropping node: {:?}, root: {:?}, parent: {:?}",
            self.ptr(),
            node.root.as_ref().map(|r| r.ptr()),
            node.parent.as_ref().map(|p| p.ptr())
        );
        let should_drop = {
            debug_assert!(node.refcount.load(Ordering::Relaxed) > 0);
            node.refcount.fetch_sub(1, Ordering::Relaxed) == 1
        };

        if !should_drop {
            return;
        }

        if node.parent.is_none() {
            debug!("Dropping root node!");
            // The free list should be null by this point
            debug_assert!(node.next_free.load(Ordering::Relaxed).is_null());
            log_drop(self.ptr());
            let _ = unsafe { Box::from_raw(self.ptr()) };
            return;
        }

        let root = unsafe { &*node.root.as_ref().unwrap().ptr() };
        let free_list = &root.next_free;
        let mut old_head = free_list.load(Ordering::Relaxed);

        // If the free list is null, that means that the rule tree has been
        // formally torn down, and the last standard GC has already occurred.
        // We require that any callers using the rule tree at this point are
        // on the main thread only, which lets us trigger a synchronous GC
        // here to avoid leaking anything. We use the GC machinery, rather
        // than just dropping directly, so that we benefit from the iterative
        // destruction and don't trigger unbounded recursion during drop. See
        // [1] and the associated crashtest.
        //
        if old_head.is_null() {
            debug_assert!(
                !thread_state::get().is_worker() &&
                    (thread_state::get().is_layout() || thread_state::get().is_script())
            );
            // Add the node as the sole entry in the free list.
            debug_assert!(node.next_free.load(Ordering::Relaxed).is_null());
            node.next_free.store(FREE_LIST_SENTINEL, Ordering::Relaxed);
            free_list.store(node as *const _ as *mut _, Ordering::Relaxed);

            // Invoke the GC.
            //
            // Note that we need to hold a strong reference to the root so that
            // it doesn't go away during the GC (which would happen if we're
            // freeing the last external reference into the rule tree). This is
            // nicely enforced by having the gc() method live on StrongRuleNode
            // rather than RuleNode.
            let strong_root: StrongRuleNode = node.root.as_ref().unwrap().upgrade();
            unsafe {
                strong_root.gc();
            }

            // Leave the free list null, like we found it, such that additional
            // drops for straggling rule nodes will take this same codepath.
            debug_assert_eq!(root.next_free.load(Ordering::Relaxed), FREE_LIST_SENTINEL);
            root.next_free.store(ptr::null_mut(), Ordering::Relaxed);

            // Return. If strong_root is the last strong reference to the root,
            // this will re-enter StrongRuleNode::drop and take the
            // root-dropping path earlier in this function.
            return;
        }

        // If we're already in the free list, don't spinloop; this is just a
        // fast path, so it doesn't need a strong memory ordering.
        if node.next_free.load(Ordering::Relaxed) != ptr::null_mut() {
            return;
        }

        // Ensure we "lock" the free list head by swapping it with
        // FREE_LIST_LOCKED.
        //
        // Note that we use Acquire/Release semantics for the free list
        // synchronization, in order to guarantee that the next_free
        // reads/writes we do below are properly visible from multiple threads
        // racing.
        loop {
            match free_list.compare_exchange_weak(
                old_head,
                FREE_LIST_LOCKED,
                Ordering::Acquire,
                Ordering::Relaxed,
            ) {
                Ok(..) => {
                    if old_head != FREE_LIST_LOCKED {
                        break;
                    }
                },
                Err(new) => old_head = new,
            }
        }

        // If another thread has raced with us while using the same rule node,
        // just store the old head again; we're done.
        //
        // Note that we can use relaxed operations for loading since we're
        // effectively locking the free list with Acquire/Release semantics, and
        // the memory ordering is already guaranteed by that locking/unlocking.
        if node.next_free.load(Ordering::Relaxed) != ptr::null_mut() {
            free_list.store(old_head, Ordering::Release);
            return;
        }

        // Else store the old head as the next pointer, and store ourselves as
        // the new head of the free list.
        //
        // This can be relaxed since this pointer won't be read until GC.
        node.next_free.store(old_head, Ordering::Relaxed);

        // Increment the free count. This doesn't need to be an RMW atomic
        // operation, because the free list is "locked".
        let old_free_count = root.free_count().load(Ordering::Relaxed);
        root.free_count()
            .store(old_free_count + 1, Ordering::Relaxed);

        // This can be release because of the locking of the free list, that
        // ensures that all the other nodes racing with this one are using
        // `Acquire`.
        free_list.store(self.ptr(), Ordering::Release);
    }
}

impl<'a> From<&'a StrongRuleNode> for WeakRuleNode {
    fn from(node: &'a StrongRuleNode) -> Self {
        WeakRuleNode::from_ptr(node.p)
    }
}

impl WeakRuleNode {
    #[inline]
    fn ptr(&self) -> *mut RuleNode {
        self.p.as_ptr()
    }

    fn upgrade(&self) -> StrongRuleNode {
        debug!("Upgrading weak node: {:p}", self.ptr());

        let node = unsafe { &*self.ptr() };
        node.refcount.fetch_add(1, Ordering::Relaxed);
        StrongRuleNode::from_ptr(self.p)
    }

    fn from_ptr(p: ptr::NonNull<RuleNode>) -> Self {
        WeakRuleNode { p }
    }
}