/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at https://mozilla.org/MPL/2.0/. */

#![allow(unsafe_code)]

//! The rule tree.

use crate::applicable_declarations::ApplicableDeclarationList;
#[cfg(feature = "gecko")]
use crate::gecko::selector_parser::PseudoElement;
use crate::hash::{self, FxHashMap};
use crate::properties::{Importance, LonghandIdSet, PropertyDeclarationBlock};
use crate::shared_lock::{Locked, SharedRwLockReadGuard, StylesheetGuards};
use crate::stylesheets::{Origin, StyleRule};
use crate::thread_state;
use malloc_size_of::{MallocShallowSizeOf, MallocSizeOf, MallocSizeOfOps};
use parking_lot::RwLock;
use servo_arc::{Arc, ArcBorrow, ArcUnion, ArcUnionBorrow};
use smallvec::SmallVec;
use std::io::{self, Write};
use std::mem;
use std::ptr;
use std::sync::atomic::{AtomicPtr, AtomicUsize, Ordering};

/// The rule tree, the structure servo uses to preserve the results of selector
/// matching.
///
/// This is organized as a tree of rules. When a node matches a set of rules,
/// they're inserted in order in the tree, starting with the least specific
/// one.
///
/// When a rule is inserted in the tree, other elements may share the path up to
/// a given rule. If that's the case, we don't duplicate child nodes, but share
/// them.
///
/// When the rule node refcount drops to zero, it doesn't get freed. It gets
/// instead put into a free list, and it is potentially GC'd after a while in a
/// single-threaded fashion.
///
/// That way, a rule node that represents a likely-to-match-again rule (like a
/// :hover rule) can be reused if we haven't GC'd it yet.
///
/// See the discussion at https://github.com/servo/servo/pull/15562 and the IRC
/// logs linked from there for a discussion about the different memory
/// orderings used here.
#[derive(Debug)]
pub struct RuleTree {
    root: StrongRuleNode,
}
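
// An illustrative sketch of the sharing described above: if one element
// matches rules [ua, user, author1] and a sibling matches [ua, user, author2],
// both paths share the `ua -> user` chain and only fork at the leaf:
//
//   root
//    └── ua
//         └── user
//              ├── author1   (first element's rule node)
//              └── author2   (second element's rule node)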

impl Drop for RuleTree {
    fn drop(&mut self) {
        // GC the rule tree.
        unsafe {
            self.gc();
        }

        // After the GC, the free list should be empty.
        debug_assert_eq!(
            self.root.get().next_free.load(Ordering::Relaxed),
            FREE_LIST_SENTINEL
        );

        // Remove the sentinel. This indicates that GCs will no longer occur.
        // Any further drops of StrongRuleNodes must occur on the main thread,
        // and will trigger synchronous dropping of the Rule nodes.
        self.root
            .get()
            .next_free
            .store(ptr::null_mut(), Ordering::Relaxed);
    }
}

impl MallocSizeOf for RuleTree {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut n = 0;
        let mut stack = SmallVec::<[_; 32]>::new();
        stack.push(self.root.downgrade());

        while let Some(node) = stack.pop() {
            n += unsafe { ops.malloc_size_of(node.ptr()) };
            let children = unsafe { (*node.ptr()).children.read() };
            // Accumulate the shallow size of the children container as well.
            n += children.shallow_size_of(ops);
            children.each(|c| stack.push(c.clone()));
        }

        n
    }
}

#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
struct ChildKey(CascadeLevel, ptr::NonNull<()>);

unsafe impl Send for ChildKey {}
unsafe impl Sync for ChildKey {}

/// A style source for the rule node. It can either be a CSS style rule or a
/// declaration block.
///
/// Note that, even though the declaration block from inside the style rule
/// could be enough to implement the rule tree, keeping the whole rule provides
/// more debuggability, and also the ability to show those selectors to
/// devtools.
#[derive(Clone, Debug)]
pub struct StyleSource(ArcUnion<Locked<StyleRule>, Locked<PropertyDeclarationBlock>>);

impl PartialEq for StyleSource {
    fn eq(&self, other: &Self) -> bool {
        ArcUnion::ptr_eq(&self.0, &other.0)
    }
}

impl StyleSource {
    /// Creates a StyleSource from a StyleRule.
    pub fn from_rule(rule: Arc<Locked<StyleRule>>) -> Self {
        StyleSource(ArcUnion::from_first(rule))
    }

    #[inline]
    fn key(&self) -> ptr::NonNull<()> {
        self.0.ptr()
    }

    /// Creates a StyleSource from a PropertyDeclarationBlock.
    pub fn from_declarations(decls: Arc<Locked<PropertyDeclarationBlock>>) -> Self {
        StyleSource(ArcUnion::from_second(decls))
    }

    fn dump<W: Write>(&self, guard: &SharedRwLockReadGuard, writer: &mut W) {
        if let Some(ref rule) = self.0.as_first() {
            let rule = rule.read_with(guard);
            let _ = write!(writer, "{:?}", rule.selectors);
        }

        let _ = write!(writer, " -> {:?}", self.read(guard).declarations());
    }

    /// Reads the style source guard, and thus obtains read access to the
    /// underlying property declaration block.
    #[inline]
    pub fn read<'a>(&'a self, guard: &'a SharedRwLockReadGuard) -> &'a PropertyDeclarationBlock {
        let block: &Locked<PropertyDeclarationBlock> = match self.0.borrow() {
            ArcUnionBorrow::First(ref rule) => &rule.get().read_with(guard).block,
            ArcUnionBorrow::Second(ref block) => block.get(),
        };
        block.read_with(guard)
    }

    /// Returns the style rule if applicable, otherwise None.
    pub fn as_rule(&self) -> Option<ArcBorrow<Locked<StyleRule>>> {
        self.0.as_first()
    }

    /// Returns the declaration block if applicable, otherwise None.
    pub fn as_declarations(&self) -> Option<ArcBorrow<Locked<PropertyDeclarationBlock>>> {
        self.0.as_second()
    }
}
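
// A construction sketch: both kinds of sources funnel into the same type, so a
// style rule and an inline `style=""` declaration block are handled uniformly,
// e.g. `StyleSource::from_rule(rule)` or `StyleSource::from_declarations(decls)`;
// `read()` then yields the underlying PropertyDeclarationBlock either way.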

/// This value exists here so a node that pushes itself to the free list can
/// know that it is already in the free list by looking at its next pointer
/// and comparing it with null.
///
/// The root node stores this value, rather than a null pointer, in its free
/// list slot.
const FREE_LIST_SENTINEL: *mut RuleNode = 0x01 as *mut RuleNode;

/// A second sentinel value for the free list, indicating that it's locked (i.e.
/// another thread is currently adding an entry). We spin if we find this value.
const FREE_LIST_LOCKED: *mut RuleNode = 0x02 as *mut RuleNode;
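
// Summarizing the states a node's `next_free` pointer can be in: null means
// "not currently in the free list" (and, on the root, that the tree has been
// torn down); a real pointer links to the next free node; FREE_LIST_SENTINEL
// terminates the list; and FREE_LIST_LOCKED briefly replaces the free list
// head while a thread pushes an entry.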

/// A counter to track how many shadow root rules deep we are. This is used to
/// handle:
///
/// https://drafts.csswg.org/css-scoping/#shadow-cascading
///
/// See the static functions for the meaning of different values.
#[derive(Clone, Copy, Debug, Eq, Hash, MallocSizeOf, Ord, PartialEq, PartialOrd)]
pub struct ShadowCascadeOrder(i8);

impl ShadowCascadeOrder {
    /// A level for the outermost shadow tree (the shadow tree we own, and the
    /// ones from the slots we're slotted in).
    #[inline]
    pub fn for_outermost_shadow_tree() -> Self {
        Self(-1)
    }

    /// A level for the element's tree.
    #[inline]
    fn for_same_tree() -> Self {
        Self(0)
    }

    /// A level for the innermost containing tree (the one closest to the
    /// element).
    #[inline]
    pub fn for_innermost_containing_tree() -> Self {
        Self(1)
    }

    /// Decrement the level, moving inwards. We should only move inwards if
    /// we're traversing slots.
    #[inline]
    pub fn dec(&mut self) {
        debug_assert!(self.0 < 0);
        self.0 = self.0.saturating_sub(1);
    }

    /// Increment the level, moving inwards. We should only move inwards if
    /// we're traversing slots.
    #[inline]
    pub fn inc(&mut self) {
        debug_assert_ne!(self.0, -1);
        self.0 = self.0.saturating_add(1);
    }
}

impl std::ops::Neg for ShadowCascadeOrder {
    type Output = Self;
    #[inline]
    fn neg(self) -> Self {
        Self(self.0.neg())
    }
}
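
// Orientation sketch: the derived ordering gives ShadowCascadeOrder(-1) <
// ShadowCascadeOrder(0) < ShadowCascadeOrder(1), i.e. outermost shadow tree <
// the element's own tree < innermost containing tree, and `Neg` flips that
// ordering (used when inserting !important author rules below).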

impl RuleTree {
    /// Construct a new rule tree.
    pub fn new() -> Self {
        RuleTree {
            root: StrongRuleNode::new(Box::new(RuleNode::root())),
        }
    }

    /// Get the root rule node.
    pub fn root(&self) -> &StrongRuleNode {
        &self.root
    }

    fn dump<W: Write>(&self, guards: &StylesheetGuards, writer: &mut W) {
        let _ = writeln!(writer, " + RuleTree");
        self.root.get().dump(guards, writer, 0);
    }

    /// Dump the rule tree to stdout.
    pub fn dump_stdout(&self, guards: &StylesheetGuards) {
        let mut stdout = io::stdout();
        self.dump(guards, &mut stdout);
    }

    /// Inserts the given rules, which must be in proper order by specificity,
    /// and returns the corresponding rule node representing the last inserted
    /// one.
    ///
    /// !important rules are detected and inserted into the appropriate position
    /// in the rule tree. This allows selector matching to ignore importance,
    /// while still maintaining the appropriate cascade order in the rule tree.
    pub fn insert_ordered_rules_with_important<'a, I>(
        &self,
        iter: I,
        guards: &StylesheetGuards,
    ) -> StrongRuleNode
    where
        I: Iterator<Item = (StyleSource, CascadeLevel)>,
    {
        use self::CascadeLevel::*;
        let mut current = self.root.clone();

        let mut found_important = false;

        let mut important_author = SmallVec::<[(StyleSource, ShadowCascadeOrder); 4]>::new();

        let mut important_user = SmallVec::<[StyleSource; 4]>::new();
        let mut important_ua = SmallVec::<[StyleSource; 4]>::new();
        let mut transition = None;

        for (source, level) in iter {
            debug_assert!(!level.is_important(), "Important levels handled internally");
            let any_important = {
                let pdb = source.read(level.guard(guards));
                pdb.any_important()
            };

            if any_important {
                found_important = true;
                match level {
                    AuthorNormal {
                        shadow_cascade_order,
                    } => {
                        important_author.push((source.clone(), shadow_cascade_order));
                    },
                    UANormal => important_ua.push(source.clone()),
                    UserNormal => important_user.push(source.clone()),
                    _ => {},
                };
            }

            // We don't optimize out empty rules, even though we could.
            //
            // Inspector relies on every rule being inserted in the normal level
            // at least once, in order to return the rules with the correct
            // specificity order.
            //
            // TODO(emilio): If we want to apply these optimizations without
            // breaking inspector's expectations, we'd need to run
            // selector-matching again at the inspector's request. That may or
            // may not be a better trade-off.
            if matches!(level, Transitions) && found_important {
                // There can be at most one transition, and it will come at
                // the end of the iterator. Stash it and apply it after
                // !important rules.
                debug_assert!(transition.is_none());
                transition = Some(source);
            } else {
                current = current.ensure_child(self.root.downgrade(), source, level);
            }
        }

        // Early-return in the common case of no !important declarations.
        if !found_important {
            return current;
        }

        // Insert important declarations, in order of increasing importance,
        // followed by any transition rule.
        //
        // Inner shadow wins over same-tree, which wins over outer-shadow.
        //
        // We negate the shadow cascade order to preserve the right PartialOrd
        // behavior.
        if !important_author.is_empty() &&
            important_author.first().unwrap().1 != important_author.last().unwrap().1
        {
            // We only need to sort if the important rules come from
            // different trees, but we need this sort to be stable.
            //
            // FIXME(emilio): This could maybe be smarter, probably by chunking
            // the important rules while inserting, and iterating the outer
            // chunks in reverse order.
            //
            // That is, if we have rules with levels like: -1 -1 -1 0 0 0 1 1 1,
            // we're really only sorting the chunks, while keeping elements
            // inside the same chunk already sorted. Seems like we could try to
            // keep a SmallVec-of-SmallVecs with the chunks and just iterate the
            // outer in reverse.
            important_author.sort_by_key(|&(_, order)| -order);
        }

        for (source, shadow_cascade_order) in important_author.drain(..) {
            current = current.ensure_child(
                self.root.downgrade(),
                source,
                AuthorImportant {
                    shadow_cascade_order: -shadow_cascade_order,
                },
            );
        }

        for source in important_user.drain(..) {
            current = current.ensure_child(self.root.downgrade(), source, UserImportant);
        }

        for source in important_ua.drain(..) {
            current = current.ensure_child(self.root.downgrade(), source, UAImportant);
        }

        if let Some(source) = transition {
            current = current.ensure_child(self.root.downgrade(), source, Transitions);
        }

        current
    }

    /// Given a list of applicable declarations, insert the rules and return the
    /// corresponding rule node.
    pub fn compute_rule_node(
        &self,
        applicable_declarations: &mut ApplicableDeclarationList,
        guards: &StylesheetGuards,
    ) -> StrongRuleNode {
        self.insert_ordered_rules_with_important(
            applicable_declarations.drain(..).map(|d| d.for_rule_tree()),
            guards,
        )
    }

    /// Insert the given rules, which must be in proper order by specificity,
    /// and return the corresponding rule node representing the last inserted
    /// one.
    pub fn insert_ordered_rules<'a, I>(&self, iter: I) -> StrongRuleNode
    where
        I: Iterator<Item = (StyleSource, CascadeLevel)>,
    {
        self.insert_ordered_rules_from(self.root.clone(), iter)
    }

    fn insert_ordered_rules_from<'a, I>(&self, from: StrongRuleNode, iter: I) -> StrongRuleNode
    where
        I: Iterator<Item = (StyleSource, CascadeLevel)>,
    {
        let mut current = from;
        for (source, level) in iter {
            current = current.ensure_child(self.root.downgrade(), source, level);
        }
        current
    }
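
    // Usage sketch (with hypothetical sources `s1` and `s2`): feeding
    // [(s1, CascadeLevel::UserNormal), (s2, CascadeLevel::same_tree_author_normal())]
    // to insert_ordered_rules returns the node for s2, whose parent is the
    // node for s1, whose parent is the root.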

    /// This can only be called when no other thread is accessing this tree.
    pub unsafe fn gc(&self) {
        self.root.gc();
    }

    /// This can only be called when no other thread is accessing this tree.
    pub unsafe fn maybe_gc(&self) {
        #[cfg(debug_assertions)]
        self.maybe_dump_stats();

        self.root.maybe_gc();
    }

    #[cfg(debug_assertions)]
    fn maybe_dump_stats(&self) {
        use itertools::Itertools;
        use std::cell::Cell;
        use std::time::{Duration, Instant};

        if !log_enabled!(log::Level::Trace) {
            return;
        }

        const RULE_TREE_STATS_INTERVAL: Duration = Duration::from_secs(2);

        thread_local! {
            pub static LAST_STATS: Cell<Instant> = Cell::new(Instant::now());
        };

        let should_dump = LAST_STATS.with(|s| {
            let now = Instant::now();
            if now.duration_since(s.get()) < RULE_TREE_STATS_INTERVAL {
                return false;
            }
            s.set(now);
            true
        });

        if !should_dump {
            return;
        }

        let mut children_count = FxHashMap::default();

        let mut stack = SmallVec::<[_; 32]>::new();
        stack.push(self.root.clone());
        while let Some(node) = stack.pop() {
            let children = node.get().children.read();
            *children_count.entry(children.len()).or_insert(0) += 1;
            children.each(|c| stack.push(c.upgrade()));
        }

        trace!("Rule tree stats:");
        let counts = children_count.keys().sorted();
        for count in counts {
            trace!(" {} - {}", count, children_count[count]);
        }
    }

    /// Replaces a rule at a given level (if present) with another rule.
    ///
    /// Returns the resulting node that represents the new path, or None if
    /// the old path is still valid.
    pub fn update_rule_at_level(
        &self,
        level: CascadeLevel,
        pdb: Option<ArcBorrow<Locked<PropertyDeclarationBlock>>>,
        path: &StrongRuleNode,
        guards: &StylesheetGuards,
        important_rules_changed: &mut bool,
    ) -> Option<StrongRuleNode> {
        // TODO(emilio): Being smarter with lifetimes we could avoid a bit of
        // the refcount churn.
        let mut current = path.clone();
        *important_rules_changed = false;

        // First walk up until the first less-or-equally specific rule.
        let mut children = SmallVec::<[_; 10]>::new();
        while current.get().level > level {
            children.push((
                current.get().source.as_ref().unwrap().clone(),
                current.get().level,
            ));
            current = current.parent().unwrap().clone();
        }

        // Then remove the one at the level we want to replace, if any.
        //
        // NOTE: Here we assume that only one rule can be at the level we're
        // replacing.
        //
        // This is certainly true for HTML style attribute rules, animations and
        // transitions, but might not be so for SMIL animations, which we'd need
        // to special-case (it isn't hard, it's just about removing the `if` and
        // the special cases, and replacing them with a `while` loop, avoiding
        // the optimizations).
        if current.get().level == level {
            *important_rules_changed |= level.is_important();

            let current_decls = current.get().source.as_ref().unwrap().as_declarations();

            if let (Some(ref pdb), Some(ref current_decls)) = (pdb, current_decls) {
                // If the only rule at the level we're replacing is exactly the
                // same as `pdb`, we're done, and `path` is still valid.
                //
                // TODO(emilio): Another potential optimization is the one where
                // we can just replace the rule at that level for `pdb`, and
                // then we don't need to re-create the children, and `path` is
                // also equally valid. This is less likely, and would require an
                // in-place mutation of the source, which is, at best, fiddly,
                // so let's skip it for now.
                let is_here_already = ArcBorrow::ptr_eq(pdb, current_decls);
                if is_here_already {
                    debug!("Picking the fast path in rule replacement");
                    return None;
                }
            }

            if current_decls.is_some() {
                current = current.parent().unwrap().clone();
            }
        }

        // Insert the rule if it's relevant at this level in the cascade.
        //
        // These optimizations are likely to be important, because the levels
        // where replacements apply (style and animations) tend to trigger
        // pretty bad styling cases already.
        if let Some(pdb) = pdb {
            if level.is_important() {
                if pdb.read_with(level.guard(guards)).any_important() {
                    current = current.ensure_child(
                        self.root.downgrade(),
                        StyleSource::from_declarations(pdb.clone_arc()),
                        level,
                    );
                    *important_rules_changed = true;
                }
            } else if pdb.read_with(level.guard(guards)).any_normal() {
                current = current.ensure_child(
                    self.root.downgrade(),
                    StyleSource::from_declarations(pdb.clone_arc()),
                    level,
                );
            }
        }

        // Now the rule is in the relevant place, push the children as
        // necessary.
        let rule = self.insert_ordered_rules_from(current, children.drain(..).rev());
        Some(rule)
    }

    /// Returns a new rule node without the Transitions level rule.
    pub fn remove_transition_rule_if_applicable(&self, path: &StrongRuleNode) -> StrongRuleNode {
        // Return a clone if there is no transition level.
        if path.cascade_level() != CascadeLevel::Transitions {
            return path.clone();
        }

        path.parent().unwrap().clone()
    }

    /// Returns a new rule node without rules from declarative animations.
    pub fn remove_animation_rules(&self, path: &StrongRuleNode) -> StrongRuleNode {
        // Return a clone if there are no animation rules.
        if !path.has_animation_or_transition_rules() {
            return path.clone();
        }

        let iter = path
            .self_and_ancestors()
            .take_while(|node| node.cascade_level() >= CascadeLevel::SMILOverride);
        let mut last = path;
        let mut children = SmallVec::<[_; 10]>::new();
        for node in iter {
            if !node.cascade_level().is_animation() {
                children.push((
                    node.get().source.as_ref().unwrap().clone(),
                    node.cascade_level(),
                ));
            }
            last = node;
        }

        let rule = self
            .insert_ordered_rules_from(last.parent().unwrap().clone(), children.drain(..).rev());
        rule
    }

    /// Returns a new rule node by adding animation rules at the transition
    /// level. The additional rules must be appropriate for the transition
    /// level of the cascade, which is the highest level of the cascade.
    /// (This is the case for one current caller, the cover rule used
    /// for CSS transitions.)
    pub fn add_animation_rules_at_transition_level(
        &self,
        path: &StrongRuleNode,
        pdb: Arc<Locked<PropertyDeclarationBlock>>,
        guards: &StylesheetGuards,
    ) -> StrongRuleNode {
        let mut dummy = false;
        self.update_rule_at_level(
            CascadeLevel::Transitions,
            Some(pdb.borrow_arc()),
            path,
            guards,
            &mut dummy,
        )
        .expect("Should return a valid rule node")
    }
}

/// The number of RuleNodes added to the free list before we will consider
/// doing a GC when calling maybe_gc(). (The value is copied from Gecko,
/// where it likely did not result from a rigorous performance analysis.)
const RULE_TREE_GC_INTERVAL: usize = 300;

/// The cascade level these rules are relevant at, as per [1][2][3].
///
/// Presentational hints for SVG and HTML are in the "author-level
/// zero-specificity" level, that is, right after user rules, and before author
/// rules.
///
/// The order of variants declared here is significant, and must be in
/// _ascending_ order of precedence.
///
/// See also [4] for the Shadow DOM bits. We rely on the invariant that rules
/// from outside the tree the element is in can't affect the element.
///
/// The opposite is not true though: rules from an "inner" shadow tree (i.e.,
/// :host and ::slotted) may affect an element connected to the document or an
/// "outer" shadow tree.
///
/// [1]: https://drafts.csswg.org/css-cascade/#cascade-origin
/// [2]: https://drafts.csswg.org/css-cascade/#preshint
/// [3]: https://html.spec.whatwg.org/multipage/#presentational-hints
/// [4]: https://drafts.csswg.org/css-scoping/#shadow-cascading
#[repr(u8)]
#[derive(Clone, Copy, Debug, Eq, Hash, MallocSizeOf, PartialEq, PartialOrd)]
pub enum CascadeLevel {
    /// Normal User-Agent rules.
    UANormal,
    /// User normal rules.
    UserNormal,
    /// Presentational hints.
    PresHints,
    /// Shadow DOM styles from author styles.
    AuthorNormal {
        /// The order in the shadow tree hierarchy. This number is relative to
        /// the tree of the element, and thus the only invariants that need to
        /// be preserved are:
        ///
        /// * Zero is the same tree as the element that matched the rule. This
        ///   is important so that we can optimize style attribute insertions.
        ///
        /// * The levels are ordered in accordance with
        ///   https://drafts.csswg.org/css-scoping/#shadow-cascading
        shadow_cascade_order: ShadowCascadeOrder,
    },
    /// SVG SMIL animations.
    SMILOverride,
    /// CSS animations and script-generated animations.
    Animations,
    /// Author-supplied important rules.
    AuthorImportant {
        /// The order in the shadow tree hierarchy, inverted, so that PartialOrd
        /// does the right thing.
        shadow_cascade_order: ShadowCascadeOrder,
    },
    /// User important rules.
    UserImportant,
    /// User-agent important rules.
    UAImportant,
    /// Transitions
    Transitions,
}

impl CascadeLevel {
    /// Pack this cascade level in a single byte.
    ///
    /// We have 10 levels, which we can represent with 4 bits, and then a
    /// cascade order optionally, which we can clamp to three bits max, and
    /// represent with a fourth bit for the sign.
    ///
    /// So this creates: SOOODDDD
    ///
    /// Where `S` is the sign of the order (one if negative, 0 otherwise), `O`
    /// is the absolute value of the order, and `D`s are the discriminant.
    #[inline]
    pub fn to_byte_lossy(&self) -> u8 {
        let (discriminant, order) = match *self {
            Self::UANormal => (0, 0),
            Self::UserNormal => (1, 0),
            Self::PresHints => (2, 0),
            Self::AuthorNormal {
                shadow_cascade_order,
            } => (3, shadow_cascade_order.0),
            Self::SMILOverride => (4, 0),
            Self::Animations => (5, 0),
            Self::AuthorImportant {
                shadow_cascade_order,
            } => (6, shadow_cascade_order.0),
            Self::UserImportant => (7, 0),
            Self::UAImportant => (8, 0),
            Self::Transitions => (9, 0),
        };

        debug_assert_eq!(discriminant & 0xf, discriminant);
        if order == 0 {
            return discriminant;
        }

        let negative = order < 0;
        let value = std::cmp::min(order.abs() as u8, 0b111);
        (negative as u8) << 7 | value << 4 | discriminant
    }

    /// Convert back from the single-byte representation of the cascade level
    /// explained above.
    #[inline]
    pub fn from_byte(b: u8) -> Self {
        let order = {
            let abs = ((b & 0b01110000) >> 4) as i8;
            let negative = b & 0b10000000 != 0;
            if negative {
                -abs
            } else {
                abs
            }
        };
        let discriminant = b & 0xf;
        let level = match discriminant {
            0 => Self::UANormal,
            1 => Self::UserNormal,
            2 => Self::PresHints,
            3 => {
                return Self::AuthorNormal {
                    shadow_cascade_order: ShadowCascadeOrder(order),
                }
            },
            4 => Self::SMILOverride,
            5 => Self::Animations,
            6 => {
                return Self::AuthorImportant {
                    shadow_cascade_order: ShadowCascadeOrder(order),
                }
            },
            7 => Self::UserImportant,
            8 => Self::UAImportant,
            9 => Self::Transitions,
            _ => unreachable!("Didn't expect {} as a discriminant", discriminant),
        };
        debug_assert_eq!(order, 0, "Didn't expect an order value for {:?}", level);
        level
    }
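
    // Worked example of the packing above: `AuthorImportant` with a shadow
    // cascade order of -2 has discriminant 6, sign bit 1 and magnitude 2, so
    // it packs to 0b1010_0110 (0xA6), and `from_byte(0xA6)` recovers the same
    // level. Levels that carry no order, like `UserImportant`, pack to their
    // bare discriminant (7).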

    /// Select a lock guard for this level.
    pub fn guard<'a>(&self, guards: &'a StylesheetGuards<'a>) -> &'a SharedRwLockReadGuard<'a> {
        match *self {
            CascadeLevel::UANormal |
            CascadeLevel::UserNormal |
            CascadeLevel::UserImportant |
            CascadeLevel::UAImportant => guards.ua_or_user,
            _ => guards.author,
        }
    }

    /// Returns the cascade level for author important declarations from the
    /// same tree as the element.
    #[inline]
    pub fn same_tree_author_important() -> Self {
        CascadeLevel::AuthorImportant {
            shadow_cascade_order: ShadowCascadeOrder::for_same_tree(),
        }
    }

    /// Returns the cascade level for author normal declarations from the same
    /// tree as the element.
    #[inline]
    pub fn same_tree_author_normal() -> Self {
        CascadeLevel::AuthorNormal {
            shadow_cascade_order: ShadowCascadeOrder::for_same_tree(),
        }
    }

    /// Returns whether this cascade level represents important rules of some
    /// sort.
    #[inline]
    pub fn is_important(&self) -> bool {
        match *self {
            CascadeLevel::AuthorImportant { .. } |
            CascadeLevel::UserImportant |
            CascadeLevel::UAImportant => true,
            _ => false,
        }
    }

    /// Returns the importance relevant for this rule. Pretty similar to
    /// `is_important`.
    #[inline]
    pub fn importance(&self) -> Importance {
        if self.is_important() {
            Importance::Important
        } else {
            Importance::Normal
        }
    }

    /// Returns the cascade origin of the rule.
    #[inline]
    pub fn origin(&self) -> Origin {
        match *self {
            CascadeLevel::UAImportant | CascadeLevel::UANormal => Origin::UserAgent,
            CascadeLevel::UserImportant | CascadeLevel::UserNormal => Origin::User,
            CascadeLevel::PresHints |
            CascadeLevel::AuthorNormal { .. } |
            CascadeLevel::AuthorImportant { .. } |
            CascadeLevel::SMILOverride |
            CascadeLevel::Animations |
            CascadeLevel::Transitions => Origin::Author,
        }
    }

    /// Returns whether this cascade level represents animation rules.
    #[inline]
    pub fn is_animation(&self) -> bool {
        match *self {
            CascadeLevel::SMILOverride | CascadeLevel::Animations | CascadeLevel::Transitions => {
                true
            },
            _ => false,
        }
    }
}

/// The children of a single rule node.
///
/// We optimize the case of no kids and a single child, since they're by far the
/// most common case and it'd cause a bunch of bloat for no reason.
///
/// The children remove themselves when they go away, which means that it's ok
/// for us to store weak pointers to them.
enum RuleNodeChildren {
    /// There are no kids.
    Empty,
    /// There's just one kid. This is an extremely common case, so we don't
    /// bother allocating a map for it.
    One(WeakRuleNode),
    /// At least at one point in time there was more than one kid (that is to
    /// say, we don't bother re-allocating if children are removed dynamically).
    Map(Box<FxHashMap<ChildKey, WeakRuleNode>>),
}

impl MallocShallowSizeOf for RuleNodeChildren {
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        match *self {
            RuleNodeChildren::One(..) | RuleNodeChildren::Empty => 0,
            RuleNodeChildren::Map(ref m) => {
                // Want to account for both the box and the hashmap.
                m.shallow_size_of(ops) + (**m).shallow_size_of(ops)
            },
        }
    }
}

impl Default for RuleNodeChildren {
    fn default() -> Self {
        RuleNodeChildren::Empty
    }
}

impl RuleNodeChildren {
    /// Executes a given function for each of the children.
    fn each(&self, mut f: impl FnMut(&WeakRuleNode)) {
        match *self {
            RuleNodeChildren::Empty => {},
            RuleNodeChildren::One(ref child) => f(child),
            RuleNodeChildren::Map(ref map) => {
                for (_key, kid) in map.iter() {
                    f(kid)
                }
            },
        }
    }

    fn len(&self) -> usize {
        match *self {
            RuleNodeChildren::Empty => 0,
            RuleNodeChildren::One(..) => 1,
            RuleNodeChildren::Map(ref map) => map.len(),
        }
    }

    fn is_empty(&self) -> bool {
        self.len() == 0
    }

    fn get(&self, key: &ChildKey) -> Option<&WeakRuleNode> {
        match *self {
            RuleNodeChildren::Empty => return None,
            RuleNodeChildren::One(ref kid) => {
                // We're read-locked, so no need to do refcount stuff, since the
                // child is only removed from the main thread, _and_ it'd need
                // to write-lock us anyway.
                if unsafe { (*kid.ptr()).key() } == *key {
                    Some(kid)
                } else {
                    None
                }
            },
            RuleNodeChildren::Map(ref map) => map.get(key),
        }
    }

    fn get_or_insert_with(
        &mut self,
        key: ChildKey,
        get_new_child: impl FnOnce() -> StrongRuleNode,
    ) -> StrongRuleNode {
        let existing_child_key = match *self {
            RuleNodeChildren::Empty => {
                let new = get_new_child();
                debug_assert_eq!(new.get().key(), key);
                *self = RuleNodeChildren::One(new.downgrade());
                return new;
            },
            RuleNodeChildren::One(ref weak) => unsafe {
                // We're necessarily locked, so it's fine to look at our
                // weak-child without refcount-traffic.
                let existing_child_key = (*weak.ptr()).key();
                if existing_child_key == key {
                    return weak.upgrade();
                }
                existing_child_key
            },
            RuleNodeChildren::Map(ref mut map) => {
                return match map.entry(key) {
                    hash::map::Entry::Occupied(ref occupied) => occupied.get().upgrade(),
                    hash::map::Entry::Vacant(vacant) => {
                        let new = get_new_child();

                        debug_assert_eq!(new.get().key(), key);
                        vacant.insert(new.downgrade());

                        new
                    },
                };
            },
        };

        let existing_child = match mem::replace(self, RuleNodeChildren::Empty) {
            RuleNodeChildren::One(o) => o,
            _ => unreachable!(),
        };
        // Two rule-nodes are still a not-totally-uncommon thing, so
        // avoid over-allocating entries.
        //
        // TODO(emilio): Maybe just inline two kids too?
        let mut children = Box::new(FxHashMap::with_capacity_and_hasher(2, Default::default()));
        children.insert(existing_child_key, existing_child);

        let new = get_new_child();
        debug_assert_eq!(new.get().key(), key);
        children.insert(key, new.downgrade());

        *self = RuleNodeChildren::Map(children);

        new
    }
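
    // Shape of the container over time: Empty becomes One on the first
    // insertion, and One is promoted to a Map (pre-sized for two entries) as
    // soon as a second distinct key arrives; removals never demote a Map back
    // to One.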

    fn remove(&mut self, key: &ChildKey) -> Option<WeakRuleNode> {
        match *self {
            RuleNodeChildren::Empty => return None,
            RuleNodeChildren::One(ref one) => {
                if unsafe { (*one.ptr()).key() } != *key {
                    return None;
                }
            },
            RuleNodeChildren::Map(ref mut multiple) => {
                return multiple.remove(key);
            },
        }

        match mem::replace(self, RuleNodeChildren::Empty) {
            RuleNodeChildren::One(o) => Some(o),
            _ => unreachable!(),
        }
    }
}

/// A node in the rule tree.
pub struct RuleNode {
    /// The root node. Only the root has no root pointer, for obvious reasons.
    root: Option<WeakRuleNode>,

    /// The parent rule node. Only the root has no parent.
    parent: Option<StrongRuleNode>,

    /// The actual style source, either coming from a selector in a StyleRule,
    /// or a raw property declaration block (like the style attribute).
    ///
    /// None for the root node.
    source: Option<StyleSource>,

    /// The cascade level this rule is positioned at.
    level: CascadeLevel,

    refcount: AtomicUsize,

    /// Only used for the root, stores the number of free rule nodes that are
    /// around.
    free_count: AtomicUsize,

    /// The children of a given rule node. Children remove themselves from here
    /// when they go away.
    children: RwLock<RuleNodeChildren>,

    /// The next item in the rule tree free list, that starts on the root node.
    ///
    /// When this is set to null, that means that the rule tree has been torn
    /// down, and GCs will no longer occur. When this happens, StrongRuleNodes
    /// may only be dropped on the main thread, and teardown happens
    /// synchronously.
    next_free: AtomicPtr<RuleNode>,
}

unsafe impl Sync for RuleTree {}
unsafe impl Send for RuleTree {}

// On Gecko builds, hook into the leak checking machinery.
#[cfg(feature = "gecko_refcount_logging")]
mod gecko_leak_checking {
    use super::RuleNode;
    use std::mem::size_of;
    use std::os::raw::{c_char, c_void};

    extern "C" {
        fn NS_LogCtor(aPtr: *mut c_void, aTypeName: *const c_char, aSize: u32);
        fn NS_LogDtor(aPtr: *mut c_void, aTypeName: *const c_char, aSize: u32);
    }

    static NAME: &'static [u8] = b"RuleNode\0";

    /// Logs the creation of a heap-allocated object to Gecko's leak-checking machinery.
    pub fn log_ctor(ptr: *const RuleNode) {
        let s = NAME as *const [u8] as *const u8 as *const c_char;
        unsafe {
            NS_LogCtor(ptr as *mut c_void, s, size_of::<RuleNode>() as u32);
        }
    }

    /// Logs the destruction of a heap-allocated object to Gecko's leak-checking machinery.
    pub fn log_dtor(ptr: *const RuleNode) {
        let s = NAME as *const [u8] as *const u8 as *const c_char;
        unsafe {
            NS_LogDtor(ptr as *mut c_void, s, size_of::<RuleNode>() as u32);
        }
    }
}

#[inline(always)]
fn log_new(_ptr: *const RuleNode) {
    #[cfg(feature = "gecko_refcount_logging")]
    gecko_leak_checking::log_ctor(_ptr);
}

#[inline(always)]
fn log_drop(_ptr: *const RuleNode) {
    #[cfg(feature = "gecko_refcount_logging")]
    gecko_leak_checking::log_dtor(_ptr);
}

impl RuleNode {
    fn new(
        root: WeakRuleNode,
        parent: StrongRuleNode,
        source: StyleSource,
        level: CascadeLevel,
    ) -> Self {
        debug_assert!(root.upgrade().parent().is_none());
        RuleNode {
            root: Some(root),
            parent: Some(parent),
            source: Some(source),
            level,
            refcount: AtomicUsize::new(1),
            children: Default::default(),
            free_count: AtomicUsize::new(0),
            next_free: AtomicPtr::new(ptr::null_mut()),
        }
    }

    fn root() -> Self {
        RuleNode {
            root: None,
            parent: None,
            source: None,
            level: CascadeLevel::UANormal,
            refcount: AtomicUsize::new(1),
            free_count: AtomicUsize::new(0),
            children: Default::default(),
            next_free: AtomicPtr::new(FREE_LIST_SENTINEL),
        }
    }

    fn key(&self) -> ChildKey {
        ChildKey(
            self.level,
            self.source
                .as_ref()
                .expect("Called key() on the root node")
                .key(),
        )
    }

    fn is_root(&self) -> bool {
        self.parent.is_none()
    }

    fn free_count(&self) -> &AtomicUsize {
        debug_assert!(self.is_root());
        &self.free_count
    }

    /// Remove this rule node from the child list.
    ///
    /// This is expected to be called before freeing the node from the free
    /// list, on the main thread.
    unsafe fn remove_from_child_list(&self) {
        debug!(
            "Remove from child list: {:?}, parent: {:?}",
            self as *const RuleNode,
            self.parent.as_ref().map(|p| p.ptr())
        );

        if let Some(parent) = self.parent.as_ref() {
            let weak = parent.get().children.write().remove(&self.key());
            assert_eq!(weak.unwrap().ptr() as *const _, self as *const _);
        }
    }

    fn dump<W: Write>(&self, guards: &StylesheetGuards, writer: &mut W, indent: usize) {
        const INDENT_INCREMENT: usize = 4;

        for _ in 0..indent {
            let _ = write!(writer, " ");
        }

        let _ = writeln!(
            writer,
            " - {:?} (ref: {:?}, parent: {:?})",
            self as *const _,
            self.refcount.load(Ordering::Relaxed),
            self.parent.as_ref().map(|p| p.ptr())
        );

        for _ in 0..indent {
            let _ = write!(writer, " ");
        }

        if self.source.is_some() {
            self.source
                .as_ref()
                .unwrap()
                .dump(self.level.guard(guards), writer);
        } else {
            if indent != 0 {
                warn!("How has this happened?");
            }
            let _ = write!(writer, "(root)");
        }

        let _ = write!(writer, "\n");
        self.children.read().each(|child| {
            child
                .upgrade()
                .get()
                .dump(guards, writer, indent + INDENT_INCREMENT);
        });
    }
}

#[derive(Clone)]
struct WeakRuleNode {
    p: ptr::NonNull<RuleNode>,
}

/// A strong reference to a rule node.
#[derive(Debug, Eq, Hash, PartialEq)]
pub struct StrongRuleNode {
    p: ptr::NonNull<RuleNode>,
}

unsafe impl Send for StrongRuleNode {}
unsafe impl Sync for StrongRuleNode {}

#[cfg(feature = "servo")]
malloc_size_of_is_0!(StrongRuleNode);

impl StrongRuleNode {
    fn new(n: Box<RuleNode>) -> Self {
        debug_assert_eq!(n.parent.is_none(), n.source.is_none());

        // TODO(emilio): Use into_raw_non_null when it's stable.
        let ptr = unsafe { ptr::NonNull::new_unchecked(Box::into_raw(n)) };
        log_new(ptr.as_ptr());

        debug!("Creating rule node: {:p}", ptr);

        StrongRuleNode::from_ptr(ptr)
    }

    fn from_ptr(p: ptr::NonNull<RuleNode>) -> Self {
        StrongRuleNode { p }
    }

    fn downgrade(&self) -> WeakRuleNode {
        WeakRuleNode::from_ptr(self.p)
    }

    /// Get the parent rule node of this rule node.
    pub fn parent(&self) -> Option<&StrongRuleNode> {
        self.get().parent.as_ref()
    }

    fn ensure_child(
        &self,
        root: WeakRuleNode,
        source: StyleSource,
        level: CascadeLevel,
    ) -> StrongRuleNode {
        use parking_lot::RwLockUpgradableReadGuard;

        debug_assert!(
            self.get().level <= level,
            "Should be ordered (instead {:?} > {:?}), from {:?} and {:?}",
            self.get().level,
            level,
            self.get().source,
            source,
        );

        let key = ChildKey(level, source.key());

        let read_guard = self.get().children.upgradable_read();
        if let Some(child) = read_guard.get(&key) {
            return child.upgrade();
        }

        RwLockUpgradableReadGuard::upgrade(read_guard).get_or_insert_with(key, move || {
            StrongRuleNode::new(Box::new(RuleNode::new(
                root,
                self.clone(),
                source.clone(),
                level,
            )))
        })
    }

    /// Raw pointer to the RuleNode.
    #[inline]
    pub fn ptr(&self) -> *mut RuleNode {
        self.p.as_ptr()
    }

    fn get(&self) -> &RuleNode {
        if cfg!(debug_assertions) {
            let node = unsafe { &*self.p.as_ptr() };
            assert!(node.refcount.load(Ordering::Relaxed) > 0);
        }
        unsafe { &*self.p.as_ptr() }
    }

    /// Get the style source corresponding to this rule node. May return `None`
    /// if it's the root node, which means that the node hasn't matched any
    /// rules.
    pub fn style_source(&self) -> Option<&StyleSource> {
        self.get().source.as_ref()
    }

    /// The cascade level for this node.
    pub fn cascade_level(&self) -> CascadeLevel {
        self.get().level
    }

    /// Get the importance that this rule node represents.
    pub fn importance(&self) -> Importance {
        self.get().level.importance()
    }

    /// Get an iterator for this rule node and its ancestors.
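    ///
    /// A usage sketch (with some rule node `node`):
    ///
    /// ```ignore
    /// for ancestor in node.self_and_ancestors() {
    ///     // Walks from `node` itself up to, and including, the root.
    ///     let _ = ancestor.cascade_level();
    /// }
    /// ```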
    pub fn self_and_ancestors(&self) -> SelfAndAncestors {
        SelfAndAncestors {
            current: Some(self),
        }
    }

    /// Returns whether this node has any child, only intended for testing
    /// purposes, and called in a single-threaded fashion only.
    pub unsafe fn has_children_for_testing(&self) -> bool {
        !self.get().children.read().is_empty()
    }

    unsafe fn pop_from_free_list(&self) -> Option<WeakRuleNode> {
        // NB: This can run from the root node destructor, so we can't use
        // `get()`, since it asserts the refcount is bigger than zero.
        let me = &*self.p.as_ptr();

        debug_assert!(me.is_root());

        // FIXME(#14213): Apparently the layout data can be gone from script.
        //
        // That's... suspicious, but it's fine if it happens for the rule tree
        // case, so just don't crash in the case we're doing the final GC in
        // script.

        debug_assert!(
            !thread_state::get().is_worker() &&
                (thread_state::get().is_layout() || thread_state::get().is_script())
        );

        let current = me.next_free.load(Ordering::Relaxed);
        if current == FREE_LIST_SENTINEL {
            return None;
        }

        debug_assert!(
            !current.is_null(),
            "Multiple threads are operating on the free list at the \
             same time?"
        );
        debug_assert!(
            current != self.p.as_ptr(),
            "How did the root end up in the free list?"
        );

        let next = (*current)
            .next_free
            .swap(ptr::null_mut(), Ordering::Relaxed);

        debug_assert!(
            !next.is_null(),
            "How did a null pointer end up in the free list?"
        );

        me.next_free.store(next, Ordering::Relaxed);

        debug!(
            "Popping from free list: cur: {:?}, next: {:?}",
            current, next
        );

        Some(WeakRuleNode::from_ptr(ptr::NonNull::new_unchecked(current)))
    }

    unsafe fn assert_free_list_has_no_duplicates_or_null(&self) {
        assert!(cfg!(debug_assertions), "This is an expensive check!");
        use crate::hash::FxHashSet;

        let me = &*self.p.as_ptr();
        assert!(me.is_root());

        let mut current = self.p.as_ptr();
        let mut seen = FxHashSet::default();
        while current != FREE_LIST_SENTINEL {
            let next = (*current).next_free.load(Ordering::Relaxed);
            assert!(!next.is_null());
            assert!(!seen.contains(&next));
            seen.insert(next);

            current = next;
        }
    }

    unsafe fn gc(&self) {
        if cfg!(debug_assertions) {
            self.assert_free_list_has_no_duplicates_or_null();
        }

        // NB: This can run from the root node destructor, so we can't use
        // `get()`, since it asserts the refcount is bigger than zero.
        let me = &*self.p.as_ptr();

        debug_assert!(me.is_root(), "Can't call GC on a non-root node!");

        while let Some(weak) = self.pop_from_free_list() {
            let node = &*weak.p.as_ptr();
            if node.refcount.load(Ordering::Relaxed) != 0 {
                // Nothing to do, the node is still alive.
                continue;
            }

            debug!("GC'ing {:?}", weak.p.as_ptr());
            node.remove_from_child_list();
            log_drop(weak.p.as_ptr());
            let _ = Box::from_raw(weak.p.as_ptr());
        }

        me.free_count().store(0, Ordering::Relaxed);

        debug_assert_eq!(me.next_free.load(Ordering::Relaxed), FREE_LIST_SENTINEL);
    }

    unsafe fn maybe_gc(&self) {
        debug_assert!(self.get().is_root(), "Can't call GC on a non-root node!");
        if self.get().free_count().load(Ordering::Relaxed) > RULE_TREE_GC_INTERVAL {
            self.gc();
        }
    }

    /// Returns true if any properties specified by `rule_type_mask` were set
    /// by an author rule.
    #[cfg(feature = "gecko")]
    pub fn has_author_specified_rules<E>(
        &self,
        mut element: E,
        mut pseudo: Option<PseudoElement>,
        guards: &StylesheetGuards,
        rule_type_mask: u32,
        author_colors_allowed: bool,
    ) -> bool
    where
        E: crate::dom::TElement,
    {
        use crate::gecko_bindings::structs::NS_AUTHOR_SPECIFIED_BACKGROUND;
        use crate::gecko_bindings::structs::NS_AUTHOR_SPECIFIED_BORDER;
        use crate::gecko_bindings::structs::NS_AUTHOR_SPECIFIED_PADDING;
        use crate::properties::{CSSWideKeyword, LonghandId};
        use crate::properties::{PropertyDeclaration, PropertyDeclarationId};
        use crate::values::specified::Color;
        use std::borrow::Cow;

        // Reset properties:
        const BACKGROUND_PROPS: &'static [LonghandId] =
            &[LonghandId::BackgroundColor, LonghandId::BackgroundImage];

        const BORDER_PROPS: &'static [LonghandId] = &[
            LonghandId::BorderTopColor,
            LonghandId::BorderTopStyle,
            LonghandId::BorderTopWidth,
            LonghandId::BorderRightColor,
            LonghandId::BorderRightStyle,
            LonghandId::BorderRightWidth,
            LonghandId::BorderBottomColor,
            LonghandId::BorderBottomStyle,
            LonghandId::BorderBottomWidth,
            LonghandId::BorderLeftColor,
            LonghandId::BorderLeftStyle,
            LonghandId::BorderLeftWidth,
            LonghandId::BorderTopLeftRadius,
            LonghandId::BorderTopRightRadius,
            LonghandId::BorderBottomRightRadius,
            LonghandId::BorderBottomLeftRadius,
            LonghandId::BorderInlineStartColor,
            LonghandId::BorderInlineStartStyle,
            LonghandId::BorderInlineStartWidth,
            LonghandId::BorderInlineEndColor,
            LonghandId::BorderInlineEndStyle,
            LonghandId::BorderInlineEndWidth,
            LonghandId::BorderBlockStartColor,
            LonghandId::BorderBlockStartStyle,
            LonghandId::BorderBlockStartWidth,
            LonghandId::BorderBlockEndColor,
            LonghandId::BorderBlockEndStyle,
            LonghandId::BorderBlockEndWidth,
        ];

        const PADDING_PROPS: &'static [LonghandId] = &[
            LonghandId::PaddingTop,
            LonghandId::PaddingRight,
            LonghandId::PaddingBottom,
            LonghandId::PaddingLeft,
            LonghandId::PaddingInlineStart,
            LonghandId::PaddingInlineEnd,
            LonghandId::PaddingBlockStart,
            LonghandId::PaddingBlockEnd,
        ];

        // Set of properties that we are currently interested in.
        let mut properties = LonghandIdSet::new();

        if rule_type_mask & NS_AUTHOR_SPECIFIED_BACKGROUND != 0 {
            for id in BACKGROUND_PROPS {
                properties.insert(*id);
            }
        }
        if rule_type_mask & NS_AUTHOR_SPECIFIED_BORDER != 0 {
            for id in BORDER_PROPS {
                properties.insert(*id);
            }
        }
        if rule_type_mask & NS_AUTHOR_SPECIFIED_PADDING != 0 {
            for id in PADDING_PROPS {
                properties.insert(*id);
            }
        }

        // If author colors are not allowed, don't look at those properties
        // (except for background-color which is special and we handle below).
        if !author_colors_allowed {
            properties.remove_all(LonghandIdSet::ignored_when_colors_disabled());
            if rule_type_mask & NS_AUTHOR_SPECIFIED_BACKGROUND != 0 {
                properties.insert(LonghandId::BackgroundColor);
            }
        }

        let mut element_rule_node = Cow::Borrowed(self);

        loop {
            // We need to be careful not to count styles covered up by
            // user-important or UA-important declarations. But we do want to
            // catch explicit inherit styling in those and check our parent
            // element to see whether we have user styling for those properties.
            // Note that we don't care here about inheritance due to lack of a
            // specified value, since all the properties we care about are reset
            // properties.

            let mut inherited_properties = LonghandIdSet::new();
            let mut have_explicit_ua_inherit = false;

            for node in element_rule_node.self_and_ancestors() {
                let source = node.style_source();
                let declarations = if source.is_some() {
                    source
                        .as_ref()
                        .unwrap()
                        .read(node.cascade_level().guard(guards))
                        .declaration_importance_iter()
                } else {
                    continue;
                };

                // Iterate over declarations of the longhands we care about.
                let node_importance = node.importance();
                let longhands = declarations.rev().filter_map(|(declaration, importance)| {
                    if importance != node_importance {
                        return None;
                    }
                    match declaration.id() {
                        PropertyDeclarationId::Longhand(id) => Some((id, declaration)),
                        _ => None,
                    }
                });

                let is_author = node.cascade_level().origin() == Origin::Author;
                for (id, declaration) in longhands {
                    if !properties.contains(id) {
                        continue;
                    }

                    if is_author {
                        if !author_colors_allowed {
                            // FIXME(emilio): this looks wrong, this should
                            // do: if color is not transparent, then return
                            // true, or something.
                            if let PropertyDeclaration::BackgroundColor(ref color) = *declaration {
                                return *color == Color::transparent();
                            }
                        }
                        return true;
                    }

                    // This property was set by a non-author rule.
                    // Stop looking for it in this element's rule
                    // nodes.
                    properties.remove(id);

                    // However, if it is inherited, then it might be
                    // inherited from an author rule from an
                    // ancestor element's rule nodes.
                    if declaration.get_css_wide_keyword() == Some(CSSWideKeyword::Inherit) {
                        have_explicit_ua_inherit = true;
                        inherited_properties.insert(id);
                    }
                }
            }

            if !have_explicit_ua_inherit {
                break;
            }

            // Continue to the parent element and search for the inherited properties.
            if let Some(pseudo) = pseudo.take() {
                if pseudo.inherits_from_default_values() {
                    break;
                }
            } else {
                element = match element.inheritance_parent() {
                    Some(parent) => parent,
                    None => break,
                };

                let parent_data = element.mutate_data().unwrap();
                let parent_rule_node = parent_data.styles.primary().rules().clone();
                element_rule_node = Cow::Owned(parent_rule_node);
            }

            properties = inherited_properties;
        }

        false
    }

    /// Returns true if there is either an animation or a transition level rule.
    pub fn has_animation_or_transition_rules(&self) -> bool {
        self.self_and_ancestors()
            .take_while(|node| node.cascade_level() >= CascadeLevel::SMILOverride)
            .any(|node| node.cascade_level().is_animation())
    }

    /// Get a set of properties whose CascadeLevel are higher than Animations
    /// but not equal to Transitions.
    ///
    /// If there are any custom properties, we set the boolean value of the
    /// returned tuple to true.
    pub fn get_properties_overriding_animations(
        &self,
        guards: &StylesheetGuards,
    ) -> (LonghandIdSet, bool) {
        use crate::properties::PropertyDeclarationId;

        // We want to iterate over cascade levels that override the animations
        // level, i.e. !important levels and the transitions level.
        //
        // However, we actually want to skip the transitions level because
        // although it is higher in the cascade than animations, when both
        // transitions and animations are present for a given element and
        // property, transitions are suppressed so that they don't actually
        // override animations.
        let iter = self
            .self_and_ancestors()
            .skip_while(|node| node.cascade_level() == CascadeLevel::Transitions)
            .take_while(|node| node.cascade_level() > CascadeLevel::Animations);
        let mut result = (LonghandIdSet::new(), false);
        for node in iter {
            let style = node.style_source().unwrap();
            for (decl, important) in style
                .read(node.cascade_level().guard(guards))
                .declaration_importance_iter()
            {
                // Although we are only iterating over cascade levels that
                // override animations, in a given property declaration block we
                // can have a mixture of !important and non-!important
                // declarations but only the !important declarations actually
                // override animations.
                if important.important() {
                    match decl.id() {
                        PropertyDeclarationId::Longhand(id) => result.0.insert(id),
                        PropertyDeclarationId::Custom(_) => result.1 = true,
                    }
                }
            }
        }
        result
    }
}

/// An iterator over a rule node and its ancestors.
#[derive(Clone)]
pub struct SelfAndAncestors<'a> {
    current: Option<&'a StrongRuleNode>,
}

impl<'a> Iterator for SelfAndAncestors<'a> {
    type Item = &'a StrongRuleNode;

    fn next(&mut self) -> Option<Self::Item> {
        self.current.map(|node| {
            self.current = node.parent();
            node
        })
    }
}

impl Clone for StrongRuleNode {
    fn clone(&self) -> Self {
        debug!(
            "{:?}: {:?}+",
            self.ptr(),
            self.get().refcount.load(Ordering::Relaxed)
        );
        debug_assert!(self.get().refcount.load(Ordering::Relaxed) > 0);
        self.get().refcount.fetch_add(1, Ordering::Relaxed);
        StrongRuleNode::from_ptr(self.p)
    }
}

impl Drop for StrongRuleNode {
    fn drop(&mut self) {
        let node = unsafe { &*self.ptr() };

        debug!(
            "{:?}: {:?}-",
            self.ptr(),
            node.refcount.load(Ordering::Relaxed)
        );
        debug!(
            "Dropping node: {:?}, root: {:?}, parent: {:?}",
            self.ptr(),
            node.root.as_ref().map(|r| r.ptr()),
            node.parent.as_ref().map(|p| p.ptr())
        );
        let should_drop = {
            debug_assert!(node.refcount.load(Ordering::Relaxed) > 0);
            node.refcount.fetch_sub(1, Ordering::Relaxed) == 1
        };

        if !should_drop {
            return;
        }

        debug_assert!(node.children.read().is_empty());
        if node.parent.is_none() {
            debug!("Dropping root node!");
            // The free list should be null by this point.
            debug_assert!(node.next_free.load(Ordering::Relaxed).is_null());
            log_drop(self.ptr());
            let _ = unsafe { Box::from_raw(self.ptr()) };
            return;
        }

        let root = unsafe { &*node.root.as_ref().unwrap().ptr() };
        let free_list = &root.next_free;
        let mut old_head = free_list.load(Ordering::Relaxed);

        // If the free list is null, that means that the rule tree has been
        // formally torn down, and the last standard GC has already occurred.
        // We require that any callers using the rule tree at this point are
        // on the main thread only, which lets us trigger a synchronous GC
        // here to avoid leaking anything. We use the GC machinery, rather
        // than just dropping directly, so that we benefit from the iterative
        // destruction and don't trigger unbounded recursion during drop. See
        // [1] and the associated crashtest.
        //
        if old_head.is_null() {
            debug_assert!(
                !thread_state::get().is_worker() &&
                    (thread_state::get().is_layout() || thread_state::get().is_script())
            );
            // Add the node as the sole entry in the free list.
            debug_assert!(node.next_free.load(Ordering::Relaxed).is_null());
            node.next_free.store(FREE_LIST_SENTINEL, Ordering::Relaxed);
            free_list.store(node as *const _ as *mut _, Ordering::Relaxed);

            // Invoke the GC.
            //
            // Note that we need to hold a strong reference to the root so that
            // it doesn't go away during the GC (which would happen if we're
            // freeing the last external reference into the rule tree). This is
            // nicely enforced by having the gc() method live on StrongRuleNode
            // rather than RuleNode.
            let strong_root: StrongRuleNode = node.root.as_ref().unwrap().upgrade();
            unsafe {
                strong_root.gc();
            }

            // Leave the free list null, like we found it, such that additional
            // drops for straggling rule nodes will take this same codepath.
            debug_assert_eq!(root.next_free.load(Ordering::Relaxed), FREE_LIST_SENTINEL);
            root.next_free.store(ptr::null_mut(), Ordering::Relaxed);

            // Return. If strong_root is the last strong reference to the root,
            // this will re-enter StrongRuleNode::drop, and take the
            // root-dropping path earlier in this function.
            return;
        }

        // If we're already in the free list, don't spinloop trying to push
        // ourselves again. Note that this is just a fast path, so it doesn't
        // need a strong memory ordering.
        if node.next_free.load(Ordering::Relaxed) != ptr::null_mut() {
            return;
        }

        // Ensure we "lock" the free list head by swapping it with
        // FREE_LIST_LOCKED.
        //
        // Note that we use Acquire/Release semantics for the free list
        // synchronization, in order to guarantee that the next_free
        // reads/writes we do below are properly visible from multiple threads
        // racing.
        loop {
            match free_list.compare_exchange_weak(
                old_head,
                FREE_LIST_LOCKED,
                Ordering::Acquire,
                Ordering::Relaxed,
            ) {
                Ok(..) => {
                    if old_head != FREE_LIST_LOCKED {
                        break;
                    }
                },
                Err(new) => old_head = new,
            }
        }

        // If another thread has raced with us while using the same rule node,
        // just store the old head again, we're done.
        //
        // Note that we can use relaxed operations for loading since we're
        // effectively locking the free list with Acquire/Release semantics, and
        // the memory ordering is already guaranteed by that locking/unlocking.
        if node.next_free.load(Ordering::Relaxed) != ptr::null_mut() {
            free_list.store(old_head, Ordering::Release);
            return;
        }

        // Else store the old head as the next pointer, and store ourselves as
        // the new head of the free list.
        //
        // This can be relaxed since this pointer won't be read until GC.
        node.next_free.store(old_head, Ordering::Relaxed);

        // Increment the free count. This doesn't need to be an RMW atomic
        // operation, because the free list is "locked".
        let old_free_count = root.free_count().load(Ordering::Relaxed);
        root.free_count()
            .store(old_free_count + 1, Ordering::Relaxed);

        // This can be release because of the locking of the free list, that
        // ensures that all the other nodes racing with this one are using
        // `Acquire`.
        free_list.store(self.ptr(), Ordering::Release);
    }
}

impl<'a> From<&'a StrongRuleNode> for WeakRuleNode {
    fn from(node: &'a StrongRuleNode) -> Self {
        WeakRuleNode::from_ptr(node.p)
    }
}

impl WeakRuleNode {
    #[inline]
    fn ptr(&self) -> *mut RuleNode {
        self.p.as_ptr()
    }

    fn upgrade(&self) -> StrongRuleNode {
        debug!("Upgrading weak node: {:p}", self.ptr());

        let node = unsafe { &*self.ptr() };
        node.refcount.fetch_add(1, Ordering::Relaxed);
        StrongRuleNode::from_ptr(self.p)
    }

    fn from_ptr(p: ptr::NonNull<RuleNode>) -> Self {
        WeakRuleNode { p }
    }
}