Source code

Revision control

Other Tools

1
/* This Source Code Form is subject to the terms of the Mozilla Public
2
* License, v. 2.0. If a copy of the MPL was not distributed with this
3
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
4
5
#![allow(unsafe_code)]
6
7
//! The rule tree.
8
9
use crate::applicable_declarations::ApplicableDeclarationList;
10
#[cfg(feature = "gecko")]
11
use crate::gecko::selector_parser::PseudoElement;
12
use crate::hash::{self, FxHashMap};
13
use crate::properties::{Importance, LonghandIdSet, PropertyDeclarationBlock};
14
use crate::shared_lock::{Locked, SharedRwLockReadGuard, StylesheetGuards};
15
use crate::stylesheets::{Origin, StyleRule};
16
use crate::thread_state;
17
use malloc_size_of::{MallocShallowSizeOf, MallocSizeOf, MallocSizeOfOps};
18
use parking_lot::RwLock;
19
use servo_arc::{Arc, ArcBorrow, ArcUnion, ArcUnionBorrow};
20
use smallvec::SmallVec;
21
use std::io::{self, Write};
22
use std::mem;
23
use std::ptr;
24
use std::sync::atomic::{AtomicPtr, AtomicUsize, Ordering};
25
26
/// The rule tree, the structure servo uses to preserve the results of selector
/// matching.
///
/// This is organized as a tree of rules. When a node matches a set of rules,
/// they're inserted in order in the tree, starting with the less specific one.
///
/// When a rule is inserted in the tree, other elements may share the path up to
/// a given rule. If that's the case, we don't duplicate child nodes, but share
/// them.
///
/// When the rule node refcount drops to zero, it doesn't get freed. It gets
/// instead put into a free list, and it is potentially GC'd after a while in a
/// single-threaded fashion.
///
/// That way, a rule node that represents a likely-to-match-again rule (like a
/// :hover rule) can be reused if we haven't GC'd it yet.
///
/// See the discussion at https://github.com/servo/servo/pull/15562 and the IRC
/// logs linked from there to see a discussion about the different memory
/// orderings used here.
#[derive(Debug)]
pub struct RuleTree {
    // The root node; every inserted rule path hangs off of it, and it also
    // hosts the free list head and the free counter.
    root: StrongRuleNode,
}
51
52
impl Drop for RuleTree {
    fn drop(&mut self) {
        // GC the rule tree. This drains the free list, which lets the
        // assertion below verify the tree was fully cleaned up.
        unsafe {
            self.gc();
        }

        // After the GC, the free list should be empty.
        debug_assert_eq!(
            self.root.get().next_free.load(Ordering::Relaxed),
            FREE_LIST_SENTINEL
        );

        // Remove the sentinel. This indicates that GCs will no longer occur.
        // Any further drops of StrongRuleNodes must occur on the main thread,
        // and will trigger synchronous dropping of the Rule nodes.
        self.root
            .get()
            .next_free
            .store(ptr::null_mut(), Ordering::Relaxed);
    }
}
74
75
impl MallocSizeOf for RuleTree {
76
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
77
let mut n = 0;
78
let mut stack = SmallVec::<[_; 32]>::new();
79
stack.push(self.root.downgrade());
80
81
while let Some(node) = stack.pop() {
82
n += unsafe { ops.malloc_size_of(node.ptr()) };
83
let children = unsafe { (*node.ptr()).children.read() };
84
children.shallow_size_of(ops);
85
children.each(|c| stack.push(c.clone()));
86
}
87
88
n
89
}
90
}
91
92
// Key identifying a child of a rule node: the cascade level plus the address
// of the child's style source. The pointer is used purely as an identity
// (hashed and compared), never dereferenced through this key.
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
struct ChildKey(CascadeLevel, ptr::NonNull<()>);

// SAFETY: the NonNull is an opaque identity token; nothing in this module
// dereferences it via ChildKey, so sending/sharing the key across threads is
// sound. NOTE(review): confirm no other module dereferences it either.
unsafe impl Send for ChildKey {}
unsafe impl Sync for ChildKey {}
97
98
/// A style source for the rule node. It can either be a CSS style rule or a
/// declaration block.
///
/// Note that, even though the declaration block from inside the style rule
/// could be enough to implement the rule tree, keeping the whole rule provides
/// more debuggability, and also the ability to show those selectors to
/// devtools.
#[derive(Clone, Debug)]
pub struct StyleSource(ArcUnion<Locked<StyleRule>, Locked<PropertyDeclarationBlock>>);

impl PartialEq for StyleSource {
    // Equality is pointer identity of the underlying allocation, not a deep
    // comparison of the rule / declaration contents.
    fn eq(&self, other: &Self) -> bool {
        ArcUnion::ptr_eq(&self.0, &other.0)
    }
}
113
114
impl StyleSource {
    /// Creates a StyleSource from a StyleRule.
    pub fn from_rule(rule: Arc<Locked<StyleRule>>) -> Self {
        StyleSource(ArcUnion::from_first(rule))
    }

    /// Returns the address of the underlying allocation, used as the identity
    /// half of a `ChildKey`.
    #[inline]
    fn key(&self) -> ptr::NonNull<()> {
        self.0.ptr()
    }

    /// Creates a StyleSource from a PropertyDeclarationBlock.
    pub fn from_declarations(decls: Arc<Locked<PropertyDeclarationBlock>>) -> Self {
        StyleSource(ArcUnion::from_second(decls))
    }

    /// Debug helper: writes the selectors (when this is a rule) and the
    /// declarations to `writer`. Write errors are deliberately ignored.
    fn dump<W: Write>(&self, guard: &SharedRwLockReadGuard, writer: &mut W) {
        if let Some(ref rule) = self.0.as_first() {
            let rule = rule.read_with(guard);
            let _ = write!(writer, "{:?}", rule.selectors);
        }

        let _ = write!(writer, " -> {:?}", self.read(guard).declarations());
    }

    /// Read the style source guard, and obtain thus read access to the
    /// underlying property declaration block.
    #[inline]
    pub fn read<'a>(&'a self, guard: &'a SharedRwLockReadGuard) -> &'a PropertyDeclarationBlock {
        // For a style rule we first need to reach the block stored inside the
        // rule; a bare declaration block is read directly.
        let block: &Locked<PropertyDeclarationBlock> = match self.0.borrow() {
            ArcUnionBorrow::First(ref rule) => &rule.get().read_with(guard).block,
            ArcUnionBorrow::Second(ref block) => block.get(),
        };
        block.read_with(guard)
    }

    /// Indicates if this StyleSource is a style rule.
    pub fn is_rule(&self) -> bool {
        self.0.is_first()
    }

    /// Indicates if this StyleSource is a PropertyDeclarationBlock.
    pub fn is_declarations(&self) -> bool {
        self.0.is_second()
    }

    /// Returns the style rule if applicable, otherwise None.
    pub fn as_rule(&self) -> Option<ArcBorrow<Locked<StyleRule>>> {
        self.0.as_first()
    }

    /// Returns the declaration block if applicable, otherwise None.
    pub fn as_declarations(&self) -> Option<ArcBorrow<Locked<PropertyDeclarationBlock>>> {
        self.0.as_second()
    }
}
170
171
/// This value exists here so a node that pushes itself to the list can know
/// that it is in the free list by looking at its next pointer, and comparing
/// it with null.
///
/// The root node doesn't have a null pointer in the free list, but this value.
const FREE_LIST_SENTINEL: *mut RuleNode = 0x01 as *mut RuleNode;

/// A second sentinel value for the free list, indicating that it's locked (i.e.
/// another thread is currently adding an entry). We spin if we find this value.
const FREE_LIST_LOCKED: *mut RuleNode = 0x02 as *mut RuleNode;

/// A counter to track how many inner shadow roots rules deep we are.
///
/// This is used to handle nested shadow trees whose rules apply to the same
/// element. (NOTE(review): the original bug/spec link documenting this was
/// lost from this copy of the file; restore it from upstream if known.)
///
/// In particular, it'd be `0` for the innermost shadow host, `1` for the next,
/// and so on.
pub type ShadowCascadeOrder = u8;
191
192
impl RuleTree {
193
/// Construct a new rule tree.
pub fn new() -> Self {
    RuleTree {
        root: StrongRuleNode::new(Box::new(RuleNode::root())),
    }
}

/// Get the root rule node.
pub fn root(&self) -> &StrongRuleNode {
    &self.root
}

/// Debug helper: pretty-prints the whole tree to `writer`, starting at the
/// root with zero indentation.
fn dump<W: Write>(&self, guards: &StylesheetGuards, writer: &mut W) {
    let _ = writeln!(writer, " + RuleTree");
    self.root.get().dump(guards, writer, 0);
}

/// Dump the rule tree to stdout.
pub fn dump_stdout(&self, guards: &StylesheetGuards) {
    let mut stdout = io::stdout();
    self.dump(guards, &mut stdout);
}
215
216
/// Inserts the given rules, that must be in proper order by specificity, and
/// returns the corresponding rule node representing the last inserted one.
///
/// !important rules are detected and inserted into the appropriate position
/// in the rule tree. This allows selector matching to ignore importance,
/// while still maintaining the appropriate cascade order in the rule tree.
pub fn insert_ordered_rules_with_important<'a, I>(
    &self,
    iter: I,
    guards: &StylesheetGuards,
) -> StrongRuleNode
where
    I: Iterator<Item = (StyleSource, CascadeLevel, ShadowCascadeOrder)>,
{
    use self::CascadeLevel::*;
    let mut current = self.root.clone();
    let mut last_level = current.get().level;

    let mut found_important = false;
    let mut important_style_attr = None;

    // One bucket per important cascade level; sources that contain any
    // !important declaration are stashed here while the normal pass runs,
    // and re-inserted afterwards in ascending order of precedence.
    let mut important_same_tree = SmallVec::<[StyleSource; 4]>::new();
    let mut important_inner_shadow = SmallVec::<[SmallVec<[StyleSource; 4]>; 4]>::new();
    important_inner_shadow.push(SmallVec::new());

    let mut important_user = SmallVec::<[StyleSource; 4]>::new();
    let mut important_ua = SmallVec::<[StyleSource; 4]>::new();
    let mut transition = None;

    let mut last_cascade_order = 0;
    for (source, level, shadow_cascade_order) in iter {
        debug_assert!(level >= last_level, "Not really ordered");
        debug_assert!(!level.is_important(), "Important levels handled internally");
        let any_important = {
            let pdb = source.read(level.guard(guards));
            pdb.any_important()
        };

        if any_important {
            found_important = true;
            match level {
                InnerShadowNormal => {
                    debug_assert!(
                        shadow_cascade_order >= last_cascade_order,
                        "Not really ordered"
                    );
                    // Each shadow nesting depth gets its own bucket, so the
                    // buckets can be replayed in reverse order below. A new
                    // bucket is only started if the current one is non-empty.
                    if shadow_cascade_order > last_cascade_order &&
                        !important_inner_shadow.last().unwrap().is_empty()
                    {
                        last_cascade_order = shadow_cascade_order;
                        important_inner_shadow.push(SmallVec::new());
                    }
                    important_inner_shadow
                        .last_mut()
                        .unwrap()
                        .push(source.clone())
                },
                SameTreeAuthorNormal => important_same_tree.push(source.clone()),
                UANormal => important_ua.push(source.clone()),
                UserNormal => important_user.push(source.clone()),
                StyleAttributeNormal => {
                    // There is at most one style attribute per element.
                    debug_assert!(important_style_attr.is_none());
                    important_style_attr = Some(source.clone());
                },
                _ => {},
            };
        }

        // We don't optimize out empty rules, even though we could.
        //
        // Inspector relies on every rule being inserted in the normal level
        // at least once, in order to return the rules with the correct
        // specificity order.
        //
        // TODO(emilio): If we want to apply these optimizations without
        // breaking inspector's expectations, we'd need to run
        // selector-matching again at the inspector's request. That may or
        // may not be a better trade-off.
        if matches!(level, Transitions) && found_important {
            // There can be at most one transition, and it will come at
            // the end of the iterator. Stash it and apply it after
            // !important rules.
            debug_assert!(transition.is_none());
            transition = Some(source);
        } else {
            current = current.ensure_child(self.root.downgrade(), source, level);
        }
        last_level = level;
    }

    // Early-return in the common case of no !important declarations.
    if !found_important {
        return current;
    }

    //
    // Insert important declarations, in order of increasing importance,
    // followed by any transition rule.
    //

    for source in important_same_tree.drain() {
        current = current.ensure_child(self.root.downgrade(), source, SameTreeAuthorImportant);
    }

    if let Some(source) = important_style_attr {
        current = current.ensure_child(self.root.downgrade(), source, StyleAttributeImportant);
    }

    // Note the buckets are replayed in reverse of the order they were
    // collected in, one bucket per shadow cascade order.
    for mut list in important_inner_shadow.drain().rev() {
        for source in list.drain() {
            current = current.ensure_child(self.root.downgrade(), source, InnerShadowImportant);
        }
    }

    for source in important_user.drain() {
        current = current.ensure_child(self.root.downgrade(), source, UserImportant);
    }

    for source in important_ua.drain() {
        current = current.ensure_child(self.root.downgrade(), source, UAImportant);
    }

    if let Some(source) = transition {
        current = current.ensure_child(self.root.downgrade(), source, Transitions);
    }

    current
}
344
345
/// Given a list of applicable declarations, insert the rules and return the
/// corresponding rule node.
pub fn compute_rule_node(
    &self,
    applicable_declarations: &mut ApplicableDeclarationList,
    guards: &StylesheetGuards,
) -> StrongRuleNode {
    self.insert_ordered_rules_with_important(
        applicable_declarations.drain().map(|d| d.for_rule_tree()),
        guards,
    )
}

/// Insert the given rules, that must be in proper order by specificity, and
/// return the corresponding rule node representing the last inserted one.
pub fn insert_ordered_rules<'a, I>(&self, iter: I) -> StrongRuleNode
where
    I: Iterator<Item = (StyleSource, CascadeLevel)>,
{
    self.insert_ordered_rules_from(self.root.clone(), iter)
}

/// Like `insert_ordered_rules`, but starts inserting from an arbitrary node
/// `from` instead of from the root.
fn insert_ordered_rules_from<'a, I>(&self, from: StrongRuleNode, iter: I) -> StrongRuleNode
where
    I: Iterator<Item = (StyleSource, CascadeLevel)>,
{
    let mut current = from;
    let mut last_level = current.get().level;
    for (source, level) in iter {
        debug_assert!(last_level <= level, "Not really ordered");
        current = current.ensure_child(self.root.downgrade(), source, level);
        last_level = level;
    }
    current
}
380
381
/// This can only be called when no other thread is accessing this tree.
pub unsafe fn gc(&self) {
    self.root.gc();
}

/// This can only be called when no other thread is accessing this tree.
pub unsafe fn maybe_gc(&self) {
    #[cfg(debug_assertions)]
    self.maybe_dump_stats();

    self.root.maybe_gc();
}

/// Debug-only diagnostics: at most once every two seconds per thread, logs
/// (at trace level) a histogram of children counts over the whole tree.
#[cfg(debug_assertions)]
fn maybe_dump_stats(&self) {
    use itertools::Itertools;
    use std::cell::Cell;
    use std::time::{Duration, Instant};

    if !log_enabled!(log::Level::Trace) {
        return;
    }

    const RULE_TREE_STATS_INTERVAL: Duration = Duration::from_secs(2);

    thread_local! {
        pub static LAST_STATS: Cell<Instant> = Cell::new(Instant::now());
    };

    // Rate-limit dumping to once per interval on this thread.
    let should_dump = LAST_STATS.with(|s| {
        let now = Instant::now();
        if now.duration_since(s.get()) < RULE_TREE_STATS_INTERVAL {
            return false;
        }
        s.set(now);
        true
    });

    if !should_dump {
        return;
    }

    // Maps a children count to the number of nodes with that many children.
    let mut children_count = FxHashMap::default();

    let mut stack = SmallVec::<[_; 32]>::new();
    stack.push(self.root.clone());
    while let Some(node) = stack.pop() {
        let children = node.get().children.read();
        *children_count.entry(children.len()).or_insert(0) += 1;
        children.each(|c| stack.push(c.upgrade()));
    }

    trace!("Rule tree stats:");
    let counts = children_count.keys().sorted();
    for count in counts {
        trace!(" {} - {}", count, children_count[count]);
    }
}
439
440
/// Replaces a rule in a given level (if present) for another rule.
///
/// Returns the resulting node that represents the new path, or None if
/// the old path is still valid.
pub fn update_rule_at_level(
    &self,
    level: CascadeLevel,
    pdb: Option<ArcBorrow<Locked<PropertyDeclarationBlock>>>,
    path: &StrongRuleNode,
    guards: &StylesheetGuards,
    important_rules_changed: &mut bool,
) -> Option<StrongRuleNode> {
    // Replacement only makes sense for levels that can hold at most one
    // rule per element (style attribute, animations, transitions, SMIL).
    debug_assert!(level.is_unique_per_element());
    // TODO(emilio): Being smarter with lifetimes we could avoid a bit of
    // the refcount churn.
    let mut current = path.clone();
    *important_rules_changed = false;

    // First walk up until the first less-or-equally specific rule,
    // remembering (source, level) of everything we walk past so it can be
    // re-inserted on top of the replacement below.
    let mut children = SmallVec::<[_; 10]>::new();
    while current.get().level > level {
        children.push((
            current.get().source.as_ref().unwrap().clone(),
            current.get().level,
        ));
        current = current.parent().unwrap().clone();
    }

    // Then remove the one at the level we want to replace, if any.
    //
    // NOTE: Here we assume that only one rule can be at the level we're
    // replacing.
    //
    // This is certainly true for HTML style attribute rules, animations and
    // transitions, but could not be so for SMIL animations, which we'd need
    // to special-case (isn't hard, it's just about removing the `if` and
    // special cases, and replacing them for a `while` loop, avoiding the
    // optimizations).
    if current.get().level == level {
        *important_rules_changed |= level.is_important();

        if let Some(pdb) = pdb {
            // If the only rule at the level we're replacing is exactly the
            // same as `pdb`, we're done, and `path` is still valid.
            //
            // TODO(emilio): Another potential optimization is the one where
            // we can just replace the rule at that level for `pdb`, and
            // then we don't need to re-create the children, and `path` is
            // also equally valid. This is less likely, and would require an
            // in-place mutation of the source, which is, at best, fiddly,
            // so let's skip it for now.
            let current_decls = current
                .get()
                .source
                .as_ref()
                .unwrap()
                .as_declarations()
                .expect("Replacing non-declarations style?");
            let is_here_already = ArcBorrow::ptr_eq(&pdb, &current_decls);
            if is_here_already {
                debug!("Picking the fast path in rule replacement");
                return None;
            }
        }
        current = current.parent().unwrap().clone();
    }
    debug_assert!(
        current.get().level != level,
        "Multiple rules should've been replaced?"
    );

    // Insert the rule if it's relevant at this level in the cascade.
    //
    // These optimizations are likely to be important, because the levels
    // where replacements apply (style and animations) tend to trigger
    // pretty bad styling cases already.
    if let Some(pdb) = pdb {
        if level.is_important() {
            // Only insert if the block actually carries !important
            // declarations relevant to an important level.
            if pdb.read_with(level.guard(guards)).any_important() {
                current = current.ensure_child(
                    self.root.downgrade(),
                    StyleSource::from_declarations(pdb.clone_arc()),
                    level,
                );
                *important_rules_changed = true;
            }
        } else {
            // Symmetrically, a normal level only needs the block if it has
            // normal (non-important) declarations.
            if pdb.read_with(level.guard(guards)).any_normal() {
                current = current.ensure_child(
                    self.root.downgrade(),
                    StyleSource::from_declarations(pdb.clone_arc()),
                    level,
                );
            }
        }
    }

    // Now the rule is in the relevant place, push the children as
    // necessary (reversed, since they were collected bottom-up).
    let rule = self.insert_ordered_rules_from(current, children.drain().rev());
    Some(rule)
}
542
543
/// Returns new rule nodes without Transitions level rule.
pub fn remove_transition_rule_if_applicable(&self, path: &StrongRuleNode) -> StrongRuleNode {
    // Return a clone if there is no transition level. Transitions is the
    // last (highest) cascade level, so it can only sit at the end of the
    // path; dropping it is just stepping to the parent.
    if path.cascade_level() != CascadeLevel::Transitions {
        return path.clone();
    }

    path.parent().unwrap().clone()
}
552
553
/// Returns new rule node without rules from declarative animations.
554
pub fn remove_animation_rules(&self, path: &StrongRuleNode) -> StrongRuleNode {
555
// Return a clone if there are no animation rules.
556
if !path.has_animation_or_transition_rules() {
557
return path.clone();
558
}
559
560
let iter = path
561
.self_and_ancestors()
562
.take_while(|node| node.cascade_level() >= CascadeLevel::SMILOverride);
563
let mut last = path;
564
let mut children = SmallVec::<[_; 10]>::new();
565
for node in iter {
566
if !node.cascade_level().is_animation() {
567
children.push((
568
node.get().source.as_ref().unwrap().clone(),
569
node.cascade_level(),
570
));
571
}
572
last = node;
573
}
574
575
let rule =
576
self.insert_ordered_rules_from(last.parent().unwrap().clone(), children.drain().rev());
577
rule
578
}
579
580
/// Returns new rule node by adding animation rules at transition level.
/// The additional rules must be appropriate for the transition
/// level of the cascade, which is the highest level of the cascade.
/// (This is the case for one current caller, the cover rule used
/// for CSS transitions.)
pub fn add_animation_rules_at_transition_level(
    &self,
    path: &StrongRuleNode,
    pdb: Arc<Locked<PropertyDeclarationBlock>>,
    guards: &StylesheetGuards,
) -> StrongRuleNode {
    // This caller doesn't care whether important rules changed; discard it.
    let mut dummy = false;
    self.update_rule_at_level(
        CascadeLevel::Transitions,
        Some(pdb.borrow_arc()),
        path,
        guards,
        &mut dummy,
    )
    .expect("Should return a valid rule node")
}
601
}
602
603
/// The number of RuleNodes added to the free list before we will consider
/// doing a GC when calling maybe_gc(). (The value is copied from Gecko,
/// where it likely did not result from a rigorous performance analysis.)
const RULE_TREE_GC_INTERVAL: usize = 300;

/// The cascade level these rules are relevant at, as per [1][2][3].
/// (NOTE(review): the footnote links for [1]-[4] appear to have been lost
/// from this copy of the file; restore them from upstream if known.)
///
/// Presentational hints for SVG and HTML are in the "author-level
/// zero-specificity" level, that is, right after user rules, and before author
/// rules.
///
/// The order of variants declared here is significant, and must be in
/// _ascending_ order of precedence.
///
/// See also [4] for the Shadow DOM bits. We rely on the invariant that rules
/// from outside the tree the element is in can't affect the element.
///
/// The opposite is not true (i.e., :host and ::slotted) from an "inner" shadow
/// tree may affect an element connected to the document or an "outer" shadow
/// tree.
///
/// We need to differentiate between rules from the same tree and "inner" shadow
/// trees in order to be able to find the right position for the style attribute
/// easily. Otherwise we wouldn't be able to avoid selector-matching when a
/// style attribute is added or removed.
#[repr(u8)]
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, PartialOrd)]
#[cfg_attr(feature = "servo", derive(MallocSizeOf))]
pub enum CascadeLevel {
    /// Normal User-Agent rules.
    UANormal = 0,
    /// User normal rules.
    UserNormal,
    /// Presentational hints.
    PresHints,
    /// Shadow DOM styles from "inner" shadow trees.
    ///
    /// See above for why this is needed instead of merging InnerShadowNormal,
    /// SameTreeAuthorNormal and StyleAttributeNormal inside something like
    /// AuthorNormal.
    InnerShadowNormal,
    /// Author normal rules from the same tree the element is in.
    SameTreeAuthorNormal,
    /// Style attribute normal rules.
    StyleAttributeNormal,
    /// SVG SMIL animations.
    SMILOverride,
    /// CSS animations and script-generated animations.
    Animations,
    /// Author-supplied important rules from the same tree the element came
    /// from.
    SameTreeAuthorImportant,
    /// Style attribute important rules.
    StyleAttributeImportant,
    /// Shadow DOM important rules.
    InnerShadowImportant,
    /// User important rules.
    UserImportant,
    /// User-agent important rules.
    UAImportant,
    /// Transitions
    ///
    /// NB: If this changes from being last, change from_byte below.
    Transitions,
}
673
674
impl CascadeLevel {
675
/// Converts a raw byte to a CascadeLevel.
676
pub unsafe fn from_byte(byte: u8) -> Self {
677
debug_assert!(byte <= CascadeLevel::Transitions as u8);
678
mem::transmute(byte)
679
}
680
681
/// Select a lock guard for this level
682
pub fn guard<'a>(&self, guards: &'a StylesheetGuards<'a>) -> &'a SharedRwLockReadGuard<'a> {
683
match *self {
684
CascadeLevel::UANormal |
685
CascadeLevel::UserNormal |
686
CascadeLevel::UserImportant |
687
CascadeLevel::UAImportant => guards.ua_or_user,
688
_ => guards.author,
689
}
690
}
691
692
/// Returns whether this cascade level is unique per element, in which case
693
/// we can replace the path in the cascade without fear.
694
pub fn is_unique_per_element(&self) -> bool {
695
match *self {
696
CascadeLevel::Transitions |
697
CascadeLevel::Animations |
698
CascadeLevel::SMILOverride |
699
CascadeLevel::StyleAttributeNormal |
700
CascadeLevel::StyleAttributeImportant => true,
701
_ => false,
702
}
703
}
704
705
/// Returns whether this cascade level represents important rules of some
706
/// sort.
707
#[inline]
708
pub fn is_important(&self) -> bool {
709
match *self {
710
CascadeLevel::SameTreeAuthorImportant |
711
CascadeLevel::InnerShadowImportant |
712
CascadeLevel::StyleAttributeImportant |
713
CascadeLevel::UserImportant |
714
CascadeLevel::UAImportant => true,
715
_ => false,
716
}
717
}
718
719
/// Returns the importance relevant for this rule. Pretty similar to
720
/// `is_important`.
721
#[inline]
722
pub fn importance(&self) -> Importance {
723
if self.is_important() {
724
Importance::Important
725
} else {
726
Importance::Normal
727
}
728
}
729
730
/// Returns the cascade origin of the rule.
731
#[inline]
732
pub fn origin(&self) -> Origin {
733
match *self {
734
CascadeLevel::UAImportant | CascadeLevel::UANormal => Origin::UserAgent,
735
CascadeLevel::UserImportant | CascadeLevel::UserNormal => Origin::User,
736
CascadeLevel::PresHints |
737
CascadeLevel::InnerShadowNormal |
738
CascadeLevel::SameTreeAuthorNormal |
739
CascadeLevel::StyleAttributeNormal |
740
CascadeLevel::SMILOverride |
741
CascadeLevel::Animations |
742
CascadeLevel::SameTreeAuthorImportant |
743
CascadeLevel::StyleAttributeImportant |
744
CascadeLevel::InnerShadowImportant |
745
CascadeLevel::Transitions => Origin::Author,
746
}
747
}
748
749
/// Returns whether this cascade level represents an animation rules.
750
#[inline]
751
pub fn is_animation(&self) -> bool {
752
match *self {
753
CascadeLevel::SMILOverride | CascadeLevel::Animations | CascadeLevel::Transitions => {
754
true
755
},
756
_ => false,
757
}
758
}
759
}
760
761
/// The children of a single rule node.
///
/// We optimize the case of no kids and a single child, since they're by far the
/// most common case and it'd cause a bunch of bloat for no reason.
///
/// The children remove themselves when they go away, which means that it's ok
/// for us to store weak pointers to them.
enum RuleNodeChildren {
    /// There are no kids.
    Empty,
    /// There's just one kid. This is an extremely common case, so we don't
    /// bother allocating a map for it.
    One(WeakRuleNode),
    /// At least at one point in time there was more than one kid (that is to
    /// say, we don't bother re-allocating if children are removed dynamically).
    Map(Box<FxHashMap<ChildKey, WeakRuleNode>>),
}

impl MallocShallowSizeOf for RuleNodeChildren {
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        match *self {
            // The inline variants own no heap memory of their own.
            RuleNodeChildren::One(..) | RuleNodeChildren::Empty => 0,
            RuleNodeChildren::Map(ref m) => {
                // Want to account for both the box and the hashmap.
                m.shallow_size_of(ops) + (**m).shallow_size_of(ops)
            },
        }
    }
}

impl Default for RuleNodeChildren {
    // New nodes start with no children.
    fn default() -> Self {
        RuleNodeChildren::Empty
    }
}
796
797
impl RuleNodeChildren {
    /// Executes a given function for each of the children.
    fn each(&self, mut f: impl FnMut(&WeakRuleNode)) {
        match *self {
            RuleNodeChildren::Empty => {},
            RuleNodeChildren::One(ref child) => f(child),
            RuleNodeChildren::Map(ref map) => {
                for (_key, kid) in map.iter() {
                    f(kid)
                }
            },
        }
    }

    /// Returns the number of children.
    fn len(&self) -> usize {
        match *self {
            RuleNodeChildren::Empty => 0,
            RuleNodeChildren::One(..) => 1,
            RuleNodeChildren::Map(ref map) => map.len(),
        }
    }

    /// Returns whether there are no children.
    fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Looks up the child with the given key, if any.
    fn get(&self, key: &ChildKey) -> Option<&WeakRuleNode> {
        match *self {
            RuleNodeChildren::Empty => return None,
            RuleNodeChildren::One(ref kid) => {
                // We're read-locked, so no need to do refcount stuff, since the
                // child is only removed from the main thread, _and_ it'd need
                // to write-lock us anyway.
                if unsafe { (*kid.ptr()).key() } == *key {
                    Some(kid)
                } else {
                    None
                }
            },
            RuleNodeChildren::Map(ref map) => map.get(&key),
        }
    }

    /// Returns the child with key `key`, creating it via `get_new_child` if it
    /// doesn't exist yet. Upgrades the representation Empty -> One -> Map as
    /// children accumulate.
    fn get_or_insert_with(
        &mut self,
        key: ChildKey,
        get_new_child: impl FnOnce() -> StrongRuleNode,
    ) -> StrongRuleNode {
        // Each arm either early-returns with the (existing or new) child, or
        // falls through with the key of the single existing child so we can
        // upgrade One -> Map below.
        let existing_child_key = match *self {
            RuleNodeChildren::Empty => {
                let new = get_new_child();
                debug_assert_eq!(new.get().key(), key);
                *self = RuleNodeChildren::One(new.downgrade());
                return new;
            },
            RuleNodeChildren::One(ref weak) => unsafe {
                // We're locked necessarily, so it's fine to look at our
                // weak-child without refcount-traffic.
                let existing_child_key = (*weak.ptr()).key();
                if existing_child_key == key {
                    return weak.upgrade();
                }
                existing_child_key
            },
            RuleNodeChildren::Map(ref mut map) => {
                return match map.entry(key) {
                    hash::map::Entry::Occupied(ref occupied) => occupied.get().upgrade(),
                    hash::map::Entry::Vacant(vacant) => {
                        let new = get_new_child();

                        debug_assert_eq!(new.get().key(), key);
                        vacant.insert(new.downgrade());

                        new
                    },
                };
            },
        };

        // If we get here, we had exactly one child with a different key;
        // migrate it into a freshly allocated map together with the new one.
        let existing_child = match mem::replace(self, RuleNodeChildren::Empty) {
            RuleNodeChildren::One(o) => o,
            _ => unreachable!(),
        };
        // Two rule-nodes are still a not-totally-uncommon thing, so
        // avoid over-allocating entries.
        //
        // TODO(emilio): Maybe just inline two kids too?
        let mut children = Box::new(FxHashMap::with_capacity_and_hasher(2, Default::default()));
        children.insert(existing_child_key, existing_child);

        let new = get_new_child();
        debug_assert_eq!(new.get().key(), key);
        children.insert(key, new.downgrade());

        *self = RuleNodeChildren::Map(children);

        new
    }

    /// Removes and returns the child with the given key, if present.
    fn remove(&mut self, key: &ChildKey) -> Option<WeakRuleNode> {
        match *self {
            RuleNodeChildren::Empty => return None,
            RuleNodeChildren::One(ref one) => {
                if unsafe { (*one.ptr()).key() } != *key {
                    return None;
                }
            },
            RuleNodeChildren::Map(ref mut multiple) => {
                return multiple.remove(key);
            },
        }

        // Only the matching `One` case falls through to here; note we never
        // downgrade a `Map` back to `One`.
        match mem::replace(self, RuleNodeChildren::Empty) {
            RuleNodeChildren::One(o) => Some(o),
            _ => unreachable!(),
        }
    }
}
915
916
/// A node in the rule tree.
pub struct RuleNode {
    /// The root node. Only the root has no root pointer, for obvious reasons.
    root: Option<WeakRuleNode>,

    /// The parent rule node. Only the root has no parent.
    parent: Option<StrongRuleNode>,

    /// The actual style source, either coming from a selector in a StyleRule,
    /// or a raw property declaration block (like the style attribute).
    ///
    /// None for the root node.
    source: Option<StyleSource>,

    /// The cascade level this rule is positioned at.
    level: CascadeLevel,

    /// Number of strong references to this node.
    refcount: AtomicUsize,

    /// Only used for the root, stores the number of free rule nodes that are
    /// around.
    free_count: AtomicUsize,

    /// The children of a given rule node. Children remove themselves from here
    /// when they go away.
    children: RwLock<RuleNodeChildren>,

    /// The next item in the rule tree free list, that starts on the root node.
    ///
    /// When this is set to null, that means that the rule tree has been torn
    /// down, and GCs will no longer occur. When this happens, StrongRuleNodes
    /// may only be dropped on the main thread, and teardown happens
    /// synchronously.
    next_free: AtomicPtr<RuleNode>,
}

// SAFETY: shared mutable state in the tree goes through atomics
// (refcount/free_count/next_free) or the children RwLock; the free-list
// teardown is documented above as main-thread-only. NOTE(review): soundness
// ultimately depends on StrongRuleNode's code,部分 of which is outside this
// view — confirm against the rest of the module.
unsafe impl Sync for RuleTree {}
unsafe impl Send for RuleTree {}
954
955
// On Gecko builds, hook into the leak checking machinery.
#[cfg(feature = "gecko_refcount_logging")]
mod gecko_leak_checking {
    use super::RuleNode;
    use std::mem::size_of;
    use std::os::raw::{c_char, c_void};

    extern "C" {
        pub fn NS_LogCtor(aPtr: *const c_void, aTypeName: *const c_char, aSize: u32);
        pub fn NS_LogDtor(aPtr: *const c_void, aTypeName: *const c_char, aSize: u32);
    }

    // Nul-terminated type name handed to the C logger.
    static NAME: &'static [u8] = b"RuleNode\0";

    /// Logs the creation of a heap-allocated object to Gecko's leak-checking machinery.
    pub fn log_ctor(ptr: *const RuleNode) {
        let s = NAME as *const [u8] as *const u8 as *const c_char;
        // SAFETY: FFI call; `NAME` is nul-terminated, and `ptr` is only used
        // as an opaque identity by the logger.
        unsafe {
            NS_LogCtor(ptr as *const c_void, s, size_of::<RuleNode>() as u32);
        }
    }

    /// Logs the destruction of a heap-allocated object to Gecko's leak-checking machinery.
    pub fn log_dtor(ptr: *const RuleNode) {
        let s = NAME as *const [u8] as *const u8 as *const c_char;
        // SAFETY: same contract as `log_ctor` above.
        unsafe {
            NS_LogDtor(ptr as *const c_void, s, size_of::<RuleNode>() as u32);
        }
    }

}
987
/// Records the creation of a rule node with the Gecko leak checker when the
/// `gecko_refcount_logging` feature is enabled; a no-op otherwise.
#[inline(always)]
fn log_new(_ptr: *const RuleNode) {
    #[cfg(feature = "gecko_refcount_logging")]
    gecko_leak_checking::log_ctor(_ptr);
}
993
/// Records the destruction of a rule node with the Gecko leak checker when
/// the `gecko_refcount_logging` feature is enabled; a no-op otherwise.
#[inline(always)]
fn log_drop(_ptr: *const RuleNode) {
    #[cfg(feature = "gecko_refcount_logging")]
    gecko_leak_checking::log_dtor(_ptr);
}
999
impl RuleNode {
    /// Creates a new interior node, with refcount 1, hanging off `parent`
    /// with the given style `source` and cascade `level`.
    fn new(
        root: WeakRuleNode,
        parent: StrongRuleNode,
        source: StyleSource,
        level: CascadeLevel,
    ) -> Self {
        // `root` must really be the root: only the root has no parent.
        debug_assert!(root.upgrade().parent().is_none());
        RuleNode {
            root: Some(root),
            parent: Some(parent),
            source: Some(source),
            level: level,
            refcount: AtomicUsize::new(1),
            children: Default::default(),
            free_count: AtomicUsize::new(0),
            next_free: AtomicPtr::new(ptr::null_mut()),
        }
    }

    /// Creates the root node: no parent, no source, and a free list that
    /// starts out as the empty-list sentinel (a null `next_free` means the
    /// tree has been torn down, see the field docs).
    fn root() -> Self {
        RuleNode {
            root: None,
            parent: None,
            source: None,
            level: CascadeLevel::UANormal,
            refcount: AtomicUsize::new(1),
            free_count: AtomicUsize::new(0),
            children: Default::default(),
            next_free: AtomicPtr::new(FREE_LIST_SENTINEL),
        }
    }

    /// The key this node is stored under in its parent's child map.
    ///
    /// Panics if called on the root, which has no source.
    fn key(&self) -> ChildKey {
        ChildKey(
            self.level,
            self.source
                .as_ref()
                .expect("Called key() on the root node")
                .key(),
        )
    }

    /// Whether this node is the root of the rule tree (the only node
    /// without a parent).
    fn is_root(&self) -> bool {
        self.parent.is_none()
    }

    /// The count of nodes currently sitting on the free list; only
    /// meaningful (and only allowed) on the root node.
    fn free_count(&self) -> &AtomicUsize {
        debug_assert!(self.is_root());
        &self.free_count
    }

    /// Remove this rule node from the child list.
    ///
    /// This is expected to be called before freeing the node from the free
    /// list, on the main thread.
    unsafe fn remove_from_child_list(&self) {
        debug!(
            "Remove from child list: {:?}, parent: {:?}",
            self as *const RuleNode,
            self.parent.as_ref().map(|p| p.ptr())
        );

        if let Some(parent) = self.parent.as_ref() {
            let weak = parent.get().children.write().remove(&self.key());
            // The entry removed from the parent's map must have been us.
            assert_eq!(weak.unwrap().ptr() as *const _, self as *const _);
        }
    }

    /// Writes a textual dump of this subtree to `writer`, indented by
    /// `indent` spaces, for debugging. Write errors are deliberately
    /// ignored (`let _ =`): this is best-effort diagnostics output.
    fn dump<W: Write>(&self, guards: &StylesheetGuards, writer: &mut W, indent: usize) {
        const INDENT_INCREMENT: usize = 4;

        for _ in 0..indent {
            let _ = write!(writer, " ");
        }

        let _ = writeln!(
            writer,
            " - {:?} (ref: {:?}, parent: {:?})",
            self as *const _,
            self.refcount.load(Ordering::Relaxed),
            self.parent.as_ref().map(|p| p.ptr())
        );

        for _ in 0..indent {
            let _ = write!(writer, " ");
        }

        if self.source.is_some() {
            self.source
                .as_ref()
                .unwrap()
                .dump(self.level.guard(guards), writer);
        } else {
            // Only the root should lack a source, and the root is always
            // dumped at indent 0.
            if indent != 0 {
                warn!("How has this happened?");
            }
            let _ = write!(writer, "(root)");
        }

        let _ = write!(writer, "\n");
        self.children.read().each(|child| {
            child
                .upgrade()
                .get()
                .dump(guards, writer, indent + INDENT_INCREMENT);
        });
    }
}
1109
/// A non-owning reference to a rule node: cloning or dropping it does not
/// touch the node's refcount (see `upgrade()` for turning it into a strong
/// reference). Used for child maps and root back-pointers, presumably to
/// avoid strong reference cycles — confirm against the GC logic.
#[derive(Clone)]
struct WeakRuleNode {
    p: ptr::NonNull<RuleNode>,
}
1114
/// A strong reference to a rule node.
///
/// Holds one unit of the node's refcount; `Clone` bumps it and `Drop`
/// releases it (possibly pushing the node onto the tree's free list).
pub struct StrongRuleNode {
    // Never null; the node is kept alive by the refcount we hold.
    p: ptr::NonNull<RuleNode>,
}
1120
// SAFETY: `StrongRuleNode` is a refcounted handle; the refcount is atomic
// and all other node state is either immutable after creation or behind
// atomics/locks, so handles can be sent and shared across threads.
unsafe impl Send for StrongRuleNode {}
unsafe impl Sync for StrongRuleNode {}
1123
// Report 0 for a StrongRuleNode's heap size — presumably because rule
// nodes are shared and measured via the tree itself, so per-handle
// measurement would double-count. TODO(review): confirm against the
// tree's MallocSizeOf implementation.
#[cfg(feature = "servo")]
malloc_size_of_is_0!(StrongRuleNode);
1126
impl StrongRuleNode {
1127
/// Takes ownership of a freshly boxed `RuleNode` (which starts with
/// refcount 1) and wraps it in a strong handle.
fn new(n: Box<RuleNode>) -> Self {
    // Interior nodes have both parent and source; only the root has
    // neither.
    debug_assert_eq!(n.parent.is_none(), !n.source.is_some());

    // TODO(emilio): Use into_raw_non_null when it's stable.
    // SAFETY: Box::into_raw never returns null.
    let ptr = unsafe { ptr::NonNull::new_unchecked(Box::into_raw(n)) };
    log_new(ptr.as_ptr());

    debug!("Creating rule node: {:p}", ptr);

    StrongRuleNode::from_ptr(ptr)
}
1139
/// Wraps a raw node pointer without touching the refcount; the caller
/// must already own one unit of it.
fn from_ptr(p: ptr::NonNull<RuleNode>) -> Self {
    StrongRuleNode { p }
}
1143
/// Creates a weak (non-owning) reference to the same node; the refcount
/// is unchanged.
fn downgrade(&self) -> WeakRuleNode {
    WeakRuleNode::from_ptr(self.p)
}
1147
/// Get the parent rule node of this rule node.
///
/// Returns `None` only for the root of the tree.
pub fn parent(&self) -> Option<&StrongRuleNode> {
    self.get().parent.as_ref()
}
1152
/// Returns the child of `self` keyed by (`level`, `source`), creating
/// and inserting it if it doesn't exist yet.
///
/// Takes an upgradable read lock on the child map first, so the common
/// case of an already-existing child doesn't need the write lock; the
/// lock is upgraded only when an insertion is actually required.
fn ensure_child(
    &self,
    root: WeakRuleNode,
    source: StyleSource,
    level: CascadeLevel,
) -> StrongRuleNode {
    use parking_lot::RwLockUpgradableReadGuard;

    let key = ChildKey(level, source.key());

    // Fast path: child already present.
    let read_guard = self.get().children.upgradable_read();
    if let Some(child) = read_guard.get(&key) {
        return child.upgrade();
    }

    // Slow path: atomically upgrade to a write lock and insert; the
    // closure only runs if the key is still absent.
    RwLockUpgradableReadGuard::upgrade(read_guard).get_or_insert_with(key, move || {
        StrongRuleNode::new(Box::new(RuleNode::new(
            root,
            self.clone(),
            source.clone(),
            level,
        )))
    })
}
1177
/// Raw pointer to the RuleNode.
///
/// Does not transfer or affect ownership; mainly used for logging and
/// identity comparisons.
#[inline]
pub fn ptr(&self) -> *mut RuleNode {
    self.p.as_ptr()
}
1183
/// Borrows the underlying node.
///
/// In debug builds, asserts the refcount is still positive, i.e. that
/// we're not reading through a handle to an already-released node.
fn get(&self) -> &RuleNode {
    if cfg!(debug_assertions) {
        let node = unsafe { &*self.p.as_ptr() };
        assert!(node.refcount.load(Ordering::Relaxed) > 0);
    }
    // SAFETY: this strong handle owns a unit of the refcount, which keeps
    // the allocation alive for the duration of the borrow.
    unsafe { &*self.p.as_ptr() }
}
1190
1191
/// Get the style source corresponding to this rule node. May return `None`
/// if it's the root node, which means that the node hasn't matched any
/// rules.
pub fn style_source(&self) -> Option<&StyleSource> {
    self.get().source.as_ref()
}
1198
/// The cascade level for this node (UA / user / author / animations,
/// etc.) at which its declarations apply.
pub fn cascade_level(&self) -> CascadeLevel {
    self.get().level
}
1203
/// Get the importance that this rule node represents (derived from its
/// cascade level).
pub fn importance(&self) -> Importance {
    self.get().level.importance()
}
1208
/// Get an iterator for this rule node and its ancestors, starting at
/// `self` and walking parent links up to (and including) the root.
pub fn self_and_ancestors(&self) -> SelfAndAncestors {
    SelfAndAncestors {
        current: Some(self),
    }
}
1215
/// Returns whether this node has any child, only intended for testing
1216
/// purposes, and called on a single-threaded fashion only.
1217
pub unsafe fn has_children_for_testing(&self) -> bool {
1218
self.get().children.read().is_empty()
1219
}
1220
1221
/// Pops the head node off the root's free list and returns a weak
/// reference to it, or `None` if the list is empty (head == sentinel).
///
/// Must be called on the root, from the single thread allowed to GC
/// (layout or script, never a worker) — see the assertions below.
unsafe fn pop_from_free_list(&self) -> Option<WeakRuleNode> {
    // NB: This can run from the root node destructor, so we can't use
    // `get()`, since it asserts the refcount is bigger than zero.
    let me = &*self.p.as_ptr();

    debug_assert!(me.is_root());

    // FIXME(#14213): Apparently the layout data can be gone from script.
    //
    // That's... suspicious, but it's fine if it happens for the rule tree
    // case, so just don't crash in the case we're doing the final GC in
    // script.

    debug_assert!(
        !thread_state::get().is_worker() &&
            (thread_state::get().is_layout() || thread_state::get().is_script())
    );

    let current = me.next_free.load(Ordering::Relaxed);
    if current == FREE_LIST_SENTINEL {
        return None;
    }

    debug_assert!(
        !current.is_null(),
        "Multiple threads are operating on the free list at the \
         same time?"
    );
    debug_assert!(
        current != self.p.as_ptr(),
        "How did the root end up in the free list?"
    );

    // Detach the head's next pointer (leaving it null marks the node as
    // "not on the free list" for the fast-path check in Drop).
    let next = (*current)
        .next_free
        .swap(ptr::null_mut(), Ordering::Relaxed);

    debug_assert!(
        !next.is_null(),
        "How did a null pointer end up in the free list?"
    );

    me.next_free.store(next, Ordering::Relaxed);

    debug!(
        "Popping from free list: cur: {:?}, next: {:?}",
        current, next
    );

    Some(WeakRuleNode::from_ptr(ptr::NonNull::new_unchecked(current)))
}
1273
/// Debug-only sanity check: walks the whole free list asserting that no
/// link is null and no node appears twice (which would indicate a cycle
/// or a double-push).
unsafe fn assert_free_list_has_no_duplicates_or_null(&self) {
    assert!(cfg!(debug_assertions), "This is an expensive check!");
    use crate::hash::FxHashSet;

    let me = &*self.p.as_ptr();
    assert!(me.is_root());

    let mut current = self.p.as_ptr();
    let mut seen = FxHashSet::default();
    while current != FREE_LIST_SENTINEL {
        let next = (*current).next_free.load(Ordering::Relaxed);
        assert!(!next.is_null());
        assert!(!seen.contains(&next));
        seen.insert(next);

        current = next;
    }
}
1292
/// Drains the free list, deallocating every node whose refcount is still
/// zero (nodes revived in the meantime are simply skipped) after
/// unlinking it from its parent's child map. Resets the free count.
///
/// Must be called on the root, single-threaded (see
/// `pop_from_free_list`).
unsafe fn gc(&self) {
    if cfg!(debug_assertions) {
        self.assert_free_list_has_no_duplicates_or_null();
    }

    // NB: This can run from the root node destructor, so we can't use
    // `get()`, since it asserts the refcount is bigger than zero.
    let me = &*self.p.as_ptr();

    debug_assert!(me.is_root(), "Can't call GC on a non-root node!");

    while let Some(weak) = self.pop_from_free_list() {
        let node = &*weak.p.as_ptr();
        if node.refcount.load(Ordering::Relaxed) != 0 {
            // Nothing to do, the node is still alive.
            continue;
        }

        debug!("GC'ing {:?}", weak.p.as_ptr());
        node.remove_from_child_list();
        log_drop(weak.p.as_ptr());
        // Reconstitute the Box created in StrongRuleNode::new and let it
        // drop, freeing the allocation.
        let _ = Box::from_raw(weak.p.as_ptr());
    }

    me.free_count().store(0, Ordering::Relaxed);

    debug_assert_eq!(me.next_free.load(Ordering::Relaxed), FREE_LIST_SENTINEL);
}
1321
/// Runs a GC pass if more than `RULE_TREE_GC_INTERVAL` nodes have been
/// pushed onto the free list since the last collection.
///
/// Same calling restrictions as `gc`.
unsafe fn maybe_gc(&self) {
    debug_assert!(self.get().is_root(), "Can't call GC on a non-root node!");
    if self.get().free_count().load(Ordering::Relaxed) > RULE_TREE_GC_INTERVAL {
        self.gc();
    }
}
1328
/// Returns true if any properties specified by `rule_type_mask` was set by
/// an author rule.
///
/// Walks this element's rule nodes (and, via explicit `inherit` on
/// non-author declarations, ancestor elements' rule nodes) looking for an
/// author-level declaration of any background/border/padding longhand
/// selected by `rule_type_mask`. `author_colors_allowed` restricts the
/// check to non-color properties, except background-color which is then
/// only counted when non-transparent.
#[cfg(feature = "gecko")]
pub fn has_author_specified_rules<E>(
    &self,
    mut element: E,
    mut pseudo: Option<PseudoElement>,
    guards: &StylesheetGuards,
    rule_type_mask: u32,
    author_colors_allowed: bool,
) -> bool
where
    E: crate::dom::TElement,
{
    use crate::gecko_bindings::structs::NS_AUTHOR_SPECIFIED_BACKGROUND;
    use crate::gecko_bindings::structs::NS_AUTHOR_SPECIFIED_BORDER;
    use crate::gecko_bindings::structs::NS_AUTHOR_SPECIFIED_PADDING;
    use crate::properties::{CSSWideKeyword, LonghandId};
    use crate::properties::{PropertyDeclaration, PropertyDeclarationId};
    use crate::values::specified::Color;
    use std::borrow::Cow;

    // Reset properties:
    const BACKGROUND_PROPS: &'static [LonghandId] =
        &[LonghandId::BackgroundColor, LonghandId::BackgroundImage];

    const BORDER_PROPS: &'static [LonghandId] = &[
        LonghandId::BorderTopColor,
        LonghandId::BorderTopStyle,
        LonghandId::BorderTopWidth,
        LonghandId::BorderRightColor,
        LonghandId::BorderRightStyle,
        LonghandId::BorderRightWidth,
        LonghandId::BorderBottomColor,
        LonghandId::BorderBottomStyle,
        LonghandId::BorderBottomWidth,
        LonghandId::BorderLeftColor,
        LonghandId::BorderLeftStyle,
        LonghandId::BorderLeftWidth,
        LonghandId::BorderTopLeftRadius,
        LonghandId::BorderTopRightRadius,
        LonghandId::BorderBottomRightRadius,
        LonghandId::BorderBottomLeftRadius,
        LonghandId::BorderInlineStartColor,
        LonghandId::BorderInlineStartStyle,
        LonghandId::BorderInlineStartWidth,
        LonghandId::BorderInlineEndColor,
        LonghandId::BorderInlineEndStyle,
        LonghandId::BorderInlineEndWidth,
        LonghandId::BorderBlockStartColor,
        LonghandId::BorderBlockStartStyle,
        LonghandId::BorderBlockStartWidth,
        LonghandId::BorderBlockEndColor,
        LonghandId::BorderBlockEndStyle,
        LonghandId::BorderBlockEndWidth,
    ];

    const PADDING_PROPS: &'static [LonghandId] = &[
        LonghandId::PaddingTop,
        LonghandId::PaddingRight,
        LonghandId::PaddingBottom,
        LonghandId::PaddingLeft,
        LonghandId::PaddingInlineStart,
        LonghandId::PaddingInlineEnd,
        LonghandId::PaddingBlockStart,
        LonghandId::PaddingBlockEnd,
    ];

    // Set of properties that we are currently interested in.
    let mut properties = LonghandIdSet::new();

    if rule_type_mask & NS_AUTHOR_SPECIFIED_BACKGROUND != 0 {
        for id in BACKGROUND_PROPS {
            properties.insert(*id);
        }
    }
    if rule_type_mask & NS_AUTHOR_SPECIFIED_BORDER != 0 {
        for id in BORDER_PROPS {
            properties.insert(*id);
        }
    }
    if rule_type_mask & NS_AUTHOR_SPECIFIED_PADDING != 0 {
        for id in PADDING_PROPS {
            properties.insert(*id);
        }
    }

    // If author colors are not allowed, don't look at those properties
    // (except for background-color which is special and we handle below).
    if !author_colors_allowed {
        properties.remove_all(LonghandIdSet::ignored_when_colors_disabled());
        if rule_type_mask & NS_AUTHOR_SPECIFIED_BACKGROUND != 0 {
            properties.insert(LonghandId::BackgroundColor);
        }
    }

    let mut element_rule_node = Cow::Borrowed(self);

    loop {
        // We need to be careful not to count styles covered up by
        // user-important or UA-important declarations. But we do want to
        // catch explicit inherit styling in those and check our parent
        // element to see whether we have user styling for those properties.
        // Note that we don't care here about inheritance due to lack of a
        // specified value, since all the properties we care about are reset
        // properties.

        let mut inherited_properties = LonghandIdSet::new();
        let mut have_explicit_ua_inherit = false;

        for node in element_rule_node.self_and_ancestors() {
            let source = node.style_source();
            let declarations = if source.is_some() {
                source
                    .as_ref()
                    .unwrap()
                    .read(node.cascade_level().guard(guards))
                    .declaration_importance_iter()
            } else {
                continue;
            };

            // Iterate over declarations of the longhands we care about.
            // `.rev()` visits declarations in cascade order (later wins),
            // keeping only those whose importance matches this node's
            // cascade level.
            let node_importance = node.importance();
            let longhands = declarations.rev().filter_map(|(declaration, importance)| {
                if importance != node_importance {
                    return None;
                }
                match declaration.id() {
                    PropertyDeclarationId::Longhand(id) => Some((id, declaration)),
                    _ => None,
                }
            });

            match node.cascade_level() {
                // Non-author rules:
                CascadeLevel::UANormal |
                CascadeLevel::UAImportant |
                CascadeLevel::UserNormal |
                CascadeLevel::UserImportant => {
                    for (id, declaration) in longhands {
                        if properties.contains(id) {
                            // This property was set by a non-author rule.
                            // Stop looking for it in this element's rule
                            // nodes.
                            properties.remove(id);

                            // However, if it is inherited, then it might be
                            // inherited from an author rule from an
                            // ancestor element's rule nodes.
                            if declaration.get_css_wide_keyword() ==
                                Some(CSSWideKeyword::Inherit)
                            {
                                have_explicit_ua_inherit = true;
                                inherited_properties.insert(id);
                            }
                        }
                    }
                },
                // Author rules:
                CascadeLevel::PresHints |
                CascadeLevel::SameTreeAuthorNormal |
                CascadeLevel::InnerShadowNormal |
                CascadeLevel::StyleAttributeNormal |
                CascadeLevel::SMILOverride |
                CascadeLevel::Animations |
                CascadeLevel::SameTreeAuthorImportant |
                CascadeLevel::InnerShadowImportant |
                CascadeLevel::StyleAttributeImportant |
                CascadeLevel::Transitions => {
                    for (id, declaration) in longhands {
                        if properties.contains(id) {
                            if !author_colors_allowed {
                                // background-color is only counted as
                                // author-specified when not transparent.
                                if let PropertyDeclarationId::BackgroundColor(ref color) =
                                    *declaration
                                {
                                    return *color == Color::transparent();
                                }
                            }
                            return true;
                        }
                    }
                },
            }
        }

        if !have_explicit_ua_inherit {
            break;
        }

        // Continue to the parent element and search for the inherited properties.
        if let Some(pseudo) = pseudo.take() {
            if pseudo.inherits_from_default_values() {
                break;
            }
        } else {
            element = match element.inheritance_parent() {
                Some(parent) => parent,
                None => break,
            };

            let parent_data = element.mutate_data().unwrap();
            let parent_rule_node = parent_data.styles.primary().rules().clone();
            element_rule_node = Cow::Owned(parent_rule_node);
        }

        properties = inherited_properties;
    }

    false
}
1540
/// Returns true if there is either animation or transition level rule.
1541
pub fn has_animation_or_transition_rules(&self) -> bool {
1542
self.self_and_ancestors()
1543
.take_while(|node| node.cascade_level() >= CascadeLevel::SMILOverride)
1544
.any(|node| node.cascade_level().is_animation())
1545
}
1546
1547
/// Get a set of properties whose CascadeLevel are higher than Animations
/// but not equal to Transitions.
///
/// If there are any custom properties, we set the boolean value of the
/// returned tuple to true.
pub fn get_properties_overriding_animations(
    &self,
    guards: &StylesheetGuards,
) -> (LonghandIdSet, bool) {
    use crate::properties::PropertyDeclarationId;

    // We want to iterate over cascade levels that override the animations
    // level, i.e. !important levels and the transitions level.
    //
    // However, we actually want to skip the transitions level because
    // although it is higher in the cascade than animations, when both
    // transitions and animations are present for a given element and
    // property, transitions are suppressed so that they don't actually
    // override animations.
    let iter = self
        .self_and_ancestors()
        .skip_while(|node| node.cascade_level() == CascadeLevel::Transitions)
        .take_while(|node| node.cascade_level() > CascadeLevel::Animations);
    let mut result = (LonghandIdSet::new(), false);
    for node in iter {
        // Nodes at these levels always have a source (only the root,
        // which is at the lowest level, lacks one).
        let style = node.style_source().unwrap();
        for (decl, important) in style
            .read(node.cascade_level().guard(guards))
            .declaration_importance_iter()
        {
            // Although we are only iterating over cascade levels that
            // override animations, in a given property declaration block we
            // can have a mixture of !important and non-!important
            // declarations but only the !important declarations actually
            // override animations.
            if important.important() {
                match decl.id() {
                    PropertyDeclarationId::Longhand(id) => result.0.insert(id),
                    PropertyDeclarationId::Custom(_) => result.1 = true,
                }
            }
        }
    }
    result
}
}
1593
1594
/// An iterator over a rule node and its ancestors.
#[derive(Clone)]
pub struct SelfAndAncestors<'a> {
    // The next node to yield; `None` once the root's parent is reached.
    current: Option<&'a StrongRuleNode>,
}
1600
impl<'a> Iterator for SelfAndAncestors<'a> {
1601
type Item = &'a StrongRuleNode;
1602
1603
fn next(&mut self) -> Option<Self::Item> {
1604
self.current.map(|node| {
1605
self.current = node.parent();
1606
node
1607
})
1608
}
1609
}
1610
1611
impl Clone for StrongRuleNode {
    /// Cloning a strong handle just bumps the node's refcount.
    ///
    /// Relaxed ordering suffices for the increment: we already hold a
    /// live reference, so the node cannot be freed concurrently (the same
    /// argument `Arc::clone` relies on).
    fn clone(&self) -> Self {
        debug!(
            "{:?}: {:?}+",
            self.ptr(),
            self.get().refcount.load(Ordering::Relaxed)
        );
        debug_assert!(self.get().refcount.load(Ordering::Relaxed) > 0);
        self.get().refcount.fetch_add(1, Ordering::Relaxed);
        StrongRuleNode::from_ptr(self.p)
    }
}
1624
impl Drop for StrongRuleNode {
    /// Releases one unit of the node's refcount. When the count hits
    /// zero the node is not freed immediately: interior nodes are pushed
    /// onto the root's lock-free free list (to be reclaimed by a later
    /// GC), while the root itself is deallocated directly.
    fn drop(&mut self) {
        let node = unsafe { &*self.ptr() };

        debug!(
            "{:?}: {:?}-",
            self.ptr(),
            node.refcount.load(Ordering::Relaxed)
        );
        debug!(
            "Dropping node: {:?}, root: {:?}, parent: {:?}",
            self.ptr(),
            node.root.as_ref().map(|r| r.ptr()),
            node.parent.as_ref().map(|p| p.ptr())
        );
        let should_drop = {
            debug_assert!(node.refcount.load(Ordering::Relaxed) > 0);
            // We released the last reference iff the old count was 1.
            node.refcount.fetch_sub(1, Ordering::Relaxed) == 1
        };

        if !should_drop {
            return;
        }

        debug_assert!(node.children.read().is_empty());
        if node.parent.is_none() {
            debug!("Dropping root node!");
            // The free list should be null by this point
            debug_assert!(node.next_free.load(Ordering::Relaxed).is_null());
            log_drop(self.ptr());
            let _ = unsafe { Box::from_raw(self.ptr()) };
            return;
        }

        let root = unsafe { &*node.root.as_ref().unwrap().ptr() };
        let free_list = &root.next_free;
        let mut old_head = free_list.load(Ordering::Relaxed);

        // If the free list is null, that means that the rule tree has been
        // formally torn down, and the last standard GC has already occurred.
        // We require that any callers using the rule tree at this point are
        // on the main thread only, which lets us trigger a synchronous GC
        // here to avoid leaking anything. We use the GC machinery, rather
        // than just dropping directly, so that we benefit from the iterative
        // destruction and don't trigger unbounded recursion during drop. See
        // [1] and the associated crashtest.
        //
        // [1]: NOTE(review) — the line carrying this reference's link was
        // lost in this copy of the file; recover it from the upstream
        // history of this module.
        if old_head.is_null() {
            debug_assert!(
                !thread_state::get().is_worker() &&
                    (thread_state::get().is_layout() || thread_state::get().is_script())
            );
            // Add the node as the sole entry in the free list.
            debug_assert!(node.next_free.load(Ordering::Relaxed).is_null());
            node.next_free.store(FREE_LIST_SENTINEL, Ordering::Relaxed);
            free_list.store(node as *const _ as *mut _, Ordering::Relaxed);

            // Invoke the GC.
            //
            // Note that we need hold a strong reference to the root so that it
            // doesn't go away during the GC (which would happen if we're freeing
            // the last external reference into the rule tree). This is nicely
            // enforced by having the gc() method live on StrongRuleNode rather than
            // RuleNode.
            let strong_root: StrongRuleNode = node.root.as_ref().unwrap().upgrade();
            unsafe {
                strong_root.gc();
            }

            // Leave the free list null, like we found it, such that additional
            // drops for straggling rule nodes will take this same codepath.
            debug_assert_eq!(root.next_free.load(Ordering::Relaxed), FREE_LIST_SENTINEL);
            root.next_free.store(ptr::null_mut(), Ordering::Relaxed);

            // Return. If strong_root is the last strong reference to the root,
            // this re-enter StrongRuleNode::drop, and take the root-dropping
            // path earlier in this function.
            return;
        }

        // We're sure we're already in the free list, don't spinloop if we're.
        // Note that this is just a fast path, so it doesn't need to have an
        // strong memory ordering.
        if node.next_free.load(Ordering::Relaxed) != ptr::null_mut() {
            return;
        }

        // Ensure we "lock" the free list head swapping it with FREE_LIST_LOCKED.
        //
        // Note that we use Acquire/Release semantics for the free list
        // synchronization, in order to guarantee that the next_free
        // reads/writes we do below are properly visible from multiple threads
        // racing.
        loop {
            match free_list.compare_exchange_weak(
                old_head,
                FREE_LIST_LOCKED,
                Ordering::Acquire,
                Ordering::Relaxed,
            ) {
                Ok(..) => {
                    // If the head we swapped out was the lock marker,
                    // another thread holds the lock; keep spinning until
                    // we observe (and swap out) a real head.
                    if old_head != FREE_LIST_LOCKED {
                        break;
                    }
                },
                Err(new) => old_head = new,
            }
        }

        // If other thread has raced with use while using the same rule node,
        // just store the old head again, we're done.
        //
        // Note that we can use relaxed operations for loading since we're
        // effectively locking the free list with Acquire/Release semantics, and
        // the memory ordering is already guaranteed by that locking/unlocking.
        if node.next_free.load(Ordering::Relaxed) != ptr::null_mut() {
            free_list.store(old_head, Ordering::Release);
            return;
        }

        // Else store the old head as the next pointer, and store ourselves as
        // the new head of the free list.
        //
        // This can be relaxed since this pointer won't be read until GC.
        node.next_free.store(old_head, Ordering::Relaxed);

        // Increment the free count. This doesn't need to be an RMU atomic
        // operation, because the free list is "locked".
        let old_free_count = root.free_count().load(Ordering::Relaxed);
        root.free_count()
            .store(old_free_count + 1, Ordering::Relaxed);

        // This can be release because of the locking of the free list, that
        // ensures that all the other nodes racing with this one are using
        // `Acquire`.
        free_list.store(self.ptr(), Ordering::Release);
    }
}
1764
impl<'a> From<&'a StrongRuleNode> for WeakRuleNode {
    /// Creates a weak reference from a strong one without touching the
    /// refcount (equivalent to `StrongRuleNode::downgrade`).
    fn from(node: &'a StrongRuleNode) -> Self {
        WeakRuleNode::from_ptr(node.p)
    }
}
1770
impl WeakRuleNode {
    /// Raw pointer to the referenced node; no ownership implied.
    #[inline]
    fn ptr(&self) -> *mut RuleNode {
        self.p.as_ptr()
    }

    /// Turns this weak reference into a strong one by bumping the
    /// refcount.
    ///
    /// NOTE(review): unlike `Arc`'s weak upgrade, this increments
    /// unconditionally — it never fails — so callers must guarantee the
    /// node is still alive (e.g. because GC/free-list reclamation is
    /// single-threaded and can't race with this call).
    fn upgrade(&self) -> StrongRuleNode {
        debug!("Upgrading weak node: {:p}", self.ptr());

        let node = unsafe { &*self.ptr() };
        node.refcount.fetch_add(1, Ordering::Relaxed);
        StrongRuleNode::from_ptr(self.p)
    }

    /// Wraps a raw node pointer; the caller asserts it points to a live
    /// node for as long as the weak reference may be upgraded.
    fn from_ptr(p: ptr::NonNull<RuleNode>) -> Self {
        WeakRuleNode { p }
    }
}