Source code

Revision control

Other Tools

1
/* This Source Code Form is subject to the terms of the Mozilla Public
2
* License, v. 2.0. If a copy of the MPL was not distributed with this
3
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
4
5
#![allow(unsafe_code)]
6
7
//! The rule tree.
8
9
use crate::applicable_declarations::ApplicableDeclarationList;
10
#[cfg(feature = "gecko")]
11
use crate::gecko::selector_parser::PseudoElement;
12
use crate::hash::{self, FxHashMap};
13
use crate::properties::{Importance, LonghandIdSet, PropertyDeclarationBlock};
14
use crate::shared_lock::{Locked, SharedRwLockReadGuard, StylesheetGuards};
15
use crate::stylesheets::{Origin, StyleRule};
16
use crate::thread_state;
17
use malloc_size_of::{MallocShallowSizeOf, MallocSizeOf, MallocSizeOfOps};
18
use parking_lot::RwLock;
19
use servo_arc::{Arc, ArcBorrow, ArcUnion, ArcUnionBorrow};
20
use smallvec::SmallVec;
21
use std::io::{self, Write};
22
use std::mem;
23
use std::ptr;
24
use std::sync::atomic::{AtomicPtr, AtomicUsize, Ordering};
25
26
/// The rule tree, the structure servo uses to preserve the results of selector
/// matching.
///
/// This is organized as a tree of rules. When a node matches a set of rules,
/// they're inserted in order in the tree, starting with the less specific one.
///
/// When a rule is inserted in the tree, other elements may share the path up to
/// a given rule. If that's the case, we don't duplicate child nodes, but share
/// them.
///
/// When the rule node refcount drops to zero, it doesn't get freed. It gets
/// instead put into a free list, and it is potentially GC'd after a while in a
/// single-threaded fashion.
///
/// That way, a rule node that represents a likely-to-match-again rule (like a
/// :hover rule) can be reused if we haven't GC'd it yet.
///
/// See the discussion at https://github.com/servo/servo/pull/15562 and the IRC
/// logs linked from there to see a discussion about the different memory
/// orderings used here.
/// (NOTE(review): two doc lines were lost in extraction here — confirm the
/// exact wording against upstream.)
#[derive(Debug)]
pub struct RuleTree {
    // The root of the tree; every inserted rule path hangs off of it. The
    // root also hosts the free list head (see `RuleNode::next_free`).
    root: StrongRuleNode,
}
51
52
impl Drop for RuleTree {
    /// Tears the tree down: runs a final GC, then disables further GCs by
    /// nulling out the free-list head so remaining nodes drop synchronously.
    fn drop(&mut self) {
        // GC the rule tree.
        //
        // SAFETY(review): `gc` requires exclusive access to the tree; `drop`
        // taking `&mut self` implies no other borrow exists — confirm no
        // other thread still holds StrongRuleNodes at this point.
        unsafe {
            self.gc();
        }

        // After the GC, the free list should be empty.
        debug_assert_eq!(
            self.root.get().next_free.load(Ordering::Relaxed),
            FREE_LIST_SENTINEL
        );

        // Remove the sentinel. This indicates that GCs will no longer occur.
        // Any further drops of StrongRuleNodes must occur on the main thread,
        // and will trigger synchronous dropping of the Rule nodes.
        self.root
            .get()
            .next_free
            .store(ptr::null_mut(), Ordering::Relaxed);
    }
}
74
75
impl MallocSizeOf for RuleTree {
76
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
77
let mut n = 0;
78
let mut stack = SmallVec::<[_; 32]>::new();
79
stack.push(self.root.downgrade());
80
81
while let Some(node) = stack.pop() {
82
n += unsafe { ops.malloc_size_of(node.ptr()) };
83
let children = unsafe { (*node.ptr()).children.read() };
84
children.shallow_size_of(ops);
85
children.each(|c| stack.push(c.clone()));
86
}
87
88
n
89
}
90
}
91
92
/// Key identifying one child of a rule node: the cascade level paired with
/// the pointer identity of the child's style source.
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
struct ChildKey(CascadeLevel, ptr::NonNull<()>);

// SAFETY(review): the NonNull appears to be used only as an identity key
// (hashing / equality) and is never dereferenced through ChildKey — confirm
// before relying on these impls elsewhere.
unsafe impl Send for ChildKey {}
unsafe impl Sync for ChildKey {}
97
98
/// A style source for the rule node. It can either be a CSS style rule or a
/// declaration block.
///
/// Note that, even though the declaration block from inside the style rule
/// could be enough to implement the rule tree, keeping the whole rule provides
/// more debuggability, and also the ability to show those selectors to
/// devtools.
#[derive(Clone, Debug)]
pub struct StyleSource(ArcUnion<Locked<StyleRule>, Locked<PropertyDeclarationBlock>>);
107
108
impl PartialEq for StyleSource {
    // Equality is pointer identity of the underlying Arc allocation, not deep
    // equality of the rule/declaration contents.
    fn eq(&self, other: &Self) -> bool {
        ArcUnion::ptr_eq(&self.0, &other.0)
    }
}
113
114
impl StyleSource {
    /// Creates a StyleSource from a StyleRule.
    pub fn from_rule(rule: Arc<Locked<StyleRule>>) -> Self {
        StyleSource(ArcUnion::from_first(rule))
    }

    // Returns the pointer identity of the underlying allocation; used as the
    // source half of a `ChildKey`.
    #[inline]
    fn key(&self) -> ptr::NonNull<()> {
        self.0.ptr()
    }

    /// Creates a StyleSource from a PropertyDeclarationBlock.
    pub fn from_declarations(decls: Arc<Locked<PropertyDeclarationBlock>>) -> Self {
        StyleSource(ArcUnion::from_second(decls))
    }

    // Debug helper: writes the selectors (when this is a style rule) followed
    // by the declarations to `writer`. Write errors are deliberately ignored.
    fn dump<W: Write>(&self, guard: &SharedRwLockReadGuard, writer: &mut W) {
        if let Some(ref rule) = self.0.as_first() {
            let rule = rule.read_with(guard);
            let _ = write!(writer, "{:?}", rule.selectors);
        }

        let _ = write!(writer, " -> {:?}", self.read(guard).declarations());
    }

    /// Read the style source guard, and obtain thus read access to the
    /// underlying property declaration block.
    #[inline]
    pub fn read<'a>(&'a self, guard: &'a SharedRwLockReadGuard) -> &'a PropertyDeclarationBlock {
        // Either variant ultimately holds a Locked<PropertyDeclarationBlock>;
        // for a style rule we first reach through to its `block` field.
        let block: &Locked<PropertyDeclarationBlock> = match self.0.borrow() {
            ArcUnionBorrow::First(ref rule) => &rule.get().read_with(guard).block,
            ArcUnionBorrow::Second(ref block) => block.get(),
        };
        block.read_with(guard)
    }

    /// Returns the style rule if applicable, otherwise None.
    pub fn as_rule(&self) -> Option<ArcBorrow<Locked<StyleRule>>> {
        self.0.as_first()
    }

    /// Returns the declaration block if applicable, otherwise None.
    pub fn as_declarations(&self) -> Option<ArcBorrow<Locked<PropertyDeclarationBlock>>> {
        self.0.as_second()
    }
}
160
161
/// This value exists here so a node that pushes itself to the list can know
/// that it is in the free list by looking at its next pointer, and comparing
/// it with null.
///
/// The root node doesn't have a null pointer in the free list, but this value.
const FREE_LIST_SENTINEL: *mut RuleNode = 0x01 as *mut RuleNode;

/// A second sentinel value for the free list, indicating that it's locked (i.e.
/// another thread is currently adding an entry). We spin if we find this value.
const FREE_LIST_LOCKED: *mut RuleNode = 0x02 as *mut RuleNode;

/// A counter to track how many inner shadow roots rules deep we are.
///
/// This is used to handle:
///
/// (NOTE(review): the spec-issue link that belonged here was lost in
/// extraction — restore it from upstream.)
///
/// In particular, it'd be `0` for the innermost shadow host, `1` for the next,
/// and so on.
pub type ShadowCascadeOrder = u8;
181
182
impl RuleTree {
    /// Construct a new rule tree.
    pub fn new() -> Self {
        RuleTree {
            root: StrongRuleNode::new(Box::new(RuleNode::root())),
        }
    }

    /// Get the root rule node.
    pub fn root(&self) -> &StrongRuleNode {
        &self.root
    }

    // Debug helper: dumps the whole tree, starting at the root, to `writer`.
    fn dump<W: Write>(&self, guards: &StylesheetGuards, writer: &mut W) {
        let _ = writeln!(writer, " + RuleTree");
        self.root.get().dump(guards, writer, 0);
    }

    /// Dump the rule tree to stdout.
    pub fn dump_stdout(&self, guards: &StylesheetGuards) {
        let mut stdout = io::stdout();
        self.dump(guards, &mut stdout);
    }

    /// Inserts the given rules, that must be in proper order by specificity,
    /// and returns the corresponding rule node representing the last inserted
    /// one.
    ///
    /// !important rules are detected and inserted into the appropriate position
    /// in the rule tree. This allows selector matching to ignore importance,
    /// while still maintaining the appropriate cascade order in the rule tree.
    pub fn insert_ordered_rules_with_important<'a, I>(
        &self,
        iter: I,
        guards: &StylesheetGuards,
    ) -> StrongRuleNode
    where
        I: Iterator<Item = (StyleSource, CascadeLevel, ShadowCascadeOrder)>,
    {
        use self::CascadeLevel::*;
        let mut current = self.root.clone();
        let mut last_level = current.get().level;

        // While inserting the normal levels in order, stash any source that
        // contains !important declarations in per-level buckets; those get
        // re-inserted after the loop, in increasing order of importance.
        let mut found_important = false;
        let mut important_style_attr = None;

        let mut important_same_tree = SmallVec::<[StyleSource; 4]>::new();
        // One inner vector per shadow cascade order; drained in reverse below
        // because importance inverts the shadow ordering.
        let mut important_inner_shadow = SmallVec::<[SmallVec<[StyleSource; 4]>; 4]>::new();
        important_inner_shadow.push(SmallVec::new());

        let mut important_user = SmallVec::<[StyleSource; 4]>::new();
        let mut important_ua = SmallVec::<[StyleSource; 4]>::new();
        let mut transition = None;

        let mut last_cascade_order = 0;
        for (source, level, shadow_cascade_order) in iter {
            debug_assert!(level >= last_level, "Not really ordered");
            debug_assert!(!level.is_important(), "Important levels handled internally");
            let any_important = {
                let pdb = source.read(level.guard(guards));
                pdb.any_important()
            };

            if any_important {
                found_important = true;
                match level {
                    InnerShadowNormal => {
                        debug_assert!(
                            shadow_cascade_order >= last_cascade_order,
                            "Not really ordered"
                        );
                        // Start a new bucket when the shadow cascade order
                        // advances (and the current bucket is non-empty).
                        if shadow_cascade_order > last_cascade_order &&
                            !important_inner_shadow.last().unwrap().is_empty()
                        {
                            last_cascade_order = shadow_cascade_order;
                            important_inner_shadow.push(SmallVec::new());
                        }
                        important_inner_shadow
                            .last_mut()
                            .unwrap()
                            .push(source.clone())
                    },
                    SameTreeAuthorNormal => important_same_tree.push(source.clone()),
                    UANormal => important_ua.push(source.clone()),
                    UserNormal => important_user.push(source.clone()),
                    StyleAttributeNormal => {
                        debug_assert!(important_style_attr.is_none());
                        important_style_attr = Some(source.clone());
                    },
                    _ => {},
                };
            }

            // We don't optimize out empty rules, even though we could.
            //
            // Inspector relies on every rule being inserted in the normal level
            // at least once, in order to return the rules with the correct
            // specificity order.
            //
            // TODO(emilio): If we want to apply these optimizations without
            // breaking inspector's expectations, we'd need to run
            // selector-matching again at the inspector's request. That may or
            // may not be a better trade-off.
            if matches!(level, Transitions) && found_important {
                // There can be at most one transition, and it will come at
                // the end of the iterator. Stash it and apply it after
                // !important rules.
                debug_assert!(transition.is_none());
                transition = Some(source);
            } else {
                current = current.ensure_child(self.root.downgrade(), source, level);
            }
            last_level = level;
        }

        // Early-return in the common case of no !important declarations.
        if !found_important {
            return current;
        }

        //
        // Insert important declarations, in order of increasing importance,
        // followed by any transition rule.
        //

        for source in important_same_tree.drain() {
            current = current.ensure_child(self.root.downgrade(), source, SameTreeAuthorImportant);
        }

        if let Some(source) = important_style_attr {
            current = current.ensure_child(self.root.downgrade(), source, StyleAttributeImportant);
        }

        // Reversed: for !important, inner shadow trees win over outer ones.
        for mut list in important_inner_shadow.drain().rev() {
            for source in list.drain() {
                current = current.ensure_child(self.root.downgrade(), source, InnerShadowImportant);
            }
        }

        for source in important_user.drain() {
            current = current.ensure_child(self.root.downgrade(), source, UserImportant);
        }

        for source in important_ua.drain() {
            current = current.ensure_child(self.root.downgrade(), source, UAImportant);
        }

        if let Some(source) = transition {
            current = current.ensure_child(self.root.downgrade(), source, Transitions);
        }

        current
    }

    /// Given a list of applicable declarations, insert the rules and return the
    /// corresponding rule node.
    pub fn compute_rule_node(
        &self,
        applicable_declarations: &mut ApplicableDeclarationList,
        guards: &StylesheetGuards,
    ) -> StrongRuleNode {
        self.insert_ordered_rules_with_important(
            applicable_declarations.drain().map(|d| d.for_rule_tree()),
            guards,
        )
    }

    /// Insert the given rules, that must be in proper order by specificity,
    /// and return the corresponding rule node representing the last inserted
    /// one.
    pub fn insert_ordered_rules<'a, I>(&self, iter: I) -> StrongRuleNode
    where
        I: Iterator<Item = (StyleSource, CascadeLevel)>,
    {
        self.insert_ordered_rules_from(self.root.clone(), iter)
    }

    // Inserts the (already cascade-ordered) rules as a chain of children
    // starting at `from`, returning the deepest node.
    fn insert_ordered_rules_from<'a, I>(&self, from: StrongRuleNode, iter: I) -> StrongRuleNode
    where
        I: Iterator<Item = (StyleSource, CascadeLevel)>,
    {
        let mut current = from;
        let mut last_level = current.get().level;
        for (source, level) in iter {
            debug_assert!(last_level <= level, "Not really ordered");
            current = current.ensure_child(self.root.downgrade(), source, level);
            last_level = level;
        }
        current
    }

    /// This can only be called when no other thread is accessing this tree.
    pub unsafe fn gc(&self) {
        self.root.gc();
    }

    /// This can only be called when no other thread is accessing this tree.
    pub unsafe fn maybe_gc(&self) {
        #[cfg(debug_assertions)]
        self.maybe_dump_stats();

        self.root.maybe_gc();
    }

    // Debug-build-only: periodically (at most every 2s per thread) logs a
    // histogram of children-count per node at trace level.
    #[cfg(debug_assertions)]
    fn maybe_dump_stats(&self) {
        use itertools::Itertools;
        use std::cell::Cell;
        use std::time::{Duration, Instant};

        if !log_enabled!(log::Level::Trace) {
            return;
        }

        const RULE_TREE_STATS_INTERVAL: Duration = Duration::from_secs(2);

        thread_local! {
            pub static LAST_STATS: Cell<Instant> = Cell::new(Instant::now());
        };

        let should_dump = LAST_STATS.with(|s| {
            let now = Instant::now();
            if now.duration_since(s.get()) < RULE_TREE_STATS_INTERVAL {
                return false;
            }
            s.set(now);
            true
        });

        if !should_dump {
            return;
        }

        // children count -> number of nodes with that many children.
        let mut children_count = FxHashMap::default();

        let mut stack = SmallVec::<[_; 32]>::new();
        stack.push(self.root.clone());
        while let Some(node) = stack.pop() {
            let children = node.get().children.read();
            *children_count.entry(children.len()).or_insert(0) += 1;
            children.each(|c| stack.push(c.upgrade()));
        }

        trace!("Rule tree stats:");
        let counts = children_count.keys().sorted();
        for count in counts {
            trace!(" {} - {}", count, children_count[count]);
        }
    }

    /// Replaces a rule in a given level (if present) for another rule.
    ///
    /// Returns the resulting node that represents the new path, or None if
    /// the old path is still valid.
    pub fn update_rule_at_level(
        &self,
        level: CascadeLevel,
        pdb: Option<ArcBorrow<Locked<PropertyDeclarationBlock>>>,
        path: &StrongRuleNode,
        guards: &StylesheetGuards,
        important_rules_changed: &mut bool,
    ) -> Option<StrongRuleNode> {
        debug_assert!(level.is_unique_per_element());
        // TODO(emilio): Being smarter with lifetimes we could avoid a bit of
        // the refcount churn.
        let mut current = path.clone();
        *important_rules_changed = false;

        // First walk up until the first less-or-equally specific rule,
        // remembering the (source, level) pairs we pass so they can be
        // re-inserted on top of the replacement below.
        let mut children = SmallVec::<[_; 10]>::new();
        while current.get().level > level {
            children.push((
                current.get().source.as_ref().unwrap().clone(),
                current.get().level,
            ));
            current = current.parent().unwrap().clone();
        }

        // Then remove the one at the level we want to replace, if any.
        //
        // NOTE: Here we assume that only one rule can be at the level we're
        // replacing.
        //
        // This is certainly true for HTML style attribute rules, animations and
        // transitions, but could not be so for SMIL animations, which we'd need
        // to special-case (isn't hard, it's just about removing the `if` and
        // special cases, and replacing them for a `while` loop, avoiding the
        // optimizations).
        if current.get().level == level {
            *important_rules_changed |= level.is_important();

            if let Some(pdb) = pdb {
                // If the only rule at the level we're replacing is exactly the
                // same as `pdb`, we're done, and `path` is still valid.
                //
                // TODO(emilio): Another potential optimization is the one where
                // we can just replace the rule at that level for `pdb`, and
                // then we don't need to re-create the children, and `path` is
                // also equally valid. This is less likely, and would require an
                // in-place mutation of the source, which is, at best, fiddly,
                // so let's skip it for now.
                let current_decls = current
                    .get()
                    .source
                    .as_ref()
                    .unwrap()
                    .as_declarations()
                    .expect("Replacing non-declarations style?");
                let is_here_already = ArcBorrow::ptr_eq(&pdb, &current_decls);
                if is_here_already {
                    debug!("Picking the fast path in rule replacement");
                    return None;
                }
            }
            current = current.parent().unwrap().clone();
        }
        debug_assert!(
            current.get().level != level,
            "Multiple rules should've been replaced?"
        );

        // Insert the rule if it's relevant at this level in the cascade.
        //
        // These optimizations are likely to be important, because the levels
        // where replacements apply (style and animations) tend to trigger
        // pretty bad styling cases already.
        if let Some(pdb) = pdb {
            if level.is_important() {
                if pdb.read_with(level.guard(guards)).any_important() {
                    current = current.ensure_child(
                        self.root.downgrade(),
                        StyleSource::from_declarations(pdb.clone_arc()),
                        level,
                    );
                    *important_rules_changed = true;
                }
            } else {
                if pdb.read_with(level.guard(guards)).any_normal() {
                    current = current.ensure_child(
                        self.root.downgrade(),
                        StyleSource::from_declarations(pdb.clone_arc()),
                        level,
                    );
                }
            }
        }

        // Now the rule is in the relevant place, push the children as
        // necessary.
        let rule = self.insert_ordered_rules_from(current, children.drain().rev());
        Some(rule)
    }

    /// Returns new rule nodes without Transitions level rule.
    pub fn remove_transition_rule_if_applicable(&self, path: &StrongRuleNode) -> StrongRuleNode {
        // Return a clone if there is no transition level.
        if path.cascade_level() != CascadeLevel::Transitions {
            return path.clone();
        }

        // Transitions is the last level of the cascade, so it can only be the
        // leaf of the path; dropping it is just taking the parent.
        path.parent().unwrap().clone()
    }

    /// Returns new rule node without rules from declarative animations.
    pub fn remove_animation_rules(&self, path: &StrongRuleNode) -> StrongRuleNode {
        // Return a clone if there are no animation rules.
        if !path.has_animation_or_transition_rules() {
            return path.clone();
        }

        // Walk the ancestor chain over the animation-capable levels
        // (SMILOverride and above), keeping the non-animation entries so they
        // can be re-inserted below the last untouched ancestor.
        let iter = path
            .self_and_ancestors()
            .take_while(|node| node.cascade_level() >= CascadeLevel::SMILOverride);
        let mut last = path;
        let mut children = SmallVec::<[_; 10]>::new();
        for node in iter {
            if !node.cascade_level().is_animation() {
                children.push((
                    node.get().source.as_ref().unwrap().clone(),
                    node.cascade_level(),
                ));
            }
            last = node;
        }

        let rule =
            self.insert_ordered_rules_from(last.parent().unwrap().clone(), children.drain().rev());
        rule
    }

    /// Returns new rule node by adding animation rules at transition level.
    /// The additional rules must be appropriate for the transition
    /// level of the cascade, which is the highest level of the cascade.
    /// (This is the case for one current caller, the cover rule used
    /// for CSS transitions.)
    pub fn add_animation_rules_at_transition_level(
        &self,
        path: &StrongRuleNode,
        pdb: Arc<Locked<PropertyDeclarationBlock>>,
        guards: &StylesheetGuards,
    ) -> StrongRuleNode {
        let mut dummy = false;
        self.update_rule_at_level(
            CascadeLevel::Transitions,
            Some(pdb.borrow_arc()),
            path,
            guards,
            &mut dummy,
        )
        .expect("Should return a valid rule node")
    }
}
592
593
/// The number of RuleNodes added to the free list before we will consider
/// doing a GC when calling maybe_gc(). (The value is copied from Gecko,
/// where it likely did not result from a rigorous performance analysis.)
const RULE_TREE_GC_INTERVAL: usize = 300;
597
598
/// The cascade level these rules are relevant at, as per[1][2][3].
///
/// Presentational hints for SVG and HTML are in the "author-level
/// zero-specificity" level, that is, right after user rules, and before author
/// rules.
///
/// The order of variants declared here is significant, and must be in
/// _ascending_ order of precedence.
///
/// See also [4] for the Shadow DOM bits. We rely on the invariant that rules
/// from outside the tree the element is in can't affect the element.
///
/// The opposite is not true (i.e., :host and ::slotted) from an "inner" shadow
/// tree may affect an element connected to the document or an "outer" shadow
/// tree.
///
/// We need to differentiate between rules from the same tree and "inner" shadow
/// trees in order to be able to find the right position for the style attribute
/// easily. Otherwise we wouldn't be able to avoid selector-matching when a
/// style attribute is added or removed.
///
/// (NOTE(review): the [1]-[4] reference links were lost in extraction —
/// restore them from upstream; they point at the CSS cascade and Shadow DOM
/// specifications.)
#[repr(u8)]
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, PartialOrd)]
#[cfg_attr(feature = "servo", derive(MallocSizeOf))]
pub enum CascadeLevel {
    /// Normal User-Agent rules.
    UANormal = 0,
    /// User normal rules.
    UserNormal,
    /// Presentational hints.
    PresHints,
    /// Shadow DOM styles from "inner" shadow trees.
    ///
    /// See above for why this is needed instead of merging InnerShadowNormal,
    /// SameTreeAuthorNormal and StyleAttributeNormal inside something like
    /// AuthorNormal.
    InnerShadowNormal,
    /// Author normal rules from the same tree the element is in.
    SameTreeAuthorNormal,
    /// Style attribute normal rules.
    StyleAttributeNormal,
    /// SVG SMIL animations.
    SMILOverride,
    /// CSS animations and script-generated animations.
    Animations,
    /// Author-supplied important rules from the same tree the element came
    /// from.
    SameTreeAuthorImportant,
    /// Style attribute important rules.
    StyleAttributeImportant,
    /// Shadow DOM important rules.
    InnerShadowImportant,
    /// User important rules.
    UserImportant,
    /// User-agent important rules.
    UAImportant,
    /// Transitions
    ///
    /// NB: If this changes from being last, change from_byte below.
    Transitions,
}
663
664
impl CascadeLevel {
    /// Converts a raw byte to a CascadeLevel.
    ///
    /// # Safety
    ///
    /// `byte` must be a valid discriminant, i.e. `<= Transitions as u8`
    /// (Transitions is the last variant); the debug assertion only checks
    /// this in debug builds.
    pub unsafe fn from_byte(byte: u8) -> Self {
        debug_assert!(byte <= CascadeLevel::Transitions as u8);
        mem::transmute(byte)
    }

    /// Select a lock guard for this level
    pub fn guard<'a>(&self, guards: &'a StylesheetGuards<'a>) -> &'a SharedRwLockReadGuard<'a> {
        match *self {
            CascadeLevel::UANormal |
            CascadeLevel::UserNormal |
            CascadeLevel::UserImportant |
            CascadeLevel::UAImportant => guards.ua_or_user,
            // Everything else (author rules, style attributes, animations...)
            // uses the author guard.
            _ => guards.author,
        }
    }

    /// Returns whether this cascade level is unique per element, in which case
    /// we can replace the path in the cascade without fear.
    pub fn is_unique_per_element(&self) -> bool {
        match *self {
            CascadeLevel::Transitions |
            CascadeLevel::Animations |
            CascadeLevel::SMILOverride |
            CascadeLevel::StyleAttributeNormal |
            CascadeLevel::StyleAttributeImportant => true,
            _ => false,
        }
    }

    /// Returns whether this cascade level represents important rules of some
    /// sort.
    #[inline]
    pub fn is_important(&self) -> bool {
        match *self {
            CascadeLevel::SameTreeAuthorImportant |
            CascadeLevel::InnerShadowImportant |
            CascadeLevel::StyleAttributeImportant |
            CascadeLevel::UserImportant |
            CascadeLevel::UAImportant => true,
            _ => false,
        }
    }

    /// Returns the importance relevant for this rule. Pretty similar to
    /// `is_important`.
    #[inline]
    pub fn importance(&self) -> Importance {
        if self.is_important() {
            Importance::Important
        } else {
            Importance::Normal
        }
    }

    /// Returns the cascade origin of the rule.
    #[inline]
    pub fn origin(&self) -> Origin {
        match *self {
            CascadeLevel::UAImportant | CascadeLevel::UANormal => Origin::UserAgent,
            CascadeLevel::UserImportant | CascadeLevel::UserNormal => Origin::User,
            CascadeLevel::PresHints |
            CascadeLevel::InnerShadowNormal |
            CascadeLevel::SameTreeAuthorNormal |
            CascadeLevel::StyleAttributeNormal |
            CascadeLevel::SMILOverride |
            CascadeLevel::Animations |
            CascadeLevel::SameTreeAuthorImportant |
            CascadeLevel::StyleAttributeImportant |
            CascadeLevel::InnerShadowImportant |
            CascadeLevel::Transitions => Origin::Author,
        }
    }

    /// Returns whether this cascade level represents animation rules.
    #[inline]
    pub fn is_animation(&self) -> bool {
        match *self {
            CascadeLevel::SMILOverride | CascadeLevel::Animations | CascadeLevel::Transitions => {
                true
            },
            _ => false,
        }
    }
}
750
751
/// The children of a single rule node.
///
/// We optimize the case of no kids and a single child, since they're by far the
/// most common case and it'd cause a bunch of bloat for no reason.
///
/// The children remove themselves when they go away, which means that it's ok
/// for us to store weak pointers to them.
enum RuleNodeChildren {
    /// There are no kids.
    Empty,
    /// There's just one kid. This is an extremely common case, so we don't
    /// bother allocating a map for it.
    One(WeakRuleNode),
    /// At least at one point in time there was more than one kid (that is to
    /// say, we don't bother re-allocating if children are removed dynamically).
    Map(Box<FxHashMap<ChildKey, WeakRuleNode>>),
}
768
769
impl MallocShallowSizeOf for RuleNodeChildren {
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        match *self {
            // Inline variants own no extra heap memory.
            RuleNodeChildren::One(..) | RuleNodeChildren::Empty => 0,
            RuleNodeChildren::Map(ref m) => {
                // Want to account for both the box and the hashmap.
                m.shallow_size_of(ops) + (**m).shallow_size_of(ops)
            },
        }
    }
}
780
781
impl Default for RuleNodeChildren {
782
fn default() -> Self {
783
RuleNodeChildren::Empty
784
}
785
}
786
787
impl RuleNodeChildren {
    /// Executes a given function for each of the children.
    fn each(&self, mut f: impl FnMut(&WeakRuleNode)) {
        match *self {
            RuleNodeChildren::Empty => {},
            RuleNodeChildren::One(ref child) => f(child),
            RuleNodeChildren::Map(ref map) => {
                for (_key, kid) in map.iter() {
                    f(kid)
                }
            },
        }
    }

    // Number of children currently stored.
    fn len(&self) -> usize {
        match *self {
            RuleNodeChildren::Empty => 0,
            RuleNodeChildren::One(..) => 1,
            RuleNodeChildren::Map(ref map) => map.len(),
        }
    }

    fn is_empty(&self) -> bool {
        self.len() == 0
    }

    // Looks up the child with the given key, if any.
    fn get(&self, key: &ChildKey) -> Option<&WeakRuleNode> {
        match *self {
            RuleNodeChildren::Empty => return None,
            RuleNodeChildren::One(ref kid) => {
                // We're read-locked, so no need to do refcount stuff, since the
                // child is only removed from the main thread, _and_ it'd need
                // to write-lock us anyway.
                if unsafe { (*kid.ptr()).key() } == *key {
                    Some(kid)
                } else {
                    None
                }
            },
            RuleNodeChildren::Map(ref map) => map.get(&key),
        }
    }

    // Returns the existing child for `key`, or inserts (and returns) the node
    // produced by `get_new_child`. Promotes `One` to `Map` on the second
    // distinct key. Callers must hold the write lock on this children list.
    fn get_or_insert_with(
        &mut self,
        key: ChildKey,
        get_new_child: impl FnOnce() -> StrongRuleNode,
    ) -> StrongRuleNode {
        // The match either returns directly (hit, or insertion into
        // Empty/Map), or falls through with the existing single child's key
        // so we can promote One -> Map below.
        let existing_child_key = match *self {
            RuleNodeChildren::Empty => {
                let new = get_new_child();
                debug_assert_eq!(new.get().key(), key);
                *self = RuleNodeChildren::One(new.downgrade());
                return new;
            },
            RuleNodeChildren::One(ref weak) => unsafe {
                // We're locked necessarily, so it's fine to look at our
                // weak-child without refcount-traffic.
                let existing_child_key = (*weak.ptr()).key();
                if existing_child_key == key {
                    return weak.upgrade();
                }
                existing_child_key
            },
            RuleNodeChildren::Map(ref mut map) => {
                return match map.entry(key) {
                    hash::map::Entry::Occupied(ref occupied) => occupied.get().upgrade(),
                    hash::map::Entry::Vacant(vacant) => {
                        let new = get_new_child();

                        debug_assert_eq!(new.get().key(), key);
                        vacant.insert(new.downgrade());

                        new
                    },
                };
            },
        };

        // Promotion path: move the existing single child out of `self`...
        let existing_child = match mem::replace(self, RuleNodeChildren::Empty) {
            RuleNodeChildren::One(o) => o,
            _ => unreachable!(),
        };
        // Two rule-nodes are still a not-totally-uncommon thing, so
        // avoid over-allocating entries.
        //
        // TODO(emilio): Maybe just inline two kids too?
        let mut children = Box::new(FxHashMap::with_capacity_and_hasher(2, Default::default()));
        children.insert(existing_child_key, existing_child);

        let new = get_new_child();
        debug_assert_eq!(new.get().key(), key);
        children.insert(key, new.downgrade());

        *self = RuleNodeChildren::Map(children);

        new
    }

    // Removes and returns the child with the given key, if present. Note the
    // Map variant is never demoted back to One/Empty.
    fn remove(&mut self, key: &ChildKey) -> Option<WeakRuleNode> {
        match *self {
            RuleNodeChildren::Empty => return None,
            RuleNodeChildren::One(ref one) => {
                if unsafe { (*one.ptr()).key() } != *key {
                    return None;
                }
            },
            RuleNodeChildren::Map(ref mut multiple) => {
                return multiple.remove(key);
            },
        }

        // One-child case with a matching key: take the child out of `self`.
        match mem::replace(self, RuleNodeChildren::Empty) {
            RuleNodeChildren::One(o) => Some(o),
            _ => unreachable!(),
        }
    }
}
905
906
/// A node in the rule tree.
pub struct RuleNode {
    /// The root node. Only the root has no root pointer, for obvious reasons.
    root: Option<WeakRuleNode>,

    /// The parent rule node. Only the root has no parent.
    parent: Option<StrongRuleNode>,

    /// The actual style source, either coming from a selector in a StyleRule,
    /// or a raw property declaration block (like the style attribute).
    ///
    /// None for the root node.
    source: Option<StyleSource>,

    /// The cascade level this rule is positioned at.
    level: CascadeLevel,

    /// Number of strong references to this node. Initialized to 1 by the
    /// constructors; see the free-list discussion on the module docs for how
    /// nodes reaching zero are handled.
    refcount: AtomicUsize,

    /// Only used for the root, stores the number of free rule nodes that are
    /// around.
    free_count: AtomicUsize,

    /// The children of a given rule node. Children remove themselves from here
    /// when they go away.
    children: RwLock<RuleNodeChildren>,

    /// The next item in the rule tree free list, that starts on the root node.
    ///
    /// When this is set to null, that means that the rule tree has been torn
    /// down, and GCs will no longer occur. When this happens, StrongRuleNodes
    /// may only be dropped on the main thread, and teardown happens
    /// synchronously.
    next_free: AtomicPtr<RuleNode>,
}
941
942
// SAFETY(review): shared access to the tree goes through atomics and the
// children RwLock per the RuleNode field docs; confirm StrongRuleNode upholds
// this before relying on these impls.
unsafe impl Sync for RuleTree {}
unsafe impl Send for RuleTree {}
944
945
// On Gecko builds, hook into the leak checking machinery.
#[cfg(feature = "gecko_refcount_logging")]
mod gecko_leak_checking {
    use super::RuleNode;
    use std::mem::size_of;
    use std::os::raw::{c_char, c_void};

    extern "C" {
        fn NS_LogCtor(aPtr: *mut c_void, aTypeName: *const c_char, aSize: u32);
        fn NS_LogDtor(aPtr: *mut c_void, aTypeName: *const c_char, aSize: u32);
    }

    // NUL-terminated type name handed to the C logging functions.
    static NAME: &'static [u8] = b"RuleNode\0";

    /// Logs the creation of a heap-allocated object to Gecko's leak-checking machinery.
    pub fn log_ctor(ptr: *const RuleNode) {
        let s = NAME as *const [u8] as *const u8 as *const c_char;
        unsafe {
            NS_LogCtor(ptr as *mut c_void, s, size_of::<RuleNode>() as u32);
        }
    }

    /// Logs the destruction of a heap-allocated object to Gecko's leak-checking machinery.
    pub fn log_dtor(ptr: *const RuleNode) {
        let s = NAME as *const [u8] as *const u8 as *const c_char;
        unsafe {
            NS_LogDtor(ptr as *mut c_void, s, size_of::<RuleNode>() as u32);
        }
    }
}
975
976
// Records a RuleNode allocation with the leak checker; a no-op unless the
// `gecko_refcount_logging` feature is enabled.
#[inline(always)]
fn log_new(_ptr: *const RuleNode) {
    #[cfg(feature = "gecko_refcount_logging")]
    gecko_leak_checking::log_ctor(_ptr);
}
981
982
/// Reports a `RuleNode` about to be freed to the Gecko leak checker.
/// Compiles to a no-op unless the `gecko_refcount_logging` feature is on.
#[inline(always)]
fn log_drop(_ptr: *const RuleNode) {
    #[cfg(feature = "gecko_refcount_logging")]
    gecko_leak_checking::log_dtor(_ptr);
}
987
988
impl RuleNode {
    /// Creates a new non-root node for `source` at `level`, hanging from
    /// `parent`. The node starts with a refcount of one (the strong
    /// reference the caller is about to wrap it in) and with a null
    /// `next_free`, meaning "not on the free list".
    fn new(
        root: WeakRuleNode,
        parent: StrongRuleNode,
        source: StyleSource,
        level: CascadeLevel,
    ) -> Self {
        // `root` must actually point at the root (the only parentless node).
        debug_assert!(root.upgrade().parent().is_none());
        RuleNode {
            root: Some(root),
            parent: Some(parent),
            source: Some(source),
            level: level,
            refcount: AtomicUsize::new(1),
            children: Default::default(),
            free_count: AtomicUsize::new(0),
            next_free: AtomicPtr::new(ptr::null_mut()),
        }
    }

    /// Creates the root node: no parent, no style source, and a free list
    /// that starts out as the empty-list sentinel (contrast with non-root
    /// nodes, whose `next_free` starts out null).
    fn root() -> Self {
        RuleNode {
            root: None,
            parent: None,
            source: None,
            level: CascadeLevel::UANormal,
            refcount: AtomicUsize::new(1),
            free_count: AtomicUsize::new(0),
            children: Default::default(),
            next_free: AtomicPtr::new(FREE_LIST_SENTINEL),
        }
    }

    /// The key identifying this node in its parent's child map.
    ///
    /// Panics if called on the root node, which has no style source.
    fn key(&self) -> ChildKey {
        ChildKey(
            self.level,
            self.source
                .as_ref()
                .expect("Called key() on the root node")
                .key(),
        )
    }

    /// Whether this is the root node; the root is the only parentless node.
    fn is_root(&self) -> bool {
        self.parent.is_none()
    }

    /// The number of nodes currently sitting in the free list. Stored on
    /// (and only meaningful for) the root node.
    fn free_count(&self) -> &AtomicUsize {
        debug_assert!(self.is_root());
        &self.free_count
    }

    /// Remove this rule node from the child list.
    ///
    /// This is expected to be called before freeing the node from the free
    /// list, on the main thread.
    unsafe fn remove_from_child_list(&self) {
        debug!(
            "Remove from child list: {:?}, parent: {:?}",
            self as *const RuleNode,
            self.parent.as_ref().map(|p| p.ptr())
        );

        if let Some(parent) = self.parent.as_ref() {
            let weak = parent.get().children.write().remove(&self.key());
            // The entry stored under our key must have been this very node.
            assert_eq!(weak.unwrap().ptr() as *const _, self as *const _);
        }
    }

    /// Writes a textual dump of this node and, recursively, its descendants,
    /// for debugging purposes.
    fn dump<W: Write>(&self, guards: &StylesheetGuards, writer: &mut W, indent: usize) {
        const INDENT_INCREMENT: usize = 4;

        for _ in 0..indent {
            let _ = write!(writer, " ");
        }

        let _ = writeln!(
            writer,
            " - {:?} (ref: {:?}, parent: {:?})",
            self as *const _,
            self.refcount.load(Ordering::Relaxed),
            self.parent.as_ref().map(|p| p.ptr())
        );

        for _ in 0..indent {
            let _ = write!(writer, " ");
        }

        if self.source.is_some() {
            self.source
                .as_ref()
                .unwrap()
                .dump(self.level.guard(guards), writer);
        } else {
            // Only the root lacks a source, and the root is dumped at zero
            // indentation; anything else indicates a corrupt tree.
            if indent != 0 {
                warn!("How has this happened?");
            }
            let _ = write!(writer, "(root)");
        }

        let _ = write!(writer, "\n");
        self.children.read().each(|child| {
            child
                .upgrade()
                .get()
                .dump(guards, writer, indent + INDENT_INCREMENT);
        });
    }
}
1097
1098
/// A weak (non-owning) reference to a rule node: just the raw pointer, with
/// no refcount held. See `WeakRuleNode::upgrade`, which bumps the refcount
/// to produce a `StrongRuleNode`.
#[derive(Clone)]
struct WeakRuleNode {
    // Non-null pointer to the node; liveness is the holder's responsibility.
    p: ptr::NonNull<RuleNode>,
}
1102
1103
/// A strong reference to a rule node.
///
/// Owns one unit of the node's refcount: `Clone` increments it and `Drop`
/// decrements it (pushing the node onto the free list when it hits zero).
pub struct StrongRuleNode {
    // Non-null pointer to the heap-allocated node.
    p: ptr::NonNull<RuleNode>,
}
1108
1109
// SAFETY: the state a StrongRuleNode touches through its raw pointer is
// synchronized — `refcount`, `free_count` and `next_free` are atomics, and
// the `children` map is behind an `RwLock` — so the handle may be sent and
// shared across threads.
unsafe impl Send for StrongRuleNode {}
unsafe impl Sync for StrongRuleNode {}
1111
1112
// On Servo builds, report StrongRuleNode as zero-sized to the memory
// reporter; presumably the rule tree's memory is accounted elsewhere
// (see the MallocSizeOf imports above) — NOTE(review): confirm.
#[cfg(feature = "servo")]
malloc_size_of_is_0!(StrongRuleNode);
1114
1115
impl StrongRuleNode {
    /// Leaks the freshly-boxed node into a raw pointer and wraps it; the
    /// box's initial refcount of one becomes this strong reference.
    fn new(n: Box<RuleNode>) -> Self {
        // Exactly the root lacks both a parent and a source.
        debug_assert_eq!(n.parent.is_none(), !n.source.is_some());

        // TODO(emilio): Use into_raw_non_null when it's stable.
        let ptr = unsafe { ptr::NonNull::new_unchecked(Box::into_raw(n)) };
        log_new(ptr.as_ptr());

        debug!("Creating rule node: {:p}", ptr);

        StrongRuleNode::from_ptr(ptr)
    }

    /// Wraps a raw node pointer without adjusting the refcount; the caller
    /// must already own a reference unit for the wrapped node.
    fn from_ptr(p: ptr::NonNull<RuleNode>) -> Self {
        StrongRuleNode { p }
    }

    /// Creates a weak reference to the same node (no refcount change).
    fn downgrade(&self) -> WeakRuleNode {
        WeakRuleNode::from_ptr(self.p)
    }

    /// Get the parent rule node of this rule node.
    pub fn parent(&self) -> Option<&StrongRuleNode> {
        self.get().parent.as_ref()
    }

    /// Returns this node's child for the given `source` and `level`,
    /// creating and inserting it if it doesn't exist yet.
    fn ensure_child(
        &self,
        root: WeakRuleNode,
        source: StyleSource,
        level: CascadeLevel,
    ) -> StrongRuleNode {
        use parking_lot::RwLockUpgradableReadGuard;

        let key = ChildKey(level, source.key());

        // Fast path: the child already exists. An upgradable read doesn't
        // block plain readers, and parking_lot allows only one upgradable
        // holder at a time, so no writer can race us past this check.
        let read_guard = self.get().children.upgradable_read();
        if let Some(child) = read_guard.get(&key) {
            return child.upgrade();
        }

        // Slow path: atomically upgrade to a write lock and insert.
        RwLockUpgradableReadGuard::upgrade(read_guard).get_or_insert_with(key, move || {
            StrongRuleNode::new(Box::new(RuleNode::new(
                root,
                self.clone(),
                source.clone(),
                level,
            )))
        })
    }

    /// Raw pointer to the RuleNode
    #[inline]
    pub fn ptr(&self) -> *mut RuleNode {
        self.p.as_ptr()
    }

    /// Borrows the underlying node, asserting (in debug builds) that it is
    /// still alive.
    fn get(&self) -> &RuleNode {
        if cfg!(debug_assertions) {
            let node = unsafe { &*self.p.as_ptr() };
            assert!(node.refcount.load(Ordering::Relaxed) > 0);
        }
        // SAFETY: a StrongRuleNode owns a refcount unit, so the node is
        // guaranteed to be alive.
        unsafe { &*self.p.as_ptr() }
    }

    /// Get the style source corresponding to this rule node. May return `None`
    /// if it's the root node, which means that the node hasn't matched any
    /// rules.
    pub fn style_source(&self) -> Option<&StyleSource> {
        self.get().source.as_ref()
    }

    /// The cascade level for this node
    pub fn cascade_level(&self) -> CascadeLevel {
        self.get().level
    }

    /// Get the importance that this rule node represents.
    pub fn importance(&self) -> Importance {
        self.get().level.importance()
    }

    /// Get an iterator for this rule node and its ancestors.
    pub fn self_and_ancestors(&self) -> SelfAndAncestors {
        SelfAndAncestors {
            current: Some(self),
        }
    }

    /// Returns whether this node has any child, only intended for testing
    /// purposes, and called on a single-threaded fashion only.
    pub unsafe fn has_children_for_testing(&self) -> bool {
        !self.get().children.read().is_empty()
    }

    /// Pops one node off the root's free list, returning `None` when the
    /// list is empty. Must be called on the root, from the layout or
    /// script thread only (asserted below).
    unsafe fn pop_from_free_list(&self) -> Option<WeakRuleNode> {
        // NB: This can run from the root node destructor, so we can't use
        // `get()`, since it asserts the refcount is bigger than zero.
        let me = &*self.p.as_ptr();

        debug_assert!(me.is_root());

        // FIXME(#14213): Apparently the layout data can be gone from script.
        //
        // That's... suspicious, but it's fine if it happens for the rule tree
        // case, so just don't crash in the case we're doing the final GC in
        // script.
        debug_assert!(
            !thread_state::get().is_worker() &&
                (thread_state::get().is_layout() || thread_state::get().is_script())
        );

        let current = me.next_free.load(Ordering::Relaxed);
        if current == FREE_LIST_SENTINEL {
            return None;
        }

        debug_assert!(
            !current.is_null(),
            "Multiple threads are operating on the free list at the \
             same time?"
        );
        debug_assert!(
            current != self.p.as_ptr(),
            "How did the root end up in the free list?"
        );

        // Detach the popped node: clearing its `next_free` makes it read as
        // "not on the free list" again.
        let next = (*current)
            .next_free
            .swap(ptr::null_mut(), Ordering::Relaxed);

        debug_assert!(
            !next.is_null(),
            "How did a null pointer end up in the free list?"
        );

        me.next_free.store(next, Ordering::Relaxed);

        debug!(
            "Popping from free list: cur: {:?}, next: {:?}",
            current, next
        );

        Some(WeakRuleNode::from_ptr(ptr::NonNull::new_unchecked(current)))
    }

    /// Expensive debug-only sanity check: walks the whole free list
    /// asserting it contains no null pointers and no duplicate entries.
    unsafe fn assert_free_list_has_no_duplicates_or_null(&self) {
        assert!(cfg!(debug_assertions), "This is an expensive check!");
        use crate::hash::FxHashSet;

        let me = &*self.p.as_ptr();
        assert!(me.is_root());

        let mut current = self.p.as_ptr();
        let mut seen = FxHashSet::default();
        while current != FREE_LIST_SENTINEL {
            let next = (*current).next_free.load(Ordering::Relaxed);
            assert!(!next.is_null());
            assert!(!seen.contains(&next));
            seen.insert(next);

            current = next;
        }
    }

    /// Drains the free list, freeing every node whose refcount is still
    /// zero. Nodes that were resurrected (upgraded) after being pushed are
    /// skipped. Must run on the root, single-threaded.
    unsafe fn gc(&self) {
        if cfg!(debug_assertions) {
            self.assert_free_list_has_no_duplicates_or_null();
        }

        // NB: This can run from the root node destructor, so we can't use
        // `get()`, since it asserts the refcount is bigger than zero.
        let me = &*self.p.as_ptr();

        debug_assert!(me.is_root(), "Can't call GC on a non-root node!");

        while let Some(weak) = self.pop_from_free_list() {
            let node = &*weak.p.as_ptr();
            if node.refcount.load(Ordering::Relaxed) != 0 {
                // Nothing to do, the node is still alive.
                continue;
            }

            debug!("GC'ing {:?}", weak.p.as_ptr());
            node.remove_from_child_list();
            log_drop(weak.p.as_ptr());
            let _ = Box::from_raw(weak.p.as_ptr());
        }

        me.free_count().store(0, Ordering::Relaxed);

        // Having drained everything, the list must be back to "empty".
        debug_assert_eq!(me.next_free.load(Ordering::Relaxed), FREE_LIST_SENTINEL);
    }

    /// Runs a GC if enough nodes (more than `RULE_TREE_GC_INTERVAL`) have
    /// accumulated in the free list.
    unsafe fn maybe_gc(&self) {
        debug_assert!(self.get().is_root(), "Can't call GC on a non-root node!");
        if self.get().free_count().load(Ordering::Relaxed) > RULE_TREE_GC_INTERVAL {
            self.gc();
        }
    }

    /// Returns true if any properties specified by `rule_type_mask` was set by
    /// an author rule.
    #[cfg(feature = "gecko")]
    pub fn has_author_specified_rules<E>(
        &self,
        mut element: E,
        mut pseudo: Option<PseudoElement>,
        guards: &StylesheetGuards,
        rule_type_mask: u32,
        author_colors_allowed: bool,
    ) -> bool
    where
        E: crate::dom::TElement,
    {
        use crate::gecko_bindings::structs::NS_AUTHOR_SPECIFIED_BACKGROUND;
        use crate::gecko_bindings::structs::NS_AUTHOR_SPECIFIED_BORDER;
        use crate::gecko_bindings::structs::NS_AUTHOR_SPECIFIED_PADDING;
        use crate::properties::{CSSWideKeyword, LonghandId};
        use crate::properties::{PropertyDeclaration, PropertyDeclarationId};
        use crate::values::specified::Color;
        use std::borrow::Cow;

        // Reset properties:
        const BACKGROUND_PROPS: &'static [LonghandId] =
            &[LonghandId::BackgroundColor, LonghandId::BackgroundImage];

        const BORDER_PROPS: &'static [LonghandId] = &[
            LonghandId::BorderTopColor,
            LonghandId::BorderTopStyle,
            LonghandId::BorderTopWidth,
            LonghandId::BorderRightColor,
            LonghandId::BorderRightStyle,
            LonghandId::BorderRightWidth,
            LonghandId::BorderBottomColor,
            LonghandId::BorderBottomStyle,
            LonghandId::BorderBottomWidth,
            LonghandId::BorderLeftColor,
            LonghandId::BorderLeftStyle,
            LonghandId::BorderLeftWidth,
            LonghandId::BorderTopLeftRadius,
            LonghandId::BorderTopRightRadius,
            LonghandId::BorderBottomRightRadius,
            LonghandId::BorderBottomLeftRadius,
            LonghandId::BorderInlineStartColor,
            LonghandId::BorderInlineStartStyle,
            LonghandId::BorderInlineStartWidth,
            LonghandId::BorderInlineEndColor,
            LonghandId::BorderInlineEndStyle,
            LonghandId::BorderInlineEndWidth,
            LonghandId::BorderBlockStartColor,
            LonghandId::BorderBlockStartStyle,
            LonghandId::BorderBlockStartWidth,
            LonghandId::BorderBlockEndColor,
            LonghandId::BorderBlockEndStyle,
            LonghandId::BorderBlockEndWidth,
        ];

        const PADDING_PROPS: &'static [LonghandId] = &[
            LonghandId::PaddingTop,
            LonghandId::PaddingRight,
            LonghandId::PaddingBottom,
            LonghandId::PaddingLeft,
            LonghandId::PaddingInlineStart,
            LonghandId::PaddingInlineEnd,
            LonghandId::PaddingBlockStart,
            LonghandId::PaddingBlockEnd,
        ];

        // Set of properties that we are currently interested in.
        let mut properties = LonghandIdSet::new();

        if rule_type_mask & NS_AUTHOR_SPECIFIED_BACKGROUND != 0 {
            for id in BACKGROUND_PROPS {
                properties.insert(*id);
            }
        }
        if rule_type_mask & NS_AUTHOR_SPECIFIED_BORDER != 0 {
            for id in BORDER_PROPS {
                properties.insert(*id);
            }
        }
        if rule_type_mask & NS_AUTHOR_SPECIFIED_PADDING != 0 {
            for id in PADDING_PROPS {
                properties.insert(*id);
            }
        }

        // If author colors are not allowed, don't look at those properties
        // (except for background-color which is special and we handle below).
        if !author_colors_allowed {
            properties.remove_all(LonghandIdSet::ignored_when_colors_disabled());
            if rule_type_mask & NS_AUTHOR_SPECIFIED_BACKGROUND != 0 {
                properties.insert(LonghandId::BackgroundColor);
            }
        }

        let mut element_rule_node = Cow::Borrowed(self);

        loop {
            // We need to be careful not to count styles covered up by
            // user-important or UA-important declarations. But we do want to
            // catch explicit inherit styling in those and check our parent
            // element to see whether we have user styling for those properties.
            // Note that we don't care here about inheritance due to lack of a
            // specified value, since all the properties we care about are reset
            // properties.

            let mut inherited_properties = LonghandIdSet::new();
            let mut have_explicit_ua_inherit = false;

            for node in element_rule_node.self_and_ancestors() {
                let source = node.style_source();
                let declarations = if source.is_some() {
                    source
                        .as_ref()
                        .unwrap()
                        .read(node.cascade_level().guard(guards))
                        .declaration_importance_iter()
                } else {
                    // The root node has no source; skip it.
                    continue;
                };

                // Iterate over declarations of the longhands we care about.
                let node_importance = node.importance();
                let longhands = declarations.rev().filter_map(|(declaration, importance)| {
                    if importance != node_importance {
                        return None;
                    }
                    match declaration.id() {
                        PropertyDeclarationId::Longhand(id) => Some((id, declaration)),
                        _ => None,
                    }
                });

                match node.cascade_level() {
                    // Non-author rules:
                    CascadeLevel::UANormal |
                    CascadeLevel::UAImportant |
                    CascadeLevel::UserNormal |
                    CascadeLevel::UserImportant => {
                        for (id, declaration) in longhands {
                            if properties.contains(id) {
                                // This property was set by a non-author rule.
                                // Stop looking for it in this element's rule
                                // nodes.
                                properties.remove(id);

                                // However, if it is inherited, then it might be
                                // inherited from an author rule from an
                                // ancestor element's rule nodes.
                                if declaration.get_css_wide_keyword() ==
                                    Some(CSSWideKeyword::Inherit)
                                {
                                    have_explicit_ua_inherit = true;
                                    inherited_properties.insert(id);
                                }
                            }
                        }
                    },
                    // Author rules:
                    CascadeLevel::PresHints |
                    CascadeLevel::SameTreeAuthorNormal |
                    CascadeLevel::InnerShadowNormal |
                    CascadeLevel::StyleAttributeNormal |
                    CascadeLevel::SMILOverride |
                    CascadeLevel::Animations |
                    CascadeLevel::SameTreeAuthorImportant |
                    CascadeLevel::InnerShadowImportant |
                    CascadeLevel::StyleAttributeImportant |
                    CascadeLevel::Transitions => {
                        for (id, declaration) in longhands {
                            if properties.contains(id) {
                                if !author_colors_allowed {
                                    // A transparent author background-color
                                    // still counts even with author colors
                                    // disabled.
                                    if let PropertyDeclaration::BackgroundColor(ref color) =
                                        *declaration
                                    {
                                        return *color == Color::transparent();
                                    }
                                }
                                return true;
                            }
                        }
                    },
                }
            }

            if !have_explicit_ua_inherit {
                break;
            }

            // Continue to the parent element and search for the inherited properties.
            if let Some(pseudo) = pseudo.take() {
                if pseudo.inherits_from_default_values() {
                    break;
                }
            } else {
                element = match element.inheritance_parent() {
                    Some(parent) => parent,
                    None => break,
                };

                let parent_data = element.mutate_data().unwrap();
                let parent_rule_node = parent_data.styles.primary().rules().clone();
                element_rule_node = Cow::Owned(parent_rule_node);
            }

            // Only keep chasing the properties that were explicitly
            // inherited past a non-author rule.
            properties = inherited_properties;
        }

        false
    }

    /// Returns true if there is either animation or transition level rule.
    pub fn has_animation_or_transition_rules(&self) -> bool {
        self.self_and_ancestors()
            .take_while(|node| node.cascade_level() >= CascadeLevel::SMILOverride)
            .any(|node| node.cascade_level().is_animation())
    }

    /// Get a set of properties whose CascadeLevel are higher than Animations
    /// but not equal to Transitions.
    ///
    /// If there are any custom properties, we set the boolean value of the
    /// returned tuple to true.
    pub fn get_properties_overriding_animations(
        &self,
        guards: &StylesheetGuards,
    ) -> (LonghandIdSet, bool) {
        use crate::properties::PropertyDeclarationId;

        // We want to iterate over cascade levels that override the animations
        // level, i.e. !important levels and the transitions level.
        //
        // However, we actually want to skip the transitions level because
        // although it is higher in the cascade than animations, when both
        // transitions and animations are present for a given element and
        // property, transitions are suppressed so that they don't actually
        // override animations.
        let iter = self
            .self_and_ancestors()
            .skip_while(|node| node.cascade_level() == CascadeLevel::Transitions)
            .take_while(|node| node.cascade_level() > CascadeLevel::Animations);
        let mut result = (LonghandIdSet::new(), false);
        for node in iter {
            let style = node.style_source().unwrap();
            for (decl, important) in style
                .read(node.cascade_level().guard(guards))
                .declaration_importance_iter()
            {
                // Although we are only iterating over cascade levels that
                // override animations, in a given property declaration block we
                // can have a mixture of !important and non-!important
                // declarations but only the !important declarations actually
                // override animations.
                if important.important() {
                    match decl.id() {
                        PropertyDeclarationId::Longhand(id) => result.0.insert(id),
                        PropertyDeclarationId::Custom(_) => result.1 = true,
                    }
                }
            }
        }
        result
    }
}
1582
1583
/// An iterator over a rule node and its ancestors.
#[derive(Clone)]
pub struct SelfAndAncestors<'a> {
    /// The node to yield next, following parent links; `None` once the
    /// root has been yielded.
    current: Option<&'a StrongRuleNode>,
}
1588
1589
impl<'a> Iterator for SelfAndAncestors<'a> {
1590
type Item = &'a StrongRuleNode;
1591
1592
fn next(&mut self) -> Option<Self::Item> {
1593
self.current.map(|node| {
1594
self.current = node.parent();
1595
node
1596
})
1597
}
1598
}
1599
1600
impl Clone for StrongRuleNode {
    /// Bumps the node's refcount and returns another handle to it.
    fn clone(&self) -> Self {
        debug!(
            "{:?}: {:?}+",
            self.ptr(),
            self.get().refcount.load(Ordering::Relaxed)
        );
        debug_assert!(self.get().refcount.load(Ordering::Relaxed) > 0);
        // Relaxed suffices for the increment: creating a new reference
        // requires already holding one, so no extra synchronization edge is
        // needed (the same argument Arc::clone relies on).
        self.get().refcount.fetch_add(1, Ordering::Relaxed);
        StrongRuleNode::from_ptr(self.p)
    }
}
1612
1613
impl Drop for StrongRuleNode {
    /// Decrements the refcount. On reaching zero, the root is freed
    /// outright, while non-root nodes are pushed onto the root's lock-free
    /// free list for a later GC (or resurrection via `upgrade`).
    fn drop(&mut self) {
        let node = unsafe { &*self.ptr() };

        debug!(
            "{:?}: {:?}-",
            self.ptr(),
            node.refcount.load(Ordering::Relaxed)
        );
        debug!(
            "Dropping node: {:?}, root: {:?}, parent: {:?}",
            self.ptr(),
            node.root.as_ref().map(|r| r.ptr()),
            node.parent.as_ref().map(|p| p.ptr())
        );
        let should_drop = {
            debug_assert!(node.refcount.load(Ordering::Relaxed) > 0);
            // We released the last reference iff the old count was 1.
            node.refcount.fetch_sub(1, Ordering::Relaxed) == 1
        };

        if !should_drop {
            return;
        }

        debug_assert!(node.children.read().is_empty());
        if node.parent.is_none() {
            // This is the root node; free it directly rather than going
            // through the free list (which lives on the root itself).
            debug!("Dropping root node!");
            // The free list should be null by this point
            debug_assert!(node.next_free.load(Ordering::Relaxed).is_null());
            log_drop(self.ptr());
            let _ = unsafe { Box::from_raw(self.ptr()) };
            return;
        }

        let root = unsafe { &*node.root.as_ref().unwrap().ptr() };
        let free_list = &root.next_free;
        let mut old_head = free_list.load(Ordering::Relaxed);

        // If the free list is null, that means that the rule tree has been
        // formally torn down, and the last standard GC has already occurred.
        // We require that any callers using the rule tree at this point are
        // on the main thread only, which lets us trigger a synchronous GC
        // here to avoid leaking anything. We use the GC machinery, rather
        // than just dropping directly, so that we benefit from the iterative
        // destruction and don't trigger unbounded recursion during drop. See
        // [1] and the associated crashtest.
        //
        if old_head.is_null() {
            debug_assert!(
                !thread_state::get().is_worker() &&
                    (thread_state::get().is_layout() || thread_state::get().is_script())
            );
            // Add the node as the sole entry in the free list.
            debug_assert!(node.next_free.load(Ordering::Relaxed).is_null());
            node.next_free.store(FREE_LIST_SENTINEL, Ordering::Relaxed);
            free_list.store(node as *const _ as *mut _, Ordering::Relaxed);

            // Invoke the GC.
            //
            // Note that we need hold a strong reference to the root so that it
            // doesn't go away during the GC (which would happen if we're freeing
            // the last external reference into the rule tree). This is nicely
            // enforced by having the gc() method live on StrongRuleNode rather than
            // RuleNode.
            let strong_root: StrongRuleNode = node.root.as_ref().unwrap().upgrade();
            unsafe {
                strong_root.gc();
            }

            // Leave the free list null, like we found it, such that additional
            // drops for straggling rule nodes will take this same codepath.
            debug_assert_eq!(root.next_free.load(Ordering::Relaxed), FREE_LIST_SENTINEL);
            root.next_free.store(ptr::null_mut(), Ordering::Relaxed);

            // Return. If strong_root is the last strong reference to the root,
            // this re-enters StrongRuleNode::drop, and takes the root-dropping
            // path earlier in this function.
            return;
        }

        // Fast path: if our `next_free` is already non-null we're already on
        // the free list, so there's nothing to do — don't spinloop below.
        // Relaxed is fine since this is only an optimization.
        if node.next_free.load(Ordering::Relaxed) != ptr::null_mut() {
            return;
        }

        // Ensure we "lock" the free list head swapping it with FREE_LIST_LOCKED.
        //
        // Note that we use Acquire/Release semantics for the free list
        // synchronization, in order to guarantee that the next_free
        // reads/writes we do below are properly visible from multiple threads
        // racing.
        loop {
            match free_list.compare_exchange_weak(
                old_head,
                FREE_LIST_LOCKED,
                Ordering::Acquire,
                Ordering::Relaxed,
            ) {
                Ok(..) => {
                    // If we "succeeded" by swapping FREE_LIST_LOCKED over
                    // itself, another thread holds the lock; keep spinning
                    // until we observe — and lock — the real head.
                    if old_head != FREE_LIST_LOCKED {
                        break;
                    }
                },
                Err(new) => old_head = new,
            }
        }

        // If another thread has raced with us while using the same rule node,
        // just store the old head again, we're done.
        //
        // Note that we can use relaxed operations for loading since we're
        // effectively locking the free list with Acquire/Release semantics, and
        // the memory ordering is already guaranteed by that locking/unlocking.
        if node.next_free.load(Ordering::Relaxed) != ptr::null_mut() {
            free_list.store(old_head, Ordering::Release);
            return;
        }

        // Else store the old head as the next pointer, and store ourselves as
        // the new head of the free list.
        //
        // This can be relaxed since this pointer won't be read until GC.
        node.next_free.store(old_head, Ordering::Relaxed);

        // Increment the free count. This doesn't need to be an RMU atomic
        // operation, because the free list is "locked".
        let old_free_count = root.free_count().load(Ordering::Relaxed);
        root.free_count()
            .store(old_free_count + 1, Ordering::Relaxed);

        // This can be release because of the locking of the free list, that
        // ensures that all the other nodes racing with this one are using
        // `Acquire`.
        free_list.store(self.ptr(), Ordering::Release);
    }
}
1752
1753
impl<'a> From<&'a StrongRuleNode> for WeakRuleNode {
    /// Demotes a strong reference to a weak one without touching the
    /// refcount.
    fn from(node: &'a StrongRuleNode) -> Self {
        Self::from_ptr(node.p)
    }
}
1758
1759
impl WeakRuleNode {
    /// Raw pointer to the referenced node.
    #[inline]
    fn ptr(&self) -> *mut RuleNode {
        self.p.as_ptr()
    }

    /// Promotes this weak reference to a strong one by bumping the node's
    /// refcount.
    ///
    /// NOTE(review): there is no weak count here, so this dereferences the
    /// pointer assuming the node is still alive (not yet GC'd from the free
    /// list) — callers must guarantee that.
    fn upgrade(&self) -> StrongRuleNode {
        debug!("Upgrading weak node: {:p}", self.ptr());

        let node = unsafe { &*self.ptr() };
        node.refcount.fetch_add(1, Ordering::Relaxed);
        StrongRuleNode::from_ptr(self.p)
    }

    /// Wraps a raw node pointer as a weak reference (no refcount change).
    fn from_ptr(p: ptr::NonNull<RuleNode>) -> Self {
        WeakRuleNode { p }
    }
}