From e5cb3d2a4c7d05ec82a92aff6732590b27d052aa Mon Sep 17 00:00:00 2001
From: Anthony Ramine
Date: Thu, 16 Apr 2020 12:55:47 +0200
Subject: [PATCH] Move the meat of the rule tree to a submodule "core"

---
 components/style/rule_tree/core.rs   |  776 +++++++++++++++++++
 components/style/rule_tree/mod.rs    | 1041 +++++---------------------
 components/style/rule_tree/source.rs |  101 +++
 3 files changed, 1076 insertions(+), 842 deletions(-)
 create mode 100644 components/style/rule_tree/core.rs
 create mode 100644 components/style/rule_tree/source.rs

diff --git a/components/style/rule_tree/core.rs b/components/style/rule_tree/core.rs
new file mode 100644
index 00000000000..3103d416aec
--- /dev/null
+++ b/components/style/rule_tree/core.rs
@@ -0,0 +1,776 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at https://mozilla.org/MPL/2.0/. */
+
+#![allow(unsafe_code)]
+
+use crate::properties::Importance;
+use crate::shared_lock::StylesheetGuards;
+use crate::thread_state;
+use malloc_size_of::{MallocShallowSizeOf, MallocSizeOf, MallocSizeOfOps};
+use parking_lot::RwLock;
+use smallvec::SmallVec;
+use std::io::Write;
+use std::mem;
+use std::ptr;
+use std::sync::atomic::{AtomicPtr, AtomicUsize, Ordering};
+
+use super::map::Map;
+use super::{CascadeLevel, StyleSource};
+
+/// The rule tree, the structure servo uses to preserve the results of selector
+/// matching.
+///
+/// This is organized as a tree of rules. When a node matches a set of rules,
+/// they're inserted in order in the tree, starting with the least specific
+/// one.
+///
+/// When a rule is inserted in the tree, other elements may share the path up
+/// to a given rule. If that's the case, we don't duplicate child nodes, but
+/// share them.
+///
+/// When the rule node refcount drops to zero, it doesn't get freed. It gets
+/// instead put into a free list, and it is potentially GC'd after a while in a
+/// single-threaded fashion.
+///
+/// That way, a rule node that represents a likely-to-match-again rule (like a
+/// :hover rule) can be reused if we haven't GC'd it yet.
+///
+/// See the discussion at https://github.com/servo/servo/pull/15562 and the IRC
+/// logs from http://logs.glob.uno/?c=mozilla%23servo&s=3+Apr+2017&e=3+Apr+2017#c644094
+/// to see a discussion about the different memory orderings used here.
+#[derive(Debug)]
+pub struct RuleTree {
+    root: StrongRuleNode,
+}
+
+impl Drop for RuleTree {
+    fn drop(&mut self) {
+        // GC the rule tree.
+        unsafe {
+            self.gc();
+        }
+
+        // After the GC, the free list should be empty.
+        debug_assert_eq!(
+            self.root.get().next_free.load(Ordering::Relaxed),
+            FREE_LIST_SENTINEL
+        );
+
+        // Remove the sentinel. This indicates that GCs will no longer occur.
+        // Any further drops of StrongRuleNodes must occur on the main thread,
+        // and will trigger synchronous dropping of the Rule nodes.
+        self.root
+            .get()
+            .next_free
+            .store(ptr::null_mut(), Ordering::Relaxed);
+    }
+}
+
+impl MallocSizeOf for RuleTree {
+    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
+        let mut n = 0;
+        let mut stack = SmallVec::<[_; 32]>::new();
+        stack.push(self.root.downgrade());
+
+        while let Some(node) = stack.pop() {
+            n += unsafe { ops.malloc_size_of(node.ptr()) };
+            let children = unsafe { (*node.ptr()).children.read() };
+            n += children.shallow_size_of(ops);
+            for c in &*children {
+                stack.push(c.clone());
+            }
+        }
+
+        n
+    }
+}
+
+#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
+struct ChildKey(CascadeLevel, ptr::NonNull<()>);
+unsafe impl Send for ChildKey {}
+unsafe impl Sync for ChildKey {}
+
+/// This value exists here so a node that pushes itself to the list can know
+/// that it is in the free list by looking at its next pointer, and comparing
+/// it with null.
+///
+/// The root node doesn't have a null pointer in the free list, but this
+/// value.
+const FREE_LIST_SENTINEL: *mut RuleNode = 0x01 as *mut RuleNode;
+
+/// A second sentinel value for the free list, indicating that it's locked
+/// (i.e. another thread is currently adding an entry). We spin if we find
+/// this value.
+const FREE_LIST_LOCKED: *mut RuleNode = 0x02 as *mut RuleNode;
+
+impl RuleTree {
+    /// Construct a new rule tree.
+    pub fn new() -> Self {
+        RuleTree {
+            root: StrongRuleNode::new(Box::new(RuleNode::root())),
+        }
+    }
+
+    /// Get the root rule node.
+    pub fn root(&self) -> &StrongRuleNode {
+        &self.root
+    }
+
+    /// This can only be called when no other thread is accessing this tree.
+    pub unsafe fn gc(&self) {
+        self.root.gc();
+    }
+
+    /// This can only be called when no other thread is accessing this tree.
+    pub unsafe fn maybe_gc(&self) {
+        #[cfg(debug_assertions)]
+        self.maybe_dump_stats();
+
+        self.root.maybe_gc();
+    }
+
+    #[cfg(debug_assertions)]
+    fn maybe_dump_stats(&self) {
+        use itertools::Itertools;
+        use std::cell::Cell;
+        use std::time::{Duration, Instant};
+
+        if !log_enabled!(log::Level::Trace) {
+            return;
+        }
+
+        const RULE_TREE_STATS_INTERVAL: Duration = Duration::from_secs(2);
+
+        thread_local! {
+            pub static LAST_STATS: Cell<Instant> = Cell::new(Instant::now());
+        };
+
+        let should_dump = LAST_STATS.with(|s| {
+            let now = Instant::now();
+            if now.duration_since(s.get()) < RULE_TREE_STATS_INTERVAL {
+                return false;
+            }
+            s.set(now);
+            true
+        });
+
+        if !should_dump {
+            return;
+        }
+
+        let mut children_count = crate::hash::FxHashMap::default();
+
+        let mut stack = SmallVec::<[_; 32]>::new();
+        stack.push(self.root.clone());
+        while let Some(node) = stack.pop() {
+            let children = node.get().children.read();
+            *children_count.entry(children.len()).or_insert(0) += 1;
+            for c in &*children {
+                stack.push(c.upgrade());
+            }
+        }
+
+        trace!("Rule tree stats:");
+        let counts = children_count.keys().sorted();
+        for count in counts {
+            trace!(" {} - {}", count, children_count[count]);
+        }
+    }
+}
+
+/// The number of RuleNodes added to the free list before we will consider
+/// doing a GC when calling maybe_gc(). (The value is copied from Gecko,
+/// where it likely did not result from a rigorous performance analysis.)
+const RULE_TREE_GC_INTERVAL: usize = 300;
+
+/// A node in the rule tree.
+pub struct RuleNode {
+    /// The root node. Only the root has no root pointer, for obvious reasons.
+    root: Option<WeakRuleNode>,
+
+    /// The parent rule node. Only the root has no parent.
+    parent: Option<StrongRuleNode>,
+
+    /// The actual style source, either coming from a selector in a StyleRule,
+    /// or a raw property declaration block (like the style attribute).
+    ///
+    /// None for the root node.
+    source: Option<StyleSource>,
+
+    /// The cascade level this rule is positioned at.
+    level: CascadeLevel,
+
+    refcount: AtomicUsize,
+
+    /// Only used for the root, stores the number of free rule nodes that are
+    /// around.
+    free_count: AtomicUsize,
+
+    /// The children of a given rule node. Children remove themselves from
+    /// here when they go away.
+    children: RwLock<Map<ChildKey, WeakRuleNode>>,
+
+    /// The next item in the rule tree free list, that starts on the root
+    /// node.
+    ///
+    /// When this is set to null, that means that the rule tree has been torn
+    /// down, and GCs will no longer occur. When this happens, StrongRuleNodes
+    /// may only be dropped on the main thread, and teardown happens
+    /// synchronously.
+    next_free: AtomicPtr<RuleNode>,
+}
+
+// On Gecko builds, hook into the leak checking machinery.
+#[cfg(feature = "gecko_refcount_logging")]
+mod gecko_leak_checking {
+    use super::RuleNode;
+    use std::mem::size_of;
+    use std::os::raw::{c_char, c_void};
+
+    extern "C" {
+        fn NS_LogCtor(aPtr: *mut c_void, aTypeName: *const c_char, aSize: u32);
+        fn NS_LogDtor(aPtr: *mut c_void, aTypeName: *const c_char, aSize: u32);
+    }
+
+    static NAME: &'static [u8] = b"RuleNode\0";
+
+    /// Logs the creation of a heap-allocated object to Gecko's leak-checking
+    /// machinery.
+    pub(super) fn log_ctor(ptr: *const RuleNode) {
+        let s = NAME as *const [u8] as *const u8 as *const c_char;
+        unsafe {
+            NS_LogCtor(ptr as *mut c_void, s, size_of::<RuleNode>() as u32);
+        }
+    }
+
+    /// Logs the destruction of a heap-allocated object to Gecko's
+    /// leak-checking machinery.
+    pub(super) fn log_dtor(ptr: *const RuleNode) {
+        let s = NAME as *const [u8] as *const u8 as *const c_char;
+        unsafe {
+            NS_LogDtor(ptr as *mut c_void, s, size_of::<RuleNode>() as u32);
+        }
+    }
+}
+
+#[inline(always)]
+fn log_new(_ptr: *const RuleNode) {
+    #[cfg(feature = "gecko_refcount_logging")]
+    gecko_leak_checking::log_ctor(_ptr);
+}
+
+#[inline(always)]
+fn log_drop(_ptr: *const RuleNode) {
+    #[cfg(feature = "gecko_refcount_logging")]
+    gecko_leak_checking::log_dtor(_ptr);
+}
+
+impl RuleNode {
+    fn new(
+        root: WeakRuleNode,
+        parent: StrongRuleNode,
+        source: StyleSource,
+        level: CascadeLevel,
+    ) -> Self {
+        debug_assert!(root.upgrade().parent().is_none());
+        RuleNode {
+            root: Some(root),
+            parent: Some(parent),
+            source: Some(source),
+            level: level,
+            refcount: AtomicUsize::new(1),
+            children: Default::default(),
+            free_count: AtomicUsize::new(0),
+            next_free: AtomicPtr::new(ptr::null_mut()),
+        }
+    }
+
+    fn root() -> Self {
+        RuleNode {
+            root: None,
+            parent: None,
+            source: None,
+            level: CascadeLevel::UANormal,
+            refcount: AtomicUsize::new(1),
+            free_count: AtomicUsize::new(0),
+            children: Default::default(),
+            next_free: AtomicPtr::new(FREE_LIST_SENTINEL),
+        }
+    }
+
+    fn key(&self) -> ChildKey {
+        ChildKey(
+            self.level,
+            self.source
+                .as_ref()
+                .expect("Called key() on the root node")
+                .key(),
+        )
+    }
+
+    fn is_root(&self) -> bool {
+        self.parent.is_none()
+    }
+
+    fn free_count(&self) -> &AtomicUsize {
+        debug_assert!(self.is_root());
+        &self.free_count
+    }
+
+    /// Remove this rule node from the child list.
+    ///
+    /// This is expected to be called before freeing the node from the free
+    /// list, on the main thread.
+    unsafe fn remove_from_child_list(&self) {
+        debug!(
+            "Remove from child list: {:?}, parent: {:?}",
+            self as *const RuleNode,
+            self.parent.as_ref().map(|p| p.ptr())
+        );
+
+        if let Some(parent) = self.parent.as_ref() {
+            let weak = parent
+                .get()
+                .children
+                .write()
+                .remove(&self.key(), |node| (*node.ptr()).key());
+            assert_eq!(weak.unwrap().ptr() as *const _, self as *const _);
+        }
+    }
+}
+
+#[derive(Clone)]
+pub(crate) struct WeakRuleNode {
+    p: ptr::NonNull<RuleNode>,
+}
+
+/// A strong reference to a rule node.
+#[derive(Debug, Eq, Hash, PartialEq)]
+pub struct StrongRuleNode {
+    p: ptr::NonNull<RuleNode>,
+}
+unsafe impl Send for StrongRuleNode {}
+unsafe impl Sync for StrongRuleNode {}
+
+#[cfg(feature = "servo")]
+malloc_size_of_is_0!(StrongRuleNode);
+
+impl StrongRuleNode {
+    fn new(n: Box<RuleNode>) -> Self {
+        debug_assert_eq!(n.parent.is_none(), !n.source.is_some());
+
+        // TODO(emilio): Use into_raw_non_null when it's stable.
+        let ptr = unsafe { ptr::NonNull::new_unchecked(Box::into_raw(n)) };
+        log_new(ptr.as_ptr());
+
+        debug!("Creating rule node: {:p}", ptr);
+
+        StrongRuleNode::from_ptr(ptr)
+    }
+
+    fn from_ptr(p: ptr::NonNull<RuleNode>) -> Self {
+        StrongRuleNode { p }
+    }
+
+    pub(super) fn downgrade(&self) -> WeakRuleNode {
+        WeakRuleNode::from_ptr(self.p)
+    }
+
+    /// Get the parent rule node of this rule node.
+    pub fn parent(&self) -> Option<&StrongRuleNode> {
+        self.get().parent.as_ref()
+    }
+
+    pub(super) fn ensure_child(
+        &self,
+        root: WeakRuleNode,
+        source: StyleSource,
+        level: CascadeLevel,
+    ) -> StrongRuleNode {
+        use parking_lot::RwLockUpgradableReadGuard;
+
+        debug_assert!(
+            self.get().level <= level,
+            "Should be ordered (instead {:?} > {:?}), from {:?} and {:?}",
+            self.get().level,
+            level,
+            self.get().source,
+            source,
+        );
+
+        let key = ChildKey(level, source.key());
+
+        let children = self.get().children.upgradable_read();
+        if let Some(child) = children.get(&key, |node| unsafe { (*node.ptr()).key() }) {
+            return child.upgrade();
+        }
+        let mut children = RwLockUpgradableReadGuard::upgrade(children);
+        let weak = children.get_or_insert_with(
+            key,
+            |node| unsafe { (*node.ptr()).key() },
+            move || {
+                let strong =
+                    StrongRuleNode::new(Box::new(RuleNode::new(root, self.clone(), source, level)));
+                let weak = strong.downgrade();
+                mem::forget(strong);
+                weak
+            },
+        );
+
+        StrongRuleNode::from_ptr(weak.p)
+    }
+
+    /// Raw pointer to the RuleNode.
+    #[inline]
+    pub fn ptr(&self) -> *mut RuleNode {
+        self.p.as_ptr()
+    }
+
+    fn get(&self) -> &RuleNode {
+        if cfg!(debug_assertions) {
+            let node = unsafe { &*self.p.as_ptr() };
+            assert!(node.refcount.load(Ordering::Relaxed) > 0);
+        }
+        unsafe { &*self.p.as_ptr() }
+    }
+
+    /// Get the style source corresponding to this rule node. May return
+    /// `None` if it's the root node, which means that the node hasn't matched
+    /// any rules.
+    pub fn style_source(&self) -> Option<&StyleSource> {
+        self.get().source.as_ref()
+    }
+
+    /// The cascade level for this node.
+    pub fn cascade_level(&self) -> CascadeLevel {
+        self.get().level
+    }
+
+    /// Get the importance that this rule node represents.
+    pub fn importance(&self) -> Importance {
+        self.get().level.importance()
+    }
+
+    /// Returns whether this node has any children. Only intended for testing
+    /// purposes, and must be called in a single-threaded fashion.
+    pub unsafe fn has_children_for_testing(&self) -> bool {
+        !self.get().children.read().is_empty()
+    }
+
+    unsafe fn pop_from_free_list(&self) -> Option<WeakRuleNode> {
+        // NB: This can run from the root node destructor, so we can't use
+        // `get()`, since it asserts the refcount is bigger than zero.
+        let me = &*self.p.as_ptr();
+
+        debug_assert!(me.is_root());
+
+        // FIXME(#14213): Apparently the layout data can be gone from script.
+        //
+        // That's... suspicious, but it's fine if it happens for the rule tree
+        // case, so just don't crash in the case we're doing the final GC in
+        // script.
+
+        debug_assert!(
+            !thread_state::get().is_worker() &&
+                (thread_state::get().is_layout() || thread_state::get().is_script())
+        );
+
+        let current = me.next_free.load(Ordering::Relaxed);
+        if current == FREE_LIST_SENTINEL {
+            return None;
+        }
+
+        debug_assert!(
+            !current.is_null(),
+            "Multiple threads are operating on the free list at the \
+             same time?"
+        );
+        debug_assert!(
+            current != self.p.as_ptr(),
+            "How did the root end up in the free list?"
+        );
+
+        let next = (*current)
+            .next_free
+            .swap(ptr::null_mut(), Ordering::Relaxed);
+
+        debug_assert!(
+            !next.is_null(),
+            "How did a null pointer end up in the free list?"
+        );
+
+        me.next_free.store(next, Ordering::Relaxed);
+
+        debug!(
+            "Popping from free list: cur: {:?}, next: {:?}",
+            current, next
+        );
+
+        Some(WeakRuleNode::from_ptr(ptr::NonNull::new_unchecked(current)))
+    }
+
+    unsafe fn assert_free_list_has_no_duplicates_or_null(&self) {
+        assert!(cfg!(debug_assertions), "This is an expensive check!");
+        use crate::hash::FxHashSet;
+
+        let me = &*self.p.as_ptr();
+        assert!(me.is_root());
+
+        let mut current = self.p.as_ptr();
+        let mut seen = FxHashSet::default();
+        while current != FREE_LIST_SENTINEL {
+            let next = (*current).next_free.load(Ordering::Relaxed);
+            assert!(!next.is_null());
+            assert!(!seen.contains(&next));
+            seen.insert(next);
+
+            current = next;
+        }
+    }
+
+    unsafe fn gc(&self) {
+        if cfg!(debug_assertions) {
+            self.assert_free_list_has_no_duplicates_or_null();
+        }
+
+        // NB: This can run from the root node destructor, so we can't use
+        // `get()`, since it asserts the refcount is bigger than zero.
+        let me = &*self.p.as_ptr();
+
+        debug_assert!(me.is_root(), "Can't call GC on a non-root node!");
+
+        while let Some(weak) = self.pop_from_free_list() {
+            let node = &*weak.p.as_ptr();
+            if node.refcount.load(Ordering::Relaxed) != 0 {
+                // Nothing to do, the node is still alive.
+                continue;
+            }
+
+            debug!("GC'ing {:?}", weak.p.as_ptr());
+            node.remove_from_child_list();
+            log_drop(weak.p.as_ptr());
+            let _ = Box::from_raw(weak.p.as_ptr());
+        }
+
+        me.free_count().store(0, Ordering::Relaxed);
+
+        debug_assert_eq!(me.next_free.load(Ordering::Relaxed), FREE_LIST_SENTINEL);
+    }
+
+    unsafe fn maybe_gc(&self) {
+        debug_assert!(self.get().is_root(), "Can't call GC on a non-root node!");
+        if self.get().free_count().load(Ordering::Relaxed) > RULE_TREE_GC_INTERVAL {
+            self.gc();
+        }
+    }
+
+    pub(super) fn dump<W: Write>(&self, guards: &StylesheetGuards, writer: &mut W, indent: usize) {
+        const INDENT_INCREMENT: usize = 4;
+
+        for _ in 0..indent {
+            let _ = write!(writer, " ");
+        }
+
+        let _ = writeln!(
+            writer,
+            " - {:?} (ref: {:?}, parent: {:?})",
+            self.get() as *const _,
+            self.get().refcount.load(Ordering::Relaxed),
+            self.parent().map(|p| p.ptr())
+        );
+
+        for _ in 0..indent {
+            let _ = write!(writer, " ");
+        }
+
+        if let Some(source) = self.style_source() {
+            source.dump(self.cascade_level().guard(guards), writer);
+        } else {
+            if indent != 0 {
+                warn!("How has this happened?");
+            }
+            let _ = write!(writer, "(root)");
+        }
+
+        let _ = writeln!(writer);
+        for child in &*self.get().children.read() {
+            child
+                .upgrade()
+                .dump(guards, writer, indent + INDENT_INCREMENT);
+        }
+    }
+}
+
+impl Clone for StrongRuleNode {
+    fn clone(&self) -> Self {
+        debug!(
+            "{:?}: {:?}+",
+            self.ptr(),
+            self.get().refcount.load(Ordering::Relaxed)
+        );
+        debug_assert!(self.get().refcount.load(Ordering::Relaxed) > 0);
+        self.get().refcount.fetch_add(1, Ordering::Relaxed);
+        StrongRuleNode::from_ptr(self.p)
+    }
+}
+
+impl Drop for StrongRuleNode {
+    #[cfg_attr(feature = "servo", allow(unused_mut))]
+    fn drop(&mut self) {
+        let node = unsafe { &*self.ptr() };
+
+        debug!(
+            "{:?}: {:?}-",
+            self.ptr(),
+            node.refcount.load(Ordering::Relaxed)
+        );
+        debug!(
+            "Dropping node: {:?}, root: {:?}, parent: {:?}",
+            self.ptr(),
+            node.root.as_ref().map(|r| r.ptr()),
+            node.parent.as_ref().map(|p| p.ptr())
+        );
+        let should_drop = {
+            debug_assert!(node.refcount.load(Ordering::Relaxed) > 0);
+            node.refcount.fetch_sub(1, Ordering::Relaxed) == 1
+        };
+
+        if !should_drop {
+            return;
+        }
+
+        if node.parent.is_none() {
+            debug!("Dropping root node!");
+            // The free list should be null by this point.
+            debug_assert!(node.next_free.load(Ordering::Relaxed).is_null());
+            log_drop(self.ptr());
+            let _ = unsafe { Box::from_raw(self.ptr()) };
+            return;
+        }
+
+        let root = unsafe { &*node.root.as_ref().unwrap().ptr() };
+        let free_list = &root.next_free;
+        let mut old_head = free_list.load(Ordering::Relaxed);
+
+        // If the free list is null, that means that the rule tree has been
+        // formally torn down, and the last standard GC has already occurred.
+        // We require that any callers using the rule tree at this point are
+        // on the main thread only, which lets us trigger a synchronous GC
+        // here to avoid leaking anything. We use the GC machinery, rather
+        // than just dropping directly, so that we benefit from the iterative
+        // destruction and don't trigger unbounded recursion during drop. See
+        // [1] and the associated crashtest.
+        //
+        // [1] https://bugzilla.mozilla.org/show_bug.cgi?id=439184
+        if old_head.is_null() {
+            debug_assert!(
+                !thread_state::get().is_worker() &&
+                    (thread_state::get().is_layout() || thread_state::get().is_script())
+            );
+            // Add the node as the sole entry in the free list.
+            debug_assert!(node.next_free.load(Ordering::Relaxed).is_null());
+            node.next_free.store(FREE_LIST_SENTINEL, Ordering::Relaxed);
+            free_list.store(node as *const _ as *mut _, Ordering::Relaxed);
+
+            // Invoke the GC.
+            //
+            // Note that we need to hold a strong reference to the root so
+            // that it doesn't go away during the GC (which would happen if
+            // we're freeing the last external reference into the rule tree).
+            // This is nicely enforced by having the gc() method live on
+            // StrongRuleNode rather than RuleNode.
+            let strong_root: StrongRuleNode = node.root.as_ref().unwrap().upgrade();
+            unsafe {
+                strong_root.gc();
+            }
+
+            // Leave the free list null, like we found it, such that additional
+            // drops for straggling rule nodes will take this same codepath.
+            debug_assert_eq!(root.next_free.load(Ordering::Relaxed), FREE_LIST_SENTINEL);
+            root.next_free.store(ptr::null_mut(), Ordering::Relaxed);
+
+            // Return. If strong_root is the last strong reference to the
+            // root, this will re-enter StrongRuleNode::drop, and take the
+            // root-dropping path earlier in this function.
+            return;
+        }
+
+        // If we're already in the free list, don't spin-loop trying to push
+        // ourselves again. Note that this is just a fast path, so it doesn't
+        // need a strong memory ordering.
+        if node.next_free.load(Ordering::Relaxed) != ptr::null_mut() {
+            return;
+        }
+
+        // "Lock" the free list head by swapping it with FREE_LIST_LOCKED.
+        //
+        // Note that we use Acquire/Release semantics for the free list
+        // synchronization, in order to guarantee that the next_free
+        // reads/writes we do below are properly visible from multiple threads
+        // racing.
+        loop {
+            match free_list.compare_exchange_weak(
+                old_head,
+                FREE_LIST_LOCKED,
+                Ordering::Acquire,
+                Ordering::Relaxed,
+            ) {
+                Ok(..) => {
+                    if old_head != FREE_LIST_LOCKED {
+                        break;
+                    }
+                },
+                Err(new) => old_head = new,
+            }
+        }
+
+        // If another thread raced with us and already pushed this same rule
+        // node, just store the old head again; we're done.
+        //
+        // Note that we can use relaxed operations for loading since we're
+        // effectively locking the free list with Acquire/Release semantics,
+        // and the memory ordering is already guaranteed by that
+        // locking/unlocking.
+        if node.next_free.load(Ordering::Relaxed) != ptr::null_mut() {
+            free_list.store(old_head, Ordering::Release);
+            return;
+        }
+
+        // Else store the old head as the next pointer, and store ourselves as
+        // the new head of the free list.
+        //
+        // This can be relaxed since this pointer won't be read until GC.
+        node.next_free.store(old_head, Ordering::Relaxed);
+
+        // Increment the free count. This doesn't need to be an RMW atomic
+        // operation, because the free list is "locked".
+        let old_free_count = root.free_count().load(Ordering::Relaxed);
+        root.free_count()
+            .store(old_free_count + 1, Ordering::Relaxed);
+
+        // This can be release because of the locking of the free list, that
+        // ensures that all the other nodes racing with this one are using
+        // `Acquire`.
+        free_list.store(self.ptr(), Ordering::Release);
+    }
+}
+
+impl<'a> From<&'a StrongRuleNode> for WeakRuleNode {
+    fn from(node: &'a StrongRuleNode) -> Self {
+        WeakRuleNode::from_ptr(node.p)
+    }
+}
+
+impl WeakRuleNode {
+    #[inline]
+    fn ptr(&self) -> *mut RuleNode {
+        self.p.as_ptr()
+    }
+
+    fn upgrade(&self) -> StrongRuleNode {
+        debug!("Upgrading weak node: {:p}", self.ptr());
+
+        let node = unsafe { &*self.ptr() };
+        node.refcount.fetch_add(1, Ordering::Relaxed);
+        StrongRuleNode::from_ptr(self.p)
+    }
+
+    fn from_ptr(p: ptr::NonNull<RuleNode>) -> Self {
+        WeakRuleNode { p }
+    }
+}
diff --git a/components/style/rule_tree/mod.rs b/components/style/rule_tree/mod.rs
index f9f9500144f..5bf66ad563f 100644
--- a/components/style/rule_tree/mod.rs
+++ b/components/style/rule_tree/mod.rs
@@ -2,195 +2,28 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at https://mozilla.org/MPL/2.0/. */
 
-#![allow(unsafe_code)]
-
 //! The rule tree.
 
 use crate::applicable_declarations::ApplicableDeclarationList;
-use crate::hash::FxHashMap;
-use crate::properties::{Importance, LonghandIdSet, PropertyDeclarationBlock};
-use crate::shared_lock::{Locked, SharedRwLockReadGuard, StylesheetGuards};
-use crate::stylesheets::StyleRule;
-use crate::thread_state;
-use malloc_size_of::{MallocShallowSizeOf, MallocSizeOf, MallocSizeOfOps};
-use parking_lot::RwLock;
-use servo_arc::{Arc, ArcBorrow, ArcUnion, ArcUnionBorrow};
+use crate::properties::{LonghandIdSet, PropertyDeclarationBlock};
+use crate::shared_lock::{Locked, StylesheetGuards};
+use servo_arc::{Arc, ArcBorrow};
 use smallvec::SmallVec;
 use std::io::{self, Write};
-use std::mem;
-use std::ptr;
-use std::sync::atomic::{AtomicPtr, AtomicUsize, Ordering};
 
+mod core;
 mod level;
 mod map;
+mod source;
 
+pub use self::core::{RuleTree, StrongRuleNode};
 pub use self::level::{CascadeLevel, ShadowCascadeOrder};
-use self::map::Map;
-
-/// The rule tree, the structure servo uses to preserve the results of selector
-/// matching.
-///
-/// This is organized as a tree of rules. When a node matches a set of rules,
-/// they're inserted in order in the tree, starting with the less specific one.
-///
-/// When a rule is inserted in the tree, other elements may share the path up to
-/// a given rule. If that's the case, we don't duplicate child nodes, but share
-/// them.
-///
-/// When the rule node refcount drops to zero, it doesn't get freed. It gets
-/// instead put into a free list, and it is potentially GC'd after a while in a
-/// single-threaded fashion.
-///
-/// That way, a rule node that represents a likely-to-match-again rule (like a
-/// :hover rule) can be reused if we haven't GC'd it yet.
-///
-/// See the discussion at https://github.com/servo/servo/pull/15562 and the IRC
-/// logs at http://logs.glob.uno/?c=mozilla%23servo&s=3+Apr+2017&e=3+Apr+2017
-/// logs from http://logs.glob.uno/?c=mozilla%23servo&s=3+Apr+2017&e=3+Apr+2017#c644094
-/// to se a discussion about the different memory orderings used here.
-#[derive(Debug)]
-pub struct RuleTree {
-    root: StrongRuleNode,
-}
-
-impl Drop for RuleTree {
-    fn drop(&mut self) {
-        // GC the rule tree.
-        unsafe {
-            self.gc();
-        }
-
-        // After the GC, the free list should be empty.
-        debug_assert_eq!(
-            self.root.get().next_free.load(Ordering::Relaxed),
-            FREE_LIST_SENTINEL
-        );
-
-        // Remove the sentinel. This indicates that GCs will no longer occur.
-        // Any further drops of StrongRuleNodes must occur on the main thread,
-        // and will trigger synchronous dropping of the Rule nodes.
-        self.root
-            .get()
-            .next_free
-            .store(ptr::null_mut(), Ordering::Relaxed);
-    }
-}
-
-impl MallocSizeOf for RuleTree {
-    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
-        let mut n = 0;
-        let mut stack = SmallVec::<[_; 32]>::new();
-        stack.push(self.root.downgrade());
-
-        while let Some(node) = stack.pop() {
-            n += unsafe { ops.malloc_size_of(node.ptr()) };
-            let children = unsafe { (*node.ptr()).children.read() };
-            children.shallow_size_of(ops);
-            for c in &*children {
-                stack.push(c.clone());
-            }
-        }
-
-        n
-    }
-}
-
-#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
-struct ChildKey(CascadeLevel, ptr::NonNull<()>);
-
-unsafe impl Send for ChildKey {}
-unsafe impl Sync for ChildKey {}
-
-/// A style source for the rule node. It can either be a CSS style rule or a
-/// declaration block.
-///
-/// Note that, even though the declaration block from inside the style rule
-/// could be enough to implement the rule tree, keeping the whole rule provides
-/// more debuggability, and also the ability of show those selectors to
-/// devtools.
-#[derive(Clone, Debug)]
-pub struct StyleSource(ArcUnion<Locked<StyleRule>, Locked<PropertyDeclarationBlock>>);
-
-impl PartialEq for StyleSource {
-    fn eq(&self, other: &Self) -> bool {
-        ArcUnion::ptr_eq(&self.0, &other.0)
-    }
-}
-
-impl StyleSource {
-    /// Creates a StyleSource from a StyleRule.
-    pub fn from_rule(rule: Arc<Locked<StyleRule>>) -> Self {
-        StyleSource(ArcUnion::from_first(rule))
-    }
-
-    #[inline]
-    fn key(&self) -> ptr::NonNull<()> {
-        self.0.ptr()
-    }
-
-    /// Creates a StyleSource from a PropertyDeclarationBlock.
-    pub fn from_declarations(decls: Arc<Locked<PropertyDeclarationBlock>>) -> Self {
-        StyleSource(ArcUnion::from_second(decls))
-    }
-
-    fn dump<W: Write>(&self, guard: &SharedRwLockReadGuard, writer: &mut W) {
-        if let Some(ref rule) = self.0.as_first() {
-            let rule = rule.read_with(guard);
-            let _ = write!(writer, "{:?}", rule.selectors);
-        }
-
-        let _ = write!(writer, " -> {:?}", self.read(guard).declarations());
-    }
-
-    /// Read the style source guard, and obtain thus read access to the
-    /// underlying property declaration block.
-    #[inline]
-    pub fn read<'a>(&'a self, guard: &'a SharedRwLockReadGuard) -> &'a PropertyDeclarationBlock {
-        let block: &Locked<PropertyDeclarationBlock> = match self.0.borrow() {
-            ArcUnionBorrow::First(ref rule) => &rule.get().read_with(guard).block,
-            ArcUnionBorrow::Second(ref block) => block.get(),
-        };
-        block.read_with(guard)
-    }
-
-    /// Returns the style rule if applicable, otherwise None.
-    pub fn as_rule(&self) -> Option<ArcBorrow<Locked<StyleRule>>> {
-        self.0.as_first()
-    }
-
-    /// Returns the declaration block if applicable, otherwise None.
-    pub fn as_declarations(&self) -> Option<ArcBorrow<Locked<PropertyDeclarationBlock>>> {
-        self.0.as_second()
-    }
-}
-
-/// This value exists here so a node that pushes itself to the list can know
-/// that is in the free list by looking at is next pointer, and comparing it
-/// with null.
-///
-/// The root node doesn't have a null pointer in the free list, but this value.
-const FREE_LIST_SENTINEL: *mut RuleNode = 0x01 as *mut RuleNode;
-
-/// A second sentinel value for the free list, indicating that it's locked (i.e.
-/// another thread is currently adding an entry). We spin if we find this value.
-const FREE_LIST_LOCKED: *mut RuleNode = 0x02 as *mut RuleNode;
+pub use self::source::StyleSource;
 
 impl RuleTree {
-    /// Construct a new rule tree.
-    pub fn new() -> Self {
-        RuleTree {
-            root: StrongRuleNode::new(Box::new(RuleNode::root())),
-        }
-    }
-
-    /// Get the root rule node.
-    pub fn root(&self) -> &StrongRuleNode {
-        &self.root
-    }
-
     fn dump<W: Write>(&self, guards: &StylesheetGuards, writer: &mut W) {
         let _ = writeln!(writer, " + RuleTree");
-        self.root.get().dump(guards, writer, 0);
+        self.root().dump(guards, writer, 0);
     }
 
     /// Dump the rule tree to stdout.
@@ -214,7 +47,7 @@ impl RuleTree {
         I: Iterator<Item = (StyleSource, CascadeLevel)>,
     {
         use self::CascadeLevel::*;
-        let mut current = self.root.clone();
+        let mut current = self.root().clone();
 
         let mut found_important = false;
 
@@ -262,7 +95,7 @@ impl RuleTree {
                 debug_assert!(transition.is_none());
                 transition = Some(source);
             } else {
-                current = current.ensure_child(self.root.downgrade(), source, level);
+                current = current.ensure_child(self.root().downgrade(), source, level);
             }
         }
 
@@ -298,7 +131,7 @@ impl RuleTree {
 
         for (source, shadow_cascade_order) in important_author.drain(..) {
             current = current.ensure_child(
-                self.root.downgrade(),
+                self.root().downgrade(),
                 source,
                 AuthorImportant {
                     shadow_cascade_order: -shadow_cascade_order,
@@ -307,15 +140,15 @@ impl RuleTree {
         }
 
         for source in important_user.drain(..) {
-            current = current.ensure_child(self.root.downgrade(), source, UserImportant);
+            current = current.ensure_child(self.root().downgrade(), source, UserImportant);
        }
 
         for source in important_ua.drain(..) {
-            current = current.ensure_child(self.root.downgrade(), source, UAImportant);
+            current = current.ensure_child(self.root().downgrade(), source, UAImportant);
         }
 
         if let Some(source) = transition {
-            current = current.ensure_child(self.root.downgrade(), source, Transitions);
+            current = current.ensure_child(self.root().downgrade(), source, Transitions);
         }
 
         current
@@ -340,7 +173,7 @@ impl RuleTree {
     where
         I: Iterator<Item = (StyleSource, CascadeLevel)>,
     {
-        self.insert_ordered_rules_from(self.root.clone(), iter)
+        self.insert_ordered_rules_from(self.root().clone(), iter)
     }
 
     fn insert_ordered_rules_from<'a, I>(&self, from: StrongRuleNode, iter: I) -> StrongRuleNode
@@ -349,72 +182,11 @@ impl RuleTree {
     {
         let mut current = from;
         for (source, level) in iter {
-            current = current.ensure_child(self.root.downgrade(), source, level);
+            current = current.ensure_child(self.root().downgrade(), source, level);
         }
         current
     }
 
-    /// This can only be called when no other threads is accessing this tree.
-    pub unsafe fn gc(&self) {
-        self.root.gc();
-    }
-
-    /// This can only be called when no other threads is accessing this tree.
-    pub unsafe fn maybe_gc(&self) {
-        #[cfg(debug_assertions)]
-        self.maybe_dump_stats();
-
-        self.root.maybe_gc();
-    }
-
-    #[cfg(debug_assertions)]
-    fn maybe_dump_stats(&self) {
-        use itertools::Itertools;
-        use std::cell::Cell;
-        use std::time::{Duration, Instant};
-
-        if !log_enabled!(log::Level::Trace) {
-            return;
-        }
-
-        const RULE_TREE_STATS_INTERVAL: Duration = Duration::from_secs(2);
-
-        thread_local! {
-            pub static LAST_STATS: Cell<Instant> = Cell::new(Instant::now());
-        };
-
-        let should_dump = LAST_STATS.with(|s| {
-            let now = Instant::now();
-            if now.duration_since(s.get()) < RULE_TREE_STATS_INTERVAL {
-                return false;
-            }
-            s.set(now);
-            true
-        });
-
-        if !should_dump {
-            return;
-        }
-
-        let mut children_count = FxHashMap::default();
-
-        let mut stack = SmallVec::<[_; 32]>::new();
-        stack.push(self.root.clone());
-        while let Some(node) = stack.pop() {
-            let children = node.get().children.read();
-            *children_count.entry(children.len()).or_insert(0) += 1;
-            for c in &*children {
-                stack.push(c.upgrade());
-            }
-        }
-
-        trace!("Rule tree stats:");
-        let counts = children_count.keys().sorted();
-        for count in counts {
-            trace!(" {} - {}", count, children_count[count]);
-        }
-    }
-
     /// Replaces a rule in a given level (if present) for another rule.
     ///
     /// Returns the resulting node that represents the new path, or None if
@@ -434,10 +206,10 @@ impl RuleTree {
 
         // First walk up until the first less-or-equally specific rule.
         let mut children = SmallVec::<[_; 10]>::new();
-        while current.get().level > level {
+        while current.cascade_level() > level {
             children.push((
-                current.get().source.as_ref().unwrap().clone(),
-                current.get().level,
+                current.style_source().unwrap().clone(),
+                current.cascade_level(),
             ));
             current = current.parent().unwrap().clone();
         }
@@ -452,10 +224,10 @@ impl RuleTree {
         // to special-case (isn't hard, it's just about removing the `if` and
         // special cases, and replacing them for a `while` loop, avoiding the
         // optimizations).
-        if current.get().level == level {
+        if current.cascade_level() == level {
             *important_rules_changed |= level.is_important();
 
-            let current_decls = current.get().source.as_ref().unwrap().as_declarations();
+            let current_decls = current.style_source().unwrap().as_declarations();
 
             // If the only rule at the level we're replacing is exactly the
             // same as `pdb`, we're done, and `path` is still valid.
@@ -490,7 +262,7 @@ impl RuleTree {
         if level.is_important() {
             if pdb.read_with(level.guard(guards)).any_important() {
                 current = current.ensure_child(
-                    self.root.downgrade(),
+                    self.root().downgrade(),
                     StyleSource::from_declarations(pdb.clone_arc()),
                     level,
                 );
@@ -499,7 +271,7 @@ impl RuleTree {
         } else {
             if pdb.read_with(level.guard(guards)).any_normal() {
                 current = current.ensure_child(
-                    self.root.downgrade(),
+                    self.root().downgrade(),
                     StyleSource::from_declarations(pdb.clone_arc()),
                     level,
                 );
@@ -537,10 +309,7 @@ impl RuleTree {
         let mut children = SmallVec::<[_; 10]>::new();
         for node in iter {
             if !node.cascade_level().is_animation() {
-                children.push((
-                    node.get().source.as_ref().unwrap().clone(),
-                    node.cascade_level(),
-                ));
+                children.push((node.style_source().unwrap().clone(), node.cascade_level()));
             }
             last = node;
         }
@@ -573,320 +342,7 @@ impl RuleTree {
     }
 }
 
-/// The number of RuleNodes added to the free list before we will consider
-/// doing a GC when calling maybe_gc(). (The value is copied from Gecko,
-/// where it likely did not result from a rigorous performance analysis.)
-const RULE_TREE_GC_INTERVAL: usize = 300;
-
-/// A node in the rule tree.
-pub struct RuleNode {
-    /// The root node. Only the root has no root pointer, for obvious reasons.
-    root: Option<WeakRuleNode>,
-
-    /// The parent rule node. Only the root has no parent.
-    parent: Option<StrongRuleNode>,
-
-    /// The actual style source, either coming from a selector in a StyleRule,
-    /// or a raw property declaration block (like the style attribute).
-    ///
-    /// None for the root node.
-    source: Option<StyleSource>,
-
-    /// The cascade level this rule is positioned at.
-    level: CascadeLevel,
-
-    refcount: AtomicUsize,
-
-    /// Only used for the root, stores the number of free rule nodes that are
-    /// around.
-    free_count: AtomicUsize,
-
-    /// The children of a given rule node. Children remove themselves from here
-    /// when they go away.
-    children: RwLock<Map<ChildKey, WeakRuleNode>>,
-
-    /// The next item in the rule tree free list, that starts on the root node.
-    ///
-    /// When this is set to null, that means that the rule tree has been torn
-    /// down, and GCs will no longer occur. When this happens, StrongRuleNodes
-    /// may only be dropped on the main thread, and teardown happens
-    /// synchronously.
-    next_free: AtomicPtr<RuleNode>,
-}
-
-unsafe impl Sync for RuleTree {}
-unsafe impl Send for RuleTree {}
-
-// On Gecko builds, hook into the leak checking machinery.
-#[cfg(feature = "gecko_refcount_logging")]
-mod gecko_leak_checking {
-    use super::RuleNode;
-    use std::mem::size_of;
-    use std::os::raw::{c_char, c_void};
-
-    extern "C" {
-        fn NS_LogCtor(aPtr: *mut c_void, aTypeName: *const c_char, aSize: u32);
-        fn NS_LogDtor(aPtr: *mut c_void, aTypeName: *const c_char, aSize: u32);
-    }
-
-    static NAME: &'static [u8] = b"RuleNode\0";
-
-    /// Logs the creation of a heap-allocated object to Gecko's leak-checking machinery.
-    pub fn log_ctor(ptr: *const RuleNode) {
-        let s = NAME as *const [u8] as *const u8 as *const c_char;
-        unsafe {
-            NS_LogCtor(ptr as *mut c_void, s, size_of::<RuleNode>() as u32);
-        }
-    }
-
-    /// Logs the destruction of a heap-allocated object to Gecko's leak-checking machinery.
-    pub fn log_dtor(ptr: *const RuleNode) {
-        let s = NAME as *const [u8] as *const u8 as *const c_char;
-        unsafe {
-            NS_LogDtor(ptr as *mut c_void, s, size_of::<RuleNode>() as u32);
-        }
-    }
-}
-
-#[inline(always)]
-fn log_new(_ptr: *const RuleNode) {
-    #[cfg(feature = "gecko_refcount_logging")]
-    gecko_leak_checking::log_ctor(_ptr);
-}
-
-#[inline(always)]
-fn log_drop(_ptr: *const RuleNode) {
-    #[cfg(feature = "gecko_refcount_logging")]
-    gecko_leak_checking::log_dtor(_ptr);
-}
-
-impl RuleNode {
-    fn new(
-        root: WeakRuleNode,
-        parent: StrongRuleNode,
-        source: StyleSource,
-        level: CascadeLevel,
-    ) -> Self {
-        debug_assert!(root.upgrade().parent().is_none());
-        RuleNode {
-            root: Some(root),
-            parent: Some(parent),
-            source: Some(source),
-            level: level,
-            refcount: AtomicUsize::new(1),
-            children: Default::default(),
-            free_count: AtomicUsize::new(0),
-            next_free: AtomicPtr::new(ptr::null_mut()),
-        }
-    }
-
-    fn root() -> Self {
-        RuleNode {
-            root: None,
-            parent: None,
-            source: None,
-            level: CascadeLevel::UANormal,
-            refcount: AtomicUsize::new(1),
-            free_count: AtomicUsize::new(0),
-            children: Default::default(),
-            next_free: AtomicPtr::new(FREE_LIST_SENTINEL),
-        }
-    }
-
-    fn key(&self) -> ChildKey {
-        ChildKey(
-            self.level,
-            self.source
-                .as_ref()
-                .expect("Called key() on the root node")
-                .key(),
-        )
-    }
-
-    fn is_root(&self) -> bool {
-        self.parent.is_none()
-    }
-
-    fn free_count(&self) -> &AtomicUsize {
-        debug_assert!(self.is_root());
-        &self.free_count
-    }
-
-    /// Remove this rule node from the child list.
-    ///
-    /// This is expected to be called before freeing the node from the free
-    /// list, on the main thread.
-    unsafe fn remove_from_child_list(&self) {
-        debug!(
-            "Remove from child list: {:?}, parent: {:?}",
-            self as *const RuleNode,
-            self.parent.as_ref().map(|p| p.ptr())
-        );
-
-        if let Some(parent) = self.parent.as_ref() {
-            let weak = parent
-                .get()
-                .children
-                .write()
-                .remove(&self.key(), |node| (*node.ptr()).key());
-            assert_eq!(weak.unwrap().ptr() as *const _, self as *const _);
-        }
-    }
-
-    fn dump<W: Write>(&self, guards: &StylesheetGuards, writer: &mut W, indent: usize) {
-        const INDENT_INCREMENT: usize = 4;
-
-        for _ in 0..indent {
-            let _ = write!(writer, " ");
-        }
-
-        let _ = writeln!(
-            writer,
-            " - {:?} (ref: {:?}, parent: {:?})",
-            self as *const _,
-            self.refcount.load(Ordering::Relaxed),
-            self.parent.as_ref().map(|p| p.ptr())
-        );
-
-        for _ in 0..indent {
-            let _ = write!(writer, " ");
-        }
-
-        if self.source.is_some() {
-            self.source
-                .as_ref()
-                .unwrap()
-                .dump(self.level.guard(guards), writer);
-        } else {
-            if indent != 0 {
-                warn!("How has this happened?");
-            }
-            let _ = write!(writer, "(root)");
-        }
-
-        let _ = write!(writer, "\n");
-        for child in &*self.children.read() {
-            child
-                .upgrade()
-                .get()
-                .dump(guards, writer, indent + INDENT_INCREMENT);
-        }
-    }
-}
-
-#[derive(Clone)]
-struct WeakRuleNode {
-    p: ptr::NonNull<RuleNode>,
-}
-
-/// A strong reference to a rule node.
-#[derive(Debug, Eq, Hash, PartialEq)]
-pub struct StrongRuleNode {
-    p: ptr::NonNull<RuleNode>,
-}
-
-unsafe impl Send for StrongRuleNode {}
-unsafe impl Sync for StrongRuleNode {}
-
-#[cfg(feature = "servo")]
-malloc_size_of_is_0!(StrongRuleNode);
-
 impl StrongRuleNode {
-    fn new(n: Box<RuleNode>) -> Self {
-        debug_assert_eq!(n.parent.is_none(), !n.source.is_some());
-
-        // TODO(emilio): Use into_raw_non_null when it's stable.
-        let ptr = unsafe { ptr::NonNull::new_unchecked(Box::into_raw(n)) };
-        log_new(ptr.as_ptr());
-
-        debug!("Creating rule node: {:p}", ptr);
-
-        StrongRuleNode::from_ptr(ptr)
-    }
-
-    fn from_ptr(p: ptr::NonNull<RuleNode>) -> Self {
-        StrongRuleNode { p }
-    }
-
-    fn downgrade(&self) -> WeakRuleNode {
-        WeakRuleNode::from_ptr(self.p)
-    }
-
-    /// Get the parent rule node of this rule node.
-    pub fn parent(&self) -> Option<&StrongRuleNode> {
-        self.get().parent.as_ref()
-    }
-
-    fn ensure_child(
-        &self,
-        root: WeakRuleNode,
-        source: StyleSource,
-        level: CascadeLevel,
-    ) -> StrongRuleNode {
-        use parking_lot::RwLockUpgradableReadGuard;
-
-        debug_assert!(
-            self.get().level <= level,
-            "Should be ordered (instead {:?} > {:?}), from {:?} and {:?}",
-            self.get().level,
-            level,
-            self.get().source,
-            source,
-        );
-
-        let key = ChildKey(level, source.key());
-
-        let children = self.get().children.upgradable_read();
-        if let Some(child) = children.get(&key, |node| unsafe { (*node.ptr()).key() }) {
-            return child.upgrade();
-        }
-        let mut children = RwLockUpgradableReadGuard::upgrade(children);
-        let weak = children.get_or_insert_with(
-            key,
-            |node| unsafe { (*node.ptr()).key() },
-            move || {
-                let strong =
-                    StrongRuleNode::new(Box::new(RuleNode::new(root, self.clone(), source, level)));
-                let weak = strong.downgrade();
-                mem::forget(strong);
-                weak
-            },
-        );
-
-        StrongRuleNode::from_ptr(weak.p)
-    }
-
-    /// Raw pointer to the RuleNode
-    #[inline]
-    pub fn ptr(&self) -> *mut RuleNode {
-        self.p.as_ptr()
-    }
-
-    fn get(&self) -> &RuleNode {
-        if cfg!(debug_assertions) {
-            let node = unsafe { &*self.p.as_ptr() };
-            assert!(node.refcount.load(Ordering::Relaxed) > 0);
-        }
-        unsafe { &*self.p.as_ptr() }
-    }
-
-    /// Get the style source corresponding to this rule node. May return `None`
-    /// if it's the root node, which means that the node hasn't matched any
-    /// rules.
-    pub fn style_source(&self) -> Option<&StyleSource> {
-        self.get().source.as_ref()
-    }
-
-    /// The cascade level for this node
-    pub fn cascade_level(&self) -> CascadeLevel {
-        self.get().level
-    }
-
-    /// Get the importance that this rule node represents.
-    pub fn importance(&self) -> Importance {
-        self.get().level.importance()
-    }
-
     /// Get an iterator for this rule node and its ancestors.
     pub fn self_and_ancestors(&self) -> SelfAndAncestors {
         SelfAndAncestors {
@@ -894,117 +350,196 @@ impl StrongRuleNode {
         }
     }
 
-    /// Returns whether this node has any child, only intended for testing
-    /// purposes, and called on a single-threaded fashion only.
-    pub unsafe fn has_children_for_testing(&self) -> bool {
-        !self.get().children.read().is_empty()
-    }
+    /// Returns true if any of the properties specified by `rule_type_mask`
+    /// were set by an author rule.
+    #[cfg(feature = "gecko")]
+    pub fn has_author_specified_rules<E>(
+        &self,
+        mut element: E,
+        mut pseudo: Option<PseudoElement>,
+        guards: &StylesheetGuards,
+        rule_type_mask: u32,
+        author_colors_allowed: bool,
+    ) -> bool
+    where
+        E: crate::dom::TElement,
+    {
+        use crate::gecko_bindings::structs::NS_AUTHOR_SPECIFIED_BACKGROUND;
+        use crate::gecko_bindings::structs::NS_AUTHOR_SPECIFIED_BORDER;
+        use crate::gecko_bindings::structs::NS_AUTHOR_SPECIFIED_PADDING;
+        use crate::properties::{CSSWideKeyword, LonghandId};
+        use crate::properties::{PropertyDeclaration, PropertyDeclarationId};
+        use std::borrow::Cow;
 
-    unsafe fn pop_from_free_list(&self) -> Option<WeakRuleNode> {
-        // NB: This can run from the root node destructor, so we can't use
-        // `get()`, since it asserts the refcount is bigger than zero.
-        let me = &*self.p.as_ptr();
+        // Reset properties:
+        const BACKGROUND_PROPS: &'static [LonghandId] =
+            &[LonghandId::BackgroundColor, LonghandId::BackgroundImage];
 
-        debug_assert!(me.is_root());
+        const BORDER_PROPS: &'static [LonghandId] = &[
+            LonghandId::BorderTopColor,
+            LonghandId::BorderTopStyle,
+            LonghandId::BorderTopWidth,
+            LonghandId::BorderRightColor,
+            LonghandId::BorderRightStyle,
+            LonghandId::BorderRightWidth,
+            LonghandId::BorderBottomColor,
+            LonghandId::BorderBottomStyle,
+            LonghandId::BorderBottomWidth,
+            LonghandId::BorderLeftColor,
+            LonghandId::BorderLeftStyle,
+            LonghandId::BorderLeftWidth,
+            LonghandId::BorderTopLeftRadius,
+            LonghandId::BorderTopRightRadius,
+            LonghandId::BorderBottomRightRadius,
+            LonghandId::BorderBottomLeftRadius,
+            LonghandId::BorderInlineStartColor,
+            LonghandId::BorderInlineStartStyle,
+            LonghandId::BorderInlineStartWidth,
+            LonghandId::BorderInlineEndColor,
+            LonghandId::BorderInlineEndStyle,
+            LonghandId::BorderInlineEndWidth,
+            LonghandId::BorderBlockStartColor,
+            LonghandId::BorderBlockStartStyle,
+            LonghandId::BorderBlockStartWidth,
+            LonghandId::BorderBlockEndColor,
+            LonghandId::BorderBlockEndStyle,
+            LonghandId::BorderBlockEndWidth,
+        ];
 
-        // FIXME(#14213): Apparently the layout data can be gone from script.
-        //
-        // That's... suspicious, but it's fine if it happens for the rule tree
-        // case, so just don't crash in the case we're doing the final GC in
-        // script.
+ const PADDING_PROPS: &'static [LonghandId] = &[ + LonghandId::PaddingTop, + LonghandId::PaddingRight, + LonghandId::PaddingBottom, + LonghandId::PaddingLeft, + LonghandId::PaddingInlineStart, + LonghandId::PaddingInlineEnd, + LonghandId::PaddingBlockStart, + LonghandId::PaddingBlockEnd, + ]; - debug_assert!( - !thread_state::get().is_worker() && - (thread_state::get().is_layout() || thread_state::get().is_script()) - ); + // Set of properties that we are currently interested in. + let mut properties = LonghandIdSet::new(); - let current = me.next_free.load(Ordering::Relaxed); - if current == FREE_LIST_SENTINEL { - return None; + if rule_type_mask & NS_AUTHOR_SPECIFIED_BACKGROUND != 0 { + for id in BACKGROUND_PROPS { + properties.insert(*id); + } + } + if rule_type_mask & NS_AUTHOR_SPECIFIED_BORDER != 0 { + for id in BORDER_PROPS { + properties.insert(*id); + } + } + if rule_type_mask & NS_AUTHOR_SPECIFIED_PADDING != 0 { + for id in PADDING_PROPS { + properties.insert(*id); + } } - debug_assert!( - !current.is_null(), - "Multiple threads are operating on the free list at the \ - same time?" - ); - debug_assert!( - current != self.p.as_ptr(), - "How did the root end up in the free list?" - ); - - let next = (*current) - .next_free - .swap(ptr::null_mut(), Ordering::Relaxed); - - debug_assert!( - !next.is_null(), - "How did a null pointer end up in the free list?" - ); - - me.next_free.store(next, Ordering::Relaxed); - - debug!( - "Popping from free list: cur: {:?}, next: {:?}", - current, next - ); - - Some(WeakRuleNode::from_ptr(ptr::NonNull::new_unchecked(current))) - } - - unsafe fn assert_free_list_has_no_duplicates_or_null(&self) { - assert!(cfg!(debug_assertions), "This is an expensive check!"); - use crate::hash::FxHashSet; - - let me = &*self.p.as_ptr(); - assert!(me.is_root()); - - let mut current = self.p.as_ptr(); - let mut seen = FxHashSet::default(); - while current != FREE_LIST_SENTINEL { - let next = (*current).next_free.load(Ordering::Relaxed); - assert!(!next.is_null()); - assert!(!seen.contains(&next)); - seen.insert(next); - - current = next; - } - } - - unsafe fn gc(&self) { - if cfg!(debug_assertions) { - self.assert_free_list_has_no_duplicates_or_null(); + // If author colors are not allowed, don't look at those properties + // (except for background-color which is special and we handle below). + if !author_colors_allowed { + properties.remove_all(LonghandIdSet::ignored_when_colors_disabled()); + if rule_type_mask & NS_AUTHOR_SPECIFIED_BACKGROUND != 0 { + properties.insert(LonghandId::BackgroundColor); + } } - // NB: This can run from the root node destructor, so we can't use - // `get()`, since it asserts the refcount is bigger than zero. - let me = &*self.p.as_ptr(); + let mut element_rule_node = Cow::Borrowed(self); - debug_assert!(me.is_root(), "Can't call GC on a non-root node!"); + loop { + // We need to be careful not to count styles covered up by + // user-important or UA-important declarations. But we do want to + // catch explicit inherit styling in those and check our parent + // element to see whether we have user styling for those properties. + // Note that we don't care here about inheritance due to lack of a + // specified value, since all the properties we care about are reset + // properties. - while let Some(weak) = self.pop_from_free_list() { - let node = &*weak.p.as_ptr(); - if node.refcount.load(Ordering::Relaxed) != 0 { - // Nothing to do, the node is still alive. 
- continue; + let mut inherited_properties = LonghandIdSet::new(); + let mut have_explicit_ua_inherit = false; + + for node in element_rule_node.self_and_ancestors() { + let source = node.style_source(); + let declarations = if source.is_some() { + source + .as_ref() + .unwrap() + .read(node.cascade_level().guard(guards)) + .declaration_importance_iter() + } else { + continue; + }; + + // Iterate over declarations of the longhands we care about. + let node_importance = node.importance(); + let longhands = declarations.rev().filter_map(|(declaration, importance)| { + if importance != node_importance { + return None; + } + match declaration.id() { + PropertyDeclarationId::Longhand(id) => Some((id, declaration)), + _ => None, + } + }); + + let is_author = node.cascade_level().origin() == Origin::Author; + for (id, declaration) in longhands { + if !properties.contains(id) { + continue; + } + + if is_author { + if !author_colors_allowed { + if let PropertyDeclaration::BackgroundColor(ref color) = *declaration { + if color.is_transparent() { + return true; + } + continue; + } + } + return true; + } + + // This property was set by a non-author rule. + // Stop looking for it in this element's rule + // nodes. + properties.remove(id); + + // However, if it is inherited, then it might be + // inherited from an author rule from an + // ancestor element's rule nodes. + if declaration.get_css_wide_keyword() == Some(CSSWideKeyword::Inherit) { + have_explicit_ua_inherit = true; + inherited_properties.insert(id); + } + } } - debug!("GC'ing {:?}", weak.p.as_ptr()); - node.remove_from_child_list(); - log_drop(weak.p.as_ptr()); - let _ = Box::from_raw(weak.p.as_ptr()); + if !have_explicit_ua_inherit { + break; + } + + // Continue to the parent element and search for the inherited properties. + if let Some(pseudo) = pseudo.take() { + if pseudo.inherits_from_default_values() { + break; + } + } else { + element = match element.inheritance_parent() { + Some(parent) => parent, + None => break, + }; + + let parent_data = element.mutate_data().unwrap(); + let parent_rule_node = parent_data.styles.primary().rules().clone(); + element_rule_node = Cow::Owned(parent_rule_node); + } + + properties = inherited_properties; } - me.free_count().store(0, Ordering::Relaxed); - - debug_assert_eq!(me.next_free.load(Ordering::Relaxed), FREE_LIST_SENTINEL); - } - - unsafe fn maybe_gc(&self) { - debug_assert!(self.get().is_root(), "Can't call GC on a non-root node!"); - if self.get().free_count().load(Ordering::Relaxed) > RULE_TREE_GC_INTERVAL { - self.gc(); - } + false } /// Returns true if there is either animation or transition level rule. 
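Before the next hunk, which removes the Clone/Drop implementations from mod.rs (they now live in core.rs above), it may help to see the free-list "locking" idea in isolation: the list head doubles as a lock by being swapped with a sentinel. The following is a minimal, self-contained sketch of that protocol, not Servo's actual types; the names Node, LOCKED, and push are illustrative only.

    use std::sync::atomic::{AtomicPtr, Ordering};

    struct Node {
        next_free: AtomicPtr<Node>,
    }

    // Stand-in for FREE_LIST_LOCKED above.
    const LOCKED: *mut Node = 0x02 as *mut Node;

    /// Push `node` onto the intrusive free list rooted at `head`.
    ///
    /// Safety: `node` must stay alive for as long as it is linked in.
    unsafe fn push(head: &AtomicPtr<Node>, node: *mut Node) {
        let mut old_head = head.load(Ordering::Relaxed);
        // "Lock" the list by swapping the head for the LOCKED sentinel;
        // spin while another thread holds the sentinel.
        loop {
            match head.compare_exchange_weak(
                old_head,
                LOCKED,
                Ordering::Acquire,
                Ordering::Relaxed,
            ) {
                Ok(_) if old_head != LOCKED => break,
                Ok(_) => {}, // We swapped LOCKED for LOCKED; keep spinning.
                Err(current) => old_head = current,
            }
        }
        // The list is locked: link ourselves in front of the previous head,
        // then unlock by publishing ourselves as the new head.
        (*node).next_free.store(old_head, Ordering::Relaxed);
        head.store(node, Ordering::Release);
    }

As in StrongRuleNode::drop, the Acquire on the successful exchange pairs with the Release on the final store, so the relaxed writes to next_free made while the list is "locked" are visible to whichever thread locks it next.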
@@ -1077,181 +612,3 @@ impl<'a> Iterator for SelfAndAncestors<'a> { }) } } - -impl Clone for StrongRuleNode { - fn clone(&self) -> Self { - debug!( - "{:?}: {:?}+", - self.ptr(), - self.get().refcount.load(Ordering::Relaxed) - ); - debug_assert!(self.get().refcount.load(Ordering::Relaxed) > 0); - self.get().refcount.fetch_add(1, Ordering::Relaxed); - StrongRuleNode::from_ptr(self.p) - } -} - -impl Drop for StrongRuleNode { - #[cfg_attr(feature = "servo", allow(unused_mut))] - fn drop(&mut self) { - let node = unsafe { &*self.ptr() }; - - debug!( - "{:?}: {:?}-", - self.ptr(), - node.refcount.load(Ordering::Relaxed) - ); - debug!( - "Dropping node: {:?}, root: {:?}, parent: {:?}", - self.ptr(), - node.root.as_ref().map(|r| r.ptr()), - node.parent.as_ref().map(|p| p.ptr()) - ); - let should_drop = { - debug_assert!(node.refcount.load(Ordering::Relaxed) > 0); - node.refcount.fetch_sub(1, Ordering::Relaxed) == 1 - }; - - if !should_drop { - return; - } - - if node.parent.is_none() { - debug!("Dropping root node!"); - // The free list should be null by this point - debug_assert!(node.next_free.load(Ordering::Relaxed).is_null()); - log_drop(self.ptr()); - let _ = unsafe { Box::from_raw(self.ptr()) }; - return; - } - - let root = unsafe { &*node.root.as_ref().unwrap().ptr() }; - let free_list = &root.next_free; - let mut old_head = free_list.load(Ordering::Relaxed); - - // If the free list is null, that means that the rule tree has been - // formally torn down, and the last standard GC has already occurred. - // We require that any callers using the rule tree at this point are - // on the main thread only, which lets us trigger a synchronous GC - // here to avoid leaking anything. We use the GC machinery, rather - // than just dropping directly, so that we benefit from the iterative - // destruction and don't trigger unbounded recursion during drop. See - // [1] and the associated crashtest. - // - // [1] https://bugzilla.mozilla.org/show_bug.cgi?id=439184 - if old_head.is_null() { - debug_assert!( - !thread_state::get().is_worker() && - (thread_state::get().is_layout() || thread_state::get().is_script()) - ); - // Add the node as the sole entry in the free list. - debug_assert!(node.next_free.load(Ordering::Relaxed).is_null()); - node.next_free.store(FREE_LIST_SENTINEL, Ordering::Relaxed); - free_list.store(node as *const _ as *mut _, Ordering::Relaxed); - - // Invoke the GC. - // - // Note that we need hold a strong reference to the root so that it - // doesn't go away during the GC (which would happen if we're freeing - // the last external reference into the rule tree). This is nicely - // enforced by having the gc() method live on StrongRuleNode rather than - // RuleNode. - let strong_root: StrongRuleNode = node.root.as_ref().unwrap().upgrade(); - unsafe { - strong_root.gc(); - } - - // Leave the free list null, like we found it, such that additional - // drops for straggling rule nodes will take this same codepath. - debug_assert_eq!(root.next_free.load(Ordering::Relaxed), FREE_LIST_SENTINEL); - root.next_free.store(ptr::null_mut(), Ordering::Relaxed); - - // Return. If strong_root is the last strong reference to the root, - // this re-enter StrongRuleNode::drop, and take the root-dropping - // path earlier in this function. - return; - } - - // We're sure we're already in the free list, don't spinloop if we're. - // Note that this is just a fast path, so it doesn't need to have an - // strong memory ordering. 
-        if node.next_free.load(Ordering::Relaxed) != ptr::null_mut() {
-            return;
-        }
-
-        // Ensure we "lock" the free list head swapping it with FREE_LIST_LOCKED.
-        //
-        // Note that we use Acquire/Release semantics for the free list
-        // synchronization, in order to guarantee that the next_free
-        // reads/writes we do below are properly visible from multiple threads
-        // racing.
-        loop {
-            match free_list.compare_exchange_weak(
-                old_head,
-                FREE_LIST_LOCKED,
-                Ordering::Acquire,
-                Ordering::Relaxed,
-            ) {
-                Ok(..) => {
-                    if old_head != FREE_LIST_LOCKED {
-                        break;
-                    }
-                },
-                Err(new) => old_head = new,
-            }
-        }
-
-        // If other thread has raced with use while using the same rule node,
-        // just store the old head again, we're done.
-        //
-        // Note that we can use relaxed operations for loading since we're
-        // effectively locking the free list with Acquire/Release semantics, and
-        // the memory ordering is already guaranteed by that locking/unlocking.
-        if node.next_free.load(Ordering::Relaxed) != ptr::null_mut() {
-            free_list.store(old_head, Ordering::Release);
-            return;
-        }
-
-        // Else store the old head as the next pointer, and store ourselves as
-        // the new head of the free list.
-        //
-        // This can be relaxed since this pointer won't be read until GC.
-        node.next_free.store(old_head, Ordering::Relaxed);
-
-        // Increment the free count. This doesn't need to be an RMU atomic
-        // operation, because the free list is "locked".
-        let old_free_count = root.free_count().load(Ordering::Relaxed);
-        root.free_count()
-            .store(old_free_count + 1, Ordering::Relaxed);
-
-        // This can be release because of the locking of the free list, that
-        // ensures that all the other nodes racing with this one are using
-        // `Acquire`.
-        free_list.store(self.ptr(), Ordering::Release);
-    }
-}
-
-impl<'a> From<&'a StrongRuleNode> for WeakRuleNode {
-    fn from(node: &'a StrongRuleNode) -> Self {
-        WeakRuleNode::from_ptr(node.p)
-    }
-}
-
-impl WeakRuleNode {
-    #[inline]
-    fn ptr(&self) -> *mut RuleNode {
-        self.p.as_ptr()
-    }
-
-    fn upgrade(&self) -> StrongRuleNode {
-        debug!("Upgrading weak node: {:p}", self.ptr());
-
-        let node = unsafe { &*self.ptr() };
-        node.refcount.fetch_add(1, Ordering::Relaxed);
-        StrongRuleNode::from_ptr(self.p)
-    }
-
-    fn from_ptr(p: ptr::NonNull<RuleNode>) -> Self {
-        WeakRuleNode { p }
-    }
-}
diff --git a/components/style/rule_tree/source.rs b/components/style/rule_tree/source.rs
new file mode 100644
index 00000000000..6298ad649ab
--- /dev/null
+++ b/components/style/rule_tree/source.rs
@@ -0,0 +1,101 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at https://mozilla.org/MPL/2.0/. */
+
+// FIXME: Make that `forbid` once the two unsafe gecko-specific methods are
+// removed.
+#![deny(unsafe_code)]
+
+use crate::properties::PropertyDeclarationBlock;
+use crate::shared_lock::{Locked, SharedRwLockReadGuard};
+use crate::stylesheets::StyleRule;
+use servo_arc::{Arc, ArcBorrow, ArcUnion, ArcUnionBorrow};
+use std::io::Write;
+use std::ptr;
+
+/// A style source for the rule node. It can either be a CSS style rule or a
+/// declaration block.
+///
+/// Note that, even though the declaration block from inside the style rule
+/// could be enough to implement the rule tree, keeping the whole rule provides
+/// more debuggability, and also the ability to show those selectors to
+/// devtools.
+#[derive(Clone, Debug)]
+pub struct StyleSource(ArcUnion<Locked<StyleRule>, Locked<PropertyDeclarationBlock>>);
+
+impl PartialEq for StyleSource {
+    fn eq(&self, other: &Self) -> bool {
+        ArcUnion::ptr_eq(&self.0, &other.0)
+    }
+}
+
+impl StyleSource {
+    /// Creates a StyleSource from a StyleRule.
+    pub fn from_rule(rule: Arc<Locked<StyleRule>>) -> Self {
+        StyleSource(ArcUnion::from_first(rule))
+    }
+
+    #[inline]
+    pub(super) fn key(&self) -> ptr::NonNull<()> {
+        self.0.ptr()
+    }
+
+    /// Creates a StyleSource from a PropertyDeclarationBlock.
+    pub fn from_declarations(decls: Arc<Locked<PropertyDeclarationBlock>>) -> Self {
+        StyleSource(ArcUnion::from_second(decls))
+    }
+
+    pub(super) fn dump<W: Write>(&self, guard: &SharedRwLockReadGuard, writer: &mut W) {
+        if let Some(ref rule) = self.0.as_first() {
+            let rule = rule.read_with(guard);
+            let _ = write!(writer, "{:?}", rule.selectors);
+        }
+
+        let _ = write!(writer, " -> {:?}", self.read(guard).declarations());
+    }
+
+    // This is totally unsafe, should be removed when we figure out the cause
+    // of bug 1607553.
+    #[allow(unsafe_code)]
+    #[cfg(feature = "gecko")]
+    pub(super) unsafe fn dump_unchecked<W: Write>(&self, writer: &mut W) {
+        if let Some(ref rule) = self.0.as_first() {
+            let rule = rule.read_unchecked();
+            let _ = write!(writer, "{:?}", rule.selectors);
+        }
+        let _ = write!(writer, " -> {:?}", self.read_unchecked().declarations());
+    }
+
+    // This is totally unsafe, should be removed when we figure out the cause
+    // of bug 1607553.
+    #[inline]
+    #[allow(unsafe_code)]
+    #[cfg(feature = "gecko")]
+    pub(super) unsafe fn read_unchecked(&self) -> &PropertyDeclarationBlock {
+        let block: &Locked<PropertyDeclarationBlock> = match self.0.borrow() {
+            ArcUnionBorrow::First(ref rule) => &rule.get().read_unchecked().block,
+            ArcUnionBorrow::Second(ref block) => block.get(),
+        };
+        block.read_unchecked()
+    }
+
+    /// Read the style source guard, and thus obtain read access to the
+    /// underlying property declaration block.
+    #[inline]
+    pub fn read<'a>(&'a self, guard: &'a SharedRwLockReadGuard) -> &'a PropertyDeclarationBlock {
+        let block: &Locked<PropertyDeclarationBlock> = match self.0.borrow() {
+            ArcUnionBorrow::First(ref rule) => &rule.get().read_with(guard).block,
+            ArcUnionBorrow::Second(ref block) => block.get(),
+        };
+        block.read_with(guard)
+    }
+
+    /// Returns the style rule if applicable, otherwise None.
+    pub fn as_rule(&self) -> Option<ArcBorrow<Locked<StyleRule>>> {
+        self.0.as_first()
+    }
+
+    /// Returns the declaration block if applicable, otherwise None.
+    pub fn as_declarations(&self) -> Option<ArcBorrow<Locked<PropertyDeclarationBlock>>> {
+        self.0.as_second()
+    }
+}
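For readers unfamiliar with ArcUnion, here is a rough stand-in for what StyleSource stores: a two-variant union of reference-counted pointers whose identity (key) is the address of the pointed-to allocation. Servo's ArcUnion packs the discriminant into the low bit of a single pointer; this sketch uses a plain enum instead, and the type names are simplified placeholders rather than the real Locked<...> types.

    use std::sync::Arc;

    struct StyleRule; // stand-in for Locked<StyleRule>
    struct PropertyDeclarationBlock; // stand-in for Locked<PropertyDeclarationBlock>

    enum Source {
        Rule(Arc<StyleRule>),
        Declarations(Arc<PropertyDeclarationBlock>),
    }

    impl Source {
        // Like StyleSource::key(): identity is the allocation's address, so
        // two sources collide in the rule tree's child map only when they
        // share the same underlying Arc.
        fn key(&self) -> *const () {
            match self {
                Source::Rule(r) => Arc::as_ptr(r) as *const (),
                Source::Declarations(d) => Arc::as_ptr(d) as *const (),
            }
        }
    }

    fn main() {
        let rule = Arc::new(StyleRule);
        let a = Source::Rule(rule.clone());
        let b = Source::Rule(rule);
        assert_eq!(a.key(), b.key()); // same allocation => same child key
    }

This is why ChildKey in core.rs can be a (CascadeLevel, pointer) pair: two nodes at the same cascade level are considered the same child exactly when their sources share an allocation.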