From 13db0c158452dc4a1f865dbfa54c96a08dbd1530 Mon Sep 17 00:00:00 2001
From: Anthony Ramine
Date: Wed, 15 Apr 2020 14:02:07 +0200
Subject: [PATCH 1/9] Refactor rule tree children

We move the data structure to its own module for better encapsulation of
unsafe code.
---
 components/style/rule_tree/map.rs | 168 ++++++++++++++++++++++++
 components/style/rule_tree/mod.rs | 205 +++++------------------------
 2 files changed, 201 insertions(+), 172 deletions(-)
 create mode 100644 components/style/rule_tree/map.rs

diff --git a/components/style/rule_tree/map.rs b/components/style/rule_tree/map.rs
new file mode 100644
index 00000000000..11534d95f7a
--- /dev/null
+++ b/components/style/rule_tree/map.rs
@@ -0,0 +1,168 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at https://mozilla.org/MPL/2.0/. */
+
+#![forbid(unsafe_code)]
+
+use fxhash::FxHashMap;
+use malloc_size_of::{MallocShallowSizeOf, MallocSizeOfOps};
+use std::hash::Hash;
+use std::mem;
+
+pub(super) struct Map<K, V> {
+    inner: MapInner<K, V>,
+}
+
+enum MapInner<K, V> {
+    Empty,
+    One(V),
+    Map(Box<FxHashMap<K, V>>),
+}
+
+pub(super) struct MapIter<'a, K, V> {
+    inner: MapIterInner<'a, K, V>,
+}
+
+enum MapIterInner<'a, K, V> {
+    One(std::option::IntoIter<&'a V>),
+    Map(std::collections::hash_map::Values<'a, K, V>),
+}
+
+impl<K, V> Default for Map<K, V> {
+    fn default() -> Self {
+        Map {
+            inner: MapInner::Empty,
+        }
+    }
+}
+
+impl<'a, K, V> IntoIterator for &'a Map<K, V> {
+    type Item = &'a V;
+    type IntoIter = MapIter<'a, K, V>;
+
+    fn into_iter(self) -> Self::IntoIter {
+        MapIter {
+            inner: match &self.inner {
+                MapInner::Empty => MapIterInner::One(None.into_iter()),
+                MapInner::One(one) => MapIterInner::One(Some(one).into_iter()),
+                MapInner::Map(map) => MapIterInner::Map(map.values()),
+            },
+        }
+    }
+}
+
+impl<'a, K, V> Iterator for MapIter<'a, K, V> {
+    type Item = &'a V;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        match &mut self.inner {
+            MapIterInner::One(one_iter) => one_iter.next(),
+            MapIterInner::Map(map_iter) => map_iter.next(),
+        }
+    }
+}
+
+impl<K, V> Map<K, V>
+where
+    K: Eq + Hash,
+{
+    pub(super) fn is_empty(&self) -> bool {
+        match &self.inner {
+            MapInner::Empty => true,
+            MapInner::One(_) => false,
+            MapInner::Map(map) => map.is_empty(),
+        }
+    }
+
+    #[cfg(debug_assertions)]
+    pub(super) fn len(&self) -> usize {
+        match &self.inner {
+            MapInner::Empty => 0,
+            MapInner::One(_) => 1,
+            MapInner::Map(map) => map.len(),
+        }
+    }
+
+    pub(super) fn get(&self, key: &K, key_from_value: impl FnOnce(&V) -> K) -> Option<&V> {
+        match &self.inner {
+            MapInner::One(one) if *key == key_from_value(one) => Some(one),
+            MapInner::Map(map) => map.get(key),
+            MapInner::Empty | MapInner::One(_) => None,
+        }
+    }
+
+    pub(super) fn get_or_insert_with(
+        &mut self,
+        key: K,
+        key_from_value: impl FnOnce(&V) -> K,
+        new_value: impl FnOnce() -> V,
+    ) -> &mut V {
+        match self.inner {
+            MapInner::Empty => {
+                self.inner = MapInner::One(new_value());
+                match &mut self.inner {
+                    MapInner::One(one) => one,
+                    _ => unreachable!(),
+                }
+            },
+            MapInner::One(_) => {
+                let one = match mem::replace(&mut self.inner, MapInner::Empty) {
+                    MapInner::One(one) => one,
+                    _ => unreachable!(),
+                };
+                // If this panics, the child `one` will be lost.
+                let one_key = key_from_value(&one);
+                // Same for the equality test.
+                if key == one_key {
+                    self.inner = MapInner::One(one);
+                    match &mut self.inner {
+                        MapInner::One(one) => return one,
+                        _ => unreachable!(),
+                    }
+                }
+                self.inner = MapInner::Map(Box::new(FxHashMap::with_capacity_and_hasher(
+                    2,
+                    Default::default(),
+                )));
+                let map = match &mut self.inner {
+                    MapInner::Map(map) => map,
+                    _ => unreachable!(),
+                };
+                map.insert(one_key, one);
+                // But it doesn't matter if `new_value` panics, by this point
+                // the map is as before but represented as a map instead
+                // of a single value.
+                map.entry(key).or_insert_with(new_value)
+            },
+            MapInner::Map(ref mut map) => map.entry(key).or_insert_with(new_value),
+        }
+    }
+
+    pub(super) fn remove(&mut self, key: &K, key_from_value: impl FnOnce(&V) -> K) -> Option<V> {
+        match &mut self.inner {
+            MapInner::One(one) if *key == key_from_value(one) => {
+                match mem::replace(&mut self.inner, MapInner::Empty) {
+                    MapInner::One(one) => Some(one),
+                    _ => unreachable!(),
+                }
+            },
+            MapInner::Map(map) => map.remove(key),
+            MapInner::Empty | MapInner::One(_) => None,
+        }
+    }
+}
+
+impl<K, V> MallocShallowSizeOf for Map<K, V>
+where
+    K: Eq + Hash,
+{
+    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
+        match &self.inner {
+            MapInner::Map(m) => {
+                // We want to account for both the box and the hashmap.
+                m.shallow_size_of(ops) + (**m).shallow_size_of(ops)
+            },
+            MapInner::One(_) | MapInner::Empty => 0,
+        }
+    }
+}
diff --git a/components/style/rule_tree/mod.rs b/components/style/rule_tree/mod.rs
index 484cdee5183..9844148bffe 100644
--- a/components/style/rule_tree/mod.rs
+++ b/components/style/rule_tree/mod.rs
@@ -7,7 +7,7 @@
 //! The rule tree.
 
 use crate::applicable_declarations::ApplicableDeclarationList;
-use crate::hash::{self, FxHashMap};
+use crate::hash::FxHashMap;
 use crate::properties::{Importance, LonghandIdSet, PropertyDeclarationBlock};
 use crate::shared_lock::{Locked, SharedRwLockReadGuard, StylesheetGuards};
 use crate::stylesheets::{Origin, StyleRule};
@@ -21,6 +21,9 @@ use std::mem;
 use std::ptr;
 use std::sync::atomic::{AtomicPtr, AtomicUsize, Ordering};
 
+mod map;
+use self::map::Map;
+
 /// The rule tree, the structure servo uses to preserve the results of selector
 /// matching.
 ///
@@ -80,7 +83,9 @@ impl MallocSizeOf for RuleTree {
             n += unsafe { ops.malloc_size_of(node.ptr()) };
             let children = unsafe { (*node.ptr()).children.read() };
             children.shallow_size_of(ops);
-            children.each(|c| stack.push(c.clone()));
+            for c in &*children {
+                stack.push(c.clone());
+            }
         }
 
         n
@@ -450,7 +455,9 @@ impl RuleTree {
         while let Some(node) = stack.pop() {
             let children = node.get().children.read();
             *children_count.entry(children.len()).or_insert(0) += 1;
-            children.each(|c| stack.push(c.upgrade()));
+            for c in &*children {
+                stack.push(c.upgrade());
+            }
         }
 
         trace!("Rule tree stats:");
@@ -841,161 +848,6 @@ impl CascadeLevel {
     }
 }
 
-/// The children of a single rule node.
-///
-/// We optimize the case of no kids and a single child, since they're by far the
-/// most common case and it'd cause a bunch of bloat for no reason.
-///
-/// The children remove themselves when they go away, which means that it's ok
-/// for us to store weak pointers to them.
-enum RuleNodeChildren {
-    /// There are no kids.
-    Empty,
-    /// There's just one kid. This is an extremely common case, so we don't
-    /// bother allocating a map for it.
-    One(WeakRuleNode),
-    /// At least at one point in time there was more than one kid (that is to
-    /// say, we don't bother re-allocating if children are removed dynamically).
-    Map(Box<FxHashMap<ChildKey, WeakRuleNode>>),
-}
-
-impl MallocShallowSizeOf for RuleNodeChildren {
-    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
-        match *self {
-            RuleNodeChildren::One(..) | RuleNodeChildren::Empty => 0,
-            RuleNodeChildren::Map(ref m) => {
-                // Want to account for both the box and the hashmap.
-                m.shallow_size_of(ops) + (**m).shallow_size_of(ops)
-            },
-        }
-    }
-}
-
-impl Default for RuleNodeChildren {
-    fn default() -> Self {
-        RuleNodeChildren::Empty
-    }
-}
-
-impl RuleNodeChildren {
-    /// Executes a given function for each of the children.
-    fn each(&self, mut f: impl FnMut(&WeakRuleNode)) {
-        match *self {
-            RuleNodeChildren::Empty => {},
-            RuleNodeChildren::One(ref child) => f(child),
-            RuleNodeChildren::Map(ref map) => {
-                for (_key, kid) in map.iter() {
-                    f(kid)
-                }
-            },
-        }
-    }
-
-    fn len(&self) -> usize {
-        match *self {
-            RuleNodeChildren::Empty => 0,
-            RuleNodeChildren::One(..) => 1,
-            RuleNodeChildren::Map(ref map) => map.len(),
-        }
-    }
-
-    fn is_empty(&self) -> bool {
-        self.len() == 0
-    }
-
-    fn get(&self, key: &ChildKey) -> Option<&WeakRuleNode> {
-        match *self {
-            RuleNodeChildren::Empty => return None,
-            RuleNodeChildren::One(ref kid) => {
-                // We're read-locked, so no need to do refcount stuff, since the
-                // child is only removed from the main thread, _and_ it'd need
-                // to write-lock us anyway.
-                if unsafe { (*kid.ptr()).key() } == *key {
-                    Some(kid)
-                } else {
-                    None
-                }
-            },
-            RuleNodeChildren::Map(ref map) => map.get(&key),
-        }
-    }
-
-    fn get_or_insert_with(
-        &mut self,
-        key: ChildKey,
-        get_new_child: impl FnOnce() -> StrongRuleNode,
-    ) -> StrongRuleNode {
-        let existing_child_key = match *self {
-            RuleNodeChildren::Empty => {
-                let new = get_new_child();
-                debug_assert_eq!(new.get().key(), key);
-                *self = RuleNodeChildren::One(new.downgrade());
-                return new;
-            },
-            RuleNodeChildren::One(ref weak) => unsafe {
-                // We're locked necessarily, so it's fine to look at our
-                // weak-child without refcount-traffic.
-                let existing_child_key = (*weak.ptr()).key();
-                if existing_child_key == key {
-                    return weak.upgrade();
-                }
-                existing_child_key
-            },
-            RuleNodeChildren::Map(ref mut map) => {
-                return match map.entry(key) {
-                    hash::map::Entry::Occupied(ref occupied) => occupied.get().upgrade(),
-                    hash::map::Entry::Vacant(vacant) => {
-                        let new = get_new_child();
-
-                        debug_assert_eq!(new.get().key(), key);
-                        vacant.insert(new.downgrade());
-
-                        new
-                    },
-                };
-            },
-        };
-
-        let existing_child = match mem::replace(self, RuleNodeChildren::Empty) {
-            RuleNodeChildren::One(o) => o,
-            _ => unreachable!(),
-        };
-        // Two rule-nodes are still a not-totally-uncommon thing, so
-        // avoid over-allocating entries.
-        //
-        // TODO(emilio): Maybe just inline two kids too?
-        let mut children = Box::new(FxHashMap::with_capacity_and_hasher(2, Default::default()));
-        children.insert(existing_child_key, existing_child);
-
-        let new = get_new_child();
-        debug_assert_eq!(new.get().key(), key);
-        children.insert(key, new.downgrade());
-
-        *self = RuleNodeChildren::Map(children);
-
-        new
-    }
-
-    fn remove(&mut self, key: &ChildKey) -> Option<WeakRuleNode> {
-        match *self {
-            RuleNodeChildren::Empty => return None,
-            RuleNodeChildren::One(ref one) => {
-                if unsafe { (*one.ptr()).key() } != *key {
-                    return None;
-                }
-            },
-            RuleNodeChildren::Map(ref mut multiple) => {
-                return multiple.remove(key);
-            },
-        }
-
-        match mem::replace(self, RuleNodeChildren::Empty) {
-            RuleNodeChildren::One(o) => Some(o),
-            _ => unreachable!(),
-        }
-    }
-}
-
 /// A node in the rule tree.
 pub struct RuleNode {
     /// The root node. Only the root has no root pointer, for obvious reasons.
@@ -1021,7 +873,7 @@ pub struct RuleNode {
 
     /// The children of a given rule node. Children remove themselves from here
     /// when they go away.
-    children: RwLock<RuleNodeChildren>,
+    children: RwLock<Map<ChildKey, WeakRuleNode>>,
 
     /// The next item in the rule tree free list, that starts on the root node.
    ///
@@ -1142,7 +994,11 @@ impl RuleNode {
         );
 
         if let Some(parent) = self.parent.as_ref() {
-            let weak = parent.get().children.write().remove(&self.key());
+            let weak = parent
+                .get()
+                .children
+                .write()
+                .remove(&self.key(), |node| (*node.ptr()).key());
             assert_eq!(weak.unwrap().ptr() as *const _, self as *const _);
         }
     }
@@ -1179,12 +1035,12 @@ impl RuleNode {
         }
 
         let _ = write!(writer, "\n");
-        self.children.read().each(|child| {
+        for child in &*self.children.read() {
             child
                 .upgrade()
                 .get()
                 .dump(guards, writer, indent + INDENT_INCREMENT);
-        });
+        }
     }
 }
 
@@ -1250,19 +1106,24 @@ impl StrongRuleNode {
 
         let key = ChildKey(level, source.key());
 
-        let read_guard = self.get().children.upgradable_read();
-        if let Some(child) = read_guard.get(&key) {
+        let children = self.get().children.upgradable_read();
+        if let Some(child) = children.get(&key, |node| unsafe { (*node.ptr()).key() }) {
             return child.upgrade();
         }
+        let mut children = RwLockUpgradableReadGuard::upgrade(children);
+        let weak = children.get_or_insert_with(
+            key,
+            |node| unsafe { (*node.ptr()).key() },
+            move || {
+                let strong =
+                    StrongRuleNode::new(Box::new(RuleNode::new(root, self.clone(), source, level)));
+                let weak = strong.downgrade();
+                mem::forget(strong);
+                weak
+            },
+        );
 
-        RwLockUpgradableReadGuard::upgrade(read_guard).get_or_insert_with(key, move || {
-            StrongRuleNode::new(Box::new(RuleNode::new(
-                root,
-                self.clone(),
-                source.clone(),
-                level,
-            )))
-        })
+        StrongRuleNode::from_ptr(weak.p)
     }
 
     /// Raw pointer to the RuleNode

From 1ea6a0cdd4c4a986823158713e1da47347844b0c Mon Sep 17 00:00:00 2001
From: Anthony Ramine
Date: Wed, 15 Apr 2020 17:23:44 +0200
Subject: [PATCH 2/9] Move CascadeLevel to its own rule_tree submodule

---
 components/style/rule_tree/level.rs | 277 +++++++++++++++++++++++++++
 components/style/rule_tree/mod.rs   | 278 +---------------------------
 2 files changed, 281 insertions(+), 274 deletions(-)
 create mode 100644 components/style/rule_tree/level.rs

diff --git a/components/style/rule_tree/level.rs b/components/style/rule_tree/level.rs
new file mode 100644
index 00000000000..c46e63796ad
--- /dev/null
+++ b/components/style/rule_tree/level.rs
@@ -0,0 +1,277 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at https://mozilla.org/MPL/2.0/. */
+
+#![forbid(unsafe_code)]
+
+use crate::properties::Importance;
+use crate::shared_lock::{SharedRwLockReadGuard, StylesheetGuards};
+use crate::stylesheets::Origin;
+
+/// The cascade level these rules are relevant at, as per [1][2][3].
+///
+/// Presentational hints for SVG and HTML are in the "author-level
+/// zero-specificity" level, that is, right after user rules, and before author
+/// rules.
+///
+/// The order of variants declared here is significant, and must be in
+/// _ascending_ order of precedence.
+///
+/// See also [4] for the Shadow DOM bits. We rely on the invariant that rules
+/// from outside the tree the element is in can't affect the element.
+/// +/// The opposite is not true (i.e., :host and ::slotted) from an "inner" shadow +/// tree may affect an element connected to the document or an "outer" shadow +/// tree. +/// +/// [1]: https://drafts.csswg.org/css-cascade/#cascade-origin +/// [2]: https://drafts.csswg.org/css-cascade/#preshint +/// [3]: https://html.spec.whatwg.org/multipage/#presentational-hints +/// [4]: https://drafts.csswg.org/css-scoping/#shadow-cascading +#[repr(u8)] +#[derive(Clone, Copy, Debug, Eq, Hash, MallocSizeOf, PartialEq, PartialOrd)] +pub enum CascadeLevel { + /// Normal User-Agent rules. + UANormal, + /// User normal rules. + UserNormal, + /// Presentational hints. + PresHints, + /// Shadow DOM styles from author styles. + AuthorNormal { + /// The order in the shadow tree hierarchy. This number is relative to + /// the tree of the element, and thus the only invariants that need to + /// be preserved is: + /// + /// * Zero is the same tree as the element that matched the rule. This + /// is important so that we can optimize style attribute insertions. + /// + /// * The levels are ordered in accordance with + /// https://drafts.csswg.org/css-scoping/#shadow-cascading + shadow_cascade_order: ShadowCascadeOrder, + }, + /// SVG SMIL animations. + SMILOverride, + /// CSS animations and script-generated animations. + Animations, + /// Author-supplied important rules. + AuthorImportant { + /// The order in the shadow tree hierarchy, inverted, so that PartialOrd + /// does the right thing. + shadow_cascade_order: ShadowCascadeOrder, + }, + /// User important rules. + UserImportant, + /// User-agent important rules. + UAImportant, + /// Transitions + Transitions, +} + +impl CascadeLevel { + /// Pack this cascade level in a single byte. + /// + /// We have 10 levels, which we can represent with 4 bits, and then a + /// cascade order optionally, which we can clamp to three bits max, and + /// represent with a fourth bit for the sign. + /// + /// So this creates: SOOODDDD + /// + /// Where `S` is the sign of the order (one if negative, 0 otherwise), `O` + /// is the absolute value of the order, and `D`s are the discriminant. + #[inline] + pub fn to_byte_lossy(&self) -> u8 { + let (discriminant, order) = match *self { + Self::UANormal => (0, 0), + Self::UserNormal => (1, 0), + Self::PresHints => (2, 0), + Self::AuthorNormal { + shadow_cascade_order, + } => (3, shadow_cascade_order.0), + Self::SMILOverride => (4, 0), + Self::Animations => (5, 0), + Self::AuthorImportant { + shadow_cascade_order, + } => (6, shadow_cascade_order.0), + Self::UserImportant => (7, 0), + Self::UAImportant => (8, 0), + Self::Transitions => (9, 0), + }; + + debug_assert_eq!(discriminant & 0xf, discriminant); + if order == 0 { + return discriminant; + } + + let negative = order < 0; + let value = std::cmp::min(order.abs() as u8, 0b111); + (negative as u8) << 7 | value << 4 | discriminant + } + + /// Convert back from the single-byte representation of the cascade level + /// explained above. 
+ #[inline] + pub fn from_byte(b: u8) -> Self { + let order = { + let abs = ((b & 0b01110000) >> 4) as i8; + let negative = b & 0b10000000 != 0; + if negative { + -abs + } else { + abs + } + }; + let discriminant = b & 0xf; + let level = match discriminant { + 0 => Self::UANormal, + 1 => Self::UserNormal, + 2 => Self::PresHints, + 3 => { + return Self::AuthorNormal { + shadow_cascade_order: ShadowCascadeOrder(order), + } + }, + 4 => Self::SMILOverride, + 5 => Self::Animations, + 6 => { + return Self::AuthorImportant { + shadow_cascade_order: ShadowCascadeOrder(order), + } + }, + 7 => Self::UserImportant, + 8 => Self::UAImportant, + 9 => Self::Transitions, + _ => unreachable!("Didn't expect {} as a discriminant", discriminant), + }; + debug_assert_eq!(order, 0, "Didn't expect an order value for {:?}", level); + level + } + + /// Select a lock guard for this level + pub fn guard<'a>(&self, guards: &'a StylesheetGuards<'a>) -> &'a SharedRwLockReadGuard<'a> { + match *self { + Self::UANormal | Self::UserNormal | Self::UserImportant | Self::UAImportant => { + guards.ua_or_user + }, + _ => guards.author, + } + } + + /// Returns the cascade level for author important declarations from the + /// same tree as the element. + #[inline] + pub fn same_tree_author_important() -> Self { + Self::AuthorImportant { + shadow_cascade_order: ShadowCascadeOrder::for_same_tree(), + } + } + + /// Returns the cascade level for author normal declarations from the same + /// tree as the element. + #[inline] + pub fn same_tree_author_normal() -> Self { + Self::AuthorNormal { + shadow_cascade_order: ShadowCascadeOrder::for_same_tree(), + } + } + + /// Returns whether this cascade level represents important rules of some + /// sort. + #[inline] + pub fn is_important(&self) -> bool { + match *self { + Self::AuthorImportant { .. } | Self::UserImportant | Self::UAImportant => true, + _ => false, + } + } + + /// Returns the importance relevant for this rule. Pretty similar to + /// `is_important`. + #[inline] + pub fn importance(&self) -> Importance { + if self.is_important() { + Importance::Important + } else { + Importance::Normal + } + } + + /// Returns the cascade origin of the rule. + #[inline] + pub fn origin(&self) -> Origin { + match *self { + Self::UAImportant | Self::UANormal => Origin::UserAgent, + Self::UserImportant | Self::UserNormal => Origin::User, + Self::PresHints | + Self::AuthorNormal { .. } | + Self::AuthorImportant { .. } | + Self::SMILOverride | + Self::Animations | + Self::Transitions => Origin::Author, + } + } + + /// Returns whether this cascade level represents an animation rules. + #[inline] + pub fn is_animation(&self) -> bool { + match *self { + Self::SMILOverride | Self::Animations | Self::Transitions => true, + _ => false, + } + } +} + +/// A counter to track how many shadow root rules deep we are. This is used to +/// handle: +/// +/// https://drafts.csswg.org/css-scoping/#shadow-cascading +/// +/// See the static functions for the meaning of different values. +#[derive(Clone, Copy, Debug, Eq, Hash, MallocSizeOf, Ord, PartialEq, PartialOrd)] +pub struct ShadowCascadeOrder(i8); + +impl ShadowCascadeOrder { + /// A level for the outermost shadow tree (the shadow tree we own, and the + /// ones from the slots we're slotted in). + #[inline] + pub fn for_outermost_shadow_tree() -> Self { + Self(-1) + } + + /// A level for the element's tree. + #[inline] + fn for_same_tree() -> Self { + Self(0) + } + + /// A level for the innermost containing tree (the one closest to the + /// element). 
+ #[inline] + pub fn for_innermost_containing_tree() -> Self { + Self(1) + } + + /// Decrement the level, moving inwards. We should only move inwards if + /// we're traversing slots. + #[inline] + pub fn dec(&mut self) { + debug_assert!(self.0 < 0); + self.0 = self.0.saturating_sub(1); + } + + /// The level, moving inwards. We should only move inwards if we're + /// traversing slots. + #[inline] + pub fn inc(&mut self) { + debug_assert_ne!(self.0, -1); + self.0 = self.0.saturating_add(1); + } +} + +impl std::ops::Neg for ShadowCascadeOrder { + type Output = Self; + #[inline] + fn neg(self) -> Self { + Self(self.0.neg()) + } +} diff --git a/components/style/rule_tree/mod.rs b/components/style/rule_tree/mod.rs index 9844148bffe..f9f9500144f 100644 --- a/components/style/rule_tree/mod.rs +++ b/components/style/rule_tree/mod.rs @@ -10,7 +10,7 @@ use crate::applicable_declarations::ApplicableDeclarationList; use crate::hash::FxHashMap; use crate::properties::{Importance, LonghandIdSet, PropertyDeclarationBlock}; use crate::shared_lock::{Locked, SharedRwLockReadGuard, StylesheetGuards}; -use crate::stylesheets::{Origin, StyleRule}; +use crate::stylesheets::StyleRule; use crate::thread_state; use malloc_size_of::{MallocShallowSizeOf, MallocSizeOf, MallocSizeOfOps}; use parking_lot::RwLock; @@ -21,7 +21,10 @@ use std::mem; use std::ptr; use std::sync::atomic::{AtomicPtr, AtomicUsize, Ordering}; +mod level; mod map; + +pub use self::level::{CascadeLevel, ShadowCascadeOrder}; use self::map::Map; /// The rule tree, the structure servo uses to preserve the results of selector @@ -172,61 +175,6 @@ const FREE_LIST_SENTINEL: *mut RuleNode = 0x01 as *mut RuleNode; /// another thread is currently adding an entry). We spin if we find this value. const FREE_LIST_LOCKED: *mut RuleNode = 0x02 as *mut RuleNode; -/// A counter to track how many shadow root rules deep we are. This is used to -/// handle: -/// -/// https://drafts.csswg.org/css-scoping/#shadow-cascading -/// -/// See the static functions for the meaning of different values. -#[derive(Clone, Copy, Debug, Eq, Hash, MallocSizeOf, Ord, PartialEq, PartialOrd)] -pub struct ShadowCascadeOrder(i8); - -impl ShadowCascadeOrder { - /// A level for the outermost shadow tree (the shadow tree we own, and the - /// ones from the slots we're slotted in). - #[inline] - pub fn for_outermost_shadow_tree() -> Self { - Self(-1) - } - - /// A level for the element's tree. - #[inline] - fn for_same_tree() -> Self { - Self(0) - } - - /// A level for the innermost containing tree (the one closest to the - /// element). - #[inline] - pub fn for_innermost_containing_tree() -> Self { - Self(1) - } - - /// Decrement the level, moving inwards. We should only move inwards if - /// we're traversing slots. - #[inline] - pub fn dec(&mut self) { - debug_assert!(self.0 < 0); - self.0 = self.0.saturating_sub(1); - } - - /// The level, moving inwards. We should only move inwards if we're - /// traversing slots. - #[inline] - pub fn inc(&mut self) { - debug_assert_ne!(self.0, -1); - self.0 = self.0.saturating_add(1); - } -} - -impl std::ops::Neg for ShadowCascadeOrder { - type Output = Self; - #[inline] - fn neg(self) -> Self { - Self(self.0.neg()) - } -} - impl RuleTree { /// Construct a new rule tree. pub fn new() -> Self { @@ -630,224 +578,6 @@ impl RuleTree { /// where it likely did not result from a rigorous performance analysis.) const RULE_TREE_GC_INTERVAL: usize = 300; -/// The cascade level these rules are relevant at, as per[1][2][3]. 
-/// -/// Presentational hints for SVG and HTML are in the "author-level -/// zero-specificity" level, that is, right after user rules, and before author -/// rules. -/// -/// The order of variants declared here is significant, and must be in -/// _ascending_ order of precedence. -/// -/// See also [4] for the Shadow DOM bits. We rely on the invariant that rules -/// from outside the tree the element is in can't affect the element. -/// -/// The opposite is not true (i.e., :host and ::slotted) from an "inner" shadow -/// tree may affect an element connected to the document or an "outer" shadow -/// tree. -/// -/// [1]: https://drafts.csswg.org/css-cascade/#cascade-origin -/// [2]: https://drafts.csswg.org/css-cascade/#preshint -/// [3]: https://html.spec.whatwg.org/multipage/#presentational-hints -/// [4]: https://drafts.csswg.org/css-scoping/#shadow-cascading -#[repr(u8)] -#[derive(Clone, Copy, Debug, Eq, Hash, MallocSizeOf, PartialEq, PartialOrd)] -pub enum CascadeLevel { - /// Normal User-Agent rules. - UANormal, - /// User normal rules. - UserNormal, - /// Presentational hints. - PresHints, - /// Shadow DOM styles from author styles. - AuthorNormal { - /// The order in the shadow tree hierarchy. This number is relative to - /// the tree of the element, and thus the only invariants that need to - /// be preserved is: - /// - /// * Zero is the same tree as the element that matched the rule. This - /// is important so that we can optimize style attribute insertions. - /// - /// * The levels are ordered in accordance with - /// https://drafts.csswg.org/css-scoping/#shadow-cascading - shadow_cascade_order: ShadowCascadeOrder, - }, - /// SVG SMIL animations. - SMILOverride, - /// CSS animations and script-generated animations. - Animations, - /// Author-supplied important rules. - AuthorImportant { - /// The order in the shadow tree hierarchy, inverted, so that PartialOrd - /// does the right thing. - shadow_cascade_order: ShadowCascadeOrder, - }, - /// User important rules. - UserImportant, - /// User-agent important rules. - UAImportant, - /// Transitions - Transitions, -} - -impl CascadeLevel { - /// Pack this cascade level in a single byte. - /// - /// We have 10 levels, which we can represent with 4 bits, and then a - /// cascade order optionally, which we can clamp to three bits max, and - /// represent with a fourth bit for the sign. - /// - /// So this creates: SOOODDDD - /// - /// Where `S` is the sign of the order (one if negative, 0 otherwise), `O` - /// is the absolute value of the order, and `D`s are the discriminant. - #[inline] - pub fn to_byte_lossy(&self) -> u8 { - let (discriminant, order) = match *self { - Self::UANormal => (0, 0), - Self::UserNormal => (1, 0), - Self::PresHints => (2, 0), - Self::AuthorNormal { - shadow_cascade_order, - } => (3, shadow_cascade_order.0), - Self::SMILOverride => (4, 0), - Self::Animations => (5, 0), - Self::AuthorImportant { - shadow_cascade_order, - } => (6, shadow_cascade_order.0), - Self::UserImportant => (7, 0), - Self::UAImportant => (8, 0), - Self::Transitions => (9, 0), - }; - - debug_assert_eq!(discriminant & 0xf, discriminant); - if order == 0 { - return discriminant; - } - - let negative = order < 0; - let value = std::cmp::min(order.abs() as u8, 0b111); - (negative as u8) << 7 | value << 4 | discriminant - } - - /// Convert back from the single-byte representation of the cascade level - /// explained above. 
- #[inline] - pub fn from_byte(b: u8) -> Self { - let order = { - let abs = ((b & 0b01110000) >> 4) as i8; - let negative = b & 0b10000000 != 0; - if negative { - -abs - } else { - abs - } - }; - let discriminant = b & 0xf; - let level = match discriminant { - 0 => Self::UANormal, - 1 => Self::UserNormal, - 2 => Self::PresHints, - 3 => { - return Self::AuthorNormal { - shadow_cascade_order: ShadowCascadeOrder(order), - } - }, - 4 => Self::SMILOverride, - 5 => Self::Animations, - 6 => { - return Self::AuthorImportant { - shadow_cascade_order: ShadowCascadeOrder(order), - } - }, - 7 => Self::UserImportant, - 8 => Self::UAImportant, - 9 => Self::Transitions, - _ => unreachable!("Didn't expect {} as a discriminant", discriminant), - }; - debug_assert_eq!(order, 0, "Didn't expect an order value for {:?}", level); - level - } - - /// Select a lock guard for this level - pub fn guard<'a>(&self, guards: &'a StylesheetGuards<'a>) -> &'a SharedRwLockReadGuard<'a> { - match *self { - CascadeLevel::UANormal | - CascadeLevel::UserNormal | - CascadeLevel::UserImportant | - CascadeLevel::UAImportant => guards.ua_or_user, - _ => guards.author, - } - } - - /// Returns the cascade level for author important declarations from the - /// same tree as the element. - #[inline] - pub fn same_tree_author_important() -> Self { - CascadeLevel::AuthorImportant { - shadow_cascade_order: ShadowCascadeOrder::for_same_tree(), - } - } - - /// Returns the cascade level for author normal declarations from the same - /// tree as the element. - #[inline] - pub fn same_tree_author_normal() -> Self { - CascadeLevel::AuthorNormal { - shadow_cascade_order: ShadowCascadeOrder::for_same_tree(), - } - } - - /// Returns whether this cascade level represents important rules of some - /// sort. - #[inline] - pub fn is_important(&self) -> bool { - match *self { - CascadeLevel::AuthorImportant { .. } | - CascadeLevel::UserImportant | - CascadeLevel::UAImportant => true, - _ => false, - } - } - - /// Returns the importance relevant for this rule. Pretty similar to - /// `is_important`. - #[inline] - pub fn importance(&self) -> Importance { - if self.is_important() { - Importance::Important - } else { - Importance::Normal - } - } - - /// Returns the cascade origin of the rule. - #[inline] - pub fn origin(&self) -> Origin { - match *self { - CascadeLevel::UAImportant | CascadeLevel::UANormal => Origin::UserAgent, - CascadeLevel::UserImportant | CascadeLevel::UserNormal => Origin::User, - CascadeLevel::PresHints | - CascadeLevel::AuthorNormal { .. } | - CascadeLevel::AuthorImportant { .. } | - CascadeLevel::SMILOverride | - CascadeLevel::Animations | - CascadeLevel::Transitions => Origin::Author, - } - } - - /// Returns whether this cascade level represents an animation rules. - #[inline] - pub fn is_animation(&self) -> bool { - match *self { - CascadeLevel::SMILOverride | CascadeLevel::Animations | CascadeLevel::Transitions => { - true - }, - _ => false, - } - } -} - /// A node in the rule tree. pub struct RuleNode { /// The root node. Only the root has no root pointer, for obvious reasons. 
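
As an illustration of the SOOODDDD byte layout that `to_byte_lossy` and
`from_byte` implement in the patch above, the following standalone Rust sketch
mirrors the same arithmetic. It is not part of the patch series, and `pack`
and `unpack` are hypothetical helper names standing in for the real methods:

    // Sketch of the SOOODDDD packing: sign bit, 3-bit clamped order,
    // 4-bit discriminant, mirroring CascadeLevel::to_byte_lossy/from_byte.
    fn pack(discriminant: u8, order: i8) -> u8 {
        debug_assert_eq!(discriminant & 0xf, discriminant);
        if order == 0 {
            return discriminant;
        }
        let negative = order < 0;
        // Clamp the absolute order to three bits; this is the "lossy" part.
        let value = std::cmp::min(order.abs() as u8, 0b111);
        (negative as u8) << 7 | value << 4 | discriminant
    }

    fn unpack(b: u8) -> (u8, i8) {
        let abs = ((b & 0b0111_0000) >> 4) as i8;
        let order = if b & 0b1000_0000 != 0 { -abs } else { abs };
        (b & 0xf, order)
    }

    fn main() {
        // AuthorNormal has discriminant 3; a shadow cascade order of -1
        // packs into 0b1001_0011 and round-trips losslessly.
        let byte = pack(3, -1);
        assert_eq!(byte, 0b1001_0011);
        assert_eq!(unpack(byte), (3, -1));
    }
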
From e5cb3d2a4c7d05ec82a92aff6732590b27d052aa Mon Sep 17 00:00:00 2001
From: Anthony Ramine
Date: Thu, 16 Apr 2020 12:55:47 +0200
Subject: [PATCH 3/9] Move the meat of the rule tree to a submodule "core"

---
 components/style/rule_tree/core.rs   |  776 +++++++++++++++++++
 components/style/rule_tree/mod.rs    | 1041 +++++---------------------
 components/style/rule_tree/source.rs |  101 +++
 3 files changed, 1076 insertions(+), 842 deletions(-)
 create mode 100644 components/style/rule_tree/core.rs
 create mode 100644 components/style/rule_tree/source.rs

diff --git a/components/style/rule_tree/core.rs b/components/style/rule_tree/core.rs
new file mode 100644
index 00000000000..3103d416aec
--- /dev/null
+++ b/components/style/rule_tree/core.rs
@@ -0,0 +1,776 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at https://mozilla.org/MPL/2.0/. */
+
+#![allow(unsafe_code)]
+
+use crate::properties::Importance;
+use crate::shared_lock::StylesheetGuards;
+use crate::thread_state;
+use malloc_size_of::{MallocShallowSizeOf, MallocSizeOf, MallocSizeOfOps};
+use parking_lot::RwLock;
+use smallvec::SmallVec;
+use std::io::Write;
+use std::mem;
+use std::ptr;
+use std::sync::atomic::{AtomicPtr, AtomicUsize, Ordering};
+
+use super::map::Map;
+use super::{CascadeLevel, StyleSource};
+
+/// The rule tree, the structure servo uses to preserve the results of selector
+/// matching.
+///
+/// This is organized as a tree of rules. When a node matches a set of rules,
+/// they're inserted in order in the tree, starting with the less specific one.
+///
+/// When a rule is inserted in the tree, other elements may share the path up to
+/// a given rule. If that's the case, we don't duplicate child nodes, but share
+/// them.
+///
+/// When the rule node refcount drops to zero, it doesn't get freed. It gets
+/// instead put into a free list, and it is potentially GC'd after a while in a
+/// single-threaded fashion.
+///
+/// That way, a rule node that represents a likely-to-match-again rule (like a
+/// :hover rule) can be reused if we haven't GC'd it yet.
+///
+/// See the discussion at https://github.com/servo/servo/pull/15562 and the IRC
+/// logs from http://logs.glob.uno/?c=mozilla%23servo&s=3+Apr+2017&e=3+Apr+2017#c644094
+/// to see a discussion about the different memory orderings used here.
+#[derive(Debug)]
+pub struct RuleTree {
+    root: StrongRuleNode,
+}
+
+impl Drop for RuleTree {
+    fn drop(&mut self) {
+        // GC the rule tree.
+        unsafe {
+            self.gc();
+        }
+
+        // After the GC, the free list should be empty.
+        debug_assert_eq!(
+            self.root.get().next_free.load(Ordering::Relaxed),
+            FREE_LIST_SENTINEL
+        );
+
+        // Remove the sentinel. This indicates that GCs will no longer occur.
+        // Any further drops of StrongRuleNodes must occur on the main thread,
+        // and will trigger synchronous dropping of the Rule nodes.
+        self.root
+            .get()
+            .next_free
+            .store(ptr::null_mut(), Ordering::Relaxed);
+    }
+}
+
+impl MallocSizeOf for RuleTree {
+    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
+        let mut n = 0;
+        let mut stack = SmallVec::<[_; 32]>::new();
+        stack.push(self.root.downgrade());
+
+        while let Some(node) = stack.pop() {
+            n += unsafe { ops.malloc_size_of(node.ptr()) };
+            let children = unsafe { (*node.ptr()).children.read() };
+            children.shallow_size_of(ops);
+            for c in &*children {
+                stack.push(c.clone());
+            }
+        }
+
+        n
+    }
+}
+
+#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
+struct ChildKey(CascadeLevel, ptr::NonNull<()>);
+unsafe impl Send for ChildKey {}
+unsafe impl Sync for ChildKey {}
+
+/// This value exists here so a node that pushes itself to the list can know
+/// that it is in the free list by looking at its next pointer, and comparing
+/// it with null.
+///
+/// The root node doesn't have a null pointer in the free list, but this value.
+const FREE_LIST_SENTINEL: *mut RuleNode = 0x01 as *mut RuleNode;
+
+/// A second sentinel value for the free list, indicating that it's locked (i.e.
+/// another thread is currently adding an entry). We spin if we find this value.
+const FREE_LIST_LOCKED: *mut RuleNode = 0x02 as *mut RuleNode;
+
+impl RuleTree {
+    /// Construct a new rule tree.
+    pub fn new() -> Self {
+        RuleTree {
+            root: StrongRuleNode::new(Box::new(RuleNode::root())),
+        }
+    }
+
+    /// Get the root rule node.
+    pub fn root(&self) -> &StrongRuleNode {
+        &self.root
+    }
+
+    /// This can only be called when no other thread is accessing this tree.
+    pub unsafe fn gc(&self) {
+        self.root.gc();
+    }
+
+    /// This can only be called when no other thread is accessing this tree.
+    pub unsafe fn maybe_gc(&self) {
+        #[cfg(debug_assertions)]
+        self.maybe_dump_stats();
+
+        self.root.maybe_gc();
+    }
+
+    #[cfg(debug_assertions)]
+    fn maybe_dump_stats(&self) {
+        use itertools::Itertools;
+        use std::cell::Cell;
+        use std::time::{Duration, Instant};
+
+        if !log_enabled!(log::Level::Trace) {
+            return;
+        }
+
+        const RULE_TREE_STATS_INTERVAL: Duration = Duration::from_secs(2);
+
+        thread_local! {
+            pub static LAST_STATS: Cell<Instant> = Cell::new(Instant::now());
+        };
+
+        let should_dump = LAST_STATS.with(|s| {
+            let now = Instant::now();
+            if now.duration_since(s.get()) < RULE_TREE_STATS_INTERVAL {
+                return false;
+            }
+            s.set(now);
+            true
+        });
+
+        if !should_dump {
+            return;
+        }
+
+        let mut children_count = crate::hash::FxHashMap::default();
+
+        let mut stack = SmallVec::<[_; 32]>::new();
+        stack.push(self.root.clone());
+        while let Some(node) = stack.pop() {
+            let children = node.get().children.read();
+            *children_count.entry(children.len()).or_insert(0) += 1;
+            for c in &*children {
+                stack.push(c.upgrade());
+            }
+        }
+
+        trace!("Rule tree stats:");
+        let counts = children_count.keys().sorted();
+        for count in counts {
+            trace!(" {} - {}", count, children_count[count]);
+        }
+    }
+}
+
+/// The number of RuleNodes added to the free list before we will consider
+/// doing a GC when calling maybe_gc(). (The value is copied from Gecko,
+/// where it likely did not result from a rigorous performance analysis.)
+const RULE_TREE_GC_INTERVAL: usize = 300;
+
+/// A node in the rule tree.
+pub struct RuleNode {
+    /// The root node. Only the root has no root pointer, for obvious reasons.
+    root: Option<WeakRuleNode>,
+
+    /// The parent rule node. Only the root has no parent.
+    parent: Option<StrongRuleNode>,
+
+    /// The actual style source, either coming from a selector in a StyleRule,
+    /// or a raw property declaration block (like the style attribute).
+    ///
+    /// None for the root node.
+    source: Option<StyleSource>,
+
+    /// The cascade level this rule is positioned at.
+    level: CascadeLevel,
+
+    refcount: AtomicUsize,
+
+    /// Only used for the root, stores the number of free rule nodes that are
+    /// around.
+    free_count: AtomicUsize,
+
+    /// The children of a given rule node. Children remove themselves from here
+    /// when they go away.
+    children: RwLock<Map<ChildKey, WeakRuleNode>>,
+
+    /// The next item in the rule tree free list, that starts on the root node.
+    ///
+    /// When this is set to null, that means that the rule tree has been torn
+    /// down, and GCs will no longer occur. When this happens, StrongRuleNodes
+    /// may only be dropped on the main thread, and teardown happens
+    /// synchronously.
+    next_free: AtomicPtr<RuleNode>,
+}
+
+// On Gecko builds, hook into the leak checking machinery.
+#[cfg(feature = "gecko_refcount_logging")]
+mod gecko_leak_checking {
+    use super::RuleNode;
+    use std::mem::size_of;
+    use std::os::raw::{c_char, c_void};
+
+    extern "C" {
+        fn NS_LogCtor(aPtr: *mut c_void, aTypeName: *const c_char, aSize: u32);
+        fn NS_LogDtor(aPtr: *mut c_void, aTypeName: *const c_char, aSize: u32);
+    }
+
+    static NAME: &'static [u8] = b"RuleNode\0";
+
+    /// Logs the creation of a heap-allocated object to Gecko's leak-checking machinery.
+    pub(super) fn log_ctor(ptr: *const RuleNode) {
+        let s = NAME as *const [u8] as *const u8 as *const c_char;
+        unsafe {
+            NS_LogCtor(ptr as *mut c_void, s, size_of::<RuleNode>() as u32);
+        }
+    }
+
+    /// Logs the destruction of a heap-allocated object to Gecko's leak-checking machinery.
+    pub(super) fn log_dtor(ptr: *const RuleNode) {
+        let s = NAME as *const [u8] as *const u8 as *const c_char;
+        unsafe {
+            NS_LogDtor(ptr as *mut c_void, s, size_of::<RuleNode>() as u32);
+        }
+    }
+}
+
+#[inline(always)]
+fn log_new(_ptr: *const RuleNode) {
+    #[cfg(feature = "gecko_refcount_logging")]
+    gecko_leak_checking::log_ctor(_ptr);
+}
+
+#[inline(always)]
+fn log_drop(_ptr: *const RuleNode) {
+    #[cfg(feature = "gecko_refcount_logging")]
+    gecko_leak_checking::log_dtor(_ptr);
+}
+
+impl RuleNode {
+    fn new(
+        root: WeakRuleNode,
+        parent: StrongRuleNode,
+        source: StyleSource,
+        level: CascadeLevel,
+    ) -> Self {
+        debug_assert!(root.upgrade().parent().is_none());
+        RuleNode {
+            root: Some(root),
+            parent: Some(parent),
+            source: Some(source),
+            level: level,
+            refcount: AtomicUsize::new(1),
+            children: Default::default(),
+            free_count: AtomicUsize::new(0),
+            next_free: AtomicPtr::new(ptr::null_mut()),
+        }
+    }
+
+    fn root() -> Self {
+        RuleNode {
+            root: None,
+            parent: None,
+            source: None,
+            level: CascadeLevel::UANormal,
+            refcount: AtomicUsize::new(1),
+            free_count: AtomicUsize::new(0),
+            children: Default::default(),
+            next_free: AtomicPtr::new(FREE_LIST_SENTINEL),
+        }
+    }
+
+    fn key(&self) -> ChildKey {
+        ChildKey(
+            self.level,
+            self.source
+                .as_ref()
+                .expect("Called key() on the root node")
+                .key(),
+        )
+    }
+
+    fn is_root(&self) -> bool {
+        self.parent.is_none()
+    }
+
+    fn free_count(&self) -> &AtomicUsize {
+        debug_assert!(self.is_root());
+        &self.free_count
+    }
+
+    /// Remove this rule node from the child list.
+    ///
+    /// This is expected to be called before freeing the node from the free
+    /// list, on the main thread.
+    unsafe fn remove_from_child_list(&self) {
+        debug!(
+            "Remove from child list: {:?}, parent: {:?}",
+            self as *const RuleNode,
+            self.parent.as_ref().map(|p| p.ptr())
+        );
+
+        if let Some(parent) = self.parent.as_ref() {
+            let weak = parent
+                .get()
+                .children
+                .write()
+                .remove(&self.key(), |node| (*node.ptr()).key());
+            assert_eq!(weak.unwrap().ptr() as *const _, self as *const _);
+        }
+    }
+}
+
+#[derive(Clone)]
+pub(crate) struct WeakRuleNode {
+    p: ptr::NonNull<RuleNode>,
+}
+
+/// A strong reference to a rule node.
+#[derive(Debug, Eq, Hash, PartialEq)]
+pub struct StrongRuleNode {
+    p: ptr::NonNull<RuleNode>,
+}
+unsafe impl Send for StrongRuleNode {}
+unsafe impl Sync for StrongRuleNode {}
+
+#[cfg(feature = "servo")]
+malloc_size_of_is_0!(StrongRuleNode);
+
+impl StrongRuleNode {
+    fn new(n: Box<RuleNode>) -> Self {
+        debug_assert_eq!(n.parent.is_none(), !n.source.is_some());
+
+        // TODO(emilio): Use into_raw_non_null when it's stable.
+        let ptr = unsafe { ptr::NonNull::new_unchecked(Box::into_raw(n)) };
+        log_new(ptr.as_ptr());
+
+        debug!("Creating rule node: {:p}", ptr);
+
+        StrongRuleNode::from_ptr(ptr)
+    }
+
+    fn from_ptr(p: ptr::NonNull<RuleNode>) -> Self {
+        StrongRuleNode { p }
+    }
+
+    pub(super) fn downgrade(&self) -> WeakRuleNode {
+        WeakRuleNode::from_ptr(self.p)
+    }
+
+    /// Get the parent rule node of this rule node.
+    pub fn parent(&self) -> Option<&StrongRuleNode> {
+        self.get().parent.as_ref()
+    }
+
+    pub(super) fn ensure_child(
+        &self,
+        root: WeakRuleNode,
+        source: StyleSource,
+        level: CascadeLevel,
+    ) -> StrongRuleNode {
+        use parking_lot::RwLockUpgradableReadGuard;
+
+        debug_assert!(
+            self.get().level <= level,
+            "Should be ordered (instead {:?} > {:?}), from {:?} and {:?}",
+            self.get().level,
+            level,
+            self.get().source,
+            source,
+        );
+
+        let key = ChildKey(level, source.key());
+
+        let children = self.get().children.upgradable_read();
+        if let Some(child) = children.get(&key, |node| unsafe { (*node.ptr()).key() }) {
+            return child.upgrade();
+        }
+        let mut children = RwLockUpgradableReadGuard::upgrade(children);
+        let weak = children.get_or_insert_with(
+            key,
+            |node| unsafe { (*node.ptr()).key() },
+            move || {
+                let strong =
+                    StrongRuleNode::new(Box::new(RuleNode::new(root, self.clone(), source, level)));
+                let weak = strong.downgrade();
+                mem::forget(strong);
+                weak
+            },
+        );
+
+        StrongRuleNode::from_ptr(weak.p)
+    }
+
+    /// Raw pointer to the RuleNode
+    #[inline]
+    pub fn ptr(&self) -> *mut RuleNode {
+        self.p.as_ptr()
+    }
+
+    fn get(&self) -> &RuleNode {
+        if cfg!(debug_assertions) {
+            let node = unsafe { &*self.p.as_ptr() };
+            assert!(node.refcount.load(Ordering::Relaxed) > 0);
+        }
+        unsafe { &*self.p.as_ptr() }
+    }
+
+    /// Get the style source corresponding to this rule node. May return `None`
+    /// if it's the root node, which means that the node hasn't matched any
+    /// rules.
+    pub fn style_source(&self) -> Option<&StyleSource> {
+        self.get().source.as_ref()
+    }
+
+    /// The cascade level for this node
+    pub fn cascade_level(&self) -> CascadeLevel {
+        self.get().level
+    }
+
+    /// Get the importance that this rule node represents.
+    pub fn importance(&self) -> Importance {
+        self.get().level.importance()
+    }
+
+    /// Returns whether this node has any child, only intended for testing
+    /// purposes, and called in a single-threaded fashion only.
+    pub unsafe fn has_children_for_testing(&self) -> bool {
+        !self.get().children.read().is_empty()
+    }
+
+    unsafe fn pop_from_free_list(&self) -> Option<WeakRuleNode> {
+        // NB: This can run from the root node destructor, so we can't use
+        // `get()`, since it asserts the refcount is bigger than zero.
+        let me = &*self.p.as_ptr();
+
+        debug_assert!(me.is_root());
+
+        // FIXME(#14213): Apparently the layout data can be gone from script.
+        //
+        // That's... suspicious, but it's fine if it happens for the rule tree
+        // case, so just don't crash in the case we're doing the final GC in
+        // script.
+
+        debug_assert!(
+            !thread_state::get().is_worker() &&
+                (thread_state::get().is_layout() || thread_state::get().is_script())
+        );
+
+        let current = me.next_free.load(Ordering::Relaxed);
+        if current == FREE_LIST_SENTINEL {
+            return None;
+        }
+
+        debug_assert!(
+            !current.is_null(),
+            "Multiple threads are operating on the free list at the \
+             same time?"
+        );
+        debug_assert!(
+            current != self.p.as_ptr(),
+            "How did the root end up in the free list?"
+        );
+
+        let next = (*current)
+            .next_free
+            .swap(ptr::null_mut(), Ordering::Relaxed);
+
+        debug_assert!(
+            !next.is_null(),
+            "How did a null pointer end up in the free list?"
+        );
+
+        me.next_free.store(next, Ordering::Relaxed);
+
+        debug!(
+            "Popping from free list: cur: {:?}, next: {:?}",
+            current, next
+        );
+
+        Some(WeakRuleNode::from_ptr(ptr::NonNull::new_unchecked(current)))
+    }
+
+    unsafe fn assert_free_list_has_no_duplicates_or_null(&self) {
+        assert!(cfg!(debug_assertions), "This is an expensive check!");
+        use crate::hash::FxHashSet;
+
+        let me = &*self.p.as_ptr();
+        assert!(me.is_root());
+
+        let mut current = self.p.as_ptr();
+        let mut seen = FxHashSet::default();
+        while current != FREE_LIST_SENTINEL {
+            let next = (*current).next_free.load(Ordering::Relaxed);
+            assert!(!next.is_null());
+            assert!(!seen.contains(&next));
+            seen.insert(next);
+
+            current = next;
+        }
+    }
+
+    unsafe fn gc(&self) {
+        if cfg!(debug_assertions) {
+            self.assert_free_list_has_no_duplicates_or_null();
+        }
+
+        // NB: This can run from the root node destructor, so we can't use
+        // `get()`, since it asserts the refcount is bigger than zero.
+        let me = &*self.p.as_ptr();
+
+        debug_assert!(me.is_root(), "Can't call GC on a non-root node!");
+
+        while let Some(weak) = self.pop_from_free_list() {
+            let node = &*weak.p.as_ptr();
+            if node.refcount.load(Ordering::Relaxed) != 0 {
+                // Nothing to do, the node is still alive.
+                continue;
+            }
+
+            debug!("GC'ing {:?}", weak.p.as_ptr());
+            node.remove_from_child_list();
+            log_drop(weak.p.as_ptr());
+            let _ = Box::from_raw(weak.p.as_ptr());
+        }
+
+        me.free_count().store(0, Ordering::Relaxed);
+
+        debug_assert_eq!(me.next_free.load(Ordering::Relaxed), FREE_LIST_SENTINEL);
+    }
+
+    unsafe fn maybe_gc(&self) {
+        debug_assert!(self.get().is_root(), "Can't call GC on a non-root node!");
+        if self.get().free_count().load(Ordering::Relaxed) > RULE_TREE_GC_INTERVAL {
+            self.gc();
+        }
+    }
+
+    pub(super) fn dump<W: Write>(&self, guards: &StylesheetGuards, writer: &mut W, indent: usize) {
+        const INDENT_INCREMENT: usize = 4;
+
+        for _ in 0..indent {
+            let _ = write!(writer, " ");
+        }
+
+        let _ = writeln!(
+            writer,
+            " - {:?} (ref: {:?}, parent: {:?})",
+            self.get() as *const _,
+            self.get().refcount.load(Ordering::Relaxed),
+            self.parent().map(|p| p.ptr())
+        );
+
+        for _ in 0..indent {
+            let _ = write!(writer, " ");
+        }
+
+        if let Some(source) = self.style_source() {
+            source.dump(self.cascade_level().guard(guards), writer);
+        } else {
+            if indent != 0 {
+                warn!("How has this happened?");
+            }
+            let _ = write!(writer, "(root)");
+        }
+
+        let _ = write!(writer, "\n");
+        for child in &*self.get().children.read() {
+            child
+                .upgrade()
+                .dump(guards, writer, indent + INDENT_INCREMENT);
+        }
+    }
+}
+
+impl Clone for StrongRuleNode {
+    fn clone(&self) -> Self {
+        debug!(
+            "{:?}: {:?}+",
+            self.ptr(),
+            self.get().refcount.load(Ordering::Relaxed)
+        );
+        debug_assert!(self.get().refcount.load(Ordering::Relaxed) > 0);
+        self.get().refcount.fetch_add(1, Ordering::Relaxed);
+        StrongRuleNode::from_ptr(self.p)
+    }
+}
+
+impl Drop for StrongRuleNode {
+    #[cfg_attr(feature = "servo", allow(unused_mut))]
+    fn drop(&mut self) {
+        let node = unsafe { &*self.ptr() };
+
+        debug!(
+            "{:?}: {:?}-",
+            self.ptr(),
+            node.refcount.load(Ordering::Relaxed)
+        );
+        debug!(
+            "Dropping node: {:?}, root: {:?}, parent: {:?}",
+            self.ptr(),
+            node.root.as_ref().map(|r| r.ptr()),
+            node.parent.as_ref().map(|p| p.ptr())
+        );
+        let should_drop = {
+            debug_assert!(node.refcount.load(Ordering::Relaxed) > 0);
+            node.refcount.fetch_sub(1, Ordering::Relaxed) == 1
+        };
+
+        if !should_drop {
+            return;
+        }
+
+        if node.parent.is_none() {
+            debug!("Dropping root node!");
+            // The free list should be null by this point
+            debug_assert!(node.next_free.load(Ordering::Relaxed).is_null());
+            log_drop(self.ptr());
+            let _ = unsafe { Box::from_raw(self.ptr()) };
+            return;
+        }
+
+        let root = unsafe { &*node.root.as_ref().unwrap().ptr() };
+        let free_list = &root.next_free;
+        let mut old_head = free_list.load(Ordering::Relaxed);
+
+        // If the free list is null, that means that the rule tree has been
+        // formally torn down, and the last standard GC has already occurred.
+        // We require that any callers using the rule tree at this point are
+        // on the main thread only, which lets us trigger a synchronous GC
+        // here to avoid leaking anything. We use the GC machinery, rather
+        // than just dropping directly, so that we benefit from the iterative
+        // destruction and don't trigger unbounded recursion during drop. See
+        // [1] and the associated crashtest.
+        //
+        // [1] https://bugzilla.mozilla.org/show_bug.cgi?id=439184
+        if old_head.is_null() {
+            debug_assert!(
+                !thread_state::get().is_worker() &&
+                    (thread_state::get().is_layout() || thread_state::get().is_script())
+            );
+            // Add the node as the sole entry in the free list.
+            debug_assert!(node.next_free.load(Ordering::Relaxed).is_null());
+            node.next_free.store(FREE_LIST_SENTINEL, Ordering::Relaxed);
+            free_list.store(node as *const _ as *mut _, Ordering::Relaxed);
+
+            // Invoke the GC.
+            //
+            // Note that we need to hold a strong reference to the root so that
+            // it doesn't go away during the GC (which would happen if we're
+            // freeing the last external reference into the rule tree). This is
+            // nicely enforced by having the gc() method live on StrongRuleNode
+            // rather than RuleNode.
+            let strong_root: StrongRuleNode = node.root.as_ref().unwrap().upgrade();
+            unsafe {
+                strong_root.gc();
+            }
+
+            // Leave the free list null, like we found it, such that additional
+            // drops for straggling rule nodes will take this same codepath.
+            debug_assert_eq!(root.next_free.load(Ordering::Relaxed), FREE_LIST_SENTINEL);
+            root.next_free.store(ptr::null_mut(), Ordering::Relaxed);
+
+            // Return. If strong_root is the last strong reference to the root,
+            // this will re-enter StrongRuleNode::drop, and take the
+            // root-dropping path earlier in this function.
+            return;
+        }
+
+        // If we're already in the free list, don't spinloop trying to push
+        // ourselves again. Note that this is just a fast path, so it doesn't
+        // need to have a strong memory ordering.
+        if node.next_free.load(Ordering::Relaxed) != ptr::null_mut() {
+            return;
+        }
+
+        // Ensure we "lock" the free list head by swapping it with
+        // FREE_LIST_LOCKED.
+        //
+        // Note that we use Acquire/Release semantics for the free list
+        // synchronization, in order to guarantee that the next_free
+        // reads/writes we do below are properly visible from multiple threads
+        // racing.
+        loop {
+            match free_list.compare_exchange_weak(
+                old_head,
+                FREE_LIST_LOCKED,
+                Ordering::Acquire,
+                Ordering::Relaxed,
+            ) {
+                Ok(..) => {
+                    if old_head != FREE_LIST_LOCKED {
+                        break;
+                    }
+                },
+                Err(new) => old_head = new,
+            }
+        }
+
+        // If another thread has raced with us while using the same rule node,
+        // just store the old head again, we're done.
+        //
+        // Note that we can use relaxed operations for loading since we're
+        // effectively locking the free list with Acquire/Release semantics, and
+        // the memory ordering is already guaranteed by that locking/unlocking.
+        if node.next_free.load(Ordering::Relaxed) != ptr::null_mut() {
+            free_list.store(old_head, Ordering::Release);
+            return;
+        }
+
+        // Else store the old head as the next pointer, and store ourselves as
+        // the new head of the free list.
+        //
+        // This can be relaxed since this pointer won't be read until GC.
+        node.next_free.store(old_head, Ordering::Relaxed);
+
+        // Increment the free count. This doesn't need to be an atomic
+        // read-modify-write operation, because the free list is "locked".
+        let old_free_count = root.free_count().load(Ordering::Relaxed);
+        root.free_count()
+            .store(old_free_count + 1, Ordering::Relaxed);
+
+        // This can be release because of the locking of the free list, which
+        // ensures that all the other nodes racing with this one are using
+        // `Acquire`.
+        free_list.store(self.ptr(), Ordering::Release);
+    }
+}
+
+impl<'a> From<&'a StrongRuleNode> for WeakRuleNode {
+    fn from(node: &'a StrongRuleNode) -> Self {
+        WeakRuleNode::from_ptr(node.p)
+    }
+}
+
+impl WeakRuleNode {
+    #[inline]
+    fn ptr(&self) -> *mut RuleNode {
+        self.p.as_ptr()
+    }
+
+    fn upgrade(&self) -> StrongRuleNode {
+        debug!("Upgrading weak node: {:p}", self.ptr());
+
+        let node = unsafe { &*self.ptr() };
+        node.refcount.fetch_add(1, Ordering::Relaxed);
+        StrongRuleNode::from_ptr(self.p)
+    }
+
+    fn from_ptr(p: ptr::NonNull<RuleNode>) -> Self {
+        WeakRuleNode { p }
+    }
+}
diff --git a/components/style/rule_tree/mod.rs b/components/style/rule_tree/mod.rs
index f9f9500144f..5bf66ad563f 100644
--- a/components/style/rule_tree/mod.rs
+++ b/components/style/rule_tree/mod.rs
@@ -2,195 +2,28 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at https://mozilla.org/MPL/2.0/. */
 
-#![allow(unsafe_code)]
-
 //! The rule tree.
 
 use crate::applicable_declarations::ApplicableDeclarationList;
-use crate::hash::FxHashMap;
-use crate::properties::{Importance, LonghandIdSet, PropertyDeclarationBlock};
-use crate::shared_lock::{Locked, SharedRwLockReadGuard, StylesheetGuards};
-use crate::stylesheets::StyleRule;
-use crate::thread_state;
-use malloc_size_of::{MallocShallowSizeOf, MallocSizeOf, MallocSizeOfOps};
-use parking_lot::RwLock;
-use servo_arc::{Arc, ArcBorrow, ArcUnion, ArcUnionBorrow};
+use crate::properties::{LonghandIdSet, PropertyDeclarationBlock};
+use crate::shared_lock::{Locked, StylesheetGuards};
+use servo_arc::{Arc, ArcBorrow};
 use smallvec::SmallVec;
 use std::io::{self, Write};
-use std::mem;
-use std::ptr;
-use std::sync::atomic::{AtomicPtr, AtomicUsize, Ordering};
 
+mod core;
 mod level;
 mod map;
+mod source;
 
+pub use self::core::{RuleTree, StrongRuleNode};
 pub use self::level::{CascadeLevel, ShadowCascadeOrder};
-use self::map::Map;
-
-/// The rule tree, the structure servo uses to preserve the results of selector
-/// matching.
-///
-/// This is organized as a tree of rules. When a node matches a set of rules,
-/// they're inserted in order in the tree, starting with the less specific one.
-///
-/// When a rule is inserted in the tree, other elements may share the path up to
-/// a given rule. If that's the case, we don't duplicate child nodes, but share
-/// them.
-///
-/// When the rule node refcount drops to zero, it doesn't get freed. It gets
-/// instead put into a free list, and it is potentially GC'd after a while in a
-/// single-threaded fashion.
-///
-/// That way, a rule node that represents a likely-to-match-again rule (like a
-/// :hover rule) can be reused if we haven't GC'd it yet.
-///
-/// See the discussion at https://github.com/servo/servo/pull/15562 and the IRC
-/// logs from http://logs.glob.uno/?c=mozilla%23servo&s=3+Apr+2017&e=3+Apr+2017#c644094
-/// to see a discussion about the different memory orderings used here.
-#[derive(Debug)]
-pub struct RuleTree {
-    root: StrongRuleNode,
-}
-
-impl Drop for RuleTree {
-    fn drop(&mut self) {
-        // GC the rule tree.
-        unsafe {
-            self.gc();
-        }
-
-        // After the GC, the free list should be empty.
-        debug_assert_eq!(
-            self.root.get().next_free.load(Ordering::Relaxed),
-            FREE_LIST_SENTINEL
-        );
-
-        // Remove the sentinel. This indicates that GCs will no longer occur.
-        // Any further drops of StrongRuleNodes must occur on the main thread,
-        // and will trigger synchronous dropping of the Rule nodes.
-        self.root
-            .get()
-            .next_free
-            .store(ptr::null_mut(), Ordering::Relaxed);
-    }
-}
-
-impl MallocSizeOf for RuleTree {
-    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
-        let mut n = 0;
-        let mut stack = SmallVec::<[_; 32]>::new();
-        stack.push(self.root.downgrade());
-
-        while let Some(node) = stack.pop() {
-            n += unsafe { ops.malloc_size_of(node.ptr()) };
-            let children = unsafe { (*node.ptr()).children.read() };
-            children.shallow_size_of(ops);
-            for c in &*children {
-                stack.push(c.clone());
-            }
-        }
-
-        n
-    }
-}
-
-#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
-struct ChildKey(CascadeLevel, ptr::NonNull<()>);
-
-unsafe impl Send for ChildKey {}
-unsafe impl Sync for ChildKey {}
-
-/// A style source for the rule node. It can either be a CSS style rule or a
-/// declaration block.
-///
-/// Note that, even though the declaration block from inside the style rule
-/// could be enough to implement the rule tree, keeping the whole rule provides
-/// more debuggability, and also the ability to show those selectors to
-/// devtools.
-#[derive(Clone, Debug)]
-pub struct StyleSource(ArcUnion<Locked<StyleRule>, Locked<PropertyDeclarationBlock>>);
-
-impl PartialEq for StyleSource {
-    fn eq(&self, other: &Self) -> bool {
-        ArcUnion::ptr_eq(&self.0, &other.0)
-    }
-}
-
-impl StyleSource {
-    /// Creates a StyleSource from a StyleRule.
-    pub fn from_rule(rule: Arc<Locked<StyleRule>>) -> Self {
-        StyleSource(ArcUnion::from_first(rule))
-    }
-
-    #[inline]
-    fn key(&self) -> ptr::NonNull<()> {
-        self.0.ptr()
-    }
-
-    /// Creates a StyleSource from a PropertyDeclarationBlock.
-    pub fn from_declarations(decls: Arc<Locked<PropertyDeclarationBlock>>) -> Self {
-        StyleSource(ArcUnion::from_second(decls))
-    }
-
-    fn dump<W: Write>(&self, guard: &SharedRwLockReadGuard, writer: &mut W) {
-        if let Some(ref rule) = self.0.as_first() {
-            let rule = rule.read_with(guard);
-            let _ = write!(writer, "{:?}", rule.selectors);
-        }
-
-        let _ = write!(writer, " -> {:?}", self.read(guard).declarations());
-    }
-
-    /// Read the style source guard, and thus obtain read access to the
-    /// underlying property declaration block.
-    #[inline]
-    pub fn read<'a>(&'a self, guard: &'a SharedRwLockReadGuard) -> &'a PropertyDeclarationBlock {
-        let block: &Locked<PropertyDeclarationBlock> = match self.0.borrow() {
-            ArcUnionBorrow::First(ref rule) => &rule.get().read_with(guard).block,
-            ArcUnionBorrow::Second(ref block) => block.get(),
-        };
-        block.read_with(guard)
-    }
-
-    /// Returns the style rule if applicable, otherwise None.
-    pub fn as_rule(&self) -> Option<ArcBorrow<Locked<StyleRule>>> {
-        self.0.as_first()
-    }
-
-    /// Returns the declaration block if applicable, otherwise None.
-    pub fn as_declarations(&self) -> Option<ArcBorrow<Locked<PropertyDeclarationBlock>>> {
-        self.0.as_second()
-    }
-}
-
-/// This value exists here so a node that pushes itself to the list can know
-/// that it is in the free list by looking at its next pointer, and comparing
-/// it with null.
-///
-/// The root node doesn't have a null pointer in the free list, but this value.
-const FREE_LIST_SENTINEL: *mut RuleNode = 0x01 as *mut RuleNode;
-
-/// A second sentinel value for the free list, indicating that it's locked (i.e.
-/// another thread is currently adding an entry). We spin if we find this value.
-const FREE_LIST_LOCKED: *mut RuleNode = 0x02 as *mut RuleNode;
+pub use self::source::StyleSource;
 
 impl RuleTree {
-    /// Construct a new rule tree.
-    pub fn new() -> Self {
-        RuleTree {
-            root: StrongRuleNode::new(Box::new(RuleNode::root())),
-        }
-    }
-
-    /// Get the root rule node.
- pub fn root(&self) -> &StrongRuleNode { - &self.root - } - fn dump(&self, guards: &StylesheetGuards, writer: &mut W) { let _ = writeln!(writer, " + RuleTree"); - self.root.get().dump(guards, writer, 0); + self.root().dump(guards, writer, 0); } /// Dump the rule tree to stdout. @@ -214,7 +47,7 @@ impl RuleTree { I: Iterator, { use self::CascadeLevel::*; - let mut current = self.root.clone(); + let mut current = self.root().clone(); let mut found_important = false; @@ -262,7 +95,7 @@ impl RuleTree { debug_assert!(transition.is_none()); transition = Some(source); } else { - current = current.ensure_child(self.root.downgrade(), source, level); + current = current.ensure_child(self.root().downgrade(), source, level); } } @@ -298,7 +131,7 @@ impl RuleTree { for (source, shadow_cascade_order) in important_author.drain(..) { current = current.ensure_child( - self.root.downgrade(), + self.root().downgrade(), source, AuthorImportant { shadow_cascade_order: -shadow_cascade_order, @@ -307,15 +140,15 @@ impl RuleTree { } for source in important_user.drain(..) { - current = current.ensure_child(self.root.downgrade(), source, UserImportant); + current = current.ensure_child(self.root().downgrade(), source, UserImportant); } for source in important_ua.drain(..) { - current = current.ensure_child(self.root.downgrade(), source, UAImportant); + current = current.ensure_child(self.root().downgrade(), source, UAImportant); } if let Some(source) = transition { - current = current.ensure_child(self.root.downgrade(), source, Transitions); + current = current.ensure_child(self.root().downgrade(), source, Transitions); } current @@ -340,7 +173,7 @@ impl RuleTree { where I: Iterator, { - self.insert_ordered_rules_from(self.root.clone(), iter) + self.insert_ordered_rules_from(self.root().clone(), iter) } fn insert_ordered_rules_from<'a, I>(&self, from: StrongRuleNode, iter: I) -> StrongRuleNode @@ -349,72 +182,11 @@ impl RuleTree { { let mut current = from; for (source, level) in iter { - current = current.ensure_child(self.root.downgrade(), source, level); + current = current.ensure_child(self.root().downgrade(), source, level); } current } - /// This can only be called when no other threads is accessing this tree. - pub unsafe fn gc(&self) { - self.root.gc(); - } - - /// This can only be called when no other threads is accessing this tree. - pub unsafe fn maybe_gc(&self) { - #[cfg(debug_assertions)] - self.maybe_dump_stats(); - - self.root.maybe_gc(); - } - - #[cfg(debug_assertions)] - fn maybe_dump_stats(&self) { - use itertools::Itertools; - use std::cell::Cell; - use std::time::{Duration, Instant}; - - if !log_enabled!(log::Level::Trace) { - return; - } - - const RULE_TREE_STATS_INTERVAL: Duration = Duration::from_secs(2); - - thread_local! 
{ - pub static LAST_STATS: Cell = Cell::new(Instant::now()); - }; - - let should_dump = LAST_STATS.with(|s| { - let now = Instant::now(); - if now.duration_since(s.get()) < RULE_TREE_STATS_INTERVAL { - return false; - } - s.set(now); - true - }); - - if !should_dump { - return; - } - - let mut children_count = FxHashMap::default(); - - let mut stack = SmallVec::<[_; 32]>::new(); - stack.push(self.root.clone()); - while let Some(node) = stack.pop() { - let children = node.get().children.read(); - *children_count.entry(children.len()).or_insert(0) += 1; - for c in &*children { - stack.push(c.upgrade()); - } - } - - trace!("Rule tree stats:"); - let counts = children_count.keys().sorted(); - for count in counts { - trace!(" {} - {}", count, children_count[count]); - } - } - /// Replaces a rule in a given level (if present) for another rule. /// /// Returns the resulting node that represents the new path, or None if @@ -434,10 +206,10 @@ impl RuleTree { // First walk up until the first less-or-equally specific rule. let mut children = SmallVec::<[_; 10]>::new(); - while current.get().level > level { + while current.cascade_level() > level { children.push(( - current.get().source.as_ref().unwrap().clone(), - current.get().level, + current.style_source().unwrap().clone(), + current.cascade_level(), )); current = current.parent().unwrap().clone(); } @@ -452,10 +224,10 @@ impl RuleTree { // to special-case (isn't hard, it's just about removing the `if` and // special cases, and replacing them for a `while` loop, avoiding the // optimizations). - if current.get().level == level { + if current.cascade_level() == level { *important_rules_changed |= level.is_important(); - let current_decls = current.get().source.as_ref().unwrap().as_declarations(); + let current_decls = current.style_source().unwrap().as_declarations(); // If the only rule at the level we're replacing is exactly the // same as `pdb`, we're done, and `path` is still valid. @@ -490,7 +262,7 @@ impl RuleTree { if level.is_important() { if pdb.read_with(level.guard(guards)).any_important() { current = current.ensure_child( - self.root.downgrade(), + self.root().downgrade(), StyleSource::from_declarations(pdb.clone_arc()), level, ); @@ -499,7 +271,7 @@ impl RuleTree { } else { if pdb.read_with(level.guard(guards)).any_normal() { current = current.ensure_child( - self.root.downgrade(), + self.root().downgrade(), StyleSource::from_declarations(pdb.clone_arc()), level, ); @@ -537,10 +309,7 @@ impl RuleTree { let mut children = SmallVec::<[_; 10]>::new(); for node in iter { if !node.cascade_level().is_animation() { - children.push(( - node.get().source.as_ref().unwrap().clone(), - node.cascade_level(), - )); + children.push((node.style_source().unwrap().clone(), node.cascade_level())); } last = node; } @@ -573,320 +342,7 @@ impl RuleTree { } } -/// The number of RuleNodes added to the free list before we will consider -/// doing a GC when calling maybe_gc(). (The value is copied from Gecko, -/// where it likely did not result from a rigorous performance analysis.) -const RULE_TREE_GC_INTERVAL: usize = 300; - -/// A node in the rule tree. -pub struct RuleNode { - /// The root node. Only the root has no root pointer, for obvious reasons. - root: Option, - - /// The parent rule node. Only the root has no parent. - parent: Option, - - /// The actual style source, either coming from a selector in a StyleRule, - /// or a raw property declaration block (like the style attribute). - /// - /// None for the root node. 
- source: Option, - - /// The cascade level this rule is positioned at. - level: CascadeLevel, - - refcount: AtomicUsize, - - /// Only used for the root, stores the number of free rule nodes that are - /// around. - free_count: AtomicUsize, - - /// The children of a given rule node. Children remove themselves from here - /// when they go away. - children: RwLock>, - - /// The next item in the rule tree free list, that starts on the root node. - /// - /// When this is set to null, that means that the rule tree has been torn - /// down, and GCs will no longer occur. When this happens, StrongRuleNodes - /// may only be dropped on the main thread, and teardown happens - /// synchronously. - next_free: AtomicPtr, -} - -unsafe impl Sync for RuleTree {} -unsafe impl Send for RuleTree {} - -// On Gecko builds, hook into the leak checking machinery. -#[cfg(feature = "gecko_refcount_logging")] -mod gecko_leak_checking { - use super::RuleNode; - use std::mem::size_of; - use std::os::raw::{c_char, c_void}; - - extern "C" { - fn NS_LogCtor(aPtr: *mut c_void, aTypeName: *const c_char, aSize: u32); - fn NS_LogDtor(aPtr: *mut c_void, aTypeName: *const c_char, aSize: u32); - } - - static NAME: &'static [u8] = b"RuleNode\0"; - - /// Logs the creation of a heap-allocated object to Gecko's leak-checking machinery. - pub fn log_ctor(ptr: *const RuleNode) { - let s = NAME as *const [u8] as *const u8 as *const c_char; - unsafe { - NS_LogCtor(ptr as *mut c_void, s, size_of::() as u32); - } - } - - /// Logs the destruction of a heap-allocated object to Gecko's leak-checking machinery. - pub fn log_dtor(ptr: *const RuleNode) { - let s = NAME as *const [u8] as *const u8 as *const c_char; - unsafe { - NS_LogDtor(ptr as *mut c_void, s, size_of::() as u32); - } - } -} - -#[inline(always)] -fn log_new(_ptr: *const RuleNode) { - #[cfg(feature = "gecko_refcount_logging")] - gecko_leak_checking::log_ctor(_ptr); -} - -#[inline(always)] -fn log_drop(_ptr: *const RuleNode) { - #[cfg(feature = "gecko_refcount_logging")] - gecko_leak_checking::log_dtor(_ptr); -} - -impl RuleNode { - fn new( - root: WeakRuleNode, - parent: StrongRuleNode, - source: StyleSource, - level: CascadeLevel, - ) -> Self { - debug_assert!(root.upgrade().parent().is_none()); - RuleNode { - root: Some(root), - parent: Some(parent), - source: Some(source), - level: level, - refcount: AtomicUsize::new(1), - children: Default::default(), - free_count: AtomicUsize::new(0), - next_free: AtomicPtr::new(ptr::null_mut()), - } - } - - fn root() -> Self { - RuleNode { - root: None, - parent: None, - source: None, - level: CascadeLevel::UANormal, - refcount: AtomicUsize::new(1), - free_count: AtomicUsize::new(0), - children: Default::default(), - next_free: AtomicPtr::new(FREE_LIST_SENTINEL), - } - } - - fn key(&self) -> ChildKey { - ChildKey( - self.level, - self.source - .as_ref() - .expect("Called key() on the root node") - .key(), - ) - } - - fn is_root(&self) -> bool { - self.parent.is_none() - } - - fn free_count(&self) -> &AtomicUsize { - debug_assert!(self.is_root()); - &self.free_count - } - - /// Remove this rule node from the child list. - /// - /// This is expected to be called before freeing the node from the free - /// list, on the main thread. 
- unsafe fn remove_from_child_list(&self) { - debug!( - "Remove from child list: {:?}, parent: {:?}", - self as *const RuleNode, - self.parent.as_ref().map(|p| p.ptr()) - ); - - if let Some(parent) = self.parent.as_ref() { - let weak = parent - .get() - .children - .write() - .remove(&self.key(), |node| (*node.ptr()).key()); - assert_eq!(weak.unwrap().ptr() as *const _, self as *const _); - } - } - - fn dump(&self, guards: &StylesheetGuards, writer: &mut W, indent: usize) { - const INDENT_INCREMENT: usize = 4; - - for _ in 0..indent { - let _ = write!(writer, " "); - } - - let _ = writeln!( - writer, - " - {:?} (ref: {:?}, parent: {:?})", - self as *const _, - self.refcount.load(Ordering::Relaxed), - self.parent.as_ref().map(|p| p.ptr()) - ); - - for _ in 0..indent { - let _ = write!(writer, " "); - } - - if self.source.is_some() { - self.source - .as_ref() - .unwrap() - .dump(self.level.guard(guards), writer); - } else { - if indent != 0 { - warn!("How has this happened?"); - } - let _ = write!(writer, "(root)"); - } - - let _ = write!(writer, "\n"); - for child in &*self.children.read() { - child - .upgrade() - .get() - .dump(guards, writer, indent + INDENT_INCREMENT); - } - } -} - -#[derive(Clone)] -struct WeakRuleNode { - p: ptr::NonNull, -} - -/// A strong reference to a rule node. -#[derive(Debug, Eq, Hash, PartialEq)] -pub struct StrongRuleNode { - p: ptr::NonNull, -} - -unsafe impl Send for StrongRuleNode {} -unsafe impl Sync for StrongRuleNode {} - -#[cfg(feature = "servo")] -malloc_size_of_is_0!(StrongRuleNode); - impl StrongRuleNode { - fn new(n: Box) -> Self { - debug_assert_eq!(n.parent.is_none(), !n.source.is_some()); - - // TODO(emilio): Use into_raw_non_null when it's stable. - let ptr = unsafe { ptr::NonNull::new_unchecked(Box::into_raw(n)) }; - log_new(ptr.as_ptr()); - - debug!("Creating rule node: {:p}", ptr); - - StrongRuleNode::from_ptr(ptr) - } - - fn from_ptr(p: ptr::NonNull) -> Self { - StrongRuleNode { p } - } - - fn downgrade(&self) -> WeakRuleNode { - WeakRuleNode::from_ptr(self.p) - } - - /// Get the parent rule node of this rule node. - pub fn parent(&self) -> Option<&StrongRuleNode> { - self.get().parent.as_ref() - } - - fn ensure_child( - &self, - root: WeakRuleNode, - source: StyleSource, - level: CascadeLevel, - ) -> StrongRuleNode { - use parking_lot::RwLockUpgradableReadGuard; - - debug_assert!( - self.get().level <= level, - "Should be ordered (instead {:?} > {:?}), from {:?} and {:?}", - self.get().level, - level, - self.get().source, - source, - ); - - let key = ChildKey(level, source.key()); - - let children = self.get().children.upgradable_read(); - if let Some(child) = children.get(&key, |node| unsafe { (*node.ptr()).key() }) { - return child.upgrade(); - } - let mut children = RwLockUpgradableReadGuard::upgrade(children); - let weak = children.get_or_insert_with( - key, - |node| unsafe { (*node.ptr()).key() }, - move || { - let strong = - StrongRuleNode::new(Box::new(RuleNode::new(root, self.clone(), source, level))); - let weak = strong.downgrade(); - mem::forget(strong); - weak - }, - ); - - StrongRuleNode::from_ptr(weak.p) - } - - /// Raw pointer to the RuleNode - #[inline] - pub fn ptr(&self) -> *mut RuleNode { - self.p.as_ptr() - } - - fn get(&self) -> &RuleNode { - if cfg!(debug_assertions) { - let node = unsafe { &*self.p.as_ptr() }; - assert!(node.refcount.load(Ordering::Relaxed) > 0); - } - unsafe { &*self.p.as_ptr() } - } - - /// Get the style source corresponding to this rule node. 
May return `None` - /// if it's the root node, which means that the node hasn't matched any - /// rules. - pub fn style_source(&self) -> Option<&StyleSource> { - self.get().source.as_ref() - } - - /// The cascade level for this node - pub fn cascade_level(&self) -> CascadeLevel { - self.get().level - } - - /// Get the importance that this rule node represents. - pub fn importance(&self) -> Importance { - self.get().level.importance() - } - /// Get an iterator for this rule node and its ancestors. pub fn self_and_ancestors(&self) -> SelfAndAncestors { SelfAndAncestors { @@ -894,117 +350,196 @@ impl StrongRuleNode { } } - /// Returns whether this node has any child, only intended for testing - /// purposes, and called on a single-threaded fashion only. - pub unsafe fn has_children_for_testing(&self) -> bool { - !self.get().children.read().is_empty() - } + /// Returns true if any properties specified by `rule_type_mask` was set by + /// an author rule. + #[cfg(feature = "gecko")] + pub fn has_author_specified_rules( + &self, + mut element: E, + mut pseudo: Option, + guards: &StylesheetGuards, + rule_type_mask: u32, + author_colors_allowed: bool, + ) -> bool + where + E: crate::dom::TElement, + { + use crate::gecko_bindings::structs::NS_AUTHOR_SPECIFIED_BACKGROUND; + use crate::gecko_bindings::structs::NS_AUTHOR_SPECIFIED_BORDER; + use crate::gecko_bindings::structs::NS_AUTHOR_SPECIFIED_PADDING; + use crate::properties::{CSSWideKeyword, LonghandId}; + use crate::properties::{PropertyDeclaration, PropertyDeclarationId}; + use std::borrow::Cow; - unsafe fn pop_from_free_list(&self) -> Option { - // NB: This can run from the root node destructor, so we can't use - // `get()`, since it asserts the refcount is bigger than zero. - let me = &*self.p.as_ptr(); + // Reset properties: + const BACKGROUND_PROPS: &'static [LonghandId] = + &[LonghandId::BackgroundColor, LonghandId::BackgroundImage]; - debug_assert!(me.is_root()); + const BORDER_PROPS: &'static [LonghandId] = &[ + LonghandId::BorderTopColor, + LonghandId::BorderTopStyle, + LonghandId::BorderTopWidth, + LonghandId::BorderRightColor, + LonghandId::BorderRightStyle, + LonghandId::BorderRightWidth, + LonghandId::BorderBottomColor, + LonghandId::BorderBottomStyle, + LonghandId::BorderBottomWidth, + LonghandId::BorderLeftColor, + LonghandId::BorderLeftStyle, + LonghandId::BorderLeftWidth, + LonghandId::BorderTopLeftRadius, + LonghandId::BorderTopRightRadius, + LonghandId::BorderBottomRightRadius, + LonghandId::BorderBottomLeftRadius, + LonghandId::BorderInlineStartColor, + LonghandId::BorderInlineStartStyle, + LonghandId::BorderInlineStartWidth, + LonghandId::BorderInlineEndColor, + LonghandId::BorderInlineEndStyle, + LonghandId::BorderInlineEndWidth, + LonghandId::BorderBlockStartColor, + LonghandId::BorderBlockStartStyle, + LonghandId::BorderBlockStartWidth, + LonghandId::BorderBlockEndColor, + LonghandId::BorderBlockEndStyle, + LonghandId::BorderBlockEndWidth, + ]; - // FIXME(#14213): Apparently the layout data can be gone from script. - // - // That's... suspicious, but it's fine if it happens for the rule tree - // case, so just don't crash in the case we're doing the final GC in - // script. 
+ const PADDING_PROPS: &'static [LonghandId] = &[ + LonghandId::PaddingTop, + LonghandId::PaddingRight, + LonghandId::PaddingBottom, + LonghandId::PaddingLeft, + LonghandId::PaddingInlineStart, + LonghandId::PaddingInlineEnd, + LonghandId::PaddingBlockStart, + LonghandId::PaddingBlockEnd, + ]; - debug_assert!( - !thread_state::get().is_worker() && - (thread_state::get().is_layout() || thread_state::get().is_script()) - ); + // Set of properties that we are currently interested in. + let mut properties = LonghandIdSet::new(); - let current = me.next_free.load(Ordering::Relaxed); - if current == FREE_LIST_SENTINEL { - return None; + if rule_type_mask & NS_AUTHOR_SPECIFIED_BACKGROUND != 0 { + for id in BACKGROUND_PROPS { + properties.insert(*id); + } + } + if rule_type_mask & NS_AUTHOR_SPECIFIED_BORDER != 0 { + for id in BORDER_PROPS { + properties.insert(*id); + } + } + if rule_type_mask & NS_AUTHOR_SPECIFIED_PADDING != 0 { + for id in PADDING_PROPS { + properties.insert(*id); + } } - debug_assert!( - !current.is_null(), - "Multiple threads are operating on the free list at the \ - same time?" - ); - debug_assert!( - current != self.p.as_ptr(), - "How did the root end up in the free list?" - ); - - let next = (*current) - .next_free - .swap(ptr::null_mut(), Ordering::Relaxed); - - debug_assert!( - !next.is_null(), - "How did a null pointer end up in the free list?" - ); - - me.next_free.store(next, Ordering::Relaxed); - - debug!( - "Popping from free list: cur: {:?}, next: {:?}", - current, next - ); - - Some(WeakRuleNode::from_ptr(ptr::NonNull::new_unchecked(current))) - } - - unsafe fn assert_free_list_has_no_duplicates_or_null(&self) { - assert!(cfg!(debug_assertions), "This is an expensive check!"); - use crate::hash::FxHashSet; - - let me = &*self.p.as_ptr(); - assert!(me.is_root()); - - let mut current = self.p.as_ptr(); - let mut seen = FxHashSet::default(); - while current != FREE_LIST_SENTINEL { - let next = (*current).next_free.load(Ordering::Relaxed); - assert!(!next.is_null()); - assert!(!seen.contains(&next)); - seen.insert(next); - - current = next; - } - } - - unsafe fn gc(&self) { - if cfg!(debug_assertions) { - self.assert_free_list_has_no_duplicates_or_null(); + // If author colors are not allowed, don't look at those properties + // (except for background-color which is special and we handle below). + if !author_colors_allowed { + properties.remove_all(LonghandIdSet::ignored_when_colors_disabled()); + if rule_type_mask & NS_AUTHOR_SPECIFIED_BACKGROUND != 0 { + properties.insert(LonghandId::BackgroundColor); + } } - // NB: This can run from the root node destructor, so we can't use - // `get()`, since it asserts the refcount is bigger than zero. - let me = &*self.p.as_ptr(); + let mut element_rule_node = Cow::Borrowed(self); - debug_assert!(me.is_root(), "Can't call GC on a non-root node!"); + loop { + // We need to be careful not to count styles covered up by + // user-important or UA-important declarations. But we do want to + // catch explicit inherit styling in those and check our parent + // element to see whether we have user styling for those properties. + // Note that we don't care here about inheritance due to lack of a + // specified value, since all the properties we care about are reset + // properties. - while let Some(weak) = self.pop_from_free_list() { - let node = &*weak.p.as_ptr(); - if node.refcount.load(Ordering::Relaxed) != 0 { - // Nothing to do, the node is still alive. 
- continue; + let mut inherited_properties = LonghandIdSet::new(); + let mut have_explicit_ua_inherit = false; + + for node in element_rule_node.self_and_ancestors() { + let source = node.style_source(); + let declarations = if source.is_some() { + source + .as_ref() + .unwrap() + .read(node.cascade_level().guard(guards)) + .declaration_importance_iter() + } else { + continue; + }; + + // Iterate over declarations of the longhands we care about. + let node_importance = node.importance(); + let longhands = declarations.rev().filter_map(|(declaration, importance)| { + if importance != node_importance { + return None; + } + match declaration.id() { + PropertyDeclarationId::Longhand(id) => Some((id, declaration)), + _ => None, + } + }); + + let is_author = node.cascade_level().origin() == Origin::Author; + for (id, declaration) in longhands { + if !properties.contains(id) { + continue; + } + + if is_author { + if !author_colors_allowed { + if let PropertyDeclaration::BackgroundColor(ref color) = *declaration { + if color.is_transparent() { + return true; + } + continue; + } + } + return true; + } + + // This property was set by a non-author rule. + // Stop looking for it in this element's rule + // nodes. + properties.remove(id); + + // However, if it is inherited, then it might be + // inherited from an author rule from an + // ancestor element's rule nodes. + if declaration.get_css_wide_keyword() == Some(CSSWideKeyword::Inherit) { + have_explicit_ua_inherit = true; + inherited_properties.insert(id); + } + } } - debug!("GC'ing {:?}", weak.p.as_ptr()); - node.remove_from_child_list(); - log_drop(weak.p.as_ptr()); - let _ = Box::from_raw(weak.p.as_ptr()); + if !have_explicit_ua_inherit { + break; + } + + // Continue to the parent element and search for the inherited properties. + if let Some(pseudo) = pseudo.take() { + if pseudo.inherits_from_default_values() { + break; + } + } else { + element = match element.inheritance_parent() { + Some(parent) => parent, + None => break, + }; + + let parent_data = element.mutate_data().unwrap(); + let parent_rule_node = parent_data.styles.primary().rules().clone(); + element_rule_node = Cow::Owned(parent_rule_node); + } + + properties = inherited_properties; } - me.free_count().store(0, Ordering::Relaxed); - - debug_assert_eq!(me.next_free.load(Ordering::Relaxed), FREE_LIST_SENTINEL); - } - - unsafe fn maybe_gc(&self) { - debug_assert!(self.get().is_root(), "Can't call GC on a non-root node!"); - if self.get().free_count().load(Ordering::Relaxed) > RULE_TREE_GC_INTERVAL { - self.gc(); - } + false } /// Returns true if there is either animation or transition level rule. 
@@ -1077,181 +612,3 @@ impl<'a> Iterator for SelfAndAncestors<'a> { }) } } - -impl Clone for StrongRuleNode { - fn clone(&self) -> Self { - debug!( - "{:?}: {:?}+", - self.ptr(), - self.get().refcount.load(Ordering::Relaxed) - ); - debug_assert!(self.get().refcount.load(Ordering::Relaxed) > 0); - self.get().refcount.fetch_add(1, Ordering::Relaxed); - StrongRuleNode::from_ptr(self.p) - } -} - -impl Drop for StrongRuleNode { - #[cfg_attr(feature = "servo", allow(unused_mut))] - fn drop(&mut self) { - let node = unsafe { &*self.ptr() }; - - debug!( - "{:?}: {:?}-", - self.ptr(), - node.refcount.load(Ordering::Relaxed) - ); - debug!( - "Dropping node: {:?}, root: {:?}, parent: {:?}", - self.ptr(), - node.root.as_ref().map(|r| r.ptr()), - node.parent.as_ref().map(|p| p.ptr()) - ); - let should_drop = { - debug_assert!(node.refcount.load(Ordering::Relaxed) > 0); - node.refcount.fetch_sub(1, Ordering::Relaxed) == 1 - }; - - if !should_drop { - return; - } - - if node.parent.is_none() { - debug!("Dropping root node!"); - // The free list should be null by this point - debug_assert!(node.next_free.load(Ordering::Relaxed).is_null()); - log_drop(self.ptr()); - let _ = unsafe { Box::from_raw(self.ptr()) }; - return; - } - - let root = unsafe { &*node.root.as_ref().unwrap().ptr() }; - let free_list = &root.next_free; - let mut old_head = free_list.load(Ordering::Relaxed); - - // If the free list is null, that means that the rule tree has been - // formally torn down, and the last standard GC has already occurred. - // We require that any callers using the rule tree at this point are - // on the main thread only, which lets us trigger a synchronous GC - // here to avoid leaking anything. We use the GC machinery, rather - // than just dropping directly, so that we benefit from the iterative - // destruction and don't trigger unbounded recursion during drop. See - // [1] and the associated crashtest. - // - // [1] https://bugzilla.mozilla.org/show_bug.cgi?id=439184 - if old_head.is_null() { - debug_assert!( - !thread_state::get().is_worker() && - (thread_state::get().is_layout() || thread_state::get().is_script()) - ); - // Add the node as the sole entry in the free list. - debug_assert!(node.next_free.load(Ordering::Relaxed).is_null()); - node.next_free.store(FREE_LIST_SENTINEL, Ordering::Relaxed); - free_list.store(node as *const _ as *mut _, Ordering::Relaxed); - - // Invoke the GC. - // - // Note that we need hold a strong reference to the root so that it - // doesn't go away during the GC (which would happen if we're freeing - // the last external reference into the rule tree). This is nicely - // enforced by having the gc() method live on StrongRuleNode rather than - // RuleNode. - let strong_root: StrongRuleNode = node.root.as_ref().unwrap().upgrade(); - unsafe { - strong_root.gc(); - } - - // Leave the free list null, like we found it, such that additional - // drops for straggling rule nodes will take this same codepath. - debug_assert_eq!(root.next_free.load(Ordering::Relaxed), FREE_LIST_SENTINEL); - root.next_free.store(ptr::null_mut(), Ordering::Relaxed); - - // Return. If strong_root is the last strong reference to the root, - // this re-enter StrongRuleNode::drop, and take the root-dropping - // path earlier in this function. - return; - } - - // We're sure we're already in the free list, don't spinloop if we're. - // Note that this is just a fast path, so it doesn't need to have an - // strong memory ordering. 
- if node.next_free.load(Ordering::Relaxed) != ptr::null_mut() { - return; - } - - // Ensure we "lock" the free list head swapping it with FREE_LIST_LOCKED. - // - // Note that we use Acquire/Release semantics for the free list - // synchronization, in order to guarantee that the next_free - // reads/writes we do below are properly visible from multiple threads - // racing. - loop { - match free_list.compare_exchange_weak( - old_head, - FREE_LIST_LOCKED, - Ordering::Acquire, - Ordering::Relaxed, - ) { - Ok(..) => { - if old_head != FREE_LIST_LOCKED { - break; - } - }, - Err(new) => old_head = new, - } - } - - // If other thread has raced with use while using the same rule node, - // just store the old head again, we're done. - // - // Note that we can use relaxed operations for loading since we're - // effectively locking the free list with Acquire/Release semantics, and - // the memory ordering is already guaranteed by that locking/unlocking. - if node.next_free.load(Ordering::Relaxed) != ptr::null_mut() { - free_list.store(old_head, Ordering::Release); - return; - } - - // Else store the old head as the next pointer, and store ourselves as - // the new head of the free list. - // - // This can be relaxed since this pointer won't be read until GC. - node.next_free.store(old_head, Ordering::Relaxed); - - // Increment the free count. This doesn't need to be an RMU atomic - // operation, because the free list is "locked". - let old_free_count = root.free_count().load(Ordering::Relaxed); - root.free_count() - .store(old_free_count + 1, Ordering::Relaxed); - - // This can be release because of the locking of the free list, that - // ensures that all the other nodes racing with this one are using - // `Acquire`. - free_list.store(self.ptr(), Ordering::Release); - } -} - -impl<'a> From<&'a StrongRuleNode> for WeakRuleNode { - fn from(node: &'a StrongRuleNode) -> Self { - WeakRuleNode::from_ptr(node.p) - } -} - -impl WeakRuleNode { - #[inline] - fn ptr(&self) -> *mut RuleNode { - self.p.as_ptr() - } - - fn upgrade(&self) -> StrongRuleNode { - debug!("Upgrading weak node: {:p}", self.ptr()); - - let node = unsafe { &*self.ptr() }; - node.refcount.fetch_add(1, Ordering::Relaxed); - StrongRuleNode::from_ptr(self.p) - } - - fn from_ptr(p: ptr::NonNull) -> Self { - WeakRuleNode { p } - } -} diff --git a/components/style/rule_tree/source.rs b/components/style/rule_tree/source.rs new file mode 100644 index 00000000000..6298ad649ab --- /dev/null +++ b/components/style/rule_tree/source.rs @@ -0,0 +1,101 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +// FIXME: Make that forbid once the two unsafe gecko-specific methods are removed. +#![deny(unsafe_code)] + +use crate::properties::PropertyDeclarationBlock; +use crate::shared_lock::{Locked, SharedRwLockReadGuard}; +use crate::stylesheets::StyleRule; +use servo_arc::{Arc, ArcBorrow, ArcUnion, ArcUnionBorrow}; +use std::io::Write; +use std::ptr; + +/// A style source for the rule node. It can either be a CSS style rule or a +/// declaration block. +/// +/// Note that, even though the declaration block from inside the style rule +/// could be enough to implement the rule tree, keeping the whole rule provides +/// more debuggability, and also the ability of show those selectors to +/// devtools. 
+#[derive(Clone, Debug)]
+pub struct StyleSource(ArcUnion<Locked<StyleRule>, Locked<PropertyDeclarationBlock>>);
+
+impl PartialEq for StyleSource {
+    fn eq(&self, other: &Self) -> bool {
+        ArcUnion::ptr_eq(&self.0, &other.0)
+    }
+}
+
+impl StyleSource {
+    /// Creates a StyleSource from a StyleRule.
+    pub fn from_rule(rule: Arc<Locked<StyleRule>>) -> Self {
+        StyleSource(ArcUnion::from_first(rule))
+    }
+
+    #[inline]
+    pub(super) fn key(&self) -> ptr::NonNull<()> {
+        self.0.ptr()
+    }
+
+    /// Creates a StyleSource from a PropertyDeclarationBlock.
+    pub fn from_declarations(decls: Arc<Locked<PropertyDeclarationBlock>>) -> Self {
+        StyleSource(ArcUnion::from_second(decls))
+    }
+
+    pub(super) fn dump<W: Write>(&self, guard: &SharedRwLockReadGuard, writer: &mut W) {
+        if let Some(ref rule) = self.0.as_first() {
+            let rule = rule.read_with(guard);
+            let _ = write!(writer, "{:?}", rule.selectors);
+        }
+
+        let _ = write!(writer, " -> {:?}", self.read(guard).declarations());
+    }
+
+    // This is totally unsafe, should be removed when we figure out the cause of
+    // bug 1607553.
+    #[allow(unsafe_code)]
+    #[cfg(feature = "gecko")]
+    pub(super) unsafe fn dump_unchecked<W: Write>(&self, writer: &mut W) {
+        if let Some(ref rule) = self.0.as_first() {
+            let rule = rule.read_unchecked();
+            let _ = write!(writer, "{:?}", rule.selectors);
+        }
+        let _ = write!(writer, " -> {:?}", self.read_unchecked().declarations());
+    }
+
+    // This is totally unsafe, should be removed when we figure out the cause of
+    // bug 1607553.
+    #[inline]
+    #[allow(unsafe_code)]
+    #[cfg(feature = "gecko")]
+    pub(super) unsafe fn read_unchecked(&self) -> &PropertyDeclarationBlock {
+        let block: &Locked<PropertyDeclarationBlock> = match self.0.borrow() {
+            ArcUnionBorrow::First(ref rule) => &rule.get().read_unchecked().block,
+            ArcUnionBorrow::Second(ref block) => block.get(),
+        };
+        block.read_unchecked()
+    }
+
+    /// Reads the style source guard, thus obtaining read access to the
+    /// underlying property declaration block.
+    #[inline]
+    pub fn read<'a>(&'a self, guard: &'a SharedRwLockReadGuard) -> &'a PropertyDeclarationBlock {
+        let block: &Locked<PropertyDeclarationBlock> = match self.0.borrow() {
+            ArcUnionBorrow::First(ref rule) => &rule.get().read_with(guard).block,
+            ArcUnionBorrow::Second(ref block) => block.get(),
+        };
+        block.read_with(guard)
+    }
+
+    /// Returns the style rule if applicable, otherwise None.
+    pub fn as_rule(&self) -> Option<ArcBorrow<Locked<StyleRule>>> {
+        self.0.as_first()
+    }
+
+    /// Returns the declaration block if applicable, otherwise None.
+    pub fn as_declarations(&self) -> Option<ArcBorrow<Locked<PropertyDeclarationBlock>>> {
+        self.0.as_second()
+    }
+}

From 05accaa7bd3ac0e0cc34626937499209973ccc77 Mon Sep 17 00:00:00 2001
From: Anthony Ramine
Date: Thu, 16 Apr 2020 14:34:24 +0200
Subject: [PATCH 4/9] Make StrongRuleNode::from_ptr be unsafe

---
 components/style/rule_tree/core.rs | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/components/style/rule_tree/core.rs b/components/style/rule_tree/core.rs
index 3103d416aec..0b1afd8548a 100644
--- a/components/style/rule_tree/core.rs
+++ b/components/style/rule_tree/core.rs
@@ -361,10 +361,10 @@ impl StrongRuleNode {
 
         debug!("Creating rule node: {:p}", ptr);
 
-        StrongRuleNode::from_ptr(ptr)
+        unsafe { StrongRuleNode::from_ptr(ptr) }
     }
 
-    fn from_ptr(p: ptr::NonNull<RuleNode>) -> Self {
+    unsafe fn from_ptr(p: ptr::NonNull<RuleNode>) -> Self {
         StrongRuleNode { p }
     }
 
@@ -413,7 +413,7 @@ impl StrongRuleNode {
             },
         );
 
-        StrongRuleNode::from_ptr(weak.p)
+        unsafe { StrongRuleNode::from_ptr(weak.p) }
     }
 
     /// Raw pointer to the RuleNode
@@ -606,7 +606,7 @@ impl Clone for StrongRuleNode {
         );
         debug_assert!(self.get().refcount.load(Ordering::Relaxed) > 0);
         self.get().refcount.fetch_add(1, Ordering::Relaxed);
-        StrongRuleNode::from_ptr(self.p)
+        unsafe { StrongRuleNode::from_ptr(self.p) }
     }
 }
 
@@ -767,7 +767,7 @@ impl WeakRuleNode {
 
         let node = unsafe { &*self.ptr() };
         node.refcount.fetch_add(1, Ordering::Relaxed);
-        StrongRuleNode::from_ptr(self.p)
+        unsafe { StrongRuleNode::from_ptr(self.p) }
     }
 
     fn from_ptr(p: ptr::NonNull<RuleNode>) -> Self {

From e06e1645713f4a7e8fa841c6ad45a23ce5ea30e4 Mon Sep 17 00:00:00 2001
From: Anthony Ramine
Date: Thu, 16 Apr 2020 14:39:14 +0200
Subject: [PATCH 5/9] Make WeakRuleNode::from_ptr be unsafe

---
 components/style/rule_tree/core.rs | 10 ++--------
 1 file changed, 2 insertions(+), 8 deletions(-)

diff --git a/components/style/rule_tree/core.rs b/components/style/rule_tree/core.rs
index 0b1afd8548a..4c96de291c4 100644
--- a/components/style/rule_tree/core.rs
+++ b/components/style/rule_tree/core.rs
@@ -369,7 +369,7 @@ impl StrongRuleNode {
     }
 
     pub(super) fn downgrade(&self) -> WeakRuleNode {
-        WeakRuleNode::from_ptr(self.p)
+        unsafe { WeakRuleNode::from_ptr(self.p) }
     }
 
     /// Get the parent rule node of this rule node.
@@ -750,12 +750,6 @@ impl Drop for StrongRuleNode {
     }
 }
 
-impl<'a> From<&'a StrongRuleNode> for WeakRuleNode {
-    fn from(node: &'a StrongRuleNode) -> Self {
-        WeakRuleNode::from_ptr(node.p)
-    }
-}
-
 impl WeakRuleNode {
     #[inline]
     fn ptr(&self) -> *mut RuleNode {
@@ -770,7 +764,7 @@ impl WeakRuleNode {
         unsafe { StrongRuleNode::from_ptr(self.p) }
     }
 
-    fn from_ptr(p: ptr::NonNull<RuleNode>) -> Self {
+    unsafe fn from_ptr(p: ptr::NonNull<RuleNode>) -> Self {
         WeakRuleNode { p }
     }
 }

From 37c70609f9188418859ca1c227d79ee493a9ecf8 Mon Sep 17 00:00:00 2001
From: Anthony Ramine
Date: Thu, 16 Apr 2020 15:56:09 +0200
Subject: [PATCH 6/9] Remove WeakRuleNode::clone

MallocSizeOf for RuleTree should not keep around weak references in case
someone runs a GC meanwhile.
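The hazard, concretely: a weak reference does not keep a node alive, so a
traversal stack of weak references can be left dangling if a GC frees a node
mid-walk. Below is a minimal sketch of the traversal discipline this patch
adopts, using std's Rc/Weak as stand-ins for the rule tree's hand-rolled
refcounting (Node and count_nodes are illustrative names, not part of the
patch): every child pointer is upgraded to a strong reference the moment it
is read off the children list, so the stack only ever holds strong
references.

use std::rc::{Rc, Weak};

// Toy stand-in for RuleNode: children are weak, as in the rule tree.
struct Node {
    children: Vec<Weak<Node>>,
}

// Traversal in the style of the fixed MallocSizeOf impl: no node on the
// stack can be freed mid-walk, because the stack owns strong references.
fn count_nodes(root: Rc<Node>) -> usize {
    let mut n = 0;
    let mut stack = vec![root];
    while let Some(node) = stack.pop() {
        n += 1;
        for child in &node.children {
            // Upgrade immediately; in the real tree this happens under the
            // children lock, where the child is known to be alive.
            if let Some(strong) = child.upgrade() {
                stack.push(strong);
            }
        }
    }
    n
}

fn main() {
    let leaf = Rc::new(Node { children: Vec::new() });
    let root = Rc::new(Node {
        children: vec![Rc::downgrade(&leaf)],
    });
    assert_eq!(count_nodes(root), 2);
}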
---
 components/style/rule_tree/core.rs | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/components/style/rule_tree/core.rs b/components/style/rule_tree/core.rs
index 4c96de291c4..226108da9b7 100644
--- a/components/style/rule_tree/core.rs
+++ b/components/style/rule_tree/core.rs
@@ -71,14 +71,14 @@ impl MallocSizeOf for RuleTree {
     fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
         let mut n = 0;
         let mut stack = SmallVec::<[_; 32]>::new();
-        stack.push(self.root.downgrade());
+        stack.push(self.root.clone());
 
         while let Some(node) = stack.pop() {
             n += unsafe { ops.malloc_size_of(node.ptr()) };
             let children = unsafe { (*node.ptr()).children.read() };
             children.shallow_size_of(ops);
             for c in &*children {
-                stack.push(c.clone());
+                stack.push(c.upgrade());
             }
         }
 
@@ -335,7 +335,6 @@ impl RuleNode {
     }
 }
 
-#[derive(Clone)]
 pub(crate) struct WeakRuleNode {
     p: ptr::NonNull<RuleNode>,
 }

From fb28ce6bbea8118de2d6349da6855237431aaa2c Mon Sep 17 00:00:00 2001
From: Anthony Ramine
Date: Thu, 16 Apr 2020 16:20:47 +0200
Subject: [PATCH 7/9] Make StrongRuleNode::ensure_child take a StrongRuleNode
 for the root

---
 components/style/rule_tree/core.rs |  5 +++--
 components/style/rule_tree/mod.rs  | 16 ++++++++--------
 2 files changed, 11 insertions(+), 10 deletions(-)

diff --git a/components/style/rule_tree/core.rs b/components/style/rule_tree/core.rs
index 226108da9b7..d1aaa0d26a3 100644
--- a/components/style/rule_tree/core.rs
+++ b/components/style/rule_tree/core.rs
@@ -367,7 +367,7 @@ impl StrongRuleNode {
         StrongRuleNode { p }
     }
 
-    pub(super) fn downgrade(&self) -> WeakRuleNode {
+    fn downgrade(&self) -> WeakRuleNode {
         unsafe { WeakRuleNode::from_ptr(self.p) }
     }
 
@@ -378,7 +378,7 @@ impl StrongRuleNode {
 
     pub(super) fn ensure_child(
         &self,
-        root: WeakRuleNode,
+        root: &StrongRuleNode,
         source: StyleSource,
         level: CascadeLevel,
     ) -> StrongRuleNode {
@@ -404,6 +404,7 @@ impl StrongRuleNode {
             key,
             |node| unsafe { (*node.ptr()).key() },
             move || {
+                let root = root.downgrade();
                 let strong =
                     StrongRuleNode::new(Box::new(RuleNode::new(root, self.clone(), source, level)));
                 let weak = strong.downgrade();
diff --git a/components/style/rule_tree/mod.rs b/components/style/rule_tree/mod.rs
index 5bf66ad563f..41f87935df4 100644
--- a/components/style/rule_tree/mod.rs
+++ b/components/style/rule_tree/mod.rs
@@ -95,7 +95,7 @@ impl RuleTree {
             debug_assert!(transition.is_none());
             transition = Some(source);
         } else {
-            current = current.ensure_child(self.root().downgrade(), source, level);
+            current = current.ensure_child(self.root(), source, level);
         }
     }
 
@@ -131,7 +131,7 @@ impl RuleTree {
 
         for (source, shadow_cascade_order) in important_author.drain(..) {
             current = current.ensure_child(
-                self.root().downgrade(),
+                self.root(),
                 source,
                 AuthorImportant {
                     shadow_cascade_order: -shadow_cascade_order,
@@ -140,15 +140,15 @@ impl RuleTree {
         }
 
         for source in important_user.drain(..) {
-            current = current.ensure_child(self.root().downgrade(), source, UserImportant);
+            current = current.ensure_child(self.root(), source, UserImportant);
         }
 
         for source in important_ua.drain(..)
{ - current = current.ensure_child(self.root().downgrade(), source, UAImportant); + current = current.ensure_child(self.root(), source, UAImportant); } if let Some(source) = transition { - current = current.ensure_child(self.root().downgrade(), source, Transitions); + current = current.ensure_child(self.root(), source, Transitions); } current @@ -182,7 +182,7 @@ impl RuleTree { { let mut current = from; for (source, level) in iter { - current = current.ensure_child(self.root().downgrade(), source, level); + current = current.ensure_child(self.root(), source, level); } current } @@ -262,7 +262,7 @@ impl RuleTree { if level.is_important() { if pdb.read_with(level.guard(guards)).any_important() { current = current.ensure_child( - self.root().downgrade(), + self.root(), StyleSource::from_declarations(pdb.clone_arc()), level, ); @@ -271,7 +271,7 @@ impl RuleTree { } else { if pdb.read_with(level.guard(guards)).any_normal() { current = current.ensure_child( - self.root().downgrade(), + self.root(), StyleSource::from_declarations(pdb.clone_arc()), level, ); From bc4e2942bff4c2998b5c2a36144b168b02e884b3 Mon Sep 17 00:00:00 2001 From: Anthony Ramine Date: Thu, 16 Apr 2020 16:24:19 +0200 Subject: [PATCH 8/9] Make StrongRuleNode::downgrade be unsafe --- components/style/rule_tree/core.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/components/style/rule_tree/core.rs b/components/style/rule_tree/core.rs index d1aaa0d26a3..0e1dcc6c433 100644 --- a/components/style/rule_tree/core.rs +++ b/components/style/rule_tree/core.rs @@ -367,8 +367,8 @@ impl StrongRuleNode { StrongRuleNode { p } } - fn downgrade(&self) -> WeakRuleNode { - unsafe { WeakRuleNode::from_ptr(self.p) } + unsafe fn downgrade(&self) -> WeakRuleNode { + WeakRuleNode::from_ptr(self.p) } /// Get the parent rule node of this rule node. @@ -404,10 +404,10 @@ impl StrongRuleNode { key, |node| unsafe { (*node.ptr()).key() }, move || { - let root = root.downgrade(); + let root = unsafe { root.downgrade() }; let strong = StrongRuleNode::new(Box::new(RuleNode::new(root, self.clone(), source, level))); - let weak = strong.downgrade(); + let weak = unsafe { strong.downgrade() }; mem::forget(strong); weak }, From a30da7ad8e75dc9c599af92e297650d15cbead07 Mon Sep 17 00:00:00 2001 From: Anthony Ramine Date: Thu, 16 Apr 2020 16:41:01 +0200 Subject: [PATCH 9/9] Introduce a new type UnsafeBox in the rule tree This lets us rely less on raw pointers, thus better tracking the lifetime of the rule node values while dropping strong references etc. 
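The idea behind UnsafeBox, sketched below with std types only (Alias is a
hypothetical stand-in, not the type introduced by this patch): wrap the Box
in ManuallyDrop so it is never freed implicitly, create aliases of the same
allocation with ptr::read, and free the allocation exactly once with an
explicit ManuallyDrop::drop. Deciding when that single free happens remains
the job of the refcount in StrongRuleNode.

use std::mem::ManuallyDrop;
use std::ptr;

// Stand-in for UnsafeBox<T>: a Box that is only dropped explicitly.
struct Alias<T> {
    inner: ManuallyDrop<Box<T>>,
}

fn main() {
    let mut a = Alias {
        inner: ManuallyDrop::new(Box::new(42u32)),
    };
    // Alias the same allocation, like UnsafeBox::clone: no refcount is
    // bumped, so we become responsible for freeing it exactly once.
    let b = Alias {
        inner: unsafe { ptr::read(&a.inner) },
    };
    assert_eq!(**b.inner, 42);
    // Letting `b` go frees nothing: the field is ManuallyDrop and Alias
    // has no Drop impl.
    drop(b);
    // Free the allocation exactly once, like UnsafeBox::drop.
    unsafe { ManuallyDrop::drop(&mut a.inner) };
}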
---
 components/style/rule_tree/core.rs       | 208 +++++++++++------------
 components/style/rule_tree/mod.rs        |   3 +
 components/style/rule_tree/unsafe_box.rs |  64 +++++++
 3 files changed, 170 insertions(+), 105 deletions(-)
 create mode 100644 components/style/rule_tree/unsafe_box.rs

diff --git a/components/style/rule_tree/core.rs b/components/style/rule_tree/core.rs
index 0e1dcc6c433..2c11868201a 100644
--- a/components/style/rule_tree/core.rs
+++ b/components/style/rule_tree/core.rs
@@ -10,12 +10,15 @@ use crate::thread_state;
 use malloc_size_of::{MallocShallowSizeOf, MallocSizeOf, MallocSizeOfOps};
 use parking_lot::RwLock;
 use smallvec::SmallVec;
+use std::fmt;
+use std::hash;
 use std::io::Write;
 use std::mem;
 use std::ptr;
 use std::sync::atomic::{AtomicPtr, AtomicUsize, Ordering};
 
 use super::map::Map;
+use super::unsafe_box::UnsafeBox;
 use super::{CascadeLevel, StyleSource};
 
 /// The rule tree, the structure servo uses to preserve the results of selector
@@ -53,7 +56,7 @@ impl Drop for RuleTree {
 
         // After the GC, the free list should be empty.
         debug_assert_eq!(
-            self.root.get().next_free.load(Ordering::Relaxed),
+            self.root.p.next_free.load(Ordering::Relaxed),
             FREE_LIST_SENTINEL
         );
@@ -61,7 +64,7 @@ impl Drop for RuleTree {
         // Any further drops of StrongRuleNodes must occur on the main thread,
         // and will trigger synchronous dropping of the Rule nodes.
         self.root
-            .get()
+            .p
            .next_free
            .store(ptr::null_mut(), Ordering::Relaxed);
     }
 }
@@ -74,8 +77,8 @@ impl MallocSizeOf for RuleTree {
         stack.push(self.root.clone());
 
         while let Some(node) = stack.pop() {
-            n += unsafe { ops.malloc_size_of(node.ptr()) };
-            let children = unsafe { (*node.ptr()).children.read() };
+            n += unsafe { ops.malloc_size_of(&*node.p) };
+            let children = node.p.children.read();
             children.shallow_size_of(ops);
             for c in &*children {
                 stack.push(c.upgrade());
@@ -162,7 +165,7 @@ impl RuleTree {
         let mut stack = SmallVec::<[_; 32]>::new();
         stack.push(self.root.clone());
         while let Some(node) = stack.pop() {
-            let children = node.get().children.read();
+            let children = node.p.children.read();
             *children_count.entry(children.len()).or_insert(0) += 1;
             for c in &*children {
                 stack.push(c.upgrade());
@@ -183,7 +186,7 @@ impl RuleTree {
 const RULE_TREE_GC_INTERVAL: usize = 300;
 
 /// A node in the rule tree.
-pub struct RuleNode {
+struct RuleNode {
     /// The root node. Only the root has no root pointer, for obvious reasons.
     root: Option<WeakRuleNode>,
 
@@ -321,31 +324,28 @@ impl RuleNode {
         debug!(
             "Remove from child list: {:?}, parent: {:?}",
             self as *const RuleNode,
-            self.parent.as_ref().map(|p| p.ptr())
+            self.parent.as_ref().map(|p| &*p.p as *const RuleNode)
         );
 
         if let Some(parent) = self.parent.as_ref() {
             let weak = parent
-                .get()
+                .p
                 .children
                 .write()
-                .remove(&self.key(), |node| (*node.ptr()).key());
-            assert_eq!(weak.unwrap().ptr() as *const _, self as *const _);
+                .remove(&self.key(), |node| node.p.key());
+            assert_eq!(&*weak.unwrap().p as *const _, self as *const _);
         }
     }
 }
 
 pub(crate) struct WeakRuleNode {
-    p: ptr::NonNull<RuleNode>,
+    p: UnsafeBox<RuleNode>,
 }
 
 /// A strong reference to a rule node.
-#[derive(Debug, Eq, Hash, PartialEq)]
 pub struct StrongRuleNode {
-    p: ptr::NonNull<RuleNode>,
+    p: UnsafeBox<RuleNode>,
 }
-unsafe impl Send for StrongRuleNode {}
-unsafe impl Sync for StrongRuleNode {}
 
 #[cfg(feature = "servo")]
 malloc_size_of_is_0!(StrongRuleNode);
@@ -354,26 +354,28 @@ impl StrongRuleNode {
     fn new(n: Box<RuleNode>) -> Self {
         debug_assert_eq!(n.parent.is_none(), !n.source.is_some());
 
-        // TODO(emilio): Use into_raw_non_null when it's stable.
-        let ptr = unsafe { ptr::NonNull::new_unchecked(Box::into_raw(n)) };
-        log_new(ptr.as_ptr());
+        log_new(&*n);
 
-        debug!("Creating rule node: {:p}", ptr);
+        debug!("Creating rule node: {:p}", &*n);
 
-        unsafe { StrongRuleNode::from_ptr(ptr) }
+        Self {
+            p: UnsafeBox::from_box(n),
+        }
     }
 
-    unsafe fn from_ptr(p: ptr::NonNull<RuleNode>) -> Self {
-        StrongRuleNode { p }
+    unsafe fn from_unsafe_box(p: UnsafeBox<RuleNode>) -> Self {
+        Self { p }
     }
 
     unsafe fn downgrade(&self) -> WeakRuleNode {
-        WeakRuleNode::from_ptr(self.p)
+        WeakRuleNode {
+            p: UnsafeBox::clone(&self.p),
+        }
     }
 
     /// Get the parent rule node of this rule node.
     pub fn parent(&self) -> Option<&StrongRuleNode> {
-        self.get().parent.as_ref()
+        self.p.parent.as_ref()
     }
 
     pub(super) fn ensure_child(
@@ -385,24 +387,24 @@ impl StrongRuleNode {
         use parking_lot::RwLockUpgradableReadGuard;
 
         debug_assert!(
-            self.get().level <= level,
+            self.p.level <= level,
             "Should be ordered (instead {:?} > {:?}), from {:?} and {:?}",
-            self.get().level,
+            self.p.level,
             level,
-            self.get().source,
+            self.p.source,
             source,
         );
 
         let key = ChildKey(level, source.key());
 
-        let children = self.get().children.upgradable_read();
-        if let Some(child) = children.get(&key, |node| unsafe { (*node.ptr()).key() }) {
+        let children = self.p.children.upgradable_read();
+        if let Some(child) = children.get(&key, |node| node.p.key()) {
             return child.upgrade();
         }
         let mut children = RwLockUpgradableReadGuard::upgrade(children);
         let weak = children.get_or_insert_with(
             key,
-            |node| unsafe { (*node.ptr()).key() },
+            |node| node.p.key(),
             move || {
                 let root = unsafe { root.downgrade() };
                 let strong =
@@ -413,50 +415,36 @@ impl StrongRuleNode {
             },
         );
 
-        unsafe { StrongRuleNode::from_ptr(weak.p) }
-    }
-
-    /// Raw pointer to the RuleNode
-    #[inline]
-    pub fn ptr(&self) -> *mut RuleNode {
-        self.p.as_ptr()
-    }
-
-    fn get(&self) -> &RuleNode {
-        if cfg!(debug_assertions) {
-            let node = unsafe { &*self.p.as_ptr() };
-            assert!(node.refcount.load(Ordering::Relaxed) > 0);
-        }
-        unsafe { &*self.p.as_ptr() }
+        unsafe { StrongRuleNode::from_unsafe_box(UnsafeBox::clone(&weak.p)) }
     }
 
     /// Get the style source corresponding to this rule node. May return `None`
     /// if it's the root node, which means that the node hasn't matched any
     /// rules.
     pub fn style_source(&self) -> Option<&StyleSource> {
-        self.get().source.as_ref()
+        self.p.source.as_ref()
     }
 
     /// The cascade level for this node
     pub fn cascade_level(&self) -> CascadeLevel {
-        self.get().level
+        self.p.level
     }
 
     /// Get the importance that this rule node represents.
     pub fn importance(&self) -> Importance {
-        self.get().level.importance()
+        self.p.level.importance()
     }
 
     /// Returns whether this node has any child, only intended for testing
     /// purposes, and called on a single-threaded fashion only.
     pub unsafe fn has_children_for_testing(&self) -> bool {
-        !self.get().children.read().is_empty()
+        !self.p.children.read().is_empty()
     }
 
     unsafe fn pop_from_free_list(&self) -> Option<WeakRuleNode> {
         // NB: This can run from the root node destructor, so we can't use
         // `get()`, since it asserts the refcount is bigger than zero.
-        let me = &*self.p.as_ptr();
+        let me = &self.p;
 
         debug_assert!(me.is_root());
@@ -482,7 +470,7 @@ impl StrongRuleNode {
             same time?"
         );
         debug_assert!(
-            current != self.p.as_ptr(),
+            current != &*self.p as *const RuleNode as *mut RuleNode,
             "How did the root end up in the free list?"
); @@ -502,17 +490,18 @@ impl StrongRuleNode { current, next ); - Some(WeakRuleNode::from_ptr(ptr::NonNull::new_unchecked(current))) + Some(WeakRuleNode { + p: UnsafeBox::from_raw(current), + }) } unsafe fn assert_free_list_has_no_duplicates_or_null(&self) { assert!(cfg!(debug_assertions), "This is an expensive check!"); use crate::hash::FxHashSet; - let me = &*self.p.as_ptr(); - assert!(me.is_root()); + assert!(self.p.is_root()); - let mut current = self.p.as_ptr(); + let mut current = &*self.p as *const RuleNode as *mut RuleNode; let mut seen = FxHashSet::default(); while current != FREE_LIST_SENTINEL { let next = (*current).next_free.load(Ordering::Relaxed); @@ -531,21 +520,20 @@ impl StrongRuleNode { // NB: This can run from the root node destructor, so we can't use // `get()`, since it asserts the refcount is bigger than zero. - let me = &*self.p.as_ptr(); + let me = &self.p; debug_assert!(me.is_root(), "Can't call GC on a non-root node!"); - while let Some(weak) = self.pop_from_free_list() { - let node = &*weak.p.as_ptr(); - if node.refcount.load(Ordering::Relaxed) != 0 { + while let Some(mut weak) = self.pop_from_free_list() { + if weak.p.refcount.load(Ordering::Relaxed) != 0 { // Nothing to do, the node is still alive. continue; } - debug!("GC'ing {:?}", weak.p.as_ptr()); - node.remove_from_child_list(); - log_drop(weak.p.as_ptr()); - let _ = Box::from_raw(weak.p.as_ptr()); + debug!("GC'ing {:?}", &*weak.p as *const RuleNode); + weak.p.remove_from_child_list(); + log_drop(&*weak.p); + UnsafeBox::drop(&mut weak.p); } me.free_count().store(0, Ordering::Relaxed); @@ -554,8 +542,8 @@ impl StrongRuleNode { } unsafe fn maybe_gc(&self) { - debug_assert!(self.get().is_root(), "Can't call GC on a non-root node!"); - if self.get().free_count().load(Ordering::Relaxed) > RULE_TREE_GC_INTERVAL { + debug_assert!(self.p.is_root(), "Can't call GC on a non-root node!"); + if self.p.free_count.load(Ordering::Relaxed) > RULE_TREE_GC_INTERVAL { self.gc(); } } @@ -569,10 +557,10 @@ impl StrongRuleNode { let _ = writeln!( writer, - " - {:?} (ref: {:?}, parent: {:?})", - self.get() as *const _, - self.get().refcount.load(Ordering::Relaxed), - self.parent().map(|p| p.ptr()) + " - {:p} (ref: {:?}, parent: {:?})", + &*self.p, + self.p.refcount.load(Ordering::Relaxed), + self.parent().map(|p| &*p.p as *const RuleNode) ); for _ in 0..indent { @@ -589,7 +577,7 @@ impl StrongRuleNode { } let _ = write!(writer, "\n"); - for child in &*self.get().children.read() { + for child in &*self.p.children.read() { child .upgrade() .dump(guards, writer, indent + INDENT_INCREMENT); @@ -600,31 +588,27 @@ impl StrongRuleNode { impl Clone for StrongRuleNode { fn clone(&self) -> Self { debug!( - "{:?}: {:?}+", - self.ptr(), - self.get().refcount.load(Ordering::Relaxed) + "{:p}: {:?}+", + &*self.p, + self.p.refcount.load(Ordering::Relaxed) ); - debug_assert!(self.get().refcount.load(Ordering::Relaxed) > 0); - self.get().refcount.fetch_add(1, Ordering::Relaxed); - unsafe { StrongRuleNode::from_ptr(self.p) } + debug_assert!(self.p.refcount.load(Ordering::Relaxed) > 0); + self.p.refcount.fetch_add(1, Ordering::Relaxed); + unsafe { StrongRuleNode::from_unsafe_box(UnsafeBox::clone(&self.p)) } } } impl Drop for StrongRuleNode { #[cfg_attr(feature = "servo", allow(unused_mut))] fn drop(&mut self) { - let node = unsafe { &*self.ptr() }; + let node = &*self.p; + debug!("{:p}: {:?}-", node, node.refcount.load(Ordering::Relaxed)); debug!( - "{:?}: {:?}-", - self.ptr(), - node.refcount.load(Ordering::Relaxed) - ); - debug!( - "Dropping 
node: {:?}, root: {:?}, parent: {:?}",
-            self.ptr(),
-            node.root.as_ref().map(|r| r.ptr()),
-            node.parent.as_ref().map(|p| p.ptr())
+            "Dropping node: {:p}, root: {:?}, parent: {:?}",
+            node,
+            node.root.as_ref().map(|r| &*r.p as *const RuleNode),
+            node.parent.as_ref().map(|p| &*p.p as *const RuleNode)
         );
         let should_drop = {
             debug_assert!(node.refcount.load(Ordering::Relaxed) > 0);
@@ -638,13 +622,13 @@ impl Drop for StrongRuleNode {
         if node.parent.is_none() {
             debug!("Dropping root node!");
             // The free list should be null by this point
-            debug_assert!(node.next_free.load(Ordering::Relaxed).is_null());
-            log_drop(self.ptr());
-            let _ = unsafe { Box::from_raw(self.ptr()) };
+            debug_assert!(self.p.next_free.load(Ordering::Relaxed).is_null());
+            log_drop(&*self.p);
+            unsafe { UnsafeBox::drop(&mut self.p) };
             return;
         }
 
-        let root = unsafe { &*node.root.as_ref().unwrap().ptr() };
+        let root = &node.root.as_ref().unwrap().p;
         let free_list = &root.next_free;
         let mut old_head = free_list.load(Ordering::Relaxed);
 
@@ -746,25 +730,39 @@ impl Drop for StrongRuleNode {
         // This can be release because of the locking of the free list, that
         // ensures that all the other nodes racing with this one are using
         // `Acquire`.
-        free_list.store(self.ptr(), Ordering::Release);
+        free_list.store(
+            &*self.p as *const RuleNode as *mut RuleNode,
+            Ordering::Release,
+        );
     }
 }
 
 impl WeakRuleNode {
-    #[inline]
-    fn ptr(&self) -> *mut RuleNode {
-        self.p.as_ptr()
-    }
-
     fn upgrade(&self) -> StrongRuleNode {
-        debug!("Upgrading weak node: {:p}", self.ptr());
-
-        let node = unsafe { &*self.ptr() };
-        node.refcount.fetch_add(1, Ordering::Relaxed);
-        unsafe { StrongRuleNode::from_ptr(self.p) }
-    }
-
-    unsafe fn from_ptr(p: ptr::NonNull<RuleNode>) -> Self {
-        WeakRuleNode { p }
+        debug!("Upgrading weak node: {:p}", &*self.p);
+        self.p.refcount.fetch_add(1, Ordering::Relaxed);
+        unsafe { StrongRuleNode::from_unsafe_box(UnsafeBox::clone(&self.p)) }
+    }
+}
+
+impl fmt::Debug for StrongRuleNode {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        (&*self.p as *const RuleNode).fmt(f)
+    }
+}
+
+impl Eq for StrongRuleNode {}
+impl PartialEq for StrongRuleNode {
+    fn eq(&self, other: &Self) -> bool {
+        &*self.p as *const RuleNode == &*other.p
+    }
+}
+
+impl hash::Hash for StrongRuleNode {
+    fn hash<H>(&self, state: &mut H)
+    where
+        H: hash::Hasher,
+    {
+        (&*self.p as *const RuleNode).hash(state)
    }
 }
diff --git a/components/style/rule_tree/mod.rs b/components/style/rule_tree/mod.rs
index 41f87935df4..2af7989ca4c 100644
--- a/components/style/rule_tree/mod.rs
+++ b/components/style/rule_tree/mod.rs
@@ -2,6 +2,8 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at https://mozilla.org/MPL/2.0/. */
 
+#![deny(unsafe_code)]
+
 //! The rule tree.
 
 use crate::applicable_declarations::ApplicableDeclarationList;
@@ -15,6 +17,7 @@
 mod core;
 mod level;
 mod map;
 mod source;
+mod unsafe_box;
 
 pub use self::core::{RuleTree, StrongRuleNode};
 pub use self::level::{CascadeLevel, ShadowCascadeOrder};
diff --git a/components/style/rule_tree/unsafe_box.rs b/components/style/rule_tree/unsafe_box.rs
new file mode 100644
index 00000000000..1c000d9bac7
--- /dev/null
+++ b/components/style/rule_tree/unsafe_box.rs
@@ -0,0 +1,64 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at https://mozilla.org/MPL/2.0/.
*/
+
+#![allow(unsafe_code)]
+
+use std::mem::ManuallyDrop;
+use std::ops::Deref;
+use std::ptr;
+
+/// An unsafe box, derefs to `T`.
+pub(super) struct UnsafeBox<T> {
+    inner: ManuallyDrop<Box<T>>,
+}
+
+impl<T> UnsafeBox<T> {
+    /// Creates a new unsafe box.
+    pub(super) fn from_box(value: Box<T>) -> Self {
+        Self {
+            inner: ManuallyDrop::new(value),
+        }
+    }
+
+    /// Creates a new unsafe box from a raw pointer.
+    ///
+    /// # Safety
+    ///
+    /// The pointer must point to a valid `T` that originally came from a
+    /// `Box<T>`.
+    pub(super) unsafe fn from_raw(ptr: *mut T) -> Self {
+        Self {
+            inner: ManuallyDrop::new(Box::from_raw(ptr)),
+        }
+    }
+
+    /// Creates a new unsafe box from an existing one.
+    ///
+    /// # Safety
+    ///
+    /// There is no refcounting or whatever else in an unsafe box, so this
+    /// operation can lead to double frees.
+    pub(super) unsafe fn clone(this: &Self) -> Self {
+        Self {
+            inner: ptr::read(&this.inner),
+        }
+    }
+
+    /// Drops the inner value of this unsafe box.
+    ///
+    /// # Safety
+    ///
+    /// Given this doesn't consume the unsafe box itself, this has the same
+    /// safety caveats as `ManuallyDrop::drop`.
+    pub(super) unsafe fn drop(this: &mut Self) {
+        ManuallyDrop::drop(&mut this.inner)
+    }
+}
+
+impl<T> Deref for UnsafeBox<T> {
+    type Target = T;
+
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
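As a closing illustration of how the pieces above compose, here is a
condensed, self-contained sketch of the Clone/Drop discipline StrongRuleNode
builds on top of such an aliasable box, with std types standing in for
UnsafeBox (Inner and Strong are hypothetical names; the real Drop pushes
dead nodes onto the free list instead of freeing immediately):

use std::mem::ManuallyDrop;
use std::ptr;
use std::sync::atomic::{AtomicUsize, Ordering};

struct Inner {
    refcount: AtomicUsize,
}

struct Strong {
    // Plays the role of UnsafeBox<Inner>.
    p: ManuallyDrop<Box<Inner>>,
}

impl Clone for Strong {
    fn clone(&self) -> Self {
        // Bump the shared count, then alias the allocation, as
        // StrongRuleNode::clone does via UnsafeBox::clone.
        self.p.refcount.fetch_add(1, Ordering::Relaxed);
        Strong {
            p: unsafe { ptr::read(&self.p) },
        }
    }
}

impl Drop for Strong {
    fn drop(&mut self) {
        // The alias that takes the count to zero frees the allocation, once.
        // Relaxed suffices in this sketch only because, as in the rule tree,
        // frees are serialized elsewhere; a general-purpose Arc would need
        // Release/Acquire here.
        if self.p.refcount.fetch_sub(1, Ordering::Relaxed) == 1 {
            unsafe { ManuallyDrop::drop(&mut self.p) }
        }
    }
}

fn main() {
    let a = Strong {
        p: ManuallyDrop::new(Box::new(Inner {
            refcount: AtomicUsize::new(1),
        })),
    };
    let b = a.clone();
    drop(a); // count 2 -> 1, no free
    drop(b); // count 1 -> 0, frees exactly once
}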