style: Shrink maps if needed after stylist rebuilds

Hashbrown sometimes grows its maps well beyond what we end up using, which
can waste a lot of memory. Shrink some of these maps after a CascadeData
rebuild / stylesheet collection invalidation.

Differential Revision: https://phabricator.services.mozilla.com/D134716
Emilio Cobos Álvarez 2023-06-06 23:27:20 +02:00 committed by Oriol Brufau
parent f9610e5898
commit fcc55f2156
6 changed files with 122 additions and 17 deletions
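
To see why this is worth doing: std's HashMap (hashbrown underneath) grows its
power-of-two bucket array by doubling, so right after a growth nearly half of
the capacity is slack, and clearing or retaining entries never gives that
memory back. A quick standalone probe of both effects (illustrative only, not
part of this patch):

use std::collections::HashMap;

fn main() {
    let mut m = HashMap::new();
    for i in 0..1000u32 {
        m.insert(i, i);
    }
    // Drop most entries; the table keeps the capacity it grew to
    // (exact numbers depend on the hashbrown version).
    m.retain(|&k, _| k < 10);
    assert!(m.capacity() >= 1000);

    // shrink_to_fit() releases the slack, at the cost of a rehash.
    m.shrink_to_fit();
    assert!(m.capacity() < 64);
}

The ShrinkIfNeeded trait added below automates this, gated by a heuristic so
that small or well-utilized maps are never rehashed.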


@@ -11,7 +11,7 @@ use crate::selector_map::{
 };
 use crate::selector_parser::SelectorImpl;
 use crate::AllocErr;
-use crate::{Atom, LocalName, Namespace};
+use crate::{Atom, LocalName, Namespace, ShrinkIfNeeded};
 use selectors::attr::NamespaceConstraint;
 use selectors::parser::{Combinator, Component};
 use selectors::parser::{Selector, SelectorIter};
@@ -237,6 +237,14 @@ impl InvalidationMap {
         self.other_attribute_affecting_selectors.clear();
     }
 
+    /// Shrink the capacity of hash maps if needed.
+    pub fn shrink_if_needed(&mut self) {
+        self.class_to_selector.shrink_if_needed();
+        self.id_to_selector.shrink_if_needed();
+        self.state_affecting_selectors.shrink_if_needed();
+        self.other_attribute_affecting_selectors.shrink_if_needed();
+    }
+
     /// Adds a selector to this `InvalidationMap`. Returns Err(..) to
     /// signify OOM.
     pub fn note_selector(


@@ -18,7 +18,7 @@ use crate::shared_lock::SharedRwLockReadGuard;
 use crate::stylesheets::{CssRule, StylesheetInDocument};
 use crate::stylesheets::{EffectiveRules, EffectiveRulesIterator};
 use crate::values::AtomIdent;
-use crate::Atom;
+use crate::{Atom, ShrinkIfNeeded};
 use crate::LocalName as SelectorLocalName;
 use selectors::parser::{Component, LocalName, Selector};
@@ -119,6 +119,15 @@ impl StylesheetInvalidationSet {
         self.fully_invalid = true;
     }
 
+    fn shrink_if_needed(&mut self) {
+        if self.fully_invalid {
+            return;
+        }
+        self.classes.shrink_if_needed();
+        self.ids.shrink_if_needed();
+        self.local_names.shrink_if_needed();
+    }
+
     /// Analyze the given stylesheet, and collect invalidations from their
     /// rules, in order to avoid doing a full restyle when we style the document
     /// next time.
@@ -149,6 +158,8 @@ impl StylesheetInvalidationSet {
             }
         }
 
+        self.shrink_if_needed();
+
         debug!(" > resulting class invalidations: {:?}", self.classes);
         debug!(" > resulting id invalidations: {:?}", self.ids);
         debug!(


@@ -157,6 +157,8 @@ pub use style_traits::arc_slice::ArcSlice;
 pub use style_traits::owned_slice::OwnedSlice;
 pub use style_traits::owned_str::OwnedStr;
 
+use std::hash::{Hash, BuildHasher};
+
 /// The CSS properties supported by the style system.
 /// Generated from the properties.mako.rs template by build.rs
 #[macro_use]
@@ -286,3 +288,44 @@ impl From<std::collections::TryReserveError> for AllocErr {
         Self
     }
 }
+
+/// Shrink the capacity of the collection if needed.
+pub(crate) trait ShrinkIfNeeded {
+    fn shrink_if_needed(&mut self);
+}
+
+/// We shrink the capacity of a collection if we're wasting more than 25% of
+/// its capacity, and if the collection is arbitrarily big enough
+/// (>= CAPACITY_THRESHOLD entries).
+#[inline]
+fn should_shrink(len: usize, capacity: usize) -> bool {
+    const CAPACITY_THRESHOLD: usize = 64;
+    capacity >= CAPACITY_THRESHOLD && len + capacity / 4 < capacity
+}
+
+impl<K, V, H> ShrinkIfNeeded for std::collections::HashMap<K, V, H>
+where
+    K: Eq + Hash,
+    H: BuildHasher,
+{
+    fn shrink_if_needed(&mut self) {
+        if should_shrink(self.len(), self.capacity()) {
+            self.shrink_to_fit();
+        }
+    }
+}
+
+impl<T, H> ShrinkIfNeeded for std::collections::HashSet<T, H>
+where
+    T: Eq + Hash,
+    H: BuildHasher,
+{
+    fn shrink_if_needed(&mut self) {
+        if should_shrink(self.len(), self.capacity()) {
+            self.shrink_to_fit();
+        }
+    }
+}
+
+// TODO(emilio): Measure and see if we're wasting a lot of memory on Vec /
+// SmallVec, and if so consider shrinking those as well.
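
The heuristic above shrinks only when the table has at least 64 slots and more
than a quarter of them are unused (len + capacity / 4 < capacity is the same
as capacity - len > capacity / 4). A standalone check of the cut-over points,
reusing the should_shrink body from the patch (illustrative only):

fn should_shrink(len: usize, capacity: usize) -> bool {
    const CAPACITY_THRESHOLD: usize = 64;
    capacity >= CAPACITY_THRESHOLD && len + capacity / 4 < capacity
}

fn main() {
    assert!(!should_shrink(4, 56)); // below 64 slots: never shrink
    assert!(should_shrink(40, 64)); // 40 + 16 < 64: more than 25% wasted
    assert!(!should_shrink(48, 64)); // 48 + 16 == 64: exactly 25% slack, keep
    assert!(should_shrink(100, 1024)); // mostly-empty large table
}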


@@ -12,7 +12,7 @@ use crate::rule_tree::CascadeLevel;
 use crate::selector_parser::SelectorImpl;
 use crate::stylist::{CascadeData, Rule};
 use crate::AllocErr;
-use crate::{Atom, LocalName, Namespace, WeakAtom};
+use crate::{Atom, LocalName, Namespace, ShrinkIfNeeded, WeakAtom};
 use precomputed_hash::PrecomputedHash;
 use selectors::matching::{matches_selector, ElementSelectorFlags, MatchingContext};
 use selectors::parser::{Combinator, Component, SelectorIter};
@@ -122,10 +122,7 @@ impl<T: 'static> Default for SelectorMap<T> {
     }
 }
 
-// FIXME(Manishearth) the 'static bound can be removed when
-// our HashMap fork (hashglobe) is able to use NonZero,
-// or when stdlib gets fallible collections
-impl<T: 'static> SelectorMap<T> {
+impl<T> SelectorMap<T> {
     /// Trivially constructs an empty `SelectorMap`.
     pub fn new() -> Self {
         SelectorMap {
@@ -152,6 +149,15 @@ impl<T: 'static> SelectorMap<T> {
         ret
     }
 
+    /// Shrink the capacity of the map if needed.
+    pub fn shrink_if_needed(&mut self) {
+        self.id_hash.shrink_if_needed();
+        self.class_hash.shrink_if_needed();
+        self.attribute_hash.shrink_if_needed();
+        self.local_name_hash.shrink_if_needed();
+        self.namespace_hash.shrink_if_needed();
+    }
+
     /// Clears the hashmap retaining storage.
     pub fn clear(&mut self) {
         self.root.clear();
@@ -715,26 +721,28 @@ fn find_bucket<'a>(
 /// Wrapper for PrecomputedHashMap that does ASCII-case-insensitive lookup in quirks mode.
 #[derive(Clone, Debug, MallocSizeOf)]
-pub struct MaybeCaseInsensitiveHashMap<K: PrecomputedHash + Hash + Eq, V: 'static>(
+pub struct MaybeCaseInsensitiveHashMap<K: PrecomputedHash + Hash + Eq, V>(
     PrecomputedHashMap<K, V>,
 );
 
-impl<V: 'static> Default for MaybeCaseInsensitiveHashMap<Atom, V> {
+impl<V> Default for MaybeCaseInsensitiveHashMap<Atom, V> {
     #[inline]
     fn default() -> Self {
         MaybeCaseInsensitiveHashMap(PrecomputedHashMap::default())
     }
 }
 
-// FIXME(Manishearth) the 'static bound can be removed when
-// our HashMap fork (hashglobe) is able to use NonZero,
-// or when stdlib gets fallible collections
-impl<V: 'static> MaybeCaseInsensitiveHashMap<Atom, V> {
+impl<V> MaybeCaseInsensitiveHashMap<Atom, V> {
     /// Empty map
     pub fn new() -> Self {
         Self::default()
     }
 
+    /// Shrink the capacity of the map if needed.
+    pub fn shrink_if_needed(&mut self) {
+        self.0.shrink_if_needed()
+    }
+
     /// HashMap::try_entry
     pub fn try_entry(
         &mut self,

@@ -170,9 +170,14 @@ impl<T> PerPseudoElementMap<T> {
     }
 
     /// Get an iterator for the entries.
-    pub fn iter(&self) -> ::std::slice::Iter<Option<T>> {
+    pub fn iter(&self) -> std::slice::Iter<Option<T>> {
         self.entries.iter()
     }
+
+    /// Get a mutable iterator for the entries.
+    pub fn iter_mut(&mut self) -> std::slice::IterMut<Option<T>> {
+        self.entries.iter_mut()
+    }
 }
 
 /// Values for the :dir() pseudo class


@@ -41,7 +41,7 @@ use crate::stylesheets::{
 use crate::stylesheets::{StyleRule, StylesheetContents, StylesheetInDocument};
 use crate::thread_state::{self, ThreadState};
 use crate::AllocErr;
-use crate::{Atom, LocalName, Namespace, WeakAtom};
+use crate::{Atom, LocalName, Namespace, ShrinkIfNeeded, WeakAtom};
 use fxhash::FxHashMap;
 use malloc_size_of::MallocSizeOf;
 #[cfg(feature = "gecko")]
@@ -291,7 +291,7 @@ impl CascadeDataCacheEntry for UserAgentCascadeData {
             )?;
         }
 
-        new_data.cascade_data.compute_layer_order();
+        new_data.cascade_data.did_finish_rebuild();
 
         Ok(Arc::new(new_data))
     }
@@ -1978,6 +1978,15 @@ impl ElementAndPseudoRules {
         self.element_map.clear();
         self.pseudos_map.clear();
     }
+
+    fn shrink_if_needed(&mut self) {
+        self.element_map.shrink_if_needed();
+        for pseudo in self.pseudos_map.iter_mut() {
+            if let Some(ref mut pseudo) = pseudo {
+                pseudo.shrink_if_needed();
+            }
+        }
+    }
 }
 
 impl PartElementAndPseudoRules {
@@ -2164,7 +2173,7 @@ impl CascadeData {
             result.is_ok()
         });
 
-        self.compute_layer_order();
+        self.did_finish_rebuild();
 
         result
     }
@@ -2232,6 +2241,27 @@ impl CascadeData {
         self.layers[id.0 as usize].order
     }
 
+    fn did_finish_rebuild(&mut self) {
+        self.shrink_maps_if_needed();
+        self.compute_layer_order();
+    }
+
+    fn shrink_maps_if_needed(&mut self) {
+        self.normal_rules.shrink_if_needed();
+        if let Some(ref mut host_rules) = self.host_rules {
+            host_rules.shrink_if_needed();
+        }
+        if let Some(ref mut slotted_rules) = self.slotted_rules {
+            slotted_rules.shrink_if_needed();
+        }
+        self.invalidation_map.shrink_if_needed();
+        self.attribute_dependencies.shrink_if_needed();
+        self.mapped_ids.shrink_if_needed();
+        self.layer_id.shrink_if_needed();
+        self.selectors_for_cache_revalidation.shrink_if_needed();
+    }
+
     fn compute_layer_order(&mut self) {
         debug_assert_ne!(
             self.layers.len(),