Mirror of https://github.com/servo/servo.git (synced 2025-08-03 04:30:10 +01:00)
Make LRUCache use a linked list to reduce memmoves.
https://bugzilla.mozilla.org/show_bug.cgi?id=1398957
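The idea, sketched below as standalone Rust (an illustration of the technique only, not the patch itself; `Node`, `Chain`, and `main` are made up for this sketch): entries keep their slots in a fixed array and are chained together by small indices, so promoting an entry to the front only rewrites a few link fields instead of shifting (memmoving) the stored elements the way a deque-backed cache does.

struct Node {
    val: u32,
    prev: u16,
    next: u16,
}

struct Chain {
    nodes: Vec<Node>, // stand-in for the fixed-capacity array used by the real cache
    head: u16,
    tail: u16,
}

impl Chain {
    /// Unlink node `i` and relink it at the head: O(1), and no node moves in memory.
    fn touch(&mut self, i: u16) {
        if i == self.head {
            return;
        }
        let (prev, next) = (self.nodes[i as usize].prev, self.nodes[i as usize].next);
        // Unlink `i` from its current position.
        self.nodes[prev as usize].next = next;
        if i == self.tail {
            self.tail = prev;
        } else {
            self.nodes[next as usize].prev = prev;
        }
        // Relink `i` at the front.
        self.nodes[i as usize].next = self.head;
        self.nodes[self.head as usize].prev = i;
        self.head = i;
    }
}

fn main() {
    // Three nodes already linked in MRU order 0 -> 1 -> 2.
    let mut c = Chain {
        nodes: vec![
            Node { val: 10, prev: 0, next: 1 },
            Node { val: 20, prev: 0, next: 2 },
            Node { val: 30, prev: 1, next: 2 },
        ],
        head: 0,
        tail: 2,
    };
    c.touch(2); // MRU order becomes 2 -> 0 -> 1; only link fields changed.
    assert_eq!(c.head, 2);
    assert_eq!(c.nodes[c.head as usize].val, 30);
    assert_eq!(c.nodes[2].next, 0);
    assert_eq!(c.tail, 1);
}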
parent 2cbd27c83a
commit 286888009f
5 changed files with 172 additions and 54 deletions
Cargo.lock (generated): 11 lines changed
@@ -66,15 +66,6 @@ dependencies = [
  "serde 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
-[[package]]
-name = "arraydeque"
-version = "0.2.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "nodrop 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)",
- "odds 0.2.25 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
 [[package]]
 name = "arrayvec"
 version = "0.3.23"
@@ -3121,7 +3112,6 @@ name = "style"
 version = "0.0.1"
 dependencies = [
  "app_units 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "arraydeque 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "arrayvec 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)",
  "atomic_refcell 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "bindgen 0.29.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -3771,7 +3761,6 @@ dependencies = [
 "checksum ansi_term 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "23ac7c30002a5accbf7e8987d0632fa6de155b7c3d39d0067317a391e00a2ef6"
 "checksum antidote 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "34fde25430d87a9388dadbe6e34d7f72a462c8b43ac8d309b42b0a8505d7e2a5"
 "checksum app_units 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)" = "ed0a4de09a3b8449515e649f3bb84f72ea15fc2d10639beb0776a09b7d308074"
-"checksum arraydeque 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "96e774cadb24c2245225280c6799793f9802b918a58a79615e9490607489a717"
 "checksum arrayvec 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)" = "699e63a93b79d717e8c3b5eb1b28b7780d0d6d9e59a72eb769291c83b0c8dc67"
 "checksum aster 0.41.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4ccfdf7355d9db158df68f976ed030ab0f6578af811f5a7bb6dcf221ec24e0e0"
 "checksum atomic_refcell 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fb2dcb6e6d35f20276943cc04bb98e538b348d525a04ac79c10021561d202f21"

@@ -32,7 +32,6 @@ gecko_debug = ["nsstring_vendor/gecko_debug"]
 [dependencies]
 app_units = "0.5.5"
 arrayvec = "0.3.20"
-arraydeque = "0.2.3"
 atomic_refcell = "0.1"
 bitflags = "0.7"
 byteorder = "1.0"

@@ -4,31 +4,43 @@
 
 //! A simple LRU cache.
 
 #![deny(missing_docs)]
 
-extern crate arraydeque;
-use self::arraydeque::Array;
-use self::arraydeque::ArrayDeque;
+use arrayvec::{Array, ArrayVec};
 
-/// A LRU cache used to store a set of at most `n` elements at the same time.
+/// A LRU cache using a statically-sized array for storage.
 ///
-/// The most-recently-used entry is at index zero.
-pub struct LRUCache <K: Array>{
-    entries: ArrayDeque<K>,
+/// The most-recently-used entry is at index `head`. The entries form a linked list, linked to each
+/// other by indices within the `entries` array. After an entry is added to the array, its index
+/// never changes, so these links are never invalidated.
+pub struct LRUCache<T, A: Array<Item=Entry<T>>> {
+    entries: ArrayVec<A>,
+    /// Index of the first entry. If the cache is empty, ignore this field.
+    head: u16,
+    /// Index of the last entry. If the cache is empty, ignore this field.
+    tail: u16,
 }
 
-/// A iterator over the items of the LRU cache.
-pub type LRUCacheIterator<'a, K> = arraydeque::Iter<'a, K>;
+/// An opaque token used as an index into an LRUCache.
+pub struct CacheIndex(u16);
 
-/// A iterator over the mutable items of the LRU cache.
-pub type LRUCacheMutIterator<'a, K> = arraydeque::IterMut<'a, K>;
+/// An entry in an LRUCache.
+pub struct Entry<T> {
+    val: T,
+    /// Index of the previous entry. If this entry is the head, ignore this field.
+    prev: u16,
+    /// Index of the next entry. If this entry is the tail, ignore this field.
+    next: u16,
+}
 
-impl<K: Array> LRUCache<K> {
-    /// Create a new LRU cache with `size` elements at most.
+impl<T, A: Array<Item=Entry<T>>> LRUCache<T, A> {
+    /// Create an empty LRU cache.
     pub fn new() -> Self {
-        LRUCache {
-            entries: ArrayDeque::new(),
-        }
+        let cache = LRUCache {
+            entries: ArrayVec::new(),
+            head: 0,
+            tail: 0,
+        };
+        assert!(cache.entries.capacity() < u16::max_value() as usize, "Capacity overflow");
+        cache
     }
 
     /// Returns the number of elements in the cache.
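For orientation, a hedged sketch of how a caller might name one of these caches (the alias and `u32` value type here are hypothetical; the real instantiations appear in the later hunks of this patch, which import `cache::{Entry, LRUCache}`): the cache is now parameterized over both the stored value and a backing array of `Entry<T>`, and the capacity must fit in a `u16` because entries link to each other by `u16` index.

use cache::{Entry, LRUCache};

// Hypothetical alias: a five-slot cache of u32 values. Note that the backing
// array holds Entry<u32> (value plus prev/next links), not bare u32.
type SmallCache = LRUCache<u32, [Entry<u32>; 5]>;

fn fresh() -> SmallCache {
    // `new` asserts that the capacity fits in a u16, since the links are u16 indices.
    LRUCache::new()
}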
@@ -38,41 +50,98 @@ impl<K: Array> LRUCache<K> {
 
     #[inline]
     /// Touch a given entry, putting it first in the list.
-    pub fn touch(&mut self, pos: usize) {
-        if pos != 0 {
-            let entry = self.entries.remove(pos).unwrap();
-            self.entries.push_front(entry);
+    pub fn touch(&mut self, idx: CacheIndex) {
+        if idx.0 != self.head {
+            self.remove(idx.0);
+            self.push_front(idx.0);
         }
     }
 
     /// Returns the front entry in the list (most recently used).
-    pub fn front(&self) -> Option<&K::Item> {
-        self.entries.get(0)
+    pub fn front(&self) -> Option<&T> {
+        self.entries.get(self.head as usize).map(|e| &e.val)
     }
 
     /// Returns a mutable reference to the front entry in the list (most recently used).
-    pub fn front_mut(&mut self) -> Option<&mut K::Item> {
-        self.entries.get_mut(0)
+    pub fn front_mut(&mut self) -> Option<&mut T> {
+        self.entries.get_mut(self.head as usize).map(|e| &mut e.val)
    }
 
     /// Iterate over the contents of this cache, from more to less recently
     /// used.
-    pub fn iter(&self) -> arraydeque::Iter<K::Item> {
-        self.entries.iter()
+    pub fn iter(&self) -> LRUCacheIterator<T, A> {
+        LRUCacheIterator {
+            pos: self.head,
+            done: self.entries.len() == 0,
+            cache: self,
+        }
     }
 
     /// Iterate mutably over the contents of this cache.
-    pub fn iter_mut(&mut self) -> arraydeque::IterMut<K::Item> {
-        self.entries.iter_mut()
+    pub fn iter_mut(&mut self) -> LRUCacheMutIterator<T, A> {
+        LRUCacheMutIterator {
+            pos: self.head,
+            done: self.entries.len() == 0,
+            cache: self,
+        }
     }
 
     /// Insert a given key in the cache.
-    pub fn insert(&mut self, key: K::Item) {
-        if self.entries.len() == self.entries.capacity() {
-            self.entries.pop_back();
-        }
-        self.entries.push_front(key);
-        debug_assert!(self.entries.len() <= self.entries.capacity());
+    pub fn insert(&mut self, val: T) {
+        let entry = Entry { val, prev: 0, next: 0 };
+
+        // If the cache is full, replace the oldest entry. Otherwise, add an entry.
+        let new_head = if self.entries.len() == self.entries.capacity() {
+            let i = self.pop_back();
+            self.entries[i as usize] = entry;
+            i
+        } else {
+            self.entries.push(entry);
+            self.entries.len() as u16 - 1
+        };
+
+        self.push_front(new_head);
+    }
+
+    /// Remove an entry from the linked list.
+    ///
+    /// Note: This only unlinks the entry from the list; it does not remove it from the array.
+    fn remove(&mut self, i: u16) {
+        let prev = self.entries[i as usize].prev;
+        let next = self.entries[i as usize].next;
+
+        if i == self.head {
+            self.head = next;
+        } else {
+            self.entries[prev as usize].next = next;
+        }
+
+        if i == self.tail {
+            self.tail = prev;
+        } else {
+            self.entries[next as usize].prev = prev;
+        }
+    }
+
+    /// Insert a new entry at the head of the list.
+    fn push_front(&mut self, i: u16) {
+        if self.entries.len() == 1 {
+            self.tail = i;
+        } else {
+            self.entries[i as usize].next = self.head;
+            self.entries[self.head as usize].prev = i;
+        }
+        self.head = i;
+    }
+
+    /// Remove the last entry from the linked list. Returns the index of the removed entry.
+    ///
+    /// Note: This only unlinks the entry from the list; it does not remove it from the array.
+    fn pop_back(&mut self) -> u16 {
+        let old_tail = self.tail;
+        let new_tail = self.entries[old_tail as usize].prev;
+        self.tail = new_tail;
+        old_tail
     }
 
     /// Evict all elements from the cache.
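A test-style sketch of the behaviour the methods above are meant to provide (hypothetical; it assumes it is compiled as a child test module of the cache module, so `Entry` and `LRUCache` are in scope via `super`): inserting past capacity reuses the least-recently-used slot in place, and `touch` promotes an entry purely by relinking.

#[cfg(test)]
mod lru_sketch {
    use super::{Entry, LRUCache};

    #[test]
    fn eviction_and_touch() {
        let mut cache: LRUCache<u32, [Entry<u32>; 3]> = LRUCache::new();
        cache.insert(1);
        cache.insert(2);
        cache.insert(3);

        // The cache is full, so the fourth insert overwrites the slot of the
        // least-recently-used value (1) without shifting the other entries.
        cache.insert(4);
        assert_eq!(cache.front(), Some(&4));

        // `touch` takes a CacheIndex obtained from iteration and relinks that
        // entry to the front of the list.
        let idx = cache.iter().find(|&(_, &v)| v == 2).map(|(i, _)| i).unwrap();
        cache.touch(idx);
        assert_eq!(cache.front(), Some(&2));
    }
}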
@@ -80,3 +149,62 @@ impl<K: Array> LRUCache<K> {
         self.entries.clear();
     }
 }
+
+/// Immutable iterator over values in an LRUCache, from most-recently-used to least-recently-used.
+pub struct LRUCacheIterator<'a, T: 'a, A: 'a + Array<Item=Entry<T>>> {
+    cache: &'a LRUCache<T, A>,
+    pos: u16,
+    done: bool,
+}
+
+impl<'a, T, A> Iterator for LRUCacheIterator<'a, T, A>
+    where T: 'a,
+          A: 'a + Array<Item=Entry<T>>
+{
+    type Item = (CacheIndex, &'a T);
+
+    fn next(&mut self) -> Option<Self::Item> {
+        if self.done { return None }
+
+        let entry = &self.cache.entries[self.pos as usize];
+
+        let index = CacheIndex(self.pos);
+        if self.pos == self.cache.tail {
+            self.done = true;
+        }
+        self.pos = entry.next;
+
+        Some((index, &entry.val))
+    }
+}
+
+/// Mutable iterator over values in an LRUCache, from most-recently-used to least-recently-used.
+pub struct LRUCacheMutIterator<'a, T: 'a, A: 'a + Array<Item=Entry<T>>> {
+    cache: &'a mut LRUCache<T, A>,
+    pos: u16,
+    done: bool,
+}
+
+impl<'a, T, A> Iterator for LRUCacheMutIterator<'a, T, A>
+    where T: 'a,
+          A: 'a + Array<Item=Entry<T>>
+{
+    type Item = (CacheIndex, &'a mut T);
+
+    fn next(&mut self) -> Option<Self::Item> {
+        if self.done { return None }
+
+        // Use a raw pointer because the compiler doesn't know that subsequent calls can't alias.
+        let entry = unsafe {
+            &mut *(&mut self.cache.entries[self.pos as usize] as *mut Entry<T>)
+        };
+
+        let index = CacheIndex(self.pos);
+        if self.pos == self.cache.tail {
+            self.done = true;
+        }
+        self.pos = entry.next;
+
+        Some((index, &mut entry.val))
+    }
+}
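Both iterators yield `(CacheIndex, &T)` pairs rather than bare references, which supports the caller pattern visible in the later hunks: scan for a match, optionally mutate it in place, and keep the index around. A hedged sketch (hypothetical function and `(key, counter)` payload type; it assumes it sits in a crate where both the cache module and `arrayvec` are visible, and that the saved index is then fed to `touch`):

use arrayvec::Array;
use cache::{Entry, LRUCache};

// Look up `key` in a cache of (key, counter) pairs, bump the counter, and
// promote the entry. The CacheIndex stays valid across the mutation because
// entries never move within the backing array.
fn bump<A>(cache: &mut LRUCache<(u32, u32), A>, key: u32)
    where A: Array<Item = Entry<(u32, u32)>>
{
    let mut found = None;
    for (index, value) in cache.iter_mut() {
        if value.0 == key {
            value.1 += 1;        // mutate the cached payload in place
            found = Some(index); // remember where it lives
            break;
        }
    }
    if let Some(index) = found {
        cache.touch(index);      // O(1) relink, no memmove
    }
}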

@@ -8,7 +8,7 @@
 #[cfg(feature = "servo")] use animation::PropertyAnimation;
 use app_units::Au;
 use bloom::StyleBloom;
-use cache::LRUCache;
+use cache::{Entry, LRUCache};
 use data::{EagerPseudoStyles, ElementData};
 use dom::{OpaqueNode, TNode, TElement, SendElement};
 use euclid::ScaleFactor;
@@ -522,6 +522,8 @@ impl<E: TElement> SequentialTask<E> {
     }
 }
 
+type CacheItem<E> = (SendElement<E>, ElementSelectorFlags);
+
 /// Map from Elements to ElementSelectorFlags. Used to defer applying selector
 /// flags until after the traversal.
 pub struct SelectorFlagsMap<E: TElement> {
@@ -529,7 +531,7 @@ pub struct SelectorFlagsMap<E: TElement> {
     map: FnvHashMap<SendElement<E>, ElementSelectorFlags>,
     /// An LRU cache to avoid hashmap lookups, which can be slow if the map
     /// gets big.
-    cache: LRUCache<[(SendElement<E>, ElementSelectorFlags); 4 + 1]>,
+    cache: LRUCache<CacheItem<E>, [Entry<CacheItem<E>>; 4 + 1]>,
 }
 
 #[cfg(debug_assertions)]
@@ -552,8 +554,8 @@ impl<E: TElement> SelectorFlagsMap<E> {
     pub fn insert_flags(&mut self, element: E, flags: ElementSelectorFlags) {
         let el = unsafe { SendElement::new(element) };
         // Check the cache. If the flags have already been noted, we're done.
-        if self.cache.iter().find(|x| x.0 == el)
-               .map_or(ElementSelectorFlags::empty(), |x| x.1)
+        if self.cache.iter().find(|&(_, ref x)| x.0 == el)
+               .map_or(ElementSelectorFlags::empty(), |(_, x)| x.1)
                .contains(flags) {
             return;
         }
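Because `iter()` now yields `(CacheIndex, &T)` pairs, read-only lookups like the one above simply destructure the index away. A standalone hedged sketch of that closure shape, with a hypothetical `(u32, u32)` payload standing in for the element/flags pair:

use arrayvec::Array;
use cache::{Entry, LRUCache};

// Return the cached value for `el`, or 0 if `el` is not in the cache.
fn lookup<A>(cache: &LRUCache<(u32, u32), A>, el: u32) -> u32
    where A: Array<Item = Entry<(u32, u32)>>
{
    cache.iter()
         .find(|&(_, ref item)| item.0 == el)  // same closure shape as above
         .map_or(0, |(_, item)| item.1)        // ignore the CacheIndex, keep the value
}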

@@ -68,7 +68,7 @@ use Atom;
 use applicable_declarations::ApplicableDeclarationBlock;
 use atomic_refcell::{AtomicRefCell, AtomicRefMut};
 use bloom::StyleBloom;
-use cache::LRUCache;
+use cache::{LRUCache, Entry};
 use context::{SelectorFlagsMap, SharedStyleContext, StyleContext};
 use dom::{TElement, SendElement};
 use matching::MatchMethods;
@@ -409,7 +409,7 @@ impl<E: TElement> StyleSharingTarget<E> {
 }
 
 struct SharingCacheBase<Candidate> {
-    entries: LRUCache<[Candidate; SHARING_CACHE_BACKING_STORE_SIZE]>,
+    entries: LRUCache<Candidate, [Entry<Candidate>; SHARING_CACHE_BACKING_STORE_SIZE]>,
 }
 
 impl<Candidate> Default for SharingCacheBase<Candidate> {
@@ -448,7 +448,7 @@ impl<E: TElement> SharingCache<E> {
         F: FnMut(&mut StyleSharingCandidate<E>) -> bool
     {
         let mut index = None;
-        for (i, candidate) in self.entries.iter_mut().enumerate() {
+        for (i, candidate) in self.entries.iter_mut() {
             if is_match(candidate) {
                 index = Some(i);
                 break;