mirror of
https://github.com/servo/servo.git
synced 2025-08-05 13:40:08 +01:00
style: Deduplicate a bit the malloc_size_of code for hashmaps / hashsets.
This allows experimenting with other hash maps easily, rather than depending on what hashglobe::fake::HashMap dereferences to. In particular, I wrote it while trying to get a build working with hashbrown.

Differential Revision: https://phabricator.services.mozilla.com/D14098
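With the macros this patch introduces, covering an additional table type becomes a one-line change per type. A purely hypothetical sketch (not part of this commit) of what the hashbrown experiment mentioned above could look like, assuming a hashbrown dependency is added and that its types expose the same iter()/values()/capacity() methods as the std collections:

// Hypothetical follow-up, not in this commit: hashbrown's types would each
// need only one invocation of the macros defined in the diff below.
malloc_size_of_hash_set!(hashbrown::HashSet<T, S>);
malloc_size_of_hash_map!(hashbrown::HashMap<K, V, S>);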
parent 626172d64c
commit 3f58e0b069

1 changed file with 74 additions and 167 deletions
@@ -436,126 +436,89 @@ where
     }
 }

-impl<T, S> MallocShallowSizeOf for std::collections::HashSet<T, S>
-where
-    T: Eq + Hash,
-    S: BuildHasher,
-{
-    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
-        if ops.has_malloc_enclosing_size_of() {
-            // The first value from the iterator gives us an interior pointer.
-            // `ops.malloc_enclosing_size_of()` then gives us the storage size.
-            // This assumes that the `HashSet`'s contents (values and hashes)
-            // are all stored in a single contiguous heap allocation.
-            self.iter()
-                .next()
-                .map_or(0, |t| unsafe { ops.malloc_enclosing_size_of(t) })
-        } else {
-            // An estimate.
-            self.capacity() * (size_of::<T>() + size_of::<usize>())
+macro_rules! malloc_size_of_hash_set {
+    ($ty:ty) => {
+        impl<T, S> MallocShallowSizeOf for $ty
+        where
+            T: Eq + Hash,
+            S: BuildHasher,
+        {
+            fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
+                if ops.has_malloc_enclosing_size_of() {
+                    // The first value from the iterator gives us an interior pointer.
+                    // `ops.malloc_enclosing_size_of()` then gives us the storage size.
+                    // This assumes that the `HashSet`'s contents (values and hashes)
+                    // are all stored in a single contiguous heap allocation.
+                    self.iter()
+                        .next()
+                        .map_or(0, |t| unsafe { ops.malloc_enclosing_size_of(t) })
+                } else {
+                    // An estimate.
+                    self.capacity() * (size_of::<T>() + size_of::<usize>())
+                }
+            }
+        }
+
+        impl<T, S> MallocSizeOf for $ty
+        where
+            T: Eq + Hash + MallocSizeOf,
+            S: BuildHasher,
+        {
+            fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
+                let mut n = self.shallow_size_of(ops);
+                for t in self.iter() {
+                    n += t.size_of(ops);
+                }
+                n
+            }
         }
     }
 }

-impl<T, S> MallocSizeOf for std::collections::HashSet<T, S>
-where
-    T: Eq + Hash + MallocSizeOf,
-    S: BuildHasher,
-{
-    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
-        let mut n = self.shallow_size_of(ops);
-        for t in self.iter() {
-            n += t.size_of(ops);
-        }
-        n
-    }
-}
+malloc_size_of_hash_set!(std::collections::HashSet<T, S>);
+malloc_size_of_hash_set!(hashglobe::hash_set::HashSet<T, S>);
+malloc_size_of_hash_set!(hashglobe::fake::HashSet<T, S>);

-impl<T, S> MallocShallowSizeOf for hashglobe::hash_set::HashSet<T, S>
-where
-    T: Eq + Hash,
-    S: BuildHasher,
-{
-    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
-        // See the implementation for std::collections::HashSet for details.
-        if ops.has_malloc_enclosing_size_of() {
-            self.iter()
-                .next()
-                .map_or(0, |t| unsafe { ops.malloc_enclosing_size_of(t) })
-        } else {
-            self.capacity() * (size_of::<T>() + size_of::<usize>())
+macro_rules! malloc_size_of_hash_map {
+    ($ty:ty) => {
+        impl<K, V, S> MallocShallowSizeOf for $ty
+        where
+            K: Eq + Hash,
+            S: BuildHasher,
+        {
+            fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
+                // See the implementation for std::collections::HashSet for details.
+                if ops.has_malloc_enclosing_size_of() {
+                    self.values()
+                        .next()
+                        .map_or(0, |v| unsafe { ops.malloc_enclosing_size_of(v) })
+                } else {
+                    self.capacity() * (size_of::<V>() + size_of::<K>() + size_of::<usize>())
+                }
+            }
+        }
+
+        impl<K, V, S> MallocSizeOf for $ty
+        where
+            K: Eq + Hash + MallocSizeOf,
+            V: MallocSizeOf,
+            S: BuildHasher,
+        {
+            fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
+                let mut n = self.shallow_size_of(ops);
+                for (k, v) in self.iter() {
+                    n += k.size_of(ops);
+                    n += v.size_of(ops);
+                }
+                n
+            }
         }
     }
 }

-impl<T, S> MallocSizeOf for hashglobe::hash_set::HashSet<T, S>
-where
-    T: Eq + Hash + MallocSizeOf,
-    S: BuildHasher,
-{
-    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
-        let mut n = self.shallow_size_of(ops);
-        for t in self.iter() {
-            n += t.size_of(ops);
-        }
-        n
-    }
-}
-
-impl<T, S> MallocShallowSizeOf for hashglobe::fake::HashSet<T, S>
-where
-    T: Eq + Hash,
-    S: BuildHasher,
-{
-    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
-        use std::ops::Deref;
-        self.deref().shallow_size_of(ops)
-    }
-}
-
-impl<T, S> MallocSizeOf for hashglobe::fake::HashSet<T, S>
-where
-    T: Eq + Hash + MallocSizeOf,
-    S: BuildHasher,
-{
-    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
-        use std::ops::Deref;
-        self.deref().size_of(ops)
-    }
-}
-
-impl<K, V, S> MallocShallowSizeOf for std::collections::HashMap<K, V, S>
-where
-    K: Eq + Hash,
-    S: BuildHasher,
-{
-    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
-        // See the implementation for std::collections::HashSet for details.
-        if ops.has_malloc_enclosing_size_of() {
-            self.values()
-                .next()
-                .map_or(0, |v| unsafe { ops.malloc_enclosing_size_of(v) })
-        } else {
-            self.capacity() * (size_of::<V>() + size_of::<K>() + size_of::<usize>())
-        }
-    }
-}
-
-impl<K, V, S> MallocSizeOf for std::collections::HashMap<K, V, S>
-where
-    K: Eq + Hash + MallocSizeOf,
-    V: MallocSizeOf,
-    S: BuildHasher,
-{
-    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
-        let mut n = self.shallow_size_of(ops);
-        for (k, v) in self.iter() {
-            n += k.size_of(ops);
-            n += v.size_of(ops);
-        }
-        n
-    }
-}
+malloc_size_of_hash_map!(std::collections::HashMap<K, V, S>);
+malloc_size_of_hash_map!(hashglobe::hash_map::HashMap<K, V, S>);
+malloc_size_of_hash_map!(hashglobe::fake::HashMap<K, V, S>);

 impl<K, V> MallocShallowSizeOf for std::collections::BTreeMap<K, V>
 where
@@ -587,62 +550,6 @@ where
     }
 }

-impl<K, V, S> MallocShallowSizeOf for hashglobe::hash_map::HashMap<K, V, S>
-where
-    K: Eq + Hash,
-    S: BuildHasher,
-{
-    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
-        // See the implementation for std::collections::HashSet for details.
-        if ops.has_malloc_enclosing_size_of() {
-            self.values()
-                .next()
-                .map_or(0, |v| unsafe { ops.malloc_enclosing_size_of(v) })
-        } else {
-            self.capacity() * (size_of::<V>() + size_of::<K>() + size_of::<usize>())
-        }
-    }
-}
-
-impl<K, V, S> MallocSizeOf for hashglobe::hash_map::HashMap<K, V, S>
-where
-    K: Eq + Hash + MallocSizeOf,
-    V: MallocSizeOf,
-    S: BuildHasher,
-{
-    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
-        let mut n = self.shallow_size_of(ops);
-        for (k, v) in self.iter() {
-            n += k.size_of(ops);
-            n += v.size_of(ops);
-        }
-        n
-    }
-}
-
-impl<K, V, S> MallocShallowSizeOf for hashglobe::fake::HashMap<K, V, S>
-where
-    K: Eq + Hash,
-    S: BuildHasher,
-{
-    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
-        use std::ops::Deref;
-        self.deref().shallow_size_of(ops)
-    }
-}
-
-impl<K, V, S> MallocSizeOf for hashglobe::fake::HashMap<K, V, S>
-where
-    K: Eq + Hash + MallocSizeOf,
-    V: MallocSizeOf,
-    S: BuildHasher,
-{
-    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
-        use std::ops::Deref;
-        self.deref().size_of(ops)
-    }
-}
-
 // PhantomData is always 0.
 impl<T> MallocSizeOf for std::marker::PhantomData<T> {
     fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
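When no malloc_enclosing_size_of callback is available, both macros fall back to the capacity-based estimate seen in the diff. A rough worked example of that arithmetic (my own illustration, not code from the patch), for a HashMap<u32, u64> with capacity 16 on a 64-bit target:

use std::mem::size_of;

// Illustration only: mirrors the fallback estimate from the patch,
//     self.capacity() * (size_of::<V>() + size_of::<K>() + size_of::<usize>()),
// where the extra usize presumably approximates the per-entry hash/metadata.
fn main() {
    let capacity = 16usize; // hypothetical HashMap<u32, u64> with capacity 16
    let estimate = capacity * (size_of::<u64>() + size_of::<u32>() + size_of::<usize>());
    // On a 64-bit target: 16 * (8 + 4 + 8) = 320 bytes.
    println!("estimated shallow size: {} bytes", estimate);
}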