diff --git a/components/malloc_size_of/lib.rs b/components/malloc_size_of/lib.rs
index ae80385eb07..5ecd29d794a 100644
--- a/components/malloc_size_of/lib.rs
+++ b/components/malloc_size_of/lib.rs
@@ -103,25 +103,32 @@ impl MallocSizeOfOps {
 
     /// Check if an allocation is empty. This relies on knowledge of how Rust
     /// handles empty allocations, which may change in the future.
-    fn is_empty<T>(ptr: *const T) -> bool {
-        return ptr as usize <= ::std::mem::align_of::<T>();
+    fn is_empty<T: ?Sized>(ptr: *const T) -> bool {
+        // The correct condition is this:
+        //   `ptr as usize <= ::std::mem::align_of::<T>()`
+        // But we can't call align_of() on a ?Sized T. So we approximate it
+        // with the following. 256 is large enough that it should always be
+        // larger than the required alignment, but small enough that it is
+        // always in the first page of memory and therefore not a legitimate
+        // address.
+        return ptr as *const usize as usize <= 256;
     }
 
     /// Call `size_of_op` on `ptr`, first checking that the allocation isn't
     /// empty, because some types (such as `Vec`) utilize empty allocations.
-    pub fn malloc_size_of<T: ?Sized>(&self, ptr: *const T) -> usize {
+    pub unsafe fn malloc_size_of<T: ?Sized>(&self, ptr: *const T) -> usize {
         if MallocSizeOfOps::is_empty(ptr) {
             0
         } else {
-            unsafe { (self.size_of_op)(ptr as *const c_void) }
+            (self.size_of_op)(ptr as *const c_void)
         }
     }
 
     /// Call `enclosing_size_of_op` on `ptr`, which must not be empty.
-    pub fn malloc_enclosing_size_of<T>(&self, ptr: *const T) -> usize {
+    pub unsafe fn malloc_enclosing_size_of<T>(&self, ptr: *const T) -> usize {
         assert!(!MallocSizeOfOps::is_empty(ptr));
         let enclosing_size_of_op = self.enclosing_size_of_op.expect("missing enclosing_size_of_op");
-        unsafe { enclosing_size_of_op(ptr as *const c_void) }
+        enclosing_size_of_op(ptr as *const c_void)
     }
 
     /// Call `have_seen_ptr_op` on `ptr`.
@@ -179,13 +186,13 @@ pub trait MallocConditionalShallowSizeOf {
     fn conditional_shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
 }
 
-impl<T: MallocSizeOf> MallocShallowSizeOf for Box<T> {
+impl<T: MallocSizeOf + ?Sized> MallocShallowSizeOf for Box<T> {
     fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
-        ops.malloc_size_of(&**self)
+        unsafe { ops.malloc_size_of(&**self) }
     }
 }
 
-impl<T: MallocSizeOf> MallocSizeOf for Box<T> {
+impl<T: MallocSizeOf + ?Sized> MallocSizeOf for Box<T> {
     fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
         self.shallow_size_of(ops) + (**self).size_of(ops)
     }
 }
@@ -207,9 +214,19 @@ impl<T: MallocSizeOf> MallocSizeOf for Option<T> {
     }
 }
 
+impl<T: MallocSizeOf> MallocSizeOf for [T] {
+    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
+        let mut n = 0;
+        for elem in self.iter() {
+            n += elem.size_of(ops);
+        }
+        n
+    }
+}
+
 impl<T> MallocShallowSizeOf for Vec<T> {
     fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
-        ops.malloc_size_of(self.as_ptr())
+        unsafe { ops.malloc_size_of(self.as_ptr()) }
     }
 }
 
@@ -226,7 +243,7 @@ impl<T: MallocSizeOf> MallocSizeOf for Vec<T> {
 impl<A: smallvec::Array> MallocShallowSizeOf for SmallVec<A> {
     fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
         if self.spilled() {
-            ops.malloc_size_of(self.as_ptr())
+            unsafe { ops.malloc_size_of(self.as_ptr()) }
         } else {
             0
         }
@@ -255,7 +272,7 @@ impl<T, S> MallocShallowSizeOf for HashSet<T, S>
         // `ops.malloc_enclosing_size_of()` then gives us the storage size.
         // This assumes that the `HashSet`'s contents (values and hashes) are
         // all stored in a single contiguous heap allocation.
-        self.iter().next().map_or(0, |t| ops.malloc_enclosing_size_of(t))
+        self.iter().next().map_or(0, |t| unsafe { ops.malloc_enclosing_size_of(t) })
     }
 }
 
@@ -281,7 +298,7 @@ impl<K, V, S> MallocShallowSizeOf for HashMap<K, V, S>
         // `ops.malloc_enclosing_size_of()` then gives us the storage size.
         // This assumes that the `HashMap`'s contents (keys, values, and
         // hashes) are all stored in a single contiguous heap allocation.
-        self.values().next().map_or(0, |v| ops.malloc_enclosing_size_of(v))
+        self.values().next().map_or(0, |v| unsafe { ops.malloc_enclosing_size_of(v) })
     }
 }
 
@@ -309,7 +326,7 @@ impl<K, V, S> MallocSizeOf for HashMap<K, V, S>
 
 impl<T> MallocUnconditionalShallowSizeOf for Arc<T> {
     fn unconditional_shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
-        ops.malloc_size_of(self.heap_ptr())
+        unsafe { ops.malloc_size_of(self.heap_ptr()) }
     }
 }
diff --git a/components/style/rule_tree/mod.rs b/components/style/rule_tree/mod.rs
index 83d5e32f44b..81e51ecc5e1 100644
--- a/components/style/rule_tree/mod.rs
+++ b/components/style/rule_tree/mod.rs
@@ -67,7 +67,7 @@ impl Drop for RuleTree {
 #[cfg(feature = "gecko")]
 impl MallocSizeOf for RuleTree {
     fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
-        let mut n = ops.malloc_size_of(self.root.ptr());
+        let mut n = unsafe { ops.malloc_size_of(self.root.ptr()) };
         n += self.root.get().size_of(ops);
         n
     }
@@ -806,7 +806,7 @@ impl MallocSizeOf for RuleNode {
     fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
         let mut n = 0;
         for child in self.iter_children() {
-            n += ops.malloc_size_of(child.ptr());
+            n += unsafe { ops.malloc_size_of(child.ptr()) };
             n += unsafe { (*child.ptr()).size_of(ops) };
         }
         n
diff --git a/components/style/stylesheets/style_rule.rs b/components/style/stylesheets/style_rule.rs
index 017a0577128..fbc391f6d2b 100644
--- a/components/style/stylesheets/style_rule.rs
+++ b/components/style/stylesheets/style_rule.rs
@@ -55,7 +55,7 @@ impl StyleRule {
             // It's safe to measure this ThinArc directly because it's the
             // "primary" reference. (The secondary references are on the
             // Stylist.)
-            n += ops.malloc_size_of(selector.thin_arc_heap_ptr());
+            n += unsafe { ops.malloc_size_of(selector.thin_arc_heap_ptr()) };
         }
         n += self.block.unconditional_shallow_size_of(ops) +
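
Reviewer note, not part of the patch: the 256-byte heuristic in `is_empty` rests on the fact that Rust's empty collections hand out dangling, well-aligned pointers whose numeric value equals the element alignment. A standalone check of that assumption, using only the standard library:

fn main() {
    // An empty Vec performs no heap allocation. as_ptr() returns a
    // dangling but aligned pointer whose address equals the alignment
    // of the element type (8 for u64), comfortably below 256, so
    // is_empty() screens it out before size_of_op is ever called.
    let empty: Vec<u64> = Vec::new();
    assert!((empty.as_ptr() as usize) <= 256);
}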
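The call-site pattern this patch establishes: `malloc_size_of` and `malloc_enclosing_size_of` are now `unsafe fn`, so each implementor must assert, via an `unsafe` block, that the pointer is either an empty-allocation sentinel or the start of a live heap block. Below is a minimal sketch of a downstream impl under the new contract; `MyBuffer` and the `malloc_size_of` crate import are illustrative assumptions that mirror the `Vec<T>` impl above rather than quote real Servo code.

extern crate malloc_size_of;

use malloc_size_of::{MallocSizeOf, MallocSizeOfOps};

// Hypothetical downstream type owning a single heap buffer.
struct MyBuffer {
    data: Vec<u8>,
}

impl MallocSizeOf for MyBuffer {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // data.as_ptr() is either Vec's dangling sentinel (screened out
        // by is_empty) or the start of the live heap buffer, which is
        // exactly what the unsafe contract now requires of the caller.
        unsafe { ops.malloc_size_of(self.data.as_ptr()) }
    }
}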
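The `HashSet`/`HashMap` hunks use the other operation: `enclosing_size_of_op` takes a pointer anywhere inside an allocation and reports the size of the whole block, so measuring one resident element measures the entire table. A hedged sketch of the same trick for a hypothetical single-allocation container (`MyTable` is invented for illustration):

use malloc_size_of::{MallocShallowSizeOf, MallocSizeOfOps};

// Hypothetical container that keeps every entry in one contiguous heap
// allocation, matching the assumption the HashSet/HashMap impls state.
struct MyTable<T> {
    entries: Box<[T]>,
}

impl<T> MallocShallowSizeOf for MyTable<T> {
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // Borrow any resident entry and ask the allocator for the size of
        // the allocation enclosing it. An empty container has no valid
        // interior pointer, hence the map_or(0, ...) fallback.
        self.entries.first().map_or(0, |e| unsafe {
            ops.malloc_enclosing_size_of(e)
        })
    }
}

As with the real impls, this undercounts as soon as a container spills entries into side allocations; the single-allocation assumption is load-bearing.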