diff --git a/components/servo_arc/lib.rs b/components/servo_arc/lib.rs
index d980eb2afab..4fbc094d070 100644
--- a/components/servo_arc/lib.rs
+++ b/components/servo_arc/lib.rs
@@ -69,15 +69,57 @@ macro_rules! offset_of {
 /// necessarily) at _exactly_ `MAX_REFCOUNT + 1` references.
 const MAX_REFCOUNT: usize = (isize::MAX) as usize;
 
+/// Wrapper type for pointers to get the non-zero optimization. When
+/// NonZero/Shared/Unique are stabilized, we should just use Shared
+/// here to get the same effect. Gankro is working on this in [1].
+///
+/// It's unfortunate that this needs to infect all the caller types
+/// with 'static. It would be nice to just use a &() and a PhantomData<T>
+/// instead, but then the compiler can't determine whether the &() should
+/// be thin or fat (which depends on whether or not T is sized). Given
+/// that this is all a temporary hack, this restriction is fine for now.
+///
+/// [1] https://github.com/rust-lang/rust/issues/27730
+pub struct NonZeroPtrMut<T: ?Sized + 'static>(&'static mut T);
+impl<T: ?Sized> NonZeroPtrMut<T> {
+    pub fn new(ptr: *mut T) -> Self {
+        assert!(!(ptr as *mut u8).is_null());
+        NonZeroPtrMut(unsafe { mem::transmute(ptr) })
+    }
+
+    pub fn ptr(&self) -> *mut T {
+        self.0 as *const T as *mut T
+    }
+}
+
+impl<T: ?Sized + 'static> Clone for NonZeroPtrMut<T> {
+    fn clone(&self) -> Self {
+        NonZeroPtrMut::new(self.ptr())
+    }
+}
+
+impl<T: ?Sized + 'static> fmt::Pointer for NonZeroPtrMut<T> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        fmt::Pointer::fmt(&self.ptr(), f)
+    }
+}
+
+impl<T: ?Sized + 'static> fmt::Debug for NonZeroPtrMut<T> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        <Self as fmt::Pointer>::fmt(self, f)
+    }
+}
+
+impl<T: ?Sized + 'static> PartialEq for NonZeroPtrMut<T> {
+    fn eq(&self, other: &Self) -> bool {
+        self.ptr() == other.ptr()
+    }
+}
+
+impl<T: ?Sized + 'static> Eq for NonZeroPtrMut<T> {}
+
 pub struct Arc<T: ?Sized + 'static> {
-    // FIXME(bholley): When NonZero/Shared/Unique are stabilized, we should use
-    // Shared here to get the NonZero optimization. Gankro is working on this.
-    //
-    // If we need a compact Option<Arc<T>> beforehand, we can make a helper
-    // class that wraps the result of Arc::into_raw.
-    //
-    // https://github.com/rust-lang/rust/issues/27730
-    ptr: *mut ArcInner<T>,
+    p: NonZeroPtrMut<ArcInner<T>>,
 }
 
 /// An Arc that is known to be uniquely owned
@@ -110,7 +152,7 @@ impl<T> Deref for UniqueArc<T> {
 impl<T> DerefMut for UniqueArc<T> {
     fn deref_mut(&mut self) -> &mut T {
         // We know this to be uniquely owned
-        unsafe { &mut (*self.0.ptr).data }
+        unsafe { &mut (*self.0.ptr()).data }
     }
 }
 
@@ -132,11 +174,11 @@ impl<T> Arc<T> {
             count: atomic::AtomicUsize::new(1),
             data: data,
         });
-        Arc { ptr: Box::into_raw(x) }
+        Arc { p: NonZeroPtrMut::new(Box::into_raw(x)) }
     }
 
     pub fn into_raw(this: Self) -> *const T {
-        let ptr = unsafe { &((*this.ptr).data) as *const _ };
+        let ptr = unsafe { &((*this.ptr()).data) as *const _ };
         mem::forget(this);
         ptr
     }
@@ -146,7 +188,7 @@ impl<T> Arc<T> {
         // to subtract the offset of the `data` field from the pointer.
         let ptr = (ptr as *const u8).offset(-offset_of!(ArcInner<T>, data));
         Arc {
-            ptr: ptr as *mut ArcInner<T>,
+            p: NonZeroPtrMut::new(ptr as *mut ArcInner<T>),
         }
     }
 }
@@ -159,19 +201,23 @@ impl<T: ?Sized> Arc<T> {
         // `ArcInner` structure itself is `Sync` because the inner data is
         // `Sync` as well, so we're ok loaning out an immutable pointer to these
         // contents.
-        unsafe { &*self.ptr }
+        unsafe { &*self.ptr() }
     }
 
     // Non-inlined part of `drop`. Just invokes the destructor.
     #[inline(never)]
     unsafe fn drop_slow(&mut self) {
-        let _ = Box::from_raw(self.ptr);
+        let _ = Box::from_raw(self.ptr());
     }
 
     #[inline]
     pub fn ptr_eq(this: &Self, other: &Self) -> bool {
-        this.ptr == other.ptr
+        this.ptr() == other.ptr()
+    }
+
+    fn ptr(&self) -> *mut ArcInner<T> {
+        self.p.ptr()
     }
 }
 
@@ -210,7 +256,7 @@ impl<T: ?Sized> Clone for Arc<T> {
             panic!();
         }
 
-        Arc { ptr: self.ptr }
+        Arc { p: NonZeroPtrMut::new(self.ptr()) }
     }
 }
 
@@ -237,7 +283,7 @@ impl<T: Clone> Arc<T> {
             // reference count is guaranteed to be 1 at this point, and we required
             // the Arc itself to be `mut`, so we're returning the only possible
             // reference to the inner data.
-            &mut (*this.ptr).data
+            &mut (*this.ptr()).data
         }
     }
 }
@@ -248,7 +294,7 @@ impl<T: ?Sized> Arc<T> {
         if this.is_unique() {
             unsafe {
                 // See make_mut() for documentation of the threadsafety here.
-                Some(&mut (*this.ptr).data)
+                Some(&mut (*this.ptr()).data)
             }
         } else {
             None
@@ -357,7 +403,7 @@ impl<T: ?Sized + fmt::Debug> fmt::Debug for Arc<T> {
 
 impl<T: ?Sized> fmt::Pointer for Arc<T> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        fmt::Pointer::fmt(&self.ptr, f)
+        fmt::Pointer::fmt(&self.ptr(), f)
     }
 }
 
@@ -516,7 +562,7 @@ impl<H, T> Arc<HeaderSlice<H, [T]>> {
 
         // Return the fat Arc.
         assert_eq!(size_of::<Self>(), size_of::<usize>() * 2, "The Arc will be fat");
-        Arc { ptr: ptr }
+        Arc { p: NonZeroPtrMut::new(ptr) }
     }
 }
 
@@ -572,7 +618,7 @@ impl<H, T> ThinArc<H, T> {
     {
         // Synthesize transient Arc, which never touches the refcount of the ArcInner.
         let transient = NoDrop::new(Arc {
-            ptr: thin_to_thick(self.ptr)
+            p: NonZeroPtrMut::new(thin_to_thick(self.ptr))
         });
 
         // Expose the transient Arc to the callback, which may clone it if it wants.
@@ -611,7 +657,7 @@ impl<H, T> Arc<HeaderSlice<H, [T]>> {
     pub fn into_thin(a: Self) -> ThinArc<H, T> {
         assert!(a.header.length == a.slice.len(),
                 "Length needs to be correct for ThinArc to work");
-        let fat_ptr: *mut ArcInner<HeaderSlice<H, [T]>> = a.ptr;
+        let fat_ptr: *mut ArcInner<HeaderSlice<H, [T]>> = a.ptr();
         mem::forget(a);
         let thin_ptr = fat_ptr as *mut [usize] as *mut usize;
         ThinArc {
@@ -625,7 +671,7 @@ impl<H, T> Arc<HeaderSlice<H, [T]>> {
         let ptr = thin_to_thick(a.ptr);
         mem::forget(a);
         Arc {
-            ptr: ptr
+            p: NonZeroPtrMut::new(ptr)
         }
     }
 }
diff --git a/tests/unit/script/size_of.rs b/tests/unit/script/size_of.rs
index 3368271d8e1..c7c04b4aac1 100644
--- a/tests/unit/script/size_of.rs
+++ b/tests/unit/script/size_of.rs
@@ -31,10 +31,10 @@ macro_rules! sizeof_checker (
 // Update the sizes here
 sizeof_checker!(size_event_target, EventTarget, 40);
 sizeof_checker!(size_node, Node, 184);
-sizeof_checker!(size_element, Element, 352);
-sizeof_checker!(size_htmlelement, HTMLElement, 368);
-sizeof_checker!(size_div, HTMLDivElement, 368);
-sizeof_checker!(size_span, HTMLSpanElement, 368);
+sizeof_checker!(size_element, Element, 344);
+sizeof_checker!(size_htmlelement, HTMLElement, 360);
+sizeof_checker!(size_div, HTMLDivElement, 360);
+sizeof_checker!(size_span, HTMLSpanElement, 360);
 sizeof_checker!(size_text, Text, 216);
 sizeof_checker!(size_characterdata, CharacterData, 216);
 sizeof_checker!(size_servothreadsafelayoutnode, ServoThreadSafeLayoutNode, 16);
diff --git a/tests/unit/stylo/size_of.rs b/tests/unit/stylo/size_of.rs
index 21bab9ba140..f73c44f0150 100644
--- a/tests/unit/stylo/size_of.rs
+++ b/tests/unit/stylo/size_of.rs
@@ -28,11 +28,11 @@ fn size_of_selectors_dummy_types() {
 // selectors (with the inline hashes) with as few cache misses as possible.
 size_of_test!(test_size_of_rule, style::stylist::Rule, 40);
 
-size_of_test!(test_size_of_option_arc_cv, Option<Arc<ComputedValues>>, 16);
+size_of_test!(test_size_of_option_arc_cv, Option<Arc<ComputedValues>>, 8);
 size_of_test!(test_size_of_option_rule_node, Option<StrongRuleNode>, 16);
 
-size_of_test!(test_size_of_computed_style, ComputedStyle, 56);
-size_of_test!(test_size_of_element_styles, ElementStyles, 72);
-size_of_test!(test_size_of_element_data, ElementData, 88);
+size_of_test!(test_size_of_computed_style, ComputedStyle, 40);
+size_of_test!(test_size_of_element_styles, ElementStyles, 56);
+size_of_test!(test_size_of_element_data, ElementData, 72);
 
 size_of_test!(test_size_of_property_declaration, style::properties::PropertyDeclaration, 32);
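
The point of replacing the raw `*mut ArcInner<T>` field with `NonZeroPtrMut` is that the compiler can then use the forbidden null bit pattern as the `None` case of `Option`, so `Option<Arc<T>>` collapses to a single word; that is exactly what the 16 -> 8 change to `test_size_of_option_arc_cv` above verifies, and the other size reductions follow from types that embed such options. Below is a minimal standalone sketch of the same layout effect, not part of the patch: it uses `std::ptr::NonNull` (stable today) in place of the hand-rolled `NonZeroPtrMut`, and the `Inner`/`RawArc`/`NicheArc` names are hypothetical stand-ins.

use std::mem::size_of;
use std::ptr::NonNull;
use std::sync::atomic::AtomicUsize;

// Hypothetical stand-in for ArcInner<T>.
struct Inner<T> {
    count: AtomicUsize,
    data: T,
}

// Old layout: a raw pointer may be null as far as the compiler knows,
// so Option<RawArc<T>> needs an extra discriminant word.
struct RawArc<T> {
    ptr: *mut Inner<T>,
}

// New layout: a known-non-null pointer lets Option reuse the null bit
// pattern for None (the "non-zero" / niche optimization). The patch
// hand-rolls NonZeroPtrMut because NonZero/Shared were not yet stable.
struct NicheArc<T> {
    p: NonNull<Inner<T>>,
}

fn main() {
    // On a 64-bit target this prints 16 and 8, mirroring the expectation
    // encoded in test_size_of_option_arc_cv.
    println!("Option<RawArc<u32>>:   {}", size_of::<Option<RawArc<u32>>>());
    println!("Option<NicheArc<u32>>: {}", size_of::<Option<NicheArc<u32>>>());
}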