diff --git a/components/layout/flow.rs b/components/layout/flow.rs
index 5d10a6d9fca..5227f97dc43 100644
--- a/components/layout/flow.rs
+++ b/components/layout/flow.rs
@@ -1393,7 +1393,7 @@ impl ContainingBlockLink {
                 panic!("Link to containing block not established; perhaps you forgot to call \
                         `set_absolute_descendants`?")
             }
-            Some(ref mut link) => link.generated_containing_block_size(for_flow),
+            Some(ref link) => link.upgrade().unwrap().generated_containing_block_size(for_flow),
         }
     }
 }
diff --git a/components/layout/flow_ref.rs b/components/layout/flow_ref.rs
index 6188c882ce9..f120841d2e4 100644
--- a/components/layout/flow_ref.rs
+++ b/components/layout/flow_ref.rs
@@ -10,7 +10,7 @@
 
 #![allow(unsafe_code)]
 
-use flow::Flow;
+use flow::{Flow, BaseFlow};
 use flow;
 
 use std::mem;
@@ -133,11 +133,19 @@ impl Clone for FlowRef {
     }
 }
 
+fn base<'a>(r: &WeakFlowRef) -> &'a BaseFlow {
+    let data = r.object.data;
+    debug_assert!(!data.is_null());
+    unsafe {
+        mem::transmute::<*mut (), &'a BaseFlow>(data)
+    }
+}
+
 impl WeakFlowRef {
     /// Upgrades a WeakFlowRef to a FlowRef.
     pub fn upgrade(&self) -> Option<FlowRef> {
         unsafe {
-            let object = flow::base(&**self);
+            let object = base(self);
             // We use a CAS loop to increment the strong count instead of a
             // fetch_add because once the count hits 0 is must never be above
             // 0.
@@ -153,27 +161,10 @@ impl WeakFlowRef {
     }
 }
 
-impl<'a> Deref for WeakFlowRef {
-    type Target = Flow + 'a;
-    fn deref(&self) -> &(Flow + 'a) {
-        unsafe {
-            mem::transmute_copy::<raw::TraitObject, &(Flow + 'a)>(&self.object)
-        }
-    }
-}
-
-impl DerefMut for WeakFlowRef {
-    fn deref_mut<'a>(&mut self) -> &mut (Flow + 'a) {
-        unsafe {
-            mem::transmute_copy::<raw::TraitObject, &mut (Flow + 'a)>(&self.object)
-        }
-    }
-}
-
 impl Clone for WeakFlowRef {
     fn clone(&self) -> WeakFlowRef {
         unsafe {
-            flow::base(&**self).weak_ref_count().fetch_add(1, Ordering::Relaxed);
+            base(self).weak_ref_count().fetch_add(1, Ordering::Relaxed);
         }
         WeakFlowRef { object: self.object }
     }
@@ -187,7 +178,7 @@ impl Drop for WeakFlowRef {
             return
         }
 
-        if flow::base(&**self).weak_ref_count().fetch_sub(1, Ordering::Release) == 1 {
+        if base(self).weak_ref_count().fetch_sub(1, Ordering::Release) == 1 {
             atomic::fence(Ordering::Acquire);
 
             // This dance deallocates the Box without running its