From 2d22aa8e7e99ef52198e0f5918a2a16483aaba0e Mon Sep 17 00:00:00 2001 From: Simon Sapin Date: Fri, 14 Aug 2015 14:33:58 +0200 Subject: [PATCH 1/3] Replace the unsound `impl DerefMut for FlowRef` with an unsafe function. See #6503. --- components/layout/construct.rs | 46 +++++++++++++---------- components/layout/display_list_builder.rs | 8 ++-- components/layout/flow.rs | 8 ++-- components/layout/flow_list.rs | 9 +++-- components/layout/flow_ref.rs | 12 +++--- components/layout/fragment.rs | 32 ++++++++++------ components/layout/inline.rs | 18 +++++---- components/layout/layout_task.rs | 24 ++++++------ components/layout/parallel.rs | 22 ++++++----- components/layout/sequential.rs | 14 ++++--- 10 files changed, 112 insertions(+), 81 deletions(-) diff --git a/components/layout/construct.rs b/components/layout/construct.rs index a53f0d0b209..e726f687beb 100644 --- a/components/layout/construct.rs +++ b/components/layout/construct.rs @@ -19,7 +19,7 @@ use data::{HAS_NEWLY_CONSTRUCTED_FLOW, LayoutDataWrapper}; use floats::FloatKind; use flow::{MutableFlowUtils, MutableOwnedFlowUtils}; use flow::{self, AbsoluteDescendants, Flow, ImmutableFlowUtils, IS_ABSOLUTELY_POSITIONED}; -use flow_ref::FlowRef; +use flow_ref::{self, FlowRef}; use fragment::{CanvasFragmentInfo, ImageFragmentInfo, InlineAbsoluteFragmentInfo}; use fragment::{Fragment, GeneratedContentInfo, IframeFragmentInfo}; use fragment::{InlineAbsoluteHypotheticalFragmentInfo, TableColumnFragmentInfo}; @@ -415,6 +415,7 @@ impl<'a> FlowConstructor<'a> { /// `#[inline(always)]` because this is performance critical and LLVM will not inline it /// otherwise. #[inline(always)] + #[allow(unsafe_code)] fn flush_inline_fragments_to_flow_or_list(&mut self, fragment_accumulator: InlineFragmentsAccumulator, flow: &mut FlowRef, @@ -480,7 +481,8 @@ impl<'a> FlowConstructor<'a> { absolute_descendants.push_descendants(fragments.absolute_descendants); { - let inline_flow = inline_flow_ref.as_mut_inline(); + // FIXME(#6503): Use Arc::get_mut().unwrap() here. + let inline_flow = unsafe { flow_ref::deref_mut(&mut inline_flow_ref) }.as_mut_inline(); let (ascent, descent) = @@ -1287,6 +1289,7 @@ impl<'a> FlowConstructor<'a> { /// /// TODO(pcwalton): Add some more fast paths, like toggling `display: none`, adding block kids /// to block parents with no {ib} splits, adding out-of-flow kids, etc. + #[allow(unsafe_code)] pub fn repair_if_possible(&mut self, node: &ThreadSafeLayoutNode) -> bool { // We can skip reconstructing the flow if we don't have to reconstruct and none of our kids // did either. @@ -1317,7 +1320,8 @@ impl<'a> FlowConstructor<'a> { if !flow.is_block_flow() { return false } - flow::mut_base(&mut **flow).restyle_damage.insert(damage); + let flow = unsafe { flow_ref::deref_mut(flow) }; + flow::mut_base(flow).restyle_damage.insert(damage); flow.repair_style_and_bubble_inline_sizes(&style); true } @@ -1341,27 +1345,29 @@ impl<'a> FlowConstructor<'a> { match fragment.specific { SpecificFragmentInfo::InlineBlock(ref mut inline_block_fragment) => { - flow::mut_base(&mut *inline_block_fragment.flow_ref).restyle_damage - .insert(damage); + let flow_ref = unsafe { + flow_ref::deref_mut(&mut inline_block_fragment.flow_ref) + }; + flow::mut_base(flow_ref).restyle_damage.insert(damage); // FIXME(pcwalton): Fragment restyle damage too? 
- inline_block_fragment.flow_ref - .repair_style_and_bubble_inline_sizes(&style); + flow_ref.repair_style_and_bubble_inline_sizes(&style); } SpecificFragmentInfo::InlineAbsoluteHypothetical( ref mut inline_absolute_hypothetical_fragment) => { - flow::mut_base(&mut *inline_absolute_hypothetical_fragment.flow_ref) - .restyle_damage.insert(damage); + let flow_ref = unsafe { + flow_ref::deref_mut(&mut inline_absolute_hypothetical_fragment.flow_ref) + }; + flow::mut_base(flow_ref).restyle_damage.insert(damage); // FIXME(pcwalton): Fragment restyle damage too? - inline_absolute_hypothetical_fragment - .flow_ref - .repair_style_and_bubble_inline_sizes(&style); + flow_ref.repair_style_and_bubble_inline_sizes(&style); } SpecificFragmentInfo::InlineAbsolute(ref mut inline_absolute_fragment) => { - flow::mut_base(&mut *inline_absolute_fragment.flow_ref).restyle_damage - .insert(damage); + let flow_ref = unsafe { + flow_ref::deref_mut(&mut inline_absolute_fragment.flow_ref) + }; + flow::mut_base(flow_ref).restyle_damage.insert(damage); // FIXME(pcwalton): Fragment restyle damage too? - inline_absolute_fragment.flow_ref - .repair_style_and_bubble_inline_sizes(&style); + flow_ref.repair_style_and_bubble_inline_sizes(&style); } SpecificFragmentInfo::ScannedText(_) | SpecificFragmentInfo::UnscannedText(_) => { @@ -1655,13 +1661,14 @@ impl FlowConstructionUtils for FlowRef { /// Adds a new flow as a child of this flow. Fails if this flow is marked as a leaf. /// /// This must not be public because only the layout constructor can do this. + #[allow(unsafe_code)] fn add_new_child(&mut self, mut new_child: FlowRef) { { - let kid_base = flow::mut_base(&mut *new_child); + let kid_base = flow::mut_base(unsafe { flow_ref::deref_mut(&mut new_child) }); kid_base.parallel.parent = parallel::mut_owned_flow_to_unsafe_flow(self); } - let base = flow::mut_base(&mut **self); + let base = flow::mut_base(unsafe { flow_ref::deref_mut(self) }); base.children.push_back(new_child); let _ = base.parallel.children_count.fetch_add(1, Ordering::Relaxed); } @@ -1674,9 +1681,10 @@ impl FlowConstructionUtils for FlowRef { /// properly computed. (This is not, however, a memory safety problem.) /// /// This must not be public because only the layout constructor can do this. + #[allow(unsafe_code)] fn finish(&mut self) { if !opts::get().bubble_inline_sizes_separately { - self.bubble_inline_sizes() + unsafe { flow_ref::deref_mut(self) }.bubble_inline_sizes() } } } diff --git a/components/layout/display_list_builder.rs b/components/layout/display_list_builder.rs index 2e2ff543650..259b420c977 100644 --- a/components/layout/display_list_builder.rs +++ b/components/layout/display_list_builder.rs @@ -14,6 +14,7 @@ use azure::azure_hl::Color; use block::BlockFlow; use context::LayoutContext; use flow::{self, BaseFlow, Flow, IS_ABSOLUTELY_POSITIONED, NEEDS_LAYER}; +use flow_ref; use fragment::{CoordinateSystem, Fragment, IframeFragmentInfo, ImageFragmentInfo}; use fragment::{ScannedTextFragmentInfo, SpecificFragmentInfo}; use inline::InlineFlow; @@ -1770,6 +1771,7 @@ pub trait InlineFlowDisplayListBuilding { } impl InlineFlowDisplayListBuilding for InlineFlow { + #[allow(unsafe_code)] fn build_display_list_for_inline(&mut self, layout_context: &LayoutContext) { // TODO(#228): Once we form lines and have their cached bounds, we can be smarter and // not recurse on a line if nothing in it can intersect the dirty region. 
@@ -1796,17 +1798,17 @@ impl InlineFlowDisplayListBuilding for InlineFlow { match fragment.specific { SpecificFragmentInfo::InlineBlock(ref mut block_flow) => { - let block_flow = &mut *block_flow.flow_ref; + let block_flow = unsafe { flow_ref::deref_mut(&mut block_flow.flow_ref) }; flow::mut_base(block_flow).display_list_building_result .add_to(&mut *display_list) } SpecificFragmentInfo::InlineAbsoluteHypothetical(ref mut block_flow) => { - let block_flow = &mut *block_flow.flow_ref; + let block_flow = unsafe { flow_ref::deref_mut(&mut block_flow.flow_ref) }; flow::mut_base(block_flow).display_list_building_result .add_to(&mut *display_list) } SpecificFragmentInfo::InlineAbsolute(ref mut block_flow) => { - let block_flow = &mut *block_flow.flow_ref; + let block_flow = unsafe { flow_ref::deref_mut(&mut block_flow.flow_ref) }; flow::mut_base(block_flow).display_list_building_result .add_to(&mut *display_list) } diff --git a/components/layout/flow.rs b/components/layout/flow.rs index e41b19c9bb0..1c559d4e112 100644 --- a/components/layout/flow.rs +++ b/components/layout/flow.rs @@ -30,7 +30,7 @@ use context::LayoutContext; use display_list_builder::DisplayListBuildingResult; use floats::Floats; use flow_list::{FlowList, FlowListIterator, MutFlowListIterator}; -use flow_ref::{FlowRef, WeakFlowRef}; +use flow_ref::{self, FlowRef, WeakFlowRef}; use fragment::{Fragment, FragmentBorderBoxIterator, SpecificFragmentInfo}; use incremental::{self, RECONSTRUCT_FLOW, REFLOW, REFLOW_OUT_OF_FLOW, RestyleDamage}; use inline::InlineFlow; @@ -771,8 +771,9 @@ pub struct AbsoluteDescendantIter<'a> { impl<'a> Iterator for AbsoluteDescendantIter<'a> { type Item = &'a mut (Flow + 'a); + #[allow(unsafe_code)] fn next(&mut self) -> Option<&'a mut (Flow + 'a)> { - self.iter.next().map(|info| &mut *info.flow) + self.iter.next().map(|info| unsafe { flow_ref::deref_mut(&mut info.flow) }) } } @@ -1382,9 +1383,10 @@ impl MutableOwnedFlowUtils for FlowRef { /// This is called during flow construction, so nothing else can be accessing the descendant /// flows. This is enforced by the fact that we have a mutable `FlowRef`, which only flow /// construction is allowed to possess. + #[allow(unsafe_code)] fn set_absolute_descendants(&mut self, abs_descendants: AbsoluteDescendants) { let this = self.clone(); - let base = mut_base(&mut **self); + let base = mut_base(unsafe { flow_ref::deref_mut(self) }); base.abs_descendants = abs_descendants; for descendant_link in base.abs_descendants.iter() { let descendant_base = mut_base(descendant_link); diff --git a/components/layout/flow_list.rs b/components/layout/flow_list.rs index ea11b545dac..0d1df38051f 100644 --- a/components/layout/flow_list.rs +++ b/components/layout/flow_list.rs @@ -3,7 +3,7 @@ * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ use flow::Flow; -use flow_ref::FlowRef; +use flow_ref::{self, FlowRef}; use std::collections::{linked_list, LinkedList}; @@ -33,7 +33,7 @@ impl FlowList { #[inline] #[allow(unsafe_code)] pub unsafe fn front_mut<'a>(&'a mut self) -> Option<&'a mut Flow> { - self.flows.front_mut().map(|head| &mut **head) + self.flows.front_mut().map(|head| flow_ref::deref_mut(head)) } /// Provide a reference to the back element, or None if the list is empty @@ -46,7 +46,7 @@ impl FlowList { #[inline] #[allow(unsafe_code)] pub unsafe fn back_mut<'a>(&'a mut self) -> Option<&'a mut Flow> { - self.flows.back_mut().map(|tail| &mut **tail) + self.flows.back_mut().map(|tail| flow_ref::deref_mut(tail)) } /// Add an element first in the list @@ -123,8 +123,9 @@ impl<'a> Iterator for FlowListIterator<'a> { impl<'a> Iterator for MutFlowListIterator<'a> { type Item = &'a mut (Flow + 'a); #[inline] + #[allow(unsafe_code)] fn next(&mut self) -> Option<&'a mut (Flow + 'a)> { - self.it.next().map(|x| &mut **x) + self.it.next().map(|x| unsafe { flow_ref::deref_mut(x) }) } #[inline] diff --git a/components/layout/flow_ref.rs b/components/layout/flow_ref.rs index 65c07f81f19..b7d143734fb 100644 --- a/components/layout/flow_ref.rs +++ b/components/layout/flow_ref.rs @@ -14,7 +14,7 @@ use flow; use flow::{Flow, BaseFlow}; use std::mem; -use std::ops::{Deref, DerefMut}; +use std::ops::Deref; use std::ptr; use std::raw; use std::rt::heap; @@ -67,12 +67,10 @@ impl<'a> Deref for FlowRef { } } -impl DerefMut for FlowRef { - fn deref_mut<'a>(&mut self) -> &mut (Flow + 'a) { - unsafe { - mem::transmute_copy::(&self.object) - } - } +// FIXME(https://github.com/servo/servo/issues/6503) This introduces unsound mutable aliasing. +// Try to replace it with Arc::get_mut (which checks that the reference count is 1). +pub unsafe fn deref_mut<'a>(flow_ref: &mut FlowRef) -> &mut (Flow + 'a) { + mem::transmute_copy::(&flow_ref.object) } impl Drop for FlowRef { diff --git a/components/layout/fragment.rs b/components/layout/fragment.rs index 542fd5dd416..d60785c1df1 100644 --- a/components/layout/fragment.rs +++ b/components/layout/fragment.rs @@ -11,7 +11,7 @@ use context::LayoutContext; use floats::ClearType; use flow; use flow::Flow; -use flow_ref::FlowRef; +use flow_ref::{self, FlowRef}; use incremental::{self, RestyleDamage}; use inline::{InlineFragmentContext, InlineFragmentNodeInfo, InlineMetrics}; use layout_debug; @@ -1248,6 +1248,7 @@ impl Fragment { } /// Computes the intrinsic inline-sizes of this fragment. + #[allow(unsafe_code)] pub fn compute_intrinsic_inline_sizes(&mut self) -> IntrinsicISizesContribution { let mut result = self.style_specified_intrinsic_inline_size(); match self.specific { @@ -1260,11 +1261,11 @@ impl Fragment { SpecificFragmentInfo::TableRow | SpecificFragmentInfo::TableWrapper | SpecificFragmentInfo::InlineAbsoluteHypothetical(_) => {} - SpecificFragmentInfo::InlineBlock(ref mut info) => { + SpecificFragmentInfo::InlineBlock(ref info) => { let block_flow = info.flow_ref.as_block(); result.union_block(&block_flow.base.intrinsic_inline_sizes) } - SpecificFragmentInfo::InlineAbsolute(ref mut info) => { + SpecificFragmentInfo::InlineAbsolute(ref info) => { let block_flow = info.flow_ref.as_block(); result.union_block(&block_flow.base.intrinsic_inline_sizes) } @@ -1619,6 +1620,7 @@ impl Fragment { /// Assigns replaced inline-size, padding, and margins for this fragment only if it is replaced /// content per CSS 2.1 § 10.3.2. 
+ #[allow(unsafe_code)] pub fn assign_replaced_inline_size_if_necessary<'a>(&'a mut self, container_inline_size: Au) { match self.specific { SpecificFragmentInfo::Generic | @@ -1647,7 +1649,7 @@ impl Fragment { match self.specific { SpecificFragmentInfo::InlineAbsoluteHypothetical(ref mut info) => { - let block_flow = info.flow_ref.as_mut_block(); + let block_flow = unsafe { flow_ref::deref_mut(&mut info.flow_ref) }.as_mut_block(); block_flow.base.position.size.inline = block_flow.base.intrinsic_inline_sizes.preferred_inline_size; @@ -1655,7 +1657,7 @@ impl Fragment { self.border_box.size.inline = Au(0); } SpecificFragmentInfo::InlineBlock(ref mut info) => { - let block_flow = info.flow_ref.as_mut_block(); + let block_flow = unsafe { flow_ref::deref_mut(&mut info.flow_ref) }.as_mut_block(); self.border_box.size.inline = max(block_flow.base.intrinsic_inline_sizes.minimum_inline_size, block_flow.base.intrinsic_inline_sizes.preferred_inline_size); @@ -1663,7 +1665,7 @@ impl Fragment { block_flow.base.block_container_writing_mode = self.style.writing_mode; } SpecificFragmentInfo::InlineAbsolute(ref mut info) => { - let block_flow = info.flow_ref.as_mut_block(); + let block_flow = unsafe { flow_ref::deref_mut(&mut info.flow_ref) }.as_mut_block(); self.border_box.size.inline = max(block_flow.base.intrinsic_inline_sizes.minimum_inline_size, block_flow.base.intrinsic_inline_sizes.preferred_inline_size); @@ -1711,6 +1713,7 @@ impl Fragment { /// been assigned first. /// /// Ideally, this should follow CSS 2.1 § 10.6.2. + #[allow(unsafe_code)] pub fn assign_replaced_block_size_if_necessary(&mut self, containing_block_block_size: Option) { match self.specific { SpecificFragmentInfo::Generic | @@ -1767,18 +1770,18 @@ impl Fragment { } SpecificFragmentInfo::InlineBlock(ref mut info) => { // Not the primary fragment, so we do not take the noncontent size into account. - let block_flow = info.flow_ref.as_block(); + let block_flow = unsafe { flow_ref::deref_mut(&mut info.flow_ref) }.as_block(); self.border_box.size.block = block_flow.base.position.size.block + block_flow.fragment.margin.block_start_end() } SpecificFragmentInfo::InlineAbsoluteHypothetical(ref mut info) => { // Not the primary fragment, so we do not take the noncontent size into account. - let block_flow = info.flow_ref.as_block(); + let block_flow = unsafe { flow_ref::deref_mut(&mut info.flow_ref) }.as_block(); self.border_box.size.block = block_flow.base.position.size.block; } SpecificFragmentInfo::InlineAbsolute(ref mut info) => { // Not the primary fragment, so we do not take the noncontent size into account. - let block_flow = info.flow_ref.as_block(); + let block_flow = unsafe { flow_ref::deref_mut(&mut info.flow_ref) }.as_block(); self.border_box.size.block = block_flow.base.position.size.block + block_flow.fragment.margin.block_start_end() } @@ -1904,9 +1907,10 @@ impl Fragment { /// Determines the inline sizes of inline-block fragments. These cannot be fully computed until /// inline size assignment has run for the child flow: thus it is computed "late", during /// block size assignment. 
+ #[allow(unsafe_code)] pub fn update_late_computed_replaced_inline_size_if_necessary(&mut self) { if let SpecificFragmentInfo::InlineBlock(ref mut inline_block_info) = self.specific { - let block_flow = inline_block_info.flow_ref.as_block(); + let block_flow = unsafe { flow_ref::deref_mut(&mut inline_block_info.flow_ref) }.as_block(); let margin = block_flow.fragment.style.logical_margin(); self.border_box.size.inline = block_flow.fragment.border_box.size.inline + MaybeAuto::from_style(margin.inline_start, Au(0)).specified_or_zero() + @@ -1914,21 +1918,25 @@ impl Fragment { } } + #[allow(unsafe_code)] pub fn update_late_computed_inline_position_if_necessary(&mut self) { match self.specific { SpecificFragmentInfo::InlineAbsoluteHypothetical(ref mut info) => { let position = self.border_box.start.i; - info.flow_ref.update_late_computed_inline_position_if_necessary(position) + unsafe { flow_ref::deref_mut(&mut info.flow_ref) } + .update_late_computed_inline_position_if_necessary(position) } _ => {} } } + #[allow(unsafe_code)] pub fn update_late_computed_block_position_if_necessary(&mut self) { match self.specific { SpecificFragmentInfo::InlineAbsoluteHypothetical(ref mut info) => { let position = self.border_box.start.b; - info.flow_ref.update_late_computed_block_position_if_necessary(position) + unsafe { flow_ref::deref_mut(&mut info.flow_ref) } + .update_late_computed_block_position_if_necessary(position) } _ => {} } diff --git a/components/layout/inline.rs b/components/layout/inline.rs index 3a23634aa40..966cf5f9bff 100644 --- a/components/layout/inline.rs +++ b/components/layout/inline.rs @@ -10,6 +10,7 @@ use display_list_builder::{FragmentDisplayListBuilding, InlineFlowDisplayListBui use floats::{FloatKind, Floats, PlacementInfo}; use flow::{MutableFlowUtils, OpaqueFlow}; use flow::{self, BaseFlow, FlowClass, Flow, ForceNonfloatedFlag, IS_ABSOLUTELY_POSITIONED}; +use flow_ref; use fragment::{CoordinateSystem, Fragment, FragmentBorderBoxIterator, SpecificFragmentInfo}; use incremental::{REFLOW, REFLOW_OUT_OF_FLOW, RESOLVE_GENERATED_CONTENT}; use layout_debug; @@ -1596,6 +1597,7 @@ impl Flow for InlineFlow { self.base.restyle_damage.remove(REFLOW_OUT_OF_FLOW | REFLOW); } + #[allow(unsafe_code)] fn compute_absolute_position(&mut self, _: &LayoutContext) { // First, gather up the positions of all the containing blocks (if any). 
// @@ -1656,9 +1658,10 @@ impl Flow for InlineFlow { let is_positioned = fragment.is_positioned(); match fragment.specific { SpecificFragmentInfo::InlineBlock(ref mut info) => { - flow::mut_base(&mut *info.flow_ref).clip = clip; + let flow = unsafe { flow_ref::deref_mut(&mut info.flow_ref) }; + flow::mut_base(flow).clip = clip; - let block_flow = info.flow_ref.as_mut_block(); + let block_flow = flow.as_mut_block(); block_flow.base.absolute_position_info = self.base.absolute_position_info; let stacking_relative_position = self.base.stacking_relative_position; @@ -1676,9 +1679,9 @@ impl Flow for InlineFlow { self.base.stacking_relative_position_of_display_port; } SpecificFragmentInfo::InlineAbsoluteHypothetical(ref mut info) => { - flow::mut_base(&mut *info.flow_ref).clip = clip; - - let block_flow = info.flow_ref.as_mut_block(); + let flow = unsafe { flow_ref::deref_mut(&mut info.flow_ref) }; + flow::mut_base(flow).clip = clip; + let block_flow = flow.as_mut_block(); block_flow.base.absolute_position_info = self.base.absolute_position_info; block_flow.base.stacking_relative_position = @@ -1687,9 +1690,10 @@ impl Flow for InlineFlow { self.base.stacking_relative_position_of_display_port; } SpecificFragmentInfo::InlineAbsolute(ref mut info) => { - flow::mut_base(&mut *info.flow_ref).clip = clip; + let flow = unsafe { flow_ref::deref_mut(&mut info.flow_ref) }; + flow::mut_base(flow).clip = clip; - let block_flow = info.flow_ref.as_mut_block(); + let block_flow = flow.as_mut_block(); block_flow.base.absolute_position_info = self.base.absolute_position_info; let stacking_relative_position = self.base.stacking_relative_position; diff --git a/components/layout/layout_task.rs b/components/layout/layout_task.rs index 6da26ab59a9..5ae874cb697 100644 --- a/components/layout/layout_task.rs +++ b/components/layout/layout_task.rs @@ -14,7 +14,7 @@ use cssparser::ToCss; use data::LayoutDataWrapper; use display_list_builder::ToGfxColor; use flow::{self, Flow, ImmutableFlowUtils, MutableFlowUtils, MutableOwnedFlowUtils}; -use flow_ref::FlowRef; +use flow_ref::{self, FlowRef}; use fragment::{Fragment, FragmentBorderBoxIterator, SpecificFragmentInfo}; use incremental::{LayoutDamageComputation, REFLOW, REFLOW_ENTIRE_DOCUMENT, REPAINT}; use layout_debug; @@ -781,7 +781,7 @@ impl LayoutTask { _ => return None, }; - flow.mark_as_root(); + unsafe { flow_ref::deref_mut(&mut flow) }.mark_as_root(); Some(flow) } @@ -999,11 +999,11 @@ impl LayoutTask { self.profiler_metadata(), self.time_profiler_chan.clone(), || { - flow::mut_base(&mut **layout_root).stacking_relative_position = + flow::mut_base(unsafe { flow_ref::deref_mut(layout_root) }).stacking_relative_position = LogicalPoint::zero(writing_mode).to_physical(writing_mode, rw_data.screen_size); - flow::mut_base(&mut **layout_root).clip = + flow::mut_base(unsafe { flow_ref::deref_mut(layout_root) }).clip = ClippingRegion::from_rect(&data.page_clip_rect); match (&mut rw_data.parallel_traversal, opts::get().parallel_display_list_building) { @@ -1023,14 +1023,16 @@ impl LayoutTask { if data.goal == ReflowGoal::ForDisplay { debug!("Done building display list."); - let root_background_color = get_root_flow_background_color(&mut **layout_root); + let root_background_color = get_root_flow_background_color( + unsafe { flow_ref::deref_mut(layout_root) }); let root_size = { let root_flow = flow::base(&**layout_root); root_flow.position.size.to_physical(root_flow.writing_mode) }; let mut display_list = box DisplayList::new(); - flow::mut_base(&mut 
**layout_root).display_list_building_result - .add_to(&mut *display_list); + flow::mut_base(unsafe { flow_ref::deref_mut(layout_root) }) + .display_list_building_result + .add_to(&mut *display_list); let paint_layer = PaintLayer::new(layout_root.layer_id(0), root_background_color, ScrollPolicy::Scrollable); @@ -1128,7 +1130,7 @@ impl LayoutTask { } if needs_reflow { if let Some(mut flow) = self.try_get_layout_root(*node) { - LayoutTask::reflow_all_nodes(&mut *flow); + LayoutTask::reflow_all_nodes(unsafe { flow_ref::deref_mut(&mut flow) }); } } @@ -1289,7 +1291,7 @@ impl LayoutTask { self.profiler_metadata(), self.time_profiler_chan.clone(), || { - animation::recalc_style_for_animations(root_flow.deref_mut(), + animation::recalc_style_for_animations(unsafe { flow_ref::deref_mut(&mut root_flow) }, animations) }); } @@ -1309,10 +1311,10 @@ impl LayoutTask { self.profiler_metadata(), self.time_profiler_chan.clone(), || { - if opts::get().nonincremental_layout || root_flow.deref_mut() + if opts::get().nonincremental_layout || unsafe { flow_ref::deref_mut(&mut root_flow) } .compute_layout_damage() .contains(REFLOW_ENTIRE_DOCUMENT) { - root_flow.deref_mut().reflow_entire_document() + unsafe { flow_ref::deref_mut(&mut root_flow) }.reflow_entire_document() } }); diff --git a/components/layout/parallel.rs b/components/layout/parallel.rs index 7895127e4be..a9cb4aa9bb5 100644 --- a/components/layout/parallel.rs +++ b/components/layout/parallel.rs @@ -11,7 +11,7 @@ use context::{LayoutContext, SharedLayoutContext}; use flow; use flow::{Flow, MutableFlowUtils, PreorderFlowTraversal, PostorderFlowTraversal}; -use flow_ref::FlowRef; +use flow_ref::{self, FlowRef}; use traversal::PostorderNodeMutTraversal; use traversal::{BubbleISizes, AssignISizes, AssignBSizesAndStoreOverflow}; use traversal::{ComputeAbsolutePositions, BuildDisplayList}; @@ -242,14 +242,15 @@ trait ParallelPostorderFlowTraversal : PostorderFlowTraversal { let flow: &mut FlowRef = unsafe { mem::transmute(&mut unsafe_flow) }; + let flow = unsafe { flow_ref::deref_mut(flow) }; // Perform the appropriate traversal. - if self.should_process(&mut **flow) { - self.process(&mut **flow); + if self.should_process(flow) { + self.process(flow); } - let base = flow::mut_base(&mut **flow); + let base = flow::mut_base(flow); // Reset the count of children for the next layout traversal. base.parallel.children_count.store(base.children.len() as isize, @@ -268,7 +269,7 @@ trait ParallelPostorderFlowTraversal : PostorderFlowTraversal { let parent: &mut FlowRef = unsafe { mem::transmute(&mut unsafe_parent) }; - let parent_base = flow::mut_base(&mut **parent); + let parent_base = flow::mut_base(unsafe { flow_ref::deref_mut(parent) }); if parent_base.parallel.children_count.fetch_sub(1, Ordering::Relaxed) == 1 { // We were the last child of our parent. Reflow our parent. unsafe_flow = unsafe_parent @@ -300,18 +301,19 @@ trait ParallelPreorderFlowTraversal : PreorderFlowTraversal { unsafe { // Get a real flow. let flow: &mut FlowRef = mem::transmute(&mut unsafe_flow); + let flow = flow_ref::deref_mut(flow); if self.should_record_thread_ids() { - flow::mut_base(&mut **flow).thread_id = proxy.worker_index(); + flow::mut_base(flow).thread_id = proxy.worker_index(); } - if self.should_process(&mut **flow) { + if self.should_process(flow) { // Perform the appropriate traversal. - self.process(&mut **flow); + self.process(flow); } // Possibly enqueue the children. 
- for kid in flow::child_iter(&mut **flow) { + for kid in flow::child_iter(flow) { had_children = true; discovered_child_flows.push(borrowed_flow_to_unsafe_flow(kid)); } @@ -470,7 +472,7 @@ pub fn traverse_flow_tree_preorder( if opts::get().bubble_inline_sizes_separately { let layout_context = LayoutContext::new(shared_layout_context); let bubble_inline_sizes = BubbleISizes { layout_context: &layout_context }; - root.traverse_postorder(&bubble_inline_sizes); + unsafe { flow_ref::deref_mut(root) }.traverse_postorder(&bubble_inline_sizes); } run_queue_with_custom_work_data_type(queue, |queue| { diff --git a/components/layout/sequential.rs b/components/layout/sequential.rs index 51d8f39e59e..02289a58428 100644 --- a/components/layout/sequential.rs +++ b/components/layout/sequential.rs @@ -7,7 +7,7 @@ use context::{LayoutContext, SharedLayoutContext}; use flow::{PostorderFlowTraversal, PreorderFlowTraversal}; use flow::{self, Flow, ImmutableFlowUtils, InorderFlowTraversal, MutableFlowUtils}; -use flow_ref::FlowRef; +use flow_ref::{self, FlowRef}; use fragment::FragmentBorderBoxIterator; use generated_content::ResolveGeneratedContent; use traversal::PostorderNodeMutTraversal; @@ -40,6 +40,7 @@ pub fn traverse_dom_preorder(root: LayoutNode, doit(root, recalc_style, construct_flows); } +#[allow(unsafe_code)] pub fn resolve_generated_content(root: &mut FlowRef, shared_layout_context: &SharedLayoutContext) { fn doit(flow: &mut Flow, level: u32, traversal: &mut ResolveGeneratedContent) { if !traversal.should_process(flow) { @@ -55,9 +56,10 @@ pub fn resolve_generated_content(root: &mut FlowRef, shared_layout_context: &Sha let layout_context = LayoutContext::new(shared_layout_context); let mut traversal = ResolveGeneratedContent::new(&layout_context); - doit(&mut **root, 0, &mut traversal) + doit(unsafe { flow_ref::deref_mut(root) }, 0, &mut traversal) } +#[allow(unsafe_code)] pub fn traverse_flow_tree_preorder(root: &mut FlowRef, shared_layout_context: &SharedLayoutContext) { fn doit(flow: &mut Flow, @@ -78,7 +80,7 @@ pub fn traverse_flow_tree_preorder(root: &mut FlowRef, let layout_context = LayoutContext::new(shared_layout_context); - let root = &mut **root; + let root = unsafe { flow_ref::deref_mut(root) }; if opts::get().bubble_inline_sizes_separately { let bubble_inline_sizes = BubbleISizes { layout_context: &layout_context }; @@ -94,6 +96,7 @@ pub fn traverse_flow_tree_preorder(root: &mut FlowRef, doit(root, assign_inline_sizes, assign_block_sizes); } +#[allow(unsafe_code)] pub fn build_display_list_for_subtree(root: &mut FlowRef, shared_layout_context: &SharedLayoutContext) { fn doit(flow: &mut Flow, @@ -116,9 +119,10 @@ pub fn build_display_list_for_subtree(root: &mut FlowRef, let compute_absolute_positions = ComputeAbsolutePositions { layout_context: &layout_context }; let build_display_list = BuildDisplayList { layout_context: &layout_context }; - doit(&mut **root, compute_absolute_positions, build_display_list); + doit(unsafe { flow_ref::deref_mut(root) }, compute_absolute_positions, build_display_list); } +#[allow(unsafe_code)] pub fn iterate_through_flow_tree_fragment_border_boxes(root: &mut FlowRef, iterator: &mut FragmentBorderBoxIterator) { fn doit(flow: &mut Flow, @@ -141,5 +145,5 @@ pub fn iterate_through_flow_tree_fragment_border_boxes(root: &mut FlowRef, } } - doit(&mut **root, 0, iterator, &ZERO_POINT); + doit(unsafe { flow_ref::deref_mut(root) }, 0, iterator, &ZERO_POINT); } From 649250130b221685dfa8e570ae07d0d2f634bd40 Mon Sep 17 00:00:00 2001 From: Simon Sapin Date: Fri, 14 
Aug 2015 14:34:13 +0200 Subject: [PATCH 2/3] Replace FlowRef with Arc, now that Arc supports DST. MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit … and WeakFlowRef with Weak. --- components/layout/construct.rs | 60 +++++----- components/layout/flow.rs | 50 ++------ components/layout/flow_list.rs | 8 +- components/layout/flow_ref.rs | 189 ++----------------------------- components/layout/incremental.rs | 2 +- components/layout/lib.rs | 3 +- components/layout/parallel.rs | 26 ++--- 7 files changed, 62 insertions(+), 276 deletions(-) diff --git a/components/layout/construct.rs b/components/layout/construct.rs index e726f687beb..fe41ee5b922 100644 --- a/components/layout/construct.rs +++ b/components/layout/construct.rs @@ -375,7 +375,7 @@ impl<'a> FlowConstructor<'a> { style, child_node.restyle_damage(), SpecificFragmentInfo::TableRow); - let mut new_child = FlowRef::new(box TableRowFlow::from_fragment(fragment)); + let mut new_child: FlowRef = Arc::new(TableRowFlow::from_fragment(fragment)); new_child.add_new_child(child.clone()); child.finish(); *child = new_child @@ -388,7 +388,7 @@ impl<'a> FlowConstructor<'a> { style, child_node.restyle_damage(), SpecificFragmentInfo::Table); - let mut new_child = FlowRef::new(box TableFlow::from_fragment(fragment)); + let mut new_child: FlowRef = Arc::new(TableFlow::from_fragment(fragment)); new_child.add_new_child(child.clone()); child.finish(); *child = new_child @@ -402,7 +402,7 @@ impl<'a> FlowConstructor<'a> { style, child_node.restyle_damage(), SpecificFragmentInfo::TableWrapper); - let mut new_child = FlowRef::new(box TableWrapperFlow::from_fragment(fragment, None)); + let mut new_child: FlowRef = Arc::new(TableWrapperFlow::from_fragment(fragment, None)); new_child.add_new_child(child.clone()); child.finish(); *child = new_child @@ -457,9 +457,8 @@ impl<'a> FlowConstructor<'a> { let scanned_fragments = TextRunScanner::new().scan_for_runs(&mut self.layout_context.font_context(), fragments.fragments); - let mut inline_flow_ref = - FlowRef::new(box InlineFlow::from_fragments(scanned_fragments, - node.style().writing_mode)); + let mut inline_flow_ref: FlowRef = Arc::new( + InlineFlow::from_fragments(scanned_fragments, node.style().writing_mode)); // Add all the inline-block fragments as children of the inline flow. for inline_block_flow in &inline_block_flows { @@ -753,12 +752,12 @@ impl<'a> FlowConstructor<'a> { fn build_flow_for_block(&mut self, node: &ThreadSafeLayoutNode, float_kind: Option) -> ConstructionResult { let fragment = self.build_fragment_for_block(node); - let flow = if node.style().is_multicol() { - box MulticolFlow::from_fragment(fragment, float_kind) as Box + let flow: FlowRef = if node.style().is_multicol() { + Arc::new(MulticolFlow::from_fragment(fragment, float_kind)) } else { - box BlockFlow::from_fragment(fragment, float_kind) as Box + Arc::new(BlockFlow::from_fragment(fragment, float_kind)) }; - self.build_flow_for_block_like(FlowRef::new(flow), node) + self.build_flow_for_block_like(flow, node) } /// Bubbles up {ib} splits. 
@@ -1072,13 +1071,11 @@ impl<'a> FlowConstructor<'a> { fn build_flow_for_table_wrapper(&mut self, node: &ThreadSafeLayoutNode, float_value: float::T) -> ConstructionResult { let fragment = Fragment::new(node, SpecificFragmentInfo::TableWrapper); - let wrapper_flow = - box TableWrapperFlow::from_fragment(fragment, FloatKind::from_property(float_value)); - let mut wrapper_flow = FlowRef::new(wrapper_flow as Box); + let mut wrapper_flow: FlowRef = Arc::new( + TableWrapperFlow::from_fragment(fragment, FloatKind::from_property(float_value))); let table_fragment = Fragment::new(node, SpecificFragmentInfo::Table); - let table_flow = box TableFlow::from_fragment(table_fragment); - let table_flow = FlowRef::new(table_flow as Box); + let table_flow = Arc::new(TableFlow::from_fragment(table_fragment)); // First populate the table flow with its children. let construction_result = self.build_flow_for_block_like(table_flow, node); @@ -1135,8 +1132,8 @@ impl<'a> FlowConstructor<'a> { /// with possibly other `BlockFlow`s or `InlineFlow`s underneath it. fn build_flow_for_table_caption(&mut self, node: &ThreadSafeLayoutNode) -> ConstructionResult { let fragment = self.build_fragment_for_block(node); - let flow = box TableCaptionFlow::from_fragment(fragment) as Box; - self.build_flow_for_block_like(FlowRef::new(flow), node) + let flow = Arc::new(TableCaptionFlow::from_fragment(fragment)); + self.build_flow_for_block_like(flow, node) } /// Builds a flow for a node with `display: table-row-group`. This yields a `TableRowGroupFlow` @@ -1144,16 +1141,16 @@ impl<'a> FlowConstructor<'a> { fn build_flow_for_table_rowgroup(&mut self, node: &ThreadSafeLayoutNode) -> ConstructionResult { let fragment = Fragment::new(node, SpecificFragmentInfo::TableRow); - let flow = box TableRowGroupFlow::from_fragment(fragment) as Box; - self.build_flow_for_block_like(FlowRef::new(flow), node) + let flow = Arc::new(TableRowGroupFlow::from_fragment(fragment)); + self.build_flow_for_block_like(flow, node) } /// Builds a flow for a node with `display: table-row`. This yields a `TableRowFlow` with /// possibly other `TableCellFlow`s underneath it. fn build_flow_for_table_row(&mut self, node: &ThreadSafeLayoutNode) -> ConstructionResult { let fragment = Fragment::new(node, SpecificFragmentInfo::TableRow); - let flow = box TableRowFlow::from_fragment(fragment) as Box; - self.build_flow_for_block_like(FlowRef::new(flow), node) + let flow = Arc::new(TableRowFlow::from_fragment(fragment)); + self.build_flow_for_block_like(flow, node) } /// Builds a flow for a node with `display: table-cell`. This yields a `TableCellFlow` with @@ -1172,9 +1169,9 @@ impl<'a> FlowConstructor<'a> { position == position::T::fixed }); - let flow = box TableCellFlow::from_node_fragment_and_visibility_flag(node, fragment, !hide) - as Box; - self.build_flow_for_block_like(FlowRef::new(flow), node) + let flow = Arc::new( + TableCellFlow::from_node_fragment_and_visibility_flag(node, fragment, !hide)); + self.build_flow_for_block_like(flow, node) } /// Builds a flow for a node with `display: list-item`. 
This yields a `ListItemFlow` with @@ -1222,21 +1219,19 @@ impl<'a> FlowConstructor<'a> { let main_fragment = self.build_fragment_for_block(node); let flow = match node.style().get_list().list_style_position { list_style_position::T::outside => { - box ListItemFlow::from_fragments_and_flotation(main_fragment, - marker_fragments, - flotation) + Arc::new(ListItemFlow::from_fragments_and_flotation( + main_fragment, marker_fragments, flotation)) } list_style_position::T::inside => { for marker_fragment in marker_fragments { initial_fragments.fragments.push_back(marker_fragment) } - box ListItemFlow::from_fragments_and_flotation(main_fragment, vec![], flotation) + Arc::new(ListItemFlow::from_fragments_and_flotation( + main_fragment, vec![], flotation)) } }; - self.build_flow_for_block_starting_with_fragments(FlowRef::new(flow as Box), - node, - initial_fragments) + self.build_flow_for_block_starting_with_fragments(flow, node, initial_fragments) } /// Creates a fragment for a node with `display: table-column`. @@ -1277,8 +1272,7 @@ impl<'a> FlowConstructor<'a> { let specific = SpecificFragmentInfo::TableColumn(TableColumnFragmentInfo::new(node)); col_fragments.push(Fragment::new(node, specific)); } - let flow = box TableColGroupFlow::from_fragments(fragment, col_fragments); - let mut flow = FlowRef::new(flow as Box); + let mut flow: FlowRef = Arc::new(TableColGroupFlow::from_fragments(fragment, col_fragments)); flow.finish(); ConstructionResult::Flow(flow, AbsoluteDescendants::new()) diff --git a/components/layout/flow.rs b/components/layout/flow.rs index 1c559d4e112..e6bf124230b 100644 --- a/components/layout/flow.rs +++ b/components/layout/flow.rs @@ -57,7 +57,7 @@ use std::mem; use std::raw; use std::slice::IterMut; use std::sync::Arc; -use std::sync::atomic::{AtomicUsize, Ordering}; +use std::sync::atomic::Ordering; use style::computed_values::{clear, display, empty_cells, float, position, text_align}; use style::properties::{self, ComputedValues}; use style::values::computed::LengthOrPercentageOrAuto; @@ -68,7 +68,7 @@ use util::logical_geometry::{LogicalRect, LogicalSize, WritingMode}; /// /// Note that virtual methods have a cost; we should not overuse them in Servo. Consider adding /// methods to `ImmutableFlowUtils` or `MutableFlowUtils` before adding more methods here. -pub trait Flow: fmt::Debug + Sync { +pub trait Flow: fmt::Debug + Sync + Send + 'static { // RTTI // // TODO(pcwalton): Use Rust's RTTI, once that works. @@ -770,9 +770,9 @@ pub struct AbsoluteDescendantIter<'a> { } impl<'a> Iterator for AbsoluteDescendantIter<'a> { - type Item = &'a mut (Flow + 'a); + type Item = &'a mut Flow; #[allow(unsafe_code)] - fn next(&mut self) -> Option<&'a mut (Flow + 'a)> { + fn next(&mut self) -> Option<&'a mut Flow> { self.iter.next().map(|info| unsafe { flow_ref::deref_mut(&mut info.flow) }) } } @@ -815,13 +815,6 @@ impl AbsolutePositionInfo { /// Data common to all flows. pub struct BaseFlow { - /// NB: Must be the first element. - /// - /// The necessity of this will disappear once we have dynamically-sized types. - strong_ref_count: AtomicUsize, - - weak_ref_count: AtomicUsize, - pub restyle_damage: RestyleDamage, /// The children of this flow. 
@@ -963,15 +956,6 @@ impl Encodable for BaseFlow { } } -impl Drop for BaseFlow { - fn drop(&mut self) { - if self.strong_ref_count.load(Ordering::SeqCst) != 0 && - self.weak_ref_count.load(Ordering::SeqCst) != 0 { - panic!("Flow destroyed before its ref count hit zero—this is unsafe!") - } - } -} - /// Whether a base flow should be forced to be nonfloated. This can affect e.g. `TableFlow`, which /// is never floated because the table wrapper flow is the floated one. #[derive(Clone, PartialEq)] @@ -1039,8 +1023,6 @@ impl BaseFlow { damage.remove(RECONSTRUCT_FLOW); BaseFlow { - strong_ref_count: AtomicUsize::new(1), - weak_ref_count: AtomicUsize::new(1), restyle_damage: damage, children: FlowList::new(), intrinsic_inline_sizes: IntrinsicISizes::new(), @@ -1069,16 +1051,6 @@ impl BaseFlow { self.children.iter_mut() } - #[allow(unsafe_code)] - pub unsafe fn strong_ref_count<'a>(&'a self) -> &'a AtomicUsize { - &self.strong_ref_count - } - - #[allow(unsafe_code)] - pub unsafe fn weak_ref_count<'a>(&'a self) -> &'a AtomicUsize { - &self.weak_ref_count - } - pub fn debug_id(&self) -> usize { let p = self as *const _; p as usize @@ -1115,7 +1087,7 @@ impl BaseFlow { } } -impl<'a> ImmutableFlowUtils for &'a (Flow + 'a) { +impl<'a> ImmutableFlowUtils for &'a Flow { /// Returns true if this flow is a block flow or subclass thereof. fn is_block_like(self) -> bool { match self.class() { @@ -1213,7 +1185,7 @@ impl<'a> ImmutableFlowUtils for &'a (Flow + 'a) { /// as it's harder to understand. fn generate_missing_child_flow(self, node: &ThreadSafeLayoutNode) -> FlowRef { let mut style = node.style().clone(); - let flow = match self.class() { + match self.class() { FlowClass::Table | FlowClass::TableRowGroup => { properties::modify_style_for_anonymous_table_object( &mut style, @@ -1224,7 +1196,7 @@ impl<'a> ImmutableFlowUtils for &'a (Flow + 'a) { style, node.restyle_damage(), SpecificFragmentInfo::TableRow); - box TableRowFlow::from_fragment(fragment) as Box + Arc::new(TableRowFlow::from_fragment(fragment)) }, FlowClass::TableRow => { properties::modify_style_for_anonymous_table_object( @@ -1237,14 +1209,12 @@ impl<'a> ImmutableFlowUtils for &'a (Flow + 'a) { node.restyle_damage(), SpecificFragmentInfo::TableCell); let hide = node.style().get_inheritedtable().empty_cells == empty_cells::T::hide; - box TableCellFlow::from_node_fragment_and_visibility_flag(node, fragment, !hide) as - Box + Arc::new(TableCellFlow::from_node_fragment_and_visibility_flag(node, fragment, !hide)) }, _ => { panic!("no need to generate a missing child") } - }; - FlowRef::new(flow) + } } /// Returns true if this flow contains fragments that are roots of an absolute flow tree. @@ -1315,7 +1285,7 @@ impl<'a> ImmutableFlowUtils for &'a (Flow + 'a) { } } -impl<'a> MutableFlowUtils for &'a mut (Flow + 'a) { +impl<'a> MutableFlowUtils for &'a mut Flow { /// Traverses the tree in preorder. 
fn traverse_preorder(self, traversal: &T) { if traversal.should_process(self) { diff --git a/components/layout/flow_list.rs b/components/layout/flow_list.rs index 0d1df38051f..409a4b6516e 100644 --- a/components/layout/flow_list.rs +++ b/components/layout/flow_list.rs @@ -108,9 +108,9 @@ impl FlowList { } impl<'a> Iterator for FlowListIterator<'a> { - type Item = &'a (Flow + 'a); + type Item = &'a Flow; #[inline] - fn next(&mut self) -> Option<&'a (Flow + 'a)> { + fn next(&mut self) -> Option<&'a Flow> { self.it.next().map(|x| &**x) } @@ -121,10 +121,10 @@ impl<'a> Iterator for FlowListIterator<'a> { } impl<'a> Iterator for MutFlowListIterator<'a> { - type Item = &'a mut (Flow + 'a); + type Item = &'a mut Flow; #[inline] #[allow(unsafe_code)] - fn next(&mut self) -> Option<&'a mut (Flow + 'a)> { + fn next(&mut self) -> Option<&'a mut Flow> { self.it.next().map(|x| unsafe { flow_ref::deref_mut(x) }) } diff --git a/components/layout/flow_ref.rs b/components/layout/flow_ref.rs index b7d143734fb..7408a2ac2fb 100644 --- a/components/layout/flow_ref.rs +++ b/components/layout/flow_ref.rs @@ -10,190 +10,15 @@ #![allow(unsafe_code)] -use flow; -use flow::{Flow, BaseFlow}; +use flow::Flow; +use std::sync::{Arc, Weak}; -use std::mem; -use std::ops::Deref; -use std::ptr; -use std::raw; -use std::rt::heap; -use std::sync::atomic::{self, Ordering}; - -#[unsafe_no_drop_flag] -pub struct FlowRef { - object: raw::TraitObject, -} - -unsafe impl Send for FlowRef {} -unsafe impl Sync for FlowRef {} - -#[unsafe_no_drop_flag] -pub struct WeakFlowRef { - object: raw::TraitObject, -} - -unsafe impl Send for WeakFlowRef {} -unsafe impl Sync for WeakFlowRef {} - -impl FlowRef { - pub fn new(mut flow: Box) -> FlowRef { - unsafe { - let result = { - let flow_ref: &mut Flow = &mut *flow; - let object = mem::transmute::<&mut Flow, raw::TraitObject>(flow_ref); - FlowRef { object: object } - }; - mem::forget(flow); - result - } - } - - /// Downgrades the FlowRef to a WeakFlowRef. - pub fn downgrade(&self) -> WeakFlowRef { - unsafe { - flow::base(&**self).weak_ref_count().fetch_add(1, Ordering::Relaxed); - } - WeakFlowRef { object: self.object } - } -} - -impl<'a> Deref for FlowRef { - type Target = Flow + 'a; - fn deref(&self) -> &(Flow + 'a) { - unsafe { - mem::transmute_copy::(&self.object) - } - } -} +pub type FlowRef = Arc; +pub type WeakFlowRef = Weak; // FIXME(https://github.com/servo/servo/issues/6503) This introduces unsound mutable aliasing. // Try to replace it with Arc::get_mut (which checks that the reference count is 1). -pub unsafe fn deref_mut<'a>(flow_ref: &mut FlowRef) -> &mut (Flow + 'a) { - mem::transmute_copy::(&flow_ref.object) -} - -impl Drop for FlowRef { - fn drop(&mut self) { - unsafe { - if self.object.vtable.is_null() || - self.object.vtable as usize == mem::POST_DROP_USIZE { - return - } - if flow::base(&**self).strong_ref_count().fetch_sub(1, Ordering::Release) != 1 { - return - } - atomic::fence(Ordering::Acquire); - - // Normally we'd call the underlying Drop logic but not free the - // allocation, but this is almost impossible without DST in - // Rust. Instead we make a fake trait object to run the drop logic - // on. 
- let flow_ref: FlowRef = mem::replace(self, FlowRef { - object: raw::TraitObject { - vtable: ptr::null_mut(), - data: ptr::null_mut(), - } - }); - - let vtable: &[usize; 3] = mem::transmute::<*mut (), &[usize; 3]>(flow_ref.object.vtable); - let object_size = vtable[1]; - let object_align = vtable[2]; - - let fake_data = heap::allocate(object_size, object_align); - ptr::copy(flow_ref.object.data as *const u8, fake_data, object_size); - - let fake_box = raw::TraitObject { vtable: flow_ref.object.vtable, data: fake_data as *mut () }; - let fake_flow = mem::transmute::>(fake_box); - drop(fake_flow); - - if flow::base(&*flow_ref).weak_ref_count().fetch_sub(1, Ordering::Release) == 1 { - atomic::fence(Ordering::Acquire); - heap::deallocate(flow_ref.object.data as *mut u8, object_size, object_align); - } - - mem::forget(flow_ref); - } - } -} - -impl Clone for FlowRef { - fn clone(&self) -> FlowRef { - unsafe { - let _ = flow::base(&**self).strong_ref_count().fetch_add(1, Ordering::Relaxed); - FlowRef { - object: raw::TraitObject { - vtable: self.object.vtable, - data: self.object.data, - } - } - } - } -} - -fn base<'a>(r: &WeakFlowRef) -> &'a BaseFlow { - let data = r.object.data; - debug_assert!(!data.is_null()); - unsafe { - mem::transmute::<*mut (), &'a BaseFlow>(data) - } -} - -impl WeakFlowRef { - /// Upgrades a WeakFlowRef to a FlowRef. - pub fn upgrade(&self) -> Option { - unsafe { - let object = base(self); - // We use a CAS loop to increment the strong count instead of a - // fetch_add because once the count hits 0 is must never be above - // 0. - loop { - let n = object.strong_ref_count().load(Ordering::SeqCst); - if n == 0 { return None } - let old = object.strong_ref_count().compare_and_swap(n, n + 1, Ordering::SeqCst); - if old == n { - return Some(FlowRef { object: self.object }) - } - } - } - } -} - -impl Clone for WeakFlowRef { - fn clone(&self) -> WeakFlowRef { - unsafe { - base(self).weak_ref_count().fetch_add(1, Ordering::Relaxed); - } - WeakFlowRef { object: self. object } - } -} - -impl Drop for WeakFlowRef { - fn drop(&mut self) { - unsafe { - if self.object.vtable.is_null() || - self.object.vtable as usize == mem::POST_DROP_USIZE { - return - } - - if base(self).weak_ref_count().fetch_sub(1, Ordering::Release) == 1 { - atomic::fence(Ordering::Acquire); - - // This dance deallocates the Box without running its - // drop glue. The drop glue is run when the last strong - // reference is released. 
- let weak_ref: WeakFlowRef = mem::replace(self, WeakFlowRef { - object: raw::TraitObject { - vtable: ptr::null_mut(), - data: ptr::null_mut(), - } - }); - let vtable: &[usize; 3] = mem::transmute::<*mut (), &[usize; 3]>(weak_ref.object.vtable); - let object_size = vtable[1]; - let object_align = vtable[2]; - heap::deallocate(weak_ref.object.data as *mut u8, object_size, object_align); - mem::forget(weak_ref); - } - } - } +pub unsafe fn deref_mut<'a>(r: &'a mut FlowRef) -> &'a mut Flow { + let ptr: *const Flow = &**r; + &mut *(ptr as *mut Flow) } diff --git a/components/layout/incremental.rs b/components/layout/incremental.rs index 4b976ec6138..623fa4685fe 100644 --- a/components/layout/incremental.rs +++ b/components/layout/incremental.rs @@ -192,7 +192,7 @@ pub trait LayoutDamageComputation { fn reflow_entire_document(self); } -impl<'a> LayoutDamageComputation for &'a mut (Flow + 'a) { +impl<'a> LayoutDamageComputation for &'a mut Flow { fn compute_layout_damage(self) -> SpecialRestyleDamage { let mut special_damage = SpecialRestyleDamage::empty(); let is_absolutely_positioned = flow::base(self).flags.contains(IS_ABSOLUTELY_POSITIONED); diff --git a/components/layout/lib.rs b/components/layout/lib.rs index a96b3104e83..438ac1edd87 100644 --- a/components/layout/lib.rs +++ b/components/layout/lib.rs @@ -4,13 +4,12 @@ #![feature(append)] #![feature(arc_unique)] +#![feature(arc_weak)] #![feature(box_str)] #![feature(box_syntax)] #![feature(cell_extras)] #![feature(custom_derive)] -#![feature(filling_drop)] #![feature(hashmap_hasher)] -#![feature(heap_api)] #![feature(mpsc_select)] #![feature(plugin)] #![feature(raw)] diff --git a/components/layout/parallel.rs b/components/layout/parallel.rs index a9cb4aa9bb5..6420478cf11 100644 --- a/components/layout/parallel.rs +++ b/components/layout/parallel.rs @@ -47,25 +47,25 @@ fn null_unsafe_flow() -> UnsafeFlow { pub fn owned_flow_to_unsafe_flow(flow: *const FlowRef) -> UnsafeFlow { unsafe { - mem::transmute_copy(&*flow) + mem::transmute::<&Flow, UnsafeFlow>(&**flow) } } pub fn mut_owned_flow_to_unsafe_flow(flow: *mut FlowRef) -> UnsafeFlow { unsafe { - mem::transmute_copy(&*flow) + mem::transmute::<&Flow, UnsafeFlow>(&**flow) } } pub fn borrowed_flow_to_unsafe_flow(flow: &Flow) -> UnsafeFlow { unsafe { - mem::transmute_copy(&flow) + mem::transmute::<&Flow, UnsafeFlow>(flow) } } pub fn mut_borrowed_flow_to_unsafe_flow(flow: &mut Flow) -> UnsafeFlow { unsafe { - mem::transmute_copy(&flow) + mem::transmute::<&Flow, UnsafeFlow>(flow) } } @@ -239,10 +239,9 @@ trait ParallelPostorderFlowTraversal : PostorderFlowTraversal { fn run_parallel(&self, mut unsafe_flow: UnsafeFlow) { loop { // Get a real flow. - let flow: &mut FlowRef = unsafe { - mem::transmute(&mut unsafe_flow) + let flow: &mut Flow = unsafe { + mem::transmute(unsafe_flow) }; - let flow = unsafe { flow_ref::deref_mut(flow) }; // Perform the appropriate traversal. if self.should_process(flow) { @@ -257,7 +256,7 @@ trait ParallelPostorderFlowTraversal : PostorderFlowTraversal { Ordering::Relaxed); // Possibly enqueue the parent. - let mut unsafe_parent = base.parallel.parent; + let unsafe_parent = base.parallel.parent; if unsafe_parent == null_unsafe_flow() { // We're done! break @@ -266,10 +265,10 @@ trait ParallelPostorderFlowTraversal : PostorderFlowTraversal { // No, we're not at the root yet. Then are we the last child // of our parent to finish processing? If so, we can continue // on with our parent; otherwise, we've gotta wait. 
- let parent: &mut FlowRef = unsafe { - mem::transmute(&mut unsafe_parent) + let parent: &mut Flow = unsafe { + mem::transmute(unsafe_parent) }; - let parent_base = flow::mut_base(unsafe { flow_ref::deref_mut(parent) }); + let parent_base = flow::mut_base(parent); if parent_base.parallel.children_count.fetch_sub(1, Ordering::Relaxed) == 1 { // We were the last child of our parent. Reflow our parent. unsafe_flow = unsafe_parent @@ -296,12 +295,11 @@ trait ParallelPreorderFlowTraversal : PreorderFlowTraversal { top_down_func: ChunkedFlowTraversalFunction, bottom_up_func: FlowTraversalFunction) { let mut discovered_child_flows = Vec::new(); - for mut unsafe_flow in *unsafe_flows.0 { + for unsafe_flow in *unsafe_flows.0 { let mut had_children = false; unsafe { // Get a real flow. - let flow: &mut FlowRef = mem::transmute(&mut unsafe_flow); - let flow = flow_ref::deref_mut(flow); + let flow: &mut Flow = mem::transmute(unsafe_flow); if self.should_record_thread_ids() { flow::mut_base(flow).thread_id = proxy.worker_index(); From 21d69314d4752751f55a250b68a9b7f87368cb92 Mon Sep 17 00:00:00 2001 From: Simon Sapin Date: Tue, 18 Aug 2015 19:37:15 +0200 Subject: [PATCH 3/3] =?UTF-8?q?Don=E2=80=99t=20mark=20flow=5Fref::deref=5F?= =?UTF-8?q?mut=20as=20unsafe.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See discussion in https://github.com/servo/servo/pull/7237 --- components/layout/construct.rs | 28 ++++++++--------------- components/layout/display_list_builder.rs | 7 +++--- components/layout/flow.rs | 6 ++--- components/layout/flow_list.rs | 7 +++--- components/layout/flow_ref.rs | 14 ++++++++---- components/layout/fragment.rs | 24 ++++++++----------- components/layout/inline.rs | 7 +++--- components/layout/layout_task.rs | 18 +++++++-------- components/layout/parallel.rs | 2 +- components/layout/sequential.rs | 12 ++++------ 10 files changed, 53 insertions(+), 72 deletions(-) diff --git a/components/layout/construct.rs b/components/layout/construct.rs index fe41ee5b922..f9d11e89915 100644 --- a/components/layout/construct.rs +++ b/components/layout/construct.rs @@ -415,7 +415,6 @@ impl<'a> FlowConstructor<'a> { /// `#[inline(always)]` because this is performance critical and LLVM will not inline it /// otherwise. #[inline(always)] - #[allow(unsafe_code)] fn flush_inline_fragments_to_flow_or_list(&mut self, fragment_accumulator: InlineFragmentsAccumulator, flow: &mut FlowRef, @@ -481,7 +480,7 @@ impl<'a> FlowConstructor<'a> { { // FIXME(#6503): Use Arc::get_mut().unwrap() here. - let inline_flow = unsafe { flow_ref::deref_mut(&mut inline_flow_ref) }.as_mut_inline(); + let inline_flow = flow_ref::deref_mut(&mut inline_flow_ref).as_mut_inline(); let (ascent, descent) = @@ -1283,7 +1282,6 @@ impl<'a> FlowConstructor<'a> { /// /// TODO(pcwalton): Add some more fast paths, like toggling `display: none`, adding block kids /// to block parents with no {ib} splits, adding out-of-flow kids, etc. - #[allow(unsafe_code)] pub fn repair_if_possible(&mut self, node: &ThreadSafeLayoutNode) -> bool { // We can skip reconstructing the flow if we don't have to reconstruct and none of our kids // did either. 
@@ -1314,7 +1312,7 @@ impl<'a> FlowConstructor<'a> { if !flow.is_block_flow() { return false } - let flow = unsafe { flow_ref::deref_mut(flow) }; + let flow = flow_ref::deref_mut(flow); flow::mut_base(flow).restyle_damage.insert(damage); flow.repair_style_and_bubble_inline_sizes(&style); true @@ -1339,26 +1337,22 @@ impl<'a> FlowConstructor<'a> { match fragment.specific { SpecificFragmentInfo::InlineBlock(ref mut inline_block_fragment) => { - let flow_ref = unsafe { - flow_ref::deref_mut(&mut inline_block_fragment.flow_ref) - }; + let flow_ref = flow_ref::deref_mut(&mut inline_block_fragment.flow_ref); flow::mut_base(flow_ref).restyle_damage.insert(damage); // FIXME(pcwalton): Fragment restyle damage too? flow_ref.repair_style_and_bubble_inline_sizes(&style); } SpecificFragmentInfo::InlineAbsoluteHypothetical( ref mut inline_absolute_hypothetical_fragment) => { - let flow_ref = unsafe { - flow_ref::deref_mut(&mut inline_absolute_hypothetical_fragment.flow_ref) - }; + let flow_ref = flow_ref::deref_mut( + &mut inline_absolute_hypothetical_fragment.flow_ref); flow::mut_base(flow_ref).restyle_damage.insert(damage); // FIXME(pcwalton): Fragment restyle damage too? flow_ref.repair_style_and_bubble_inline_sizes(&style); } SpecificFragmentInfo::InlineAbsolute(ref mut inline_absolute_fragment) => { - let flow_ref = unsafe { - flow_ref::deref_mut(&mut inline_absolute_fragment.flow_ref) - }; + let flow_ref = flow_ref::deref_mut( + &mut inline_absolute_fragment.flow_ref); flow::mut_base(flow_ref).restyle_damage.insert(damage); // FIXME(pcwalton): Fragment restyle damage too? flow_ref.repair_style_and_bubble_inline_sizes(&style); @@ -1655,14 +1649,13 @@ impl FlowConstructionUtils for FlowRef { /// Adds a new flow as a child of this flow. Fails if this flow is marked as a leaf. /// /// This must not be public because only the layout constructor can do this. - #[allow(unsafe_code)] fn add_new_child(&mut self, mut new_child: FlowRef) { { - let kid_base = flow::mut_base(unsafe { flow_ref::deref_mut(&mut new_child) }); + let kid_base = flow::mut_base(flow_ref::deref_mut(&mut new_child)); kid_base.parallel.parent = parallel::mut_owned_flow_to_unsafe_flow(self); } - let base = flow::mut_base(unsafe { flow_ref::deref_mut(self) }); + let base = flow::mut_base(flow_ref::deref_mut(self)); base.children.push_back(new_child); let _ = base.parallel.children_count.fetch_add(1, Ordering::Relaxed); } @@ -1675,10 +1668,9 @@ impl FlowConstructionUtils for FlowRef { /// properly computed. (This is not, however, a memory safety problem.) /// /// This must not be public because only the layout constructor can do this. - #[allow(unsafe_code)] fn finish(&mut self) { if !opts::get().bubble_inline_sizes_separately { - unsafe { flow_ref::deref_mut(self) }.bubble_inline_sizes() + flow_ref::deref_mut(self).bubble_inline_sizes() } } } diff --git a/components/layout/display_list_builder.rs b/components/layout/display_list_builder.rs index 259b420c977..9cc43559bed 100644 --- a/components/layout/display_list_builder.rs +++ b/components/layout/display_list_builder.rs @@ -1771,7 +1771,6 @@ pub trait InlineFlowDisplayListBuilding { } impl InlineFlowDisplayListBuilding for InlineFlow { - #[allow(unsafe_code)] fn build_display_list_for_inline(&mut self, layout_context: &LayoutContext) { // TODO(#228): Once we form lines and have their cached bounds, we can be smarter and // not recurse on a line if nothing in it can intersect the dirty region. 
@@ -1798,17 +1797,17 @@ impl InlineFlowDisplayListBuilding for InlineFlow {
             match fragment.specific {
                 SpecificFragmentInfo::InlineBlock(ref mut block_flow) => {
-                    let block_flow = unsafe { flow_ref::deref_mut(&mut block_flow.flow_ref) };
+                    let block_flow = flow_ref::deref_mut(&mut block_flow.flow_ref);
                     flow::mut_base(block_flow).display_list_building_result
                                               .add_to(&mut *display_list)
                 }
                 SpecificFragmentInfo::InlineAbsoluteHypothetical(ref mut block_flow) => {
-                    let block_flow = unsafe { flow_ref::deref_mut(&mut block_flow.flow_ref) };
+                    let block_flow = flow_ref::deref_mut(&mut block_flow.flow_ref);
                     flow::mut_base(block_flow).display_list_building_result
                                               .add_to(&mut *display_list)
                 }
                 SpecificFragmentInfo::InlineAbsolute(ref mut block_flow) => {
-                    let block_flow = unsafe { flow_ref::deref_mut(&mut block_flow.flow_ref) };
+                    let block_flow = flow_ref::deref_mut(&mut block_flow.flow_ref);
                     flow::mut_base(block_flow).display_list_building_result
                                               .add_to(&mut *display_list)
                 }
diff --git a/components/layout/flow.rs b/components/layout/flow.rs
index e6bf124230b..66ee466d450 100644
--- a/components/layout/flow.rs
+++ b/components/layout/flow.rs
@@ -771,9 +771,8 @@ pub struct AbsoluteDescendantIter<'a> {
 impl<'a> Iterator for AbsoluteDescendantIter<'a> {
     type Item = &'a mut Flow;
-    #[allow(unsafe_code)]
     fn next(&mut self) -> Option<&'a mut Flow> {
-        self.iter.next().map(|info| unsafe { flow_ref::deref_mut(&mut info.flow) })
+        self.iter.next().map(|info| flow_ref::deref_mut(&mut info.flow))
     }
 }
@@ -1353,10 +1352,9 @@ impl MutableOwnedFlowUtils for FlowRef {
     /// This is called during flow construction, so nothing else can be accessing the descendant
     /// flows. This is enforced by the fact that we have a mutable `FlowRef`, which only flow
     /// construction is allowed to possess.
-    #[allow(unsafe_code)]
     fn set_absolute_descendants(&mut self, abs_descendants: AbsoluteDescendants) {
         let this = self.clone();
-        let base = mut_base(unsafe { flow_ref::deref_mut(self) });
+        let base = mut_base(flow_ref::deref_mut(self));
         base.abs_descendants = abs_descendants;
         for descendant_link in base.abs_descendants.iter() {
             let descendant_base = mut_base(descendant_link);
diff --git a/components/layout/flow_list.rs b/components/layout/flow_list.rs
index 409a4b6516e..34fd5d80975 100644
--- a/components/layout/flow_list.rs
+++ b/components/layout/flow_list.rs
@@ -33,7 +33,7 @@ impl FlowList {
     #[inline]
     #[allow(unsafe_code)]
     pub unsafe fn front_mut<'a>(&'a mut self) -> Option<&'a mut Flow> {
-        self.flows.front_mut().map(|head| flow_ref::deref_mut(head))
+        self.flows.front_mut().map(flow_ref::deref_mut)
     }
     /// Provide a reference to the back element, or None if the list is empty
@@ -46,7 +46,7 @@ impl FlowList {
     #[inline]
     #[allow(unsafe_code)]
     pub unsafe fn back_mut<'a>(&'a mut self) -> Option<&'a mut Flow> {
-        self.flows.back_mut().map(|tail| flow_ref::deref_mut(tail))
+        self.flows.back_mut().map(flow_ref::deref_mut)
     }
     /// Add an element first in the list
@@ -123,9 +123,8 @@ impl<'a> Iterator for FlowListIterator<'a> {
 impl<'a> Iterator for MutFlowListIterator<'a> {
     type Item = &'a mut Flow;
     #[inline]
-    #[allow(unsafe_code)]
     fn next(&mut self) -> Option<&'a mut Flow> {
-        self.it.next().map(|x| unsafe { flow_ref::deref_mut(x) })
+        self.it.next().map(flow_ref::deref_mut)
     }
     #[inline]
diff --git a/components/layout/flow_ref.rs b/components/layout/flow_ref.rs
index 7408a2ac2fb..d1717ef2000 100644
--- a/components/layout/flow_ref.rs
+++ b/components/layout/flow_ref.rs
@@ -8,7 +8,6 @@
 //! be superfluous. This design is largely duplicating logic of Arc and
 //! Weak; please see comments there for details.
-#![allow(unsafe_code)]
 use flow::Flow;
 use std::sync::{Arc, Weak};
@@ -16,9 +15,14 @@ use std::sync::{Arc, Weak};
 pub type FlowRef = Arc<Flow>;
 pub type WeakFlowRef = Weak<Flow>;
-// FIXME(https://github.com/servo/servo/issues/6503) This introduces unsound mutable aliasing.
-// Try to replace it with Arc::get_mut (which checks that the reference count is 1).
-pub unsafe fn deref_mut<'a>(r: &'a mut FlowRef) -> &'a mut Flow {
+/// WARNING: This should only be used when there is no aliasing:
+/// when the traversal ensures that no other thread accesses the same flow at the same time.
+/// See https://github.com/servo/servo/issues/6503
+/// Use Arc::get_mut instead when possible (e.g. on an Arc that was just created).
+#[allow(unsafe_code)]
+pub fn deref_mut<'a>(r: &'a mut FlowRef) -> &'a mut Flow {
     let ptr: *const Flow = &**r;
-    &mut *(ptr as *mut Flow)
+    unsafe {
+        &mut *(ptr as *mut Flow)
+    }
 }
diff --git a/components/layout/fragment.rs b/components/layout/fragment.rs
index d60785c1df1..55ee934bb7e 100644
--- a/components/layout/fragment.rs
+++ b/components/layout/fragment.rs
@@ -1248,7 +1248,6 @@ impl Fragment {
     }
     /// Computes the intrinsic inline-sizes of this fragment.
-    #[allow(unsafe_code)]
     pub fn compute_intrinsic_inline_sizes(&mut self) -> IntrinsicISizesContribution {
         let mut result = self.style_specified_intrinsic_inline_size();
         match self.specific {
@@ -1620,7 +1619,6 @@ impl Fragment {
     /// Assigns replaced inline-size, padding, and margins for this fragment only if it is replaced
     /// content per CSS 2.1 § 10.3.2.
-    #[allow(unsafe_code)]
     pub fn assign_replaced_inline_size_if_necessary<'a>(&'a mut self, container_inline_size: Au) {
         match self.specific {
             SpecificFragmentInfo::Generic |
@@ -1649,7 +1647,7 @@ impl Fragment {
         match self.specific {
             SpecificFragmentInfo::InlineAbsoluteHypothetical(ref mut info) => {
-                let block_flow = unsafe { flow_ref::deref_mut(&mut info.flow_ref) }.as_mut_block();
+                let block_flow = flow_ref::deref_mut(&mut info.flow_ref).as_mut_block();
                 block_flow.base.position.size.inline =
                     block_flow.base.intrinsic_inline_sizes.preferred_inline_size;
@@ -1657,7 +1655,7 @@ impl Fragment {
                 self.border_box.size.inline = Au(0);
             }
             SpecificFragmentInfo::InlineBlock(ref mut info) => {
-                let block_flow = unsafe { flow_ref::deref_mut(&mut info.flow_ref) }.as_mut_block();
+                let block_flow = flow_ref::deref_mut(&mut info.flow_ref).as_mut_block();
                 self.border_box.size.inline =
                     max(block_flow.base.intrinsic_inline_sizes.minimum_inline_size,
                         block_flow.base.intrinsic_inline_sizes.preferred_inline_size);
@@ -1665,7 +1663,7 @@ impl Fragment {
                 block_flow.base.block_container_writing_mode = self.style.writing_mode;
             }
             SpecificFragmentInfo::InlineAbsolute(ref mut info) => {
-                let block_flow = unsafe { flow_ref::deref_mut(&mut info.flow_ref) }.as_mut_block();
+                let block_flow = flow_ref::deref_mut(&mut info.flow_ref).as_mut_block();
                 self.border_box.size.inline =
                     max(block_flow.base.intrinsic_inline_sizes.minimum_inline_size,
                         block_flow.base.intrinsic_inline_sizes.preferred_inline_size);
@@ -1713,7 +1711,6 @@ impl Fragment {
     /// been assigned first.
     ///
     /// Ideally, this should follow CSS 2.1 § 10.6.2.
- #[allow(unsafe_code)] pub fn assign_replaced_block_size_if_necessary(&mut self, containing_block_block_size: Option) { match self.specific { SpecificFragmentInfo::Generic | @@ -1770,18 +1767,18 @@ impl Fragment { } SpecificFragmentInfo::InlineBlock(ref mut info) => { // Not the primary fragment, so we do not take the noncontent size into account. - let block_flow = unsafe { flow_ref::deref_mut(&mut info.flow_ref) }.as_block(); + let block_flow = flow_ref::deref_mut(&mut info.flow_ref).as_block(); self.border_box.size.block = block_flow.base.position.size.block + block_flow.fragment.margin.block_start_end() } SpecificFragmentInfo::InlineAbsoluteHypothetical(ref mut info) => { // Not the primary fragment, so we do not take the noncontent size into account. - let block_flow = unsafe { flow_ref::deref_mut(&mut info.flow_ref) }.as_block(); + let block_flow = flow_ref::deref_mut(&mut info.flow_ref).as_block(); self.border_box.size.block = block_flow.base.position.size.block; } SpecificFragmentInfo::InlineAbsolute(ref mut info) => { // Not the primary fragment, so we do not take the noncontent size into account. - let block_flow = unsafe { flow_ref::deref_mut(&mut info.flow_ref) }.as_block(); + let block_flow = flow_ref::deref_mut(&mut info.flow_ref).as_block(); self.border_box.size.block = block_flow.base.position.size.block + block_flow.fragment.margin.block_start_end() } @@ -1907,10 +1904,9 @@ impl Fragment { /// Determines the inline sizes of inline-block fragments. These cannot be fully computed until /// inline size assignment has run for the child flow: thus it is computed "late", during /// block size assignment. - #[allow(unsafe_code)] pub fn update_late_computed_replaced_inline_size_if_necessary(&mut self) { if let SpecificFragmentInfo::InlineBlock(ref mut inline_block_info) = self.specific { - let block_flow = unsafe { flow_ref::deref_mut(&mut inline_block_info.flow_ref) }.as_block(); + let block_flow = flow_ref::deref_mut(&mut inline_block_info.flow_ref).as_block(); let margin = block_flow.fragment.style.logical_margin(); self.border_box.size.inline = block_flow.fragment.border_box.size.inline + MaybeAuto::from_style(margin.inline_start, Au(0)).specified_or_zero() + @@ -1918,24 +1914,22 @@ impl Fragment { } } - #[allow(unsafe_code)] pub fn update_late_computed_inline_position_if_necessary(&mut self) { match self.specific { SpecificFragmentInfo::InlineAbsoluteHypothetical(ref mut info) => { let position = self.border_box.start.i; - unsafe { flow_ref::deref_mut(&mut info.flow_ref) } + flow_ref::deref_mut(&mut info.flow_ref) .update_late_computed_inline_position_if_necessary(position) } _ => {} } } - #[allow(unsafe_code)] pub fn update_late_computed_block_position_if_necessary(&mut self) { match self.specific { SpecificFragmentInfo::InlineAbsoluteHypothetical(ref mut info) => { let position = self.border_box.start.b; - unsafe { flow_ref::deref_mut(&mut info.flow_ref) } + flow_ref::deref_mut(&mut info.flow_ref) .update_late_computed_block_position_if_necessary(position) } _ => {} diff --git a/components/layout/inline.rs b/components/layout/inline.rs index 966cf5f9bff..4b5a6b985c7 100644 --- a/components/layout/inline.rs +++ b/components/layout/inline.rs @@ -1597,7 +1597,6 @@ impl Flow for InlineFlow { self.base.restyle_damage.remove(REFLOW_OUT_OF_FLOW | REFLOW); } - #[allow(unsafe_code)] fn compute_absolute_position(&mut self, _: &LayoutContext) { // First, gather up the positions of all the containing blocks (if any). 
// @@ -1658,7 +1657,7 @@ impl Flow for InlineFlow { let is_positioned = fragment.is_positioned(); match fragment.specific { SpecificFragmentInfo::InlineBlock(ref mut info) => { - let flow = unsafe { flow_ref::deref_mut(&mut info.flow_ref) }; + let flow = flow_ref::deref_mut(&mut info.flow_ref); flow::mut_base(flow).clip = clip; let block_flow = flow.as_mut_block(); @@ -1679,7 +1678,7 @@ impl Flow for InlineFlow { self.base.stacking_relative_position_of_display_port; } SpecificFragmentInfo::InlineAbsoluteHypothetical(ref mut info) => { - let flow = unsafe { flow_ref::deref_mut(&mut info.flow_ref) }; + let flow = flow_ref::deref_mut(&mut info.flow_ref); flow::mut_base(flow).clip = clip; let block_flow = flow.as_mut_block(); block_flow.base.absolute_position_info = self.base.absolute_position_info; @@ -1690,7 +1689,7 @@ impl Flow for InlineFlow { self.base.stacking_relative_position_of_display_port; } SpecificFragmentInfo::InlineAbsolute(ref mut info) => { - let flow = unsafe { flow_ref::deref_mut(&mut info.flow_ref) }; + let flow = flow_ref::deref_mut(&mut info.flow_ref); flow::mut_base(flow).clip = clip; let block_flow = flow.as_mut_block(); diff --git a/components/layout/layout_task.rs b/components/layout/layout_task.rs index 5ae874cb697..a82914a692a 100644 --- a/components/layout/layout_task.rs +++ b/components/layout/layout_task.rs @@ -781,7 +781,7 @@ impl LayoutTask { _ => return None, }; - unsafe { flow_ref::deref_mut(&mut flow) }.mark_as_root(); + flow_ref::deref_mut(&mut flow).mark_as_root(); Some(flow) } @@ -999,11 +999,11 @@ impl LayoutTask { self.profiler_metadata(), self.time_profiler_chan.clone(), || { - flow::mut_base(unsafe { flow_ref::deref_mut(layout_root) }).stacking_relative_position = + flow::mut_base(flow_ref::deref_mut(layout_root)).stacking_relative_position = LogicalPoint::zero(writing_mode).to_physical(writing_mode, rw_data.screen_size); - flow::mut_base(unsafe { flow_ref::deref_mut(layout_root) }).clip = + flow::mut_base(flow_ref::deref_mut(layout_root)).clip = ClippingRegion::from_rect(&data.page_clip_rect); match (&mut rw_data.parallel_traversal, opts::get().parallel_display_list_building) { @@ -1024,13 +1024,13 @@ impl LayoutTask { debug!("Done building display list."); let root_background_color = get_root_flow_background_color( - unsafe { flow_ref::deref_mut(layout_root) }); + flow_ref::deref_mut(layout_root)); let root_size = { let root_flow = flow::base(&**layout_root); root_flow.position.size.to_physical(root_flow.writing_mode) }; let mut display_list = box DisplayList::new(); - flow::mut_base(unsafe { flow_ref::deref_mut(layout_root) }) + flow::mut_base(flow_ref::deref_mut(layout_root)) .display_list_building_result .add_to(&mut *display_list); let paint_layer = PaintLayer::new(layout_root.layer_id(0), @@ -1130,7 +1130,7 @@ impl LayoutTask { } if needs_reflow { if let Some(mut flow) = self.try_get_layout_root(*node) { - LayoutTask::reflow_all_nodes(unsafe { flow_ref::deref_mut(&mut flow) }); + LayoutTask::reflow_all_nodes(flow_ref::deref_mut(&mut flow)); } } @@ -1291,7 +1291,7 @@ impl LayoutTask { self.profiler_metadata(), self.time_profiler_chan.clone(), || { - animation::recalc_style_for_animations(unsafe { flow_ref::deref_mut(&mut root_flow) }, + animation::recalc_style_for_animations(flow_ref::deref_mut(&mut root_flow), animations) }); } @@ -1311,10 +1311,10 @@ impl LayoutTask { self.profiler_metadata(), self.time_profiler_chan.clone(), || { - if opts::get().nonincremental_layout || unsafe { flow_ref::deref_mut(&mut root_flow) } + if 
opts::get().nonincremental_layout || flow_ref::deref_mut(&mut root_flow) .compute_layout_damage() .contains(REFLOW_ENTIRE_DOCUMENT) { - unsafe { flow_ref::deref_mut(&mut root_flow) }.reflow_entire_document() + flow_ref::deref_mut(&mut root_flow).reflow_entire_document() } }); diff --git a/components/layout/parallel.rs b/components/layout/parallel.rs index 6420478cf11..78e7e876db4 100644 --- a/components/layout/parallel.rs +++ b/components/layout/parallel.rs @@ -470,7 +470,7 @@ pub fn traverse_flow_tree_preorder( if opts::get().bubble_inline_sizes_separately { let layout_context = LayoutContext::new(shared_layout_context); let bubble_inline_sizes = BubbleISizes { layout_context: &layout_context }; - unsafe { flow_ref::deref_mut(root) }.traverse_postorder(&bubble_inline_sizes); + flow_ref::deref_mut(root).traverse_postorder(&bubble_inline_sizes); } run_queue_with_custom_work_data_type(queue, |queue| { diff --git a/components/layout/sequential.rs b/components/layout/sequential.rs index 02289a58428..d557cc93b41 100644 --- a/components/layout/sequential.rs +++ b/components/layout/sequential.rs @@ -40,7 +40,6 @@ pub fn traverse_dom_preorder(root: LayoutNode, doit(root, recalc_style, construct_flows); } -#[allow(unsafe_code)] pub fn resolve_generated_content(root: &mut FlowRef, shared_layout_context: &SharedLayoutContext) { fn doit(flow: &mut Flow, level: u32, traversal: &mut ResolveGeneratedContent) { if !traversal.should_process(flow) { @@ -56,10 +55,9 @@ pub fn resolve_generated_content(root: &mut FlowRef, shared_layout_context: &Sha let layout_context = LayoutContext::new(shared_layout_context); let mut traversal = ResolveGeneratedContent::new(&layout_context); - doit(unsafe { flow_ref::deref_mut(root) }, 0, &mut traversal) + doit(flow_ref::deref_mut(root), 0, &mut traversal) } -#[allow(unsafe_code)] pub fn traverse_flow_tree_preorder(root: &mut FlowRef, shared_layout_context: &SharedLayoutContext) { fn doit(flow: &mut Flow, @@ -80,7 +78,7 @@ pub fn traverse_flow_tree_preorder(root: &mut FlowRef, let layout_context = LayoutContext::new(shared_layout_context); - let root = unsafe { flow_ref::deref_mut(root) }; + let root = flow_ref::deref_mut(root); if opts::get().bubble_inline_sizes_separately { let bubble_inline_sizes = BubbleISizes { layout_context: &layout_context }; @@ -96,7 +94,6 @@ pub fn traverse_flow_tree_preorder(root: &mut FlowRef, doit(root, assign_inline_sizes, assign_block_sizes); } -#[allow(unsafe_code)] pub fn build_display_list_for_subtree(root: &mut FlowRef, shared_layout_context: &SharedLayoutContext) { fn doit(flow: &mut Flow, @@ -119,10 +116,9 @@ pub fn build_display_list_for_subtree(root: &mut FlowRef, let compute_absolute_positions = ComputeAbsolutePositions { layout_context: &layout_context }; let build_display_list = BuildDisplayList { layout_context: &layout_context }; - doit(unsafe { flow_ref::deref_mut(root) }, compute_absolute_positions, build_display_list); + doit(flow_ref::deref_mut(root), compute_absolute_positions, build_display_list); } -#[allow(unsafe_code)] pub fn iterate_through_flow_tree_fragment_border_boxes(root: &mut FlowRef, iterator: &mut FragmentBorderBoxIterator) { fn doit(flow: &mut Flow, @@ -145,5 +141,5 @@ pub fn iterate_through_flow_tree_fragment_border_boxes(root: &mut FlowRef, } } - doit(unsafe { flow_ref::deref_mut(root) }, 0, iterator, &ZERO_POINT); + doit(flow_ref::deref_mut(root), 0, iterator, &ZERO_POINT); }
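
For reference, the pattern this series converges on can be shown outside the Servo tree with a minimal, self-contained sketch. The `Node` and `NodeRef` names below are illustrative stand-ins, not Servo types: `Arc::get_mut` is the checked way to get `&mut T` out of an `Arc<T>` and only succeeds while the reference count is 1, whereas a helper in the spirit of `flow_ref::deref_mut` skips that check and is sound only when the caller (in Servo, the traversal structure) guarantees exclusive access.

use std::sync::Arc;

struct Node {
    size: u32,
}

type NodeRef = Arc<Node>;

// Checked: yields `&mut Node` only while this Arc is the sole handle,
// so no unsafe code is needed at the call site.
fn node_get_mut(r: &mut NodeRef) -> Option<&mut Node> {
    Arc::get_mut(r)
}

// Unchecked, mirroring the body of `flow_ref::deref_mut` in this patch:
// ignores the reference count and relies on the caller to guarantee that
// nothing else touches the same node at the same time.
#[allow(unsafe_code)]
fn node_deref_mut(r: &mut NodeRef) -> &mut Node {
    let ptr: *const Node = &**r;
    unsafe { &mut *(ptr as *mut Node) }
}

fn main() {
    let mut root: NodeRef = Arc::new(Node { size: 0 });

    // Freshly created, so the reference count is 1 and the checked path works.
    node_get_mut(&mut root).unwrap().size = 1;

    let other_handle = root.clone();
    // With a second handle alive, the checked path refuses to hand out `&mut`.
    assert!(node_get_mut(&mut root).is_none());
    drop(other_handle);

    // Back to a single handle; the unchecked path also works here, but nothing
    // in its signature enforces that precondition, which is why its doc comment
    // in this patch carries an explicit aliasing warning.
    node_deref_mut(&mut root).size = 2;
    assert_eq!(root.size, 2);
}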