layout: Make incremental reflow more fine-grained by introducing "reflow
out-of-flow" and "reconstruct flow" damage bits.

This is needed for good performance on the maze solver.
Patrick Walton 2014-10-30 13:27:35 -07:00
parent 7712052e13
commit 08fc7c2795
20 changed files with 644 additions and 403 deletions
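
For background, the "damage bits" named in the commit message are flags on the RestyleDamage type imported from the incremental module in the diff below. The following is a minimal, illustrative sketch of such a flag set using the bitflags crate; apart from Reflow and ReflowOutOfFlow, which appear in this commit, the flag names, values, and derives here are assumptions rather than Servo's actual definitions.

    // Hypothetical sketch only: fine-grained restyle damage expressed with the
    // `bitflags` crate. Only REFLOW and REFLOW_OUT_OF_FLOW correspond directly
    // to names visible in this diff; the rest are illustrative.
    use bitflags::bitflags;

    bitflags! {
        #[derive(Clone, Copy, PartialEq, Eq)]
        pub struct RestyleDamage: u8 {
            /// Repaint only; geometry is unchanged.
            const REPAINT            = 1 << 0;
            /// Recompute intrinsic (bubbled-up) inline sizes.
            const BUBBLE_ISIZES      = 1 << 1;
            /// Reflow out-of-flow descendants (absolutely positioned flows, floats).
            const REFLOW_OUT_OF_FLOW = 1 << 2;
            /// Reflow this flow and its in-flow descendants.
            const REFLOW             = 1 << 3;
            /// Throw the flow away and rebuild it from the DOM.
            const RECONSTRUCT_FLOW   = 1 << 4;
        }
    }

    fn main() {
        // A flow that only needs its out-of-flow descendants laid out is skipped
        // by a traversal that checks `contains(REFLOW)`, but caught by `intersects`.
        let damage = RestyleDamage::REFLOW_OUT_OF_FLOW;
        assert!(damage.intersects(RestyleDamage::REFLOW | RestyleDamage::REFLOW_OUT_OF_FLOW));
        assert!(!damage.contains(RestyleDamage::REFLOW));
    }

That contains-versus-intersects distinction is exactly what the AssignISizes and AssignBSizesAndStoreOverflow hunks at the bottom of this diff switch over to.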


@@ -8,10 +8,10 @@ use css::node_style::StyledNode;
 use css::matching::{ApplicableDeclarations, CannotShare, MatchMethods, StyleWasShared};
 use construct::FlowConstructor;
 use context::LayoutContext;
-use flow::{Flow, ImmutableFlowUtils, MutableFlowUtils};
+use flow::{Flow, MutableFlowUtils};
 use flow::{PreorderFlowTraversal, PostorderFlowTraversal};
 use flow;
-use incremental::{RestyleDamage, BubbleISizes, Reflow};
+use incremental::{RestyleDamage, BubbleISizes, Reflow, ReflowOutOfFlow};
 use wrapper::{layout_node_to_unsafe_layout_node, LayoutNode};
 use wrapper::{PostorderNodeMutTraversal, ThreadSafeLayoutNode, UnsafeLayoutNode};
 use wrapper::{PreorderDomTraversal, PostorderDomTraversal};
@@ -134,9 +134,10 @@ impl<'a> PreorderDomTraversal for RecalcStyleForNode<'a> {
         // Just needs to be wrapped in an option for `match_node`.
         let some_bf = Some(bf);
-        if node.is_dirty() || node.has_dirty_siblings() {
-            // Remove existing CSS styles from changed nodes, to force
-            // non-incremental reflow.
+        let nonincremental_layout = opts::get().nonincremental_layout;
+        if nonincremental_layout || node.is_dirty() {
+            // Remove existing CSS styles from nodes whose content has changed (e.g. text changed),
+            // to force non-incremental reflow.
             if node.has_changed() {
                 let node = ThreadSafeLayoutNode::new(&node);
                 node.unstyle();
@@ -161,7 +162,9 @@ impl<'a> PreorderDomTraversal for RecalcStyleForNode<'a> {
                                     &some_bf,
                                     &mut applicable_declarations,
                                     &mut shareable);
-                    }
+                    } else {
+                        ThreadSafeLayoutNode::new(&node).set_restyle_damage(RestyleDamage::all())
+                    }
                     // Perform the CSS cascade.
                     unsafe {
@@ -175,7 +178,10 @@ impl<'a> PreorderDomTraversal for RecalcStyleForNode<'a> {
                         style_sharing_candidate_cache.insert_if_possible(&node);
                     }
                 }
-                StyleWasShared(index) => style_sharing_candidate_cache.touch(index),
+                StyleWasShared(index, damage) => {
+                    style_sharing_candidate_cache.touch(index);
+                    ThreadSafeLayoutNode::new(&node).set_restyle_damage(damage);
+                }
             }
         }
@@ -205,18 +211,16 @@ impl<'a> PostorderDomTraversal for ConstructFlows<'a> {
         {
             let tnode = ThreadSafeLayoutNode::new(&node);
-            // Always re-construct if incremental layout is turned off.
-            if opts::get().nonincremental_layout {
-                unsafe {
-                    node.set_dirty_descendants(true);
-                }
-            }
-            if node.has_dirty_descendants() {
-                tnode.set_restyle_damage(RestyleDamage::all());
+            // Always reconstruct if incremental layout is turned off.
+            let nonincremental_layout = opts::get().nonincremental_layout;
+            if nonincremental_layout || node.has_dirty_descendants() {
                 let mut flow_constructor = FlowConstructor::new(self.layout_context);
-                flow_constructor.process(&tnode);
-                debug!("Constructed flow for {:x}: {:x}", tnode.debug_id(), tnode.flow_debug_id());
+                if nonincremental_layout || !flow_constructor.repair_if_possible(&tnode) {
+                    flow_constructor.process(&tnode);
+                    debug!("Constructed flow for {:x}: {:x}",
+                           tnode.debug_id(),
+                           tnode.flow_debug_id());
+                }
             }
             // Reset the layout damage in this node. It's been propagated to the
@@ -300,18 +304,12 @@ pub struct AssignISizes<'a> {
 impl<'a> PreorderFlowTraversal for AssignISizes<'a> {
     #[inline]
     fn process(&self, flow: &mut Flow) {
-        if flow::base(flow).restyle_damage.contains(Reflow) {
-            flow.assign_inline_sizes(self.layout_context);
-        } else if flow.is_block_like() {
-            let block = flow.as_block();
-            block.propagate_and_compute_used_inline_size(self.layout_context);
-        }
+        flow.assign_inline_sizes(self.layout_context);
     }
     #[inline]
     fn should_process(&self, flow: &mut Flow) -> bool {
-        // TODO(cgaebel): Incremental inline size assignment.
-        flow::base(flow).restyle_damage.contains(Reflow) || true
+        flow::base(flow).restyle_damage.intersects(ReflowOutOfFlow | Reflow)
     }
 }
@@ -326,26 +324,25 @@ pub struct AssignBSizesAndStoreOverflow<'a> {
 impl<'a> PostorderFlowTraversal for AssignBSizesAndStoreOverflow<'a> {
     #[inline]
     fn process(&self, flow: &mut Flow) {
-        if !flow::base(flow).flags.impacted_by_floats() {
-            flow.assign_block_size(self.layout_context);
-            // Skip store-overflow for absolutely positioned flows. That will be
-            // done in a separate traversal.
-            if flow::base(flow).restyle_damage.contains(Reflow) {
-                if !flow.is_store_overflow_delayed() {
-                    flow.store_overflow(self.layout_context);
-                }
-            }
+        // Can't do anything with flows impacted by floats until we reach their inorder parent.
+        // NB: We must return without resetting the restyle bits for these, as we haven't actually
+        // reflowed anything!
+        if flow::base(flow).flags.impacted_by_floats() {
+            return
+        }
         flow::mut_base(flow).restyle_damage.remove(Reflow);
+        flow.assign_block_size(self.layout_context);
+        // Skip store-overflow for absolutely positioned flows. That will be
+        // done in a separate traversal.
+        if !flow.is_store_overflow_delayed() {
+            flow.store_overflow(self.layout_context);
+        }
     }
     #[inline]
     fn should_process(&self, flow: &mut Flow) -> bool {
-        // TODO(cgaebel): Incremental block size assignment.
-        flow::base(flow).restyle_damage.contains(Reflow) || true
+        flow::base(flow).restyle_damage.intersects(ReflowOutOfFlow | Reflow)
    }
 }
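
Taken together, the last two hunks make inline- and block-size assignment skip any flow whose damage carries neither Reflow nor ReflowOutOfFlow, instead of processing every flow unconditionally. A rough sketch of that gating pattern, reusing the hypothetical RestyleDamage flags sketched near the top of this page; the Flow and traversal types below are simplified stand-ins, not Servo's real signatures:

    // Simplified stand-ins; Servo's real Flow and traversal traits are far richer.
    struct Flow {
        damage: RestyleDamage,
    }

    trait PostorderFlowTraversal {
        fn process(&self, flow: &mut Flow);
        /// Returning false lets incremental layout skip flows this traversal
        /// has nothing to do for.
        fn should_process(&self, flow: &mut Flow) -> bool;
    }

    struct AssignBSizes;

    impl PostorderFlowTraversal for AssignBSizes {
        fn process(&self, flow: &mut Flow) {
            // ... assign block sizes, store overflow ...
            // Clear the in-flow reflow bit once the work is actually done.
            flow.damage.remove(RestyleDamage::REFLOW);
        }

        fn should_process(&self, flow: &mut Flow) -> bool {
            // Visit the flow if it needs either in-flow or out-of-flow reflow,
            // mirroring `intersects(ReflowOutOfFlow | Reflow)` in the hunks above.
            flow.damage
                .intersects(RestyleDamage::REFLOW | RestyleDamage::REFLOW_OUT_OF_FLOW)
        }
    }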