Replace FlowRef with Arc<Flow>, now that Arc supports DST.

… and WeakFlowRef with Weak<Flow>.
This commit is contained in:
Simon Sapin 2015-08-14 14:34:13 +02:00
parent 2d22aa8e7e
commit 649250130b
7 changed files with 62 additions and 276 deletions

View file

@@ -375,7 +375,7 @@ impl<'a> FlowConstructor<'a> {
style,
child_node.restyle_damage(),
SpecificFragmentInfo::TableRow);
let mut new_child = FlowRef::new(box TableRowFlow::from_fragment(fragment));
let mut new_child: FlowRef = Arc::new(TableRowFlow::from_fragment(fragment));
new_child.add_new_child(child.clone());
child.finish();
*child = new_child
@@ -388,7 +388,7 @@ impl<'a> FlowConstructor<'a> {
style,
child_node.restyle_damage(),
SpecificFragmentInfo::Table);
let mut new_child = FlowRef::new(box TableFlow::from_fragment(fragment));
let mut new_child: FlowRef = Arc::new(TableFlow::from_fragment(fragment));
new_child.add_new_child(child.clone());
child.finish();
*child = new_child
@@ -402,7 +402,7 @@ impl<'a> FlowConstructor<'a> {
style,
child_node.restyle_damage(),
SpecificFragmentInfo::TableWrapper);
let mut new_child = FlowRef::new(box TableWrapperFlow::from_fragment(fragment, None));
let mut new_child: FlowRef = Arc::new(TableWrapperFlow::from_fragment(fragment, None));
new_child.add_new_child(child.clone());
child.finish();
*child = new_child
@@ -457,9 +457,8 @@ impl<'a> FlowConstructor<'a> {
let scanned_fragments =
TextRunScanner::new().scan_for_runs(&mut self.layout_context.font_context(),
fragments.fragments);
let mut inline_flow_ref =
FlowRef::new(box InlineFlow::from_fragments(scanned_fragments,
node.style().writing_mode));
let mut inline_flow_ref: FlowRef = Arc::new(
InlineFlow::from_fragments(scanned_fragments, node.style().writing_mode));
// Add all the inline-block fragments as children of the inline flow.
for inline_block_flow in &inline_block_flows {
@@ -753,12 +752,12 @@ impl<'a> FlowConstructor<'a> {
fn build_flow_for_block(&mut self, node: &ThreadSafeLayoutNode, float_kind: Option<FloatKind>)
-> ConstructionResult {
let fragment = self.build_fragment_for_block(node);
let flow = if node.style().is_multicol() {
box MulticolFlow::from_fragment(fragment, float_kind) as Box<Flow>
let flow: FlowRef = if node.style().is_multicol() {
Arc::new(MulticolFlow::from_fragment(fragment, float_kind))
} else {
box BlockFlow::from_fragment(fragment, float_kind) as Box<Flow>
Arc::new(BlockFlow::from_fragment(fragment, float_kind))
};
self.build_flow_for_block_like(FlowRef::new(flow), node)
self.build_flow_for_block_like(flow, node)
}
/// Bubbles up {ib} splits.
@@ -1072,13 +1071,11 @@ impl<'a> FlowConstructor<'a> {
fn build_flow_for_table_wrapper(&mut self, node: &ThreadSafeLayoutNode, float_value: float::T)
-> ConstructionResult {
let fragment = Fragment::new(node, SpecificFragmentInfo::TableWrapper);
let wrapper_flow =
box TableWrapperFlow::from_fragment(fragment, FloatKind::from_property(float_value));
let mut wrapper_flow = FlowRef::new(wrapper_flow as Box<Flow>);
let mut wrapper_flow: FlowRef = Arc::new(
TableWrapperFlow::from_fragment(fragment, FloatKind::from_property(float_value)));
let table_fragment = Fragment::new(node, SpecificFragmentInfo::Table);
let table_flow = box TableFlow::from_fragment(table_fragment);
let table_flow = FlowRef::new(table_flow as Box<Flow>);
let table_flow = Arc::new(TableFlow::from_fragment(table_fragment));
// First populate the table flow with its children.
let construction_result = self.build_flow_for_block_like(table_flow, node);
@@ -1135,8 +1132,8 @@ impl<'a> FlowConstructor<'a> {
/// with possibly other `BlockFlow`s or `InlineFlow`s underneath it.
fn build_flow_for_table_caption(&mut self, node: &ThreadSafeLayoutNode) -> ConstructionResult {
let fragment = self.build_fragment_for_block(node);
let flow = box TableCaptionFlow::from_fragment(fragment) as Box<Flow>;
self.build_flow_for_block_like(FlowRef::new(flow), node)
let flow = Arc::new(TableCaptionFlow::from_fragment(fragment));
self.build_flow_for_block_like(flow, node)
}
/// Builds a flow for a node with `display: table-row-group`. This yields a `TableRowGroupFlow`
@@ -1144,16 +1141,16 @@ impl<'a> FlowConstructor<'a> {
fn build_flow_for_table_rowgroup(&mut self, node: &ThreadSafeLayoutNode)
-> ConstructionResult {
let fragment = Fragment::new(node, SpecificFragmentInfo::TableRow);
let flow = box TableRowGroupFlow::from_fragment(fragment) as Box<Flow>;
self.build_flow_for_block_like(FlowRef::new(flow), node)
let flow = Arc::new(TableRowGroupFlow::from_fragment(fragment));
self.build_flow_for_block_like(flow, node)
}
/// Builds a flow for a node with `display: table-row`. This yields a `TableRowFlow` with
/// possibly other `TableCellFlow`s underneath it.
fn build_flow_for_table_row(&mut self, node: &ThreadSafeLayoutNode) -> ConstructionResult {
let fragment = Fragment::new(node, SpecificFragmentInfo::TableRow);
let flow = box TableRowFlow::from_fragment(fragment) as Box<Flow>;
self.build_flow_for_block_like(FlowRef::new(flow), node)
let flow = Arc::new(TableRowFlow::from_fragment(fragment));
self.build_flow_for_block_like(flow, node)
}
/// Builds a flow for a node with `display: table-cell`. This yields a `TableCellFlow` with
@@ -1172,9 +1169,9 @@ impl<'a> FlowConstructor<'a> {
position == position::T::fixed
});
let flow = box TableCellFlow::from_node_fragment_and_visibility_flag(node, fragment, !hide)
as Box<Flow>;
self.build_flow_for_block_like(FlowRef::new(flow), node)
let flow = Arc::new(
TableCellFlow::from_node_fragment_and_visibility_flag(node, fragment, !hide));
self.build_flow_for_block_like(flow, node)
}
/// Builds a flow for a node with `display: list-item`. This yields a `ListItemFlow` with
@@ -1222,21 +1219,19 @@ impl<'a> FlowConstructor<'a> {
let main_fragment = self.build_fragment_for_block(node);
let flow = match node.style().get_list().list_style_position {
list_style_position::T::outside => {
box ListItemFlow::from_fragments_and_flotation(main_fragment,
marker_fragments,
flotation)
Arc::new(ListItemFlow::from_fragments_and_flotation(
main_fragment, marker_fragments, flotation))
}
list_style_position::T::inside => {
for marker_fragment in marker_fragments {
initial_fragments.fragments.push_back(marker_fragment)
}
box ListItemFlow::from_fragments_and_flotation(main_fragment, vec![], flotation)
Arc::new(ListItemFlow::from_fragments_and_flotation(
main_fragment, vec![], flotation))
}
};
self.build_flow_for_block_starting_with_fragments(FlowRef::new(flow as Box<Flow>),
node,
initial_fragments)
self.build_flow_for_block_starting_with_fragments(flow, node, initial_fragments)
}
/// Creates a fragment for a node with `display: table-column`.
@@ -1277,8 +1272,7 @@ impl<'a> FlowConstructor<'a> {
let specific = SpecificFragmentInfo::TableColumn(TableColumnFragmentInfo::new(node));
col_fragments.push(Fragment::new(node, specific));
}
let flow = box TableColGroupFlow::from_fragments(fragment, col_fragments);
let mut flow = FlowRef::new(flow as Box<Flow>);
let mut flow: FlowRef = Arc::new(TableColGroupFlow::from_fragments(fragment, col_fragments));
flow.finish();
ConstructionResult::Flow(flow, AbsoluteDescendants::new())

View file

@@ -57,7 +57,7 @@ use std::mem;
use std::raw;
use std::slice::IterMut;
use std::sync::Arc;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::atomic::Ordering;
use style::computed_values::{clear, display, empty_cells, float, position, text_align};
use style::properties::{self, ComputedValues};
use style::values::computed::LengthOrPercentageOrAuto;
@@ -68,7 +68,7 @@ use util::logical_geometry::{LogicalRect, LogicalSize, WritingMode};
///
/// Note that virtual methods have a cost; we should not overuse them in Servo. Consider adding
/// methods to `ImmutableFlowUtils` or `MutableFlowUtils` before adding more methods here.
pub trait Flow: fmt::Debug + Sync {
pub trait Flow: fmt::Debug + Sync + Send + 'static {
// RTTI
//
// TODO(pcwalton): Use Rust's RTTI, once that works.
@@ -770,9 +770,9 @@ pub struct AbsoluteDescendantIter<'a> {
}
impl<'a> Iterator for AbsoluteDescendantIter<'a> {
type Item = &'a mut (Flow + 'a);
type Item = &'a mut Flow;
#[allow(unsafe_code)]
fn next(&mut self) -> Option<&'a mut (Flow + 'a)> {
fn next(&mut self) -> Option<&'a mut Flow> {
self.iter.next().map(|info| unsafe { flow_ref::deref_mut(&mut info.flow) })
}
}
@@ -815,13 +815,6 @@ impl AbsolutePositionInfo {
/// Data common to all flows.
pub struct BaseFlow {
/// NB: Must be the first element.
///
/// The necessity of this will disappear once we have dynamically-sized types.
strong_ref_count: AtomicUsize,
weak_ref_count: AtomicUsize,
pub restyle_damage: RestyleDamage,
/// The children of this flow.
@@ -963,15 +956,6 @@ impl Encodable for BaseFlow {
}
}
impl Drop for BaseFlow {
fn drop(&mut self) {
if self.strong_ref_count.load(Ordering::SeqCst) != 0 &&
self.weak_ref_count.load(Ordering::SeqCst) != 0 {
panic!("Flow destroyed before its ref count hit zero—this is unsafe!")
}
}
}
/// Whether a base flow should be forced to be nonfloated. This can affect e.g. `TableFlow`, which
/// is never floated because the table wrapper flow is the floated one.
#[derive(Clone, PartialEq)]
@@ -1039,8 +1023,6 @@ impl BaseFlow {
damage.remove(RECONSTRUCT_FLOW);
BaseFlow {
strong_ref_count: AtomicUsize::new(1),
weak_ref_count: AtomicUsize::new(1),
restyle_damage: damage,
children: FlowList::new(),
intrinsic_inline_sizes: IntrinsicISizes::new(),
@@ -1069,16 +1051,6 @@ impl BaseFlow {
self.children.iter_mut()
}
#[allow(unsafe_code)]
pub unsafe fn strong_ref_count<'a>(&'a self) -> &'a AtomicUsize {
&self.strong_ref_count
}
#[allow(unsafe_code)]
pub unsafe fn weak_ref_count<'a>(&'a self) -> &'a AtomicUsize {
&self.weak_ref_count
}
pub fn debug_id(&self) -> usize {
let p = self as *const _;
p as usize
@@ -1115,7 +1087,7 @@ impl BaseFlow {
}
}
impl<'a> ImmutableFlowUtils for &'a (Flow + 'a) {
impl<'a> ImmutableFlowUtils for &'a Flow {
/// Returns true if this flow is a block flow or subclass thereof.
fn is_block_like(self) -> bool {
match self.class() {
@@ -1213,7 +1185,7 @@ impl<'a> ImmutableFlowUtils for &'a (Flow + 'a) {
/// as it's harder to understand.
fn generate_missing_child_flow(self, node: &ThreadSafeLayoutNode) -> FlowRef {
let mut style = node.style().clone();
let flow = match self.class() {
match self.class() {
FlowClass::Table | FlowClass::TableRowGroup => {
properties::modify_style_for_anonymous_table_object(
&mut style,
@@ -1224,7 +1196,7 @@ impl<'a> ImmutableFlowUtils for &'a (Flow + 'a) {
style,
node.restyle_damage(),
SpecificFragmentInfo::TableRow);
box TableRowFlow::from_fragment(fragment) as Box<Flow>
Arc::new(TableRowFlow::from_fragment(fragment))
},
FlowClass::TableRow => {
properties::modify_style_for_anonymous_table_object(
@@ -1237,14 +1209,12 @@ impl<'a> ImmutableFlowUtils for &'a (Flow + 'a) {
node.restyle_damage(),
SpecificFragmentInfo::TableCell);
let hide = node.style().get_inheritedtable().empty_cells == empty_cells::T::hide;
box TableCellFlow::from_node_fragment_and_visibility_flag(node, fragment, !hide) as
Box<Flow>
Arc::new(TableCellFlow::from_node_fragment_and_visibility_flag(node, fragment, !hide))
},
_ => {
panic!("no need to generate a missing child")
}
};
FlowRef::new(flow)
}
}
/// Returns true if this flow contains fragments that are roots of an absolute flow tree.
@@ -1315,7 +1285,7 @@ impl<'a> ImmutableFlowUtils for &'a (Flow + 'a) {
}
}
impl<'a> MutableFlowUtils for &'a mut (Flow + 'a) {
impl<'a> MutableFlowUtils for &'a mut Flow {
/// Traverses the tree in preorder.
fn traverse_preorder<T: PreorderFlowTraversal>(self, traversal: &T) {
if traversal.should_process(self) {

View file

@@ -108,9 +108,9 @@ impl FlowList {
}
impl<'a> Iterator for FlowListIterator<'a> {
type Item = &'a (Flow + 'a);
type Item = &'a Flow;
#[inline]
fn next(&mut self) -> Option<&'a (Flow + 'a)> {
fn next(&mut self) -> Option<&'a Flow> {
self.it.next().map(|x| &**x)
}
@@ -121,10 +121,10 @@ impl<'a> Iterator for FlowListIterator<'a> {
}
impl<'a> Iterator for MutFlowListIterator<'a> {
type Item = &'a mut (Flow + 'a);
type Item = &'a mut Flow;
#[inline]
#[allow(unsafe_code)]
fn next(&mut self) -> Option<&'a mut (Flow + 'a)> {
fn next(&mut self) -> Option<&'a mut Flow> {
self.it.next().map(|x| unsafe { flow_ref::deref_mut(x) })
}

View file

@@ -10,190 +10,15 @@
#![allow(unsafe_code)]
use flow;
use flow::{Flow, BaseFlow};
use flow::Flow;
use std::sync::{Arc, Weak};
use std::mem;
use std::ops::Deref;
use std::ptr;
use std::raw;
use std::rt::heap;
use std::sync::atomic::{self, Ordering};
#[unsafe_no_drop_flag]
pub struct FlowRef {
object: raw::TraitObject,
}
unsafe impl Send for FlowRef {}
unsafe impl Sync for FlowRef {}
#[unsafe_no_drop_flag]
pub struct WeakFlowRef {
object: raw::TraitObject,
}
unsafe impl Send for WeakFlowRef {}
unsafe impl Sync for WeakFlowRef {}
impl FlowRef {
pub fn new(mut flow: Box<Flow>) -> FlowRef {
unsafe {
let result = {
let flow_ref: &mut Flow = &mut *flow;
let object = mem::transmute::<&mut Flow, raw::TraitObject>(flow_ref);
FlowRef { object: object }
};
mem::forget(flow);
result
}
}
/// Downgrades the FlowRef to a WeakFlowRef.
pub fn downgrade(&self) -> WeakFlowRef {
unsafe {
flow::base(&**self).weak_ref_count().fetch_add(1, Ordering::Relaxed);
}
WeakFlowRef { object: self.object }
}
}
impl<'a> Deref for FlowRef {
type Target = Flow + 'a;
fn deref(&self) -> &(Flow + 'a) {
unsafe {
mem::transmute_copy::<raw::TraitObject, &(Flow + 'a)>(&self.object)
}
}
}
pub type FlowRef = Arc<Flow>;
pub type WeakFlowRef = Weak<Flow>;
// FIXME(https://github.com/servo/servo/issues/6503) This introduces unsound mutable aliasing.
// Try to replace it with Arc::get_mut (which checks that the reference count is 1).
pub unsafe fn deref_mut<'a>(flow_ref: &mut FlowRef) -> &mut (Flow + 'a) {
mem::transmute_copy::<raw::TraitObject, &mut (Flow + 'a)>(&flow_ref.object)
}
impl Drop for FlowRef {
fn drop(&mut self) {
unsafe {
if self.object.vtable.is_null() ||
self.object.vtable as usize == mem::POST_DROP_USIZE {
return
}
if flow::base(&**self).strong_ref_count().fetch_sub(1, Ordering::Release) != 1 {
return
}
atomic::fence(Ordering::Acquire);
// Normally we'd call the underlying Drop logic but not free the
// allocation, but this is almost impossible without DST in
// Rust. Instead we make a fake trait object to run the drop logic
// on.
let flow_ref: FlowRef = mem::replace(self, FlowRef {
object: raw::TraitObject {
vtable: ptr::null_mut(),
data: ptr::null_mut(),
}
});
let vtable: &[usize; 3] = mem::transmute::<*mut (), &[usize; 3]>(flow_ref.object.vtable);
let object_size = vtable[1];
let object_align = vtable[2];
let fake_data = heap::allocate(object_size, object_align);
ptr::copy(flow_ref.object.data as *const u8, fake_data, object_size);
let fake_box = raw::TraitObject { vtable: flow_ref.object.vtable, data: fake_data as *mut () };
let fake_flow = mem::transmute::<raw::TraitObject, Box<Flow>>(fake_box);
drop(fake_flow);
if flow::base(&*flow_ref).weak_ref_count().fetch_sub(1, Ordering::Release) == 1 {
atomic::fence(Ordering::Acquire);
heap::deallocate(flow_ref.object.data as *mut u8, object_size, object_align);
}
mem::forget(flow_ref);
}
}
}
impl Clone for FlowRef {
fn clone(&self) -> FlowRef {
unsafe {
let _ = flow::base(&**self).strong_ref_count().fetch_add(1, Ordering::Relaxed);
FlowRef {
object: raw::TraitObject {
vtable: self.object.vtable,
data: self.object.data,
}
}
}
}
}
fn base<'a>(r: &WeakFlowRef) -> &'a BaseFlow {
let data = r.object.data;
debug_assert!(!data.is_null());
unsafe {
mem::transmute::<*mut (), &'a BaseFlow>(data)
}
}
impl WeakFlowRef {
/// Upgrades a WeakFlowRef to a FlowRef.
pub fn upgrade(&self) -> Option<FlowRef> {
unsafe {
let object = base(self);
// We use a CAS loop to increment the strong count instead of a
// fetch_add because once the count hits 0 is must never be above
// 0.
loop {
let n = object.strong_ref_count().load(Ordering::SeqCst);
if n == 0 { return None }
let old = object.strong_ref_count().compare_and_swap(n, n + 1, Ordering::SeqCst);
if old == n {
return Some(FlowRef { object: self.object })
}
}
}
}
}
impl Clone for WeakFlowRef {
fn clone(&self) -> WeakFlowRef {
unsafe {
base(self).weak_ref_count().fetch_add(1, Ordering::Relaxed);
}
WeakFlowRef { object: self. object }
}
}
impl Drop for WeakFlowRef {
fn drop(&mut self) {
unsafe {
if self.object.vtable.is_null() ||
self.object.vtable as usize == mem::POST_DROP_USIZE {
return
}
if base(self).weak_ref_count().fetch_sub(1, Ordering::Release) == 1 {
atomic::fence(Ordering::Acquire);
// This dance deallocates the Box<Flow> without running its
// drop glue. The drop glue is run when the last strong
// reference is released.
let weak_ref: WeakFlowRef = mem::replace(self, WeakFlowRef {
object: raw::TraitObject {
vtable: ptr::null_mut(),
data: ptr::null_mut(),
}
});
let vtable: &[usize; 3] = mem::transmute::<*mut (), &[usize; 3]>(weak_ref.object.vtable);
let object_size = vtable[1];
let object_align = vtable[2];
heap::deallocate(weak_ref.object.data as *mut u8, object_size, object_align);
mem::forget(weak_ref);
}
}
}
pub unsafe fn deref_mut<'a>(r: &'a mut FlowRef) -> &'a mut Flow {
let ptr: *const Flow = &**r;
&mut *(ptr as *mut Flow)
}

View file

@@ -192,7 +192,7 @@ pub trait LayoutDamageComputation {
fn reflow_entire_document(self);
}
impl<'a> LayoutDamageComputation for &'a mut (Flow + 'a) {
impl<'a> LayoutDamageComputation for &'a mut Flow {
fn compute_layout_damage(self) -> SpecialRestyleDamage {
let mut special_damage = SpecialRestyleDamage::empty();
let is_absolutely_positioned = flow::base(self).flags.contains(IS_ABSOLUTELY_POSITIONED);

View file

@@ -4,13 +4,12 @@
#![feature(append)]
#![feature(arc_unique)]
#![feature(arc_weak)]
#![feature(box_str)]
#![feature(box_syntax)]
#![feature(cell_extras)]
#![feature(custom_derive)]
#![feature(filling_drop)]
#![feature(hashmap_hasher)]
#![feature(heap_api)]
#![feature(mpsc_select)]
#![feature(plugin)]
#![feature(raw)]

View file

@@ -47,25 +47,25 @@ fn null_unsafe_flow() -> UnsafeFlow {
pub fn owned_flow_to_unsafe_flow(flow: *const FlowRef) -> UnsafeFlow {
unsafe {
mem::transmute_copy(&*flow)
mem::transmute::<&Flow, UnsafeFlow>(&**flow)
}
}
pub fn mut_owned_flow_to_unsafe_flow(flow: *mut FlowRef) -> UnsafeFlow {
unsafe {
mem::transmute_copy(&*flow)
mem::transmute::<&Flow, UnsafeFlow>(&**flow)
}
}
pub fn borrowed_flow_to_unsafe_flow(flow: &Flow) -> UnsafeFlow {
unsafe {
mem::transmute_copy(&flow)
mem::transmute::<&Flow, UnsafeFlow>(flow)
}
}
pub fn mut_borrowed_flow_to_unsafe_flow(flow: &mut Flow) -> UnsafeFlow {
unsafe {
mem::transmute_copy(&flow)
mem::transmute::<&Flow, UnsafeFlow>(flow)
}
}
@@ -239,10 +239,9 @@ trait ParallelPostorderFlowTraversal : PostorderFlowTraversal {
fn run_parallel(&self, mut unsafe_flow: UnsafeFlow) {
loop {
// Get a real flow.
let flow: &mut FlowRef = unsafe {
mem::transmute(&mut unsafe_flow)
let flow: &mut Flow = unsafe {
mem::transmute(unsafe_flow)
};
let flow = unsafe { flow_ref::deref_mut(flow) };
// Perform the appropriate traversal.
if self.should_process(flow) {
@@ -257,7 +256,7 @@ trait ParallelPostorderFlowTraversal : PostorderFlowTraversal {
Ordering::Relaxed);
// Possibly enqueue the parent.
let mut unsafe_parent = base.parallel.parent;
let unsafe_parent = base.parallel.parent;
if unsafe_parent == null_unsafe_flow() {
// We're done!
break
@@ -266,10 +265,10 @@ trait ParallelPostorderFlowTraversal : PostorderFlowTraversal {
// No, we're not at the root yet. Then are we the last child
// of our parent to finish processing? If so, we can continue
// on with our parent; otherwise, we've gotta wait.
let parent: &mut FlowRef = unsafe {
mem::transmute(&mut unsafe_parent)
let parent: &mut Flow = unsafe {
mem::transmute(unsafe_parent)
};
let parent_base = flow::mut_base(unsafe { flow_ref::deref_mut(parent) });
let parent_base = flow::mut_base(parent);
if parent_base.parallel.children_count.fetch_sub(1, Ordering::Relaxed) == 1 {
// We were the last child of our parent. Reflow our parent.
unsafe_flow = unsafe_parent
@@ -296,12 +295,11 @@ trait ParallelPreorderFlowTraversal : PreorderFlowTraversal {
top_down_func: ChunkedFlowTraversalFunction,
bottom_up_func: FlowTraversalFunction) {
let mut discovered_child_flows = Vec::new();
for mut unsafe_flow in *unsafe_flows.0 {
for unsafe_flow in *unsafe_flows.0 {
let mut had_children = false;
unsafe {
// Get a real flow.
let flow: &mut FlowRef = mem::transmute(&mut unsafe_flow);
let flow = flow_ref::deref_mut(flow);
let flow: &mut Flow = mem::transmute(unsafe_flow);
if self.should_record_thread_ids() {
flow::mut_base(flow).thread_id = proxy.worker_index();