servo/components/profile/time.rs
Martin Robinson 312cf0df08
script: Create a CrossProcessInstant to enable serializable monotonic time (#33282)
Up until now, Servo was using a very old version of time to get a
cross-process monotonic timestamp (using `time::precise_time_ns()`).
This change replaces the usage of old time with a new serializable
monotonic time called `CrossProcessInstant` and uses it where `u64`
timestamps were stored before. The standard library doesn't provide this
functionality because it isn't something you can do reliably on all
platforms. The idea is that we do our best and then fall back
gracefully.
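
A rough sketch of the idea (not Servo's actual `CrossProcessInstant`;
the names below are hypothetical): pair one monotonic reading with one
wall-clock reading per process, then express every later instant as a
serializable offset from that anchor, falling back gracefully when the
wall clock is unavailable.

```rust
use std::sync::OnceLock;
use std::time::{Duration, Instant, SystemTime, UNIX_EPOCH};

// One process-wide anchor pairing a monotonic reading with a wall-clock one.
fn anchor() -> (Instant, SystemTime) {
    static ANCHOR: OnceLock<(Instant, SystemTime)> = OnceLock::new();
    *ANCHOR.get_or_init(|| (Instant::now(), SystemTime::now()))
}

// Hypothetical serializable instant: nanoseconds since the Unix epoch,
// derived from the monotonic clock so it cannot go backwards in-process.
fn cross_process_now_nanos() -> u128 {
    let (instant_anchor, system_anchor) = anchor();
    (system_anchor + instant_anchor.elapsed())
        .duration_since(UNIX_EPOCH)
        .unwrap_or(Duration::ZERO) // best effort: fall back gracefully
        .as_nanos()
}
```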

This is a big change, because Servo was using `u64` timestamps all over
the place: some were raw values taken from `time::precise_time_ns()` and
some were relative offsets from the "navigation start," which is a
concept similar to DOM's `timeOrigin` (but not exactly the same). It's
very difficult to fix this situation without fixing it everywhere, as
the `Instant` concept is supposed to be opaque. The good thing is that
this change clears up all ambiguity when passing times, as a
`time::Duration` is unit-agnostic and a `CrossProcessInstant` represents
an absolute moment in time.

The `time` version of `Duration` is used because it can be negative and
is also serializable.

Good things:
 - No need to pass around `time` and `time_precise` any longer.
   `CrossProcessInstant` is also precise and monotonic.
 - The ambiguity between a time that is unset and a time at `0` (at some
   kind of timer epoch) is now gone.

There is still a lot of work to do to clean up timing, but this is the
first step. In general, I've tried to preserve existing behavior, even
when not spec compliant, as much as possible. I plan to submit followup
PRs fixing some of the issues I've noticed.

Signed-off-by: Martin Robinson <mrobinson@igalia.com>
2024-09-05 18:50:09 +00:00


/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at https://mozilla.org/MPL/2.0/. */

//! Timing functions.

use std::borrow::ToOwned;
use std::collections::{BTreeMap, HashMap};
use std::fs::File;
use std::io::{self, Write};
use std::path::Path;
use std::thread;

use ipc_channel::ipc::{self, IpcReceiver};
use profile_traits::time::{
    ProfilerCategory, ProfilerChan, ProfilerData, ProfilerMsg, TimerMetadata,
    TimerMetadataFrameType, TimerMetadataReflowType,
};
use servo_config::opts::OutputOptions;
use time_03::Duration;

use crate::trace_dump::TraceDump;
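
/// Values that can be formatted for the profiler's output.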
pub trait Formattable {
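    /// Format this value for the given output option (CSV file or terminal).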
    fn format(&self, output: &Option<OutputOptions>) -> String;
}

impl Formattable for Option<TimerMetadata> {
    fn format(&self, output: &Option<OutputOptions>) -> String {
        match *self {
            // TODO(cgaebel): Center-align in the format strings as soon as rustc supports it.
            Some(ref meta) => {
                let url = &*meta.url;
                match *output {
                    Some(OutputOptions::FileName(_)) => {
                        /* The profiling output is a CSV file */
                        let incremental = match meta.incremental {
                            TimerMetadataReflowType::Incremental => "yes",
                            TimerMetadataReflowType::FirstReflow => "no",
                        };
                        let iframe = match meta.iframe {
                            TimerMetadataFrameType::RootWindow => "yes",
                            TimerMetadataFrameType::IFrame => "no",
                        };
                        format!(" {}\t{}\t{}", incremental, iframe, url)
                    },
                    _ => {
                        /* The profiling output is the terminal */
                        let url = if url.len() > 30 { &url[..30] } else { url };
                        let incremental = match meta.incremental {
                            TimerMetadataReflowType::Incremental => " yes",
                            TimerMetadataReflowType::FirstReflow => " no ",
                        };
                        let iframe = match meta.iframe {
                            TimerMetadataFrameType::RootWindow => " yes",
                            TimerMetadataFrameType::IFrame => " no ",
                        };
                        format!(" {:14} {:9} {:30}", incremental, iframe, url)
                    },
                }
            },
            None => match *output {
                Some(OutputOptions::FileName(_)) => {
                    format!(" {}\t{}\t{}", " N/A", " N/A", " N/A")
                },
                _ => format!(" {:14} {:9} {:30}", " N/A", " N/A", " N/A"),
            },
        }
    }
}

impl Formattable for ProfilerCategory {
    // some categories are subcategories of LayoutPerformCategory
    // and should be printed to indicate this
    fn format(&self, _output: &Option<OutputOptions>) -> String {
        let padding = match *self {
            ProfilerCategory::LayoutStyleRecalc |
            ProfilerCategory::LayoutRestyleDamagePropagation |
            ProfilerCategory::LayoutNonIncrementalReset |
            ProfilerCategory::LayoutGeneratedContent |
            ProfilerCategory::LayoutDisplayListSorting |
            ProfilerCategory::LayoutFloatPlacementSpeculation |
            ProfilerCategory::LayoutMain |
            ProfilerCategory::LayoutStoreOverflow |
            ProfilerCategory::LayoutDispListBuild |
            ProfilerCategory::LayoutDamagePropagate |
            ProfilerCategory::PaintingPerTile |
            ProfilerCategory::PaintingPrepBuff => "+ ",
            ProfilerCategory::LayoutParallelWarmup |
            ProfilerCategory::LayoutSelectorMatch |
            ProfilerCategory::LayoutTreeBuilder |
            ProfilerCategory::LayoutTextShaping => "| + ",
            _ => "",
        };
        let name = match *self {
            ProfilerCategory::Compositing => "Compositing",
            ProfilerCategory::LayoutPerform => "Layout",
            ProfilerCategory::LayoutStyleRecalc => "Style Recalc",
            ProfilerCategory::LayoutTextShaping => "Text Shaping",
            ProfilerCategory::LayoutRestyleDamagePropagation => "Restyle Damage Propagation",
            ProfilerCategory::LayoutNonIncrementalReset => "Non-incremental reset (temporary)",
            ProfilerCategory::LayoutSelectorMatch => "Selector Matching",
            ProfilerCategory::LayoutTreeBuilder => "Tree Building",
            ProfilerCategory::LayoutDamagePropagate => "Damage Propagation",
            ProfilerCategory::LayoutDisplayListSorting => "Sorting Display List",
            ProfilerCategory::LayoutGeneratedContent => "Generated Content Resolution",
            ProfilerCategory::LayoutFloatPlacementSpeculation => "Float Placement Speculation",
            ProfilerCategory::LayoutMain => "Primary Layout Pass",
            ProfilerCategory::LayoutStoreOverflow => "Store Overflow",
            ProfilerCategory::LayoutParallelWarmup => "Parallel Warmup",
            ProfilerCategory::LayoutDispListBuild => "Display List Construction",
            ProfilerCategory::NetHTTPRequestResponse => "Network HTTP Request/Response",
            ProfilerCategory::PaintingPerTile => "Painting Per Tile",
            ProfilerCategory::PaintingPrepBuff => "Buffer Prep",
            ProfilerCategory::Painting => "Painting",
            ProfilerCategory::ImageDecoding => "Image Decoding",
            ProfilerCategory::ImageSaving => "Image Saving",
            ProfilerCategory::ScriptAttachLayout => "Script Attach Layout",
            ProfilerCategory::ScriptConstellationMsg => "Script Constellation Msg",
            ProfilerCategory::ScriptDevtoolsMsg => "Script Devtools Msg",
            ProfilerCategory::ScriptDocumentEvent => "Script Document Event",
            ProfilerCategory::ScriptDomEvent => "Script Dom Event",
            ProfilerCategory::ScriptEvaluate => "Script JS Evaluate",
            ProfilerCategory::ScriptFileRead => "Script File Read",
            ProfilerCategory::ScriptHistoryEvent => "Script History Event",
            ProfilerCategory::ScriptImageCacheMsg => "Script Image Cache Msg",
            ProfilerCategory::ScriptInputEvent => "Script Input Event",
            ProfilerCategory::ScriptNetworkEvent => "Script Network Event",
            ProfilerCategory::ScriptParseHTML => "Script Parse HTML",
            ProfilerCategory::ScriptParseXML => "Script Parse XML",
            ProfilerCategory::ScriptPlannedNavigation => "Script Planned Navigation",
            ProfilerCategory::ScriptPortMessage => "Script Port Message",
            ProfilerCategory::ScriptResize => "Script Resize",
            ProfilerCategory::ScriptEvent => "Script Event",
            ProfilerCategory::ScriptUpdateReplacedElement => "Script Update Replaced Element",
            ProfilerCategory::ScriptSetScrollState => "Script Set Scroll State",
            ProfilerCategory::ScriptSetViewport => "Script Set Viewport",
            ProfilerCategory::ScriptTimerEvent => "Script Timer Event",
            ProfilerCategory::ScriptStylesheetLoad => "Script Stylesheet Load",
            ProfilerCategory::ScriptWebSocketEvent => "Script Web Socket Event",
            ProfilerCategory::ScriptWorkerEvent => "Script Worker Event",
            ProfilerCategory::ScriptServiceWorkerEvent => "Script Service Worker Event",
            ProfilerCategory::ScriptEnterFullscreen => "Script Enter Fullscreen",
            ProfilerCategory::ScriptExitFullscreen => "Script Exit Fullscreen",
            ProfilerCategory::ScriptWebVREvent => "Script WebVR Event",
            ProfilerCategory::ScriptWorkletEvent => "Script Worklet Event",
            ProfilerCategory::ScriptPerformanceEvent => "Script Performance Event",
            ProfilerCategory::ScriptWebGPUMsg => "Script WebGPU Message",
            ProfilerCategory::TimeToFirstPaint => "Time To First Paint",
            ProfilerCategory::TimeToFirstContentfulPaint => "Time To First Contentful Paint",
            ProfilerCategory::TimeToInteractive => "Time to Interactive",
            ProfilerCategory::IpcReceiver => "Blocked at IPC Receive",
            ProfilerCategory::IpcBytesReceiver => "Blocked at IPC Bytes Receive",
        };
        format!("{}{}", padding, name)
    }
}
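
/// Mapping from a (category, metadata) pair to the list of durations recorded
/// for that bucket.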
type ProfilerBuckets = BTreeMap<(ProfilerCategory, Option<TimerMetadata>), Vec<Duration>>;

/// Back end of the profiler that handles data aggregation and performance metrics.
pub struct Profiler {
    pub port: IpcReceiver<ProfilerMsg>,
    buckets: ProfilerBuckets,
    output: Option<OutputOptions>,
    pub last_msg: Option<ProfilerMsg>,
    trace: Option<TraceDump>,
    blocked_layout_queries: HashMap<String, u32>,
}

impl Profiler {
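    /// Create a channel to the time profiler, spawning the profiler thread when an
    /// output option or a trace file path is provided, and a no-op message handler
    /// otherwise. A rough usage sketch (actual call sites in Servo may differ):
    ///
    /// ```ignore
    /// let time_profiler_chan = Profiler::create(&None, None);
    /// ```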
    pub fn create(output: &Option<OutputOptions>, file_path: Option<String>) -> ProfilerChan {
        let (chan, port) = ipc::channel().unwrap();
        match *output {
            Some(ref option) => {
                // Spawn the time profiler thread
                let outputoption = option.clone();
                thread::Builder::new()
                    .name("TimeProfiler".to_owned())
                    .spawn(move || {
                        let trace = file_path.as_ref().and_then(|p| TraceDump::new(p).ok());
                        let mut profiler = Profiler::new(port, trace, Some(outputoption));
                        profiler.start();
                    })
                    .expect("Thread spawning failed");
                // decide if we need to spawn the timer thread
                match *option {
                    OutputOptions::FileName(_) => { /* no timer thread needed */ },
                    OutputOptions::Stdout(period) => {
                        // Spawn a timer thread
                        let chan = chan.clone();
                        thread::Builder::new()
                            .name("TimeProfTimer".to_owned())
                            .spawn(move || loop {
                                thread::sleep(std::time::Duration::from_secs_f64(period));
                                if chan.send(ProfilerMsg::Print).is_err() {
                                    break;
                                }
                            })
                            .expect("Thread spawning failed");
                    },
                }
            },
            None => {
                // this is when the -p option hasn't been specified
                if file_path.is_some() {
                    // Spawn the time profiler
                    thread::Builder::new()
                        .name("TimeProfiler".to_owned())
                        .spawn(move || {
                            let trace = file_path.as_ref().and_then(|p| TraceDump::new(p).ok());
                            let mut profiler = Profiler::new(port, trace, None);
                            profiler.start();
                        })
                        .expect("Thread spawning failed");
                } else {
                    // No-op to handle messages when the time profiler is not printing:
                    thread::Builder::new()
                        .name("TimeProfiler".to_owned())
                        .spawn(move || loop {
                            match port.recv() {
                                Err(_) => break,
                                Ok(ProfilerMsg::Exit(chan)) => {
                                    let _ = chan.send(());
                                    break;
                                },
                                _ => {},
                            }
                        })
                        .expect("Thread spawning failed");
                }
            },
        }

        ProfilerChan(chan)
    }
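
    /// Construct a `Profiler` from its message port, an optional trace dump, and an
    /// optional output option.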
    pub fn new(
        port: IpcReceiver<ProfilerMsg>,
        trace: Option<TraceDump>,
        output: Option<OutputOptions>,
    ) -> Profiler {
        Profiler {
            port,
            buckets: BTreeMap::new(),
            output,
            last_msg: None,
            trace,
            blocked_layout_queries: HashMap::new(),
        }
    }
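
    /// Run the profiler event loop, handling messages until `Exit` is received or the
    /// channel is closed.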
    pub fn start(&mut self) {
        while let Ok(msg) = self.port.recv() {
            if !self.handle_msg(msg) {
                break;
            }
        }
    }
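
    /// Record a duration under the given (category, metadata) key, creating the bucket
    /// if it does not exist yet.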
    fn find_or_insert(
        &mut self,
        k: (ProfilerCategory, Option<TimerMetadata>),
        duration: Duration,
    ) {
        self.buckets.entry(k).or_default().push(duration);
    }
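
    /// Handle a single profiler message. Returns `false` when the profiler should shut
    /// down.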
    fn handle_msg(&mut self, msg: ProfilerMsg) -> bool {
        match msg.clone() {
            ProfilerMsg::Time(category_and_metadata, (start_time, end_time)) => {
                if let Some(ref mut trace) = self.trace {
                    trace.write_one(&category_and_metadata, start_time, end_time);
                }
                self.find_or_insert(category_and_metadata, end_time - start_time);
            },
            ProfilerMsg::Print => {
                if let Some(ProfilerMsg::Time(..)) = self.last_msg {
                    // only print if more data has arrived since the last printout
                    self.print_buckets();
                }
            },
            ProfilerMsg::Get(k, sender) => {
                let vec_option = self.buckets.get(&k);
                match vec_option {
                    Some(vec_entry) => sender
                        .send(ProfilerData::Record(vec_entry.to_vec()))
                        .unwrap(),
                    None => sender.send(ProfilerData::NoRecords).unwrap(),
                };
            },
            ProfilerMsg::BlockedLayoutQuery(url) => {
                *self.blocked_layout_queries.entry(url).or_insert(0) += 1;
            },
            ProfilerMsg::Exit(chan) => {
                self.print_buckets();
                let _ = chan.send(());
                return false;
            },
        };
        self.last_msg = Some(msg);
        true
    }

    /// Get tuple (mean, median, min, max) for profiler statistics.
    pub fn get_statistics(data: &[Duration]) -> (Duration, Duration, Duration, Duration) {
        debug_assert!(
            data.windows(2).all(|window| window[0] <= window[1]),
            "Data must be sorted"
        );

        let data_len = data.len();
        debug_assert!(data_len > 0);
        let (mean, median, min, max) = (
            data.iter().sum::<Duration>() / data_len as u32,
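            // Note: for an even number of samples this takes the upper of the two
            // middle values rather than averaging them.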
            data[data_len / 2],
            data[0],
            data[data_len - 1],
        );
        (mean, median, min, max)
    }
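
    /// Print the aggregated timings to the configured output: tab-separated rows for
    /// file output, aligned columns on stdout, or nothing when no output is set.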
    fn print_buckets(&mut self) {
        match self.output {
            Some(OutputOptions::FileName(ref filename)) => {
                let path = Path::new(&filename);
                let mut file = match File::create(path) {
                    Err(e) => panic!("Couldn't create {}: {}", path.display(), e),
                    Ok(file) => file,
                };
                writeln!(
                    file,
                    "_category_\t_incremental?_\t_iframe?_\t_url_\t_mean (ms)_\t\
                     _median (ms)_\t_min (ms)_\t_max (ms)_\t_events_"
                )
                .unwrap();
                for ((category, meta), ref mut data) in &mut self.buckets {
                    data.sort_by(|a, b| a.partial_cmp(b).expect("No NaN values in profiles"));
                    let data_len = data.len();
                    if data_len > 0 {
                        let (mean, median, min, max) = Self::get_statistics(data);
                        writeln!(
                            file,
                            "{}\t{}\t{:15.4}\t{:15.4}\t{:15.4}\t{:15.4}\t{:15}",
                            category.format(&self.output),
                            meta.format(&self.output),
                            mean.as_seconds_f64() * 1000.,
                            median.as_seconds_f64() * 1000.,
                            min.as_seconds_f64() * 1000.,
                            max.as_seconds_f64() * 1000.,
                            data_len
                        )
                        .unwrap();
                    }
                }
writeln!(file, "_url\t_blocked layout queries_").unwrap();
                for (url, count) in &self.blocked_layout_queries {
                    writeln!(file, "{}\t{}", url, count).unwrap();
                }
            },
            Some(OutputOptions::Stdout(_)) => {
                let stdout = io::stdout();
                let mut lock = stdout.lock();
                writeln!(
                    &mut lock,
                    "{:35} {:14} {:9} {:30} {:15} {:15} {:-15} {:-15} {:-15}",
                    "_category_",
                    "_incremental?_",
                    "_iframe?_",
                    " _url_",
                    " _mean (ms)_",
                    " _median (ms)_",
                    " _min (ms)_",
                    " _max (ms)_",
                    " _events_"
                )
                .unwrap();
                for ((category, meta), ref mut data) in &mut self.buckets {
                    data.sort_by(|a, b| a.partial_cmp(b).expect("No NaN values in profiles"));
                    let data_len = data.len();
                    if data_len > 0 {
                        let (mean, median, min, max) = Self::get_statistics(data);
                        writeln!(
                            &mut lock,
                            "{:-35}{} {:15.4} {:15.4} {:15.4} {:15.4} {:15}",
                            category.format(&self.output),
                            meta.format(&self.output),
                            mean.as_seconds_f64() * 1000.,
                            median.as_seconds_f64() * 1000.,
                            min.as_seconds_f64() * 1000.,
                            max.as_seconds_f64() * 1000.,
                            data_len
                        )
                        .unwrap();
                    }
                }
                writeln!(&mut lock).unwrap();
                writeln!(&mut lock, "_url_\t_blocked layout queries_").unwrap();
                for (url, count) in &self.blocked_layout_queries {
                    writeln!(&mut lock, "{}\t{}", url, count).unwrap();
                }
                writeln!(&mut lock).unwrap();
            },
            None => { /* Do nothing if no output option has been set */ },
        };
    }
}