Mirror of https://github.com/servo/servo.git, synced 2025-08-03 20:50:07 +01:00
Remove support for heartbeats profiling

This commit is contained in:
  parent 0abe90647f
  commit b282bd3a44

14 changed files with 7 additions and 1157 deletions
Cargo.lock (generated): 24 changes

@@ -2393,27 +2393,6 @@ dependencies = [
  "http",
 ]
 
-[[package]]
-name = "heartbeats-simple"
-version = "0.4.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "78bef7dfbf1dbfa968260a57d59f5bb9078625593c35d5a4a3dc78dd4f86e28b"
-dependencies = [
- "heartbeats-simple-sys",
- "libc",
-]
-
-[[package]]
-name = "heartbeats-simple-sys"
-version = "0.4.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "29f06e482fdc3f29015d288ad63bc5e855ffca0830866550a08b29e55924c935"
-dependencies = [
- "cmake",
- "libc",
- "pkg-config",
-]
-
 [[package]]
 name = "heck"
 version = "0.3.1"

@@ -4169,10 +4148,8 @@ checksum = "9a1574a51c3fd37b26d2c0032b649d08a7d51d4cca9c41bbc5bf7118fa4509d0"
 name = "profile"
 version = "0.0.1"
 dependencies = [
- "heartbeats-simple",
  "ipc-channel",
  "libc",
- "log",
  "profile_traits",
  "regex",
  "serde",

@@ -4180,7 +4157,6 @@ dependencies = [
  "servo_allocator",
  "servo_config",
  "task_info",
- "time",
 ]
 
 [[package]]
@@ -109,9 +109,6 @@ pub struct Opts {
     /// Periodically print out on which events script threads spend their processing time.
     pub profile_script_events: bool,
 
-    /// Enable all heartbeats for profiling.
-    pub profile_heartbeats: bool,
-
     /// `None` to disable debugger or `Some` with a port number to start a server to listen to
     /// remote Firefox debugger connections.
     pub debugger_port: Option<u16>,

@@ -264,9 +261,6 @@ pub struct DebugOptions {
     /// Profile which events script threads spend their time on.
     pub profile_script_events: bool,
 
-    /// Enable all heartbeats for profiling.
-    pub profile_heartbeats: bool,
-
     /// Paint borders along fragment boundaries.
     pub show_fragment_borders: bool,
 
@@ -333,7 +327,6 @@ impl DebugOptions {
             "dump-display-list-json" => self.dump_display_list_json = true,
             "relayout-event" => self.relayout_event = true,
             "profile-script-events" => self.profile_script_events = true,
-            "profile-heartbeats" => self.profile_heartbeats = true,
             "show-fragment-borders" => self.show_fragment_borders = true,
             "show-parallel-layout" => self.show_parallel_layout = true,
             "trace-layout" => self.trace_layout = true,

@@ -397,10 +390,6 @@ fn print_debug_usage(app: &str) -> ! {
         "profile-script-events",
         "Enable profiling of script-related events.",
     );
-    print_option(
-        "profile-heartbeats",
-        "Enable heartbeats for all thread categories.",
-    );
     print_option(
         "show-fragment-borders",
         "Paint borders along fragment boundaries.",

@@ -509,7 +498,6 @@ pub fn default_opts() -> Opts {
         dump_display_list_json: false,
         relayout_event: false,
         profile_script_events: false,
-        profile_heartbeats: false,
         disable_share_style_cache: false,
         style_sharing_stats: false,
         convert_mouse_to_touch: false,

@@ -896,7 +884,6 @@ pub fn from_cmdline_args(mut opts: Options, args: &[String]) -> ArgumentParsingR
         hard_fail: opt_match.opt_present("f") && !opt_match.opt_present("F"),
         bubble_inline_sizes_separately: bubble_inline_sizes_separately,
         profile_script_events: debug_options.profile_script_events,
-        profile_heartbeats: debug_options.profile_heartbeats,
         trace_layout: debug_options.trace_layout,
         debugger_port: debugger_port,
         devtools_port: devtools_port,
@@ -11,14 +11,11 @@ name = "profile"
 path = "lib.rs"
 
 [dependencies]
-heartbeats-simple = "0.4"
 ipc-channel = "0.14"
-log = "0.4"
 profile_traits = { path = "../profile_traits" }
 serde = "1.0"
 serde_json = "1.0"
 servo_config = { path = "../config" }
-time_crate = { package = "time", version = "0.1.12" }
 
 [target.'cfg(target_os = "macos")'.dependencies]
 task_info = { path = "../../support/rust-task_info" }
@@ -1,64 +0,0 @@
-This crate hosts the Servo profiler.
-Its APIs can be found in the `profile_traits` crate.
-
-
-# Heartbeats
-
-Heartbeats allow fine-grained timing and energy profiling of Servo tasks specified in the `ProfilerCategory` enum (see the `profile_traits::time` module).
-When enabled, a heartbeat is issued for each profiler category event.
-They also compute the average performance and power for three levels of granularity:
-
-* Global: the entire runtime.
-* Window: the category's last `N` events, where `N` is the size of a sliding window.
-* Instant: the category's most recent event.
-
-## Enabling
-
-Heartbeats are enabled for categories by setting the proper environment variables prior to launching Servo.
-
-For each desired category, set the `SERVO_HEARTBEAT_ENABLE_MyCategory` environment variable to any value (an empty string will do), where `MyCategory` is the `ProfilerCategory` name exactly as it appears in the enum.
-For example:
-
-```
-SERVO_HEARTBEAT_ENABLE_LayoutPerform=""
-```
-
-Then set the `SERVO_HEARTBEAT_LOG_MyCategory` environment variable so Servo knows where to write the results.
-For example:
-
-```
-SERVO_HEARTBEAT_LOG_LayoutPerform="/tmp/heartbeat-LayoutPerform.log"
-```
-
-The target directory must already exist and be writable.
-Results are written to the log file every `N` heartbeats and when the profiler shuts down.
-
-You can optionally specify the size of the sliding window by setting `SERVO_HEARTBEAT_WINDOW_MyCategory` to a positive integer value.
-The default value is `20`.
-For example:
-
-```
-SERVO_HEARTBEAT_WINDOW_LayoutPerform=20
-```
-
-The window size is also how many heartbeats will be stored in memory.
-
-## Log Files
-
-Log files are whitespace-delimited.
-
-`HB` is the heartbeat number, ordered by when the heartbeats are registered (not necessarily by start or end time!).
-The count starts at `0`.
-
-`Tag` is a client-specified identifier for each heartbeat.
-Servo does not use this, so the value is always `0`.
-
-`Work` is the amount of work completed for a particular heartbeat and is used in computing performance.
-At this time, Servo simply specifies `1` unit of work for each heartbeat.
-
-`Time` and `Energy` have `Start` and `End` values as captured during runtime.
-Time is measured in nanoseconds and energy is measured in microjoules.
-
-`Work`, `Time`, and `Energy` also have `Global` and `Window` values, which are summed over the entire runtime and the sliding window period, respectively.
-
-`Perf` (performance) and `Pwr` (power) have `Global`, `Window`, and `Instant` values as described above.
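Aside (not part of this commit): the removed README above pins down the log units, with `Time` in nanoseconds and `Energy` in microjoules, so power can be recovered from any record as Δenergy_uJ × 1000 / Δtime_ns (µJ/ns = 10⁻⁶ J / 10⁻⁹ s = 1000 W). A minimal Rust sketch of that arithmetic; the `HeartbeatRecord` type is hypothetical and only mirrors the `Start`/`End` columns described above:

```rust
/// One heartbeat record as described in the removed README:
/// times in nanoseconds, energies in microjoules.
/// (Hypothetical type for illustration; not part of Servo.)
struct HeartbeatRecord {
    start_time_ns: u64,
    end_time_ns: u64,
    start_energy_uj: u64,
    end_energy_uj: u64,
}

/// Instant power in watts: microjoules per nanosecond scaled by 1000,
/// the same computation the removed plotting script performs in its
/// `hb_energy_times_to_power` helper.
fn instant_power_watts(r: &HeartbeatRecord) -> f64 {
    let d_energy_uj = (r.end_energy_uj - r.start_energy_uj) as f64;
    let d_time_ns = (r.end_time_ns - r.start_time_ns) as f64;
    d_energy_uj * 1000.0 / d_time_ns
}

fn main() {
    // 50 ms of work that cost 600,000 uJ (0.6 J) comes out to 12 W.
    let record = HeartbeatRecord {
        start_time_ns: 0,
        end_time_ns: 50_000_000,
        start_energy_uj: 0,
        end_energy_uj: 600_000,
    };
    println!("{:.1} W", instant_power_watts(&record));
}
```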
@@ -1,181 +0,0 @@
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at https://mozilla.org/MPL/2.0/. */
-
-use self::synchronized_heartbeat::{heartbeat_window_callback, lock_and_work};
-use heartbeats_simple::HeartbeatPow as Heartbeat;
-use profile_traits::time::ProfilerCategory;
-use std::collections::HashMap;
-use std::env::var_os;
-use std::fs::File;
-use std::path::Path;
-
-/// Initialize heartbeats
-pub fn init(profile_heartbeats: bool) {
-    lock_and_work(|hbs_opt| {
-        if hbs_opt.is_none() {
-            let mut hbs: Box<HashMap<ProfilerCategory, Heartbeat>> = Box::new(HashMap::new());
-            maybe_create_heartbeat(
-                &mut hbs,
-                ProfilerCategory::ApplicationHeartbeat,
-                profile_heartbeats,
-            );
-            *hbs_opt = Some(Box::into_raw(hbs))
-        }
-    });
-}
-
-/// Log remaining buffer data and cleanup heartbeats
-pub fn cleanup() {
-    let hbs_opt_box: Option<Box<HashMap<ProfilerCategory, Heartbeat>>> = lock_and_work(|hbs_opt| {
-        hbs_opt
-            .take()
-            .map(|hbs_ptr| unsafe { Box::from_raw(hbs_ptr) })
-    });
-    if let Some(mut hbs) = hbs_opt_box {
-        for (_, v) in hbs.iter_mut() {
-            // log any remaining heartbeat records before dropping
-            log_heartbeat_records(v);
-        }
-        hbs.clear();
-    }
-}
-
-/// Check if a heartbeat exists for the given category
-pub fn is_heartbeat_enabled(category: &ProfilerCategory, profile_heartbeats: bool) -> bool {
-    let is_enabled = lock_and_work(|hbs_opt| {
-        hbs_opt.map_or(false, |hbs_ptr| unsafe {
-            (*hbs_ptr).contains_key(category)
-        })
-    });
-    is_enabled || is_create_heartbeat(category, profile_heartbeats)
-}
-
-/// Issue a heartbeat (if one exists) for the given category
-pub fn maybe_heartbeat(
-    category: &ProfilerCategory,
-    start_time: u64,
-    end_time: u64,
-    start_energy: u64,
-    end_energy: u64,
-    profile_heartbeats: bool,
-) {
-    lock_and_work(|hbs_opt| {
-        if let Some(hbs_ptr) = *hbs_opt {
-            unsafe {
-                if !(*hbs_ptr).contains_key(category) {
-                    maybe_create_heartbeat(&mut (*hbs_ptr), category.clone(), profile_heartbeats);
-                }
-                if let Some(h) = (*hbs_ptr).get_mut(category) {
-                    (*h).heartbeat(0, 1, start_time, end_time, start_energy, end_energy);
-                }
-            }
-        }
-    });
-}
-
-// TODO(cimes): Android doesn't really do environment variables. Need a better way to configure dynamically.
-
-fn is_create_heartbeat(category: &ProfilerCategory, profile_heartbeats: bool) -> bool {
-    profile_heartbeats || var_os(format!("SERVO_HEARTBEAT_ENABLE_{:?}", category)).is_some()
-}
-
-fn open_heartbeat_log<P: AsRef<Path>>(name: P) -> Option<File> {
-    match File::create(name) {
-        Ok(f) => Some(f),
-        Err(e) => {
-            warn!("Failed to open heartbeat log: {}", e);
-            None
-        },
-    }
-}
-
-#[cfg(target_os = "android")]
-fn get_heartbeat_log(category: &ProfilerCategory) -> Option<File> {
-    open_heartbeat_log(format!("/sdcard/servo/heartbeat-{:?}.log", category))
-}
-
-#[cfg(not(target_os = "android"))]
-fn get_heartbeat_log(category: &ProfilerCategory) -> Option<File> {
-    var_os(format!("SERVO_HEARTBEAT_LOG_{:?}", category)).and_then(|name| open_heartbeat_log(&name))
-}
-
-fn get_heartbeat_window_size(category: &ProfilerCategory) -> usize {
-    const WINDOW_SIZE_DEFAULT: usize = 1;
-    match var_os(format!("SERVO_HEARTBEAT_WINDOW_{:?}", category)) {
-        Some(w) => match w.into_string() {
-            Ok(s) => s.parse::<usize>().unwrap_or(WINDOW_SIZE_DEFAULT),
-            _ => WINDOW_SIZE_DEFAULT,
-        },
-        None => WINDOW_SIZE_DEFAULT,
-    }
-}
-
-/// Possibly create a heartbeat
-fn maybe_create_heartbeat(
-    hbs: &mut HashMap<ProfilerCategory, Heartbeat>,
-    category: ProfilerCategory,
-    profile_heartbeats: bool,
-) {
-    if is_create_heartbeat(&category, profile_heartbeats) {
-        // get optional log file
-        let logfile: Option<File> = get_heartbeat_log(&category);
-        // window size
-        let window_size: usize = get_heartbeat_window_size(&category);
-        // create the heartbeat
-        match Heartbeat::new(window_size, Some(heartbeat_window_callback), logfile) {
-            Ok(hb) => {
-                debug!("Created heartbeat for {:?}", category);
-                hbs.insert(category, hb);
-            },
-            Err(e) => warn!("Failed to create heartbeat for {:?}: {}", category, e),
-        }
-    };
-}
-
-/// Log heartbeat records up to the buffer index
-fn log_heartbeat_records(hb: &mut Heartbeat) {
-    match hb.log_to_buffer_index() {
-        Ok(_) => (),
-        Err(e) => warn!("Failed to write heartbeat log: {}", e),
-    }
-}
-
-mod synchronized_heartbeat {
-    use super::log_heartbeat_records;
-    use heartbeats_simple::HeartbeatPow as Heartbeat;
-    use heartbeats_simple::HeartbeatPowContext as HeartbeatContext;
-    use profile_traits::time::ProfilerCategory;
-    use std::collections::HashMap;
-    use std::sync::atomic::{AtomicBool, Ordering};
-
-    static mut HBS: Option<*mut HashMap<ProfilerCategory, Heartbeat>> = None;
-
-    // unfortunately can't encompass the actual hashmap in a Mutex (Heartbeat isn't Send/Sync), so we'll use a spinlock
-    static HBS_SPINLOCK: AtomicBool = AtomicBool::new(false);
-
-    pub fn lock_and_work<F, R>(work: F) -> R
-    where
-        F: FnOnce(&mut Option<*mut HashMap<ProfilerCategory, Heartbeat>>) -> R,
-    {
-        while HBS_SPINLOCK.compare_and_swap(false, true, Ordering::SeqCst) {}
-        let result = unsafe { work(&mut HBS) };
-        HBS_SPINLOCK.store(false, Ordering::SeqCst);
-        result
-    }
-
-    /// Callback function used to log the window buffer.
-    /// When this is called from native C, the heartbeat is safely locked internally and the global lock is held.
-    /// If calling from this file, you must already hold the global lock!
-    pub extern "C" fn heartbeat_window_callback(hb: *const HeartbeatContext) {
-        unsafe {
-            if let Some(hbs_ptr) = HBS {
-                for (_, v) in (*hbs_ptr).iter_mut() {
-                    if &v.hb as *const HeartbeatContext == hb {
-                        log_heartbeat_records(v);
-                    }
-                }
-            }
-        }
-    }
-}
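Aside (not part of this commit): the deleted `synchronized_heartbeat` module above guards a raw `HashMap` pointer with an `AtomicBool` spinlock because `Heartbeat` is neither `Send` nor `Sync` and so cannot live inside a `Mutex`. `compare_and_swap` has since been deprecated in Rust; a self-contained sketch of the same lock-and-work shape written with `compare_exchange` (illustration only, not Servo code):

```rust
use std::sync::atomic::{AtomicBool, Ordering};

// Spinlock flag, analogous to HBS_SPINLOCK in the removed module.
static LOCK: AtomicBool = AtomicBool::new(false);

/// Run `work` while holding the spinlock, mirroring `lock_and_work` above
/// but using `compare_exchange` instead of the deprecated `compare_and_swap`.
fn with_lock<R>(work: impl FnOnce() -> R) -> R {
    // Spin until we flip the flag from `false` to `true`.
    while LOCK
        .compare_exchange(false, true, Ordering::SeqCst, Ordering::SeqCst)
        .is_err()
    {
        std::hint::spin_loop();
    }
    let result = work();
    LOCK.store(false, Ordering::SeqCst);
    result
}

fn main() {
    let answer = with_lock(|| 40 + 2);
    println!("{}", answer);
}
```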
@@ -4,15 +4,11 @@
 
 #![deny(unsafe_code)]
 
-#[macro_use]
-extern crate log;
 #[macro_use]
 extern crate profile_traits;
 #[macro_use]
 extern crate serde;
 
-#[allow(unsafe_code)]
-mod heartbeats;
 #[allow(unsafe_code)]
 pub mod mem;
 pub mod time;
@@ -4,7 +4,6 @@
 
 //! Timing functions.
 
-use crate::heartbeats;
 use crate::trace_dump::TraceDump;
 use ipc_channel::ipc::{self, IpcReceiver};
 use profile_traits::time::{

@@ -19,7 +18,6 @@ use std::io::{self, Write};
 use std::path::Path;
 use std::time::Duration;
 use std::{f64, thread, u32, u64};
-use time_crate::precise_time_ns;
 
 pub trait Formattable {
     fn format(&self, output: &Option<OutputOptions>) -> String;

@@ -151,7 +149,6 @@ impl Formattable for ProfilerCategory {
             ProfilerCategory::TimeToInteractive => "Time to Interactive",
             ProfilerCategory::IpcReceiver => "Blocked at IPC Receive",
             ProfilerCategory::IpcBytesReceiver => "Blocked at IPC Bytes Receive",
-            ProfilerCategory::ApplicationHeartbeat => "Application Heartbeat",
         };
         format!("{}{}", padding, name)
     }

@@ -167,15 +164,10 @@ pub struct Profiler {
     pub last_msg: Option<ProfilerMsg>,
     trace: Option<TraceDump>,
     blocked_layout_queries: HashMap<String, u32>,
-    profile_heartbeats: bool,
 }
 
 impl Profiler {
-    pub fn create(
-        output: &Option<OutputOptions>,
-        file_path: Option<String>,
-        profile_heartbeats: bool,
-    ) -> ProfilerChan {
+    pub fn create(output: &Option<OutputOptions>, file_path: Option<String>) -> ProfilerChan {
         let (chan, port) = ipc::channel().unwrap();
         match *output {
             Some(ref option) => {

@@ -185,8 +177,7 @@ impl Profiler {
                     .name("Time profiler".to_owned())
                     .spawn(move || {
                         let trace = file_path.as_ref().and_then(|p| TraceDump::new(p).ok());
-                        let mut profiler =
-                            Profiler::new(port, trace, Some(outputoption), profile_heartbeats);
+                        let mut profiler = Profiler::new(port, trace, Some(outputoption));
                         profiler.start();
                     })
                     .expect("Thread spawning failed");

@@ -216,7 +207,7 @@ impl Profiler {
                     .name("Time profiler".to_owned())
                     .spawn(move || {
                         let trace = file_path.as_ref().and_then(|p| TraceDump::new(p).ok());
-                        let mut profiler = Profiler::new(port, trace, None, profile_heartbeats);
+                        let mut profiler = Profiler::new(port, trace, None);
                         profiler.start();
                     })
                     .expect("Thread spawning failed");

@@ -239,60 +230,13 @@ impl Profiler {
             },
         }
 
-        heartbeats::init(profile_heartbeats);
-        let profiler_chan = ProfilerChan(chan);
-
-        // only spawn the application-level profiler thread if its heartbeat is enabled
-        let run_ap_thread = move || {
-            heartbeats::is_heartbeat_enabled(
-                &ProfilerCategory::ApplicationHeartbeat,
-                profile_heartbeats,
-            )
-        };
-        if run_ap_thread() {
-            let profiler_chan = profiler_chan.clone();
-            // min of 1 heartbeat/sec, max of 20 should provide accurate enough readings
-            // waking up more frequently allows the thread to end faster on exit
-            const SLEEP_MS: u32 = 10;
-            let interval_ms = 200;
-            let loop_count: u32 = (interval_ms as f32 / SLEEP_MS as f32).ceil() as u32;
-            thread::Builder::new()
-                .name("Application heartbeat profiler".to_owned())
-                .spawn(move || {
-                    let mut start_time = precise_time_ns();
-                    loop {
-                        for _ in 0..loop_count {
-                            if run_ap_thread() {
-                                thread::sleep(Duration::from_millis(SLEEP_MS as u64))
-                            } else {
-                                return;
-                            }
-                        }
-                        let end_time = precise_time_ns();
-                        // send using the inner channel
-                        // (using ProfilerChan.send() forces an unwrap
-                        // and sometimes panics for this background profiler)
-                        let ProfilerChan(ref c) = profiler_chan;
-                        if let Err(_) = c.send(ProfilerMsg::Time(
-                            (ProfilerCategory::ApplicationHeartbeat, None),
-                            (start_time, end_time),
-                        )) {
-                            return;
-                        }
-                        start_time = end_time;
-                    }
-                })
-                .expect("Thread spawning failed");
-        }
-
-        profiler_chan
+        ProfilerChan(chan)
     }
 
     pub fn new(
         port: IpcReceiver<ProfilerMsg>,
         trace: Option<TraceDump>,
         output: Option<OutputOptions>,
-        profile_heartbeats: bool,
     ) -> Profiler {
         Profiler {
             port: port,

@@ -301,7 +245,6 @@ impl Profiler {
             last_msg: None,
             trace: trace,
             blocked_layout_queries: HashMap::new(),
-            profile_heartbeats,
         }
     }
 
@@ -320,7 +263,6 @@ impl Profiler {
     fn handle_msg(&mut self, msg: ProfilerMsg) -> bool {
         match msg.clone() {
             ProfilerMsg::Time(k, t) => {
-                heartbeats::maybe_heartbeat(&k.0, t.0, t.1, 0, 0, self.profile_heartbeats);
                 if let Some(ref mut trace) = self.trace {
                     trace.write_one(&k, t);
                 }

@@ -346,7 +288,6 @@ impl Profiler {
                 *self.blocked_layout_queries.entry(url).or_insert(0) += 1;
             },
             ProfilerMsg::Exit(chan) => {
-                heartbeats::cleanup();
                 self.print_buckets();
                 let _ = chan.send(());
                 return false;
@@ -110,7 +110,6 @@ pub enum ProfilerCategory {
     TimeToInteractive = 0x82,
     IpcReceiver = 0x83,
     IpcBytesReceiver = 0x84,
-    ApplicationHeartbeat = 0x90,
 }
 
 #[derive(Clone, Debug, Deserialize, Eq, Ord, PartialEq, PartialOrd, Serialize)]
@@ -386,7 +386,6 @@ where
         let time_profiler_chan = profile_time::Profiler::create(
             &opts.time_profiling,
             opts.time_profiler_trace_path.clone(),
-            opts.profile_heartbeats,
         );
         let mem_profiler_chan = profile_mem::Profiler::create(opts.mem_profiler_period);
 
@@ -87,8 +87,6 @@
 * tests
   * dromaeo
     * Harness for automatically running the Dromaeo testsuite.
-  * heartbeats
-    * Tools for periodic measurement of energy consumption.
   * html
     * Manual tests and experiments.
   * jquery
@@ -1,266 +0,0 @@
-#!/usr/bin/env python
-
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at https://mozilla.org/MPL/2.0/.
-
-from __future__ import print_function
-
-import sys
-import os
-from os import path
-import time
-import datetime
-import argparse
-import platform
-import subprocess
-import six
-
-TOP_DIR = path.join("..", "..")
-GUARD_TIME = 10
-HEARTBEAT_DEFAULT_WINDOW_SIZE = 20
-# Use larger window sizes to reduce or prevent writing log files until benchmark completion
-# (profiler name, window size)
-# These categories need to be kept aligned with ProfilerCategory in components/profile_traits/time.rs
-HEARTBEAT_PROFILER_CATEGORIES = [
-    ("Compositing", HEARTBEAT_DEFAULT_WINDOW_SIZE),
-    ("LayoutPerform", HEARTBEAT_DEFAULT_WINDOW_SIZE),
-    ("LayoutStyleRecalc", HEARTBEAT_DEFAULT_WINDOW_SIZE),
-    # ("LayoutTextShaping", HEARTBEAT_DEFAULT_WINDOW_SIZE),
-    ("LayoutRestyleDamagePropagation", HEARTBEAT_DEFAULT_WINDOW_SIZE),
-    ("LayoutNonIncrementalReset", HEARTBEAT_DEFAULT_WINDOW_SIZE),
-    ("LayoutSelectorMatch", HEARTBEAT_DEFAULT_WINDOW_SIZE),
-    ("LayoutTreeBuilder", HEARTBEAT_DEFAULT_WINDOW_SIZE),
-    ("LayoutDamagePropagate", HEARTBEAT_DEFAULT_WINDOW_SIZE),
-    ("LayoutGeneratedContent", HEARTBEAT_DEFAULT_WINDOW_SIZE),
-    ("LayoutDisplayListSorting", HEARTBEAT_DEFAULT_WINDOW_SIZE),
-    ("LayoutFloatPlacementSpeculation", HEARTBEAT_DEFAULT_WINDOW_SIZE),
-    ("LayoutMain", HEARTBEAT_DEFAULT_WINDOW_SIZE),
-    ("LayoutStoreOverflow", HEARTBEAT_DEFAULT_WINDOW_SIZE),
-    ("LayoutParallelWarmup", HEARTBEAT_DEFAULT_WINDOW_SIZE),
-    ("LayoutDispListBuild", HEARTBEAT_DEFAULT_WINDOW_SIZE),
-    ("NetHTTPRequestResponse", HEARTBEAT_DEFAULT_WINDOW_SIZE),
-    ("PaintingPerTile", 50),
-    ("PaintingPrepBuff", HEARTBEAT_DEFAULT_WINDOW_SIZE),
-    ("Painting", HEARTBEAT_DEFAULT_WINDOW_SIZE),
-    ("ImageDecoding", HEARTBEAT_DEFAULT_WINDOW_SIZE),
-    ("ImageSaving", HEARTBEAT_DEFAULT_WINDOW_SIZE),
-    ("ScriptAttachLayout", HEARTBEAT_DEFAULT_WINDOW_SIZE),
-    ("ScriptConstellationMsg", HEARTBEAT_DEFAULT_WINDOW_SIZE),
-    ("ScriptDevtoolsMsg", HEARTBEAT_DEFAULT_WINDOW_SIZE),
-    ("ScriptDocumentEvent", HEARTBEAT_DEFAULT_WINDOW_SIZE),
-    ("ScriptDomEvent", HEARTBEAT_DEFAULT_WINDOW_SIZE),
-    ("ScriptEvaluate", HEARTBEAT_DEFAULT_WINDOW_SIZE),
-    ("ScriptEvent", HEARTBEAT_DEFAULT_WINDOW_SIZE),
-    ("ScriptFileRead", HEARTBEAT_DEFAULT_WINDOW_SIZE),
-    ("ScriptImageCacheMsg", HEARTBEAT_DEFAULT_WINDOW_SIZE),
-    ("ScriptInputEvent", HEARTBEAT_DEFAULT_WINDOW_SIZE),
-    ("ScriptNetworkEvent", 200),
-    ("ScriptParseHTML", HEARTBEAT_DEFAULT_WINDOW_SIZE),
-    ("ScriptPlannedNavigation", HEARTBEAT_DEFAULT_WINDOW_SIZE),
-    ("ScriptResize", HEARTBEAT_DEFAULT_WINDOW_SIZE),
-    ("ScriptSetScrollState", HEARTBEAT_DEFAULT_WINDOW_SIZE),
-    ("ScriptSetViewport", HEARTBEAT_DEFAULT_WINDOW_SIZE),
-    ("ScriptTimerEvent", HEARTBEAT_DEFAULT_WINDOW_SIZE),
-    ("ScriptStylesheetLoad", HEARTBEAT_DEFAULT_WINDOW_SIZE),
-    ("ScriptUpdateReplacedElement", HEARTBEAT_DEFAULT_WINDOW_SIZE),
-    ("ScriptWebSocketEvent", HEARTBEAT_DEFAULT_WINDOW_SIZE),
-    ("ScriptWorkerEvent", HEARTBEAT_DEFAULT_WINDOW_SIZE),
-    ("ScriptServiceWorkerEvent", HEARTBEAT_DEFAULT_WINDOW_SIZE),
-    ("ScriptParseXML", HEARTBEAT_DEFAULT_WINDOW_SIZE),
-    ("ScriptEnterFullscreen", HEARTBEAT_DEFAULT_WINDOW_SIZE),
-    ("ScriptExitFullscreen", HEARTBEAT_DEFAULT_WINDOW_SIZE),
-    ("ScriptWebVREvent", HEARTBEAT_DEFAULT_WINDOW_SIZE),
-    ("ApplicationHeartbeat", 100),
-]
-ENERGY_READER_BIN = "energymon-file-provider"
-ENERGY_READER_TEMP_OUTPUT = "energymon.txt"
-SUMMARY_OUTPUT = "summary.txt"
-
-
-def get_command(build_target, layout_thread_count, renderer, page, profile):
-    """Get the command to execute.
-    """
-    return path.join(TOP_DIR, "target", build_target, "servo") + \
-        " -p %d -o output.png -y %d %s -Z profile-script-events '%s'" % \
-        (profile, layout_thread_count, renderer, page)
-
-
-def set_app_environment(log_dir):
-    """Set environment variables to enable heartbeats.
-    """
-    prefix = "heartbeat-"
-    for (profiler, window) in HEARTBEAT_PROFILER_CATEGORIES:
-        os.environ["SERVO_HEARTBEAT_ENABLE_" + profiler] = ""
-        os.environ["SERVO_HEARTBEAT_LOG_" + profiler] = path.join(log_dir, prefix + profiler + ".log")
-        os.environ["SERVO_HEARTBEAT_WINDOW_" + profiler] = str(window)
-
-
-def start_energy_reader():
-    """Energy reader writes to a file that we will poll.
-    """
-    os.system(ENERGY_READER_BIN + " " + ENERGY_READER_TEMP_OUTPUT + "&")
-
-
-def stop_energy_reader():
-    """Stop the energy reader and remove its temp file.
-    """
-    os.system("pkill -x " + ENERGY_READER_BIN)
-    os.remove(ENERGY_READER_TEMP_OUTPUT)
-
-
-def read_energy():
-    """Poll the energy reader's temp file.
-    """
-    data = 0
-    with open(ENERGY_READER_TEMP_OUTPUT, "r") as em:
-        data = int(em.read().replace('\n', ''))
-    return data
-
-
-def git_rev_hash():
-    """Get the git revision hash.
-    """
-    return subprocess.check_output(['git', 'rev-parse', 'HEAD']).rstrip()
-
-
-def git_rev_hash_short():
-    """Get the git revision short hash.
-    """
-    return subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD']).rstrip()
-
-
-def execute(base_dir, build_target, renderer, page, profile, trial, layout_thread_count):
-    """Run a single execution.
-    """
-    log_dir = path.join(base_dir, "logs_l" + str(layout_thread_count),
-                        "trial_" + str(trial))
-    if os.path.exists(log_dir):
-        print("Log directory already exists: " + log_dir)
-        sys.exit(1)
-    os.makedirs(log_dir)
-
-    set_app_environment(log_dir)
-    cmd = get_command(build_target, layout_thread_count, renderer, page, profile)
-
-    # Execute
-    start_energy_reader()
-    print('sleep ' + str(GUARD_TIME))
-    time.sleep(GUARD_TIME)
-    time_start = time.time()
-    energy_start = read_energy()
-    print(cmd)
-    os.system(cmd)
-    energy_end = read_energy()
-    time_end = time.time()
-    stop_energy_reader()
-    print('sleep ' + str(GUARD_TIME))
-    time.sleep(GUARD_TIME)
-
-    uj = energy_end - energy_start
-    latency = time_end - time_start
-    watts = uj / 1000000.0 / latency
-    # Write a file that describes this execution
-    with open(path.join(log_dir, SUMMARY_OUTPUT), "w") as f:
-        f.write("Datetime (UTC): " + datetime.datetime.utcnow().isoformat())
-        f.write("\nPlatform: " + platform.platform())
-        f.write("\nGit hash: " + git_rev_hash())
-        f.write("\nGit short hash: " + git_rev_hash_short())
-        f.write("\nRelease: " + build_target)
-        f.write("\nLayout threads: " + str(layout_thread_count))
-        f.write("\nTrial: " + str(trial))
-        f.write("\nCommand: " + cmd)
-        f.write("\nTime (sec): " + str(latency))
-        f.write("\nEnergy (uJ): " + str(uj))
-        f.write("\nPower (W): " + str(watts))
-
-
-def characterize(build_target, base_dir, layout_threads_limits, renderer, page, profile, trials):
-    """Run all configurations and capture results.
-    """
-    for layout_thread_count in six.moves.xrange(layout_threads_limits[0], layout_threads_limits[1] + 1):
-        for trial in six.moves.xrange(1, trials + 1):
-            execute(base_dir, build_target, renderer, page, profile, trial, layout_thread_count)
-
-
-def main():
-    """For this script to be useful, the following conditions are needed:
-    - HEARTBEAT_PROFILER_CATEGORIES should be aligned with the profiler categories in the source code.
-    - The "energymon" project needs to be installed to the system (libraries and the "energymon" binary).
-    - The "default" energymon library will be used - make sure you choose one that is useful for your system setup
-      when installing energymon.
-    - Build servo in release mode with the "energy-profiling" feature enabled (this links with the energymon lib).
-    """
-    # Default max number of layout threads
-    max_layout_threads = 1
-    # Default benchmark
-    benchmark = path.join(TOP_DIR, "tests", "html", "perf-rainbow.html")
-    # Default renderer
-    renderer = ""
-    # Default output directory
-    output_dir = "heartbeat_logs"
-    # Default build target
-    build_target = "release"
-    # Default profile interval
-    profile = 60
-    # Default single argument
-    single = False
-    # Default number of trials
-    trials = 1
-
-    # Parsing the input of the script
-    parser = argparse.ArgumentParser(description="Characterize Servo timing and energy behavior")
-    parser.add_argument("-b", "--benchmark",
-                        default=benchmark,
-                        help="Gets the benchmark, for example \"-b http://www.example.com\"")
-    parser.add_argument("-d", "--debug",
-                        action='store_true',
-                        help="Use debug build instead of release build")
-    parser.add_argument("-w", "--webrender",
-                        action='store_true',
-                        help="Use webrender backend")
-    parser.add_argument("-l", "--max_layout_threads",
-                        help="Specify the maximum number of threads for layout, for example \"-l 5\"")
-    parser.add_argument("-o", "--output",
-                        help="Specify the log output directory, for example \"-o heartbeat_logs\"")
-    parser.add_argument("-p", "--profile",
-                        default=60,
-                        help="Profiler output interval, for example \"-p 60\"")
-    parser.add_argument("-s", "--single",
-                        action='store_true',
-                        help="Just run a single trial of the config provided, for example \"-s\"")
-    parser.add_argument("-t", "--trials",
-                        default=1,
-                        type=int,
-                        help="Number of trials to run for each configuration, for example \"-t 1\"")
-
-    args = parser.parse_args()
-    if args.benchmark:
-        benchmark = args.benchmark
-    if args.debug:
-        build_target = "debug"
-    if args.webrender:
-        renderer = "-w"
-    if args.max_layout_threads:
-        max_layout_threads = int(args.max_layout_threads)
-    if args.output:
-        output_dir = args.output
-    if args.profile:
-        profile = args.profile
-    if args.single:
-        single = True
-    if args.trials:
-        trials = args.trials
-
-    if os.path.exists(output_dir):
-        print("Output directory already exists: " + output_dir)
-        sys.exit(1)
-    os.makedirs(output_dir)
-
-    if single:
-        execute(output_dir, build_target, renderer, benchmark, profile, trials, max_layout_threads)
-    else:
-        characterize(build_target, output_dir, (1, max_layout_threads), renderer, benchmark, profile, trials)
-
-
-if __name__ == "__main__":
-    main()
@@ -1,121 +0,0 @@
-#!/usr/bin/env python
-
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at https://mozilla.org/MPL/2.0/.
-
-from __future__ import print_function
-
-import sys
-import os
-from os import path
-import time
-import datetime
-import argparse
-import subprocess
-
-TOP_DIR = path.join("..", "..")
-GUARD_TIME = 20
-SUMMARY_OUTPUT = "summary.txt"
-
-
-def get_command(layout_thread_count, renderer, page, profile):
-    """Get the command to execute.
-    """
-    return path.join(TOP_DIR, "mach") + " run --android" + \
-        " -p %d -o /sdcard/servo/output.png -y %d %s -Z profile-script-events,profile-heartbeats '%s'" % \
-        (profile, layout_thread_count, renderer, page)
-
-
-def git_rev_hash():
-    """Get the git revision hash.
-    """
-    return subprocess.check_output(['git', 'rev-parse', 'HEAD']).rstrip()
-
-
-def git_rev_hash_short():
-    """Get the git revision short hash.
-    """
-    return subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD']).rstrip()
-
-
-def execute(base_dir, renderer, page, profile, trial, layout_thread_count):
-    """Run a single execution.
-    """
-    log_dir = path.join(base_dir, "logs_l" + str(layout_thread_count),
-                        "trial_" + str(trial))
-    if os.path.exists(log_dir):
-        print("Log directory already exists: " + log_dir)
-        sys.exit(1)
-    os.makedirs(log_dir)
-
-    # Execute
-    cmd = get_command(layout_thread_count, renderer, page, profile)
-    print(cmd)
-    os.system(cmd)
-    print('sleep ' + str(GUARD_TIME))
-    time.sleep(GUARD_TIME)
-
-    # Write a file that describes this execution
-    with open(path.join(log_dir, SUMMARY_OUTPUT), "w") as f:
-        f.write("Datetime (UTC): " + datetime.datetime.utcnow().isoformat())
-        f.write("\nPlatform: Android")
-        f.write("\nGit hash: " + git_rev_hash())
-        f.write("\nGit short hash: " + git_rev_hash_short())
-        f.write("\nLayout threads: " + str(layout_thread_count))
-        f.write("\nTrial: " + str(trial))
-        f.write("\nCommand: " + cmd)
-
-
-def main():
-    """For this script to be useful, the following conditions are needed:
-    - Build servo for Android in release mode with the "energy-profiling" feature enabled.
-    """
-    # Default number of layout threads
-    layout_threads = 1
-    # Default benchmark
-    benchmark = "https://www.mozilla.org/"
-    # Default renderer
-    renderer = ""
-    # Default output directory
-    output_dir = "heartbeat_logs"
-    # Default profile interval
-    profile = 60
-
-    # Parsing the input of the script
-    parser = argparse.ArgumentParser(description="Characterize Servo timing and energy behavior on Android")
-    parser.add_argument("-b", "--benchmark",
-                        default=benchmark,
-                        help="Gets the benchmark, for example \"-b http://www.example.com\"")
-    parser.add_argument("-w", "--webrender",
-                        action='store_true',
-                        help="Use webrender backend")
-    parser.add_argument("-l", "--layout_threads",
-                        help="Specify the number of threads for layout, for example \"-l 5\"")
-    parser.add_argument("-o", "--output",
-                        help="Specify the log output directory, for example \"-o heartbeat_logs\"")
-    parser.add_argument("-p", "--profile",
-                        default=60,
-                        help="Profiler output interval, for example \"-p 60\"")
-
-    args = parser.parse_args()
-    if args.benchmark:
-        benchmark = args.benchmark
-    if args.webrender:
-        renderer = "-w"
-    if args.layout_threads:
-        layout_threads = int(args.layout_threads)
-    if args.output:
-        output_dir = args.output
-    if args.profile:
-        profile = args.profile
-
-    if os.path.exists(output_dir):
-        print("Output directory already exists: " + output_dir)
-        sys.exit(1)
-    os.makedirs(output_dir)
-
-    execute(output_dir, renderer, benchmark, profile, 1, layout_threads)
-
-
-if __name__ == "__main__":
-    main()
@ -1,411 +0,0 @@
|
||||||
#!/usr/bin/env python
|
|
||||||
|
|
||||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
|
||||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
|
||||||
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
|
|
||||||
|
|
||||||
from __future__ import print_function
|
|
||||||
|
|
||||||
import argparse
|
|
||||||
import matplotlib.pyplot as plt
|
|
||||||
import numpy as np
|
|
||||||
import os
|
|
||||||
from os import path
|
|
||||||
import six
|
|
||||||
import sys
|
|
||||||
import warnings
|
|
||||||
|
|
||||||
HB_LOG_IDX_START_TIME = 7
|
|
||||||
HB_LOG_IDX_END_TIME = HB_LOG_IDX_START_TIME + 1
|
|
||||||
HB_LOG_IDX_START_ENERGY = 14
|
|
||||||
HB_LOG_IDX_END_ENERGY = HB_LOG_IDX_START_ENERGY + 1
|
|
||||||
|
|
||||||
ENERGY_PROFILER_NAME = 'ApplicationHeartbeat'
|
|
||||||
SUMMARY_OUTPUT = "summary.txt"
|
|
||||||
SUMMARY_TIME_IDX = 8
|
|
||||||
SUMMARY_ENERGY_IDX = SUMMARY_TIME_IDX + 1
|
|
||||||
SUMMARY_POWER_IDX = SUMMARY_ENERGY_IDX + 1
|
|
||||||
|
|
||||||
|
|
||||||
def autolabel(rects, ax):
|
|
||||||
"""Attach some text labels.
|
|
||||||
"""
|
|
||||||
for rect in rects:
|
|
||||||
ax.text(rect.get_x() + rect.get_width() / 2., 1.05 * rect.get_height(), '', ha='center', va='bottom')
|
|
||||||
|
|
||||||
|
|
||||||
def plot_raw_totals(config, plot_data, max_time, max_time_std, max_energy, max_energy_std, output_dir, normalize):
|
|
||||||
"""Plot the raw totals for a configuration.
|
|
||||||
|
|
||||||
Keyword arguments:
|
|
||||||
config -- configuration name
|
|
||||||
plot_data -- (profiler name, total_time, total_time_std, total_energy, total_energy_std)
|
|
||||||
max_time, max_time_std, max_energy, max_energy_std -- single values
|
|
||||||
normalize -- True/False
|
|
||||||
"""
|
|
||||||
plot_data = sorted(plot_data)
|
|
||||||
keys = [p for (p, tt, tts, te, tes) in plot_data]
|
|
||||||
total_times = [tt for (p, tt, tts, te, tes) in plot_data]
|
|
||||||
total_times_std = [tts for (p, tt, tts, te, tes) in plot_data]
|
|
||||||
total_energies = [te for (p, tt, tts, te, tes) in plot_data]
|
|
||||||
total_energies_std = [tes for (p, tt, tts, te, tes) in plot_data]
|
|
||||||
|
|
||||||
fig, ax1 = plt.subplots()
|
|
||||||
ind = np.arange(len(keys)) # the x locations for the groups
|
|
||||||
width = 0.35 # the width of the bars
|
|
||||||
# add some text for labels, title and axes ticks
|
|
||||||
ax1.set_title('Time/Energy Data for Configuration ' + config)
|
|
||||||
ax1.set_xticks(ind + width)
|
|
||||||
ax1.set_xticklabels(keys, rotation=45)
|
|
||||||
fig.set_tight_layout(True)
|
|
||||||
fig.set_size_inches(len(plot_data) / 1.5, 8)
|
|
||||||
|
|
||||||
ax2 = ax1.twinx()
|
|
||||||
|
|
||||||
# Normalize
|
|
||||||
if normalize:
|
|
||||||
total_times_std /= np.sum(total_times)
|
|
||||||
total_times /= np.sum(total_times)
|
|
||||||
total_energies_std /= np.sum(total_energies)
|
|
||||||
total_energies /= np.sum(total_energies)
|
|
||||||
ax1.set_ylabel('Time (Normalized)')
|
|
||||||
ax2.set_ylabel('Energy (Normalized)')
|
|
||||||
else:
|
|
||||||
# set time in us instead of ns
|
|
||||||
total_times_std /= np.array(1000000.0)
|
|
||||||
total_times /= np.array(1000000.0)
|
|
||||||
total_energies_std /= np.array(1000000.0)
|
|
||||||
total_energies /= np.array(1000000.0)
|
|
||||||
ax1.set_ylabel('Time (ms)')
|
|
||||||
ax2.set_ylabel('Energy (Joules)')
|
|
||||||
|
|
||||||
rects1 = ax1.bar(ind, total_times, width, color='r', yerr=total_times_std)
|
|
||||||
rects2 = ax2.bar(ind + width, total_energies, width, color='y', yerr=total_energies_std)
|
|
||||||
ax1.legend([rects1[0], rects2[0]], ['Time', 'Energy'])
|
|
||||||
|
|
||||||
# set axis
|
|
||||||
x1, x2, y1, y2 = plt.axis()
|
|
||||||
if normalize:
|
|
||||||
ax1.set_ylim(ymin=0, ymax=1)
|
|
||||||
ax2.set_ylim(ymin=0, ymax=1)
|
|
||||||
else:
|
|
||||||
ax1.set_ylim(ymin=0, ymax=((max_time + max_time_std) * 1.25 / 1000000.0))
|
|
||||||
ax2.set_ylim(ymin=0, ymax=((max_energy + max_energy_std) * 1.25 / 1000000.0))
|
|
||||||
|
|
||||||
autolabel(rects1, ax1)
|
|
||||||
autolabel(rects2, ax2)
|
|
||||||
|
|
||||||
# plt.show()
|
|
||||||
plt.savefig(path.join(output_dir, config + ".png"))
|
|
||||||
plt.close(fig)
|
|
||||||
|
|
||||||
|
|
||||||
def create_raw_total_data(config_data):
|
|
||||||
"""Get the raw data to plot for a configuration
|
|
||||||
Return: [(profiler, time_mean, time_stddev, energy_mean, energy_stddev)]
|
|
||||||
|
|
||||||
Keyword arguments:
|
|
||||||
config_data -- (trial, trial_data)
|
|
||||||
"""
|
|
||||||
# We can't assume that the same number of heartbeats are always issued across trials
|
|
||||||
# key: profiler name; value: list of timing sums for each trial
|
|
||||||
profiler_total_times = {}
|
|
||||||
# key: profiler name; value: list of energy sums for each trial
|
|
||||||
profiler_total_energies = {}
|
|
||||||
for (t, td) in config_data:
|
|
||||||
for (profiler, ts, te, es, ee) in td:
|
|
||||||
# sum the total times and energies for each profiler in this trial
|
|
||||||
total_time = np.sum(te - ts)
|
|
||||||
total_energy = np.sum(ee - es)
|
|
||||||
# add to list to be averaged later
|
|
||||||
time_list = profiler_total_times.get(profiler, [])
|
|
||||||
time_list.append(total_time)
|
|
||||||
profiler_total_times[profiler] = time_list
|
|
||||||
energy_list = profiler_total_energies.get(profiler, [])
|
|
||||||
energy_list.append(total_energy)
|
|
||||||
profiler_total_energies[profiler] = energy_list
|
|
||||||
|
|
||||||
# Get mean and stddev for time and energy totals
|
|
||||||
return [(profiler,
|
|
||||||
np.mean(profiler_total_times[profiler]),
|
|
||||||
np.std(profiler_total_times[profiler]),
|
|
||||||
np.mean(profiler_total_energies[profiler]),
|
|
||||||
np.std(profiler_total_energies[profiler]))
|
|
||||||
for profiler in profiler_total_times.keys()]
|
|
||||||
|
|
||||||
|
|
||||||
def plot_all_raw_totals(config_list, output_dir):
|
|
||||||
"""Plot column charts of the raw total time/energy spent in each profiler category.
|
|
||||||
|
|
||||||
Keyword arguments:
|
|
||||||
config_list -- [(config, result of process_config_dir(...))]
|
|
||||||
output_dir -- where to write plots to
|
|
||||||
"""
|
|
||||||
raw_total_norm_out_dir = path.join(output_dir, 'raw_totals_normalized')
|
|
||||||
os.makedirs(raw_total_norm_out_dir)
|
|
||||||
raw_total_out_dir = path.join(output_dir, 'raw_totals')
|
|
||||||
os.makedirs(raw_total_out_dir)
|
|
||||||
|
|
||||||
# (name, (profiler, (time_mean, time_stddev, energy_mean, energy_stddev)))
|
|
||||||
raw_totals_data = [(config, create_raw_total_data(config_data)) for (config, config_data) in config_list]
|
|
||||||
|
|
||||||
mean_times = []
|
|
||||||
mean_times_std = []
|
|
||||||
mean_energies = []
|
|
||||||
mean_energies_std = []
|
|
||||||
for profiler_tup in [config_tup[1] for config_tup in raw_totals_data]:
|
|
||||||
for (p, tt, tts, te, tes) in profiler_tup:
|
|
||||||
mean_times.append(tt)
|
|
||||||
mean_times_std.append(tts)
|
|
||||||
mean_energies.append(te)
|
|
||||||
mean_energies_std.append(tes)
|
|
||||||
# get consistent max time/energy values across plots
|
|
||||||
max_t = np.max(mean_times)
|
|
||||||
max_t_std = np.max(mean_times_std)
|
|
||||||
max_e = np.max(mean_energies)
|
|
||||||
max_e_std = np.max(mean_energies_std)
|
|
||||||
[plot_raw_totals(data[0], data[1], max_t, max_t_std, max_e, max_e_std, raw_total_norm_out_dir, True)
|
|
||||||
for data in raw_totals_data]
|
|
||||||
[plot_raw_totals(data[0], data[1], max_t, max_t_std, max_e, max_e_std, raw_total_out_dir, False)
|
|
||||||
for data in raw_totals_data]
|
|
||||||
|
|
||||||
|
|
||||||
def plot_trial_time_series(config, trial, trial_data, max_end_time, max_power, output_dir):
|
|
||||||
"""Plot time series for a single trial.
|
|
||||||
|
|
||||||
Keyword arguments:
|
|
||||||
config -- the config name
|
|
||||||
trial -- the trial name
|
|
||||||
trial_data -- [(profiler, [start times], [end times], [start energies], [end energies])]
|
|
||||||
max_end_time -- single value to use as max X axis value (for consistency across trials)
|
|
||||||
output_dir -- the output directory
|
|
||||||
"""
|
|
||||||
# TODO: Some profilers may have parallel tasks - need to identify this on plots
|
|
||||||
max_end_time = max_end_time / 1000000.0
|
|
||||||
trial_data = sorted(trial_data)
|
|
||||||
fig, ax1 = plt.subplots()
|
|
||||||
keys = [p for (p, ts, te, es, ee) in trial_data]
|
|
||||||
# add some text for labels, title and axes ticks
|
|
||||||
ax1.set_title('Profiler Activity for ' + config + ', ' + trial)
|
|
||||||
ax1.set_xlabel('Time (ms)')
|
|
||||||
ax1.grid(True)
|
|
||||||
width = 8 # the width of the bars
|
|
||||||
ax1.set_yticks(10 * np.arange(1, len(keys) + 2))
|
|
||||||
ax1.set_yticklabels(keys)
|
|
||||||
ax1.set_ylim(ymin=0, ymax=((len(trial_data) + 1) * 10))
|
|
||||||
ax1.set_xlim(xmin=0, xmax=max_end_time)
|
|
||||||
fig.set_tight_layout(True)
|
|
||||||
fig.set_size_inches(16, len(trial_data) / 3)
|
|
||||||
|
|
||||||
i = 10
|
|
||||||
for (p, ts, te, es, ee) in trial_data:
|
|
||||||
xranges = [(ts[j] / 1000000.0, (te[j] - ts[j]) / 1000000.0) for j in six.moves.xrange(len(ts))]
|
|
||||||
ax1.broken_barh(xranges, (i - 0.5 * width, width))
|
|
||||||
i += 10
|
|
||||||
# place a vbar at the final time for this trial
|
|
||||||
last_profiler_times = map(np.nanmax, filter(lambda x: len(x) > 0, [te for (p, ts, te, es, ee) in trial_data]))
|
|
||||||
plt.axvline(np.max(last_profiler_times) / 1000000.0, color='black')
|
|
||||||
|
|
||||||
power_times = []
|
|
||||||
power_values = []
|
|
||||||
for (p, ts, te, es, ee) in trial_data:
|
|
||||||
if p == ENERGY_PROFILER_NAME:
|
|
||||||
power_times = te / 1000000.0
|
|
||||||
power_values = (ee - es) / ((te - ts) / 1000.0)
|
|
||||||
ax2 = ax1.twinx()
|
|
||||||
ax2.set_xlim(xmin=0, xmax=max_end_time)
|
|
||||||
ax2.set_ylim(ymin=0, ymax=max_power)
|
|
||||||
ax2.set_ylabel('Power (Watts)')
|
|
||||||
ax2.plot(power_times, power_values, color='r')
|
|
||||||
|
|
||||||
# plt.show()
|
|
||||||
plt.savefig(path.join(output_dir, "ts_" + config + "_" + trial + ".png"))
|
|
||||||
plt.close(fig)
|
|
||||||
|
|
||||||
|
|
||||||
def hb_energy_times_to_power(es, ee, ts, te):
|
|
||||||
"""Compute power from start and end energy and times.
|
|
||||||
Return: power values
|
|
||||||
"""
|
|
||||||
return (ee - es) / ((te - ts) / 1000.0)
|
|
||||||
|
|
||||||
|
|
||||||
def plot_all_time_series(config_list, output_dir):
|
|
||||||
"""Plot column charts of the raw total time/energy spent in each profiler category.
|
|
||||||
|
|
||||||
Keyword arguments:
|
|
||||||
config_list -- [(config, result of process_config_dir(...))]
|
|
||||||
output_dir -- where to write plots to
|
|
||||||
"""
|
|
||||||
time_series_out_dir = path.join(output_dir, 'time_series')
|
|
||||||
os.makedirs(time_series_out_dir)
|
|
||||||
|
|
||||||
max_end_times = []
|
|
||||||
max_power_values = []
|
|
||||||
for (c, cd) in config_list:
|
|
||||||
for (t, td) in cd:
|
|
||||||
trial_max_end_times = map(np.nanmax, filter(lambda x: len(x) > 0, [te for (p, ts, te, es, ee) in td]))
|
|
||||||
max_end_times.append(np.nanmax(trial_max_end_times))
|
|
||||||
for (p, ts, te, es, ee) in td:
|
|
||||||
# We only care about the energy profiler (others aren't reliable for instant power anyway)
|
|
||||||
if p == ENERGY_PROFILER_NAME and len(te) > 0:
|
|
||||||
max_power_values.append(np.nanmax(hb_energy_times_to_power(es, ee, ts, te)))
|
|
||||||
max_time = np.nanmax(max_end_times)
|
|
||||||
max_power = np.nanmax(np.array(max_power_values)) * 1.2 # leave a little space at the top
|
|
||||||
|
|
||||||
for (config, config_data) in config_list:
|
|
||||||
[plot_trial_time_series(config, trial, trial_data, max_time, max_power, time_series_out_dir)
|
|
||||||
for (trial, trial_data) in config_data]
|
|
||||||
|
|
||||||
|
|
||||||
def read_heartbeat_log(profiler_hb_log):
    """Read a heartbeat log file.

    Return: (profiler name, [start times], [end times], [start energies], [end energies])

    Keyword arguments:
    profiler_hb_log -- the file to read
    """
    with warnings.catch_warnings():
        try:
            warnings.simplefilter("ignore")
            time_start, time_end, energy_start, energy_end = \
                np.loadtxt(profiler_hb_log,
                           dtype=np.dtype('uint64'),
                           skiprows=1,
                           usecols=(HB_LOG_IDX_START_TIME,
                                    HB_LOG_IDX_END_TIME,
                                    HB_LOG_IDX_START_ENERGY,
                                    HB_LOG_IDX_END_ENERGY),
                           unpack=True,
                           ndmin=1)
        except ValueError:
            time_start, time_end, energy_start, energy_end = [], [], [], []
    name = path.split(profiler_hb_log)[1].split('-')[1].split('.')[0]
    return (name,
            np.atleast_1d(time_start),
            np.atleast_1d(time_end),
            np.atleast_1d(energy_start),
            np.atleast_1d(energy_end))

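# --- Editor's illustration (not part of the original script) ---
# The profiler name is recovered purely from the file name: whatever sits
# between the first '-' and the first '.' of the basename. The path below is
# hypothetical; only its shape matters.
def _example_name_parsing():
    from os import path
    log = "/tmp/trial_0/heartbeat-LayoutPerform.log"
    name = path.split(log)[1].split('-')[1].split('.')[0]
    assert name == "LayoutPerform"
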
def process_trial_dir(trial_dir):
    """Process trial directory.

    Return: [(profiler name, [start times], [end times], [start energies], [end energies])]
    Time and energy are normalized to 0 start values.

    Keyword arguments:
    trial_dir -- the directory for this trial
    """
    log_data = map(lambda h: read_heartbeat_log(path.join(trial_dir, h)),
                   filter(lambda f: f.endswith(".log"), os.listdir(trial_dir)))

    # Find the earliest timestamps and energy readings
    min_t = np.nanmin(map(np.nanmin, filter(lambda x: len(x) > 0, [ts for (profiler, ts, te, es, ee) in log_data])))
    min_e = np.nanmin(map(np.nanmin, filter(lambda x: len(x) > 0, [es for (profiler, ts, te, es, ee) in log_data])))

    # Normalize timing/energy data to start values of 0
    return [(profiler, ts - min_t, te - min_t, es - min_e, ee - min_e) for (profiler, ts, te, es, ee) in log_data]

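# --- Editor's illustration (not part of the original script) ---
# The normalization above subtracts the earliest timestamp/energy reading seen
# in the trial, so every profiler's series starts at 0 and can be plotted on a
# common axis. Tiny made-up example:
def _example_normalization():
    import numpy as np
    ts = np.array([500, 700, 900], dtype=np.uint64)
    min_t = np.nanmin(ts)
    assert (ts - min_t).tolist() == [0, 200, 400]
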
def process_config_dir(config_dir):
    """Process a configuration directory.

    Return: [(trial, [(profiler name, [start times], [end times], [start energies], [end energies])])]

    Keyword arguments:
    config_dir -- the directory for this configuration - contains subdirectories for each trial
    """
    return [(trial_dir, process_trial_dir(path.join(config_dir, trial_dir))) for trial_dir in os.listdir(config_dir)]


def process_logs(log_dir):
    """Process log directory.

    Return: [(config, [(trial, [(profiler name, [start times], [end times], [start energies], [end energies])])])]

    Keyword arguments:
    log_dir -- the log directory to process - contains subdirectories for each configuration
    """
    return [((config_dir.split('_')[1], process_config_dir(path.join(log_dir, config_dir))))
            for config_dir in os.listdir(log_dir)]

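# --- Editor's illustration (not part of the original script) ---
# process_logs() expects a layout of <log_dir>/<configuration>/<trial>/*.log,
# and it takes the configuration label from the text after the first
# underscore in the directory name, so configuration directories must be
# named like "<prefix>_<config>". The directory name below is hypothetical.
def _example_config_name():
    config_dir = "01_4cores-max-freq"
    assert config_dir.split('_')[1] == "4cores-max-freq"
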
def find_best_executions(log_dir):
    """Get the best time, energy, and power from the characterization summaries.

    Return: ((config, trial, min_time), (config, trial, min_energy), (config, trial, min_power))

    Keyword arguments:
    log_dir -- the log directory to process - contains the characterization summaries
    """
    DEFAULT = ('', '', 1000000000.0)
    min_time = DEFAULT
    min_energy = DEFAULT
    min_power = DEFAULT
    for config_dir in os.listdir(log_dir):
        for trial_dir in os.listdir(path.join(log_dir, config_dir)):
            with open(path.join(log_dir, config_dir, trial_dir, SUMMARY_OUTPUT), "r") as s:
                lines = s.readlines()
                time = float(lines[SUMMARY_TIME_IDX].split(':')[1])
                energy = int(lines[SUMMARY_ENERGY_IDX].split(':')[1])
                power = float(lines[SUMMARY_POWER_IDX].split(':')[1])
                if time < min_time[2]:
                    min_time = (config_dir, trial_dir, time)
                if energy < min_energy[2]:
                    min_energy = (config_dir, trial_dir, energy)
                if power < min_power[2]:
                    min_power = (config_dir, trial_dir, power)
    return (min_time, min_energy, min_power)

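# --- Editor's illustration (not part of the original script) ---
# find_best_executions() assumes each trial directory contains a summary file
# (SUMMARY_OUTPUT) whose lines look like "<label>: <value>", with the time,
# energy, and power lines sitting at the SUMMARY_*_IDX positions defined
# earlier in this file. The line below is made up to show the parsing.
def _example_summary_parsing():
    line = "time: 12.34"
    assert float(line.split(':')[1]) == 12.34
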
def main():
    """Process the log files from the "characterize.py" script and produce visualizations."""
    # Default log directory
    directory = 'heartbeat_logs'
    # Default output directory
    output_dir = 'plots'
    # Default: not processing Android results
    android = False

    # Parsing the input of the script
    parser = argparse.ArgumentParser(description="Process Heartbeat log files from characterization")
    parser.add_argument("-d", "--directory",
                        default=directory,
                        help="Heartbeat log directory, for example \"-d heartbeat_logs\"")
    parser.add_argument("-o", "--output",
                        default=output_dir,
                        help="Specify the plot output directory, for example \"-o plots\"")
    parser.add_argument("--android",
                        action="store_true",
                        dest="android",
                        default=False,
                        help="Specify if processing results from Android")

    args = parser.parse_args()
    if args.directory:
        directory = args.directory
    if args.output:
        output_dir = args.output
    if args.android:
        android = args.android

    if not os.path.exists(directory):
        print("Input directory does not exist: " + directory)
        sys.exit(1)

    if os.path.exists(output_dir):
        print("Output directory already exists: " + output_dir)
        sys.exit(1)

    res = process_logs(directory)

    if not android:
        best = find_best_executions(directory)
        print('Best time:', best[0])
        print('Best energy:', best[1])
        print('Best power:', best[2])

    os.makedirs(output_dir)
    plot_all_raw_totals(res, output_dir)
    plot_all_time_series(res, output_dir)


if __name__ == "__main__":
    main()

@@ -12,7 +12,7 @@ use std::time::Duration;

 #[test]
 fn time_profiler_smoke_test() {
-    let chan = time::Profiler::create(&None, None, false);
+    let chan = time::Profiler::create(&None, None);
     assert!(true, "Can create the profiler thread");

     let (ipcchan, _ipcport) = ipc::channel().unwrap();

@@ -45,7 +45,7 @@ fn time_profiler_stats_test() {

 #[test]
 fn channel_profiler_test() {
-    let chan = time::Profiler::create(&Some(OutputOptions::Stdout(5.0)), None, false);
+    let chan = time::Profiler::create(&Some(OutputOptions::Stdout(5.0)), None);
     let (profiled_sender, profiled_receiver) = ProfiledIpc::channel(chan.clone()).unwrap();
     thread::spawn(move || {
         thread::sleep(Duration::from_secs(2));

@@ -70,7 +70,7 @@ fn channel_profiler_test() {

 #[test]
 fn bytes_channel_profiler_test() {
-    let chan = time::Profiler::create(&Some(OutputOptions::Stdout(5.0)), None, false);
+    let chan = time::Profiler::create(&Some(OutputOptions::Stdout(5.0)), None);
     let (profiled_sender, profiled_receiver) = ProfiledIpc::bytes_channel(chan.clone()).unwrap();
     thread::spawn(move || {
         thread::sleep(Duration::from_secs(2));