Mirror of https://github.com/servo/servo.git (synced 2025-08-06 22:15:33 +01:00)
auto merge of #559 : tkuehn/servo/master, r=metajack
r? @metajack. This includes the rust-azure pointer change.
Commit b0495eb7fd
7 changed files with 103 additions and 68 deletions
@@ -8,7 +8,7 @@
 use azure::azure_hl::{BackendType, CairoBackend, CoreGraphicsBackend};
 use azure::azure_hl::{CoreGraphicsAcceleratedBackend, Direct2DBackend, SkiaBackend};
 
-use std::f64;
+use std::float;
 use std::result;
 use std::uint;
@@ -17,7 +17,7 @@ pub struct Opts {
     render_backend: BackendType,
     n_render_threads: uint,
     tile_size: uint,
-    profiler_period: Option<f64>,
+    profiler_period: Option<float>,
 
     /// A scale factor to apply to tiles, to allow rendering tiles at higher resolutions for
     /// testing pan and zoom code.
@@ -40,9 +40,10 @@ pub fn from_cmdline_args(args: &[~str]) -> Opts {
     ];
 
     let opt_match = match getopts::getopts(args, opts) {
-        result::Ok(m) => { copy m }
-        result::Err(f) => { fail!(getopts::fail_str(copy f)) }
+        result::Ok(m) => m,
+        result::Err(f) => fail!(getopts::fail_str(copy f)),
     };
 
     let urls = if opt_match.free.is_empty() {
         fail!(~"servo asks that you provide 1 or more URLs")
     } else {
@@ -82,10 +83,10 @@ pub fn from_cmdline_args(args: &[~str]) -> Opts {
        None => 1, // FIXME: Number of cores.
    };

-   let profiler_period: Option<f64> =
+   let profiler_period: Option<float> =
        // if only flag is present, default to 5 second period
        match getopts::opt_default(&opt_match, "p", "5") {
-           Some(period) => Some(f64::from_str(period).get()),
+           Some(period) => Some(float::from_str(period).get()),
            None => None,
        };
@@ -128,8 +128,8 @@ impl<C: RenderListener + Send> RenderTask<C> {
            PaintPermissionGranted => {
                self.paint_permission = true;
                match self.last_paint_msg {
-                   Some((ref layer_buffer_set, ref layer_size)) => {
-                       self.compositor.paint(self.id, layer_buffer_set.clone(), *layer_size);
+                   Some((ref layer_buffer_set, layer_size)) => {
+                       self.compositor.paint(self.id, layer_buffer_set.clone(), layer_size);
                        self.compositor.set_render_state(IdleRenderState);
                    }
                    None => {}
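The pattern behind this hunk: the render task caches its most recent paint message and replays it to the compositor as soon as paint permission is granted. A minimal sketch of that replay pattern in modern Rust (the types, fields, and closure here are illustrative stand-ins, not Servo's actual API):

    // Cache the most recent paint request and replay it once permission arrives.
    struct LayerBufferSet; // stand-in for the real buffer type

    struct RenderTask {
        paint_permission: bool,
        last_paint_msg: Option<(LayerBufferSet, (u32, u32))>,
    }

    impl RenderTask {
        fn grant_paint_permission(&mut self, paint: impl Fn(&LayerBufferSet, (u32, u32))) {
            self.paint_permission = true;
            if let Some((ref buffers, size)) = self.last_paint_msg {
                // Replay the cached paint, mirroring the match in the diff above.
                paint(buffers, size);
            }
        }
    }

    fn main() {
        let mut task = RenderTask {
            paint_permission: false,
            last_paint_msg: Some((LayerBufferSet, (800, 600))),
        };
        task.grant_paint_permission(|_buffers, size| println!("repainting at {:?}", size));
    }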
@@ -11,10 +11,10 @@ use std::task;
 use gfx::opts::Opts;
 use gfx::render_task::{PaintPermissionGranted, PaintPermissionRevoked};
 use pipeline::Pipeline;
-use servo_msg::constellation_msg::{CompositorAck, ConstellationChan, ExitMsg};
-use servo_msg::constellation_msg::{LoadUrlMsg, Msg, NavigateMsg, RendererReadyMsg};
+use servo_msg::constellation_msg::{CompositorAck, ConstellationChan, ExitMsg, LoadUrlMsg};
+use servo_msg::constellation_msg::{Msg, NavigateMsg, RendererReadyMsg, ResizedWindowBroadcast};
 use servo_msg::constellation_msg;
-use script::script_task::ExecuteMsg;
+use script::script_task::{ResizeInactiveMsg, ExecuteMsg};
 use servo_net::image_cache_task::{ImageCacheTask, ImageCacheTaskClient};
 use servo_net::resource_task::ResourceTask;
 use servo_net::resource_task;
@@ -199,6 +199,17 @@ impl Constellation {
                }
            }

+           ResizedWindowBroadcast(new_size) => match self.current_painter {
+               Some(current_painter_id) => for self.pipelines.iter().advance |(&id, pipeline)| {
+                   if current_painter_id != id {
+                       pipeline.script_chan.send(ResizeInactiveMsg(new_size));
+                   }
+               },
+               None => for self.pipelines.iter().advance |(_, pipeline)| {
+                   pipeline.script_chan.send(ResizeInactiveMsg(new_size));
+               },
+           },
+
            // Acknowledgement from the compositor that it has updated its active pipeline id
            CompositorAck(id) => {
                self.grant_paint_permission(id);
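The new ResizedWindowBroadcast arm forwards the resize to every pipeline's script channel except the one that is currently painting, so only inactive pages defer their reflow. A sketch of the same dispatch in modern Rust, with std::sync::mpsc channels and a HashMap standing in for Servo's pipeline map (names are illustrative):

    use std::collections::HashMap;
    use std::sync::mpsc::{channel, Sender};

    #[derive(Clone, Copy)]
    struct Size2D { width: u32, height: u32 }

    enum ScriptMsg { ResizeInactive(Size2D) }

    fn broadcast_resize(
        pipelines: &HashMap<u32, Sender<ScriptMsg>>,
        current_painter: Option<u32>,
        new_size: Size2D,
    ) {
        for (&id, script_chan) in pipelines {
            // Skip the active painter; it is resized through the normal path.
            if current_painter != Some(id) {
                let _ = script_chan.send(ScriptMsg::ResizeInactive(new_size));
            }
        }
    }

    fn main() {
        let (tx, rx) = channel();
        let mut pipelines = HashMap::new();
        pipelines.insert(1, tx);
        broadcast_resize(&pipelines, Some(2), Size2D { width: 1024, height: 768 });
        assert!(matches!(rx.try_recv(), Ok(ScriptMsg::ResizeInactive(_))));
    }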
@@ -109,7 +109,7 @@ fn run(opts: &Opts) {
            do spawn {
                loop {
                    extra::timer::sleep(&uv_global_loop::get(),
-                                       (period * 1000f64) as uint);
+                                       (period * 1000f) as uint);
                    profiler_chan.send(PrintMsg);
                }
            }
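In this loop, period is the profiler period in seconds, and the spawned task simply sleeps and asks the profiler to print. A rough modern-Rust equivalent using std::thread and std::sync::mpsc in place of the old do spawn and extra::timer machinery (a sketch, not Servo code):

    use std::sync::mpsc::{channel, Receiver};
    use std::thread;
    use std::time::Duration;

    enum ProfilerMsg { Print }

    fn spawn_print_timer(period_secs: f64) -> Receiver<ProfilerMsg> {
        let (tx, rx) = channel();
        thread::spawn(move || loop {
            thread::sleep(Duration::from_millis((period_secs * 1000.0) as u64));
            // If the profiler has gone away, stop the timer thread.
            if tx.send(ProfilerMsg::Print).is_err() {
                break;
            }
        });
        rx
    }

    fn main() {
        let rx = spawn_print_timer(0.01);
        // Wait for the first tick, then drop the receiver so the thread exits.
        let ProfilerMsg::Print = rx.recv().unwrap();
    }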
@@ -7,6 +7,7 @@
 
 use std::comm::{Chan, SharedChan};
 use extra::net::url::Url;
+use geom::size::Size2D;
 
 #[deriving(Clone)]
 pub struct ConstellationChan {
@@ -30,6 +31,7 @@ pub enum Msg {
    ExitMsg(Chan<()>),
    RendererReadyMsg(uint),
    CompositorAck(uint),
+   ResizedWindowBroadcast(Size2D<uint>),
 }

 /// Represents the two different ways to which a page can be navigated
@@ -20,7 +20,7 @@ use layout_interface::{ReflowDocumentDamage, ReflowForDisplay, ReflowForScriptQu
 use layout_interface::ReflowMsg;
 use layout_interface;
 use servo_msg::constellation_msg::{ConstellationChan, LoadUrlMsg, NavigationDirection};
-use servo_msg::constellation_msg::RendererReadyMsg;
+use servo_msg::constellation_msg::{RendererReadyMsg, ResizedWindowBroadcast};
 use servo_msg::constellation_msg;
 
 use std::cast::transmute;
@@ -63,6 +63,8 @@ pub enum ScriptMsg {
    FireTimerMsg(~TimerData),
    /// Notifies script that reflow is finished.
    ReflowCompleteMsg,
+   /// Notifies script that window has been resized but to not take immediate action.
+   ResizeInactiveMsg(Size2D<uint>),
    /// Exits the constellation.
    ExitMsg,
 }
@@ -143,8 +145,8 @@ pub struct ScriptTask {

    /// Cached copy of the most recent url loaded by the script
    /// TODO(tkuehn): this currently does not follow any particular caching policy
-   /// and simply caches pages forever (!).
-   last_loaded_url: Option<Url>,
+   /// and simply caches pages forever (!). The bool indicates if reflow is required
+   last_loaded_url: Option<(Url, bool)>,
 }

 fn global_script_context_key(_: @ScriptTask) {}
@@ -284,6 +286,7 @@ impl ScriptTask {
            FireTimerMsg(timer_data) => self.handle_fire_timer_msg(timer_data),
            NavigateMsg(direction) => self.handle_navigate_msg(direction),
            ReflowCompleteMsg => self.handle_reflow_complete_msg(),
+           ResizeInactiveMsg(new_size) => self.handle_resize_inactive_msg(new_size),
            ExitMsg => {
                self.handle_exit_msg();
                return false
@@ -343,6 +346,15 @@ impl ScriptTask {
        self.constellation_chan.send(constellation_msg::NavigateMsg(direction));
    }

+   /// Window was resized, but this script was not active, so don't reflow yet
+   fn handle_resize_inactive_msg(&mut self, new_size: Size2D<uint>) {
+       self.window_size = new_size;
+       let last_loaded_url = replace(&mut self.last_loaded_url, None);
+       for last_loaded_url.iter().advance |last_loaded_url| {
+           self.last_loaded_url = Some((last_loaded_url.first(), true));
+       }
+   }
+
    /// Handles a request to exit the script task and shut down layout.
    fn handle_exit_msg(&mut self) {
        self.join_layout();
@@ -356,9 +368,18 @@ impl ScriptTask {
    /// The entry point to document loading. Defines bindings, sets up the window and document
    /// objects, parses HTML and CSS, and kicks off initial layout.
    fn load(&mut self, url: Url) {
-       for self.last_loaded_url.iter().advance |last_loaded_url| {
-           if url == *last_loaded_url { return; }
+       let last_loaded_url = replace(&mut self.last_loaded_url, None);
+       for last_loaded_url.iter().advance |last_loaded_url| {
+           let (ref last_loaded_url, needs_reflow) = *last_loaded_url;
+           if *last_loaded_url == url {
+               if needs_reflow {
+                   self.reflow_all(ReflowForDisplay);
+                   self.last_loaded_url = Some((last_loaded_url.clone(), false));
+               }
+               return;
+           }
        }

        // Define the script DOM bindings.
        //
        // FIXME: Can this be done earlier, to save the flag?
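Taken together, handle_resize_inactive_msg and the reworked load implement a small state machine: a resize delivered to an inactive page flips the cached needs_reflow flag, and the next load of the same URL performs only the deferred reflow instead of reloading. A slightly simplified sketch of that logic in modern Rust (Url is reduced to a String and reflow_all is a stand-in):

    struct ScriptTask {
        // Cached (url, needs_reflow) pair, mirroring last_loaded_url in the diff.
        last_loaded_url: Option<(String, bool)>,
    }

    impl ScriptTask {
        fn handle_resize_inactive(&mut self) {
            // Mark the cached page as needing a reflow when it becomes active again.
            if let Some((url, _)) = self.last_loaded_url.take() {
                self.last_loaded_url = Some((url, true));
            }
        }

        fn load(&mut self, url: &str) {
            if let Some((last_url, needs_reflow)) = self.last_loaded_url.take() {
                if last_url == url {
                    if needs_reflow {
                        self.reflow_all();
                    }
                    self.last_loaded_url = Some((last_url, false));
                    return; // already loaded; only the deferred reflow was needed
                }
            }
            // ... a real implementation would parse and lay out the page here ...
            self.last_loaded_url = Some((url.to_string(), false));
        }

        fn reflow_all(&mut self) {
            println!("reflowing cached page");
        }
    }

    fn main() {
        let mut task = ScriptTask { last_loaded_url: None };
        task.load("https://example.org/");
        task.handle_resize_inactive();
        task.load("https://example.org/"); // triggers the deferred reflow
    }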
@@ -425,7 +446,7 @@ impl ScriptTask {
                      ~"???",
                      1);
        }
-       self.last_loaded_url = Some(url);
+       self.last_loaded_url = Some((url, false));
    }

    /// Sends a ping to layout and waits for the response. The response will arrive when the
@@ -493,8 +514,8 @@ impl ScriptTask {
    pub fn reflow_all(&mut self, goal: ReflowGoal) {
        for self.root_frame.iter().advance |root_frame| {
            ScriptTask::damage(&mut self.damage,
-               root_frame.document.root,
-               MatchSelectorsDocumentDamage)
+                              root_frame.document.root,
+                              MatchSelectorsDocumentDamage)
        }

        self.reflow(goal)
@@ -539,13 +560,14 @@ impl ScriptTask {

        for self.root_frame.iter().advance |root_frame| {
            ScriptTask::damage(&mut self.damage,
-               root_frame.document.root,
-               ReflowDocumentDamage);
+                              root_frame.document.root,
+                              ReflowDocumentDamage);
        }

        if self.root_frame.is_some() {
            self.reflow(ReflowForDisplay)
        }
+       self.constellation_chan.send(ResizedWindowBroadcast(self.window_size));
    }

    // FIXME(pcwalton): This reflows the entire document and is not incremental-y.
@@ -554,8 +576,8 @@ impl ScriptTask {

        for self.root_frame.iter().advance |root_frame| {
            ScriptTask::damage(&mut self.damage,
-               root_frame.document.root,
-               MatchSelectorsDocumentDamage);
+                              root_frame.document.root,
+                              MatchSelectorsDocumentDamage);
        }

        if self.root_frame.is_some() {
@@ -45,10 +45,11 @@ pub enum ProfilerCategory {
 }
+// FIXME(#5873) this should be initialized by a NUM_BUCKETS cast,
+static BUCKETS: uint = 13;
+type ProfilerBuckets = [(ProfilerCategory, ~[float]), ..BUCKETS];

 pub enum ProfilerMsg {
    // Normal message used for reporting time
-   TimeMsg(ProfilerCategory, f64),
+   TimeMsg(ProfilerCategory, float),
    // Message used to force print the profiling metrics
    PrintMsg,
 }
@@ -56,7 +57,7 @@ pub enum ProfilerMsg {
 // back end of the profiler that handles data aggregation and performance metrics
 pub struct Profiler {
    port: Port<ProfilerMsg>,
-   buckets: ~[(ProfilerCategory, ~[f64])],
+   buckets: ProfilerBuckets,
    last_msg: Option<ProfilerMsg>,
 }
@@ -67,29 +68,30 @@ impl ProfilerCategory {
    }

-   // enumeration of all ProfilerCategory types
-   // FIXME(tkuehn): this is ugly and error-prone,
-   // but currently we lack better alternatives without an enum enumeration
-   priv fn empty_buckets() -> ~[(ProfilerCategory, ~[f64])] {
-       let mut vec = ~[];
-       vec.push((CompositingCategory, ~[]));
-       vec.push((LayoutQueryCategory, ~[]));
-       vec.push((LayoutPerformCategory, ~[]));
-       vec.push((LayoutAuxInitCategory, ~[]));
-       vec.push((LayoutSelectorMatchCategory, ~[]));
-       vec.push((LayoutTreeBuilderCategory, ~[]));
-       vec.push((LayoutMainCategory, ~[]));
-       vec.push((LayoutShapingCategory, ~[]));
-       vec.push((LayoutDispListBuildCategory, ~[]));
-       vec.push((GfxRegenAvailableFontsCategory, ~[]));
-       vec.push((RenderingDrawingCategory, ~[]));
-       vec.push((RenderingPrepBuffCategory, ~[]));
-       vec.push((RenderingCategory, ~[]));
+   // TODO(tkuehn): is there a better way to ensure proper order of categories?
+   priv fn empty_buckets() -> ProfilerBuckets {
+       let buckets = [
+           (CompositingCategory, ~[]),
+           (LayoutQueryCategory, ~[]),
+           (LayoutPerformCategory, ~[]),
+           (LayoutAuxInitCategory, ~[]),
+           (LayoutSelectorMatchCategory, ~[]),
+           (LayoutTreeBuilderCategory, ~[]),
+           (LayoutMainCategory, ~[]),
+           (LayoutShapingCategory, ~[]),
+           (LayoutDispListBuildCategory, ~[]),
+           (GfxRegenAvailableFontsCategory, ~[]),
+           (RenderingDrawingCategory, ~[]),
+           (RenderingPrepBuffCategory, ~[]),
+           (RenderingCategory, ~[]),
+       ];

-       ProfilerCategory::check_order(vec);
-       vec
+       ProfilerCategory::check_order(&buckets);
+       buckets
    }

-   priv fn check_order(vec: &[(ProfilerCategory, ~[f64])]) {
+   // ensure that the order of the buckets matches the order of the enum categories
+   priv fn check_order(vec: &ProfilerBuckets) {
        for vec.iter().advance |&(category, _)| {
            if category != vec[category as uint].first() {
                fail!("Enum category does not match bucket index. This is a bug.");
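The rewritten empty_buckets builds a fixed-size array whose index is expected to match each category's enum discriminant, and check_order fails fast if the two ever drift apart. A minimal modern-Rust sketch of the same invariant check, with a shortened category list (names and discriminant values are illustrative):

    #[derive(Clone, Copy, Debug, PartialEq)]
    enum ProfilerCategory {
        Compositing = 0,
        LayoutQuery = 1,
        LayoutPerform = 2,
    }

    const BUCKETS: usize = 3;
    type ProfilerBuckets = [(ProfilerCategory, Vec<f64>); BUCKETS];

    fn empty_buckets() -> ProfilerBuckets {
        let buckets = [
            (ProfilerCategory::Compositing, Vec::new()),
            (ProfilerCategory::LayoutQuery, Vec::new()),
            (ProfilerCategory::LayoutPerform, Vec::new()),
        ];
        check_order(&buckets);
        buckets
    }

    // Ensure the order of the buckets matches the order of the enum categories.
    fn check_order(buckets: &ProfilerBuckets) {
        for &(category, _) in buckets.iter() {
            assert_eq!(
                category, buckets[category as usize].0,
                "enum category does not match bucket index"
            );
        }
    }

    fn main() {
        let buckets = empty_buckets();
        assert_eq!(buckets.len(), BUCKETS);
    }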
@@ -136,13 +138,11 @@ impl Profiler {
    priv fn handle_msg(&mut self, msg: ProfilerMsg) {
        match msg {
            TimeMsg(category, t) => match self.buckets[category as uint] {
-               // FIXME(#3874): this should be a let (cat, ref mut bucket) = ...,
-               // not a match
-               (_, ref mut data) => {
-                   data.push(t);
-               }
+               //TODO(tkuehn): would be nice to have tuple.second_mut()
+               (_, ref mut data) => data.push(t),
            },
            PrintMsg => match self.last_msg {
                // only print if more data has arrived since the last printout
                Some(TimeMsg(*)) => self.print_buckets(),
                _ => {}
            },
@@ -155,20 +155,19 @@ impl Profiler {
                     "_category_", "_mean (ms)_", "_median (ms)_",
                     "_min (ms)_", "_max (ms)_", "_bucket size_"));
        for self.buckets.mut_iter().advance |bucket| {
-           match *bucket {
-               (category, ref mut data) => {
-                   tim_sort(*data);
-                   let data_len = data.len();
-                   if data_len > 0 {
-                       let (mean, median, min, max) =
-                           (data.iter().fold(0f64, |a, b| a + *b) / (data_len as f64),
-                            data[data_len / 2],
-                            data.iter().min(),
-                            data.iter().max());
-                       println(fmt!("%-30s: %15.4? %15.4? %15.4? %15.4? %15u",
-                                    category.format(), mean, median, min, max, data_len));
-                   }
-               }
+           let (category, data) = match *bucket {
+               (category, ref mut data) => (category, data),
+           };
+           tim_sort(*data);
+           let data_len = data.len();
+           if data_len > 0 {
+               let (mean, median, &min, &max) =
+                   (data.iter().fold(0f, |a, b| a + *b) / (data_len as float),
+                    data[data_len / 2],
+                    data.iter().min().unwrap(),
+                    data.iter().max().unwrap());
+               println(fmt!("%-30s: %15.4f %15.4f %15.4f %15.4f %15u",
+                            category.format(), mean, median, min, max, data_len));
            }
        }
        println("");
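After the restructuring, each bucket's samples are sorted and then reduced to mean, median, min, and max before printing. A standalone modern-Rust sketch of that reduction (summarize is a hypothetical helper, not the Servo function):

    // Returns (mean, median, min, max) for a non-empty sample set, in milliseconds.
    fn summarize(samples: &mut Vec<f64>) -> Option<(f64, f64, f64, f64)> {
        if samples.is_empty() {
            return None;
        }
        samples.sort_by(|a, b| a.partial_cmp(b).unwrap());
        let len = samples.len();
        let mean = samples.iter().sum::<f64>() / len as f64;
        let median = samples[len / 2];
        let min = samples[0];
        let max = samples[len - 1];
        Some((mean, median, min, max))
    }

    fn main() {
        let mut data = vec![4.0, 1.0, 3.0, 2.0];
        let (mean, median, min, max) = summarize(&mut data).unwrap();
        println!("mean {mean:.4} median {median:.4} min {min:.4} max {max:.4}");
    }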
@@ -183,7 +182,7 @@ pub fn profile<T>(category: ProfilerCategory,
    let start_time = precise_time_ns();
    let val = callback();
    let end_time = precise_time_ns();
-   let ms = ((end_time - start_time) as f64 / 1000000f64);
+   let ms = ((end_time - start_time) as float / 1000000f);
    profiler_chan.send(TimeMsg(category, ms));
    return val;
 }
@@ -192,8 +191,8 @@ pub fn time<T>(msg: &str, callback: &fn() -> T) -> T{
    let start_time = precise_time_ns();
    let val = callback();
    let end_time = precise_time_ns();
-   let ms = ((end_time - start_time) as f64 / 1000000f64);
-   if ms >= 5f64 {
+   let ms = ((end_time - start_time) as float / 1000000f);
+   if ms >= 5f {
        debug!("%s took %? ms", msg, ms);
    }
    return val;
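Both profile and time wrap a closure, convert the elapsed nanoseconds to milliseconds, and report the result; time only logs calls that took at least 5 ms. The same wrapper pattern in modern Rust, using std::time::Instant instead of precise_time_ns (a sketch; the 5 ms threshold is taken from the diff above):

    use std::time::Instant;

    // Runs `callback`, returns its value, and logs the elapsed time if it was slow.
    fn time<T>(msg: &str, callback: impl FnOnce() -> T) -> T {
        let start = Instant::now();
        let val = callback();
        let ms = start.elapsed().as_secs_f64() * 1000.0;
        if ms >= 5.0 {
            eprintln!("{msg} took {ms:.3} ms");
        }
        val
    }

    fn main() {
        let sum = time("summing", || (0u64..5_000_000).sum::<u64>());
        println!("sum = {sum}");
    }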