Auto merge of #7054 - Manishearth:rollup, r=Manishearth

Rollup of 3 pull requests

- Successful merges: #7028, #7034, #7039
- Failed merges:

bors-servo 2015-08-06 17:43:09 -06:00
commit 76b4bae6ee
14 changed files with 123 additions and 85 deletions


@@ -74,7 +74,6 @@ features = [ "serde_serialization" ]
 log = "0.3"
 num = "0.1.24"
 time = "0.1.17"
-libc = "0.1"
 gleam = "0.1"
 euclid = "0.1"


@@ -24,7 +24,6 @@ use euclid::scale_factor::ScaleFactor;
 use gfx::font_cache_task::FontCacheTask;
 use ipc_channel::ipc::{self, IpcSender};
 use layout_traits::{LayoutControlChan, LayoutTaskFactory};
-use libc;
 use msg::compositor_msg::{Epoch, LayerId};
 use msg::constellation_msg::AnimationState;
 use msg::constellation_msg::Msg as ConstellationMsg;
@@ -48,6 +47,7 @@ use std::collections::HashMap;
 use std::io::{self, Write};
 use std::marker::PhantomData;
 use std::mem::replace;
+use std::process;
 use std::sync::mpsc::{Receiver, Sender, channel};
 use style::viewport::ViewportConstraints;
 use url::Url;
@@ -547,7 +547,7 @@ impl<LTF: LayoutTaskFactory, STF: ScriptTaskFactory> Constellation<LTF, STF> {
             // Hard fail exists for test runners so we crash and that's good enough.
             let mut stderr = io::stderr();
             stderr.write_all("Pipeline failed in hard-fail mode. Crashing!\n".as_bytes()).unwrap();
-            unsafe { libc::exit(1); }
+            process::exit(1);
         }
         self.close_pipeline(pipeline_id, ExitPipelineMode::Force);
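The only behavioral change here is swapping `unsafe { libc::exit(1); }` for `std::process::exit(1)`, a safe standard-library wrapper that makes the `libc` dependency unnecessary in this crate. A minimal standalone sketch of the hard-fail path (the free function name is illustrative, not Servo's):

```rust
use std::io::{self, Write};
use std::process;

// Hypothetical free function mirroring the hard-fail branch above.
fn hard_fail() -> ! {
    let mut stderr = io::stderr();
    stderr.write_all(b"Pipeline failed in hard-fail mode. Crashing!\n").unwrap();
    // process::exit is a safe std wrapper around the platform exit call,
    // so no unsafe block (and no libc crate) is needed.
    process::exit(1)
}

fn main() {
    hard_fail();
}
```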


@@ -35,7 +35,6 @@ extern crate util;
 extern crate gleam;
 extern crate clipboard;
-extern crate libc;
 extern crate time;
 extern crate url;


@@ -16,8 +16,7 @@ use euclid::Matrix4;
 use euclid::point::Point2D;
 use euclid::rect::Rect;
 use euclid::size::Size2D;
-use ipc_channel::ipc::{self, IpcSender};
-use ipc_channel::router::ROUTER;
+use ipc_channel::ipc::IpcSender;
 use layers::platform::surface::{NativeDisplay, NativeSurface};
 use layers::layers::{BufferRequest, LayerBuffer, LayerBufferSet};
 use msg::compositor_msg::{Epoch, FrameTreeId, LayerId, LayerKind};
@@ -25,7 +24,7 @@ use msg::compositor_msg::{LayerProperties, PaintListener, ScrollPolicy};
 use msg::constellation_msg::Msg as ConstellationMsg;
 use msg::constellation_msg::{ConstellationChan, Failure, PipelineId};
 use msg::constellation_msg::PipelineExitType;
-use profile_traits::mem::{self, Reporter, ReporterRequest, ReportsChan};
+use profile_traits::mem::{self, ReportsChan};
 use profile_traits::time::{self, profile};
 use rand::{self, Rng};
 use skia::gl_context::GLContext;
@@ -167,25 +166,10 @@ impl<C> PaintTask<C> where C: PaintListener + Send + 'static {
                 canvas_map: HashMap::new()
             };

             // Register the memory reporter.
             let reporter_name = format!("paint-reporter-{}", id.0);
-            let (reporter_sender, reporter_receiver) =
-                ipc::channel::<ReporterRequest>().unwrap();
-            let paint_chan_for_reporter = chrome_to_paint_chan.clone();
-            ROUTER.add_route(reporter_receiver.to_opaque(), box move |message| {
-                // Just injects an appropriate event into the paint task's queue.
-                let request: ReporterRequest = message.to().unwrap();
-                paint_chan_for_reporter.send(ChromeToPaintMsg::CollectReports(
-                    request.reports_channel)).unwrap();
-            });
-            mem_profiler_chan.send(mem::ProfilerMsg::RegisterReporter(
-                reporter_name.clone(),
-                Reporter(reporter_sender)));
-            paint_task.start();
-            let msg = mem::ProfilerMsg::UnregisterReporter(reporter_name);
-            mem_profiler_chan.send(msg);
+            mem_profiler_chan.run_with_memory_reporting(|| {
+                paint_task.start();
+            }, reporter_name, chrome_to_paint_chan, ChromeToPaintMsg::CollectReports);

             // Tell all the worker threads to shut down.
             for worker_thread in paint_task.worker_threads.iter_mut() {

@@ -47,7 +47,7 @@ use log;
 use msg::compositor_msg::{Epoch, ScrollPolicy, LayerId};
 use msg::constellation_msg::Msg as ConstellationMsg;
 use msg::constellation_msg::{ConstellationChan, Failure, PipelineExitType, PipelineId};
-use profile_traits::mem::{self, Report, Reporter, ReporterRequest, ReportKind, ReportsChan};
+use profile_traits::mem::{self, Report, ReportKind, ReportsChan};
 use profile_traits::time::{self, ProfilerMetadata, profile};
 use profile_traits::time::{TimerMetadataFrameType, TimerMetadataReflowType};
 use net_traits::{load_bytes_iter, PendingAsyncLoad};
@@ -257,25 +257,10 @@ impl LayoutTaskFactory for LayoutTask {
                                           time_profiler_chan,
                                           mem_profiler_chan.clone());

             // Create a memory reporter thread.
             let reporter_name = format!("layout-reporter-{}", id.0);
-            let (reporter_sender, reporter_receiver) =
-                ipc::channel::<ReporterRequest>().unwrap();
-            let layout_chan_for_reporter = layout_chan.clone();
-            ROUTER.add_route(reporter_receiver.to_opaque(), box move |message| {
-                // Just injects an appropriate event into the layout task's queue.
-                let request: ReporterRequest = message.to().unwrap();
-                layout_chan_for_reporter.0.send(Msg::CollectReports(request.reports_channel))
-                    .unwrap();
-            });
-            mem_profiler_chan.send(mem::ProfilerMsg::RegisterReporter(
-                reporter_name.clone(),
-                Reporter(reporter_sender)));
-            layout.start();
-            let msg = mem::ProfilerMsg::UnregisterReporter(reporter_name);
-            mem_profiler_chan.send(msg);
+            mem_profiler_chan.run_with_memory_reporting(|| {
+                layout.start();
+            }, reporter_name, layout_chan.0, Msg::CollectReports);
         }
         shutdown_chan.send(()).unwrap();
     }, ConstellationMsg::Failure(failure_msg), con_chan);


@@ -6,6 +6,7 @@
 //! rest of Servo. These APIs are here instead of in `profile` so that these
 //! modules won't have to depend on `profile`.

+#![feature(box_syntax)]
 #![feature(custom_derive, plugin)]
 #![plugin(serde_macros)]


@@ -6,7 +6,23 @@
 #![deny(missing_docs)]

-use ipc_channel::ipc::IpcSender;
+use ipc_channel::ipc::{self, IpcSender};
+use ipc_channel::router::ROUTER;
+use std::sync::mpsc::Sender;
+use std::marker::Send;

+/// A trait to abstract away the various kinds of message senders we use.
+pub trait OpaqueSender<T> {
+    /// Send a message.
+    fn send(&self, message: T);
+}
+
+impl<T> OpaqueSender<T> for Sender<T> {
+    fn send(&self, message: T) {
+        Sender::send(self, message).unwrap();
+    }
+}

 /// Front-end representation of the profiler used to communicate with the
 /// profiler.
@@ -21,6 +37,31 @@ impl ProfilerChan {
         let ProfilerChan(ref c) = *self;
         c.send(msg).unwrap();
     }
+
+    /// Runs `f()` with memory profiling.
+    pub fn run_with_memory_reporting<F, M, T, C>(&self, f: F,
+                                                 reporter_name: String,
+                                                 channel_for_reporter: C,
+                                                 msg: M)
+        where F: FnOnce(),
+              M: Fn(ReportsChan) -> T + Send + 'static,
+              T: Send + 'static,
+              C: OpaqueSender<T> + Send + 'static
+    {
+        // Register the memory reporter.
+        let (reporter_sender, reporter_receiver) = ipc::channel().unwrap();
+        ROUTER.add_route(reporter_receiver.to_opaque(), box move |message| {
+            // Just injects an appropriate event into the paint task's queue.
+            let request: ReporterRequest = message.to().unwrap();
+            channel_for_reporter.send(msg(request.reports_channel));
+        });
+        self.send(ProfilerMsg::RegisterReporter(reporter_name.clone(),
+                                                Reporter(reporter_sender)));
+        f();
+        self.send(ProfilerMsg::UnregisterReporter(reporter_name));
+    }
 }

 /// The various kinds of memory measurement.
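This new helper centralizes a pattern previously copy-pasted across the paint, layout, script, and worker tasks: register a memory reporter, run the task body, then unregister. A minimal standalone sketch of that register/run/unregister lifecycle, using `std::sync::mpsc` and stand-in types rather than Servo's ipc-channel and `Reporter`:

```rust
use std::sync::mpsc::{channel, Sender};

// Stand-ins for profile_traits::mem types; names are illustrative only.
enum ProfilerMsg {
    RegisterReporter(String),
    UnregisterReporter(String),
}

struct ProfilerChan(Sender<ProfilerMsg>);

impl ProfilerChan {
    fn send(&self, msg: ProfilerMsg) {
        self.0.send(msg).unwrap();
    }

    // The reporter is registered exactly for the duration of `f`, so callers
    // cannot forget to unregister on the way out.
    fn run_with_memory_reporting<F: FnOnce()>(&self, f: F, reporter_name: String) {
        self.send(ProfilerMsg::RegisterReporter(reporter_name.clone()));
        f();
        self.send(ProfilerMsg::UnregisterReporter(reporter_name));
    }
}

fn main() {
    let (tx, rx) = channel();
    let chan = ProfilerChan(tx);
    chan.run_with_memory_reporting(|| println!("task body runs here"),
                                   "demo-reporter".to_owned());

    // The profiler end observes Register, then Unregister, bracketing the body.
    while let Ok(msg) = rx.try_recv() {
        match msg {
            ProfilerMsg::RegisterReporter(name) => println!("registered {}", name),
            ProfilerMsg::UnregisterReporter(name) => println!("unregistered {}", name),
        }
    }
}
```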


@@ -28,12 +28,11 @@ use script_task::StackRootTLS;
 use devtools_traits::DevtoolScriptControlMsg;
 use msg::constellation_msg::PipelineId;
 use net_traits::load_whole_resource;
-use profile_traits::mem::{self, Reporter, ReporterRequest};
 use util::task::spawn_named;
 use util::task_state;
 use util::task_state::{SCRIPT, IN_WORKER};
-use ipc_channel::ipc::{self, IpcReceiver};
+use ipc_channel::ipc::IpcReceiver;
 use ipc_channel::router::ROUTER;
 use js::jsapi::{JSContext, RootedValue, HandleValue};
 use js::jsapi::{JSAutoRequest, JSAutoCompartment};
@@ -192,26 +191,12 @@ impl DedicatedWorkerGlobalScope {
                 scope.execute_script(source);
             }

             // Register this task as a memory reporter.
             let reporter_name = format!("worker-reporter-{}", random::<u64>());
-            let (reporter_sender, reporter_receiver) = ipc::channel().unwrap();
-            ROUTER.add_route(reporter_receiver.to_opaque(), box move |reporter_request| {
-                // Just injects an appropriate event into the worker task's queue.
-                let reporter_request: ReporterRequest = reporter_request.to().unwrap();
-                parent_sender.send(ScriptMsg::CollectReports(
-                    reporter_request.reports_channel)).unwrap()
-            });
-            scope.mem_profiler_chan().send(mem::ProfilerMsg::RegisterReporter(
-                reporter_name.clone(),
-                Reporter(reporter_sender)));
-            while let Ok(event) = global.receive_event() {
-                global.handle_event(event);
-            }
-            // Unregister this task as a memory reporter.
-            let msg = mem::ProfilerMsg::UnregisterReporter(reporter_name);
-            scope.mem_profiler_chan().send(msg);
+            scope.mem_profiler_chan().run_with_memory_reporting(|| {
+                while let Ok(event) = global.receive_event() {
+                    global.handle_event(event);
+                }
+            }, reporter_name, parent_sender, ScriptMsg::CollectReports);
         });
     }
 }


@@ -73,7 +73,7 @@ use net_traits::LoadData as NetLoadData;
 use net_traits::{AsyncResponseTarget, ResourceTask, LoadConsumer, ControlMsg, Metadata};
 use net_traits::image_cache_task::{ImageCacheChan, ImageCacheTask, ImageCacheResult};
 use net_traits::storage_task::StorageTask;
-use profile_traits::mem::{self, Report, Reporter, ReporterRequest, ReportKind, ReportsChan};
+use profile_traits::mem::{self, Report, ReportKind, ReportsChan, OpaqueSender};
 use string_cache::Atom;
 use util::str::DOMString;
 use util::task::spawn_named_with_send_on_failure;
@@ -216,6 +216,12 @@ pub trait ScriptChan {
     fn clone(&self) -> Box<ScriptChan+Send>;
 }

+impl OpaqueSender<ScriptMsg> for Box<ScriptChan+Send> {
+    fn send(&self, msg: ScriptMsg) {
+        ScriptChan::send(&**self, msg).unwrap();
+    }
+}

 /// An interface for receiving ScriptMsg values in an event loop. Used for synchronous DOM
 /// APIs that need to abstract over multiple kinds of event loops (worker/main thread) with
 /// different Receiver interfaces.
@@ -437,24 +443,10 @@ impl ScriptTaskFactory for ScriptTask {
                                        load_data.url.clone());
         script_task.start_page_load(new_load, load_data);

         // Register this task as a memory reporter.
         let reporter_name = format!("script-reporter-{}", id.0);
-        let (reporter_sender, reporter_receiver) = ipc::channel().unwrap();
-        ROUTER.add_route(reporter_receiver.to_opaque(), box move |reporter_request| {
-            // Just injects an appropriate event into the worker task's queue.
-            let reporter_request: ReporterRequest = reporter_request.to().unwrap();
-            channel_for_reporter.send(ScriptMsg::CollectReports(
-                reporter_request.reports_channel)).unwrap()
-        });
-        let reporter = Reporter(reporter_sender);
-        let msg = mem::ProfilerMsg::RegisterReporter(reporter_name.clone(), reporter);
-        mem_profiler_chan.send(msg);
-        script_task.start();
-        // Unregister this task as a memory reporter.
-        let msg = mem::ProfilerMsg::UnregisterReporter(reporter_name);
-        mem_profiler_chan.send(msg);
+        mem_profiler_chan.run_with_memory_reporting(|| {
+            script_task.start();
+        }, reporter_name, channel_for_reporter, ScriptMsg::CollectReports);

         // This must always be the very last operation performed before the task completes
         failsafe.neuter();

@@ -178,7 +178,6 @@ dependencies = [
 "ipc-channel 0.1.0 (git+https://github.com/pcwalton/ipc-channel)",
 "layers 0.1.0 (git+https://github.com/servo/rust-layers)",
 "layout_traits 0.0.1",
-"libc 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)",
 "log 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "msg 0.0.1",
 "net 0.0.1",