Auto merge of #24758 - gterzian:impl_timer_task_source, r=nox

Implement timer-task-source, time-out service worker

<!-- Please describe your changes on the following line: -->

Implements the timer task-source, and folds the IPC glue code into a single route set up by the globalscope.

Also switches the service worker to a dedicated "time-out" mechanism; previously it relied on the timer mechanism (and, I think, didn't actually implement script timers).
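For illustration only (not part of this commit's diff): a minimal, standalone sketch of the "time-out" idea, assuming the `crossbeam-channel` crate. `after` gives a receiver that yields a single message once the deadline passes, so the event loop can poll it each turn instead of spawning a dedicated sleeping thread.

```rust
use crossbeam_channel::after;
use std::time::Duration;

fn main() {
    // One-shot receiver that becomes ready once the deadline has passed.
    let time_out_port = after(Duration::from_millis(250));
    loop {
        // ... one turn of the event loop would run here ...
        if time_out_port.try_recv().is_ok() {
            println!("ran beyond the allocated time, shutting down");
            break;
        }
        std::thread::sleep(Duration::from_millis(50));
    }
}
```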

---
<!-- Thank you for contributing to Servo! Please replace each `[ ]` by `[X]` when the step is complete, and replace `___` with appropriate data: -->
- [ ] `./mach build -d` does not report any errors
- [ ] `./mach test-tidy` does not report any errors
- [ ] These changes fix #24747 (GitHub issue number if applicable)

<!-- Either: -->
- [ ] There are tests for these changes OR
- [ ] These changes do not require tests because ___

<!-- Also, please make sure that "Allow edits from maintainers" checkbox is checked, so that we can help you if you get stuck somewhere along the way.-->

<!-- Pull requests that do not address these steps are welcome, but they will require additional verification as part of the review process. -->
bors-servo committed a922c497fa via GitHub on 2019-11-26 16:26:20 -05:00
13 changed files with 241 additions and 204 deletions


@ -81,32 +81,27 @@ impl ScriptPort for Receiver<DedicatedWorkerScriptMsg> {
}
pub trait WorkerEventLoopMethods {
type TimerMsg: Send;
type WorkerMsg: QueuedTaskConversion + Send;
type Event;
fn timer_event_port(&self) -> &Receiver<Self::TimerMsg>;
fn task_queue(&self) -> &TaskQueue<Self::WorkerMsg>;
fn handle_event(&self, event: Self::Event);
fn handle_worker_post_event(&self, worker: &TrustedWorkerAddress) -> Option<AutoWorkerReset>;
fn from_worker_msg(&self, msg: Self::WorkerMsg) -> Self::Event;
fn from_timer_msg(&self, msg: Self::TimerMsg) -> Self::Event;
fn from_devtools_msg(&self, msg: DevtoolScriptControlMsg) -> Self::Event;
}
// https://html.spec.whatwg.org/multipage/#worker-event-loop
pub fn run_worker_event_loop<T, TimerMsg, WorkerMsg, Event>(
pub fn run_worker_event_loop<T, WorkerMsg, Event>(
worker_scope: &T,
worker: Option<&TrustedWorkerAddress>,
) where
TimerMsg: Send,
WorkerMsg: QueuedTaskConversion + Send,
T: WorkerEventLoopMethods<TimerMsg = TimerMsg, WorkerMsg = WorkerMsg, Event = Event>
T: WorkerEventLoopMethods<WorkerMsg = WorkerMsg, Event = Event>
+ DerivedFrom<WorkerGlobalScope>
+ DerivedFrom<GlobalScope>
+ DomObject,
{
let scope = worker_scope.upcast::<WorkerGlobalScope>();
let timer_event_port = worker_scope.timer_event_port();
let devtools_port = match scope.from_devtools_sender() {
Some(_) => Some(scope.from_devtools_receiver()),
None => None,
@ -117,7 +112,6 @@ pub fn run_worker_event_loop<T, TimerMsg, WorkerMsg, Event>(
task_queue.take_tasks(msg.unwrap());
worker_scope.from_worker_msg(task_queue.recv().unwrap())
},
recv(timer_event_port) -> msg => worker_scope.from_timer_msg(msg.unwrap()),
recv(devtools_port.unwrap_or(&crossbeam_channel::never())) -> msg =>
worker_scope.from_devtools_msg(msg.unwrap()),
};
@ -132,13 +126,10 @@ pub fn run_worker_event_loop<T, TimerMsg, WorkerMsg, Event>(
// Batch all events that are ready.
// The task queue will throttle non-priority tasks if necessary.
match task_queue.try_recv() {
Err(_) => match timer_event_port.try_recv() {
Err(_) => match devtools_port.map(|port| port.try_recv()) {
None => {},
Some(Err(_)) => break,
Some(Ok(ev)) => sequential.push(worker_scope.from_devtools_msg(ev)),
},
Ok(ev) => sequential.push(worker_scope.from_timer_msg(ev)),
Err(_) => match devtools_port.map(|port| port.try_recv()) {
None => {},
Some(Err(_)) => break,
Some(Ok(ev)) => sequential.push(worker_scope.from_devtools_msg(ev)),
},
Ok(ev) => sequential.push(worker_scope.from_worker_msg(ev)),
}


@ -37,7 +37,7 @@ use crate::task_source::TaskSourceName;
use crossbeam_channel::{unbounded, Receiver, Sender};
use devtools_traits::DevtoolScriptControlMsg;
use dom_struct::dom_struct;
use ipc_channel::ipc::{self, IpcReceiver, IpcSender};
use ipc_channel::ipc::IpcReceiver;
use ipc_channel::router::ROUTER;
use js::jsapi::JS_AddInterruptCallback;
use js::jsapi::{Heap, JSContext, JSObject};
@ -48,7 +48,7 @@ use net_traits::image_cache::ImageCache;
use net_traits::request::{CredentialsMode, Destination, ParserMetadata};
use net_traits::request::{Referrer, RequestBuilder, RequestMode};
use net_traits::IpcSend;
use script_traits::{TimerEvent, TimerSource, WorkerGlobalScopeInit, WorkerScriptLoadOrigin};
use script_traits::{WorkerGlobalScopeInit, WorkerScriptLoadOrigin};
use servo_rand::random;
use servo_url::ServoUrl;
use std::mem::replace;
@ -93,7 +93,6 @@ pub enum DedicatedWorkerScriptMsg {
pub enum MixedMessage {
FromWorker(DedicatedWorkerScriptMsg),
FromScheduler((TrustedWorkerAddress, TimerEvent)),
FromDevtools(DevtoolScriptControlMsg),
}
@ -174,8 +173,6 @@ pub struct DedicatedWorkerGlobalScope {
task_queue: TaskQueue<DedicatedWorkerScriptMsg>,
#[ignore_malloc_size_of = "Defined in std"]
own_sender: Sender<DedicatedWorkerScriptMsg>,
#[ignore_malloc_size_of = "Defined in std"]
timer_event_port: Receiver<(TrustedWorkerAddress, TimerEvent)>,
#[ignore_malloc_size_of = "Trusted<T> has unclear ownership like Dom<T>"]
worker: DomRefCell<Option<TrustedWorkerAddress>>,
#[ignore_malloc_size_of = "Can't measure trait objects"]
@ -186,14 +183,9 @@ pub struct DedicatedWorkerGlobalScope {
}
impl WorkerEventLoopMethods for DedicatedWorkerGlobalScope {
type TimerMsg = (TrustedWorkerAddress, TimerEvent);
type WorkerMsg = DedicatedWorkerScriptMsg;
type Event = MixedMessage;
fn timer_event_port(&self) -> &Receiver<(TrustedWorkerAddress, TimerEvent)> {
&self.timer_event_port
}
fn task_queue(&self) -> &TaskQueue<DedicatedWorkerScriptMsg> {
&self.task_queue
}
@ -211,10 +203,6 @@ impl WorkerEventLoopMethods for DedicatedWorkerGlobalScope {
MixedMessage::FromWorker(msg)
}
fn from_timer_msg(&self, msg: (TrustedWorkerAddress, TimerEvent)) -> MixedMessage {
MixedMessage::FromScheduler(msg)
}
fn from_devtools_msg(&self, msg: DevtoolScriptControlMsg) -> MixedMessage {
MixedMessage::FromDevtools(msg)
}
@ -231,8 +219,6 @@ impl DedicatedWorkerGlobalScope {
parent_sender: Box<dyn ScriptChan + Send>,
own_sender: Sender<DedicatedWorkerScriptMsg>,
receiver: Receiver<DedicatedWorkerScriptMsg>,
timer_event_chan: IpcSender<TimerEvent>,
timer_event_port: Receiver<(TrustedWorkerAddress, TimerEvent)>,
closing: Arc<AtomicBool>,
image_cache: Arc<dyn ImageCache>,
) -> DedicatedWorkerGlobalScope {
@ -244,12 +230,10 @@ impl DedicatedWorkerGlobalScope {
worker_url,
runtime,
from_devtools_receiver,
timer_event_chan,
Some(closing),
),
task_queue: TaskQueue::new(receiver, own_sender.clone()),
own_sender: own_sender,
timer_event_port: timer_event_port,
parent_sender: parent_sender,
worker: DomRefCell::new(None),
image_cache: image_cache,
@ -267,8 +251,6 @@ impl DedicatedWorkerGlobalScope {
parent_sender: Box<dyn ScriptChan + Send>,
own_sender: Sender<DedicatedWorkerScriptMsg>,
receiver: Receiver<DedicatedWorkerScriptMsg>,
timer_event_chan: IpcSender<TimerEvent>,
timer_event_port: Receiver<(TrustedWorkerAddress, TimerEvent)>,
closing: Arc<AtomicBool>,
image_cache: Arc<dyn ImageCache>,
) -> DomRoot<DedicatedWorkerGlobalScope> {
@ -283,8 +265,6 @@ impl DedicatedWorkerGlobalScope {
parent_sender,
own_sender,
receiver,
timer_event_chan,
timer_event_port,
closing,
image_cache,
));
@ -366,17 +346,6 @@ impl DedicatedWorkerGlobalScope {
devtools_mpsc_chan,
);
let (timer_tx, timer_rx) = unbounded();
let (timer_ipc_chan, timer_ipc_port) = ipc::channel().unwrap();
let worker_for_route = worker.clone();
ROUTER.add_route(
timer_ipc_port.to_opaque(),
Box::new(move |message| {
let event = message.to().unwrap();
timer_tx.send((worker_for_route.clone(), event)).unwrap();
}),
);
let global = DedicatedWorkerGlobalScope::new(
init,
DOMString::from_string(worker_name),
@ -387,8 +356,6 @@ impl DedicatedWorkerGlobalScope {
parent_sender.clone(),
own_sender,
receiver,
timer_ipc_chan,
timer_rx,
closing,
image_cache,
);
@ -517,14 +484,6 @@ impl DedicatedWorkerGlobalScope {
},
_ => debug!("got an unusable devtools control message inside the worker!"),
},
MixedMessage::FromScheduler((linked_worker, timer_event)) => match timer_event {
TimerEvent(TimerSource::FromWorker, id) => {
let _ar = AutoWorkerReset::new(self, linked_worker);
let scope = self.upcast::<WorkerGlobalScope>();
scope.handle_fire_timer(id);
},
TimerEvent(_, _) => panic!("A worker received a TimerEvent from a window."),
},
MixedMessage::FromWorker(DedicatedWorkerScriptMsg::CommonWorker(
linked_worker,
msg,


@ -15,7 +15,6 @@ use crate::dom::globalscope::GlobalScope;
use crate::dom::windowproxy::WindowProxy;
use crate::script_runtime::JSContext;
use dom_struct::dom_struct;
use ipc_channel::ipc;
use js::jsapi::{Heap, JSObject};
use js::jsval::{JSVal, UndefinedValue};
use js::rust::{CustomAutoRooter, CustomAutoRooterGuard, HandleValue};
@ -48,8 +47,6 @@ impl DissimilarOriginWindow {
#[allow(unsafe_code)]
pub fn new(global_to_clone_from: &GlobalScope, window_proxy: &WindowProxy) -> DomRoot<Self> {
let cx = global_to_clone_from.get_cx();
// Any timer events fired on this window are ignored.
let (timer_event_chan, _) = ipc::channel().unwrap();
let win = Box::new(Self {
globalscope: GlobalScope::new_inherited(
PipelineId::new(),
@ -59,7 +56,6 @@ impl DissimilarOriginWindow {
global_to_clone_from.script_to_constellation_chan().clone(),
global_to_clone_from.scheduler_chan().clone(),
global_to_clone_from.resource_threads().clone(),
timer_event_chan,
global_to_clone_from.origin().clone(),
// FIXME(nox): The microtask queue is probably not important
// here, but this whole DOM interface is a hack anyway.


@ -39,6 +39,7 @@ use crate::task_source::networking::NetworkingTaskSource;
use crate::task_source::performance_timeline::PerformanceTimelineTaskSource;
use crate::task_source::port_message::PortMessageQueue;
use crate::task_source::remote_event::RemoteEventTaskSource;
use crate::task_source::timer::TimerTaskSource;
use crate::task_source::websocket::WebsocketTaskSource;
use crate::task_source::TaskSource;
use crate::task_source::TaskSourceName;
@ -134,8 +135,13 @@ pub struct GlobalScope {
/// including resource_thread, filemanager_thread and storage_thread
resource_threads: ResourceThreads,
/// The mechanism by which time-outs and intervals are scheduled.
/// <https://html.spec.whatwg.org/multipage/#timers>
timers: OneshotTimers,
/// Have timers been initialized?
init_timers: Cell<bool>,
/// The origin of the globalscope
origin: MutableOrigin,
@ -188,6 +194,13 @@ struct MessageListener {
context: Trusted<GlobalScope>,
}
/// A wrapper between timer events coming in over IPC, and the event-loop.
struct TimerListener {
canceller: TaskCanceller,
task_source: TimerTaskSource,
context: Trusted<GlobalScope>,
}
/// Data representing a message-port managed by this global.
#[derive(JSTraceable, MallocSizeOf)]
pub enum ManagedMessagePort {
@ -212,6 +225,34 @@ pub enum MessagePortState {
UnManaged,
}
impl TimerListener {
/// Handle a timer-event coming-in over IPC,
/// by queuing the appropriate task on the relevant event-loop.
fn handle(&self, event: TimerEvent) {
let context = self.context.clone();
// Step 18, queue a task,
// https://html.spec.whatwg.org/multipage/#timer-initialisation-steps
let _ = self.task_source.queue_with_canceller(
task!(timer_event: move || {
let global = context.root();
let TimerEvent(source, id) = event;
match source {
TimerSource::FromWorker => {
global.downcast::<WorkerGlobalScope>().expect("Worker timer delivered to window");
},
TimerSource::FromWindow(pipeline) => {
assert_eq!(pipeline, global.pipeline_id());
global.downcast::<Window>().expect("Window timer delivered to worker");
},
};
// Step 7, substeps run in a task.
global.fire_timer(id);
}),
&self.canceller,
);
}
}
impl MessageListener {
/// A new message came in, handle it via a task enqueued on the event-loop.
/// A task is required, since we are using a trusted globalscope,
@ -297,7 +338,6 @@ impl GlobalScope {
script_to_constellation_chan: ScriptToConstellationChan,
scheduler_chan: IpcSender<TimerSchedulerMsg>,
resource_threads: ResourceThreads,
timer_event_chan: IpcSender<TimerEvent>,
origin: MutableOrigin,
microtask_queue: Rc<MicrotaskQueue>,
is_headless: bool,
@ -318,7 +358,8 @@ impl GlobalScope {
scheduler_chan: scheduler_chan.clone(),
in_error_reporting_mode: Default::default(),
resource_threads,
timers: OneshotTimers::new(timer_event_chan, scheduler_chan),
timers: OneshotTimers::new(scheduler_chan),
init_timers: Default::default(),
origin,
microtask_queue,
list_auto_close_worker: Default::default(),
@ -349,6 +390,36 @@ impl GlobalScope {
false
}
/// Set up the IPC-to-event-loop glue for timers to schedule themselves.
fn setup_timers(&self) {
if self.init_timers.get() {
return;
}
self.init_timers.set(true);
let (timer_ipc_chan, timer_ipc_port) = ipc::channel().unwrap();
self.timers.setup_scheduling(timer_ipc_chan);
// Set up the route from IPC to the task-queue for the timer-task-source.
let context = Trusted::new(&*self);
let (task_source, canceller) = (
self.timer_task_source(),
self.task_canceller(TaskSourceName::Timer),
);
let timer_listener = TimerListener {
context,
task_source,
canceller,
};
ROUTER.add_route(
timer_ipc_port.to_opaque(),
Box::new(move |message| {
let event = message.to().unwrap();
timer_listener.handle(event);
}),
);
}
/// Complete the transfer of a message-port.
fn complete_port_transfer(&self, port_id: MessagePortId, tasks: VecDeque<PortMessageTask>) {
let should_start = if let MessagePortState::Managed(_id, message_ports) =
@ -1063,6 +1134,18 @@ impl GlobalScope {
unreachable!();
}
/// `TaskSource` to send messages to the timer queue of
/// this global scope.
pub fn timer_task_source(&self) -> TimerTaskSource {
if let Some(window) = self.downcast::<Window>() {
return window.task_manager().timer_task_source();
}
if let Some(worker) = self.downcast::<WorkerGlobalScope>() {
return worker.timer_task_source();
}
unreachable!();
}
/// `TaskSource` to send messages to the remote-event task source of
/// this global scope.
pub fn remote_event_task_source(&self) -> RemoteEventTaskSource {
@ -1145,11 +1228,13 @@ impl GlobalScope {
)
}
/// <https://html.spec.whatwg.org/multipage/#timer-initialisation-steps>
pub fn schedule_callback(
&self,
callback: OneshotTimerCallback,
duration: MsDuration,
) -> OneshotTimerHandle {
self.setup_timers();
self.timers
.schedule_callback(callback, duration, self.timer_source())
}
@ -1158,6 +1243,7 @@ impl GlobalScope {
self.timers.unschedule_callback(handle);
}
/// <https://html.spec.whatwg.org/multipage/#timer-initialisation-steps>
pub fn set_timeout_or_interval(
&self,
callback: TimerCallback,
@ -1165,6 +1251,7 @@ impl GlobalScope {
timeout: i32,
is_interval: IsInterval,
) -> i32 {
self.setup_timers();
self.timers.set_timeout_or_interval(
self,
callback,
@ -1176,27 +1263,27 @@ impl GlobalScope {
}
pub fn clear_timeout_or_interval(&self, handle: i32) {
self.timers.clear_timeout_or_interval(self, handle)
self.timers.clear_timeout_or_interval(self, handle);
}
pub fn fire_timer(&self, handle: TimerEventId) {
self.timers.fire_timer(handle, self)
self.timers.fire_timer(handle, self);
}
pub fn resume(&self) {
self.timers.resume()
self.timers.resume();
}
pub fn suspend(&self) {
self.timers.suspend()
self.timers.suspend();
}
pub fn slow_down_timers(&self) {
self.timers.slow_down()
self.timers.slow_down();
}
pub fn speed_up_timers(&self) {
self.timers.speed_up()
self.timers.speed_up();
}
fn timer_source(&self) -> TimerSource {


@ -28,24 +28,22 @@ use crate::script_runtime::{
};
use crate::task_queue::{QueuedTask, QueuedTaskConversion, TaskQueue};
use crate::task_source::TaskSourceName;
use crossbeam_channel::{unbounded, Receiver, Sender};
use crossbeam_channel::{after, unbounded, Receiver, Sender};
use devtools_traits::DevtoolScriptControlMsg;
use dom_struct::dom_struct;
use ipc_channel::ipc::{self, IpcReceiver, IpcSender};
use ipc_channel::ipc::{IpcReceiver, IpcSender};
use ipc_channel::router::ROUTER;
use js::jsapi::{JSContext, JS_AddInterruptCallback};
use js::jsval::UndefinedValue;
use msg::constellation_msg::PipelineId;
use net_traits::request::{CredentialsMode, Destination, ParserMetadata, Referrer, RequestBuilder};
use net_traits::{CustomResponseMediator, IpcSend};
use script_traits::{
ScopeThings, ServiceWorkerMsg, TimerEvent, WorkerGlobalScopeInit, WorkerScriptLoadOrigin,
};
use script_traits::{ScopeThings, ServiceWorkerMsg, WorkerGlobalScopeInit, WorkerScriptLoadOrigin};
use servo_config::pref;
use servo_rand::random;
use servo_url::ServoUrl;
use std::thread;
use std::time::Duration;
use std::time::{Duration, Instant};
use style::thread_state::{self, ThreadState};
/// Messages used to control service worker event loop
@ -118,7 +116,6 @@ impl QueuedTaskConversion for ServiceWorkerScriptMsg {
pub enum MixedMessage {
FromServiceWorker(ServiceWorkerScriptMsg),
FromDevtools(DevtoolScriptControlMsg),
FromTimeoutThread(()),
}
#[derive(Clone, JSTraceable)]
@ -147,26 +144,30 @@ unsafe_no_jsmanaged_fields!(TaskQueue<ServiceWorkerScriptMsg>);
#[dom_struct]
pub struct ServiceWorkerGlobalScope {
workerglobalscope: WorkerGlobalScope,
#[ignore_malloc_size_of = "Defined in std"]
task_queue: TaskQueue<ServiceWorkerScriptMsg>,
#[ignore_malloc_size_of = "Defined in std"]
own_sender: Sender<ServiceWorkerScriptMsg>,
/// A port on which a single "time-out" message can be received,
/// indicating the service worker should stop running,
/// while still draining the task-queue
/// and running all enqueued, and not cancelled, tasks.
#[ignore_malloc_size_of = "Defined in std"]
timer_event_port: Receiver<()>,
time_out_port: Receiver<Instant>,
#[ignore_malloc_size_of = "Defined in std"]
swmanager_sender: IpcSender<ServiceWorkerMsg>,
scope_url: ServoUrl,
}
impl WorkerEventLoopMethods for ServiceWorkerGlobalScope {
type TimerMsg = ();
type WorkerMsg = ServiceWorkerScriptMsg;
type Event = MixedMessage;
fn timer_event_port(&self) -> &Receiver<()> {
&self.timer_event_port
}
fn task_queue(&self) -> &TaskQueue<ServiceWorkerScriptMsg> {
&self.task_queue
}
@ -183,10 +184,6 @@ impl WorkerEventLoopMethods for ServiceWorkerGlobalScope {
MixedMessage::FromServiceWorker(msg)
}
fn from_timer_msg(&self, msg: ()) -> MixedMessage {
MixedMessage::FromTimeoutThread(msg)
}
fn from_devtools_msg(&self, msg: DevtoolScriptControlMsg) -> MixedMessage {
MixedMessage::FromDevtools(msg)
}
@ -200,8 +197,7 @@ impl ServiceWorkerGlobalScope {
runtime: Runtime,
own_sender: Sender<ServiceWorkerScriptMsg>,
receiver: Receiver<ServiceWorkerScriptMsg>,
timer_event_chan: IpcSender<TimerEvent>,
timer_event_port: Receiver<()>,
time_out_port: Receiver<Instant>,
swmanager_sender: IpcSender<ServiceWorkerMsg>,
scope_url: ServoUrl,
) -> ServiceWorkerGlobalScope {
@ -213,12 +209,11 @@ impl ServiceWorkerGlobalScope {
worker_url,
runtime,
from_devtools_receiver,
timer_event_chan,
None,
),
task_queue: TaskQueue::new(receiver, own_sender.clone()),
timer_event_port: timer_event_port,
own_sender: own_sender,
time_out_port,
swmanager_sender: swmanager_sender,
scope_url: scope_url,
}
@ -232,8 +227,7 @@ impl ServiceWorkerGlobalScope {
runtime: Runtime,
own_sender: Sender<ServiceWorkerScriptMsg>,
receiver: Receiver<ServiceWorkerScriptMsg>,
timer_event_chan: IpcSender<TimerEvent>,
timer_event_port: Receiver<()>,
time_out_port: Receiver<Instant>,
swmanager_sender: IpcSender<ServiceWorkerMsg>,
scope_url: ServoUrl,
) -> DomRoot<ServiceWorkerGlobalScope> {
@ -245,8 +239,7 @@ impl ServiceWorkerGlobalScope {
runtime,
own_sender,
receiver,
timer_event_chan,
timer_event_port,
time_out_port,
swmanager_sender,
scope_url,
));
@ -320,9 +313,12 @@ impl ServiceWorkerGlobalScope {
let (devtools_mpsc_chan, devtools_mpsc_port) = unbounded();
ROUTER
.route_ipc_receiver_to_crossbeam_sender(devtools_receiver, devtools_mpsc_chan);
// TODO XXXcreativcoder use this timer_ipc_port, when we have a service worker instance here
let (timer_ipc_chan, _timer_ipc_port) = ipc::channel().unwrap();
let (timer_chan, timer_port) = unbounded();
// Service workers are time limited
// https://w3c.github.io/ServiceWorker/#service-worker-lifetime
let sw_lifetime_timeout = pref!(dom.serviceworker.timeout_seconds) as u64;
let time_out_port = after(Duration::new(sw_lifetime_timeout, 0));
let global = ServiceWorkerGlobalScope::new(
init,
url,
@ -330,8 +326,7 @@ impl ServiceWorkerGlobalScope {
runtime,
own_sender,
receiver,
timer_ipc_chan,
timer_port,
time_out_port,
swmanager_sender,
scope_url,
);
@ -343,15 +338,6 @@ impl ServiceWorkerGlobalScope {
}
scope.execute_script(DOMString::from(source));
// Service workers are time limited
thread::Builder::new()
.name("SWTimeoutThread".to_owned())
.spawn(move || {
let sw_lifetime_timeout = pref!(dom.serviceworker.timeout_seconds) as u64;
thread::sleep(Duration::new(sw_lifetime_timeout, 0));
let _ = timer_chan.send(());
})
.expect("Thread spawning failed");
global.dispatch_activate();
let reporter_name = format!("service-worker-reporter-{}", random::<u64>());
@ -364,8 +350,9 @@ impl ServiceWorkerGlobalScope {
// by inside settings until it is destroyed.
// The worker processing model remains on this step
// until the event loop is destroyed,
// which happens after the closing flag is set to true.
while !scope.is_closing() {
// which happens after the closing flag is set to true,
// or until the worker has run beyond its allocated time.
while !scope.is_closing() || !global.has_timed_out() {
run_worker_event_loop(&*global, None);
}
},
@ -398,15 +385,21 @@ impl ServiceWorkerGlobalScope {
self.handle_script_event(msg);
true
},
MixedMessage::FromTimeoutThread(_) => {
let _ = self
.swmanager_sender
.send(ServiceWorkerMsg::Timeout(self.scope_url.clone()));
false
},
}
}
fn has_timed_out(&self) -> bool {
// Note: this check should be included in the `select` inside `run_worker_event_loop`,
// otherwise blocking on the select can prevent the time-out from being noticed.
if self.time_out_port.try_recv().is_ok() {
let _ = self
.swmanager_sender
.send(ServiceWorkerMsg::Timeout(self.scope_url.clone()));
return true;
}
false
}
fn handle_script_event(&self, msg: ServiceWorkerScriptMsg) {
use self::ServiceWorkerScriptMsg::*;


@ -106,8 +106,7 @@ use script_layout_interface::{PendingImageState, TrustedNodeAddress};
use script_traits::webdriver_msg::{WebDriverJSError, WebDriverJSResult};
use script_traits::{ConstellationControlMsg, DocumentState, HistoryEntryReplacement, LoadData};
use script_traits::{
ScriptMsg, ScriptToConstellationChan, ScrollState, StructuredSerializedData, TimerEvent,
TimerEventId,
ScriptMsg, ScriptToConstellationChan, ScrollState, StructuredSerializedData, TimerEventId,
};
use script_traits::{TimerSchedulerMsg, WindowSizeData, WindowSizeType};
use selectors::attr::CaseSensitivity;
@ -2206,7 +2205,6 @@ impl Window {
constellation_chan: ScriptToConstellationChan,
control_chan: IpcSender<ConstellationControlMsg>,
scheduler_chan: IpcSender<TimerSchedulerMsg>,
timer_event_chan: IpcSender<TimerEvent>,
layout_chan: Sender<Msg>,
pipelineid: PipelineId,
parent_info: Option<PipelineId>,
@ -2250,7 +2248,6 @@ impl Window {
constellation_chan,
scheduler_chan,
resource_threads,
timer_event_chan,
origin,
microtask_queue,
is_headless,


@ -34,6 +34,7 @@ use crate::task_source::networking::NetworkingTaskSource;
use crate::task_source::performance_timeline::PerformanceTimelineTaskSource;
use crate::task_source::port_message::PortMessageQueue;
use crate::task_source::remote_event::RemoteEventTaskSource;
use crate::task_source::timer::TimerTaskSource;
use crate::task_source::websocket::WebsocketTaskSource;
use crate::timers::{IsInterval, TimerCallback};
use crossbeam_channel::Receiver;
@ -50,7 +51,6 @@ use net_traits::request::{
};
use net_traits::IpcSend;
use script_traits::WorkerGlobalScopeInit;
use script_traits::{TimerEvent, TimerEventId};
use servo_url::{MutableOrigin, ServoUrl};
use std::cell::Ref;
use std::default::Default;
@ -120,7 +120,6 @@ impl WorkerGlobalScope {
worker_url: ServoUrl,
runtime: Runtime,
from_devtools_receiver: Receiver<DevtoolScriptControlMsg>,
timer_event_chan: IpcSender<TimerEvent>,
closing: Option<Arc<AtomicBool>>,
) -> Self {
// Install a pipeline-namespace in the current thread.
@ -134,7 +133,6 @@ impl WorkerGlobalScope {
init.script_to_constellation_chan,
init.scheduler_chan,
init.resource_threads,
timer_event_chan,
MutableOrigin::new(init.origin),
runtime.microtask_queue.clone(),
init.is_headless,
@ -437,6 +435,10 @@ impl WorkerGlobalScope {
PortMessageQueue(self.script_chan(), self.pipeline_id())
}
pub fn timer_task_source(&self) -> TimerTaskSource {
TimerTaskSource(self.script_chan(), self.pipeline_id())
}
pub fn remote_event_task_source(&self) -> RemoteEventTaskSource {
RemoteEventTaskSource(self.script_chan(), self.pipeline_id())
}
@ -466,10 +468,6 @@ impl WorkerGlobalScope {
}
}
pub fn handle_fire_timer(&self, timer_id: TimerEventId) {
self.upcast::<GlobalScope>().fire_timer(timer_id);
}
pub fn close(&self) {
if let Some(ref closing) = self.closing {
closing.store(true, Ordering::SeqCst);


@ -15,7 +15,6 @@ use crate::script_thread::MainThreadScriptMsg;
use crossbeam_channel::Sender;
use devtools_traits::ScriptToDevtoolsControlMsg;
use dom_struct::dom_struct;
use ipc_channel::ipc;
use ipc_channel::ipc::IpcSender;
use js::jsval::UndefinedValue;
use js::rust::Runtime;
@ -55,8 +54,6 @@ impl WorkletGlobalScope {
executor: WorkletExecutor,
init: &WorkletGlobalScopeInit,
) -> Self {
// Any timer events fired on this global are ignored.
let (timer_event_chan, _) = ipc::channel().unwrap();
let script_to_constellation_chan = ScriptToConstellationChan {
sender: init.to_constellation_sender.clone(),
pipeline_id,
@ -70,7 +67,6 @@ impl WorkletGlobalScope {
script_to_constellation_chan,
init.scheduler_chan.clone(),
init.resource_threads.clone(),
timer_event_chan,
MutableOrigin::new(ImmutableOrigin::new_opaque()),
Default::default(),
init.is_headless,


@ -82,6 +82,7 @@ use crate::task_source::networking::NetworkingTaskSource;
use crate::task_source::performance_timeline::PerformanceTimelineTaskSource;
use crate::task_source::port_message::PortMessageQueue;
use crate::task_source::remote_event::RemoteEventTaskSource;
use crate::task_source::timer::TimerTaskSource;
use crate::task_source::user_interaction::UserInteractionTaskSource;
use crate::task_source::websocket::WebsocketTaskSource;
use crate::task_source::TaskSource;
@ -140,8 +141,8 @@ use script_traits::{
use script_traits::{InitialScriptState, JsEvalResult, LayoutMsg, LoadData, LoadOrigin};
use script_traits::{MediaSessionActionType, MouseButton, MouseEventType, NewLayoutInfo};
use script_traits::{Painter, ProgressiveWebMetricType, ScriptMsg, ScriptThreadFactory};
use script_traits::{ScriptToConstellationChan, TimerEvent, TimerSchedulerMsg};
use script_traits::{TimerSource, TouchEventType, TouchId, UntrustedNodeAddress, WheelDelta};
use script_traits::{ScriptToConstellationChan, TimerSchedulerMsg};
use script_traits::{TouchEventType, TouchId, UntrustedNodeAddress, WheelDelta};
use script_traits::{UpdatePipelineIdReason, WindowSizeData, WindowSizeType};
use servo_atoms::Atom;
use servo_url::{ImmutableOrigin, MutableOrigin, ServoUrl};
@ -264,7 +265,6 @@ enum MixedMessage {
FromScript(MainThreadScriptMsg),
FromDevtools(DevtoolScriptControlMsg),
FromImageCache((PipelineId, PendingImageResponse)),
FromScheduler(TimerEvent),
}
/// Messages used to control the script event loop.
@ -570,6 +570,8 @@ pub struct ScriptThread {
port_message_sender: Box<dyn ScriptChan>,
timer_task_sender: Box<dyn ScriptChan>,
remote_event_task_sender: Box<dyn ScriptChan>,
/// A channel to hand out to threads that need to respond to a message from the script thread.
@ -613,8 +615,6 @@ pub struct ScriptThread {
closed_pipelines: DomRefCell<HashSet<PipelineId>>,
scheduler_chan: IpcSender<TimerSchedulerMsg>,
timer_event_chan: Sender<TimerEvent>,
timer_event_port: Receiver<TimerEvent>,
content_process_shutdown_chan: Sender<()>,
@ -1266,8 +1266,6 @@ impl ScriptThread {
let devtools_port =
ROUTER.route_ipc_receiver_to_new_crossbeam_receiver(ipc_devtools_receiver);
let (timer_event_chan, timer_event_port) = unbounded();
// Ask the router to proxy IPC messages from the control port to us.
let control_port = ROUTER.route_ipc_receiver_to_new_crossbeam_receiver(state.control_port);
@ -1309,6 +1307,7 @@ impl ScriptThread {
port_message_sender: boxed_script_sender.clone(),
file_reading_task_sender: boxed_script_sender.clone(),
performance_timeline_task_sender: boxed_script_sender.clone(),
timer_task_sender: boxed_script_sender.clone(),
remote_event_task_sender: boxed_script_sender.clone(),
history_traversal_task_sender: chan.clone(),
@ -1330,8 +1329,6 @@ impl ScriptThread {
closed_pipelines: DomRefCell::new(HashSet::new()),
scheduler_chan: state.scheduler_chan,
timer_event_chan: timer_event_chan,
timer_event_port: timer_event_port,
content_process_shutdown_chan: state.content_process_shutdown_chan,
@ -1394,8 +1391,8 @@ impl ScriptThread {
/// Handle incoming control messages.
fn handle_msgs(&self) -> bool {
use self::MixedMessage::FromScript;
use self::MixedMessage::{FromConstellation, FromDevtools, FromImageCache};
use self::MixedMessage::{FromScheduler, FromScript};
// Handle pending resize events.
// Gather them first to avoid a double mut borrow on self.
@ -1430,7 +1427,6 @@ impl ScriptThread {
FromScript(event)
},
recv(self.control_port) -> msg => FromConstellation(msg.unwrap()),
recv(self.timer_event_port) -> msg => FromScheduler(msg.unwrap()),
recv(self.devtools_chan.as_ref().map(|_| &self.devtools_port).unwrap_or(&crossbeam_channel::never())) -> msg
=> FromDevtools(msg.unwrap()),
recv(self.image_cache_port) -> msg => FromImageCache(msg.unwrap()),
@ -1528,15 +1524,12 @@ impl ScriptThread {
// on and execute the sequential non-resize events we've seen.
match self.control_port.try_recv() {
Err(_) => match self.task_queue.try_recv() {
Err(_) => match self.timer_event_port.try_recv() {
Err(_) => match self.devtools_port.try_recv() {
Err(_) => match self.image_cache_port.try_recv() {
Err(_) => break,
Ok(ev) => event = FromImageCache(ev),
},
Ok(ev) => event = FromDevtools(ev),
Err(_) => match self.devtools_port.try_recv() {
Err(_) => match self.image_cache_port.try_recv() {
Err(_) => break,
Ok(ev) => event = FromImageCache(ev),
},
Ok(ev) => event = FromScheduler(ev),
Ok(ev) => event = FromDevtools(ev),
},
Ok(ev) => event = FromScript(ev),
},
@ -1560,7 +1553,6 @@ impl ScriptThread {
},
FromConstellation(inner_msg) => self.handle_msg_from_constellation(inner_msg),
FromScript(inner_msg) => self.handle_msg_from_script(inner_msg),
FromScheduler(inner_msg) => self.handle_timer_event(inner_msg),
FromDevtools(inner_msg) => self.handle_msg_from_devtools(inner_msg),
FromImageCache(inner_msg) => self.handle_msg_from_image_cache(inner_msg),
}
@ -1633,7 +1625,6 @@ impl ScriptThread {
},
_ => ScriptThreadEventCategory::ScriptEvent,
},
MixedMessage::FromScheduler(_) => ScriptThreadEventCategory::TimerEvent,
}
}
@ -1737,13 +1728,6 @@ impl ScriptThread {
MainThreadScriptMsg::WakeUp => None,
},
MixedMessage::FromImageCache((pipeline_id, _)) => Some(pipeline_id),
MixedMessage::FromScheduler(ref timer_event) => {
let TimerEvent(source, _) = *timer_event;
match source {
TimerSource::FromWindow(pipeline_id) => Some(pipeline_id),
_ => None,
}
},
}
}
@ -1988,36 +1972,6 @@ impl ScriptThread {
}
}
fn handle_timer_event(&self, timer_event: TimerEvent) {
let TimerEvent(source, id) = timer_event;
let pipeline_id = match source {
TimerSource::FromWindow(pipeline_id) => pipeline_id,
TimerSource::FromWorker => panic!("Worker timeouts must not be sent to script thread"),
};
let window = self.documents.borrow().find_window(pipeline_id);
let window = match window {
Some(w) => {
if w.Closed() {
return warn!(
"Received fire timer msg for a discarded browsing context whose pipeline is pending exit {}.",
pipeline_id
);
}
w
},
None => {
return warn!(
"Received fire timer msg for a closed pipeline {}.",
pipeline_id
);
},
};
window.handle_fire_timer(id);
}
fn handle_msg_from_devtools(&self, msg: DevtoolScriptControlMsg) {
let documents = self.documents.borrow();
match msg {
@ -2824,6 +2778,10 @@ impl ScriptThread {
RemoteEventTaskSource(self.remote_event_task_sender.clone(), pipeline_id)
}
pub fn timer_task_source(&self, pipeline_id: PipelineId) -> TimerTaskSource {
TimerTaskSource(self.timer_task_sender.clone(), pipeline_id)
}
pub fn websocket_task_source(&self, pipeline_id: PipelineId) -> WebsocketTaskSource {
WebsocketTaskSource(self.remote_event_task_sender.clone(), pipeline_id)
}
@ -3197,12 +3155,6 @@ impl ScriptThread {
let MainThreadScriptChan(ref sender) = self.chan;
let (ipc_timer_event_chan, ipc_timer_event_port) = ipc::channel().unwrap();
ROUTER.route_ipc_receiver_to_crossbeam_sender(
ipc_timer_event_port,
self.timer_event_chan.clone(),
);
let origin = if final_url.as_str() == "about:blank" || final_url.as_str() == "about:srcdoc"
{
incomplete.origin.clone()
@ -3226,6 +3178,7 @@ impl ScriptThread {
self.port_message_queue(incomplete.pipeline_id),
self.user_interaction_task_source(incomplete.pipeline_id),
self.remote_event_task_source(incomplete.pipeline_id),
self.timer_task_source(incomplete.pipeline_id),
self.websocket_task_source(incomplete.pipeline_id),
);
// Create the window and document objects.
@ -3243,7 +3196,6 @@ impl ScriptThread {
script_to_constellation_chan,
self.control_chan.clone(),
self.scheduler_chan.clone(),
ipc_timer_event_chan,
incomplete.layout_chan,
incomplete.pipeline_id,
incomplete.parent_info,


@ -12,6 +12,7 @@ use crate::task_source::networking::NetworkingTaskSource;
use crate::task_source::performance_timeline::PerformanceTimelineTaskSource;
use crate::task_source::port_message::PortMessageQueue;
use crate::task_source::remote_event::RemoteEventTaskSource;
use crate::task_source::timer::TimerTaskSource;
use crate::task_source::user_interaction::UserInteractionTaskSource;
use crate::task_source::websocket::WebsocketTaskSource;
use crate::task_source::TaskSourceName;
@ -54,6 +55,8 @@ pub struct TaskManager {
#[ignore_malloc_size_of = "task sources are hard"]
remote_event_task_source: RemoteEventTaskSource,
#[ignore_malloc_size_of = "task sources are hard"]
timer_task_source: TimerTaskSource,
#[ignore_malloc_size_of = "task sources are hard"]
websocket_task_source: WebsocketTaskSource,
}
@ -68,6 +71,7 @@ impl TaskManager {
port_message_queue: PortMessageQueue,
user_interaction_task_source: UserInteractionTaskSource,
remote_event_task_source: RemoteEventTaskSource,
timer_task_source: TimerTaskSource,
websocket_task_source: WebsocketTaskSource,
) -> Self {
TaskManager {
@ -80,6 +84,7 @@ impl TaskManager {
port_message_queue,
user_interaction_task_source,
remote_event_task_source,
timer_task_source,
websocket_task_source,
task_cancellers: Default::default(),
}
@ -157,6 +162,14 @@ impl TaskManager {
RemoteEvent
);
task_source_functions!(
self,
timer_task_source_with_canceller,
timer_task_source,
TimerTaskSource,
Timer
);
task_source_functions!(
self,
websocket_task_source_with_canceller,


@ -10,6 +10,7 @@ pub mod networking;
pub mod performance_timeline;
pub mod port_message;
pub mod remote_event;
pub mod timer;
pub mod user_interaction;
pub mod websocket;
@ -34,6 +35,7 @@ pub enum TaskSourceName {
RemoteEvent,
MediaElement,
Websocket,
Timer,
}
impl TaskSourceName {


@ -0,0 +1,42 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use crate::script_runtime::{CommonScriptMsg, ScriptChan, ScriptThreadEventCategory};
use crate::task::{TaskCanceller, TaskOnce};
use crate::task_source::{TaskSource, TaskSourceName};
use msg::constellation_msg::PipelineId;
use std::fmt;
#[derive(JSTraceable)]
/// https://html.spec.whatwg.org/multipage/#timer-task-source
pub struct TimerTaskSource(pub Box<dyn ScriptChan + Send + 'static>, pub PipelineId);
impl Clone for TimerTaskSource {
fn clone(&self) -> TimerTaskSource {
TimerTaskSource(self.0.clone(), self.1.clone())
}
}
impl fmt::Debug for TimerTaskSource {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "TimerTaskSource(...)")
}
}
impl TaskSource for TimerTaskSource {
const NAME: TaskSourceName = TaskSourceName::Timer;
fn queue_with_canceller<T>(&self, task: T, canceller: &TaskCanceller) -> Result<(), ()>
where
T: TaskOnce + 'static,
{
let msg = CommonScriptMsg::Task(
ScriptThreadEventCategory::TimerEvent,
Box::new(canceller.wrap_task(task)),
Some(self.1),
Self::NAME,
);
self.0.send(msg).map_err(|_| ())
}
}
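As an aside (not part of this commit), the role `TimerTaskSource` plays above can be sketched with plain `std` channels and hypothetical names: a cloneable handle that queues boxed tasks onto a single event-loop channel, which then runs them in order.

```rust
use std::sync::mpsc::{channel, Sender};

// A queued unit of work, analogous to the boxed task wrapped by CommonScriptMsg::Task.
type Task = Box<dyn FnOnce() + Send>;

// Hypothetical stand-in for a task source: just a cloneable sender into the event loop.
#[derive(Clone)]
struct TaskSourceSketch(Sender<Task>);

impl TaskSourceSketch {
    fn queue<T: FnOnce() + Send + 'static>(&self, task: T) -> Result<(), ()> {
        self.0.send(Box::new(task)).map_err(|_| ())
    }
}

fn main() {
    let (sender, receiver) = channel::<Task>();
    let source = TaskSourceSketch(sender);
    source.queue(|| println!("timer task fired")).unwrap();
    drop(source);
    // The "event loop": drain and run queued tasks in order until all senders are gone.
    for task in receiver {
        task();
    }
}
```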


@ -34,8 +34,12 @@ pub struct OneshotTimerHandle(i32);
pub struct OneshotTimers {
js_timers: JsTimers,
#[ignore_malloc_size_of = "Defined in std"]
timer_event_chan: IpcSender<TimerEvent>,
/// The sender, to be cloned for each timer,
/// on which the timer scheduler in the constellation can send an event
/// when the timer is due.
timer_event_chan: DomRefCell<Option<IpcSender<TimerEvent>>>,
#[ignore_malloc_size_of = "Defined in std"]
/// The sender to the timer scheduler in the constellation.
scheduler_chan: IpcSender<TimerSchedulerMsg>,
next_timer_handle: Cell<OneshotTimerHandle>,
timers: DomRefCell<Vec<OneshotTimer>>,
@ -109,13 +113,10 @@ impl PartialEq for OneshotTimer {
}
impl OneshotTimers {
pub fn new(
timer_event_chan: IpcSender<TimerEvent>,
scheduler_chan: IpcSender<TimerSchedulerMsg>,
) -> OneshotTimers {
pub fn new(scheduler_chan: IpcSender<TimerSchedulerMsg>) -> OneshotTimers {
OneshotTimers {
js_timers: JsTimers::new(),
timer_event_chan: timer_event_chan,
timer_event_chan: DomRefCell::new(None),
scheduler_chan: scheduler_chan,
next_timer_handle: Cell::new(OneshotTimerHandle(1)),
timers: DomRefCell::new(Vec::new()),
@ -125,6 +126,12 @@ impl OneshotTimers {
}
}
pub fn setup_scheduling(&self, timer_event_chan: IpcSender<TimerEvent>) {
let mut chan = self.timer_event_chan.borrow_mut();
assert!(chan.is_none());
*chan = Some(timer_event_chan);
}
pub fn schedule_callback(
&self,
callback: OneshotTimerCallback,
@ -279,7 +286,10 @@ impl OneshotTimers {
.saturating_sub(precise_time_ms().get()),
);
let request = TimerEventRequest(
self.timer_event_chan.clone(),
self.timer_event_chan
.borrow()
.clone()
.expect("Timer event chan not setup to schedule timers."),
timer.source,
expected_event_id,
delay,
@ -331,6 +341,7 @@ pub struct JsTimerHandle(i32);
#[derive(DenyPublicFields, JSTraceable, MallocSizeOf)]
pub struct JsTimers {
next_timer_handle: Cell<JsTimerHandle>,
/// https://html.spec.whatwg.org/multipage/#list-of-active-timers
active_timers: DomRefCell<HashMap<JsTimerHandle, JsTimerEntry>>,
/// The nesting level of the currently executing timer task or 0.
nesting_level: Cell<u32>,
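Finally, a small standalone sketch (plain `std`, hypothetical names, not Servo's types) of the lazy "set it up once" pattern that `setup_scheduling` follows above: the scheduling sender starts out empty and is installed exactly once before the first timer is scheduled.

```rust
use std::cell::RefCell;
use std::sync::mpsc::{channel, Sender};

struct TimersSketch {
    // Starts empty; filled exactly once by `setup_scheduling`.
    event_chan: RefCell<Option<Sender<u32>>>,
}

impl TimersSketch {
    fn setup_scheduling(&self, chan: Sender<u32>) {
        let mut slot = self.event_chan.borrow_mut();
        assert!(slot.is_none(), "scheduling channel set up twice");
        *slot = Some(chan);
    }

    fn schedule(&self, id: u32) {
        // Clone the sender for this particular timer request; panic if setup was skipped.
        let chan = self
            .event_chan
            .borrow()
            .clone()
            .expect("timer event chan not set up to schedule timers");
        chan.send(id).unwrap();
    }
}

fn main() {
    let timers = TimersSketch {
        event_chan: RefCell::new(None),
    };
    let (sender, receiver) = channel();
    timers.setup_scheduling(sender);
    timers.schedule(7);
    assert_eq!(receiver.recv().unwrap(), 7);
}
```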