mirror of
https://github.com/servo/servo.git
synced 2025-08-04 13:10:20 +01:00
Auto merge of #17910 - moz-servo-sync:gecko-backout, r=moz-servo-sync
Multiple gecko backouts Backed out changeset a417b9d7712d for vendoring bustage. r=backout on a CLOSED TREE Backs out https://github.com/servo/servo/pull/17565 <!-- Reviewable:start --> --- This change is [<img src="https://reviewable.io/review_button.svg" height="34" align="absmiddle" alt="Reviewable"/>](https://reviewable.io/reviews/servo/servo/17910) <!-- Reviewable:end --> --- Backed out changeset c424ad1c5f94 for build failures a=backout CLOSED TREE Backs out https://github.com/servo/servo/pull/17892
This commit is contained in:
commit
ef2d48dbcc
19 changed files with 304 additions and 578 deletions
7
Cargo.lock
generated
7
Cargo.lock
generated
|
@ -2607,8 +2607,6 @@ dependencies = [
|
|||
"bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"cssparser 0.18.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"fnv 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"heapsize 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"heapsize_derive 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"phf 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
|
@ -3089,7 +3087,6 @@ dependencies = [
|
|||
"heapsize_derive 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"selectors 0.19.0",
|
||||
"serde 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"webrender_api 0.48.0 (git+https://github.com/servo/webrender)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -3469,7 +3466,7 @@ dependencies = [
|
|||
[[package]]
|
||||
name = "webrender"
|
||||
version = "0.48.0"
|
||||
source = "git+https://github.com/servo/webrender#1ec6fb9ae47d8356920975a4be60552608dbfa1b"
|
||||
source = "git+https://github.com/servo/webrender#283192c41743a59da87b065cbc14c659d94c90b5"
|
||||
dependencies = [
|
||||
"app_units 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"bincode 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
|
@ -3498,7 +3495,7 @@ dependencies = [
|
|||
[[package]]
|
||||
name = "webrender_api"
|
||||
version = "0.48.0"
|
||||
source = "git+https://github.com/servo/webrender#1ec6fb9ae47d8356920975a4be60552608dbfa1b"
|
||||
source = "git+https://github.com/servo/webrender#283192c41743a59da87b065cbc14c659d94c90b5"
|
||||
dependencies = [
|
||||
"app_units 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"bincode 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
|
|
|
@ -6,7 +6,7 @@ use CompositionPipeline;
|
|||
use SendableFrameTree;
|
||||
use compositor_thread::{CompositorProxy, CompositorReceiver};
|
||||
use compositor_thread::{InitialCompositorState, Msg, RenderListener};
|
||||
use euclid::{Point2D, TypedPoint2D, TypedVector2D, ScaleFactor};
|
||||
use euclid::{Point2D, TypedPoint2D, TypedVector2D, TypedRect, ScaleFactor, TypedSize2D};
|
||||
use gfx_traits::Epoch;
|
||||
use gleam::gl;
|
||||
use image::{DynamicImage, ImageFormat, RgbImage};
|
||||
|
@ -34,8 +34,7 @@ use style_traits::viewport::ViewportConstraints;
|
|||
use time::{precise_time_ns, precise_time_s};
|
||||
use touch::{TouchHandler, TouchAction};
|
||||
use webrender;
|
||||
use webrender_api::{self, ClipId, DeviceUintRect, DeviceUintSize, LayoutPoint, LayoutVector2D};
|
||||
use webrender_api::{ScrollEventPhase, ScrollLocation, ScrollClamping};
|
||||
use webrender_api::{self, ClipId, LayoutPoint, LayoutVector2D, ScrollEventPhase, ScrollLocation, ScrollClamping};
|
||||
use windowing::{self, MouseWindowEvent, WindowEvent, WindowMethods};
|
||||
|
||||
#[derive(Debug, PartialEq)]
|
||||
|
@ -111,10 +110,10 @@ pub struct IOCompositor<Window: WindowMethods> {
|
|||
scale: ScaleFactor<f32, LayerPixel, DevicePixel>,
|
||||
|
||||
/// The size of the rendering area.
|
||||
frame_size: DeviceUintSize,
|
||||
frame_size: TypedSize2D<u32, DevicePixel>,
|
||||
|
||||
/// The position and size of the window within the rendering area.
|
||||
window_rect: DeviceUintRect,
|
||||
window_rect: TypedRect<u32, DevicePixel>,
|
||||
|
||||
/// "Mobile-style" zoom that does not reflow the page.
|
||||
viewport_zoom: PinchZoomFactor,
|
||||
|
@ -180,9 +179,6 @@ pub struct IOCompositor<Window: WindowMethods> {
|
|||
/// The webrender renderer.
|
||||
webrender: webrender::Renderer,
|
||||
|
||||
/// The active webrender document.
|
||||
webrender_document: webrender_api::DocumentId,
|
||||
|
||||
/// The webrender interface, if enabled.
|
||||
webrender_api: webrender_api::RenderApi,
|
||||
|
||||
|
@ -382,8 +378,7 @@ impl<Window: WindowMethods> IOCompositor<Window> {
|
|||
scroll_in_progress: false,
|
||||
in_scroll_transaction: None,
|
||||
webrender: state.webrender,
|
||||
webrender_document: state.webrender_document,
|
||||
webrender_api: state.webrender_api,
|
||||
webrender_api: state.webrender_api_sender.create_api(),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -680,8 +675,8 @@ impl<Window: WindowMethods> IOCompositor<Window> {
|
|||
self.root_pipeline = Some(frame_tree.pipeline.clone());
|
||||
|
||||
let pipeline_id = frame_tree.pipeline.id.to_webrender();
|
||||
self.webrender_api.set_root_pipeline(self.webrender_document, pipeline_id);
|
||||
self.webrender_api.generate_frame(self.webrender_document, None);
|
||||
self.webrender_api.set_root_pipeline(pipeline_id);
|
||||
self.webrender_api.generate_frame(None);
|
||||
|
||||
self.create_pipeline_details_for_frame_tree(&frame_tree);
|
||||
|
||||
|
@ -705,7 +700,14 @@ impl<Window: WindowMethods> IOCompositor<Window> {
|
|||
fn send_window_size(&self, size_type: WindowSizeType) {
|
||||
let dppx = self.page_zoom * self.hidpi_factor();
|
||||
|
||||
self.webrender_api.set_window_parameters(self.webrender_document, self.frame_size, self.window_rect);
|
||||
let window_rect = {
|
||||
let offset = webrender_api::DeviceUintPoint::new(self.window_rect.origin.x, self.window_rect.origin.y);
|
||||
let size = webrender_api::DeviceUintSize::new(self.window_rect.size.width, self.window_rect.size.height);
|
||||
webrender_api::DeviceUintRect::new(offset, size)
|
||||
};
|
||||
|
||||
let frame_size = webrender_api::DeviceUintSize::new(self.frame_size.width, self.frame_size.height);
|
||||
self.webrender_api.set_window_parameters(frame_size, window_rect);
|
||||
|
||||
let initial_viewport = self.window_rect.size.to_f32() / dppx;
|
||||
|
||||
|
@ -725,9 +727,7 @@ impl<Window: WindowMethods> IOCompositor<Window> {
|
|||
}
|
||||
|
||||
fn scroll_fragment_to_point(&mut self, id: ClipId, point: Point2D<f32>) {
|
||||
self.webrender_api.scroll_node_with_id(self.webrender_document,
|
||||
LayoutPoint::from_untyped(&point),
|
||||
id,
|
||||
self.webrender_api.scroll_node_with_id(LayoutPoint::from_untyped(&point), id,
|
||||
ScrollClamping::ToContentBounds);
|
||||
}
|
||||
|
||||
|
@ -821,12 +821,12 @@ impl<Window: WindowMethods> IOCompositor<Window> {
|
|||
WindowEvent::ToggleWebRenderProfiler => {
|
||||
let profiler_enabled = self.webrender.get_profiler_enabled();
|
||||
self.webrender.set_profiler_enabled(!profiler_enabled);
|
||||
self.webrender_api.generate_frame(self.webrender_document, None);
|
||||
self.webrender_api.generate_frame(None);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn on_resize_window_event(&mut self, new_size: DeviceUintSize) {
|
||||
fn on_resize_window_event(&mut self, new_size: TypedSize2D<u32, DevicePixel>) {
|
||||
debug!("compositor resizing to {:?}", new_size.to_untyped());
|
||||
|
||||
// A size change could also mean a resolution change.
|
||||
|
@ -1123,7 +1123,7 @@ impl<Window: WindowMethods> IOCompositor<Window> {
|
|||
(combined_event.cursor.to_f32() / self.scale).to_untyped();
|
||||
let location = webrender_api::ScrollLocation::Delta(delta);
|
||||
let cursor = webrender_api::WorldPoint::from_untyped(&cursor);
|
||||
self.webrender_api.scroll(self.webrender_document, location, cursor, combined_event.phase);
|
||||
self.webrender_api.scroll(location, cursor, combined_event.phase);
|
||||
last_combined_event = None
|
||||
}
|
||||
}
|
||||
|
@ -1179,7 +1179,7 @@ impl<Window: WindowMethods> IOCompositor<Window> {
|
|||
};
|
||||
let cursor = (combined_event.cursor.to_f32() / self.scale).to_untyped();
|
||||
let cursor = webrender_api::WorldPoint::from_untyped(&cursor);
|
||||
self.webrender_api.scroll(self.webrender_document, scroll_location, cursor, combined_event.phase);
|
||||
self.webrender_api.scroll(scroll_location, cursor, combined_event.phase);
|
||||
self.waiting_for_results_of_scroll = true
|
||||
}
|
||||
|
||||
|
@ -1277,7 +1277,7 @@ impl<Window: WindowMethods> IOCompositor<Window> {
|
|||
|
||||
fn update_page_zoom_for_webrender(&mut self) {
|
||||
let page_zoom = webrender_api::ZoomFactor::new(self.page_zoom.get());
|
||||
self.webrender_api.set_page_zoom(self.webrender_document, page_zoom);
|
||||
self.webrender_api.set_page_zoom(page_zoom);
|
||||
}
|
||||
|
||||
/// Simulate a pinch zoom
|
||||
|
@ -1315,7 +1315,7 @@ impl<Window: WindowMethods> IOCompositor<Window> {
|
|||
|
||||
fn send_viewport_rects(&self) {
|
||||
let mut scroll_states_per_pipeline = HashMap::new();
|
||||
for scroll_layer_state in self.webrender_api.get_scroll_node_state(self.webrender_document) {
|
||||
for scroll_layer_state in self.webrender_api.get_scroll_node_state() {
|
||||
if scroll_layer_state.id.external_id().is_none() &&
|
||||
!scroll_layer_state.id.is_root_scroll_node() {
|
||||
continue;
|
||||
|
@ -1464,7 +1464,8 @@ impl<Window: WindowMethods> IOCompositor<Window> {
|
|||
debug!("compositor: compositing");
|
||||
|
||||
// Paint the scene.
|
||||
self.webrender.render(self.frame_size);
|
||||
let size = webrender_api::DeviceUintSize::from_untyped(&self.frame_size.to_untyped());
|
||||
self.webrender.render(size);
|
||||
});
|
||||
|
||||
let rv = match target {
|
||||
|
@ -1564,7 +1565,7 @@ impl<Window: WindowMethods> IOCompositor<Window> {
|
|||
}
|
||||
|
||||
if self.webrender.layers_are_bouncing_back() {
|
||||
self.webrender_api.tick_scrolling_bounce_animations(self.webrender_document);
|
||||
self.webrender_api.tick_scrolling_bounce_animations();
|
||||
self.send_viewport_rects()
|
||||
}
|
||||
}
|
||||
|
|
|
@ -194,6 +194,5 @@ pub struct InitialCompositorState {
|
|||
pub mem_profiler_chan: mem::ProfilerChan,
|
||||
/// Instance of webrender API
|
||||
pub webrender: webrender::Renderer,
|
||||
pub webrender_document: webrender_api::DocumentId,
|
||||
pub webrender_api: webrender_api::RenderApi,
|
||||
pub webrender_api_sender: webrender_api::RenderApiSender,
|
||||
}
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
|
||||
use compositor_thread::EventLoopWaker;
|
||||
use euclid::{Point2D, Size2D};
|
||||
use euclid::{ScaleFactor, TypedPoint2D, TypedSize2D};
|
||||
use euclid::{TypedPoint2D, TypedRect, ScaleFactor, TypedSize2D};
|
||||
use gleam::gl;
|
||||
use ipc_channel::ipc::IpcSender;
|
||||
use msg::constellation_msg::{Key, KeyModifiers, KeyState, TraversalDirection};
|
||||
|
@ -18,7 +18,7 @@ use std::fmt::{Debug, Error, Formatter};
|
|||
use std::rc::Rc;
|
||||
use style_traits::DevicePixel;
|
||||
use style_traits::cursor::Cursor;
|
||||
use webrender_api::{DeviceUintSize, DeviceUintRect, ScrollLocation};
|
||||
use webrender_api::ScrollLocation;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub enum MouseWindowEvent {
|
||||
|
@ -41,7 +41,7 @@ pub enum WindowEvent {
|
|||
/// message, the window must make the same GL context as in `PrepareRenderingEvent` current.
|
||||
Refresh,
|
||||
/// Sent when the window is resized.
|
||||
Resize(DeviceUintSize),
|
||||
Resize(TypedSize2D<u32, DevicePixel>),
|
||||
/// Touchpad Pressure
|
||||
TouchpadPressure(TypedPoint2D<f32, DevicePixel>, f32, TouchpadPressurePhase),
|
||||
/// Sent when a new URL is to be loaded.
|
||||
|
@ -105,9 +105,9 @@ pub enum AnimationState {
|
|||
|
||||
pub trait WindowMethods {
|
||||
/// Returns the rendering area size in hardware pixels.
|
||||
fn framebuffer_size(&self) -> DeviceUintSize;
|
||||
fn framebuffer_size(&self) -> TypedSize2D<u32, DevicePixel>;
|
||||
/// Returns the position and size of the window within the rendering area.
|
||||
fn window_rect(&self) -> DeviceUintRect;
|
||||
fn window_rect(&self) -> TypedRect<u32, DevicePixel>;
|
||||
/// Returns the size of the window in density-independent "px" units.
|
||||
fn size(&self) -> TypedSize2D<f32, DeviceIndependentPixel>;
|
||||
/// Presents the window to the screen (perhaps by page flipping).
|
||||
|
|
|
@ -228,11 +228,8 @@ pub struct Constellation<Message, LTF, STF> {
|
|||
/// timer thread.
|
||||
scheduler_chan: IpcSender<TimerSchedulerMsg>,
|
||||
|
||||
/// A single WebRender document the constellation operates on.
|
||||
webrender_document: webrender_api::DocumentId,
|
||||
|
||||
/// A channel for the constellation to send messages to the
|
||||
/// WebRender thread.
|
||||
/// Webrender thread.
|
||||
webrender_api_sender: webrender_api::RenderApiSender,
|
||||
|
||||
/// The set of all event loops in the browser. We generate a new
|
||||
|
@ -328,9 +325,6 @@ pub struct InitialConstellationState {
|
|||
/// A channel to the memory profiler thread.
|
||||
pub mem_profiler_chan: mem::ProfilerChan,
|
||||
|
||||
/// Webrender document ID.
|
||||
pub webrender_document: webrender_api::DocumentId,
|
||||
|
||||
/// Webrender API.
|
||||
pub webrender_api_sender: webrender_api::RenderApiSender,
|
||||
|
||||
|
@ -567,7 +561,6 @@ impl<Message, LTF, STF> Constellation<Message, LTF, STF>
|
|||
webdriver: WebDriverData::new(),
|
||||
scheduler_chan: TimerScheduler::start(),
|
||||
document_states: HashMap::new(),
|
||||
webrender_document: state.webrender_document,
|
||||
webrender_api_sender: state.webrender_api_sender,
|
||||
shutting_down: false,
|
||||
handled_warnings: VecDeque::new(),
|
||||
|
@ -671,9 +664,9 @@ impl<Message, LTF, STF> Constellation<Message, LTF, STF>
|
|||
|
||||
let result = Pipeline::spawn::<Message, LTF, STF>(InitialPipelineState {
|
||||
id: pipeline_id,
|
||||
browsing_context_id,
|
||||
top_level_browsing_context_id,
|
||||
parent_info,
|
||||
browsing_context_id: browsing_context_id,
|
||||
top_level_browsing_context_id: top_level_browsing_context_id,
|
||||
parent_info: parent_info,
|
||||
constellation_chan: self.script_sender.clone(),
|
||||
layout_to_constellation_chan: self.layout_sender.clone(),
|
||||
scheduler_chan: self.scheduler_chan.clone(),
|
||||
|
@ -682,18 +675,17 @@ impl<Message, LTF, STF> Constellation<Message, LTF, STF>
|
|||
bluetooth_thread: self.bluetooth_thread.clone(),
|
||||
swmanager_thread: self.swmanager_sender.clone(),
|
||||
font_cache_thread: self.font_cache_thread.clone(),
|
||||
resource_threads,
|
||||
resource_threads: resource_threads,
|
||||
time_profiler_chan: self.time_profiler_chan.clone(),
|
||||
mem_profiler_chan: self.mem_profiler_chan.clone(),
|
||||
window_size: initial_window_size,
|
||||
event_loop,
|
||||
load_data,
|
||||
event_loop: event_loop,
|
||||
load_data: load_data,
|
||||
device_pixel_ratio: self.window_size.device_pixel_ratio,
|
||||
pipeline_namespace_id: self.next_pipeline_namespace_id(),
|
||||
prev_visibility,
|
||||
prev_visibility: prev_visibility,
|
||||
webrender_api_sender: self.webrender_api_sender.clone(),
|
||||
webrender_document: self.webrender_document,
|
||||
is_private,
|
||||
is_private: is_private,
|
||||
webvr_thread: self.webvr_thread.clone()
|
||||
});
|
||||
|
||||
|
|
|
@ -156,7 +156,6 @@ pub struct InitialPipelineState {
|
|||
/// Information about the page to load.
|
||||
pub load_data: LoadData,
|
||||
|
||||
|
||||
/// The ID of the pipeline namespace for this script thread.
|
||||
pub pipeline_namespace_id: PipelineNamespaceId,
|
||||
|
||||
|
@ -166,9 +165,6 @@ pub struct InitialPipelineState {
|
|||
/// Webrender api.
|
||||
pub webrender_api_sender: webrender_api::RenderApiSender,
|
||||
|
||||
/// The ID of the document processed by this script thread.
|
||||
pub webrender_document: webrender_api::DocumentId,
|
||||
|
||||
/// Whether this pipeline is considered private.
|
||||
pub is_private: bool,
|
||||
/// A channel to the webvr thread.
|
||||
|
@ -269,7 +265,6 @@ impl Pipeline {
|
|||
script_content_process_shutdown_chan: script_content_process_shutdown_chan,
|
||||
script_content_process_shutdown_port: script_content_process_shutdown_port,
|
||||
webrender_api_sender: state.webrender_api_sender,
|
||||
webrender_document: state.webrender_document,
|
||||
webvr_thread: state.webvr_thread,
|
||||
};
|
||||
|
||||
|
@ -469,7 +464,6 @@ pub struct UnprivilegedPipelineContent {
|
|||
script_content_process_shutdown_chan: IpcSender<()>,
|
||||
script_content_process_shutdown_port: IpcReceiver<()>,
|
||||
webrender_api_sender: webrender_api::RenderApiSender,
|
||||
webrender_document: webrender_api::DocumentId,
|
||||
webvr_thread: Option<IpcSender<WebVRMsg>>,
|
||||
}
|
||||
|
||||
|
@ -516,7 +510,6 @@ impl UnprivilegedPipelineContent {
|
|||
self.mem_profiler_chan,
|
||||
Some(self.layout_content_process_shutdown_chan),
|
||||
self.webrender_api_sender,
|
||||
self.webrender_document,
|
||||
self.prefs.get("layout.threads").expect("exists").value()
|
||||
.as_u64().expect("count") as usize,
|
||||
paint_time_metrics);
|
||||
|
|
|
@ -242,9 +242,6 @@ pub struct LayoutThread {
|
|||
/// Webrender interface.
|
||||
webrender_api: webrender_api::RenderApi,
|
||||
|
||||
/// Webrender document.
|
||||
webrender_document: webrender_api::DocumentId,
|
||||
|
||||
/// The timer object to control the timing of the animations. This should
|
||||
/// only be a test-mode timer during testing for animations.
|
||||
timer: Timer,
|
||||
|
@ -278,7 +275,6 @@ impl LayoutThreadFactory for LayoutThread {
|
|||
mem_profiler_chan: mem::ProfilerChan,
|
||||
content_process_shutdown_chan: Option<IpcSender<()>>,
|
||||
webrender_api_sender: webrender_api::RenderApiSender,
|
||||
webrender_document: webrender_api::DocumentId,
|
||||
layout_threads: usize,
|
||||
paint_time_metrics: PaintTimeMetrics) {
|
||||
thread::Builder::new().name(format!("LayoutThread {:?}", id)).spawn(move || {
|
||||
|
@ -302,7 +298,6 @@ impl LayoutThreadFactory for LayoutThread {
|
|||
time_profiler_chan,
|
||||
mem_profiler_chan.clone(),
|
||||
webrender_api_sender,
|
||||
webrender_document,
|
||||
layout_threads,
|
||||
paint_time_metrics);
|
||||
|
||||
|
@ -465,7 +460,6 @@ impl LayoutThread {
|
|||
time_profiler_chan: time::ProfilerChan,
|
||||
mem_profiler_chan: mem::ProfilerChan,
|
||||
webrender_api_sender: webrender_api::RenderApiSender,
|
||||
webrender_document: webrender_api::DocumentId,
|
||||
layout_threads: usize,
|
||||
paint_time_metrics: PaintTimeMetrics)
|
||||
-> LayoutThread {
|
||||
|
@ -539,7 +533,6 @@ impl LayoutThread {
|
|||
epoch: Cell::new(Epoch(0)),
|
||||
viewport_size: Size2D::new(Au(0), Au(0)),
|
||||
webrender_api: webrender_api_sender.create_api(),
|
||||
webrender_document,
|
||||
stylist: stylist,
|
||||
rw_data: Arc::new(Mutex::new(
|
||||
LayoutThreadData {
|
||||
|
@ -809,7 +802,6 @@ impl LayoutThread {
|
|||
self.mem_profiler_chan.clone(),
|
||||
info.content_process_shutdown_chan,
|
||||
self.webrender_api.clone_sender(),
|
||||
self.webrender_document,
|
||||
info.layout_threads,
|
||||
info.paint_time_metrics);
|
||||
}
|
||||
|
@ -1053,13 +1045,12 @@ impl LayoutThread {
|
|||
self.paint_time_metrics.maybe_set_first_contentful_paint(self, &display_list);
|
||||
|
||||
self.webrender_api.set_display_list(
|
||||
self.webrender_document,
|
||||
webrender_api::Epoch(epoch.0),
|
||||
Some(get_root_flow_background_color(layout_root)),
|
||||
webrender_api::Epoch(epoch.0),
|
||||
viewport_size,
|
||||
builder.finalize(),
|
||||
true);
|
||||
self.webrender_api.generate_frame(self.webrender_document, None);
|
||||
self.webrender_api.generate_frame(None);
|
||||
});
|
||||
}
|
||||
|
||||
|
|
|
@ -50,7 +50,6 @@ pub trait LayoutThreadFactory {
|
|||
mem_profiler_chan: mem::ProfilerChan,
|
||||
content_process_shutdown_chan: Option<IpcSender<()>>,
|
||||
webrender_api_sender: webrender_api::RenderApiSender,
|
||||
webrender_document: webrender_api::DocumentId,
|
||||
layout_threads: usize,
|
||||
paint_time_metrics: PaintTimeMetrics);
|
||||
}
|
||||
|
|
|
@ -7,8 +7,9 @@
|
|||
use dom::bindings::codegen::Bindings::HTMLTemplateElementBinding::HTMLTemplateElementMethods;
|
||||
use dom::bindings::codegen::Bindings::NodeBinding::NodeMethods;
|
||||
use dom::bindings::inheritance::Castable;
|
||||
use dom::bindings::js::{JS, Root};
|
||||
use dom::bindings::js::{JS, MutNullableJS, Root};
|
||||
use dom::bindings::str::DOMString;
|
||||
use dom::bindings::trace::JSTraceable;
|
||||
use dom::comment::Comment;
|
||||
use dom::document::Document;
|
||||
use dom::documenttype::DocumentType;
|
||||
|
@ -19,167 +20,25 @@ use dom::htmltemplateelement::HTMLTemplateElement;
|
|||
use dom::node::Node;
|
||||
use dom::processinginstruction::ProcessingInstruction;
|
||||
use dom::virtualmethods::vtable_for;
|
||||
use html5ever::{Attribute as HtmlAttribute, ExpandedName, LocalName, QualName};
|
||||
use html5ever::{Attribute, LocalName, QualName, ExpandedName};
|
||||
use html5ever::buffer_queue::BufferQueue;
|
||||
use html5ever::tendril::{SendTendril, StrTendril, Tendril};
|
||||
use html5ever::tendril::fmt::UTF8;
|
||||
use html5ever::tendril::StrTendril;
|
||||
use html5ever::tokenizer::{Tokenizer as HtmlTokenizer, TokenizerOpts, TokenizerResult};
|
||||
use html5ever::tree_builder::{ElementFlags, NodeOrText as HtmlNodeOrText, NextParserState, QuirksMode, TreeSink};
|
||||
use html5ever::tree_builder::{TreeBuilder, TreeBuilderOpts};
|
||||
use html5ever::tree_builder::{NodeOrText, TreeSink, NextParserState, QuirksMode, ElementFlags};
|
||||
use html5ever::tree_builder::{Tracer as HtmlTracer, TreeBuilder, TreeBuilderOpts};
|
||||
use js::jsapi::JSTracer;
|
||||
use servo_url::ServoUrl;
|
||||
use std::ascii::AsciiExt;
|
||||
use std::borrow::Cow;
|
||||
use std::cell::Cell;
|
||||
use std::collections::HashMap;
|
||||
use std::collections::vec_deque::VecDeque;
|
||||
use std::sync::mpsc::{channel, Receiver, Sender};
|
||||
use std::thread;
|
||||
use style::context::QuirksMode as ServoQuirksMode;
|
||||
|
||||
type ParseNodeId = usize;
|
||||
|
||||
#[derive(Clone, HeapSizeOf, JSTraceable)]
|
||||
pub struct ParseNode {
|
||||
id: ParseNodeId,
|
||||
qual_name: Option<QualName>,
|
||||
}
|
||||
|
||||
#[derive(HeapSizeOf, JSTraceable)]
|
||||
enum NodeOrText {
|
||||
Node(ParseNode),
|
||||
Text(String),
|
||||
}
|
||||
|
||||
#[derive(HeapSizeOf, JSTraceable)]
|
||||
struct Attribute {
|
||||
name: QualName,
|
||||
value: String,
|
||||
}
|
||||
|
||||
#[derive(HeapSizeOf, JSTraceable)]
|
||||
enum ParseOperation {
|
||||
GetTemplateContents { target: ParseNodeId, contents: ParseNodeId },
|
||||
|
||||
CreateElement {
|
||||
node: ParseNodeId,
|
||||
name: QualName,
|
||||
attrs: Vec<Attribute>,
|
||||
current_line: u64
|
||||
},
|
||||
|
||||
CreateComment { text: String, node: ParseNodeId },
|
||||
AppendBeforeSibling { sibling: ParseNodeId, node: NodeOrText },
|
||||
Append { parent: ParseNodeId, node: NodeOrText },
|
||||
|
||||
AppendDoctypeToDocument {
|
||||
name: String,
|
||||
public_id: String,
|
||||
system_id: String
|
||||
},
|
||||
|
||||
AddAttrsIfMissing { target: ParseNodeId, attrs: Vec<Attribute> },
|
||||
RemoveFromParent { target: ParseNodeId },
|
||||
MarkScriptAlreadyStarted { node: ParseNodeId },
|
||||
ReparentChildren { parent: ParseNodeId, new_parent: ParseNodeId },
|
||||
AssociateWithForm { target: ParseNodeId, form: ParseNodeId },
|
||||
|
||||
CreatePI {
|
||||
node: ParseNodeId,
|
||||
target: String,
|
||||
data: String
|
||||
},
|
||||
|
||||
Pop { node: ParseNodeId },
|
||||
|
||||
SetQuirksMode {
|
||||
#[ignore_heap_size_of = "Defined in style"]
|
||||
mode: ServoQuirksMode
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(HeapSizeOf)]
|
||||
enum ToTokenizerMsg {
|
||||
// From HtmlTokenizer
|
||||
TokenizerResultDone {
|
||||
#[ignore_heap_size_of = "Defined in html5ever"]
|
||||
updated_input: VecDeque<SendTendril<UTF8>>
|
||||
},
|
||||
TokenizerResultScript {
|
||||
script: ParseNode,
|
||||
#[ignore_heap_size_of = "Defined in html5ever"]
|
||||
updated_input: VecDeque<SendTendril<UTF8>>
|
||||
},
|
||||
End, // Sent to Tokenizer to signify HtmlTokenizer's end method has returned
|
||||
|
||||
// From Sink
|
||||
ProcessOperation(ParseOperation),
|
||||
IsSameTree(ParseNodeId, ParseNodeId),
|
||||
HasParentNode(ParseNodeId),
|
||||
}
|
||||
|
||||
#[derive(HeapSizeOf)]
|
||||
enum ToHtmlTokenizerMsg {
|
||||
Feed {
|
||||
#[ignore_heap_size_of = "Defined in html5ever"]
|
||||
input: VecDeque<SendTendril<UTF8>>
|
||||
},
|
||||
End,
|
||||
SetPlainTextState,
|
||||
}
|
||||
|
||||
// Responses to the queries asked by the the Sink to the Tokenizer,
|
||||
// using the messages types in FromSinkMsg.
|
||||
#[derive(HeapSizeOf)]
|
||||
enum ToSinkMsg {
|
||||
IsSameTree(bool),
|
||||
HasParentNode(bool),
|
||||
}
|
||||
|
||||
fn create_buffer_queue(mut buffers: VecDeque<SendTendril<UTF8>>) -> BufferQueue {
|
||||
let mut buffer_queue = BufferQueue::new();
|
||||
while let Some(st) = buffers.pop_front() {
|
||||
buffer_queue.push_back(StrTendril::from(st));
|
||||
}
|
||||
buffer_queue
|
||||
}
|
||||
|
||||
// The async HTML Tokenizer consists of two separate types working together: the Tokenizer
|
||||
// (defined below), which lives on the main thread, and the HtmlTokenizer, defined in html5ever, which
|
||||
// lives on the parser thread.
|
||||
// Steps:
|
||||
// 1. A call to Tokenizer::new will spin up a new parser thread, creating an HtmlTokenizer instance,
|
||||
// which starts listening for messages from Tokenizer.
|
||||
// 2. Upon receiving an input from ServoParser, the Tokenizer forwards it to HtmlTokenizer, where it starts
|
||||
// creating the necessary tree actions based on the input.
|
||||
// 3. HtmlTokenizer sends these tree actions to the Tokenizer as soon as it creates them. The Tokenizer
|
||||
// then executes the received actions.
|
||||
//
|
||||
// _____________ _______________
|
||||
// | | ToHtmlTokenizerMsg | |
|
||||
// | |------------------------>| |
|
||||
// | | | |
|
||||
// | | ToTokenizerMsg | HtmlTokenizer |
|
||||
// | |<------------------------| |
|
||||
// | Tokenizer | | |
|
||||
// | | ToTokenizerMsg | ________ |
|
||||
// | |<------------------------|---| | |
|
||||
// | | | | Sink | |
|
||||
// | | ToSinkMsg | | | |
|
||||
// | |-------------------------|-->|________| |
|
||||
// |_____________| |_______________|
|
||||
//
|
||||
#[derive(HeapSizeOf, JSTraceable)]
|
||||
#[must_root]
|
||||
pub struct Tokenizer {
|
||||
document: JS<Document>,
|
||||
#[ignore_heap_size_of = "Defined in std"]
|
||||
receiver: Receiver<ToTokenizerMsg>,
|
||||
#[ignore_heap_size_of = "Defined in std"]
|
||||
html_tokenizer_sender: Sender<ToHtmlTokenizerMsg>,
|
||||
#[ignore_heap_size_of = "Defined in std"]
|
||||
sink_sender: Sender<ToSinkMsg>,
|
||||
nodes: HashMap<ParseNodeId, JS<Node>>,
|
||||
url: ServoUrl,
|
||||
#[ignore_heap_size_of = "Defined in html5ever"]
|
||||
inner: HtmlTokenizer<TreeBuilder<ParseNode, Sink>>,
|
||||
}
|
||||
|
||||
impl Tokenizer {
|
||||
|
@ -188,125 +47,171 @@ impl Tokenizer {
|
|||
url: ServoUrl,
|
||||
fragment_context: Option<super::FragmentContext>)
|
||||
-> Self {
|
||||
// Messages from the Tokenizer (main thread) to HtmlTokenizer (parser thread)
|
||||
let (to_html_tokenizer_sender, html_tokenizer_receiver) = channel();
|
||||
// Messages from the Tokenizer (main thread) to Sink (parser thread)
|
||||
let (to_sink_sender, sink_receiver) = channel();
|
||||
// Messages from HtmlTokenizer and Sink (parser thread) to Tokenizer (main thread)
|
||||
let (to_tokenizer_sender, tokenizer_receiver) = channel();
|
||||
let mut sink = Sink::new(url, document);
|
||||
|
||||
let mut tokenizer = Tokenizer {
|
||||
document: JS::from_ref(document),
|
||||
receiver: tokenizer_receiver,
|
||||
html_tokenizer_sender: to_html_tokenizer_sender,
|
||||
sink_sender: to_sink_sender,
|
||||
nodes: HashMap::new(),
|
||||
url: url
|
||||
let options = TreeBuilderOpts {
|
||||
ignore_missing_rules: true,
|
||||
.. Default::default()
|
||||
};
|
||||
tokenizer.insert_node(0, JS::from_ref(document.upcast()));
|
||||
|
||||
let mut sink = Sink::new(to_tokenizer_sender.clone(), sink_receiver);
|
||||
let mut ctxt_parse_node = None;
|
||||
let mut form_parse_node = None;
|
||||
let mut fragment_context_is_some = false;
|
||||
if let Some(fc) = fragment_context {
|
||||
let node = sink.new_parse_node();
|
||||
tokenizer.insert_node(node.id, JS::from_ref(fc.context_elem));
|
||||
ctxt_parse_node = Some(node);
|
||||
let inner = if let Some(fc) = fragment_context {
|
||||
let ctxt_parse_node = sink.new_parse_node();
|
||||
sink.nodes.insert(ctxt_parse_node.id, JS::from_ref(fc.context_elem));
|
||||
|
||||
form_parse_node = fc.form_elem.map(|form_elem| {
|
||||
let form_parse_node = fc.form_elem.map(|form_elem| {
|
||||
let node = sink.new_parse_node();
|
||||
tokenizer.insert_node(node.id, JS::from_ref(form_elem));
|
||||
sink.nodes.insert(node.id, JS::from_ref(form_elem));
|
||||
node
|
||||
});
|
||||
fragment_context_is_some = true;
|
||||
};
|
||||
|
||||
// Create new thread for HtmlTokenizer. This is where parser actions
|
||||
// will be generated from the input provided. These parser actions are then passed
|
||||
// onto the main thread to be executed.
|
||||
thread::Builder::new().name(String::from("HTML Parser")).spawn(move || {
|
||||
run(sink,
|
||||
fragment_context_is_some,
|
||||
let tb = TreeBuilder::new_for_fragment(
|
||||
sink,
|
||||
ctxt_parse_node,
|
||||
form_parse_node,
|
||||
to_tokenizer_sender,
|
||||
html_tokenizer_receiver);
|
||||
}).expect("HTML Parser thread spawning failed");
|
||||
options);
|
||||
|
||||
tokenizer
|
||||
let tok_options = TokenizerOpts {
|
||||
initial_state: Some(tb.tokenizer_state_for_context_elem()),
|
||||
.. Default::default()
|
||||
};
|
||||
|
||||
HtmlTokenizer::new(tb, tok_options)
|
||||
} else {
|
||||
HtmlTokenizer::new(TreeBuilder::new(sink, options), Default::default())
|
||||
};
|
||||
|
||||
Tokenizer {
|
||||
inner: inner,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn feed(&mut self, input: &mut BufferQueue) -> Result<(), Root<HTMLScriptElement>> {
|
||||
let mut send_tendrils = VecDeque::new();
|
||||
while let Some(str) = input.pop_front() {
|
||||
send_tendrils.push_back(SendTendril::from(str));
|
||||
}
|
||||
|
||||
// Send message to parser thread, asking it to start reading from the input.
|
||||
// Parser operation messages will be sent to main thread as they are evaluated.
|
||||
self.html_tokenizer_sender.send(ToHtmlTokenizerMsg::Feed { input: send_tendrils }).unwrap();
|
||||
|
||||
loop {
|
||||
match self.receiver.recv().expect("Unexpected channel panic in main thread.") {
|
||||
ToTokenizerMsg::ProcessOperation(parse_op) => self.process_operation(parse_op),
|
||||
ToTokenizerMsg::IsSameTree(ref x_id, ref y_id) => {
|
||||
let x = self.get_node(x_id);
|
||||
let y = self.get_node(y_id);
|
||||
|
||||
let x = x.downcast::<Element>().expect("Element node expected");
|
||||
let y = y.downcast::<Element>().expect("Element node expected");
|
||||
self.sink_sender.send(ToSinkMsg::IsSameTree(x.is_in_same_home_subtree(y))).unwrap();
|
||||
},
|
||||
ToTokenizerMsg::HasParentNode(ref id) => {
|
||||
let res = self.get_node(id).GetParentNode().is_some();
|
||||
self.sink_sender.send(ToSinkMsg::HasParentNode(res)).unwrap();
|
||||
},
|
||||
ToTokenizerMsg::TokenizerResultDone { updated_input } => {
|
||||
let buffer_queue = create_buffer_queue(updated_input);
|
||||
*input = buffer_queue;
|
||||
return Ok(());
|
||||
},
|
||||
ToTokenizerMsg::TokenizerResultScript { script, updated_input } => {
|
||||
let buffer_queue = create_buffer_queue(updated_input);
|
||||
*input = buffer_queue;
|
||||
let script = self.get_node(&script.id);
|
||||
return Err(Root::from_ref(script.downcast().unwrap()));
|
||||
}
|
||||
ToTokenizerMsg::End => unreachable!(),
|
||||
};
|
||||
match self.inner.feed(input) {
|
||||
TokenizerResult::Done => Ok(()),
|
||||
TokenizerResult::Script(script) => {
|
||||
let nodes = &self.inner.sink.sink.nodes;
|
||||
let script = nodes.get(&script.id).unwrap();
|
||||
Err(Root::from_ref(script.downcast().unwrap()))
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
pub fn end(&mut self) {
|
||||
self.html_tokenizer_sender.send(ToHtmlTokenizerMsg::End).unwrap();
|
||||
loop {
|
||||
match self.receiver.recv().expect("Unexpected channel panic in main thread.") {
|
||||
ToTokenizerMsg::ProcessOperation(parse_op) => self.process_operation(parse_op),
|
||||
ToTokenizerMsg::IsSameTree(ref x_id, ref y_id) => {
|
||||
let x = self.get_node(x_id);
|
||||
let y = self.get_node(y_id);
|
||||
|
||||
let x = x.downcast::<Element>().expect("Element node expected");
|
||||
let y = y.downcast::<Element>().expect("Element node expected");
|
||||
self.sink_sender.send(ToSinkMsg::IsSameTree(x.is_in_same_home_subtree(y))).unwrap();
|
||||
},
|
||||
ToTokenizerMsg::HasParentNode(ref id) => {
|
||||
let res = self.get_node(id).GetParentNode().is_some();
|
||||
self.sink_sender.send(ToSinkMsg::HasParentNode(res)).unwrap();
|
||||
},
|
||||
ToTokenizerMsg::End => return,
|
||||
_ => unreachable!(),
|
||||
};
|
||||
}
|
||||
self.inner.end();
|
||||
}
|
||||
|
||||
pub fn url(&self) -> &ServoUrl {
|
||||
&self.url
|
||||
&self.inner.sink.sink.base_url
|
||||
}
|
||||
|
||||
pub fn set_plaintext_state(&mut self) {
|
||||
self.html_tokenizer_sender.send(ToHtmlTokenizerMsg::SetPlainTextState).unwrap();
|
||||
self.inner.set_plaintext_state();
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(unsafe_code)]
|
||||
unsafe impl JSTraceable for HtmlTokenizer<TreeBuilder<ParseNode, Sink>> {
|
||||
unsafe fn trace(&self, trc: *mut JSTracer) {
|
||||
struct Tracer(*mut JSTracer);
|
||||
let tracer = Tracer(trc);
|
||||
|
||||
impl HtmlTracer for Tracer {
|
||||
type Handle = ParseNode;
|
||||
#[allow(unrooted_must_root)]
|
||||
fn trace_handle(&self, node: &ParseNode) {
|
||||
unsafe { node.trace(self.0); }
|
||||
}
|
||||
}
|
||||
|
||||
let tree_builder = &self.sink;
|
||||
tree_builder.trace_handles(&tracer);
|
||||
tree_builder.sink.trace(trc);
|
||||
}
|
||||
}
|
||||
|
||||
type ParseNodeId = usize;
|
||||
|
||||
#[derive(JSTraceable, Clone, HeapSizeOf)]
|
||||
pub struct ParseNode {
|
||||
id: ParseNodeId,
|
||||
qual_name: Option<QualName>,
|
||||
}
|
||||
|
||||
#[derive(JSTraceable, HeapSizeOf)]
|
||||
struct ParseNodeData {
|
||||
contents: Option<ParseNode>,
|
||||
is_integration_point: bool,
|
||||
}
|
||||
|
||||
impl Default for ParseNodeData {
|
||||
fn default() -> ParseNodeData {
|
||||
ParseNodeData {
|
||||
contents: None,
|
||||
is_integration_point: false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
enum ParseOperation {
|
||||
GetTemplateContents(ParseNodeId, ParseNodeId),
|
||||
CreateElement(ParseNodeId, QualName, Vec<Attribute>),
|
||||
CreateComment(StrTendril, ParseNodeId),
|
||||
// sibling, node to be inserted
|
||||
AppendBeforeSibling(ParseNodeId, NodeOrText<ParseNode>),
|
||||
// parent, node to be inserted
|
||||
Append(ParseNodeId, NodeOrText<ParseNode>),
|
||||
AppendDoctypeToDocument(StrTendril, StrTendril, StrTendril),
|
||||
AddAttrsIfMissing(ParseNodeId, Vec<Attribute>),
|
||||
RemoveFromParent(ParseNodeId),
|
||||
MarkScriptAlreadyStarted(ParseNodeId),
|
||||
ReparentChildren(ParseNodeId, ParseNodeId),
|
||||
AssociateWithForm(ParseNodeId, ParseNodeId),
|
||||
CreatePI(ParseNodeId, StrTendril, StrTendril),
|
||||
Pop(ParseNodeId),
|
||||
}
|
||||
|
||||
#[derive(JSTraceable, HeapSizeOf)]
|
||||
#[must_root]
|
||||
pub struct Sink {
|
||||
base_url: ServoUrl,
|
||||
document: JS<Document>,
|
||||
current_line: u64,
|
||||
script: MutNullableJS<HTMLScriptElement>,
|
||||
parse_node_data: HashMap<ParseNodeId, ParseNodeData>,
|
||||
next_parse_node_id: Cell<ParseNodeId>,
|
||||
nodes: HashMap<ParseNodeId, JS<Node>>,
|
||||
document_node: ParseNode,
|
||||
}
|
||||
|
||||
impl Sink {
|
||||
fn new(base_url: ServoUrl, document: &Document) -> Sink {
|
||||
let mut sink = Sink {
|
||||
base_url: base_url,
|
||||
document: JS::from_ref(document),
|
||||
current_line: 1,
|
||||
script: Default::default(),
|
||||
parse_node_data: HashMap::new(),
|
||||
next_parse_node_id: Cell::new(1),
|
||||
nodes: HashMap::new(),
|
||||
document_node: ParseNode {
|
||||
id: 0,
|
||||
qual_name: None,
|
||||
}
|
||||
};
|
||||
let data = ParseNodeData::default();
|
||||
sink.insert_parse_node_data(0, data);
|
||||
sink.insert_node(0, JS::from_ref(document.upcast()));
|
||||
sink
|
||||
}
|
||||
|
||||
fn new_parse_node(&mut self) -> ParseNode {
|
||||
let id = self.next_parse_node_id.get();
|
||||
let data = ParseNodeData::default();
|
||||
self.insert_parse_node_data(id, data);
|
||||
self.next_parse_node_id.set(id + 1);
|
||||
ParseNode {
|
||||
id: id,
|
||||
qual_name: None,
|
||||
}
|
||||
}
|
||||
|
||||
fn insert_node(&mut self, id: ParseNodeId, node: JS<Node>) {
|
||||
|
@ -317,17 +222,29 @@ impl Tokenizer {
|
|||
self.nodes.get(id).expect("Node not found!")
|
||||
}
|
||||
|
||||
fn insert_parse_node_data(&mut self, id: ParseNodeId, data: ParseNodeData) {
|
||||
assert!(self.parse_node_data.insert(id, data).is_none());
|
||||
}
|
||||
|
||||
fn get_parse_node_data<'a>(&'a self, id: &'a ParseNodeId) -> &'a ParseNodeData {
|
||||
self.parse_node_data.get(id).expect("Parse Node data not found!")
|
||||
}
|
||||
|
||||
fn get_parse_node_data_mut<'a>(&'a mut self, id: &'a ParseNodeId) -> &'a mut ParseNodeData {
|
||||
self.parse_node_data.get_mut(id).expect("Parse Node data not found!")
|
||||
}
|
||||
|
||||
fn process_operation(&mut self, op: ParseOperation) {
|
||||
let document = Root::from_ref(&**self.get_node(&0));
|
||||
let document = document.downcast::<Document>().expect("Document node should be downcasted!");
|
||||
match op {
|
||||
ParseOperation::GetTemplateContents { target, contents } => {
|
||||
ParseOperation::GetTemplateContents(target, contents) => {
|
||||
let target = Root::from_ref(&**self.get_node(&target));
|
||||
let template = target.downcast::<HTMLTemplateElement>().expect(
|
||||
"Tried to extract contents from non-template element while parsing");
|
||||
self.insert_node(contents, JS::from_ref(template.Content().upcast()));
|
||||
}
|
||||
ParseOperation::CreateElement { node, name, attrs, current_line } => {
|
||||
ParseOperation::CreateElement(id, name, attrs) => {
|
||||
let is = attrs.iter()
|
||||
.find(|attr| attr.name.local.eq_str_ignore_ascii_case("is"))
|
||||
.map(|attr| LocalName::from(&*attr.value));
|
||||
|
@ -335,72 +252,68 @@ impl Tokenizer {
|
|||
let elem = Element::create(name,
|
||||
is,
|
||||
&*self.document,
|
||||
ElementCreator::ParserCreated(current_line),
|
||||
ElementCreator::ParserCreated(self.current_line),
|
||||
CustomElementCreationMode::Synchronous);
|
||||
for attr in attrs {
|
||||
elem.set_attribute_from_parser(attr.name, DOMString::from(attr.value), None);
|
||||
elem.set_attribute_from_parser(attr.name, DOMString::from(String::from(attr.value)), None);
|
||||
}
|
||||
|
||||
self.insert_node(node, JS::from_ref(elem.upcast()));
|
||||
self.insert_node(id, JS::from_ref(elem.upcast()));
|
||||
}
|
||||
ParseOperation::CreateComment { text, node } => {
|
||||
let comment = Comment::new(DOMString::from(text), document);
|
||||
self.insert_node(node, JS::from_ref(&comment.upcast()));
|
||||
ParseOperation::CreateComment(text, id) => {
|
||||
let comment = Comment::new(DOMString::from(String::from(text)), document);
|
||||
self.insert_node(id, JS::from_ref(&comment.upcast()));
|
||||
}
|
||||
ParseOperation::AppendBeforeSibling { sibling, node } => {
|
||||
ParseOperation::AppendBeforeSibling(sibling, node) => {
|
||||
let node = match node {
|
||||
NodeOrText::Node(n) => HtmlNodeOrText::AppendNode(JS::from_ref(&**self.get_node(&n.id))),
|
||||
NodeOrText::Text(text) => HtmlNodeOrText::AppendText(
|
||||
Tendril::from(text)
|
||||
)
|
||||
NodeOrText::AppendNode(n) => NodeOrText::AppendNode(JS::from_ref(&**self.get_node(&n.id))),
|
||||
NodeOrText::AppendText(text) => NodeOrText::AppendText(text)
|
||||
};
|
||||
let sibling = &**self.get_node(&sibling);
|
||||
let parent = &*sibling.GetParentNode().expect("append_before_sibling called on node without parent");
|
||||
|
||||
super::insert(parent, Some(sibling), node);
|
||||
}
|
||||
ParseOperation::Append { parent, node } => {
|
||||
ParseOperation::Append(parent, node) => {
|
||||
let node = match node {
|
||||
NodeOrText::Node(n) => HtmlNodeOrText::AppendNode(JS::from_ref(&**self.get_node(&n.id))),
|
||||
NodeOrText::Text(text) => HtmlNodeOrText::AppendText(
|
||||
Tendril::from(text)
|
||||
)
|
||||
NodeOrText::AppendNode(n) => NodeOrText::AppendNode(JS::from_ref(&**self.get_node(&n.id))),
|
||||
NodeOrText::AppendText(text) => NodeOrText::AppendText(text)
|
||||
};
|
||||
|
||||
let parent = &**self.get_node(&parent);
|
||||
super::insert(parent, None, node);
|
||||
}
|
||||
ParseOperation::AppendDoctypeToDocument { name, public_id, system_id } => {
|
||||
ParseOperation::AppendDoctypeToDocument(name, public_id, system_id) => {
|
||||
let doctype = DocumentType::new(
|
||||
DOMString::from(String::from(name)), Some(DOMString::from(public_id)),
|
||||
Some(DOMString::from(system_id)), document);
|
||||
DOMString::from(String::from(name)), Some(DOMString::from(String::from(public_id))),
|
||||
Some(DOMString::from(String::from(system_id))), document);
|
||||
|
||||
document.upcast::<Node>().AppendChild(doctype.upcast()).expect("Appending failed");
|
||||
}
|
||||
ParseOperation::AddAttrsIfMissing { target, attrs } => {
|
||||
let elem = self.get_node(&target).downcast::<Element>()
|
||||
ParseOperation::AddAttrsIfMissing(target_id, attrs) => {
|
||||
let elem = self.get_node(&target_id).downcast::<Element>()
|
||||
.expect("tried to set attrs on non-Element in HTML parsing");
|
||||
for attr in attrs {
|
||||
elem.set_attribute_from_parser(attr.name, DOMString::from(attr.value), None);
|
||||
elem.set_attribute_from_parser(attr.name, DOMString::from(String::from(attr.value)), None);
|
||||
}
|
||||
}
|
||||
ParseOperation::RemoveFromParent { target } => {
|
||||
ParseOperation::RemoveFromParent(target) => {
|
||||
if let Some(ref parent) = self.get_node(&target).GetParentNode() {
|
||||
parent.RemoveChild(&**self.get_node(&target)).unwrap();
|
||||
}
|
||||
}
|
||||
ParseOperation::MarkScriptAlreadyStarted { node } => {
|
||||
ParseOperation::MarkScriptAlreadyStarted(node) => {
|
||||
let script = self.get_node(&node).downcast::<HTMLScriptElement>();
|
||||
script.map(|script| script.set_already_started(true));
|
||||
}
|
||||
ParseOperation::ReparentChildren { parent, new_parent } => {
|
||||
ParseOperation::ReparentChildren(parent, new_parent) => {
|
||||
let parent = self.get_node(&parent);
|
||||
let new_parent = self.get_node(&new_parent);
|
||||
while let Some(child) = parent.GetFirstChild() {
|
||||
new_parent.AppendChild(&child).unwrap();
|
||||
}
|
||||
}
|
||||
ParseOperation::AssociateWithForm { target, form } => {
|
||||
ParseOperation::AssociateWithForm(target, form) => {
|
||||
let form = self.get_node(&form);
|
||||
let form = Root::downcast::<HTMLFormElement>(Root::from_ref(&**form))
|
||||
.expect("Owner must be a form element");
|
||||
|
@ -416,141 +329,20 @@ impl Tokenizer {
|
|||
assert!(node.NodeName() == "KEYGEN", "Unknown form-associatable element");
|
||||
}
|
||||
}
|
||||
ParseOperation::Pop { node } => {
|
||||
ParseOperation::Pop(node) => {
|
||||
vtable_for(self.get_node(&node)).pop();
|
||||
}
|
||||
ParseOperation::CreatePI { node, target, data } => {
|
||||
ParseOperation::CreatePI(node, target, data) => {
|
||||
let pi = ProcessingInstruction::new(
|
||||
DOMString::from(target),
|
||||
DOMString::from(data),
|
||||
document);
|
||||
DOMString::from(String::from(target)),
|
||||
DOMString::from(String::from(data)),
|
||||
document);
|
||||
self.insert_node(node, JS::from_ref(pi.upcast()));
|
||||
}
|
||||
ParseOperation::SetQuirksMode { mode } => {
|
||||
document.set_quirks_mode(mode);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn run(sink: Sink,
|
||||
fragment_context_is_some: bool,
|
||||
ctxt_parse_node: Option<ParseNode>,
|
||||
form_parse_node: Option<ParseNode>,
|
||||
sender: Sender<ToTokenizerMsg>,
|
||||
receiver: Receiver<ToHtmlTokenizerMsg>) {
|
||||
let options = TreeBuilderOpts {
|
||||
ignore_missing_rules: true,
|
||||
.. Default::default()
|
||||
};
|
||||
|
||||
let mut html_tokenizer = if fragment_context_is_some {
|
||||
let tb = TreeBuilder::new_for_fragment(
|
||||
sink,
|
||||
ctxt_parse_node.unwrap(),
|
||||
form_parse_node,
|
||||
options);
|
||||
|
||||
let tok_options = TokenizerOpts {
|
||||
initial_state: Some(tb.tokenizer_state_for_context_elem()),
|
||||
.. Default::default()
|
||||
};
|
||||
|
||||
HtmlTokenizer::new(tb, tok_options)
|
||||
} else {
|
||||
HtmlTokenizer::new(TreeBuilder::new(sink, options), Default::default())
|
||||
};
|
||||
|
||||
loop {
|
||||
match receiver.recv().expect("Unexpected channel panic in html parser thread") {
|
||||
ToHtmlTokenizerMsg::Feed { input } => {
|
||||
let mut input = create_buffer_queue(input);
|
||||
let res = html_tokenizer.feed(&mut input);
|
||||
|
||||
// Gather changes to 'input' and place them in 'updated_input',
|
||||
// which will be sent to the main thread to update feed method's 'input'
|
||||
let mut updated_input = VecDeque::new();
|
||||
while let Some(st) = input.pop_front() {
|
||||
updated_input.push_back(SendTendril::from(st));
|
||||
}
|
||||
|
||||
let res = match res {
|
||||
TokenizerResult::Done => ToTokenizerMsg::TokenizerResultDone { updated_input },
|
||||
TokenizerResult::Script(script) => ToTokenizerMsg::TokenizerResultScript { script, updated_input }
|
||||
};
|
||||
sender.send(res).unwrap();
|
||||
},
|
||||
ToHtmlTokenizerMsg::End => {
|
||||
html_tokenizer.end();
|
||||
sender.send(ToTokenizerMsg::End).unwrap();
|
||||
break;
|
||||
},
|
||||
ToHtmlTokenizerMsg::SetPlainTextState => html_tokenizer.set_plaintext_state()
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(JSTraceable, HeapSizeOf, Default)]
|
||||
struct ParseNodeData {
|
||||
contents: Option<ParseNode>,
|
||||
is_integration_point: bool,
|
||||
}
|
||||
|
||||
pub struct Sink {
|
||||
current_line: u64,
|
||||
parse_node_data: HashMap<ParseNodeId, ParseNodeData>,
|
||||
next_parse_node_id: Cell<ParseNodeId>,
|
||||
document_node: ParseNode,
|
||||
sender: Sender<ToTokenizerMsg>,
|
||||
receiver: Receiver<ToSinkMsg>,
|
||||
}
|
||||
|
||||
impl Sink {
|
||||
fn new(sender: Sender<ToTokenizerMsg>, receiver: Receiver<ToSinkMsg>) -> Sink {
|
||||
let mut sink = Sink {
|
||||
current_line: 1,
|
||||
parse_node_data: HashMap::new(),
|
||||
next_parse_node_id: Cell::new(1),
|
||||
document_node: ParseNode {
|
||||
id: 0,
|
||||
qual_name: None,
|
||||
},
|
||||
sender: sender,
|
||||
receiver: receiver,
|
||||
};
|
||||
let data = ParseNodeData::default();
|
||||
sink.insert_parse_node_data(0, data);
|
||||
sink
|
||||
}
|
||||
|
||||
fn new_parse_node(&mut self) -> ParseNode {
|
||||
let id = self.next_parse_node_id.get();
|
||||
let data = ParseNodeData::default();
|
||||
self.insert_parse_node_data(id, data);
|
||||
self.next_parse_node_id.set(id + 1);
|
||||
ParseNode {
|
||||
id: id,
|
||||
qual_name: None,
|
||||
}
|
||||
}
|
||||
|
||||
fn send_op(&self, op: ParseOperation) {
|
||||
self.sender.send(ToTokenizerMsg::ProcessOperation(op)).unwrap();
|
||||
}
|
||||
|
||||
fn insert_parse_node_data(&mut self, id: ParseNodeId, data: ParseNodeData) {
|
||||
assert!(self.parse_node_data.insert(id, data).is_none());
|
||||
}
|
||||
|
||||
fn get_parse_node_data<'a>(&'a self, id: &'a ParseNodeId) -> &'a ParseNodeData {
|
||||
self.parse_node_data.get(id).expect("Parse Node data not found!")
|
||||
}
|
||||
|
||||
fn get_parse_node_data_mut<'a>(&'a mut self, id: &'a ParseNodeId) -> &'a mut ParseNodeData {
|
||||
self.parse_node_data.get_mut(id).expect("Parse Node data not found!")
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(unrooted_must_root)]
|
||||
impl TreeSink for Sink {
|
||||
type Output = Self;
|
||||
|
@ -571,7 +363,7 @@ impl TreeSink for Sink {
|
|||
let mut data = self.get_parse_node_data_mut(&target.id);
|
||||
data.contents = Some(node.clone());
|
||||
}
|
||||
self.send_op(ParseOperation::GetTemplateContents { target: target.id, contents: node.id });
|
||||
self.process_operation(ParseOperation::GetTemplateContents(target.id, node.id));
|
||||
node
|
||||
}
|
||||
|
||||
|
@ -584,20 +376,21 @@ impl TreeSink for Sink {
|
|||
}
|
||||
|
||||
fn same_tree(&self, x: &Self::Handle, y: &Self::Handle) -> bool {
|
||||
self.sender.send(ToTokenizerMsg::IsSameTree(x.id, y.id)).unwrap();
|
||||
match self.receiver.recv().expect("Unexpected channel panic in html parser thread.") {
|
||||
ToSinkMsg::IsSameTree(result) => result,
|
||||
_ => unreachable!(),
|
||||
}
|
||||
let x = self.get_node(&x.id);
|
||||
let y = self.get_node(&y.id);
|
||||
|
||||
let x = x.downcast::<Element>().expect("Element node expected");
|
||||
let y = y.downcast::<Element>().expect("Element node expected");
|
||||
x.is_in_same_home_subtree(y)
|
||||
}
|
||||
|
||||
fn create_element(&mut self, name: QualName, html_attrs: Vec<HtmlAttribute>, _flags: ElementFlags)
|
||||
fn create_element(&mut self, name: QualName, attrs: Vec<Attribute>, _flags: ElementFlags)
|
||||
-> Self::Handle {
|
||||
let mut node = self.new_parse_node();
|
||||
node.qual_name = Some(name.clone());
|
||||
{
|
||||
let mut node_data = self.get_parse_node_data_mut(&node.id);
|
||||
node_data.is_integration_point = html_attrs.iter()
|
||||
node_data.is_integration_point = attrs.iter()
|
||||
.any(|attr| {
|
||||
let attr_value = &String::from(attr.value.clone());
|
||||
(attr.name.local == local_name!("encoding") && attr.name.ns == ns!()) &&
|
||||
|
@ -605,57 +398,34 @@ impl TreeSink for Sink {
|
|||
attr_value.eq_ignore_ascii_case("application/xhtml+xml"))
|
||||
});
|
||||
}
|
||||
let attrs = html_attrs.into_iter()
|
||||
.map(|attr| Attribute { name: attr.name, value: String::from(attr.value) }).collect();
|
||||
|
||||
self.send_op(ParseOperation::CreateElement {
|
||||
node: node.id,
|
||||
name,
|
||||
attrs,
|
||||
current_line: self.current_line
|
||||
});
|
||||
self.process_operation(ParseOperation::CreateElement(node.id, name, attrs));
|
||||
node
|
||||
}
|
||||
|
||||
fn create_comment(&mut self, text: StrTendril) -> Self::Handle {
|
||||
let node = self.new_parse_node();
|
||||
self.send_op(ParseOperation::CreateComment { text: String::from(text), node: node.id });
|
||||
self.process_operation(ParseOperation::CreateComment(text, node.id));
|
||||
node
|
||||
}
|
||||
|
||||
fn create_pi(&mut self, target: StrTendril, data: StrTendril) -> ParseNode {
|
||||
let node = self.new_parse_node();
|
||||
self.send_op(ParseOperation::CreatePI {
|
||||
node: node.id,
|
||||
target: String::from(target),
|
||||
data: String::from(data)
|
||||
});
|
||||
self.process_operation(ParseOperation::CreatePI(node.id, target, data));
|
||||
node
|
||||
}
|
||||
|
||||
fn has_parent_node(&self, node: &Self::Handle) -> bool {
|
||||
self.sender.send(ToTokenizerMsg::HasParentNode(node.id)).unwrap();
|
||||
match self.receiver.recv().expect("Unexpected channel panic in html parser thread.") {
|
||||
ToSinkMsg::HasParentNode(result) => result,
|
||||
_ => unreachable!(),
|
||||
}
|
||||
self.get_node(&node.id).GetParentNode().is_some()
|
||||
}
|
||||
|
||||
fn associate_with_form(&mut self, target: &Self::Handle, form: &Self::Handle) {
|
||||
self.send_op(ParseOperation::AssociateWithForm {
|
||||
target: target.id,
|
||||
form: form.id
|
||||
});
|
||||
self.process_operation(ParseOperation::AssociateWithForm(target.id, form.id));
|
||||
}
|
||||
|
||||
fn append_before_sibling(&mut self,
|
||||
sibling: &Self::Handle,
|
||||
new_node: HtmlNodeOrText<Self::Handle>) {
|
||||
let new_node = match new_node {
|
||||
HtmlNodeOrText::AppendNode(node) => NodeOrText::Node(node),
|
||||
HtmlNodeOrText::AppendText(text) => NodeOrText::Text(String::from(text))
|
||||
};
|
||||
self.send_op(ParseOperation::AppendBeforeSibling { sibling: sibling.id, node: new_node });
|
||||
new_node: NodeOrText<Self::Handle>) {
|
||||
self.process_operation(ParseOperation::AppendBeforeSibling(sibling.id, new_node));
|
||||
}
|
||||
|
||||
fn parse_error(&mut self, msg: Cow<'static, str>) {
|
||||
|
@ -668,38 +438,28 @@ impl TreeSink for Sink {
|
|||
QuirksMode::LimitedQuirks => ServoQuirksMode::LimitedQuirks,
|
||||
QuirksMode::NoQuirks => ServoQuirksMode::NoQuirks,
|
||||
};
|
||||
self.send_op(ParseOperation::SetQuirksMode { mode });
|
||||
self.document.set_quirks_mode(mode);
|
||||
}
|
||||
|
||||
fn append(&mut self, parent: &Self::Handle, child: HtmlNodeOrText<Self::Handle>) {
|
||||
let child = match child {
|
||||
HtmlNodeOrText::AppendNode(node) => NodeOrText::Node(node),
|
||||
HtmlNodeOrText::AppendText(text) => NodeOrText::Text(String::from(text))
|
||||
};
|
||||
self.send_op(ParseOperation::Append { parent: parent.id, node: child });
|
||||
fn append(&mut self, parent: &Self::Handle, child: NodeOrText<Self::Handle>) {
|
||||
self.process_operation(ParseOperation::Append(parent.id, child));
|
||||
}
|
||||
|
||||
fn append_doctype_to_document(&mut self, name: StrTendril, public_id: StrTendril,
|
||||
system_id: StrTendril) {
|
||||
self.send_op(ParseOperation::AppendDoctypeToDocument {
|
||||
name: String::from(name),
|
||||
public_id: String::from(public_id),
|
||||
system_id: String::from(system_id)
|
||||
});
|
||||
self.process_operation(ParseOperation::AppendDoctypeToDocument(name, public_id, system_id));
|
||||
}
|
||||
|
||||
fn add_attrs_if_missing(&mut self, target: &Self::Handle, html_attrs: Vec<HtmlAttribute>) {
|
||||
let attrs = html_attrs.into_iter()
|
||||
.map(|attr| Attribute { name: attr.name, value: String::from(attr.value) }).collect();
|
||||
self.send_op(ParseOperation::AddAttrsIfMissing { target: target.id, attrs });
|
||||
fn add_attrs_if_missing(&mut self, target: &Self::Handle, attrs: Vec<Attribute>) {
|
||||
self.process_operation(ParseOperation::AddAttrsIfMissing(target.id, attrs));
|
||||
}
|
||||
|
||||
fn remove_from_parent(&mut self, target: &Self::Handle) {
|
||||
self.send_op(ParseOperation::RemoveFromParent { target: target.id });
|
||||
self.process_operation(ParseOperation::RemoveFromParent(target.id));
|
||||
}
|
||||
|
||||
fn mark_script_already_started(&mut self, node: &Self::Handle) {
|
||||
self.send_op(ParseOperation::MarkScriptAlreadyStarted { node: node.id });
|
||||
self.process_operation(ParseOperation::MarkScriptAlreadyStarted(node.id));
|
||||
}
|
||||
|
||||
fn complete_script(&mut self, _: &Self::Handle) -> NextParserState {
|
||||
|
@ -707,7 +467,7 @@ impl TreeSink for Sink {
|
|||
}
|
||||
|
||||
fn reparent_children(&mut self, parent: &Self::Handle, new_parent: &Self::Handle) {
|
||||
self.send_op(ParseOperation::ReparentChildren { parent: parent.id, new_parent: new_parent.id });
|
||||
self.process_operation(ParseOperation::ReparentChildren(parent.id, new_parent.id));
|
||||
}
|
||||
|
||||
/// https://html.spec.whatwg.org/multipage/#html-integration-point
|
||||
|
@ -722,6 +482,6 @@ impl TreeSink for Sink {
|
|||
}
|
||||
|
||||
fn pop(&mut self, node: &Self::Handle) {
|
||||
self.send_op(ParseOperation::Pop { node: node.id });
|
||||
self.process_operation(ParseOperation::Pop(node.id));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -66,8 +66,10 @@ use std::fmt;
|
|||
use std::sync::Arc;
|
||||
use std::sync::mpsc::{Receiver, Sender, RecvTimeoutError};
|
||||
use style_traits::CSSPixel;
|
||||
use style_traits::DevicePixel;
|
||||
use webdriver_msg::{LoadStatus, WebDriverScriptCommand};
|
||||
use webrender_api::{ClipId, DevicePixel, ImageKey};
|
||||
use webrender_api::ClipId;
|
||||
use webrender_api::ImageKey;
|
||||
use webvr_traits::{WebVREvent, WebVRMsg};
|
||||
|
||||
pub use script_msg::{LayoutMsg, ScriptMsg, EventResult, LogEntry};
|
||||
|
|
|
@ -26,8 +26,6 @@ unstable = []
|
|||
bitflags = "0.7"
|
||||
matches = "0.1"
|
||||
cssparser = "0.18"
|
||||
heapsize = "0.4"
|
||||
heapsize_derive = "0.1"
|
||||
log = "0.3"
|
||||
fnv = "1.0"
|
||||
phf = "0.7.18"
|
||||
|
|
|
@@ -48,7 +48,7 @@ pub enum VisitedHandlingMode {
/// Which quirks mode is this document in.
///
/// See: https://quirks.spec.whatwg.org/
#[derive(PartialEq, Eq, Copy, Clone, Hash, Debug, HeapSizeOf)]
#[derive(PartialEq, Eq, Copy, Clone, Hash, Debug)]
pub enum QuirksMode {
/// Quirks mode.
Quirks,
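The derive list on `QuirksMode` loses `HeapSizeOf` because the hunks around it drop selectors' `heapsize`/`heapsize_derive` dependency. As a rough sketch of what that derive provides, here is a hand-rolled equivalent with a locally defined trait standing in for the `heapsize` crate's `HeapSizeOf`; the extra `QuirksMode` variants are assumed from the surrounding file, not shown in the hunk:

```rust
// Local stand-in for the trait that #[derive(HeapSizeOf)] implements via the
// heapsize crate; defined here so the sketch compiles without that dependency.
trait HeapSizeOf {
    /// Bytes owned on the heap by this value's children.
    fn heap_size_of_children(&self) -> usize;
}

#[allow(dead_code)]
#[derive(PartialEq, Eq, Copy, Clone, Hash, Debug)]
enum QuirksMode {
    Quirks,
    LimitedQuirks,
    NoQuirks,
}

// A fieldless enum owns no heap memory, so the impl the derive would have
// generated is trivially zero.
impl HeapSizeOf for QuirksMode {
    fn heap_size_of_children(&self) -> usize {
        0
    }
}

fn main() {
    assert_eq!(QuirksMode::Quirks.heap_size_of_children(), 0);
    println!("{:?}", QuirksMode::NoQuirks);
}
```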
@@ -7,11 +7,9 @@

#[macro_use] extern crate bitflags;
#[macro_use] extern crate cssparser;
#[macro_use] extern crate heapsize_derive;
#[macro_use] extern crate log;
#[macro_use] extern crate matches;
extern crate fnv;
extern crate heapsize;
extern crate phf;
extern crate precomputed_hash;
#[cfg(test)] #[macro_use] extern crate size_of_test;
@@ -174,6 +174,10 @@ impl<Window> Browser<Window> where Window: WindowMethods + 'static {
None
};

let framebuffer_size = window.framebuffer_size();
let framebuffer_size = webrender_api::DeviceUintSize::new(framebuffer_size.width,
framebuffer_size.height);

webrender::Renderer::new(window.gl(), webrender::RendererOptions {
device_pixel_ratio: device_pixel_ratio,
resource_override_path: Some(resource_path),
@@ -187,12 +191,9 @@ impl<Window> Browser<Window> where Window: WindowMethods + 'static {
renderer_kind: renderer_kind,
enable_subpixel_aa: opts.enable_subpixel_text_antialiasing,
..Default::default()
}).expect("Unable to initialize webrender!")
}, framebuffer_size).expect("Unable to initialize webrender!")
};

let webrender_api = webrender_api_sender.create_api();
let webrender_document = webrender_api.add_document(window.framebuffer_size());

// Important that this call is done in a single-threaded fashion, we
// can't defer it after `create_constellation` has started.
script::init();
@@ -210,8 +211,7 @@ impl<Window> Browser<Window> where Window: WindowMethods + 'static {
devtools_chan,
supports_clipboard,
&webrender,
webrender_document,
webrender_api_sender);
webrender_api_sender.clone());

// Send the constellation's swmanager sender to service worker manager thread
script::init_service_workers(sw_senders);
@@ -230,9 +230,8 @@ impl<Window> Browser<Window> where Window: WindowMethods + 'static {
constellation_chan: constellation_chan.clone(),
time_profiler_chan: time_profiler_chan,
mem_profiler_chan: mem_profiler_chan,
webrender,
webrender_document,
webrender_api,
webrender: webrender,
webrender_api_sender: webrender_api_sender,
});

Browser {
@@ -288,7 +287,6 @@ fn create_constellation(user_agent: Cow<'static, str>,
devtools_chan: Option<Sender<devtools_traits::DevtoolsControlMsg>>,
supports_clipboard: bool,
webrender: &webrender::Renderer,
webrender_document: webrender_api::DocumentId,
webrender_api_sender: webrender_api::RenderApiSender)
-> (Sender<ConstellationMsg>, SWManagerSenders) {
let bluetooth_thread: IpcSender<BluetoothRequest> = BluetoothThreadFactory::new();
@@ -304,18 +302,17 @@ fn create_constellation(user_agent: Cow<'static, str>,
let resource_sender = public_resource_threads.sender();

let initial_state = InitialConstellationState {
compositor_proxy,
debugger_chan,
devtools_chan,
bluetooth_thread,
font_cache_thread,
public_resource_threads,
private_resource_threads,
time_profiler_chan,
mem_profiler_chan,
supports_clipboard,
webrender_document,
webrender_api_sender,
compositor_proxy: compositor_proxy,
debugger_chan: debugger_chan,
devtools_chan: devtools_chan,
bluetooth_thread: bluetooth_thread,
font_cache_thread: font_cache_thread,
public_resource_threads: public_resource_threads,
private_resource_threads: private_resource_threads,
time_profiler_chan: time_profiler_chan,
mem_profiler_chan: mem_profiler_chan,
supports_clipboard: supports_clipboard,
webrender_api_sender: webrender_api_sender,
};
let (constellation_chan, from_swmanager_sender) =
Constellation::<script_layout_interface::message::Msg,
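Several of the hunks above only switch between Rust's field init shorthand (`supports_clipboard,`) and the explicit `field: value` form (`supports_clipboard: supports_clipboard,`), alongside the `..Default::default()` struct-update syntax in the `RendererOptions` literal. Both initializer styles compile to the same struct value; a small sketch with a hypothetical config struct whose fields only loosely echo names from the hunks:

```rust
// Hypothetical config struct used solely to illustrate the two initializer
// styles and struct-update syntax; it is not Servo's InitialConstellationState.
#[derive(Default, Debug)]
struct InitialState {
    supports_clipboard: bool,
    device_pixel_ratio: f32,
    resource_override_path: Option<String>,
}

fn main() {
    let supports_clipboard = true;
    let device_pixel_ratio = 2.0;

    // Field init shorthand (the form being backed out): the local variable
    // name doubles as the field name; remaining fields come from Default.
    let a = InitialState {
        supports_clipboard,
        device_pixel_ratio,
        ..Default::default()
    };

    // Explicit `field: value` form (the form being restored); semantically
    // identical, just more verbose.
    let b = InitialState {
        supports_clipboard: supports_clipboard,
        device_pixel_ratio: device_pixel_ratio,
        resource_override_path: None,
    };

    println!("{:?} {:?}", a, b);
}
```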
@@ -22,4 +22,3 @@ heapsize = {version = "0.4", optional = true}
heapsize_derive = {version = "0.1", optional = true}
selectors = { path = "../selectors" }
serde = {version = "1.0", optional = true}
webrender_api = {git = "https://github.com/servo/webrender"}
@@ -21,9 +21,6 @@ extern crate euclid;
#[cfg(feature = "servo")] #[macro_use] extern crate heapsize_derive;
extern crate selectors;
#[cfg(feature = "servo")] #[macro_use] extern crate serde;
extern crate webrender_api;

pub use webrender_api::DevicePixel;

use cssparser::{CowRcStr, Token};
use selectors::parser::SelectorParseError;
@@ -62,6 +59,12 @@ impl PinchZoomFactor {
#[derive(Clone, Copy, Debug)]
pub enum CSSPixel {}

/// One hardware pixel.
///
/// This unit corresponds to the smallest addressable element of the display hardware.
#[derive(Copy, Clone, Debug)]
pub enum DevicePixel {}

// In summary, the hierarchy of pixel units and the factors to convert from one to the next:
//
// DevicePixel
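The restored `CSSPixel` and `DevicePixel` definitions are empty enums used purely as type-level unit tags for euclid's typed geometry (`TypedSize2D<u32, DevicePixel>` and friends elsewhere in this commit). A self-contained sketch of the idea, using a toy phantom-typed length rather than euclid's actual types:

```rust
use std::marker::PhantomData;

// Zero-sized tag types, mirroring the CSSPixel / DevicePixel enums the hunk
// restores to style_traits (re-declared here so the sketch stands alone).
#[derive(Clone, Copy, Debug)]
pub enum CSSPixel {}

#[derive(Copy, Clone, Debug)]
pub enum DevicePixel {}

// A minimal typed length in the spirit of euclid's typed geometry; the real
// code uses euclid::TypedSize2D / ScaleFactor rather than this toy type.
#[derive(Clone, Copy, Debug)]
struct Length<T, Unit> {
    value: T,
    _unit: PhantomData<Unit>,
}

impl<T, Unit> Length<T, Unit> {
    fn new(value: T) -> Self {
        Length { value, _unit: PhantomData }
    }
}

// Converting between units must go through an explicit scale factor, so CSS
// pixels and device pixels can never be mixed up silently.
fn to_device(css: Length<f32, CSSPixel>, hidpi_factor: f32) -> Length<f32, DevicePixel> {
    Length::new(css.value * hidpi_factor)
}

fn main() {
    let css: Length<f32, CSSPixel> = Length::new(100.0);
    let device = to_device(css, 2.0);
    println!("{} css px -> {} device px", css.value, device.value);
}
```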
@@ -27,8 +27,6 @@ mac-rel-wpt4:
  - ./mach build --release
  - ./mach test-wpt --release --processes 4 --total-chunks 4 --this-chunk 4 --log-raw test-wpt.log --log-errorsummary wpt-errorsummary.log --always-succeed
  - ./mach filter-intermittents wpt-errorsummary.log --log-intermittents intermittents.log --log-filteredsummary filtered-wpt-errorsummary.log --use-tracker
  - ./mach test-wpt --release --pref dom.servoparser.async_html_tokenizer.enabled --processes=8 --log-raw test-async-parsing.log --log-errorsummary async-parsing-errorsummary.log --always-succeed domparsing html/syntax html/dom/documents html/dom/dynamic-markup-insertion
  - ./mach filter-intermittents async-parsing-errorsummary.log --log-intermittents async-parsing-intermittents.log --log-filteredsummary filtered-async-parsing-errorsummary.log --use-tracker

mac-dev-unit:
  - ./mach clean-nightlies --keep 3 --force
@@ -19,7 +19,7 @@ use wrappers::CefWrap;

use compositing::compositor_thread::EventLoopWaker;
use compositing::windowing::{WindowEvent, WindowMethods};
use euclid::{Point2D, TypedPoint2D, Size2D, TypedSize2D, ScaleFactor};
use euclid::{Point2D, TypedPoint2D, TypedRect, Size2D, TypedSize2D, ScaleFactor};
use gleam::gl;
use msg::constellation_msg::{Key, KeyModifiers};
use net_traits::net_error_list::NetError;
@@ -38,7 +38,6 @@ use style_traits::DevicePixel;
extern crate x11;
#[cfg(target_os="linux")]
use self::x11::xlib::{XInitThreads,XOpenDisplay};
use webrender_api::{DeviceUintSize, DeviceUintRect};

#[cfg(target_os="linux")]
pub static mut DISPLAY: *mut c_void = 0 as *mut c_void;
@@ -47,7 +46,7 @@ pub static mut DISPLAY: *mut c_void = 0 as *mut c_void;
#[derive(Clone)]
pub struct Window {
cef_browser: RefCell<Option<CefBrowser>>,
size: DeviceUintSize,
size: TypedSize2D<u32, DevicePixel>,
gl: Rc<gl::Gl>,
}

@@ -175,7 +174,7 @@ impl WindowMethods for Window {
self.gl.clone()
}

fn framebuffer_size(&self) -> DeviceUintSize {
fn framebuffer_size(&self) -> TypedSize2D<u32, DevicePixel> {
let browser = self.cef_browser.borrow();
match *browser {
None => self.size,
@@ -206,16 +205,16 @@ impl WindowMethods for Window {
}
}

DeviceUintSize::new(rect.width as u32, rect.height as u32)
TypedSize2D::new(rect.width as u32, rect.height as u32)
}
}
}
}

fn window_rect(&self) -> DeviceUintRect {
fn window_rect(&self) -> TypedRect<u32, DevicePixel> {
let size = self.framebuffer_size();
let origin = TypedPoint2D::zero();
DeviceUintRect::new(origin, size)
TypedRect::new(origin, size)
}

fn size(&self) -> TypedSize2D<f32, DeviceIndependentPixel> {
@@ -8,7 +8,7 @@ use NestedEventLoopListener;
use compositing::compositor_thread::EventLoopWaker;
use compositing::windowing::{AnimationState, MouseWindowEvent};
use compositing::windowing::{WindowEvent, WindowMethods};
use euclid::{Point2D, Size2D, TypedPoint2D, TypedVector2D, ScaleFactor, TypedSize2D};
use euclid::{Point2D, Size2D, TypedPoint2D, TypedVector2D, TypedRect, ScaleFactor, TypedSize2D};
#[cfg(target_os = "windows")]
use gdi32;
use gleam::gl;
@@ -43,7 +43,7 @@ use style_traits::DevicePixel;
use style_traits::cursor::Cursor;
#[cfg(target_os = "windows")]
use user32;
use webrender_api::{DeviceUintRect, DeviceUintSize, ScrollLocation};
use webrender_api::ScrollLocation;
#[cfg(target_os = "windows")]
use winapi;

@@ -962,24 +962,24 @@ impl WindowMethods for Window {
self.gl.clone()
}

fn framebuffer_size(&self) -> DeviceUintSize {
fn framebuffer_size(&self) -> TypedSize2D<u32, DevicePixel> {
match self.kind {
WindowKind::Window(ref window) => {
let scale_factor = window.hidpi_factor() as u32;
// TODO(ajeffrey): can this fail?
let (width, height) = window.get_inner_size().expect("Failed to get window inner size.");
DeviceUintSize::new(width, height) * scale_factor
TypedSize2D::new(width * scale_factor, height * scale_factor)
}
WindowKind::Headless(ref context) => {
DeviceUintSize::new(context.width, context.height)
TypedSize2D::new(context.width, context.height)
}
}
}

fn window_rect(&self) -> DeviceUintRect {
fn window_rect(&self) -> TypedRect<u32, DevicePixel> {
let size = self.framebuffer_size();
let origin = TypedPoint2D::zero();
DeviceUintRect::new(origin, size)
TypedRect::new(origin, size)
}

fn size(&self) -> TypedSize2D<f32, DeviceIndependentPixel> {
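The glutin hunk above reverts `framebuffer_size` from returning webrender_api's `DeviceUintSize` to a `TypedSize2D<u32, DevicePixel>`, scaling each component by the hidpi factor before constructing the size. A rough, self-contained sketch of that windowed-versus-headless logic; `WindowKind`, the field names, and the plain tuple return type are simplified stand-ins for the real glutin/euclid-based code:

```rust
// Simplified stand-ins for the window backends matched on in the hunk above.
struct HeadlessContext {
    width: u32,
    height: u32,
}

struct GlutinWindow {
    inner_size: (u32, u32),
    hidpi_factor: u32,
}

enum WindowKind {
    Window(GlutinWindow),
    Headless(HeadlessContext),
}

/// Device-pixel framebuffer size, mirroring the shape of framebuffer_size() above.
fn framebuffer_size(kind: &WindowKind) -> (u32, u32) {
    match *kind {
        WindowKind::Window(ref window) => {
            let scale = window.hidpi_factor;
            let (width, height) = window.inner_size;
            // The restored code scales each component before building the
            // size, instead of multiplying a whole DeviceUintSize afterwards.
            (width * scale, height * scale)
        }
        WindowKind::Headless(ref context) => (context.width, context.height),
    }
}

fn main() {
    let windowed = WindowKind::Window(GlutinWindow { inner_size: (800, 600), hidpi_factor: 2 });
    let headless = WindowKind::Headless(HeadlessContext { width: 1024, height: 768 });
    println!("windowed: {:?}", framebuffer_size(&windowed));
    println!("headless: {:?}", framebuffer_size(&headless));
}
```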