Mirror of https://github.com/servo/servo.git, synced 2025-06-06 16:45:39 +00:00
Upgrade rustc to 1.83 (#34793)
* Upgrade rustc to 1.83
* Fix crown (change copied from linked clippy function)
* Fix named lifetime lint
* Bump shell.nix
* Fix non-local impl warnings
* Format with 1.83 formatting changes
* Fix manual non-local impl
* More fixes for crown
* Fix tidy
* Fix needless_return lints
* Fix doc comment lint
* Fix missing wait lint
* Allow needless_lifetimes lint
* More doc comments
* More needless_returns
* is_empty lint
* Fix needless_lifetime lints
* Fix div_ceil lint
* Allow non-minimal bool
* Non-local impl in constellation
* Missing wait in constellation
* fmt
* Remove useless lints table
* Fixup comments
* Allow non-local definition in sandboxing code to simplify feature flagging
* Remove wait calls and allow zombie_processes lint

Signed-off-by: Nico Burns <nico@nicoburns.com>
Parent: d581acab3b
Commit: deb819f233
35 changed files with 155 additions and 139 deletions
@@ -13,7 +13,7 @@ authors = ["The Servo Project Developers"]
 license = "MPL-2.0"
 edition = "2021"
 publish = false
-rust-version = "1.82.0"
+rust-version = "1.83.0"
 
 [workspace.dependencies]
 accountable-refcell = "0.2.0"
@@ -189,7 +189,7 @@ fn matches_filters(device: &BluetoothDevice, filters: &BluetoothScanfilterSequen
         return false;
     }
 
-    return filters.iter().any(|f| matches_filter(device, f));
+    filters.iter().any(|f| matches_filter(device, f))
 }
 
 fn is_mock_adapter(adapter: &BluetoothAdapter) -> bool {
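The hunk above is typical of the needless_return cleanups in this commit: a trailing `return expr;` statement becomes the block's tail expression. A minimal standalone sketch of the same fix, using a hypothetical function that is not from the Servo tree:

fn any_even(values: &[u32]) -> bool {
    if values.is_empty() {
        // Early returns elsewhere in a function are untouched; the lint only
        // targets a `return` in the final position.
        return false;
    }
    // Previously written as `return values.iter().any(|v| v % 2 == 0);`.
    values.iter().any(|v| v % 2 == 0)
}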
@@ -3,6 +3,7 @@
  * file, You can obtain one at https://mozilla.org/MPL/2.0/. */
 
 #![deny(unsafe_code)]
+#![allow(clippy::needless_lifetimes)]
 
 mod raqote_backend;
 
@@ -1413,10 +1413,9 @@ impl<Window: WindowMethods + ?Sized> IOCompositor<Window> {
     }
 
     fn hit_test_at_point(&self, point: DevicePoint) -> Option<CompositorHitTestResult> {
-        return self
-            .hit_test_at_point_with_flags_and_pipeline(point, HitTestFlags::empty(), None)
+        self.hit_test_at_point_with_flags_and_pipeline(point, HitTestFlags::empty(), None)
             .first()
-            .cloned();
+            .cloned()
     }
 
     fn hit_test_at_point_with_flags_and_pipeline(
@@ -2,6 +2,10 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at https://mozilla.org/MPL/2.0/. */
 
+//! The constellation uses logging to perform crash reporting.
+//! The constellation receives all `warn!`, `error!` and `panic!` messages,
+//! and generates a crash report when it receives a panic.
+
 use std::borrow::ToOwned;
 use std::sync::Arc;
 use std::thread;
@@ -14,10 +18,6 @@ use log::{Level, LevelFilter, Log, Metadata, Record};
 use parking_lot::ReentrantMutex;
 use script_traits::{LogEntry, ScriptMsg as FromScriptMsg, ScriptToConstellationChan};
 
-/// The constellation uses logging to perform crash reporting.
-/// The constellation receives all `warn!`, `error!` and `panic!` messages,
-/// and generates a crash report when it receives a panic.
-
 /// A logger directed at the constellation from content processes
 /// #[derive(Clone)]
 pub struct FromScriptLogger {
@@ -25,10 +25,6 @@ pub struct FromScriptLogger {
     pub script_to_constellation_chan: Arc<ReentrantMutex<ScriptToConstellationChan>>,
 }
 
-/// The constellation uses logging to perform crash reporting.
-/// The constellation receives all `warn!`, `error!` and `panic!` messages,
-/// and generates a crash report when it receives a panic.
-
 /// A logger directed at the constellation from content processes
 impl FromScriptLogger {
    /// Create a new constellation logger.
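These three hunks consolidate the crash-reporting description, previously repeated as `///` item docs on both the struct and its impl, into a single `//!` module doc block (the "Fix doc comment lint" and "more doc comments" items in the commit message). For reference, the two comment forms differ like this; the `CrashReporter` type below is hypothetical:

//! Module-level documentation uses `//!` and describes the enclosing module as a whole.

/// Item-level documentation uses `///` and attaches to the item that follows it.
pub struct CrashReporter;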
@@ -158,6 +158,8 @@ pub fn spawn_multiprocess(content: UnprivilegedContent) -> Result<(), Error> {
     let path_to_self = env::current_exe().expect("Failed to get current executor.");
     let mut child_process = process::Command::new(path_to_self);
     setup_common(&mut child_process, token);
+
+    #[allow(clippy::zombie_processes)]
     let _ = child_process
         .spawn()
         .expect("Failed to start unsandboxed child process!");
@@ -180,7 +182,10 @@ pub fn spawn_multiprocess(content: UnprivilegedContent) -> Result<(), Error> {
     use gaol::sandbox::{self, Sandbox, SandboxMethods};
     use ipc_channel::ipc::{IpcOneShotServer, IpcSender};
 
-    impl CommandMethods for sandbox::Command {
+    // TODO: Move this impl out of the function. It is only currently here to avoid
+    // duplicating the feature flagging.
+    #[allow(non_local_definitions)]
+    impl CommandMethods for gaol::sandbox::Command {
         fn arg<T>(&mut self, arg: T)
         where
             T: AsRef<OsStr>,
@@ -216,6 +221,8 @@ pub fn spawn_multiprocess(content: UnprivilegedContent) -> Result<(), Error> {
     let path_to_self = env::current_exe().expect("Failed to get current executor.");
     let mut child_process = process::Command::new(path_to_self);
     setup_common(&mut child_process, token);
+
+    #[allow(clippy::zombie_processes)]
     let _ = child_process
         .spawn()
         .expect("Failed to start unsandboxed child process!");
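Two of the hunks above add `#[allow(clippy::zombie_processes)]`: the clippy lint that arrives with this toolchain bump warns when a spawned `Child` is never waited on. Servo intentionally detaches these child processes, so the lint is allowed at the spawn sites rather than adding `wait()` calls. A reduced sketch of the pattern, not taken from the Servo sources:

use std::process::Command;

fn spawn_detached() {
    // Never calling `wait()` leaves a zombie process table entry on Unix until
    // the parent exits; clippy's zombie_processes lint points this out. Here the
    // detachment is deliberate, so the lint is allowed instead.
    #[allow(clippy::zombie_processes)]
    let _ = Command::new("true")
        .spawn()
        .expect("failed to spawn child process");
}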
@@ -35,7 +35,7 @@ fn expand_dom_object(input: syn::DeriveInput) -> proc_macro2::TokenStream {
 
     let name = &input.ident;
     let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl();
-    let mut items = quote! {
+    let items = quote! {
         impl #impl_generics ::js::conversions::ToJSValConvertible for #name #ty_generics #where_clause {
             #[allow(unsafe_code)]
             unsafe fn to_jsval(&self,
@@ -66,7 +66,15 @@ fn expand_dom_object(input: syn::DeriveInput) -> proc_macro2::TokenStream {
                 crate::DomObject::reflector(self) == crate::DomObject::reflector(other)
             }
         }
+    };
+
+    let mut params = proc_macro2::TokenStream::new();
+    params.append_separated(
+        input.generics.type_params().map(|param| &param.ident),
+        quote! {,},
+    );
 
+    let mut dummy_items = quote! {
         // Generic trait with a blanket impl over `()` for all types.
         // becomes ambiguous if impl
         trait NoDomObjectInDomObject<A> {
@@ -83,13 +91,7 @@ fn expand_dom_object(input: syn::DeriveInput) -> proc_macro2::TokenStream {
         impl<T> NoDomObjectInDomObject<Invalid> for T where T: ?Sized + crate::DomObject {}
     };
 
-    let mut params = proc_macro2::TokenStream::new();
-    params.append_separated(
-        input.generics.type_params().map(|param| &param.ident),
-        quote! {,},
-    );
-
-    items.append_all(field_types.iter().enumerate().map(|(i, ty)| {
+    dummy_items.append_all(field_types.iter().enumerate().map(|(i, ty)| {
         let s = syn::Ident::new(&format!("S{i}"), proc_macro2::Span::call_site());
         quote! {
             struct #s<#params>(#params);
@@ -111,7 +113,8 @@ fn expand_dom_object(input: syn::DeriveInput) -> proc_macro2::TokenStream {
     );
     let tokens = quote! {
         #[allow(non_upper_case_globals)]
-        const #dummy_const: () = { #items };
+        const #dummy_const: () = { #dummy_items };
+        #items
     };
 
     tokens
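The derive-macro restructuring above is a response to the non_local_definitions warning: trait impls that a macro emits inside the `const #dummy_const: () = { ... };` wrapper count as definitions nested in a body, so the real impls (`#items`) now land at module scope and only the compile-time field checks stay inside the const. The basic shape of the warning, using hypothetical types rather than the generated Servo code:

trait Greet {
    fn greet(&self) -> String;
}

struct Guest;

fn setup() {
    // The impl is written inside a body, yet it still applies crate-wide;
    // rustc flags that as a non-local definition. The fix is either to move the
    // impl to module scope or, as the sandboxing code above does, allow the lint.
    #[allow(non_local_definitions)]
    impl Greet for Guest {
        fn greet(&self) -> String {
            "hello".to_owned()
        }
    }
}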
@@ -353,7 +353,7 @@ pub enum GlyphInfo<'a> {
     Detail(&'a GlyphStore, ByteIndex, u16),
 }
 
-impl<'a> GlyphInfo<'a> {
+impl GlyphInfo<'_> {
     pub fn id(self) -> GlyphId {
         match self {
             GlyphInfo::Simple(store, entry_i) => store.entry_buffer[entry_i.to_usize()].id(),
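This is one of several needless_lifetimes fixes: with the 1.83 toolchain, clippy suggests replacing an impl-header lifetime that is only used to name the self type with the anonymous lifetime. In isolation, on a hypothetical type:

struct Span<'a>(&'a str);

// Previously spelled `impl<'a> Span<'a> { ... }`; the named lifetime adds nothing,
// so the anonymous lifetime expresses the same impl.
impl Span<'_> {
    fn text(&self) -> &str {
        self.0
    }
}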
@@ -62,6 +62,7 @@
 
 #![deny(missing_docs)]
 #![deny(unsafe_code)]
+#![allow(clippy::needless_lifetimes)]
 
 use std::ops::{Deref, DerefMut};
 use std::str::FromStr;
@@ -1859,7 +1859,7 @@ impl<'container> PlacementState<'container> {
     fn new(
         collapsible_with_parent_start_margin: CollapsibleWithParentStartMargin,
         containing_block: &'container ContainingBlock<'container>,
-    ) -> PlacementState {
+    ) -> PlacementState<'container> {
         let is_inline_block_context =
             containing_block.style.get_box().clone_display() == Display::InlineBlock;
         PlacementState {
@@ -3,6 +3,7 @@
  * file, You can obtain one at https://mozilla.org/MPL/2.0/. */
 
 #![deny(unsafe_code)]
+#![allow(clippy::needless_lifetimes)]
 
 mod cell;
 pub mod context;
@@ -218,7 +218,7 @@ impl Zero for CellOrTrackMeasure {
 }
 
 impl<'a> TableLayout<'a> {
-    fn new(table: &'a Table) -> TableLayout {
+    fn new(table: &'a Table) -> TableLayout<'a> {
         Self {
             table,
             pbm: PaddingBorderMargin::zero(),
@@ -75,7 +75,10 @@ impl Iterator for ChildIter {
 }
 
 impl taffy::TraversePartialTree for TaffyContainerContext<'_> {
-    type ChildIter<'a> = ChildIter where Self: 'a;
+    type ChildIter<'a>
+        = ChildIter
+    where
+        Self: 'a;
 
     fn child_ids(&self, _node_id: taffy::NodeId) -> Self::ChildIter<'_> {
         ChildIter(0..self.source_child_nodes.len())
@@ -91,7 +94,10 @@ impl taffy::TraversePartialTree for TaffyContainerContext<'_> {
 }
 
 impl taffy::LayoutPartialTree for TaffyContainerContext<'_> {
-    type CoreContainerStyle<'a> = TaffyStyloStyle<&'a ComputedValues> where Self: 'a;
+    type CoreContainerStyle<'a>
+        = TaffyStyloStyle<&'a ComputedValues>
+    where
+        Self: 'a;
 
     fn get_core_container_style(&self, _node_id: taffy::NodeId) -> Self::CoreContainerStyle<'_> {
         TaffyStyloStyle(self.style)
@@ -283,11 +289,13 @@ impl taffy::LayoutPartialTree for TaffyContainerContext<'_> {
 }
 
 impl taffy::LayoutGridContainer for TaffyContainerContext<'_> {
-    type GridContainerStyle<'a> = TaffyStyloStyle<&'a ComputedValues>
+    type GridContainerStyle<'a>
+        = TaffyStyloStyle<&'a ComputedValues>
     where
         Self: 'a;
 
-    type GridItemStyle<'a> = TaffyStyloStyle<AtomicRef<'a, ComputedValues>>
+    type GridItemStyle<'a>
+        = TaffyStyloStyle<AtomicRef<'a, ComputedValues>>
     where
         Self: 'a;
 
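The three taffy hunks are pure formatting churn from the 1.83 toolchain's rustfmt changes: an associated type that carries a `where` clause is now broken across lines, with the `=` pushed onto its own continuation line. A self-contained sketch of the before/after shape, with a hypothetical trait and type:

trait Tree {
    type ChildIter<'a>
    where
        Self: 'a;
}

struct Forest;

impl Tree for Forest {
    // Old formatting: `type ChildIter<'a> = std::iter::Empty<&'a ()> where Self: 'a;`
    // New formatting puts the value and the where clause on separate lines.
    type ChildIter<'a>
        = std::iter::Empty<&'a ()>
    where
        Self: 'a;
}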
@@ -160,7 +160,7 @@ impl MallocSizeOf for String {
     }
 }
 
-impl<'a, T: ?Sized> MallocSizeOf for &'a T {
+impl<T: ?Sized> MallocSizeOf for &'_ T {
     fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
         // Zero makes sense for a non-owning reference.
         0
@@ -249,7 +249,7 @@ impl<T: MallocSizeOf> MallocSizeOf for std::cell::RefCell<T> {
     }
 }
 
-impl<'a, B: ?Sized + ToOwned> MallocSizeOf for std::borrow::Cow<'a, B>
+impl<B: ?Sized + ToOwned> MallocSizeOf for std::borrow::Cow<'_, B>
 where
     B::Owned: MallocSizeOf,
 {
@@ -206,6 +206,7 @@ impl ServoCookie {
 
         // 3. The cookie-attribute-list contains an attribute with an attribute-name of "Path",
         // and the cookie's path is /.
+        #[allow(clippy::nonminimal_bool)]
        if !has_path_specified || !cookie.path().is_some_and(|path| path == "/") {
            return None;
        }
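clippy's nonminimal_bool would rewrite the condition above using De Morgan's law (roughly `!(has_path_specified && cookie.path().is_some_and(...))`), but the spelled-out form mirrors the numbered steps of the cookie spec, so the commit allows the lint instead. A reduced illustration with hypothetical booleans:

fn keep_cookie(has_path_specified: bool, path_is_root: bool) -> bool {
    // clippy suggests collapsing this to `!(has_path_specified && path_is_root)`;
    // keeping the expanded form preserves the one-to-one mapping to the spec text.
    #[allow(clippy::nonminimal_bool)]
    if !has_path_specified || !path_is_root {
        return false;
    }
    true
}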
@@ -25,15 +25,15 @@ use webrender_traits::{CrossProcessCompositorApi, SerializableImageData};
 
 use crate::resource_thread::CoreResourceThreadPool;
 
-///
-/// TODO(gw): Remaining work on image cache:
-/// * Make use of the prefetch support in various parts of the code.
-/// * Profile time in GetImageIfAvailable - might be worth caching these
-/// results per paint / layout.
-///
-/// MAYBE(Yoric):
-/// * For faster lookups, it might be useful to store the LoadKey in the
-/// DOM once we have performed a first load.
+//
+// TODO(gw): Remaining work on image cache:
+// * Make use of the prefetch support in various parts of the code.
+// * Profile time in GetImageIfAvailable - might be worth caching these
+// results per paint / layout.
+//
+// MAYBE(Yoric):
+// * For faster lookups, it might be useful to store the LoadKey in the
+// DOM once we have performed a first load.
 
 // ======================================================================
 // Helper functions.
@@ -56,7 +56,7 @@ fn main() {
 #[derive(Eq, Hash, PartialEq)]
 struct Bytes<'a>(&'a str);
 
-impl<'a> FmtConst for Bytes<'a> {
+impl FmtConst for Bytes<'_> {
     fn fmt_const(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
         // https://github.com/rust-lang/rust/issues/55223
         // should technically be just `write!(formatter, "b\"{}\"", self.0)
@@ -65,7 +65,7 @@ impl<'a> FmtConst for Bytes<'a> {
     }
 }
 
-impl<'a> phf_shared::PhfHash for Bytes<'a> {
+impl phf_shared::PhfHash for Bytes<'_> {
     fn phf_hash<H: std::hash::Hasher>(&self, hasher: &mut H) {
         self.0.as_bytes().phf_hash(hasher)
     }
@@ -179,7 +179,7 @@ impl AudioBuffer {
                 *self.shared_channels.borrow_mut() = channels;
             }
         }
-        return self.shared_channels.borrow();
+        self.shared_channels.borrow()
     }
 }
 
@@ -2,6 +2,8 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at https://mozilla.org/MPL/2.0/. */
 
+//! Generic finalizer implementations for DOM binding implementations.
+
 use std::any::type_name;
 use std::mem;
 
@@ -12,8 +14,6 @@ use js::jsval::UndefinedValue;
 use crate::dom::bindings::utils::finalize_global as do_finalize_global;
 use crate::dom::bindings::weakref::{WeakBox, WeakReferenceable, DOM_WEAK_SLOT};
 
-/// Generic finalizer implementations for DOM binding implementations.
-
 pub unsafe fn finalize_common<T>(this: *const T) {
     if !this.is_null() {
         // The pointer can be null if the object is the unforgeable holder of that interface.
@@ -631,7 +631,7 @@ pub unsafe fn cross_origin_get_own_property_helper(
         holder.handle_mut().into(),
     );
 
-    return JS_GetOwnPropertyDescriptorById(*cx, holder.handle().into(), id, desc, is_none);
+    JS_GetOwnPropertyDescriptorById(*cx, holder.handle().into(), id, desc, is_none)
 }
 
 /// Implementation of [`CrossOriginPropertyFallback`].
@@ -483,9 +483,9 @@ impl HTMLFormElementMethods<crate::DomTypeHolder> for HTMLFormElement {
         );
 
         // Step 6
-        return Some(RadioNodeListOrElement::Element(DomRoot::from_ref(
+        Some(RadioNodeListOrElement::Element(DomRoot::from_ref(
             element_node.downcast::<Element>().unwrap(),
-        )));
+        )))
     }
 
     // https://html.spec.whatwg.org/multipage/#dom-a-rel
@@ -293,7 +293,7 @@ fn parse_npt_seconds(s: &str) -> Result<f64, ()> {
 
 fn parse_hms(s: &str) -> Result<f64, ()> {
     let mut vec: VecDeque<&str> = s.split(':').collect();
-    vec.retain(|x| !x.eq(&""));
+    vec.retain(|x| !x.is_empty());
 
     let result = match vec.len() {
         1 => {
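The retain change is the "is_empty lint" item from the commit message: comparing a string against the empty literal becomes an `is_empty()` call. Standalone, with a hypothetical helper:

fn non_empty_fields(input: &str) -> Vec<&str> {
    let mut fields: Vec<&str> = input.split(':').collect();
    // Previously `fields.retain(|x| !x.eq(&""));`; `is_empty` states the intent directly.
    fields.retain(|x| !x.is_empty());
    fields
}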
@@ -198,8 +198,8 @@ impl Performance {
         self.resource_timing_buffer_size_limit.set(0);
     }
 
-    /// Add a PerformanceObserver to the list of observers with a set of
-    /// observed entry types.
+    // Add a PerformanceObserver to the list of observers with a set of
+    // observed entry types.
 
     pub fn add_multiple_type_observer(
         &self,
@@ -219,11 +219,11 @@ enum ObservationState {
 }
 
 /// <https://drafts.csswg.org/resize-observer/#resizeobservation>
+///
+/// Note: `target` is kept out of here, to avoid having to root the `ResizeObservation`.
+/// <https://drafts.csswg.org/resize-observer/#dom-resizeobservation-target>
 #[derive(JSTraceable, MallocSizeOf)]
 struct ResizeObservation {
-    /// <https://drafts.csswg.org/resize-observer/#dom-resizeobservation-target>
-    /// Note: `target` is kept out of here, to avoid having to root the `ResizeObservation`.
-
     /// <https://drafts.csswg.org/resize-observer/#dom-resizeobservation-observedbox>
     observed_box: RefCell<ResizeObserverBoxOptions>,
     /// <https://drafts.csswg.org/resize-observer/#dom-resizeobservation-lastreportedsizes>
@@ -701,7 +701,10 @@ impl TreeSink for Sink {
     }
 
     type Handle = ParseNode;
-    type ElemName<'a> = ExpandedName<'a> where Self: 'a;
+    type ElemName<'a>
+        = ExpandedName<'a>
+    where
+        Self: 'a;
 
     fn get_document(&self) -> Self::Handle {
         self.document_node.clone()
@@ -1135,7 +1135,10 @@ impl TreeSink for Sink {
     }
 
     type Handle = Dom<Node>;
-    type ElemName<'a> = ExpandedName<'a> where Self: 'a;
+    type ElemName<'a>
+        = ExpandedName<'a>
+    where
+        Self: 'a;
 
     #[allow(crown::unrooted_must_root)]
     fn get_document(&self) -> Dom<Node> {
@@ -435,8 +435,8 @@ fn valid_compressed_data_len(
     let block_width = compression.block_width as u32;
     let block_height = compression.block_height as u32;
 
-    let required_blocks_hor = (width + block_width - 1) / block_width;
-    let required_blocks_ver = (height + block_height - 1) / block_height;
+    let required_blocks_hor = width.div_ceil(block_width);
+    let required_blocks_ver = height.div_ceil(block_height);
     let required_blocks = required_blocks_hor * required_blocks_ver;
 
     let required_bytes = required_blocks * compression.bytes_per_block as u32;
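The div_ceil lint fixed here flags the manual rounding-up idiom and suggests `u32::div_ceil`, which has been in the standard library since 1.73. The two forms agree wherever the manual addition does not overflow, and `div_ceil` also avoids that overflow hazard. A small check with hypothetical numbers:

fn main() {
    let (width, block_width): (u32, u32) = (13, 4);

    // Manual rounding-up division, as the old code spelled it.
    let manual = (width + block_width - 1) / block_width;
    // Standard-library equivalent.
    let with_div_ceil = width.div_ceil(block_width);

    assert_eq!(manual, 4);
    assert_eq!(manual, with_div_ceil);
}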
@@ -930,7 +930,7 @@ unsafe extern "C" fn getOwnPropertyDescriptor(
     let mut slot = UndefinedValue();
     GetProxyPrivate(proxy.get(), &mut slot);
     rooted!(in(cx) let target = slot.to_object());
-    return JS_GetOwnPropertyDescriptorById(cx, target.handle().into(), id, desc, is_none);
+    JS_GetOwnPropertyDescriptorById(cx, target.handle().into(), id, desc, is_none)
 }
 
 #[allow(unsafe_code, non_snake_case)]
@@ -98,6 +98,71 @@ struct XHRContext {
     url: ServoUrl,
 }
 
+impl FetchResponseListener for XHRContext {
+    fn process_request_body(&mut self, _: RequestId) {
+        // todo
+    }
+
+    fn process_request_eof(&mut self, _: RequestId) {
+        // todo
+    }
+
+    fn process_response(&mut self, _: RequestId, metadata: Result<FetchMetadata, NetworkError>) {
+        let xhr = self.xhr.root();
+        let rv = xhr.process_headers_available(self.gen_id, metadata, CanGc::note());
+        if rv.is_err() {
+            *self.sync_status.borrow_mut() = Some(rv);
+        }
+    }
+
+    fn process_response_chunk(&mut self, _: RequestId, chunk: Vec<u8>) {
+        self.xhr
+            .root()
+            .process_data_available(self.gen_id, chunk, CanGc::note());
+    }
+
+    fn process_response_eof(
+        &mut self,
+        _: RequestId,
+        response: Result<ResourceFetchTiming, NetworkError>,
+    ) {
+        let rv = self.xhr.root().process_response_complete(
+            self.gen_id,
+            response.map(|_| ()),
+            CanGc::note(),
+        );
+        *self.sync_status.borrow_mut() = Some(rv);
+    }
+
+    fn resource_timing_mut(&mut self) -> &mut ResourceFetchTiming {
+        &mut self.resource_timing
+    }
+
+    fn resource_timing(&self) -> &ResourceFetchTiming {
+        &self.resource_timing
+    }
+
+    fn submit_resource_timing(&mut self) {
+        network_listener::submit_timing(self, CanGc::note())
+    }
+}
+
+impl ResourceTimingListener for XHRContext {
+    fn resource_timing_information(&self) -> (InitiatorType, ServoUrl) {
+        (InitiatorType::XMLHttpRequest, self.url.clone())
+    }
+
+    fn resource_timing_global(&self) -> DomRoot<GlobalScope> {
+        self.xhr.root().global()
+    }
+}
+
+impl PreInvoke for XHRContext {
+    fn should_invoke(&self) -> bool {
+        self.xhr.root().generation_id.get() == self.gen_id
+    }
+}
+
 #[derive(Clone)]
 pub enum XHRProgress {
     /// Notify that headers have been received
@@ -234,75 +299,6 @@ impl XMLHttpRequest {
         init: RequestBuilder,
         cancellation_chan: ipc::IpcReceiver<()>,
     ) {
-        impl FetchResponseListener for XHRContext {
-            fn process_request_body(&mut self, _: RequestId) {
-                // todo
-            }
-
-            fn process_request_eof(&mut self, _: RequestId) {
-                // todo
-            }
-
-            fn process_response(
-                &mut self,
-                _: RequestId,
-                metadata: Result<FetchMetadata, NetworkError>,
-            ) {
-                let xhr = self.xhr.root();
-                let rv = xhr.process_headers_available(self.gen_id, metadata, CanGc::note());
-                if rv.is_err() {
-                    *self.sync_status.borrow_mut() = Some(rv);
-                }
-            }
-
-            fn process_response_chunk(&mut self, _: RequestId, chunk: Vec<u8>) {
-                self.xhr
-                    .root()
-                    .process_data_available(self.gen_id, chunk, CanGc::note());
-            }
-
-            fn process_response_eof(
-                &mut self,
-                _: RequestId,
-                response: Result<ResourceFetchTiming, NetworkError>,
-            ) {
-                let rv = self.xhr.root().process_response_complete(
-                    self.gen_id,
-                    response.map(|_| ()),
-                    CanGc::note(),
-                );
-                *self.sync_status.borrow_mut() = Some(rv);
-            }
-
-            fn resource_timing_mut(&mut self) -> &mut ResourceFetchTiming {
-                &mut self.resource_timing
-            }
-
-            fn resource_timing(&self) -> &ResourceFetchTiming {
-                &self.resource_timing
-            }
-
-            fn submit_resource_timing(&mut self) {
-                network_listener::submit_timing(self, CanGc::note())
-            }
-        }
-
-        impl ResourceTimingListener for XHRContext {
-            fn resource_timing_information(&self) -> (InitiatorType, ServoUrl) {
-                (InitiatorType::XMLHttpRequest, self.url.clone())
-            }
-
-            fn resource_timing_global(&self) -> DomRoot<GlobalScope> {
-                self.xhr.root().global()
-            }
-        }
-
-        impl PreInvoke for XHRContext {
-            fn should_invoke(&self) -> bool {
-                self.xhr.root().generation_id.get() == self.gen_id
-            }
-        }
-
         global.fetch(init, context, task_source, Some(cancellation_chan));
     }
 }
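The XMLHttpRequest change takes the other route around the non-local impl warning: rather than allowing the lint, the `FetchResponseListener`, `ResourceTimingListener` and `PreInvoke` impls for `XHRContext` move out of the method body in `impl XMLHttpRequest` up to module scope, where they are ordinary local impls. Schematically, with a hypothetical trait and type:

trait Listener {
    fn notify(&self);
}

struct Context;

// After the move: the impl lives at module scope, so no warning is emitted.
impl Listener for Context {
    fn notify(&self) {}
}

fn start_request(ctx: &Context) {
    // Before the move, the `impl Listener for Context` block sat here, inside
    // the function body, which rustc reports as a non-local definition.
    ctx.notify();
}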
@@ -11,6 +11,7 @@
 #![register_tool(crown)]
 #![cfg_attr(any(doc, clippy), allow(unknown_lints))]
 #![deny(crown_is_not_used)]
+#![allow(clippy::needless_lifetimes)]
 
 // These are used a lot so let's keep them for now
 #[macro_use]
@@ -5,6 +5,7 @@
 #![crate_name = "webdriver_server"]
 #![crate_type = "rlib"]
 #![deny(unsafe_code)]
+#![allow(clippy::needless_lifetimes)]
 
 mod actions;
 mod capabilities;
@@ -1,6 +1,6 @@
 [toolchain]
 # Be sure to update shell.nix and support/crown/rust-toolchain.toml when bumping this!
-channel = "1.82.0"
+channel = "1.83.0"
 
 components = [
     "clippy",
@@ -10,7 +10,7 @@ with import (builtins.fetchTarball {
   overlays = [
     (import (builtins.fetchTarball {
       # Bumped the channel in rust-toolchain.toml? Bump this commit too!
-      url = "https://github.com/oxalica/rust-overlay/archive/0be641045af6d8666c11c2c40e45ffc9667839b5.tar.gz";
+      url = "https://github.com/oxalica/rust-overlay/archive/10faa81b4c0135a04716cbd1649260d82b2890cd.tar.gz";
     }))
   ];
   config = {
@@ -1,5 +1,5 @@
 [toolchain]
-channel = "1.82.0"
+channel = "1.83.0"
 
 components = [
     "clippy",
@@ -99,14 +99,11 @@ fn find_primitive_impls<'tcx>(tcx: TyCtxt<'tcx>, name: &str) -> impl Iterator<It
         "f64" => SimplifiedType::Float(FloatTy::F64),
         #[allow(trivial_casts)]
         _ => {
-            return Result::<_, rustc_errors::ErrorGuaranteed>::Ok(&[] as &[_])
-                .into_iter()
-                .flatten()
-                .copied();
+            return [].iter().copied();
         },
     };
 
-    tcx.incoherent_impls(ty).into_iter().flatten().copied()
+    tcx.incoherent_impls(ty).iter().copied()
 }
 
 fn non_local_item_children_by_name(tcx: TyCtxt<'_>, def_id: DefId, name: Symbol) -> Vec<Res> {
@@ -235,7 +232,6 @@ pub fn def_path_res(cx: &LateContext<'_>, path: &[&str]) -> Vec<Res> {
         let inherent_impl_children = tcx
             .inherent_impls(def_id)
             .into_iter()
-            .flatten()
             .flat_map(|&impl_def_id| item_children_by_name(tcx, impl_def_id, segment));
 
         let direct_children = item_children_by_name(tcx, def_id, segment);
@@ -280,7 +276,6 @@ pub fn def_local_res(cx: &LateContext<'_>, path: &str) -> Vec<Res> {
         let inherent_impl_children = tcx
             .inherent_impls(def_id)
            .into_iter()
-            .flatten()
            .flat_map(|&impl_def_id| item_children_by_name(tcx, impl_def_id, segment));
 
        let direct_children = item_children_by_name(tcx, def_id, segment);