clippy: fix warnings in components/shared (#31565)

* clippy: fix some warnings in components/shared
* fix: unit tests
* fix: review comments

parent 3a5ca785d3
commit 43f44965cd

14 changed files with 102 additions and 139 deletions
@@ -49,7 +49,7 @@ impl BluetoothScanfilter {
             name,
             name_prefix,
             services: ServiceUUIDSequence::new(services),
-            manufacturer_data: manufacturer_data,
+            manufacturer_data,
             service_data,
         }
     }
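This first hunk, like many of the ones below, applies Rust's struct field init shorthand: clippy's redundant_field_names lint flags "manufacturer_data: manufacturer_data". A minimal standalone sketch of the pattern, using a hypothetical Filter type rather than the real Servo structs:

struct Filter {
    name: String,
    services: Vec<String>,
}

impl Filter {
    fn new(name: String, services: Vec<String>) -> Filter {
        // `name: name` and `services: services` would trigger
        // clippy::redundant_field_names; the shorthand is preferred when the
        // local variable and the field share a name.
        Filter { name, services }
    }
}

fn main() {
    let f = Filter::new("heart_rate".into(), vec!["180d".into()]);
    assert_eq!(f.name, "heart_rate");
    assert_eq!(f.services.len(), 1);
}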
@@ -124,7 +124,7 @@ impl RequestDeviceoptions {
         services: ServiceUUIDSequence,
     ) -> RequestDeviceoptions {
         RequestDeviceoptions {
-            filters: filters,
+            filters,
             optional_services: services,
         }
     }

@@ -115,11 +115,11 @@ impl LinearGradientStyle {
         stops: Vec<CanvasGradientStop>,
     ) -> LinearGradientStyle {
         LinearGradientStyle {
-            x0: x0,
-            y0: y0,
-            x1: x1,
-            y1: y1,
-            stops: stops,
+            x0,
+            y0,
+            x1,
+            y1,
+            stops,
         }
     }
 }

@@ -146,13 +146,13 @@ impl RadialGradientStyle {
         stops: Vec<CanvasGradientStop>,
     ) -> RadialGradientStyle {
         RadialGradientStyle {
-            x0: x0,
-            y0: y0,
-            r0: r0,
-            x1: x1,
-            y1: y1,
-            r1: r1,
-            stops: stops,
+            x0,
+            y0,
+            r0,
+            x1,
+            y1,
+            r1,
+            stops,
         }
     }
 }

@@ -402,8 +402,9 @@ impl FromStr for CompositionOrBlending {
     }
 }
 
-#[derive(Clone, Copy, Debug, Deserialize, MallocSizeOf, PartialEq, Serialize)]
+#[derive(Clone, Copy, Debug, Default, Deserialize, MallocSizeOf, PartialEq, Serialize)]
 pub enum TextAlign {
+    #[default]
     Start,
     End,
     Left,

@@ -426,17 +427,12 @@ impl FromStr for TextAlign {
     }
 }
 
-impl Default for TextAlign {
-    fn default() -> TextAlign {
-        TextAlign::Start
-    }
-}
-
-#[derive(Clone, Copy, Debug, Deserialize, MallocSizeOf, PartialEq, Serialize)]
+#[derive(Clone, Copy, Debug, Default, Deserialize, MallocSizeOf, PartialEq, Serialize)]
 pub enum TextBaseline {
     Top,
     Hanging,
     Middle,
+    #[default]
     Alphabetic,
     Ideographic,
     Bottom,

@@ -458,16 +454,11 @@ impl FromStr for TextBaseline {
     }
 }
 
-impl Default for TextBaseline {
-    fn default() -> TextBaseline {
-        TextBaseline::Alphabetic
-    }
-}
-
-#[derive(Clone, Copy, Debug, Deserialize, MallocSizeOf, PartialEq, Serialize)]
+#[derive(Clone, Copy, Debug, Default, Deserialize, MallocSizeOf, PartialEq, Serialize)]
 pub enum Direction {
     Ltr,
     Rtl,
+    #[default]
     Inherit,
 }
 

@@ -483,9 +474,3 @@ impl FromStr for Direction {
         }
     }
 }
-
-impl Default for Direction {
-    fn default() -> Direction {
-        Direction::Inherit
-    }
-}
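The TextAlign, TextBaseline, and Direction hunks swap hand-written Default impls for a derived Default plus a #[default] marker on the chosen variant, the form clippy's derivable_impls lint suggests (the attribute has been stable since Rust 1.62). A reduced sketch, reusing only the variant names shown above:

// Before: a manual `impl Default for TextAlign` returning TextAlign::Start.
// After: derive Default and mark the default variant with #[default].
#[derive(Clone, Copy, Debug, Default, PartialEq)]
enum TextAlign {
    #[default]
    Start,
    End,
    Left,
}

fn main() {
    assert_eq!(TextAlign::default(), TextAlign::Start);
    assert_ne!(TextAlign::default(), TextAlign::End);
    assert_ne!(TextAlign::default(), TextAlign::Left);
}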
@@ -140,10 +140,7 @@ pub struct WebGLMsgSender {
 
 impl WebGLMsgSender {
     pub fn new(id: WebGLContextId, sender: WebGLChan) -> Self {
-        WebGLMsgSender {
-            ctx_id: id,
-            sender: sender,
-        }
+        WebGLMsgSender { ctx_id: id, sender }
     }
 
     /// Returns the WebGLContextId associated to this sender

@@ -922,7 +919,7 @@ mod gl_ext_constants {
     pub const COMPRESSED_RGBA_S3TC_DXT5_EXT: GLenum = 0x83F3;
     pub const COMPRESSED_RGB_ETC1_WEBGL: GLenum = 0x8D64;
 
-    pub static COMPRESSIONS: &'static [GLenum] = &[
+    pub static COMPRESSIONS: &[GLenum] = &[
         COMPRESSED_RGB_S3TC_DXT1_EXT,
         COMPRESSED_RGBA_S3TC_DXT1_EXT,
         COMPRESSED_RGBA_S3TC_DXT3_EXT,
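Dropping 'static from the slice type addresses clippy's redundant_static_lifetimes lint: the 'static lifetime is already implied for static and const items. A tiny sketch, with u32 standing in for GLenum and only the two constant values quoted above:

// `pub static COMPRESSIONS: &'static [u32] = ...` is flagged by
// clippy::redundant_static_lifetimes; the elided form below is equivalent.
pub static COMPRESSIONS: &[u32] = &[0x83F3, 0x8D64];

fn main() {
    assert_eq!(COMPRESSIONS.len(), 2);
    assert_eq!(COMPRESSIONS[0], 0x83F3);
}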
@@ -1061,18 +1058,18 @@ impl TexFormat {
 
     /// Returns whether this format is a known sized or unsized format.
     pub fn is_sized(&self) -> bool {
-        match self {
+        !matches!(
+            self,
             TexFormat::DepthComponent |
             TexFormat::DepthStencil |
             TexFormat::Alpha |
             TexFormat::Red |
             TexFormat::RG |
             TexFormat::RGB |
             TexFormat::RGBA |
             TexFormat::Luminance |
-            TexFormat::LuminanceAlpha => false,
-            _ => true,
-        }
+            TexFormat::LuminanceAlpha
+        )
     }
 
     pub fn to_unsized(self) -> TexFormat {
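Turning a two-arm boolean match into matches! (negated here, because the sized formats were the "_ => true" arm) is clippy's match_like_matches_macro suggestion. A reduced sketch with a hypothetical Format enum instead of TexFormat:

enum Format {
    Alpha,
    Luminance,
    Rgba8,
    Depth24,
}

// Before: `match format { Format::Alpha | Format::Luminance => false, _ => true }`.
// After: the negated matches! form that clippy::match_like_matches_macro suggests.
fn is_sized(format: &Format) -> bool {
    !matches!(format, Format::Alpha | Format::Luminance)
}

fn main() {
    assert!(is_sized(&Format::Rgba8));
    assert!(is_sized(&Format::Depth24));
    assert!(!is_sized(&Format::Alpha));
    assert!(!is_sized(&Format::Luminance));
}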
@@ -25,9 +25,9 @@ impl Epoch {
     }
 }
 
-impl Into<WebRenderEpoch> for Epoch {
-    fn into(self) -> WebRenderEpoch {
-        WebRenderEpoch(self.0)
+impl From<Epoch> for WebRenderEpoch {
+    fn from(val: Epoch) -> Self {
+        WebRenderEpoch(val.0)
     }
 }
 
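The Epoch hunk follows clippy's from_over_into lint: implement From and let the standard library's blanket impl supply Into. A self-contained sketch with simplified newtypes standing in for Epoch and WebRenderEpoch:

struct Epoch(u32);
struct WebRenderEpoch(u32);

// Implementing From gives `Into<WebRenderEpoch> for Epoch` for free via the
// standard library's blanket impl, so clippy::from_over_into flags a direct
// `impl Into<WebRenderEpoch> for Epoch`.
impl From<Epoch> for WebRenderEpoch {
    fn from(val: Epoch) -> Self {
        WebRenderEpoch(val.0)
    }
}

fn main() {
    let e = Epoch(7);
    let wr: WebRenderEpoch = e.into(); // still works through the blanket impl
    assert_eq!(wr.0, 7);
}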
@@ -114,7 +114,7 @@ pub fn combine_id_with_fragment_type(id: usize, fragment_type: FragmentType) ->
 
 pub fn node_id_from_scroll_id(id: usize) -> Option<usize> {
     if (id & !SPECIAL_SCROLL_ROOT_ID_MASK) != 0 {
-        return Some((id & !3) as usize);
+        return Some(id & !3);
     }
     None
 }

@@ -28,17 +28,17 @@ impl PrintTree {
 
         self.print_level_prefix();
 
-        let items: Vec<&str> = queued_title.split("\n").collect();
+        let items: Vec<&str> = queued_title.split('\n').collect();
         println!("\u{251C}\u{2500} {}", items[0]);
         for i in 1..items.len() {
             self.print_level_child_indentation();
             print!("{}", items[i]);
             if i < items.len() {
-                print!("\n");
+                println!();
             }
         }
 
-        self.level = self.level + 1;
+        self.level += 1;
     }
 
     /// Ascend one level in the tree.
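Three small lints appear in this PrintTree hunk and the flush_queued_item hunk that follows: single_char_pattern (pass a char to split when the pattern is a single character), print_with_newline (use println!() rather than print!("\n")), and assign_op_pattern (self.level += 1). A tiny sketch combining them, with a made-up print_lines helper:

fn print_lines(title: &str, mut level: usize) -> usize {
    // split('\n') instead of split("\n"): clippy::single_char_pattern.
    for line in title.split('\n') {
        print!("{}", line);
        // println!() instead of print!("\n"): clippy::print_with_newline.
        println!();
    }
    // level += 1 instead of level = level + 1: clippy::assign_op_pattern.
    level += 1;
    level
}

fn main() {
    let next = print_lines("first\nsecond", 0);
    assert_eq!(next, 1);
}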
@@ -69,13 +69,13 @@ impl PrintTree {
     fn flush_queued_item(&mut self, prefix: &str) {
         if let Some(queued_item) = self.queued_item.take() {
             self.print_level_prefix();
-            let items: Vec<&str> = queued_item.split("\n").collect();
+            let items: Vec<&str> = queued_item.split('\n').collect();
             println!("{} {}", prefix, items[0]);
             for i in 1..items.len() {
                 self.print_level_child_indentation();
                 print!("{}", items[i]);
                 if i < items.len() {
-                    print!("\n");
+                    println!();
                 }
             }
         }

@@ -5,6 +5,8 @@
 //! The high-level interface from script to constellation. Using this abstract interface helps
 //! reduce coupling between these two components.
 
+#![allow(clippy::new_without_default)]
+
 use std::cell::Cell;
 use std::num::NonZeroU32;
 use std::sync::Arc;

@@ -80,17 +82,19 @@ pub struct PipelineNamespaceInstaller {
     namespace_receiver: IpcReceiver<PipelineNamespaceId>,
 }
 
-impl PipelineNamespaceInstaller {
-    pub fn new() -> Self {
+impl Default for PipelineNamespaceInstaller {
+    fn default() -> Self {
         let (namespace_sender, namespace_receiver) =
             ipc::channel().expect("PipelineNamespaceInstaller ipc channel failure");
-        PipelineNamespaceInstaller {
+        Self {
             request_sender: None,
-            namespace_sender: namespace_sender,
-            namespace_receiver: namespace_receiver,
+            namespace_sender,
+            namespace_receiver,
         }
     }
+}
 
+impl PipelineNamespaceInstaller {
     /// Provide a request sender to send requests to the constellation.
     pub fn set_sender(&mut self, sender: IpcSender<PipelineNamespaceRequest>) {
         self.request_sender = Some(sender);
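Two responses to clippy's new_without_default show up here: the crate-level #![allow(clippy::new_without_default)] added a few hunks up (presumably for the ID types whose new() must pull from a thread-local namespace), and, for PipelineNamespaceInstaller, turning the argument-free constructor into the Default impl. A sketch of the latter with a hypothetical Installer type and std::sync::mpsc in place of ipc-channel:

use std::sync::mpsc::{channel, Receiver, Sender};

struct Installer {
    request_sender: Option<Sender<u32>>,
    namespace_sender: Sender<u32>,
    namespace_receiver: Receiver<u32>,
}

// An argument-free `fn new()` with no matching Default impl triggers
// clippy::new_without_default. Because this constructor does real work
// (it creates a channel), Default cannot simply be derived, so the
// constructor becomes the Default impl and callers use Installer::default().
impl Default for Installer {
    fn default() -> Self {
        let (namespace_sender, namespace_receiver) = channel();
        Installer {
            request_sender: None,
            namespace_sender,
            namespace_receiver,
        }
    }
}

fn main() {
    let installer = Installer::default();
    assert!(installer.request_sender.is_none());
    installer.namespace_sender.send(1).unwrap();
    assert_eq!(installer.namespace_receiver.recv().unwrap(), 1);
}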
@@ -121,7 +125,7 @@ lazy_static! {
     ///
     /// Use PipelineNamespace::fetch_install to install a unique pipeline-namespace from the calling thread.
     static ref PIPELINE_NAMESPACE_INSTALLER: Arc<Mutex<PipelineNamespaceInstaller>> =
-        Arc::new(Mutex::new(PipelineNamespaceInstaller::new()));
+        Arc::new(Mutex::new(PipelineNamespaceInstaller::default()));
 }
 
 /// Each pipeline ID needs to be unique. However, it also needs to be possible to

@@ -247,7 +251,7 @@ size_of_test!(BrowsingContextId, 8);
 size_of_test!(Option<BrowsingContextId>, 8);
 
 impl BrowsingContextId {
-    pub fn new() -> BrowsingContextId {
+    pub fn new() -> Self {
         PIPELINE_NAMESPACE.with(|tls| {
             let mut namespace = tls.get().expect("No namespace set for this thread!");
             let new_browsing_context_id = namespace.next_browsing_context_id();

@@ -292,6 +296,7 @@ impl TopLevelBrowsingContextId {
     pub fn new() -> TopLevelBrowsingContextId {
         TopLevelBrowsingContextId(BrowsingContextId::new())
     }
+
     /// Each script and layout thread should have the top-level browsing context id installed,
     /// since it is used by crash reporting.
     pub fn install(id: TopLevelBrowsingContextId) {

@@ -544,7 +549,7 @@ impl fmt::Debug for HangAlert {
                     "\n The following component is experiencing a transient hang: \n {:?}",
                     component_id
                 )?;
-                (annotation.clone(), None)
+                (*annotation, None)
             },
             HangAlert::Permanent(component_id, annotation, profile) => {
                 write!(

@@ -552,7 +557,7 @@ impl fmt::Debug for HangAlert {
                     "\n The following component is experiencing a permanent hang: \n {:?}",
                     component_id
                 )?;
-                (annotation.clone(), profile.clone())
+                (*annotation, profile.clone())
             },
         };
 

@@ -14,5 +14,5 @@ pub fn get_value_from_header_list(name: &str, headers: &HeaderMap) -> Option<Vec
     }
 
     // Step 2
-    return Some(values.collect::<Vec<&[u8]>>().join(&[0x2C, 0x20][..]));
+    Some(values.collect::<Vec<&[u8]>>().join(&[0x2C, 0x20][..]))
 }
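Removing the trailing return is clippy's needless_return lint: the final expression of a block is its value. A one-function sketch, joining with a &str instead of the raw byte slice used above:

// Before: `return Some(values.join(", "));`
// After: the expression itself is the function's value.
fn join_values(values: Vec<&str>) -> Option<String> {
    if values.is_empty() {
        return None; // early returns are still fine
    }
    Some(values.join(", "))
}

fn main() {
    assert_eq!(join_values(vec!["a", "b"]), Some("a, b".to_string()));
    assert_eq!(join_values(vec![]), None);
}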
@@ -59,12 +59,12 @@ pub fn load_from_memory(buffer: &[u8], cors_status: CorsStatus) -> Option<Image>
         Ok(_) => match image::load_from_memory(buffer) {
             Ok(image) => {
                 let mut rgba = image.into_rgba8();
-                pixels::rgba8_byte_swap_colors_inplace(&mut *rgba);
+                pixels::rgba8_byte_swap_colors_inplace(&mut rgba);
                 Some(Image {
                     width: rgba.width(),
                     height: rgba.height(),
                     format: PixelFormat::BGRA8,
-                    bytes: IpcSharedMemory::from_bytes(&*rgba),
+                    bytes: IpcSharedMemory::from_bytes(&rgba),
                     id: None,
                     cors_status,
                 })

@@ -42,10 +42,7 @@ pub struct ImageResponder {
 
 impl ImageResponder {
     pub fn new(sender: IpcSender<PendingImageResponse>, id: PendingImageId) -> ImageResponder {
-        ImageResponder {
-            sender: sender,
-            id: id,
-        }
+        ImageResponder { sender, id }
     }
 
     pub fn respond(&self, response: ImageResponse) {

@@ -54,7 +51,7 @@ impl ImageResponder {
         // That's not a case that's worth warning about.
         // TODO(#15501): are there cases in which we should perform cleanup?
         let _ = self.sender.send(PendingImageResponse {
-            response: response,
+            response,
             id: self.id,
         });
     }

@@ -94,9 +94,9 @@ impl CustomResponse {
         body: Vec<u8>,
     ) -> CustomResponse {
         CustomResponse {
-            headers: headers,
-            raw_status: raw_status,
-            body: body,
+            headers,
+            raw_status,
+            body,
         }
     }
 }

@@ -567,7 +567,7 @@ pub enum ResourceTimingType {
 impl ResourceFetchTiming {
     pub fn new(timing_type: ResourceTimingType) -> ResourceFetchTiming {
         ResourceFetchTiming {
-            timing_type: timing_type,
+            timing_type,
             timing_check_passed: true,
             domain_lookup_start: 0,
             redirect_count: 0,

@@ -587,12 +587,12 @@ impl ResourceFetchTiming {
     // TODO currently this is being set with precise time ns when it should be time since
     // time origin (as described in Performance::now)
     pub fn set_attribute(&mut self, attribute: ResourceAttribute) {
-        let should_attribute_always_be_updated = match attribute {
+        let should_attribute_always_be_updated = matches!(
+            attribute,
             ResourceAttribute::FetchStart |
             ResourceAttribute::ResponseEnd |
-            ResourceAttribute::StartTime(_) => true,
-            _ => false,
-        };
+            ResourceAttribute::StartTime(_)
+        );
         if !self.timing_check_passed && !should_attribute_always_be_updated {
             return;
         }

@@ -782,7 +782,7 @@ impl NetworkError {
 /// Normalize `slice`, as defined by
 /// [the Fetch Spec](https://fetch.spec.whatwg.org/#concept-header-value-normalize).
 pub fn trim_http_whitespace(mut slice: &[u8]) -> &[u8] {
-    const HTTP_WS_BYTES: &'static [u8] = b"\x09\x0A\x0D\x20";
+    const HTTP_WS_BYTES: &[u8] = b"\x09\x0A\x0D\x20";
 
     loop {
         match slice.split_first() {

@@ -21,7 +21,7 @@ use embedder_traits::resources::{self, Resource};
 use lazy_static::lazy_static;
 use servo_url::{Host, ImmutableOrigin, ServoUrl};
 
-#[derive(Clone, Debug)]
+#[derive(Clone, Debug, Default)]
 pub struct PubDomainRules {
     rules: HashSet<String>,
     wildcards: HashSet<String>,

@@ -37,12 +37,12 @@ impl<'a> FromIterator<&'a str> for PubDomainRules {
     where
         T: IntoIterator<Item = &'a str>,
     {
-        let mut result = PubDomainRules::new();
+        let mut result = PubDomainRules::default();
         for item in iter {
-            if item.starts_with("!") {
-                result.exceptions.insert(String::from(&item[1..]));
-            } else if item.starts_with("*.") {
-                result.wildcards.insert(String::from(&item[2..]));
+            if let Some(stripped) = item.strip_prefix('!') {
+                result.exceptions.insert(String::from(stripped));
+            } else if let Some(stripped) = item.strip_prefix("*.") {
+                result.wildcards.insert(String::from(stripped));
             } else {
                 result.rules.insert(String::from(item));
             }
@@ -52,13 +52,6 @@ impl<'a> FromIterator<&'a str> for PubDomainRules {
 }
 
 impl PubDomainRules {
-    pub fn new() -> PubDomainRules {
-        PubDomainRules {
-            rules: HashSet::new(),
-            wildcards: HashSet::new(),
-            exceptions: HashSet::new(),
-        }
-    }
     pub fn parse(content: &str) -> PubDomainRules {
         content
             .lines()

@@ -68,23 +61,21 @@ impl PubDomainRules {
             .collect()
     }
     fn suffix_pair<'a>(&self, domain: &'a str) -> (&'a str, &'a str) {
-        let domain = domain.trim_start_matches(".");
+        let domain = domain.trim_start_matches('.');
         let mut suffix = domain;
         let mut prev_suffix = domain;
-        for (index, _) in domain.match_indices(".") {
+        for (index, _) in domain.match_indices('.') {
             let next_suffix = &domain[index + 1..];
             if self.exceptions.contains(suffix) {
                 return (next_suffix, suffix);
-            } else if self.wildcards.contains(next_suffix) {
-                return (suffix, prev_suffix);
-            } else if self.rules.contains(suffix) {
-                return (suffix, prev_suffix);
-            } else {
-                prev_suffix = suffix;
-                suffix = next_suffix;
             }
+            if self.wildcards.contains(next_suffix) || self.rules.contains(suffix) {
+                return (suffix, prev_suffix);
+            }
+            prev_suffix = suffix;
+            suffix = next_suffix;
         }
-        return (suffix, prev_suffix);
+        (suffix, prev_suffix)
     }
     pub fn public_suffix<'a>(&self, domain: &'a str) -> &'a str {
         let (public, _) = self.suffix_pair(domain);

@@ -98,8 +89,8 @@ impl PubDomainRules {
         // Speeded-up version of
         // domain != "" &&
         // self.public_suffix(domain) == domain.
-        let domain = domain.trim_start_matches(".");
-        match domain.find(".") {
+        let domain = domain.trim_start_matches('.');
+        match domain.find('.') {
             None => !domain.is_empty(),
             Some(index) => {
                 !self.exceptions.contains(domain) && self.wildcards.contains(&domain[index + 1..]) ||

@@ -111,8 +102,8 @@ impl PubDomainRules {
         // Speeded-up version of
         // self.public_suffix(domain) != domain &&
         // self.registrable_suffix(domain) == domain.
-        let domain = domain.trim_start_matches(".");
-        match domain.find(".") {
+        let domain = domain.trim_start_matches('.');
+        match domain.find('.') {
             None => false,
             Some(index) => {
                 self.exceptions.contains(domain) ||

@@ -151,7 +142,7 @@ pub fn is_reg_domain(domain: &str) -> bool {
 pub fn reg_host(url: &ServoUrl) -> Option<Host> {
     match url.origin() {
         ImmutableOrigin::Tuple(_, Host::Domain(domain), _) => {
-            Some(Host::Domain(String::from(reg_suffix(&*domain))))
+            Some(Host::Domain(String::from(reg_suffix(&domain))))
         },
         ImmutableOrigin::Tuple(_, ip, _) => Some(ip),
         ImmutableOrigin::Opaque(_) => None,

@@ -264,7 +264,7 @@ impl RequestBuilder {
     pub fn new(url: ServoUrl, referrer: Referrer) -> RequestBuilder {
         RequestBuilder {
             method: Method::GET,
-            url: url,
+            url,
             headers: HeaderMap::new(),
             unsafe_request: false,
             body: None,

@@ -277,7 +277,7 @@ impl RequestBuilder {
             credentials_mode: CredentialsMode::CredentialsSameOrigin,
             use_url_credentials: false,
             origin: ImmutableOrigin::new_opaque(),
-            referrer: referrer,
+            referrer,
             referrer_policy: None,
             pipeline_id: None,
             redirect_mode: RedirectMode::Follow,

@@ -524,9 +524,9 @@ impl Request {
             initiator: Initiator::None,
             destination: Destination::None,
             origin: origin.unwrap_or(Origin::Client),
-            referrer: referrer,
+            referrer,
             referrer_policy: None,
-            pipeline_id: pipeline_id,
+            pipeline_id,
             synchronous: false,
             mode: RequestMode::NoCors,
             use_cors_preflight: false,

@@ -540,7 +540,7 @@ impl Request {
             redirect_count: 0,
             response_tainting: ResponseTainting::Basic,
             csp_list: None,
-            https_state: https_state,
+            https_state,
             crash: None,
         }
     }

@@ -189,10 +189,7 @@ impl Response {
     }
 
     pub fn is_network_error(&self) -> bool {
-        match self.response_type {
-            ResponseType::Error(..) => true,
-            _ => false,
-        }
+        matches!(self.response_type, ResponseType::Error(..))
     }
 
     pub fn get_network_error(&self) -> Option<&NetworkError> {

@@ -204,7 +201,7 @@ impl Response {
 
     pub fn actual_response(&self) -> &Response {
         if self.return_internal && self.internal_response.is_some() {
-            &**self.internal_response.as_ref().unwrap()
+            self.internal_response.as_ref().unwrap()
         } else {
             self
         }

@@ -212,7 +209,7 @@ impl Response {
 
     pub fn actual_response_mut(&mut self) -> &mut Response {
         if self.return_internal && self.internal_response.is_some() {
-            &mut **self.internal_response.as_mut().unwrap()
+            self.internal_response.as_mut().unwrap()
         } else {
             self
         }

@@ -258,10 +255,7 @@ impl Response {
 
             ResponseType::Basic => {
                 let headers = old_headers.iter().filter(|(name, _)| {
-                    match &*name.as_str().to_ascii_lowercase() {
-                        "set-cookie" | "set-cookie2" => false,
-                        _ => true
-                    }
+                    !matches!(&*name.as_str().to_ascii_lowercase(), "set-cookie" | "set-cookie2")
                 }).map(|(n, v)| (n.clone(), v.clone())).collect();
                 response.headers = headers;
             },

@@ -315,7 +309,7 @@ impl Response {
         metadata.status = response.raw_status.clone();
         metadata.https_state = response.https_state;
         metadata.referrer = response.referrer.clone();
-        metadata.referrer_policy = response.referrer_policy.clone();
+        metadata.referrer_policy = response.referrer_policy;
         metadata.redirected = response.actual_response().url_list.len() > 1;
         metadata
     }

@@ -39,6 +39,7 @@ pub enum WebrenderImageHandlerType {
 /// List of Webrender external images to be shared among all external image
 /// consumers (WebGL, Media, WebGPU).
 /// It ensures that external image identifiers are unique.
+#[derive(Default)]
 pub struct WebrenderExternalImageRegistry {
     /// Map of all generated external images.
     external_images: HashMap<ExternalImageId, WebrenderImageHandlerType>,

@@ -47,13 +48,6 @@ pub struct WebrenderExternalImageRegistry {
 }
 
 impl WebrenderExternalImageRegistry {
-    pub fn new() -> Self {
-        Self {
-            external_images: HashMap::new(),
-            next_image_id: 0,
-        }
-    }
-
     pub fn next_id(&mut self, handler_type: WebrenderImageHandlerType) -> ExternalImageId {
         self.next_image_id += 1;
         let key = ExternalImageId(self.next_image_id);

@@ -84,7 +78,7 @@ pub struct WebrenderExternalImageHandlers {
 
 impl WebrenderExternalImageHandlers {
     pub fn new() -> (Self, Arc<Mutex<WebrenderExternalImageRegistry>>) {
-        let external_images = Arc::new(Mutex::new(WebrenderExternalImageRegistry::new()));
+        let external_images = Arc::new(Mutex::new(WebrenderExternalImageRegistry::default()));
         (
             Self {
                 webgl_handler: None,
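For WebrenderExternalImageRegistry the old new() only produced empty and zero fields, so it can be dropped in favour of #[derive(Default)], with call sites switched to ::default() as in the final hunk. A minimal sketch with a stand-in Registry type:

use std::collections::HashMap;
use std::sync::{Arc, Mutex};

// The hand-written `new()` only filled in empty/zero values, so a derived
// Default covers it (the derivable_impls / new_without_default family).
#[derive(Default)]
struct Registry {
    external_images: HashMap<u64, String>,
    next_image_id: u64,
}

fn main() {
    // Call sites change from Registry::new() to Registry::default().
    let registry = Arc::new(Mutex::new(Registry::default()));
    let mut guard = registry.lock().unwrap();
    guard.next_image_id += 1;
    let id = guard.next_image_id;
    guard.external_images.insert(id, "WebGL".to_string());
    assert_eq!(guard.external_images.len(), 1);
}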