Mirror of https://github.com/servo/servo.git, synced 2025-07-23 07:13:52 +01:00
clippy: fix warnings in components/shared (#31565)
* clippy: fix some warnings in components/shared
* fix: unit tests
* fix: review comments
parent 3a5ca785d3
commit 43f44965cd
14 changed files with 102 additions and 139 deletions

@@ -49,7 +49,7 @@ impl BluetoothScanfilter {
             name,
             name_prefix,
             services: ServiceUUIDSequence::new(services),
-            manufacturer_data: manufacturer_data,
+            manufacturer_data,
             service_data,
         }
     }

@@ -124,7 +124,7 @@ impl RequestDeviceoptions {
         services: ServiceUUIDSequence,
     ) -> RequestDeviceoptions {
         RequestDeviceoptions {
-            filters: filters,
+            filters,
             optional_services: services,
         }
     }

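The two hunks above (and several more below) silence clippy's `redundant_field_names` lint by switching to Rust's field-init shorthand. A minimal standalone illustration of the pattern, using a hypothetical `Endpoint` struct rather than anything from this diff:

    struct Endpoint {
        host: String,
        port: u16,
    }

    fn make_endpoint(host: String, port: u16) -> Endpoint {
        // clippy::redundant_field_names warns on `host: host, port: port`;
        // the shorthand below is equivalent.
        Endpoint { host, port }
    }
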
@@ -115,11 +115,11 @@ impl LinearGradientStyle {
         stops: Vec<CanvasGradientStop>,
     ) -> LinearGradientStyle {
         LinearGradientStyle {
-            x0: x0,
-            y0: y0,
-            x1: x1,
-            y1: y1,
-            stops: stops,
+            x0,
+            y0,
+            x1,
+            y1,
+            stops,
         }
     }
 }

@@ -146,13 +146,13 @@ impl RadialGradientStyle {
         stops: Vec<CanvasGradientStop>,
     ) -> RadialGradientStyle {
         RadialGradientStyle {
-            x0: x0,
-            y0: y0,
-            r0: r0,
-            x1: x1,
-            y1: y1,
-            r1: r1,
-            stops: stops,
+            x0,
+            y0,
+            r0,
+            x1,
+            y1,
+            r1,
+            stops,
         }
     }
 }

@@ -402,8 +402,9 @@ impl FromStr for CompositionOrBlending {
     }
 }

-#[derive(Clone, Copy, Debug, Deserialize, MallocSizeOf, PartialEq, Serialize)]
+#[derive(Clone, Copy, Debug, Default, Deserialize, MallocSizeOf, PartialEq, Serialize)]
 pub enum TextAlign {
+    #[default]
     Start,
     End,
     Left,

@@ -426,17 +427,12 @@ impl FromStr for TextAlign {
     }
 }

-impl Default for TextAlign {
-    fn default() -> TextAlign {
-        TextAlign::Start
-    }
-}
-
-#[derive(Clone, Copy, Debug, Deserialize, MallocSizeOf, PartialEq, Serialize)]
+#[derive(Clone, Copy, Debug, Default, Deserialize, MallocSizeOf, PartialEq, Serialize)]
 pub enum TextBaseline {
     Top,
     Hanging,
     Middle,
+    #[default]
     Alphabetic,
     Ideographic,
     Bottom,

@@ -458,16 +454,11 @@ impl FromStr for TextBaseline {
     }
 }

-impl Default for TextBaseline {
-    fn default() -> TextBaseline {
-        TextBaseline::Alphabetic
-    }
-}
-
-#[derive(Clone, Copy, Debug, Deserialize, MallocSizeOf, PartialEq, Serialize)]
+#[derive(Clone, Copy, Debug, Default, Deserialize, MallocSizeOf, PartialEq, Serialize)]
 pub enum Direction {
     Ltr,
     Rtl,
+    #[default]
     Inherit,
 }

@@ -483,9 +474,3 @@ impl FromStr for Direction {
         }
     }
 }
-
-impl Default for Direction {
-    fn default() -> Direction {
-        Direction::Inherit
-    }
-}

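The four hunks above replace hand-written `Default` impls with `#[derive(Default)]` plus a `#[default]` marker on the chosen variant, which is the fix suggested by clippy's `derivable_impls` lint for enums. A sketch of the before/after shape with a hypothetical `Mode` enum, not taken from this diff:

    // Before: a manual impl that clippy::derivable_impls flags.
    // impl Default for Mode { fn default() -> Mode { Mode::Auto } }

    // After: derive Default and mark the default variant.
    #[derive(Clone, Copy, Debug, Default, PartialEq)]
    enum Mode {
        #[default]
        Auto,
        Manual,
    }
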
@@ -140,10 +140,7 @@ pub struct WebGLMsgSender {

 impl WebGLMsgSender {
     pub fn new(id: WebGLContextId, sender: WebGLChan) -> Self {
-        WebGLMsgSender {
-            ctx_id: id,
-            sender: sender,
-        }
+        WebGLMsgSender { ctx_id: id, sender }
     }

     /// Returns the WebGLContextId associated to this sender

@@ -922,7 +919,7 @@ mod gl_ext_constants {
     pub const COMPRESSED_RGBA_S3TC_DXT5_EXT: GLenum = 0x83F3;
     pub const COMPRESSED_RGB_ETC1_WEBGL: GLenum = 0x8D64;

-    pub static COMPRESSIONS: &'static [GLenum] = &[
+    pub static COMPRESSIONS: &[GLenum] = &[
         COMPRESSED_RGB_S3TC_DXT1_EXT,
         COMPRESSED_RGBA_S3TC_DXT1_EXT,
         COMPRESSED_RGBA_S3TC_DXT3_EXT,

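Dropping `'static` here is clippy's `redundant_static_lifetimes` fix: `const` and `static` items already have the `'static` lifetime, so writing it out is noise. A minimal, unrelated example:

    // clippy::redundant_static_lifetimes: `&'static [u8]` can simply be `&[u8]`
    // on a const or static item; the lifetime is implied.
    const MAGIC_BYTES: &[u8] = b"\x7fELF";
    static NAMES: &[&str] = &["alpha", "beta"];
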
@@ -1061,18 +1058,18 @@ impl TexFormat {

     /// Returns whether this format is a known sized or unsized format.
     pub fn is_sized(&self) -> bool {
-        match self {
+        !matches!(
+            self,
             TexFormat::DepthComponent |
-            TexFormat::DepthStencil |
-            TexFormat::Alpha |
-            TexFormat::Red |
-            TexFormat::RG |
-            TexFormat::RGB |
-            TexFormat::RGBA |
-            TexFormat::Luminance |
-            TexFormat::LuminanceAlpha => false,
-            _ => true,
-        }
+                TexFormat::DepthStencil |
+                TexFormat::Alpha |
+                TexFormat::Red |
+                TexFormat::RG |
+                TexFormat::RGB |
+                TexFormat::RGBA |
+                TexFormat::Luminance |
+                TexFormat::LuminanceAlpha
+        )
     }

     pub fn to_unsized(self) -> TexFormat {

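Rewriting a two-arm `match` that only yields `true`/`false` as `matches!` (here, the negated `!matches!`) is what clippy's `match_like_matches_macro` lint suggests. A small self-contained sketch with a hypothetical `Token` enum:

    #[derive(Debug)]
    enum Token {
        Ident(String),
        Number(f64),
        Comma,
    }

    fn is_separator(t: &Token) -> bool {
        // Instead of `match t { Token::Comma => true, _ => false }`.
        matches!(t, Token::Comma)
    }

    fn is_value(t: &Token) -> bool {
        // Negated form, mirroring the `is_sized` change above.
        !matches!(t, Token::Comma)
    }
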
@@ -25,9 +25,9 @@ impl Epoch {
     }
 }

-impl Into<WebRenderEpoch> for Epoch {
-    fn into(self) -> WebRenderEpoch {
-        WebRenderEpoch(self.0)
+impl From<Epoch> for WebRenderEpoch {
+    fn from(val: Epoch) -> Self {
+        WebRenderEpoch(val.0)
     }
 }

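Implementing `From<Epoch> for WebRenderEpoch` rather than `Into<WebRenderEpoch> for Epoch` follows clippy's `from_over_into` lint: the standard library's blanket impl derives `Into` from `From`, but not the other way around. A self-contained sketch using hypothetical newtypes:

    struct Epoch(u32);
    struct RendererEpoch(u32);

    // Prefer From; Into comes for free via the blanket impl.
    impl From<Epoch> for RendererEpoch {
        fn from(val: Epoch) -> Self {
            RendererEpoch(val.0)
        }
    }

    fn convert(e: Epoch) -> RendererEpoch {
        e.into() // usable because of `impl<T, U: From<T>> Into<U> for T`
    }
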
@@ -114,7 +114,7 @@ pub fn combine_id_with_fragment_type(id: usize, fragment_type: FragmentType) ->

 pub fn node_id_from_scroll_id(id: usize) -> Option<usize> {
     if (id & !SPECIAL_SCROLL_ROOT_ID_MASK) != 0 {
-        return Some((id & !3) as usize);
+        return Some(id & !3);
     }
     None
 }

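The `as usize` cast above was applied to a value that is already a `usize`, which clippy reports as `unnecessary_cast`. A trivial illustration:

    fn clear_low_bits(id: usize) -> usize {
        // clippy::unnecessary_cast: `(id & !3) as usize` casts usize to usize.
        id & !3
    }
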
@@ -28,17 +28,17 @@ impl PrintTree {

         self.print_level_prefix();

-        let items: Vec<&str> = queued_title.split("\n").collect();
+        let items: Vec<&str> = queued_title.split('\n').collect();
         println!("\u{251C}\u{2500} {}", items[0]);
         for i in 1..items.len() {
             self.print_level_child_indentation();
             print!("{}", items[i]);
             if i < items.len() {
-                print!("\n");
+                println!();
             }
         }

-        self.level = self.level + 1;
+        self.level += 1;
     }

     /// Ascend one level in the tree.

@@ -69,13 +69,13 @@ impl PrintTree {
     fn flush_queued_item(&mut self, prefix: &str) {
         if let Some(queued_item) = self.queued_item.take() {
             self.print_level_prefix();
-            let items: Vec<&str> = queued_item.split("\n").collect();
+            let items: Vec<&str> = queued_item.split('\n').collect();
             println!("{} {}", prefix, items[0]);
             for i in 1..items.len() {
                 self.print_level_child_indentation();
                 print!("{}", items[i]);
                 if i < items.len() {
-                    print!("\n");
+                    println!();
                 }
             }
         }

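The two PrintTree hunks bundle three small lint fixes: a one-character `&str` pattern becomes a `char` (`single_char_pattern`), `print!("\n")` becomes `println!()` (`print_with_newline`), and `self.level = self.level + 1` becomes `self.level += 1` (`assign_op_pattern`). A compact illustration of all three, not tied to PrintTree:

    fn print_block(text: &str, mut level: u32) -> u32 {
        // single_char_pattern: a char pattern is cheaper than a 1-byte &str.
        for line in text.split('\n') {
            print!("{}", line);
            // print_with_newline: prefer println!() to print!("\n").
            println!();
        }
        // assign_op_pattern: prefer the compound-assignment form.
        level += 1;
        level
    }
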
@@ -5,6 +5,8 @@
 //! The high-level interface from script to constellation. Using this abstract interface helps
 //! reduce coupling between these two components.

+#![allow(clippy::new_without_default)]
+
 use std::cell::Cell;
 use std::num::NonZeroU32;
 use std::sync::Arc;

@@ -80,17 +82,19 @@ pub struct PipelineNamespaceInstaller {
     namespace_receiver: IpcReceiver<PipelineNamespaceId>,
 }

-impl PipelineNamespaceInstaller {
-    pub fn new() -> Self {
+impl Default for PipelineNamespaceInstaller {
+    fn default() -> Self {
         let (namespace_sender, namespace_receiver) =
             ipc::channel().expect("PipelineNamespaceInstaller ipc channel failure");
-        PipelineNamespaceInstaller {
+        Self {
             request_sender: None,
-            namespace_sender: namespace_sender,
-            namespace_receiver: namespace_receiver,
+            namespace_sender,
+            namespace_receiver,
         }
     }
+}

+impl PipelineNamespaceInstaller {
     /// Provide a request sender to send requests to the constellation.
     pub fn set_sender(&mut self, sender: IpcSender<PipelineNamespaceRequest>) {
         self.request_sender = Some(sender);

@@ -121,7 +125,7 @@ lazy_static! {
     ///
     /// Use PipelineNamespace::fetch_install to install a unique pipeline-namespace from the calling thread.
     static ref PIPELINE_NAMESPACE_INSTALLER: Arc<Mutex<PipelineNamespaceInstaller>> =
-        Arc::new(Mutex::new(PipelineNamespaceInstaller::new()));
+        Arc::new(Mutex::new(PipelineNamespaceInstaller::default()));
 }

 /// Each pipeline ID needs to be unique. However, it also needs to be possible to

@@ -247,7 +251,7 @@ size_of_test!(BrowsingContextId, 8);
 size_of_test!(Option<BrowsingContextId>, 8);

 impl BrowsingContextId {
-    pub fn new() -> BrowsingContextId {
+    pub fn new() -> Self {
         PIPELINE_NAMESPACE.with(|tls| {
             let mut namespace = tls.get().expect("No namespace set for this thread!");
             let new_browsing_context_id = namespace.next_browsing_context_id();

@@ -292,6 +296,7 @@ impl TopLevelBrowsingContextId {
     pub fn new() -> TopLevelBrowsingContextId {
         TopLevelBrowsingContextId(BrowsingContextId::new())
     }
+
     /// Each script and layout thread should have the top-level browsing context id installed,
     /// since it is used by crash reporting.
     pub fn install(id: TopLevelBrowsingContextId) {

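`PipelineNamespaceInstaller` now gets its no-argument constructor through `impl Default`, which is the shape clippy's `new_without_default` lint asks for; the file-level `#![allow(clippy::new_without_default)]` added a few hunks earlier presumably covers constructors like `BrowsingContextId::new`, which hand out fresh ids and are poor candidates for `Default`. A minimal sketch of the preferred shape, with a hypothetical `Registry` type:

    use std::collections::HashMap;

    struct Registry {
        entries: HashMap<String, u32>,
        next_id: u32,
    }

    // Satisfies clippy::new_without_default: the zero-argument constructor
    // is provided as Default (a `new()` can simply forward to it if wanted).
    impl Default for Registry {
        fn default() -> Self {
            Registry {
                entries: HashMap::new(),
                // ids start at 1, so this cannot just be #[derive(Default)]
                next_id: 1,
            }
        }
    }
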
@@ -544,7 +549,7 @@ impl fmt::Debug for HangAlert {
                     "\n The following component is experiencing a transient hang: \n {:?}",
                     component_id
                 )?;
-                (annotation.clone(), None)
+                (*annotation, None)
             },
             HangAlert::Permanent(component_id, annotation, profile) => {
                 write!(

@@ -552,7 +557,7 @@ impl fmt::Debug for HangAlert {
                     "\n The following component is experiencing a permanent hang: \n {:?}",
                     component_id
                 )?;
-                (annotation.clone(), profile.clone())
+                (*annotation, profile.clone())
             },
         };

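Replacing `annotation.clone()` with `*annotation` is the usual fix for clippy's `clone_on_copy` lint: when a type is `Copy`, dereferencing already copies the value and `clone()` only adds noise. A small sketch assuming a `Copy` annotation type (the real annotation type is not shown in this diff):

    #[derive(Clone, Copy, Debug)]
    struct Annotation(u8);

    fn unwrap_annotation(annotation: &Annotation) -> Annotation {
        // clippy::clone_on_copy: dereference instead of cloning a Copy value.
        *annotation
    }
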
@@ -14,5 +14,5 @@ pub fn get_value_from_header_list(name: &str, headers: &HeaderMap) -> Option<Vec
     }

     // Step 2
-    return Some(values.collect::<Vec<&[u8]>>().join(&[0x2C, 0x20][..]));
+    Some(values.collect::<Vec<&[u8]>>().join(&[0x2C, 0x20][..]))
 }

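Dropping the trailing `return` is clippy's `needless_return` fix: the final expression of a function body is already its value. A trivial example with the same join idiom:

    fn join_header_values(values: Vec<&[u8]>) -> Vec<u8> {
        // clippy::needless_return: no `return` (and no trailing semicolon)
        // needed on the last expression.
        values.join(&[0x2C, 0x20][..])
    }
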
@@ -59,12 +59,12 @@ pub fn load_from_memory(buffer: &[u8], cors_status: CorsStatus) -> Option<Image>
         Ok(_) => match image::load_from_memory(buffer) {
             Ok(image) => {
                 let mut rgba = image.into_rgba8();
-                pixels::rgba8_byte_swap_colors_inplace(&mut *rgba);
+                pixels::rgba8_byte_swap_colors_inplace(&mut rgba);
                 Some(Image {
                     width: rgba.width(),
                     height: rgba.height(),
                     format: PixelFormat::BGRA8,
-                    bytes: IpcSharedMemory::from_bytes(&*rgba),
+                    bytes: IpcSharedMemory::from_bytes(&rgba),
                     id: None,
                     cors_status,
                 })

@@ -42,10 +42,7 @@ pub struct ImageResponder {

 impl ImageResponder {
     pub fn new(sender: IpcSender<PendingImageResponse>, id: PendingImageId) -> ImageResponder {
-        ImageResponder {
-            sender: sender,
-            id: id,
-        }
+        ImageResponder { sender, id }
     }

     pub fn respond(&self, response: ImageResponse) {

@@ -54,7 +51,7 @@ impl ImageResponder {
         // That's not a case that's worth warning about.
         // TODO(#15501): are there cases in which we should perform cleanup?
         let _ = self.sender.send(PendingImageResponse {
-            response: response,
+            response,
             id: self.id,
         });
     }

@@ -94,9 +94,9 @@ impl CustomResponse {
         body: Vec<u8>,
     ) -> CustomResponse {
         CustomResponse {
-            headers: headers,
-            raw_status: raw_status,
-            body: body,
+            headers,
+            raw_status,
+            body,
         }
     }
 }

@@ -567,7 +567,7 @@ pub enum ResourceTimingType {
 impl ResourceFetchTiming {
     pub fn new(timing_type: ResourceTimingType) -> ResourceFetchTiming {
         ResourceFetchTiming {
-            timing_type: timing_type,
+            timing_type,
             timing_check_passed: true,
             domain_lookup_start: 0,
             redirect_count: 0,

@@ -587,12 +587,12 @@ impl ResourceFetchTiming {
     // TODO currently this is being set with precise time ns when it should be time since
     // time origin (as described in Performance::now)
     pub fn set_attribute(&mut self, attribute: ResourceAttribute) {
-        let should_attribute_always_be_updated = match attribute {
+        let should_attribute_always_be_updated = matches!(
+            attribute,
             ResourceAttribute::FetchStart |
-            ResourceAttribute::ResponseEnd |
-            ResourceAttribute::StartTime(_) => true,
-            _ => false,
-        };
+                ResourceAttribute::ResponseEnd |
+                ResourceAttribute::StartTime(_)
+        );
         if !self.timing_check_passed && !should_attribute_always_be_updated {
             return;
         }

@@ -782,7 +782,7 @@ impl NetworkError {
 /// Normalize `slice`, as defined by
 /// [the Fetch Spec](https://fetch.spec.whatwg.org/#concept-header-value-normalize).
 pub fn trim_http_whitespace(mut slice: &[u8]) -> &[u8] {
-    const HTTP_WS_BYTES: &'static [u8] = b"\x09\x0A\x0D\x20";
+    const HTTP_WS_BYTES: &[u8] = b"\x09\x0A\x0D\x20";

     loop {
         match slice.split_first() {

@@ -21,7 +21,7 @@ use embedder_traits::resources::{self, Resource};
 use lazy_static::lazy_static;
 use servo_url::{Host, ImmutableOrigin, ServoUrl};

-#[derive(Clone, Debug)]
+#[derive(Clone, Debug, Default)]
 pub struct PubDomainRules {
     rules: HashSet<String>,
     wildcards: HashSet<String>,

@@ -37,12 +37,12 @@ impl<'a> FromIterator<&'a str> for PubDomainRules {
     where
         T: IntoIterator<Item = &'a str>,
     {
-        let mut result = PubDomainRules::new();
+        let mut result = PubDomainRules::default();
         for item in iter {
-            if item.starts_with("!") {
-                result.exceptions.insert(String::from(&item[1..]));
-            } else if item.starts_with("*.") {
-                result.wildcards.insert(String::from(&item[2..]));
+            if let Some(stripped) = item.strip_prefix('!') {
+                result.exceptions.insert(String::from(stripped));
+            } else if let Some(stripped) = item.strip_prefix("*.") {
+                result.wildcards.insert(String::from(stripped));
             } else {
                 result.rules.insert(String::from(item));
             }

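Using `strip_prefix` instead of `starts_with` followed by manual slicing is clippy's `manual_strip` suggestion; it removes index arithmetic that has to stay in sync with the prefix length. The same pattern on its own, with hypothetical rule kinds:

    fn classify(rule: &str) -> (&'static str, &str) {
        // clippy::manual_strip: strip_prefix yields the remainder directly,
        // instead of `if rule.starts_with('!') { &rule[1..] } ...`.
        if let Some(rest) = rule.strip_prefix('!') {
            ("exception", rest)
        } else if let Some(rest) = rule.strip_prefix("*.") {
            ("wildcard", rest)
        } else {
            ("exact", rule)
        }
    }
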
@@ -52,13 +52,6 @@ impl<'a> FromIterator<&'a str> for PubDomainRules {
 }

 impl PubDomainRules {
-    pub fn new() -> PubDomainRules {
-        PubDomainRules {
-            rules: HashSet::new(),
-            wildcards: HashSet::new(),
-            exceptions: HashSet::new(),
-        }
-    }
     pub fn parse(content: &str) -> PubDomainRules {
         content
             .lines()

@@ -68,23 +61,21 @@ impl PubDomainRules {
             .collect()
     }
     fn suffix_pair<'a>(&self, domain: &'a str) -> (&'a str, &'a str) {
-        let domain = domain.trim_start_matches(".");
+        let domain = domain.trim_start_matches('.');
         let mut suffix = domain;
         let mut prev_suffix = domain;
-        for (index, _) in domain.match_indices(".") {
+        for (index, _) in domain.match_indices('.') {
             let next_suffix = &domain[index + 1..];
             if self.exceptions.contains(suffix) {
                 return (next_suffix, suffix);
-            } else if self.wildcards.contains(next_suffix) {
-                return (suffix, prev_suffix);
-            } else if self.rules.contains(suffix) {
-                return (suffix, prev_suffix);
-            } else {
-                prev_suffix = suffix;
-                suffix = next_suffix;
             }
+            if self.wildcards.contains(next_suffix) || self.rules.contains(suffix) {
+                return (suffix, prev_suffix);
+            }
+            prev_suffix = suffix;
+            suffix = next_suffix;
         }
-        return (suffix, prev_suffix);
+        (suffix, prev_suffix)
     }
     pub fn public_suffix<'a>(&self, domain: &'a str) -> &'a str {
         let (public, _) = self.suffix_pair(domain);

@@ -98,8 +89,8 @@ impl PubDomainRules {
         // Speeded-up version of
         // domain != "" &&
        // self.public_suffix(domain) == domain.
-        let domain = domain.trim_start_matches(".");
-        match domain.find(".") {
+        let domain = domain.trim_start_matches('.');
+        match domain.find('.') {
             None => !domain.is_empty(),
             Some(index) => {
                 !self.exceptions.contains(domain) && self.wildcards.contains(&domain[index + 1..]) ||

@@ -111,8 +102,8 @@ impl PubDomainRules {
         // Speeded-up version of
         // self.public_suffix(domain) != domain &&
         // self.registrable_suffix(domain) == domain.
-        let domain = domain.trim_start_matches(".");
-        match domain.find(".") {
+        let domain = domain.trim_start_matches('.');
+        match domain.find('.') {
             None => false,
             Some(index) => {
                 self.exceptions.contains(domain) ||

@@ -151,7 +142,7 @@ pub fn is_reg_domain(domain: &str) -> bool {
 pub fn reg_host(url: &ServoUrl) -> Option<Host> {
     match url.origin() {
         ImmutableOrigin::Tuple(_, Host::Domain(domain), _) => {
-            Some(Host::Domain(String::from(reg_suffix(&*domain))))
+            Some(Host::Domain(String::from(reg_suffix(&domain))))
         },
         ImmutableOrigin::Tuple(_, ip, _) => Some(ip),
         ImmutableOrigin::Opaque(_) => None,

@@ -264,7 +264,7 @@ impl RequestBuilder {
     pub fn new(url: ServoUrl, referrer: Referrer) -> RequestBuilder {
         RequestBuilder {
             method: Method::GET,
-            url: url,
+            url,
             headers: HeaderMap::new(),
             unsafe_request: false,
             body: None,

@@ -277,7 +277,7 @@ impl RequestBuilder {
             credentials_mode: CredentialsMode::CredentialsSameOrigin,
             use_url_credentials: false,
             origin: ImmutableOrigin::new_opaque(),
-            referrer: referrer,
+            referrer,
             referrer_policy: None,
             pipeline_id: None,
             redirect_mode: RedirectMode::Follow,

@@ -524,9 +524,9 @@ impl Request {
             initiator: Initiator::None,
             destination: Destination::None,
             origin: origin.unwrap_or(Origin::Client),
-            referrer: referrer,
+            referrer,
             referrer_policy: None,
-            pipeline_id: pipeline_id,
+            pipeline_id,
             synchronous: false,
             mode: RequestMode::NoCors,
             use_cors_preflight: false,

@@ -540,7 +540,7 @@ impl Request {
             redirect_count: 0,
             response_tainting: ResponseTainting::Basic,
             csp_list: None,
-            https_state: https_state,
+            https_state,
             crash: None,
         }
     }

@@ -189,10 +189,7 @@ impl Response {
     }

     pub fn is_network_error(&self) -> bool {
-        match self.response_type {
-            ResponseType::Error(..) => true,
-            _ => false,
-        }
+        matches!(self.response_type, ResponseType::Error(..))
     }

     pub fn get_network_error(&self) -> Option<&NetworkError> {

@@ -204,7 +201,7 @@ impl Response {

     pub fn actual_response(&self) -> &Response {
         if self.return_internal && self.internal_response.is_some() {
-            &**self.internal_response.as_ref().unwrap()
+            self.internal_response.as_ref().unwrap()
         } else {
             self
         }

@@ -212,7 +209,7 @@ impl Response {

     pub fn actual_response_mut(&mut self) -> &mut Response {
         if self.return_internal && self.internal_response.is_some() {
-            &mut **self.internal_response.as_mut().unwrap()
+            self.internal_response.as_mut().unwrap()
         } else {
             self
         }

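Removing the explicit `&**` / `&mut **` in `actual_response` works because deref coercion already turns a reference to the boxed value into the reference type the function returns; this is the kind of pattern clippy's `explicit_auto_deref` lint points out. A minimal sketch with a hypothetical wrapper type (the `Response` internals are not shown in this diff):

    struct Node {
        inner: Option<Box<Node>>,
    }

    impl Node {
        fn inner_or_self(&self) -> &Node {
            match &self.inner {
                // `&**boxed` would be flagged; &Box<Node> coerces to &Node here.
                Some(boxed) => boxed,
                None => self,
            }
        }
    }
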
@@ -258,10 +255,7 @@ impl Response {

             ResponseType::Basic => {
                 let headers = old_headers.iter().filter(|(name, _)| {
-                    match &*name.as_str().to_ascii_lowercase() {
-                        "set-cookie" | "set-cookie2" => false,
-                        _ => true
-                    }
+                    !matches!(&*name.as_str().to_ascii_lowercase(), "set-cookie" | "set-cookie2")
                 }).map(|(n, v)| (n.clone(), v.clone())).collect();
                 response.headers = headers;
             },

@@ -315,7 +309,7 @@ impl Response {
         metadata.status = response.raw_status.clone();
         metadata.https_state = response.https_state;
         metadata.referrer = response.referrer.clone();
-        metadata.referrer_policy = response.referrer_policy.clone();
+        metadata.referrer_policy = response.referrer_policy;
         metadata.redirected = response.actual_response().url_list.len() > 1;
         metadata
     }

@@ -39,6 +39,7 @@ pub enum WebrenderImageHandlerType {
 /// List of Webrender external images to be shared among all external image
 /// consumers (WebGL, Media, WebGPU).
 /// It ensures that external image identifiers are unique.
+#[derive(Default)]
 pub struct WebrenderExternalImageRegistry {
     /// Map of all generated external images.
     external_images: HashMap<ExternalImageId, WebrenderImageHandlerType>,

@@ -47,13 +48,6 @@ pub struct WebrenderExternalImageRegistry {
 }

 impl WebrenderExternalImageRegistry {
-    pub fn new() -> Self {
-        Self {
-            external_images: HashMap::new(),
-            next_image_id: 0,
-        }
-    }
-
     pub fn next_id(&mut self, handler_type: WebrenderImageHandlerType) -> ExternalImageId {
         self.next_image_id += 1;
         let key = ExternalImageId(self.next_image_id);

@@ -84,7 +78,7 @@ pub struct WebrenderExternalImageHandlers {

 impl WebrenderExternalImageHandlers {
     pub fn new() -> (Self, Arc<Mutex<WebrenderExternalImageRegistry>>) {
-        let external_images = Arc::new(Mutex::new(WebrenderExternalImageRegistry::new()));
+        let external_images = Arc::new(Mutex::new(WebrenderExternalImageRegistry::default()));
         (
             Self {
                 webgl_handler: None,

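`WebrenderExternalImageRegistry` drops its hand-written `new()` in favour of `#[derive(Default)]`, which is equivalent here because `HashMap::new()` and `0` are exactly what the fields' `Default` impls produce, and the call site switches to `::default()`. A standalone sketch of the same refactor with a hypothetical registry type:

    use std::collections::HashMap;

    // The derive replaces a manual `new()` that only filled in default values,
    // the pattern clippy's derivable_impls / new_without_default lints target.
    #[derive(Default)]
    struct IdRegistry {
        entries: HashMap<u64, String>,
        next_id: u64,
    }

    fn main() {
        let registry = IdRegistry::default();
        assert!(registry.entries.is_empty());
        assert_eq!(registry.next_id, 0);
    }
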