Update all network-related dependencies to the latest versions (#34630)

* Update all network-related dependencies to the latest versions:
* rustls
* hyper
* http
* headers
* tungstenite
* async-tungstenite

Signed-off-by: Josh Matthews <josh@joshmatthews.net>

* net: Fix panics with 1xx responses in WPT tests.

Signed-off-by: Josh Matthews <josh@joshmatthews.net>

* net: Use reported response length when calculating available ranges.

Signed-off-by: Josh Matthews <josh@joshmatthews.net>

* net: Remove unreachable match arm.

Signed-off-by: Josh Matthews <josh@joshmatthews.net>

* net: Clean up commented fragments in blob and file handlers.

Signed-off-by: Josh Matthews <josh@joshmatthews.net>

* net: Remove unreachable match arm.

Signed-off-by: Josh Matthews <josh@joshmatthews.net>

* net: Fix clippy warning.

Signed-off-by: Josh Matthews <josh@joshmatthews.net>

* net: Cleanup.

Signed-off-by: Josh Matthews <josh@joshmatthews.net>

* net: Fix up unit tests for dependency upgrades.

Signed-off-by: Josh Matthews <josh@joshmatthews.net>

* Update aws-lc-sys to fix Windows builds.

Signed-off-by: Josh Matthews <josh@joshmatthews.net>

* net: Use ring instead of aws-lc-sys.

Signed-off-by: Josh Matthews <josh@joshmatthews.net>

* embedding: Require embedder to initialize a rustls CryptoProvider.

Signed-off-by: Josh Matthews <josh@joshmatthews.net>

* Disable aws-lc-rs pending OhOS build fixes.

Signed-off-by: Josh Matthews <josh@joshmatthews.net>

---------

Signed-off-by: Josh Matthews <josh@joshmatthews.net>
Author: Josh Matthews <josh@joshmatthews.net>
Date: 2025-01-08 00:47:58 -05:00 (committed by GitHub)
Commit: 76e0a1872b (parent: 270df6e263)
25 changed files with 1342 additions and 1050 deletions
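The last two commits ("Require embedder to initialize a rustls CryptoProvider" and "Use ring instead of aws-lc-sys") mean an embedder must now install a process-wide crypto provider before Servo builds any TLS configuration; the servoshell crate gains a direct rustls dependency in the lockfile below, presumably for exactly this. A minimal sketch of that initialization with the ring backend (these are real rustls 0.23 APIs, but where the call lives in an embedder's startup path is an assumption):

```rust
fn main() {
    // rustls 0.23 built with default-features = false does not bake in a
    // crypto backend, so the embedder installs one explicitly before any
    // ClientConfig is constructed. This uses the ring backend, matching
    // the rest of this patch; aws-lc-rs would work the same way once the
    // OpenHarmony build issues mentioned above are resolved.
    rustls::crypto::ring::default_provider()
        .install_default()
        .expect("failed to install the default rustls CryptoProvider");

    // ... continue with normal embedder startup ...
}
```

Doing this once at startup avoids the runtime panic rustls otherwise raises when a config builder cannot determine which provider to use.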

Cargo.lock (generated)

@ -284,38 +284,17 @@ dependencies = [
"syn",
]
[[package]]
name = "async-stream"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476"
dependencies = [
"async-stream-impl",
"futures-core",
"pin-project-lite",
]
[[package]]
name = "async-stream-impl"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "async-tungstenite"
version = "0.23.0"
version = "0.28.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1e9efbe14612da0a19fb983059a0b621e9cf6225d7018ecab4f9988215540dc"
checksum = "90e661b6cb0a6eb34d02c520b052daa3aa9ac0cc02495c9d066bbce13ead132b"
dependencies = [
"futures-io",
"futures-util",
"log",
"pin-project-lite",
"rustls-pki-types",
"tokio",
"tokio-rustls",
"tungstenite",
@ -1047,7 +1026,7 @@ dependencies = [
"euclid",
"fonts",
"gaol",
"http",
"http 1.2.0",
"ipc-channel",
"keyboard-types",
"log",
@ -1474,8 +1453,8 @@ dependencies = [
"crossbeam-channel",
"devtools_traits",
"embedder_traits",
"headers",
"http",
"headers 0.4.0",
"http 1.2.0",
"ipc-channel",
"log",
"net_traits",
@ -1493,7 +1472,7 @@ version = "0.0.1"
dependencies = [
"base",
"bitflags 2.6.0",
"http",
"http 1.2.0",
"ipc-channel",
"malloc_size_of_derive",
"net_traits",
@ -2887,7 +2866,26 @@ dependencies = [
"futures-core",
"futures-sink",
"futures-util",
"http",
"http 0.2.12",
"indexmap",
"slab",
"tokio",
"tokio-util",
"tracing",
]
[[package]]
name = "h2"
version = "0.4.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ccae279728d634d083c00f6099cb58f01cc99c145b84b8be2f6c74618d79922e"
dependencies = [
"atomic-waker",
"bytes",
"fnv",
"futures-core",
"futures-sink",
"http 1.2.0",
"indexmap",
"slab",
"tokio",
@ -2937,8 +2935,23 @@ checksum = "06683b93020a07e3dbcf5f8c0f6d40080d725bea7936fc01ad345c01b97dc270"
dependencies = [
"base64 0.21.7",
"bytes",
"headers-core",
"http",
"headers-core 0.2.0",
"http 0.2.12",
"httpdate",
"mime",
"sha1",
]
[[package]]
name = "headers"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "322106e6bd0cba2d5ead589ddb8150a13d7c4217cf80d7c4f682ca994ccc6aa9"
dependencies = [
"base64 0.21.7",
"bytes",
"headers-core 0.3.0",
"http 1.2.0",
"httpdate",
"mime",
"sha1",
@ -2950,7 +2963,16 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e7f66481bfee273957b1f20485a4ff3362987f85b2c236580d81b4eb7a326429"
dependencies = [
"http",
"http 0.2.12",
]
[[package]]
name = "headers-core"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "54b4a22553d4242c49fddb9ba998a99962b5cc6f22cb5a3482bec22522403ce4"
dependencies = [
"http 1.2.0",
]
[[package]]
@ -3043,6 +3065,17 @@ dependencies = [
"itoa",
]
[[package]]
name = "http"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f16ca2af56261c99fba8bac40a10251ce8188205a4c448fbb745a2e4daa76fea"
dependencies = [
"bytes",
"fnv",
"itoa",
]
[[package]]
name = "http-body"
version = "0.4.6"
@ -3050,7 +3083,30 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2"
dependencies = [
"bytes",
"http",
"http 0.2.12",
"pin-project-lite",
]
[[package]]
name = "http-body"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184"
dependencies = [
"bytes",
"http 1.2.0",
]
[[package]]
name = "http-body-util"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f"
dependencies = [
"bytes",
"futures-util",
"http 1.2.0",
"http-body 1.0.1",
"pin-project-lite",
]
@ -3082,9 +3138,9 @@ dependencies = [
"futures-channel",
"futures-core",
"futures-util",
"h2",
"http",
"http-body",
"h2 0.3.26",
"http 0.2.12",
"http-body 0.4.6",
"httparse",
"httpdate",
"itoa",
@ -3097,29 +3153,72 @@ dependencies = [
]
[[package]]
name = "hyper-rustls"
version = "0.24.2"
name = "hyper"
version = "1.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590"
checksum = "256fb8d4bd6413123cc9d91832d78325c48ff41677595be797d90f42969beae0"
dependencies = [
"bytes",
"futures-channel",
"futures-util",
"h2 0.4.7",
"http 1.2.0",
"http-body 1.0.1",
"httparse",
"httpdate",
"itoa",
"pin-project-lite",
"smallvec",
"tokio",
"want",
]
[[package]]
name = "hyper-rustls"
version = "0.27.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08afdbb5c31130e3034af566421053ab03787c640246a446327f550d11bcb333"
dependencies = [
"futures-util",
"http",
"hyper",
"http 1.2.0",
"hyper 1.5.2",
"hyper-util",
"log",
"rustls",
"rustls-pki-types",
"tokio",
"tokio-rustls",
"tower-service",
"webpki-roots",
]
[[package]]
name = "hyper-util"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df2dcfbe0677734ab2f3ffa7fa7bfd4706bfdc1ef393f2ee30184aed67e631b4"
dependencies = [
"bytes",
"futures-channel",
"futures-util",
"http 1.2.0",
"http-body 1.0.1",
"hyper 1.5.2",
"pin-project-lite",
"socket2",
"tokio",
"tower-service",
"tracing",
]
[[package]]
name = "hyper_serde"
version = "0.13.2"
dependencies = [
"cookie 0.18.1",
"headers",
"http",
"hyper",
"headers 0.4.0",
"http 1.2.0",
"hyper 1.5.2",
"mime",
"serde",
"serde_bytes",
@ -4607,10 +4706,12 @@ dependencies = [
"futures-core",
"futures-util",
"generic-array",
"headers",
"http",
"hyper",
"headers 0.4.0",
"http 1.2.0",
"http-body-util",
"hyper 1.5.2",
"hyper-rustls",
"hyper-util",
"hyper_serde",
"imsz",
"ipc-channel",
@ -4624,6 +4725,7 @@ dependencies = [
"rayon",
"rustls",
"rustls-pemfile",
"rustls-pki-types",
"serde",
"serde_json",
"servo_allocator",
@ -4636,8 +4738,8 @@ dependencies = [
"tokio",
"tokio-rustls",
"tokio-stream",
"tokio-test",
"tokio-util",
"tower-service",
"tungstenite",
"url",
"uuid",
@ -4655,9 +4757,10 @@ dependencies = [
"cookie 0.18.1",
"crossbeam-channel",
"embedder_traits",
"headers",
"http",
"hyper",
"headers 0.4.0",
"http 1.2.0",
"hyper 1.5.2",
"hyper-util",
"hyper_serde",
"ipc-channel",
"log",
@ -4666,7 +4769,7 @@ dependencies = [
"num-traits",
"percent-encoding",
"pixels",
"rustls",
"rustls-pki-types",
"serde",
"servo_arc",
"servo_malloc_size_of",
@ -5893,32 +5996,42 @@ dependencies = [
[[package]]
name = "rustls"
version = "0.21.12"
version = "0.23.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e"
checksum = "934b404430bb06b3fae2cba809eb45a1ab1aecd64491213d7c3301b88393f8d1"
dependencies = [
"log",
"once_cell",
"ring",
"rustls-pki-types",
"rustls-webpki",
"sct",
"subtle",
"zeroize",
]
[[package]]
name = "rustls-pemfile"
version = "1.0.4"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c"
checksum = "dce314e5fee3f39953d46bb63bb8a46d40c2f8fb7cc5a3b6cab2bde9721d6e50"
dependencies = [
"base64 0.21.7",
"rustls-pki-types",
]
[[package]]
name = "rustls-webpki"
version = "0.101.7"
name = "rustls-pki-types"
version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765"
checksum = "16f1201b3c9a7ee8039bcadc17b7e605e2945b27eee7631788c1bd2b0643674b"
[[package]]
name = "rustls-webpki"
version = "0.102.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "64ca1bc8749bd4cf37b5ce386cc146580777b4e8572c7b97baf22c83f444bee9"
dependencies = [
"ring",
"rustls-pki-types",
"untrusted",
]
@ -5996,9 +6109,9 @@ dependencies = [
"fonts_traits",
"fxhash",
"glow",
"headers",
"headers 0.4.0",
"html5ever",
"http",
"http 1.2.0",
"hyper_serde",
"image",
"indexmap",
@ -6125,7 +6238,7 @@ dependencies = [
"devtools_traits",
"embedder_traits",
"euclid",
"http",
"http 1.2.0",
"hyper_serde",
"ipc-channel",
"keyboard-types",
@ -6149,16 +6262,6 @@ dependencies = [
"webxr-api",
]
[[package]]
name = "sct"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414"
dependencies = [
"ring",
"untrusted",
]
[[package]]
name = "sctk-adwaita"
version = "0.10.1"
@ -6528,7 +6631,7 @@ dependencies = [
"crossbeam-channel",
"dom",
"euclid",
"http",
"http 1.2.0",
"indexmap",
"keyboard-types",
"malloc_size_of",
@ -6593,10 +6696,10 @@ dependencies = [
"gilrs",
"gleam",
"glow",
"headers",
"headers 0.4.0",
"hilog",
"hitrace",
"http",
"http 1.2.0",
"image",
"ipc-channel",
"jni",
@ -6616,6 +6719,7 @@ dependencies = [
"ohos-sys",
"ohos-vsync",
"raw-window-handle",
"rustls",
"serde_json",
"servo_allocator",
"shellwords",
@ -7445,9 +7549,9 @@ dependencies = [
[[package]]
name = "tokio-rustls"
version = "0.24.1"
version = "0.26.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081"
checksum = "5f6d0975eaace0cf0fcadee4e4aaa5da15b5c079146f2cffb67c113be122bf37"
dependencies = [
"rustls",
"tokio",
@ -7464,19 +7568,6 @@ dependencies = [
"tokio",
]
[[package]]
name = "tokio-test"
version = "0.4.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2468baabc3311435b55dd935f702f42cd1b8abb7e754fb7dfb16bd36aa88f9f7"
dependencies = [
"async-stream",
"bytes",
"futures-core",
"tokio",
"tokio-stream",
]
[[package]]
name = "tokio-util"
version = "0.7.13"
@ -7652,21 +7743,21 @@ checksum = "d2df906b07856748fa3f6e0ad0cbaa047052d4a7dd609e231c4f72cee8c36f31"
[[package]]
name = "tungstenite"
version = "0.20.1"
version = "0.24.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e3dac10fd62eaf6617d3a904ae222845979aec67c615d1c842b4002c7666fb9"
checksum = "18e5b8366ee7a95b16d32197d0b2604b43a0be89dc5fac9f8e96ccafbaedda8a"
dependencies = [
"byteorder",
"bytes",
"data-encoding",
"http",
"http 1.2.0",
"httparse",
"log",
"rand",
"rustls",
"rustls-pki-types",
"sha1",
"thiserror 1.0.69",
"url",
"utf-8",
]
@ -7917,9 +8008,9 @@ dependencies = [
"bytes",
"futures-channel",
"futures-util",
"headers",
"http",
"hyper",
"headers 0.3.9",
"http 0.2.12",
"hyper 0.14.30",
"log",
"mime",
"mime_guess",
@ -8165,7 +8256,7 @@ dependencies = [
"base64 0.21.7",
"bytes",
"cookie 0.16.2",
"http",
"http 0.2.12",
"icu_segmenter",
"log",
"serde",
@ -8189,7 +8280,7 @@ dependencies = [
"cookie 0.18.1",
"crossbeam-channel",
"euclid",
"http",
"http 0.2.12",
"image",
"ipc-channel",
"keyboard-types",
@ -8227,9 +8318,12 @@ dependencies = [
[[package]]
name = "webpki-roots"
version = "0.25.4"
version = "0.26.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1"
checksum = "5d642ff16b7e79272ae451b7322067cdc17cadf68c23264be9d94a32319efe7e"
dependencies = [
"rustls-pki-types",
]
[[package]]
name = "webrender"
@ -9056,6 +9150,12 @@ dependencies = [
"synstructure",
]
[[package]]
name = "zeroize"
version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde"
[[package]]
name = "zerotrie"
version = "0.1.3"


@ -22,7 +22,7 @@ aes-kw = { version = "0.2.1", features = ["alloc"] }
aes-gcm = "0.10.3"
app_units = "0.7"
arrayvec = "0.7"
async-tungstenite = { version = "0.23", features = ["tokio-rustls-webpki-roots"] }
async-tungstenite = { version = "0.28", features = ["tokio-rustls-webpki-roots"] }
atomic_refcell = "0.1.13"
background_hang_monitor_api = { path = "components/shared/background_hang_monitor" }
backtrace = "0.3"
@ -64,13 +64,15 @@ gstreamer-gl = "0.23"
gstreamer-gl-sys = "0.23"
gstreamer-sys = "0.23"
gstreamer-video = "0.23"
headers = "0.3"
headers = "0.4"
hitrace = "0.1.4"
html5ever = "0.29"
http = "0.2"
hyper = "0.14"
hyper-rustls = { version = "0.24", default-features = false, features = ["acceptor", "http1", "http2", "logging", "tls12", "webpki-tokio"] }
http = "1.0"
http-body-util = "0.1"
hyper = "1.0"
hyper-rustls = { version = "0.27", default-features = false, features = ["http1", "http2", "logging", "tls12", "webpki-tokio"] }
hyper_serde = { path = "components/hyper_serde" }
hyper-util = "0.1"
icu_segmenter = "1.5.0"
image = "0.24"
imsz = "0.2"
@ -103,8 +105,9 @@ rand_isaac = "0.3"
rayon = "1"
regex = "1.11"
ring = "0.17.8"
rustls = { version = "0.21.12", features = ["dangerous_configuration"] }
rustls-pemfile = "1.0.4"
rustls = { version = "0.23", default-features = false, features = ["logging", "std", "tls12"] }
rustls-pemfile = "2.0"
rustls-pki-types = "1.0"
script_layout_interface = { path = "components/shared/script_layout" }
script_traits = { path = "components/shared/script" }
selectors = { git = "https://github.com/servo/stylo", branch = "2024-12-04" }
@ -136,11 +139,12 @@ tikv-jemallocator = "0.6.0"
time_03 = { package = "time", version = "0.3", features = ["large-dates", "local-offset", "serde"] }
to_shmem = { git = "https://github.com/servo/stylo", branch = "2024-12-04" }
tokio = "1"
tokio-rustls = "0.24"
tokio-rustls = { version = "0.26", default-features = false, features = ["logging"] }
tower-service = "0.3"
tracing = "0.1.41"
tracing-perfetto = "0.1.5"
tracing-subscriber = "0.3.19"
tungstenite = "0.20"
tungstenite = "0.24"
uluru = "3.0"
unicode-bidi = "0.3.18"
unicode-properties = { version = "0.1.3", features = ["emoji"] }
@ -149,7 +153,7 @@ unicode-segmentation = "1.12.0"
url = "2.5"
uuid = { version = "1.11.0", features = ["v4"] }
webdriver = "0.51.0"
webpki-roots = "0.25"
webpki-roots = "0.26"
webrender = { git = "https://github.com/servo/webrender", branch = "0.65", features = ["capture"] }
webrender_api = { git = "https://github.com/servo/webrender", branch = "0.65" }
webrender_traits = { path = "components/shared/webrender" }


@ -33,9 +33,11 @@ futures-util = { version = "0.3.30", default-features = false }
generic-array = "0.14"
headers = { workspace = true }
http = { workspace = true }
hyper = { workspace = true, features = ["client", "http1", "http2", "tcp", "stream"] }
http-body-util = { workspace = true }
hyper = { workspace = true, features = ["client", "http1", "http2"] }
hyper-rustls = { workspace = true }
hyper_serde = { workspace = true }
hyper-util = { workspace = true, features = ["client", "client-legacy", "tokio"] }
imsz = { workspace = true }
ipc-channel = { workspace = true }
log = { workspace = true }
@ -49,6 +51,7 @@ profile_traits = { workspace = true }
rayon = { workspace = true }
rustls = { workspace = true }
rustls-pemfile = { workspace = true }
rustls-pki-types = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
servo_allocator = { path = "../allocator" }
@ -62,6 +65,7 @@ tokio = { workspace = true, features = ["sync", "macros", "rt-multi-thread"] }
tokio-util = { version = "0.7.12", default-features = false, features = ["codec", "io"] }
tokio-rustls = { workspace = true }
tokio-stream = "0.1"
tower-service = { workspace = true }
tungstenite = { workspace = true }
url = { workspace = true }
uuid = { workspace = true }
@ -72,9 +76,9 @@ webpki-roots = { workspace = true }
[dev-dependencies]
flate2 = "1"
futures = { version = "0.3", features = ["compat"] }
tokio-test = "0.4"
tokio-stream = { version = "0.1", features = ["net"] }
hyper = { workspace = true, features = ["full"] }
hyper-util = { workspace = true, features = ["server-graceful"] }
rustls = { workspace = true, features = ["ring"] }
[[test]]
name = "main"


@ -9,14 +9,17 @@ use std::sync::{Arc, Mutex};
use futures::task::{Context, Poll};
use futures::Future;
use http::uri::{Authority, Uri as Destination};
use hyper::client::HttpConnector as HyperHttpConnector;
use http_body_util::combinators::BoxBody;
use hyper::body::Bytes;
use hyper::rt::Executor;
use hyper::service::Service;
use hyper::{Body, Client};
use hyper_rustls::HttpsConnector as HyperRustlsHttpsConnector;
use hyper_util::client::legacy::connect::HttpConnector as HyperHttpConnector;
use hyper_util::client::legacy::Client;
use log::warn;
use rustls::client::WebPkiVerifier;
use rustls::{Certificate, ClientConfig, OwnedTrustAnchor, RootCertStore, ServerName};
use rustls::client::WebPkiServerVerifier;
use rustls::{ClientConfig, RootCertStore};
use rustls_pki_types::{CertificateDer, ServerName, UnixTime};
use tower_service::Service;
use crate::async_runtime::HANDLE;
use crate::hosts::replace_host;
@ -80,10 +83,10 @@ pub type TlsConfig = ClientConfig;
struct CertificateErrorOverrideManagerInternal {
/// A mapping of certificates and their hosts, which have seen certificate errors.
/// This is used to later create an override in this [CertificateErrorOverrideManager].
certificates_failing_to_verify: HashMap<ServerName, Certificate>,
certificates_failing_to_verify: HashMap<ServerName<'static>, CertificateDer<'static>>,
/// A list of certificates that should be accepted despite encountering verification
/// errors.
overrides: Vec<Certificate>,
overrides: Vec<CertificateDer<'static>>,
}
/// This data structure is used to track certificate verification errors and overrides.
@ -100,7 +103,7 @@ impl CertificateErrorOverrideManager {
/// Add a certificate to this manager's list of certificates for which to ignore
/// validation errors.
pub fn add_override(&self, certificate: &Certificate) {
pub fn add_override(&self, certificate: &CertificateDer<'static>) {
self.0.lock().unwrap().overrides.push(certificate.clone());
}
@ -110,9 +113,9 @@ impl CertificateErrorOverrideManager {
pub(crate) fn remove_certificate_failing_verification(
&self,
host: &str,
) -> Option<Certificate> {
) -> Option<CertificateDer<'static>> {
let server_name = match ServerName::try_from(host) {
Ok(name) => name,
Ok(name) => name.to_owned(),
Err(error) => {
warn!("Could not convert host string into RustTLS ServerName: {error:?}");
return None;
@ -149,11 +152,12 @@ pub fn create_tls_config(
override_manager,
);
rustls::ClientConfig::builder()
.with_safe_defaults()
.dangerous()
.with_custom_certificate_verifier(Arc::new(verifier))
.with_no_client_auth()
}
#[derive(Clone)]
struct TokioExecutor {}
impl<F> Executor<F> for TokioExecutor
@ -165,8 +169,9 @@ where
}
}
#[derive(Debug)]
struct CertificateVerificationOverrideVerifier {
webpki_verifier: WebPkiVerifier,
webpki_verifier: Arc<WebPkiServerVerifier>,
ignore_certificate_errors: bool,
override_manager: CertificateErrorOverrideManager,
}
@ -178,18 +183,8 @@ impl CertificateVerificationOverrideVerifier {
override_manager: CertificateErrorOverrideManager,
) -> Self {
let root_cert_store = match ca_certficates {
CACertificates::Default => {
let mut root_cert_store = rustls::RootCertStore::empty();
root_cert_store.add_trust_anchors(webpki_roots::TLS_SERVER_ROOTS.iter().map(
|trust_anchor| {
OwnedTrustAnchor::from_subject_spki_name_constraints(
trust_anchor.subject,
trust_anchor.spki,
trust_anchor.name_constraints,
)
},
));
root_cert_store
CACertificates::Default => rustls::RootCertStore {
roots: webpki_roots::TLS_SERVER_ROOTS.to_vec(),
},
CACertificates::Override(root_cert_store) => root_cert_store,
};
@ -197,28 +192,52 @@ impl CertificateVerificationOverrideVerifier {
Self {
// See https://github.com/rustls/rustls/blame/v/0.21.6/rustls/src/client/builder.rs#L141
// This is the default verifier for Rustls that we are wrapping.
webpki_verifier: WebPkiVerifier::new(root_cert_store, None),
webpki_verifier: WebPkiServerVerifier::builder(root_cert_store.into())
.build()
.unwrap(),
ignore_certificate_errors,
override_manager,
}
}
}
impl rustls::client::ServerCertVerifier for CertificateVerificationOverrideVerifier {
impl rustls::client::danger::ServerCertVerifier for CertificateVerificationOverrideVerifier {
fn verify_tls12_signature(
&self,
message: &[u8],
cert: &CertificateDer<'_>,
dss: &rustls::DigitallySignedStruct,
) -> Result<rustls::client::danger::HandshakeSignatureValid, rustls::Error> {
self.webpki_verifier
.verify_tls12_signature(message, cert, dss)
}
fn verify_tls13_signature(
&self,
message: &[u8],
cert: &CertificateDer<'_>,
dss: &rustls::DigitallySignedStruct,
) -> Result<rustls::client::danger::HandshakeSignatureValid, rustls::Error> {
self.webpki_verifier
.verify_tls13_signature(message, cert, dss)
}
fn supported_verify_schemes(&self) -> Vec<rustls::SignatureScheme> {
self.webpki_verifier.supported_verify_schemes()
}
fn verify_server_cert(
&self,
end_entity: &Certificate,
intermediates: &[Certificate],
server_name: &ServerName,
scts: &mut dyn Iterator<Item = &[u8]>,
end_entity: &CertificateDer<'_>,
intermediates: &[CertificateDer<'_>],
server_name: &ServerName<'_>,
ocsp_response: &[u8],
now: std::time::SystemTime,
) -> Result<rustls::client::ServerCertVerified, rustls::Error> {
now: UnixTime,
) -> Result<rustls::client::danger::ServerCertVerified, rustls::Error> {
let error = match self.webpki_verifier.verify_server_cert(
end_entity,
intermediates,
server_name,
scts,
ocsp_response,
now,
) {
@ -228,13 +247,13 @@ impl rustls::client::ServerCertVerifier for CertificateVerificationOverrideVerif
if self.ignore_certificate_errors {
warn!("Ignoring certficate error: {error:?}");
return Ok(rustls::client::ServerCertVerified::assertion());
return Ok(rustls::client::danger::ServerCertVerified::assertion());
}
// If there's an override for this certificate, just accept it.
for cert_with_exception in &*self.override_manager.0.lock().unwrap().overrides {
if *end_entity == *cert_with_exception {
return Ok(rustls::client::ServerCertVerified::assertion());
return Ok(rustls::client::danger::ServerCertVerified::assertion());
}
}
self.override_manager
@ -242,12 +261,14 @@ impl rustls::client::ServerCertVerifier for CertificateVerificationOverrideVerif
.lock()
.unwrap()
.certificates_failing_to_verify
.insert(server_name.clone(), end_entity.clone());
.insert(server_name.to_owned(), end_entity.clone().into_owned());
Err(error)
}
}
pub fn create_http_client(tls_config: TlsConfig) -> Client<Connector, Body> {
pub type BoxedBody = BoxBody<Bytes, hyper::Error>;
pub fn create_http_client(tls_config: TlsConfig) -> Client<Connector, BoxedBody> {
let connector = hyper_rustls::HttpsConnectorBuilder::new()
.with_tls_config(tls_config)
.https_or_http()
@ -255,8 +276,7 @@ pub fn create_http_client(tls_config: TlsConfig) -> Client<Connector, Body> {
.enable_http2()
.wrap_connector(ServoHttpConnector::new());
Client::builder()
Client::builder(TokioExecutor {})
.http1_title_case_headers(true)
.executor(TokioExecutor {})
.build(connector)
}
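
The `BoxedBody` alias defined above is the through-line for most of the hyper 1.x changes in this patch: hyper no longer ships a concrete `Body` type, so bodies are assembled from `http-body-util` combinators and type-erased. A minimal sketch of the two constructions the patch relies on (the helper names are illustrative, not Servo APIs):

```rust
use http_body_util::{BodyExt, Empty, Full};
use hyper::body::Bytes;

// Same alias as in the connector: a type-erased body whose error type
// matches what hyper's client expects.
type BoxedBody = http_body_util::combinators::BoxBody<Bytes, hyper::Error>;

// Replaces hyper 0.14's `Body::empty()`.
fn empty_body() -> BoxedBody {
    Empty::<Bytes>::new()
        .map_err(|_| unreachable!()) // Empty's error type is Infallible
        .boxed()
}

// Replaces `Body::from(bytes)` for buffered payloads.
fn full_body(bytes: Vec<u8>) -> BoxedBody {
    Full::new(Bytes::from(bytes))
        .map_err(|_| unreachable!()) // Full's error type is also Infallible
        .boxed()
}
```

`create_http_client` then threads this alias through `hyper_util::client::legacy::Client<Connector, BoxedBody>`, the maintained stand-in for hyper 0.14's built-in pooling client.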


@ -28,12 +28,16 @@ use futures::task::{Context, Poll};
use futures::{Future, Stream};
use futures_util::StreamExt;
use headers::{ContentLength, HeaderMapExt};
use http_body_util::BodyExt;
use hyper::body::Body;
use hyper::header::{HeaderValue, CONTENT_ENCODING, TRANSFER_ENCODING};
use hyper::{Body, Response};
use hyper::Response;
use servo_config::pref;
use tokio_util::codec::{BytesCodec, FramedRead};
use tokio_util::io::StreamReader;
use crate::connector::BoxedBody;
pub const DECODER_BUFFER_SIZE: usize = 8192;
/// A response decompressor over a non-blocking stream of bytes.
@ -81,7 +85,7 @@ impl Decoder {
/// This decoder will emit the underlying bytes as-is.
#[inline]
fn plain_text(
body: Body,
body: BoxedBody,
is_secure_scheme: bool,
content_length: Option<ContentLength>,
) -> Decoder {
@ -95,7 +99,7 @@ impl Decoder {
/// This decoder will buffer and decompress bytes that are encoded in the expected format.
#[inline]
fn pending(
body: Body,
body: BoxedBody,
type_: DecoderType,
is_secure_scheme: bool,
content_length: Option<ContentLength>,
@ -114,7 +118,7 @@ impl Decoder {
/// how to decode the content body of the response.
///
/// Uses the correct variant by inspecting the Content-Encoding header.
pub fn detect(response: Response<Body>, is_secure_scheme: bool) -> Response<Decoder> {
pub fn detect(response: Response<BoxedBody>, is_secure_scheme: bool) -> Response<Decoder> {
let values = response
.headers()
.get_all(CONTENT_ENCODING)
@ -225,7 +229,7 @@ impl Future for Pending {
}
struct BodyStream {
body: Body,
body: BoxedBody,
is_secure_scheme: bool,
content_length: Option<ContentLength>,
total_read: u64,
@ -234,14 +238,16 @@ struct BodyStream {
impl BodyStream {
fn empty() -> Self {
BodyStream {
body: Body::empty(),
body: http_body_util::Empty::new()
.map_err(|_| unreachable!())
.boxed(),
is_secure_scheme: false,
content_length: None,
total_read: 0,
}
}
fn new(body: Body, is_secure_scheme: bool, content_length: Option<ContentLength>) -> Self {
fn new(body: BoxedBody, is_secure_scheme: bool, content_length: Option<ContentLength>) -> Self {
BodyStream {
body,
is_secure_scheme,
@ -255,8 +261,11 @@ impl Stream for BodyStream {
type Item = Result<Bytes, io::Error>;
fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Option<Self::Item>> {
match futures_core::ready!(Pin::new(&mut self.body).poll_next(cx)) {
match futures_core::ready!(Pin::new(&mut self.body).poll_frame(cx)) {
Some(Ok(bytes)) => {
let Ok(bytes) = bytes.into_data() else {
return Poll::Ready(None);
};
self.total_read += bytes.len() as u64;
Poll::Ready(Some(Ok(bytes)))
},
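
The change above tracks hyper 1.x's frame-based `Body` trait: `poll_frame` yields `Frame`s that can carry either data or trailers, so data has to be pulled out explicitly (the `into_data()` guard). For contrast, a minimal sketch of draining a whole hyper 1.x body when streaming is not needed, assuming `http-body-util` is available:

```rust
use http_body_util::BodyExt;
use hyper::body::{Body, Bytes};

// Collects every frame of a body, concatenating the data frames and
// discarding trailers; this is the hyper 1.x replacement for the old
// `hyper::body::to_bytes` helper.
async fn read_to_bytes<B>(body: B) -> Result<Bytes, B::Error>
where
    B: Body<Data = Bytes>,
{
    Ok(body.collect().await?.to_bytes())
}
```

The decoder keeps the streaming path because it feeds decompression incrementally; `collect()` is only appropriate when buffering the entire payload is acceptable.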


@ -31,7 +31,7 @@ use net_traits::{
FetchTaskTarget, NetworkError, ReferrerPolicy, ResourceAttribute, ResourceFetchTiming,
ResourceTimeValue, ResourceTimingType,
};
use rustls::Certificate;
use rustls_pki_types::CertificateDer;
use serde::{Deserialize, Serialize};
use servo_arc::Arc as ServoArc;
use servo_url::ServoUrl;
@ -675,7 +675,7 @@ fn handle_allowcert_request(request: &mut Request, context: &FetchContext) -> io
context
.state
.override_manager
.add_override(&Certificate(cert_bytes));
.add_override(&CertificateDer::from_slice(&cert_bytes).into_owned());
Ok(())
}
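
rustls 0.21's `Certificate` was a plain owned `Vec<u8>`, while `rustls-pki-types`' `CertificateDer` borrows by default, which is why the override path converts to an owned, `'static` value before storing it. A small illustrative sketch of that conversion (the helper is hypothetical):

```rust
use rustls_pki_types::CertificateDer;

// Borrow the DER bytes, then detach them so the certificate can outlive
// the request buffer it came from (e.g. the override manager's list).
fn to_owned_cert(der: &[u8]) -> CertificateDer<'static> {
    CertificateDer::from_slice(der).into_owned()
}
```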


@ -11,7 +11,7 @@ use std::sync::atomic::{self, AtomicBool, AtomicUsize, Ordering};
use std::sync::{Arc, Mutex, RwLock, Weak};
use embedder_traits::{EmbedderMsg, EmbedderProxy, FilterPattern};
use headers::{ContentLength, ContentType, HeaderMap, HeaderMapExt};
use headers::{ContentLength, ContentType, HeaderMap, HeaderMapExt, Range};
use http::header::{self, HeaderValue};
use ipc_channel::ipc::{self, IpcSender};
use log::warn;
@ -30,6 +30,7 @@ use url::Url;
use uuid::Uuid;
use crate::fetch::methods::{CancellationListener, Data, RangeRequestBounds};
use crate::protocols::get_range_request_bounds;
use crate::resource_thread::CoreResourceThreadPool;
pub const FILE_CHUNK_SIZE: usize = 32768; //32 KB
@ -132,7 +133,7 @@ impl FileManager {
file_token: &FileTokenCheck,
origin: FileOrigin,
response: &mut Response,
range: RangeRequestBounds,
range: Option<Range>,
) -> Result<(), BlobURLStoreError> {
self.fetch_blob_buf(
done_sender,
@ -140,7 +141,7 @@ impl FileManager {
&id,
file_token,
&origin,
range,
BlobBounds::Unresolved(range),
response,
)
}
@ -285,13 +286,17 @@ impl FileManager {
id: &Uuid,
file_token: &FileTokenCheck,
origin_in: &FileOrigin,
range: RangeRequestBounds,
bounds: BlobBounds,
response: &mut Response,
) -> Result<(), BlobURLStoreError> {
let file_impl = self.store.get_impl(id, file_token, origin_in)?;
match file_impl {
FileImpl::Memory(buf) => {
let range = range
let bounds = match bounds {
BlobBounds::Unresolved(range) => get_range_request_bounds(range, buf.size),
BlobBounds::Resolved(bounds) => bounds,
};
let range = bounds
.get_final(Some(buf.size))
.map_err(|_| BlobURLStoreError::InvalidRange)?;
@ -323,7 +328,11 @@ impl FileManager {
let file = File::open(&metadata.path)
.map_err(|e| BlobURLStoreError::External(e.to_string()))?;
let range = range
let bounds = match bounds {
BlobBounds::Unresolved(range) => get_range_request_bounds(range, metadata.size),
BlobBounds::Resolved(bounds) => bounds,
};
let range = bounds
.get_final(Some(metadata.size))
.map_err(|_| BlobURLStoreError::InvalidRange)?;
@ -362,15 +371,16 @@ impl FileManager {
FileImpl::Sliced(parent_id, inner_rel_pos) => {
// Next time we don't need to check validity since
// we have already done that for requesting URL if necessary.
let bounds = RangeRequestBounds::Final(
RelativePos::full_range().slice_inner(&inner_rel_pos),
);
self.fetch_blob_buf(
done_sender,
cancellation_listener,
&parent_id,
file_token,
origin_in,
RangeRequestBounds::Final(
RelativePos::full_range().slice_inner(&inner_rel_pos),
),
BlobBounds::Resolved(bounds),
response,
)
},
@ -378,6 +388,11 @@ impl FileManager {
}
}
enum BlobBounds {
Unresolved(Option<Range>),
Resolved(RangeRequestBounds),
}
/// File manager's data store. It maintains a thread-safe mapping
/// from FileID to FileStoreEntry which might have different backend implementation.
/// Access to the content is encapsulated as methods of this struct.


@ -345,7 +345,7 @@ fn create_resource_with_bytes_from_resource(
fn handle_range_request(
request: &Request,
candidates: &[&CachedResource],
range_spec: Vec<(Bound<u64>, Bound<u64>)>,
range_spec: &Range,
done_chan: &mut DoneChannel,
) -> Option<CachedResponse> {
let mut complete_cached_resources = candidates
@ -354,10 +354,7 @@ fn handle_range_request(
let partial_cached_resources = candidates
.iter()
.filter(|resource| resource.status == StatusCode::PARTIAL_CONTENT);
match (
range_spec.first().unwrap(),
complete_cached_resources.next(),
) {
if let Some(complete_resource) = complete_cached_resources.next() {
// TODO: take the full range spec into account.
// If we have a complete resource, take the request range from the body.
// When there isn't a complete resource available, we loop over cached partials,
@ -366,172 +363,145 @@ fn handle_range_request(
// see <https://tools.ietf.org/html/rfc7233#section-4.3>.
// TODO: add support for complete and partial resources,
// whose body is in the ResponseBody::Receiving state.
(&(Bound::Included(beginning), Bound::Included(end)), Some(complete_resource)) => {
if let ResponseBody::Done(ref body) = *complete_resource.body.lock().unwrap() {
if end == u64::MAX {
// Prevent overflow on the addition below.
return None;
}
let b = beginning as usize;
let e = end as usize + 1;
let requested = body.get(b..e);
if let Some(bytes) = requested {
let new_resource =
create_resource_with_bytes_from_resource(bytes, complete_resource);
let cached_headers = new_resource.metadata.headers.lock().unwrap();
let cached_response =
create_cached_response(request, &new_resource, &cached_headers, done_chan);
if let Some(cached_response) = cached_response {
return Some(cached_response);
let body_len = match *complete_resource.body.lock().unwrap() {
ResponseBody::Done(ref body) => body.len(),
_ => 0,
};
let bound = range_spec
.satisfiable_ranges(body_len.try_into().unwrap())
.next()
.unwrap();
match bound {
(Bound::Included(beginning), Bound::Included(end)) => {
if let ResponseBody::Done(ref body) = *complete_resource.body.lock().unwrap() {
if end == u64::MAX {
// Prevent overflow on the addition below.
return None;
}
}
}
},
(&(Bound::Included(beginning), Bound::Included(end)), None) => {
for partial_resource in partial_cached_resources {
let headers = partial_resource.metadata.headers.lock().unwrap();
let content_range = headers.typed_get::<ContentRange>();
let (res_beginning, res_end) = match content_range {
Some(range) => {
if let Some(bytes_range) = range.bytes_range() {
bytes_range
} else {
continue;
}
},
_ => continue,
};
if res_beginning <= beginning && res_end >= end {
let resource_body = &*partial_resource.body.lock().unwrap();
let requested = match resource_body {
ResponseBody::Done(body) => {
let b = beginning as usize - res_beginning as usize;
let e = end as usize - res_beginning as usize + 1;
body.get(b..e)
},
_ => continue,
};
let b = beginning as usize;
let e = end as usize + 1;
let requested = body.get(b..e);
if let Some(bytes) = requested {
let new_resource =
create_resource_with_bytes_from_resource(bytes, partial_resource);
let cached_response =
create_cached_response(request, &new_resource, &headers, done_chan);
create_resource_with_bytes_from_resource(bytes, complete_resource);
let cached_headers = new_resource.metadata.headers.lock().unwrap();
let cached_response = create_cached_response(
request,
&new_resource,
&cached_headers,
done_chan,
);
if let Some(cached_response) = cached_response {
return Some(cached_response);
}
}
}
}
},
(&(Bound::Included(beginning), Bound::Unbounded), Some(complete_resource)) => {
if let ResponseBody::Done(ref body) = *complete_resource.body.lock().unwrap() {
let b = beginning as usize;
let requested = body.get(b..);
if let Some(bytes) = requested {
let new_resource =
create_resource_with_bytes_from_resource(bytes, complete_resource);
let cached_headers = new_resource.metadata.headers.lock().unwrap();
let cached_response =
create_cached_response(request, &new_resource, &cached_headers, done_chan);
if let Some(cached_response) = cached_response {
return Some(cached_response);
}
}
}
},
(&(Bound::Included(beginning), Bound::Unbounded), None) => {
for partial_resource in partial_cached_resources {
let headers = partial_resource.metadata.headers.lock().unwrap();
let content_range = headers.typed_get::<ContentRange>();
let (res_beginning, res_end, total) = if let Some(range) = content_range {
match (range.bytes_range(), range.bytes_len()) {
(Some(bytes_range), Some(total)) => (bytes_range.0, bytes_range.1, total),
_ => continue,
}
} else {
continue;
};
if total == 0 {
// Prevent overflow in the below operations from occuring.
continue;
};
if res_beginning < beginning && res_end == total - 1 {
let resource_body = &*partial_resource.body.lock().unwrap();
let requested = match resource_body {
ResponseBody::Done(body) => {
let from_byte = beginning as usize - res_beginning as usize;
body.get(from_byte..)
},
_ => continue,
};
},
(Bound::Included(beginning), Bound::Unbounded) => {
if let ResponseBody::Done(ref body) = *complete_resource.body.lock().unwrap() {
let b = beginning as usize;
let requested = body.get(b..);
if let Some(bytes) = requested {
let new_resource =
create_resource_with_bytes_from_resource(bytes, partial_resource);
let cached_response =
create_cached_response(request, &new_resource, &headers, done_chan);
create_resource_with_bytes_from_resource(bytes, complete_resource);
let cached_headers = new_resource.metadata.headers.lock().unwrap();
let cached_response = create_cached_response(
request,
&new_resource,
&cached_headers,
done_chan,
);
if let Some(cached_response) = cached_response {
return Some(cached_response);
}
}
}
}
},
(&(Bound::Unbounded, Bound::Included(offset)), Some(complete_resource)) => {
if let ResponseBody::Done(ref body) = *complete_resource.body.lock().unwrap() {
let from_byte = body.len() - offset as usize;
let requested = body.get(from_byte..);
if let Some(bytes) = requested {
let new_resource =
create_resource_with_bytes_from_resource(bytes, complete_resource);
let cached_headers = new_resource.metadata.headers.lock().unwrap();
let cached_response =
create_cached_response(request, &new_resource, &cached_headers, done_chan);
if let Some(cached_response) = cached_response {
return Some(cached_response);
}
}
}
},
(&(Bound::Unbounded, Bound::Included(offset)), None) => {
for partial_resource in partial_cached_resources {
let headers = partial_resource.metadata.headers.lock().unwrap();
let content_range = headers.typed_get::<ContentRange>();
let (res_beginning, res_end, total) = if let Some(range) = content_range {
match (range.bytes_range(), range.bytes_len()) {
(Some(bytes_range), Some(total)) => (bytes_range.0, bytes_range.1, total),
_ => continue,
}
} else {
continue;
};
if total < res_beginning || total < res_end || offset == 0 || offset == u64::MAX {
// Prevent overflow in the below operations from occuring.
continue;
}
if (total - res_beginning) > (offset - 1) && (total - res_end) < offset + 1 {
let resource_body = &*partial_resource.body.lock().unwrap();
let requested = match resource_body {
ResponseBody::Done(body) => {
let from_byte = body.len() - offset as usize;
body.get(from_byte..)
},
_ => return None,
}
} else {
for partial_resource in partial_cached_resources {
let headers = partial_resource.metadata.headers.lock().unwrap();
let content_range = headers.typed_get::<ContentRange>();
let Some(body_len) = content_range.as_ref().and_then(|range| range.bytes_len()) else {
continue;
};
match range_spec.satisfiable_ranges(body_len - 1).next().unwrap() {
(Bound::Included(beginning), Bound::Included(end)) => {
let (res_beginning, res_end) = match content_range {
Some(range) => {
if let Some(bytes_range) = range.bytes_range() {
bytes_range
} else {
continue;
}
},
_ => continue,
};
if let Some(bytes) = requested {
let new_resource =
create_resource_with_bytes_from_resource(bytes, partial_resource);
let cached_response =
create_cached_response(request, &new_resource, &headers, done_chan);
if let Some(cached_response) = cached_response {
return Some(cached_response);
if res_beginning <= beginning && res_end >= end {
let resource_body = &*partial_resource.body.lock().unwrap();
let requested = match resource_body {
ResponseBody::Done(body) => {
let b = beginning as usize - res_beginning as usize;
let e = end as usize - res_beginning as usize + 1;
body.get(b..e)
},
_ => continue,
};
if let Some(bytes) = requested {
let new_resource =
create_resource_with_bytes_from_resource(bytes, partial_resource);
let cached_response =
create_cached_response(request, &new_resource, &headers, done_chan);
if let Some(cached_response) = cached_response {
return Some(cached_response);
}
}
}
}
},
(Bound::Included(beginning), Bound::Unbounded) => {
let (res_beginning, res_end, total) = if let Some(range) = content_range {
match (range.bytes_range(), range.bytes_len()) {
(Some(bytes_range), Some(total)) => {
(bytes_range.0, bytes_range.1, total)
},
_ => continue,
}
} else {
continue;
};
if total == 0 {
// Prevent overflow in the below operations from occuring.
continue;
};
if res_beginning <= beginning && res_end == total - 1 {
let resource_body = &*partial_resource.body.lock().unwrap();
let requested = match resource_body {
ResponseBody::Done(body) => {
let from_byte = beginning as usize - res_beginning as usize;
body.get(from_byte..)
},
_ => continue,
};
if let Some(bytes) = requested {
let new_resource =
create_resource_with_bytes_from_resource(bytes, partial_resource);
let cached_response =
create_cached_response(request, &new_resource, &headers, done_chan);
if let Some(cached_response) = cached_response {
return Some(cached_response);
}
}
}
},
_ => continue,
}
},
// All the cases with Bound::Excluded should be unreachable anyway
_ => return None,
}
}
None
}
@ -607,12 +577,7 @@ impl HttpCache {
}
// Support for range requests
if let Some(range_spec) = request.headers.typed_get::<Range>() {
return handle_range_request(
request,
candidates.as_slice(),
range_spec.iter().collect(),
done_chan,
);
return handle_range_request(request, candidates.as_slice(), &range_spec, done_chan);
}
while let Some(cached_resource) = candidates.pop() {
// Not a Range request.
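
This rewrite implements the "use reported response length when calculating available ranges" commit: instead of pattern-matching raw `(Bound, Bound)` pairs straight out of the header, the cache first asks `headers::Range` which requested ranges are satisfiable for the body length it actually has. A minimal sketch of the API involved (headers 0.4; the helpers are illustrative):

```rust
use std::ops::Bound;
use headers::Range;

// satisfiable_ranges(len) filters the parsed range specs against the
// reported body length, so specs that cannot be satisfied (for example a
// start offset past the end of the body) never reach the slicing code.
fn first_satisfiable(range: &Range, reported_len: u64) -> Option<(Bound<u64>, Bound<u64>)> {
    range.satisfiable_ranges(reported_len).next()
}

fn example() {
    // "bytes=0-499": the first 500 bytes of the resource.
    let range = Range::bytes(0..=499).expect("valid range");
    assert_eq!(
        first_satisfiable(&range, 1024),
        Some((Bound::Included(0), Bound::Included(499)))
    );
}
```

Partial cached resources still need the `Content-Range` arithmetic above, since their stored bytes cover only a window of the full body.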


@ -2,7 +2,6 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use core::convert::Infallible;
use std::collections::{HashMap, HashSet};
use std::iter::FromIterator;
use std::sync::{Arc as StdArc, Condvar, Mutex, RwLock};
@ -19,7 +18,7 @@ use devtools_traits::{
use embedder_traits::{
EmbedderMsg, EmbedderProxy, PromptCredentialsInput, PromptDefinition, PromptOrigin,
};
use futures::{future, StreamExt, TryFutureExt, TryStreamExt};
use futures::{future, TryFutureExt, TryStreamExt};
use headers::authorization::Basic;
use headers::{
AccessControlAllowCredentials, AccessControlAllowHeaders, AccessControlAllowMethods,
@ -32,10 +31,14 @@ use http::header::{
CONTENT_TYPE,
};
use http::{HeaderMap, Method, Request as HyperRequest, StatusCode};
use http_body_util::combinators::BoxBody;
use http_body_util::{BodyExt, Full};
use hyper::body::{Bytes, Frame};
use hyper::ext::ReasonPhrase;
use hyper::header::{HeaderName, TRANSFER_ENCODING};
use hyper::{Body, Client, Response as HyperResponse};
use hyper::Response as HyperResponse;
use hyper_serde::Serde;
use hyper_util::client::legacy::Client;
use ipc_channel::ipc::{self, IpcSender};
use ipc_channel::router::ROUTER;
use log::{debug, error, info, log_enabled, warn};
@ -101,7 +104,7 @@ pub struct HttpState {
pub http_cache_state: HttpCacheState,
pub auth_cache: RwLock<AuthCache>,
pub history_states: RwLock<HashMap<HistoryStateId, Vec<u8>>>,
pub client: Client<Connector, Body>,
pub client: Client<Connector, crate::connector::BoxedBody>,
pub override_manager: CertificateErrorOverrideManager,
pub embedder_proxy: Mutex<EmbedderProxy>,
}
@ -440,7 +443,7 @@ enum BodyChunk {
enum BodyStream {
/// A receiver that can be used in Body::wrap_stream,
/// for streaming the request over the network.
Chunked(TokioReceiver<Vec<u8>>),
Chunked(TokioReceiver<Result<Frame<Bytes>, hyper::Error>>),
/// A body whose bytes are buffered
/// and sent in one chunk over the network.
Buffered(UnboundedReceiver<BodyChunk>),
@ -450,7 +453,7 @@ enum BodyStream {
/// used to enqueue chunks.
enum BodySink {
/// A Tokio sender used to feed chunks to the network stream.
Chunked(TokioSender<Vec<u8>>),
Chunked(TokioSender<Result<Frame<Bytes>, hyper::Error>>),
/// A Crossbeam sender used to send chunks to the fetch worker,
/// where they will be buffered
/// in order to ensure they are not streamed them over the network.
@ -463,7 +466,7 @@ impl BodySink {
BodySink::Chunked(ref sender) => {
let sender = sender.clone();
HANDLE.lock().unwrap().as_mut().unwrap().spawn(async move {
let _ = sender.send(bytes).await;
let _ = sender.send(Ok(Frame::data(bytes.into()))).await;
});
},
BodySink::Buffered(ref sender) => {
@ -484,7 +487,7 @@ impl BodySink {
#[allow(clippy::too_many_arguments)]
async fn obtain_response(
client: &Client<Connector, Body>,
client: &Client<Connector, crate::connector::BoxedBody>,
url: &ServoUrl,
method: &Method,
request_headers: &mut HeaderMap,
@ -584,7 +587,7 @@ async fn obtain_response(
let body = match stream {
BodyStream::Chunked(receiver) => {
let stream = ReceiverStream::new(receiver);
Body::wrap_stream(stream.map(Ok::<_, Infallible>))
BoxBody::new(http_body_util::StreamBody::new(stream))
},
BodyStream::Buffered(mut receiver) => {
// Accumulate bytes received over IPC into a vector.
@ -598,7 +601,7 @@ async fn obtain_response(
None => warn!("Failed to read all chunks from request body."),
}
}
body.into()
Full::new(body.into()).map_err(|_| unreachable!()).boxed()
},
};
HyperRequest::builder()
@ -609,7 +612,11 @@ async fn obtain_response(
HyperRequest::builder()
.method(method)
.uri(encoded_url)
.body(Body::empty())
.body(
http_body_util::Empty::new()
.map_err(|_| unreachable!())
.boxed(),
)
};
context
@ -695,7 +702,10 @@ async fn obtain_response(
None
};
future::ready(Ok((Decoder::detect(res, is_secure_scheme), msg)))
future::ready(Ok((
Decoder::detect(res.map(|r| r.boxed()), is_secure_scheme),
msg,
)))
})
.map_err(move |error| {
NetworkError::from_hyper_error(
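
The chunked upload path now pushes ready-made `Frame`s into the Tokio channel and wraps the receiver in a `StreamBody`, since hyper 1.x removed `Body::wrap_stream`. A minimal sketch of that wiring (the helper names are illustrative; `ReceiverStream` is the same tokio-stream adapter the code above uses):

```rust
use http_body_util::{combinators::BoxBody, StreamBody};
use hyper::body::{Bytes, Frame};
use tokio::sync::mpsc;
use tokio_stream::wrappers::ReceiverStream;

type BodySender = mpsc::Sender<Result<Frame<Bytes>, hyper::Error>>;

// Build a request body that streams whatever is pushed into the sender.
fn channel_body(buffer: usize) -> (BodySender, BoxBody<Bytes, hyper::Error>) {
    let (sender, receiver) = mpsc::channel(buffer);
    // StreamBody adapts a Stream of Result<Frame<_>, _> into a Body;
    // BoxBody::new erases the concrete type so it matches BoxedBody.
    let body = BoxBody::new(StreamBody::new(ReceiverStream::new(receiver)));
    (sender, body)
}

// Pushing a chunk then looks like:
//     sender.send(Ok(Frame::data(bytes))).await
// mirroring the BodySink::Chunked arm above.
```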


@ -16,9 +16,7 @@ use net_traits::{NetworkError, ResourceFetchTiming};
use tokio::sync::mpsc::unbounded_channel;
use crate::fetch::methods::{Data, DoneChannel, FetchContext};
use crate::protocols::{
get_range_request_bounds, partial_content, range_not_satisfiable_error, ProtocolHandler,
};
use crate::protocols::{partial_content, range_not_satisfiable_error, ProtocolHandler};
#[derive(Default)]
pub struct BlobProtocolHander {}
@ -42,9 +40,6 @@ impl ProtocolHandler for BlobProtocolHander {
let range_header = request.headers.typed_get::<Range>();
let is_range_request = range_header.is_some();
// We will get a final version of this range once we have
// the length of the data backing the blob.
let range = get_range_request_bounds(range_header);
let (id, origin) = match parse_blob_url(&url) {
Ok((id, origin)) => (id, origin),
@ -73,7 +68,7 @@ impl ProtocolHandler for BlobProtocolHander {
&context.file_token,
origin,
&mut response,
range,
range_header,
) {
let _ = done_sender.send(Data::Done);
let err = match err {


@ -58,7 +58,9 @@ impl ProtocolHandler for FileProtocolHander {
let range_header = request.headers.typed_get::<Range>();
let is_range_request = range_header.is_some();
let Ok(range) = get_range_request_bounds(range_header).get_final(file_size) else {
let Ok(range) = get_range_request_bounds(range_header, file_size.unwrap_or(0))
.get_final(file_size)
else {
range_not_satisfiable_error(&mut response);
return Box::pin(ready(response));
};


@ -106,19 +106,15 @@ pub fn range_not_satisfiable_error(response: &mut Response) {
}
/// Get the range bounds if the `Range` header is present.
pub fn get_range_request_bounds(range: Option<Range>) -> RangeRequestBounds {
pub fn get_range_request_bounds(range: Option<Range>, len: u64) -> RangeRequestBounds {
if let Some(ref range) = range {
let (start, end) = match range
.iter()
.collect::<Vec<(Bound<u64>, Bound<u64>)>>()
.first()
{
Some(&(Bound::Included(start), Bound::Unbounded)) => (start, None),
Some(&(Bound::Included(start), Bound::Included(end))) => {
let (start, end) = match range.satisfiable_ranges(len).next() {
Some((Bound::Included(start), Bound::Unbounded)) => (start, None),
Some((Bound::Included(start), Bound::Included(end))) => {
// `end` should be less or equal to `start`.
(start, Some(i64::max(start as i64, end as i64)))
},
Some(&(Bound::Unbounded, Bound::Included(offset))) => {
Some((Bound::Unbounded, Bound::Included(offset))) => {
return RangeRequestBounds::Pending(offset);
},
_ => (0, None),


@ -61,8 +61,8 @@ fn load_root_cert_store_from_file(file_path: String) -> io::Result<RootCertStore
let mut root_cert_store = RootCertStore::empty();
let mut pem = BufReader::new(File::open(file_path)?);
let certs = rustls_pemfile::certs(&mut pem)?;
root_cert_store.add_parsable_certificates(&certs);
let certs: Result<Vec<_>, _> = rustls_pemfile::certs(&mut pem).collect();
root_cert_store.add_parsable_certificates(certs?);
Ok(root_cert_store)
}


@ -21,7 +21,9 @@ use headers::{
};
use http::header::{self, HeaderMap, HeaderName, HeaderValue};
use http::{Method, StatusCode};
use hyper::{Body, Request as HyperRequest, Response as HyperResponse};
use http_body_util::combinators::BoxBody;
use hyper::body::{Bytes, Incoming};
use hyper::{Request as HyperRequest, Response as HyperResponse};
use mime::{self, Mime};
use net::fetch::cors_cache::CorsCache;
use net::fetch::methods::{self, CancellationListener, FetchContext};
@ -41,13 +43,12 @@ use net_traits::{
};
use servo_arc::Arc as ServoArc;
use servo_url::ServoUrl;
use tokio_test::block_on;
use uuid::Uuid;
use crate::http_loader::{expect_devtools_http_request, expect_devtools_http_response};
use crate::{
create_embedder_proxy, create_http_state, fetch, fetch_with_context, fetch_with_cors_cache,
make_server, make_ssl_server, new_fetch_context, DEFAULT_USER_AGENT,
make_body, make_server, make_ssl_server, new_fetch_context, DEFAULT_USER_AGENT,
};
// TODO write a struct that impls Handler for storing test values
@ -55,9 +56,11 @@ use crate::{
#[test]
fn test_fetch_response_is_not_network_error() {
static MESSAGE: &'static [u8] = b"";
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
*response.body_mut() = MESSAGE.to_vec().into();
};
let handler =
move |_: HyperRequest<Incoming>,
response: &mut HyperResponse<BoxBody<Bytes, hyper::Error>>| {
*response.body_mut() = make_body(MESSAGE.to_vec());
};
let (server, url) = make_server(handler);
let mut request = RequestBuilder::new(url.clone(), Referrer::NoReferrer)
@ -89,9 +92,11 @@ fn test_fetch_on_bad_port_is_network_error() {
#[test]
fn test_fetch_response_body_matches_const_message() {
static MESSAGE: &'static [u8] = b"Hello World!";
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
*response.body_mut() = MESSAGE.to_vec().into();
};
let handler =
move |_: HyperRequest<Incoming>,
response: &mut HyperResponse<BoxBody<Bytes, hyper::Error>>| {
*response.body_mut() = make_body(MESSAGE.to_vec());
};
let (server, url) = make_server(handler);
let mut request = RequestBuilder::new(url.clone(), Referrer::NoReferrer)
@ -191,7 +196,7 @@ fn test_fetch_blob() {
expected: bytes.to_vec(),
};
block_on(methods::fetch(&mut request, &mut target, &context));
crate::HANDLE.block_on(methods::fetch(&mut request, &mut target, &context));
let fetch_response = receiver.recv().unwrap();
assert!(!fetch_response.is_network_error());
@ -285,38 +290,41 @@ fn test_fetch_bogus_scheme() {
fn test_cors_preflight_fetch() {
static ACK: &'static [u8] = b"ACK";
let state = Arc::new(AtomicUsize::new(0));
let handler = move |request: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
if request.method() == Method::OPTIONS && state.clone().fetch_add(1, Ordering::SeqCst) == 0
{
assert!(request
.headers()
.contains_key(header::ACCESS_CONTROL_REQUEST_METHOD));
assert!(!request
.headers()
.contains_key(header::ACCESS_CONTROL_REQUEST_HEADERS));
assert!(!request
.headers()
.get(header::REFERER)
.unwrap()
.to_str()
.unwrap()
.contains("a.html"));
response
.headers_mut()
.typed_insert(AccessControlAllowOrigin::ANY);
response
.headers_mut()
.typed_insert(AccessControlAllowCredentials);
response
.headers_mut()
.typed_insert(AccessControlAllowMethods::from_iter(vec![Method::GET]));
} else {
response
.headers_mut()
.typed_insert(AccessControlAllowOrigin::ANY);
*response.body_mut() = ACK.to_vec().into();
}
};
let handler =
move |request: HyperRequest<Incoming>,
response: &mut HyperResponse<BoxBody<Bytes, hyper::Error>>| {
if request.method() == Method::OPTIONS &&
state.clone().fetch_add(1, Ordering::SeqCst) == 0
{
assert!(request
.headers()
.contains_key(header::ACCESS_CONTROL_REQUEST_METHOD));
assert!(!request
.headers()
.contains_key(header::ACCESS_CONTROL_REQUEST_HEADERS));
assert!(!request
.headers()
.get(header::REFERER)
.unwrap()
.to_str()
.unwrap()
.contains("a.html"));
response
.headers_mut()
.typed_insert(AccessControlAllowOrigin::ANY);
response
.headers_mut()
.typed_insert(AccessControlAllowCredentials);
response
.headers_mut()
.typed_insert(AccessControlAllowMethods::from_iter(vec![Method::GET]));
} else {
response
.headers_mut()
.typed_insert(AccessControlAllowOrigin::ANY);
*response.body_mut() = make_body(ACK.to_vec());
}
};
let (server, url) = make_server(handler);
let target_url = url.clone().join("a.html").unwrap();
@ -340,34 +348,37 @@ fn test_cors_preflight_cache_fetch() {
let state = Arc::new(AtomicUsize::new(0));
let counter = state.clone();
let mut cache = CorsCache::default();
let handler = move |request: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
if request.method() == Method::OPTIONS && state.clone().fetch_add(1, Ordering::SeqCst) == 0
{
assert!(request
.headers()
.contains_key(header::ACCESS_CONTROL_REQUEST_METHOD));
assert!(!request
.headers()
.contains_key(header::ACCESS_CONTROL_REQUEST_HEADERS));
response
.headers_mut()
.typed_insert(AccessControlAllowOrigin::ANY);
response
.headers_mut()
.typed_insert(AccessControlAllowCredentials);
response
.headers_mut()
.typed_insert(AccessControlAllowMethods::from_iter(vec![Method::GET]));
response
.headers_mut()
.typed_insert(AccessControlMaxAge::from(Duration::new(6000, 0)));
} else {
response
.headers_mut()
.typed_insert(AccessControlAllowOrigin::ANY);
*response.body_mut() = ACK.to_vec().into();
}
};
let handler =
move |request: HyperRequest<Incoming>,
response: &mut HyperResponse<BoxBody<Bytes, hyper::Error>>| {
if request.method() == Method::OPTIONS &&
state.clone().fetch_add(1, Ordering::SeqCst) == 0
{
assert!(request
.headers()
.contains_key(header::ACCESS_CONTROL_REQUEST_METHOD));
assert!(!request
.headers()
.contains_key(header::ACCESS_CONTROL_REQUEST_HEADERS));
response
.headers_mut()
.typed_insert(AccessControlAllowOrigin::ANY);
response
.headers_mut()
.typed_insert(AccessControlAllowCredentials);
response
.headers_mut()
.typed_insert(AccessControlAllowMethods::from_iter(vec![Method::GET]));
response
.headers_mut()
.typed_insert(AccessControlMaxAge::from(Duration::new(6000, 0)));
} else {
response
.headers_mut()
.typed_insert(AccessControlAllowOrigin::ANY);
*response.body_mut() = make_body(ACK.to_vec());
}
};
let (server, url) = make_server(handler);
let mut request = RequestBuilder::new(url, Referrer::NoReferrer).build();
@ -403,31 +414,34 @@ fn test_cors_preflight_cache_fetch() {
fn test_cors_preflight_fetch_network_error() {
static ACK: &'static [u8] = b"ACK";
let state = Arc::new(AtomicUsize::new(0));
let handler = move |request: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
if request.method() == Method::OPTIONS && state.clone().fetch_add(1, Ordering::SeqCst) == 0
{
assert!(request
.headers()
.contains_key(header::ACCESS_CONTROL_REQUEST_METHOD));
assert!(!request
.headers()
.contains_key(header::ACCESS_CONTROL_REQUEST_HEADERS));
response
.headers_mut()
.typed_insert(AccessControlAllowOrigin::ANY);
response
.headers_mut()
.typed_insert(AccessControlAllowCredentials);
response
.headers_mut()
.typed_insert(AccessControlAllowMethods::from_iter(vec![Method::GET]));
} else {
response
.headers_mut()
.typed_insert(AccessControlAllowOrigin::ANY);
*response.body_mut() = ACK.to_vec().into();
}
};
let handler =
move |request: HyperRequest<Incoming>,
response: &mut HyperResponse<BoxBody<Bytes, hyper::Error>>| {
if request.method() == Method::OPTIONS &&
state.clone().fetch_add(1, Ordering::SeqCst) == 0
{
assert!(request
.headers()
.contains_key(header::ACCESS_CONTROL_REQUEST_METHOD));
assert!(!request
.headers()
.contains_key(header::ACCESS_CONTROL_REQUEST_HEADERS));
response
.headers_mut()
.typed_insert(AccessControlAllowOrigin::ANY);
response
.headers_mut()
.typed_insert(AccessControlAllowCredentials);
response
.headers_mut()
.typed_insert(AccessControlAllowMethods::from_iter(vec![Method::GET]));
} else {
response
.headers_mut()
.typed_insert(AccessControlAllowOrigin::ANY);
*response.body_mut() = make_body(ACK.to_vec());
}
};
let (server, url) = make_server(handler);
let mut request = RequestBuilder::new(url, Referrer::NoReferrer).build();
@ -443,18 +457,20 @@ fn test_cors_preflight_fetch_network_error() {
#[test]
fn test_fetch_response_is_basic_filtered() {
static MESSAGE: &'static [u8] = b"";
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
response
.headers_mut()
.insert(header::SET_COOKIE, HeaderValue::from_static(""));
// this header is obsoleted, so hyper doesn't implement it, but it's still covered by the spec
response.headers_mut().insert(
HeaderName::from_static("set-cookie2"),
HeaderValue::from_bytes(&vec![]).unwrap(),
);
let handler =
move |_: HyperRequest<Incoming>,
response: &mut HyperResponse<BoxBody<Bytes, hyper::Error>>| {
response
.headers_mut()
.insert(header::SET_COOKIE, HeaderValue::from_static(""));
// this header is obsoleted, so hyper doesn't implement it, but it's still covered by the spec
response.headers_mut().insert(
HeaderName::from_static("set-cookie2"),
HeaderValue::from_bytes(&vec![]).unwrap(),
);
*response.body_mut() = MESSAGE.to_vec().into();
};
*response.body_mut() = make_body(MESSAGE.to_vec());
};
let (server, url) = make_server(handler);
let mut request = RequestBuilder::new(url.clone(), Referrer::NoReferrer)
@ -476,47 +492,49 @@ fn test_fetch_response_is_basic_filtered() {
#[test]
fn test_fetch_response_is_cors_filtered() {
static MESSAGE: &'static [u8] = b"";
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
// this is mandatory for the Cors Check to pass
// TODO test using different url encodings with this value ie. punycode
response
.headers_mut()
.typed_insert(AccessControlAllowOrigin::ANY);
let handler =
move |_: HyperRequest<Incoming>,
response: &mut HyperResponse<BoxBody<Bytes, hyper::Error>>| {
// this is mandatory for the Cors Check to pass
// TODO test using different url encodings with this value ie. punycode
response
.headers_mut()
.typed_insert(AccessControlAllowOrigin::ANY);
// these are the headers that should be kept after filtering
response.headers_mut().typed_insert(CacheControl::new());
response.headers_mut().insert(
header::CONTENT_LANGUAGE,
HeaderValue::from_bytes(&vec![]).unwrap(),
);
response
.headers_mut()
.typed_insert(ContentType::from(mime::TEXT_HTML));
response
.headers_mut()
.typed_insert(Expires::from(SystemTime::now() + Duration::new(86400, 0)));
response
.headers_mut()
.typed_insert(LastModified::from(SystemTime::now()));
response.headers_mut().typed_insert(Pragma::no_cache());
// these are the headers that should be kept after filtering
response.headers_mut().typed_insert(CacheControl::new());
response.headers_mut().insert(
header::CONTENT_LANGUAGE,
HeaderValue::from_bytes(&vec![]).unwrap(),
);
response
.headers_mut()
.typed_insert(ContentType::from(mime::TEXT_HTML));
response
.headers_mut()
.typed_insert(Expires::from(SystemTime::now() + Duration::new(86400, 0)));
response
.headers_mut()
.typed_insert(LastModified::from(SystemTime::now()));
response.headers_mut().typed_insert(Pragma::no_cache());
// these headers should not be kept after filtering, even though they are given a pass
response
.headers_mut()
.insert(header::SET_COOKIE, HeaderValue::from_static(""));
response.headers_mut().insert(
HeaderName::from_static("set-cookie2"),
HeaderValue::from_bytes(&vec![]).unwrap(),
);
response
.headers_mut()
.typed_insert(AccessControlAllowHeaders::from_iter(vec![
HeaderName::from_static("set-cookie"),
// these headers should not be kept after filtering, even though they are given a pass
response
.headers_mut()
.insert(header::SET_COOKIE, HeaderValue::from_static(""));
response.headers_mut().insert(
HeaderName::from_static("set-cookie2"),
]));
HeaderValue::from_bytes(&vec![]).unwrap(),
);
response
.headers_mut()
.typed_insert(AccessControlAllowHeaders::from_iter(vec![
HeaderName::from_static("set-cookie"),
HeaderName::from_static("set-cookie2"),
]));
*response.body_mut() = MESSAGE.to_vec().into();
};
*response.body_mut() = make_body(MESSAGE.to_vec());
};
let (server, url) = make_server(handler);
// an origin mis-match will stop it from defaulting to a basic filtered response
@ -546,9 +564,11 @@ fn test_fetch_response_is_cors_filtered() {
#[test]
fn test_fetch_response_is_opaque_filtered() {
static MESSAGE: &'static [u8] = b"";
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
*response.body_mut() = MESSAGE.to_vec().into();
};
let handler =
move |_: HyperRequest<Incoming>,
response: &mut HyperResponse<BoxBody<Bytes, hyper::Error>>| {
*response.body_mut() = make_body(MESSAGE.to_vec());
};
let (server, url) = make_server(handler);
// an origin mis-match will fall through to an Opaque filtered response
@ -577,24 +597,26 @@ fn test_fetch_response_is_opaque_filtered() {
#[test]
fn test_fetch_response_is_opaque_redirect_filtered() {
static MESSAGE: &'static [u8] = b"";
let handler = move |request: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
let redirects = request
.uri()
.path()
.split("/")
.collect::<String>()
.parse::<u32>()
.unwrap_or(0);
let handler =
move |request: HyperRequest<Incoming>,
response: &mut HyperResponse<BoxBody<Bytes, hyper::Error>>| {
let redirects = request
.uri()
.path()
.split("/")
.collect::<String>()
.parse::<u32>()
.unwrap_or(0);
if redirects == 1 {
*response.body_mut() = MESSAGE.to_vec().into();
} else {
*response.status_mut() = StatusCode::FOUND;
response
.headers_mut()
.insert(header::LOCATION, HeaderValue::from_static("1"));
}
};
if redirects == 1 {
*response.body_mut() = make_body(MESSAGE.to_vec());
} else {
*response.status_mut() = StatusCode::FOUND;
response
.headers_mut()
.insert(header::LOCATION, HeaderValue::from_static("1"));
}
};
let (server, url) = make_server(handler);
@ -626,9 +648,11 @@ fn test_fetch_with_local_urls_only() {
// If flag `local_urls_only` is set, fetching a non-local URL must result in network error.
static MESSAGE: &'static [u8] = b"";
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
*response.body_mut() = MESSAGE.to_vec().into();
};
let handler =
move |_: HyperRequest<Incoming>,
response: &mut HyperResponse<BoxBody<Bytes, hyper::Error>>| {
*response.body_mut() = make_body(MESSAGE.to_vec());
};
let (server, server_url) = make_server(handler);
let do_fetch = |url: ServoUrl| {
@ -661,9 +685,11 @@ fn test_fetch_with_local_urls_only() {
#[test]
fn test_fetch_with_hsts() {
static MESSAGE: &'static [u8] = b"";
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
*response.body_mut() = MESSAGE.to_vec().into();
};
let handler =
move |_: HyperRequest<Incoming>,
response: &mut HyperResponse<BoxBody<Bytes, hyper::Error>>| {
*response.body_mut() = make_body(MESSAGE.to_vec());
};
let (server, url) = make_ssl_server(handler);
@ -710,14 +736,16 @@ fn test_fetch_with_hsts() {
#[test]
fn test_load_adds_host_to_hsts_list_when_url_is_https() {
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
response
.headers_mut()
.typed_insert(StrictTransportSecurity::excluding_subdomains(
Duration::from_secs(31536000),
));
*response.body_mut() = b"Yay!".to_vec().into();
};
let handler =
move |_: HyperRequest<Incoming>,
response: &mut HyperResponse<BoxBody<Bytes, hyper::Error>>| {
response
.headers_mut()
.typed_insert(StrictTransportSecurity::excluding_subdomains(
Duration::from_secs(31536000),
));
*response.body_mut() = make_body(b"Yay!".to_vec());
};
let (server, mut url) = make_ssl_server(handler);
url.as_mut_url().set_scheme("https").unwrap();
@ -772,9 +800,11 @@ fn test_load_adds_host_to_hsts_list_when_url_is_https() {
#[test]
fn test_fetch_self_signed() {
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
*response.body_mut() = b"Yay!".to_vec().into();
};
let handler =
move |_: HyperRequest<Incoming>,
response: &mut HyperResponse<BoxBody<Bytes, hyper::Error>>| {
*response.body_mut() = make_body(b"Yay!".to_vec());
};
let (server, mut url) = make_ssl_server(handler);
url.as_mut_url().set_scheme("https").unwrap();
@ -834,9 +864,11 @@ fn test_fetch_self_signed() {
#[test]
fn test_fetch_with_sri_network_error() {
static MESSAGE: &'static [u8] = b"alert('Hello, Network Error');";
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
*response.body_mut() = MESSAGE.to_vec().into();
};
let handler =
move |_: HyperRequest<Incoming>,
response: &mut HyperResponse<BoxBody<Bytes, hyper::Error>>| {
*response.body_mut() = make_body(MESSAGE.to_vec());
};
let (server, url) = make_server(handler);
let mut request = RequestBuilder::new(url.clone(), Referrer::NoReferrer)
@ -858,9 +890,11 @@ fn test_fetch_with_sri_network_error() {
#[test]
fn test_fetch_with_sri_sucess() {
static MESSAGE: &'static [u8] = b"alert('Hello, world.');";
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
*response.body_mut() = MESSAGE.to_vec().into();
};
let handler =
move |_: HyperRequest<Incoming>,
response: &mut HyperResponse<BoxBody<Bytes, hyper::Error>>| {
*response.body_mut() = make_body(MESSAGE.to_vec());
};
let (server, url) = make_server(handler);
let mut request = RequestBuilder::new(url.clone(), Referrer::NoReferrer)
@ -888,18 +922,20 @@ fn test_fetch_blocked_nosniff() {
const HEADER: &'static str = "x-content-type-options";
const VALUE: &'static [u8] = b"nosniff";
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
let mime_header = ContentType::from(mime.clone());
response.headers_mut().typed_insert(mime_header);
assert!(response.headers().contains_key(header::CONTENT_TYPE));
// Add the nosniff header
response.headers_mut().insert(
HeaderName::from_static(HEADER),
HeaderValue::from_bytes(VALUE).unwrap(),
);
let handler =
move |_: HyperRequest<Incoming>,
response: &mut HyperResponse<BoxBody<Bytes, hyper::Error>>| {
let mime_header = ContentType::from(mime.clone());
response.headers_mut().typed_insert(mime_header);
assert!(response.headers().contains_key(header::CONTENT_TYPE));
// Add the nosniff header
response.headers_mut().insert(
HeaderName::from_static(HEADER),
HeaderValue::from_bytes(VALUE).unwrap(),
);
*response.body_mut() = MESSAGE.to_vec().into();
};
*response.body_mut() = make_body(MESSAGE.to_vec());
};
let (server, url) = make_server(handler);
@ -926,25 +962,27 @@ fn test_fetch_blocked_nosniff() {
}
fn setup_server_and_fetch(message: &'static [u8], redirect_cap: u32) -> Response {
let handler = move |request: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
let redirects = request
.uri()
.path()
.split("/")
.collect::<String>()
.parse::<u32>()
.unwrap_or(0);
let handler =
move |request: HyperRequest<Incoming>,
response: &mut HyperResponse<BoxBody<Bytes, hyper::Error>>| {
let redirects = request
.uri()
.path()
.split("/")
.collect::<String>()
.parse::<u32>()
.unwrap_or(0);
if redirects >= redirect_cap {
*response.body_mut() = message.to_vec().into();
} else {
*response.status_mut() = StatusCode::FOUND;
let url = format!("{redirects}", redirects = redirects + 1);
response
.headers_mut()
.insert(header::LOCATION, HeaderValue::from_str(&url).unwrap());
}
};
if redirects >= redirect_cap {
*response.body_mut() = make_body(message.to_vec());
} else {
*response.status_mut() = StatusCode::FOUND;
let url = format!("{redirects}", redirects = redirects + 1);
response
.headers_mut()
.insert(header::LOCATION, HeaderValue::from_str(&url).unwrap());
}
};
let (server, url) = make_server(handler);
@ -997,44 +1035,46 @@ fn test_fetch_redirect_updates_method_runner(
method: Method,
) {
let handler_method = method.clone();
let handler_tx = Arc::new(Mutex::new(tx));
let handler_tx = Arc::new(tx);
let handler = move |request: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
let redirects = request
.uri()
.path()
.split("/")
.collect::<String>()
.parse::<u32>()
.unwrap_or(0);
let handler =
move |request: HyperRequest<Incoming>,
response: &mut HyperResponse<BoxBody<Bytes, hyper::Error>>| {
let redirects = request
.uri()
.path()
.split("/")
.collect::<String>()
.parse::<u32>()
.unwrap_or(0);
let mut test_pass = true;
let mut test_pass = true;
if redirects == 0 {
*response.status_mut() = StatusCode::TEMPORARY_REDIRECT;
response
.headers_mut()
.insert(header::LOCATION, HeaderValue::from_static("1"));
} else if redirects == 1 {
// this makes sure that the request method doesn't change from the wrong status code
if handler_method != Method::GET && request.method() == Method::GET {
if redirects == 0 {
*response.status_mut() = StatusCode::TEMPORARY_REDIRECT;
response
.headers_mut()
.insert(header::LOCATION, HeaderValue::from_static("1"));
} else if redirects == 1 {
// this makes sure that the request method doesn't change from the wrong status code
if handler_method != Method::GET && request.method() == Method::GET {
test_pass = false;
}
*response.status_mut() = status_code;
response
.headers_mut()
.insert(header::LOCATION, HeaderValue::from_static("2"));
} else if request.method() != Method::GET {
test_pass = false;
}
*response.status_mut() = status_code;
response
.headers_mut()
.insert(header::LOCATION, HeaderValue::from_static("2"));
} else if request.method() != Method::GET {
test_pass = false;
}
// the first time this handler is reached, nothing is being tested, so don't send anything
if redirects > 0 {
handler_tx.lock().unwrap().send(test_pass).unwrap();
}
};
// the first time this handler is reached, nothing is being tested, so don't send anything
if redirects > 0 {
handler_tx.send(test_pass).unwrap();
}
};
let (server, url) = make_server(handler);
let (server, url) = crate::make_server(handler);
let mut request = RequestBuilder::new(url.clone(), Referrer::NoReferrer)
.origin(url.origin())
@ -1114,9 +1154,11 @@ fn response_is_done(response: &Response) -> bool {
#[test]
fn test_fetch_async_returns_complete_response() {
static MESSAGE: &'static [u8] = b"this message should be retrieved in full";
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
*response.body_mut() = MESSAGE.to_vec().into();
};
let handler =
move |_: HyperRequest<Incoming>,
response: &mut HyperResponse<BoxBody<Bytes, hyper::Error>>| {
*response.body_mut() = make_body(MESSAGE.to_vec());
};
let (server, url) = make_server(handler);
let mut request = RequestBuilder::new(url.clone(), Referrer::NoReferrer)
@ -1131,9 +1173,11 @@ fn test_fetch_async_returns_complete_response() {
#[test]
fn test_opaque_filtered_fetch_async_returns_complete_response() {
static MESSAGE: &'static [u8] = b"";
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
*response.body_mut() = MESSAGE.to_vec().into();
};
let handler =
move |_: HyperRequest<Incoming>,
response: &mut HyperResponse<BoxBody<Bytes, hyper::Error>>| {
*response.body_mut() = make_body(MESSAGE.to_vec());
};
let (server, url) = make_server(handler);
// an origin mis-match will fall through to an Opaque filtered response
@ -1149,24 +1193,26 @@ fn test_opaque_filtered_fetch_async_returns_complete_response() {
#[test]
fn test_opaque_redirect_filtered_fetch_async_returns_complete_response() {
static MESSAGE: &'static [u8] = b"";
let handler = move |request: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
let redirects = request
.uri()
.path()
.split("/")
.collect::<String>()
.parse::<u32>()
.unwrap_or(0);
let handler =
move |request: HyperRequest<Incoming>,
response: &mut HyperResponse<BoxBody<Bytes, hyper::Error>>| {
let redirects = request
.uri()
.path()
.split("/")
.collect::<String>()
.parse::<u32>()
.unwrap_or(0);
if redirects == 1 {
*response.body_mut() = MESSAGE.to_vec().into();
} else {
*response.status_mut() = StatusCode::FOUND;
response
.headers_mut()
.insert(header::LOCATION, HeaderValue::from_static("1"));
}
};
if redirects == 1 {
*response.body_mut() = make_body(MESSAGE.to_vec());
} else {
*response.status_mut() = StatusCode::FOUND;
response
.headers_mut()
.insert(header::LOCATION, HeaderValue::from_static("1"));
}
};
let (server, url) = make_server(handler);
let mut request = RequestBuilder::new(url.clone(), Referrer::NoReferrer)
@ -1186,9 +1232,11 @@ fn test_opaque_redirect_filtered_fetch_async_returns_complete_response() {
#[cfg(not(target_os = "windows"))]
fn test_fetch_with_devtools() {
static MESSAGE: &'static [u8] = b"Yay!";
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
*response.body_mut() = MESSAGE.to_vec().into();
};
let handler =
move |_: HyperRequest<Incoming>,
response: &mut HyperResponse<BoxBody<Bytes, hyper::Error>>| {
*response.body_mut() = make_body(MESSAGE.to_vec());
};
let (server, url) = make_server(handler);

File diff suppressed because it is too large


@ -30,11 +30,13 @@ use crossbeam_channel::{unbounded, Sender};
use devtools_traits::DevtoolsControlMsg;
use embedder_traits::{EmbedderProxy, EmbedderReceiver, EventLoopWaker};
use futures::future::ready;
use futures::StreamExt;
use hyper::server::conn::Http;
use hyper::server::Server as HyperServer;
use hyper::service::{make_service_fn, service_fn};
use hyper::{Body, Request as HyperRequest, Response as HyperResponse};
use http_body_util::combinators::BoxBody;
use http_body_util::{BodyExt, Empty, Full};
use hyper::body::{Bytes, Incoming};
use hyper::server::conn::http1;
use hyper::service::service_fn;
use hyper::{Request as HyperRequest, Response as HyperResponse};
use hyper_util::rt::tokio::TokioIo;
use net::connector::{create_http_client, create_tls_config};
use net::fetch::cors_cache::CorsCache;
use net::fetch::methods::{self, CancellationListener, FetchContext};
@ -46,30 +48,26 @@ use net_traits::filemanager_thread::FileTokenCheck;
use net_traits::request::Request;
use net_traits::response::Response;
use net_traits::{FetchTaskTarget, ResourceFetchTiming, ResourceTimingType};
use rustls::{self, Certificate, PrivateKey};
use rustls_pemfile::{certs, pkcs8_private_keys};
use rustls_pki_types::{CertificateDer, PrivateKeyDer};
use servo_arc::Arc as ServoArc;
use servo_url::ServoUrl;
use tokio::net::{TcpListener, TcpStream};
use tokio::runtime::{Builder, Runtime};
use tokio_rustls::{self, TlsAcceptor};
use tokio_stream::wrappers::TcpListenerStream;
use tokio_test::block_on;
pub static HANDLE: LazyLock<Mutex<Runtime>> = LazyLock::new(|| {
Mutex::new(
Builder::new_multi_thread()
.enable_io()
.worker_threads(10)
.build()
.unwrap(),
)
pub static HANDLE: LazyLock<Runtime> = LazyLock::new(|| {
Builder::new_multi_thread()
.enable_io()
.worker_threads(10)
.build()
.unwrap()
});
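A minimal sketch (not from this patch, assuming tokio's multi-thread runtime) of why the `Mutex` wrapper around the shared runtime could be dropped: `Runtime::spawn` and `Runtime::block_on` only take `&self`, so a bare `LazyLock<Runtime>` is already safe to share across test threads. `RT` and `run_on_shared_runtime` are hypothetical names.

use std::sync::LazyLock;

use tokio::runtime::Runtime;

static RT: LazyLock<Runtime> = LazyLock::new(|| Runtime::new().unwrap());

fn run_on_shared_runtime() {
    // Both calls borrow the runtime immutably, so no lock is required.
    RT.spawn(async { /* background work */ });
    let value = RT.block_on(async { 21 * 2 });
    assert_eq!(value, 42);
}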
const DEFAULT_USER_AGENT: &'static str = "Such Browser. Very Layout. Wow.";
struct FetchResponseCollector {
sender: Sender<Response>,
sender: Option<tokio::sync::oneshot::Sender<Response>>,
}
fn create_embedder_proxy() -> EmbedderProxy {
@ -149,6 +147,8 @@ fn receive_credential_prompt_msgs(
}
fn create_http_state(fc: Option<EmbedderProxy>) -> HttpState {
let _ = rustls::crypto::ring::default_provider().install_default();
let override_manager = net::connector::CertificateErrorOverrideManager::new();
HttpState {
hsts_list: RwLock::new(net::hsts::HstsList::default()),
@ -197,7 +197,7 @@ impl FetchTaskTarget for FetchResponseCollector {
fn process_response_chunk(&mut self, _: &Request, _: Vec<u8>) {}
/// Fired when the response is fully fetched
fn process_response_eof(&mut self, _: &Request, response: &Response) {
let _ = self.sender.send(response.clone());
let _ = self.sender.take().unwrap().send(response.clone());
}
}
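A minimal sketch (not from this patch) of the pattern behind wrapping the sender in an `Option`: a `tokio::sync::oneshot::Sender` is consumed by `send`, so it has to be taken out of its slot at the single delivery point. The `deliver` helper is a hypothetical name.

fn deliver<T>(slot: &mut Option<tokio::sync::oneshot::Sender<T>>, value: T) {
    if let Some(sender) = slot.take() {
        // `send` takes `self` by value; an error only means the receiving
        // half has already been dropped, which callers can ignore.
        let _ = sender.send(value);
    }
}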
@ -206,18 +206,22 @@ fn fetch(request: &mut Request, dc: Option<Sender<DevtoolsControlMsg>>) -> Respo
}
fn fetch_with_context(request: &mut Request, mut context: &mut FetchContext) -> Response {
let (sender, receiver) = unbounded();
let mut target = FetchResponseCollector { sender: sender };
block_on(async move {
let (sender, receiver) = tokio::sync::oneshot::channel();
let mut target = FetchResponseCollector {
sender: Some(sender),
};
HANDLE.block_on(async move {
methods::fetch(request, &mut target, &mut context).await;
receiver.recv().unwrap()
receiver.await.unwrap()
})
}
fn fetch_with_cors_cache(request: &mut Request, cache: &mut CorsCache) -> Response {
let (sender, receiver) = unbounded();
let mut target = FetchResponseCollector { sender: sender };
block_on(async move {
let (sender, receiver) = tokio::sync::oneshot::channel();
let mut target = FetchResponseCollector {
sender: Some(sender),
};
HANDLE.block_on(async move {
methods::fetch_with_cors_cache(
request,
cache,
@ -225,13 +229,13 @@ fn fetch_with_cors_cache(request: &mut Request, cache: &mut CorsCache) -> Respon
&mut new_fetch_context(None, None, None),
)
.await;
receiver.recv().unwrap()
receiver.await.unwrap()
})
}
pub(crate) struct Server {
pub close_channel: tokio::sync::oneshot::Sender<()>,
pub certificates: Option<Vec<Certificate>>,
pub certificates: Option<Vec<CertificateDer<'static>>>,
}
impl Server {
@ -242,34 +246,59 @@ impl Server {
fn make_server<H>(handler: H) -> (Server, ServoUrl)
where
H: Fn(HyperRequest<Body>, &mut HyperResponse<Body>) + Send + Sync + 'static,
H: Fn(HyperRequest<Incoming>, &mut HyperResponse<BoxBody<Bytes, hyper::Error>>)
+ Send
+ Sync
+ 'static,
{
let handler = Arc::new(handler);
let listener = StdTcpListener::bind("0.0.0.0:0").unwrap();
listener.set_nonblocking(true).unwrap();
let listener = HANDLE.block_on(async move { TcpListener::from_std(listener).unwrap() });
let url_string = format!("http://localhost:{}", listener.local_addr().unwrap().port());
let url = ServoUrl::parse(&url_string).unwrap();
let (tx, rx) = tokio::sync::oneshot::channel::<()>();
let graceful = hyper_util::server::graceful::GracefulShutdown::new();
let (tx, mut rx) = tokio::sync::oneshot::channel::<()>();
let server = async move {
HyperServer::from_tcp(listener)
.unwrap()
.serve(make_service_fn(move |_| {
let handler = handler.clone();
ready(Ok::<_, Infallible>(service_fn(
move |req: HyperRequest<Body>| {
let mut response = HyperResponse::new(Vec::<u8>::new().into());
handler(req, &mut response);
ready(Ok::<_, Infallible>(response))
},
)))
}))
.with_graceful_shutdown(async move {
rx.await.ok();
})
.await
.expect("Could not start server");
loop {
let stream = tokio::select! {
stream = listener.accept() => stream.unwrap().0,
_val = &mut rx => {
let _ = graceful.shutdown();
break;
}
};
let handler = handler.clone();
let stream = stream.into_std().unwrap();
stream
.set_read_timeout(Some(std::time::Duration::new(5, 0)))
.unwrap();
let stream = TcpStream::from_std(stream).unwrap();
let http = http1::Builder::new();
let conn = http.serve_connection(
TokioIo::new(stream),
service_fn(move |req: HyperRequest<Incoming>| {
let mut response =
HyperResponse::new(Empty::new().map_err(|_| unreachable!()).boxed());
handler(req, &mut response);
ready(Ok::<_, Infallible>(response))
}),
);
let conn = graceful.watch(conn);
HANDLE.spawn(async move {
let _ = conn.await;
});
}
};
HANDLE.lock().unwrap().spawn(server);
let _ = HANDLE.spawn(server);
(
Server {
close_channel: tx,
@ -281,37 +310,40 @@ where
/// Given a path to a file containing PEM certificates, load and parse them into
/// a vector of RusTLS [CertificateDer]s.
fn load_certificates_from_pem(path: &PathBuf) -> std::io::Result<Vec<Certificate>> {
fn load_certificates_from_pem(path: &PathBuf) -> std::io::Result<Vec<CertificateDer<'static>>> {
let file = File::open(path)?;
let mut reader = BufReader::new(file);
let certs = certs(&mut reader)?;
Ok(certs.into_iter().map(Certificate).collect())
certs(&mut reader).collect::<Result<Vec<_>, _>>()
}
/// Given a path to a file containing PEM keys, load and parse them into
/// a single RusTLS [PrivateKeyDer].
fn load_private_key_from_file(path: &PathBuf) -> Result<PrivateKey, Box<dyn std::error::Error>> {
fn load_private_key_from_file(
path: &PathBuf,
) -> Result<PrivateKeyDer<'static>, Box<dyn std::error::Error>> {
let file = File::open(&path)?;
let mut reader = BufReader::new(file);
let mut keys = pkcs8_private_keys(&mut reader)?;
let mut keys = pkcs8_private_keys(&mut reader).collect::<Result<Vec<_>, _>>()?;
match keys.len() {
0 => Err(format!("No PKCS8-encoded private key found in {path:?}").into()),
1 => Ok(PrivateKey(keys.remove(0))),
1 => Ok(PrivateKeyDer::try_from(keys.remove(0))?),
_ => Err(format!("More than one PKCS8-encoded private key found in {path:?}").into()),
}
}
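A minimal sketch (not from this patch, assuming rustls-pemfile 2.x and rustls-pki-types) of a looser variant of the same loading flow, using the crate's `private_key` helper, which returns the first key of any supported encoding; `load_tls_material`, `cert_path`, and `key_path` are hypothetical names.

use std::{fs::File, io::BufReader, path::Path};

use rustls_pki_types::{CertificateDer, PrivateKeyDer};

fn load_tls_material(
    cert_path: &Path,
    key_path: &Path,
) -> Result<(Vec<CertificateDer<'static>>, PrivateKeyDer<'static>), Box<dyn std::error::Error>> {
    // Every parsed PEM section yields a Result, so collect() surfaces I/O and
    // syntax errors from the certificate file.
    let certs = rustls_pemfile::certs(&mut BufReader::new(File::open(cert_path)?))
        .collect::<Result<Vec<_>, _>>()?;
    let key = rustls_pemfile::private_key(&mut BufReader::new(File::open(key_path)?))?
        .ok_or("no private key found in PEM file")?;
    Ok((certs, key))
}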
fn make_ssl_server<H>(handler: H) -> (Server, ServoUrl)
where
H: Fn(HyperRequest<Body>, &mut HyperResponse<Body>) + Send + Sync + 'static,
H: Fn(HyperRequest<Incoming>, &mut HyperResponse<BoxBody<Bytes, hyper::Error>>)
+ Send
+ Sync
+ 'static,
{
let handler = Arc::new(handler);
let listener = StdTcpListener::bind("[::0]:0").unwrap();
let listener = HANDLE
.lock()
.unwrap()
.block_on(async move { TcpListener::from_std(listener).unwrap() });
listener.set_nonblocking(true).unwrap();
let listener = HANDLE.block_on(async move { TcpListener::from_std(listener).unwrap() });
let url_string = format!("http://localhost:{}", listener.local_addr().unwrap().port());
let url = ServoUrl::parse(&url_string).unwrap();
@ -325,27 +357,20 @@ where
let key = load_private_key_from_file(&key_path).expect("Invalid key");
let config = rustls::ServerConfig::builder()
.with_safe_defaults()
.with_no_client_auth()
.with_single_cert(certificates.clone(), key)
.map_err(|err| io::Error::new(io::ErrorKind::InvalidInput, err))
.expect("Could not create rustls ServerConfig");
let acceptor = TlsAcceptor::from(Arc::new(config));
let mut listener = TcpListenerStream::new(listener);
let (tx, mut rx) = tokio::sync::oneshot::channel::<()>();
let server = async move {
loop {
let stream = tokio::select! {
stream = listener.next() => stream,
stream = listener.accept() => stream.unwrap().0,
_ = &mut rx => break
};
let stream = match stream {
Some(stream) => stream.expect("Could not accept stream: "),
_ => break,
};
let stream = stream.into_std().unwrap();
stream
.set_read_timeout(Some(std::time::Duration::new(5, 0)))
@ -363,11 +388,12 @@ where
},
};
let _ = Http::new()
let _ = http1::Builder::new()
.serve_connection(
stream,
service_fn(move |req: HyperRequest<Body>| {
let mut response = HyperResponse::new(Body::empty());
TokioIo::new(stream),
service_fn(move |req: HyperRequest<Incoming>| {
let mut response =
HyperResponse::new(Empty::new().map_err(|_| unreachable!()).boxed());
handler(req, &mut response);
ready(Ok::<_, Infallible>(response))
}),
@ -376,7 +402,7 @@ where
}
};
HANDLE.lock().unwrap().spawn(server);
HANDLE.spawn(server);
(
Server {
@ -386,3 +412,9 @@ where
url,
)
}
pub fn make_body(bytes: Vec<u8>) -> BoxBody<Bytes, hyper::Error> {
Full::new(Bytes::from(bytes))
.map_err(|_| unreachable!())
.boxed()
}
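A minimal sketch (not from this patch) of the hyper 1.x body types the tests now rely on: fixed outgoing bodies are `Full` values erased into `BoxBody`, and incoming request bodies are drained with `BodyExt::collect`. The `echo` service below is a hypothetical example combining both halves.

use http_body_util::combinators::BoxBody;
use http_body_util::{BodyExt, Full};
use hyper::body::{Bytes, Incoming};
use hyper::{Request, Response};

async fn echo(
    request: Request<Incoming>,
) -> Result<Response<BoxBody<Bytes, hyper::Error>>, hyper::Error> {
    // Collect the streaming request body into a single Bytes buffer.
    let bytes = request.into_body().collect().await?.to_bytes();
    // `Full` never errors, so the `unreachable!()` closure is never invoked.
    let body = Full::new(bytes).map_err(|_| unreachable!()).boxed();
    Ok(Response::new(body))
}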


@ -23,6 +23,7 @@ headers = { workspace = true }
http = { workspace = true }
hyper = { workspace = true }
hyper_serde = { workspace = true }
hyper-util = { workspace = true, features = ["client-legacy"] }
ipc-channel = { workspace = true }
log = { workspace = true }
malloc_size_of = { workspace = true }
@ -31,7 +32,7 @@ mime = { workspace = true }
num-traits = { workspace = true }
percent-encoding = { workspace = true }
pixels = { path = "../../pixels" }
rustls = { workspace = true }
rustls-pki-types = { workspace = true }
serde = { workspace = true }
servo_arc = { workspace = true }
servo_rand = { path = "../../rand" }


@ -15,8 +15,8 @@ use cookie::Cookie;
use crossbeam_channel::{unbounded, Receiver, Sender};
use headers::{ContentType, HeaderMapExt, ReferrerPolicy as ReferrerPolicyHeader};
use http::{Error as HttpError, HeaderMap, StatusCode};
use hyper::Error as HyperError;
use hyper_serde::Serde;
use hyper_util::client::legacy::Error as HyperError;
use ipc_channel::ipc::{self, IpcReceiver, IpcSender};
use ipc_channel::router::ROUTER;
use ipc_channel::Error as IpcError;
@ -24,7 +24,7 @@ use malloc_size_of::malloc_size_of_is_0;
use malloc_size_of_derive::MallocSizeOf;
use mime::Mime;
use request::RequestId;
use rustls::Certificate;
use rustls_pki_types::CertificateDer;
use serde::{Deserialize, Serialize};
use servo_rand::RngCore;
use servo_url::{ImmutableOrigin, ServoUrl};
@ -882,10 +882,10 @@ pub enum NetworkError {
}
impl NetworkError {
pub fn from_hyper_error(error: &HyperError, certificate: Option<Certificate>) -> Self {
pub fn from_hyper_error(error: &HyperError, certificate: Option<CertificateDer>) -> Self {
let error_string = error.to_string();
match certificate {
Some(certificate) => NetworkError::SslValidation(error_string, certificate.0),
Some(certificate) => NetworkError::SslValidation(error_string, certificate.to_vec()),
_ => NetworkError::Internal(error_string),
}
}


@ -18,7 +18,7 @@ compositing_traits = { workspace = true }
cookie = { workspace = true }
crossbeam-channel = { workspace = true }
euclid = { workspace = true }
http = { workspace = true }
http = { version = "0.2" }
image = { workspace = true }
ipc-channel = { workspace = true }
keyboard-types = { workspace = true }


@ -155,6 +155,14 @@ skip = [
# wgpu depends on thiserror 2, while rest is still on 1
"thiserror",
"thiserror-impl",
# duplicated by webdriver
"h2",
"headers",
"headers-core",
"http",
"http-body",
"hyper",
]
# github.com organizations to allow git sources for


@ -65,6 +65,7 @@ getopts = { workspace = true }
hitrace = { workspace = true, optional = true }
mime_guess = { workspace = true }
url = { workspace = true }
rustls = { version = "0.23", default-features = false, features = ["ring"] }
tokio = { workspace = true }
tracing = { workspace = true, optional = true }
tracing-subscriber = { workspace = true, optional = true, features = ["env-filter"] }


@ -16,6 +16,7 @@ use crate::panic_hook;
pub fn main() {
crate::crash_handler::install();
crate::init_tracing();
crate::init_crypto();
crate::resources::init();
// Parse the command line options and store them globally


@ -61,6 +61,7 @@ pub fn init(
callbacks: Box<dyn HostTrait>,
) -> Result<(), &'static str> {
crate::init_tracing();
crate::init_crypto();
resources::set(Box::new(ResourceReaderInstance::new()));
if let Some(prefs) = init_opts.prefs {


@ -43,6 +43,7 @@ pub fn init(
) -> Result<ServoGlue, &'static str> {
info!("Entered simpleservo init function");
crate::init_tracing();
crate::init_crypto();
let resource_dir = PathBuf::from(&options.resource_dir).join("servo");
resources::set(Box::new(ResourceReaderInstance::new(resource_dir)));
let mut args = vec!["servoshell".to_string()];


@ -38,6 +38,12 @@ pub fn main() {
desktop::cli::main()
}
pub fn init_crypto() {
rustls::crypto::ring::default_provider()
.install_default()
.expect("Error initializing crypto provider");
}
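If an embedder preferred a different provider, the same hook could install one instead; a hypothetical sketch, assuming rustls were built with its `aws-lc-rs` feature enabled:

pub fn init_crypto_aws_lc() {
    // Hypothetical alternative to the ring provider above; it must run before
    // any TLS configuration is constructed, and it fails if a provider is
    // already installed for the process.
    rustls::crypto::aws_lc_rs::default_provider()
        .install_default()
        .expect("Error initializing crypto provider");
}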
pub fn init_tracing() {
#[cfg(feature = "tracing")]
{