Update all network-related dependencies to the latest versions (#34630)

* Update all network-related dependencies to the latest versions:
* rustls
* hyper
* http
* headers
* tungstenite
* async-tungstenite

Signed-off-by: Josh Matthews <josh@joshmatthews.net>

* net: Fix panics with 1xx responses in WPT tests.

Signed-off-by: Josh Matthews <josh@joshmatthews.net>

* net: Use reported response length when calculating available ranges.

Signed-off-by: Josh Matthews <josh@joshmatthews.net>
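
  The range fix above keys the usable byte range off the length the server actually reported, via headers::Range::satisfiable_ranges. A minimal sketch of that API with hypothetical numbers (not Servo's cache code):

    use std::ops::Bound;
    use headers::Range;

    fn main() {
        // Hypothetical values: a 1024-byte cached body and a request for
        // its first 500 bytes.
        let body_len: u64 = 1024;
        let range = Range::bytes(0..500).expect("valid byte range");

        // satisfiable_ranges() only yields ranges that fit within the
        // reported length, so an unsatisfiable request produces no entries.
        for bounds in range.satisfiable_ranges(body_len) {
            match bounds {
                (Bound::Included(start), Bound::Included(end)) => {
                    println!("serve bytes {start}..={end} of the cached body");
                },
                (Bound::Included(start), Bound::Unbounded) => {
                    println!("serve bytes {start}.. of the cached body");
                },
                _ => {},
            }
        }
    }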

* net: Remove unreachable match arm.

Signed-off-by: Josh Matthews <josh@joshmatthews.net>

* net: Clean up commented fragments in blob and file handlers.

Signed-off-by: Josh Matthews <josh@joshmatthews.net>

* net: Remove unreachable match arm.

Signed-off-by: Josh Matthews <josh@joshmatthews.net>

* net: Fix clippy warning.

Signed-off-by: Josh Matthews <josh@joshmatthews.net>

* net: Cleanup.

Signed-off-by: Josh Matthews <josh@joshmatthews.net>

* net: Fix up unit tests for dependency upgrades.

Signed-off-by: Josh Matthews <josh@joshmatthews.net>

* Update aws-lc-sys to fix Windows builds.

Signed-off-by: Josh Matthews <josh@joshmatthews.net>

* net: Use ring instead of aws-lc-sys.

Signed-off-by: Josh Matthews <josh@joshmatthews.net>

* embedding: Require embedder to initialize a rustls CryptoProvider.

Signed-off-by: Josh Matthews <josh@joshmatthews.net>
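
  With rustls 0.23 a process-wide CryptoProvider must be installed before any ClientConfig is built, so this responsibility moves to the embedder. A minimal sketch of what an embedder might do at startup, assuming it enables rustls's ring feature:

    fn main() {
        // Install the ring-backed provider once, before any TLS
        // configuration is created; install_default() only errs if a
        // provider was already installed.
        rustls::crypto::ring::default_provider()
            .install_default()
            .expect("failed to install the default rustls CryptoProvider");
    }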

* Disable aws-lc-rs pending OhOS build fixes.

Signed-off-by: Josh Matthews <josh@joshmatthews.net>

---------

Signed-off-by: Josh Matthews <josh@joshmatthews.net>
Author: Josh Matthews (committed 2025-01-08 00:47:58 -05:00 via GitHub)
Parent: 270df6e263
Commit: 76e0a1872b
25 changed files with 1342 additions and 1050 deletions

Cargo.lock (generated)

@ -284,38 +284,17 @@ dependencies = [
"syn", "syn",
] ]
[[package]]
name = "async-stream"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476"
dependencies = [
"async-stream-impl",
"futures-core",
"pin-project-lite",
]
[[package]]
name = "async-stream-impl"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]] [[package]]
name = "async-tungstenite" name = "async-tungstenite"
version = "0.23.0" version = "0.28.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1e9efbe14612da0a19fb983059a0b621e9cf6225d7018ecab4f9988215540dc" checksum = "90e661b6cb0a6eb34d02c520b052daa3aa9ac0cc02495c9d066bbce13ead132b"
dependencies = [ dependencies = [
"futures-io", "futures-io",
"futures-util", "futures-util",
"log", "log",
"pin-project-lite", "pin-project-lite",
"rustls-pki-types",
"tokio", "tokio",
"tokio-rustls", "tokio-rustls",
"tungstenite", "tungstenite",
@ -1047,7 +1026,7 @@ dependencies = [
"euclid", "euclid",
"fonts", "fonts",
"gaol", "gaol",
"http", "http 1.2.0",
"ipc-channel", "ipc-channel",
"keyboard-types", "keyboard-types",
"log", "log",
@ -1474,8 +1453,8 @@ dependencies = [
"crossbeam-channel", "crossbeam-channel",
"devtools_traits", "devtools_traits",
"embedder_traits", "embedder_traits",
"headers", "headers 0.4.0",
"http", "http 1.2.0",
"ipc-channel", "ipc-channel",
"log", "log",
"net_traits", "net_traits",
@ -1493,7 +1472,7 @@ version = "0.0.1"
dependencies = [ dependencies = [
"base", "base",
"bitflags 2.6.0", "bitflags 2.6.0",
"http", "http 1.2.0",
"ipc-channel", "ipc-channel",
"malloc_size_of_derive", "malloc_size_of_derive",
"net_traits", "net_traits",
@ -2887,7 +2866,26 @@ dependencies = [
"futures-core", "futures-core",
"futures-sink", "futures-sink",
"futures-util", "futures-util",
"http", "http 0.2.12",
"indexmap",
"slab",
"tokio",
"tokio-util",
"tracing",
]
[[package]]
name = "h2"
version = "0.4.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ccae279728d634d083c00f6099cb58f01cc99c145b84b8be2f6c74618d79922e"
dependencies = [
"atomic-waker",
"bytes",
"fnv",
"futures-core",
"futures-sink",
"http 1.2.0",
"indexmap", "indexmap",
"slab", "slab",
"tokio", "tokio",
@ -2937,8 +2935,23 @@ checksum = "06683b93020a07e3dbcf5f8c0f6d40080d725bea7936fc01ad345c01b97dc270"
dependencies = [ dependencies = [
"base64 0.21.7", "base64 0.21.7",
"bytes", "bytes",
"headers-core", "headers-core 0.2.0",
"http", "http 0.2.12",
"httpdate",
"mime",
"sha1",
]
[[package]]
name = "headers"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "322106e6bd0cba2d5ead589ddb8150a13d7c4217cf80d7c4f682ca994ccc6aa9"
dependencies = [
"base64 0.21.7",
"bytes",
"headers-core 0.3.0",
"http 1.2.0",
"httpdate", "httpdate",
"mime", "mime",
"sha1", "sha1",
@ -2950,7 +2963,16 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e7f66481bfee273957b1f20485a4ff3362987f85b2c236580d81b4eb7a326429" checksum = "e7f66481bfee273957b1f20485a4ff3362987f85b2c236580d81b4eb7a326429"
dependencies = [ dependencies = [
"http", "http 0.2.12",
]
[[package]]
name = "headers-core"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "54b4a22553d4242c49fddb9ba998a99962b5cc6f22cb5a3482bec22522403ce4"
dependencies = [
"http 1.2.0",
] ]
[[package]] [[package]]
@ -3043,6 +3065,17 @@ dependencies = [
"itoa", "itoa",
] ]
[[package]]
name = "http"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f16ca2af56261c99fba8bac40a10251ce8188205a4c448fbb745a2e4daa76fea"
dependencies = [
"bytes",
"fnv",
"itoa",
]
[[package]] [[package]]
name = "http-body" name = "http-body"
version = "0.4.6" version = "0.4.6"
@ -3050,7 +3083,30 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2"
dependencies = [ dependencies = [
"bytes", "bytes",
"http", "http 0.2.12",
"pin-project-lite",
]
[[package]]
name = "http-body"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184"
dependencies = [
"bytes",
"http 1.2.0",
]
[[package]]
name = "http-body-util"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f"
dependencies = [
"bytes",
"futures-util",
"http 1.2.0",
"http-body 1.0.1",
"pin-project-lite", "pin-project-lite",
] ]
@ -3082,9 +3138,9 @@ dependencies = [
"futures-channel", "futures-channel",
"futures-core", "futures-core",
"futures-util", "futures-util",
"h2", "h2 0.3.26",
"http", "http 0.2.12",
"http-body", "http-body 0.4.6",
"httparse", "httparse",
"httpdate", "httpdate",
"itoa", "itoa",
@ -3097,29 +3153,72 @@ dependencies = [
] ]
[[package]] [[package]]
name = "hyper-rustls" name = "hyper"
version = "0.24.2" version = "1.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" checksum = "256fb8d4bd6413123cc9d91832d78325c48ff41677595be797d90f42969beae0"
dependencies = [
"bytes",
"futures-channel",
"futures-util",
"h2 0.4.7",
"http 1.2.0",
"http-body 1.0.1",
"httparse",
"httpdate",
"itoa",
"pin-project-lite",
"smallvec",
"tokio",
"want",
]
[[package]]
name = "hyper-rustls"
version = "0.27.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08afdbb5c31130e3034af566421053ab03787c640246a446327f550d11bcb333"
dependencies = [ dependencies = [
"futures-util", "futures-util",
"http", "http 1.2.0",
"hyper", "hyper 1.5.2",
"hyper-util",
"log", "log",
"rustls", "rustls",
"rustls-pki-types",
"tokio", "tokio",
"tokio-rustls", "tokio-rustls",
"tower-service",
"webpki-roots", "webpki-roots",
] ]
[[package]]
name = "hyper-util"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df2dcfbe0677734ab2f3ffa7fa7bfd4706bfdc1ef393f2ee30184aed67e631b4"
dependencies = [
"bytes",
"futures-channel",
"futures-util",
"http 1.2.0",
"http-body 1.0.1",
"hyper 1.5.2",
"pin-project-lite",
"socket2",
"tokio",
"tower-service",
"tracing",
]
[[package]] [[package]]
name = "hyper_serde" name = "hyper_serde"
version = "0.13.2" version = "0.13.2"
dependencies = [ dependencies = [
"cookie 0.18.1", "cookie 0.18.1",
"headers", "headers 0.4.0",
"http", "http 1.2.0",
"hyper", "hyper 1.5.2",
"mime", "mime",
"serde", "serde",
"serde_bytes", "serde_bytes",
@ -4607,10 +4706,12 @@ dependencies = [
"futures-core", "futures-core",
"futures-util", "futures-util",
"generic-array", "generic-array",
"headers", "headers 0.4.0",
"http", "http 1.2.0",
"hyper", "http-body-util",
"hyper 1.5.2",
"hyper-rustls", "hyper-rustls",
"hyper-util",
"hyper_serde", "hyper_serde",
"imsz", "imsz",
"ipc-channel", "ipc-channel",
@ -4624,6 +4725,7 @@ dependencies = [
"rayon", "rayon",
"rustls", "rustls",
"rustls-pemfile", "rustls-pemfile",
"rustls-pki-types",
"serde", "serde",
"serde_json", "serde_json",
"servo_allocator", "servo_allocator",
@ -4636,8 +4738,8 @@ dependencies = [
"tokio", "tokio",
"tokio-rustls", "tokio-rustls",
"tokio-stream", "tokio-stream",
"tokio-test",
"tokio-util", "tokio-util",
"tower-service",
"tungstenite", "tungstenite",
"url", "url",
"uuid", "uuid",
@ -4655,9 +4757,10 @@ dependencies = [
"cookie 0.18.1", "cookie 0.18.1",
"crossbeam-channel", "crossbeam-channel",
"embedder_traits", "embedder_traits",
"headers", "headers 0.4.0",
"http", "http 1.2.0",
"hyper", "hyper 1.5.2",
"hyper-util",
"hyper_serde", "hyper_serde",
"ipc-channel", "ipc-channel",
"log", "log",
@ -4666,7 +4769,7 @@ dependencies = [
"num-traits", "num-traits",
"percent-encoding", "percent-encoding",
"pixels", "pixels",
"rustls", "rustls-pki-types",
"serde", "serde",
"servo_arc", "servo_arc",
"servo_malloc_size_of", "servo_malloc_size_of",
@ -5893,32 +5996,42 @@ dependencies = [
[[package]] [[package]]
name = "rustls" name = "rustls"
version = "0.21.12" version = "0.23.19"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e" checksum = "934b404430bb06b3fae2cba809eb45a1ab1aecd64491213d7c3301b88393f8d1"
dependencies = [ dependencies = [
"log", "log",
"once_cell",
"ring", "ring",
"rustls-pki-types",
"rustls-webpki", "rustls-webpki",
"sct", "subtle",
"zeroize",
] ]
[[package]] [[package]]
name = "rustls-pemfile" name = "rustls-pemfile"
version = "1.0.4" version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" checksum = "dce314e5fee3f39953d46bb63bb8a46d40c2f8fb7cc5a3b6cab2bde9721d6e50"
dependencies = [ dependencies = [
"base64 0.21.7", "rustls-pki-types",
] ]
[[package]] [[package]]
name = "rustls-webpki" name = "rustls-pki-types"
version = "0.101.7" version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" checksum = "16f1201b3c9a7ee8039bcadc17b7e605e2945b27eee7631788c1bd2b0643674b"
[[package]]
name = "rustls-webpki"
version = "0.102.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "64ca1bc8749bd4cf37b5ce386cc146580777b4e8572c7b97baf22c83f444bee9"
dependencies = [ dependencies = [
"ring", "ring",
"rustls-pki-types",
"untrusted", "untrusted",
] ]
@ -5996,9 +6109,9 @@ dependencies = [
"fonts_traits", "fonts_traits",
"fxhash", "fxhash",
"glow", "glow",
"headers", "headers 0.4.0",
"html5ever", "html5ever",
"http", "http 1.2.0",
"hyper_serde", "hyper_serde",
"image", "image",
"indexmap", "indexmap",
@ -6125,7 +6238,7 @@ dependencies = [
"devtools_traits", "devtools_traits",
"embedder_traits", "embedder_traits",
"euclid", "euclid",
"http", "http 1.2.0",
"hyper_serde", "hyper_serde",
"ipc-channel", "ipc-channel",
"keyboard-types", "keyboard-types",
@ -6149,16 +6262,6 @@ dependencies = [
"webxr-api", "webxr-api",
] ]
[[package]]
name = "sct"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414"
dependencies = [
"ring",
"untrusted",
]
[[package]] [[package]]
name = "sctk-adwaita" name = "sctk-adwaita"
version = "0.10.1" version = "0.10.1"
@ -6528,7 +6631,7 @@ dependencies = [
"crossbeam-channel", "crossbeam-channel",
"dom", "dom",
"euclid", "euclid",
"http", "http 1.2.0",
"indexmap", "indexmap",
"keyboard-types", "keyboard-types",
"malloc_size_of", "malloc_size_of",
@ -6593,10 +6696,10 @@ dependencies = [
"gilrs", "gilrs",
"gleam", "gleam",
"glow", "glow",
"headers", "headers 0.4.0",
"hilog", "hilog",
"hitrace", "hitrace",
"http", "http 1.2.0",
"image", "image",
"ipc-channel", "ipc-channel",
"jni", "jni",
@ -6616,6 +6719,7 @@ dependencies = [
"ohos-sys", "ohos-sys",
"ohos-vsync", "ohos-vsync",
"raw-window-handle", "raw-window-handle",
"rustls",
"serde_json", "serde_json",
"servo_allocator", "servo_allocator",
"shellwords", "shellwords",
@ -7445,9 +7549,9 @@ dependencies = [
[[package]] [[package]]
name = "tokio-rustls" name = "tokio-rustls"
version = "0.24.1" version = "0.26.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" checksum = "5f6d0975eaace0cf0fcadee4e4aaa5da15b5c079146f2cffb67c113be122bf37"
dependencies = [ dependencies = [
"rustls", "rustls",
"tokio", "tokio",
@ -7464,19 +7568,6 @@ dependencies = [
"tokio", "tokio",
] ]
[[package]]
name = "tokio-test"
version = "0.4.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2468baabc3311435b55dd935f702f42cd1b8abb7e754fb7dfb16bd36aa88f9f7"
dependencies = [
"async-stream",
"bytes",
"futures-core",
"tokio",
"tokio-stream",
]
[[package]] [[package]]
name = "tokio-util" name = "tokio-util"
version = "0.7.13" version = "0.7.13"
@ -7652,21 +7743,21 @@ checksum = "d2df906b07856748fa3f6e0ad0cbaa047052d4a7dd609e231c4f72cee8c36f31"
[[package]] [[package]]
name = "tungstenite" name = "tungstenite"
version = "0.20.1" version = "0.24.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e3dac10fd62eaf6617d3a904ae222845979aec67c615d1c842b4002c7666fb9" checksum = "18e5b8366ee7a95b16d32197d0b2604b43a0be89dc5fac9f8e96ccafbaedda8a"
dependencies = [ dependencies = [
"byteorder", "byteorder",
"bytes", "bytes",
"data-encoding", "data-encoding",
"http", "http 1.2.0",
"httparse", "httparse",
"log", "log",
"rand", "rand",
"rustls", "rustls",
"rustls-pki-types",
"sha1", "sha1",
"thiserror 1.0.69", "thiserror 1.0.69",
"url",
"utf-8", "utf-8",
] ]
@ -7917,9 +8008,9 @@ dependencies = [
"bytes", "bytes",
"futures-channel", "futures-channel",
"futures-util", "futures-util",
"headers", "headers 0.3.9",
"http", "http 0.2.12",
"hyper", "hyper 0.14.30",
"log", "log",
"mime", "mime",
"mime_guess", "mime_guess",
@ -8165,7 +8256,7 @@ dependencies = [
"base64 0.21.7", "base64 0.21.7",
"bytes", "bytes",
"cookie 0.16.2", "cookie 0.16.2",
"http", "http 0.2.12",
"icu_segmenter", "icu_segmenter",
"log", "log",
"serde", "serde",
@ -8189,7 +8280,7 @@ dependencies = [
"cookie 0.18.1", "cookie 0.18.1",
"crossbeam-channel", "crossbeam-channel",
"euclid", "euclid",
"http", "http 0.2.12",
"image", "image",
"ipc-channel", "ipc-channel",
"keyboard-types", "keyboard-types",
@ -8227,9 +8318,12 @@ dependencies = [
[[package]] [[package]]
name = "webpki-roots" name = "webpki-roots"
version = "0.25.4" version = "0.26.7"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1" checksum = "5d642ff16b7e79272ae451b7322067cdc17cadf68c23264be9d94a32319efe7e"
dependencies = [
"rustls-pki-types",
]
[[package]] [[package]]
name = "webrender" name = "webrender"
@ -9056,6 +9150,12 @@ dependencies = [
"synstructure", "synstructure",
] ]
[[package]]
name = "zeroize"
version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde"
[[package]] [[package]]
name = "zerotrie" name = "zerotrie"
version = "0.1.3" version = "0.1.3"


@ -22,7 +22,7 @@ aes-kw = { version = "0.2.1", features = ["alloc"] }
aes-gcm = "0.10.3" aes-gcm = "0.10.3"
app_units = "0.7" app_units = "0.7"
arrayvec = "0.7" arrayvec = "0.7"
async-tungstenite = { version = "0.23", features = ["tokio-rustls-webpki-roots"] } async-tungstenite = { version = "0.28", features = ["tokio-rustls-webpki-roots"] }
atomic_refcell = "0.1.13" atomic_refcell = "0.1.13"
background_hang_monitor_api = { path = "components/shared/background_hang_monitor" } background_hang_monitor_api = { path = "components/shared/background_hang_monitor" }
backtrace = "0.3" backtrace = "0.3"
@ -64,13 +64,15 @@ gstreamer-gl = "0.23"
gstreamer-gl-sys = "0.23" gstreamer-gl-sys = "0.23"
gstreamer-sys = "0.23" gstreamer-sys = "0.23"
gstreamer-video = "0.23" gstreamer-video = "0.23"
headers = "0.3" headers = "0.4"
hitrace = "0.1.4" hitrace = "0.1.4"
html5ever = "0.29" html5ever = "0.29"
http = "0.2" http = "1.0"
hyper = "0.14" http-body-util = "0.1"
hyper-rustls = { version = "0.24", default-features = false, features = ["acceptor", "http1", "http2", "logging", "tls12", "webpki-tokio"] } hyper = "1.0"
hyper-rustls = { version = "0.27", default-features = false, features = ["http1", "http2", "logging", "tls12", "webpki-tokio"] }
hyper_serde = { path = "components/hyper_serde" } hyper_serde = { path = "components/hyper_serde" }
hyper-util = "0.1"
icu_segmenter = "1.5.0" icu_segmenter = "1.5.0"
image = "0.24" image = "0.24"
imsz = "0.2" imsz = "0.2"
@ -103,8 +105,9 @@ rand_isaac = "0.3"
rayon = "1" rayon = "1"
regex = "1.11" regex = "1.11"
ring = "0.17.8" ring = "0.17.8"
rustls = { version = "0.21.12", features = ["dangerous_configuration"] } rustls = { version = "0.23", default-features = false, features = ["logging", "std", "tls12"] }
rustls-pemfile = "1.0.4" rustls-pemfile = "2.0"
rustls-pki-types = "1.0"
script_layout_interface = { path = "components/shared/script_layout" } script_layout_interface = { path = "components/shared/script_layout" }
script_traits = { path = "components/shared/script" } script_traits = { path = "components/shared/script" }
selectors = { git = "https://github.com/servo/stylo", branch = "2024-12-04" } selectors = { git = "https://github.com/servo/stylo", branch = "2024-12-04" }
@ -136,11 +139,12 @@ tikv-jemallocator = "0.6.0"
time_03 = { package = "time", version = "0.3", features = ["large-dates", "local-offset", "serde"] } time_03 = { package = "time", version = "0.3", features = ["large-dates", "local-offset", "serde"] }
to_shmem = { git = "https://github.com/servo/stylo", branch = "2024-12-04" } to_shmem = { git = "https://github.com/servo/stylo", branch = "2024-12-04" }
tokio = "1" tokio = "1"
tokio-rustls = "0.24" tokio-rustls = { version = "0.26", default-features = false, features = ["logging"] }
tower-service = "0.3"
tracing = "0.1.41" tracing = "0.1.41"
tracing-perfetto = "0.1.5" tracing-perfetto = "0.1.5"
tracing-subscriber = "0.3.19" tracing-subscriber = "0.3.19"
tungstenite = "0.20" tungstenite = "0.24"
uluru = "3.0" uluru = "3.0"
unicode-bidi = "0.3.18" unicode-bidi = "0.3.18"
unicode-properties = { version = "0.1.3", features = ["emoji"] } unicode-properties = { version = "0.1.3", features = ["emoji"] }
@ -149,7 +153,7 @@ unicode-segmentation = "1.12.0"
url = "2.5" url = "2.5"
uuid = { version = "1.11.0", features = ["v4"] } uuid = { version = "1.11.0", features = ["v4"] }
webdriver = "0.51.0" webdriver = "0.51.0"
webpki-roots = "0.25" webpki-roots = "0.26"
webrender = { git = "https://github.com/servo/webrender", branch = "0.65", features = ["capture"] } webrender = { git = "https://github.com/servo/webrender", branch = "0.65", features = ["capture"] }
webrender_api = { git = "https://github.com/servo/webrender", branch = "0.65" } webrender_api = { git = "https://github.com/servo/webrender", branch = "0.65" }
webrender_traits = { path = "components/shared/webrender" } webrender_traits = { path = "components/shared/webrender" }


@ -33,9 +33,11 @@ futures-util = { version = "0.3.30", default-features = false }
generic-array = "0.14" generic-array = "0.14"
headers = { workspace = true } headers = { workspace = true }
http = { workspace = true } http = { workspace = true }
hyper = { workspace = true, features = ["client", "http1", "http2", "tcp", "stream"] } http-body-util = { workspace = true }
hyper = { workspace = true, features = ["client", "http1", "http2"] }
hyper-rustls = { workspace = true } hyper-rustls = { workspace = true }
hyper_serde = { workspace = true } hyper_serde = { workspace = true }
hyper-util = { workspace = true, features = ["client", "client-legacy", "tokio"] }
imsz = { workspace = true } imsz = { workspace = true }
ipc-channel = { workspace = true } ipc-channel = { workspace = true }
log = { workspace = true } log = { workspace = true }
@ -49,6 +51,7 @@ profile_traits = { workspace = true }
rayon = { workspace = true } rayon = { workspace = true }
rustls = { workspace = true } rustls = { workspace = true }
rustls-pemfile = { workspace = true } rustls-pemfile = { workspace = true }
rustls-pki-types = { workspace = true }
serde = { workspace = true } serde = { workspace = true }
serde_json = { workspace = true } serde_json = { workspace = true }
servo_allocator = { path = "../allocator" } servo_allocator = { path = "../allocator" }
@ -62,6 +65,7 @@ tokio = { workspace = true, features = ["sync", "macros", "rt-multi-thread"] }
tokio-util = { version = "0.7.12", default-features = false, features = ["codec", "io"] } tokio-util = { version = "0.7.12", default-features = false, features = ["codec", "io"] }
tokio-rustls = { workspace = true } tokio-rustls = { workspace = true }
tokio-stream = "0.1" tokio-stream = "0.1"
tower-service = { workspace = true }
tungstenite = { workspace = true } tungstenite = { workspace = true }
url = { workspace = true } url = { workspace = true }
uuid = { workspace = true } uuid = { workspace = true }
@ -72,9 +76,9 @@ webpki-roots = { workspace = true }
[dev-dependencies] [dev-dependencies]
flate2 = "1" flate2 = "1"
futures = { version = "0.3", features = ["compat"] } futures = { version = "0.3", features = ["compat"] }
tokio-test = "0.4"
tokio-stream = { version = "0.1", features = ["net"] }
hyper = { workspace = true, features = ["full"] } hyper = { workspace = true, features = ["full"] }
hyper-util = { workspace = true, features = ["server-graceful"] }
rustls = { workspace = true, features = ["ring"] }
[[test]] [[test]]
name = "main" name = "main"


@ -9,14 +9,17 @@ use std::sync::{Arc, Mutex};
use futures::task::{Context, Poll}; use futures::task::{Context, Poll};
use futures::Future; use futures::Future;
use http::uri::{Authority, Uri as Destination}; use http::uri::{Authority, Uri as Destination};
use hyper::client::HttpConnector as HyperHttpConnector; use http_body_util::combinators::BoxBody;
use hyper::body::Bytes;
use hyper::rt::Executor; use hyper::rt::Executor;
use hyper::service::Service;
use hyper::{Body, Client};
use hyper_rustls::HttpsConnector as HyperRustlsHttpsConnector; use hyper_rustls::HttpsConnector as HyperRustlsHttpsConnector;
use hyper_util::client::legacy::connect::HttpConnector as HyperHttpConnector;
use hyper_util::client::legacy::Client;
use log::warn; use log::warn;
use rustls::client::WebPkiVerifier; use rustls::client::WebPkiServerVerifier;
use rustls::{Certificate, ClientConfig, OwnedTrustAnchor, RootCertStore, ServerName}; use rustls::{ClientConfig, RootCertStore};
use rustls_pki_types::{CertificateDer, ServerName, UnixTime};
use tower_service::Service;
use crate::async_runtime::HANDLE; use crate::async_runtime::HANDLE;
use crate::hosts::replace_host; use crate::hosts::replace_host;
@ -80,10 +83,10 @@ pub type TlsConfig = ClientConfig;
struct CertificateErrorOverrideManagerInternal { struct CertificateErrorOverrideManagerInternal {
/// A mapping of certificates and their hosts, which have seen certificate errors. /// A mapping of certificates and their hosts, which have seen certificate errors.
/// This is used to later create an override in this [CertificateErrorOverrideManager]. /// This is used to later create an override in this [CertificateErrorOverrideManager].
certificates_failing_to_verify: HashMap<ServerName, Certificate>, certificates_failing_to_verify: HashMap<ServerName<'static>, CertificateDer<'static>>,
/// A list of certificates that should be accepted despite encountering verification /// A list of certificates that should be accepted despite encountering verification
/// errors. /// errors.
overrides: Vec<Certificate>, overrides: Vec<CertificateDer<'static>>,
} }
/// This data structure is used to track certificate verification errors and overrides. /// This data structure is used to track certificate verification errors and overrides.
@ -100,7 +103,7 @@ impl CertificateErrorOverrideManager {
/// Add a certificate to this manager's list of certificates for which to ignore /// Add a certificate to this manager's list of certificates for which to ignore
/// validation errors. /// validation errors.
pub fn add_override(&self, certificate: &Certificate) { pub fn add_override(&self, certificate: &CertificateDer<'static>) {
self.0.lock().unwrap().overrides.push(certificate.clone()); self.0.lock().unwrap().overrides.push(certificate.clone());
} }
@ -110,9 +113,9 @@ impl CertificateErrorOverrideManager {
pub(crate) fn remove_certificate_failing_verification( pub(crate) fn remove_certificate_failing_verification(
&self, &self,
host: &str, host: &str,
) -> Option<Certificate> { ) -> Option<CertificateDer<'static>> {
let server_name = match ServerName::try_from(host) { let server_name = match ServerName::try_from(host) {
Ok(name) => name, Ok(name) => name.to_owned(),
Err(error) => { Err(error) => {
warn!("Could not convert host string into RustTLS ServerName: {error:?}"); warn!("Could not convert host string into RustTLS ServerName: {error:?}");
return None; return None;
@ -149,11 +152,12 @@ pub fn create_tls_config(
override_manager, override_manager,
); );
rustls::ClientConfig::builder() rustls::ClientConfig::builder()
.with_safe_defaults() .dangerous()
.with_custom_certificate_verifier(Arc::new(verifier)) .with_custom_certificate_verifier(Arc::new(verifier))
.with_no_client_auth() .with_no_client_auth()
} }
#[derive(Clone)]
struct TokioExecutor {} struct TokioExecutor {}
impl<F> Executor<F> for TokioExecutor impl<F> Executor<F> for TokioExecutor
@ -165,8 +169,9 @@ where
} }
} }
#[derive(Debug)]
struct CertificateVerificationOverrideVerifier { struct CertificateVerificationOverrideVerifier {
webpki_verifier: WebPkiVerifier, webpki_verifier: Arc<WebPkiServerVerifier>,
ignore_certificate_errors: bool, ignore_certificate_errors: bool,
override_manager: CertificateErrorOverrideManager, override_manager: CertificateErrorOverrideManager,
} }
@ -178,18 +183,8 @@ impl CertificateVerificationOverrideVerifier {
override_manager: CertificateErrorOverrideManager, override_manager: CertificateErrorOverrideManager,
) -> Self { ) -> Self {
let root_cert_store = match ca_certficates { let root_cert_store = match ca_certficates {
CACertificates::Default => { CACertificates::Default => rustls::RootCertStore {
let mut root_cert_store = rustls::RootCertStore::empty(); roots: webpki_roots::TLS_SERVER_ROOTS.to_vec(),
root_cert_store.add_trust_anchors(webpki_roots::TLS_SERVER_ROOTS.iter().map(
|trust_anchor| {
OwnedTrustAnchor::from_subject_spki_name_constraints(
trust_anchor.subject,
trust_anchor.spki,
trust_anchor.name_constraints,
)
},
));
root_cert_store
}, },
CACertificates::Override(root_cert_store) => root_cert_store, CACertificates::Override(root_cert_store) => root_cert_store,
}; };
@ -197,28 +192,52 @@ impl CertificateVerificationOverrideVerifier {
Self { Self {
// See https://github.com/rustls/rustls/blame/v/0.21.6/rustls/src/client/builder.rs#L141 // See https://github.com/rustls/rustls/blame/v/0.21.6/rustls/src/client/builder.rs#L141
// This is the default verifier for Rustls that we are wrapping. // This is the default verifier for Rustls that we are wrapping.
webpki_verifier: WebPkiVerifier::new(root_cert_store, None), webpki_verifier: WebPkiServerVerifier::builder(root_cert_store.into())
.build()
.unwrap(),
ignore_certificate_errors, ignore_certificate_errors,
override_manager, override_manager,
} }
} }
} }
impl rustls::client::ServerCertVerifier for CertificateVerificationOverrideVerifier { impl rustls::client::danger::ServerCertVerifier for CertificateVerificationOverrideVerifier {
fn verify_tls12_signature(
&self,
message: &[u8],
cert: &CertificateDer<'_>,
dss: &rustls::DigitallySignedStruct,
) -> Result<rustls::client::danger::HandshakeSignatureValid, rustls::Error> {
self.webpki_verifier
.verify_tls12_signature(message, cert, dss)
}
fn verify_tls13_signature(
&self,
message: &[u8],
cert: &CertificateDer<'_>,
dss: &rustls::DigitallySignedStruct,
) -> Result<rustls::client::danger::HandshakeSignatureValid, rustls::Error> {
self.webpki_verifier
.verify_tls13_signature(message, cert, dss)
}
fn supported_verify_schemes(&self) -> Vec<rustls::SignatureScheme> {
self.webpki_verifier.supported_verify_schemes()
}
fn verify_server_cert( fn verify_server_cert(
&self, &self,
end_entity: &Certificate, end_entity: &CertificateDer<'_>,
intermediates: &[Certificate], intermediates: &[CertificateDer<'_>],
server_name: &ServerName, server_name: &ServerName<'_>,
scts: &mut dyn Iterator<Item = &[u8]>,
ocsp_response: &[u8], ocsp_response: &[u8],
now: std::time::SystemTime, now: UnixTime,
) -> Result<rustls::client::ServerCertVerified, rustls::Error> { ) -> Result<rustls::client::danger::ServerCertVerified, rustls::Error> {
let error = match self.webpki_verifier.verify_server_cert( let error = match self.webpki_verifier.verify_server_cert(
end_entity, end_entity,
intermediates, intermediates,
server_name, server_name,
scts,
ocsp_response, ocsp_response,
now, now,
) { ) {
@ -228,13 +247,13 @@ impl rustls::client::ServerCertVerifier for CertificateVerificationOverrideVerif
if self.ignore_certificate_errors { if self.ignore_certificate_errors {
warn!("Ignoring certficate error: {error:?}"); warn!("Ignoring certficate error: {error:?}");
return Ok(rustls::client::ServerCertVerified::assertion()); return Ok(rustls::client::danger::ServerCertVerified::assertion());
} }
// If there's an override for this certificate, just accept it. // If there's an override for this certificate, just accept it.
for cert_with_exception in &*self.override_manager.0.lock().unwrap().overrides { for cert_with_exception in &*self.override_manager.0.lock().unwrap().overrides {
if *end_entity == *cert_with_exception { if *end_entity == *cert_with_exception {
return Ok(rustls::client::ServerCertVerified::assertion()); return Ok(rustls::client::danger::ServerCertVerified::assertion());
} }
} }
self.override_manager self.override_manager
@ -242,12 +261,14 @@ impl rustls::client::ServerCertVerifier for CertificateVerificationOverrideVerif
.lock() .lock()
.unwrap() .unwrap()
.certificates_failing_to_verify .certificates_failing_to_verify
.insert(server_name.clone(), end_entity.clone()); .insert(server_name.to_owned(), end_entity.clone().into_owned());
Err(error) Err(error)
} }
} }
pub fn create_http_client(tls_config: TlsConfig) -> Client<Connector, Body> { pub type BoxedBody = BoxBody<Bytes, hyper::Error>;
pub fn create_http_client(tls_config: TlsConfig) -> Client<Connector, BoxedBody> {
let connector = hyper_rustls::HttpsConnectorBuilder::new() let connector = hyper_rustls::HttpsConnectorBuilder::new()
.with_tls_config(tls_config) .with_tls_config(tls_config)
.https_or_http() .https_or_http()
@ -255,8 +276,7 @@ pub fn create_http_client(tls_config: TlsConfig) -> Client<Connector, Body> {
.enable_http2() .enable_http2()
.wrap_connector(ServoHttpConnector::new()); .wrap_connector(ServoHttpConnector::new());
Client::builder() Client::builder(TokioExecutor {})
.http1_title_case_headers(true) .http1_title_case_headers(true)
.executor(TokioExecutor {})
.build(connector) .build(connector)
} }


@ -28,12 +28,16 @@ use futures::task::{Context, Poll};
use futures::{Future, Stream}; use futures::{Future, Stream};
use futures_util::StreamExt; use futures_util::StreamExt;
use headers::{ContentLength, HeaderMapExt}; use headers::{ContentLength, HeaderMapExt};
use http_body_util::BodyExt;
use hyper::body::Body;
use hyper::header::{HeaderValue, CONTENT_ENCODING, TRANSFER_ENCODING}; use hyper::header::{HeaderValue, CONTENT_ENCODING, TRANSFER_ENCODING};
use hyper::{Body, Response}; use hyper::Response;
use servo_config::pref; use servo_config::pref;
use tokio_util::codec::{BytesCodec, FramedRead}; use tokio_util::codec::{BytesCodec, FramedRead};
use tokio_util::io::StreamReader; use tokio_util::io::StreamReader;
use crate::connector::BoxedBody;
pub const DECODER_BUFFER_SIZE: usize = 8192; pub const DECODER_BUFFER_SIZE: usize = 8192;
/// A response decompressor over a non-blocking stream of bytes. /// A response decompressor over a non-blocking stream of bytes.
@ -81,7 +85,7 @@ impl Decoder {
/// This decoder will emit the underlying bytes as-is. /// This decoder will emit the underlying bytes as-is.
#[inline] #[inline]
fn plain_text( fn plain_text(
body: Body, body: BoxedBody,
is_secure_scheme: bool, is_secure_scheme: bool,
content_length: Option<ContentLength>, content_length: Option<ContentLength>,
) -> Decoder { ) -> Decoder {
@ -95,7 +99,7 @@ impl Decoder {
/// This decoder will buffer and decompress bytes that are encoded in the expected format. /// This decoder will buffer and decompress bytes that are encoded in the expected format.
#[inline] #[inline]
fn pending( fn pending(
body: Body, body: BoxedBody,
type_: DecoderType, type_: DecoderType,
is_secure_scheme: bool, is_secure_scheme: bool,
content_length: Option<ContentLength>, content_length: Option<ContentLength>,
@ -114,7 +118,7 @@ impl Decoder {
/// how to decode the content body of the response. /// how to decode the content body of the response.
/// ///
/// Uses the correct variant by inspecting the Content-Encoding header. /// Uses the correct variant by inspecting the Content-Encoding header.
pub fn detect(response: Response<Body>, is_secure_scheme: bool) -> Response<Decoder> { pub fn detect(response: Response<BoxedBody>, is_secure_scheme: bool) -> Response<Decoder> {
let values = response let values = response
.headers() .headers()
.get_all(CONTENT_ENCODING) .get_all(CONTENT_ENCODING)
@ -225,7 +229,7 @@ impl Future for Pending {
} }
struct BodyStream { struct BodyStream {
body: Body, body: BoxedBody,
is_secure_scheme: bool, is_secure_scheme: bool,
content_length: Option<ContentLength>, content_length: Option<ContentLength>,
total_read: u64, total_read: u64,
@ -234,14 +238,16 @@ struct BodyStream {
impl BodyStream { impl BodyStream {
fn empty() -> Self { fn empty() -> Self {
BodyStream { BodyStream {
body: Body::empty(), body: http_body_util::Empty::new()
.map_err(|_| unreachable!())
.boxed(),
is_secure_scheme: false, is_secure_scheme: false,
content_length: None, content_length: None,
total_read: 0, total_read: 0,
} }
} }
fn new(body: Body, is_secure_scheme: bool, content_length: Option<ContentLength>) -> Self { fn new(body: BoxedBody, is_secure_scheme: bool, content_length: Option<ContentLength>) -> Self {
BodyStream { BodyStream {
body, body,
is_secure_scheme, is_secure_scheme,
@ -255,8 +261,11 @@ impl Stream for BodyStream {
type Item = Result<Bytes, io::Error>; type Item = Result<Bytes, io::Error>;
fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Option<Self::Item>> { fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Option<Self::Item>> {
match futures_core::ready!(Pin::new(&mut self.body).poll_next(cx)) { match futures_core::ready!(Pin::new(&mut self.body).poll_frame(cx)) {
Some(Ok(bytes)) => { Some(Ok(bytes)) => {
let Ok(bytes) = bytes.into_data() else {
return Poll::Ready(None);
};
self.total_read += bytes.len() as u64; self.total_read += bytes.len() as u64;
Poll::Ready(Some(Ok(bytes))) Poll::Ready(Some(Ok(bytes)))
}, },


@ -31,7 +31,7 @@ use net_traits::{
FetchTaskTarget, NetworkError, ReferrerPolicy, ResourceAttribute, ResourceFetchTiming, FetchTaskTarget, NetworkError, ReferrerPolicy, ResourceAttribute, ResourceFetchTiming,
ResourceTimeValue, ResourceTimingType, ResourceTimeValue, ResourceTimingType,
}; };
use rustls::Certificate; use rustls_pki_types::CertificateDer;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use servo_arc::Arc as ServoArc; use servo_arc::Arc as ServoArc;
use servo_url::ServoUrl; use servo_url::ServoUrl;
@ -675,7 +675,7 @@ fn handle_allowcert_request(request: &mut Request, context: &FetchContext) -> io
context context
.state .state
.override_manager .override_manager
.add_override(&Certificate(cert_bytes)); .add_override(&CertificateDer::from_slice(&cert_bytes).into_owned());
Ok(()) Ok(())
} }


@ -11,7 +11,7 @@ use std::sync::atomic::{self, AtomicBool, AtomicUsize, Ordering};
use std::sync::{Arc, Mutex, RwLock, Weak}; use std::sync::{Arc, Mutex, RwLock, Weak};
use embedder_traits::{EmbedderMsg, EmbedderProxy, FilterPattern}; use embedder_traits::{EmbedderMsg, EmbedderProxy, FilterPattern};
use headers::{ContentLength, ContentType, HeaderMap, HeaderMapExt}; use headers::{ContentLength, ContentType, HeaderMap, HeaderMapExt, Range};
use http::header::{self, HeaderValue}; use http::header::{self, HeaderValue};
use ipc_channel::ipc::{self, IpcSender}; use ipc_channel::ipc::{self, IpcSender};
use log::warn; use log::warn;
@ -30,6 +30,7 @@ use url::Url;
use uuid::Uuid; use uuid::Uuid;
use crate::fetch::methods::{CancellationListener, Data, RangeRequestBounds}; use crate::fetch::methods::{CancellationListener, Data, RangeRequestBounds};
use crate::protocols::get_range_request_bounds;
use crate::resource_thread::CoreResourceThreadPool; use crate::resource_thread::CoreResourceThreadPool;
pub const FILE_CHUNK_SIZE: usize = 32768; //32 KB pub const FILE_CHUNK_SIZE: usize = 32768; //32 KB
@ -132,7 +133,7 @@ impl FileManager {
file_token: &FileTokenCheck, file_token: &FileTokenCheck,
origin: FileOrigin, origin: FileOrigin,
response: &mut Response, response: &mut Response,
range: RangeRequestBounds, range: Option<Range>,
) -> Result<(), BlobURLStoreError> { ) -> Result<(), BlobURLStoreError> {
self.fetch_blob_buf( self.fetch_blob_buf(
done_sender, done_sender,
@ -140,7 +141,7 @@ impl FileManager {
&id, &id,
file_token, file_token,
&origin, &origin,
range, BlobBounds::Unresolved(range),
response, response,
) )
} }
@ -285,13 +286,17 @@ impl FileManager {
id: &Uuid, id: &Uuid,
file_token: &FileTokenCheck, file_token: &FileTokenCheck,
origin_in: &FileOrigin, origin_in: &FileOrigin,
range: RangeRequestBounds, bounds: BlobBounds,
response: &mut Response, response: &mut Response,
) -> Result<(), BlobURLStoreError> { ) -> Result<(), BlobURLStoreError> {
let file_impl = self.store.get_impl(id, file_token, origin_in)?; let file_impl = self.store.get_impl(id, file_token, origin_in)?;
match file_impl { match file_impl {
FileImpl::Memory(buf) => { FileImpl::Memory(buf) => {
let range = range let bounds = match bounds {
BlobBounds::Unresolved(range) => get_range_request_bounds(range, buf.size),
BlobBounds::Resolved(bounds) => bounds,
};
let range = bounds
.get_final(Some(buf.size)) .get_final(Some(buf.size))
.map_err(|_| BlobURLStoreError::InvalidRange)?; .map_err(|_| BlobURLStoreError::InvalidRange)?;
@ -323,7 +328,11 @@ impl FileManager {
let file = File::open(&metadata.path) let file = File::open(&metadata.path)
.map_err(|e| BlobURLStoreError::External(e.to_string()))?; .map_err(|e| BlobURLStoreError::External(e.to_string()))?;
let range = range let bounds = match bounds {
BlobBounds::Unresolved(range) => get_range_request_bounds(range, metadata.size),
BlobBounds::Resolved(bounds) => bounds,
};
let range = bounds
.get_final(Some(metadata.size)) .get_final(Some(metadata.size))
.map_err(|_| BlobURLStoreError::InvalidRange)?; .map_err(|_| BlobURLStoreError::InvalidRange)?;
@ -362,15 +371,16 @@ impl FileManager {
FileImpl::Sliced(parent_id, inner_rel_pos) => { FileImpl::Sliced(parent_id, inner_rel_pos) => {
// Next time we don't need to check validity since // Next time we don't need to check validity since
// we have already done that for requesting URL if necessary. // we have already done that for requesting URL if necessary.
let bounds = RangeRequestBounds::Final(
RelativePos::full_range().slice_inner(&inner_rel_pos),
);
self.fetch_blob_buf( self.fetch_blob_buf(
done_sender, done_sender,
cancellation_listener, cancellation_listener,
&parent_id, &parent_id,
file_token, file_token,
origin_in, origin_in,
RangeRequestBounds::Final( BlobBounds::Resolved(bounds),
RelativePos::full_range().slice_inner(&inner_rel_pos),
),
response, response,
) )
}, },
@ -378,6 +388,11 @@ impl FileManager {
} }
} }
enum BlobBounds {
Unresolved(Option<Range>),
Resolved(RangeRequestBounds),
}
/// File manager's data store. It maintains a thread-safe mapping /// File manager's data store. It maintains a thread-safe mapping
/// from FileID to FileStoreEntry which might have different backend implementation. /// from FileID to FileStoreEntry which might have different backend implementation.
/// Access to the content is encapsulated as methods of this struct. /// Access to the content is encapsulated as methods of this struct.


@ -345,7 +345,7 @@ fn create_resource_with_bytes_from_resource(
fn handle_range_request( fn handle_range_request(
request: &Request, request: &Request,
candidates: &[&CachedResource], candidates: &[&CachedResource],
range_spec: Vec<(Bound<u64>, Bound<u64>)>, range_spec: &Range,
done_chan: &mut DoneChannel, done_chan: &mut DoneChannel,
) -> Option<CachedResponse> { ) -> Option<CachedResponse> {
let mut complete_cached_resources = candidates let mut complete_cached_resources = candidates
@ -354,10 +354,7 @@ fn handle_range_request(
let partial_cached_resources = candidates let partial_cached_resources = candidates
.iter() .iter()
.filter(|resource| resource.status == StatusCode::PARTIAL_CONTENT); .filter(|resource| resource.status == StatusCode::PARTIAL_CONTENT);
match ( if let Some(complete_resource) = complete_cached_resources.next() {
range_spec.first().unwrap(),
complete_cached_resources.next(),
) {
// TODO: take the full range spec into account. // TODO: take the full range spec into account.
// If we have a complete resource, take the request range from the body. // If we have a complete resource, take the request range from the body.
// When there isn't a complete resource available, we loop over cached partials, // When there isn't a complete resource available, we loop over cached partials,
@ -366,172 +363,145 @@ fn handle_range_request(
// see <https://tools.ietf.org/html/rfc7233#section-4.3>. // see <https://tools.ietf.org/html/rfc7233#section-4.3>.
// TODO: add support for complete and partial resources, // TODO: add support for complete and partial resources,
// whose body is in the ResponseBody::Receiving state. // whose body is in the ResponseBody::Receiving state.
(&(Bound::Included(beginning), Bound::Included(end)), Some(complete_resource)) => { let body_len = match *complete_resource.body.lock().unwrap() {
if let ResponseBody::Done(ref body) = *complete_resource.body.lock().unwrap() { ResponseBody::Done(ref body) => body.len(),
if end == u64::MAX { _ => 0,
// Prevent overflow on the addition below. };
return None; let bound = range_spec
} .satisfiable_ranges(body_len.try_into().unwrap())
let b = beginning as usize; .next()
let e = end as usize + 1; .unwrap();
let requested = body.get(b..e); match bound {
if let Some(bytes) = requested { (Bound::Included(beginning), Bound::Included(end)) => {
let new_resource = if let ResponseBody::Done(ref body) = *complete_resource.body.lock().unwrap() {
create_resource_with_bytes_from_resource(bytes, complete_resource); if end == u64::MAX {
let cached_headers = new_resource.metadata.headers.lock().unwrap(); // Prevent overflow on the addition below.
let cached_response = return None;
create_cached_response(request, &new_resource, &cached_headers, done_chan);
if let Some(cached_response) = cached_response {
return Some(cached_response);
} }
} let b = beginning as usize;
} let e = end as usize + 1;
}, let requested = body.get(b..e);
(&(Bound::Included(beginning), Bound::Included(end)), None) => {
for partial_resource in partial_cached_resources {
let headers = partial_resource.metadata.headers.lock().unwrap();
let content_range = headers.typed_get::<ContentRange>();
let (res_beginning, res_end) = match content_range {
Some(range) => {
if let Some(bytes_range) = range.bytes_range() {
bytes_range
} else {
continue;
}
},
_ => continue,
};
if res_beginning <= beginning && res_end >= end {
let resource_body = &*partial_resource.body.lock().unwrap();
let requested = match resource_body {
ResponseBody::Done(body) => {
let b = beginning as usize - res_beginning as usize;
let e = end as usize - res_beginning as usize + 1;
body.get(b..e)
},
_ => continue,
};
if let Some(bytes) = requested { if let Some(bytes) = requested {
let new_resource = let new_resource =
create_resource_with_bytes_from_resource(bytes, partial_resource); create_resource_with_bytes_from_resource(bytes, complete_resource);
let cached_response = let cached_headers = new_resource.metadata.headers.lock().unwrap();
create_cached_response(request, &new_resource, &headers, done_chan); let cached_response = create_cached_response(
request,
&new_resource,
&cached_headers,
done_chan,
);
if let Some(cached_response) = cached_response { if let Some(cached_response) = cached_response {
return Some(cached_response); return Some(cached_response);
} }
} }
} }
} },
}, (Bound::Included(beginning), Bound::Unbounded) => {
(&(Bound::Included(beginning), Bound::Unbounded), Some(complete_resource)) => { if let ResponseBody::Done(ref body) = *complete_resource.body.lock().unwrap() {
if let ResponseBody::Done(ref body) = *complete_resource.body.lock().unwrap() { let b = beginning as usize;
let b = beginning as usize; let requested = body.get(b..);
let requested = body.get(b..);
if let Some(bytes) = requested {
let new_resource =
create_resource_with_bytes_from_resource(bytes, complete_resource);
let cached_headers = new_resource.metadata.headers.lock().unwrap();
let cached_response =
create_cached_response(request, &new_resource, &cached_headers, done_chan);
if let Some(cached_response) = cached_response {
return Some(cached_response);
}
}
}
},
(&(Bound::Included(beginning), Bound::Unbounded), None) => {
for partial_resource in partial_cached_resources {
let headers = partial_resource.metadata.headers.lock().unwrap();
let content_range = headers.typed_get::<ContentRange>();
let (res_beginning, res_end, total) = if let Some(range) = content_range {
match (range.bytes_range(), range.bytes_len()) {
(Some(bytes_range), Some(total)) => (bytes_range.0, bytes_range.1, total),
_ => continue,
}
} else {
continue;
};
if total == 0 {
// Prevent overflow in the below operations from occuring.
continue;
};
if res_beginning < beginning && res_end == total - 1 {
let resource_body = &*partial_resource.body.lock().unwrap();
let requested = match resource_body {
ResponseBody::Done(body) => {
let from_byte = beginning as usize - res_beginning as usize;
body.get(from_byte..)
},
_ => continue,
};
if let Some(bytes) = requested { if let Some(bytes) = requested {
let new_resource = let new_resource =
create_resource_with_bytes_from_resource(bytes, partial_resource); create_resource_with_bytes_from_resource(bytes, complete_resource);
let cached_response = let cached_headers = new_resource.metadata.headers.lock().unwrap();
create_cached_response(request, &new_resource, &headers, done_chan); let cached_response = create_cached_response(
request,
&new_resource,
&cached_headers,
done_chan,
);
if let Some(cached_response) = cached_response { if let Some(cached_response) = cached_response {
return Some(cached_response); return Some(cached_response);
} }
} }
} }
} },
}, _ => return None,
(&(Bound::Unbounded, Bound::Included(offset)), Some(complete_resource)) => { }
if let ResponseBody::Done(ref body) = *complete_resource.body.lock().unwrap() { } else {
let from_byte = body.len() - offset as usize; for partial_resource in partial_cached_resources {
let requested = body.get(from_byte..); let headers = partial_resource.metadata.headers.lock().unwrap();
if let Some(bytes) = requested { let content_range = headers.typed_get::<ContentRange>();
let new_resource =
create_resource_with_bytes_from_resource(bytes, complete_resource); let Some(body_len) = content_range.as_ref().and_then(|range| range.bytes_len()) else {
let cached_headers = new_resource.metadata.headers.lock().unwrap(); continue;
let cached_response = };
create_cached_response(request, &new_resource, &cached_headers, done_chan); match range_spec.satisfiable_ranges(body_len - 1).next().unwrap() {
if let Some(cached_response) = cached_response { (Bound::Included(beginning), Bound::Included(end)) => {
return Some(cached_response); let (res_beginning, res_end) = match content_range {
} Some(range) => {
} if let Some(bytes_range) = range.bytes_range() {
} bytes_range
}, } else {
(&(Bound::Unbounded, Bound::Included(offset)), None) => { continue;
for partial_resource in partial_cached_resources { }
let headers = partial_resource.metadata.headers.lock().unwrap();
let content_range = headers.typed_get::<ContentRange>();
let (res_beginning, res_end, total) = if let Some(range) = content_range {
match (range.bytes_range(), range.bytes_len()) {
(Some(bytes_range), Some(total)) => (bytes_range.0, bytes_range.1, total),
_ => continue,
}
} else {
continue;
};
if total < res_beginning || total < res_end || offset == 0 || offset == u64::MAX {
// Prevent overflow in the below operations from occuring.
continue;
}
if (total - res_beginning) > (offset - 1) && (total - res_end) < offset + 1 {
let resource_body = &*partial_resource.body.lock().unwrap();
let requested = match resource_body {
ResponseBody::Done(body) => {
let from_byte = body.len() - offset as usize;
body.get(from_byte..)
}, },
_ => continue, _ => continue,
}; };
if let Some(bytes) = requested { if res_beginning <= beginning && res_end >= end {
let new_resource = let resource_body = &*partial_resource.body.lock().unwrap();
create_resource_with_bytes_from_resource(bytes, partial_resource); let requested = match resource_body {
let cached_response = ResponseBody::Done(body) => {
create_cached_response(request, &new_resource, &headers, done_chan); let b = beginning as usize - res_beginning as usize;
if let Some(cached_response) = cached_response { let e = end as usize - res_beginning as usize + 1;
return Some(cached_response); body.get(b..e)
},
_ => continue,
};
if let Some(bytes) = requested {
let new_resource =
create_resource_with_bytes_from_resource(bytes, partial_resource);
let cached_response =
create_cached_response(request, &new_resource, &headers, done_chan);
if let Some(cached_response) = cached_response {
return Some(cached_response);
}
} }
} }
} },
(Bound::Included(beginning), Bound::Unbounded) => {
let (res_beginning, res_end, total) = if let Some(range) = content_range {
match (range.bytes_range(), range.bytes_len()) {
(Some(bytes_range), Some(total)) => {
(bytes_range.0, bytes_range.1, total)
},
_ => continue,
}
} else {
continue;
};
if total == 0 {
// Prevent overflow in the below operations from occuring.
continue;
};
if res_beginning <= beginning && res_end == total - 1 {
let resource_body = &*partial_resource.body.lock().unwrap();
let requested = match resource_body {
ResponseBody::Done(body) => {
let from_byte = beginning as usize - res_beginning as usize;
body.get(from_byte..)
},
_ => continue,
};
if let Some(bytes) = requested {
let new_resource =
create_resource_with_bytes_from_resource(bytes, partial_resource);
let cached_response =
create_cached_response(request, &new_resource, &headers, done_chan);
if let Some(cached_response) = cached_response {
return Some(cached_response);
}
}
}
},
_ => continue,
} }
}, }
// All the cases with Bound::Excluded should be unreachable anyway
_ => return None,
} }
None None
} }
@ -607,12 +577,7 @@ impl HttpCache {
} }
// Support for range requests // Support for range requests
if let Some(range_spec) = request.headers.typed_get::<Range>() { if let Some(range_spec) = request.headers.typed_get::<Range>() {
return handle_range_request( return handle_range_request(request, candidates.as_slice(), &range_spec, done_chan);
request,
candidates.as_slice(),
range_spec.iter().collect(),
done_chan,
);
} }
while let Some(cached_resource) = candidates.pop() { while let Some(cached_resource) = candidates.pop() {
// Not a Range request. // Not a Range request.


@ -2,7 +2,6 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this * License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */ * file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use core::convert::Infallible;
use std::collections::{HashMap, HashSet}; use std::collections::{HashMap, HashSet};
use std::iter::FromIterator; use std::iter::FromIterator;
use std::sync::{Arc as StdArc, Condvar, Mutex, RwLock}; use std::sync::{Arc as StdArc, Condvar, Mutex, RwLock};
@ -19,7 +18,7 @@ use devtools_traits::{
use embedder_traits::{ use embedder_traits::{
EmbedderMsg, EmbedderProxy, PromptCredentialsInput, PromptDefinition, PromptOrigin, EmbedderMsg, EmbedderProxy, PromptCredentialsInput, PromptDefinition, PromptOrigin,
}; };
use futures::{future, StreamExt, TryFutureExt, TryStreamExt}; use futures::{future, TryFutureExt, TryStreamExt};
use headers::authorization::Basic; use headers::authorization::Basic;
use headers::{ use headers::{
AccessControlAllowCredentials, AccessControlAllowHeaders, AccessControlAllowMethods, AccessControlAllowCredentials, AccessControlAllowHeaders, AccessControlAllowMethods,
@ -32,10 +31,14 @@ use http::header::{
CONTENT_TYPE, CONTENT_TYPE,
}; };
use http::{HeaderMap, Method, Request as HyperRequest, StatusCode}; use http::{HeaderMap, Method, Request as HyperRequest, StatusCode};
use http_body_util::combinators::BoxBody;
use http_body_util::{BodyExt, Full};
use hyper::body::{Bytes, Frame};
use hyper::ext::ReasonPhrase; use hyper::ext::ReasonPhrase;
use hyper::header::{HeaderName, TRANSFER_ENCODING}; use hyper::header::{HeaderName, TRANSFER_ENCODING};
use hyper::{Body, Client, Response as HyperResponse}; use hyper::Response as HyperResponse;
use hyper_serde::Serde; use hyper_serde::Serde;
use hyper_util::client::legacy::Client;
use ipc_channel::ipc::{self, IpcSender}; use ipc_channel::ipc::{self, IpcSender};
use ipc_channel::router::ROUTER; use ipc_channel::router::ROUTER;
use log::{debug, error, info, log_enabled, warn}; use log::{debug, error, info, log_enabled, warn};
@ -101,7 +104,7 @@ pub struct HttpState {
pub http_cache_state: HttpCacheState, pub http_cache_state: HttpCacheState,
pub auth_cache: RwLock<AuthCache>, pub auth_cache: RwLock<AuthCache>,
pub history_states: RwLock<HashMap<HistoryStateId, Vec<u8>>>, pub history_states: RwLock<HashMap<HistoryStateId, Vec<u8>>>,
pub client: Client<Connector, Body>, pub client: Client<Connector, crate::connector::BoxedBody>,
pub override_manager: CertificateErrorOverrideManager, pub override_manager: CertificateErrorOverrideManager,
pub embedder_proxy: Mutex<EmbedderProxy>, pub embedder_proxy: Mutex<EmbedderProxy>,
} }
@ -440,7 +443,7 @@ enum BodyChunk {
enum BodyStream { enum BodyStream {
/// A receiver that can be used in Body::wrap_stream, /// A receiver that can be used in Body::wrap_stream,
/// for streaming the request over the network. /// for streaming the request over the network.
Chunked(TokioReceiver<Vec<u8>>), Chunked(TokioReceiver<Result<Frame<Bytes>, hyper::Error>>),
/// A body whose bytes are buffered /// A body whose bytes are buffered
/// and sent in one chunk over the network. /// and sent in one chunk over the network.
Buffered(UnboundedReceiver<BodyChunk>), Buffered(UnboundedReceiver<BodyChunk>),
@ -450,7 +453,7 @@ enum BodyStream {
/// used to enqueue chunks. /// used to enqueue chunks.
enum BodySink { enum BodySink {
/// A Tokio sender used to feed chunks to the network stream. /// A Tokio sender used to feed chunks to the network stream.
Chunked(TokioSender<Vec<u8>>), Chunked(TokioSender<Result<Frame<Bytes>, hyper::Error>>),
/// A Crossbeam sender used to send chunks to the fetch worker, /// A Crossbeam sender used to send chunks to the fetch worker,
/// where they will be buffered /// where they will be buffered
/// in order to ensure they are not streamed over the network. /// in order to ensure they are not streamed over the network.
@ -463,7 +466,7 @@ impl BodySink {
BodySink::Chunked(ref sender) => { BodySink::Chunked(ref sender) => {
let sender = sender.clone(); let sender = sender.clone();
HANDLE.lock().unwrap().as_mut().unwrap().spawn(async move { HANDLE.lock().unwrap().as_mut().unwrap().spawn(async move {
let _ = sender.send(bytes).await; let _ = sender.send(Ok(Frame::data(bytes.into()))).await;
}); });
}, },
BodySink::Buffered(ref sender) => { BodySink::Buffered(ref sender) => {
@ -484,7 +487,7 @@ impl BodySink {
#[allow(clippy::too_many_arguments)] #[allow(clippy::too_many_arguments)]
async fn obtain_response( async fn obtain_response(
client: &Client<Connector, Body>, client: &Client<Connector, crate::connector::BoxedBody>,
url: &ServoUrl, url: &ServoUrl,
method: &Method, method: &Method,
request_headers: &mut HeaderMap, request_headers: &mut HeaderMap,
@ -584,7 +587,7 @@ async fn obtain_response(
let body = match stream { let body = match stream {
BodyStream::Chunked(receiver) => { BodyStream::Chunked(receiver) => {
let stream = ReceiverStream::new(receiver); let stream = ReceiverStream::new(receiver);
Body::wrap_stream(stream.map(Ok::<_, Infallible>)) BoxBody::new(http_body_util::StreamBody::new(stream))
}, },
BodyStream::Buffered(mut receiver) => { BodyStream::Buffered(mut receiver) => {
// Accumulate bytes received over IPC into a vector. // Accumulate bytes received over IPC into a vector.
@ -598,7 +601,7 @@ async fn obtain_response(
None => warn!("Failed to read all chunks from request body."), None => warn!("Failed to read all chunks from request body."),
} }
} }
body.into() Full::new(body.into()).map_err(|_| unreachable!()).boxed()
}, },
}; };
HyperRequest::builder() HyperRequest::builder()
@ -609,7 +612,11 @@ async fn obtain_response(
HyperRequest::builder() HyperRequest::builder()
.method(method) .method(method)
.uri(encoded_url) .uri(encoded_url)
-        .body(Body::empty())
+        .body(
+            http_body_util::Empty::new()
+                .map_err(|_| unreachable!())
+                .boxed(),
+        )
}; };
context context
@ -695,7 +702,10 @@ async fn obtain_response(
None None
}; };
-            future::ready(Ok((Decoder::detect(res, is_secure_scheme), msg)))
+            future::ready(Ok((
+                Decoder::detect(res.map(|r| r.boxed()), is_secure_scheme),
+                msg,
+            )))
}) })
.map_err(move |error| { .map_err(move |error| {
NetworkError::from_hyper_error( NetworkError::from_hyper_error(
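hyper 1.x removes Body::wrap_stream and Body::empty, so request bodies are assembled from http-body-util combinators as in the hunks above. A rough sketch of the two body shapes used here, assuming the workspace's http-body-util, tokio, and tokio-stream versions (the helper names are illustrative):

    use http_body_util::combinators::BoxBody;
    use http_body_util::{BodyExt, Full, StreamBody};
    use hyper::body::{Bytes, Frame};
    use tokio::sync::mpsc;
    use tokio_stream::wrappers::ReceiverStream;

    // Buffered path: the whole payload is known up front, so an infallible
    // Full body is boxed, with its error type mapped to hyper::Error.
    fn buffered_body(bytes: Vec<u8>) -> BoxBody<Bytes, hyper::Error> {
        Full::new(Bytes::from(bytes))
            .map_err(|_| unreachable!())
            .boxed()
    }

    // Chunked path: frames arrive over a channel and are streamed as they
    // are produced, without buffering the request body in memory.
    fn chunked_body(
        receiver: mpsc::Receiver<Result<Frame<Bytes>, hyper::Error>>,
    ) -> BoxBody<Bytes, hyper::Error> {
        BoxBody::new(StreamBody::new(ReceiverStream::new(receiver)))
    }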

View file

@ -16,9 +16,7 @@ use net_traits::{NetworkError, ResourceFetchTiming};
use tokio::sync::mpsc::unbounded_channel; use tokio::sync::mpsc::unbounded_channel;
use crate::fetch::methods::{Data, DoneChannel, FetchContext}; use crate::fetch::methods::{Data, DoneChannel, FetchContext};
-use crate::protocols::{
-    get_range_request_bounds, partial_content, range_not_satisfiable_error, ProtocolHandler,
-};
+use crate::protocols::{partial_content, range_not_satisfiable_error, ProtocolHandler};
#[derive(Default)] #[derive(Default)]
pub struct BlobProtocolHander {} pub struct BlobProtocolHander {}
@ -42,9 +40,6 @@ impl ProtocolHandler for BlobProtocolHander {
let range_header = request.headers.typed_get::<Range>(); let range_header = request.headers.typed_get::<Range>();
let is_range_request = range_header.is_some(); let is_range_request = range_header.is_some();
// We will get a final version of this range once we have
// the length of the data backing the blob.
let range = get_range_request_bounds(range_header);
let (id, origin) = match parse_blob_url(&url) { let (id, origin) = match parse_blob_url(&url) {
Ok((id, origin)) => (id, origin), Ok((id, origin)) => (id, origin),
@ -73,7 +68,7 @@ impl ProtocolHandler for BlobProtocolHander {
&context.file_token, &context.file_token,
origin, origin,
&mut response, &mut response,
range, range_header,
) { ) {
let _ = done_sender.send(Data::Done); let _ = done_sender.send(Data::Done);
let err = match err { let err = match err {

View file

@ -58,7 +58,9 @@ impl ProtocolHandler for FileProtocolHander {
let range_header = request.headers.typed_get::<Range>(); let range_header = request.headers.typed_get::<Range>();
let is_range_request = range_header.is_some(); let is_range_request = range_header.is_some();
-    let Ok(range) = get_range_request_bounds(range_header).get_final(file_size) else {
+    let Ok(range) = get_range_request_bounds(range_header, file_size.unwrap_or(0))
+        .get_final(file_size)
+    else {
range_not_satisfiable_error(&mut response); range_not_satisfiable_error(&mut response);
return Box::pin(ready(response)); return Box::pin(ready(response));
}; };

View file

@ -106,19 +106,15 @@ pub fn range_not_satisfiable_error(response: &mut Response) {
} }
/// Get the range bounds if the `Range` header is present. /// Get the range bounds if the `Range` header is present.
-pub fn get_range_request_bounds(range: Option<Range>) -> RangeRequestBounds {
+pub fn get_range_request_bounds(range: Option<Range>, len: u64) -> RangeRequestBounds {
     if let Some(ref range) = range {
-        let (start, end) = match range
-            .iter()
-            .collect::<Vec<(Bound<u64>, Bound<u64>)>>()
-            .first()
-        {
-            Some(&(Bound::Included(start), Bound::Unbounded)) => (start, None),
-            Some(&(Bound::Included(start), Bound::Included(end))) => {
+        let (start, end) = match range.satisfiable_ranges(len).next() {
+            Some((Bound::Included(start), Bound::Unbounded)) => (start, None),
+            Some((Bound::Included(start), Bound::Included(end))) => {
                 // `end` should be less or equal to `start`.
                 (start, Some(i64::max(start as i64, end as i64)))
             },
-            Some(&(Bound::Unbounded, Bound::Included(offset))) => {
+            Some((Bound::Unbounded, Bound::Included(offset))) => {
                 return RangeRequestBounds::Pending(offset);
             },
             _ => (0, None),
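headers 0.4 replaces Range::iter() with satisfiable_ranges(len), which is why the resource length is now threaded into get_range_request_bounds. A small sketch of resolving the first satisfiable range against a known length; the helper name and the suffix-range arithmetic are illustrative, not Servo's exact logic:

    use std::ops::Bound;

    use headers::{HeaderMapExt, Range};
    use http::HeaderMap;

    // Resolve the first satisfiable byte range against a resource of `len`
    // bytes, returning inclusive (start, end) offsets.
    fn first_byte_range(headers: &HeaderMap, len: u64) -> Option<(u64, u64)> {
        let range: Range = headers.typed_get()?;
        let last = len.checked_sub(1)?;
        match range.satisfiable_ranges(len).next()? {
            (Bound::Included(start), Bound::Included(end)) => Some((start, end.min(last))),
            (Bound::Included(start), Bound::Unbounded) => Some((start, last)),
            // `bytes=-n` asks for the final n bytes of the resource.
            (Bound::Unbounded, Bound::Included(n)) => Some((len.saturating_sub(n), last)),
            _ => None,
        }
    }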

View file

@ -61,8 +61,8 @@ fn load_root_cert_store_from_file(file_path: String) -> io::Result<RootCertStore
let mut root_cert_store = RootCertStore::empty(); let mut root_cert_store = RootCertStore::empty();
let mut pem = BufReader::new(File::open(file_path)?); let mut pem = BufReader::new(File::open(file_path)?);
let certs = rustls_pemfile::certs(&mut pem)?; let certs: Result<Vec<_>, _> = rustls_pemfile::certs(&mut pem).collect();
root_cert_store.add_parsable_certificates(&certs); root_cert_store.add_parsable_certificates(certs?);
Ok(root_cert_store) Ok(root_cert_store)
} }
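rustls-pemfile 2.x returns an iterator of Result<CertificateDer, io::Error> instead of a Vec of raw DER blobs, so the certificates are collected before being added to the store. A self-contained version of the same pattern:

    use std::fs::File;
    use std::io::{self, BufReader};

    use rustls::RootCertStore;

    // Load a PEM bundle into a RootCertStore with the rustls-pemfile 2.x API.
    fn root_store_from_pem(path: &str) -> io::Result<RootCertStore> {
        let mut store = RootCertStore::empty();
        let mut reader = BufReader::new(File::open(path)?);
        let certs: Result<Vec<_>, _> = rustls_pemfile::certs(&mut reader).collect();
        // add_parsable_certificates skips certificates it cannot parse
        // rather than failing the whole load.
        store.add_parsable_certificates(certs?);
        Ok(store)
    }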

View file

@ -21,7 +21,9 @@ use headers::{
}; };
use http::header::{self, HeaderMap, HeaderName, HeaderValue}; use http::header::{self, HeaderMap, HeaderName, HeaderValue};
use http::{Method, StatusCode}; use http::{Method, StatusCode};
use hyper::{Body, Request as HyperRequest, Response as HyperResponse}; use http_body_util::combinators::BoxBody;
use hyper::body::{Bytes, Incoming};
use hyper::{Request as HyperRequest, Response as HyperResponse};
use mime::{self, Mime}; use mime::{self, Mime};
use net::fetch::cors_cache::CorsCache; use net::fetch::cors_cache::CorsCache;
use net::fetch::methods::{self, CancellationListener, FetchContext}; use net::fetch::methods::{self, CancellationListener, FetchContext};
@ -41,13 +43,12 @@ use net_traits::{
}; };
use servo_arc::Arc as ServoArc; use servo_arc::Arc as ServoArc;
use servo_url::ServoUrl; use servo_url::ServoUrl;
use tokio_test::block_on;
use uuid::Uuid; use uuid::Uuid;
use crate::http_loader::{expect_devtools_http_request, expect_devtools_http_response}; use crate::http_loader::{expect_devtools_http_request, expect_devtools_http_response};
use crate::{ use crate::{
create_embedder_proxy, create_http_state, fetch, fetch_with_context, fetch_with_cors_cache, create_embedder_proxy, create_http_state, fetch, fetch_with_context, fetch_with_cors_cache,
make_server, make_ssl_server, new_fetch_context, DEFAULT_USER_AGENT, make_body, make_server, make_ssl_server, new_fetch_context, DEFAULT_USER_AGENT,
}; };
// TODO write a struct that impls Handler for storing test values // TODO write a struct that impls Handler for storing test values
@ -55,9 +56,11 @@ use crate::{
#[test] #[test]
fn test_fetch_response_is_not_network_error() { fn test_fetch_response_is_not_network_error() {
static MESSAGE: &'static [u8] = b""; static MESSAGE: &'static [u8] = b"";
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| { let handler =
*response.body_mut() = MESSAGE.to_vec().into(); move |_: HyperRequest<Incoming>,
}; response: &mut HyperResponse<BoxBody<Bytes, hyper::Error>>| {
*response.body_mut() = make_body(MESSAGE.to_vec());
};
let (server, url) = make_server(handler); let (server, url) = make_server(handler);
let mut request = RequestBuilder::new(url.clone(), Referrer::NoReferrer) let mut request = RequestBuilder::new(url.clone(), Referrer::NoReferrer)
@ -89,9 +92,11 @@ fn test_fetch_on_bad_port_is_network_error() {
#[test] #[test]
fn test_fetch_response_body_matches_const_message() { fn test_fetch_response_body_matches_const_message() {
static MESSAGE: &'static [u8] = b"Hello World!"; static MESSAGE: &'static [u8] = b"Hello World!";
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| { let handler =
*response.body_mut() = MESSAGE.to_vec().into(); move |_: HyperRequest<Incoming>,
}; response: &mut HyperResponse<BoxBody<Bytes, hyper::Error>>| {
*response.body_mut() = make_body(MESSAGE.to_vec());
};
let (server, url) = make_server(handler); let (server, url) = make_server(handler);
let mut request = RequestBuilder::new(url.clone(), Referrer::NoReferrer) let mut request = RequestBuilder::new(url.clone(), Referrer::NoReferrer)
@ -191,7 +196,7 @@ fn test_fetch_blob() {
expected: bytes.to_vec(), expected: bytes.to_vec(),
}; };
block_on(methods::fetch(&mut request, &mut target, &context)); crate::HANDLE.block_on(methods::fetch(&mut request, &mut target, &context));
let fetch_response = receiver.recv().unwrap(); let fetch_response = receiver.recv().unwrap();
assert!(!fetch_response.is_network_error()); assert!(!fetch_response.is_network_error());
@ -285,38 +290,41 @@ fn test_fetch_bogus_scheme() {
fn test_cors_preflight_fetch() { fn test_cors_preflight_fetch() {
static ACK: &'static [u8] = b"ACK"; static ACK: &'static [u8] = b"ACK";
let state = Arc::new(AtomicUsize::new(0)); let state = Arc::new(AtomicUsize::new(0));
let handler = move |request: HyperRequest<Body>, response: &mut HyperResponse<Body>| { let handler =
if request.method() == Method::OPTIONS && state.clone().fetch_add(1, Ordering::SeqCst) == 0 move |request: HyperRequest<Incoming>,
{ response: &mut HyperResponse<BoxBody<Bytes, hyper::Error>>| {
assert!(request if request.method() == Method::OPTIONS &&
.headers() state.clone().fetch_add(1, Ordering::SeqCst) == 0
.contains_key(header::ACCESS_CONTROL_REQUEST_METHOD)); {
assert!(!request assert!(request
.headers() .headers()
.contains_key(header::ACCESS_CONTROL_REQUEST_HEADERS)); .contains_key(header::ACCESS_CONTROL_REQUEST_METHOD));
assert!(!request assert!(!request
.headers() .headers()
.get(header::REFERER) .contains_key(header::ACCESS_CONTROL_REQUEST_HEADERS));
.unwrap() assert!(!request
.to_str() .headers()
.unwrap() .get(header::REFERER)
.contains("a.html")); .unwrap()
response .to_str()
.headers_mut() .unwrap()
.typed_insert(AccessControlAllowOrigin::ANY); .contains("a.html"));
response response
.headers_mut() .headers_mut()
.typed_insert(AccessControlAllowCredentials); .typed_insert(AccessControlAllowOrigin::ANY);
response response
.headers_mut() .headers_mut()
.typed_insert(AccessControlAllowMethods::from_iter(vec![Method::GET])); .typed_insert(AccessControlAllowCredentials);
} else { response
response .headers_mut()
.headers_mut() .typed_insert(AccessControlAllowMethods::from_iter(vec![Method::GET]));
.typed_insert(AccessControlAllowOrigin::ANY); } else {
*response.body_mut() = ACK.to_vec().into(); response
} .headers_mut()
}; .typed_insert(AccessControlAllowOrigin::ANY);
*response.body_mut() = make_body(ACK.to_vec());
}
};
let (server, url) = make_server(handler); let (server, url) = make_server(handler);
let target_url = url.clone().join("a.html").unwrap(); let target_url = url.clone().join("a.html").unwrap();
@ -340,34 +348,37 @@ fn test_cors_preflight_cache_fetch() {
let state = Arc::new(AtomicUsize::new(0)); let state = Arc::new(AtomicUsize::new(0));
let counter = state.clone(); let counter = state.clone();
let mut cache = CorsCache::default(); let mut cache = CorsCache::default();
let handler = move |request: HyperRequest<Body>, response: &mut HyperResponse<Body>| { let handler =
if request.method() == Method::OPTIONS && state.clone().fetch_add(1, Ordering::SeqCst) == 0 move |request: HyperRequest<Incoming>,
{ response: &mut HyperResponse<BoxBody<Bytes, hyper::Error>>| {
assert!(request if request.method() == Method::OPTIONS &&
.headers() state.clone().fetch_add(1, Ordering::SeqCst) == 0
.contains_key(header::ACCESS_CONTROL_REQUEST_METHOD)); {
assert!(!request assert!(request
.headers() .headers()
.contains_key(header::ACCESS_CONTROL_REQUEST_HEADERS)); .contains_key(header::ACCESS_CONTROL_REQUEST_METHOD));
response assert!(!request
.headers_mut() .headers()
.typed_insert(AccessControlAllowOrigin::ANY); .contains_key(header::ACCESS_CONTROL_REQUEST_HEADERS));
response response
.headers_mut() .headers_mut()
.typed_insert(AccessControlAllowCredentials); .typed_insert(AccessControlAllowOrigin::ANY);
response response
.headers_mut() .headers_mut()
.typed_insert(AccessControlAllowMethods::from_iter(vec![Method::GET])); .typed_insert(AccessControlAllowCredentials);
response response
.headers_mut() .headers_mut()
.typed_insert(AccessControlMaxAge::from(Duration::new(6000, 0))); .typed_insert(AccessControlAllowMethods::from_iter(vec![Method::GET]));
} else { response
response .headers_mut()
.headers_mut() .typed_insert(AccessControlMaxAge::from(Duration::new(6000, 0)));
.typed_insert(AccessControlAllowOrigin::ANY); } else {
*response.body_mut() = ACK.to_vec().into(); response
} .headers_mut()
}; .typed_insert(AccessControlAllowOrigin::ANY);
*response.body_mut() = make_body(ACK.to_vec());
}
};
let (server, url) = make_server(handler); let (server, url) = make_server(handler);
let mut request = RequestBuilder::new(url, Referrer::NoReferrer).build(); let mut request = RequestBuilder::new(url, Referrer::NoReferrer).build();
@ -403,31 +414,34 @@ fn test_cors_preflight_cache_fetch() {
fn test_cors_preflight_fetch_network_error() { fn test_cors_preflight_fetch_network_error() {
static ACK: &'static [u8] = b"ACK"; static ACK: &'static [u8] = b"ACK";
let state = Arc::new(AtomicUsize::new(0)); let state = Arc::new(AtomicUsize::new(0));
let handler = move |request: HyperRequest<Body>, response: &mut HyperResponse<Body>| { let handler =
if request.method() == Method::OPTIONS && state.clone().fetch_add(1, Ordering::SeqCst) == 0 move |request: HyperRequest<Incoming>,
{ response: &mut HyperResponse<BoxBody<Bytes, hyper::Error>>| {
assert!(request if request.method() == Method::OPTIONS &&
.headers() state.clone().fetch_add(1, Ordering::SeqCst) == 0
.contains_key(header::ACCESS_CONTROL_REQUEST_METHOD)); {
assert!(!request assert!(request
.headers() .headers()
.contains_key(header::ACCESS_CONTROL_REQUEST_HEADERS)); .contains_key(header::ACCESS_CONTROL_REQUEST_METHOD));
response assert!(!request
.headers_mut() .headers()
.typed_insert(AccessControlAllowOrigin::ANY); .contains_key(header::ACCESS_CONTROL_REQUEST_HEADERS));
response response
.headers_mut() .headers_mut()
.typed_insert(AccessControlAllowCredentials); .typed_insert(AccessControlAllowOrigin::ANY);
response response
.headers_mut() .headers_mut()
.typed_insert(AccessControlAllowMethods::from_iter(vec![Method::GET])); .typed_insert(AccessControlAllowCredentials);
} else { response
response .headers_mut()
.headers_mut() .typed_insert(AccessControlAllowMethods::from_iter(vec![Method::GET]));
.typed_insert(AccessControlAllowOrigin::ANY); } else {
*response.body_mut() = ACK.to_vec().into(); response
} .headers_mut()
}; .typed_insert(AccessControlAllowOrigin::ANY);
*response.body_mut() = make_body(ACK.to_vec());
}
};
let (server, url) = make_server(handler); let (server, url) = make_server(handler);
let mut request = RequestBuilder::new(url, Referrer::NoReferrer).build(); let mut request = RequestBuilder::new(url, Referrer::NoReferrer).build();
@ -443,18 +457,20 @@ fn test_cors_preflight_fetch_network_error() {
#[test] #[test]
fn test_fetch_response_is_basic_filtered() { fn test_fetch_response_is_basic_filtered() {
static MESSAGE: &'static [u8] = b""; static MESSAGE: &'static [u8] = b"";
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| { let handler =
response move |_: HyperRequest<Incoming>,
.headers_mut() response: &mut HyperResponse<BoxBody<Bytes, hyper::Error>>| {
.insert(header::SET_COOKIE, HeaderValue::from_static("")); response
// this header is obsoleted, so hyper doesn't implement it, but it's still covered by the spec .headers_mut()
response.headers_mut().insert( .insert(header::SET_COOKIE, HeaderValue::from_static(""));
HeaderName::from_static("set-cookie2"), // this header is obsoleted, so hyper doesn't implement it, but it's still covered by the spec
HeaderValue::from_bytes(&vec![]).unwrap(), response.headers_mut().insert(
); HeaderName::from_static("set-cookie2"),
HeaderValue::from_bytes(&vec![]).unwrap(),
);
*response.body_mut() = MESSAGE.to_vec().into(); *response.body_mut() = make_body(MESSAGE.to_vec());
}; };
let (server, url) = make_server(handler); let (server, url) = make_server(handler);
let mut request = RequestBuilder::new(url.clone(), Referrer::NoReferrer) let mut request = RequestBuilder::new(url.clone(), Referrer::NoReferrer)
@ -476,47 +492,49 @@ fn test_fetch_response_is_basic_filtered() {
#[test] #[test]
fn test_fetch_response_is_cors_filtered() { fn test_fetch_response_is_cors_filtered() {
static MESSAGE: &'static [u8] = b""; static MESSAGE: &'static [u8] = b"";
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| { let handler =
// this is mandatory for the Cors Check to pass move |_: HyperRequest<Incoming>,
// TODO test using different url encodings with this value ie. punycode response: &mut HyperResponse<BoxBody<Bytes, hyper::Error>>| {
response // this is mandatory for the Cors Check to pass
.headers_mut() // TODO test using different url encodings with this value ie. punycode
.typed_insert(AccessControlAllowOrigin::ANY); response
.headers_mut()
.typed_insert(AccessControlAllowOrigin::ANY);
// these are the headers that should be kept after filtering // these are the headers that should be kept after filtering
response.headers_mut().typed_insert(CacheControl::new()); response.headers_mut().typed_insert(CacheControl::new());
response.headers_mut().insert( response.headers_mut().insert(
header::CONTENT_LANGUAGE, header::CONTENT_LANGUAGE,
HeaderValue::from_bytes(&vec![]).unwrap(), HeaderValue::from_bytes(&vec![]).unwrap(),
); );
response response
.headers_mut() .headers_mut()
.typed_insert(ContentType::from(mime::TEXT_HTML)); .typed_insert(ContentType::from(mime::TEXT_HTML));
response response
.headers_mut() .headers_mut()
.typed_insert(Expires::from(SystemTime::now() + Duration::new(86400, 0))); .typed_insert(Expires::from(SystemTime::now() + Duration::new(86400, 0)));
response response
.headers_mut() .headers_mut()
.typed_insert(LastModified::from(SystemTime::now())); .typed_insert(LastModified::from(SystemTime::now()));
response.headers_mut().typed_insert(Pragma::no_cache()); response.headers_mut().typed_insert(Pragma::no_cache());
// these headers should not be kept after filtering, even though they are given a pass // these headers should not be kept after filtering, even though they are given a pass
response response
.headers_mut() .headers_mut()
.insert(header::SET_COOKIE, HeaderValue::from_static("")); .insert(header::SET_COOKIE, HeaderValue::from_static(""));
response.headers_mut().insert( response.headers_mut().insert(
HeaderName::from_static("set-cookie2"),
HeaderValue::from_bytes(&vec![]).unwrap(),
);
response
.headers_mut()
.typed_insert(AccessControlAllowHeaders::from_iter(vec![
HeaderName::from_static("set-cookie"),
HeaderName::from_static("set-cookie2"), HeaderName::from_static("set-cookie2"),
])); HeaderValue::from_bytes(&vec![]).unwrap(),
);
response
.headers_mut()
.typed_insert(AccessControlAllowHeaders::from_iter(vec![
HeaderName::from_static("set-cookie"),
HeaderName::from_static("set-cookie2"),
]));
*response.body_mut() = MESSAGE.to_vec().into(); *response.body_mut() = make_body(MESSAGE.to_vec());
}; };
let (server, url) = make_server(handler); let (server, url) = make_server(handler);
// an origin mis-match will stop it from defaulting to a basic filtered response // an origin mis-match will stop it from defaulting to a basic filtered response
@ -546,9 +564,11 @@ fn test_fetch_response_is_cors_filtered() {
#[test] #[test]
fn test_fetch_response_is_opaque_filtered() { fn test_fetch_response_is_opaque_filtered() {
static MESSAGE: &'static [u8] = b""; static MESSAGE: &'static [u8] = b"";
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| { let handler =
*response.body_mut() = MESSAGE.to_vec().into(); move |_: HyperRequest<Incoming>,
}; response: &mut HyperResponse<BoxBody<Bytes, hyper::Error>>| {
*response.body_mut() = make_body(MESSAGE.to_vec());
};
let (server, url) = make_server(handler); let (server, url) = make_server(handler);
// an origin mis-match will fall through to an Opaque filtered response // an origin mis-match will fall through to an Opaque filtered response
@ -577,24 +597,26 @@ fn test_fetch_response_is_opaque_filtered() {
#[test] #[test]
fn test_fetch_response_is_opaque_redirect_filtered() { fn test_fetch_response_is_opaque_redirect_filtered() {
static MESSAGE: &'static [u8] = b""; static MESSAGE: &'static [u8] = b"";
let handler = move |request: HyperRequest<Body>, response: &mut HyperResponse<Body>| { let handler =
let redirects = request move |request: HyperRequest<Incoming>,
.uri() response: &mut HyperResponse<BoxBody<Bytes, hyper::Error>>| {
.path() let redirects = request
.split("/") .uri()
.collect::<String>() .path()
.parse::<u32>() .split("/")
.unwrap_or(0); .collect::<String>()
.parse::<u32>()
.unwrap_or(0);
if redirects == 1 { if redirects == 1 {
*response.body_mut() = MESSAGE.to_vec().into(); *response.body_mut() = make_body(MESSAGE.to_vec());
} else { } else {
*response.status_mut() = StatusCode::FOUND; *response.status_mut() = StatusCode::FOUND;
response response
.headers_mut() .headers_mut()
.insert(header::LOCATION, HeaderValue::from_static("1")); .insert(header::LOCATION, HeaderValue::from_static("1"));
} }
}; };
let (server, url) = make_server(handler); let (server, url) = make_server(handler);
@ -626,9 +648,11 @@ fn test_fetch_with_local_urls_only() {
// If flag `local_urls_only` is set, fetching a non-local URL must result in network error. // If flag `local_urls_only` is set, fetching a non-local URL must result in network error.
static MESSAGE: &'static [u8] = b""; static MESSAGE: &'static [u8] = b"";
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| { let handler =
*response.body_mut() = MESSAGE.to_vec().into(); move |_: HyperRequest<Incoming>,
}; response: &mut HyperResponse<BoxBody<Bytes, hyper::Error>>| {
*response.body_mut() = make_body(MESSAGE.to_vec());
};
let (server, server_url) = make_server(handler); let (server, server_url) = make_server(handler);
let do_fetch = |url: ServoUrl| { let do_fetch = |url: ServoUrl| {
@ -661,9 +685,11 @@ fn test_fetch_with_local_urls_only() {
#[test] #[test]
fn test_fetch_with_hsts() { fn test_fetch_with_hsts() {
static MESSAGE: &'static [u8] = b""; static MESSAGE: &'static [u8] = b"";
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| { let handler =
*response.body_mut() = MESSAGE.to_vec().into(); move |_: HyperRequest<Incoming>,
}; response: &mut HyperResponse<BoxBody<Bytes, hyper::Error>>| {
*response.body_mut() = make_body(MESSAGE.to_vec());
};
let (server, url) = make_ssl_server(handler); let (server, url) = make_ssl_server(handler);
@ -710,14 +736,16 @@ fn test_fetch_with_hsts() {
#[test] #[test]
fn test_load_adds_host_to_hsts_list_when_url_is_https() { fn test_load_adds_host_to_hsts_list_when_url_is_https() {
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| { let handler =
response move |_: HyperRequest<Incoming>,
.headers_mut() response: &mut HyperResponse<BoxBody<Bytes, hyper::Error>>| {
.typed_insert(StrictTransportSecurity::excluding_subdomains( response
Duration::from_secs(31536000), .headers_mut()
)); .typed_insert(StrictTransportSecurity::excluding_subdomains(
*response.body_mut() = b"Yay!".to_vec().into(); Duration::from_secs(31536000),
}; ));
*response.body_mut() = make_body(b"Yay!".to_vec());
};
let (server, mut url) = make_ssl_server(handler); let (server, mut url) = make_ssl_server(handler);
url.as_mut_url().set_scheme("https").unwrap(); url.as_mut_url().set_scheme("https").unwrap();
@ -772,9 +800,11 @@ fn test_load_adds_host_to_hsts_list_when_url_is_https() {
#[test] #[test]
fn test_fetch_self_signed() { fn test_fetch_self_signed() {
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| { let handler =
*response.body_mut() = b"Yay!".to_vec().into(); move |_: HyperRequest<Incoming>,
}; response: &mut HyperResponse<BoxBody<Bytes, hyper::Error>>| {
*response.body_mut() = make_body(b"Yay!".to_vec());
};
let (server, mut url) = make_ssl_server(handler); let (server, mut url) = make_ssl_server(handler);
url.as_mut_url().set_scheme("https").unwrap(); url.as_mut_url().set_scheme("https").unwrap();
@ -834,9 +864,11 @@ fn test_fetch_self_signed() {
#[test] #[test]
fn test_fetch_with_sri_network_error() { fn test_fetch_with_sri_network_error() {
static MESSAGE: &'static [u8] = b"alert('Hello, Network Error');"; static MESSAGE: &'static [u8] = b"alert('Hello, Network Error');";
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| { let handler =
*response.body_mut() = MESSAGE.to_vec().into(); move |_: HyperRequest<Incoming>,
}; response: &mut HyperResponse<BoxBody<Bytes, hyper::Error>>| {
*response.body_mut() = make_body(MESSAGE.to_vec());
};
let (server, url) = make_server(handler); let (server, url) = make_server(handler);
let mut request = RequestBuilder::new(url.clone(), Referrer::NoReferrer) let mut request = RequestBuilder::new(url.clone(), Referrer::NoReferrer)
@ -858,9 +890,11 @@ fn test_fetch_with_sri_network_error() {
#[test] #[test]
fn test_fetch_with_sri_sucess() { fn test_fetch_with_sri_sucess() {
static MESSAGE: &'static [u8] = b"alert('Hello, world.');"; static MESSAGE: &'static [u8] = b"alert('Hello, world.');";
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| { let handler =
*response.body_mut() = MESSAGE.to_vec().into(); move |_: HyperRequest<Incoming>,
}; response: &mut HyperResponse<BoxBody<Bytes, hyper::Error>>| {
*response.body_mut() = make_body(MESSAGE.to_vec());
};
let (server, url) = make_server(handler); let (server, url) = make_server(handler);
let mut request = RequestBuilder::new(url.clone(), Referrer::NoReferrer) let mut request = RequestBuilder::new(url.clone(), Referrer::NoReferrer)
@ -888,18 +922,20 @@ fn test_fetch_blocked_nosniff() {
const HEADER: &'static str = "x-content-type-options"; const HEADER: &'static str = "x-content-type-options";
const VALUE: &'static [u8] = b"nosniff"; const VALUE: &'static [u8] = b"nosniff";
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| { let handler =
let mime_header = ContentType::from(mime.clone()); move |_: HyperRequest<Incoming>,
response.headers_mut().typed_insert(mime_header); response: &mut HyperResponse<BoxBody<Bytes, hyper::Error>>| {
assert!(response.headers().contains_key(header::CONTENT_TYPE)); let mime_header = ContentType::from(mime.clone());
// Add the nosniff header response.headers_mut().typed_insert(mime_header);
response.headers_mut().insert( assert!(response.headers().contains_key(header::CONTENT_TYPE));
HeaderName::from_static(HEADER), // Add the nosniff header
HeaderValue::from_bytes(VALUE).unwrap(), response.headers_mut().insert(
); HeaderName::from_static(HEADER),
HeaderValue::from_bytes(VALUE).unwrap(),
);
*response.body_mut() = MESSAGE.to_vec().into(); *response.body_mut() = make_body(MESSAGE.to_vec());
}; };
let (server, url) = make_server(handler); let (server, url) = make_server(handler);
@ -926,25 +962,27 @@ fn test_fetch_blocked_nosniff() {
} }
fn setup_server_and_fetch(message: &'static [u8], redirect_cap: u32) -> Response { fn setup_server_and_fetch(message: &'static [u8], redirect_cap: u32) -> Response {
let handler = move |request: HyperRequest<Body>, response: &mut HyperResponse<Body>| { let handler =
let redirects = request move |request: HyperRequest<Incoming>,
.uri() response: &mut HyperResponse<BoxBody<Bytes, hyper::Error>>| {
.path() let redirects = request
.split("/") .uri()
.collect::<String>() .path()
.parse::<u32>() .split("/")
.unwrap_or(0); .collect::<String>()
.parse::<u32>()
.unwrap_or(0);
if redirects >= redirect_cap { if redirects >= redirect_cap {
*response.body_mut() = message.to_vec().into(); *response.body_mut() = make_body(message.to_vec());
} else { } else {
*response.status_mut() = StatusCode::FOUND; *response.status_mut() = StatusCode::FOUND;
let url = format!("{redirects}", redirects = redirects + 1); let url = format!("{redirects}", redirects = redirects + 1);
response response
.headers_mut() .headers_mut()
.insert(header::LOCATION, HeaderValue::from_str(&url).unwrap()); .insert(header::LOCATION, HeaderValue::from_str(&url).unwrap());
} }
}; };
let (server, url) = make_server(handler); let (server, url) = make_server(handler);
@ -997,44 +1035,46 @@ fn test_fetch_redirect_updates_method_runner(
method: Method, method: Method,
) { ) {
let handler_method = method.clone(); let handler_method = method.clone();
let handler_tx = Arc::new(Mutex::new(tx)); let handler_tx = Arc::new(tx);
let handler = move |request: HyperRequest<Body>, response: &mut HyperResponse<Body>| { let handler =
let redirects = request move |request: HyperRequest<Incoming>,
.uri() response: &mut HyperResponse<BoxBody<Bytes, hyper::Error>>| {
.path() let redirects = request
.split("/") .uri()
.collect::<String>() .path()
.parse::<u32>() .split("/")
.unwrap_or(0); .collect::<String>()
.parse::<u32>()
.unwrap_or(0);
let mut test_pass = true; let mut test_pass = true;
if redirects == 0 { if redirects == 0 {
*response.status_mut() = StatusCode::TEMPORARY_REDIRECT; *response.status_mut() = StatusCode::TEMPORARY_REDIRECT;
response response
.headers_mut() .headers_mut()
.insert(header::LOCATION, HeaderValue::from_static("1")); .insert(header::LOCATION, HeaderValue::from_static("1"));
} else if redirects == 1 { } else if redirects == 1 {
// this makes sure that the request method doesn't change from the wrong status code // this makes sure that the request method doesn't change from the wrong status code
if handler_method != Method::GET && request.method() == Method::GET { if handler_method != Method::GET && request.method() == Method::GET {
test_pass = false;
}
*response.status_mut() = status_code;
response
.headers_mut()
.insert(header::LOCATION, HeaderValue::from_static("2"));
} else if request.method() != Method::GET {
test_pass = false; test_pass = false;
} }
*response.status_mut() = status_code;
response
.headers_mut()
.insert(header::LOCATION, HeaderValue::from_static("2"));
} else if request.method() != Method::GET {
test_pass = false;
}
// the first time this handler is reached, nothing is being tested, so don't send anything // the first time this handler is reached, nothing is being tested, so don't send anything
if redirects > 0 { if redirects > 0 {
handler_tx.lock().unwrap().send(test_pass).unwrap(); handler_tx.send(test_pass).unwrap();
} }
}; };
let (server, url) = make_server(handler); let (server, url) = crate::make_server(handler);
let mut request = RequestBuilder::new(url.clone(), Referrer::NoReferrer) let mut request = RequestBuilder::new(url.clone(), Referrer::NoReferrer)
.origin(url.origin()) .origin(url.origin())
@ -1114,9 +1154,11 @@ fn response_is_done(response: &Response) -> bool {
#[test] #[test]
fn test_fetch_async_returns_complete_response() { fn test_fetch_async_returns_complete_response() {
static MESSAGE: &'static [u8] = b"this message should be retrieved in full"; static MESSAGE: &'static [u8] = b"this message should be retrieved in full";
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| { let handler =
*response.body_mut() = MESSAGE.to_vec().into(); move |_: HyperRequest<Incoming>,
}; response: &mut HyperResponse<BoxBody<Bytes, hyper::Error>>| {
*response.body_mut() = make_body(MESSAGE.to_vec());
};
let (server, url) = make_server(handler); let (server, url) = make_server(handler);
let mut request = RequestBuilder::new(url.clone(), Referrer::NoReferrer) let mut request = RequestBuilder::new(url.clone(), Referrer::NoReferrer)
@ -1131,9 +1173,11 @@ fn test_fetch_async_returns_complete_response() {
#[test] #[test]
fn test_opaque_filtered_fetch_async_returns_complete_response() { fn test_opaque_filtered_fetch_async_returns_complete_response() {
static MESSAGE: &'static [u8] = b""; static MESSAGE: &'static [u8] = b"";
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| { let handler =
*response.body_mut() = MESSAGE.to_vec().into(); move |_: HyperRequest<Incoming>,
}; response: &mut HyperResponse<BoxBody<Bytes, hyper::Error>>| {
*response.body_mut() = make_body(MESSAGE.to_vec());
};
let (server, url) = make_server(handler); let (server, url) = make_server(handler);
// an origin mis-match will fall through to an Opaque filtered response // an origin mis-match will fall through to an Opaque filtered response
@ -1149,24 +1193,26 @@ fn test_opaque_filtered_fetch_async_returns_complete_response() {
#[test] #[test]
fn test_opaque_redirect_filtered_fetch_async_returns_complete_response() { fn test_opaque_redirect_filtered_fetch_async_returns_complete_response() {
static MESSAGE: &'static [u8] = b""; static MESSAGE: &'static [u8] = b"";
let handler = move |request: HyperRequest<Body>, response: &mut HyperResponse<Body>| { let handler =
let redirects = request move |request: HyperRequest<Incoming>,
.uri() response: &mut HyperResponse<BoxBody<Bytes, hyper::Error>>| {
.path() let redirects = request
.split("/") .uri()
.collect::<String>() .path()
.parse::<u32>() .split("/")
.unwrap_or(0); .collect::<String>()
.parse::<u32>()
.unwrap_or(0);
if redirects == 1 { if redirects == 1 {
*response.body_mut() = MESSAGE.to_vec().into(); *response.body_mut() = make_body(MESSAGE.to_vec());
} else { } else {
*response.status_mut() = StatusCode::FOUND; *response.status_mut() = StatusCode::FOUND;
response response
.headers_mut() .headers_mut()
.insert(header::LOCATION, HeaderValue::from_static("1")); .insert(header::LOCATION, HeaderValue::from_static("1"));
} }
}; };
let (server, url) = make_server(handler); let (server, url) = make_server(handler);
let mut request = RequestBuilder::new(url.clone(), Referrer::NoReferrer) let mut request = RequestBuilder::new(url.clone(), Referrer::NoReferrer)
@ -1186,9 +1232,11 @@ fn test_opaque_redirect_filtered_fetch_async_returns_complete_response() {
#[cfg(not(target_os = "windows"))] #[cfg(not(target_os = "windows"))]
fn test_fetch_with_devtools() { fn test_fetch_with_devtools() {
static MESSAGE: &'static [u8] = b"Yay!"; static MESSAGE: &'static [u8] = b"Yay!";
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| { let handler =
*response.body_mut() = MESSAGE.to_vec().into(); move |_: HyperRequest<Incoming>,
}; response: &mut HyperResponse<BoxBody<Bytes, hyper::Error>>| {
*response.body_mut() = make_body(MESSAGE.to_vec());
};
let (server, url) = make_server(handler); let (server, url) = make_server(handler);
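Every handler in this file changes the same way: the request body parameter becomes hyper 1.x's Incoming and the response body becomes the boxed body produced by make_body. One such handler written out in full, with the make_body call inlined:

    use http_body_util::combinators::BoxBody;
    use http_body_util::{BodyExt, Full};
    use hyper::body::{Bytes, Incoming};
    use hyper::{Request, Response};

    // The shape shared by the test handlers after the upgrade: the request
    // body is Incoming and the response body is a BoxBody<Bytes, hyper::Error>.
    fn hello_handler(
        _request: Request<Incoming>,
        response: &mut Response<BoxBody<Bytes, hyper::Error>>,
    ) {
        *response.body_mut() = Full::new(Bytes::from_static(b"Hello World!"))
            .map_err(|_| unreachable!())
            .boxed();
    }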

File diff suppressed because it is too large

View file

@ -30,11 +30,13 @@ use crossbeam_channel::{unbounded, Sender};
use devtools_traits::DevtoolsControlMsg; use devtools_traits::DevtoolsControlMsg;
use embedder_traits::{EmbedderProxy, EmbedderReceiver, EventLoopWaker}; use embedder_traits::{EmbedderProxy, EmbedderReceiver, EventLoopWaker};
use futures::future::ready; use futures::future::ready;
use futures::StreamExt; use http_body_util::combinators::BoxBody;
use hyper::server::conn::Http; use http_body_util::{BodyExt, Empty, Full};
use hyper::server::Server as HyperServer; use hyper::body::{Bytes, Incoming};
use hyper::service::{make_service_fn, service_fn}; use hyper::server::conn::http1;
use hyper::{Body, Request as HyperRequest, Response as HyperResponse}; use hyper::service::service_fn;
use hyper::{Request as HyperRequest, Response as HyperResponse};
use hyper_util::rt::tokio::TokioIo;
use net::connector::{create_http_client, create_tls_config}; use net::connector::{create_http_client, create_tls_config};
use net::fetch::cors_cache::CorsCache; use net::fetch::cors_cache::CorsCache;
use net::fetch::methods::{self, CancellationListener, FetchContext}; use net::fetch::methods::{self, CancellationListener, FetchContext};
@ -46,30 +48,26 @@ use net_traits::filemanager_thread::FileTokenCheck;
use net_traits::request::Request; use net_traits::request::Request;
use net_traits::response::Response; use net_traits::response::Response;
use net_traits::{FetchTaskTarget, ResourceFetchTiming, ResourceTimingType}; use net_traits::{FetchTaskTarget, ResourceFetchTiming, ResourceTimingType};
use rustls::{self, Certificate, PrivateKey};
use rustls_pemfile::{certs, pkcs8_private_keys}; use rustls_pemfile::{certs, pkcs8_private_keys};
use rustls_pki_types::{CertificateDer, PrivateKeyDer};
use servo_arc::Arc as ServoArc; use servo_arc::Arc as ServoArc;
use servo_url::ServoUrl; use servo_url::ServoUrl;
use tokio::net::{TcpListener, TcpStream}; use tokio::net::{TcpListener, TcpStream};
use tokio::runtime::{Builder, Runtime}; use tokio::runtime::{Builder, Runtime};
use tokio_rustls::{self, TlsAcceptor}; use tokio_rustls::{self, TlsAcceptor};
use tokio_stream::wrappers::TcpListenerStream;
use tokio_test::block_on;
-pub static HANDLE: LazyLock<Mutex<Runtime>> = LazyLock::new(|| {
-    Mutex::new(
-        Builder::new_multi_thread()
-            .enable_io()
-            .worker_threads(10)
-            .build()
-            .unwrap(),
-    )
-});
+pub static HANDLE: LazyLock<Runtime> = LazyLock::new(|| {
+    Builder::new_multi_thread()
+        .enable_io()
+        .worker_threads(10)
+        .build()
+        .unwrap()
+});
const DEFAULT_USER_AGENT: &'static str = "Such Browser. Very Layout. Wow."; const DEFAULT_USER_AGENT: &'static str = "Such Browser. Very Layout. Wow.";
struct FetchResponseCollector { struct FetchResponseCollector {
sender: Sender<Response>, sender: Option<tokio::sync::oneshot::Sender<Response>>,
} }
fn create_embedder_proxy() -> EmbedderProxy { fn create_embedder_proxy() -> EmbedderProxy {
@ -149,6 +147,8 @@ fn receive_credential_prompt_msgs(
} }
fn create_http_state(fc: Option<EmbedderProxy>) -> HttpState { fn create_http_state(fc: Option<EmbedderProxy>) -> HttpState {
let _ = rustls::crypto::ring::default_provider().install_default();
let override_manager = net::connector::CertificateErrorOverrideManager::new(); let override_manager = net::connector::CertificateErrorOverrideManager::new();
HttpState { HttpState {
hsts_list: RwLock::new(net::hsts::HstsList::default()), hsts_list: RwLock::new(net::hsts::HstsList::default()),
@ -197,7 +197,7 @@ impl FetchTaskTarget for FetchResponseCollector {
fn process_response_chunk(&mut self, _: &Request, _: Vec<u8>) {} fn process_response_chunk(&mut self, _: &Request, _: Vec<u8>) {}
/// Fired when the response is fully fetched /// Fired when the response is fully fetched
fn process_response_eof(&mut self, _: &Request, response: &Response) { fn process_response_eof(&mut self, _: &Request, response: &Response) {
let _ = self.sender.send(response.clone()); let _ = self.sender.take().unwrap().send(response.clone());
} }
} }
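The collector's sender becomes an Option because tokio's oneshot::Sender::send consumes the sender, while process_response_eof only receives &mut self. A minimal sketch of that pattern, with an illustrative payload type in place of the real Response:

    use tokio::sync::oneshot;

    // oneshot::Sender::send(self) consumes the sender, so a collector that
    // only has &mut self stores it in an Option and takes it exactly once.
    struct ResponseCollector {
        sender: Option<oneshot::Sender<String>>,
    }

    impl ResponseCollector {
        fn process_response_eof(&mut self, response: String) {
            if let Some(sender) = self.sender.take() {
                let _ = sender.send(response);
            }
        }
    }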
@ -206,18 +206,22 @@ fn fetch(request: &mut Request, dc: Option<Sender<DevtoolsControlMsg>>) -> Respo
} }
fn fetch_with_context(request: &mut Request, mut context: &mut FetchContext) -> Response { fn fetch_with_context(request: &mut Request, mut context: &mut FetchContext) -> Response {
-    let (sender, receiver) = unbounded();
-    let mut target = FetchResponseCollector { sender: sender };
-    block_on(async move {
+    let (sender, receiver) = tokio::sync::oneshot::channel();
+    let mut target = FetchResponseCollector {
+        sender: Some(sender),
+    };
+    HANDLE.block_on(async move {
         methods::fetch(request, &mut target, &mut context).await;
-        receiver.recv().unwrap()
+        receiver.await.unwrap()
     })
} }
fn fetch_with_cors_cache(request: &mut Request, cache: &mut CorsCache) -> Response { fn fetch_with_cors_cache(request: &mut Request, cache: &mut CorsCache) -> Response {
-    let (sender, receiver) = unbounded();
-    let mut target = FetchResponseCollector { sender: sender };
-    block_on(async move {
+    let (sender, receiver) = tokio::sync::oneshot::channel();
+    let mut target = FetchResponseCollector {
+        sender: Some(sender),
+    };
+    HANDLE.block_on(async move {
methods::fetch_with_cors_cache( methods::fetch_with_cors_cache(
request, request,
cache, cache,
@ -225,13 +229,13 @@ fn fetch_with_cors_cache(request: &mut Request, cache: &mut CorsCache) -> Respon
&mut new_fetch_context(None, None, None), &mut new_fetch_context(None, None, None),
) )
.await; .await;
receiver.recv().unwrap() receiver.await.unwrap()
}) })
} }
pub(crate) struct Server { pub(crate) struct Server {
pub close_channel: tokio::sync::oneshot::Sender<()>, pub close_channel: tokio::sync::oneshot::Sender<()>,
pub certificates: Option<Vec<Certificate>>, pub certificates: Option<Vec<CertificateDer<'static>>>,
} }
impl Server { impl Server {
@ -242,34 +246,59 @@ impl Server {
fn make_server<H>(handler: H) -> (Server, ServoUrl) fn make_server<H>(handler: H) -> (Server, ServoUrl)
where where
H: Fn(HyperRequest<Body>, &mut HyperResponse<Body>) + Send + Sync + 'static, H: Fn(HyperRequest<Incoming>, &mut HyperResponse<BoxBody<Bytes, hyper::Error>>)
+ Send
+ Sync
+ 'static,
{ {
let handler = Arc::new(handler); let handler = Arc::new(handler);
let listener = StdTcpListener::bind("0.0.0.0:0").unwrap(); let listener = StdTcpListener::bind("0.0.0.0:0").unwrap();
listener.set_nonblocking(true).unwrap();
let listener = HANDLE.block_on(async move { TcpListener::from_std(listener).unwrap() });
let url_string = format!("http://localhost:{}", listener.local_addr().unwrap().port()); let url_string = format!("http://localhost:{}", listener.local_addr().unwrap().port());
let url = ServoUrl::parse(&url_string).unwrap(); let url = ServoUrl::parse(&url_string).unwrap();
let (tx, rx) = tokio::sync::oneshot::channel::<()>();
let graceful = hyper_util::server::graceful::GracefulShutdown::new();
let (tx, mut rx) = tokio::sync::oneshot::channel::<()>();
let server = async move { let server = async move {
HyperServer::from_tcp(listener) loop {
.unwrap() let stream = tokio::select! {
.serve(make_service_fn(move |_| { stream = listener.accept() => stream.unwrap().0,
let handler = handler.clone(); _val = &mut rx => {
ready(Ok::<_, Infallible>(service_fn( let _ = graceful.shutdown();
move |req: HyperRequest<Body>| { break;
let mut response = HyperResponse::new(Vec::<u8>::new().into()); }
handler(req, &mut response); };
ready(Ok::<_, Infallible>(response))
}, let handler = handler.clone();
)))
})) let stream = stream.into_std().unwrap();
.with_graceful_shutdown(async move { stream
rx.await.ok(); .set_read_timeout(Some(std::time::Duration::new(5, 0)))
}) .unwrap();
.await let stream = TcpStream::from_std(stream).unwrap();
.expect("Could not start server");
let http = http1::Builder::new();
let conn = http.serve_connection(
TokioIo::new(stream),
service_fn(move |req: HyperRequest<Incoming>| {
let mut response =
HyperResponse::new(Empty::new().map_err(|_| unreachable!()).boxed());
handler(req, &mut response);
ready(Ok::<_, Infallible>(response))
}),
);
let conn = graceful.watch(conn);
HANDLE.spawn(async move {
let _ = conn.await;
});
}
}; };
HANDLE.lock().unwrap().spawn(server); let _ = HANDLE.spawn(server);
( (
Server { Server {
close_channel: tx, close_channel: tx,
@ -281,37 +310,40 @@ where
/// Given a path to a file containing PEM certificates, load and parse them into /// Given a path to a file containing PEM certificates, load and parse them into
/// a vector of RusTLS [Certificate]s. /// a vector of RusTLS [Certificate]s.
fn load_certificates_from_pem(path: &PathBuf) -> std::io::Result<Vec<Certificate>> { fn load_certificates_from_pem(path: &PathBuf) -> std::io::Result<Vec<CertificateDer<'static>>> {
let file = File::open(path)?; let file = File::open(path)?;
let mut reader = BufReader::new(file); let mut reader = BufReader::new(file);
let certs = certs(&mut reader)?; certs(&mut reader).collect::<Result<Vec<_>, _>>()
Ok(certs.into_iter().map(Certificate).collect())
} }
/// Given a path to a file containing PEM keys, load and parse them into /// Given a path to a file containing PEM keys, load and parse them into
/// a vector of RusTLS [PrivateKey]s. /// a vector of RusTLS [PrivateKey]s.
fn load_private_key_from_file(path: &PathBuf) -> Result<PrivateKey, Box<dyn std::error::Error>> { fn load_private_key_from_file(
path: &PathBuf,
) -> Result<PrivateKeyDer<'static>, Box<dyn std::error::Error>> {
let file = File::open(&path)?; let file = File::open(&path)?;
let mut reader = BufReader::new(file); let mut reader = BufReader::new(file);
let mut keys = pkcs8_private_keys(&mut reader)?; let mut keys = pkcs8_private_keys(&mut reader).collect::<Result<Vec<_>, _>>()?;
match keys.len() { match keys.len() {
0 => Err(format!("No PKCS8-encoded private key found in {path:?}").into()), 0 => Err(format!("No PKCS8-encoded private key found in {path:?}").into()),
1 => Ok(PrivateKey(keys.remove(0))), 1 => Ok(PrivateKeyDer::try_from(keys.remove(0))?),
_ => Err(format!("More than one PKCS8-encoded private key found in {path:?}").into()), _ => Err(format!("More than one PKCS8-encoded private key found in {path:?}").into()),
} }
} }
fn make_ssl_server<H>(handler: H) -> (Server, ServoUrl) fn make_ssl_server<H>(handler: H) -> (Server, ServoUrl)
where where
H: Fn(HyperRequest<Body>, &mut HyperResponse<Body>) + Send + Sync + 'static, H: Fn(HyperRequest<Incoming>, &mut HyperResponse<BoxBody<Bytes, hyper::Error>>)
+ Send
+ Sync
+ 'static,
{ {
let handler = Arc::new(handler); let handler = Arc::new(handler);
let listener = StdTcpListener::bind("[::0]:0").unwrap(); let listener = StdTcpListener::bind("[::0]:0").unwrap();
let listener = HANDLE listener.set_nonblocking(true).unwrap();
.lock() let listener = HANDLE.block_on(async move { TcpListener::from_std(listener).unwrap() });
.unwrap()
.block_on(async move { TcpListener::from_std(listener).unwrap() });
let url_string = format!("http://localhost:{}", listener.local_addr().unwrap().port()); let url_string = format!("http://localhost:{}", listener.local_addr().unwrap().port());
let url = ServoUrl::parse(&url_string).unwrap(); let url = ServoUrl::parse(&url_string).unwrap();
@ -325,27 +357,20 @@ where
let key = load_private_key_from_file(&key_path).expect("Invalid key"); let key = load_private_key_from_file(&key_path).expect("Invalid key");
let config = rustls::ServerConfig::builder() let config = rustls::ServerConfig::builder()
.with_safe_defaults()
.with_no_client_auth() .with_no_client_auth()
.with_single_cert(certificates.clone(), key) .with_single_cert(certificates.clone(), key)
.map_err(|err| io::Error::new(io::ErrorKind::InvalidInput, err)) .map_err(|err| io::Error::new(io::ErrorKind::InvalidInput, err))
.expect("Could not create rustls ServerConfig"); .expect("Could not create rustls ServerConfig");
let acceptor = TlsAcceptor::from(Arc::new(config)); let acceptor = TlsAcceptor::from(Arc::new(config));
let mut listener = TcpListenerStream::new(listener);
let (tx, mut rx) = tokio::sync::oneshot::channel::<()>(); let (tx, mut rx) = tokio::sync::oneshot::channel::<()>();
let server = async move { let server = async move {
loop { loop {
let stream = tokio::select! { let stream = tokio::select! {
stream = listener.next() => stream, stream = listener.accept() => stream.unwrap().0,
_ = &mut rx => break _ = &mut rx => break
}; };
let stream = match stream {
Some(stream) => stream.expect("Could not accept stream: "),
_ => break,
};
let stream = stream.into_std().unwrap(); let stream = stream.into_std().unwrap();
stream stream
.set_read_timeout(Some(std::time::Duration::new(5, 0))) .set_read_timeout(Some(std::time::Duration::new(5, 0)))
@ -363,11 +388,12 @@ where
}, },
}; };
let _ = Http::new() let _ = http1::Builder::new()
.serve_connection( .serve_connection(
stream, TokioIo::new(stream),
service_fn(move |req: HyperRequest<Body>| { service_fn(move |req: HyperRequest<Incoming>| {
let mut response = HyperResponse::new(Body::empty()); let mut response =
HyperResponse::new(Empty::new().map_err(|_| unreachable!()).boxed());
handler(req, &mut response); handler(req, &mut response);
ready(Ok::<_, Infallible>(response)) ready(Ok::<_, Infallible>(response))
}), }),
@ -376,7 +402,7 @@ where
} }
}; };
HANDLE.lock().unwrap().spawn(server); HANDLE.spawn(server);
( (
Server { Server {
@ -386,3 +412,9 @@ where
url, url,
) )
} }
pub fn make_body(bytes: Vec<u8>) -> BoxBody<Bytes, hyper::Error> {
Full::new(Bytes::from(bytes))
.map_err(|_| unreachable!())
.boxed()
}
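hyper 1.x no longer provides a built-in Server, so make_server drives its own accept loop through hyper-util. A rough, self-contained sketch of the same shape, assuming hyper 1.x, hyper-util 0.1 and tokio; the handler body and the bind address are placeholders:

    use std::convert::Infallible;
    use std::net::SocketAddr;

    use http_body_util::Full;
    use hyper::body::{Bytes, Incoming};
    use hyper::server::conn::http1;
    use hyper::service::service_fn;
    use hyper::{Request, Response};
    use hyper_util::rt::TokioIo;
    use hyper_util::server::graceful::GracefulShutdown;
    use tokio::net::TcpListener;

    // Accept connections until the oneshot fires, serving each one with the
    // http1 connection builder, then wait for in-flight requests to drain.
    async fn serve(addr: SocketAddr, mut stop: tokio::sync::oneshot::Receiver<()>) {
        let listener = TcpListener::bind(addr).await.unwrap();
        let graceful = GracefulShutdown::new();
        loop {
            let (stream, _) = tokio::select! {
                accepted = listener.accept() => accepted.unwrap(),
                _ = &mut stop => break,
            };
            let service = service_fn(|_req: Request<Incoming>| async {
                Ok::<_, Infallible>(Response::new(Full::new(Bytes::from_static(b"ok"))))
            });
            let conn = http1::Builder::new().serve_connection(TokioIo::new(stream), service);
            // GracefulShutdown::watch returns an owned future, so the
            // connection is driven on its own task while the loop keeps accepting.
            tokio::spawn(graceful.watch(conn));
        }
        graceful.shutdown().await;
    }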

View file

@ -23,6 +23,7 @@ headers = { workspace = true }
http = { workspace = true } http = { workspace = true }
hyper = { workspace = true } hyper = { workspace = true }
hyper_serde = { workspace = true } hyper_serde = { workspace = true }
hyper-util = { workspace = true, features = ["client-legacy"] }
ipc-channel = { workspace = true } ipc-channel = { workspace = true }
log = { workspace = true } log = { workspace = true }
malloc_size_of = { workspace = true } malloc_size_of = { workspace = true }
@ -31,7 +32,7 @@ mime = { workspace = true }
num-traits = { workspace = true } num-traits = { workspace = true }
percent-encoding = { workspace = true } percent-encoding = { workspace = true }
pixels = { path = "../../pixels" } pixels = { path = "../../pixels" }
rustls = { workspace = true } rustls-pki-types = { workspace = true }
serde = { workspace = true } serde = { workspace = true }
servo_arc = { workspace = true } servo_arc = { workspace = true }
servo_rand = { path = "../../rand" } servo_rand = { path = "../../rand" }

View file

@ -15,8 +15,8 @@ use cookie::Cookie;
use crossbeam_channel::{unbounded, Receiver, Sender}; use crossbeam_channel::{unbounded, Receiver, Sender};
use headers::{ContentType, HeaderMapExt, ReferrerPolicy as ReferrerPolicyHeader}; use headers::{ContentType, HeaderMapExt, ReferrerPolicy as ReferrerPolicyHeader};
use http::{Error as HttpError, HeaderMap, StatusCode}; use http::{Error as HttpError, HeaderMap, StatusCode};
use hyper::Error as HyperError;
use hyper_serde::Serde; use hyper_serde::Serde;
use hyper_util::client::legacy::Error as HyperError;
use ipc_channel::ipc::{self, IpcReceiver, IpcSender}; use ipc_channel::ipc::{self, IpcReceiver, IpcSender};
use ipc_channel::router::ROUTER; use ipc_channel::router::ROUTER;
use ipc_channel::Error as IpcError; use ipc_channel::Error as IpcError;
@ -24,7 +24,7 @@ use malloc_size_of::malloc_size_of_is_0;
use malloc_size_of_derive::MallocSizeOf; use malloc_size_of_derive::MallocSizeOf;
use mime::Mime; use mime::Mime;
use request::RequestId; use request::RequestId;
use rustls::Certificate; use rustls_pki_types::CertificateDer;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use servo_rand::RngCore; use servo_rand::RngCore;
use servo_url::{ImmutableOrigin, ServoUrl}; use servo_url::{ImmutableOrigin, ServoUrl};
@ -882,10 +882,10 @@ pub enum NetworkError {
} }
impl NetworkError { impl NetworkError {
pub fn from_hyper_error(error: &HyperError, certificate: Option<Certificate>) -> Self { pub fn from_hyper_error(error: &HyperError, certificate: Option<CertificateDer>) -> Self {
let error_string = error.to_string(); let error_string = error.to_string();
match certificate { match certificate {
Some(certificate) => NetworkError::SslValidation(error_string, certificate.0), Some(certificate) => NetworkError::SslValidation(error_string, certificate.to_vec()),
_ => NetworkError::Internal(error_string), _ => NetworkError::Internal(error_string),
} }
} }

View file

@ -18,7 +18,7 @@ compositing_traits = { workspace = true }
cookie = { workspace = true } cookie = { workspace = true }
crossbeam-channel = { workspace = true } crossbeam-channel = { workspace = true }
euclid = { workspace = true } euclid = { workspace = true }
http = { workspace = true } http = { version = "0.2" }
image = { workspace = true } image = { workspace = true }
ipc-channel = { workspace = true } ipc-channel = { workspace = true }
keyboard-types = { workspace = true } keyboard-types = { workspace = true }

View file

@ -155,6 +155,14 @@ skip = [
# wgpu depends on thiserror 2, while rest is still on 1 # wgpu depends on thiserror 2, while rest is still on 1
"thiserror", "thiserror",
"thiserror-impl", "thiserror-impl",
# duplicated by webdriver
"h2",
"headers",
"headers-core",
"http",
"http-body",
"hyper",
] ]
# github.com organizations to allow git sources for # github.com organizations to allow git sources for

View file

@ -65,6 +65,7 @@ getopts = { workspace = true }
hitrace = { workspace = true, optional = true } hitrace = { workspace = true, optional = true }
mime_guess = { workspace = true } mime_guess = { workspace = true }
url = { workspace = true } url = { workspace = true }
rustls = { version = "0.23", default-features = false, features = ["ring"] }
tokio = { workspace = true } tokio = { workspace = true }
tracing = { workspace = true, optional = true } tracing = { workspace = true, optional = true }
tracing-subscriber = { workspace = true, optional = true, features = ["env-filter"] } tracing-subscriber = { workspace = true, optional = true, features = ["env-filter"] }

View file

@ -16,6 +16,7 @@ use crate::panic_hook;
pub fn main() { pub fn main() {
crate::crash_handler::install(); crate::crash_handler::install();
crate::init_tracing(); crate::init_tracing();
crate::init_crypto();
crate::resources::init(); crate::resources::init();
// Parse the command line options and store them globally // Parse the command line options and store them globally

View file

@ -61,6 +61,7 @@ pub fn init(
callbacks: Box<dyn HostTrait>, callbacks: Box<dyn HostTrait>,
) -> Result<(), &'static str> { ) -> Result<(), &'static str> {
crate::init_tracing(); crate::init_tracing();
crate::init_crypto();
resources::set(Box::new(ResourceReaderInstance::new())); resources::set(Box::new(ResourceReaderInstance::new()));
if let Some(prefs) = init_opts.prefs { if let Some(prefs) = init_opts.prefs {

View file

@ -43,6 +43,7 @@ pub fn init(
) -> Result<ServoGlue, &'static str> { ) -> Result<ServoGlue, &'static str> {
info!("Entered simpleservo init function"); info!("Entered simpleservo init function");
crate::init_tracing(); crate::init_tracing();
crate::init_crypto();
let resource_dir = PathBuf::from(&options.resource_dir).join("servo"); let resource_dir = PathBuf::from(&options.resource_dir).join("servo");
resources::set(Box::new(ResourceReaderInstance::new(resource_dir))); resources::set(Box::new(ResourceReaderInstance::new(resource_dir)));
let mut args = vec!["servoshell".to_string()]; let mut args = vec!["servoshell".to_string()];

View file

@ -38,6 +38,12 @@ pub fn main() {
desktop::cli::main() desktop::cli::main()
} }
pub fn init_crypto() {
rustls::crypto::ring::default_provider()
.install_default()
.expect("Error initializing crypto provider");
}
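Because rustls 0.23 is built with default-features = false, no process-wide CryptoProvider is installed automatically; each entry point therefore calls init_crypto() before any TLS configuration is created. An embedder with its own startup path would do the equivalent (assuming the ring backend):

    fn main() {
        // Must run before any rustls ClientConfig or ServerConfig is built.
        rustls::crypto::ring::default_provider()
            .install_default()
            .expect("Error initializing crypto provider");

        // ... initialize resources, parse options, start Servo ...
    }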
pub fn init_tracing() { pub fn init_tracing() {
#[cfg(feature = "tracing")] #[cfg(feature = "tracing")]
{ {