Merge branch 'master' into tidy

commit 025b5550fc
Author: Alex Touchet
Date: 2018-09-11 09:06:42 -07:00 (committed via GitHub)
2805 changed files with 67337 additions and 36455 deletions

Cargo.lock (generated), 181 lines changed

@@ -454,6 +454,14 @@ dependencies = [
 "winapi 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
+[[package]]
+name = "cloudabi"
+version = "0.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+"bitflags 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
+]
 [[package]]
 name = "cmake"
 version = "0.1.29"
@@ -603,6 +611,18 @@ dependencies = [
 "build_const 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
+[[package]]
+name = "crossbeam-channel"
+version = "0.2.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+"crossbeam-epoch 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)",
+"crossbeam-utils 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"parking_lot 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
+"rand 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)",
+"smallvec 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
+]
 [[package]]
 name = "crossbeam-deque"
 version = "0.2.0"
@@ -626,6 +646,19 @@ dependencies = [
 "scopeguard 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
+[[package]]
+name = "crossbeam-epoch"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+"arrayvec 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
+"cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
+"crossbeam-utils 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"lazy_static 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"memoffset 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+"scopeguard 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
+]
 [[package]]
 name = "crossbeam-utils"
 version = "0.2.2"
@@ -634,6 +667,11 @@ dependencies = [
 "cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
+[[package]]
+name = "crossbeam-utils"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
 [[package]]
 name = "cssparser"
 version = "0.24.0"
@@ -899,7 +937,7 @@ version = "0.3.0"
 source = "git+https://github.com/energymon/energymon-sys.git#f8d77ea2906b25f9c0fd358aa9d300a46dc3e97c"
 dependencies = [
 "cmake 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)",
-"pkg-config 0.3.12 (registry+https://github.com/rust-lang/crates.io-index)",
+"pkg-config 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 [[package]]
@@ -973,7 +1011,7 @@ version = "2.1.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
 "cmake 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)",
-"pkg-config 0.3.12 (registry+https://github.com/rust-lang/crates.io-index)",
+"pkg-config 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 [[package]]
@@ -1191,7 +1229,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
 "bitflags 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
 "libc 0.2.42 (registry+https://github.com/rust-lang/crates.io-index)",
-"pkg-config 0.3.12 (registry+https://github.com/rust-lang/crates.io-index)",
+"pkg-config 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 [[package]]
@@ -1218,7 +1256,7 @@ dependencies = [
 "wayland-client 0.20.10 (registry+https://github.com/rust-lang/crates.io-index)",
 "winapi 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
 "winit 0.17.0 (registry+https://github.com/rust-lang/crates.io-index)",
-"x11-dl 2.18.1 (registry+https://github.com/rust-lang/crates.io-index)",
+"x11-dl 2.18.3 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 [[package]]
@@ -1238,7 +1276,7 @@ dependencies = [
 "bitflags 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
 "glib-sys 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "libc 0.2.42 (registry+https://github.com/rust-lang/crates.io-index)",
-"pkg-config 0.3.12 (registry+https://github.com/rust-lang/crates.io-index)",
+"pkg-config 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 [[package]]
@@ -1284,7 +1322,7 @@ dependencies = [
 "gstreamer-base-sys 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "gstreamer-sys 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "libc 0.2.42 (registry+https://github.com/rust-lang/crates.io-index)",
-"pkg-config 0.3.12 (registry+https://github.com/rust-lang/crates.io-index)",
+"pkg-config 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 [[package]]
@@ -1313,7 +1351,7 @@ dependencies = [
 "gstreamer-base-sys 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "gstreamer-sys 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "libc 0.2.42 (registry+https://github.com/rust-lang/crates.io-index)",
-"pkg-config 0.3.12 (registry+https://github.com/rust-lang/crates.io-index)",
+"pkg-config 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 [[package]]
@@ -1340,7 +1378,7 @@ dependencies = [
 "gobject-sys 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "gstreamer-sys 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "libc 0.2.42 (registry+https://github.com/rust-lang/crates.io-index)",
-"pkg-config 0.3.12 (registry+https://github.com/rust-lang/crates.io-index)",
+"pkg-config 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 [[package]]
@@ -1369,7 +1407,7 @@ dependencies = [
 "gstreamer-sys 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "gstreamer-video-sys 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "libc 0.2.42 (registry+https://github.com/rust-lang/crates.io-index)",
-"pkg-config 0.3.12 (registry+https://github.com/rust-lang/crates.io-index)",
+"pkg-config 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 [[package]]
@@ -1381,7 +1419,7 @@ dependencies = [
 "glib-sys 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "gobject-sys 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "libc 0.2.42 (registry+https://github.com/rust-lang/crates.io-index)",
-"pkg-config 0.3.12 (registry+https://github.com/rust-lang/crates.io-index)",
+"pkg-config 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 [[package]]
@@ -1412,7 +1450,7 @@ dependencies = [
 "gstreamer-base-sys 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "gstreamer-sys 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "libc 0.2.42 (registry+https://github.com/rust-lang/crates.io-index)",
-"pkg-config 0.3.12 (registry+https://github.com/rust-lang/crates.io-index)",
+"pkg-config 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 [[package]]
@@ -1432,7 +1470,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
 "cmake 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)",
 "freetype 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
-"pkg-config 0.3.12 (registry+https://github.com/rust-lang/crates.io-index)",
+"pkg-config 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 [[package]]
@@ -1459,7 +1497,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
 "cmake 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)",
 "libc 0.2.42 (registry+https://github.com/rust-lang/crates.io-index)",
-"pkg-config 0.3.12 (registry+https://github.com/rust-lang/crates.io-index)",
+"pkg-config 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 [[package]]
@@ -1623,12 +1661,29 @@ dependencies = [
 "fnv 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
 "lazy_static 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "libc 0.2.42 (registry+https://github.com/rust-lang/crates.io-index)",
-"mio 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)",
+"mio 0.6.15 (registry+https://github.com/rust-lang/crates.io-index)",
 "rand 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)",
 "serde 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)",
 "uuid 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
+[[package]]
+name = "ipc-channel"
+version = "0.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+"bincode 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"crossbeam-channel 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)",
+"fnv 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
+"lazy_static 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"libc 0.2.42 (registry+https://github.com/rust-lang/crates.io-index)",
+"mio 0.6.15 (registry+https://github.com/rust-lang/crates.io-index)",
+"rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
+"serde 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)",
+"tempfile 3.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+"uuid 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
+]
 [[package]]
 name = "itertools"
 version = "0.7.6"
@@ -1844,7 +1899,7 @@ name = "libdbus-sys"
 version = "0.1.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
-"pkg-config 0.3.12 (registry+https://github.com/rust-lang/crates.io-index)",
+"pkg-config 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 [[package]]
@@ -1921,7 +1976,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
 "cc 1.0.18 (registry+https://github.com/rust-lang/crates.io-index)",
 "libc 0.2.42 (registry+https://github.com/rust-lang/crates.io-index)",
-"pkg-config 0.3.12 (registry+https://github.com/rust-lang/crates.io-index)",
+"pkg-config 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
 "vcpkg 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@@ -2135,7 +2190,7 @@ dependencies = [
 [[package]]
 name = "mio"
-version = "0.6.12"
+version = "0.6.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
 "fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -2144,10 +2199,10 @@ dependencies = [
 "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "lazycell 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "libc 0.2.42 (registry+https://github.com/rust-lang/crates.io-index)",
-"log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
+"log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "miow 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
-"net2 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)",
-"slab 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"net2 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)",
+"slab 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@@ -2157,7 +2212,7 @@ version = "0.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
 "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
-"net2 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)",
+"net2 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)",
 "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
 "ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@@ -2285,14 +2340,12 @@ dependencies = [
 [[package]]
 name = "net2"
-version = "0.2.29"
+version = "0.2.33"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
 "cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
-"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "libc 0.2.42 (registry+https://github.com/rust-lang/crates.io-index)",
-"winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
-"ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+"winapi 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 [[package]]
@@ -2491,7 +2544,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
 "cc 1.0.18 (registry+https://github.com/rust-lang/crates.io-index)",
 "libc 0.2.42 (registry+https://github.com/rust-lang/crates.io-index)",
-"pkg-config 0.3.12 (registry+https://github.com/rust-lang/crates.io-index)",
+"pkg-config 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
 "vcpkg 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@@ -2566,7 +2619,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 [[package]]
 name = "petgraph"
-version = "0.4.12"
+version = "0.4.13"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
 "fixedbitset 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -2610,7 +2663,7 @@ dependencies = [
 [[package]]
 name = "pkg-config"
-version = "0.3.12"
+version = "0.3.14"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 [[package]]
@@ -2752,6 +2805,23 @@ dependencies = [
 "winapi 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
+[[package]]
+name = "rand"
+version = "0.5.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+"cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
+"fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
+"libc 0.2.42 (registry+https://github.com/rust-lang/crates.io-index)",
+"rand_core 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+"winapi 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+[[package]]
+name = "rand_core"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
 [[package]]
 name = "range"
 version = "0.0.1"
@@ -2780,7 +2850,7 @@ dependencies = [
 "lazy_static 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "libc 0.2.42 (registry+https://github.com/rust-lang/crates.io-index)",
 "num_cpus 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
-"rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
+"rand 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 [[package]]
@@ -3182,7 +3252,7 @@ version = "4.0.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
 "expat-sys 2.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
-"pkg-config 0.3.12 (registry+https://github.com/rust-lang/crates.io-index)",
+"pkg-config 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
 "servo-freetype-sys 4.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@@ -3192,13 +3262,13 @@ version = "4.0.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
 "cmake 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)",
-"pkg-config 0.3.12 (registry+https://github.com/rust-lang/crates.io-index)",
+"pkg-config 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 [[package]]
 name = "servo-media"
 version = "0.1.0"
-source = "git+https://github.com/servo/media#6ecac1c6259b3995e8d6a368e49777e5c2d398ae"
+source = "git+https://github.com/servo/media#44ad355b020168e78ab32db2c6f5286e7db2ba77"
 dependencies = [
 "servo-media-audio 0.1.0 (git+https://github.com/servo/media)",
 "servo-media-gstreamer 0.1.0 (git+https://github.com/servo/media)",
@@ -3208,12 +3278,12 @@ dependencies = [
 [[package]]
 name = "servo-media-audio"
 version = "0.1.0"
-source = "git+https://github.com/servo/media#6ecac1c6259b3995e8d6a368e49777e5c2d398ae"
+source = "git+https://github.com/servo/media#44ad355b020168e78ab32db2c6f5286e7db2ba77"
 dependencies = [
 "byte-slice-cast 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "euclid 0.19.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "num-traits 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)",
-"petgraph 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)",
+"petgraph 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)",
 "servo_media_derive 0.1.0 (git+https://github.com/servo/media)",
 "smallvec 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@@ -3221,7 +3291,7 @@ dependencies = [
 [[package]]
 name = "servo-media-gstreamer"
 version = "0.1.0"
-source = "git+https://github.com/servo/media#6ecac1c6259b3995e8d6a368e49777e5c2d398ae"
+source = "git+https://github.com/servo/media#44ad355b020168e78ab32db2c6f5286e7db2ba77"
 dependencies = [
 "byte-slice-cast 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "glib 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -3229,7 +3299,7 @@ dependencies = [
 "gstreamer-app 0.11.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "gstreamer-audio 0.11.3 (registry+https://github.com/rust-lang/crates.io-index)",
 "gstreamer-player 0.11.3 (registry+https://github.com/rust-lang/crates.io-index)",
-"ipc-channel 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"ipc-channel 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "regex 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "servo-media-audio 0.1.0 (git+https://github.com/servo/media)",
 "servo-media-player 0.1.0 (git+https://github.com/servo/media)",
@@ -3239,9 +3309,9 @@ dependencies = [
 [[package]]
 name = "servo-media-player"
 version = "0.1.0"
-source = "git+https://github.com/servo/media#6ecac1c6259b3995e8d6a368e49777e5c2d398ae"
+source = "git+https://github.com/servo/media#44ad355b020168e78ab32db2c6f5286e7db2ba77"
 dependencies = [
-"ipc-channel 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"ipc-channel 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "serde 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)",
 "serde_derive 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@@ -3325,7 +3395,7 @@ dependencies = [
 [[package]]
 name = "servo_media_derive"
 version = "0.1.0"
-source = "git+https://github.com/servo/media#6ecac1c6259b3995e8d6a368e49777e5c2d398ae"
+source = "git+https://github.com/servo/media#44ad355b020168e78ab32db2c6f5286e7db2ba77"
 dependencies = [
 "quote 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "syn 0.13.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -3400,6 +3470,11 @@ name = "slab"
 version = "0.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
+[[package]]
+name = "slab"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
 [[package]]
 name = "smallbitvec"
 version = "2.1.1"
@@ -4115,7 +4190,7 @@ dependencies = [
 "smithay-client-toolkit 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
 "wayland-client 0.20.10 (registry+https://github.com/rust-lang/crates.io-index)",
 "winapi 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
-"x11-dl 2.18.1 (registry+https://github.com/rust-lang/crates.io-index)",
+"x11-dl 2.18.3 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 [[package]]
@@ -4135,7 +4210,7 @@ dependencies = [
 "bytes 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
 "httparse 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
-"mio 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)",
+"mio 0.6.15 (registry+https://github.com/rust-lang/crates.io-index)",
 "openssl 0.9.24 (registry+https://github.com/rust-lang/crates.io-index)",
 "rand 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)",
 "sha1 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -4158,7 +4233,7 @@ version = "2.17.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
 "libc 0.2.42 (registry+https://github.com/rust-lang/crates.io-index)",
-"pkg-config 0.3.12 (registry+https://github.com/rust-lang/crates.io-index)",
+"pkg-config 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 [[package]]
@@ -4172,12 +4247,12 @@ dependencies = [
 [[package]]
 name = "x11-dl"
-version = "2.18.1"
+version = "2.18.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
 "lazy_static 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "libc 0.2.42 (registry+https://github.com/rust-lang/crates.io-index)",
-"pkg-config 0.3.12 (registry+https://github.com/rust-lang/crates.io-index)",
+"pkg-config 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 [[package]]
@@ -4276,6 +4351,7 @@ dependencies = [
 "checksum clap 2.28.0 (registry+https://github.com/rust-lang/crates.io-index)" = "dc34bf7d5d66268b466b9852bca925ec1d2650654dab4da081e63fd230145c2e"
 "checksum clipboard 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "b9b4623b47d8637fc9d47564583d4cc01eb8c8e34e26b2bf348bf4b036acb657"
 "checksum clipboard-win 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "14cc3e6c075926b96490d5f90d4a5af7be8012a4d8a8698e619655085a7641a3"
+"checksum cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f"
 "checksum cmake 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)" = "56d741ea7a69e577f6d06b36b7dff4738f680593dc27a701ffa8506b73ce28bb"
 "checksum cocoa 0.17.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f5cd1afb83b2de9c41e5dfedb2bcccb779d433b958404876009ae4b01746ff23"
 "checksum color_quant 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a475fc4af42d83d28adf72968d9bcfaf035a1a9381642d8e85d8a04957767b0d"
@@ -4286,9 +4362,12 @@ dependencies = [
 "checksum core-graphics 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)" = "92801c908ea6301ae619ed842a72e01098085fc321b9c2f3f833dad555bba055"
 "checksum core-text 11.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "157ff38a92496dc676ce36d9124554e9ac66f1c1039f952690ac64f71cfa5968"
 "checksum crc 1.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d663548de7f5cca343f1e0a48d14dcfb0e9eb4e079ec58883b7251539fa10aeb"
+"checksum crossbeam-channel 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)" = "6c0a94250b0278d7fc5a894c3d276b11ea164edc8bf8feb10ca1ea517b44a649"
 "checksum crossbeam-deque 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f739f8c5363aca78cfb059edf753d8f0d36908c348f3d8d1503f03d8b75d9cf3"
 "checksum crossbeam-epoch 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "927121f5407de9956180ff5e936fe3cf4324279280001cd56b669d28ee7e9150"
+"checksum crossbeam-epoch 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "30fecfcac6abfef8771151f8be4abc9e4edc112c2bcb233314cafde2680536e9"
 "checksum crossbeam-utils 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "2760899e32a1d58d5abb31129f8fae5de75220bc2176e77ff7c627ae45c918d9"
+"checksum crossbeam-utils 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "677d453a17e8bd2b913fa38e8b9cf04bcdbb5be790aa294f2389661d72036015"
 "checksum cssparser 0.24.0 (registry+https://github.com/rust-lang/crates.io-index)" = "495beddc39b1987b8e9f029354eccbd5ef88eb5f1cd24badb764dce338acf2e0"
 "checksum cssparser-macros 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "f3a5383ae18dbfdeb569ed62019f5bddb2a95cd2d3833313c475a0d014777805"
 "checksum darling 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2a78af487e4eb8f4421a1770687b328af6bb4494ca93435210678c6eea875c11"
@@ -4373,6 +4452,7 @@ dependencies = [
 "checksum io-surface 0.11.1 (registry+https://github.com/rust-lang/crates.io-index)" = "5f9a33981dff54baaff80f4decb487a65d148a3c00facc97820d0f09128f74dd"
 "checksum iovec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "dbe6e417e7d0975db6512b90796e8ce223145ac4e33c377e4a42882a0e88bb08"
 "checksum ipc-channel 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "db9daf099728ac5390c73f54e6e3708f0c514d2b51f24373830f568702eadfca"
+"checksum ipc-channel 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "dd75debad4ffd295c00c6e3634d254df30050b0837a85e5cd039ac424365f24a"
 "checksum itertools 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)" = "b07332223953b5051bceb67e8c4700aa65291535568e1f12408c43c4a42c0394"
 "checksum itoa 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c069bbec61e1ca5a596166e55dfe4773ff745c3d16b700013bcaff9a6df2c682"
 "checksum jemalloc-sys 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "479294d130502fada93c7a957e8d059b632b03d6204aca37af557dee947f30a9"
@@ -4406,7 +4486,7 @@ dependencies = [
 "checksum miniz-sys 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "609ce024854aeb19a0ef7567d348aaa5a746b32fb72e336df7fcc16869d7e2b4"
 "checksum miniz_oxide 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "9ba430291c9d6cedae28bcd2d49d1c32fc57d60cd49086646c5dd5673a870eb5"
 "checksum miniz_oxide_c_api 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "5a5b8234d6103ebfba71e29786da4608540f862de5ce980a1c94f86a40ca0d51"
-"checksum mio 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)" = "75f72a93f046f1517e3cfddc0a096eb756a2ba727d36edc8227dee769a50a9b0"
+"checksum mio 0.6.15 (registry+https://github.com/rust-lang/crates.io-index)" = "4fcfcb32d63961fb6f367bfd5d21e4600b92cd310f71f9dca25acae196eb1560"
 "checksum miow 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1f2f3b1cf331de6896aabf6e9d55dca90356cc9960cca7eaaf408a355ae919"
 "checksum mitochondria 1.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9de3eca27871df31c33b807f834b94ef7d000956f57aa25c5aed9c5f0aae8f6f"
 "checksum mozangle 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "45a8a18a41cfab0fde25cc2f43ea89064d211a0fbb33225b8ff93ab20406e0e7"
@@ -4416,7 +4496,7 @@ dependencies = [
 "checksum mp4parse 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7316728464443fe5793a805dde3257864e9690cf46374daff3ce93de1df2f254"
 "checksum msdos_time 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "aad9dfe950c057b1bfe9c1f2aa51583a8468ef2a5baba2ebbe06d775efeb7729"
 "checksum muldiv 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1cbef5aa2e8cd82a18cc20e26434cc9843e1ef46e55bfabe5bddb022236c5b3e"
-"checksum net2 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)" = "bc01404e7568680f1259aa5729539f221cb1e6d047a0d9053cab4be8a73b5d67"
+"checksum net2 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)" = "42550d9fb7b6684a6d404d9fa7250c2eb2646df731d1c06afc06dcee9e1bcf88"
 "checksum new-ordered-float 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8ccbebba6fb53a6d2bdcfaf79cb339bc136dee3bfff54dc337a334bafe36476a"
 "checksum new_debug_unreachable 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0cdc457076c78ab54d5e0d6fa7c47981757f1e34dc39ff92787f217dede586c4"
 "checksum nix 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d37e713a259ff641624b6cb20e3b12b2952313ba36b6823c0f16e6cfd9e5de17"
@@ -4446,12 +4526,12 @@ dependencies = [
 "checksum parking_lot_core 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)" = "538ef00b7317875071d5e00f603f24d16f0b474c1a5fc0ccb8b454ca72eafa79"
 "checksum peeking_take_while 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099"
 "checksum percent-encoding 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "de154f638187706bde41d9b4738748933d64e6b37bdbffc0b47a97d16a6ae356"
-"checksum petgraph 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)" = "8b30dc85588cd02b9b76f5e386535db546d21dc68506cff2abebee0b6445e8e4"
+"checksum petgraph 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)" = "9c3659d1ee90221741f65dd128d9998311b0e40c5d3c23a62445938214abce4f"
 "checksum phf 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "cb325642290f28ee14d8c6201159949a872f220c62af6e110a56ea914fbe42fc"
 "checksum phf_codegen 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "d62594c0bb54c464f633175d502038177e90309daf2e0158be42ed5f023ce88f"
 "checksum phf_generator 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "6b07ffcc532ccc85e3afc45865469bf5d9e4ef5bfcf9622e3cfe80c2d275ec03"
 "checksum phf_shared 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "07e24b0ca9643bdecd0632f2b3da6b1b89bbb0030e0b992afc1113b23a7bc2f2"
-"checksum pkg-config 0.3.12 (registry+https://github.com/rust-lang/crates.io-index)" = "6a52e4dbc8354505ee07e484ab07127e06d87ca6fa7f0a516a2b294e5ad5ad16"
+"checksum pkg-config 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)" = "676e8eb2b1b4c9043511a9b7bea0915320d7e502b0a079fb03f9635a5252b18c"
 "checksum plane-split 0.12.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ff3a4fc9e31d70eb6828e9a2d7a401a824d9f281686a39a8fc06f08796edb1bb"
 "checksum png 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f54b9600d584d3b8a739e1662a595fab051329eff43f20e7d8cc22872962145b"
 "checksum podio 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "780fb4b6698bbf9cf2444ea5d22411cef2953f0824b98f33cf454ec5615645bd"
@@ -4464,6 +4544,8 @@ dependencies = [
 "checksum quote 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)" = "e44651a0dc4cdd99f71c83b561e221f714912d11af1a4dff0631f923d53af035"
 "checksum rand 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)" = "15a732abf9d20f0ad8eeb6f909bf6868722d9a06e1e50802b6a70351f40b4eb1"
 "checksum rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "eba5f8cb59cc50ed56be8880a5c7b496bfd9bd26394e176bc67884094145c2c5"
+"checksum rand 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)" = "e464cd887e869cddcae8792a4ee31d23c7edd516700695608f5b98c67ee0131c"
+"checksum rand_core 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "edecf0f94da5551fc9b492093e30b041a891657db7940ee221f9d2f66e82eef2"
 "checksum rayon 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "485541959c8ecc49865526fe6c4de9653dd6e60d829d6edf0be228167b60372d"
 "checksum rayon-core 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9d24ad214285a7729b174ed6d3bcfcb80177807f959d95fafd5bfc5c4f201ac8"
 "checksum redox_syscall 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)" = "29dbdfd4b9df8ab31dec47c6087b7b13cbf4a776f335e4de8efba8288dda075b"
@@ -4504,6 +4586,7 @@ dependencies = [
 "checksum signpost 0.1.0 (git+https://github.com/pcwalton/signpost.git)" = "<none>"
 "checksum siphasher 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "0df90a788073e8d0235a67e50441d47db7c8ad9debd91cbf43736a2a92d36537"
 "checksum slab 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "17b4fcaed89ab08ef143da37bc52adbcc04d4a69014f4c1208d6b51f0c47bc23"
+"checksum slab 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "5f9776d6b986f77b35c6cf846c11ad986ff128fe0b2b63a3628e3755e8d3102d"
 "checksum smallbitvec 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "5c63726029f0069f88467873e47f392575f28f9f16b72ac65465263db4b3a13c"
 "checksum smallvec 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)" = "26df3bb03ca5eac2e64192b723d51f56c1b1e0860e7c766281f4598f181acdc8"
 "checksum smithay-client-toolkit 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "2051bffc6cbf271176e8ba1527f801b6444567daee15951ff5152aaaf7777b2f"
@@ -4573,7 +4656,7 @@ dependencies = [
 "checksum ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d59cefebd0c892fa2dd6de581e937301d8552cb44489cdff035c6187cb63fa5e"
 "checksum x11 2.17.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7e5c4ac579b5d324dc4add02312b5d0e3e0218521e2d5779d526ac39ee4bb171"
 "checksum x11-clipboard 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "2e7374c7699210cca7084ca61d57e09640fc744d1391808cb9ae2fe4ca9bd1df"
-"checksum x11-dl 2.18.1 (registry+https://github.com/rust-lang/crates.io-index)" = "966f78e9291e51d573bd3dd9287b285c0265daa8aa9fbe74c370467baa360c4e"
+"checksum x11-dl 2.18.3 (registry+https://github.com/rust-lang/crates.io-index)" = "940586acb859ea05c53971ac231685799a7ec1dee66ac0bccc0e6ad96e06b4e3"
 "checksum xcb 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)" = "5e917a3f24142e9ff8be2414e36c649d47d6cc2ba81f16201cdef96e533e02de"
 "checksum xi-unicode 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "12ea8eda4b1eb72f02d148402e23832d56a33f55d8c1b2d5bcdde91d79d47cb1"
 "checksum xml-rs 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3c1cb601d29fe2c2ac60a2b2e5e293994d87a1f6fa9687a31a15270f909be9c2"


@@ -101,9 +101,10 @@ If `virtualenv` does not exist, try `python-virtualenv`.
 ``` sh
 sudo dnf install curl libtool gcc-c++ libXi-devel \
 freetype-devel mesa-libGL-devel mesa-libEGL-devel glib2-devel libX11-devel libXrandr-devel gperf \
-fontconfig-devel cabextract ttmkfdir python python-virtualenv python-pip expat-devel \
+fontconfig-devel cabextract ttmkfdir python2 python2-virtualenv python2-pip expat-devel \
 rpm-build openssl-devel cmake bzip2-devel libXcursor-devel libXmu-devel mesa-libOSMesa-devel \
-dbus-devel ncurses-devel harfbuzz-devel ccache mesa-libGLU-devel clang clang-libs gstreamer autoconf213
+dbus-devel ncurses-devel harfbuzz-devel ccache mesa-libGLU-devel clang clang-libs gstreamer1-devel \
+gstreamer1-plugins-base-devel gstreamer1-plugins-bad-free-devel autoconf213
 ```
 #### On CentOS
@@ -138,7 +139,7 @@ sudo zypper install libX11-devel libexpat-devel libbz2-devel Mesa-libEGL-devel M
 ``` sh
 sudo pacman -S --needed base-devel git python2 python2-virtualenv python2-pip mesa cmake bzip2 libxmu glu \
-pkg-config ttf-fira-sans harfbuzz ccache clang
+pkg-config ttf-fira-sans harfbuzz ccache clang autoconf2.13
 ```
 #### On Gentoo Linux


@@ -44,7 +44,7 @@ cache:
 - .ccache
 install:
-- choco install pkgconfiglite
+- appveyor-retry choco install pkgconfiglite
 - appveyor-retry appveyor DownloadFile https://gstreamer.freedesktop.org/data/pkg/windows/1.14.1/gstreamer-1.0-devel-x86_64-1.14.1.msi
 - appveyor-retry appveyor DownloadFile https://gstreamer.freedesktop.org/data/pkg/windows/1.14.1/gstreamer-1.0-x86_64-1.14.1.msi
 - msiexec /i gstreamer-1.0-devel-x86_64-1.14.1.msi /quiet /qn /norestart /log install-devel.log
@@ -54,8 +54,6 @@ install:
 - rustup-init.exe -y --default-host x86_64-pc-windows-msvc --default-toolchain none
 - set PATH=%PATH%;C:\Users\appveyor\.cargo\bin
 - set PKG_CONFIG_PATH=%PKG_CONFIG_PATH%;C:\gstreamer\1.0\x86_64\lib\pkgconfig
-- set LIB=%LIB%;C:\gstreamer\1.0\x86_64\lib
-- set LIBPATH=C:\gstreamer\1.0\x86_64\lib;%LIBPATH%
 - rustup -V
 - mach rustc --version
 - mach cargo --version


@@ -36,6 +36,7 @@ invalid
 keydown
 keypress
 left
+ltr
 load
 loadeddata
 loadedmetadata
@@ -65,6 +66,7 @@ readystatechange
 reftest-wait
 reset
 right
+rtl
 sans-serif
 scan
 screen


@@ -22,23 +22,38 @@ pub struct WebGLThreads(WebGLSender<WebGLMsg>);
 impl WebGLThreads {
     /// Creates a new WebGLThreads object
-    pub fn new(gl_factory: GLContextFactory,
-               webrender_gl: Rc<gl::Gl>,
-               webrender_api_sender: webrender_api::RenderApiSender,
-               webvr_compositor: Option<Box<WebVRRenderHandler>>)
-               -> (WebGLThreads, Box<webrender::ExternalImageHandler>, Option<Box<webrender::OutputImageHandler>>) {
+    pub fn new(
+        gl_factory: GLContextFactory,
+        webrender_gl: Rc<gl::Gl>,
+        webrender_api_sender: webrender_api::RenderApiSender,
+        webvr_compositor: Option<Box<WebVRRenderHandler>>,
+    ) -> (
+        WebGLThreads,
+        Box<webrender::ExternalImageHandler>,
+        Option<Box<webrender::OutputImageHandler>>,
+    ) {
         // This implementation creates a single `WebGLThread` for all the pipelines.
-        let channel = WebGLThread::start(gl_factory,
-                                         webrender_api_sender,
-                                         webvr_compositor.map(|c| WebVRRenderWrapper(c)),
-                                         PhantomData);
+        let channel = WebGLThread::start(
+            gl_factory,
+            webrender_api_sender,
+            webvr_compositor.map(|c| WebVRRenderWrapper(c)),
+            PhantomData,
+        );
         let output_handler = if PREFS.is_dom_to_texture_enabled() {
-            Some(Box::new(OutputHandler::new(webrender_gl.clone(), channel.clone())))
+            Some(Box::new(OutputHandler::new(
+                webrender_gl.clone(),
+                channel.clone(),
+            )))
         } else {
             None
         };
-        let external = WebGLExternalImageHandler::new(WebGLExternalImages::new(webrender_gl, channel.clone()));
-        (WebGLThreads(channel), Box::new(external), output_handler.map(|b| b as Box<_>))
+        let external =
+            WebGLExternalImageHandler::new(WebGLExternalImages::new(webrender_gl, channel.clone()));
+        (
+            WebGLThreads(channel),
+            Box::new(external),
+            output_handler.map(|b| b as Box<_>),
+        )
     }
     /// Gets the WebGLThread handle for each script pipeline.
@@ -49,7 +64,9 @@ impl WebGLThreads {
     /// Sends a exit message to close the WebGLThreads and release all WebGLContexts.
     pub fn exit(&self) -> Result<(), &'static str> {
-        self.0.send(WebGLMsg::Exit).map_err(|_| "Failed to send Exit message")
+        self.0
+            .send(WebGLMsg::Exit)
+            .map_err(|_| "Failed to send Exit message")
     }
 }
@@ -58,7 +75,10 @@ struct WebGLExternalImages {
     webrender_gl: Rc<gl::Gl>,
     webgl_channel: WebGLSender<WebGLMsg>,
     // Used to avoid creating a new channel on each received WebRender request.
-    lock_channel: (WebGLSender<(u32, Size2D<i32>, usize)>, WebGLReceiver<(u32, Size2D<i32>, usize)>),
+    lock_channel: (
+        WebGLSender<(u32, Size2D<i32>, usize)>,
+        WebGLReceiver<(u32, Size2D<i32>, usize)>,
+    ),
 }
 impl WebGLExternalImages {
@@ -75,12 +95,15 @@ impl WebGLExternalImageApi for WebGLExternalImages {
     fn lock(&mut self, ctx_id: WebGLContextId) -> (u32, Size2D<i32>) {
         // WebGL Thread has it's own GL command queue that we need to synchronize with the WR GL command queue.
         // The WebGLMsg::Lock message inserts a fence in the WebGL command queue.
-        self.webgl_channel.send(WebGLMsg::Lock(ctx_id, self.lock_channel.0.clone())).unwrap();
+        self.webgl_channel
+            .send(WebGLMsg::Lock(ctx_id, self.lock_channel.0.clone()))
+            .unwrap();
         let (image_id, size, gl_sync) = self.lock_channel.1.recv().unwrap();
         // The next glWaitSync call is run on the WR thread and it's used to synchronize the two
         // flows of OpenGL commands in order to avoid WR using a semi-ready WebGL texture.
         // glWaitSync doesn't block WR thread, it affects only internal OpenGL subsystem.
-        self.webrender_gl.wait_sync(gl_sync as gl::GLsync, 0, gl::TIMEOUT_IGNORED);
+        self.webrender_gl
+            .wait_sync(gl_sync as gl::GLsync, 0, gl::TIMEOUT_IGNORED);
         (image_id, size)
     }
@@ -92,11 +115,17 @@ impl WebGLExternalImageApi for WebGLExternalImages {
 /// Custom observer used in a `WebGLThread`.
 impl WebGLThreadObserver for PhantomData<()> {
     fn on_context_create(&mut self, ctx_id: WebGLContextId, texture_id: u32, size: Size2D<i32>) {
-        debug!("WebGLContext created (ctx_id: {:?} texture_id: {:?} size: {:?}", ctx_id, texture_id, size);
+        debug!(
+            "WebGLContext created (ctx_id: {:?} texture_id: {:?} size: {:?}",
+            ctx_id, texture_id, size
+        );
     }
     fn on_context_resize(&mut self, ctx_id: WebGLContextId, texture_id: u32, size: Size2D<i32>) {
-        debug!("WebGLContext resized (ctx_id: {:?} texture_id: {:?} size: {:?}", ctx_id, texture_id, size);
+        debug!(
+            "WebGLContext resized (ctx_id: {:?} texture_id: {:?} size: {:?}",
+            ctx_id, texture_id, size
+        );
     }
     fn on_context_delete(&mut self, ctx_id: WebGLContextId) {
@@ -104,7 +133,6 @@ impl WebGLThreadObserver for PhantomData<()> {
     }
 }
 /// Wrapper to send WebVR commands used in `WebGLThread`.
 struct WebVRRenderWrapper(Box<WebVRRenderHandler>);
@@ -120,7 +148,10 @@ struct OutputHandler {
     webrender_gl: Rc<gl::Gl>,
     webgl_channel: WebGLSender<WebGLMsg>,
     // Used to avoid creating a new channel on each received WebRender request.
-    lock_channel: (WebGLSender<OutputHandlerData>, WebGLReceiver<OutputHandlerData>),
+    lock_channel: (
+        WebGLSender<OutputHandlerData>,
+        WebGLReceiver<OutputHandlerData>,
+    ),
     sync_objects: FnvHashMap<webrender_api::PipelineId, gl::GLsync>,
 }
@@ -137,14 +168,24 @@ impl OutputHandler {
 /// Bridge between the WR frame outputs and WebGL to implement DOMToTexture synchronization.
 impl webrender::OutputImageHandler for OutputHandler {
-    fn lock(&mut self, id: webrender_api::PipelineId) -> Option<(u32, webrender_api::DeviceIntSize)> {
+    fn lock(
+        &mut self,
+        id: webrender_api::PipelineId,
+    ) -> Option<(u32, webrender_api::DeviceIntSize)> {
         // Insert a fence in the WR command queue
-        let gl_sync = self.webrender_gl.fence_sync(gl::SYNC_GPU_COMMANDS_COMPLETE, 0);
+        let gl_sync = self
+            .webrender_gl
+            .fence_sync(gl::SYNC_GPU_COMMANDS_COMPLETE, 0);
         // The lock command adds a WaitSync call on the WebGL command flow.
         let command = DOMToTextureCommand::Lock(id, gl_sync as usize, self.lock_channel.0.clone());
-        self.webgl_channel.send(WebGLMsg::DOMToTextureCommand(command)).unwrap();
+        self.webgl_channel
+            .send(WebGLMsg::DOMToTextureCommand(command))
+            .unwrap();
         self.lock_channel.1.recv().unwrap().map(|(tex_id, size)| {
-            (tex_id, webrender_api::DeviceIntSize::new(size.width, size.height))
+            (
+                tex_id,
+                webrender_api::DeviceIntSize::new(size.width, size.height),
+            )
         })
     }
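The `lock()` implementations above are request/reply handshakes: the caller sends a lock message together with a reply sender, the WebGL thread answers with a texture id, its size and a `GLsync` fence, and the caller then issues `wait_sync` on its own GL stream before WebRender samples the texture. The sketch below models only the channel handshake, using `std::sync::mpsc` and a plain `usize` in place of the real `GLsync` handle; the names and values are illustrative stand-ins, not code from this commit.

```rust
use std::sync::mpsc;
use std::thread;

// Stand-in for the data the WebGL thread returns on a lock request:
// (texture id, texture size, opaque GL fence handle).
type LockReply = (u32, (i32, i32), usize);

enum Msg {
    // Lock carries the sender the WebGL thread should reply on.
    Lock(u64 /* context id */, mpsc::Sender<LockReply>),
    Exit,
}

fn main() {
    let (webgl_chan, webgl_port) = mpsc::channel();

    // Stand-in for the single WebGL thread that serves every pipeline.
    let webgl_thread = thread::spawn(move || {
        while let Ok(msg) = webgl_port.recv() {
            match msg {
                Msg::Lock(ctx_id, reply) => {
                    // Real code would insert a glFenceSync in its command
                    // queue here and return it; we return a dummy handle.
                    reply.send((ctx_id as u32, (512, 512), 0xdead)).unwrap();
                },
                Msg::Exit => break,
            }
        }
    });

    // Caller side, the equivalent of a lock() above.
    let (lock_tx, lock_rx) = mpsc::channel();
    webgl_chan.send(Msg::Lock(7, lock_tx)).unwrap();
    let (texture_id, size, _gl_sync) = lock_rx.recv().unwrap();
    // Real code would now call wait_sync(_gl_sync, ...) on its own GL stream
    // before letting WebRender touch the texture.
    println!("locked texture {} of size {:?}", texture_id, size);

    webgl_chan.send(Msg::Exit).unwrap();
    webgl_thread.join().unwrap();
}
```

The important property is that the lock call blocks until the WebGL thread has queued its fence, which is what keeps the two GL command streams ordered.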

@@ -7,8 +7,8 @@ use canvas_traits::webgl::*;
 use euclid::Size2D;
 use fnv::FnvHashMap;
 use gleam::gl;
+use ipc_channel::ipc::IpcBytesSender;
 use offscreen_gl_context::{GLContext, GLContextAttributes, GLLimits, NativeGLContextMethods};
-use serde_bytes::ByteBuf;
 use std::thread;
 use super::gl_context::{GLContextFactory, GLContextWrapper};
 use webrender;
@@ -659,10 +659,12 @@ impl WebGLImpl {
                 ctx.gl().blend_func(src, dest),
             WebGLCommand::BlendFuncSeparate(src_rgb, dest_rgb, src_alpha, dest_alpha) =>
                 ctx.gl().blend_func_separate(src_rgb, dest_rgb, src_alpha, dest_alpha),
-            WebGLCommand::BufferData(buffer_type, ref data, usage) =>
-                gl::buffer_data(ctx.gl(), buffer_type, data, usage),
-            WebGLCommand::BufferSubData(buffer_type, offset, ref data) =>
-                gl::buffer_sub_data(ctx.gl(), buffer_type, offset, data),
+            WebGLCommand::BufferData(buffer_type, ref receiver, usage) => {
+                gl::buffer_data(ctx.gl(), buffer_type, &receiver.recv().unwrap(), usage)
+            },
+            WebGLCommand::BufferSubData(buffer_type, offset, ref receiver) => {
+                gl::buffer_sub_data(ctx.gl(), buffer_type, offset, &receiver.recv().unwrap())
+            },
             WebGLCommand::Clear(mask) =>
                 ctx.gl().clear(mask),
             WebGLCommand::ClearColor(r, g, b, a) =>
@@ -711,8 +713,9 @@ impl WebGLImpl {
                 ctx.gl().pixel_store_i(name, val),
             WebGLCommand::PolygonOffset(factor, units) =>
                 ctx.gl().polygon_offset(factor, units),
-            WebGLCommand::ReadPixels(x, y, width, height, format, pixel_type, ref chan) =>
-                Self::read_pixels(ctx.gl(), x, y, width, height, format, pixel_type, chan),
+            WebGLCommand::ReadPixels(x, y, width, height, format, pixel_type, ref chan) => {
+                Self::read_pixels(ctx.gl(), x, y, width, height, format, pixel_type, chan)
+            }
             WebGLCommand::RenderbufferStorage(target, format, width, height) =>
                 ctx.gl().renderbuffer_storage(target, format, width, height),
             WebGLCommand::SampleCoverage(value, invert) =>
@@ -831,11 +834,32 @@ impl WebGLImpl {
             WebGLCommand::SetViewport(x, y, width, height) => {
                 ctx.gl().viewport(x, y, width, height);
             }
-            WebGLCommand::TexImage2D(target, level, internal, width, height, format, data_type, ref data) =>
-                ctx.gl().tex_image_2d(target, level, internal, width, height,
-                                      /*border*/0, format, data_type, Some(data)),
-            WebGLCommand::TexSubImage2D(target, level, xoffset, yoffset, x, y, width, height, ref data) =>
-                ctx.gl().tex_sub_image_2d(target, level, xoffset, yoffset, x, y, width, height, data),
+            WebGLCommand::TexImage2D(target, level, internal, width, height, format, data_type, ref chan) => {
+                ctx.gl().tex_image_2d(
+                    target,
+                    level,
+                    internal,
+                    width,
+                    height,
+                    0,
+                    format,
+                    data_type,
+                    Some(&chan.recv().unwrap()),
+                )
+            }
+            WebGLCommand::TexSubImage2D(target, level, xoffset, yoffset, x, y, width, height, ref chan) => {
+                ctx.gl().tex_sub_image_2d(
+                    target,
+                    level,
+                    xoffset,
+                    yoffset,
+                    x,
+                    y,
+                    width,
+                    height,
+                    &chan.recv().unwrap(),
+                )
+            }
             WebGLCommand::DrawingBufferWidth(ref sender) =>
                 sender.send(ctx.borrow_draw_buffer().unwrap().size().width).unwrap(),
             WebGLCommand::DrawingBufferHeight(ref sender) =>
@@ -1163,10 +1187,10 @@ impl WebGLImpl {
         height: i32,
         format: u32,
         pixel_type: u32,
-        chan: &WebGLSender<ByteBuf>,
+        chan: &IpcBytesSender,
     ) {
         let result = gl.read_pixels(x, y, width, height, format, pixel_type);
-        chan.send(result.into()).unwrap()
+        chan.send(&result).unwrap()
     }
     fn finish(gl: &gl::Gl, chan: &WebGLSender<()>) {

@@ -4,6 +4,7 @@
 use euclid::Size2D;
 use gleam::gl;
+use ipc_channel::ipc::{IpcBytesReceiver, IpcBytesSender};
 use offscreen_gl_context::{GLContextAttributes, GLLimits};
 use serde_bytes::ByteBuf;
 use std::borrow::Cow;
@@ -24,7 +25,7 @@ pub use ::webgl_channel::WebGLPipeline;
 pub use ::webgl_channel::WebGLChan;
 /// WebGL Message API
-#[derive(Clone, Deserialize, Serialize)]
+#[derive(Deserialize, Serialize)]
 pub enum WebGLMsg {
     /// Creates a new WebGLContext.
     CreateContext(WebGLVersion, Size2D<i32>, GLContextAttributes,
@@ -155,7 +156,7 @@ impl WebGLMsgSender {
 }
 /// WebGL Commands for a specific WebGLContext
-#[derive(Clone, Debug, Deserialize, Serialize)]
+#[derive(Debug, Deserialize, Serialize)]
 pub enum WebGLCommand {
     GetContextAttributes(WebGLSender<GLContextAttributes>),
     ActiveTexture(u32),
@@ -167,8 +168,8 @@ pub enum WebGLCommand {
     AttachShader(WebGLProgramId, WebGLShaderId),
     DetachShader(WebGLProgramId, WebGLShaderId),
     BindAttribLocation(WebGLProgramId, u32, String),
-    BufferData(u32, ByteBuf, u32),
-    BufferSubData(u32, isize, ByteBuf),
+    BufferData(u32, IpcBytesReceiver, u32),
+    BufferSubData(u32, isize, IpcBytesReceiver),
     Clear(u32),
     ClearColor(f32, f32, f32, f32),
     ClearDepth(f32),
@@ -213,7 +214,7 @@ pub enum WebGLCommand {
     GetRenderbufferParameter(u32, u32, WebGLSender<i32>),
     PolygonOffset(f32, f32),
     RenderbufferStorage(u32, u32, i32, i32),
-    ReadPixels(i32, i32, i32, i32, u32, u32, WebGLSender<ByteBuf>),
+    ReadPixels(i32, i32, i32, i32, u32, u32, IpcBytesSender),
     SampleCoverage(f32, bool),
     Scissor(i32, i32, i32, i32),
     StencilFunc(u32, i32, u32),
@@ -251,8 +252,8 @@ pub enum WebGLCommand {
     VertexAttribPointer(u32, i32, u32, bool, i32, u32),
     VertexAttribPointer2f(u32, i32, bool, i32, u32),
     SetViewport(i32, i32, i32, i32),
-    TexImage2D(u32, i32, i32, i32, i32, u32, u32, ByteBuf),
-    TexSubImage2D(u32, i32, i32, i32, i32, i32, u32, u32, ByteBuf),
+    TexImage2D(u32, i32, i32, i32, i32, u32, u32, IpcBytesReceiver),
+    TexSubImage2D(u32, i32, i32, i32, i32, i32, u32, u32, IpcBytesReceiver),
     DrawingBufferWidth(WebGLSender<i32>),
     DrawingBufferHeight(WebGLSender<i32>),
     Finish(WebGLSender<()>),
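The switch from `ByteBuf` payloads to `IpcBytesReceiver`/`IpcBytesSender` moves bulk data (buffer contents, texture uploads, read-back pixels) out of the serialized command: the command now carries one end of a bytes channel and the raw bytes travel through that channel instead. It also lines up with `Clone` disappearing from the derives above, since the bytes endpoints cannot be cloned. Below is a minimal sketch of the pattern using `ipc-channel` directly; the payload, variable names and the idea of calling it from a script-side helper are illustrative assumptions, not code from this commit.

```rust
use ipc_channel::ipc;

fn main() {
    // The sender stays with the producing side; the receiver would travel
    // inside the command enum to the WebGL thread, which pulls the bytes only
    // when it actually executes the command.
    let (bytes_sender, bytes_receiver) =
        ipc::bytes_channel().expect("failed to create bytes channel");

    // Producing side: ship the payload out of band instead of cloning it into
    // the serialized command.
    let vertex_data: Vec<u8> = vec![0; 1024];
    bytes_sender
        .send(&vertex_data)
        .expect("failed to send buffer data");

    // Consuming side: the BufferData match arm shown earlier does this recv()
    // right before calling gl::buffer_data(...).
    let received = bytes_receiver.recv().expect("failed to receive buffer data");
    assert_eq!(received.len(), 1024);
    println!("received {} bytes", received.len());
}
```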

@@ -9,7 +9,7 @@ use std::io;
 pub type WebGLSender<T> = ipc_channel::ipc::IpcSender<T>;
 pub type WebGLReceiver<T> = ipc_channel::ipc::IpcReceiver<T>;
-pub fn webgl_channel<T: Serialize + for<'de> Deserialize<'de>>()
-                    -> Result<(WebGLSender<T>, WebGLReceiver<T>), io::Error> {
+pub fn webgl_channel<T: Serialize + for<'de> Deserialize<'de>>(
+) -> Result<(WebGLSender<T>, WebGLReceiver<T>), io::Error> {
     ipc_channel::ipc::channel()
 }

@@ -13,17 +13,27 @@ use servo_config::opts;
 use std::fmt;
 lazy_static! {
-    static ref IS_MULTIPROCESS: bool = {
-        opts::multiprocess()
-    };
+    static ref IS_MULTIPROCESS: bool = { opts::multiprocess() };
 }
-#[derive(Clone, Deserialize, Serialize)]
+#[derive(Deserialize, Serialize)]
 pub enum WebGLSender<T: Serialize> {
     Ipc(ipc::WebGLSender<T>),
     Mpsc(mpsc::WebGLSender<T>),
 }
+impl<T> Clone for WebGLSender<T>
+where
+    T: Serialize,
+{
+    fn clone(&self) -> Self {
+        match *self {
+            WebGLSender::Ipc(ref chan) => WebGLSender::Ipc(chan.clone()),
+            WebGLSender::Mpsc(ref chan) => WebGLSender::Mpsc(chan.clone()),
+        }
+    }
+}
 impl<T: Serialize> fmt::Debug for WebGLSender<T> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         write!(f, "WebGLSender(..)")
@@ -34,40 +44,41 @@ impl<T: Serialize> WebGLSender<T> {
     #[inline]
     pub fn send(&self, msg: T) -> WebGLSendResult {
         match *self {
-            WebGLSender::Ipc(ref sender) => {
-                sender.send(msg).map_err(|_| ())
-            },
-            WebGLSender::Mpsc(ref sender) => {
-                sender.send(msg).map_err(|_| ())
-            }
+            WebGLSender::Ipc(ref sender) => sender.send(msg).map_err(|_| ()),
+            WebGLSender::Mpsc(ref sender) => sender.send(msg).map_err(|_| ()),
         }
     }
 }
 pub type WebGLSendResult = Result<(), ()>;
-pub enum WebGLReceiver<T> where T: for<'de> Deserialize<'de> + Serialize {
+pub enum WebGLReceiver<T>
+where
+    T: for<'de> Deserialize<'de> + Serialize,
+{
     Ipc(ipc::WebGLReceiver<T>),
     Mpsc(mpsc::WebGLReceiver<T>),
 }
-impl<T> WebGLReceiver<T> where T: for<'de> Deserialize<'de> + Serialize {
+impl<T> WebGLReceiver<T>
+where
+    T: for<'de> Deserialize<'de> + Serialize,
+{
     pub fn recv(&self) -> Result<T, ()> {
         match *self {
-            WebGLReceiver::Ipc(ref receiver) => {
-                receiver.recv().map_err(|_| ())
-            },
-            WebGLReceiver::Mpsc(ref receiver) => {
-                receiver.recv().map_err(|_| ())
-            }
+            WebGLReceiver::Ipc(ref receiver) => receiver.recv().map_err(|_| ()),
+            WebGLReceiver::Mpsc(ref receiver) => receiver.recv().map_err(|_| ()),
        }
     }
 }
 pub fn webgl_channel<T>() -> Result<(WebGLSender<T>, WebGLReceiver<T>), ()>
-    where T: for<'de> Deserialize<'de> + Serialize {
+where
+    T: for<'de> Deserialize<'de> + Serialize,
+{
     if *IS_MULTIPROCESS {
-        ipc::webgl_channel().map(|(tx, rx)| (WebGLSender::Ipc(tx), WebGLReceiver::Ipc(rx)))
+        ipc::webgl_channel()
+            .map(|(tx, rx)| (WebGLSender::Ipc(tx), WebGLReceiver::Ipc(rx)))
             .map_err(|_| ())
     } else {
         mpsc::webgl_channel().map(|(tx, rx)| (WebGLSender::Mpsc(tx), WebGLReceiver::Mpsc(rx)))
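`#[derive(Clone)]` on a generic type adds a `T: Clone` bound, and the messages sent over these channels are not all cloneable, which is presumably why the derive is replaced above by a manual `Clone` impl that only clones the underlying sender. A self-contained illustration of the same trick over `std::sync::mpsc`; the wrapper and payload types are stand-ins, not Servo's:

```rust
use std::sync::mpsc;

// A message type that is deliberately not Clone.
struct Payload(Vec<u8>);

// #[derive(Clone)] here would require `Payload: Clone`. mpsc::Sender<T> is
// itself Clone for any T, so a manual impl restores that property.
struct Sender<T>(mpsc::Sender<T>);

impl<T> Clone for Sender<T> {
    fn clone(&self) -> Self {
        Sender(self.0.clone())
    }
}

fn main() {
    let (tx, rx) = mpsc::channel();
    let tx = Sender(tx);
    let tx2 = tx.clone(); // works even though Payload is not Clone

    tx.0.send(Payload(vec![1, 2, 3])).unwrap();
    tx2.0.send(Payload(vec![4])).unwrap();
    assert_eq!(rx.iter().take(2).count(), 2);
    println!("both clones delivered a message");
}
```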

@@ -17,17 +17,24 @@ macro_rules! unreachable_serializable {
         impl<'a, T> Deserialize<'a> for $name<T> {
             fn deserialize<D>(_: D) -> Result<$name<T>, D::Error>
-                where D: Deserializer<'a> {
+            where
+                D: Deserializer<'a>,
+            {
                 unreachable!();
             }
         }
     };
 }
-#[derive(Clone)]
 pub struct WebGLSender<T>(mpsc::Sender<T>);
 pub struct WebGLReceiver<T>(mpsc::Receiver<T>);
+impl<T> Clone for WebGLSender<T> {
+    fn clone(&self) -> Self {
+        WebGLSender(self.0.clone())
+    }
+}
 impl<T> WebGLSender<T> {
     #[inline]
     pub fn send(&self, data: T) -> Result<(), mpsc::SendError<T>> {
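The `unreachable_serializable!` macro above exists because the channel wrappers in the parent module derive `Serialize` and `Deserialize`, so even the purely in-process `mpsc` variants must name those impls, although the impls can never legitimately run in single-process mode. A stand-alone sketch of that idea, assuming a `serde` dependency; the type and message are hypothetical:

```rust
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use std::sync::mpsc;

// In-process sender that only needs Serialize/Deserialize to satisfy trait
// bounds shared with an IPC-backed variant; the impls are never exercised.
pub struct InProcessSender<T>(mpsc::Sender<T>);

impl<T> Serialize for InProcessSender<T> {
    fn serialize<S>(&self, _: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        unreachable!("in-process senders are never serialized");
    }
}

impl<'de, T> Deserialize<'de> for InProcessSender<T> {
    fn deserialize<D>(_: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        unreachable!("in-process senders are never deserialized");
    }
}

fn main() {
    // The wrapper still behaves like a normal channel endpoint.
    let (tx, rx) = mpsc::channel();
    let tx = InProcessSender(tx);
    tx.0.send(42u32).unwrap();
    assert_eq!(rx.recv().unwrap(), 42);
    println!("ok");
}
```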

@@ -10,24 +10,33 @@ use std::io::{Read, Write};
 use std::path::Path;
 fn main() {
-    let lockfile_path = Path::new(&env::var("CARGO_MANIFEST_DIR").unwrap()).join("..").join("..").join("Cargo.lock");
-    let revision_file_path = Path::new(&env::var_os("OUT_DIR").unwrap()).join("webrender_revision.rs");
+    let lockfile_path = Path::new(&env::var("CARGO_MANIFEST_DIR").unwrap())
+        .join("..")
+        .join("..")
+        .join("Cargo.lock");
+    let revision_file_path =
+        Path::new(&env::var_os("OUT_DIR").unwrap()).join("webrender_revision.rs");
     let mut lockfile = String::new();
-    File::open(lockfile_path).expect("Cannot open lockfile")
+    File::open(lockfile_path)
+        .expect("Cannot open lockfile")
         .read_to_string(&mut lockfile)
         .expect("Failed to read lockfile");
     match toml::from_str::<toml::value::Table>(&lockfile) {
         Ok(result) => {
-            let packages = result.get("package").expect("Cargo lockfile should contain package list");
+            let packages = result
+                .get("package")
+                .expect("Cargo lockfile should contain package list");
             match *packages {
                 toml::Value::Array(ref arr) => {
                     let source = arr
                         .iter()
-                        .find(|pkg| pkg.get("name").and_then(|name| name.as_str()).unwrap_or("") == "webrender")
-                        .and_then(|pkg| pkg.get("source").and_then(|source| source.as_str()))
+                        .find(|pkg| {
+                            pkg.get("name").and_then(|name| name.as_str()).unwrap_or("") ==
+                                "webrender"
+                        }).and_then(|pkg| pkg.get("source").and_then(|source| source.as_str()))
                         .unwrap_or("unknown");
                     let parsed: Vec<&str> = source.split("#").collect();
@@ -36,9 +45,9 @@ fn main() {
                     let mut revision_module_file = File::create(&revision_file_path).unwrap();
                     write!(&mut revision_module_file, "{}", format!("\"{}\"", revision)).unwrap();
                 },
-                _ => panic!("Cannot find package definitions in lockfile")
+                _ => panic!("Cannot find package definitions in lockfile"),
             }
         },
-        Err(e) => panic!(e)
+        Err(e) => panic!(e),
     }
 }
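For context on the string this build script extracts: a git dependency's `source` entry in `Cargo.lock` has the form `git+<repository-url>#<commit-sha>`, so splitting on `#` yields the pinned webrender revision that gets written into `webrender_revision.rs`. A small sketch of that parsing step; the URL, the placeholder hash and the fallback for a missing `#` are illustrative, not taken from the build script:

```rust
fn main() {
    // Placeholder source string in the shape Cargo.lock uses for git
    // dependencies; the hash is a dummy 40-character value.
    let source = "git+https://github.com/servo/webrender#0123456789abcdef0123456789abcdef01234567";
    let parsed: Vec<&str> = source.split('#').collect();
    // If there is no '#', fall back to the whole string (sketch-only choice).
    let revision = if parsed.len() > 1 { parsed[1] } else { parsed[0] };
    assert_eq!(revision.len(), 40);
    println!("webrender revision: {}", revision);
}
```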

@@ -43,7 +43,6 @@ use webrender_api::{self, DeviceIntPoint, DevicePoint, HitTestFlags, HitTestResu
 use webrender_api::{LayoutVector2D, ScrollLocation};
 use windowing::{self, EmbedderCoordinates, MouseWindowEvent, WebRenderDebugOption, WindowMethods};
 #[derive(Debug, PartialEq)]
 enum UnableToComposite {
     WindowUnprepared,
@@ -251,7 +250,7 @@ enum CompositeTarget {
     WindowAndPng,
     /// Compose to a PNG, write it to disk, and then exit the browser (used for reftests)
-    PngFile
+    PngFile,
 }
 #[derive(Clone)]
@@ -273,7 +272,8 @@ impl webrender_api::RenderNotifier for RenderNotifier {
     }
     fn wake_up(&self) {
-        self.compositor_proxy.recomposite(CompositingReason::NewWebRenderFrame);
+        self.compositor_proxy
+            .recomposite(CompositingReason::NewWebRenderFrame);
     }
     fn new_frame_ready(
@@ -284,7 +284,8 @@ impl webrender_api::RenderNotifier for RenderNotifier {
         _render_time_ns: Option<u64>,
     ) {
         if scrolled {
-            self.compositor_proxy.send(Msg::NewScrollFrameReady(composite_needed));
+            self.compositor_proxy
+                .send(Msg::NewScrollFrameReady(composite_needed));
         } else {
             self.wake_up();
         }
@@ -295,7 +296,7 @@ impl<Window: WindowMethods> IOCompositor<Window> {
     fn new(window: Rc<Window>, state: InitialCompositorState) -> Self {
         let composite_target = match opts::get().output_file {
             Some(_) => CompositeTarget::PngFile,
-            None => CompositeTarget::Window
+            None => CompositeTarget::Window,
         };
         IOCompositor {
@@ -372,7 +373,8 @@ impl<Window: WindowMethods> IOCompositor<Window> {
         // Tell the profiler, memory profiler, and scrolling timer to shut down.
         if let Ok((sender, receiver)) = ipc::channel() {
-            self.time_profiler_chan.send(time::ProfilerMsg::Exit(sender));
+            self.time_profiler_chan
+                .send(time::ProfilerMsg::Exit(sender));
             let _ = receiver.recv();
         }
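The profiler shutdown just above follows a common Servo idiom for synchronous teardown: create a one-shot channel, hand the sender to the other thread inside an `Exit` message, and block on the receiver until that thread acknowledges. A reduced sketch of the idiom with a hypothetical profiler thread; the message type and names are stand-ins, and only the acknowledgement channel uses `ipc-channel`, as the code above does:

```rust
use ipc_channel::ipc::{self, IpcSender};
use std::sync::mpsc;
use std::thread;

enum ProfilerMsg {
    Exit(IpcSender<()>),
}

fn main() {
    let (profiler_chan, profiler_port) = mpsc::channel();

    // Hypothetical profiler thread: acknowledges Exit before terminating.
    let profiler = thread::spawn(move || {
        while let Ok(msg) = profiler_port.recv() {
            match msg {
                ProfilerMsg::Exit(ack) => {
                    // ... flush any pending reports here ...
                    ack.send(()).unwrap();
                    break;
                },
            }
        }
    });

    // Shutdown side: only proceed once the profiler has confirmed it is done.
    let (sender, receiver) = ipc::channel().expect("failed to create ipc channel");
    profiler_chan.send(ProfilerMsg::Exit(sender)).unwrap();
    let _ = receiver.recv();

    profiler.join().unwrap();
    println!("profiler shut down cleanly");
}
```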
@ -383,33 +385,33 @@ impl<Window: WindowMethods> IOCompositor<Window> {
match (msg, self.shutdown_state) { match (msg, self.shutdown_state) {
(_, ShutdownState::FinishedShuttingDown) => { (_, ShutdownState::FinishedShuttingDown) => {
error!("compositor shouldn't be handling messages after shutting down"); error!("compositor shouldn't be handling messages after shutting down");
return false return false;
} },
(Msg::ShutdownComplete, _) => { (Msg::ShutdownComplete, _) => {
self.finish_shutting_down(); self.finish_shutting_down();
return false; return false;
} },
(Msg::ChangeRunningAnimationsState(pipeline_id, animation_state), (
ShutdownState::NotShuttingDown) => { Msg::ChangeRunningAnimationsState(pipeline_id, animation_state),
ShutdownState::NotShuttingDown,
) => {
self.change_running_animations_state(pipeline_id, animation_state); self.change_running_animations_state(pipeline_id, animation_state);
} },
(Msg::SetFrameTree(frame_tree), (Msg::SetFrameTree(frame_tree), ShutdownState::NotShuttingDown) => {
ShutdownState::NotShuttingDown) => {
self.set_frame_tree(&frame_tree); self.set_frame_tree(&frame_tree);
self.send_viewport_rects(); self.send_viewport_rects();
} },
(Msg::Recomposite(reason), ShutdownState::NotShuttingDown) => { (Msg::Recomposite(reason), ShutdownState::NotShuttingDown) => {
self.composition_request = CompositionRequest::CompositeNow(reason) self.composition_request = CompositionRequest::CompositeNow(reason)
} },
(Msg::TouchEventProcessed(result), ShutdownState::NotShuttingDown) => { (Msg::TouchEventProcessed(result), ShutdownState::NotShuttingDown) => {
self.touch_handler.on_event_processed(result); self.touch_handler.on_event_processed(result);
} },
(Msg::CreatePng(reply), ShutdownState::NotShuttingDown) => { (Msg::CreatePng(reply), ShutdownState::NotShuttingDown) => {
let res = self.composite_specific_target(CompositeTarget::WindowAndPng); let res = self.composite_specific_target(CompositeTarget::WindowAndPng);
@ -420,15 +422,20 @@ impl<Window: WindowMethods> IOCompositor<Window> {
if let Err(e) = reply.send(img) { if let Err(e) = reply.send(img) {
warn!("Sending reply to create png failed ({}).", e); warn!("Sending reply to create png failed ({}).", e);
} }
} },
(Msg::ViewportConstrained(pipeline_id, constraints), (
ShutdownState::NotShuttingDown) => { Msg::ViewportConstrained(pipeline_id, constraints),
ShutdownState::NotShuttingDown,
) => {
self.constrain_viewport(pipeline_id, constraints); self.constrain_viewport(pipeline_id, constraints);
} },
(Msg::IsReadyToSaveImageReply(is_ready), ShutdownState::NotShuttingDown) => { (Msg::IsReadyToSaveImageReply(is_ready), ShutdownState::NotShuttingDown) => {
assert_eq!(self.ready_to_save_state, ReadyState::WaitingForConstellationReply); assert_eq!(
self.ready_to_save_state,
ReadyState::WaitingForConstellationReply
);
if is_ready { if is_ready {
self.ready_to_save_state = ReadyState::ReadyToSaveImage; self.ready_to_save_state = ReadyState::ReadyToSaveImage;
if opts::get().is_running_problem_test { if opts::get().is_running_problem_test {
@ -441,34 +448,38 @@ impl<Window: WindowMethods> IOCompositor<Window> {
} }
} }
self.composite_if_necessary(CompositingReason::Headless); self.composite_if_necessary(CompositingReason::Headless);
} },
(Msg::PipelineVisibilityChanged(pipeline_id, visible), ShutdownState::NotShuttingDown) => { (
Msg::PipelineVisibilityChanged(pipeline_id, visible),
ShutdownState::NotShuttingDown,
) => {
self.pipeline_details(pipeline_id).visible = visible; self.pipeline_details(pipeline_id).visible = visible;
if visible { if visible {
self.process_animations(); self.process_animations();
} }
} },
(Msg::PipelineExited(pipeline_id, sender), _) => { (Msg::PipelineExited(pipeline_id, sender), _) => {
debug!("Compositor got pipeline exited: {:?}", pipeline_id); debug!("Compositor got pipeline exited: {:?}", pipeline_id);
self.remove_pipeline_root_layer(pipeline_id); self.remove_pipeline_root_layer(pipeline_id);
let _ = sender.send(()); let _ = sender.send(());
} },
(Msg::NewScrollFrameReady(recomposite_needed), ShutdownState::NotShuttingDown) => { (Msg::NewScrollFrameReady(recomposite_needed), ShutdownState::NotShuttingDown) => {
self.waiting_for_results_of_scroll = false; self.waiting_for_results_of_scroll = false;
if recomposite_needed { if recomposite_needed {
self.composition_request = CompositionRequest::CompositeNow( self.composition_request = CompositionRequest::CompositeNow(
CompositingReason::NewWebRenderScrollFrame); CompositingReason::NewWebRenderScrollFrame,
} );
} }
},
(Msg::Dispatch(func), ShutdownState::NotShuttingDown) => { (Msg::Dispatch(func), ShutdownState::NotShuttingDown) => {
// The functions sent here right now are really dumb, so they can't panic. // The functions sent here right now are really dumb, so they can't panic.
// But if we start running more complex code here, we should really catch panic here. // But if we start running more complex code here, we should really catch panic here.
func(); func();
} },
(Msg::LoadComplete(_), ShutdownState::NotShuttingDown) => { (Msg::LoadComplete(_), ShutdownState::NotShuttingDown) => {
// If we're painting in headless mode, schedule a recomposite. // If we're painting in headless mode, schedule a recomposite.
@ -479,30 +490,30 @@ impl<Window: WindowMethods> IOCompositor<Window> {
(Msg::PendingPaintMetric(pipeline_id, epoch), _) => { (Msg::PendingPaintMetric(pipeline_id, epoch), _) => {
self.pending_paint_metrics.insert(pipeline_id, epoch); self.pending_paint_metrics.insert(pipeline_id, epoch);
} },
(Msg::GetClientWindow(req), ShutdownState::NotShuttingDown) => { (Msg::GetClientWindow(req), ShutdownState::NotShuttingDown) => {
if let Err(e) = req.send(self.embedder_coordinates.window) { if let Err(e) = req.send(self.embedder_coordinates.window) {
warn!("Sending response to get client window failed ({}).", e); warn!("Sending response to get client window failed ({}).", e);
} }
} },
(Msg::GetScreenSize(req), ShutdownState::NotShuttingDown) => { (Msg::GetScreenSize(req), ShutdownState::NotShuttingDown) => {
if let Err(e) = req.send(self.embedder_coordinates.screen) { if let Err(e) = req.send(self.embedder_coordinates.screen) {
warn!("Sending response to get screen size failed ({}).", e); warn!("Sending response to get screen size failed ({}).", e);
} }
} },
(Msg::GetScreenAvailSize(req), ShutdownState::NotShuttingDown) => { (Msg::GetScreenAvailSize(req), ShutdownState::NotShuttingDown) => {
if let Err(e) = req.send(self.embedder_coordinates.screen_avail) { if let Err(e) = req.send(self.embedder_coordinates.screen_avail) {
warn!("Sending response to get screen avail size failed ({}).", e); warn!("Sending response to get screen avail size failed ({}).", e);
} }
} },
// When we are shutting_down, we need to avoid performing operations // When we are shutting_down, we need to avoid performing operations
// such as Paint that may crash because we have begun tearing down // such as Paint that may crash because we have begun tearing down
// the rest of our resources. // the rest of our resources.
(_, ShutdownState::ShuttingDown) => {} (_, ShutdownState::ShuttingDown) => {},
} }
true true
@ -522,42 +533,53 @@ impl<Window: WindowMethods> IOCompositor<Window> {
if visible { if visible {
self.composite_if_necessary(CompositingReason::Animation); self.composite_if_necessary(CompositingReason::Animation);
} }
} },
AnimationState::AnimationCallbacksPresent => { AnimationState::AnimationCallbacksPresent => {
let visible = self.pipeline_details(pipeline_id).visible; let visible = self.pipeline_details(pipeline_id).visible;
self.pipeline_details(pipeline_id).animation_callbacks_running = true; self.pipeline_details(pipeline_id)
.animation_callbacks_running = true;
if visible { if visible {
self.tick_animations_for_pipeline(pipeline_id); self.tick_animations_for_pipeline(pipeline_id);
} }
} },
AnimationState::NoAnimationsPresent => { AnimationState::NoAnimationsPresent => {
self.pipeline_details(pipeline_id).animations_running = false; self.pipeline_details(pipeline_id).animations_running = false;
} },
AnimationState::NoAnimationCallbacksPresent => { AnimationState::NoAnimationCallbacksPresent => {
self.pipeline_details(pipeline_id).animation_callbacks_running = false; self.pipeline_details(pipeline_id)
} .animation_callbacks_running = false;
},
} }
} }
fn pipeline_details(&mut self, pipeline_id: PipelineId) -> &mut PipelineDetails { fn pipeline_details(&mut self, pipeline_id: PipelineId) -> &mut PipelineDetails {
if !self.pipeline_details.contains_key(&pipeline_id) { if !self.pipeline_details.contains_key(&pipeline_id) {
self.pipeline_details.insert(pipeline_id, PipelineDetails::new()); self.pipeline_details
.insert(pipeline_id, PipelineDetails::new());
} }
self.pipeline_details.get_mut(&pipeline_id).expect("Insert then get failed!") self.pipeline_details
.get_mut(&pipeline_id)
.expect("Insert then get failed!")
} }
pub fn pipeline(&self, pipeline_id: PipelineId) -> Option<&CompositionPipeline> { pub fn pipeline(&self, pipeline_id: PipelineId) -> Option<&CompositionPipeline> {
match self.pipeline_details.get(&pipeline_id) { match self.pipeline_details.get(&pipeline_id) {
Some(ref details) => details.pipeline.as_ref(), Some(ref details) => details.pipeline.as_ref(),
None => { None => {
warn!("Compositor layer has an unknown pipeline ({:?}).", pipeline_id); warn!(
"Compositor layer has an unknown pipeline ({:?}).",
pipeline_id
);
None None
} },
} }
} }
fn set_frame_tree(&mut self, frame_tree: &SendableFrameTree) { fn set_frame_tree(&mut self, frame_tree: &SendableFrameTree) {
debug!("Setting the frame tree for pipeline {}", frame_tree.pipeline.id); debug!(
"Setting the frame tree for pipeline {}",
frame_tree.pipeline.id
);
self.root_pipeline = Some(frame_tree.pipeline.clone()); self.root_pipeline = Some(frame_tree.pipeline.clone());
@ -565,7 +587,8 @@ impl<Window: WindowMethods> IOCompositor<Window> {
let mut txn = webrender_api::Transaction::new(); let mut txn = webrender_api::Transaction::new();
txn.set_root_pipeline(pipeline_id); txn.set_root_pipeline(pipeline_id);
txn.generate_frame(); txn.generate_frame();
self.webrender_api.send_transaction(self.webrender_document, txn); self.webrender_api
.send_transaction(self.webrender_document, txn);
self.create_pipeline_details_for_frame_tree(&frame_tree); self.create_pipeline_details_for_frame_tree(&frame_tree);
@ -589,10 +612,12 @@ impl<Window: WindowMethods> IOCompositor<Window> {
fn send_window_size(&self, size_type: WindowSizeType) { fn send_window_size(&self, size_type: WindowSizeType) {
let dppx = self.page_zoom * self.embedder_coordinates.hidpi_factor; let dppx = self.page_zoom * self.embedder_coordinates.hidpi_factor;
self.webrender_api.set_window_parameters(self.webrender_document, self.webrender_api.set_window_parameters(
self.webrender_document,
self.embedder_coordinates.framebuffer, self.embedder_coordinates.framebuffer,
self.embedder_coordinates.viewport, self.embedder_coordinates.viewport,
self.embedder_coordinates.hidpi_factor.get()); self.embedder_coordinates.hidpi_factor.get(),
);
let initial_viewport = self.embedder_coordinates.viewport.size.to_f32() / dppx; let initial_viewport = self.embedder_coordinates.viewport.size.to_f32() / dppx;
@ -601,9 +626,10 @@ impl<Window: WindowMethods> IOCompositor<Window> {
initial_viewport: initial_viewport, initial_viewport: initial_viewport,
}; };
let top_level_browsing_context_id = self.root_pipeline.as_ref().map(|pipeline| { let top_level_browsing_context_id = self
pipeline.top_level_browsing_context_id .root_pipeline
}); .as_ref()
.map(|pipeline| pipeline.top_level_browsing_context_id);
let msg = ConstellationMsg::WindowSize(top_level_browsing_context_id, data, size_type); let msg = ConstellationMsg::WindowSize(top_level_browsing_context_id, data, size_type);
@ -624,7 +650,8 @@ impl<Window: WindowMethods> IOCompositor<Window> {
} }
if self.embedder_coordinates.viewport == old_coords.viewport && if self.embedder_coordinates.viewport == old_coords.viewport &&
self.embedder_coordinates.framebuffer == old_coords.framebuffer { self.embedder_coordinates.framebuffer == old_coords.framebuffer
{
return; return;
} }
@ -634,11 +661,11 @@ impl<Window: WindowMethods> IOCompositor<Window> {
pub fn on_mouse_window_event_class(&mut self, mouse_window_event: MouseWindowEvent) { pub fn on_mouse_window_event_class(&mut self, mouse_window_event: MouseWindowEvent) {
if opts::get().convert_mouse_to_touch { if opts::get().convert_mouse_to_touch {
match mouse_window_event { match mouse_window_event {
MouseWindowEvent::Click(_, _) => {} MouseWindowEvent::Click(_, _) => {},
MouseWindowEvent::MouseDown(_, p) => self.on_touch_down(TouchId(0), p), MouseWindowEvent::MouseDown(_, p) => self.on_touch_down(TouchId(0), p),
MouseWindowEvent::MouseUp(_, p) => self.on_touch_up(TouchId(0), p), MouseWindowEvent::MouseUp(_, p) => self.on_touch_up(TouchId(0), p),
} }
return return;
} }
self.dispatch_mouse_window_event_class(mouse_window_event); self.dispatch_mouse_window_event_class(mouse_window_event);
@ -687,15 +714,14 @@ impl<Window: WindowMethods> IOCompositor<Window> {
self.webrender_document, self.webrender_document,
None, None,
world_cursor, world_cursor,
HitTestFlags::empty() HitTestFlags::empty(),
) )
} }
pub fn on_mouse_window_move_event_class(&mut self, cursor: DevicePoint) { pub fn on_mouse_window_move_event_class(&mut self, cursor: DevicePoint) {
if opts::get().convert_mouse_to_touch { if opts::get().convert_mouse_to_touch {
self.on_touch_move(TouchId(0), cursor); self.on_touch_move(TouchId(0), cursor);
return return;
} }
self.dispatch_mouse_window_move_event_class(cursor); self.dispatch_mouse_window_move_event_class(cursor);
@ -733,8 +759,8 @@ impl<Window: WindowMethods> IOCompositor<Window> {
&self, &self,
event_type: TouchEventType, event_type: TouchEventType,
identifier: TouchId, identifier: TouchId,
point: DevicePoint) point: DevicePoint,
{ ) {
let results = self.hit_test_at_point(point); let results = self.hit_test_at_point(point);
if let Some(item) = results.items.first() { if let Some(item) = results.items.first() {
let event = TouchEvent( let event = TouchEvent(
@ -751,10 +777,12 @@ impl<Window: WindowMethods> IOCompositor<Window> {
} }
} }
pub fn on_touch_event(&mut self, pub fn on_touch_event(
&mut self,
event_type: TouchEventType, event_type: TouchEventType,
identifier: TouchId, identifier: TouchId,
location: DevicePoint) { location: DevicePoint,
) {
match event_type { match event_type {
TouchEventType::Down => self.on_touch_down(identifier, location), TouchEventType::Down => self.on_touch_down(identifier, location),
TouchEventType::Move => self.on_touch_move(identifier, location), TouchEventType::Move => self.on_touch_move(identifier, location),
@ -770,28 +798,25 @@ impl<Window: WindowMethods> IOCompositor<Window> {
fn on_touch_move(&mut self, identifier: TouchId, point: DevicePoint) { fn on_touch_move(&mut self, identifier: TouchId, point: DevicePoint) {
match self.touch_handler.on_touch_move(identifier, point) { match self.touch_handler.on_touch_move(identifier, point) {
TouchAction::Scroll(delta) => { TouchAction::Scroll(delta) => self.on_scroll_window_event(
self.on_scroll_window_event( ScrollLocation::Delta(LayoutVector2D::from_untyped(&delta.to_untyped())),
ScrollLocation::Delta( point.cast(),
LayoutVector2D::from_untyped(&delta.to_untyped())
), ),
point.cast()
)
}
TouchAction::Zoom(magnification, scroll_delta) => { TouchAction::Zoom(magnification, scroll_delta) => {
let cursor = TypedPoint2D::new(-1, -1); // Make sure this hits the base layer. let cursor = TypedPoint2D::new(-1, -1); // Make sure this hits the base layer.
self.pending_scroll_zoom_events.push(ScrollZoomEvent { self.pending_scroll_zoom_events.push(ScrollZoomEvent {
magnification: magnification, magnification: magnification,
scroll_location: ScrollLocation::Delta(webrender_api::LayoutVector2D::from_untyped( scroll_location: ScrollLocation::Delta(
&scroll_delta.to_untyped())), webrender_api::LayoutVector2D::from_untyped(&scroll_delta.to_untyped()),
),
cursor: cursor, cursor: cursor,
event_count: 1, event_count: 1,
}); });
} },
TouchAction::DispatchEvent => { TouchAction::DispatchEvent => {
self.send_touch_event(TouchEventType::Move, identifier, point); self.send_touch_event(TouchEventType::Move, identifier, point);
} },
_ => {} _ => {},
} }
} }
@ -818,24 +843,24 @@ impl<Window: WindowMethods> IOCompositor<Window> {
self.dispatch_mouse_window_event_class(MouseWindowEvent::Click(button, p)); self.dispatch_mouse_window_event_class(MouseWindowEvent::Click(button, p));
} }
pub fn on_scroll_event(&mut self, pub fn on_scroll_event(
&mut self,
delta: ScrollLocation, delta: ScrollLocation,
cursor: DeviceIntPoint, cursor: DeviceIntPoint,
phase: TouchEventType) { phase: TouchEventType,
) {
match phase { match phase {
TouchEventType::Move => self.on_scroll_window_event(delta, cursor), TouchEventType::Move => self.on_scroll_window_event(delta, cursor),
TouchEventType::Up | TouchEventType::Cancel => { TouchEventType::Up | TouchEventType::Cancel => {
self.on_scroll_end_window_event(delta, cursor); self.on_scroll_end_window_event(delta, cursor);
} },
TouchEventType::Down => { TouchEventType::Down => {
self.on_scroll_start_window_event(delta, cursor); self.on_scroll_start_window_event(delta, cursor);
} },
} }
} }
fn on_scroll_window_event(&mut self, fn on_scroll_window_event(&mut self, scroll_location: ScrollLocation, cursor: DeviceIntPoint) {
scroll_location: ScrollLocation,
cursor: DeviceIntPoint) {
self.in_scroll_transaction = Some(Instant::now()); self.in_scroll_transaction = Some(Instant::now());
self.pending_scroll_zoom_events.push(ScrollZoomEvent { self.pending_scroll_zoom_events.push(ScrollZoomEvent {
magnification: 1.0, magnification: 1.0,
@ -845,9 +870,11 @@ impl<Window: WindowMethods> IOCompositor<Window> {
}); });
} }
fn on_scroll_start_window_event(&mut self, fn on_scroll_start_window_event(
&mut self,
scroll_location: ScrollLocation, scroll_location: ScrollLocation,
cursor: DeviceIntPoint) { cursor: DeviceIntPoint,
) {
self.scroll_in_progress = true; self.scroll_in_progress = true;
self.pending_scroll_zoom_events.push(ScrollZoomEvent { self.pending_scroll_zoom_events.push(ScrollZoomEvent {
magnification: 1.0, magnification: 1.0,
@ -857,9 +884,11 @@ impl<Window: WindowMethods> IOCompositor<Window> {
}); });
} }
fn on_scroll_end_window_event(&mut self, fn on_scroll_end_window_event(
&mut self,
scroll_location: ScrollLocation, scroll_location: ScrollLocation,
cursor: DeviceIntPoint) { cursor: DeviceIntPoint,
) {
self.scroll_in_progress = false; self.scroll_in_progress = false;
self.pending_scroll_zoom_events.push(ScrollZoomEvent { self.pending_scroll_zoom_events.push(ScrollZoomEvent {
magnification: 1.0, magnification: 1.0,
@ -884,19 +913,20 @@ impl<Window: WindowMethods> IOCompositor<Window> {
// disregard other pending events and exit the loop. // disregard other pending events and exit the loop.
last_combined_event = Some(scroll_event); last_combined_event = Some(scroll_event);
break; break;
} },
}; };
match &mut last_combined_event { match &mut last_combined_event {
last_combined_event @ &mut None => { last_combined_event @ &mut None => {
*last_combined_event = Some(ScrollZoomEvent { *last_combined_event = Some(ScrollZoomEvent {
magnification: scroll_event.magnification, magnification: scroll_event.magnification,
scroll_location: ScrollLocation::Delta(webrender_api::LayoutVector2D::from_untyped( scroll_location: ScrollLocation::Delta(
&this_delta.to_untyped())), webrender_api::LayoutVector2D::from_untyped(&this_delta.to_untyped()),
),
cursor: this_cursor, cursor: this_cursor,
event_count: 1, event_count: 1,
}) })
} },
&mut Some(ref mut last_combined_event) => { &mut Some(ref mut last_combined_event) => {
// Mac OS X sometimes delivers scroll events out of vsync during a // Mac OS X sometimes delivers scroll events out of vsync during a
// fling. This causes events to get bunched up occasionally, causing // fling. This causes events to get bunched up occasionally, causing
@ -909,20 +939,22 @@ impl<Window: WindowMethods> IOCompositor<Window> {
let new_event_count = let new_event_count =
TypedScale::new(last_combined_event.event_count as f32); TypedScale::new(last_combined_event.event_count as f32);
last_combined_event.scroll_location = ScrollLocation::Delta( last_combined_event.scroll_location = ScrollLocation::Delta(
(delta * old_event_count + this_delta) / (delta * old_event_count + this_delta) / new_event_count,
new_event_count); );
} }
last_combined_event.magnification *= scroll_event.magnification; last_combined_event.magnification *= scroll_event.magnification;
} },
} }
} }
if let Some(combined_event) = last_combined_event { if let Some(combined_event) = last_combined_event {
let scroll_location = match combined_event.scroll_location { let scroll_location = match combined_event.scroll_location {
ScrollLocation::Delta(delta) => { ScrollLocation::Delta(delta) => {
let scaled_delta = (TypedVector2D::from_untyped(&delta.to_untyped()) / self.scale) let scaled_delta = (TypedVector2D::from_untyped(&delta.to_untyped()) /
self.scale)
.to_untyped(); .to_untyped();
let calculated_delta = webrender_api::LayoutVector2D::from_untyped(&scaled_delta); let calculated_delta =
webrender_api::LayoutVector2D::from_untyped(&scaled_delta);
ScrollLocation::Delta(calculated_delta) ScrollLocation::Delta(calculated_delta)
}, },
// Leave ScrollLocation unchanged if it is Start or End location. // Leave ScrollLocation unchanged if it is Start or End location.
@ -938,7 +970,8 @@ impl<Window: WindowMethods> IOCompositor<Window> {
txn.set_pinch_zoom(webrender_api::ZoomFactor::new(self.pinch_zoom_level())); txn.set_pinch_zoom(webrender_api::ZoomFactor::new(self.pinch_zoom_level()));
} }
txn.generate_frame(); txn.generate_frame();
self.webrender_api.send_transaction(self.webrender_document, txn); self.webrender_api
.send_transaction(self.webrender_document, txn);
self.waiting_for_results_of_scroll = true self.waiting_for_results_of_scroll = true
} }
@ -951,9 +984,9 @@ impl<Window: WindowMethods> IOCompositor<Window> {
fn process_animations(&mut self) { fn process_animations(&mut self) {
let mut pipeline_ids = vec![]; let mut pipeline_ids = vec![];
for (pipeline_id, pipeline_details) in &self.pipeline_details { for (pipeline_id, pipeline_details) in &self.pipeline_details {
if (pipeline_details.animations_running || if (pipeline_details.animations_running || pipeline_details.animation_callbacks_running) &&
pipeline_details.animation_callbacks_running) && pipeline_details.visible
pipeline_details.visible { {
pipeline_ids.push(*pipeline_id); pipeline_ids.push(*pipeline_id);
} }
} }
@ -969,7 +1002,9 @@ impl<Window: WindowMethods> IOCompositor<Window> {
} }
fn tick_animations_for_pipeline(&mut self, pipeline_id: PipelineId) { fn tick_animations_for_pipeline(&mut self, pipeline_id: PipelineId) {
let animation_callbacks_running = self.pipeline_details(pipeline_id).animation_callbacks_running; let animation_callbacks_running = self
.pipeline_details(pipeline_id)
.animation_callbacks_running;
if animation_callbacks_running { if animation_callbacks_running {
let msg = ConstellationMsg::TickAnimation(pipeline_id, AnimationTickType::Script); let msg = ConstellationMsg::TickAnimation(pipeline_id, AnimationTickType::Script);
if let Err(e) = self.constellation_chan.send(msg) { if let Err(e) = self.constellation_chan.send(msg) {
@ -988,9 +1023,10 @@ impl<Window: WindowMethods> IOCompositor<Window> {
} }
fn constrain_viewport(&mut self, pipeline_id: PipelineId, constraints: ViewportConstraints) { fn constrain_viewport(&mut self, pipeline_id: PipelineId, constraints: ViewportConstraints) {
let is_root = self.root_pipeline.as_ref().map_or(false, |root_pipeline| { let is_root = self
root_pipeline.id == pipeline_id .root_pipeline
}); .as_ref()
.map_or(false, |root_pipeline| root_pipeline.id == pipeline_id);
if is_root { if is_root {
self.viewport_zoom = constraints.initial_zoom; self.viewport_zoom = constraints.initial_zoom;
@ -1006,7 +1042,7 @@ impl<Window: WindowMethods> IOCompositor<Window> {
None => match opts::get().output_file { None => match opts::get().output_file {
Some(_) => TypedScale::new(1.0), Some(_) => TypedScale::new(1.0),
None => self.embedder_coordinates.hidpi_factor, None => self.embedder_coordinates.hidpi_factor,
} },
} }
} }
@ -1027,8 +1063,11 @@ impl<Window: WindowMethods> IOCompositor<Window> {
} }
pub fn on_zoom_window_event(&mut self, magnification: f32) { pub fn on_zoom_window_event(&mut self, magnification: f32) {
self.page_zoom = TypedScale::new((self.page_zoom.get() * magnification) self.page_zoom = TypedScale::new(
.max(MIN_ZOOM).min(MAX_ZOOM)); (self.page_zoom.get() * magnification)
.max(MIN_ZOOM)
.min(MAX_ZOOM),
);
self.update_zoom_transform(); self.update_zoom_transform();
self.send_window_size(WindowSizeType::Resize); self.send_window_size(WindowSizeType::Resize);
self.update_page_zoom_for_webrender(); self.update_page_zoom_for_webrender();
@ -1039,7 +1078,8 @@ impl<Window: WindowMethods> IOCompositor<Window> {
let mut txn = webrender_api::Transaction::new(); let mut txn = webrender_api::Transaction::new();
txn.set_page_zoom(page_zoom); txn.set_page_zoom(page_zoom);
self.webrender_api.send_transaction(self.webrender_document, txn); self.webrender_api
.send_transaction(self.webrender_document, txn);
} }
/// Simulate a pinch zoom /// Simulate a pinch zoom
@ -1054,7 +1094,10 @@ impl<Window: WindowMethods> IOCompositor<Window> {
fn send_viewport_rects(&self) { fn send_viewport_rects(&self) {
let mut scroll_states_per_pipeline = HashMap::new(); let mut scroll_states_per_pipeline = HashMap::new();
for scroll_layer_state in self.webrender_api.get_scroll_node_state(self.webrender_document) { for scroll_layer_state in self
.webrender_api
.get_scroll_node_state(self.webrender_document)
{
let scroll_state = ScrollState { let scroll_state = ScrollState {
scroll_id: scroll_layer_state.id, scroll_id: scroll_layer_state.id,
scroll_offset: scroll_layer_state.scroll_offset.to_untyped(), scroll_offset: scroll_layer_state.scroll_offset.to_untyped(),
@ -1105,8 +1148,9 @@ impl<Window: WindowMethods> IOCompositor<Window> {
let mut pipeline_epochs = HashMap::new(); let mut pipeline_epochs = HashMap::new();
for (id, _) in &self.pipeline_details { for (id, _) in &self.pipeline_details {
let webrender_pipeline_id = id.to_webrender(); let webrender_pipeline_id = id.to_webrender();
if let Some(webrender_api::Epoch(epoch)) = self.webrender if let Some(webrender_api::Epoch(epoch)) =
.current_epoch(webrender_pipeline_id) { self.webrender.current_epoch(webrender_pipeline_id)
{
let epoch = Epoch(epoch); let epoch = Epoch(epoch);
pipeline_epochs.insert(*id, epoch); pipeline_epochs.insert(*id, epoch);
} }
@ -1120,12 +1164,12 @@ impl<Window: WindowMethods> IOCompositor<Window> {
} }
self.ready_to_save_state = ReadyState::WaitingForConstellationReply; self.ready_to_save_state = ReadyState::WaitingForConstellationReply;
Err(NotReadyToPaint::JustNotifiedConstellation) Err(NotReadyToPaint::JustNotifiedConstellation)
} },
ReadyState::WaitingForConstellationReply => { ReadyState::WaitingForConstellationReply => {
// If waiting on a reply from the constellation to the last // If waiting on a reply from the constellation to the last
// query if the image is stable, then assume not ready yet. // query if the image is stable, then assume not ready yet.
Err(NotReadyToPaint::WaitingOnConstellation) Err(NotReadyToPaint::WaitingOnConstellation)
} },
ReadyState::ReadyToSaveImage => { ReadyState::ReadyToSaveImage => {
// Constellation has replied at some point in the past // Constellation has replied at some point in the past
// that the current output image is stable and ready // that the current output image is stable and ready
@ -1137,7 +1181,7 @@ impl<Window: WindowMethods> IOCompositor<Window> {
} }
self.ready_to_save_state = ReadyState::Unknown; self.ready_to_save_state = ReadyState::Unknown;
Ok(()) Ok(())
} },
} }
} }
@ -1149,7 +1193,9 @@ impl<Window: WindowMethods> IOCompositor<Window> {
self.start_shutting_down(); self.start_shutting_down();
}, },
Err(e) => if opts::get().is_running_problem_test { Err(e) => if opts::get().is_running_problem_test {
if e != UnableToComposite::NotReadyToPaintImage(NotReadyToPaint::WaitingOnConstellation) { if e != UnableToComposite::NotReadyToPaintImage(
NotReadyToPaint::WaitingOnConstellation,
) {
println!("not ready to composite: {:?}", e); println!("not ready to composite: {:?}", e);
} }
}, },
@ -1161,13 +1207,14 @@ impl<Window: WindowMethods> IOCompositor<Window> {
/// for some reason. If CompositeTarget is Window or Png no image data is returned; /// for some reason. If CompositeTarget is Window or Png no image data is returned;
/// in the latter case the image is written directly to a file. If CompositeTarget /// in the latter case the image is written directly to a file. If CompositeTarget
/// is WindowAndPng Ok(Some(png::Image)) is returned. /// is WindowAndPng Ok(Some(png::Image)) is returned.
fn composite_specific_target(&mut self, fn composite_specific_target(
target: CompositeTarget) &mut self,
-> Result<Option<Image>, UnableToComposite> { target: CompositeTarget,
) -> Result<Option<Image>, UnableToComposite> {
let width = self.embedder_coordinates.framebuffer.width_typed(); let width = self.embedder_coordinates.framebuffer.width_typed();
let height = self.embedder_coordinates.framebuffer.height_typed(); let height = self.embedder_coordinates.framebuffer.height_typed();
if !self.window.prepare_for_composite(width, height) { if !self.window.prepare_for_composite(width, height) {
return Err(UnableToComposite::WindowUnprepared) return Err(UnableToComposite::WindowUnprepared);
} }
self.webrender.update(); self.webrender.update();
@ -1183,34 +1230,40 @@ impl<Window: WindowMethods> IOCompositor<Window> {
// all active animations to complete. // all active animations to complete.
if self.animations_active() { if self.animations_active() {
self.process_animations(); self.process_animations();
return Err(UnableToComposite::NotReadyToPaintImage(NotReadyToPaint::AnimationsActive)); return Err(UnableToComposite::NotReadyToPaintImage(
NotReadyToPaint::AnimationsActive,
));
} }
if let Err(result) = self.is_ready_to_paint_image_output() { if let Err(result) = self.is_ready_to_paint_image_output() {
return Err(UnableToComposite::NotReadyToPaintImage(result)) return Err(UnableToComposite::NotReadyToPaintImage(result));
} }
} }
let rt_info = match target { let rt_info = match target {
#[cfg(feature = "gleam")] #[cfg(feature = "gleam")]
CompositeTarget::Window => { CompositeTarget::Window => gl::RenderTargetInfo::default(),
gl::RenderTargetInfo::default()
}
#[cfg(feature = "gleam")] #[cfg(feature = "gleam")]
CompositeTarget::WindowAndPng | CompositeTarget::WindowAndPng | CompositeTarget::PngFile => {
CompositeTarget::PngFile => {
gl::initialize_png(&*self.window.gl(), width, height) gl::initialize_png(&*self.window.gl(), width, height)
} },
#[cfg(not(feature = "gleam"))] #[cfg(not(feature = "gleam"))]
_ => () _ => (),
}; };
profile(ProfilerCategory::Compositing, None, self.time_profiler_chan.clone(), || { profile(
ProfilerCategory::Compositing,
None,
self.time_profiler_chan.clone(),
|| {
debug!("compositor: compositing"); debug!("compositor: compositing");
// Paint the scene. // Paint the scene.
// TODO(gw): Take notice of any errors the renderer returns! // TODO(gw): Take notice of any errors the renderer returns!
self.webrender.render(self.embedder_coordinates.framebuffer).ok(); self.webrender
}); .render(self.embedder_coordinates.framebuffer)
.ok();
},
);
// If there are pending paint metrics, we check if any of the painted epochs is // If there are pending paint metrics, we check if any of the painted epochs is
// one of the ones that the paint metrics recorder is expecting . In that case, // one of the ones that the paint metrics recorder is expecting . In that case,
@ -1222,7 +1275,9 @@ impl<Window: WindowMethods> IOCompositor<Window> {
// For each pending paint metrics pipeline id // For each pending paint metrics pipeline id
for (id, pending_epoch) in &self.pending_paint_metrics { for (id, pending_epoch) in &self.pending_paint_metrics {
// we get the last painted frame id from webrender // we get the last painted frame id from webrender
if let Some(webrender_api::Epoch(epoch)) = self.webrender.current_epoch(id.to_webrender()) { if let Some(webrender_api::Epoch(epoch)) =
self.webrender.current_epoch(id.to_webrender())
{
// and check if it is the one the layout thread is expecting, // and check if it is the one the layout thread is expecting,
let epoch = Epoch(epoch); let epoch = Epoch(epoch);
if *pending_epoch != epoch { if *pending_epoch != epoch {
@ -1256,27 +1311,31 @@ impl<Window: WindowMethods> IOCompositor<Window> {
bytes: ipc::IpcSharedMemory::from_bytes(&*img), bytes: ipc::IpcSharedMemory::from_bytes(&*img),
id: None, id: None,
}) })
} },
#[cfg(feature = "gleam")] #[cfg(feature = "gleam")]
CompositeTarget::PngFile => { CompositeTarget::PngFile => {
let gl = &*self.window.gl(); let gl = &*self.window.gl();
profile(ProfilerCategory::ImageSaving, None, self.time_profiler_chan.clone(), || { profile(
match opts::get().output_file.as_ref() { ProfilerCategory::ImageSaving,
None,
self.time_profiler_chan.clone(),
|| match opts::get().output_file.as_ref() {
Some(path) => match File::create(path) { Some(path) => match File::create(path) {
Ok(mut file) => { Ok(mut file) => {
let img = gl::draw_img(gl, rt_info, width, height); let img = gl::draw_img(gl, rt_info, width, height);
let dynamic_image = DynamicImage::ImageRgb8(img); let dynamic_image = DynamicImage::ImageRgb8(img);
if let Err(e) = dynamic_image.write_to(&mut file, ImageFormat::PNG) { if let Err(e) = dynamic_image.write_to(&mut file, ImageFormat::PNG)
{
error!("Failed to save {} ({}).", path, e); error!("Failed to save {} ({}).", path, e);
} }
}, },
Err(e) => error!("Failed to create {} ({}).", path, e), Err(e) => error!("Failed to create {} ({}).", path, e),
}, },
None => error!("No file specified."), None => error!("No file specified."),
} },
}); );
None None
} },
#[cfg(not(feature = "gleam"))] #[cfg(not(feature = "gleam"))]
_ => None, _ => None,
}; };
@@ -1301,7 +1360,10 @@ impl<Window: WindowMethods> IOCompositor<Window> {
} }
self.composition_request = CompositionRequest::CompositeNow(reason) self.composition_request = CompositionRequest::CompositeNow(reason)
} else if opts::get().is_running_problem_test { } else if opts::get().is_running_problem_test {
println!("composition_request is already {:?}", self.composition_request); println!(
"composition_request is already {:?}",
self.composition_request
);
} }
} }
@@ -1315,17 +1377,17 @@ impl<Window: WindowMethods> IOCompositor<Window> {
let mut found_recomposite_msg = false; let mut found_recomposite_msg = false;
while let Some(msg) = self.port.try_recv_compositor_msg() { while let Some(msg) = self.port.try_recv_compositor_msg() {
match msg { match msg {
Msg::Recomposite(_) if found_recomposite_msg => {} Msg::Recomposite(_) if found_recomposite_msg => {},
Msg::Recomposite(_) => { Msg::Recomposite(_) => {
found_recomposite_msg = true; found_recomposite_msg = true;
compositor_messages.push(msg) compositor_messages.push(msg)
} },
_ => compositor_messages.push(msg), _ => compositor_messages.push(msg),
} }
} }
for msg in compositor_messages { for msg in compositor_messages {
if !self.handle_browser_message(msg) { if !self.handle_browser_message(msg) {
return false return false;
} }
} }
true true
@@ -1342,10 +1404,8 @@ impl<Window: WindowMethods> IOCompositor<Window> {
} }
match self.composition_request { match self.composition_request {
CompositionRequest::NoCompositingNecessary => {} CompositionRequest::NoCompositingNecessary => {},
CompositionRequest::CompositeNow(_) => { CompositionRequest::CompositeNow(_) => self.composite(),
self.composite()
}
} }
if !self.pending_scroll_zoom_events.is_empty() && !self.waiting_for_results_of_scroll { if !self.pending_scroll_zoom_events.is_empty() && !self.waiting_for_results_of_scroll {
@@ -1368,10 +1428,10 @@ impl<Window: WindowMethods> IOCompositor<Window> {
let keep_going = self.handle_browser_message(msg); let keep_going = self.handle_browser_message(msg);
if need_recomposite { if need_recomposite {
self.composite(); self.composite();
break break;
} }
if !keep_going { if !keep_going {
break break;
} }
} }
} }
@@ -1397,42 +1457,45 @@ impl<Window: WindowMethods> IOCompositor<Window> {
webrender::DebugFlags::PROFILER_DBG | webrender::DebugFlags::PROFILER_DBG |
webrender::DebugFlags::GPU_TIME_QUERIES | webrender::DebugFlags::GPU_TIME_QUERIES |
webrender::DebugFlags::GPU_SAMPLE_QUERIES webrender::DebugFlags::GPU_SAMPLE_QUERIES
} },
WebRenderDebugOption::TextureCacheDebug => { WebRenderDebugOption::TextureCacheDebug => webrender::DebugFlags::TEXTURE_CACHE_DBG,
webrender::DebugFlags::TEXTURE_CACHE_DBG WebRenderDebugOption::RenderTargetDebug => webrender::DebugFlags::RENDER_TARGET_DBG,
}
WebRenderDebugOption::RenderTargetDebug => {
webrender::DebugFlags::RENDER_TARGET_DBG
}
}; };
flags.toggle(flag); flags.toggle(flag);
self.webrender.set_debug_flags(flags); self.webrender.set_debug_flags(flags);
let mut txn = webrender_api::Transaction::new(); let mut txn = webrender_api::Transaction::new();
txn.generate_frame(); txn.generate_frame();
self.webrender_api.send_transaction(self.webrender_document, txn); self.webrender_api
.send_transaction(self.webrender_document, txn);
} }
pub fn capture_webrender(&mut self) { pub fn capture_webrender(&mut self) {
let capture_id = now().to_timespec().sec.to_string(); let capture_id = now().to_timespec().sec.to_string();
let available_path = [env::current_dir(), Ok(env::temp_dir())].iter() let available_path = [env::current_dir(), Ok(env::temp_dir())]
.filter_map(|val| val.as_ref().map(|dir| dir.join("capture_webrender").join(&capture_id)).ok()) .iter()
.find(|val| { .filter_map(|val| {
match create_dir_all(&val) { val.as_ref()
.map(|dir| dir.join("capture_webrender").join(&capture_id))
.ok()
}).find(|val| match create_dir_all(&val) {
Ok(_) => true, Ok(_) => true,
Err(err) => { Err(err) => {
eprintln!("Unable to create path '{:?}' for capture: {:?}", &val, err); eprintln!("Unable to create path '{:?}' for capture: {:?}", &val, err);
false false
} },
}
}); });
match available_path { match available_path {
Some(capture_path) => { Some(capture_path) => {
let revision_file_path = capture_path.join("wr.txt"); let revision_file_path = capture_path.join("wr.txt");
debug!("Trying to save webrender capture under {:?}", &revision_file_path); debug!(
self.webrender_api.save_capture(capture_path, webrender_api::CaptureBits::all()); "Trying to save webrender capture under {:?}",
&revision_file_path
);
self.webrender_api
.save_capture(capture_path, webrender_api::CaptureBits::all());
match File::create(revision_file_path) { match File::create(revision_file_path) {
Ok(mut file) => { Ok(mut file) => {
@@ -1440,11 +1503,14 @@ impl<Window: WindowMethods> IOCompositor<Window> {
if let Err(err) = write!(&mut file, "{}", revision) { if let Err(err) = write!(&mut file, "{}", revision) {
eprintln!("Unable to write webrender revision: {:?}", err) eprintln!("Unable to write webrender revision: {:?}", err)
} }
} },
Err(err) => eprintln!("Capture triggered, creating webrender revision info skipped: {:?}", err) Err(err) => eprintln!(
"Capture triggered, creating webrender revision info skipped: {:?}",
err
),
} }
}, },
None => eprintln!("Unable to locate path to save captures") None => eprintln!("Unable to locate path to save captures"),
} }
} }
} }


@@ -20,7 +20,6 @@ use style_traits::viewport::ViewportConstraints;
use webrender; use webrender;
use webrender_api::{self, DeviceIntPoint, DeviceUintSize}; use webrender_api::{self, DeviceIntPoint, DeviceUintSize};
/// Sends messages to the compositor. /// Sends messages to the compositor.
pub struct CompositorProxy { pub struct CompositorProxy {
pub sender: Sender<Msg>, pub sender: Sender<Msg>,
@@ -48,7 +47,7 @@ impl Clone for CompositorProxy {
/// The port that the compositor receives messages on. /// The port that the compositor receives messages on.
pub struct CompositorReceiver { pub struct CompositorReceiver {
pub receiver: Receiver<Msg> pub receiver: Receiver<Msg>,
} }
impl CompositorReceiver { impl CompositorReceiver {


@@ -2,12 +2,10 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this * License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use gleam::gl; use gleam::gl;
use image::RgbImage; use image::RgbImage;
use servo_geometry::DeviceUintLength; use servo_geometry::DeviceUintLength;
#[derive(Default)] #[derive(Default)]
pub struct RenderTargetInfo { pub struct RenderTargetInfo {
framebuffer_ids: Vec<gl::GLuint>, framebuffer_ids: Vec<gl::GLuint>,
@@ -16,7 +14,9 @@ pub struct RenderTargetInfo {
} }
pub fn initialize_png( pub fn initialize_png(
gl: &gl::Gl, width: DeviceUintLength, height: DeviceUintLength gl: &gl::Gl,
width: DeviceUintLength,
height: DeviceUintLength,
) -> RenderTargetInfo { ) -> RenderTargetInfo {
let framebuffer_ids = gl.gen_framebuffers(1); let framebuffer_ids = gl.gen_framebuffers(1);
gl.bind_framebuffer(gl::FRAMEBUFFER, framebuffer_ids[0]); gl.bind_framebuffer(gl::FRAMEBUFFER, framebuffer_ids[0]);
@@ -24,27 +24,53 @@ pub fn initialize_png(
let texture_ids = gl.gen_textures(1); let texture_ids = gl.gen_textures(1);
gl.bind_texture(gl::TEXTURE_2D, texture_ids[0]); gl.bind_texture(gl::TEXTURE_2D, texture_ids[0]);
gl.tex_image_2d(gl::TEXTURE_2D, 0, gl::RGB as gl::GLint, width.get() as gl::GLsizei, gl.tex_image_2d(
height.get() as gl::GLsizei, 0, gl::RGB, gl::UNSIGNED_BYTE, None); gl::TEXTURE_2D,
gl.tex_parameter_i(gl::TEXTURE_2D, gl::TEXTURE_MAG_FILTER, gl::NEAREST as gl::GLint); 0,
gl.tex_parameter_i(gl::TEXTURE_2D, gl::TEXTURE_MIN_FILTER, gl::NEAREST as gl::GLint); gl::RGB as gl::GLint,
width.get() as gl::GLsizei,
height.get() as gl::GLsizei,
0,
gl::RGB,
gl::UNSIGNED_BYTE,
None,
);
gl.tex_parameter_i(
gl::TEXTURE_2D,
gl::TEXTURE_MAG_FILTER,
gl::NEAREST as gl::GLint,
);
gl.tex_parameter_i(
gl::TEXTURE_2D,
gl::TEXTURE_MIN_FILTER,
gl::NEAREST as gl::GLint,
);
gl.framebuffer_texture_2d(gl::FRAMEBUFFER, gl::COLOR_ATTACHMENT0, gl::TEXTURE_2D, gl.framebuffer_texture_2d(
texture_ids[0], 0); gl::FRAMEBUFFER,
gl::COLOR_ATTACHMENT0,
gl::TEXTURE_2D,
texture_ids[0],
0,
);
gl.bind_texture(gl::TEXTURE_2D, 0); gl.bind_texture(gl::TEXTURE_2D, 0);
let renderbuffer_ids = gl.gen_renderbuffers(1); let renderbuffer_ids = gl.gen_renderbuffers(1);
let depth_rb = renderbuffer_ids[0]; let depth_rb = renderbuffer_ids[0];
gl.bind_renderbuffer(gl::RENDERBUFFER, depth_rb); gl.bind_renderbuffer(gl::RENDERBUFFER, depth_rb);
gl.renderbuffer_storage(gl::RENDERBUFFER, gl.renderbuffer_storage(
gl::RENDERBUFFER,
gl::DEPTH_COMPONENT24, gl::DEPTH_COMPONENT24,
width.get() as gl::GLsizei, width.get() as gl::GLsizei,
height.get() as gl::GLsizei); height.get() as gl::GLsizei,
gl.framebuffer_renderbuffer(gl::FRAMEBUFFER, );
gl.framebuffer_renderbuffer(
gl::FRAMEBUFFER,
gl::DEPTH_ATTACHMENT, gl::DEPTH_ATTACHMENT,
gl::RENDERBUFFER, gl::RENDERBUFFER,
depth_rb); depth_rb,
);
RenderTargetInfo { RenderTargetInfo {
framebuffer_ids, framebuffer_ids,
@@ -70,10 +96,12 @@ pub fn draw_img(
gl.bind_vertex_array(0); gl.bind_vertex_array(0);
let mut pixels = gl.read_pixels( let mut pixels = gl.read_pixels(
0, 0, 0,
0,
width as gl::GLsizei, width as gl::GLsizei,
height as gl::GLsizei, height as gl::GLsizei,
gl::RGB, gl::UNSIGNED_BYTE, gl::RGB,
gl::UNSIGNED_BYTE,
); );
gl.bind_framebuffer(gl::FRAMEBUFFER, 0); gl.bind_framebuffer(gl::FRAMEBUFFER, 0);
@@ -88,10 +116,9 @@ pub fn draw_img(
for y in 0..height { for y in 0..height {
let dst_start = y * stride; let dst_start = y * stride;
let src_start = (height - y - 1) * stride; let src_start = (height - y - 1) * stride;
let src_slice = &orig_pixels[src_start .. src_start + stride]; let src_slice = &orig_pixels[src_start..src_start + stride];
(&mut pixels[dst_start .. dst_start + stride]).clone_from_slice(&src_slice[..stride]); (&mut pixels[dst_start..dst_start + stride]).clone_from_slice(&src_slice[..stride]);
} }
RgbImage::from_raw(width as u32, height as u32, pixels) RgbImage::from_raw(width as u32, height as u32, pixels).expect("Flipping image failed!")
.expect("Flipping image failed!")
} }


@@ -19,12 +19,15 @@ pub struct TouchHandler {
#[derive(Clone, Copy, Debug)] #[derive(Clone, Copy, Debug)]
pub struct TouchPoint { pub struct TouchPoint {
pub id: TouchId, pub id: TouchId,
pub point: TypedPoint2D<f32, DevicePixel> pub point: TypedPoint2D<f32, DevicePixel>,
} }
impl TouchPoint { impl TouchPoint {
pub fn new(id: TouchId, point: TypedPoint2D<f32, DevicePixel>) -> Self { pub fn new(id: TouchId, point: TypedPoint2D<f32, DevicePixel>) -> Self {
TouchPoint { id: id, point: point } TouchPoint {
id: id,
point: point,
}
} }
} }
@@ -87,14 +90,17 @@ impl TouchHandler {
}; };
} }
pub fn on_touch_move(&mut self, id: TouchId, point: TypedPoint2D<f32, DevicePixel>) pub fn on_touch_move(
-> TouchAction { &mut self,
id: TouchId,
point: TypedPoint2D<f32, DevicePixel>,
) -> TouchAction {
let idx = match self.active_touch_points.iter_mut().position(|t| t.id == id) { let idx = match self.active_touch_points.iter_mut().position(|t| t.id == id) {
Some(i) => i, Some(i) => i,
None => { None => {
warn!("Got a touchmove event for a non-active touch point"); warn!("Got a touchmove event for a non-active touch point");
return TouchAction::NoAction; return TouchAction::NoAction;
} },
}; };
let old_point = self.active_touch_points[idx].point; let old_point = self.active_touch_points[idx].point;
@@ -110,14 +116,12 @@ impl TouchHandler {
} else { } else {
TouchAction::NoAction TouchAction::NoAction
} }
} },
Panning => { Panning => {
let delta = point - old_point; let delta = point - old_point;
TouchAction::Scroll(delta) TouchAction::Scroll(delta)
} },
DefaultPrevented => { DefaultPrevented => TouchAction::DispatchEvent,
TouchAction::DispatchEvent
}
Pinching => { Pinching => {
let (d0, c0) = self.pinch_distance_and_center(); let (d0, c0) = self.pinch_distance_and_center();
self.active_touch_points[idx].point = point; self.active_touch_points[idx].point = point;
@@ -127,7 +131,7 @@ impl TouchHandler {
let scroll_delta = c1 - c0 * TypedScale::new(magnification); let scroll_delta = c1 - c0 * TypedScale::new(magnification);
TouchAction::Zoom(magnification, scroll_delta) TouchAction::Zoom(magnification, scroll_delta)
} },
WaitingForScript => TouchAction::NoAction, WaitingForScript => TouchAction::NoAction,
MultiTouch => TouchAction::NoAction, MultiTouch => TouchAction::NoAction,
Nothing => unreachable!(), Nothing => unreachable!(),
@@ -141,15 +145,18 @@ impl TouchHandler {
action action
} }
pub fn on_touch_up(&mut self, id: TouchId, _point: TypedPoint2D<f32, DevicePixel>) pub fn on_touch_up(
-> TouchAction { &mut self,
id: TouchId,
_point: TypedPoint2D<f32, DevicePixel>,
) -> TouchAction {
match self.active_touch_points.iter().position(|t| t.id == id) { match self.active_touch_points.iter().position(|t| t.id == id) {
Some(i) => { Some(i) => {
self.active_touch_points.swap_remove(i); self.active_touch_points.swap_remove(i);
} },
None => { None => {
warn!("Got a touch up event for a non-active touch point"); warn!("Got a touch up event for a non-active touch point");
} },
} }
match self.state { match self.state {
Touching => { Touching => {
@@ -157,21 +164,21 @@ impl TouchHandler {
// FIXME: Don't send a click if preventDefault is called on the touchend event. // FIXME: Don't send a click if preventDefault is called on the touchend event.
self.state = Nothing; self.state = Nothing;
TouchAction::Click TouchAction::Click
} },
Nothing | Panning => { Nothing | Panning => {
self.state = Nothing; self.state = Nothing;
TouchAction::NoAction TouchAction::NoAction
} },
Pinching => { Pinching => {
self.state = Panning; self.state = Panning;
TouchAction::NoAction TouchAction::NoAction
} },
WaitingForScript | DefaultPrevented | MultiTouch => { WaitingForScript | DefaultPrevented | MultiTouch => {
if self.active_touch_points.is_empty() { if self.active_touch_points.is_empty() {
self.state = Nothing; self.state = Nothing;
} }
TouchAction::NoAction TouchAction::NoAction
} },
} }
} }
@@ -179,25 +186,25 @@ impl TouchHandler {
match self.active_touch_points.iter().position(|t| t.id == id) { match self.active_touch_points.iter().position(|t| t.id == id) {
Some(i) => { Some(i) => {
self.active_touch_points.swap_remove(i); self.active_touch_points.swap_remove(i);
} },
None => { None => {
warn!("Got a touchcancel event for a non-active touch point"); warn!("Got a touchcancel event for a non-active touch point");
return; return;
} },
} }
match self.state { match self.state {
Nothing => {} Nothing => {},
Touching | Panning => { Touching | Panning => {
self.state = Nothing; self.state = Nothing;
} },
Pinching => { Pinching => {
self.state = Panning; self.state = Panning;
} },
WaitingForScript | DefaultPrevented | MultiTouch => { WaitingForScript | DefaultPrevented | MultiTouch => {
if self.active_touch_points.is_empty() { if self.active_touch_points.is_empty() {
self.state = Nothing; self.state = Nothing;
} }
} },
} }
} }
@@ -209,7 +216,7 @@ impl TouchHandler {
1 => Touching, 1 => Touching,
2 => Pinching, 2 => Pinching,
_ => MultiTouch, _ => MultiTouch,
} },
} }
} }
} }


@@ -8,7 +8,12 @@
use std::path::PathBuf; use std::path::PathBuf;
#[cfg(all(unix, not(target_os = "macos"), not(target_os = "ios"), not(target_os = "android")))] #[cfg(all(
unix,
not(target_os = "macos"),
not(target_os = "ios"),
not(target_os = "android")
))]
pub fn default_config_dir() -> Option<PathBuf> { pub fn default_config_dir() -> Option<PathBuf> {
let mut config_dir = ::dirs::config_dir().unwrap(); let mut config_dir = ::dirs::config_dir().unwrap();
config_dir.push("servo"); config_dir.push("servo");


@@ -9,17 +9,21 @@ extern crate dirs;
extern crate embedder_traits; extern crate embedder_traits;
extern crate euclid; extern crate euclid;
extern crate getopts; extern crate getopts;
#[macro_use] extern crate lazy_static; #[macro_use]
#[macro_use] extern crate log; extern crate lazy_static;
#[macro_use]
extern crate log;
extern crate num_cpus; extern crate num_cpus;
extern crate rustc_serialize; extern crate rustc_serialize;
#[macro_use] extern crate serde; #[macro_use]
extern crate serde;
extern crate servo_geometry; extern crate servo_geometry;
extern crate servo_url; extern crate servo_url;
extern crate url; extern crate url;
pub mod basedir; pub mod basedir;
#[allow(unsafe_code)] pub mod opts; #[allow(unsafe_code)]
pub mod opts;
pub mod prefs; pub mod prefs;
pub fn servo_version() -> String { pub fn servo_version() -> String {


@@ -22,7 +22,6 @@ use std::process;
use std::sync::atomic::{AtomicBool, ATOMIC_BOOL_INIT, Ordering}; use std::sync::atomic::{AtomicBool, ATOMIC_BOOL_INIT, Ordering};
use url::{self, Url}; use url::{self, Url};
/// Global flags for Servo, currently set on the command line. /// Global flags for Servo, currently set on the command line.
#[derive(Clone, Deserialize, Serialize)] #[derive(Clone, Deserialize, Serialize)]
pub struct Opts { pub struct Opts {
@@ -230,11 +229,13 @@ pub struct Opts {
} }
fn print_usage(app: &str, opts: &Options) { fn print_usage(app: &str, opts: &Options) {
let message = format!("Usage: {} [ options ... ] [URL]\n\twhere options include", app); let message = format!(
"Usage: {} [ options ... ] [URL]\n\twhere options include",
app
);
println!("{}", opts.usage(&message)); println!("{}", opts.usage(&message));
} }
/// Debug options for Servo, currently set on the command line with -Z /// Debug options for Servo, currently set on the command line with -Z
#[derive(Default)] #[derive(Default)]
pub struct DebugOptions { pub struct DebugOptions {
@@ -333,7 +334,6 @@ pub struct DebugOptions {
pub signpost: bool, pub signpost: bool,
} }
impl DebugOptions { impl DebugOptions {
pub fn extend(&mut self, debug_string: String) -> Result<(), String> { pub fn extend(&mut self, debug_string: String) -> Result<(), String> {
for option in debug_string.split(',') { for option in debug_string.split(',') {
@@ -371,50 +371,103 @@ impl DebugOptions {
"" => {}, "" => {},
_ => return Err(String::from(option)), _ => return Err(String::from(option)),
}; };
}; }
Ok(()) Ok(())
} }
} }
fn print_debug_usage(app: &str) -> ! { fn print_debug_usage(app: &str) -> ! {
fn print_option(name: &str, description: &str) { fn print_option(name: &str, description: &str) {
println!("\t{:<35} {}", name, description); println!("\t{:<35} {}", name, description);
} }
println!("Usage: {} debug option,[options,...]\n\twhere options include\n\nOptions:", app); println!(
"Usage: {} debug option,[options,...]\n\twhere options include\n\nOptions:",
app
);
print_option("bubble-widths", "Bubble intrinsic widths separately like other engines."); print_option(
"bubble-widths",
"Bubble intrinsic widths separately like other engines.",
);
print_option("disable-text-aa", "Disable antialiasing of rendered text."); print_option("disable-text-aa", "Disable antialiasing of rendered text.");
print_option("disable-canvas-aa", "Disable antialiasing on the HTML canvas element."); print_option(
print_option("dump-style-tree", "Print the DOM with computed styles after each restyle."); "disable-canvas-aa",
"Disable antialiasing on the HTML canvas element.",
);
print_option(
"dump-style-tree",
"Print the DOM with computed styles after each restyle.",
);
print_option("dump-flow-tree", "Print the flow tree after each layout."); print_option("dump-flow-tree", "Print the flow tree after each layout.");
print_option("dump-display-list", "Print the display list after each layout."); print_option(
print_option("dump-display-list-json", "Print the display list in JSON form."); "dump-display-list",
print_option("relayout-event", "Print notifications when there is a relayout."); "Print the display list after each layout.",
print_option("profile-script-events", "Enable profiling of script-related events."); );
print_option("profile-heartbeats", "Enable heartbeats for all thread categories."); print_option(
print_option("show-fragment-borders", "Paint borders along fragment boundaries."); "dump-display-list-json",
print_option("show-parallel-layout", "Mark which thread laid each flow out with colors."); "Print the display list in JSON form.",
print_option("trace-layout", "Write layout trace to an external file for debugging."); );
print_option("disable-share-style-cache", print_option(
"Disable the style sharing cache."); "relayout-event",
print_option("parallel-display-list-building", "Build display lists in parallel."); "Print notifications when there is a relayout.",
print_option("convert-mouse-to-touch", "Send touch events instead of mouse events"); );
print_option("replace-surrogates", "Replace unpaires surrogates in DOM strings with U+FFFD. \ print_option(
See https://github.com/servo/servo/issues/6564"); "profile-script-events",
"Enable profiling of script-related events.",
);
print_option(
"profile-heartbeats",
"Enable heartbeats for all thread categories.",
);
print_option(
"show-fragment-borders",
"Paint borders along fragment boundaries.",
);
print_option(
"show-parallel-layout",
"Mark which thread laid each flow out with colors.",
);
print_option(
"trace-layout",
"Write layout trace to an external file for debugging.",
);
print_option(
"disable-share-style-cache",
"Disable the style sharing cache.",
);
print_option(
"parallel-display-list-building",
"Build display lists in parallel.",
);
print_option(
"convert-mouse-to-touch",
"Send touch events instead of mouse events",
);
print_option(
"replace-surrogates",
"Replace unpaires surrogates in DOM strings with U+FFFD. \
See https://github.com/servo/servo/issues/6564",
);
print_option("gc-profile", "Log GC passes and their durations."); print_option("gc-profile", "Log GC passes and their durations.");
print_option("load-webfonts-synchronously", print_option(
"Load web fonts synchronously to avoid non-deterministic network-driven reflows"); "load-webfonts-synchronously",
print_option("disable-vsync", "Load web fonts synchronously to avoid non-deterministic network-driven reflows",
"Disable vsync mode in the compositor to allow profiling at more than monitor refresh rate"); );
print_option(
"disable-vsync",
"Disable vsync mode in the compositor to allow profiling at more than monitor refresh rate",
);
print_option("wr-stats", "Show WebRender profiler on screen."); print_option("wr-stats", "Show WebRender profiler on screen.");
print_option("msaa", "Use multisample antialiasing in WebRender."); print_option("msaa", "Use multisample antialiasing in WebRender.");
print_option("full-backtraces", "Print full backtraces for all errors"); print_option("full-backtraces", "Print full backtraces for all errors");
print_option("wr-debug", "Display webrender tile borders."); print_option("wr-debug", "Display webrender tile borders.");
print_option("wr-no-batch", "Disable webrender instanced batching."); print_option("wr-no-batch", "Disable webrender instanced batching.");
print_option("precache-shaders", "Compile all shaders during init."); print_option("precache-shaders", "Compile all shaders during init.");
print_option("signpost", "Emit native OS signposts for profile events (currently macOS only)"); print_option(
"signpost",
"Emit native OS signposts for profile events (currently macOS only)",
);
println!(""); println!("");
@@ -445,7 +498,7 @@ enum UserAgent {
Desktop, Desktop,
Android, Android,
#[allow(non_camel_case_types)] #[allow(non_camel_case_types)]
iOS iOS,
} }
fn default_user_agent_string(agent: UserAgent) -> &'static str { fn default_user_agent_string(agent: UserAgent) -> &'static str {
@@ -468,17 +521,12 @@ fn default_user_agent_string(agent: UserAgent) -> &'static str {
const DESKTOP_UA_STRING: &'static str = const DESKTOP_UA_STRING: &'static str =
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:55.0) Servo/1.0 Firefox/55.0"; "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:55.0) Servo/1.0 Firefox/55.0";
match agent { match agent {
UserAgent::Desktop => { UserAgent::Desktop => DESKTOP_UA_STRING,
DESKTOP_UA_STRING UserAgent::Android => "Mozilla/5.0 (Android; Mobile; rv:55.0) Servo/1.0 Firefox/55.0",
}
UserAgent::Android => {
"Mozilla/5.0 (Android; Mobile; rv:55.0) Servo/1.0 Firefox/55.0"
}
UserAgent::iOS => { UserAgent::iOS => {
"Mozilla/5.0 (iPhone; CPU iPhone OS 8_3 like Mac OS X; rv:55.0) Servo/1.0 Firefox/55.0" "Mozilla/5.0 (iPhone; CPU iPhone OS 8_3 like Mac OS X; rv:55.0) Servo/1.0 Firefox/55.0"
} },
} }
} }
@@ -564,54 +612,146 @@ pub fn from_cmdline_args(args: &[String]) -> ArgumentParsingResult {
opts.optopt("o", "output", "Output file", "output.png"); opts.optopt("o", "output", "Output file", "output.png");
opts.optopt("s", "size", "Size of tiles", "512"); opts.optopt("s", "size", "Size of tiles", "512");
opts.optopt("", "device-pixel-ratio", "Device pixels per px", ""); opts.optopt("", "device-pixel-ratio", "Device pixels per px", "");
opts.optflagopt("p", "profile", "Time profiler flag and either a TSV output filename \ opts.optflagopt(
OR an interval for output to Stdout (blank for Stdout with interval of 5s)", "10 \ "p",
OR time.tsv"); "profile",
opts.optflagopt("", "profiler-trace-path", "Time profiler flag and either a TSV output filename \
OR an interval for output to Stdout (blank for Stdout with interval of 5s)",
"10 \
OR time.tsv",
);
opts.optflagopt(
"",
"profiler-trace-path",
"Path to dump a self-contained HTML timeline of profiler traces", "Path to dump a self-contained HTML timeline of profiler traces",
""); "",
opts.optflagopt("m", "memory-profile", "Memory profiler flag and output interval", "10"); );
opts.optflagopt(
"m",
"memory-profile",
"Memory profiler flag and output interval",
"10",
);
opts.optflag("x", "exit", "Exit after load flag"); opts.optflag("x", "exit", "Exit after load flag");
opts.optopt("y", "layout-threads", "Number of threads to use for layout", "1"); opts.optopt(
opts.optflag("i", "nonincremental-layout", "Enable to turn off incremental layout."); "y",
opts.optflagopt("", "userscripts", "layout-threads",
"Uses userscripts in resources/user-agent-js, or a specified full path", ""); "Number of threads to use for layout",
opts.optmulti("", "user-stylesheet", "1",
"A user stylesheet to be added to every document", "file.css"); );
opts.optopt("", "shaders", opts.optflag(
"Shaders will be loaded from the specified directory instead of using the builtin ones.", ""); "i",
"nonincremental-layout",
"Enable to turn off incremental layout.",
);
opts.optflagopt(
"",
"userscripts",
"Uses userscripts in resources/user-agent-js, or a specified full path",
"",
);
opts.optmulti(
"",
"user-stylesheet",
"A user stylesheet to be added to every document",
"file.css",
);
opts.optopt(
"",
"shaders",
"Shaders will be loaded from the specified directory instead of using the builtin ones.",
"",
);
opts.optflag("z", "headless", "Headless mode"); opts.optflag("z", "headless", "Headless mode");
opts.optflag("f", "hard-fail", "Exit on thread failure instead of displaying about:failure"); opts.optflag(
opts.optflag("F", "soft-fail", "Display about:failure on thread failure instead of exiting"); "f",
opts.optflagopt("", "remote-debugging-port", "Start remote debugger server on port", "2794"); "hard-fail",
opts.optflagopt("", "devtools", "Start remote devtools server on port", "6000"); "Exit on thread failure instead of displaying about:failure",
opts.optflagopt("", "webdriver", "Start remote WebDriver server on port", "7000"); );
opts.optflag(
"F",
"soft-fail",
"Display about:failure on thread failure instead of exiting",
);
opts.optflagopt(
"",
"remote-debugging-port",
"Start remote debugger server on port",
"2794",
);
opts.optflagopt(
"",
"devtools",
"Start remote devtools server on port",
"6000",
);
opts.optflagopt(
"",
"webdriver",
"Start remote WebDriver server on port",
"7000",
);
opts.optopt("", "resolution", "Set window resolution.", "1024x740"); opts.optopt("", "resolution", "Set window resolution.", "1024x740");
opts.optopt("u", opts.optopt(
"u",
"user-agent", "user-agent",
"Set custom user agent string (or ios / android / desktop for platform default)", "Set custom user agent string (or ios / android / desktop for platform default)",
"NCSA Mosaic/1.0 (X11;SunOS 4.1.4 sun4m)"); "NCSA Mosaic/1.0 (X11;SunOS 4.1.4 sun4m)",
);
opts.optflag("M", "multiprocess", "Run in multiprocess mode"); opts.optflag("M", "multiprocess", "Run in multiprocess mode");
opts.optflag("S", "sandbox", "Run in a sandbox if multiprocess"); opts.optflag("S", "sandbox", "Run in a sandbox if multiprocess");
opts.optopt("", opts.optopt(
"",
"random-pipeline-closure-probability", "random-pipeline-closure-probability",
"Probability of randomly closing a pipeline (for testing constellation hardening).", "Probability of randomly closing a pipeline (for testing constellation hardening).",
"0.0"); "0.0",
opts.optopt("", "random-pipeline-closure-seed", "A fixed seed for repeatbility of random pipeline closure.", ""); );
opts.optmulti("Z", "debug", opts.optopt(
"A comma-separated string of debug options. Pass help to show available options.", ""); "",
"random-pipeline-closure-seed",
"A fixed seed for repeatbility of random pipeline closure.",
"",
);
opts.optmulti(
"Z",
"debug",
"A comma-separated string of debug options. Pass help to show available options.",
"",
);
opts.optflag("h", "help", "Print this message"); opts.optflag("h", "help", "Print this message");
opts.optopt("", "resources-path", "Path to find static resources", "/home/servo/resources"); opts.optopt(
opts.optopt("", "certificate-path", "Path to find SSL certificates", "/home/servo/resources/certs"); "",
opts.optopt("", "content-process" , "Run as a content process and connect to the given pipe", "resources-path",
"servo-ipc-channel.abcdefg"); "Path to find static resources",
opts.optmulti("", "pref", "/home/servo/resources",
"A preference to set to enable", "dom.bluetooth.enabled"); );
opts.optopt(
"",
"certificate-path",
"Path to find SSL certificates",
"/home/servo/resources/certs",
);
opts.optopt(
"",
"content-process",
"Run as a content process and connect to the given pipe",
"servo-ipc-channel.abcdefg",
);
opts.optmulti(
"",
"pref",
"A preference to set to enable",
"dom.bluetooth.enabled",
);
opts.optflag("b", "no-native-titlebar", "Do not use native titlebar"); opts.optflag("b", "no-native-titlebar", "Do not use native titlebar");
opts.optflag("w", "webrender", "Use webrender backend"); opts.optflag("w", "webrender", "Use webrender backend");
opts.optopt("G", "graphics", "Select graphics backend (gl or es2)", "gl"); opts.optopt("G", "graphics", "Select graphics backend (gl or es2)", "gl");
opts.optopt("", "config-dir", opts.optopt(
"config directory following xdg spec on linux platform", ""); "",
"config-dir",
"config directory following xdg spec on linux platform",
"",
);
opts.optflag("v", "version", "Display servo version information"); opts.optflag("v", "version", "Display servo version information");
opts.optflag("", "unminify-js", "Unminify Javascript"); opts.optflag("", "unminify-js", "Unminify Javascript");
opts.optopt("", "profiler-db-user", "Profiler database user", ""); opts.optopt("", "profiler-db-user", "Profiler database user", "");
@@ -654,44 +794,50 @@ pub fn from_cmdline_args(args: &[String]) -> ArgumentParsingResult {
} else { } else {
None None
}; };
let is_running_problem_test = let is_running_problem_test = url_opt.as_ref().map_or(false, |url| {
url_opt
.as_ref()
.map_or(false, |url|
url.starts_with("http://web-platform.test:8000/2dcontext/drawing-images-to-the-canvas/") || url.starts_with("http://web-platform.test:8000/2dcontext/drawing-images-to-the-canvas/") ||
url.starts_with("http://web-platform.test:8000/_mozilla/mozilla/canvas/") || url.starts_with("http://web-platform.test:8000/_mozilla/mozilla/canvas/") ||
url.starts_with("http://web-platform.test:8000/_mozilla/css/canvas_over_area.html")); url.starts_with("http://web-platform.test:8000/_mozilla/css/canvas_over_area.html")
});
let url_opt = url_opt.and_then(|url_string| parse_url_or_filename(&cwd, url_string) let url_opt = url_opt.and_then(|url_string| {
parse_url_or_filename(&cwd, url_string)
.or_else(|error| { .or_else(|error| {
warn!("URL parsing failed ({:?}).", error); warn!("URL parsing failed ({:?}).", error);
Err(error) Err(error)
}).ok()); }).ok()
});
let tile_size: usize = match opt_match.opt_str("s") { let tile_size: usize = match opt_match.opt_str("s") {
Some(tile_size_str) => tile_size_str.parse() Some(tile_size_str) => tile_size_str
.parse()
.unwrap_or_else(|err| args_fail(&format!("Error parsing option: -s ({})", err))), .unwrap_or_else(|err| args_fail(&format!("Error parsing option: -s ({})", err))),
None => 512, None => 512,
}; };
let device_pixels_per_px = opt_match.opt_str("device-pixel-ratio").map(|dppx_str| let device_pixels_per_px = opt_match.opt_str("device-pixel-ratio").map(|dppx_str| {
dppx_str.parse() dppx_str.parse().unwrap_or_else(|err| {
.unwrap_or_else(|err| args_fail(&format!("Error parsing option: --device-pixel-ratio ({})", err))) args_fail(&format!(
); "Error parsing option: --device-pixel-ratio ({})",
err
))
})
});
// If only the flag is present, default to a 5 second period for both profilers // If only the flag is present, default to a 5 second period for both profilers
let time_profiling = if opt_match.opt_present("p") { let time_profiling = if opt_match.opt_present("p") {
match opt_match.opt_str("p") { match opt_match.opt_str("p") {
Some(argument) => match argument.parse::<f64>() { Some(argument) => match argument.parse::<f64>() {
Ok(interval) => Some(OutputOptions::Stdout(interval)) , Ok(interval) => Some(OutputOptions::Stdout(interval)),
Err(_) => { Err(_) => match ServoUrl::parse(&argument) {
match ServoUrl::parse(&argument) { Ok(url) => Some(OutputOptions::DB(
Ok(url) => Some(OutputOptions::DB(url, opt_match.opt_str("profiler-db-name"), url,
opt_match.opt_str("profiler-db-name"),
opt_match.opt_str("profiler-db-user"), opt_match.opt_str("profiler-db-user"),
opt_match.opt_str("profiler-db-pass"))), opt_match.opt_str("profiler-db-pass"),
)),
Err(_) => Some(OutputOptions::FileName(argument)), Err(_) => Some(OutputOptions::FileName(argument)),
} },
}
}, },
None => Some(OutputOptions::Stdout(5.0 as f64)), None => Some(OutputOptions::Stdout(5.0 as f64)),
} }
@@ -704,34 +850,50 @@ pub fn from_cmdline_args(args: &[String]) -> ArgumentParsingResult {
let mut path = PathBuf::from(time_profiler_trace_path); let mut path = PathBuf::from(time_profiler_trace_path);
path.pop(); path.pop();
if let Err(why) = fs::create_dir_all(&path) { if let Err(why) = fs::create_dir_all(&path) {
error!("Couldn't create/open {:?}: {:?}", error!(
Path::new(time_profiler_trace_path).to_string_lossy(), why); "Couldn't create/open {:?}: {:?}",
Path::new(time_profiler_trace_path).to_string_lossy(),
why
);
} }
} }
let mem_profiler_period = opt_match.opt_default("m", "5").map(|period| { let mem_profiler_period = opt_match.opt_default("m", "5").map(|period| {
period.parse().unwrap_or_else(|err| args_fail(&format!("Error parsing option: -m ({})", err))) period
.parse()
.unwrap_or_else(|err| args_fail(&format!("Error parsing option: -m ({})", err)))
}); });
let mut layout_threads: Option<usize> = opt_match.opt_str("y") let mut layout_threads: Option<usize> = opt_match.opt_str("y").map(|layout_threads_str| {
.map(|layout_threads_str| { layout_threads_str
layout_threads_str.parse() .parse()
.unwrap_or_else(|err| args_fail(&format!("Error parsing option: -y ({})", err))) .unwrap_or_else(|err| args_fail(&format!("Error parsing option: -y ({})", err)))
}); });
let nonincremental_layout = opt_match.opt_present("i"); let nonincremental_layout = opt_match.opt_present("i");
let random_pipeline_closure_probability = opt_match.opt_str("random-pipeline-closure-probability").map(|prob| let random_pipeline_closure_probability = opt_match
.opt_str("random-pipeline-closure-probability")
.map(|prob| {
prob.parse().unwrap_or_else(|err| { prob.parse().unwrap_or_else(|err| {
args_fail(&format!("Error parsing option: --random-pipeline-closure-probability ({})", err)) args_fail(&format!(
"Error parsing option: --random-pipeline-closure-probability ({})",
err
))
}) })
); });
let random_pipeline_closure_seed = opt_match.opt_str("random-pipeline-closure-seed").map(|seed| let random_pipeline_closure_seed =
opt_match
.opt_str("random-pipeline-closure-seed")
.map(|seed| {
seed.parse().unwrap_or_else(|err| { seed.parse().unwrap_or_else(|err| {
args_fail(&format!("Error parsing option: --random-pipeline-closure-seed ({})", err)) args_fail(&format!(
"Error parsing option: --random-pipeline-closure-seed ({})",
err
))
}) })
); });
let mut bubble_inline_sizes_separately = debug_options.bubble_widths; let mut bubble_inline_sizes_separately = debug_options.bubble_widths;
if debug_options.trace_layout { if debug_options.trace_layout {
@@ -739,29 +901,40 @@ pub fn from_cmdline_args(args: &[String]) -> ArgumentParsingResult {
bubble_inline_sizes_separately = true; bubble_inline_sizes_separately = true;
} }
let debugger_port = opt_match.opt_default("remote-debugging-port", "2794").map(|port| { let debugger_port = opt_match
port.parse() .opt_default("remote-debugging-port", "2794")
.unwrap_or_else(|err| args_fail(&format!("Error parsing option: --remote-debugging-port ({})", err))) .map(|port| {
port.parse().unwrap_or_else(|err| {
args_fail(&format!(
"Error parsing option: --remote-debugging-port ({})",
err
))
})
}); });
let devtools_port = opt_match.opt_default("devtools", "6000").map(|port| { let devtools_port = opt_match.opt_default("devtools", "6000").map(|port| {
port.parse().unwrap_or_else(|err| args_fail(&format!("Error parsing option: --devtools ({})", err))) port.parse()
.unwrap_or_else(|err| args_fail(&format!("Error parsing option: --devtools ({})", err)))
}); });
let webdriver_port = opt_match.opt_default("webdriver", "7000").map(|port| { let webdriver_port = opt_match.opt_default("webdriver", "7000").map(|port| {
port.parse().unwrap_or_else(|err| args_fail(&format!("Error parsing option: --webdriver ({})", err))) port.parse().unwrap_or_else(|err| {
args_fail(&format!("Error parsing option: --webdriver ({})", err))
})
}); });
let initial_window_size = match opt_match.opt_str("resolution") { let initial_window_size = match opt_match.opt_str("resolution") {
Some(res_string) => { Some(res_string) => {
let res: Vec<u32> = res_string.split('x').map(|r| { let res: Vec<u32> = res_string
r.parse().unwrap_or_else(|err| args_fail(&format!("Error parsing option: --resolution ({})", err))) .split('x')
.map(|r| {
r.parse().unwrap_or_else(|err| {
args_fail(&format!("Error parsing option: --resolution ({})", err))
})
}).collect(); }).collect();
TypedSize2D::new(res[0], res[1]) TypedSize2D::new(res[0], res[1])
} },
None => { None => TypedSize2D::new(1024, 740),
TypedSize2D::new(1024, 740)
}
}; };
if opt_match.opt_present("M") { if opt_match.opt_present("M") {
@@ -776,7 +949,10 @@ pub fn from_cmdline_args(args: &[String]) -> ArgumentParsingResult {
None => default_user_agent_string(DEFAULT_USER_AGENT).into(), None => default_user_agent_string(DEFAULT_USER_AGENT).into(),
}; };
let user_stylesheets = opt_match.opt_strs("user-stylesheet").iter().map(|filename| { let user_stylesheets = opt_match
.opt_strs("user-stylesheet")
.iter()
.map(|filename| {
let path = cwd.join(filename); let path = cwd.join(filename);
let url = ServoUrl::from_url(Url::from_file_path(&path).unwrap()); let url = ServoUrl::from_url(Url::from_file_path(&path).unwrap());
let mut contents = Vec::new(); let mut contents = Vec::new();
@@ -787,9 +963,10 @@ pub fn from_cmdline_args(args: &[String]) -> ArgumentParsingResult {
(contents, url) (contents, url)
}).collect(); }).collect();
let do_not_use_native_titlebar = let do_not_use_native_titlebar = opt_match.opt_present("b") || !PREFS
opt_match.opt_present("b") || .get("shell.native-titlebar.enabled")
!PREFS.get("shell.native-titlebar.enabled").as_boolean().unwrap(); .as_boolean()
.unwrap();
let is_printing_version = opt_match.opt_present("v") || opt_match.opt_present("version"); let is_printing_version = opt_match.opt_present("v") || opt_match.opt_present("version");
@@ -870,7 +1047,10 @@ pub fn from_cmdline_args(args: &[String]) -> ArgumentParsingResult {
if let Some(layout_threads) = layout_threads { if let Some(layout_threads) = layout_threads {
PREFS.set("layout.threads", PrefValue::Number(layout_threads as f64)); PREFS.set("layout.threads", PrefValue::Number(layout_threads as f64));
} else if let Some(layout_threads) = PREFS.get("layout.threads").as_string() { } else if let Some(layout_threads) = PREFS.get("layout.threads").as_string() {
PREFS.set("layout.threads", PrefValue::Number(layout_threads.parse::<f64>().unwrap())); PREFS.set(
"layout.threads",
PrefValue::Number(layout_threads.parse::<f64>().unwrap()),
);
} else if *PREFS.get("layout.threads") == PrefValue::Missing { } else if *PREFS.get("layout.threads") == PrefValue::Missing {
let layout_threads = cmp::max(num_cpus::get() * 3 / 4, 1); let layout_threads = cmp::max(num_cpus::get() * 3 / 4, 1);
PREFS.set("layout.threads", PrefValue::Number(layout_threads as f64)); PREFS.set("layout.threads", PrefValue::Number(layout_threads as f64));
@@ -926,8 +1106,8 @@ pub fn parse_pref_from_command_line(pref: &str) {
Some(&"true") | None => PREFS.set(pref_name, PrefValue::Boolean(true)), Some(&"true") | None => PREFS.set(pref_name, PrefValue::Boolean(true)),
Some(value) => match value.parse::<f64>() { Some(value) => match value.parse::<f64>() {
Ok(v) => PREFS.set(pref_name, PrefValue::Number(v)), Ok(v) => PREFS.set(pref_name, PrefValue::Number(v)),
Err(_) => PREFS.set(pref_name, PrefValue::String(value.to_string())) Err(_) => PREFS.set(pref_name, PrefValue::String(value.to_string())),
} },
}; };
} }
@@ -941,7 +1121,7 @@ pub fn parse_url_or_filename(cwd: &Path, input: &str) -> Result<ServoUrl, ()> {
Ok(url) => Ok(url), Ok(url) => Ok(url),
Err(url::ParseError::RelativeUrlWithoutBase) => { Err(url::ParseError::RelativeUrlWithoutBase) => {
Url::from_file_path(&*cwd.join(input)).map(ServoUrl::from_url) Url::from_file_path(&*cwd.join(input)).map(ServoUrl::from_url)
} },
Err(_) => Err(()), Err(_) => Err(()),
} }
} }


@@ -30,7 +30,7 @@ pub enum PrefValue {
Boolean(bool), Boolean(bool),
String(String), String(String),
Number(f64), Number(f64),
Missing Missing,
} }
impl PrefValue { impl PrefValue {
@@ -41,26 +41,22 @@ impl PrefValue {
Json::F64(x) => PrefValue::Number(x), Json::F64(x) => PrefValue::Number(x),
Json::I64(x) => PrefValue::Number(x as f64), Json::I64(x) => PrefValue::Number(x as f64),
Json::U64(x) => PrefValue::Number(x as f64), Json::U64(x) => PrefValue::Number(x as f64),
_ => return Err(()) _ => return Err(()),
}; };
Ok(value) Ok(value)
} }
pub fn as_boolean(&self) -> Option<bool> { pub fn as_boolean(&self) -> Option<bool> {
match *self { match *self {
PrefValue::Boolean(value) => { PrefValue::Boolean(value) => Some(value),
Some(value) _ => None,
},
_ => None
} }
} }
pub fn as_string(&self) -> Option<&str> { pub fn as_string(&self) -> Option<&str> {
match *self { match *self {
PrefValue::String(ref value) => { PrefValue::String(ref value) => Some(&value),
Some(&value) _ => None,
},
_ => None
} }
} }
@@ -82,16 +78,10 @@ impl PrefValue {
impl ToJson for PrefValue { impl ToJson for PrefValue {
fn to_json(&self) -> Json { fn to_json(&self) -> Json {
match *self { match *self {
PrefValue::Boolean(x) => { PrefValue::Boolean(x) => Json::Boolean(x),
Json::Boolean(x) PrefValue::String(ref x) => Json::String(x.clone()),
}, PrefValue::Number(x) => Json::F64(x),
PrefValue::String(ref x) => { PrefValue::Missing => Json::Null,
Json::String(x.clone())
},
PrefValue::Number(x) => {
Json::F64(x)
},
PrefValue::Missing => Json::Null
} }
} }
} }
@@ -99,10 +89,9 @@ impl ToJson for PrefValue {
#[derive(Clone, Debug, Deserialize, Serialize)] #[derive(Clone, Debug, Deserialize, Serialize)]
pub enum Pref { pub enum Pref {
NoDefault(Arc<PrefValue>), NoDefault(Arc<PrefValue>),
WithDefault(Arc<PrefValue>, Option<Arc<PrefValue>>) WithDefault(Arc<PrefValue>, Option<Arc<PrefValue>>),
} }
impl Pref { impl Pref {
pub fn new(value: PrefValue) -> Pref { pub fn new(value: PrefValue) -> Pref {
Pref::NoDefault(Arc::new(value)) Pref::NoDefault(Arc::new(value))
@@ -120,12 +109,10 @@ impl Pref {
pub fn value(&self) -> &Arc<PrefValue> { pub fn value(&self) -> &Arc<PrefValue> {
match *self { match *self {
Pref::NoDefault(ref x) => x, Pref::NoDefault(ref x) => x,
Pref::WithDefault(ref default, ref override_value) => { Pref::WithDefault(ref default, ref override_value) => match *override_value {
match *override_value {
Some(ref x) => x, Some(ref x) => x,
None => default None => default,
} },
}
} }
} }
@@ -133,12 +120,8 @@ impl Pref {
// TODO - this should error if we try to override a pref of one type // TODO - this should error if we try to override a pref of one type
// with a value of a different type // with a value of a different type
match *self { match *self {
Pref::NoDefault(ref mut pref_value) => { Pref::NoDefault(ref mut pref_value) => *pref_value = Arc::new(value),
*pref_value = Arc::new(value) Pref::WithDefault(_, ref mut override_value) => *override_value = Some(Arc::new(value)),
},
Pref::WithDefault(_, ref mut override_value) => {
*override_value = Some(Arc::new(value))
}
} }
} }
} }
@@ -151,8 +134,10 @@ impl ToJson for Pref {
pub fn default_prefs() -> Preferences { pub fn default_prefs() -> Preferences {
let prefs = Preferences(Arc::new(RwLock::new(HashMap::new()))); let prefs = Preferences(Arc::new(RwLock::new(HashMap::new())));
prefs.set("layout.threads", PrefValue::Number( prefs.set(
max(num_cpus::get() * 3 / 4, 1) as f64)); "layout.threads",
PrefValue::Number(max(num_cpus::get() * 3 / 4, 1) as f64),
);
prefs prefs
} }
@@ -169,7 +154,10 @@ pub fn read_prefs(txt: &str) -> Result<HashMap<String, Pref>, ()> {
Ok(x) => { Ok(x) => {
prefs.insert(name, x); prefs.insert(name, x);
}, },
Err(_) => println!("Ignoring non-boolean/string/i64 preference value for {:?}", name), Err(_) => println!(
"Ignoring non-boolean/string/i64 preference value for {:?}",
name
),
} }
} }
} }
@@ -181,14 +169,14 @@ pub fn add_user_prefs() {
Some(ref config_path) => { Some(ref config_path) => {
let mut path = PathBuf::from(config_path); let mut path = PathBuf::from(config_path);
init_user_prefs(&mut path); init_user_prefs(&mut path);
} },
None => { None => {
if let Some(mut path) = default_config_dir() { if let Some(mut path) = default_config_dir() {
if path.join("prefs.json").exists() { if path.join("prefs.json").exists() {
init_user_prefs(&mut path); init_user_prefs(&mut path);
} }
} }
} },
} }
} }
@@ -201,8 +189,10 @@ fn init_user_prefs(path: &mut PathBuf) {
PREFS.extend(prefs); PREFS.extend(prefs);
} }
} else { } else {
writeln!(&mut stderr(), "Error opening prefs.json from config directory") writeln!(
.expect("failed printing to stderr"); &mut stderr(),
"Error opening prefs.json from config directory"
).expect("failed printing to stderr");
} }
} }
@@ -210,7 +200,11 @@ pub struct Preferences(Arc<RwLock<HashMap<String, Pref>>>);
impl Preferences { impl Preferences {
pub fn get(&self, name: &str) -> Arc<PrefValue> { pub fn get(&self, name: &str) -> Arc<PrefValue> {
self.0.read().unwrap().get(name).map_or(Arc::new(PrefValue::Missing), |x| x.value().clone()) self.0
.read()
.unwrap()
.get(name)
.map_or(Arc::new(PrefValue::Missing), |x| x.value().clone())
} }
pub fn cloned(&self) -> HashMap<String, Pref> { pub fn cloned(&self) -> HashMap<String, Pref> {
@@ -244,7 +238,12 @@ impl Preferences {
pub fn reset_all(&self) { pub fn reset_all(&self) {
let names = { let names = {
self.0.read().unwrap().keys().cloned().collect::<Vec<String>>() self.0
.read()
.unwrap()
.keys()
.cloned()
.collect::<Vec<String>>()
}; };
for name in names.iter() { for name in names.iter() {
self.reset(name); self.reset(name);
@@ -260,7 +259,9 @@ impl Preferences {
} }
pub fn is_dom_to_texture_enabled(&self) -> bool { pub fn is_dom_to_texture_enabled(&self) -> bool {
self.get("dom.webgl.dom_to_texture.enabled").as_boolean().unwrap_or(false) self.get("dom.webgl.dom_to_texture.enabled")
.as_boolean()
.unwrap_or(false)
} }
pub fn is_webgl2_enabled(&self) -> bool { pub fn is_webgl2_enabled(&self) -> bool {


@@ -24,7 +24,10 @@ fn test_argument_parsing() {
let url = parse_url_or_filename(fake_cwd, "file:///foo/bar.html").unwrap(); let url = parse_url_or_filename(fake_cwd, "file:///foo/bar.html").unwrap();
assert_eq!(url.scheme(), "file"); assert_eq!(url.scheme(), "file");
assert_eq!(url.path_segments().unwrap().collect::<Vec<_>>(), ["foo", "bar.html"]); assert_eq!(
url.path_segments().unwrap().collect::<Vec<_>>(),
["foo", "bar.html"]
);
} }
#[test] #[test]
@@ -34,7 +37,10 @@ fn test_file_path_parsing() {
let url = parse_url_or_filename(fake_cwd, "bar.html").unwrap(); let url = parse_url_or_filename(fake_cwd, "bar.html").unwrap();
assert_eq!(url.scheme(), "file"); assert_eq!(url.scheme(), "file");
assert_eq!(url.path_segments().unwrap().collect::<Vec<_>>(), ["fake", "cwd", "bar.html"]); assert_eq!(
url.path_segments().unwrap().collect::<Vec<_>>(),
["fake", "cwd", "bar.html"]
);
} }
#[test] #[test]
@@ -44,7 +50,10 @@ fn test_file_path_parsing() {
let url = parse_url_or_filename(fake_cwd, "bar.html").unwrap(); let url = parse_url_or_filename(fake_cwd, "bar.html").unwrap();
assert_eq!(url.scheme(), "file"); assert_eq!(url.scheme(), "file");
assert_eq!(url.path_segments().unwrap().collect::<Vec<_>>(), ["C:", "fake", "cwd", "bar.html"]); assert_eq!(
url.path_segments().unwrap().collect::<Vec<_>>(),
["C:", "fake", "cwd", "bar.html"]
);
} }
#[test] #[test]
@@ -57,16 +66,24 @@ fn test_argument_parsing_special() {
let url = parse_url_or_filename(fake_cwd, "file:///foo/bar?baz#buzz.html").unwrap(); let url = parse_url_or_filename(fake_cwd, "file:///foo/bar?baz#buzz.html").unwrap();
assert_eq!(&*url.to_file_path().unwrap(), Path::new("/foo/bar")); assert_eq!(&*url.to_file_path().unwrap(), Path::new("/foo/bar"));
assert_eq!(url.scheme(), "file"); assert_eq!(url.scheme(), "file");
assert_eq!(url.path_segments().unwrap().collect::<Vec<_>>(), ["foo", "bar"]); assert_eq!(
url.path_segments().unwrap().collect::<Vec<_>>(),
["foo", "bar"]
);
assert_eq!(url.query(), Some("baz")); assert_eq!(url.query(), Some("baz"));
assert_eq!(url.fragment(), Some("buzz.html")); assert_eq!(url.fragment(), Some("buzz.html"));
// but not in file names. // but not in file names.
let url = parse_url_or_filename(fake_cwd, "./bar?baz#buzz.html").unwrap(); let url = parse_url_or_filename(fake_cwd, "./bar?baz#buzz.html").unwrap();
assert_eq!(&*url.to_file_path().unwrap(), Path::new("/fake/cwd/bar?baz#buzz.html")); assert_eq!(
&*url.to_file_path().unwrap(),
Path::new("/fake/cwd/bar?baz#buzz.html")
);
assert_eq!(url.scheme(), "file"); assert_eq!(url.scheme(), "file");
assert_eq!(url.path_segments().unwrap().collect::<Vec<_>>(), assert_eq!(
["fake", "cwd", "bar%3Fbaz%23buzz.html"]); url.path_segments().unwrap().collect::<Vec<_>>(),
["fake", "cwd", "bar%3Fbaz%23buzz.html"]
);
assert_eq!(url.query(), None); assert_eq!(url.query(), None);
assert_eq!(url.fragment(), None); assert_eq!(url.fragment(), None);
} }


@@ -15,7 +15,7 @@ fn test_create_pref() {
\"layout.writing-mode.enabled\": true,\ \"layout.writing-mode.enabled\": true,\
\"network.mime.sniff\": false,\ \"network.mime.sniff\": false,\
\"shell.homepage\": \"https://servo.org\"\ \"shell.homepage\": \"https://servo.org\"\
}"; }";
let prefs = read_prefs(json_str); let prefs = read_prefs(json_str);
assert!(prefs.is_ok()); assert!(prefs.is_ok());
@@ -30,21 +30,33 @@ fn test_get_set_reset_extend() {
\"layout.writing-mode.enabled\": true,\ \"layout.writing-mode.enabled\": true,\
\"extra.stuff\": false,\ \"extra.stuff\": false,\
\"shell.homepage\": \"https://google.com\"\ \"shell.homepage\": \"https://google.com\"\
}"; }";
assert_eq!(*PREFS.get("test"), PrefValue::Missing); assert_eq!(*PREFS.get("test"), PrefValue::Missing);
PREFS.set("test", PrefValue::String("hi".to_owned())); PREFS.set("test", PrefValue::String("hi".to_owned()));
assert_eq!(*PREFS.get("test"), PrefValue::String("hi".to_owned())); assert_eq!(*PREFS.get("test"), PrefValue::String("hi".to_owned()));
assert_eq!(*PREFS.get("shell.homepage"), PrefValue::String("https://servo.org".to_owned())); assert_eq!(
*PREFS.get("shell.homepage"),
PrefValue::String("https://servo.org".to_owned())
);
PREFS.set("shell.homepage", PrefValue::Boolean(true)); PREFS.set("shell.homepage", PrefValue::Boolean(true));
assert_eq!(*PREFS.get("shell.homepage"), PrefValue::Boolean(true)); assert_eq!(*PREFS.get("shell.homepage"), PrefValue::Boolean(true));
PREFS.reset("shell.homepage"); PREFS.reset("shell.homepage");
assert_eq!(*PREFS.get("shell.homepage"), PrefValue::String("https://servo.org".to_owned())); assert_eq!(
*PREFS.get("shell.homepage"),
PrefValue::String("https://servo.org".to_owned())
);
let extension = read_prefs(json_str).unwrap(); let extension = read_prefs(json_str).unwrap();
PREFS.extend(extension); PREFS.extend(extension);
assert_eq!(*PREFS.get("shell.homepage"), PrefValue::String("https://google.com".to_owned())); assert_eq!(
assert_eq!(*PREFS.get("layout.writing-mode.enabled"), PrefValue::Boolean(true)); *PREFS.get("shell.homepage"),
PrefValue::String("https://google.com".to_owned())
);
assert_eq!(
*PREFS.get("layout.writing-mode.enabled"),
PrefValue::Boolean(true)
);
assert_eq!(*PREFS.get("extra.stuff"), PrefValue::Boolean(false)); assert_eq!(*PREFS.get("extra.stuff"), PrefValue::Boolean(false));
} }
@@ -55,7 +67,7 @@ fn test_default_config_dir_create_read_write() {
\"layout.writing-mode.enabled\": true,\ \"layout.writing-mode.enabled\": true,\
\"extra.stuff\": false,\ \"extra.stuff\": false,\
\"shell.homepage\": \"https://google.com\"\ \"shell.homepage\": \"https://google.com\"\
}"; }";
let mut expected_json = String::new(); let mut expected_json = String::new();
let config_path = basedir::default_config_dir().unwrap(); let config_path = basedir::default_config_dir().unwrap();


@@ -18,7 +18,7 @@ enum Message {
pub struct Sender(mpsc::Sender<Message>); pub struct Sender(mpsc::Sender<Message>);
struct Connection { struct Connection {
sender: ws::Sender sender: ws::Sender,
} }
impl Handler for Connection { impl Handler for Connection {
@@ -39,19 +39,23 @@ impl Handler for Connection {
pub fn start_server(port: u16) -> Sender { pub fn start_server(port: u16) -> Sender {
debug!("Starting server."); debug!("Starting server.");
let (sender, receiver) = channel(); let (sender, receiver) = channel();
thread::Builder::new().name("debugger".to_owned()).spawn(move || { thread::Builder::new()
let socket = Builder::new().build(|sender: ws::Sender| { .name("debugger".to_owned())
Connection { sender: sender } .spawn(move || {
}).unwrap(); let socket = Builder::new()
.build(|sender: ws::Sender| Connection { sender: sender })
.unwrap();
let sender = socket.broadcaster(); let sender = socket.broadcaster();
thread::Builder::new().name("debugger-websocket".to_owned()).spawn(move || { thread::Builder::new()
.name("debugger-websocket".to_owned())
.spawn(move || {
socket.listen(("127.0.0.1", port)).unwrap(); socket.listen(("127.0.0.1", port)).unwrap();
}).expect("Thread spawning failed"); }).expect("Thread spawning failed");
while let Ok(message) = receiver.recv() { while let Ok(message) = receiver.recv() {
match message { match message {
Message::ShutdownServer => { Message::ShutdownServer => {
break; break;
} },
} }
} }
sender.shutdown().unwrap(); sender.shutdown().unwrap();


@@ -14,8 +14,10 @@ decl_derive!([DenyPublicFields] => deny_public_fields_derive);
fn deny_public_fields_derive(s: synstructure::Structure) -> proc_macro::TokenStream { fn deny_public_fields_derive(s: synstructure::Structure) -> proc_macro::TokenStream {
s.each(|binding| { s.each(|binding| {
if binding.ast().vis != syn::Visibility::Inherited { if binding.ast().vis != syn::Visibility::Inherited {
panic!("Field `{}` should not be public", panic!(
binding.ast().ident.as_ref().unwrap_or(&binding.binding)); "Field `{}` should not be public",
binding.ast().ident.as_ref().unwrap_or(&binding.binding)
);
} }
"".to_owned() "".to_owned()

View file

@ -3,7 +3,6 @@
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

/// General actor system infrastructure.
use devtools_traits::PreciseTime;
use serde_json::{Map, Value};
use std::any::Any;

@ -23,11 +22,13 @@ pub enum ActorMessageStatus {
/// and the ability to process messages that are directed to particular actors.
/// TODO: ensure the name is immutable
pub trait Actor: Any + ActorAsAny {
    fn handle_message(
        &self,
        registry: &ActorRegistry,
        msg_type: &str,
        msg: &Map<String, Value>,
        stream: &mut TcpStream,
    ) -> Result<ActorMessageStatus, ()>;
    fn name(&self) -> String;
}

@ -37,8 +38,12 @@ pub trait ActorAsAny {
}

impl<T: Actor> ActorAsAny for T {
    fn actor_as_any(&self) -> &Any {
        self
    }
    fn actor_as_any_mut(&mut self) -> &mut Any {
        self
    }
}

/// A list of known, owned actors.

@ -57,8 +62,8 @@ impl ActorRegistry {
    pub fn new() -> ActorRegistry {
        ActorRegistry {
            actors: HashMap::new(),
            new_actors: RefCell::new(vec![]),
            old_actors: RefCell::new(vec![]),
            script_actors: RefCell::new(HashMap::new()),
            shareable: None,
            next: Cell::new(0),

@ -149,29 +154,33 @@ impl ActorRegistry {
    /// Attempt to process a message as directed by its `to` property. If the actor is not
    /// found or does not indicate that it knew how to process the message, ignore the failure.
    pub fn handle_message(
        &mut self,
        msg: &Map<String, Value>,
        stream: &mut TcpStream,
    ) -> Result<(), ()> {
        let to = msg.get("to").unwrap().as_str().unwrap();
        match self.actors.get(to) {
            None => debug!("message received for unknown actor \"{}\"", to),
            Some(actor) => {
                let msg_type = msg.get("type").unwrap().as_str().unwrap();
                if actor.handle_message(self, msg_type, msg, stream)? !=
                    ActorMessageStatus::Processed
                {
                    debug!(
                        "unexpected message type \"{}\" found for actor \"{}\"",
                        msg_type, to
                    );
                }
            },
        }
        let new_actors = replace(&mut *self.new_actors.borrow_mut(), vec![]);
        for actor in new_actors.into_iter() {
            self.actors.insert(actor.name().to_owned(), actor);
        }
        let old_actors = replace(&mut *self.old_actors.borrow_mut(), vec![]);
        for name in old_actors {
            self.drop_actor(name);
        }
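For orientation (not part of the diff): a stripped-down, self-contained sketch of how an actor can implement the multi-line handle_message signature shown above. The ActorRegistry and Value stand-ins and the EchoActor name are hypothetical simplifications, and the real trait also takes a &mut TcpStream argument that is omitted here for brevity.

use std::any::Any;
use std::collections::HashMap;

// Simplified stand-ins for the real devtools types (assumptions, for illustration only).
struct ActorRegistry;
type Value = String;

#[derive(PartialEq)]
enum ActorMessageStatus {
    Processed,
    Ignored,
}

trait Actor: Any {
    // Mirrors the multi-line signature style used after the reformat.
    fn handle_message(
        &self,
        registry: &ActorRegistry,
        msg_type: &str,
        msg: &HashMap<String, Value>,
    ) -> Result<ActorMessageStatus, ()>;
    fn name(&self) -> String;
}

struct EchoActor {
    name: String,
}

impl Actor for EchoActor {
    fn handle_message(
        &self,
        _registry: &ActorRegistry,
        msg_type: &str,
        _msg: &HashMap<String, Value>,
    ) -> Result<ActorMessageStatus, ()> {
        Ok(match msg_type {
            "echo" => ActorMessageStatus::Processed,
            _ => ActorMessageStatus::Ignored,
        })
    }

    fn name(&self) -> String {
        self.name.clone()
    }
}

fn main() {
    let actor = EchoActor { name: "echo1".to_owned() };
    let status = actor
        .handle_message(&ActorRegistry, "echo", &HashMap::new())
        .unwrap();
    assert!(status == ActorMessageStatus::Processed);
    println!("{} handled the message", actor.name());
}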

View file

@ -94,28 +94,44 @@ impl Actor for ConsoleActor {
        self.name.clone()
    }

    fn handle_message(
        &self,
        registry: &ActorRegistry,
        msg_type: &str,
        msg: &Map<String, Value>,
        stream: &mut TcpStream,
    ) -> Result<ActorMessageStatus, ()> {
        Ok(match msg_type {
            "getCachedMessages" => {
                let str_types = msg
                    .get("messageTypes")
                    .unwrap()
                    .as_array()
                    .unwrap()
                    .into_iter()
                    .map(|json_type| json_type.as_str().unwrap());
                let mut message_types = CachedConsoleMessageTypes::empty();
                for str_type in str_types {
                    match str_type {
                        "PageError" => message_types.insert(CachedConsoleMessageTypes::PAGE_ERROR),
                        "ConsoleAPI" => {
                            message_types.insert(CachedConsoleMessageTypes::CONSOLE_API)
                        },
                        s => debug!("unrecognized message type requested: \"{}\"", s),
                    };
                }
                let (chan, port) = ipc::channel().unwrap();
                self.script_chan
                    .send(DevtoolScriptControlMsg::GetCachedMessages(
                        self.pipeline,
                        message_types,
                        chan,
                    )).unwrap();
                let messages = port
                    .recv()
                    .map_err(|_| ())?
                    .into_iter()
                    .map(|message| {
                        let json_string = message.encode().unwrap();
                        let json = serde_json::from_str::<Value>(&json_string).unwrap();
                        json.as_object().unwrap().to_owned()

@ -127,56 +143,60 @@ impl Actor for ConsoleActor {
                };
                stream.write_json_packet(&msg);
                ActorMessageStatus::Processed
            },
            "startListeners" => {
                //TODO: actually implement listener filters that support starting/stopping
                let msg = StartedListenersReply {
                    from: self.name(),
                    nativeConsoleAPI: true,
                    startedListeners: vec!["PageError".to_owned(), "ConsoleAPI".to_owned()],
                    traits: StartedListenersTraits {
                        customNetworkRequest: true,
                    },
                };
                stream.write_json_packet(&msg);
                ActorMessageStatus::Processed
            },
            "stopListeners" => {
                //TODO: actually implement listener filters that support starting/stopping
                let msg = StopListenersReply {
                    from: self.name(),
                    stoppedListeners: msg
                        .get("listeners")
                        .unwrap()
                        .as_array()
                        .unwrap_or(&vec![])
                        .iter()
                        .map(|listener| listener.as_str().unwrap().to_owned())
                        .collect(),
                };
                stream.write_json_packet(&msg);
                ActorMessageStatus::Processed
            },
            //TODO: implement autocompletion like onAutocomplete in
            // http://mxr.mozilla.org/mozilla-central/source/toolkit/devtools/server/actors/webconsole.js
            "autocomplete" => {
                let msg = AutocompleteReply {
                    from: self.name(),
                    matches: vec![],
                    matchProp: "".to_owned(),
                };
                stream.write_json_packet(&msg);
                ActorMessageStatus::Processed
            },
            "evaluateJS" => {
                let input = msg.get("text").unwrap().as_str().unwrap().to_owned();
                let (chan, port) = ipc::channel().unwrap();
                self.script_chan
                    .send(DevtoolScriptControlMsg::EvaluateJS(
                        self.pipeline,
                        input.clone(),
                        chan,
                    )).unwrap();
                //TODO: extract conversion into protocol module or some other useful place
                let result = match port.recv().map_err(|_| ())? {

@ -184,12 +204,12 @@ impl Actor for ConsoleActor {
                        let mut m = Map::new();
                        m.insert("type".to_owned(), Value::String("undefined".to_owned()));
                        Value::Object(m)
                    },
                    NullValue => {
                        let mut m = Map::new();
                        m.insert("type".to_owned(), Value::String("null".to_owned()));
                        Value::Object(m)
                    },
                    BooleanValue(val) => Value::Bool(val),
                    NumberValue(val) => {
                        if val.is_nan() {

@ -211,7 +231,7 @@ impl Actor for ConsoleActor {
                        } else {
                            Value::Number(Number::from_f64(val).unwrap())
                        }
                    },
                    StringValue(s) => Value::String(s),
                    ActorValue { class, uuid } => {
                        //TODO: make initial ActorValue message include these properties?

@ -225,7 +245,7 @@ impl Actor for ConsoleActor {
                        m.insert("frozen".to_owned(), Value::Bool(false));
                        m.insert("sealed".to_owned(), Value::Bool(false));
                        Value::Object(m)
                    },
                };
                //TODO: catch and return exception values from JS evaluation

@ -240,7 +260,7 @@ impl Actor for ConsoleActor {
                };
                stream.write_json_packet(&msg);
                ActorMessageStatus::Processed
            },
            "setPreferences" => {
                let msg = SetPreferencesReply {

@ -249,9 +269,9 @@ impl Actor for ConsoleActor {
                };
                stream.write_json_packet(&msg);
                ActorMessageStatus::Processed
            },
            _ => ActorMessageStatus::Ignored,
        })
    }
}
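As an aside (not part of the diff), the "evaluateJS" arm above maps script values onto serde_json Values. Below is a rough, hypothetical sketch of that conversion pattern; it assumes a serde_json dependency in Cargo.toml, and EvalResult is a made-up stand-in for the devtools result type.

use serde_json::{Map, Number, Value};

// Hypothetical stand-in for the devtools evaluation result type.
enum EvalResult {
    Void,
    Null,
    Boolean(bool),
    Number(f64),
    Str(String),
}

fn to_json(result: EvalResult) -> Value {
    match result {
        EvalResult::Void => {
            let mut m = Map::new();
            m.insert("type".to_owned(), Value::String("undefined".to_owned()));
            Value::Object(m)
        },
        EvalResult::Null => {
            let mut m = Map::new();
            m.insert("type".to_owned(), Value::String("null".to_owned()));
            Value::Object(m)
        },
        EvalResult::Boolean(val) => Value::Bool(val),
        // NaN and infinities have no JSON representation, so fall back to null here.
        EvalResult::Number(val) => Number::from_f64(val).map_or(Value::Null, Value::Number),
        EvalResult::Str(s) => Value::String(s),
    }
}

fn main() {
    println!("{}", to_json(EvalResult::Number(1.5)));
    println!("{}", to_json(EvalResult::Void));
}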

View file

@ -26,21 +26,24 @@ impl Actor for FramerateActor {
        self.name.clone()
    }

    fn handle_message(
        &self,
        _registry: &ActorRegistry,
        _msg_type: &str,
        _msg: &Map<String, Value>,
        _stream: &mut TcpStream,
    ) -> Result<ActorMessageStatus, ()> {
        Ok(ActorMessageStatus::Ignored)
    }
}

impl FramerateActor {
    /// return name of actor
    pub fn create(
        registry: &ActorRegistry,
        pipeline_id: PipelineId,
        script_sender: IpcSender<DevtoolScriptControlMsg>,
    ) -> String {
        let actor_name = registry.new_name("framerate");
        let mut actor = FramerateActor {
            name: actor_name.clone(),

@ -60,8 +63,7 @@ impl FramerateActor {
        self.ticks.push(HighResolutionStamp::wrap(tick));
        if self.is_recording {
            let msg = DevtoolScriptControlMsg::RequestAnimationFrame(self.pipeline, self.name());
            self.script_sender.send(msg).unwrap();
        }
    }

@ -78,8 +80,7 @@ impl FramerateActor {
        self.start_time = Some(precise_time_ns());
        self.is_recording = true;
        let msg = DevtoolScriptControlMsg::RequestAnimationFrame(self.pipeline, self.name());
        self.script_sender.send(msg).unwrap();
    }

@ -90,7 +91,6 @@ impl FramerateActor {
        self.is_recording = false;
        self.start_time = None;
    }
}

impl Drop for FramerateActor {

View file

@ -61,27 +61,25 @@ impl Actor for HighlighterActor {
        self.name.clone()
    }

    fn handle_message(
        &self,
        _registry: &ActorRegistry,
        msg_type: &str,
        _msg: &Map<String, Value>,
        stream: &mut TcpStream,
    ) -> Result<ActorMessageStatus, ()> {
        Ok(match msg_type {
            "showBoxModel" => {
                let msg = ShowBoxModelReply { from: self.name() };
                stream.write_json_packet(&msg);
                ActorMessageStatus::Processed
            },
            "hideBoxModel" => {
                let msg = HideBoxModelReply { from: self.name() };
                stream.write_json_packet(&msg);
                ActorMessageStatus::Processed
            },
            _ => ActorMessageStatus::Ignored,
        })

@ -98,29 +96,33 @@ impl Actor for NodeActor {
        self.name.clone()
    }

    fn handle_message(
        &self,
        registry: &ActorRegistry,
        msg_type: &str,
        msg: &Map<String, Value>,
        stream: &mut TcpStream,
    ) -> Result<ActorMessageStatus, ()> {
        Ok(match msg_type {
            "modifyAttributes" => {
                let target = msg.get("to").unwrap().as_str().unwrap();
                let mods = msg.get("modifications").unwrap().as_array().unwrap();
                let modifications = mods
                    .iter()
                    .map(|json_mod| {
                        serde_json::from_str(&serde_json::to_string(json_mod).unwrap()).unwrap()
                    }).collect();
                self.script_chan
                    .send(ModifyAttribute(
                        self.pipeline,
                        registry.actor_to_script(target.to_owned()),
                        modifications,
                    )).unwrap();
                let reply = ModifyAttributeReply { from: self.name() };
                stream.write_json_packet(&reply);
                ActorMessageStatus::Processed
            },
            _ => ActorMessageStatus::Ignored,
        })
@ -175,19 +177,23 @@ struct NodeActorMsg {
} }
trait NodeInfoToProtocol { trait NodeInfoToProtocol {
fn encode(self, fn encode(
self,
actors: &ActorRegistry, actors: &ActorRegistry,
display: bool, display: bool,
script_chan: IpcSender<DevtoolScriptControlMsg>, script_chan: IpcSender<DevtoolScriptControlMsg>,
pipeline: PipelineId) -> NodeActorMsg; pipeline: PipelineId,
) -> NodeActorMsg;
} }
impl NodeInfoToProtocol for NodeInfo { impl NodeInfoToProtocol for NodeInfo {
fn encode(self, fn encode(
self,
actors: &ActorRegistry, actors: &ActorRegistry,
display: bool, display: bool,
script_chan: IpcSender<DevtoolScriptControlMsg>, script_chan: IpcSender<DevtoolScriptControlMsg>,
pipeline: PipelineId) -> NodeActorMsg { pipeline: PipelineId,
) -> NodeActorMsg {
let actor_name = if !actors.script_actor_registered(self.uniqueId.clone()) { let actor_name = if !actors.script_actor_registered(self.uniqueId.clone()) {
let name = actors.new_name("node"); let name = actors.new_name("node");
let node_actor = NodeActor { let node_actor = NodeActor {
@ -215,15 +221,16 @@ impl NodeInfoToProtocol for NodeInfo {
publicId: self.publicId, publicId: self.publicId,
systemId: self.systemId, systemId: self.systemId,
attrs: self.attrs.into_iter().map(|attr| { attrs: self
AttrMsg { .attrs
.into_iter()
.map(|attr| AttrMsg {
namespace: attr.namespace, namespace: attr.namespace,
name: attr.name, name: attr.name,
value: attr.value, value: attr.value,
}
}).collect(), }).collect(),
pseudoClassLocks: vec!(), //TODO get this data from script pseudoClassLocks: vec![], //TODO get this data from script
isDisplayed: display, isDisplayed: display,
@ -272,25 +279,28 @@ impl Actor for WalkerActor {
        self.name.clone()
    }

    fn handle_message(
        &self,
        registry: &ActorRegistry,
        msg_type: &str,
        msg: &Map<String, Value>,
        stream: &mut TcpStream,
    ) -> Result<ActorMessageStatus, ()> {
        Ok(match msg_type {
            "querySelector" => {
                let msg = QuerySelectorReply { from: self.name() };
                stream.write_json_packet(&msg);
                ActorMessageStatus::Processed
            },
            "documentElement" => {
                let (tx, rx) = ipc::channel().unwrap();
                self.script_chan
                    .send(GetDocumentElement(self.pipeline, tx))
                    .unwrap();
                let doc_elem_info = rx.recv().unwrap().ok_or(())?;
                let node =
                    doc_elem_info.encode(registry, true, self.script_chan.clone(), self.pipeline);
                let msg = DocumentElementReply {
                    from: self.name(),

@ -298,36 +308,38 @@ impl Actor for WalkerActor {
                };
                stream.write_json_packet(&msg);
                ActorMessageStatus::Processed
            },
            "clearPseudoClassLocks" => {
                let msg = ClearPseudoclassesReply { from: self.name() };
                stream.write_json_packet(&msg);
                ActorMessageStatus::Processed
            },
            "children" => {
                let target = msg.get("node").unwrap().as_str().unwrap();
                let (tx, rx) = ipc::channel().unwrap();
                self.script_chan
                    .send(GetChildren(
                        self.pipeline,
                        registry.actor_to_script(target.to_owned()),
                        tx,
                    )).unwrap();
                let children = rx.recv().unwrap().ok_or(())?;
                let msg = ChildrenReply {
                    hasFirst: true,
                    hasLast: true,
                    nodes: children
                        .into_iter()
                        .map(|child| {
                            child.encode(registry, true, self.script_chan.clone(), self.pipeline)
                        }).collect(),
                    from: self.name(),
                };
                stream.write_json_packet(&msg);
                ActorMessageStatus::Processed
            },
            _ => ActorMessageStatus::Ignored,
        })
@ -447,52 +459,72 @@ impl Actor for PageStyleActor {
        self.name.clone()
    }

    fn handle_message(
        &self,
        registry: &ActorRegistry,
        msg_type: &str,
        msg: &Map<String, Value>,
        stream: &mut TcpStream,
    ) -> Result<ActorMessageStatus, ()> {
        Ok(match msg_type {
            "getApplied" => {
                //TODO: query script for relevant applied styles to node (msg.node)
                let msg = GetAppliedReply {
                    entries: vec![],
                    rules: vec![],
                    sheets: vec![],
                    from: self.name(),
                };
                stream.write_json_packet(&msg);
                ActorMessageStatus::Processed
            },
            "getComputed" => {
                //TODO: query script for relevant computed styles on node (msg.node)
                let msg = GetComputedReply {
                    computed: vec![],
                    from: self.name(),
                };
                stream.write_json_packet(&msg);
                ActorMessageStatus::Processed
            },
            //TODO: query script for box layout properties of node (msg.node)
            "getLayout" => {
                let target = msg.get("node").unwrap().as_str().unwrap();
                let (tx, rx) = ipc::channel().unwrap();
                self.script_chan
                    .send(GetLayout(
                        self.pipeline,
                        registry.actor_to_script(target.to_owned()),
                        tx,
                    )).unwrap();
                let ComputedNodeLayout {
                    display,
                    position,
                    zIndex,
                    boxSizing,
                    autoMargins,
                    marginTop,
                    marginRight,
                    marginBottom,
                    marginLeft,
                    borderTopWidth,
                    borderRightWidth,
                    borderBottomWidth,
                    borderLeftWidth,
                    paddingTop,
                    paddingRight,
                    paddingBottom,
                    paddingLeft,
                    width,
                    height,
                } = rx.recv().unwrap().ok_or(())?;
                let auto_margins = msg
                    .get("autoMargins")
                    .and_then(&Value::as_bool)
                    .unwrap_or(false);
                // http://mxr.mozilla.org/mozilla-central/source/toolkit/devtools/server/actors/styles.js
                let msg = GetLayoutReply {

@ -504,10 +536,18 @@ impl Actor for PageStyleActor {
                    autoMargins: if auto_margins {
                        let mut m = Map::new();
                        let auto = serde_json::value::Value::String("auto".to_owned());
                        if autoMargins.top {
                            m.insert("top".to_owned(), auto.clone());
                        }
                        if autoMargins.right {
                            m.insert("right".to_owned(), auto.clone());
                        }
                        if autoMargins.bottom {
                            m.insert("bottom".to_owned(), auto.clone());
                        }
                        if autoMargins.left {
                            m.insert("left".to_owned(), auto.clone());
                        }
                        serde_json::value::Value::Object(m)
                    } else {
                        serde_json::value::Value::Null

@ -531,7 +571,7 @@ impl Actor for PageStyleActor {
                let msg = serde_json::from_str::<Value>(&msg).unwrap();
                stream.write_json_packet(&msg);
                ActorMessageStatus::Processed
            },
            _ => ActorMessageStatus::Ignored,
        })
@ -543,11 +583,13 @@ impl Actor for InspectorActor {
        self.name.clone()
    }

    fn handle_message(
        &self,
        registry: &ActorRegistry,
        msg_type: &str,
        _msg: &Map<String, Value>,
        stream: &mut TcpStream,
    ) -> Result<ActorMessageStatus, ()> {
        Ok(match msg_type {
            "getWalker" => {
                if self.walker.borrow().is_none() {

@ -562,21 +604,24 @@ impl Actor for InspectorActor {
                }
                let (tx, rx) = ipc::channel().unwrap();
                self.script_chan
                    .send(GetRootNode(self.pipeline, tx))
                    .unwrap();
                let root_info = rx.recv().unwrap().ok_or(())?;
                let node =
                    root_info.encode(registry, false, self.script_chan.clone(), self.pipeline);
                let msg = GetWalkerReply {
                    from: self.name(),
                    walker: WalkerMsg {
                        actor: self.walker.borrow().clone().unwrap(),
                        root: node,
                    },
                };
                stream.write_json_packet(&msg);
                ActorMessageStatus::Processed
            },
            "getPageStyle" => {
                if self.pageStyle.borrow().is_none() {

@ -598,7 +643,7 @@ impl Actor for InspectorActor {
                };
                stream.write_json_packet(&msg);
                ActorMessageStatus::Processed
            },
            //TODO: this is an old message; try adding highlightable to the root traits instead
            // and support getHighlighter instead

@ -621,7 +666,7 @@ impl Actor for InspectorActor {
                };
                stream.write_json_packet(&msg);
                ActorMessageStatus::Processed
            },
            _ => ActorMessageStatus::Ignored,
        })

View file

@ -28,11 +28,13 @@ impl Actor for MemoryActor {
        self.name.clone()
    }

    fn handle_message(
        &self,
        _registry: &ActorRegistry,
        _msg_type: &str,
        _msg: &Map<String, Value>,
        _stream: &mut TcpStream,
    ) -> Result<ActorMessageStatus, ()> {
        Ok(ActorMessageStatus::Ignored)
    }
}

@ -42,7 +44,7 @@ impl MemoryActor {
    pub fn create(registry: &ActorRegistry) -> String {
        let actor_name = registry.new_name("memory");
        let actor = MemoryActor {
            name: actor_name.clone(),
        };
        registry.register_later(Box::new(actor));

View file

@ -34,7 +34,7 @@ struct HttpRequest {
struct HttpResponse {
    headers: Option<Headers>,
    status: Option<RawStatus>,
    body: Option<Vec<u8>>,
}

pub struct NetworkEventActor {

@ -52,7 +52,7 @@ pub struct EventActor {
    pub startedDateTime: String,
    pub timeStamp: i64,
    pub isXHR: bool,
    pub private: bool,
}

#[derive(Serialize)]

@ -79,14 +79,12 @@ pub struct ResponseContentMsg {
    pub discardResponseBody: bool,
}

#[derive(Serialize)]
pub struct ResponseHeadersMsg {
    pub headers: usize,
    pub headersSize: usize,
}

#[derive(Serialize)]
pub struct RequestCookiesMsg {
    pub cookies: usize,

@ -103,7 +101,7 @@ struct GetRequestHeadersReply {
    from: String,
    headers: Vec<Header>,
    headerSize: usize,
    rawHeaders: String,
}

#[derive(Serialize)]

@ -117,7 +115,7 @@ struct GetResponseHeadersReply {
    from: String,
    headers: Vec<Header>,
    headerSize: usize,
    rawHeaders: String,
}

#[derive(Serialize)]

@ -131,19 +129,19 @@ struct GetResponseContentReply {
struct GetRequestPostDataReply {
    from: String,
    postData: Option<Vec<u8>>,
    postDataDiscarded: bool,
}

#[derive(Serialize)]
struct GetRequestCookiesReply {
    from: String,
    cookies: Vec<u8>,
}

#[derive(Serialize)]
struct GetResponseCookiesReply {
    from: String,
    cookies: Vec<u8>,
}

#[derive(Serialize)]
@ -179,11 +177,13 @@ impl Actor for NetworkEventActor {
        self.name.clone()
    }

    fn handle_message(
        &self,
        _registry: &ActorRegistry,
        msg_type: &str,
        _msg: &Map<String, Value>,
        stream: &mut TcpStream,
    ) -> Result<ActorMessageStatus, ()> {
        Ok(match msg_type {
            "getRequestHeaders" => {
                let mut headers = Vec::new();

@ -194,7 +194,10 @@ impl Actor for NetworkEventActor {
                    let value = item.value_string();
                    rawHeadersString = rawHeadersString + name + ":" + &value + "\r\n";
                    headersSize += name.len() + value.len();
                    headers.push(Header {
                        name: name.to_owned(),
                        value: value.to_owned(),
                    });
                }
                let msg = GetRequestHeadersReply {
                    from: self.name(),

@ -204,7 +207,7 @@ impl Actor for NetworkEventActor {
                };
                stream.write_json_packet(&msg);
                ActorMessageStatus::Processed
            },
            "getRequestCookies" => {
                let mut cookies = Vec::new();
                if let Some(req_cookies) = self.request.headers.get_raw("Cookie") {

@ -221,7 +224,7 @@ impl Actor for NetworkEventActor {
                };
                stream.write_json_packet(&msg);
                ActorMessageStatus::Processed
            },
            "getRequestPostData" => {
                let msg = GetRequestPostDataReply {
                    from: self.name(),

@ -230,7 +233,7 @@ impl Actor for NetworkEventActor {
                };
                stream.write_json_packet(&msg);
                ActorMessageStatus::Processed
            },
            "getResponseHeaders" => {
                if let Some(ref response_headers) = self.response.headers {
                    let mut headers = vec![];

@ -258,7 +261,7 @@ impl Actor for NetworkEventActor {
                    stream.write_json_packet(&msg);
                }
                ActorMessageStatus::Processed
            },
            "getResponseCookies" => {
                let mut cookies = Vec::new();
                if let Some(res_cookies) = self.request.headers.get_raw("set-cookie") {

@ -275,7 +278,7 @@ impl Actor for NetworkEventActor {
                };
                stream.write_json_packet(&msg);
                ActorMessageStatus::Processed
            },
            "getResponseContent" => {
                let msg = GetResponseContentReply {
                    from: self.name(),

@ -284,7 +287,7 @@ impl Actor for NetworkEventActor {
                };
                stream.write_json_packet(&msg);
                ActorMessageStatus::Processed
            },
            "getEventTimings" => {
                // TODO: This is a fake timings msg
                let timingsObj = Timings {

@ -304,19 +307,19 @@ impl Actor for NetworkEventActor {
                };
                stream.write_json_packet(&msg);
                ActorMessageStatus::Processed
            },
            "getSecurityInfo" => {
                // TODO: Send the correct values for securityInfo.
                let msg = GetSecurityInfoReply {
                    from: self.name(),
                    securityInfo: SecurityInfo {
                        state: "insecure".to_owned(),
                    },
                };
                stream.write_json_packet(&msg);
                ActorMessageStatus::Processed
            },
            _ => ActorMessageStatus::Ignored,
        })
    }
}
@ -382,8 +385,13 @@ impl NetworkEventActor {
        // TODO: Send the correct values for all these fields.
        let hSizeOption = self.response.headers.as_ref().map(|headers| headers.len());
        let hSize = hSizeOption.unwrap_or(0);
        let (status_code, status_message) = self
            .response
            .status
            .as_ref()
            .map_or((0, "".to_owned()), |&RawStatus(ref code, ref text)| {
                (*code, text.clone().into_owned())
            });
        // TODO: Send the correct values for remoteAddress and remotePort and http_version.
        ResponseStartMsg {
            httpVersion: "HTTP/1.1".to_owned(),

@ -392,7 +400,7 @@ impl NetworkEventActor {
            status: status_code.to_string(),
            statusText: status_message,
            headersSize: hSize,
            discardResponseBody: false,
        }
    }

@ -401,7 +409,7 @@ impl NetworkEventActor {
        if let Some(ref headers) = self.response.headers {
            mString = match headers.get() {
                Some(&ContentType(ref mime)) => mime.to_string(),
                None => "".to_owned(),
            };
        }
        // TODO: Set correct values when response's body is sent to the devtools in http_loader.

@ -418,7 +426,7 @@ impl NetworkEventActor {
        if let Some(ref headers) = self.response.headers {
            cookies_size = match headers.get() {
                Some(&Cookie(ref cookie)) => cookie.len(),
                None => 0,
            };
        }
        ResponseCookiesMsg {

@ -434,7 +442,6 @@ impl NetworkEventActor {
            for item in headers.iter() {
                headers_byte_count += item.name().len() + item.value_string().len();
            }
        }
        ResponseHeadersMsg {
            headers: headers_size,

@ -443,7 +450,8 @@ impl NetworkEventActor {
    }

    pub fn request_headers(&self) -> RequestHeadersMsg {
        let size = self
            .request
            .headers
            .iter()
            .fold(0, |acc, h| acc + h.name().len() + h.value_string().len());

@ -456,7 +464,7 @@ impl NetworkEventActor {
    pub fn request_cookies(&self) -> RequestCookiesMsg {
        let cookies_size = match self.request.headers.get() {
            Some(&Cookie(ref cookie)) => cookie.len(),
            None => 0,
        };
        RequestCookiesMsg {
            cookies: cookies_size,

View file

@ -15,11 +15,13 @@ impl Actor for ObjectActor {
    fn name(&self) -> String {
        self.name.clone()
    }

    fn handle_message(
        &self,
        _: &ActorRegistry,
        _: &str,
        _: &Map<String, Value>,
        _: &mut TcpStream,
    ) -> Result<ActorMessageStatus, ()> {
        Ok(ActorMessageStatus::Ignored)
    }
}

View file

@ -51,11 +51,13 @@ impl Actor for PerformanceActor {
        self.name.clone()
    }

    fn handle_message(
        &self,
        _registry: &ActorRegistry,
        msg_type: &str,
        _msg: &Map<String, Value>,
        stream: &mut TcpStream,
    ) -> Result<ActorMessageStatus, ()> {
        Ok(match msg_type {
            "connect" => {
                let msg = ConnectReply {

@ -79,11 +81,11 @@ impl Actor for PerformanceActor {
                    value: SuccessMsg {
                        success: true,
                        errors: vec![],
                    },
                };
                stream.write_json_packet(&msg);
                ActorMessageStatus::Processed
            },
            _ => ActorMessageStatus::Ignored,
        })
    }

@ -91,28 +93,34 @@ impl Actor for PerformanceActor {
impl PerformanceActor {
    pub fn new(name: String) -> PerformanceActor {
        PerformanceActor { name: name }
    }

    pub fn description() -> ActorDescription {
        ActorDescription {
            category: "actor",
            typeName: "performance",
            methods: vec![Method {
                name: "canCurrentlyRecord",
                request: Value::Object(
                    vec![(
                        "type".to_owned(),
                        Value::String("canCurrentlyRecord".to_owned()),
                    )].into_iter()
                        .collect(),
                ),
                response: Value::Object(
                    vec![(
                        "value".to_owned(),
                        Value::Object(
                            vec![("_retval".to_owned(), Value::String("json".to_owned()))]
                                .into_iter()
                                .collect(),
                        ),
                    )].into_iter()
                        .collect(),
                ),
            }],
        }
    }
}

View file

@ -15,19 +15,19 @@ impl Actor for ProfilerActor {
        self.name.clone()
    }

    fn handle_message(
        &self,
        _registry: &ActorRegistry,
        _msg_type: &str,
        _msg: &Map<String, Value>,
        _stream: &mut TcpStream,
    ) -> Result<ActorMessageStatus, ()> {
        Ok(ActorMessageStatus::Ignored)
    }
}

impl ProfilerActor {
    pub fn new(name: String) -> ProfilerActor {
        ProfilerActor { name: name }
    }
}

View file

@ -6,7 +6,6 @@
/// (http://mxr.mozilla.org/mozilla-central/source/toolkit/devtools/server/actors/root.js).
/// Connection point for all new remote devtools interactions, providing lists of know actors
/// that perform more specific actions (tabs, addons, browser chrome, etc.)
use actor::{Actor, ActorMessageStatus, ActorRegistry};
use actors::performance::PerformanceActor;
use actors::tab::{TabActor, TabActorMsg};

@ -65,11 +64,13 @@ impl Actor for RootActor {
        "root".to_owned()
    }

    fn handle_message(
        &self,
        registry: &ActorRegistry,
        msg_type: &str,
        _msg: &Map<String, Value>,
        stream: &mut TcpStream,
    ) -> Result<ActorMessageStatus, ()> {
        Ok(match msg_type {
            "listAddons" => {
                let actor = ListAddonsReply {

@ -78,20 +79,22 @@ impl Actor for RootActor {
                };
                stream.write_json_packet(&actor);
                ActorMessageStatus::Processed
            },
            //https://wiki.mozilla.org/Remote_Debugging_Protocol#Listing_Browser_Tabs
            "listTabs" => {
                let actor = ListTabsReply {
                    from: "root".to_owned(),
                    selected: 0,
                    tabs: self
                        .tabs
                        .iter()
                        .map(|tab| registry.find::<TabActor>(tab).encodable())
                        .collect(),
                };
                stream.write_json_packet(&actor);
                ActorMessageStatus::Processed
            },
            "protocolDescription" => {
                let msg = ProtocolDescriptionReply {

@ -102,9 +105,9 @@ impl Actor for RootActor {
                };
                stream.write_json_packet(&msg);
                ActorMessageStatus::Processed
            },
            _ => ActorMessageStatus::Ignored,
        })
    }
}

@ -118,7 +121,7 @@ impl RootActor {
            sources: true,
            highlightable: true,
            customHighlighters: true,
            networkMonitor: true,
        },
    }
}

View file

@ -37,7 +37,7 @@ struct TabDetachedReply {
#[derive(Serialize)]
struct ReconfigureReply {
    from: String,
}

#[derive(Serialize)]

@ -84,25 +84,28 @@ impl Actor for TabActor {
        self.name.clone()
    }

    fn handle_message(
        &self,
        registry: &ActorRegistry,
        msg_type: &str,
        msg: &Map<String, Value>,
        stream: &mut TcpStream,
    ) -> Result<ActorMessageStatus, ()> {
        Ok(match msg_type {
            "reconfigure" => {
                if let Some(options) = msg.get("options").and_then(|o| o.as_object()) {
                    if let Some(val) = options.get("performReload") {
                        if val.as_bool().unwrap_or(false) {
                            let console_actor = registry.find::<ConsoleActor>(&self.console);
                            let _ = console_actor
                                .script_chan
                                .send(DevtoolScriptControlMsg::Reload(console_actor.pipeline));
                        }
                    }
                }
                stream.write_json_packet(&ReconfigureReply { from: self.name() });
                ActorMessageStatus::Processed
            },
            // https://wiki.mozilla.org/Remote_Debugging_Protocol#Listing_Browser_Tabs
            // (see "To attach to a _tabActor_")

@ -116,12 +119,17 @@ impl Actor for TabActor {
                    traits: TabTraits,
                };
                let console_actor = registry.find::<ConsoleActor>(&self.console);
                console_actor
                    .streams
                    .borrow_mut()
                    .push(stream.try_clone().unwrap());
                stream.write_json_packet(&msg);
                console_actor
                    .script_chan
                    .send(WantsLiveNotifications(console_actor.pipeline, true))
                    .unwrap();
                ActorMessageStatus::Processed
            },
            //FIXME: The current implementation won't work for multiple connections. Need to ensure
            // that the correct stream is removed.

@ -133,21 +141,23 @@ impl Actor for TabActor {
                let console_actor = registry.find::<ConsoleActor>(&self.console);
                console_actor.streams.borrow_mut().pop();
                stream.write_json_packet(&msg);
                console_actor
                    .script_chan
                    .send(WantsLiveNotifications(console_actor.pipeline, false))
                    .unwrap();
                ActorMessageStatus::Processed
            },
            "listFrames" => {
                let msg = ListFramesReply {
                    from: self.name(),
                    frames: vec![],
                };
                stream.write_json_packet(&msg);
                ActorMessageStatus::Processed
            },
            _ => ActorMessageStatus::Ignored,
        })
    }
}

View file

@ -35,7 +35,7 @@ struct ThreadResumedReply {
#[derive(Serialize)]
struct ReconfigureReply {
    from: String,
}

#[derive(Serialize)]

@ -53,9 +53,7 @@ pub struct ThreadActor {
impl ThreadActor {
    pub fn new(name: String) -> ThreadActor {
        ThreadActor { name: name }
    }
}

@ -64,11 +62,13 @@ impl Actor for ThreadActor {
        self.name.clone()
    }

    fn handle_message(
        &self,
        registry: &ActorRegistry,
        msg_type: &str,
        _msg: &Map<String, Value>,
        stream: &mut TcpStream,
    ) -> Result<ActorMessageStatus, ()> {
        Ok(match msg_type {
            "attach" => {
                let msg = ThreadAttachedReply {

@ -76,7 +76,9 @@ impl Actor for ThreadActor {
                    type_: "paused".to_owned(),
                    actor: registry.new_name("pause"),
                    poppedFrames: vec![],
                    why: WhyMsg {
                        type_: "attached".to_owned(),
                    },
                };
                stream.write_json_packet(&msg);
                ActorMessageStatus::Processed

@ -94,7 +96,7 @@ impl Actor for ThreadActor {
            "reconfigure" => {
                stream.write_json_packet(&ReconfigureReply { from: self.name() });
                ActorMessageStatus::Processed
            },
            "sources" => {
                let msg = SourcesReply {

@ -103,7 +105,7 @@ impl Actor for ThreadActor {
                };
                stream.write_json_packet(&msg);
                ActorMessageStatus::Processed
            },
            _ => ActorMessageStatus::Ignored,
        })

View file

@ -44,7 +44,7 @@ struct Emitter {
#[derive(Serialize)]
struct IsRecordingReply {
    from: String,
    value: bool,
}

#[derive(Serialize)]

@ -103,7 +103,9 @@ pub struct HighResolutionStamp(f64);
impl HighResolutionStamp {
    pub fn new(start_stamp: PreciseTime, time: PreciseTime) -> HighResolutionStamp {
        let duration = start_stamp
            .to(time)
            .num_microseconds()
            .expect("Too big duration in microseconds");
        HighResolutionStamp(duration as f64 / 1000 as f64)
    }

@ -122,11 +124,12 @@ impl Serialize for HighResolutionStamp {
static DEFAULT_TIMELINE_DATA_PULL_TIMEOUT: u64 = 200; //ms

impl TimelineActor {
    pub fn new(
        name: String,
        pipeline: PipelineId,
        script_sender: IpcSender<DevtoolScriptControlMsg>,
    ) -> TimelineActor {
        let marker_types = vec![TimelineMarkerType::Reflow, TimelineMarkerType::DOMEvent];
        TimelineActor {
            name: name,

@ -141,15 +144,20 @@ impl TimelineActor {
        }
    }

    fn pull_timeline_data(
        &self,
        receiver: IpcReceiver<Option<TimelineMarker>>,
        mut emitter: Emitter,
    ) {
        let is_recording = self.is_recording.clone();
        if !*is_recording.lock().unwrap() {
            return;
        }
        thread::Builder::new()
            .name("PullTimelineMarkers".to_owned())
            .spawn(move || loop {
                if !*is_recording.lock().unwrap() {
                    break;
                }

@ -161,7 +169,6 @@ impl TimelineActor {
                emitter.send(markers);
                thread::sleep(Duration::from_millis(DEFAULT_TIMELINE_DATA_PULL_TIMEOUT));
            }).expect("Thread spawning failed");
    }
}
@ -171,19 +178,24 @@ impl Actor for TimelineActor {
        self.name.clone()
    }

    fn handle_message(
        &self,
        registry: &ActorRegistry,
        msg_type: &str,
        msg: &Map<String, Value>,
        stream: &mut TcpStream,
    ) -> Result<ActorMessageStatus, ()> {
        Ok(match msg_type {
            "start" => {
                **self.is_recording.lock().as_mut().unwrap() = true;
                let (tx, rx) = ipc::channel::<Option<TimelineMarker>>().unwrap();
                self.script_sender
                    .send(SetTimelineMarkers(
                        self.pipeline,
                        self.marker_types.clone(),
                        tx,
                    )).unwrap();
                *self.stream.borrow_mut() = stream.try_clone().ok();

@ -200,16 +212,20 @@ impl Actor for TimelineActor {
                        let framerate_actor = Some(FramerateActor::create(
                            registry,
                            self.pipeline.clone(),
                            self.script_sender.clone(),
                        ));
                        *self.framerate_actor.borrow_mut() = framerate_actor;
                    }
                }
                let emitter = Emitter::new(
                    self.name(),
                    registry.shareable(),
                    registry.start_stamp(),
                    stream.try_clone().unwrap(),
                    self.memory_actor.borrow().clone(),
                    self.framerate_actor.borrow().clone(),
                );
                self.pull_timeline_data(rx, emitter);

@ -219,7 +235,7 @@ impl Actor for TimelineActor {
                };
                stream.write_json_packet(&msg);
                ActorMessageStatus::Processed
            },
            "stop" => {
                let msg = StopReply {

@ -228,7 +244,11 @@ impl Actor for TimelineActor {
                };
                stream.write_json_packet(&msg);
                self.script_sender
                    .send(DropTimelineMarkers(
                        self.pipeline,
                        self.marker_types.clone(),
                    )).unwrap();
                if let Some(ref actor_name) = *self.framerate_actor.borrow() {
                    registry.drop_actor_later(actor_name.clone());

@ -241,32 +261,32 @@ impl Actor for TimelineActor {
                **self.is_recording.lock().as_mut().unwrap() = false;
                self.stream.borrow_mut().take();
                ActorMessageStatus::Processed
            },
            "isRecording" => {
                let msg = IsRecordingReply {
                    from: self.name(),
                    value: self.is_recording.lock().unwrap().clone(),
                };
                stream.write_json_packet(&msg);
                ActorMessageStatus::Processed
            },
            _ => ActorMessageStatus::Ignored,
        })
    }
}

impl Emitter {
    pub fn new(
        name: String,
        registry: Arc<Mutex<ActorRegistry>>,
        start_stamp: PreciseTime,
        stream: TcpStream,
        memory_actor_name: Option<String>,
        framerate_actor_name: Option<String>,
    ) -> Emitter {
        Emitter {
            from: name,
            stream: stream,

View file

@ -17,11 +17,13 @@ impl Actor for WorkerActor {
    fn name(&self) -> String {
        self.name.clone()
    }

    fn handle_message(
        &self,
        _: &ActorRegistry,
        _: &str,
        _: &Map<String, Value>,
        _: &mut TcpStream,
    ) -> Result<ActorMessageStatus, ()> {
        Ok(ActorMessageStatus::Processed)
    }
}


@ -9,7 +9,6 @@
#![crate_name = "devtools"] #![crate_name = "devtools"]
#![crate_type = "rlib"] #![crate_type = "rlib"]
#![allow(non_snake_case)] #![allow(non_snake_case)]
#![deny(unsafe_code)] #![deny(unsafe_code)]
@ -19,7 +18,8 @@ extern crate ipc_channel;
#[macro_use] #[macro_use]
extern crate log; extern crate log;
extern crate msg; extern crate msg;
#[macro_use] extern crate serde; #[macro_use]
extern crate serde;
extern crate serde_json; extern crate serde_json;
extern crate time; extern crate time;
@@ -128,23 +128,24 @@ pub fn start_server(port: u16) -> Sender<DevtoolsControlMsg> {
     let (sender, receiver) = channel();
     {
         let sender = sender.clone();
-        thread::Builder::new().name("Devtools".to_owned()).spawn(move || {
-            run_server(sender, receiver, port)
-        }).expect("Thread spawning failed");
+        thread::Builder::new()
+            .name("Devtools".to_owned())
+            .spawn(move || run_server(sender, receiver, port))
+            .expect("Thread spawning failed");
     }
     sender
 }
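The change above folds the named devtools thread start-up into a single chained `std::thread::Builder` call. A minimal standalone sketch of that same standard-library pattern (the closure body is only a placeholder for `run_server`):

    use std::thread;

    fn main() {
        // Spawn a named worker thread; the name shows up in panic messages and debuggers.
        let handle = thread::Builder::new()
            .name("Devtools".to_owned())
            .spawn(move || {
                // Placeholder for run_server(sender, receiver, port).
                println!("devtools server loop would run here");
            })
            .expect("Thread spawning failed");

        handle.join().expect("worker thread panicked");
    }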
-fn run_server(sender: Sender<DevtoolsControlMsg>,
-              receiver: Receiver<DevtoolsControlMsg>,
-              port: u16) {
+fn run_server(
+    sender: Sender<DevtoolsControlMsg>,
+    receiver: Receiver<DevtoolsControlMsg>,
+    port: u16,
+) {
     let listener = TcpListener::bind(&("127.0.0.1", port)).unwrap();
     let mut registry = ActorRegistry::new();
-    let root = Box::new(RootActor {
-        tabs: vec!(),
-    });
+    let root = Box::new(RootActor { tabs: vec![] });
     registry.register(root);
     registry.find::<RootActor>("root");
@ -158,7 +159,6 @@ fn run_server(sender: Sender<DevtoolsControlMsg>,
let mut actor_workers: HashMap<(PipelineId, WorkerId), String> = HashMap::new(); let mut actor_workers: HashMap<(PipelineId, WorkerId), String> = HashMap::new();
/// Process the input from a single devtools client until EOF. /// Process the input from a single devtools client until EOF.
fn handle_client(actors: Arc<Mutex<ActorRegistry>>, mut stream: TcpStream) { fn handle_client(actors: Arc<Mutex<ActorRegistry>>, mut stream: TcpStream) {
debug!("connection established to {}", stream.peer_addr().unwrap()); debug!("connection established to {}", stream.peer_addr().unwrap());
@ -171,21 +171,24 @@ fn run_server(sender: Sender<DevtoolsControlMsg>,
'outer: loop { 'outer: loop {
match stream.read_json_packet() { match stream.read_json_packet() {
Ok(Some(json_packet)) => { Ok(Some(json_packet)) => {
if let Err(()) = actors.lock().unwrap().handle_message(json_packet.as_object().unwrap(), if let Err(()) = actors
&mut stream) { .lock()
.unwrap()
.handle_message(json_packet.as_object().unwrap(), &mut stream)
{
debug!("error: devtools actor stopped responding"); debug!("error: devtools actor stopped responding");
let _ = stream.shutdown(Shutdown::Both); let _ = stream.shutdown(Shutdown::Both);
break 'outer break 'outer;
}
} }
},
Ok(None) => { Ok(None) => {
debug!("error: EOF"); debug!("error: EOF");
break 'outer break 'outer;
} },
Err(err_msg) => { Err(err_msg) => {
debug!("error: {}", err_msg); debug!("error: {}", err_msg);
break 'outer break 'outer;
} },
} }
} }
} }
@ -199,12 +202,14 @@ fn run_server(sender: Sender<DevtoolsControlMsg>,
// We need separate actor representations for each script global that exists; // We need separate actor representations for each script global that exists;
// clients can theoretically connect to multiple globals simultaneously. // clients can theoretically connect to multiple globals simultaneously.
// TODO: move this into the root or tab modules? // TODO: move this into the root or tab modules?
fn handle_new_global(actors: Arc<Mutex<ActorRegistry>>, fn handle_new_global(
actors: Arc<Mutex<ActorRegistry>>,
ids: (PipelineId, Option<WorkerId>), ids: (PipelineId, Option<WorkerId>),
script_sender: IpcSender<DevtoolScriptControlMsg>, script_sender: IpcSender<DevtoolScriptControlMsg>,
actor_pipelines: &mut HashMap<PipelineId, String>, actor_pipelines: &mut HashMap<PipelineId, String>,
actor_workers: &mut HashMap<(PipelineId, WorkerId), String>, actor_workers: &mut HashMap<(PipelineId, WorkerId), String>,
page_info: DevtoolsPageInfo) { page_info: DevtoolsPageInfo,
) {
let mut actors = actors.lock().unwrap(); let mut actors = actors.lock().unwrap();
let (pipeline, worker_id) = ids; let (pipeline, worker_id) = ids;
@ -226,9 +231,7 @@ fn run_server(sender: Sender<DevtoolsControlMsg>,
pipeline: pipeline, pipeline: pipeline,
}; };
let timeline = TimelineActor::new(actors.new_name("timeline"), let timeline = TimelineActor::new(actors.new_name("timeline"), pipeline, script_sender);
pipeline,
script_sender);
let profiler = ProfilerActor::new(actors.new_name("profiler")); let profiler = ProfilerActor::new(actors.new_name("profiler"));
let performance = PerformanceActor::new(actors.new_name("performance")); let performance = PerformanceActor::new(actors.new_name("performance"));
@ -251,7 +254,15 @@ fn run_server(sender: Sender<DevtoolsControlMsg>,
let root = actors.find_mut::<RootActor>("root"); let root = actors.find_mut::<RootActor>("root");
root.tabs.push(tab.name.clone()); root.tabs.push(tab.name.clone());
(tab, console, inspector, timeline, profiler, performance, thread) (
tab,
console,
inspector,
timeline,
profiler,
performance,
thread,
)
}; };
if let Some(id) = worker_id { if let Some(id) = worker_id {
@ -274,14 +285,21 @@ fn run_server(sender: Sender<DevtoolsControlMsg>,
actors.register(Box::new(thread)); actors.register(Box::new(thread));
} }
fn handle_console_message(actors: Arc<Mutex<ActorRegistry>>, fn handle_console_message(
actors: Arc<Mutex<ActorRegistry>>,
id: PipelineId, id: PipelineId,
worker_id: Option<WorkerId>, worker_id: Option<WorkerId>,
console_message: ConsoleMessage, console_message: ConsoleMessage,
actor_pipelines: &HashMap<PipelineId, String>, actor_pipelines: &HashMap<PipelineId, String>,
actor_workers: &HashMap<(PipelineId, WorkerId), String>) { actor_workers: &HashMap<(PipelineId, WorkerId), String>,
let console_actor_name = match find_console_actor(actors.clone(), id, worker_id, actor_workers, ) {
actor_pipelines) { let console_actor_name = match find_console_actor(
actors.clone(),
id,
worker_id,
actor_workers,
actor_pipelines,
) {
Some(name) => name, Some(name) => name,
None => return, None => return,
}; };
@ -296,10 +314,10 @@ fn run_server(sender: Sender<DevtoolsControlMsg>,
LogLevel::Info => "info", LogLevel::Info => "info",
LogLevel::Warn => "warn", LogLevel::Warn => "warn",
LogLevel::Error => "error", LogLevel::Error => "error",
_ => "log" _ => "log",
}.to_owned(), }.to_owned(),
timeStamp: precise_time_ns(), timeStamp: precise_time_ns(),
arguments: vec!(console_message.message), arguments: vec![console_message.message],
filename: console_message.filename, filename: console_message.filename,
lineNumber: console_message.lineNumber, lineNumber: console_message.lineNumber,
columnNumber: console_message.columnNumber, columnNumber: console_message.columnNumber,
@ -310,11 +328,13 @@ fn run_server(sender: Sender<DevtoolsControlMsg>,
} }
} }
fn find_console_actor(actors: Arc<Mutex<ActorRegistry>>, fn find_console_actor(
actors: Arc<Mutex<ActorRegistry>>,
id: PipelineId, id: PipelineId,
worker_id: Option<WorkerId>, worker_id: Option<WorkerId>,
actor_workers: &HashMap<(PipelineId, WorkerId), String>, actor_workers: &HashMap<(PipelineId, WorkerId), String>,
actor_pipelines: &HashMap<PipelineId, String>) -> Option<String> { actor_pipelines: &HashMap<PipelineId, String>,
) -> Option<String> {
let actors = actors.lock().unwrap(); let actors = actors.lock().unwrap();
if let Some(worker_id) = worker_id { if let Some(worker_id) = worker_id {
let actor_name = (*actor_workers).get(&(id, worker_id))?; let actor_name = (*actor_workers).get(&(id, worker_id))?;
@ -325,20 +345,28 @@ fn run_server(sender: Sender<DevtoolsControlMsg>,
} }
} }
fn handle_network_event(actors: Arc<Mutex<ActorRegistry>>, fn handle_network_event(
actors: Arc<Mutex<ActorRegistry>>,
mut connections: Vec<TcpStream>, mut connections: Vec<TcpStream>,
actor_pipelines: &HashMap<PipelineId, String>, actor_pipelines: &HashMap<PipelineId, String>,
actor_requests: &mut HashMap<String, String>, actor_requests: &mut HashMap<String, String>,
actor_workers: &HashMap<(PipelineId, WorkerId), String>, actor_workers: &HashMap<(PipelineId, WorkerId), String>,
pipeline_id: PipelineId, pipeline_id: PipelineId,
request_id: String, request_id: String,
network_event: NetworkEvent) { network_event: NetworkEvent,
let console_actor_name = match find_console_actor(actors.clone(), pipeline_id, None, ) {
actor_workers, actor_pipelines) { let console_actor_name = match find_console_actor(
actors.clone(),
pipeline_id,
None,
actor_workers,
actor_pipelines,
) {
Some(name) => name, Some(name) => name,
None => return, None => return,
}; };
let netevent_actor_name = find_network_event_actor(actors.clone(), actor_requests, request_id.clone()); let netevent_actor_name =
find_network_event_actor(actors.clone(), actor_requests, request_id.clone());
let mut actors = actors.lock().unwrap(); let mut actors = actors.lock().unwrap();
let actor = actors.find_mut::<NetworkEventActor>(&netevent_actor_name); let actor = actors.find_mut::<NetworkEventActor>(&netevent_actor_name);
@ -356,8 +384,7 @@ fn run_server(sender: Sender<DevtoolsControlMsg>,
for stream in &mut connections { for stream in &mut connections {
stream.write_json_packet(&msg); stream.write_json_packet(&msg);
} }
},
}
NetworkEvent::HttpResponse(httpresponse) => { NetworkEvent::HttpResponse(httpresponse) => {
//Store the response information in the actor //Store the response information in the actor
actor.add_response(httpresponse); actor.add_response(httpresponse);
@ -385,7 +412,7 @@ fn run_server(sender: Sender<DevtoolsControlMsg>,
from: netevent_actor_name.clone(), from: netevent_actor_name.clone(),
type_: "networkEventUpdate".to_owned(), type_: "networkEventUpdate".to_owned(),
updateType: "responseStart".to_owned(), updateType: "responseStart".to_owned(),
response: actor.response_start() response: actor.response_start(),
}; };
for stream in &mut connections { for stream in &mut connections {
@ -441,38 +468,44 @@ fn run_server(sender: Sender<DevtoolsControlMsg>,
for stream in &mut connections { for stream in &mut connections {
stream.write_merged_json_packet(&msg, &actor.response_headers()); stream.write_merged_json_packet(&msg, &actor.response_headers());
} }
} },
} }
} }
 // Find the name of NetworkEventActor corresponding to request_id
 // Create a new one if it does not exist, add it to the actor_requests hashmap
-fn find_network_event_actor(actors: Arc<Mutex<ActorRegistry>>,
-                            actor_requests: &mut HashMap<String, String>,
-                            request_id: String) -> String {
+fn find_network_event_actor(
+    actors: Arc<Mutex<ActorRegistry>>,
+    actor_requests: &mut HashMap<String, String>,
+    request_id: String,
+) -> String {
     let mut actors = actors.lock().unwrap();
     match (*actor_requests).entry(request_id) {
         Occupied(name) => {
             //TODO: Delete from map like Firefox does?
             name.into_mut().clone()
-        }
+        },
         Vacant(entry) => {
             let actor_name = actors.new_name("netevent");
             let actor = NetworkEventActor::new(actor_name.clone());
             entry.insert(actor_name.clone());
             actors.register(Box::new(actor));
             actor_name
-        }
+        },
     }
 }
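`find_network_event_actor` above is a find-or-create lookup built on the `HashMap` entry API: an `Occupied` entry returns the existing actor name, a `Vacant` entry mints and registers a new one. A self-contained sketch of the same pattern, with a plain counter standing in for the actor registry's `new_name`:

    use std::collections::hash_map::Entry::{Occupied, Vacant};
    use std::collections::HashMap;

    /// Return the name mapped to `request_id`, creating and recording one on first use.
    fn find_or_create(
        requests: &mut HashMap<String, String>,
        request_id: String,
        counter: &mut u32,
    ) -> String {
        match requests.entry(request_id) {
            Occupied(name) => name.into_mut().clone(),
            Vacant(entry) => {
                *counter += 1;
                let actor_name = format!("netevent{}", *counter);
                entry.insert(actor_name.clone());
                actor_name
            },
        }
    }

    fn main() {
        let mut requests = HashMap::new();
        let mut counter = 0;
        let first = find_or_create(&mut requests, "req-1".to_owned(), &mut counter);
        let second = find_or_create(&mut requests, "req-1".to_owned(), &mut counter);
        assert_eq!(first, second); // the same request id maps to the same actor name
    }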
let sender_clone = sender.clone(); let sender_clone = sender.clone();
thread::Builder::new().name("DevtoolsClientAcceptor".to_owned()).spawn(move || { thread::Builder::new()
.name("DevtoolsClientAcceptor".to_owned())
.spawn(move || {
// accept connections and process them, spawning a new thread for each one // accept connections and process them, spawning a new thread for each one
for stream in listener.incoming() { for stream in listener.incoming() {
// connection succeeded // connection succeeded
sender_clone.send(DevtoolsControlMsg::FromChrome( sender_clone
ChromeToDevtoolsControlMsg::AddClient(stream.unwrap()))).unwrap(); .send(DevtoolsControlMsg::FromChrome(
ChromeToDevtoolsControlMsg::AddClient(stream.unwrap()),
)).unwrap();
} }
}).expect("Thread spawning failed"); }).expect("Thread spawning failed");
@ -481,26 +514,43 @@ fn run_server(sender: Sender<DevtoolsControlMsg>,
DevtoolsControlMsg::FromChrome(ChromeToDevtoolsControlMsg::AddClient(stream)) => { DevtoolsControlMsg::FromChrome(ChromeToDevtoolsControlMsg::AddClient(stream)) => {
let actors = actors.clone(); let actors = actors.clone();
accepted_connections.push(stream.try_clone().unwrap()); accepted_connections.push(stream.try_clone().unwrap());
thread::Builder::new().name("DevtoolsClientHandler".to_owned()).spawn(move || { thread::Builder::new()
handle_client(actors, stream.try_clone().unwrap()) .name("DevtoolsClientHandler".to_owned())
}).expect("Thread spawning failed"); .spawn(move || handle_client(actors, stream.try_clone().unwrap()))
} .expect("Thread spawning failed");
},
DevtoolsControlMsg::FromScript(ScriptToDevtoolsControlMsg::FramerateTick( DevtoolsControlMsg::FromScript(ScriptToDevtoolsControlMsg::FramerateTick(
actor_name, tick)) => actor_name,
handle_framerate_tick(actors.clone(), actor_name, tick), tick,
)) => handle_framerate_tick(actors.clone(), actor_name, tick),
DevtoolsControlMsg::FromScript(ScriptToDevtoolsControlMsg::NewGlobal( DevtoolsControlMsg::FromScript(ScriptToDevtoolsControlMsg::NewGlobal(
ids, script_sender, pageinfo)) => ids,
handle_new_global(actors.clone(), ids, script_sender, &mut actor_pipelines, script_sender,
&mut actor_workers, pageinfo), pageinfo,
)) => handle_new_global(
actors.clone(),
ids,
script_sender,
&mut actor_pipelines,
&mut actor_workers,
pageinfo,
),
DevtoolsControlMsg::FromScript(ScriptToDevtoolsControlMsg::ConsoleAPI( DevtoolsControlMsg::FromScript(ScriptToDevtoolsControlMsg::ConsoleAPI(
id, id,
console_message, console_message,
worker_id)) => worker_id,
handle_console_message(actors.clone(), id, worker_id, console_message, )) => handle_console_message(
&actor_pipelines, &actor_workers), actors.clone(),
id,
worker_id,
console_message,
&actor_pipelines,
&actor_workers,
),
DevtoolsControlMsg::FromScript(ScriptToDevtoolsControlMsg::ReportCSSError( DevtoolsControlMsg::FromScript(ScriptToDevtoolsControlMsg::ReportCSSError(
id, id,
css_error)) => { css_error,
)) => {
let console_message = ConsoleMessage { let console_message = ConsoleMessage {
message: css_error.msg, message: css_error.msg,
logLevel: LogLevel::Warn, logLevel: LogLevel::Warn,
@ -508,11 +558,19 @@ fn run_server(sender: Sender<DevtoolsControlMsg>,
lineNumber: css_error.line as usize, lineNumber: css_error.line as usize,
columnNumber: css_error.column as usize, columnNumber: css_error.column as usize,
}; };
handle_console_message(actors.clone(), id, None, console_message, handle_console_message(
&actor_pipelines, &actor_workers) actors.clone(),
id,
None,
console_message,
&actor_pipelines,
&actor_workers,
)
}, },
DevtoolsControlMsg::FromChrome(ChromeToDevtoolsControlMsg::NetworkEvent( DevtoolsControlMsg::FromChrome(ChromeToDevtoolsControlMsg::NetworkEvent(
request_id, network_event)) => { request_id,
network_event,
)) => {
// copy the accepted_connections vector // copy the accepted_connections vector
let mut connections = Vec::<TcpStream>::new(); let mut connections = Vec::<TcpStream>::new();
for stream in &accepted_connections { for stream in &accepted_connections {
@ -523,10 +581,18 @@ fn run_server(sender: Sender<DevtoolsControlMsg>,
NetworkEvent::HttpResponse(ref response) => response.pipeline_id, NetworkEvent::HttpResponse(ref response) => response.pipeline_id,
NetworkEvent::HttpRequest(ref request) => request.pipeline_id, NetworkEvent::HttpRequest(ref request) => request.pipeline_id,
}; };
handle_network_event(actors.clone(), connections, &actor_pipelines, &mut actor_requests, handle_network_event(
&actor_workers, pipeline_id, request_id, network_event); actors.clone(),
connections,
&actor_pipelines,
&mut actor_requests,
&actor_workers,
pipeline_id,
request_id,
network_event,
);
}, },
DevtoolsControlMsg::FromChrome(ChromeToDevtoolsControlMsg::ServerExitMsg) => break DevtoolsControlMsg::FromChrome(ChromeToDevtoolsControlMsg::ServerExitMsg) => break,
} }
} }
for connection in &mut accepted_connections { for connection in &mut accepted_connections {


@@ -55,7 +55,7 @@ impl JsonPacketStream for TcpStream {
     fn read_json_packet(&mut self) -> Result<Option<Value>, String> {
         // https://wiki.mozilla.org/Remote_Debugging_Protocol_Stream_Transport
         // In short, each JSON packet is [ascii length]:[JSON data of given length]
-        let mut buffer = vec!();
+        let mut buffer = vec![];
         loop {
             let mut buf = [0];
             let byte = match self.read(&mut buf) {
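The comment in `read_json_packet` describes the stream transport framing: an ASCII decimal byte count, a colon, then exactly that many bytes of JSON. A minimal sketch of the matching write side under that framing (an illustration, not Servo's `write_json_packet`; it assumes `serde_json` as a dependency):

    use std::io::{self, Write};

    use serde_json::json;

    /// Frame a JSON value as `<byte length>:<json>` per the devtools stream transport.
    fn write_framed<W: Write>(sink: &mut W, value: &serde_json::Value) -> io::Result<()> {
        let body = serde_json::to_string(value)?;
        write!(sink, "{}:{}", body.len(), body)
    }

    fn main() -> io::Result<()> {
        let mut out = Vec::new();
        write_framed(&mut out, &json!({ "from": "root", "applicationType": "browser" }))?;
        // Prints the length prefix, a colon, then the JSON body.
        println!("{}", String::from_utf8_lossy(&out));
        Ok(())
    }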


@ -8,7 +8,6 @@
#![crate_name = "devtools_traits"] #![crate_name = "devtools_traits"]
#![crate_type = "rlib"] #![crate_type = "rlib"]
#![allow(non_snake_case)] #![allow(non_snake_case)]
#![deny(unsafe_code)] #![deny(unsafe_code)]
@ -17,9 +16,11 @@ extern crate bitflags;
extern crate hyper; extern crate hyper;
extern crate ipc_channel; extern crate ipc_channel;
extern crate malloc_size_of; extern crate malloc_size_of;
#[macro_use] extern crate malloc_size_of_derive; #[macro_use]
extern crate malloc_size_of_derive;
extern crate msg; extern crate msg;
#[macro_use] extern crate serde; #[macro_use]
extern crate serde;
extern crate servo_url; extern crate servo_url;
extern crate time; extern crate time;
@ -45,7 +46,7 @@ pub struct CSSError {
pub filename: String, pub filename: String,
pub line: u32, pub line: u32,
pub column: u32, pub column: u32,
pub msg: String pub msg: String,
} }
/// Messages to instruct the devtools server to update its known actors/state /// Messages to instruct the devtools server to update its known actors/state
@ -75,9 +76,11 @@ pub enum ChromeToDevtoolsControlMsg {
pub enum ScriptToDevtoolsControlMsg { pub enum ScriptToDevtoolsControlMsg {
/// A new global object was created, associated with a particular pipeline. /// A new global object was created, associated with a particular pipeline.
/// The means of communicating directly with it are provided. /// The means of communicating directly with it are provided.
NewGlobal((PipelineId, Option<WorkerId>), NewGlobal(
(PipelineId, Option<WorkerId>),
IpcSender<DevtoolScriptControlMsg>, IpcSender<DevtoolScriptControlMsg>,
DevtoolsPageInfo), DevtoolsPageInfo,
),
/// A particular page has invoked the console API. /// A particular page has invoked the console API.
ConsoleAPI(PipelineId, ConsoleMessage, Option<WorkerId>), ConsoleAPI(PipelineId, ConsoleMessage, Option<WorkerId>),
/// An animation frame with the given timestamp was processed in a script thread. /// An animation frame with the given timestamp was processed in a script thread.
@ -201,13 +204,21 @@ pub enum DevtoolScriptControlMsg {
/// Retrieve the computed layout properties of the given node in the given pipeline. /// Retrieve the computed layout properties of the given node in the given pipeline.
GetLayout(PipelineId, String, IpcSender<Option<ComputedNodeLayout>>), GetLayout(PipelineId, String, IpcSender<Option<ComputedNodeLayout>>),
/// Retrieve all stored console messages for the given pipeline. /// Retrieve all stored console messages for the given pipeline.
GetCachedMessages(PipelineId, CachedConsoleMessageTypes, IpcSender<Vec<CachedConsoleMessage>>), GetCachedMessages(
PipelineId,
CachedConsoleMessageTypes,
IpcSender<Vec<CachedConsoleMessage>>,
),
/// Update a given node's attributes with a list of modifications. /// Update a given node's attributes with a list of modifications.
ModifyAttribute(PipelineId, String, Vec<Modification>), ModifyAttribute(PipelineId, String, Vec<Modification>),
/// Request live console messages for a given pipeline (true if desired, false otherwise). /// Request live console messages for a given pipeline (true if desired, false otherwise).
WantsLiveNotifications(PipelineId, bool), WantsLiveNotifications(PipelineId, bool),
/// Request live notifications for a given set of timeline events for a given pipeline. /// Request live notifications for a given set of timeline events for a given pipeline.
SetTimelineMarkers(PipelineId, Vec<TimelineMarkerType>, IpcSender<Option<TimelineMarker>>), SetTimelineMarkers(
PipelineId,
Vec<TimelineMarkerType>,
IpcSender<Option<TimelineMarker>>,
),
/// Withdraw request for live timeline notifications for a given pipeline. /// Withdraw request for live timeline notifications for a given pipeline.
DropTimelineMarkers(PipelineId, Vec<TimelineMarkerType>), DropTimelineMarkers(PipelineId, Vec<TimelineMarkerType>),
/// Request a callback directed at the given actor name from the next animation frame /// Request a callback directed at the given actor name from the next animation frame


@ -25,7 +25,6 @@ pub fn dom_struct(args: TokenStream, input: TokenStream) -> TokenStream {
// Work around https://github.com/rust-lang/rust/issues/46489 // Work around https://github.com/rust-lang/rust/issues/46489
let attributes: TokenStream = attributes.to_string().parse().unwrap(); let attributes: TokenStream = attributes.to_string().parse().unwrap();
let output: TokenStream = attributes.into_iter().chain(input.into_iter()).collect(); let output: TokenStream = attributes.into_iter().chain(input.into_iter()).collect();
let item: Item = syn::parse(output).unwrap(); let item: Item = syn::parse(output).unwrap();
@ -36,7 +35,11 @@ pub fn dom_struct(args: TokenStream, input: TokenStream) -> TokenStream {
return quote!(#s2).into(); return quote!(#s2).into();
} }
if let Fields::Named(ref f) = s.fields { if let Fields::Named(ref f) = s.fields {
let f = f.named.first().expect("Must have at least one field").into_value(); let f = f
.named
.first()
.expect("Must have at least one field")
.into_value();
let ident = f.ident.as_ref().expect("Must have named fields"); let ident = f.ident.as_ref().expect("Must have named fields");
let name = &s.ident; let name = &s.ident;
let ty = &f.ty; let ty = &f.ty;


@ -5,8 +5,10 @@
#![recursion_limit = "128"] #![recursion_limit = "128"]
extern crate proc_macro; extern crate proc_macro;
#[macro_use] extern crate quote; #[macro_use]
#[macro_use] extern crate syn; extern crate quote;
#[macro_use]
extern crate syn;
#[proc_macro_derive(DomObject)] #[proc_macro_derive(DomObject)]
pub fn expand_token_stream(input: proc_macro::TokenStream) -> proc_macro::TokenStream { pub fn expand_token_stream(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
@ -74,7 +76,9 @@ fn expand_dom_object(input: syn::DeriveInput) -> quote::Tokens {
})); }));
let mut generics = input.generics.clone(); let mut generics = input.generics.clone();
generics.params.push(parse_quote!(__T: ::dom::bindings::reflector::DomObject)); generics
.params
.push(parse_quote!(__T: ::dom::bindings::reflector::DomObject));
let (impl_generics, _, where_clause) = generics.split_for_impl(); let (impl_generics, _, where_clause) = generics.split_for_impl();


@@ -24,9 +24,8 @@ use std::sync::mpsc::{Receiver, Sender};
 use style_traits::cursor::CursorKind;
 use webrender_api::{DeviceIntPoint, DeviceUintSize};
 /// Used to wake up the event loop, provided by the servo port/embedder.
-pub trait EventLoopWaker : 'static + Send {
+pub trait EventLoopWaker: 'static + Send {
     fn clone(&self) -> Box<EventLoopWaker + Send>;
     fn wake(&self);
 }
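`EventLoopWaker` is what an embedder supplies so that other threads can nudge the main event loop. A minimal sketch of one possible implementation backed by an mpsc channel; the trait is restated locally (with `dyn`) so the snippet stands alone, and `ChannelWaker` is an invented name:

    use std::sync::mpsc::{channel, Receiver, Sender};

    // Local restatement of the trait shown above, so the sketch compiles on its own.
    pub trait EventLoopWaker: 'static + Send {
        fn clone(&self) -> Box<dyn EventLoopWaker + Send>;
        fn wake(&self);
    }

    // Hypothetical waker that signals the event loop through a channel.
    struct ChannelWaker {
        sender: Sender<()>,
    }

    impl EventLoopWaker for ChannelWaker {
        fn clone(&self) -> Box<dyn EventLoopWaker + Send> {
            Box::new(ChannelWaker {
                sender: self.sender.clone(),
            })
        }
        fn wake(&self) {
            // If the loop is gone there is nothing left to wake; ignore the error.
            let _ = self.sender.send(());
        }
    }

    fn main() {
        let (tx, rx): (Sender<()>, Receiver<()>) = channel();
        let waker = ChannelWaker { sender: tx };
        let cloned = EventLoopWaker::clone(&waker);
        cloned.wake();
        assert!(rx.recv().is_ok()); // the "event loop" observed the wake-up
    }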
@ -58,11 +57,13 @@ impl Clone for EmbedderProxy {
/// The port that the embedder receives messages on. /// The port that the embedder receives messages on.
pub struct EmbedderReceiver { pub struct EmbedderReceiver {
pub receiver: Receiver<(Option<TopLevelBrowsingContextId>, EmbedderMsg)> pub receiver: Receiver<(Option<TopLevelBrowsingContextId>, EmbedderMsg)>,
} }
impl EmbedderReceiver { impl EmbedderReceiver {
pub fn try_recv_embedder_msg(&mut self) -> Option<(Option<TopLevelBrowsingContextId>, EmbedderMsg)> { pub fn try_recv_embedder_msg(
&mut self,
) -> Option<(Option<TopLevelBrowsingContextId>, EmbedderMsg)> {
self.receiver.try_recv().ok() self.receiver.try_recv().ok()
} }
pub fn recv_embedder_msg(&mut self) -> (Option<TopLevelBrowsingContextId>, EmbedderMsg) { pub fn recv_embedder_msg(&mut self) -> (Option<TopLevelBrowsingContextId>, EmbedderMsg) {
@ -148,7 +149,7 @@ impl Debug for EmbedderMsg {
EmbedderMsg::HideIME => write!(f, "HideIME"), EmbedderMsg::HideIME => write!(f, "HideIME"),
EmbedderMsg::Shutdown => write!(f, "Shutdown"), EmbedderMsg::Shutdown => write!(f, "Shutdown"),
EmbedderMsg::AllowOpeningBrowser(..) => write!(f, "AllowOpeningBrowser"), EmbedderMsg::AllowOpeningBrowser(..) => write!(f, "AllowOpeningBrowser"),
EmbedderMsg::BrowserCreated(..) => write!(f, "BrowserCreated") EmbedderMsg::BrowserCreated(..) => write!(f, "BrowserCreated"),
} }
} }
} }


@ -7,10 +7,12 @@ use std::sync::RwLock;
lazy_static! { lazy_static! {
static ref RES: RwLock<Option<Box<ResourceReaderMethods + Sync + Send>>> = RwLock::new({ static ref RES: RwLock<Option<Box<ResourceReaderMethods + Sync + Send>>> = RwLock::new({
#[cfg(not(feature = "tests"))] { #[cfg(not(feature = "tests"))]
{
None None
} }
#[cfg(feature = "tests")] { #[cfg(feature = "tests")]
{
Some(resources_for_tests()) Some(resources_for_tests())
} }
}); });
@ -21,7 +23,11 @@ pub fn set(reader: Box<ResourceReaderMethods + Sync + Send>) {
} }
pub fn read_bytes(res: Resource) -> Vec<u8> { pub fn read_bytes(res: Resource) -> Vec<u8> {
RES.read().unwrap().as_ref().expect("Resource reader not set.").read(res) RES.read()
.unwrap()
.as_ref()
.expect("Resource reader not set.")
.read(res)
} }
pub fn read_string(res: Resource) -> String { pub fn read_string(res: Resource) -> String {
@ -29,11 +35,19 @@ pub fn read_string(res: Resource) -> String {
} }
pub fn sandbox_access_files() -> Vec<PathBuf> { pub fn sandbox_access_files() -> Vec<PathBuf> {
RES.read().unwrap().as_ref().expect("Resource reader not set.").sandbox_access_files() RES.read()
.unwrap()
.as_ref()
.expect("Resource reader not set.")
.sandbox_access_files()
} }
pub fn sandbox_access_files_dirs() -> Vec<PathBuf> { pub fn sandbox_access_files_dirs() -> Vec<PathBuf> {
RES.read().unwrap().as_ref().expect("Resource reader not set.").sandbox_access_files_dirs() RES.read()
.unwrap()
.as_ref()
.expect("Resource reader not set.")
.sandbox_access_files_dirs()
} }
pub enum Resource { pub enum Resource {
@ -64,8 +78,12 @@ fn resources_for_tests() -> Box<ResourceReaderMethods + Sync + Send> {
use std::io::Read; use std::io::Read;
struct ResourceReader; struct ResourceReader;
impl ResourceReaderMethods for ResourceReader { impl ResourceReaderMethods for ResourceReader {
fn sandbox_access_files(&self) -> Vec<PathBuf> { vec![] } fn sandbox_access_files(&self) -> Vec<PathBuf> {
fn sandbox_access_files_dirs(&self) -> Vec<PathBuf> { vec![] } vec![]
}
fn sandbox_access_files_dirs(&self) -> Vec<PathBuf> {
vec![]
}
fn read(&self, file: Resource) -> Vec<u8> { fn read(&self, file: Resource) -> Vec<u8> {
let file = match file { let file = match file {
Resource::Preferences => "prefs.json", Resource::Preferences => "prefs.json",
@ -92,8 +110,10 @@ fn resources_for_tests() -> Box<ResourceReaderMethods + Sync + Send> {
} }
path.push(file); path.push(file);
let mut buffer = vec![]; let mut buffer = vec![];
File::open(path).expect(&format!("Can't find file: {}", file)) File::open(path)
.read_to_end(&mut buffer).expect("Can't read file"); .expect(&format!("Can't find file: {}", file))
.read_to_end(&mut buffer)
.expect("Can't read file");
buffer buffer
} }
} }


@ -18,7 +18,6 @@ pub trait FallibleVec<T> {
fn try_push(&mut self, value: T) -> Result<(), FailedAllocationError>; fn try_push(&mut self, value: T) -> Result<(), FailedAllocationError>;
} }
///////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////
// Vec // Vec
@@ -52,14 +51,14 @@ fn try_double_vec<T>(vec: &mut Vec<T>) -> Result<(), FailedAllocationError> {
     let new_cap: usize = if old_cap == 0 {
         4
     } else {
-        old_cap.checked_mul(2).ok_or(FailedAllocationError::new(
-            "capacity overflow for Vec",
-        ))?
+        old_cap
+            .checked_mul(2)
+            .ok_or(FailedAllocationError::new("capacity overflow for Vec"))?
     };
-    let new_size_bytes = new_cap.checked_mul(mem::size_of::<T>()).ok_or(
-        FailedAllocationError::new("capacity overflow for Vec"),
-    )?;
+    let new_size_bytes = new_cap
+        .checked_mul(mem::size_of::<T>())
+        .ok_or(FailedAllocationError::new("capacity overflow for Vec"))?;
     let new_ptr = unsafe {
         if old_cap == 0 {
@@ -75,15 +74,12 @@ fn try_double_vec<T>(vec: &mut Vec<T>) -> Result<(), FailedAllocationError> {
         ));
     }
-    let new_vec = unsafe {
-        Vec::from_raw_parts(new_ptr as *mut T, old_len, new_cap)
-    };
+    let new_vec = unsafe { Vec::from_raw_parts(new_ptr as *mut T, old_len, new_cap) };
     mem::forget(mem::replace(vec, new_vec));
     Ok(())
 }
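`try_double_vec` grows the capacity with `checked_mul` so that overflow in either the element count or the byte size surfaces as an error rather than a wrap-around. The same arithmetic in isolation, with a plain `String` error standing in for `FailedAllocationError`:

    use std::mem;

    /// Compute the doubled capacity and its size in bytes, failing on overflow.
    fn doubled_capacity<T>(old_cap: usize) -> Result<(usize, usize), String> {
        let new_cap: usize = if old_cap == 0 {
            4
        } else {
            old_cap
                .checked_mul(2)
                .ok_or_else(|| "capacity overflow for Vec".to_owned())?
        };
        let new_size_bytes = new_cap
            .checked_mul(mem::size_of::<T>())
            .ok_or_else(|| "capacity overflow for Vec".to_owned())?;
        Ok((new_cap, new_size_bytes))
    }

    fn main() {
        assert_eq!(doubled_capacity::<u32>(0), Ok((4, 16)));
        assert_eq!(doubled_capacity::<u32>(8), Ok((16, 64)));
        // Doubling usize::MAX overflows and is reported instead of panicking.
        assert!(doubled_capacity::<u32>(usize::MAX).is_err());
    }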
///////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////
// SmallVec // SmallVec
@ -107,8 +103,7 @@ impl<T: Array> FallibleVec<T::Item> for SmallVec<T> {
#[cfg(feature = "known_system_malloc")] #[cfg(feature = "known_system_malloc")]
#[inline(never)] #[inline(never)]
#[cold] #[cold]
fn try_double_small_vec<T>(svec: &mut SmallVec<T>) fn try_double_small_vec<T>(svec: &mut SmallVec<T>) -> Result<(), FailedAllocationError>
-> Result<(), FailedAllocationError>
where where
T: Array, T: Array,
{ {
@ -122,20 +117,20 @@ where
let new_cap: usize = if old_cap == 0 { let new_cap: usize = if old_cap == 0 {
4 4
} else { } else {
old_cap.checked_mul(2).ok_or(FailedAllocationError::new( old_cap
"capacity overflow for SmallVec", .checked_mul(2)
))? .ok_or(FailedAllocationError::new("capacity overflow for SmallVec"))?
}; };
// This surely shouldn't fail, if |old_cap| was previously accepted as a // This surely shouldn't fail, if |old_cap| was previously accepted as a
// valid value. But err on the side of caution. // valid value. But err on the side of caution.
let old_size_bytes = old_cap.checked_mul(mem::size_of::<T>()).ok_or( let old_size_bytes = old_cap
FailedAllocationError::new("capacity overflow for SmallVec"), .checked_mul(mem::size_of::<T>())
)?; .ok_or(FailedAllocationError::new("capacity overflow for SmallVec"))?;
let new_size_bytes = new_cap.checked_mul(mem::size_of::<T>()).ok_or( let new_size_bytes = new_cap
FailedAllocationError::new("capacity overflow for SmallVec"), .checked_mul(mem::size_of::<T>())
)?; .ok_or(FailedAllocationError::new("capacity overflow for SmallVec"))?;
let new_ptr; let new_ptr;
if svec.spilled() { if svec.spilled() {
@ -149,8 +144,7 @@ where
unsafe { unsafe {
new_ptr = alloc::alloc(new_size_bytes, 0); new_ptr = alloc::alloc(new_size_bytes, 0);
if !new_ptr.is_null() && old_size_bytes > 0 { if !new_ptr.is_null() && old_size_bytes > 0 {
copy_nonoverlapping(old_ptr as *const u8, copy_nonoverlapping(old_ptr as *const u8, new_ptr as *mut u8, old_size_bytes);
new_ptr as *mut u8, old_size_bytes);
} }
} }
} }
@ -161,9 +155,7 @@ where
)); ));
} }
let new_vec = unsafe { let new_vec = unsafe { Vec::from_raw_parts(new_ptr as *mut T::Item, old_len, new_cap) };
Vec::from_raw_parts(new_ptr as *mut T::Item, old_len, new_cap)
};
let new_svec = SmallVec::from_vec(new_vec); let new_svec = SmallVec::from_vec(new_vec);
mem::forget(mem::replace(svec, new_svec)); mem::forget(mem::replace(svec, new_svec));


@ -5,8 +5,9 @@
extern crate app_units; extern crate app_units;
extern crate euclid; extern crate euclid;
extern crate malloc_size_of; extern crate malloc_size_of;
#[macro_use]
extern crate malloc_size_of_derive;
extern crate style_traits; extern crate style_traits;
#[macro_use] extern crate malloc_size_of_derive;
extern crate webrender_api; extern crate webrender_api;
use app_units::{Au, MAX_AU, MIN_AU}; use app_units::{Au, MAX_AU, MIN_AU};
@ -48,7 +49,7 @@ impl MaxRect for Rect<Au> {
fn max_rect() -> Rect<Au> { fn max_rect() -> Rect<Au> {
Rect::new( Rect::new(
Point2D::new(MIN_AU / 2, MIN_AU / 2), Point2D::new(MIN_AU / 2, MIN_AU / 2),
Size2D::new(MAX_AU, MAX_AU) Size2D::new(MAX_AU, MAX_AU),
) )
} }
} }
@@ -64,12 +65,22 @@ impl MaxRect for LayoutRect {
 /// A helper function to convert a rect of `f32` pixels to a rect of app units.
 pub fn f32_rect_to_au_rect(rect: Rect<f32>) -> Rect<Au> {
-    Rect::new(Point2D::new(Au::from_f32_px(rect.origin.x), Au::from_f32_px(rect.origin.y)),
-              Size2D::new(Au::from_f32_px(rect.size.width), Au::from_f32_px(rect.size.height)))
+    Rect::new(
+        Point2D::new(
+            Au::from_f32_px(rect.origin.x),
+            Au::from_f32_px(rect.origin.y),
+        ),
+        Size2D::new(
+            Au::from_f32_px(rect.size.width),
+            Au::from_f32_px(rect.size.height),
+        ),
+    )
 }
 /// A helper function to convert a rect of `Au` pixels to a rect of f32 units.
 pub fn au_rect_to_f32_rect(rect: Rect<Au>) -> Rect<f32> {
-    Rect::new(Point2D::new(rect.origin.x.to_f32_px(), rect.origin.y.to_f32_px()),
-              Size2D::new(rect.size.width.to_f32_px(), rect.size.height.to_f32_px()))
+    Rect::new(
+        Point2D::new(rect.origin.x.to_f32_px(), rect.origin.y.to_f32_px()),
+        Size2D::new(rect.size.width.to_f32_px(), rect.size.height.to_f32_px()),
+    )
 }
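These helpers move between `f32` CSS pixels and fixed-point `Au` app units with `Au::from_f32_px` and `to_f32_px`. A small round-trip sketch using the `app_units` and `euclid` crates the way this file does (crate versions matter: newer `euclid` releases expose the untyped `Rect`/`Point2D`/`Size2D` aliases under `euclid::default`):

    extern crate app_units;
    extern crate euclid;

    use app_units::Au;
    use euclid::{Point2D, Rect, Size2D};

    fn f32_rect_to_au_rect(rect: Rect<f32>) -> Rect<Au> {
        Rect::new(
            Point2D::new(
                Au::from_f32_px(rect.origin.x),
                Au::from_f32_px(rect.origin.y),
            ),
            Size2D::new(
                Au::from_f32_px(rect.size.width),
                Au::from_f32_px(rect.size.height),
            ),
        )
    }

    fn main() {
        let rect = Rect::new(Point2D::new(1.5f32, 2.0), Size2D::new(10.0, 20.0));
        let au_rect = f32_rect_to_au_rect(rect);
        // Au is fixed point, so a half-pixel origin survives the conversion exactly.
        assert_eq!(au_rect.origin.x.to_f32_px(), 1.5);
    }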


@@ -32,9 +32,9 @@ use unicode_script::Script;
 use webrender_api;
 macro_rules! ot_tag {
-    ($t1:expr, $t2:expr, $t3:expr, $t4:expr) => (
+    ($t1:expr, $t2:expr, $t3:expr, $t4:expr) => {
         (($t1 as u32) << 24) | (($t2 as u32) << 16) | (($t3 as u32) << 8) | ($t4 as u32)
-    );
+    };
 }
 pub const GPOS: u32 = ot_tag!('G', 'P', 'O', 'S');
@@ -87,10 +87,12 @@ trait FontTableTagConversions {
 impl FontTableTagConversions for FontTableTag {
     fn tag_to_str(&self) -> String {
-        let bytes = [(self >> 24) as u8,
-                     (self >> 16) as u8,
-                     (self >> 8) as u8,
-                     (self >> 0) as u8];
+        let bytes = [
+            (self >> 24) as u8,
+            (self >> 16) as u8,
+            (self >> 8) as u8,
+            (self >> 0) as u8,
+        ];
         str::from_utf8(&bytes).unwrap().to_owned()
     }
 }
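`ot_tag!` packs four ASCII characters into a big-endian `u32` OpenType table tag, and `tag_to_str` above reverses it by splitting the value back into bytes. A standalone sketch of the same round trip:

    use std::str;

    /// Pack four ASCII characters into an OpenType tag, most significant byte first.
    fn ot_tag(t1: char, t2: char, t3: char, t4: char) -> u32 {
        ((t1 as u32) << 24) | ((t2 as u32) << 16) | ((t3 as u32) << 8) | (t4 as u32)
    }

    /// Unpack a tag back into its four-character ASCII form.
    fn tag_to_str(tag: u32) -> String {
        let bytes = [
            (tag >> 24) as u8,
            (tag >> 16) as u8,
            (tag >> 8) as u8,
            tag as u8,
        ];
        str::from_utf8(&bytes).unwrap().to_owned()
    }

    fn main() {
        let gpos = ot_tag('G', 'P', 'O', 'S');
        assert_eq!(gpos, 0x4750_4F53); // 'G' 'P' 'O' 'S' as big-endian bytes
        assert_eq!(tag_to_str(gpos), "GPOS");
    }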
@ -149,10 +151,12 @@ pub struct Font {
} }
impl Font { impl Font {
pub fn new(handle: FontHandle, pub fn new(
handle: FontHandle,
descriptor: FontDescriptor, descriptor: FontDescriptor,
actual_pt_size: Au, actual_pt_size: Au,
font_key: webrender_api::FontInstanceKey) -> Font { font_key: webrender_api::FontInstanceKey,
) -> Font {
let metrics = handle.metrics(); let metrics = handle.metrics();
Font { Font {
@ -218,11 +222,19 @@ impl Font {
text: text.to_owned(), text: text.to_owned(),
options: *options, options: *options,
}; };
let result = self.shape_cache.borrow_mut().entry(lookup_key).or_insert_with(|| { let result = self
.shape_cache
.borrow_mut()
.entry(lookup_key)
.or_insert_with(|| {
let start_time = time::precise_time_ns(); let start_time = time::precise_time_ns();
let mut glyphs = GlyphStore::new(text.len(), let mut glyphs = GlyphStore::new(
options.flags.contains(ShapingFlags::IS_WHITESPACE_SHAPING_FLAG), text.len(),
options.flags.contains(ShapingFlags::RTL_FLAG)); options
.flags
.contains(ShapingFlags::IS_WHITESPACE_SHAPING_FLAG),
options.flags.contains(ShapingFlags::RTL_FLAG),
);
if self.can_do_fast_shaping(text, options) { if self.can_do_fast_shaping(text, options) {
debug!("shape_text: Using ASCII fast path."); debug!("shape_text: Using ASCII fast path.");
@ -232,12 +244,15 @@ impl Font {
if shaper.is_none() { if shaper.is_none() {
shaper = Some(Shaper::new(this)); shaper = Some(Shaper::new(this));
} }
shaper.as_ref().unwrap().shape_text(text, options, &mut glyphs); shaper
.as_ref()
.unwrap()
.shape_text(text, options, &mut glyphs);
} }
let end_time = time::precise_time_ns(); let end_time = time::precise_time_ns();
TEXT_SHAPING_PERFORMANCE_COUNTER.fetch_add((end_time - start_time) as usize, TEXT_SHAPING_PERFORMANCE_COUNTER
Ordering::Relaxed); .fetch_add((end_time - start_time) as usize, Ordering::Relaxed);
Arc::new(glyphs) Arc::new(glyphs)
}).clone(); }).clone();
self.shaper = shaper; self.shaper = shaper;
@ -285,12 +300,21 @@ impl Font {
pub fn table_for_tag(&self, tag: FontTableTag) -> Option<FontTable> { pub fn table_for_tag(&self, tag: FontTableTag) -> Option<FontTable> {
let result = self.handle.table_for_tag(tag); let result = self.handle.table_for_tag(tag);
let status = if result.is_some() { "Found" } else { "Didn't find" }; let status = if result.is_some() {
"Found"
} else {
"Didn't find"
};
debug!("{} font table[{}] with family={}, face={}", debug!(
status, tag.tag_to_str(), "{} font table[{}] with family={}, face={}",
self.handle.family_name().unwrap_or("unavailable".to_owned()), status,
self.handle.face_name().unwrap_or("unavailable".to_owned())); tag.tag_to_str(),
self.handle
.family_name()
.unwrap_or("unavailable".to_owned()),
self.handle.face_name().unwrap_or("unavailable".to_owned())
);
result result
} }
@@ -308,16 +332,19 @@ impl Font {
         self.glyph_index(codepoint).is_some()
     }
-    pub fn glyph_h_kerning(&self, first_glyph: GlyphId, second_glyph: GlyphId)
-                           -> FractionalPixel {
+    pub fn glyph_h_kerning(&self, first_glyph: GlyphId, second_glyph: GlyphId) -> FractionalPixel {
         self.handle.glyph_h_kerning(first_glyph, second_glyph)
     }
     pub fn glyph_h_advance(&self, glyph: GlyphId) -> FractionalPixel {
-        *self.glyph_advance_cache.borrow_mut().entry(glyph).or_insert_with(|| {
+        *self
+            .glyph_advance_cache
+            .borrow_mut()
+            .entry(glyph)
+            .or_insert_with(|| {
             match self.handle.glyph_h_advance(glyph) {
                 Some(adv) => adv,
-                None => 10f64 as FractionalPixel // FIXME: Need fallback strategy
+                None => 10f64 as FractionalPixel, // FIXME: Need fallback strategy
             }
         })
     }
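`glyph_h_advance` memoizes per-glyph advances in a `RefCell<HashMap>` keyed by glyph id, and falls back to a default advance when the font handle has no metric. A generic sketch of that interior-mutability cache, with a toy `slow_advance` standing in for the font handle call:

    use std::cell::RefCell;
    use std::collections::HashMap;

    struct AdvanceCache {
        cache: RefCell<HashMap<u32, f64>>,
    }

    impl AdvanceCache {
        fn new() -> AdvanceCache {
            AdvanceCache {
                cache: RefCell::new(HashMap::new()),
            }
        }

        /// Stand-in for the expensive per-glyph metric lookup.
        fn slow_advance(&self, glyph: u32) -> Option<f64> {
            if glyph == 0 {
                None
            } else {
                Some(glyph as f64 * 0.5)
            }
        }

        /// Cached lookup; `&self` is enough because the map sits behind a RefCell.
        fn advance(&self, glyph: u32) -> f64 {
            *self
                .cache
                .borrow_mut()
                .entry(glyph)
                .or_insert_with(|| self.slow_advance(glyph).unwrap_or(10.0))
        }
    }

    fn main() {
        let cache = AdvanceCache::new();
        assert_eq!(cache.advance(4), 2.0);
        assert_eq!(cache.advance(4), 2.0); // second call is served from the cache
        assert_eq!(cache.advance(0), 10.0); // missing metric falls back to a default
    }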
@ -339,8 +366,10 @@ impl FontGroup {
pub fn new(style: &FontStyleStruct) -> FontGroup { pub fn new(style: &FontStyleStruct) -> FontGroup {
let descriptor = FontDescriptor::from(style); let descriptor = FontDescriptor::from(style);
let families = let families = style
style.font_family.0.iter() .font_family
.0
.iter()
.map(|family| FontGroupFamily::new(descriptor.clone(), &family)) .map(|family| FontGroupFamily::new(descriptor.clone(), &family))
.collect(); .collect();
@@ -358,25 +387,25 @@ impl FontGroup {
     pub fn find_by_codepoint<S: FontSource>(
         &mut self,
         mut font_context: &mut FontContext<S>,
-        codepoint: char
+        codepoint: char,
     ) -> Option<FontRef> {
         let has_glyph = |font: &FontRef| font.borrow().has_glyph_for(codepoint);
         let font = self.find(&mut font_context, |font| has_glyph(font));
         if font.is_some() {
-            return font
+            return font;
         }
         if let Some(ref fallback) = self.last_matching_fallback {
             if has_glyph(&fallback) {
-                return self.last_matching_fallback.clone()
+                return self.last_matching_fallback.clone();
             }
         }
         let font = self.find_fallback(&mut font_context, Some(codepoint), has_glyph);
         if font.is_some() {
             self.last_matching_fallback = font.clone();
-            return font
+            return font;
         }
         self.first(&mut font_context)
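`find_by_codepoint` tries the author-specified families first, then the cached `last_matching_fallback`, then a fresh fallback search, and finally the first font in the group. A simplified, self-contained sketch of that lookup order, with strings standing in for fonts and a `contains` check standing in for glyph coverage:

    /// Pick the first candidate containing `needle`, remembering the last good fallback.
    struct Picker {
        preferred: Vec<String>,
        fallbacks: Vec<String>,
        last_matching_fallback: Option<String>,
    }

    impl Picker {
        fn find_by_codepoint(&mut self, needle: char) -> Option<String> {
            let has_glyph = |s: &String| s.contains(needle);

            if let Some(hit) = self.preferred.iter().find(|s| has_glyph(*s)) {
                return Some(hit.clone());
            }
            if let Some(ref fallback) = self.last_matching_fallback {
                if has_glyph(fallback) {
                    return self.last_matching_fallback.clone();
                }
            }
            if let Some(hit) = self.fallbacks.iter().find(|s| has_glyph(*s)) {
                self.last_matching_fallback = Some(hit.clone());
                return Some(hit.clone());
            }
            self.preferred.first().cloned()
        }
    }

    fn main() {
        let mut picker = Picker {
            preferred: vec!["abc".to_owned()],
            fallbacks: vec!["xyz".to_owned()],
            last_matching_fallback: None,
        };
        assert_eq!(picker.find_by_codepoint('b'), Some("abc".to_owned()));
        assert_eq!(picker.find_by_codepoint('z'), Some("xyz".to_owned()));
        // The successful fallback is now cached for the next lookup.
        assert_eq!(picker.last_matching_fallback, Some("xyz".to_owned()));
    }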
@ -385,7 +414,7 @@ impl FontGroup {
/// Find the first available font in the group, or the first available fallback font. /// Find the first available font in the group, or the first available fallback font.
pub fn first<S: FontSource>( pub fn first<S: FontSource>(
&mut self, &mut self,
mut font_context: &mut FontContext<S> mut font_context: &mut FontContext<S>,
) -> Option<FontRef> { ) -> Option<FontRef> {
self.find(&mut font_context, |_| true) self.find(&mut font_context, |_| true)
.or_else(|| self.find_fallback(&mut font_context, None, |_| true)) .or_else(|| self.find_fallback(&mut font_context, None, |_| true))
@ -393,16 +422,13 @@ impl FontGroup {
/// Find a font which returns true for `predicate`. This method mutates because we may need to /// Find a font which returns true for `predicate`. This method mutates because we may need to
/// load new font data in the process of finding a suitable font. /// load new font data in the process of finding a suitable font.
fn find<S, P>( fn find<S, P>(&mut self, mut font_context: &mut FontContext<S>, predicate: P) -> Option<FontRef>
&mut self,
mut font_context: &mut FontContext<S>,
predicate: P,
) -> Option<FontRef>
where where
S: FontSource, S: FontSource,
P: FnMut(&FontRef) -> bool, P: FnMut(&FontRef) -> bool,
{ {
self.families.iter_mut() self.families
.iter_mut()
.filter_map(|family| family.font(&mut font_context)) .filter_map(|family| family.font(&mut font_context))
.find(predicate) .find(predicate)
} }
@ -422,15 +448,9 @@ impl FontGroup {
P: FnMut(&FontRef) -> bool, P: FnMut(&FontRef) -> bool,
{ {
iter::once(FontFamilyDescriptor::default()) iter::once(FontFamilyDescriptor::default())
.chain( .chain(fallback_font_families(codepoint).into_iter().map(|family| {
fallback_font_families(codepoint).into_iter().map(|family| { FontFamilyDescriptor::new(FontFamilyName::from(family), FontSearchScope::Local)
FontFamilyDescriptor::new( })).filter_map(|family| font_context.font(&self.descriptor, &family))
FontFamilyName::from(family),
FontSearchScope::Local,
)
})
)
.filter_map(|family| font_context.font(&self.descriptor, &family))
.find(predicate) .find(predicate)
} }
} }
@ -448,10 +468,8 @@ struct FontGroupFamily {
impl FontGroupFamily { impl FontGroupFamily {
fn new(font_descriptor: FontDescriptor, family: &SingleFontFamily) -> FontGroupFamily { fn new(font_descriptor: FontDescriptor, family: &SingleFontFamily) -> FontGroupFamily {
let family_descriptor = FontFamilyDescriptor::new( let family_descriptor =
FontFamilyName::from(family), FontFamilyDescriptor::new(FontFamilyName::from(family), FontSearchScope::Any);
FontSearchScope::Any
);
FontGroupFamily { FontGroupFamily {
font_descriptor, font_descriptor,
@ -481,13 +499,15 @@ pub struct RunMetrics {
pub descent: Au, // nonzero pub descent: Au, // nonzero
// this bounding box is relative to the left origin baseline. // this bounding box is relative to the left origin baseline.
// so, bounding_box.position.y = -ascent // so, bounding_box.position.y = -ascent
pub bounding_box: Rect<Au> pub bounding_box: Rect<Au>,
} }
impl RunMetrics { impl RunMetrics {
pub fn new(advance: Au, ascent: Au, descent: Au) -> RunMetrics { pub fn new(advance: Au, ascent: Au, descent: Au) -> RunMetrics {
let bounds = Rect::new(Point2D::new(Au(0), -ascent), let bounds = Rect::new(
Size2D::new(advance, ascent + descent)); Point2D::new(Au(0), -ascent),
Size2D::new(advance, ascent + descent),
);
// TODO(Issue #125): support loose and tight bounding boxes; using the // TODO(Issue #125): support loose and tight bounding boxes; using the
// ascent+descent and advance is sometimes too generous and // ascent+descent and advance is sometimes too generous and
@ -540,11 +560,13 @@ impl FontFamilyName {
impl<'a> From<&'a SingleFontFamily> for FontFamilyName { impl<'a> From<&'a SingleFontFamily> for FontFamilyName {
fn from(other: &'a SingleFontFamily) -> FontFamilyName { fn from(other: &'a SingleFontFamily) -> FontFamilyName {
match *other { match *other {
SingleFontFamily::FamilyName(ref family_name) => SingleFontFamily::FamilyName(ref family_name) => {
FontFamilyName::Specific(family_name.name.clone()), FontFamilyName::Specific(family_name.name.clone())
},
SingleFontFamily::Generic(ref generic_name) => SingleFontFamily::Generic(ref generic_name) => {
FontFamilyName::Generic(generic_name.clone()), FontFamilyName::Generic(generic_name.clone())
},
} }
} }
} }


@ -40,14 +40,15 @@ pub struct FontTemplateInfo {
impl FontTemplates { impl FontTemplates {
pub fn new() -> FontTemplates { pub fn new() -> FontTemplates {
FontTemplates { FontTemplates { templates: vec![] }
templates: vec!(),
}
} }
/// Find a font in this family that matches a given descriptor. /// Find a font in this family that matches a given descriptor.
pub fn find_font_for_style(&mut self, desc: &FontTemplateDescriptor, fctx: &FontContextHandle) pub fn find_font_for_style(
-> Option<Arc<FontTemplateData>> { &mut self,
desc: &FontTemplateDescriptor,
fctx: &FontContextHandle,
) -> Option<Arc<FontTemplateData>> {
// TODO(Issue #189): optimize lookup for // TODO(Issue #189): optimize lookup for
// regular/bold/italic/bolditalic with fixed offsets and a // regular/bold/italic/bolditalic with fixed offsets and a
// static decision table for fallback between these values. // static decision table for fallback between these values.
@ -63,7 +64,8 @@ impl FontTemplates {
let (mut best_template_data, mut best_distance) = (None, f32::MAX); let (mut best_template_data, mut best_distance) = (None, f32::MAX);
for template in &mut self.templates { for template in &mut self.templates {
if let Some((template_data, distance)) = if let Some((template_data, distance)) =
template.data_for_approximate_descriptor(fctx, desc) { template.data_for_approximate_descriptor(fctx, desc)
{
if distance < best_distance { if distance < best_distance {
best_template_data = Some(template_data); best_template_data = Some(template_data);
best_distance = distance best_distance = distance
@ -71,7 +73,7 @@ impl FontTemplates {
} }
} }
if best_template_data.is_some() { if best_template_data.is_some() {
return best_template_data return best_template_data;
} }
// If a request is made for a font family that exists, // If a request is made for a font family that exists,
@ -103,8 +105,16 @@ impl FontTemplates {
/// Commands that the FontContext sends to the font cache thread. /// Commands that the FontContext sends to the font cache thread.
#[derive(Debug, Deserialize, Serialize)] #[derive(Debug, Deserialize, Serialize)]
pub enum Command { pub enum Command {
GetFontTemplate(FontTemplateDescriptor, FontFamilyDescriptor, IpcSender<Reply>), GetFontTemplate(
GetFontInstance(webrender_api::FontKey, Au, IpcSender<webrender_api::FontInstanceKey>), FontTemplateDescriptor,
FontFamilyDescriptor,
IpcSender<Reply>,
),
GetFontInstance(
webrender_api::FontKey,
Au,
IpcSender<webrender_api::FontInstanceKey>,
),
AddWebFont(LowercaseString, EffectiveSources, IpcSender<()>), AddWebFont(LowercaseString, EffectiveSources, IpcSender<()>),
AddDownloadedWebFont(LowercaseString, ServoUrl, Vec<u8>, IpcSender<()>), AddDownloadedWebFont(LowercaseString, ServoUrl, Vec<u8>, IpcSender<()>),
Exit(IpcSender<()>), Exit(IpcSender<()>),
@ -148,7 +158,7 @@ fn populate_generic_fonts() -> HashMap<FontFamilyName, LowercaseString> {
) { ) {
let family_name = match system_default_family(generic_name) { let family_name = match system_default_family(generic_name) {
Some(system_default) => LowercaseString::new(&system_default), Some(system_default) => LowercaseString::new(&system_default),
None => LowercaseString::new(mapped_name) None => LowercaseString::new(mapped_name),
}; };
let generic_name = FontFamilyName::Generic(Atom::from(generic_name)); let generic_name = FontFamilyName::Generic(Atom::from(generic_name));
@ -156,7 +166,6 @@ fn populate_generic_fonts() -> HashMap<FontFamilyName, LowercaseString> {
generic_fonts.insert(generic_name, family_name); generic_fonts.insert(generic_name, family_name);
} }
generic_fonts generic_fonts
} }
@ -167,50 +176,50 @@ impl FontCache {
match msg { match msg {
Command::GetFontTemplate(template_descriptor, family_descriptor, result) => { Command::GetFontTemplate(template_descriptor, family_descriptor, result) => {
let maybe_font_template = self.find_font_template(&template_descriptor, &family_descriptor); let maybe_font_template =
self.find_font_template(&template_descriptor, &family_descriptor);
let _ = result.send(Reply::GetFontTemplateReply(maybe_font_template)); let _ = result.send(Reply::GetFontTemplateReply(maybe_font_template));
} },
Command::GetFontInstance(font_key, size, result) => { Command::GetFontInstance(font_key, size, result) => {
let webrender_api = &self.webrender_api; let webrender_api = &self.webrender_api;
let instance_key = *self.font_instances let instance_key =
*self
.font_instances
.entry((font_key, size)) .entry((font_key, size))
.or_insert_with(|| { .or_insert_with(|| {
let key = webrender_api.generate_font_instance_key(); let key = webrender_api.generate_font_instance_key();
let mut txn = webrender_api::Transaction::new(); let mut txn = webrender_api::Transaction::new();
txn.add_font_instance(key, txn.add_font_instance(key, font_key, size, None, None, Vec::new());
font_key,
size,
None,
None,
Vec::new());
webrender_api.update_resources(txn.resource_updates); webrender_api.update_resources(txn.resource_updates);
key key
}); });
let _ = result.send(instance_key); let _ = result.send(instance_key);
} },
Command::AddWebFont(family_name, sources, result) => { Command::AddWebFont(family_name, sources, result) => {
self.handle_add_web_font(family_name, sources, result); self.handle_add_web_font(family_name, sources, result);
} },
Command::AddDownloadedWebFont(family_name, url, bytes, result) => { Command::AddDownloadedWebFont(family_name, url, bytes, result) => {
let templates = &mut self.web_families.get_mut(&family_name).unwrap(); let templates = &mut self.web_families.get_mut(&family_name).unwrap();
templates.add_template(Atom::from(url.to_string()), Some(bytes)); templates.add_template(Atom::from(url.to_string()), Some(bytes));
drop(result.send(())); drop(result.send(()));
} },
Command::Ping => (), Command::Ping => (),
Command::Exit(result) => { Command::Exit(result) => {
let _ = result.send(()); let _ = result.send(());
break; break;
} },
} }
} }
} }
fn handle_add_web_font(&mut self, fn handle_add_web_font(
&mut self,
family_name: LowercaseString, family_name: LowercaseString,
mut sources: EffectiveSources, mut sources: EffectiveSources,
sender: IpcSender<()>) { sender: IpcSender<()>,
) {
let src = if let Some(src) = sources.next() { let src = if let Some(src) = sources.next() {
src src
} else { } else {
@ -236,7 +245,7 @@ impl FontCache {
destination: Destination::Font, destination: Destination::Font,
// TODO: Add a proper origin - Can't import GlobalScope from gfx // TODO: Add a proper origin - Can't import GlobalScope from gfx
// We can leave origin to be set by default // We can leave origin to be set by default
.. RequestInit::default() ..RequestInit::default()
}; };
let channel_to_self = self.channel_to_self.clone(); let channel_to_self = self.channel_to_self.clone();
@ -248,19 +257,27 @@ impl FontCache {
FetchResponseMsg::ProcessRequestBody | FetchResponseMsg::ProcessRequestBody |
FetchResponseMsg::ProcessRequestEOF => (), FetchResponseMsg::ProcessRequestEOF => (),
FetchResponseMsg::ProcessResponse(meta_result) => { FetchResponseMsg::ProcessResponse(meta_result) => {
trace!("@font-face {} metadata ok={:?}", family_name, meta_result.is_ok()); trace!(
"@font-face {} metadata ok={:?}",
family_name,
meta_result.is_ok()
);
*response_valid.lock().unwrap() = meta_result.is_ok(); *response_valid.lock().unwrap() = meta_result.is_ok();
} },
FetchResponseMsg::ProcessResponseChunk(new_bytes) => { FetchResponseMsg::ProcessResponseChunk(new_bytes) => {
trace!("@font-face {} chunk={:?}", family_name, new_bytes); trace!("@font-face {} chunk={:?}", family_name, new_bytes);
if *response_valid.lock().unwrap() { if *response_valid.lock().unwrap() {
bytes.lock().unwrap().extend(new_bytes.into_iter()) bytes.lock().unwrap().extend(new_bytes.into_iter())
} }
} },
                        FetchResponseMsg::ProcessResponseEOF(response) => {
                            trace!("@font-face {} EOF={:?}", family_name, response);
                            if response.is_err() || !*response_valid.lock().unwrap() {
-                               let msg = Command::AddWebFont(family_name.clone(), sources.clone(), sender.clone());
+                               let msg = Command::AddWebFont(
+                                   family_name.clone(),
+                                   sources.clone(),
+                                   sender.clone(),
+                               );
                                channel_to_self.send(msg).unwrap();
                                return;
                            }
@@ -270,23 +287,31 @@ impl FontCache {
                                Ok(san) => san,
                                Err(_) => {
                                    // FIXME(servo/fontsan#1): get an error message
-                                   debug!("Sanitiser rejected web font: \
-                                          family={} url={:?}", family_name, url);
-                                   let msg = Command::AddWebFont(family_name.clone(), sources.clone(), sender.clone());
+                                   debug!(
+                                       "Sanitiser rejected web font: \
+                                        family={} url={:?}",
+                                       family_name, url
+                                   );
+                                   let msg = Command::AddWebFont(
+                                       family_name.clone(),
+                                       sources.clone(),
+                                       sender.clone(),
+                                   );
                                    channel_to_self.send(msg).unwrap();
                                    return;
                                },
                            };
-                           let command =
-                               Command::AddDownloadedWebFont(family_name.clone(),
-                                                             url.clone(),
-                                                             bytes,
-                                                             sender.clone());
+                           let command = Command::AddDownloadedWebFont(
+                               family_name.clone(),
+                               url.clone(),
+                               bytes,
+                               sender.clone(),
+                           );
                            channel_to_self.send(command).unwrap();
-                       }
+                       },
                    }
                });
-           }
+           },
            Source::Local(ref font) => {
                let font_face_name = LowercaseString::new(&font.name);
                let templates = &mut self.web_families.get_mut(&family_name).unwrap();
@@ -301,7 +326,7 @@ impl FontCache {
                    let msg = Command::AddWebFont(family_name, sources, sender);
                    self.channel_to_self.send(msg).unwrap();
                }
-           }
+           },
        }
    }
@@ -319,7 +344,7 @@ impl FontCache {
    fn transform_family(&self, family_name: &FontFamilyName) -> LowercaseString {
        match self.generic_fonts.get(family_name) {
            None => LowercaseString::from(family_name),
-           Some(mapped_family) => (*mapped_family).clone()
+           Some(mapped_family) => (*mapped_family).clone(),
        }
    }
@@ -347,7 +372,10 @@ impl FontCache {
            s.find_font_for_style(template_descriptor, &self.font_context)
        } else {
-           debug!("FontList: Couldn't find font family with name={}", &*family_name);
+           debug!(
+               "FontList: Couldn't find font family with name={}",
+               &*family_name
+           );
            None
        }
    }
@@ -371,7 +399,9 @@ impl FontCache {
        let webrender_api = &self.webrender_api;
        let webrender_fonts = &mut self.webrender_fonts;
-       let font_key = *webrender_fonts.entry(template.identifier.clone()).or_insert_with(|| {
+       let font_key = *webrender_fonts
+           .entry(template.identifier.clone())
+           .or_insert_with(|| {
                let font_key = webrender_api.generate_font_key();
                let mut txn = webrender_api::Transaction::new();
                match (template.bytes_if_in_memory(), template.native_font()) {
@@ -395,14 +425,15 @@ impl FontCache {
        family_descriptor: &FontFamilyDescriptor,
    ) -> Option<FontTemplateInfo> {
        match family_descriptor.scope {
-           FontSearchScope::Any => {
-               self.find_font_in_web_family(&template_descriptor, &family_descriptor.name)
-                   .or_else(|| self.find_font_in_local_family(&template_descriptor, &family_descriptor.name))
-           }
+           FontSearchScope::Any => self
+               .find_font_in_web_family(&template_descriptor, &family_descriptor.name)
+               .or_else(|| {
+                   self.find_font_in_local_family(&template_descriptor, &family_descriptor.name)
+               }),
            FontSearchScope::Local => {
                self.find_font_in_local_family(&template_descriptor, &family_descriptor.name)
-           }
+           },
        }.map(|t| self.get_font_template_info(t))
    }
}
@@ -415,12 +446,16 @@ pub struct FontCacheThread {
}

impl FontCacheThread {
-   pub fn new(core_resource_thread: CoreResourceThread,
-              webrender_api: webrender_api::RenderApi) -> FontCacheThread {
+   pub fn new(
+       core_resource_thread: CoreResourceThread,
+       webrender_api: webrender_api::RenderApi,
+   ) -> FontCacheThread {
        let (chan, port) = ipc::channel().unwrap();
        let channel_to_self = chan.clone();
-       thread::Builder::new().name("FontCacheThread".to_owned()).spawn(move || {
+       thread::Builder::new()
+           .name("FontCacheThread".to_owned())
+           .spawn(move || {
                // TODO: Allow users to specify these.
                let generic_fonts = populate_generic_fonts();
@@ -441,33 +476,52 @@ impl FontCacheThread {
                cache.run();
            }).expect("Thread spawning failed");

-       FontCacheThread {
-           chan: chan,
-       }
+       FontCacheThread { chan: chan }
    }

-   pub fn add_web_font(&self, family: FamilyName, sources: EffectiveSources, sender: IpcSender<()>) {
-       self.chan.send(Command::AddWebFont(LowercaseString::new(&family.name), sources, sender)).unwrap();
+   pub fn add_web_font(
+       &self,
+       family: FamilyName,
+       sources: EffectiveSources,
+       sender: IpcSender<()>,
+   ) {
+       self.chan
+           .send(Command::AddWebFont(
+               LowercaseString::new(&family.name),
+               sources,
+               sender,
+           )).unwrap();
    }

    pub fn exit(&self) {
        let (response_chan, response_port) = ipc::channel().unwrap();
-       self.chan.send(Command::Exit(response_chan)).expect("Couldn't send FontCacheThread exit message");
-       response_port.recv().expect("Couldn't receive FontCacheThread reply");
+       self.chan
+           .send(Command::Exit(response_chan))
+           .expect("Couldn't send FontCacheThread exit message");
+       response_port
+           .recv()
+           .expect("Couldn't receive FontCacheThread reply");
    }
}

impl FontSource for FontCacheThread {
-   fn get_font_instance(&mut self, key: webrender_api::FontKey, size: Au) -> webrender_api::FontInstanceKey {
-       let (response_chan, response_port) =
-           ipc::channel().expect("failed to create IPC channel");
-       self.chan.send(Command::GetFontInstance(key, size, response_chan))
+   fn get_font_instance(
+       &mut self,
+       key: webrender_api::FontKey,
+       size: Au,
+   ) -> webrender_api::FontInstanceKey {
+       let (response_chan, response_port) = ipc::channel().expect("failed to create IPC channel");
+       self.chan
+           .send(Command::GetFontInstance(key, size, response_chan))
            .expect("failed to send message to font cache thread");
        let instance_key = response_port.recv();
        if instance_key.is_err() {
            let font_thread_has_closed = self.chan.send(Command::Ping).is_err();
-           assert!(font_thread_has_closed, "Failed to receive a response from live font cache");
+           assert!(
+               font_thread_has_closed,
+               "Failed to receive a response from live font cache"
+           );
            panic!("Font cache thread has already exited.");
        }
        instance_key.unwrap()
@@ -478,23 +532,27 @@ impl FontSource for FontCacheThread {
        template_descriptor: FontTemplateDescriptor,
        family_descriptor: FontFamilyDescriptor,
    ) -> Option<FontTemplateInfo> {
-       let (response_chan, response_port) =
-           ipc::channel().expect("failed to create IPC channel");
-       self.chan.send(Command::GetFontTemplate(template_descriptor, family_descriptor, response_chan))
-           .expect("failed to send message to font cache thread");
+       let (response_chan, response_port) = ipc::channel().expect("failed to create IPC channel");
+       self.chan
+           .send(Command::GetFontTemplate(
+               template_descriptor,
+               family_descriptor,
+               response_chan,
+           )).expect("failed to send message to font cache thread");
        let reply = response_port.recv();
        if reply.is_err() {
            let font_thread_has_closed = self.chan.send(Command::Ping).is_err();
-           assert!(font_thread_has_closed, "Failed to receive a response from live font cache");
+           assert!(
+               font_thread_has_closed,
+               "Failed to receive a response from live font cache"
+           );
            panic!("Font cache thread has already exited.");
        }
        match reply.unwrap() {
-           Reply::GetFontTemplateReply(data) => {
-               data
-           }
+           Reply::GetFontTemplateReply(data) => data,
        }
    }
}
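The FontCacheThread plumbing reformatted above all follows one request/reply pattern: build a one-shot channel, send a Command that carries the reply sender, block on the receiver, and use Command::Ping to tell a cache thread that has exited apart from a live one that failed to answer. The sketch below is only an illustration of that shape, not the real implementation — it substitutes std::sync::mpsc for ipc-channel, and the Command variants and values are invented:

use std::sync::mpsc::{channel, Sender};
use std::thread;

// Invented stand-in for the real Command enum; only the shape matters here.
enum Command {
    Ping,
    GetAnswer(Sender<u32>),
    Exit(Sender<()>),
}

fn spawn_cache() -> Sender<Command> {
    let (chan, port) = channel();
    thread::Builder::new()
        .name("FontCacheThread".to_owned())
        .spawn(move || {
            for msg in port {
                match msg {
                    Command::Ping => {},
                    Command::GetAnswer(reply) => reply.send(42).unwrap(),
                    Command::Exit(reply) => {
                        reply.send(()).unwrap();
                        break;
                    },
                }
            }
        }).expect("Thread spawning failed");
    chan
}

fn get_answer(chan: &Sender<Command>) -> u32 {
    // The reply channel travels inside the request, as in the code above.
    let (reply_chan, reply_port) = channel();
    chan.send(Command::GetAnswer(reply_chan))
        .expect("failed to send message to cache thread");
    match reply_port.recv() {
        Ok(answer) => answer,
        Err(_) => {
            // Distinguish "thread exited" from "thread alive but not replying".
            let thread_has_closed = chan.send(Command::Ping).is_err();
            assert!(thread_has_closed, "no response from a live cache thread");
            panic!("cache thread has already exited");
        },
    }
}

fn main() {
    let chan = spawn_cache();
    println!("answer = {}", get_answer(&chan));
    let (done_chan, done_port) = channel();
    chan.send(Command::Exit(done_chan)).unwrap();
    done_port.recv().unwrap();
}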
@@ -28,7 +28,11 @@ static SMALL_CAPS_SCALE_FACTOR: f32 = 0.8; // Matches FireFox (see gfxFont.
static FONT_CACHE_EPOCH: AtomicUsize = ATOMIC_USIZE_INIT;

pub trait FontSource {
-   fn get_font_instance(&mut self, key: webrender_api::FontKey, size: Au) -> webrender_api::FontInstanceKey;
+   fn get_font_instance(
+       &mut self,
+       key: webrender_api::FontKey,
+       size: Au,
+   ) -> webrender_api::FontInstanceKey;

    fn font_template(
        &mut self,
@@ -74,7 +78,7 @@ impl<S: FontSource> FontContext<S> {
    fn expire_font_caches_if_necessary(&mut self) {
        let current_epoch = FONT_CACHE_EPOCH.load(Ordering::SeqCst);
        if current_epoch == self.epoch {
-           return
+           return;
        }

        self.font_cache.clear();
@@ -95,7 +99,7 @@ impl<S: FontSource> FontContext<S> {
        };

        if let Some(ref font_group) = self.font_group_cache.get(&cache_key) {
-           return (*font_group).clone()
+           return (*font_group).clone();
        }

        let font_group = Rc::new(RefCell::new(FontGroup::new(&cache_key.style)));
@@ -115,17 +119,21 @@ impl<S: FontSource> FontContext<S> {
            family_descriptor: family_descriptor.clone(),
        };

-       self.font_cache.get(&cache_key).map(|v| v.clone()).unwrap_or_else(|| {
+       self.font_cache
+           .get(&cache_key)
+           .map(|v| v.clone())
+           .unwrap_or_else(|| {
                debug!(
                    "FontContext::font cache miss for font_descriptor={:?} family_descriptor={:?}",
-                   font_descriptor,
-                   family_descriptor
+                   font_descriptor, family_descriptor
                );

-               let font =
-                   self.font_template(&font_descriptor.template_descriptor, family_descriptor)
-                       .and_then(|template_info| self.create_font(template_info, font_descriptor.to_owned()).ok())
-                       .map(|font| Rc::new(RefCell::new(font)));
+               let font = self
+                   .font_template(&font_descriptor.template_descriptor, family_descriptor)
+                   .and_then(|template_info| {
+                       self.create_font(template_info, font_descriptor.to_owned())
+                           .ok()
+                   }).map(|font| Rc::new(RefCell::new(font)));

                self.font_cache.insert(cache_key, font.clone());
                font
@@ -135,7 +143,7 @@ impl<S: FontSource> FontContext<S> {
    fn font_template(
        &mut self,
        template_descriptor: &FontTemplateDescriptor,
-       family_descriptor: &FontFamilyDescriptor
+       family_descriptor: &FontFamilyDescriptor,
    ) -> Option<FontTemplateInfo> {
        let cache_key = FontTemplateCacheKey {
            template_descriptor: template_descriptor.clone(),
@@ -164,7 +172,7 @@ impl<S: FontSource> FontContext<S> {
    fn create_font(
        &mut self,
        info: FontTemplateInfo,
-       descriptor: FontDescriptor
+       descriptor: FontDescriptor,
    ) -> Result<Font, ()> {
        // TODO: (Bug #3463): Currently we only support fake small-caps
        // painting. We should also support true small-caps (where the
@@ -177,11 +185,18 @@ impl<S: FontSource> FontContext<S> {
        let handle = FontHandle::new_from_template(
            &self.platform_handle,
            info.font_template,
-           Some(actual_pt_size)
+           Some(actual_pt_size),
        )?;

-       let font_instance_key = self.font_source.get_font_instance(info.font_key, actual_pt_size);
-       Ok(Font::new(handle, descriptor.to_owned(), actual_pt_size, font_instance_key))
+       let font_instance_key = self
+           .font_source
+           .get_font_instance(info.font_key, actual_pt_size);
+       Ok(Font::new(
+           handle,
+           descriptor.to_owned(),
+           actual_pt_size,
+           font_instance_key,
+       ))
    }
}
@@ -219,7 +234,10 @@ impl PartialEq for FontGroupCacheKey {
impl Eq for FontGroupCacheKey {}

impl Hash for FontGroupCacheKey {
-   fn hash<H>(&self, hasher: &mut H) where H: Hasher {
+   fn hash<H>(&self, hasher: &mut H)
+   where
+       H: Hasher,
+   {
        self.style.hash.hash(hasher)
    }
}
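One detail worth noting in the FontContext::font path reformatted above: the cache stores the looked-up value as an Option, so a failed lookup is remembered and not retried until the cache epoch changes. A small, self-contained sketch of that idea with made-up key and value types (not the real FontCacheKey/Font types):

use std::collections::HashMap;

struct FontCache {
    // None is cached too, so repeated misses don't repeat the expensive lookup.
    fonts: HashMap<String, Option<u32>>,
}

impl FontCache {
    fn font(&mut self, key: &str) -> Option<u32> {
        if let Some(cached) = self.fonts.get(key) {
            return *cached;
        }
        let font = Self::expensive_lookup(key);
        self.fonts.insert(key.to_owned(), font);
        font
    }

    fn expensive_lookup(key: &str) -> Option<u32> {
        // Stand-in for template lookup plus font instantiation.
        if key == "serif" { Some(1) } else { None }
    }
}

fn main() {
    let mut cache = FontCache { fonts: HashMap::new() };
    assert_eq!(cache.font("serif"), Some(1));
    assert_eq!(cache.font("nope"), None);
    assert_eq!(cache.font("nope"), None); // second call hits the cached miss
}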
@@ -26,7 +26,6 @@ pub struct FontTemplateDescriptor {
    pub style: FontStyle,
}

-
/// FontTemplateDescriptor contains floats, which are not Eq because of NaN. However,
/// we know they will never be NaN, so we can manually implement Eq.
impl Eq for FontTemplateDescriptor {}
@@ -41,14 +40,9 @@ fn style_to_number(s: &FontStyle) -> f32 {
    }
}

impl FontTemplateDescriptor {
    #[inline]
-   pub fn new(
-       weight: FontWeight,
-       stretch: FontStretch,
-       style: FontStyle,
-   ) -> Self {
+   pub fn new(weight: FontWeight, stretch: FontStretch, style: FontStyle) -> Self {
        Self {
            weight,
            stretch,
@@ -138,7 +132,10 @@ impl FontTemplate {
    }

    /// Get the descriptor. Returns `None` when instantiating the data fails.
-   pub fn descriptor(&mut self, font_context: &FontContextHandle) -> Option<FontTemplateDescriptor> {
+   pub fn descriptor(
+       &mut self,
+       font_context: &FontContextHandle,
+   ) -> Option<FontTemplateDescriptor> {
        // The font template data can be unloaded when nothing is referencing
        // it (via the Weak reference to the Arc above). However, if we have
        // already loaded a font, store the style information about it separately,
@@ -147,18 +144,22 @@ impl FontTemplate {
        self.descriptor.or_else(|| {
            if self.instantiate(font_context).is_err() {
-               return None
+               return None;
            };

-           Some(self.descriptor.expect("Instantiation succeeded but no descriptor?"))
+           Some(
+               self.descriptor
+                   .expect("Instantiation succeeded but no descriptor?"),
+           )
        })
    }

    /// Get the data for creating a font if it matches a given descriptor.
-   pub fn data_for_descriptor(&mut self,
+   pub fn data_for_descriptor(
+       &mut self,
        fctx: &FontContextHandle,
-       requested_desc: &FontTemplateDescriptor)
-       -> Option<Arc<FontTemplateData>> {
+       requested_desc: &FontTemplateDescriptor,
+   ) -> Option<Arc<FontTemplateData>> {
        self.descriptor(&fctx).and_then(|descriptor| {
            if *requested_desc == descriptor {
                self.data().ok()
@@ -176,21 +177,20 @@ impl FontTemplate {
        requested_descriptor: &FontTemplateDescriptor,
    ) -> Option<(Arc<FontTemplateData>, f32)> {
        self.descriptor(&font_context).and_then(|descriptor| {
-           self.data().ok().map(|data| {
-               (data, descriptor.distance_from(requested_descriptor))
-           })
+           self.data()
+               .ok()
+               .map(|data| (data, descriptor.distance_from(requested_descriptor)))
        })
    }

    fn instantiate(&mut self, font_context: &FontContextHandle) -> Result<(), ()> {
        if !self.is_valid {
-           return Err(())
+           return Err(());
        }

        let data = self.data().map_err(|_| ())?;
-       let handle: Result<FontHandle, ()> = FontHandleMethods::new_from_template(font_context,
-                                                                                 data,
-                                                                                 None);
+       let handle: Result<FontHandle, ()> =
+           FontHandleMethods::new_from_template(font_context, data, None);
        self.is_valid = handle.is_ok();
        let handle = handle?;
        self.descriptor = Some(FontTemplateDescriptor::new(
@@ -220,7 +220,7 @@ impl FontTemplate {
        };

        if let Some(data) = maybe_data {
-           return Ok(data)
+           return Ok(data);
        }

        assert!(self.strong_ref.is_none());
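FontTemplate::descriptor, reformatted above, computes its result lazily: when no descriptor is cached, instantiate() loads the font once and fills the field as a side effect, and the cached value is returned from then on. A reduced, self-contained sketch of that Option::or_else shape follows; the Descriptor and Template types here are invented stand-ins, not the real gfx types:

#[derive(Clone, Copy, Debug, PartialEq)]
struct Descriptor {
    weight: u16,
}

struct Template {
    is_valid: bool,
    descriptor: Option<Descriptor>,
}

impl Template {
    /// Return the cached descriptor, instantiating the font at most once.
    fn descriptor(&mut self) -> Option<Descriptor> {
        self.descriptor.or_else(|| {
            if self.instantiate().is_err() {
                return None;
            }
            Some(
                self.descriptor
                    .expect("Instantiation succeeded but no descriptor?"),
            )
        })
    }

    fn instantiate(&mut self) -> Result<(), ()> {
        if !self.is_valid {
            return Err(());
        }
        // Stand-in for parsing the font data and extracting its style.
        self.descriptor = Some(Descriptor { weight: 400 });
        Ok(())
    }
}

fn main() {
    let mut t = Template { is_valid: true, descriptor: None };
    assert_eq!(t.descriptor(), Some(Descriptor { weight: 400 }));
    assert_eq!(t.descriptor(), Some(Descriptor { weight: 400 })); // cached on the second call
}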
@@ -9,14 +9,18 @@ extern crate app_units;
extern crate bitflags;

// Mac OS-specific library dependencies
-#[cfg(target_os = "macos")] extern crate byteorder;
-#[cfg(target_os = "macos")] extern crate core_foundation;
-#[cfg(target_os = "macos")] extern crate core_graphics;
-#[cfg(target_os = "macos")] extern crate core_text;
+#[cfg(target_os = "macos")]
+extern crate byteorder;
+#[cfg(target_os = "macos")]
+extern crate core_foundation;
+#[cfg(target_os = "macos")]
+extern crate core_graphics;
+#[cfg(target_os = "macos")]
+extern crate core_text;

// Windows-specific library dependencies
-#[cfg(target_os = "windows")] extern crate dwrote;
-#[cfg(target_os = "windows")] extern crate truetype;
+#[cfg(target_os = "windows")]
+extern crate dwrote;

extern crate euclid;
extern crate fnv;
@@ -24,8 +28,8 @@ extern crate fnv;
#[cfg(target_os = "linux")]
extern crate fontconfig;
extern crate fontsan;
-#[cfg(any(target_os = "linux", target_os = "android"))] extern crate freetype;
-#[cfg(any(target_os = "linux", target_os = "android"))] extern crate servo_allocator;
+#[cfg(any(target_os = "linux", target_os = "android"))]
+extern crate freetype;
extern crate gfx_traits;

// Eventually we would like the shaper to be pluggable, as many operating systems have their own
@@ -35,23 +39,33 @@ extern crate harfbuzz_sys as harfbuzz;
extern crate ipc_channel;
#[macro_use]
extern crate lazy_static;
-#[cfg(any(target_os = "linux", target_os = "android"))] extern crate libc;
+#[cfg(any(target_os = "linux", target_os = "android"))]
+extern crate libc;
#[macro_use]
extern crate log;
#[cfg_attr(target_os = "windows", macro_use)]
extern crate malloc_size_of;
extern crate net_traits;
extern crate ordered_float;
-#[cfg(all(feature = "unstable", any(target_feature = "sse2", target_feature = "neon")))]
+#[cfg(all(
+    feature = "unstable",
+    any(target_feature = "sse2", target_feature = "neon")
+))]
extern crate packed_simd;
extern crate range;
-#[macro_use] extern crate serde;
+#[macro_use]
+extern crate serde;
+#[cfg(any(target_os = "linux", target_os = "android"))]
+extern crate servo_allocator;
extern crate servo_arc;
-#[macro_use] extern crate servo_atoms;
+#[macro_use]
+extern crate servo_atoms;
extern crate servo_url;
extern crate smallvec;
extern crate style;
extern crate time;
+#[cfg(target_os = "windows")]
+extern crate truetype;
extern crate ucd;
extern crate unicode_bidi;
extern crate unicode_script;
@@ -61,7 +75,8 @@ extern crate xi_unicode;
extern crate xml5ever;

// Fonts
-#[macro_use] pub mod font;
+#[macro_use]
+pub mod font;
pub mod font_cache_thread;
pub mod font_context;
pub mod font_template;
@@ -116,21 +116,18 @@ struct FontFamily {
struct FontAlias {
    from: String,
    to: String,
-   weight: Option<i32>
+   weight: Option<i32>,
}

struct FontList {
    families: Vec<FontFamily>,
-   aliases: Vec<FontAlias>
+   aliases: Vec<FontAlias>,
}

impl FontList {
    fn new() -> FontList {
        // Possible paths containing the font mapping xml file.
-       let paths = [
-           "/etc/fonts.xml",
-           "/system/etc/system_fonts.xml"
-       ];
+       let paths = ["/etc/fonts.xml", "/system/etc/system_fonts.xml"];

        // Try to load and parse paths until one of them success.
        let mut result = None;
@@ -146,7 +143,7 @@ impl FontList {
            None => FontList {
                families: Self::fallback_font_families(),
                aliases: Vec::new(),
-           }
+           },
        }
    }
@@ -154,25 +151,26 @@ impl FontList {
    fn from_path(path: &str) -> Option<FontList> {
        let xml = match Self::load_file(path) {
            Ok(xml) => xml,
-           _=> { return None; },
+           _ => {
+               return None;
+           },
        };

-       let dom: RcDom = parse_document(RcDom::default(), Default::default())
-           .one(xml);
+       let dom: RcDom = parse_document(RcDom::default(), Default::default()).one(xml);
        let doc = &dom.document;

        // find familyset root node
        let children = doc.children.borrow();
-       let familyset = children.iter().find(|child| {
-           match child.data {
+       let familyset = children.iter().find(|child| match child.data {
            NodeData::Element { ref name, .. } => &*name.local == "familyset",
            _ => false,
-           }
        });

        let familyset = match familyset {
            Some(node) => node,
-           _ => { return None; }
+           _ => {
+               return None;
+           },
        };

        // Parse familyset node
@@ -181,7 +179,11 @@ impl FontList {
        for node in familyset.children.borrow().iter() {
            match node.data {
-               NodeData::Element { ref name, ref attrs, .. } => {
+               NodeData::Element {
+                   ref name,
+                   ref attrs,
+                   ..
+               } => {
                    if &*name.local == "family" {
                        Self::parse_family(&node, attrs, &mut families);
                    } else if &*name.local == "alias" {
@@ -191,13 +193,13 @@ impl FontList {
                        }
                    }
                },
-               _=> {}
+               _ => {},
            }
        }

        Some(FontList {
            families: families,
-           aliases: aliases
+           aliases: aliases,
        })
    }
@@ -209,17 +211,16 @@ impl FontList {
            ("Droid Sans", "DroidSans.ttf"),
        ];

-       alternatives.iter().filter(|item| {
-           Path::new(&Self::font_absolute_path(item.1)).exists()
-       }).map(|item| {
-           FontFamily {
+       alternatives
+           .iter()
+           .filter(|item| Path::new(&Self::font_absolute_path(item.1)).exists())
+           .map(|item| FontFamily {
                name: item.0.into(),
                fonts: vec![Font {
                    filename: item.1.into(),
                    weight: None,
-               }]
-           }
-       }). collect()
+               }],
+           }).collect()
    }

    // All Android fonts are located in /system/fonts
@@ -227,15 +228,14 @@ impl FontList {
        format!("/system/fonts/{}", filename)
    }

-   fn find_family(&self, name: &str) -> Option<&FontFamily>{
+   fn find_family(&self, name: &str) -> Option<&FontFamily> {
        self.families.iter().find(|f| f.name == name)
    }

-   fn find_alias(&self, name: &str) -> Option<&FontAlias>{
+   fn find_alias(&self, name: &str) -> Option<&FontAlias> {
        self.aliases.iter().find(|f| f.from == name)
    }

    fn load_file(path: &str) -> Result<String, io::Error> {
        let mut file = File::open(path)?;
        let mut content = String::new();
@@ -253,13 +253,15 @@ impl FontList {
    //     <font weight="300" style="italic">Roboto-LightItalic.ttf</font>
    //     <font weight="400" style="normal">Roboto-Regular.ttf</font>
    // </family>
-   fn parse_family(familyset: &Node, attrs: &RefCell<Vec<Attribute>>, out:&mut Vec<FontFamily>) {
+   fn parse_family(familyset: &Node, attrs: &RefCell<Vec<Attribute>>, out: &mut Vec<FontFamily>) {
        // Fallback to old Android API v17 xml format if required
-       let using_api_17 = familyset.children.borrow().iter().any(|node| {
-           match node.data {
+       let using_api_17 = familyset
+           .children
+           .borrow()
+           .iter()
+           .any(|node| match node.data {
                NodeData::Element { ref name, .. } => &*name.local == "nameset",
-               _=> false,
-           }
+               _ => false,
            });
        if using_api_17 {
            Self::parse_family_v17(familyset, out);
@@ -269,25 +271,31 @@ impl FontList {
        // Parse family name
        let name = match Self::find_attrib("name", attrs) {
            Some(name) => name,
-           _ => { return; },
+           _ => {
+               return;
+           },
        };

        let mut fonts = Vec::new();
        // Parse font variants
        for node in familyset.children.borrow().iter() {
            match node.data {
-               NodeData::Element { ref name, ref attrs, .. } => {
+               NodeData::Element {
+                   ref name,
+                   ref attrs,
+                   ..
+               } => {
                    if &*name.local == "font" {
                        FontList::parse_font(&node, attrs, &mut fonts);
                    }
                },
-               _=> {}
+               _ => {},
            }
        }

        out.push(FontFamily {
            name: name,
-           fonts: fonts
+           fonts: fonts,
        });
    }
@@ -308,7 +316,7 @@ impl FontList {
    //         <file>Roboto-BoldItalic.ttf</file>
    //     </fileset>
    // </family>
-   fn parse_family_v17(familyset: &Node, out:&mut Vec<FontFamily>) {
+   fn parse_family_v17(familyset: &Node, out: &mut Vec<FontFamily>) {
        let mut nameset = Vec::new();
        let mut fileset = Vec::new();
        for node in familyset.children.borrow().iter() {
@@ -320,13 +328,15 @@ impl FontList {
                        Self::collect_contents_with_tag(node, "file", &mut fileset);
                    }
                },
-               _=> {}
+               _ => {},
            }
        }

        // Create a families for each variation
        for name in nameset {
-           let fonts: Vec<Font> = fileset.iter().map(|f| Font {
+           let fonts: Vec<Font> = fileset
+               .iter()
+               .map(|f| Font {
                    filename: f.clone(),
                    weight: None,
                }).collect();
@@ -334,7 +344,7 @@ impl FontList {
            if !fonts.is_empty() {
                out.push(FontFamily {
                    name: name,
-                   fonts: fonts
+                   fonts: fonts,
                })
            }
        }
@@ -342,11 +352,13 @@ impl FontList {
    // Example:
    // <font weight="100" style="normal">Roboto-Thin.ttf</font>
-   fn parse_font(node: &Node, attrs: &RefCell<Vec<Attribute>>, out:&mut Vec<Font>) {
+   fn parse_font(node: &Node, attrs: &RefCell<Vec<Attribute>>, out: &mut Vec<Font>) {
        // Parse font filename
        let filename = match Self::text_content(node) {
            Some(filename) => filename,
-           _ => { return; }
+           _ => {
+               return;
+           },
        };

        // Parse font weight
@@ -367,17 +379,21 @@ impl FontList {
    // <alias name="helvetica" to="sans-serif" />
    // <alias name="tahoma" to="sans-serif" />
    // <alias name="verdana" to="sans-serif" />
-   fn parse_alias(attrs: &RefCell<Vec<Attribute>>, out:&mut Vec<FontAlias>) {
+   fn parse_alias(attrs: &RefCell<Vec<Attribute>>, out: &mut Vec<FontAlias>) {
        // Parse alias name and referenced font
        let from = match Self::find_attrib("name", attrs) {
            Some(from) => from,
-           _ => { return; },
+           _ => {
+               return;
+           },
        };

        // Parse referenced font
        let to = match Self::find_attrib("to", attrs) {
            Some(to) => to,
-           _ => { return; },
+           _ => {
+               return;
+           },
        };

        // Parse optional weight filter
@@ -391,23 +407,28 @@ impl FontList {
    }

    fn find_attrib(name: &str, attrs: &RefCell<Vec<Attribute>>) -> Option<String> {
-       attrs.borrow().iter().find(|attr| &*attr.name.local == name).map(|s| String::from(&s.value))
+       attrs
+           .borrow()
+           .iter()
+           .find(|attr| &*attr.name.local == name)
+           .map(|s| String::from(&s.value))
    }

    fn text_content(node: &Node) -> Option<String> {
-       node.children.borrow().get(0).and_then(|child| {
-           match child.data {
+       node.children
+           .borrow()
+           .get(0)
+           .and_then(|child| match child.data {
                NodeData::Text { ref contents } => {
                    let mut result = String::new();
                    result.push_str(&contents.borrow());
                    Some(result)
                },
-               _ => None
-           }
+               _ => None,
            })
    }

-   fn collect_contents_with_tag(node: &Node, tag: &str, out:&mut Vec<String>) {
+   fn collect_contents_with_tag(node: &Node, tag: &str, out: &mut Vec<String>) {
        for child in node.children.borrow().iter() {
            match child.data {
                NodeData::Element { ref name, .. } => {
@@ -417,14 +438,17 @@ impl FontList {
                        }
                    }
                },
-               _=> {}
+               _ => {},
            }
        }
    }
}

// Functions used by FontCacheThread
-pub fn for_each_available_family<F>(mut callback: F) where F: FnMut(String) {
+pub fn for_each_available_family<F>(mut callback: F)
+where
+    F: FnMut(String),
+{
    for family in &FONT_LIST.families {
        callback(family.name.clone());
    }
@@ -434,7 +458,8 @@ pub fn for_each_available_family<F>(mut callback: F) where F: FnMut(String) {
}

pub fn for_each_variation<F>(family_name: &str, mut callback: F)
-   where F: FnMut(String)
+where
+    F: FnMut(String),
{
    if let Some(family) = FONT_LIST.find_family(family_name) {
        for font in &family.fonts {
@@ -453,7 +478,7 @@ pub fn for_each_variation<F>(family_name: &str, mut callback: F)
                callback(FontList::font_absolute_path(&font.filename))
            }
        },
-       _ => {}
+       _ => {},
    }
  }
}
@@ -473,46 +498,44 @@ pub fn system_default_family(generic_name: &str) -> Option<String> {
// Based on gfxAndroidPlatform::GetCommonFallbackFonts() in Gecko
pub fn fallback_font_families(codepoint: Option<char>) -> Vec<&'static str> {
-   let mut families = vec!();
+   let mut families = vec![];

    if let Some(block) = codepoint.and_then(|c| c.block()) {
        match block {
            UnicodeBlock::Armenian => {
                families.push("Droid Sans Armenian");
-           }
+           },

            UnicodeBlock::Hebrew => {
                families.push("Droid Sans Hebrew");
-           }
+           },

            UnicodeBlock::Arabic => {
                families.push("Droid Sans Arabic");
-           }
+           },

            UnicodeBlock::Devanagari => {
                families.push("Noto Sans Devanagari");
                families.push("Droid Sans Devanagari");
-           }
+           },

            UnicodeBlock::Tamil => {
                families.push("Noto Sans Tamil");
                families.push("Droid Sans Tamil");
-           }
+           },

            UnicodeBlock::Thai => {
                families.push("Noto Sans Thai");
                families.push("Droid Sans Thai");
-           }
+           },

-           UnicodeBlock::Georgian |
-           UnicodeBlock::GeorgianSupplement => {
+           UnicodeBlock::Georgian | UnicodeBlock::GeorgianSupplement => {
                families.push("Droid Sans Georgian");
-           }
+           },

-           UnicodeBlock::Ethiopic |
-           UnicodeBlock::EthiopicSupplement => {
+           UnicodeBlock::Ethiopic | UnicodeBlock::EthiopicSupplement => {
                families.push("Droid Sans Ethiopic");
-           }
+           },

            _ => {
                if is_cjk(codepoint.unwrap()) {
@@ -520,7 +543,7 @@ pub fn fallback_font_families(codepoint: Option<char>) -> Vec<&'static str> {
                    families.push("Noto Sans CJK JP");
                    families.push("Droid Sans Japanese");
                }
-           }
+           },
        }
    }
@@ -98,14 +98,21 @@ fn create_face(
    let face_index = 0 as FT_Long;

    let result = if let Some(ref bytes) = template.bytes {
-       FT_New_Memory_Face(lib, bytes.as_ptr(), bytes.len() as FT_Long, face_index, &mut face)
+       FT_New_Memory_Face(
+           lib,
+           bytes.as_ptr(),
+           bytes.len() as FT_Long,
+           face_index,
+           &mut face,
+       )
    } else {
        // This will trigger a synchronous file read in the layout thread, which we may want to
        // revisit at some point. See discussion here:
        //
        // https://github.com/servo/servo/pull/20506#issuecomment-378838800
-       let filename = CString::new(&*template.identifier).expect("filename contains NUL byte!");
+       let filename =
+           CString::new(&*template.identifier).expect("filename contains NUL byte!");
        FT_New_Face(lib, filename.as_ptr(), face_index, &mut face)
    };
@@ -122,12 +129,15 @@ fn create_face(
}

impl FontHandleMethods for FontHandle {
-   fn new_from_template(fctx: &FontContextHandle,
+   fn new_from_template(
+       fctx: &FontContextHandle,
        template: Arc<FontTemplateData>,
-       pt_size: Option<Au>)
-       -> Result<FontHandle, ()> {
+       pt_size: Option<Au>,
+   ) -> Result<FontHandle, ()> {
        let ft_ctx: FT_Library = fctx.ctx.ctx;
-       if ft_ctx.is_null() { return Err(()); }
+       if ft_ctx.is_null() {
+           return Err(());
+       }

        let face = create_face(ft_ctx, &template, pt_size)?;
@@ -138,9 +148,8 @@ impl FontHandleMethods for FontHandle {
            can_do_fast_shaping: false,
        };
        // TODO (#11310): Implement basic support for GPOS and GSUB.
-       handle.can_do_fast_shaping = handle.has_table(KERN) &&
-                                    !handle.has_table(GPOS) &&
-                                    !handle.has_table(GSUB);
+       handle.can_do_fast_shaping =
+           handle.has_table(KERN) && !handle.has_table(GPOS) && !handle.has_table(GSUB);
        Ok(handle)
    }
@@ -203,7 +212,7 @@ impl FontHandleMethods for FontHandle {
            7 => FontStretchKeyword::Expanded,
            8 => FontStretchKeyword::ExtraExpanded,
            9 => FontStretchKeyword::UltraExpanded,
-           _ => FontStretchKeyword::Normal
+           _ => FontStretchKeyword::Normal,
        }
        } else {
            FontStretchKeyword::Normal
@@ -218,20 +227,26 @@ impl FontHandleMethods for FontHandle {
            if idx != 0 as FT_UInt {
                Some(idx as GlyphId)
            } else {
-               debug!("Invalid codepoint: U+{:04X} ('{}')", codepoint as u32, codepoint);
+               debug!(
+                   "Invalid codepoint: U+{:04X} ('{}')",
+                   codepoint as u32, codepoint
+               );
                None
            }
        }
    }

-   fn glyph_h_kerning(&self, first_glyph: GlyphId, second_glyph: GlyphId)
-                      -> FractionalPixel {
+   fn glyph_h_kerning(&self, first_glyph: GlyphId, second_glyph: GlyphId) -> FractionalPixel {
        assert!(!self.face.is_null());
        let mut delta = FT_Vector { x: 0, y: 0 };
        unsafe {
-           FT_Get_Kerning(self.face, first_glyph, second_glyph,
+           FT_Get_Kerning(
+               self.face,
+               first_glyph,
+               second_glyph,
                FT_Kerning_Mode::FT_KERNING_DEFAULT as FT_UInt,
-                          &mut delta);
+               &mut delta,
+           );
        }
        fixed_to_float_ft(delta.x as i32)
    }
@@ -243,9 +258,7 @@ impl FontHandleMethods for FontHandle {
    fn glyph_h_advance(&self, glyph: GlyphId) -> Option<FractionalPixel> {
        assert!(!self.face.is_null());
        unsafe {
-           let res = FT_Load_Glyph(self.face,
-                                   glyph as FT_UInt,
-                                   GLYPH_LOAD_FLAGS);
+           let res = FT_Load_Glyph(self.face, glyph as FT_UInt, GLYPH_LOAD_FLAGS);
            if succeeded(res) {
                let void_glyph = (*self.face).glyph;
                let slot: FT_GlyphSlot = mem::transmute(void_glyph);
@@ -291,7 +304,8 @@ impl FontHandleMethods for FontHandle {
            x_height = self.font_units_to_au(os2.sx_height as f64);
        }

-       let average_advance = self.glyph_index('0')
+       let average_advance = self
+           .glyph_index('0')
            .and_then(|idx| self.glyph_h_advance(idx))
            .map_or(max_advance, |advance| self.font_units_to_au(advance));
@@ -320,13 +334,25 @@ impl FontHandleMethods for FontHandle {
        unsafe {
            // Get the length
            let mut len = 0;
-           if !succeeded(FT_Load_Sfnt_Table(self.face, tag, 0, ptr::null_mut(), &mut len)) {
-               return None
+           if !succeeded(FT_Load_Sfnt_Table(
+               self.face,
+               tag,
+               0,
+               ptr::null_mut(),
+               &mut len,
+           )) {
+               return None;
            }
            // Get the bytes
            let mut buf = vec![0u8; len as usize];
-           if !succeeded(FT_Load_Sfnt_Table(self.face, tag, 0, buf.as_mut_ptr(), &mut len)) {
-               return None
+           if !succeeded(FT_Load_Sfnt_Table(
+               self.face,
+               tag,
+               0,
+               buf.as_mut_ptr(),
+               &mut len,
+           )) {
+               return None;
            }
            Some(FontTable { buffer: buf })
        }
@@ -338,25 +364,33 @@ impl FontHandleMethods for FontHandle {
}

impl<'a> FontHandle {
-   fn set_char_size(face: FT_Face, pt_size: Au) -> Result<(), ()>{
+   fn set_char_size(face: FT_Face, pt_size: Au) -> Result<(), ()> {
        let char_size = pt_size.to_f64_px() * 64.0 + 0.5;

        unsafe {
            let result = FT_Set_Char_Size(face, char_size as FT_F26Dot6, 0, 0, 0);
-           if succeeded(result) { Ok(()) } else { Err(()) }
+           if succeeded(result) {
+               Ok(())
+           } else {
+               Err(())
+           }
        }
    }

    fn has_table(&self, tag: FontTableTag) -> bool {
        unsafe {
-           succeeded(FT_Load_Sfnt_Table(self.face, tag as FT_ULong, 0, ptr::null_mut(), &mut 0))
+           succeeded(FT_Load_Sfnt_Table(
+               self.face,
+               tag as FT_ULong,
+               0,
+               ptr::null_mut(),
+               &mut 0,
+           ))
        }
    }

    fn face_rec_mut(&'a self) -> &'a mut FT_FaceRec {
-       unsafe {
-           &mut (*self.face)
-       }
+       unsafe { &mut (*self.face) }
    }

    fn font_units_to_au(&self, value: f64) -> Au {
@@ -378,11 +412,12 @@ impl<'a> FontHandle {
    fn os2_table(&self) -> Option<OS2Table> {
        unsafe {
-           let os2 = FT_Get_Sfnt_Table(self.face_rec_mut(), FT_Sfnt_Tag::FT_SFNT_OS2) as *mut TT_OS2;
+           let os2 =
+               FT_Get_Sfnt_Table(self.face_rec_mut(), FT_Sfnt_Tag::FT_SFNT_OS2) as *mut TT_OS2;
            let valid = !os2.is_null() && (*os2).version != 0xffff;

            if !valid {
-               return None
+               return None;
            }

            Some(OS2Table {
@@ -23,7 +23,7 @@ pub struct User {
    size: usize,
}

-extern fn ft_alloc(mem: FT_Memory, req_size: c_long) -> *mut c_void {
+extern "C" fn ft_alloc(mem: FT_Memory, req_size: c_long) -> *mut c_void {
    unsafe {
        let ptr = malloc(req_size as usize);
        let ptr = ptr as *mut c_void; // libc::c_void vs std::os::raw::c_void
@@ -34,7 +34,7 @@ extern fn ft_alloc(mem: FT_Memory, req_size: c_long) -> *mut c_void {
    }
}

-extern fn ft_free(mem: FT_Memory, ptr: *mut c_void) {
+extern "C" fn ft_free(mem: FT_Memory, ptr: *mut c_void) {
    unsafe {
        let actual_size = usable_size(ptr);
        let user = (*mem).user as *mut User;
@@ -43,8 +43,12 @@ extern fn ft_free(mem: FT_Memory, ptr: *mut c_void) {
    }
}

-extern fn ft_realloc(mem: FT_Memory, _old_size: c_long, new_req_size: c_long,
-                     old_ptr: *mut c_void) -> *mut c_void {
+extern "C" fn ft_realloc(
+    mem: FT_Memory,
+    _old_size: c_long,
+    new_req_size: c_long,
+    old_ptr: *mut c_void,
+) -> *mut c_void {
    unsafe {
        let old_actual_size = usable_size(old_ptr);
        let new_ptr = realloc(old_ptr as *mut _, new_req_size as usize);
@@ -108,9 +112,7 @@ impl MallocSizeOf for FontContextHandle {
impl FontContextHandle {
    pub fn new() -> FontContextHandle {
-       let user = Box::into_raw(Box::new(User {
-           size: 0,
-       }));
+       let user = Box::into_raw(Box::new(User { size: 0 }));
        let mem = Box::into_raw(Box::new(FT_MemoryRec_ {
            user: user as *mut c_void,
            alloc: Some(ft_alloc),
@@ -121,12 +123,18 @@ impl FontContextHandle {
            let mut ctx: FT_Library = ptr::null_mut();

            let result = FT_New_Library(mem, &mut ctx);
-           if !succeeded(result) { panic!("Unable to initialize FreeType library"); }
+           if !succeeded(result) {
+               panic!("Unable to initialize FreeType library");
+           }

            FT_Add_Default_Modules(ctx);

            FontContextHandle {
-               ctx: Rc::new(FreeTypeLibraryHandle { ctx: ctx, mem: mem, user: user }),
+               ctx: Rc::new(FreeTypeLibraryHandle {
+                   ctx: ctx,
+                   mem: mem,
+                   user: user,
+               }),
            }
        }
    }
}
@@ -20,7 +20,10 @@ static FC_FILE: &'static [u8] = b"file\0";
static FC_INDEX: &'static [u8] = b"index\0";
static FC_FONTFORMAT: &'static [u8] = b"fontformat\0";

-pub fn for_each_available_family<F>(mut callback: F) where F: FnMut(String) {
+pub fn for_each_available_family<F>(mut callback: F)
+where
+    F: FnMut(String),
+{
    unsafe {
        let config = FcConfigGetCurrent();
        let font_set = FcConfigGetFonts(config, FcSetSystem);
@@ -29,19 +32,21 @@ pub fn for_each_available_family<F>(mut callback: F) where F: FnMut(String) {
            let mut family: *mut FcChar8 = ptr::null_mut();
            let mut format: *mut FcChar8 = ptr::null_mut();
            let mut v: c_int = 0;
-           if FcPatternGetString(*font, FC_FONTFORMAT.as_ptr() as *mut c_char, v, &mut format) != FcResultMatch {
+           if FcPatternGetString(*font, FC_FONTFORMAT.as_ptr() as *mut c_char, v, &mut format) !=
+               FcResultMatch
+           {
                continue;
            }

            // Skip bitmap fonts. They aren't supported by FreeType.
            let fontformat = c_str_to_string(format as *const c_char);
-           if fontformat != "TrueType" &&
-              fontformat != "CFF" &&
-              fontformat != "Type 1" {
+           if fontformat != "TrueType" && fontformat != "CFF" && fontformat != "Type 1" {
                continue;
            }

-           while FcPatternGetString(*font, FC_FAMILY.as_ptr() as *mut c_char, v, &mut family) == FcResultMatch {
+           while FcPatternGetString(*font, FC_FAMILY.as_ptr() as *mut c_char, v, &mut family) ==
+               FcResultMatch
+           {
                let family_name = c_str_to_string(family as *const c_char);
                callback(family_name);
                v += 1;
@@ -51,7 +56,8 @@ pub fn for_each_available_family<F>(mut callback: F) where F: FnMut(String) {
}

pub fn for_each_variation<F>(family_name: &str, mut callback: F)
-   where F: FnMut(String)
+where
+    F: FnMut(String),
{
    debug!("getting variations for {}", family_name);
    unsafe {
@@ -62,7 +68,11 @@ pub fn for_each_variation<F>(family_name: &str, mut callback: F)
        assert!(!pattern.is_null());
        let family_name_c = CString::new(family_name).unwrap();
        let family_name = family_name_c.as_ptr();
-       let ok = FcPatternAddString(pattern, FC_FAMILY.as_ptr() as *mut c_char, family_name as *mut FcChar8);
+       let ok = FcPatternAddString(
+           pattern,
+           FC_FAMILY.as_ptr() as *mut c_char,
+           family_name as *mut FcChar8,
+       );
        assert_ne!(ok, 0);

        let object_set = FcObjectSetCreate();
@@ -85,7 +95,8 @@ pub fn for_each_variation<F>(family_name: &str, mut callback: F)
                panic!();
            };
            let mut index: libc::c_int = 0;
-           let result = FcPatternGetInteger(*font, FC_INDEX.as_ptr() as *mut c_char, 0, &mut index);
+           let result =
+               FcPatternGetInteger(*font, FC_INDEX.as_ptr() as *mut c_char, 0, &mut index);
            let index = if result == FcResultMatch {
                index
            } else {
@@ -119,7 +130,12 @@ pub fn system_default_family(generic_name: &str) -> Option<String> {
        let family_name = if result == FcResultMatch {
            let mut match_string: *mut FcChar8 = ptr::null_mut();
-           FcPatternGetString(family_match, FC_FAMILY.as_ptr() as *mut c_char, 0, &mut match_string);
+           FcPatternGetString(
+               family_match,
+               FC_FAMILY.as_ptr() as *mut c_char,
+               0,
+               &mut match_string,
+           );
            let result = c_str_to_string(match_string as *const c_char);
            FcPatternDestroy(family_match);
            Some(result)
@@ -136,12 +152,7 @@ pub static SANS_SERIF_FONT_FAMILY: &'static str = "DejaVu Sans";

// Based on gfxPlatformGtk::GetCommonFallbackFonts() in Gecko
pub fn fallback_font_families(codepoint: Option<char>) -> Vec<&'static str> {
-   let mut families = vec!(
-       "DejaVu Serif",
-       "FreeSerif",
-       "DejaVu Sans",
-       "FreeSans",
-   );
+   let mut families = vec!["DejaVu Serif", "FreeSerif", "DejaVu Sans", "FreeSans"];

    if let Some(codepoint) = codepoint {
        if is_cjk(codepoint) {
@@ -15,7 +15,6 @@ use webrender_api::NativeFontHandle;
#[derive(Deserialize, Serialize)]
pub struct FontTemplateData {
    // If you add members here, review the Debug impl below
-
    pub bytes: Option<Vec<u8>>,
    pub identifier: Atom,
}
@@ -23,8 +22,10 @@ pub struct FontTemplateData {
impl fmt::Debug for FontTemplateData {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.debug_struct("FontTemplateData")
-           .field("bytes", &self.bytes.as_ref().map(|b| format!("[{} bytes]", b.len())))
-           .field("identifier", &self.identifier)
+           .field(
+               "bytes",
+               &self.bytes.as_ref().map(|b| format!("[{} bytes]", b.len())),
+           ).field("identifier", &self.identifier)
            .finish()
    }
}
@@ -3,7 +3,6 @@
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

/// Implementation of Quartz (CoreGraphics) fonts.
-
use app_units::Au;
use byteorder::{BigEndian, ByteOrder};
use core_foundation::base::CFIndex;
@@ -111,8 +110,8 @@ impl FontHandle {
                    return None;
                }

-               let pt_per_font_unit = self.ctfont.pt_size() as f64 /
-                                      self.ctfont.units_per_em() as f64;
+               let pt_per_font_unit =
+                   self.ctfont.pt_size() as f64 / self.ctfont.units_per_em() as f64;
                result.px_per_font_unit = pt_to_px(pt_per_font_unit);
            }
            start = end;
@@ -160,15 +159,15 @@ impl fmt::Debug for CachedKernTable {
    }
}

impl FontHandleMethods for FontHandle {
-   fn new_from_template(_fctx: &FontContextHandle,
+   fn new_from_template(
+       _fctx: &FontContextHandle,
        template: Arc<FontTemplateData>,
-       pt_size: Option<Au>)
-       -> Result<FontHandle, ()> {
+       pt_size: Option<Au>,
+   ) -> Result<FontHandle, ()> {
        let size = match pt_size {
            Some(s) => s.to_f64_px(),
-           None => 0.0
+           None => 0.0,
        };
        match template.ctfont(size) {
            Some(ref ctfont) => {
@@ -184,10 +183,8 @@ impl FontHandleMethods for FontHandle {
                    handle.table_for_tag(GPOS).is_none() &&
                    handle.table_for_tag(GSUB).is_none();
                Ok(handle)
-           }
-           None => {
-               Err(())
-           }
+           },
+           None => Err(()),
        }
    }
@@ -214,6 +211,7 @@ impl FontHandleMethods for FontHandle {
    fn boldness(&self) -> FontWeight {
        let normalized = self.ctfont.all_traits().normalized_weight(); // [-1.0, 1.0]
+
        // TODO(emilio): It may make sense to make this range [.01, 10.0], to
        // align with css-fonts-4's range of [1, 1000].
        let normalized = if normalized <= 0.0 {
@@ -237,9 +235,9 @@ impl FontHandleMethods for FontHandle {
        let mut glyphs: [CGGlyph; 1] = [0 as CGGlyph];
        let count: CFIndex = 1;

-       let result = self.ctfont.get_glyphs_for_characters(&characters[0],
-                                                          &mut glyphs[0],
-                                                          count);
+       let result = self
+           .ctfont
+           .get_glyphs_for_characters(&characters[0], &mut glyphs[0], count);

        if !result {
            // No glyph for this character
@@ -265,10 +263,12 @@ impl FontHandleMethods for FontHandle {
    fn glyph_h_advance(&self, glyph: GlyphId) -> Option<FractionalPixel> {
        let glyphs = [glyph as CGGlyph];
-       let advance = self.ctfont.get_advances_for_glyphs(kCTFontDefaultOrientation,
+       let advance = self.ctfont.get_advances_for_glyphs(
+           kCTFontDefaultOrientation,
            &glyphs[0],
            ptr::null_mut(),
-                                                         1);
+           1,
+       );
        Some(advance as FractionalPixel)
    }
@@ -283,7 +283,8 @@ impl FontHandleMethods for FontHandle {
        let line_gap = (ascent + descent + leading + 0.5).floor();

        let max_advance_width = au_from_pt(bounding_rect.size.width as f64);
-       let average_advance = self.glyph_index('0')
+       let average_advance = self
+           .glyph_index('0')
            .and_then(|idx| self.glyph_h_advance(idx))
            .map(Au::from_f64_px)
            .unwrap_or(max_advance_width);
@@ -307,15 +308,17 @@ impl FontHandleMethods for FontHandle {
            average_advance: average_advance,
            line_gap: Au::from_f64_px(line_gap),
        };
-       debug!("Font metrics (@{} pt): {:?}", self.ctfont.pt_size() as f64, metrics);
+       debug!(
+           "Font metrics (@{} pt): {:?}",
+           self.ctfont.pt_size() as f64,
+           metrics
+       );
        metrics
    }

    fn table_for_tag(&self, tag: FontTableTag) -> Option<FontTable> {
        let result: Option<CFData> = self.ctfont.get_font_table(tag);
-       result.and_then(|data| {
-           Some(FontTable::wrap(data))
-       })
+       result.and_then(|data| Some(FontTable::wrap(data)))
    }

    fn identifier(&self) -> Atom {
@@ -6,7 +6,7 @@ use malloc_size_of::{MallocSizeOf, MallocSizeOfOps};

#[derive(Clone, Debug)]
pub struct FontContextHandle {
-   ctx: ()
+   ctx: (),
}

impl FontContextHandle {
@ -6,14 +6,20 @@ use core_text;
use text::util::unicode_plane; use text::util::unicode_plane;
use ucd::{Codepoint, UnicodeBlock}; use ucd::{Codepoint, UnicodeBlock};
pub fn for_each_available_family<F>(mut callback: F) where F: FnMut(String) { pub fn for_each_available_family<F>(mut callback: F)
where
F: FnMut(String),
{
let family_names = core_text::font_collection::get_family_names(); let family_names = core_text::font_collection::get_family_names();
for family_name in family_names.iter() { for family_name in family_names.iter() {
callback(family_name.to_string()); callback(family_name.to_string());
} }
} }
pub fn for_each_variation<F>(family_name: &str, mut callback: F) where F: FnMut(String) { pub fn for_each_variation<F>(family_name: &str, mut callback: F)
where
F: FnMut(String),
{
debug!("Looking for faces of family: {}", family_name); debug!("Looking for faces of family: {}", family_name);
let family_collection = core_text::font_collection::create_for_family(family_name); let family_collection = core_text::font_collection::create_for_family(family_name);
@ -31,7 +37,7 @@ pub fn system_default_family(_generic_name: &str) -> Option<String> {
// Based on gfxPlatformMac::GetCommonFallbackFonts() in Gecko // Based on gfxPlatformMac::GetCommonFallbackFonts() in Gecko
pub fn fallback_font_families(codepoint: Option<char>) -> Vec<&'static str> { pub fn fallback_font_families(codepoint: Option<char>) -> Vec<&'static str> {
let mut families = vec!("Lucida Grande"); let mut families = vec!["Lucida Grande"];
if let Some(codepoint) = codepoint { if let Some(codepoint) = codepoint {
match unicode_plane(codepoint) { match unicode_plane(codepoint) {
@ -45,66 +51,65 @@ pub fn fallback_font_families(codepoint: Option<char>) -> Vec<&'static str> {
UnicodeBlock::Thaana | UnicodeBlock::Thaana |
UnicodeBlock::NKo => { UnicodeBlock::NKo => {
families.push("Geeza Pro"); families.push("Geeza Pro");
} },
UnicodeBlock::Devanagari => { UnicodeBlock::Devanagari => {
families.push("Devanagari Sangam MN"); families.push("Devanagari Sangam MN");
} },
UnicodeBlock::Gurmukhi => { UnicodeBlock::Gurmukhi => {
families.push("Gurmukhi MN"); families.push("Gurmukhi MN");
} },
UnicodeBlock::Gujarati => { UnicodeBlock::Gujarati => {
families.push("Gujarati Sangam MN"); families.push("Gujarati Sangam MN");
} },
UnicodeBlock::Tamil => { UnicodeBlock::Tamil => {
families.push("Tamil MN"); families.push("Tamil MN");
} },
UnicodeBlock::Lao => { UnicodeBlock::Lao => {
families.push("Lao MN"); families.push("Lao MN");
} },
UnicodeBlock::Tibetan => { UnicodeBlock::Tibetan => {
families.push("Songti SC"); families.push("Songti SC");
} },
UnicodeBlock::Myanmar => { UnicodeBlock::Myanmar => {
families.push("Myanmar MN"); families.push("Myanmar MN");
} },
UnicodeBlock::Ethiopic | UnicodeBlock::Ethiopic |
UnicodeBlock::EthiopicSupplement | UnicodeBlock::EthiopicSupplement |
UnicodeBlock::EthiopicExtended | UnicodeBlock::EthiopicExtended |
UnicodeBlock::EthiopicExtendedA => { UnicodeBlock::EthiopicExtendedA => {
families.push("Kefa"); families.push("Kefa");
} },
UnicodeBlock::Cherokee => { UnicodeBlock::Cherokee => {
families.push("Plantagenet Cherokee"); families.push("Plantagenet Cherokee");
} },
UnicodeBlock::UnifiedCanadianAboriginalSyllabics | UnicodeBlock::UnifiedCanadianAboriginalSyllabics |
UnicodeBlock::UnifiedCanadianAboriginalSyllabicsExtended => { UnicodeBlock::UnifiedCanadianAboriginalSyllabicsExtended => {
families.push("Euphemia UCAS"); families.push("Euphemia UCAS");
} },
UnicodeBlock::Mongolian | UnicodeBlock::Mongolian |
UnicodeBlock::YiSyllables | UnicodeBlock::YiSyllables |
UnicodeBlock::YiRadicals => { UnicodeBlock::YiRadicals => {
families.push("STHeiti"); families.push("STHeiti");
} },
UnicodeBlock::Khmer | UnicodeBlock::Khmer | UnicodeBlock::KhmerSymbols => {
UnicodeBlock::KhmerSymbols => {
families.push("Khmer MN"); families.push("Khmer MN");
} },
UnicodeBlock::TaiLe => { UnicodeBlock::TaiLe => {
families.push("Microsoft Tai Le"); families.push("Microsoft Tai Le");
} },
UnicodeBlock::GeneralPunctuation | UnicodeBlock::GeneralPunctuation |
UnicodeBlock::SuperscriptsandSubscripts | UnicodeBlock::SuperscriptsandSubscripts |
@ -134,11 +139,11 @@ pub fn fallback_font_families(codepoint: Option<char>) -> Vec<&'static str> {
families.push("Apple Symbols"); families.push("Apple Symbols");
families.push("Menlo"); families.push("Menlo");
families.push("STIXGeneral"); families.push("STIXGeneral");
} },
UnicodeBlock::BraillePatterns => { UnicodeBlock::BraillePatterns => {
families.push("Apple Braille"); families.push("Apple Braille");
} },
UnicodeBlock::Bopomofo | UnicodeBlock::Bopomofo |
UnicodeBlock::HangulCompatibilityJamo | UnicodeBlock::HangulCompatibilityJamo |
@ -147,7 +152,7 @@ pub fn fallback_font_families(codepoint: Option<char>) -> Vec<&'static str> {
UnicodeBlock::CJKStrokes | UnicodeBlock::CJKStrokes |
UnicodeBlock::KatakanaPhoneticExtensions => { UnicodeBlock::KatakanaPhoneticExtensions => {
families.push("Hiragino Sans GB"); families.push("Hiragino Sans GB");
} },
UnicodeBlock::YijingHexagramSymbols | UnicodeBlock::YijingHexagramSymbols |
UnicodeBlock::CyrillicExtendedB | UnicodeBlock::CyrillicExtendedB |
@ -158,27 +163,27 @@ pub fn fallback_font_families(codepoint: Option<char>) -> Vec<&'static str> {
UnicodeBlock::HalfwidthandFullwidthForms | UnicodeBlock::HalfwidthandFullwidthForms |
UnicodeBlock::Specials => { UnicodeBlock::Specials => {
families.push("Apple Symbols"); families.push("Apple Symbols");
} },
_ => {} _ => {},
}
} }
} }
},
// https://en.wikipedia.org/wiki/Plane_(Unicode)#Supplementary_Multilingual_Plane // https://en.wikipedia.org/wiki/Plane_(Unicode)#Supplementary_Multilingual_Plane
1 => { 1 => {
families.push("Apple Symbols"); families.push("Apple Symbols");
families.push("STIXGeneral"); families.push("STIXGeneral");
} },
// https://en.wikipedia.org/wiki/Plane_(Unicode)#Supplementary_Ideographic_Plane // https://en.wikipedia.org/wiki/Plane_(Unicode)#Supplementary_Ideographic_Plane
2 => { 2 => {
// Systems with MS Office may have these fonts // Systems with MS Office may have these fonts
families.push("MingLiU-ExtB"); families.push("MingLiU-ExtB");
families.push("SimSun-ExtB"); families.push("SimSun-ExtB");
} },
_ => {} _ => {},
} }
} }
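For illustration only: a minimal, self-contained sketch of the fallback pattern the macOS hunk above uses, with hard-coded codepoint ranges standing in for the crate's unicode_plane/UnicodeBlock helpers. The family names come from the diff; the function name mac_fallback_families and the simplified ranges are assumptions made for this sketch, not the crate's API.

fn mac_fallback_families(c: char) -> Vec<&'static str> {
    // Start from the default family, then append block-specific fallbacks.
    let mut families = vec!["Lucida Grande"];
    match c as u32 {
        0x0900..=0x097F => families.push("Devanagari Sangam MN"), // Devanagari
        0x0E80..=0x0EFF => families.push("Lao MN"),               // Lao
        0x1780..=0x17FF => families.push("Khmer MN"),             // Khmer
        _ => {},
    }
    families
}

fn main() {
    // U+0915 DEVANAGARI LETTER KA falls in the Devanagari block.
    assert!(mac_fallback_families('क').contains(&"Devanagari Sangam MN"));
}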


@ -27,7 +27,6 @@ use webrender_api::NativeFontHandle;
#[derive(Deserialize, Serialize)] #[derive(Deserialize, Serialize)]
pub struct FontTemplateData { pub struct FontTemplateData {
// If you add members here, review the Debug impl below // If you add members here, review the Debug impl below
/// The `CTFont` object, if present. This is cached here so that we don't have to keep creating /// The `CTFont` object, if present. This is cached here so that we don't have to keep creating
/// `CTFont` instances over and over. It can always be recreated from the `identifier` and/or /// `CTFont` instances over and over. It can always be recreated from the `identifier` and/or
/// `font_data` fields. /// `font_data` fields.
@ -38,7 +37,7 @@ pub struct FontTemplateData {
ctfont: CachedCTFont, ctfont: CachedCTFont,
pub identifier: Atom, pub identifier: Atom,
pub font_data: Option<Arc<Vec<u8>>> pub font_data: Option<Arc<Vec<u8>>>,
} }
impl fmt::Debug for FontTemplateData { impl fmt::Debug for FontTemplateData {
@ -48,11 +47,11 @@ impl fmt::Debug for FontTemplateData {
.field("identifier", &self.identifier) .field("identifier", &self.identifier)
.field( .field(
"font_data", "font_data",
&self.font_data &self
.font_data
.as_ref() .as_ref()
.map(|bytes| format!("[{} bytes]", bytes.len())) .map(|bytes| format!("[{} bytes]", bytes.len())),
) ).finish()
.finish()
} }
} }
@ -64,7 +63,7 @@ impl FontTemplateData {
Ok(FontTemplateData { Ok(FontTemplateData {
ctfont: CachedCTFont(Mutex::new(HashMap::new())), ctfont: CachedCTFont(Mutex::new(HashMap::new())),
identifier: identifier.to_owned(), identifier: identifier.to_owned(),
font_data: font_data.map(Arc::new) font_data: font_data.map(Arc::new),
}) })
} }
@ -83,10 +82,10 @@ impl FontTemplateData {
match cgfont_result { match cgfont_result {
Ok(cgfont) => { Ok(cgfont) => {
Some(core_text::font::new_from_CGFont(&cgfont, clamped_pt_size)) Some(core_text::font::new_from_CGFont(&cgfont, clamped_pt_size))
},
Err(_) => None,
} }
Err(_) => None },
}
}
None => core_text::font::new_from_name(&*self.identifier, clamped_pt_size).ok(), None => core_text::font::new_from_name(&*self.identifier, clamped_pt_size).ok(),
}; };
if let Some(ctfont) = ctfont { if let Some(ctfont) = ctfont {
@ -104,16 +103,23 @@ impl FontTemplateData {
return font_data; return font_data;
} }
let path = ServoUrl::parse(&*self.ctfont(0.0) let path = ServoUrl::parse(
&*self
.ctfont(0.0)
.expect("No Core Text font available!") .expect("No Core Text font available!")
.url() .url()
.expect("No URL for Core Text font!") .expect("No URL for Core Text font!")
.get_string() .get_string()
.to_string()).expect("Couldn't parse Core Text font URL!") .to_string(),
.as_url().to_file_path() ).expect("Couldn't parse Core Text font URL!")
.as_url()
.to_file_path()
.expect("Core Text font didn't name a path!"); .expect("Core Text font didn't name a path!");
let mut bytes = Vec::new(); let mut bytes = Vec::new();
File::open(path).expect("Couldn't open font file!").read_to_end(&mut bytes).unwrap(); File::open(path)
.expect("Couldn't open font file!")
.read_to_end(&mut bytes)
.unwrap();
bytes bytes
} }
@ -125,7 +131,8 @@ impl FontTemplateData {
/// Returns the native font that underlies this font template, if applicable. /// Returns the native font that underlies this font template, if applicable.
pub fn native_font(&self) -> Option<NativeFontHandle> { pub fn native_font(&self) -> Option<NativeFontHandle> {
self.ctfont(0.0).map(|ctfont| NativeFontHandle(ctfont.copy_to_CGFont())) self.ctfont(0.0)
.map(|ctfont| NativeFontHandle(ctfont.copy_to_CGFont()))
} }
} }
@ -140,14 +147,19 @@ impl Deref for CachedCTFont {
} }
impl Serialize for CachedCTFont { impl Serialize for CachedCTFont {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
serializer.serialize_none() serializer.serialize_none()
} }
} }
impl<'de> Deserialize<'de> for CachedCTFont { impl<'de> Deserialize<'de> for CachedCTFont {
fn deserialize<D>(deserializer: D) -> Result<CachedCTFont, D::Error> fn deserialize<D>(deserializer: D) -> Result<CachedCTFont, D::Error>
where D: Deserializer<'de> { where
D: Deserializer<'de>,
{
struct NoneOptionVisitor; struct NoneOptionVisitor;
impl<'de> Visitor<'de> for NoneOptionVisitor { impl<'de> Visitor<'de> for NoneOptionVisitor {
@ -158,7 +170,10 @@ impl<'de> Deserialize<'de> for CachedCTFont {
} }
#[inline] #[inline]
fn visit_none<E>(self) -> Result<CachedCTFont, E> where E: Error { fn visit_none<E>(self) -> Result<CachedCTFont, E>
where
E: Error,
{
Ok(CachedCTFont(Mutex::new(HashMap::new()))) Ok(CachedCTFont(Mutex::new(HashMap::new())))
} }
} }
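The Serialize/Deserialize impls above deliberately write the CTFont cache as "none" and rebuild it empty on the receiving side, since a process-local cache has no meaning across serialization. A hedged, self-contained sketch of that pattern follows; it uses a plain Option round-trip rather than the visitor in the patch, and the Cache type and its field types are invented for the example.

use serde::{Deserialize, Deserializer, Serialize, Serializer};
use std::collections::HashMap;
use std::sync::Mutex;

// Stand-in for CachedCTFont: a process-local cache that must not be serialized.
struct Cache(Mutex<HashMap<u32, String>>);

impl Serialize for Cache {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        // Always write "nothing"; the cache contents are meaningless elsewhere.
        serializer.serialize_none()
    }
}

impl<'de> Deserialize<'de> for Cache {
    fn deserialize<D>(deserializer: D) -> Result<Cache, D::Error>
    where
        D: Deserializer<'de>,
    {
        // Consume the None written above and start over with an empty cache.
        let _: Option<()> = Option::deserialize(deserializer)?;
        Ok(Cache(Mutex::new(HashMap::new())))
    }
}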


@ -23,7 +23,9 @@ mod freetype {
/// Creates a String from the given null-terminated buffer. /// Creates a String from the given null-terminated buffer.
/// Panics if the buffer does not contain UTF-8. /// Panics if the buffer does not contain UTF-8.
unsafe fn c_str_to_string(s: *const c_char) -> String { unsafe fn c_str_to_string(s: *const c_char) -> String {
str::from_utf8(CStr::from_ptr(s).to_bytes()).unwrap().to_owned() str::from_utf8(CStr::from_ptr(s).to_bytes())
.unwrap()
.to_owned()
} }
pub mod font; pub mod font;


@ -27,10 +27,18 @@ use text::glyph::GlyphId;
use truetype; use truetype;
// 1em = 12pt = 16px, assuming 72 points per inch and 96 px per inch // 1em = 12pt = 16px, assuming 72 points per inch and 96 px per inch
fn pt_to_px(pt: f64) -> f64 { pt / 72. * 96. } fn pt_to_px(pt: f64) -> f64 {
fn em_to_px(em: f64) -> f64 { em * 16. } pt / 72. * 96.
fn au_from_em(em: f64) -> Au { Au::from_f64_px(em_to_px(em)) } }
fn au_from_pt(pt: f64) -> Au { Au::from_f64_px(pt_to_px(pt)) } fn em_to_px(em: f64) -> f64 {
em * 16.
}
fn au_from_em(em: f64) -> Au {
Au::from_f64_px(em_to_px(em))
}
fn au_from_pt(pt: f64) -> Au {
Au::from_f64_px(pt_to_px(pt))
}
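The helpers above encode the comment's assumption of 72 points per inch, 96 pixels per inch, and 1em = 16px. A quick standalone check, duplicating the two pure functions so it runs on its own:

fn pt_to_px(pt: f64) -> f64 { pt / 72. * 96. }
fn em_to_px(em: f64) -> f64 { em * 16. }

fn main() {
    // 12pt and 1em both come out at 16px, matching the comment above.
    assert!((pt_to_px(12.0) - 16.0).abs() < 1e-9);
    assert_eq!(em_to_px(1.0), 16.0);
}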
pub struct FontTable { pub struct FontTable {
data: Vec<u8>, data: Vec<u8>,
@ -38,7 +46,9 @@ pub struct FontTable {
impl FontTable { impl FontTable {
pub fn wrap(data: &[u8]) -> FontTable { pub fn wrap(data: &[u8]) -> FontTable {
FontTable { data: data.to_vec() } FontTable {
data: data.to_vec(),
}
} }
} }
@ -139,7 +149,7 @@ impl FontInfo {
} else { } else {
return Err(()); return Err(());
} }
} },
}; };
let mut os2_table_cursor = Cursor::new(os2_table_bytes.as_ref().unwrap()); let mut os2_table_cursor = Cursor::new(os2_table_bytes.as_ref().unwrap());
@ -163,7 +173,8 @@ impl FontInfo {
let weight = StyleFontWeight(weight_val as f32); let weight = StyleFontWeight(weight_val as f32);
let stretch = StyleFontStretch(NonNegative(match min(9, max(1, width_val)) { let stretch = StyleFontStretch(NonNegative(
match min(9, max(1, width_val)) {
1 => FontStretchKeyword::UltraCondensed, 1 => FontStretchKeyword::UltraCondensed,
2 => FontStretchKeyword::ExtraCondensed, 2 => FontStretchKeyword::ExtraCondensed,
3 => FontStretchKeyword::Condensed, 3 => FontStretchKeyword::Condensed,
@ -174,7 +185,8 @@ impl FontInfo {
8 => FontStretchKeyword::ExtraExpanded, 8 => FontStretchKeyword::ExtraExpanded,
9 => FontStretchKeyword::UltraExpanded, 9 => FontStretchKeyword::UltraExpanded,
_ => return Err(()), _ => return Err(()),
}.compute())); }.compute(),
));
let style = if italic_bool { let style = if italic_bool {
GenericFontStyle::Italic GenericFontStyle::Italic
@ -212,7 +224,8 @@ impl FontInfo {
// slightly blacker black // slightly blacker black
FontWeight::ExtraBlack => 1000., FontWeight::ExtraBlack => 1000.,
}); });
let stretch = StyleFontStretch(NonNegative(match font.stretch() { let stretch = StyleFontStretch(NonNegative(
match font.stretch() {
FontStretch::Undefined => FontStretchKeyword::Normal, FontStretch::Undefined => FontStretchKeyword::Normal,
FontStretch::UltraCondensed => FontStretchKeyword::UltraCondensed, FontStretch::UltraCondensed => FontStretchKeyword::UltraCondensed,
FontStretch::ExtraCondensed => FontStretchKeyword::ExtraCondensed, FontStretch::ExtraCondensed => FontStretchKeyword::ExtraCondensed,
@ -223,7 +236,8 @@ impl FontInfo {
FontStretch::Expanded => FontStretchKeyword::Expanded, FontStretch::Expanded => FontStretchKeyword::Expanded,
FontStretch::ExtraExpanded => FontStretchKeyword::ExtraExpanded, FontStretch::ExtraExpanded => FontStretchKeyword::ExtraExpanded,
FontStretch::UltraExpanded => FontStretchKeyword::UltraExpanded, FontStretch::UltraExpanded => FontStretchKeyword::UltraExpanded,
}.compute())); }.compute(),
));
Ok(FontInfo { Ok(FontInfo {
family_name: font.family_name(), family_name: font.family_name(),
@ -246,13 +260,14 @@ pub struct FontHandle {
scaled_du_to_px: f32, scaled_du_to_px: f32,
} }
impl FontHandle { impl FontHandle {}
}
impl FontHandleMethods for FontHandle { impl FontHandleMethods for FontHandle {
fn new_from_template(_: &FontContextHandle, template: Arc<FontTemplateData>, pt_size: Option<Au>) fn new_from_template(
-> Result<Self, ()> _: &FontContextHandle,
{ template: Arc<FontTemplateData>,
pt_size: Option<Au>,
) -> Result<Self, ()> {
let (info, face) = if let Some(ref raw_font) = template.bytes { let (info, face) = if let Some(ref raw_font) = template.bytes {
let font_file = FontFile::new_from_data(&raw_font); let font_file = FontFile::new_from_data(&raw_font);
if font_file.is_none() { if font_file.is_none() {
@ -260,7 +275,9 @@ impl FontHandleMethods for FontHandle {
return Err(()); return Err(());
} }
let face = font_file.unwrap().create_face(0, dwrote::DWRITE_FONT_SIMULATIONS_NONE); let face = font_file
.unwrap()
.create_face(0, dwrote::DWRITE_FONT_SIMULATIONS_NONE);
let info = FontInfo::new_from_face(&face)?; let info = FontInfo::new_from_face(&face)?;
(info, face) (info, face)
} else { } else {
@ -350,7 +367,7 @@ impl FontHandleMethods for FontHandle {
let dm = self.face.metrics(); let dm = self.face.metrics();
let au_from_du = |du| -> Au { Au::from_f32_px(du as f32 * self.du_to_px) }; let au_from_du = |du| -> Au { Au::from_f32_px(du as f32 * self.du_to_px) };
let au_from_du_s = |du| -> Au { Au:: from_f32_px(du as f32 * self.scaled_du_to_px) }; let au_from_du_s = |du| -> Au { Au::from_f32_px(du as f32 * self.scaled_du_to_px) };
// anything that we calculate and don't just pull out of self.face.metrics // anything that we calculate and don't just pull out of self.face.metrics
// is pulled out here for clarity // is pulled out here for clarity
@ -375,7 +392,9 @@ impl FontHandleMethods for FontHandle {
} }
fn table_for_tag(&self, tag: FontTableTag) -> Option<FontTable> { fn table_for_tag(&self, tag: FontTableTag) -> Option<FontTable> {
self.face.get_font_table(tag).map(|bytes| FontTable { data: bytes }) self.face
.get_font_table(tag)
.map(|bytes| FontTable { data: bytes })
} }
fn identifier(&self) -> Atom { fn identifier(&self) -> Atom {


@ -21,7 +21,10 @@ pub fn system_default_family(_: &str) -> Option<String> {
Some("Verdana".to_owned()) Some("Verdana".to_owned())
} }
pub fn for_each_available_family<F>(mut callback: F) where F: FnMut(String) { pub fn for_each_available_family<F>(mut callback: F)
where
F: FnMut(String),
{
let system_fc = FontCollection::system(); let system_fc = FontCollection::system();
for family in system_fc.families_iter() { for family in system_fc.families_iter() {
callback(family.name()); callback(family.name());
@ -37,7 +40,10 @@ pub fn for_each_available_family<F>(mut callback: F) where F: FnMut(String) {
// we'll stringify, and then put them all in a HashMap with // we'll stringify, and then put them all in a HashMap with
// the actual FontDescriptor there. // the actual FontDescriptor there.
pub fn for_each_variation<F>(family_name: &str, mut callback: F) where F: FnMut(String) { pub fn for_each_variation<F>(family_name: &str, mut callback: F)
where
F: FnMut(String),
{
let system_fc = FontCollection::system(); let system_fc = FontCollection::system();
if let Some(family) = system_fc.get_font_family_by_name(family_name) { if let Some(family) = system_fc.get_font_family_by_name(family_name) {
let count = family.get_font_count(); let count = family.get_font_count();
@ -65,12 +71,14 @@ pub fn descriptor_from_atom(ident: &Atom) -> FontDescriptor {
pub fn font_from_atom(ident: &Atom) -> Font { pub fn font_from_atom(ident: &Atom) -> Font {
let fonts = FONT_ATOM_MAP.lock().unwrap(); let fonts = FONT_ATOM_MAP.lock().unwrap();
FontCollection::system().get_font_from_descriptor(fonts.get(ident).unwrap()).unwrap() FontCollection::system()
.get_font_from_descriptor(fonts.get(ident).unwrap())
.unwrap()
} }
// Based on gfxWindowsPlatform::GetCommonFallbackFonts() in Gecko // Based on gfxWindowsPlatform::GetCommonFallbackFonts() in Gecko
pub fn fallback_font_families(codepoint: Option<char>) -> Vec<&'static str> { pub fn fallback_font_families(codepoint: Option<char>) -> Vec<&'static str> {
let mut families = vec!("Arial"); let mut families = vec!["Arial"];
if let Some(codepoint) = codepoint { if let Some(codepoint) = codepoint {
match unicode_plane(codepoint) { match unicode_plane(codepoint) {
@ -83,31 +91,29 @@ pub fn fallback_font_families(codepoint: Option<char>) -> Vec<&'static str> {
UnicodeBlock::Hebrew => { UnicodeBlock::Hebrew => {
families.push("Estrangelo Edessa"); families.push("Estrangelo Edessa");
families.push("Cambria"); families.push("Cambria");
} },
UnicodeBlock::Arabic | UnicodeBlock::Arabic | UnicodeBlock::ArabicSupplement => {
UnicodeBlock::ArabicSupplement => {
families.push("Microsoft Uighur"); families.push("Microsoft Uighur");
} },
UnicodeBlock::Syriac => { UnicodeBlock::Syriac => {
families.push("Estrangelo Edessa"); families.push("Estrangelo Edessa");
} },
UnicodeBlock::Thaana => { UnicodeBlock::Thaana => {
families.push("MV Boli"); families.push("MV Boli");
} },
UnicodeBlock::NKo => { UnicodeBlock::NKo => {
families.push("Ebrima"); families.push("Ebrima");
} },
UnicodeBlock::Devanagari | UnicodeBlock::Devanagari | UnicodeBlock::Bengali => {
UnicodeBlock::Bengali => {
families.push("Nirmala UI"); families.push("Nirmala UI");
families.push("Utsaah"); families.push("Utsaah");
families.push("Aparajita"); families.push("Aparajita");
} },
UnicodeBlock::Gurmukhi | UnicodeBlock::Gurmukhi |
UnicodeBlock::Gujarati | UnicodeBlock::Gujarati |
@ -123,21 +129,21 @@ pub fn fallback_font_families(codepoint: Option<char>) -> Vec<&'static str> {
UnicodeBlock::SundaneseSupplement | UnicodeBlock::SundaneseSupplement |
UnicodeBlock::VedicExtensions => { UnicodeBlock::VedicExtensions => {
families.push("Nirmala UI"); families.push("Nirmala UI");
} },
UnicodeBlock::Thai => { UnicodeBlock::Thai => {
families.push("Leelawadee UI"); families.push("Leelawadee UI");
} },
UnicodeBlock::Lao => { UnicodeBlock::Lao => {
families.push("Lao UI"); families.push("Lao UI");
} },
UnicodeBlock::Myanmar | UnicodeBlock::Myanmar |
UnicodeBlock::MyanmarExtendedA | UnicodeBlock::MyanmarExtendedA |
UnicodeBlock::MyanmarExtendedB => { UnicodeBlock::MyanmarExtendedB => {
families.push("Myanmar Text"); families.push("Myanmar Text");
} },
UnicodeBlock::HangulJamo | UnicodeBlock::HangulJamo |
UnicodeBlock::HangulJamoExtendedA | UnicodeBlock::HangulJamoExtendedA |
@ -145,48 +151,47 @@ pub fn fallback_font_families(codepoint: Option<char>) -> Vec<&'static str> {
UnicodeBlock::HangulJamoExtendedB | UnicodeBlock::HangulJamoExtendedB |
UnicodeBlock::HangulCompatibilityJamo => { UnicodeBlock::HangulCompatibilityJamo => {
families.push("Malgun Gothic"); families.push("Malgun Gothic");
} },
UnicodeBlock::Ethiopic | UnicodeBlock::Ethiopic |
UnicodeBlock::EthiopicSupplement | UnicodeBlock::EthiopicSupplement |
UnicodeBlock::EthiopicExtended | UnicodeBlock::EthiopicExtended |
UnicodeBlock::EthiopicExtendedA => { UnicodeBlock::EthiopicExtendedA => {
families.push("Nyala"); families.push("Nyala");
} },
UnicodeBlock::Cherokee => { UnicodeBlock::Cherokee => {
families.push("Plantagenet Cherokee"); families.push("Plantagenet Cherokee");
} },
UnicodeBlock::UnifiedCanadianAboriginalSyllabics | UnicodeBlock::UnifiedCanadianAboriginalSyllabics |
UnicodeBlock::UnifiedCanadianAboriginalSyllabicsExtended => { UnicodeBlock::UnifiedCanadianAboriginalSyllabicsExtended => {
families.push("Euphemia"); families.push("Euphemia");
families.push("Segoe UI"); families.push("Segoe UI");
} },
UnicodeBlock::Khmer | UnicodeBlock::Khmer | UnicodeBlock::KhmerSymbols => {
UnicodeBlock::KhmerSymbols => {
families.push("Khmer UI"); families.push("Khmer UI");
families.push("Leelawadee UI"); families.push("Leelawadee UI");
} },
UnicodeBlock::Mongolian => { UnicodeBlock::Mongolian => {
families.push("Mongolian Baiti"); families.push("Mongolian Baiti");
} },
UnicodeBlock::TaiLe => { UnicodeBlock::TaiLe => {
families.push("Microsoft Tai Le"); families.push("Microsoft Tai Le");
} },
UnicodeBlock::NewTaiLue => { UnicodeBlock::NewTaiLue => {
families.push("Microsoft New Tai Lue"); families.push("Microsoft New Tai Lue");
} },
UnicodeBlock::Buginese | UnicodeBlock::Buginese |
UnicodeBlock::TaiTham | UnicodeBlock::TaiTham |
UnicodeBlock::CombiningDiacriticalMarksExtended => { UnicodeBlock::CombiningDiacriticalMarksExtended => {
families.push("Leelawadee UI"); families.push("Leelawadee UI");
} },
UnicodeBlock::GeneralPunctuation | UnicodeBlock::GeneralPunctuation |
UnicodeBlock::SuperscriptsandSubscripts | UnicodeBlock::SuperscriptsandSubscripts |
@ -220,7 +225,7 @@ pub fn fallback_font_families(codepoint: Option<char>) -> Vec<&'static str> {
families.push("Meiryo"); families.push("Meiryo");
families.push("Lucida Sans Unicode"); families.push("Lucida Sans Unicode");
families.push("Ebrima"); families.push("Ebrima");
} },
UnicodeBlock::GeorgianSupplement | UnicodeBlock::GeorgianSupplement |
UnicodeBlock::Tifinagh | UnicodeBlock::Tifinagh |
@ -232,11 +237,11 @@ pub fn fallback_font_families(codepoint: Option<char>) -> Vec<&'static str> {
families.push("Segoe UI"); families.push("Segoe UI");
families.push("Segoe UI Symbol"); families.push("Segoe UI Symbol");
families.push("Meiryo"); families.push("Meiryo");
} },
UnicodeBlock::BraillePatterns => { UnicodeBlock::BraillePatterns => {
families.push("Segoe UI Symbol"); families.push("Segoe UI Symbol");
} },
UnicodeBlock::CJKSymbolsandPunctuation | UnicodeBlock::CJKSymbolsandPunctuation |
UnicodeBlock::Hiragana | UnicodeBlock::Hiragana |
@ -249,21 +254,20 @@ pub fn fallback_font_families(codepoint: Option<char>) -> Vec<&'static str> {
UnicodeBlock::CJKUnifiedIdeographs => { UnicodeBlock::CJKUnifiedIdeographs => {
families.push("Microsoft YaHei"); families.push("Microsoft YaHei");
families.push("Yu Gothic"); families.push("Yu Gothic");
} },
UnicodeBlock::EnclosedCJKLettersandMonths => { UnicodeBlock::EnclosedCJKLettersandMonths => {
families.push("Malgun Gothic"); families.push("Malgun Gothic");
} },
UnicodeBlock::YijingHexagramSymbols => { UnicodeBlock::YijingHexagramSymbols => {
families.push("Segoe UI Symbol"); families.push("Segoe UI Symbol");
} },
UnicodeBlock::YiSyllables | UnicodeBlock::YiSyllables | UnicodeBlock::YiRadicals => {
UnicodeBlock::YiRadicals => {
families.push("Microsoft Yi Baiti"); families.push("Microsoft Yi Baiti");
families.push("Segoe UI"); families.push("Segoe UI");
} },
UnicodeBlock::Vai | UnicodeBlock::Vai |
UnicodeBlock::CyrillicExtendedB | UnicodeBlock::CyrillicExtendedB |
@ -273,7 +277,7 @@ pub fn fallback_font_families(codepoint: Option<char>) -> Vec<&'static str> {
families.push("Ebrima"); families.push("Ebrima");
families.push("Segoe UI"); families.push("Segoe UI");
families.push("Cambria Math"); families.push("Cambria Math");
} },
UnicodeBlock::SylotiNagri | UnicodeBlock::SylotiNagri |
UnicodeBlock::CommonIndicNumberForms | UnicodeBlock::CommonIndicNumberForms |
@ -282,27 +286,25 @@ pub fn fallback_font_families(codepoint: Option<char>) -> Vec<&'static str> {
UnicodeBlock::DevanagariExtended => { UnicodeBlock::DevanagariExtended => {
families.push("Microsoft PhagsPa"); families.push("Microsoft PhagsPa");
families.push("Nirmala UI"); families.push("Nirmala UI");
} },
UnicodeBlock::KayahLi | UnicodeBlock::KayahLi | UnicodeBlock::Rejang | UnicodeBlock::Javanese => {
UnicodeBlock::Rejang |
UnicodeBlock::Javanese => {
families.push("Malgun Gothic"); families.push("Malgun Gothic");
families.push("Javanese Text"); families.push("Javanese Text");
families.push("Leelawadee UI"); families.push("Leelawadee UI");
} },
UnicodeBlock::AlphabeticPresentationForms => { UnicodeBlock::AlphabeticPresentationForms => {
families.push("Microsoft Uighur"); families.push("Microsoft Uighur");
families.push("Gabriola"); families.push("Gabriola");
families.push("Sylfaen"); families.push("Sylfaen");
} },
UnicodeBlock::ArabicPresentationFormsA | UnicodeBlock::ArabicPresentationFormsA |
UnicodeBlock::ArabicPresentationFormsB => { UnicodeBlock::ArabicPresentationFormsB => {
families.push("Traditional Arabic"); families.push("Traditional Arabic");
families.push("Arabic Typesetting"); families.push("Arabic Typesetting");
} },
UnicodeBlock::VariationSelectors | UnicodeBlock::VariationSelectors |
UnicodeBlock::VerticalForms | UnicodeBlock::VerticalForms |
@ -312,12 +314,12 @@ pub fn fallback_font_families(codepoint: Option<char>) -> Vec<&'static str> {
UnicodeBlock::HalfwidthandFullwidthForms | UnicodeBlock::HalfwidthandFullwidthForms |
UnicodeBlock::Specials => { UnicodeBlock::Specials => {
families.push("Microsoft JhengHei"); families.push("Microsoft JhengHei");
} },
_ => {} _ => {},
}
} }
} }
},
// https://en.wikipedia.org/wiki/Plane_(Unicode)#Supplementary_Multilingual_Plane // https://en.wikipedia.org/wiki/Plane_(Unicode)#Supplementary_Multilingual_Plane
1 => { 1 => {
@ -325,9 +327,9 @@ pub fn fallback_font_families(codepoint: Option<char>) -> Vec<&'static str> {
families.push("Ebrima"); families.push("Ebrima");
families.push("Nirmala UI"); families.push("Nirmala UI");
families.push("Cambria Math"); families.push("Cambria Math");
} },
_ => {} _ => {},
} }
} }


@ -11,7 +11,6 @@ use webrender_api::NativeFontHandle;
#[derive(Deserialize, Serialize)] #[derive(Deserialize, Serialize)]
pub struct FontTemplateData { pub struct FontTemplateData {
// If you add members here, review the Debug impl below // If you add members here, review the Debug impl below
pub bytes: Option<Vec<u8>>, pub bytes: Option<Vec<u8>>,
pub identifier: Atom, pub identifier: Atom,
} }
@ -21,18 +20,20 @@ impl fmt::Debug for FontTemplateData {
fmt.debug_struct("FontTemplateData") fmt.debug_struct("FontTemplateData")
.field( .field(
"bytes", "bytes",
&self.bytes &self
.bytes
.as_ref() .as_ref()
.map(|bytes| format!("[{} bytes]", bytes.len())) .map(|bytes| format!("[{} bytes]", bytes.len())),
) ).field("identifier", &self.identifier)
.field("identifier", &self.identifier)
.finish() .finish()
} }
} }
impl FontTemplateData { impl FontTemplateData {
pub fn new(identifier: Atom, pub fn new(
font_data: Option<Vec<u8>>) -> Result<FontTemplateData, io::Error> { identifier: Atom,
font_data: Option<Vec<u8>>,
) -> Result<FontTemplateData, io::Error> {
Ok(FontTemplateData { Ok(FontTemplateData {
bytes: font_data, bytes: font_data,
identifier: identifier, identifier: identifier,


@ -58,26 +58,24 @@ impl TestFontSource {
} }
fn add_face(family: &mut FontTemplates, name: &str, identifier: Option<&str>) { fn add_face(family: &mut FontTemplates, name: &str, identifier: Option<&str>) {
let mut path: PathBuf = [ let mut path: PathBuf = [env!("CARGO_MANIFEST_DIR"), "tests", "support", "CSSTest"]
env!("CARGO_MANIFEST_DIR"), .iter()
"tests", .collect();
"support",
"CSSTest",
].iter().collect();
path.push(format!("{}.ttf", name)); path.push(format!("{}.ttf", name));
let file = File::open(path).unwrap(); let file = File::open(path).unwrap();
let identifier = Atom::from(identifier.unwrap_or(name)); let identifier = Atom::from(identifier.unwrap_or(name));
family.add_template( family.add_template(identifier, Some(file.bytes().map(|b| b.unwrap()).collect()))
identifier,
Some(file.bytes().map(|b| b.unwrap()).collect())
)
} }
} }
impl FontSource for TestFontSource { impl FontSource for TestFontSource {
fn get_font_instance(&mut self, _key: webrender_api::FontKey, _size: Au) -> webrender_api::FontInstanceKey { fn get_font_instance(
&mut self,
_key: webrender_api::FontKey,
_size: Au,
) -> webrender_api::FontInstanceKey {
webrender_api::FontInstanceKey(webrender_api::IdNamespace(0), 0) webrender_api::FontInstanceKey(webrender_api::IdNamespace(0), 0)
} }
@ -92,11 +90,9 @@ impl FontSource for TestFontSource {
self.families self.families
.get_mut(family_descriptor.name()) .get_mut(family_descriptor.name())
.and_then(|family| family.find_font_for_style(&template_descriptor, handle)) .and_then(|family| family.find_font_for_style(&template_descriptor, handle))
.map(|template| { .map(|template| FontTemplateInfo {
FontTemplateInfo {
font_template: template, font_template: template,
font_key: webrender_api::FontKey(webrender_api::IdNamespace(0), 0), font_key: webrender_api::FontKey(webrender_api::IdNamespace(0), 0),
}
}) })
} }
} }
@ -116,12 +112,14 @@ fn style() -> FontStyleStruct {
} }
fn font_family(names: Vec<&str>) -> FontFamily { fn font_family(names: Vec<&str>) -> FontFamily {
let names: Vec<SingleFontFamily> = names.into_iter().map(|name| let names: Vec<SingleFontFamily> = names
.into_iter()
.map(|name| {
SingleFontFamily::FamilyName(FamilyName { SingleFontFamily::FamilyName(FamilyName {
name: Atom::from(name), name: Atom::from(name),
syntax: FamilyNameSyntax::Quoted, syntax: FamilyNameSyntax::Quoted,
}) })
).collect(); }).collect();
FontFamily(FontFamilyList::new(names.into_boxed_slice())) FontFamily(FontFamilyList::new(names.into_boxed_slice()))
} }
@ -156,19 +154,36 @@ fn test_font_group_find_by_codepoint() {
let mut context = FontContext::new(source); let mut context = FontContext::new(source);
let mut style = style(); let mut style = style();
style.set_font_family(font_family(vec!("CSSTest ASCII", "CSSTest Basic"))); style.set_font_family(font_family(vec!["CSSTest ASCII", "CSSTest Basic"]));
let group = context.font_group(Arc::new(style)); let group = context.font_group(Arc::new(style));
let font = group.borrow_mut().find_by_codepoint(&mut context, 'a').unwrap(); let font = group
.borrow_mut()
.find_by_codepoint(&mut context, 'a')
.unwrap();
assert_eq!(&*font.borrow().identifier(), "csstest-ascii"); assert_eq!(&*font.borrow().identifier(), "csstest-ascii");
assert_eq!(count.get(), 1, "only the first font in the list should have been loaded"); assert_eq!(
count.get(),
1,
"only the first font in the list should have been loaded"
);
let font = group.borrow_mut().find_by_codepoint(&mut context, 'a').unwrap(); let font = group
.borrow_mut()
.find_by_codepoint(&mut context, 'a')
.unwrap();
assert_eq!(&*font.borrow().identifier(), "csstest-ascii"); assert_eq!(&*font.borrow().identifier(), "csstest-ascii");
assert_eq!(count.get(), 1, "we shouldn't load the same font a second time"); assert_eq!(
count.get(),
1,
"we shouldn't load the same font a second time"
);
let font = group.borrow_mut().find_by_codepoint(&mut context, 'á').unwrap(); let font = group
.borrow_mut()
.find_by_codepoint(&mut context, 'á')
.unwrap();
assert_eq!(&*font.borrow().identifier(), "csstest-basic-regular"); assert_eq!(&*font.borrow().identifier(), "csstest-basic-regular");
assert_eq!(count.get(), 2, "both fonts should now have been loaded"); assert_eq!(count.get(), 2, "both fonts should now have been loaded");
} }
@ -179,19 +194,27 @@ fn test_font_fallback() {
let mut context = FontContext::new(source); let mut context = FontContext::new(source);
let mut style = style(); let mut style = style();
style.set_font_family(font_family(vec!("CSSTest ASCII"))); style.set_font_family(font_family(vec!["CSSTest ASCII"]));
let group = context.font_group(Arc::new(style)); let group = context.font_group(Arc::new(style));
let font = group.borrow_mut().find_by_codepoint(&mut context, 'a').unwrap(); let font = group
.borrow_mut()
.find_by_codepoint(&mut context, 'a')
.unwrap();
assert_eq!( assert_eq!(
&*font.borrow().identifier(), "csstest-ascii", &*font.borrow().identifier(),
"csstest-ascii",
"a family in the group should be used if there is a matching glyph" "a family in the group should be used if there is a matching glyph"
); );
let font = group.borrow_mut().find_by_codepoint(&mut context, 'á').unwrap(); let font = group
.borrow_mut()
.find_by_codepoint(&mut context, 'á')
.unwrap();
assert_eq!( assert_eq!(
&*font.borrow().identifier(), "fallback", &*font.borrow().identifier(),
"fallback",
"a fallback font should be used if there is no matching glyph in the group" "a fallback font should be used if there is no matching glyph in the group"
); );
} }
@ -212,10 +235,8 @@ fn test_font_template_is_cached() {
pt_size: Au(10), pt_size: Au(10),
}; };
let family_descriptor = FontFamilyDescriptor::new( let family_descriptor =
FontFamilyName::from("CSSTest Basic"), FontFamilyDescriptor::new(FontFamilyName::from("CSSTest Basic"), FontSearchScope::Any);
FontSearchScope::Any,
);
let font1 = context.font(&font_descriptor, &family_descriptor).unwrap(); let font1 = context.font(&font_descriptor, &family_descriptor).unwrap();
@ -228,5 +249,9 @@ fn test_font_template_is_cached() {
"the same font should not have been returned" "the same font should not have been returned"
); );
assert_eq!(count.get(), 1, "we should only have fetched the template data from the cache thread once"); assert_eq!(
count.get(),
1,
"we should only have fetched the template data from the cache thread once"
);
} }


@ -2,9 +2,12 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this * License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#[cfg(not(target_os = "macos"))] extern crate gfx; #[cfg(not(target_os = "macos"))]
#[cfg(not(target_os = "macos"))] extern crate servo_atoms; extern crate gfx;
#[cfg(not(target_os = "macos"))] extern crate style; #[cfg(not(target_os = "macos"))]
extern crate servo_atoms;
#[cfg(not(target_os = "macos"))]
extern crate style;
// Test doesn't yet run on Mac, see https://github.com/servo/servo/pull/19928 for explanation. // Test doesn't yet run on Mac, see https://github.com/servo/servo/pull/19928 for explanation.
#[cfg(not(target_os = "macos"))] #[cfg(not(target_os = "macos"))]
@ -28,14 +31,16 @@ fn test_font_template_descriptor() {
"support", "support",
"dejavu-fonts-ttf-2.37", "dejavu-fonts-ttf-2.37",
"ttf", "ttf",
].iter().collect(); ]
.iter()
.collect();
path.push(format!("{}.ttf", filename)); path.push(format!("{}.ttf", filename));
let file = File::open(path).unwrap(); let file = File::open(path).unwrap();
let mut template = FontTemplate::new( let mut template = FontTemplate::new(
Atom::from(filename), Atom::from(filename),
Some(file.bytes().map(|b| b.unwrap()).collect()) Some(file.bytes().map(|b| b.unwrap()).collect()),
).unwrap(); ).unwrap();
let context = FontContextHandle::new(); let context = FontContextHandle::new();
@ -43,27 +48,39 @@ fn test_font_template_descriptor() {
template.descriptor(&context).unwrap() template.descriptor(&context).unwrap()
} }
assert_eq!(descriptor("DejaVuSans"), FontTemplateDescriptor { assert_eq!(
descriptor("DejaVuSans"),
FontTemplateDescriptor {
weight: FontWeight::normal(), weight: FontWeight::normal(),
stretch: FontStretch::hundred(), stretch: FontStretch::hundred(),
style: FontStyle::Normal, style: FontStyle::Normal,
}); }
);
assert_eq!(descriptor("DejaVuSans-Bold"), FontTemplateDescriptor { assert_eq!(
descriptor("DejaVuSans-Bold"),
FontTemplateDescriptor {
weight: FontWeight::bold(), weight: FontWeight::bold(),
stretch: FontStretch::hundred(), stretch: FontStretch::hundred(),
style: FontStyle::Normal, style: FontStyle::Normal,
}); }
);
assert_eq!(descriptor("DejaVuSans-Oblique"), FontTemplateDescriptor { assert_eq!(
descriptor("DejaVuSans-Oblique"),
FontTemplateDescriptor {
weight: FontWeight::normal(), weight: FontWeight::normal(),
stretch: FontStretch::hundred(), stretch: FontStretch::hundred(),
style: FontStyle::Italic, style: FontStyle::Italic,
}); }
);
assert_eq!(descriptor("DejaVuSansCondensed-BoldOblique"), FontTemplateDescriptor { assert_eq!(
descriptor("DejaVuSansCondensed-BoldOblique"),
FontTemplateDescriptor {
weight: FontWeight::bold(), weight: FontWeight::bold(),
stretch: FontStretch(NonNegative(Percentage(0.875))), stretch: FontStretch(NonNegative(Percentage(0.875))),
style: FontStyle::Italic, style: FontStyle::Italic,
}); }
);
} }


@ -29,26 +29,13 @@ fn test_transform_compress_none() {
#[test] #[test]
fn test_transform_discard_newline() { fn test_transform_discard_newline() {
let test_strs = [ let test_strs = [
(" foo bar", (" foo bar", " foo bar"),
" foo bar"), ("foo bar ", "foo bar "),
("foo\n bar", "foo bar"),
("foo bar ", ("foo \nbar", "foo bar"),
"foo bar "), (" foo bar \nbaz", " foo bar baz"),
("foo bar baz", "foo bar baz"),
("foo\n bar", ("foobarbaz\n\n", "foobarbaz"),
"foo bar"),
("foo \nbar",
"foo bar"),
(" foo bar \nbaz",
" foo bar baz"),
("foo bar baz",
"foo bar baz"),
("foobarbaz\n\n",
"foobarbaz"),
]; ];
let mode = CompressionMode::DiscardNewline; let mode = CompressionMode::DiscardNewline;
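Every reshuffled pair in the table above is consistent with DiscardNewline simply stripping '\n' characters. A tiny standalone sketch of that behaviour follows; the real transform lives in gfx's text utilities and is not shown in this diff, so both the reading of the mode and the helper name discard_newline are assumptions for illustration.

fn discard_newline(s: &str) -> String {
    s.chars().filter(|&c| c != '\n').collect()
}

fn main() {
    assert_eq!(discard_newline(" foo bar \nbaz"), " foo bar baz");
    assert_eq!(discard_newline("foobarbaz\n\n"), "foobarbaz");
}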
@ -62,26 +49,13 @@ fn test_transform_discard_newline() {
#[test] #[test]
fn test_transform_compress_whitespace() { fn test_transform_compress_whitespace() {
let test_strs = [ let test_strs = [
(" foo bar", (" foo bar", "foo bar"),
"foo bar"), ("foo bar ", "foo bar "),
("foo\n bar", "foo\n bar"),
("foo bar ", ("foo \nbar", "foo \nbar"),
"foo bar "), (" foo bar \nbaz", "foo bar \nbaz"),
("foo bar baz", "foo bar baz"),
("foo\n bar", ("foobarbaz\n\n", "foobarbaz\n\n"),
"foo\n bar"),
("foo \nbar",
"foo \nbar"),
(" foo bar \nbaz",
"foo bar \nbaz"),
("foo bar baz",
"foo bar baz"),
("foobarbaz\n\n",
"foobarbaz\n\n"),
]; ];
let mode = CompressionMode::CompressWhitespace; let mode = CompressionMode::CompressWhitespace;
@ -95,26 +69,13 @@ fn test_transform_compress_whitespace() {
#[test] #[test]
fn test_transform_compress_whitespace_newline() { fn test_transform_compress_whitespace_newline() {
let test_strs = vec![ let test_strs = vec![
(" foo bar", (" foo bar", "foo bar"),
"foo bar"), ("foo bar ", "foo bar "),
("foo\n bar", "foo bar"),
("foo bar ", ("foo \nbar", "foo bar"),
"foo bar "), (" foo bar \nbaz", "foo bar baz"),
("foo bar baz", "foo bar baz"),
("foo\n bar", ("foobarbaz\n\n", "foobarbaz "),
"foo bar"),
("foo \nbar",
"foo bar"),
(" foo bar \nbaz",
"foo bar baz"),
("foo bar baz",
"foo bar baz"),
("foobarbaz\n\n",
"foobarbaz "),
]; ];
let mode = CompressionMode::CompressWhitespaceNewline; let mode = CompressionMode::CompressWhitespaceNewline;
@ -128,29 +89,14 @@ fn test_transform_compress_whitespace_newline() {
#[test] #[test]
fn test_transform_compress_whitespace_newline_no_incoming() { fn test_transform_compress_whitespace_newline_no_incoming() {
let test_strs = [ let test_strs = [
(" foo bar", (" foo bar", " foo bar"),
" foo bar"), ("\nfoo bar", " foo bar"),
("foo bar ", "foo bar "),
("\nfoo bar", ("foo\n bar", "foo bar"),
" foo bar"), ("foo \nbar", "foo bar"),
(" foo bar \nbaz", " foo bar baz"),
("foo bar ", ("foo bar baz", "foo bar baz"),
"foo bar "), ("foobarbaz\n\n", "foobarbaz "),
("foo\n bar",
"foo bar"),
("foo \nbar",
"foo bar"),
(" foo bar \nbaz",
" foo bar baz"),
("foo bar baz",
"foo bar baz"),
("foobarbaz\n\n",
"foobarbaz "),
]; ];
let mode = CompressionMode::CompressWhitespaceNewline; let mode = CompressionMode::CompressWhitespaceNewline;


@ -4,7 +4,10 @@
use app_units::Au; use app_units::Au;
use euclid::Point2D; use euclid::Point2D;
#[cfg(all(feature = "unstable", any(target_feature = "sse2", target_feature = "neon")))] #[cfg(all(
feature = "unstable",
any(target_feature = "sse2", target_feature = "neon")
))]
use packed_simd::u32x4; use packed_simd::u32x4;
use range::{self, EachIndex, Range, RangeIndex}; use range::{self, EachIndex, Range, RangeIndex};
use std::{fmt, mem, u16}; use std::{fmt, mem, u16};
@ -28,9 +31,7 @@ pub struct GlyphEntry {
impl GlyphEntry { impl GlyphEntry {
fn new(value: u32) -> GlyphEntry { fn new(value: u32) -> GlyphEntry {
GlyphEntry { GlyphEntry { value: value }
value: value,
}
} }
fn initial() -> GlyphEntry { fn initial() -> GlyphEntry {
@ -54,11 +55,11 @@ impl GlyphEntry {
fn complex(starts_cluster: bool, starts_ligature: bool, glyph_count: usize) -> GlyphEntry { fn complex(starts_cluster: bool, starts_ligature: bool, glyph_count: usize) -> GlyphEntry {
assert!(glyph_count <= u16::MAX as usize); assert!(glyph_count <= u16::MAX as usize);
debug!("creating complex glyph entry: starts_cluster={}, starts_ligature={}, \ debug!(
"creating complex glyph entry: starts_cluster={}, starts_ligature={}, \
glyph_count={}", glyph_count={}",
starts_cluster, starts_cluster, starts_ligature, glyph_count
starts_ligature, );
glyph_count);
GlyphEntry::new(glyph_count as u32) GlyphEntry::new(glyph_count as u32)
} }
@ -205,8 +206,8 @@ struct DetailedGlyphStore {
impl<'a> DetailedGlyphStore { impl<'a> DetailedGlyphStore {
fn new() -> DetailedGlyphStore { fn new() -> DetailedGlyphStore {
DetailedGlyphStore { DetailedGlyphStore {
detail_buffer: vec!(), // TODO: default size? detail_buffer: vec![], // TODO: default size?
detail_lookup: vec!(), detail_lookup: vec![],
lookup_is_sorted: false, lookup_is_sorted: false,
} }
} }
@ -217,7 +218,10 @@ impl<'a> DetailedGlyphStore {
detail_offset: self.detail_buffer.len(), detail_offset: self.detail_buffer.len(),
}; };
debug!("Adding entry[off={:?}] for detailed glyphs: {:?}", entry_offset, glyphs); debug!(
"Adding entry[off={:?}] for detailed glyphs: {:?}",
entry_offset, glyphs
);
/* TODO: don't actually assert this until asserts are compiled /* TODO: don't actually assert this until asserts are compiled
in/out based on severity, debug/release, etc. This assertion in/out based on severity, debug/release, etc. This assertion
@ -235,9 +239,15 @@ impl<'a> DetailedGlyphStore {
self.lookup_is_sorted = false; self.lookup_is_sorted = false;
} }
fn detailed_glyphs_for_entry(&'a self, entry_offset: ByteIndex, count: u16) fn detailed_glyphs_for_entry(
-> &'a [DetailedGlyph] { &'a self,
debug!("Requesting detailed glyphs[n={}] for entry[off={:?}]", count, entry_offset); entry_offset: ByteIndex,
count: u16,
) -> &'a [DetailedGlyph] {
debug!(
"Requesting detailed glyphs[n={}] for entry[off={:?}]",
count, entry_offset
);
// FIXME: Is this right? --pcwalton // FIXME: Is this right? --pcwalton
// TODO: should fix this somewhere else // TODO: should fix this somewhere else
@ -253,18 +263,21 @@ impl<'a> DetailedGlyphStore {
detail_offset: 0, // unused detail_offset: 0, // unused
}; };
let i = self.detail_lookup.binary_search(&key) let i = self
.detail_lookup
.binary_search(&key)
.expect("Invalid index not found in detailed glyph lookup table!"); .expect("Invalid index not found in detailed glyph lookup table!");
let main_detail_offset = self.detail_lookup[i].detail_offset; let main_detail_offset = self.detail_lookup[i].detail_offset;
assert!(main_detail_offset + (count as usize) <= self.detail_buffer.len()); assert!(main_detail_offset + (count as usize) <= self.detail_buffer.len());
// return a slice into the buffer // return a slice into the buffer
&self.detail_buffer[main_detail_offset .. main_detail_offset + count as usize] &self.detail_buffer[main_detail_offset..main_detail_offset + count as usize]
} }
fn detailed_glyph_with_index(&'a self, fn detailed_glyph_with_index(
&'a self,
entry_offset: ByteIndex, entry_offset: ByteIndex,
detail_offset: u16) detail_offset: u16,
-> &'a DetailedGlyph { ) -> &'a DetailedGlyph {
assert!((detail_offset as usize) <= self.detail_buffer.len()); assert!((detail_offset as usize) <= self.detail_buffer.len());
assert!(self.lookup_is_sorted); assert!(self.lookup_is_sorted);
@ -273,7 +286,9 @@ impl<'a> DetailedGlyphStore {
detail_offset: 0, // unused detail_offset: 0, // unused
}; };
let i = self.detail_lookup.binary_search(&key) let i = self
.detail_lookup
.binary_search(&key)
.expect("Invalid index not found in detailed glyph lookup table!"); .expect("Invalid index not found in detailed glyph lookup table!");
let main_detail_offset = self.detail_lookup[i].detail_offset; let main_detail_offset = self.detail_lookup[i].detail_offset;
assert!(main_detail_offset + (detail_offset as usize) < self.detail_buffer.len()); assert!(main_detail_offset + (detail_offset as usize) < self.detail_buffer.len());
@ -290,7 +305,7 @@ impl<'a> DetailedGlyphStore {
// immutable locations thus don't play well with freezing. // immutable locations thus don't play well with freezing.
// Thar be dragons here. You have been warned. (Tips accepted.) // Thar be dragons here. You have been warned. (Tips accepted.)
let mut unsorted_records: Vec<DetailedGlyphRecord> = vec!(); let mut unsorted_records: Vec<DetailedGlyphRecord> = vec![];
mem::swap(&mut self.detail_lookup, &mut unsorted_records); mem::swap(&mut self.detail_lookup, &mut unsorted_records);
let mut mut_records: Vec<DetailedGlyphRecord> = unsorted_records; let mut mut_records: Vec<DetailedGlyphRecord> = unsorted_records;
mut_records.sort_by(|a, b| { mut_records.sort_by(|a, b| {
@ -320,12 +335,13 @@ pub struct GlyphData {
impl GlyphData { impl GlyphData {
/// Creates a new entry for one glyph. /// Creates a new entry for one glyph.
pub fn new(id: GlyphId, pub fn new(
id: GlyphId,
advance: Au, advance: Au,
offset: Option<Point2D<Au>>, offset: Option<Point2D<Au>>,
cluster_start: bool, cluster_start: bool,
ligature_start: bool) ligature_start: bool,
-> GlyphData { ) -> GlyphData {
GlyphData { GlyphData {
id: id, id: id,
advance: advance, advance: advance,
@ -351,8 +367,11 @@ impl<'a> GlyphInfo<'a> {
match self { match self {
GlyphInfo::Simple(store, entry_i) => store.entry_buffer[entry_i.to_usize()].id(), GlyphInfo::Simple(store, entry_i) => store.entry_buffer[entry_i.to_usize()].id(),
GlyphInfo::Detail(store, entry_i, detail_j) => { GlyphInfo::Detail(store, entry_i, detail_j) => {
store.detail_store.detailed_glyph_with_index(entry_i, detail_j).id store
} .detail_store
.detailed_glyph_with_index(entry_i, detail_j)
.id
},
} }
} }
@ -362,8 +381,11 @@ impl<'a> GlyphInfo<'a> {
match self { match self {
GlyphInfo::Simple(store, entry_i) => store.entry_buffer[entry_i.to_usize()].advance(), GlyphInfo::Simple(store, entry_i) => store.entry_buffer[entry_i.to_usize()].advance(),
GlyphInfo::Detail(store, entry_i, detail_j) => { GlyphInfo::Detail(store, entry_i, detail_j) => {
store.detail_store.detailed_glyph_with_index(entry_i, detail_j).advance store
} .detail_store
.detailed_glyph_with_index(entry_i, detail_j)
.advance
},
} }
} }
@ -371,9 +393,12 @@ impl<'a> GlyphInfo<'a> {
pub fn offset(self) -> Option<Point2D<Au>> { pub fn offset(self) -> Option<Point2D<Au>> {
match self { match self {
GlyphInfo::Simple(_, _) => None, GlyphInfo::Simple(_, _) => None,
GlyphInfo::Detail(store, entry_i, detail_j) => { GlyphInfo::Detail(store, entry_i, detail_j) => Some(
Some(store.detail_store.detailed_glyph_with_index(entry_i, detail_j).offset) store
} .detail_store
.detailed_glyph_with_index(entry_i, detail_j)
.offset,
),
} }
} }
@ -477,10 +502,7 @@ impl<'a> GlyphStore {
} }
/// Adds a single glyph. /// Adds a single glyph.
pub fn add_glyph_for_byte_index(&mut self, pub fn add_glyph_for_byte_index(&mut self, i: ByteIndex, character: char, data: &GlyphData) {
i: ByteIndex,
character: char,
data: &GlyphData) {
let glyph_is_compressible = is_simple_glyph_id(data.id) && let glyph_is_compressible = is_simple_glyph_id(data.id) &&
is_simple_advance(data.advance) && is_simple_advance(data.advance) &&
data.offset == Point2D::zero() && data.offset == Point2D::zero() &&
@ -512,20 +534,29 @@ impl<'a> GlyphStore {
let glyph_count = data_for_glyphs.len(); let glyph_count = data_for_glyphs.len();
let first_glyph_data = data_for_glyphs[0]; let first_glyph_data = data_for_glyphs[0];
let glyphs_vec: Vec<DetailedGlyph> = (0..glyph_count).map(|i| { let glyphs_vec: Vec<DetailedGlyph> = (0..glyph_count)
DetailedGlyph::new(data_for_glyphs[i].id, .map(|i| {
DetailedGlyph::new(
data_for_glyphs[i].id,
data_for_glyphs[i].advance, data_for_glyphs[i].advance,
data_for_glyphs[i].offset) data_for_glyphs[i].offset,
)
}).collect(); }).collect();
self.has_detailed_glyphs = true; self.has_detailed_glyphs = true;
self.detail_store.add_detailed_glyphs_for_entry(i, &glyphs_vec); self.detail_store
.add_detailed_glyphs_for_entry(i, &glyphs_vec);
let entry = GlyphEntry::complex(first_glyph_data.cluster_start, let entry = GlyphEntry::complex(
first_glyph_data.cluster_start,
first_glyph_data.ligature_start, first_glyph_data.ligature_start,
glyph_count); glyph_count,
);
debug!("Adding multiple glyphs[idx={:?}, count={}]: {:?}", i, glyph_count, entry); debug!(
"Adding multiple glyphs[idx={:?}, count={}]: {:?}",
i, glyph_count, entry
);
self.entry_buffer[i.to_usize()] = entry; self.entry_buffer[i.to_usize()] = entry;
} }
@ -541,7 +572,11 @@ impl<'a> GlyphStore {
GlyphIterator { GlyphIterator {
store: self, store: self,
byte_index: if self.is_rtl { range.end() } else { range.begin() - ByteIndex(1) }, byte_index: if self.is_rtl {
range.end()
} else {
range.begin() - ByteIndex(1)
},
byte_range: *range, byte_range: *range,
glyph_range: None, glyph_range: None,
} }
@ -551,7 +586,12 @@ impl<'a> GlyphStore {
// and advance of the glyph in the range at the given advance, if reached. Otherwise, returns the // and advance of the glyph in the range at the given advance, if reached. Otherwise, returns the
// the number of glyphs and the advance for the given range. // the number of glyphs and the advance for the given range.
#[inline] #[inline]
pub fn range_index_of_advance(&self, range: &Range<ByteIndex>, advance: Au, extra_word_spacing: Au) -> (usize, Au) { pub fn range_index_of_advance(
&self,
range: &Range<ByteIndex>,
advance: Au,
extra_word_spacing: Au,
) -> (usize, Au) {
let mut index = 0; let mut index = 0;
let mut current_advance = Au(0); let mut current_advance = Au(0);
for glyph in self.iter_glyphs_for_byte_range(range) { for glyph in self.iter_glyphs_for_byte_range(range) {
@ -580,7 +620,11 @@ impl<'a> GlyphStore {
} }
#[inline] #[inline]
pub fn advance_for_byte_range_slow_path(&self, range: &Range<ByteIndex>, extra_word_spacing: Au) -> Au { pub fn advance_for_byte_range_slow_path(
&self,
range: &Range<ByteIndex>,
extra_word_spacing: Au,
) -> Au {
self.iter_glyphs_for_byte_range(range) self.iter_glyphs_for_byte_range(range)
.fold(Au(0), |advance, glyph| { .fold(Au(0), |advance, glyph| {
if glyph.char_is_space() { if glyph.char_is_space() {
@ -594,7 +638,11 @@ impl<'a> GlyphStore {
#[inline] #[inline]
#[cfg(feature = "unstable")] #[cfg(feature = "unstable")]
#[cfg(any(target_feature = "sse2", target_feature = "neon"))] #[cfg(any(target_feature = "sse2", target_feature = "neon"))]
fn advance_for_byte_range_simple_glyphs(&self, range: &Range<ByteIndex>, extra_word_spacing: Au) -> Au { fn advance_for_byte_range_simple_glyphs(
&self,
range: &Range<ByteIndex>,
extra_word_spacing: Au,
) -> Au {
let advance_mask = u32x4::splat(GLYPH_ADVANCE_MASK); let advance_mask = u32x4::splat(GLYPH_ADVANCE_MASK);
let space_flag_mask = u32x4::splat(FLAG_CHAR_IS_SPACE); let space_flag_mask = u32x4::splat(FLAG_CHAR_IS_SPACE);
let mut simd_advance = u32x4::splat(0); let mut simd_advance = u32x4::splat(0);
@ -614,13 +662,11 @@ impl<'a> GlyphStore {
simd_spaces = simd_spaces + spaces; simd_spaces = simd_spaces + spaces;
} }
let advance = let advance = (simd_advance.extract(0) +
(simd_advance.extract(0) +
simd_advance.extract(1) + simd_advance.extract(1) +
simd_advance.extract(2) + simd_advance.extract(2) +
simd_advance.extract(3)) as i32; simd_advance.extract(3)) as i32;
let spaces = let spaces = (simd_spaces.extract(0) +
(simd_spaces.extract(0) +
simd_spaces.extract(1) + simd_spaces.extract(1) +
simd_spaces.extract(2) + simd_spaces.extract(2) +
simd_spaces.extract(3)) as i32; simd_spaces.extract(3)) as i32;
@ -637,8 +683,15 @@ impl<'a> GlyphStore {
/// When SIMD isn't available, fallback to the slow path. /// When SIMD isn't available, fallback to the slow path.
#[inline] #[inline]
#[cfg(not(all(feature = "unstable", any(target_feature = "sse2", target_feature = "neon"))))] #[cfg(not(all(
fn advance_for_byte_range_simple_glyphs(&self, range: &Range<ByteIndex>, extra_word_spacing: Au) -> Au { feature = "unstable",
any(target_feature = "sse2", target_feature = "neon")
)))]
fn advance_for_byte_range_simple_glyphs(
&self,
range: &Range<ByteIndex>,
extra_word_spacing: Au,
) -> Au {
self.advance_for_byte_range_slow_path(range, extra_word_spacing) self.advance_for_byte_range_slow_path(range, extra_word_spacing)
} }
@ -676,23 +729,27 @@ impl fmt::Debug for GlyphStore {
let mut detailed_buffer = self.detail_store.detail_buffer.iter(); let mut detailed_buffer = self.detail_store.detail_buffer.iter();
for entry in self.entry_buffer.iter() { for entry in self.entry_buffer.iter() {
if entry.is_simple() { if entry.is_simple() {
write!(formatter, write!(
formatter,
" simple id={:?} advance={:?}\n", " simple id={:?} advance={:?}\n",
entry.id(), entry.id(),
entry.advance())?; entry.advance()
continue )?;
continue;
} }
if entry.is_initial() { if entry.is_initial() {
continue continue;
} }
write!(formatter, " complex...")?; write!(formatter, " complex...")?;
if detailed_buffer.next().is_none() { if detailed_buffer.next().is_none() {
continue continue;
} }
write!(formatter, write!(
formatter,
" detailed id={:?} advance={:?}\n", " detailed id={:?} advance={:?}\n",
entry.id(), entry.id(),
entry.advance())?; entry.advance()
)?;
} }
Ok(()) Ok(())
} }
@ -712,21 +769,31 @@ impl<'a> GlyphIterator<'a> {
fn next_glyph_range(&mut self) -> Option<GlyphInfo<'a>> { fn next_glyph_range(&mut self) -> Option<GlyphInfo<'a>> {
match self.glyph_range.as_mut().unwrap().next() { match self.glyph_range.as_mut().unwrap().next() {
Some(j) => { Some(j) => {
Some(GlyphInfo::Detail(self.store, self.byte_index, j.get() as u16 /* ??? */)) Some(GlyphInfo::Detail(
} self.store,
self.byte_index,
j.get() as u16, /* ??? */
))
},
None => { None => {
// No more glyphs for current character. Try to get another. // No more glyphs for current character. Try to get another.
self.glyph_range = None; self.glyph_range = None;
self.next() self.next()
} },
} }
} }
// Slow path when there is a complex glyph. // Slow path when there is a complex glyph.
#[inline(never)] #[inline(never)]
fn next_complex_glyph(&mut self, entry: &GlyphEntry, i: ByteIndex) -> Option<GlyphInfo<'a>> { fn next_complex_glyph(&mut self, entry: &GlyphEntry, i: ByteIndex) -> Option<GlyphInfo<'a>> {
let glyphs = self.store.detail_store.detailed_glyphs_for_entry(i, entry.glyph_count()); let glyphs = self
self.glyph_range = Some(range::each_index(ByteIndex(0), ByteIndex(glyphs.len() as isize))); .store
.detail_store
.detailed_glyphs_for_entry(i, entry.glyph_count());
self.glyph_range = Some(range::each_index(
ByteIndex(0),
ByteIndex(glyphs.len() as isize),
));
self.next() self.next()
} }
} }
@ -744,7 +811,7 @@ impl<'a> Iterator for GlyphIterator<'a> {
fn next(&mut self) -> Option<GlyphInfo<'a>> { fn next(&mut self) -> Option<GlyphInfo<'a>> {
// Would use 'match' here but it borrows contents in a way that interferes with mutation. // Would use 'match' here but it borrows contents in a way that interferes with mutation.
if self.glyph_range.is_some() { if self.glyph_range.is_some() {
return self.next_glyph_range() return self.next_glyph_range();
} }
// No glyph range. Look at next byte. // No glyph range. Look at next byte.
@ -755,7 +822,7 @@ impl<'a> Iterator for GlyphIterator<'a> {
}; };
let i = self.byte_index; let i = self.byte_index;
if !self.byte_range.contains(i) { if !self.byte_range.contains(i) {
return None return None;
} }
debug_assert!(i < self.store.len()); debug_assert!(i < self.store.len());
let entry = self.store.entry_buffer[i.to_usize()]; let entry = self.store.entry_buffer[i.to_usize()];


@ -9,4 +9,3 @@ pub mod glyph;
pub mod shaping; pub mod shaping;
pub mod text_run; pub mod text_run;
pub mod util; pub mod util;


@ -147,10 +147,11 @@ impl Drop for Shaper {
impl Shaper { impl Shaper {
pub fn new(font: *const Font) -> Shaper { pub fn new(font: *const Font) -> Shaper {
unsafe { unsafe {
let hb_face: *mut hb_face_t = let hb_face: *mut hb_face_t = hb_face_create_for_tables(
hb_face_create_for_tables(Some(font_table_func), Some(font_table_func),
font as *const c_void as *mut c_void, font as *const c_void as *mut c_void,
None); None,
);
let hb_font: *mut hb_font_t = hb_font_create(hb_face); let hb_font: *mut hb_font_t = hb_font_create(hb_face);
// Set points-per-em. if zero, performs no hinting in that direction. // Set points-per-em. if zero, performs no hinting in that direction.
@ -158,12 +159,19 @@ impl Shaper {
hb_font_set_ppem(hb_font, pt_size as c_uint, pt_size as c_uint); hb_font_set_ppem(hb_font, pt_size as c_uint, pt_size as c_uint);
// Set scaling. Note that this takes 16.16 fixed point. // Set scaling. Note that this takes 16.16 fixed point.
hb_font_set_scale(hb_font, hb_font_set_scale(
hb_font,
Shaper::float_to_fixed(pt_size) as c_int, Shaper::float_to_fixed(pt_size) as c_int,
Shaper::float_to_fixed(pt_size) as c_int); Shaper::float_to_fixed(pt_size) as c_int,
);
// configure static function callbacks. // configure static function callbacks.
hb_font_set_funcs(hb_font, HB_FONT_FUNCS.0, font as *mut Font as *mut c_void, None); hb_font_set_funcs(
hb_font,
HB_FONT_FUNCS.0,
font as *mut Font as *mut c_void,
None,
);
Shaper { Shaper {
hb_face: hb_face, hb_face: hb_face,
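As the comment in the hunk above notes, hb_font_set_scale takes 16.16 fixed point, i.e. the float value scaled by 2^16. Shaper::float_to_fixed itself is not shown in this diff, so the sketch below only illustrates the encoding; the helper name to_fixed_16_16 and the truncating conversion are assumptions.

fn to_fixed_16_16(v: f64) -> i32 {
    // 16 integer bits, 16 fractional bits: multiply by 2^16 and truncate.
    (v * 65536.0) as i32
}

fn main() {
    assert_eq!(to_fixed_16_16(12.0), 12 << 16); // 786432
    assert_eq!(to_fixed_16_16(0.5), 1 << 15);   // 32768
}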
@ -188,22 +196,30 @@ impl ShaperMethods for Shaper {
fn shape_text(&self, text: &str, options: &ShapingOptions, glyphs: &mut GlyphStore) { fn shape_text(&self, text: &str, options: &ShapingOptions, glyphs: &mut GlyphStore) {
unsafe { unsafe {
let hb_buffer: *mut hb_buffer_t = hb_buffer_create(); let hb_buffer: *mut hb_buffer_t = hb_buffer_create();
hb_buffer_set_direction(hb_buffer, if options.flags.contains(ShapingFlags::RTL_FLAG) { hb_buffer_set_direction(
hb_buffer,
if options.flags.contains(ShapingFlags::RTL_FLAG) {
HB_DIRECTION_RTL HB_DIRECTION_RTL
} else { } else {
HB_DIRECTION_LTR HB_DIRECTION_LTR
}); },
);
hb_buffer_set_script(hb_buffer, options.script.to_hb_script()); hb_buffer_set_script(hb_buffer, options.script.to_hb_script());
hb_buffer_add_utf8(hb_buffer, hb_buffer_add_utf8(
hb_buffer,
text.as_ptr() as *const c_char, text.as_ptr() as *const c_char,
text.len() as c_int, text.len() as c_int,
0, 0,
text.len() as c_int); text.len() as c_int,
);
let mut features = Vec::new(); let mut features = Vec::new();
if options.flags.contains(ShapingFlags::IGNORE_LIGATURES_SHAPING_FLAG) { if options
.flags
.contains(ShapingFlags::IGNORE_LIGATURES_SHAPING_FLAG)
{
features.push(hb_feature_t { features.push(hb_feature_t {
tag: LIGA, tag: LIGA,
value: 0, value: 0,
@ -211,7 +227,10 @@ impl ShaperMethods for Shaper {
end: hb_buffer_get_length(hb_buffer), end: hb_buffer_get_length(hb_buffer),
}) })
} }
if options.flags.contains(ShapingFlags::DISABLE_KERNING_SHAPING_FLAG) { if options
.flags
.contains(ShapingFlags::DISABLE_KERNING_SHAPING_FLAG)
{
features.push(hb_feature_t { features.push(hb_feature_t {
tag: KERN, tag: KERN,
value: 0, value: 0,
@ -220,7 +239,12 @@ impl ShaperMethods for Shaper {
}) })
} }
hb_shape(self.hb_font, hb_buffer, features.as_mut_ptr(), features.len() as u32); hb_shape(
self.hb_font,
hb_buffer,
features.as_mut_ptr(),
features.len() as u32,
);
self.save_glyph_results(text, options, glyphs, hb_buffer); self.save_glyph_results(text, options, glyphs, hb_buffer);
hb_buffer_destroy(hb_buffer); hb_buffer_destroy(hb_buffer);
} }
@ -228,18 +252,21 @@ impl ShaperMethods for Shaper {
} }
impl Shaper { impl Shaper {
fn save_glyph_results(&self, fn save_glyph_results(
&self,
text: &str, text: &str,
options: &ShapingOptions, options: &ShapingOptions,
glyphs: &mut GlyphStore, glyphs: &mut GlyphStore,
buffer: *mut hb_buffer_t) { buffer: *mut hb_buffer_t,
) {
let glyph_data = ShapedGlyphData::new(buffer); let glyph_data = ShapedGlyphData::new(buffer);
let glyph_count = glyph_data.len(); let glyph_count = glyph_data.len();
let byte_max = text.len(); let byte_max = text.len();
debug!("Shaped text[byte count={}], got back {} glyph info records.", debug!(
byte_max, "Shaped text[byte count={}], got back {} glyph info records.",
glyph_count); byte_max, glyph_count
);
// make map of what chars have glyphs // make map of what chars have glyphs
let mut byte_to_glyph = vec![NO_GLYPH; byte_max]; let mut byte_to_glyph = vec![NO_GLYPH; byte_max];
@ -250,9 +277,10 @@ impl Shaper {
if loc < byte_max { if loc < byte_max {
byte_to_glyph[loc] = i as i32; byte_to_glyph[loc] = i as i32;
} else { } else {
debug!("ERROR: tried to set out of range byte_to_glyph: idx={}, glyph idx={}", debug!(
loc, "ERROR: tried to set out of range byte_to_glyph: idx={}, glyph idx={}",
i); loc, i
);
} }
debug!("{} -> {}", i, loc); debug!("{} -> {}", i, loc);
} }
@ -296,10 +324,14 @@ impl Shaper {
} }
// if there's just one glyph, then we don't need further checks. // if there's just one glyph, then we don't need further checks.
if glyph_span.len() == 1 { break; } if glyph_span.len() == 1 {
break;
}
// if no glyphs were found yet, extend the char byte range more. // if no glyphs were found yet, extend the char byte range more.
if glyph_span.len() == 0 { continue; } if glyph_span.len() == 0 {
continue;
}
// If byte_range now includes all the byte offsets found in glyph_span, then we // If byte_range now includes all the byte offsets found in glyph_span, then we
// have found a contiguous "cluster" and can stop extending it. // have found a contiguous "cluster" and can stop extending it.
@ -308,11 +340,11 @@ impl Shaper {
let loc = glyph_data.byte_offset_of_glyph(j) as usize; let loc = glyph_data.byte_offset_of_glyph(j) as usize;
if !(byte_range.start <= loc && loc < byte_range.end) { if !(byte_range.start <= loc && loc < byte_range.end) {
all_glyphs_are_within_cluster = false; all_glyphs_are_within_cluster = false;
break break;
} }
} }
if all_glyphs_are_within_cluster { if all_glyphs_are_within_cluster {
break break;
} }
// Otherwise, the bytes we have seen so far correspond to a non-contiguous set of // Otherwise, the bytes we have seen so far correspond to a non-contiguous set of
@ -348,33 +380,28 @@ impl Shaper {
const TAB_COLS: i32 = 8; const TAB_COLS: i32 = 8;
let (space_glyph_id, space_advance) = glyph_space_advance(self.font); let (space_glyph_id, space_advance) = glyph_space_advance(self.font);
let advance = Au::from_f64_px(space_advance) * TAB_COLS; let advance = Au::from_f64_px(space_advance) * TAB_COLS;
let data = GlyphData::new(space_glyph_id, let data =
advance, GlyphData::new(space_glyph_id, advance, Default::default(), true, true);
Default::default(),
true,
true);
glyphs.add_glyph_for_byte_index(byte_idx, character, &data); glyphs.add_glyph_for_byte_index(byte_idx, character, &data);
} else { } else {
let shape = glyph_data.entry_for_glyph(glyph_span.start, &mut y_pos); let shape = glyph_data.entry_for_glyph(glyph_span.start, &mut y_pos);
let advance = self.advance_for_shaped_glyph(shape.advance, character, options); let advance = self.advance_for_shaped_glyph(shape.advance, character, options);
let data = GlyphData::new(shape.codepoint, let data = GlyphData::new(shape.codepoint, advance, shape.offset, true, true);
advance,
shape.offset,
true,
true);
glyphs.add_glyph_for_byte_index(byte_idx, character, &data); glyphs.add_glyph_for_byte_index(byte_idx, character, &data);
} }
} else { } else {
// collect all glyphs to be assigned to the first character. // collect all glyphs to be assigned to the first character.
let mut datas = vec!(); let mut datas = vec![];
for glyph_i in glyph_span.clone() { for glyph_i in glyph_span.clone() {
let shape = glyph_data.entry_for_glyph(glyph_i, &mut y_pos); let shape = glyph_data.entry_for_glyph(glyph_i, &mut y_pos);
datas.push(GlyphData::new(shape.codepoint, datas.push(GlyphData::new(
shape.codepoint,
shape.advance, shape.advance,
shape.offset, shape.offset,
true, // treat as cluster start true, // treat as cluster start
glyph_i > glyph_span.start)); glyph_i > glyph_span.start,
));
// all but first are ligature continuations // all but first are ligature continuations
} }
// now add the detailed glyph entry. // now add the detailed glyph entry.
@ -390,8 +417,12 @@ impl Shaper {
glyphs.finalize_changes(); glyphs.finalize_changes();
} }
fn advance_for_shaped_glyph(&self, mut advance: Au, character: char, options: &ShapingOptions) fn advance_for_shaped_glyph(
-> Au { &self,
mut advance: Au,
character: char,
options: &ShapingOptions,
) -> Au {
if let Some(letter_spacing) = options.letter_spacing { if let Some(letter_spacing) = options.letter_spacing {
advance = advance + letter_spacing; advance = advance + letter_spacing;
}; };
@ -403,7 +434,8 @@ impl Shaper {
if character == ' ' || character == '\u{a0}' { if character == ' ' || character == '\u{a0}' {
// https://drafts.csswg.org/css-text-3/#word-spacing-property // https://drafts.csswg.org/css-text-3/#word-spacing-property
let (length, percent) = options.word_spacing; let (length, percent) = options.word_spacing;
advance = (advance + length) + Au::new((advance.0 as f32 * percent.into_inner()) as i32); advance =
(advance + length) + Au::new((advance.0 as f32 * percent.into_inner()) as i32);
} }
advance advance
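The word-spacing adjustment in the hunk above adds a fixed length plus a percentage of the glyph's own advance. A minimal sketch of that arithmetic in plain `i32` app units rather than Servo's `Au` type (the function name and example values here are illustrative, not part of the diff):

```rust
// spaced = (advance + length) + advance * percent, mirroring the line above.
fn spaced_advance(advance: i32, length: i32, percent: f32) -> i32 {
    (advance + length) + (advance as f32 * percent) as i32
}

fn main() {
    // e.g. advance = 1000, word-spacing <length> = 120, <percentage> = 50%
    assert_eq!(spaced_advance(1000, 120, 0.5), 1620);
}
```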
@ -420,20 +452,29 @@ lazy_static! {
let hb_funcs = hb_font_funcs_create(); let hb_funcs = hb_font_funcs_create();
hb_font_funcs_set_nominal_glyph_func(hb_funcs, Some(glyph_func), ptr::null_mut(), None); hb_font_funcs_set_nominal_glyph_func(hb_funcs, Some(glyph_func), ptr::null_mut(), None);
hb_font_funcs_set_glyph_h_advance_func( hb_font_funcs_set_glyph_h_advance_func(
hb_funcs, Some(glyph_h_advance_func), ptr::null_mut(), None); hb_funcs,
Some(glyph_h_advance_func),
ptr::null_mut(),
None,
);
hb_font_funcs_set_glyph_h_kerning_func( hb_font_funcs_set_glyph_h_kerning_func(
hb_funcs, Some(glyph_h_kerning_func), ptr::null_mut(), None); hb_funcs,
Some(glyph_h_kerning_func),
ptr::null_mut(),
None,
);
FontFuncs(hb_funcs) FontFuncs(hb_funcs)
}; };
} }
extern fn glyph_func(_: *mut hb_font_t, extern "C" fn glyph_func(
_: *mut hb_font_t,
font_data: *mut c_void, font_data: *mut c_void,
unicode: hb_codepoint_t, unicode: hb_codepoint_t,
glyph: *mut hb_codepoint_t, glyph: *mut hb_codepoint_t,
_: *mut c_void) _: *mut c_void,
-> hb_bool_t { ) -> hb_bool_t {
let font: *const Font = font_data as *const Font; let font: *const Font = font_data as *const Font;
assert!(!font.is_null()); assert!(!font.is_null());
@ -442,17 +483,18 @@ extern fn glyph_func(_: *mut hb_font_t,
Some(g) => { Some(g) => {
*glyph = g as hb_codepoint_t; *glyph = g as hb_codepoint_t;
true as hb_bool_t true as hb_bool_t
} },
None => false as hb_bool_t None => false as hb_bool_t,
} }
} }
} }
extern fn glyph_h_advance_func(_: *mut hb_font_t, extern "C" fn glyph_h_advance_func(
_: *mut hb_font_t,
font_data: *mut c_void, font_data: *mut c_void,
glyph: hb_codepoint_t, glyph: hb_codepoint_t,
_: *mut c_void) _: *mut c_void,
-> hb_position_t { ) -> hb_position_t {
let font: *mut Font = font_data as *mut Font; let font: *mut Font = font_data as *mut Font;
assert!(!font.is_null()); assert!(!font.is_null());
@ -468,19 +510,20 @@ fn glyph_space_advance(font: *const Font) -> (hb_codepoint_t, f64) {
match unsafe { (*font).glyph_index(space_unicode) } { match unsafe { (*font).glyph_index(space_unicode) } {
Some(g) => { Some(g) => {
space_glyph = g as hb_codepoint_t; space_glyph = g as hb_codepoint_t;
} },
None => panic!("No space info") None => panic!("No space info"),
} }
let space_advance = unsafe { (*font).glyph_h_advance(space_glyph as GlyphId) }; let space_advance = unsafe { (*font).glyph_h_advance(space_glyph as GlyphId) };
(space_glyph, space_advance) (space_glyph, space_advance)
} }
extern fn glyph_h_kerning_func(_: *mut hb_font_t, extern "C" fn glyph_h_kerning_func(
_: *mut hb_font_t,
font_data: *mut c_void, font_data: *mut c_void,
first_glyph: hb_codepoint_t, first_glyph: hb_codepoint_t,
second_glyph: hb_codepoint_t, second_glyph: hb_codepoint_t,
_: *mut c_void) _: *mut c_void,
-> hb_position_t { ) -> hb_position_t {
let font: *mut Font = font_data as *mut Font; let font: *mut Font = font_data as *mut Font;
assert!(!font.is_null()); assert!(!font.is_null());
@ -491,10 +534,11 @@ extern fn glyph_h_kerning_func(_: *mut hb_font_t,
} }
// Callback to get a font table out of a font. // Callback to get a font table out of a font.
extern fn font_table_func(_: *mut hb_face_t, extern "C" fn font_table_func(
_: *mut hb_face_t,
tag: hb_tag_t, tag: hb_tag_t,
user_data: *mut c_void) user_data: *mut c_void,
-> *mut hb_blob_t { ) -> *mut hb_blob_t {
unsafe { unsafe {
// NB: These asserts have security implications. // NB: These asserts have security implications.
let font = user_data as *const Font; let font = user_data as *const Font;
@ -511,20 +555,22 @@ extern fn font_table_func(_: *mut hb_face_t,
let buf = (*font_table_ptr).buffer(); let buf = (*font_table_ptr).buffer();
// HarfBuzz calls `destroy_blob_func` when the buffer is no longer needed. // HarfBuzz calls `destroy_blob_func` when the buffer is no longer needed.
let blob = hb_blob_create(buf.as_ptr() as *const c_char, let blob = hb_blob_create(
buf.as_ptr() as *const c_char,
buf.len() as c_uint, buf.len() as c_uint,
HB_MEMORY_MODE_READONLY, HB_MEMORY_MODE_READONLY,
font_table_ptr as *mut c_void, font_table_ptr as *mut c_void,
Some(destroy_blob_func)); Some(destroy_blob_func),
);
assert!(!blob.is_null()); assert!(!blob.is_null());
blob blob
} },
} }
} }
} }
extern fn destroy_blob_func(font_table_ptr: *mut c_void) { extern "C" fn destroy_blob_func(font_table_ptr: *mut c_void) {
unsafe { unsafe {
drop(Box::from_raw(font_table_ptr as *mut FontTable)); drop(Box::from_raw(font_table_ptr as *mut FontTable));
} }
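For reference, the shaping path this file wraps reduces to a short HarfBuzz call sequence: fill a buffer, shape it against an existing `hb_font_t`, then read the glyph infos back. A minimal sketch under the assumption of `harfbuzz-sys`-style bindings like the ones the `Shaper` above uses; the `hb_font` is assumed to come from a `Shaper`, and Servo's `GlyphStore` bookkeeping, RTL handling, and feature flags are omitted:

```rust
use std::os::raw::{c_char, c_int, c_uint};

use harfbuzz_sys::*; // assumed: harfbuzz-sys style bindings, as used by Shaper above

/// Shape `text` with an already-constructed font and return (glyph id, byte cluster) pairs.
unsafe fn shape_ltr(hb_font: *mut hb_font_t, text: &str) -> Vec<(u32, u32)> {
    let buffer = hb_buffer_create();
    hb_buffer_set_direction(buffer, HB_DIRECTION_LTR);
    hb_buffer_add_utf8(
        buffer,
        text.as_ptr() as *const c_char,
        text.len() as c_int,
        0,
        text.len() as c_int,
    );
    // The real code sets the script explicitly; here HarfBuzz guesses the unset properties.
    hb_buffer_guess_segment_properties(buffer);

    // No feature overrides; the real code pushes `liga`/`kern` entries when the flags ask.
    let mut features: Vec<hb_feature_t> = Vec::new();
    hb_shape(hb_font, buffer, features.as_mut_ptr(), features.len() as u32);

    let mut n: c_uint = 0;
    let infos = hb_buffer_get_glyph_infos(buffer, &mut n);
    let mut out = Vec::with_capacity(n as usize);
    for i in 0..n as isize {
        let info = *infos.offset(i);
        out.push((info.codepoint, info.cluster));
    }
    hb_buffer_destroy(buffer);
    out
}
```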


@ -17,4 +17,3 @@ pub mod harfbuzz;
pub trait ShaperMethods { pub trait ShaperMethods {
fn shape_text(&self, text: &str, options: &ShapingOptions, glyphs: &mut GlyphStore); fn shape_text(&self, text: &str, options: &ShapingOptions, glyphs: &mut GlyphStore);
} }


@ -155,7 +155,7 @@ impl<'a> Iterator for CharacterSliceIterator<'a> {
let byte_start = self.range.begin(); let byte_start = self.range.begin();
let byte_len = match self.text[byte_start.to_usize()..].chars().next() { let byte_len = match self.text[byte_start.to_usize()..].chars().next() {
Some(ch) => ByteIndex(ch.len_utf8() as isize), Some(ch) => ByteIndex(ch.len_utf8() as isize),
None => unreachable!() // XXX refactor? None => unreachable!(), // XXX refactor?
}; };
self.range.adjust_by(byte_len, -byte_len); self.range.adjust_by(byte_len, -byte_len);
@ -178,10 +178,16 @@ impl<'a> Iterator for CharacterSliceIterator<'a> {
impl<'a> TextRun { impl<'a> TextRun {
/// Constructs a new text run. Also returns if there is a line break at the beginning /// Constructs a new text run. Also returns if there is a line break at the beginning
pub fn new(font: &mut Font, text: String, options: &ShapingOptions, pub fn new(
bidi_level: bidi::Level, breaker: &mut Option<LineBreakLeafIter>) -> (TextRun, bool) { font: &mut Font,
text: String,
options: &ShapingOptions,
bidi_level: bidi::Level,
breaker: &mut Option<LineBreakLeafIter>,
) -> (TextRun, bool) {
let (glyphs, break_at_zero) = TextRun::break_and_shape(font, &text, options, breaker); let (glyphs, break_at_zero) = TextRun::break_and_shape(font, &text, options, breaker);
(TextRun { (
TextRun {
text: Arc::new(text), text: Arc::new(text),
font_metrics: font.metrics.clone(), font_metrics: font.metrics.clone(),
font_template: font.handle.template(), font_template: font.handle.template(),
@ -190,12 +196,18 @@ impl<'a> TextRun {
glyphs: Arc::new(glyphs), glyphs: Arc::new(glyphs),
bidi_level: bidi_level, bidi_level: bidi_level,
extra_word_spacing: Au(0), extra_word_spacing: Au(0),
}, break_at_zero) },
break_at_zero,
)
} }
pub fn break_and_shape(font: &mut Font, text: &str, options: &ShapingOptions, pub fn break_and_shape(
breaker: &mut Option<LineBreakLeafIter>) -> (Vec<GlyphRun>, bool) { font: &mut Font,
let mut glyphs = vec!(); text: &str,
options: &ShapingOptions,
breaker: &mut Option<LineBreakLeafIter>,
) -> (Vec<GlyphRun>, bool) {
let mut glyphs = vec![];
let mut slice = 0..0; let mut slice = 0..0;
let mut finished = false; let mut finished = false;
@ -203,7 +215,7 @@ impl<'a> TextRun {
if breaker.is_none() { if breaker.is_none() {
if text.len() == 0 { if text.len() == 0 {
return (glyphs, true) return (glyphs, true);
} }
*breaker = Some(LineBreakLeafIter::new(&text, 0)); *breaker = Some(LineBreakLeafIter::new(&text, 0));
} }
@ -225,8 +237,12 @@ impl<'a> TextRun {
// Split off any trailing whitespace into a separate glyph run. // Split off any trailing whitespace into a separate glyph run.
let mut whitespace = slice.end..slice.end; let mut whitespace = slice.end..slice.end;
if let Some((i, _)) = word.char_indices().rev() if let Some((i, _)) = word
.take_while(|&(_, c)| char_is_whitespace(c)).last() { .char_indices()
.rev()
.take_while(|&(_, c)| char_is_whitespace(c))
.last()
{
whitespace.start = slice.start + i; whitespace.start = slice.start + i;
slice.end = whitespace.start; slice.end = whitespace.start;
} else if idx != text.len() && options.flags.contains(ShapingFlags::KEEP_ALL_FLAG) { } else if idx != text.len() && options.flags.contains(ShapingFlags::KEEP_ALL_FLAG) {
@ -237,17 +253,23 @@ impl<'a> TextRun {
if slice.len() > 0 { if slice.len() > 0 {
glyphs.push(GlyphRun { glyphs.push(GlyphRun {
glyph_store: font.shape_text(&text[slice.clone()], options), glyph_store: font.shape_text(&text[slice.clone()], options),
range: Range::new(ByteIndex(slice.start as isize), range: Range::new(
ByteIndex(slice.len() as isize)), ByteIndex(slice.start as isize),
ByteIndex(slice.len() as isize),
),
}); });
} }
if whitespace.len() > 0 { if whitespace.len() > 0 {
let mut options = options.clone(); let mut options = options.clone();
options.flags.insert(ShapingFlags::IS_WHITESPACE_SHAPING_FLAG); options
.flags
.insert(ShapingFlags::IS_WHITESPACE_SHAPING_FLAG);
glyphs.push(GlyphRun { glyphs.push(GlyphRun {
glyph_store: font.shape_text(&text[whitespace.clone()], &options), glyph_store: font.shape_text(&text[whitespace.clone()], &options),
range: Range::new(ByteIndex(whitespace.start as isize), range: Range::new(
ByteIndex(whitespace.len() as isize)), ByteIndex(whitespace.start as isize),
ByteIndex(whitespace.len() as isize),
),
}); });
} }
slice.start = whitespace.end; slice.start = whitespace.end;
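The trailing-whitespace split in the hunk above (the `char_indices().rev().take_while(..).last()` chain) is easier to read standalone. A small sketch of the same idea, using std's `char::is_whitespace` in place of Servo's `char_is_whitespace`:

```rust
/// Split a word into (content, trailing whitespace), like the break_and_shape code above.
fn split_trailing_whitespace(word: &str) -> (&str, &str) {
    match word
        .char_indices()
        .rev()
        .take_while(|&(_, c)| c.is_whitespace())
        .last()
    {
        Some((i, _)) => word.split_at(i),
        None => (word, ""),
    }
}

fn main() {
    assert_eq!(split_trailing_whitespace("word  "), ("word", "  "));
    assert_eq!(split_trailing_whitespace("word"), ("word", ""));
}
```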
@ -265,33 +287,43 @@ impl<'a> TextRun {
pub fn advance_for_range(&self, range: &Range<ByteIndex>) -> Au { pub fn advance_for_range(&self, range: &Range<ByteIndex>) -> Au {
if range.is_empty() { if range.is_empty() {
return Au(0) return Au(0);
} }
// TODO(Issue #199): alter advance direction for RTL // TODO(Issue #199): alter advance direction for RTL
// TODO(Issue #98): using inter-char and inter-word spacing settings when measuring text // TODO(Issue #98): using inter-char and inter-word spacing settings when measuring text
self.natural_word_slices_in_range(range) self.natural_word_slices_in_range(range)
.fold(Au(0), |advance, slice| { .fold(Au(0), |advance, slice| {
advance + slice.glyphs.advance_for_byte_range(&slice.range, self.extra_word_spacing) advance + slice
.glyphs
.advance_for_byte_range(&slice.range, self.extra_word_spacing)
}) })
} }
pub fn metrics_for_range(&self, range: &Range<ByteIndex>) -> RunMetrics { pub fn metrics_for_range(&self, range: &Range<ByteIndex>) -> RunMetrics {
RunMetrics::new(self.advance_for_range(range), RunMetrics::new(
self.advance_for_range(range),
self.font_metrics.ascent, self.font_metrics.ascent,
self.font_metrics.descent) self.font_metrics.descent,
)
} }
pub fn metrics_for_slice(&self, glyphs: &GlyphStore, slice_range: &Range<ByteIndex>) pub fn metrics_for_slice(
-> RunMetrics { &self,
RunMetrics::new(glyphs.advance_for_byte_range(slice_range, self.extra_word_spacing), glyphs: &GlyphStore,
slice_range: &Range<ByteIndex>,
) -> RunMetrics {
RunMetrics::new(
glyphs.advance_for_byte_range(slice_range, self.extra_word_spacing),
self.font_metrics.ascent, self.font_metrics.ascent,
self.font_metrics.descent) self.font_metrics.descent,
)
} }
pub fn min_width_for_range(&self, range: &Range<ByteIndex>) -> Au { pub fn min_width_for_range(&self, range: &Range<ByteIndex>) -> Au {
debug!("iterating outer range {:?}", range); debug!("iterating outer range {:?}", range);
self.natural_word_slices_in_range(range).fold(Au(0), |max_piece_width, slice| { self.natural_word_slices_in_range(range)
.fold(Au(0), |max_piece_width, slice| {
debug!("iterated on {:?}[{:?}]", slice.offset, slice.range); debug!("iterated on {:?}[{:?}]", slice.offset, slice.range);
max(max_piece_width, self.advance_for_range(&slice.range)) max(max_piece_width, self.advance_for_range(&slice.range))
}) })
@ -309,13 +341,15 @@ impl<'a> TextRun {
let self_ptr = self as *const TextRun; let self_ptr = self as *const TextRun;
INDEX_OF_FIRST_GLYPH_RUN_CACHE.with(|index_of_first_glyph_run_cache| { INDEX_OF_FIRST_GLYPH_RUN_CACHE.with(|index_of_first_glyph_run_cache| {
if let Some((last_text_run, last_index, last_result)) = if let Some((last_text_run, last_index, last_result)) =
index_of_first_glyph_run_cache.get() { index_of_first_glyph_run_cache.get()
{
if last_text_run == self_ptr && last_index == index { if last_text_run == self_ptr && last_index == index {
return Some(last_result) return Some(last_result);
} }
} }
if let Ok(result) = (&**self.glyphs).binary_search_by(|current| current.compare(&index)) { if let Ok(result) = (&**self.glyphs).binary_search_by(|current| current.compare(&index))
{
index_of_first_glyph_run_cache.set(Some((self_ptr, index, result))); index_of_first_glyph_run_cache.set(Some((self_ptr, index, result)));
Some(result) Some(result)
} else { } else {
@ -339,18 +373,22 @@ impl<'a> TextRun {
let mut remaining = advance; let mut remaining = advance;
self.natural_word_slices_in_range(range) self.natural_word_slices_in_range(range)
.map(|slice| { .map(|slice| {
let (slice_index, slice_advance) = let (slice_index, slice_advance) = slice.glyphs.range_index_of_advance(
slice.glyphs.range_index_of_advance(&slice.range, remaining, self.extra_word_spacing); &slice.range,
remaining,
self.extra_word_spacing,
);
remaining -= slice_advance; remaining -= slice_advance;
slice_index slice_index
}) }).sum()
.sum()
} }
/// Returns an iterator that will iterate over all slices of glyphs that represent natural /// Returns an iterator that will iterate over all slices of glyphs that represent natural
/// words in the given range. /// words in the given range.
pub fn natural_word_slices_in_range(&'a self, range: &Range<ByteIndex>) pub fn natural_word_slices_in_range(
-> NaturalWordSliceIterator<'a> { &'a self,
range: &Range<ByteIndex>,
) -> NaturalWordSliceIterator<'a> {
let index = match self.index_of_first_glyph_run_containing(range.begin()) { let index = match self.index_of_first_glyph_run_containing(range.begin()) {
None => self.glyphs.len(), None => self.glyphs.len(),
Some(index) => index, Some(index) => index,
@ -365,20 +403,22 @@ impl<'a> TextRun {
/// Returns an iterator that over natural word slices in visual order (left to right or /// Returns an iterator that over natural word slices in visual order (left to right or
/// right to left, depending on the bidirectional embedding level). /// right to left, depending on the bidirectional embedding level).
pub fn natural_word_slices_in_visual_order(&'a self, range: &Range<ByteIndex>) pub fn natural_word_slices_in_visual_order(
-> NaturalWordSliceIterator<'a> { &'a self,
range: &Range<ByteIndex>,
) -> NaturalWordSliceIterator<'a> {
// Iterate in reverse order if bidi level is RTL. // Iterate in reverse order if bidi level is RTL.
let reverse = self.bidi_level.is_rtl(); let reverse = self.bidi_level.is_rtl();
let index = if reverse { let index = if reverse {
match self.index_of_first_glyph_run_containing(range.end() - ByteIndex(1)) { match self.index_of_first_glyph_run_containing(range.end() - ByteIndex(1)) {
Some(i) => i + 1, // In reverse mode, index points one past the next element. Some(i) => i + 1, // In reverse mode, index points one past the next element.
None => 0 None => 0,
} }
} else { } else {
match self.index_of_first_glyph_run_containing(range.begin()) { match self.index_of_first_glyph_run_containing(range.begin()) {
Some(i) => i, Some(i) => i,
None => self.glyphs.len() None => self.glyphs.len(),
} }
}; };
NaturalWordSliceIterator { NaturalWordSliceIterator {
@ -391,8 +431,10 @@ impl<'a> TextRun {
/// Returns an iterator that will iterate over all slices of glyphs that represent individual /// Returns an iterator that will iterate over all slices of glyphs that represent individual
/// characters in the given range. /// characters in the given range.
pub fn character_slices_in_range(&'a self, range: &Range<ByteIndex>) pub fn character_slices_in_range(
-> CharacterSliceIterator<'a> { &'a self,
range: &Range<ByteIndex>,
) -> CharacterSliceIterator<'a> {
let index = match self.index_of_first_glyph_run_containing(range.begin()) { let index = match self.index_of_first_glyph_run_containing(range.begin()) {
None => self.glyphs.len(), None => self.glyphs.len(),
Some(index) => index, Some(index) => index,


@ -9,7 +9,7 @@ pub enum CompressionMode {
CompressNone, CompressNone,
CompressWhitespace, CompressWhitespace,
CompressWhitespaceNewline, CompressWhitespaceNewline,
DiscardNewline DiscardNewline,
} }
// ported from Gecko's nsTextFrameUtils::TransformText. // ported from Gecko's nsTextFrameUtils::TransformText.
@ -22,11 +22,12 @@ pub enum CompressionMode {
// * Issue #114: record skipped and kept chars for mapping original to new text // * Issue #114: record skipped and kept chars for mapping original to new text
// //
// * Untracked: various edge cases for bidi, CJK, etc. // * Untracked: various edge cases for bidi, CJK, etc.
pub fn transform_text(text: &str, pub fn transform_text(
text: &str,
mode: CompressionMode, mode: CompressionMode,
incoming_whitespace: bool, incoming_whitespace: bool,
output_text: &mut String) output_text: &mut String,
-> bool { ) -> bool {
let out_whitespace = match mode { let out_whitespace = match mode {
CompressionMode::CompressNone | CompressionMode::DiscardNewline => { CompressionMode::CompressNone | CompressionMode::DiscardNewline => {
for ch in text.chars() { for ch in text.chars() {
@ -58,7 +59,8 @@ pub fn transform_text(text: &str,
// TODO: record kept char // TODO: record kept char
output_text.push(ch); output_text.push(ch);
} }
} else { /* next_in_whitespace; possibly add a space char */ } else {
/* next_in_whitespace; possibly add a space char */
if in_whitespace { if in_whitespace {
// TODO: record skipped char // TODO: record skipped char
} else { } else {
@ -70,7 +72,7 @@ pub fn transform_text(text: &str,
in_whitespace = next_in_whitespace; in_whitespace = next_in_whitespace;
} /* /for str::each_char */ } /* /for str::each_char */
in_whitespace in_whitespace
} },
}; };
return out_whitespace; return out_whitespace;
@ -80,7 +82,7 @@ pub fn transform_text(text: &str,
(' ', _) => true, (' ', _) => true,
('\t', _) => true, ('\t', _) => true,
('\n', CompressionMode::CompressWhitespaceNewline) => true, ('\n', CompressionMode::CompressWhitespaceNewline) => true,
(_, _) => false (_, _) => false,
} }
} }
@ -89,8 +91,10 @@ pub fn transform_text(text: &str,
return true; return true;
} }
match mode { match mode {
CompressionMode::DiscardNewline | CompressionMode::CompressWhitespaceNewline => ch == '\n', CompressionMode::DiscardNewline | CompressionMode::CompressWhitespaceNewline => {
_ => false ch == '\n'
},
_ => false,
} }
} }
@ -113,7 +117,7 @@ pub fn is_bidi_control(c: char) -> bool {
'\u{202A}'...'\u{202E}' => true, '\u{202A}'...'\u{202E}' => true,
'\u{2066}'...'\u{2069}' => true, '\u{2066}'...'\u{2069}' => true,
'\u{200E}' | '\u{200F}' | '\u{061C}' => true, '\u{200E}' | '\u{200F}' | '\u{061C}' => true,
_ => false _ => false,
} }
} }
@ -143,15 +147,12 @@ pub fn is_cjk(codepoint: char) -> bool {
UnicodeBlock::CJKUnifiedIdeographs | UnicodeBlock::CJKUnifiedIdeographs |
UnicodeBlock::CJKCompatibilityIdeographs | UnicodeBlock::CJKCompatibilityIdeographs |
UnicodeBlock::CJKCompatibilityForms | UnicodeBlock::CJKCompatibilityForms |
UnicodeBlock::HalfwidthandFullwidthForms => { UnicodeBlock::HalfwidthandFullwidthForms => return true,
return true
}
_ => {} _ => {},
} }
} }
// https://en.wikipedia.org/wiki/Plane_(Unicode)#Supplementary_Ideographic_Plane // https://en.wikipedia.org/wiki/Plane_(Unicode)#Supplementary_Ideographic_Plane
unicode_plane(codepoint) == 2 unicode_plane(codepoint) == 2
} }
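The compression modes above collapse whitespace runs while copying text. A self-contained sketch of that idea, not Servo's `transform_text` signature; the incoming-whitespace handling and the discard-newline mode are left out:

```rust
/// Runs of spaces and tabs (plus newlines, when compress_newline is set) become one space.
fn compress_whitespace(text: &str, compress_newline: bool) -> String {
    let mut out = String::with_capacity(text.len());
    let mut in_whitespace = false;
    for ch in text.chars() {
        let is_ws = ch == ' ' || ch == '\t' || (compress_newline && ch == '\n');
        if is_ws {
            if !in_whitespace {
                out.push(' ');
            }
        } else {
            out.push(ch);
        }
        in_whitespace = is_ws;
    }
    out
}

fn main() {
    assert_eq!(compress_whitespace("a \t\n b", true), "a b");
}
```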


@ -4,13 +4,15 @@
#![crate_name = "gfx_traits"] #![crate_name = "gfx_traits"]
#![crate_type = "rlib"] #![crate_type = "rlib"]
#![deny(unsafe_code)] #![deny(unsafe_code)]
extern crate malloc_size_of; extern crate malloc_size_of;
#[macro_use] extern crate malloc_size_of_derive; #[macro_use]
#[macro_use] extern crate range; extern crate malloc_size_of_derive;
#[macro_use] extern crate serde; #[macro_use]
extern crate range;
#[macro_use]
extern crate serde;
pub mod print_tree; pub mod print_tree;
@ -32,7 +34,7 @@ impl Epoch {
pub struct StackingContextId( pub struct StackingContextId(
/// The identifier for this StackingContext, derived from the Flow's memory address /// The identifier for this StackingContext, derived from the Flow's memory address
/// and fragment type. As a space optimization, these are combined into a single word. /// and fragment type. As a space optimization, these are combined into a single word.
pub u64 pub u64,
); );
impl StackingContextId { impl StackingContextId {


@ -1,25 +1,26 @@
// FORK NOTE: Copied from liballoc_system, removed unnecessary APIs, // FORK NOTE: Copied from liballoc_system, removed unnecessary APIs,
// APIs take size/align directly instead of Layout // APIs take size/align directly instead of Layout
// The minimum alignment guaranteed by the architecture. This value is used to // The minimum alignment guaranteed by the architecture. This value is used to
// add fast paths for low alignment values. In practice, the alignment is a // add fast paths for low alignment values. In practice, the alignment is a
// constant at the call site and the branch will be optimized out. // constant at the call site and the branch will be optimized out.
#[cfg(all(any(target_arch = "x86", #[cfg(all(any(
target_arch = "x86",
target_arch = "arm", target_arch = "arm",
target_arch = "mips", target_arch = "mips",
target_arch = "powerpc", target_arch = "powerpc",
target_arch = "powerpc64", target_arch = "powerpc64",
target_arch = "asmjs", target_arch = "asmjs",
target_arch = "wasm32")))] target_arch = "wasm32"
)))]
const MIN_ALIGN: usize = 8; const MIN_ALIGN: usize = 8;
#[cfg(all(any(target_arch = "x86_64", #[cfg(all(any(
target_arch = "x86_64",
target_arch = "aarch64", target_arch = "aarch64",
target_arch = "mips64", target_arch = "mips64",
target_arch = "s390x", target_arch = "s390x",
target_arch = "sparc64")))] target_arch = "sparc64"
)))]
const MIN_ALIGN: usize = 16; const MIN_ALIGN: usize = 16;
pub use self::platform::{alloc, dealloc, realloc}; pub use self::platform::{alloc, dealloc, realloc};
@ -100,7 +101,6 @@ mod platform {
type DWORD = u32; type DWORD = u32;
type BOOL = i32; type BOOL = i32;
extern "system" { extern "system" {
fn GetProcessHeap() -> HANDLE; fn GetProcessHeap() -> HANDLE;
fn HeapAlloc(hHeap: HANDLE, dwFlags: DWORD, dwBytes: SIZE_T) -> LPVOID; fn HeapAlloc(hHeap: HANDLE, dwFlags: DWORD, dwBytes: SIZE_T) -> LPVOID;
@ -123,8 +123,7 @@ mod platform {
} }
#[inline] #[inline]
unsafe fn allocate_with_flags(size: usize, align: usize, flags: DWORD) -> *mut u8 unsafe fn allocate_with_flags(size: usize, align: usize, flags: DWORD) -> *mut u8 {
{
if align <= MIN_ALIGN { if align <= MIN_ALIGN {
HeapAlloc(GetProcessHeap(), flags, size) HeapAlloc(GetProcessHeap(), flags, size)
} else { } else {
@ -147,21 +146,16 @@ mod platform {
pub unsafe fn dealloc(ptr: *mut u8, align: usize) { pub unsafe fn dealloc(ptr: *mut u8, align: usize) {
if align <= MIN_ALIGN { if align <= MIN_ALIGN {
let err = HeapFree(GetProcessHeap(), 0, ptr as LPVOID); let err = HeapFree(GetProcessHeap(), 0, ptr as LPVOID);
debug_assert!(err != 0, "Failed to free heap memory: {}", debug_assert!(err != 0, "Failed to free heap memory: {}", GetLastError());
GetLastError());
} else { } else {
let header = get_header(ptr); let header = get_header(ptr);
let err = HeapFree(GetProcessHeap(), 0, header.0 as LPVOID); let err = HeapFree(GetProcessHeap(), 0, header.0 as LPVOID);
debug_assert!(err != 0, "Failed to free heap memory: {}", debug_assert!(err != 0, "Failed to free heap memory: {}", GetLastError());
GetLastError());
} }
} }
#[inline] #[inline]
pub unsafe fn realloc(ptr: *mut u8, new_size: usize) -> *mut u8 { pub unsafe fn realloc(ptr: *mut u8, new_size: usize) -> *mut u8 {
HeapReAlloc(GetProcessHeap(), HeapReAlloc(GetProcessHeap(), 0, ptr as LPVOID, new_size) as *mut u8
0,
ptr as LPVOID,
new_size) as *mut u8
} }
} }
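The `MIN_ALIGN` constants above exist so that requests with small alignment can use the plain `HeapAlloc`/`malloc` path, while larger alignments over-allocate and round the pointer up. A sketch of that decision and the round-up arithmetic; the constant value and names here are illustrative:

```rust
// Illustrative value; the real constant is chosen per target_arch above (8 or 16).
const MIN_ALIGN: usize = 16;

/// Round `addr` up to the next multiple of `align`, which must be a power of two.
fn align_up(addr: usize, align: usize) -> usize {
    debug_assert!(align.is_power_of_two());
    (addr + align - 1) & !(align - 1)
}

fn main() {
    // Alignments up to MIN_ALIGN are already satisfied by the plain allocation path.
    assert!(8 <= MIN_ALIGN);
    // Larger alignments need the manual round-up, e.g. a 64-byte-aligned slot at 0x1008:
    assert_eq!(align_up(0x1008, 64), 0x1040);
}
```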


@ -26,7 +26,6 @@ pub use std::collections::hash_set::{Iter as SetIter, IntoIter as SetIntoIter};
#[derive(Clone)] #[derive(Clone)]
pub struct HashMap<K, V, S = RandomState>(StdMap<K, V, S>); pub struct HashMap<K, V, S = RandomState>(StdMap<K, V, S>);
use FailedAllocationError; use FailedAllocationError;
impl<K, V, S> Deref for HashMap<K, V, S> { impl<K, V, S> Deref for HashMap<K, V, S> {
@ -43,8 +42,9 @@ impl<K, V, S> DerefMut for HashMap<K, V, S> {
} }
impl<K, V, S> HashMap<K, V, S> impl<K, V, S> HashMap<K, V, S>
where K: Eq + Hash, where
S: BuildHasher K: Eq + Hash,
S: BuildHasher,
{ {
#[inline] #[inline]
pub fn try_with_hasher(hash_builder: S) -> Result<HashMap<K, V, S>, FailedAllocationError> { pub fn try_with_hasher(hash_builder: S) -> Result<HashMap<K, V, S>, FailedAllocationError> {
@ -52,17 +52,20 @@ impl<K, V, S> HashMap<K, V, S>
} }
#[inline] #[inline]
pub fn try_with_capacity_and_hasher(capacity: usize, pub fn try_with_capacity_and_hasher(
hash_builder: S) capacity: usize,
-> Result<HashMap<K, V, S>, FailedAllocationError> { hash_builder: S,
Ok(HashMap(StdMap::with_capacity_and_hasher(capacity, hash_builder))) ) -> Result<HashMap<K, V, S>, FailedAllocationError> {
Ok(HashMap(StdMap::with_capacity_and_hasher(
capacity,
hash_builder,
)))
} }
pub fn with_capacity_and_hasher(capacity: usize, hash_builder: S) -> HashMap<K, V, S> { pub fn with_capacity_and_hasher(capacity: usize, hash_builder: S) -> HashMap<K, V, S> {
HashMap(StdMap::with_capacity_and_hasher(capacity, hash_builder)) HashMap(StdMap::with_capacity_and_hasher(capacity, hash_builder))
} }
#[inline] #[inline]
pub fn try_reserve(&mut self, additional: usize) -> Result<(), FailedAllocationError> { pub fn try_reserve(&mut self, additional: usize) -> Result<(), FailedAllocationError> {
Ok(self.reserve(additional)) Ok(self.reserve(additional))
@ -85,7 +88,6 @@ impl<K, V, S> HashMap<K, V, S>
#[derive(Clone)] #[derive(Clone)]
pub struct HashSet<T, S = RandomState>(StdSet<T, S>); pub struct HashSet<T, S = RandomState>(StdSet<T, S>);
impl<T, S> Deref for HashSet<T, S> { impl<T, S> Deref for HashSet<T, S> {
type Target = StdSet<T, S>; type Target = StdSet<T, S>;
fn deref(&self) -> &Self::Target { fn deref(&self) -> &Self::Target {
@ -111,17 +113,16 @@ impl<T: Hash + Eq> HashSet<T, RandomState> {
} }
} }
impl<T, S> HashSet<T, S> impl<T, S> HashSet<T, S>
where T: Eq + Hash, where
S: BuildHasher T: Eq + Hash,
S: BuildHasher,
{ {
#[inline] #[inline]
pub fn with_hasher(hasher: S) -> HashSet<T, S> { pub fn with_hasher(hasher: S) -> HashSet<T, S> {
HashSet(StdSet::with_hasher(hasher)) HashSet(StdSet::with_hasher(hasher))
} }
#[inline] #[inline]
pub fn with_capacity_and_hasher(capacity: usize, hasher: S) -> HashSet<T, S> { pub fn with_capacity_and_hasher(capacity: usize, hasher: S) -> HashSet<T, S> {
HashSet(StdSet::with_capacity_and_hasher(capacity, hasher)) HashSet(StdSet::with_capacity_and_hasher(capacity, hasher))
@ -153,18 +154,21 @@ impl<K: Hash + Eq, V, S: BuildHasher + Default> Default for HashMap<K, V, S> {
} }
impl<K, V, S> fmt::Debug for HashMap<K, V, S> impl<K, V, S> fmt::Debug for HashMap<K, V, S>
where K: Eq + Hash + fmt::Debug, where
K: Eq + Hash + fmt::Debug,
V: fmt::Debug, V: fmt::Debug,
S: BuildHasher { S: BuildHasher,
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.0.fmt(f) self.0.fmt(f)
} }
} }
impl<K, V, S> PartialEq for HashMap<K, V, S> impl<K, V, S> PartialEq for HashMap<K, V, S>
where K: Eq + Hash, where
K: Eq + Hash,
V: PartialEq, V: PartialEq,
S: BuildHasher S: BuildHasher,
{ {
fn eq(&self, other: &HashMap<K, V, S>) -> bool { fn eq(&self, other: &HashMap<K, V, S>) -> bool {
self.0.eq(&other.0) self.0.eq(&other.0)
@ -172,15 +176,17 @@ impl<K, V, S> PartialEq for HashMap<K, V, S>
} }
impl<K, V, S> Eq for HashMap<K, V, S> impl<K, V, S> Eq for HashMap<K, V, S>
where K: Eq + Hash, where
K: Eq + Hash,
V: Eq, V: Eq,
S: BuildHasher S: BuildHasher,
{ {
} }
impl<'a, K, V, S> IntoIterator for &'a HashMap<K, V, S> impl<'a, K, V, S> IntoIterator for &'a HashMap<K, V, S>
where K: Eq + Hash, where
S: BuildHasher K: Eq + Hash,
S: BuildHasher,
{ {
type Item = (&'a K, &'a V); type Item = (&'a K, &'a V);
type IntoIter = MapIter<'a, K, V>; type IntoIter = MapIter<'a, K, V>;
@ -191,8 +197,9 @@ impl<'a, K, V, S> IntoIterator for &'a HashMap<K, V, S>
} }
impl<'a, K, V, S> IntoIterator for &'a mut HashMap<K, V, S> impl<'a, K, V, S> IntoIterator for &'a mut HashMap<K, V, S>
where K: Eq + Hash, where
S: BuildHasher K: Eq + Hash,
S: BuildHasher,
{ {
type Item = (&'a K, &'a mut V); type Item = (&'a K, &'a mut V);
type IntoIter = MapIterMut<'a, K, V>; type IntoIter = MapIterMut<'a, K, V>;
@ -209,8 +216,9 @@ impl<T: Eq + Hash, S: BuildHasher + Default> Default for HashSet<T, S> {
} }
impl<T, S> fmt::Debug for HashSet<T, S> impl<T, S> fmt::Debug for HashSet<T, S>
where T: Eq + Hash + fmt::Debug, where
S: BuildHasher T: Eq + Hash + fmt::Debug,
S: BuildHasher,
{ {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.0.fmt(f) self.0.fmt(f)
@ -218,8 +226,9 @@ impl<T, S> fmt::Debug for HashSet<T, S>
} }
impl<T, S> PartialEq for HashSet<T, S> impl<T, S> PartialEq for HashSet<T, S>
where T: Eq + Hash, where
S: BuildHasher T: Eq + Hash,
S: BuildHasher,
{ {
fn eq(&self, other: &HashSet<T, S>) -> bool { fn eq(&self, other: &HashSet<T, S>) -> bool {
self.0.eq(&other.0) self.0.eq(&other.0)
@ -227,14 +236,16 @@ impl<T, S> PartialEq for HashSet<T, S>
} }
impl<T, S> Eq for HashSet<T, S> impl<T, S> Eq for HashSet<T, S>
where T: Eq + Hash, where
S: BuildHasher T: Eq + Hash,
S: BuildHasher,
{ {
} }
impl<'a, T, S> IntoIterator for &'a HashSet<T, S> impl<'a, T, S> IntoIterator for &'a HashSet<T, S>
where T: Eq + Hash, where
S: BuildHasher T: Eq + Hash,
S: BuildHasher,
{ {
type Item = &'a T; type Item = &'a T;
type IntoIter = SetIter<'a, T>; type IntoIter = SetIter<'a, T>;
@ -245,16 +256,14 @@ impl<'a, T, S> IntoIterator for &'a HashSet<T, S>
} }
impl<T, S> IntoIterator for HashSet<T, S> impl<T, S> IntoIterator for HashSet<T, S>
where T: Eq + Hash, where
S: BuildHasher T: Eq + Hash,
S: BuildHasher,
{ {
type Item = T; type Item = T;
type IntoIter = SetIntoIter<T>; type IntoIter = SetIntoIter<T>;
fn into_iter(self) -> SetIntoIter<T> { fn into_iter(self) -> SetIntoIter<T> {
self.0.into_iter() self.0.into_iter()
} }
} }


@ -50,7 +50,9 @@ impl DefaultResizePolicy {
// 3. Ensure it is at least the minimum size. // 3. Ensure it is at least the minimum size.
let mut raw_cap = len * 11 / 10; let mut raw_cap = len * 11 / 10;
assert!(raw_cap >= len, "raw_cap overflow"); assert!(raw_cap >= len, "raw_cap overflow");
raw_cap = raw_cap.checked_next_power_of_two().expect("raw_capacity overflow"); raw_cap = raw_cap
.checked_next_power_of_two()
.expect("raw_capacity overflow");
raw_cap = max(MIN_NONZERO_RAW_CAPACITY, raw_cap); raw_cap = max(MIN_NONZERO_RAW_CAPACITY, raw_cap);
raw_cap raw_cap
} }
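The resize-policy arithmetic above is easier to follow with a concrete number. A standalone mirror of the same computation; `MIN_NONZERO_RAW_CAPACITY` is passed as a parameter here because its value is defined elsewhere in this file:

```rust
/// Mirror of DefaultResizePolicy::raw_capacity above: leave ~10% headroom over `len`,
/// round the bucket count up to a power of two, and enforce a minimum size.
fn raw_capacity(len: usize, min_nonzero_raw_capacity: usize) -> usize {
    let raw_cap = len * 11 / 10;
    assert!(raw_cap >= len, "raw_cap overflow");
    let raw_cap = raw_cap
        .checked_next_power_of_two()
        .expect("raw_capacity overflow");
    raw_cap.max(min_nonzero_raw_capacity)
}

fn main() {
    // len = 100 -> 110 -> next power of two = 128
    assert_eq!(raw_capacity(100, 32), 128);
}
```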
@ -398,8 +400,9 @@ pub struct HashMap<K, V, S = RandomState> {
/// Search for a pre-hashed key. /// Search for a pre-hashed key.
#[inline] #[inline]
fn search_hashed<K, V, M, F>(table: M, hash: SafeHash, mut is_match: F) -> InternalEntry<K, V, M> fn search_hashed<K, V, M, F>(table: M, hash: SafeHash, mut is_match: F) -> InternalEntry<K, V, M>
where M: Deref<Target = RawTable<K, V>>, where
F: FnMut(&K) -> bool M: Deref<Target = RawTable<K, V>>,
F: FnMut(&K) -> bool,
{ {
// This is the only function where capacity can be zero. To avoid // This is the only function where capacity can be zero. To avoid
// undefined behavior when Bucket::new gets the raw bucket in this // undefined behavior when Bucket::new gets the raw bucket in this
@ -420,7 +423,7 @@ fn search_hashed<K, V, M, F>(table: M, hash: SafeHash, mut is_match: F) -> Inter
hash, hash,
elem: NoElem(bucket, displacement), elem: NoElem(bucket, displacement),
}; };
} },
Full(bucket) => bucket, Full(bucket) => bucket,
}; };
@ -449,9 +452,7 @@ fn search_hashed<K, V, M, F>(table: M, hash: SafeHash, mut is_match: F) -> Inter
} }
} }
fn pop_internal<K, V>(starting_bucket: FullBucketMut<K, V>) fn pop_internal<K, V>(starting_bucket: FullBucketMut<K, V>) -> (K, V, &mut RawTable<K, V>) {
-> (K, V, &mut RawTable<K, V>)
{
let (empty, retkey, retval) = starting_bucket.take(); let (empty, retkey, retval) = starting_bucket.take();
let mut gap = match empty.gap_peek() { let mut gap = match empty.gap_peek() {
Ok(b) => b, Ok(b) => b,
@ -475,12 +476,13 @@ fn pop_internal<K, V>(starting_bucket: FullBucketMut<K, V>)
/// also pass that bucket's displacement so we don't have to recalculate it. /// also pass that bucket's displacement so we don't have to recalculate it.
/// ///
/// `hash`, `key`, and `val` are the elements to "robin hood" into the hashtable. /// `hash`, `key`, and `val` are the elements to "robin hood" into the hashtable.
fn robin_hood<'a, K: 'a, V: 'a>(bucket: FullBucketMut<'a, K, V>, fn robin_hood<'a, K: 'a, V: 'a>(
bucket: FullBucketMut<'a, K, V>,
mut displacement: usize, mut displacement: usize,
mut hash: SafeHash, mut hash: SafeHash,
mut key: K, mut key: K,
mut val: V) mut val: V,
-> FullBucketMut<'a, K, V> { ) -> FullBucketMut<'a, K, V> {
let size = bucket.table().size(); let size = bucket.table().size();
let raw_capacity = bucket.table().capacity(); let raw_capacity = bucket.table().capacity();
// There can be at most `size - dib` buckets to displace, because // There can be at most `size - dib` buckets to displace, because
@ -513,7 +515,7 @@ fn robin_hood<'a, K: 'a, V: 'a>(bucket: FullBucketMut<'a, K, V>,
// FullBucketMut, into just one FullBucketMut. The "table" // FullBucketMut, into just one FullBucketMut. The "table"
// refers to the inner FullBucketMut in this context. // refers to the inner FullBucketMut in this context.
return bucket.into_table(); return bucket.into_table();
} },
Full(bucket) => bucket, Full(bucket) => bucket,
}; };
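The displacement logic being reformatted here is classic robin-hood insertion: whenever the element being inserted has probed further from its ideal bucket than the resident element, the two swap and the displaced resident continues probing. A self-contained sketch of that rule on a toy open-addressing table, not the real `RawTable`/`FullBucketMut` machinery; it assumes the table has at least one empty slot:

```rust
#[derive(Clone)]
struct Slot {
    key: u64,
    val: u64,
    dib: usize, // distance from the key's ideal bucket ("distance to initial bucket")
}

fn robin_hood_insert(table: &mut Vec<Option<Slot>>, mut slot: Slot, ideal: usize) {
    let cap = table.len();
    let mut idx = (ideal + slot.dib) % cap;
    loop {
        match table[idx].take() {
            None => {
                table[idx] = Some(slot);
                return;
            },
            Some(existing) => {
                if existing.dib < slot.dib {
                    // The resident is closer to its ideal bucket ("richer"): it gives up
                    // its slot and becomes the element being inserted.
                    table[idx] = Some(slot);
                    slot = existing;
                } else {
                    table[idx] = Some(existing);
                }
            },
        }
        slot.dib += 1;
        idx = (idx + 1) % cap;
    }
}

fn main() {
    let mut table: Vec<Option<Slot>> = vec![None; 8];
    robin_hood_insert(&mut table, Slot { key: 1, val: 10, dib: 0 }, 5);
    robin_hood_insert(&mut table, Slot { key: 2, val: 20, dib: 0 }, 5); // collides, probes to 6
    assert!(table[5].is_some() && table[6].is_some());
}
```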
@ -531,11 +533,13 @@ fn robin_hood<'a, K: 'a, V: 'a>(bucket: FullBucketMut<'a, K, V>,
} }
impl<K, V, S> HashMap<K, V, S> impl<K, V, S> HashMap<K, V, S>
where K: Eq + Hash, where
S: BuildHasher K: Eq + Hash,
S: BuildHasher,
{ {
fn make_hash<X: ?Sized>(&self, x: &X) -> SafeHash fn make_hash<X: ?Sized>(&self, x: &X) -> SafeHash
where X: Hash where
X: Hash,
{ {
table::make_hash(&self.hash_builder, x) table::make_hash(&self.hash_builder, x)
} }
@ -545,8 +549,9 @@ impl<K, V, S> HashMap<K, V, S>
/// search_hashed. /// search_hashed.
#[inline] #[inline]
fn search<'a, Q: ?Sized>(&'a self, q: &Q) -> InternalEntry<K, V, &'a RawTable<K, V>> fn search<'a, Q: ?Sized>(&'a self, q: &Q) -> InternalEntry<K, V, &'a RawTable<K, V>>
where K: Borrow<Q>, where
Q: Eq + Hash K: Borrow<Q>,
Q: Eq + Hash,
{ {
let hash = self.make_hash(q); let hash = self.make_hash(q);
search_hashed(&self.table, hash, |k| q.eq(k.borrow())) search_hashed(&self.table, hash, |k| q.eq(k.borrow()))
@ -554,8 +559,9 @@ impl<K, V, S> HashMap<K, V, S>
#[inline] #[inline]
fn search_mut<'a, Q: ?Sized>(&'a mut self, q: &Q) -> InternalEntry<K, V, &'a mut RawTable<K, V>> fn search_mut<'a, Q: ?Sized>(&'a mut self, q: &Q) -> InternalEntry<K, V, &'a mut RawTable<K, V>>
where K: Borrow<Q>, where
Q: Eq + Hash K: Borrow<Q>,
Q: Eq + Hash,
{ {
let hash = self.make_hash(q); let hash = self.make_hash(q);
search_hashed(&mut self.table, hash, |k| q.eq(k.borrow())) search_hashed(&mut self.table, hash, |k| q.eq(k.borrow()))
@ -574,7 +580,7 @@ impl<K, V, S> HashMap<K, V, S>
Empty(empty) => { Empty(empty) => {
empty.put(hash, k, v); empty.put(hash, k, v);
return; return;
} },
Full(b) => b.into_bucket(), Full(b) => b.into_bucket(),
}; };
buckets.next(); buckets.next();
@ -584,8 +590,9 @@ impl<K, V, S> HashMap<K, V, S>
} }
impl<K, V, S> HashMap<K, V, S> impl<K, V, S> HashMap<K, V, S>
where K: Eq + Hash, where
S: BuildHasher K: Eq + Hash,
S: BuildHasher,
{ {
/// Creates an empty `HashMap` which will use the given hash builder to hash /// Creates an empty `HashMap` which will use the given hash builder to hash
/// keys. /// keys.
@ -643,7 +650,10 @@ impl<K, V, S> HashMap<K, V, S>
/// map.insert(1, 2); /// map.insert(1, 2);
/// ``` /// ```
#[inline] #[inline]
pub fn try_with_capacity_and_hasher(capacity: usize, hash_builder: S) -> Result<HashMap<K, V, S>, FailedAllocationError> { pub fn try_with_capacity_and_hasher(
capacity: usize,
hash_builder: S,
) -> Result<HashMap<K, V, S>, FailedAllocationError> {
let resize_policy = DefaultResizePolicy::new(); let resize_policy = DefaultResizePolicy::new();
let raw_cap = resize_policy.raw_capacity(capacity); let raw_cap = resize_policy.raw_capacity(capacity);
Ok(HashMap { Ok(HashMap {
@ -708,12 +718,14 @@ impl<K, V, S> HashMap<K, V, S>
self.try_reserve(additional).unwrap(); self.try_reserve(additional).unwrap();
} }
#[inline] #[inline]
pub fn try_reserve(&mut self, additional: usize) -> Result<(), FailedAllocationError> { pub fn try_reserve(&mut self, additional: usize) -> Result<(), FailedAllocationError> {
let remaining = self.capacity() - self.len(); // this can't overflow let remaining = self.capacity() - self.len(); // this can't overflow
if remaining < additional { if remaining < additional {
let min_cap = self.len().checked_add(additional).expect("reserve overflow"); let min_cap = self
.len()
.checked_add(additional)
.expect("reserve overflow");
let raw_cap = self.resize_policy.raw_capacity(min_cap); let raw_cap = self.resize_policy.raw_capacity(min_cap);
self.try_resize(raw_cap)?; self.try_resize(raw_cap)?;
} else if self.table.tag() && remaining <= self.len() { } else if self.table.tag() && remaining <= self.len() {
@ -763,7 +775,7 @@ impl<K, V, S> HashMap<K, V, S>
break; break;
} }
b.into_bucket() b.into_bucket()
} },
Empty(b) => b.into_bucket(), Empty(b) => b.into_bucket(),
}; };
bucket.next(); bucket.next();
@ -822,7 +834,7 @@ impl<K, V, S> HashMap<K, V, S>
Some(Vacant(elem)) => { Some(Vacant(elem)) => {
elem.insert(v); elem.insert(v);
None None
} },
None => unreachable!(), None => unreachable!(),
} }
} }
@ -892,7 +904,9 @@ impl<K, V, S> HashMap<K, V, S>
/// } /// }
/// ``` /// ```
pub fn values_mut(&mut self) -> ValuesMut<K, V> { pub fn values_mut(&mut self) -> ValuesMut<K, V> {
ValuesMut { inner: self.iter_mut() } ValuesMut {
inner: self.iter_mut(),
}
} }
/// An iterator visiting all key-value pairs in arbitrary order. /// An iterator visiting all key-value pairs in arbitrary order.
@ -913,7 +927,9 @@ impl<K, V, S> HashMap<K, V, S>
/// } /// }
/// ``` /// ```
pub fn iter(&self) -> Iter<K, V> { pub fn iter(&self) -> Iter<K, V> {
Iter { inner: self.table.iter() } Iter {
inner: self.table.iter(),
}
} }
/// An iterator visiting all key-value pairs in arbitrary order, /// An iterator visiting all key-value pairs in arbitrary order,
@ -940,7 +956,9 @@ impl<K, V, S> HashMap<K, V, S>
/// } /// }
/// ``` /// ```
pub fn iter_mut(&mut self) -> IterMut<K, V> { pub fn iter_mut(&mut self) -> IterMut<K, V> {
IterMut { inner: self.table.iter_mut() } IterMut {
inner: self.table.iter_mut(),
}
} }
/// Gets the given key's corresponding entry in the map for in-place manipulation. /// Gets the given key's corresponding entry in the map for in-place manipulation.
@ -972,7 +990,8 @@ impl<K, V, S> HashMap<K, V, S>
self.try_reserve(1)?; self.try_reserve(1)?;
let hash = self.make_hash(&key); let hash = self.make_hash(&key);
Ok(search_hashed(&mut self.table, hash, |q| q.eq(&key)) Ok(search_hashed(&mut self.table, hash, |q| q.eq(&key))
.into_entry(key).expect("unreachable")) .into_entry(key)
.expect("unreachable"))
} }
/// Returns the number of elements in the map. /// Returns the number of elements in the map.
@ -1028,8 +1047,14 @@ impl<K, V, S> HashMap<K, V, S>
/// assert!(a.is_empty()); /// assert!(a.is_empty());
/// ``` /// ```
#[inline] #[inline]
pub fn drain(&mut self) -> Drain<K, V> where K: 'static, V: 'static { pub fn drain(&mut self) -> Drain<K, V>
Drain { inner: self.table.drain() } where
K: 'static,
V: 'static,
{
Drain {
inner: self.table.drain(),
}
} }
/// Clears the map, removing all key-value pairs. Keeps the allocated memory /// Clears the map, removing all key-value pairs. Keeps the allocated memory
@ -1046,7 +1071,11 @@ impl<K, V, S> HashMap<K, V, S>
/// assert!(a.is_empty()); /// assert!(a.is_empty());
/// ``` /// ```
#[inline] #[inline]
pub fn clear(&mut self) where K: 'static, V: 'static { pub fn clear(&mut self)
where
K: 'static,
V: 'static,
{
self.drain(); self.drain();
} }
@ -1070,10 +1099,13 @@ impl<K, V, S> HashMap<K, V, S>
/// assert_eq!(map.get(&2), None); /// assert_eq!(map.get(&2), None);
/// ``` /// ```
pub fn get<Q: ?Sized>(&self, k: &Q) -> Option<&V> pub fn get<Q: ?Sized>(&self, k: &Q) -> Option<&V>
where K: Borrow<Q>, where
Q: Hash + Eq K: Borrow<Q>,
Q: Hash + Eq,
{ {
self.search(k).into_occupied_bucket().map(|bucket| bucket.into_refs().1) self.search(k)
.into_occupied_bucket()
.map(|bucket| bucket.into_refs().1)
} }
/// Returns true if the map contains a value for the specified key. /// Returns true if the map contains a value for the specified key.
@ -1096,8 +1128,9 @@ impl<K, V, S> HashMap<K, V, S>
/// assert_eq!(map.contains_key(&2), false); /// assert_eq!(map.contains_key(&2), false);
/// ``` /// ```
pub fn contains_key<Q: ?Sized>(&self, k: &Q) -> bool pub fn contains_key<Q: ?Sized>(&self, k: &Q) -> bool
where K: Borrow<Q>, where
Q: Hash + Eq K: Borrow<Q>,
Q: Hash + Eq,
{ {
self.search(k).into_occupied_bucket().is_some() self.search(k).into_occupied_bucket().is_some()
} }
@ -1124,10 +1157,13 @@ impl<K, V, S> HashMap<K, V, S>
/// assert_eq!(map[&1], "b"); /// assert_eq!(map[&1], "b");
/// ``` /// ```
pub fn get_mut<Q: ?Sized>(&mut self, k: &Q) -> Option<&mut V> pub fn get_mut<Q: ?Sized>(&mut self, k: &Q) -> Option<&mut V>
where K: Borrow<Q>, where
Q: Hash + Eq K: Borrow<Q>,
Q: Hash + Eq,
{ {
self.search_mut(k).into_occupied_bucket().map(|bucket| bucket.into_mut_refs().1) self.search_mut(k)
.into_occupied_bucket()
.map(|bucket| bucket.into_mut_refs().1)
} }
/// Inserts a key-value pair into the map. /// Inserts a key-value pair into the map.
@ -1187,14 +1223,17 @@ impl<K, V, S> HashMap<K, V, S>
/// assert_eq!(map.remove(&1), None); /// assert_eq!(map.remove(&1), None);
/// ``` /// ```
pub fn remove<Q: ?Sized>(&mut self, k: &Q) -> Option<V> pub fn remove<Q: ?Sized>(&mut self, k: &Q) -> Option<V>
where K: Borrow<Q>, where
Q: Hash + Eq K: Borrow<Q>,
Q: Hash + Eq,
{ {
if self.table.size() == 0 { if self.table.size() == 0 {
return None; return None;
} }
self.search_mut(k).into_occupied_bucket().map(|bucket| pop_internal(bucket).1) self.search_mut(k)
.into_occupied_bucket()
.map(|bucket| pop_internal(bucket).1)
} }
/// Retains only the elements specified by the predicate. /// Retains only the elements specified by the predicate.
@ -1211,7 +1250,8 @@ impl<K, V, S> HashMap<K, V, S>
/// assert_eq!(map.len(), 4); /// assert_eq!(map.len(), 4);
/// ``` /// ```
pub fn retain<F>(&mut self, mut f: F) pub fn retain<F>(&mut self, mut f: F)
where F: FnMut(&K, &mut V) -> bool where
F: FnMut(&K, &mut V) -> bool,
{ {
if self.table.size() == 0 { if self.table.size() == 0 {
return; return;
@ -1236,9 +1276,7 @@ impl<K, V, S> HashMap<K, V, S>
full.into_bucket() full.into_bucket()
} }
}, },
Empty(b) => { Empty(b) => b.into_bucket(),
b.into_bucket()
}
}; };
bucket.prev(); // reverse iteration bucket.prev(); // reverse iteration
debug_assert!(elems_left == 0 || bucket.index() != start_index); debug_assert!(elems_left == 0 || bucket.index() != start_index);
@ -1247,30 +1285,34 @@ impl<K, V, S> HashMap<K, V, S>
} }
impl<K, V, S> PartialEq for HashMap<K, V, S> impl<K, V, S> PartialEq for HashMap<K, V, S>
where K: Eq + Hash, where
K: Eq + Hash,
V: PartialEq, V: PartialEq,
S: BuildHasher S: BuildHasher,
{ {
fn eq(&self, other: &HashMap<K, V, S>) -> bool { fn eq(&self, other: &HashMap<K, V, S>) -> bool {
if self.len() != other.len() { if self.len() != other.len() {
return false; return false;
} }
self.iter().all(|(key, value)| other.get(key).map_or(false, |v| *value == *v)) self.iter()
.all(|(key, value)| other.get(key).map_or(false, |v| *value == *v))
} }
} }
impl<K, V, S> Eq for HashMap<K, V, S> impl<K, V, S> Eq for HashMap<K, V, S>
where K: Eq + Hash, where
K: Eq + Hash,
V: Eq, V: Eq,
S: BuildHasher S: BuildHasher,
{ {
} }
impl<K, V, S> Debug for HashMap<K, V, S> impl<K, V, S> Debug for HashMap<K, V, S>
where K: Eq + Hash + Debug, where
K: Eq + Hash + Debug,
V: Debug, V: Debug,
S: BuildHasher S: BuildHasher,
{ {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_map().entries(self.iter()).finish() f.debug_map().entries(self.iter()).finish()
@ -1278,8 +1320,9 @@ impl<K, V, S> Debug for HashMap<K, V, S>
} }
impl<K, V, S> Default for HashMap<K, V, S> impl<K, V, S> Default for HashMap<K, V, S>
where K: Eq + Hash, where
S: BuildHasher + Default K: Eq + Hash,
S: BuildHasher + Default,
{ {
/// Creates an empty `HashMap<K, V, S>`, with the `Default` value for the hasher. /// Creates an empty `HashMap<K, V, S>`, with the `Default` value for the hasher.
fn default() -> HashMap<K, V, S> { fn default() -> HashMap<K, V, S> {
@ -1288,9 +1331,10 @@ impl<K, V, S> Default for HashMap<K, V, S>
} }
impl<'a, K, Q: ?Sized, V, S> Index<&'a Q> for HashMap<K, V, S> impl<'a, K, Q: ?Sized, V, S> Index<&'a Q> for HashMap<K, V, S>
where K: Eq + Hash + Borrow<Q>, where
K: Eq + Hash + Borrow<Q>,
Q: Eq + Hash, Q: Eq + Hash,
S: BuildHasher S: BuildHasher,
{ {
type Output = V; type Output = V;
@ -1314,15 +1358,15 @@ pub struct Iter<'a, K: 'a, V: 'a> {
// FIXME(#19839) Remove in favor of `#[derive(Clone)]` // FIXME(#19839) Remove in favor of `#[derive(Clone)]`
impl<'a, K, V> Clone for Iter<'a, K, V> { impl<'a, K, V> Clone for Iter<'a, K, V> {
fn clone(&self) -> Iter<'a, K, V> { fn clone(&self) -> Iter<'a, K, V> {
Iter { inner: self.inner.clone() } Iter {
inner: self.inner.clone(),
}
} }
} }
impl<'a, K: Debug, V: Debug> fmt::Debug for Iter<'a, K, V> { impl<'a, K: Debug, V: Debug> fmt::Debug for Iter<'a, K, V> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_list() f.debug_list().entries(self.clone()).finish()
.entries(self.clone())
.finish()
} }
} }
@ -1362,15 +1406,15 @@ pub struct Keys<'a, K: 'a, V: 'a> {
// FIXME(#19839) Remove in favor of `#[derive(Clone)]` // FIXME(#19839) Remove in favor of `#[derive(Clone)]`
impl<'a, K, V> Clone for Keys<'a, K, V> { impl<'a, K, V> Clone for Keys<'a, K, V> {
fn clone(&self) -> Keys<'a, K, V> { fn clone(&self) -> Keys<'a, K, V> {
Keys { inner: self.inner.clone() } Keys {
inner: self.inner.clone(),
}
} }
} }
impl<'a, K: Debug, V> fmt::Debug for Keys<'a, K, V> { impl<'a, K: Debug, V> fmt::Debug for Keys<'a, K, V> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_list() f.debug_list().entries(self.clone()).finish()
.entries(self.clone())
.finish()
} }
} }
@ -1388,15 +1432,15 @@ pub struct Values<'a, K: 'a, V: 'a> {
// FIXME(#19839) Remove in favor of `#[derive(Clone)]` // FIXME(#19839) Remove in favor of `#[derive(Clone)]`
impl<'a, K, V> Clone for Values<'a, K, V> { impl<'a, K, V> Clone for Values<'a, K, V> {
fn clone(&self) -> Values<'a, K, V> { fn clone(&self) -> Values<'a, K, V> {
Values { inner: self.inner.clone() } Values {
inner: self.inner.clone(),
}
} }
} }
impl<'a, K, V: Debug> fmt::Debug for Values<'a, K, V> { impl<'a, K, V: Debug> fmt::Debug for Values<'a, K, V> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_list() f.debug_list().entries(self.clone()).finish()
.entries(self.clone())
.finish()
} }
} }
@ -1423,7 +1467,9 @@ pub struct ValuesMut<'a, K: 'a, V: 'a> {
} }
enum InternalEntry<K, V, M> { enum InternalEntry<K, V, M> {
Occupied { elem: FullBucket<K, V, M> }, Occupied {
elem: FullBucket<K, V, M>,
},
Vacant { Vacant {
hash: SafeHash, hash: SafeHash,
elem: VacantEntryState<K, V, M>, elem: VacantEntryState<K, V, M>,
@ -1445,19 +1491,11 @@ impl<'a, K, V> InternalEntry<K, V, &'a mut RawTable<K, V>> {
#[inline] #[inline]
fn into_entry(self, key: K) -> Option<Entry<'a, K, V>> { fn into_entry(self, key: K) -> Option<Entry<'a, K, V>> {
match self { match self {
InternalEntry::Occupied { elem } => { InternalEntry::Occupied { elem } => Some(Occupied(OccupiedEntry {
Some(Occupied(OccupiedEntry {
key: Some(key), key: Some(key),
elem, elem,
})) })),
} InternalEntry::Vacant { hash, elem } => Some(Vacant(VacantEntry { hash, key, elem })),
InternalEntry::Vacant { hash, elem } => {
Some(Vacant(VacantEntry {
hash,
key,
elem,
}))
}
InternalEntry::TableIsEmpty => None, InternalEntry::TableIsEmpty => None,
} }
} }
@ -1471,25 +1509,17 @@ impl<'a, K, V> InternalEntry<K, V, &'a mut RawTable<K, V>> {
/// [`entry`]: struct.HashMap.html#method.entry /// [`entry`]: struct.HashMap.html#method.entry
pub enum Entry<'a, K: 'a, V: 'a> { pub enum Entry<'a, K: 'a, V: 'a> {
/// An occupied entry. /// An occupied entry.
Occupied( OccupiedEntry<'a, K, V>), Occupied(OccupiedEntry<'a, K, V>),
/// A vacant entry. /// A vacant entry.
Vacant( VacantEntry<'a, K, V>), Vacant(VacantEntry<'a, K, V>),
} }
impl<'a, K: 'a + Debug, V: 'a + Debug> Debug for Entry<'a, K, V> { impl<'a, K: 'a + Debug, V: 'a + Debug> Debug for Entry<'a, K, V> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self { match *self {
Vacant(ref v) => { Vacant(ref v) => f.debug_tuple("Entry").field(v).finish(),
f.debug_tuple("Entry") Occupied(ref o) => f.debug_tuple("Entry").field(o).finish(),
.field(v)
.finish()
}
Occupied(ref o) => {
f.debug_tuple("Entry")
.field(o)
.finish()
}
} }
} }
} }
@ -1524,9 +1554,7 @@ pub struct VacantEntry<'a, K: 'a, V: 'a> {
impl<'a, K: 'a + Debug, V: 'a> Debug for VacantEntry<'a, K, V> { impl<'a, K: 'a + Debug, V: 'a> Debug for VacantEntry<'a, K, V> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_tuple("VacantEntry") f.debug_tuple("VacantEntry").field(self.key()).finish()
.field(self.key())
.finish()
} }
} }
@ -1540,8 +1568,9 @@ enum VacantEntryState<K, V, M> {
} }
impl<'a, K, V, S> IntoIterator for &'a HashMap<K, V, S> impl<'a, K, V, S> IntoIterator for &'a HashMap<K, V, S>
where K: Eq + Hash, where
S: BuildHasher K: Eq + Hash,
S: BuildHasher,
{ {
type Item = (&'a K, &'a V); type Item = (&'a K, &'a V);
type IntoIter = Iter<'a, K, V>; type IntoIter = Iter<'a, K, V>;
@ -1552,8 +1581,9 @@ impl<'a, K, V, S> IntoIterator for &'a HashMap<K, V, S>
} }
impl<'a, K, V, S> IntoIterator for &'a mut HashMap<K, V, S> impl<'a, K, V, S> IntoIterator for &'a mut HashMap<K, V, S>
where K: Eq + Hash, where
S: BuildHasher K: Eq + Hash,
S: BuildHasher,
{ {
type Item = (&'a K, &'a mut V); type Item = (&'a K, &'a mut V);
type IntoIter = IterMut<'a, K, V>; type IntoIter = IterMut<'a, K, V>;
@ -1564,8 +1594,9 @@ impl<'a, K, V, S> IntoIterator for &'a mut HashMap<K, V, S>
} }
impl<K, V, S> IntoIterator for HashMap<K, V, S> impl<K, V, S> IntoIterator for HashMap<K, V, S>
where K: Eq + Hash, where
S: BuildHasher K: Eq + Hash,
S: BuildHasher,
{ {
type Item = (K, V); type Item = (K, V);
type IntoIter = IntoIter<K, V>; type IntoIter = IntoIter<K, V>;
@ -1588,7 +1619,9 @@ impl<K, V, S> IntoIterator for HashMap<K, V, S>
/// let vec: Vec<(&str, isize)> = map.into_iter().collect(); /// let vec: Vec<(&str, isize)> = map.into_iter().collect();
/// ``` /// ```
fn into_iter(self) -> IntoIter<K, V> { fn into_iter(self) -> IntoIter<K, V> {
IntoIter { inner: self.table.into_iter() } IntoIter {
inner: self.table.into_iter(),
}
} }
} }
@ -1611,7 +1644,6 @@ impl<'a, K, V> ExactSizeIterator for Iter<'a, K, V> {
} }
} }
impl<'a, K, V> Iterator for IterMut<'a, K, V> { impl<'a, K, V> Iterator for IterMut<'a, K, V> {
type Item = (&'a K, &'a mut V); type Item = (&'a K, &'a mut V);
@ -1632,13 +1664,12 @@ impl<'a, K, V> ExactSizeIterator for IterMut<'a, K, V> {
} }
impl<'a, K, V> fmt::Debug for IterMut<'a, K, V> impl<'a, K, V> fmt::Debug for IterMut<'a, K, V>
where K: fmt::Debug, where
K: fmt::Debug,
V: fmt::Debug, V: fmt::Debug,
{ {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_list().entries(self.inner.iter()).finish()
} }
} }
@ -1663,9 +1694,7 @@ impl<K, V> ExactSizeIterator for IntoIter<K, V> {
impl<K: Debug, V: Debug> fmt::Debug for IntoIter<K, V> { impl<K: Debug, V: Debug> fmt::Debug for IntoIter<K, V> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_list().entries(self.inner.iter()).finish()
} }
} }
@ -1726,13 +1755,12 @@ impl<'a, K, V> ExactSizeIterator for ValuesMut<'a, K, V> {
} }
impl<'a, K, V> fmt::Debug for ValuesMut<'a, K, V> impl<'a, K, V> fmt::Debug for ValuesMut<'a, K, V>
where K: fmt::Debug, where
K: fmt::Debug,
V: fmt::Debug, V: fmt::Debug,
{ {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_list().entries(self.inner.inner.iter()).finish()
} }
} }
@ -1756,13 +1784,12 @@ impl<'a, K, V> ExactSizeIterator for Drain<'a, K, V> {
} }
impl<'a, K, V> fmt::Debug for Drain<'a, K, V> impl<'a, K, V> fmt::Debug for Drain<'a, K, V>
where K: fmt::Debug, where
K: fmt::Debug,
V: fmt::Debug, V: fmt::Debug,
{ {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_list().entries(self.inner.iter()).finish()
} }
} }
@ -2057,8 +2084,9 @@ impl<'a, K: 'a, V: 'a> VacantEntry<'a, K, V> {
} }
impl<K, V, S> FromIterator<(K, V)> for HashMap<K, V, S> impl<K, V, S> FromIterator<(K, V)> for HashMap<K, V, S>
where K: Eq + Hash, where
S: BuildHasher + Default K: Eq + Hash,
S: BuildHasher + Default,
{ {
fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> HashMap<K, V, S> { fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> HashMap<K, V, S> {
let mut map = HashMap::with_hasher(Default::default()); let mut map = HashMap::with_hasher(Default::default());
@ -2068,8 +2096,9 @@ impl<K, V, S> FromIterator<(K, V)> for HashMap<K, V, S>
} }
impl<K, V, S> Extend<(K, V)> for HashMap<K, V, S> impl<K, V, S> Extend<(K, V)> for HashMap<K, V, S>
where K: Eq + Hash, where
S: BuildHasher K: Eq + Hash,
S: BuildHasher,
{ {
fn extend<T: IntoIterator<Item = (K, V)>>(&mut self, iter: T) { fn extend<T: IntoIterator<Item = (K, V)>>(&mut self, iter: T) {
// Keys may be already present or show multiple times in the iterator. // Keys may be already present or show multiple times in the iterator.
@ -2090,9 +2119,10 @@ impl<K, V, S> Extend<(K, V)> for HashMap<K, V, S>
} }
impl<'a, K, V, S> Extend<(&'a K, &'a V)> for HashMap<K, V, S> impl<'a, K, V, S> Extend<(&'a K, &'a V)> for HashMap<K, V, S>
where K: Eq + Hash + Copy, where
K: Eq + Hash + Copy,
V: Copy, V: Copy,
S: BuildHasher S: BuildHasher,
{ {
fn extend<T: IntoIterator<Item = (&'a K, &'a V)>>(&mut self, iter: T) { fn extend<T: IntoIterator<Item = (&'a K, &'a V)>>(&mut self, iter: T) {
self.extend(iter.into_iter().map(|(&key, &value)| (key, value))); self.extend(iter.into_iter().map(|(&key, &value)| (key, value)));
@ -2102,16 +2132,18 @@ impl<'a, K, V, S> Extend<(&'a K, &'a V)> for HashMap<K, V, S>
// FORK NOTE: These can be reused // FORK NOTE: These can be reused
pub use std::collections::hash_map::{DefaultHasher, RandomState}; pub use std::collections::hash_map::{DefaultHasher, RandomState};
impl<K, S, Q: ?Sized> super::Recover<Q> for HashMap<K, (), S> impl<K, S, Q: ?Sized> super::Recover<Q> for HashMap<K, (), S>
where K: Eq + Hash + Borrow<Q>, where
K: Eq + Hash + Borrow<Q>,
S: BuildHasher, S: BuildHasher,
Q: Eq + Hash Q: Eq + Hash,
{ {
type Key = K; type Key = K;
fn get(&self, key: &Q) -> Option<&K> { fn get(&self, key: &Q) -> Option<&K> {
self.search(key)
.into_occupied_bucket()
.map(|bucket| bucket.into_refs().0)
} }
fn take(&mut self, key: &Q) -> Option<K> { fn take(&mut self, key: &Q) -> Option<K> {
@ -2119,7 +2151,9 @@ impl<K, S, Q: ?Sized> super::Recover<Q> for HashMap<K, (), S>
return None; return None;
} }
self.search_mut(key)
.into_occupied_bucket()
.map(|bucket| pop_internal(bucket).0)
} }
fn replace(&mut self, key: K) -> Option<K> { fn replace(&mut self, key: K) -> Option<K> {
@ -2129,11 +2163,11 @@ impl<K, S, Q: ?Sized> super::Recover<Q> for HashMap<K, (), S>
Occupied(mut occupied) => { Occupied(mut occupied) => {
let key = occupied.take_key().unwrap(); let key = occupied.take_key().unwrap();
Some(mem::replace(occupied.elem.read_mut().0, key)) Some(mem::replace(occupied.elem.read_mut().0, key))
} },
Vacant(vacant) => { Vacant(vacant) => {
vacant.insert(()); vacant.insert(());
None None
} },
} }
} }
} }
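A minimal sketch of the same idea on std's HashMap<K, ()> (the helper names here are illustrative, not part of this fork): the map owns its keys, so a set view can still hand back or swap the stored key, which is what the Recover impl above provides for HashSet.

use std::collections::HashMap;
use std::hash::Hash;

// Hypothetical helpers mirroring Recover::take / Recover::replace.
fn take_key<K: Eq + Hash>(map: &mut HashMap<K, ()>, key: &K) -> Option<K> {
    // remove_entry returns the key that was stored in the map, not the lookup key.
    map.remove_entry(key).map(|(k, ())| k)
}

fn replace_key<K: Eq + Hash>(map: &mut HashMap<K, ()>, key: K) -> Option<K> {
    let old = map.remove_entry(&key).map(|(k, ())| k);
    map.insert(key, ());
    old
}

fn main() {
    let mut m: HashMap<String, ()> = HashMap::new();
    m.insert("alpha".to_string(), ());
    assert_eq!(take_key(&mut m, &"alpha".to_string()), Some("alpha".to_string()));
    assert_eq!(replace_key(&mut m, "beta".to_string()), None);
}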
@ -2170,8 +2204,9 @@ fn assert_covariance() {
fn values_val<'a, 'new>(v: Values<'a, u8, &'static str>) -> Values<'a, u8, &'new str> { fn values_val<'a, 'new>(v: Values<'a, u8, &'static str>) -> Values<'a, u8, &'new str> {
v v
} }
fn drain<'new>(
d: Drain<'static, &'static str, &'static str>,
) -> Drain<'new, &'new str, &'new str> {
d d
} }
} }
@ -2319,19 +2354,19 @@ mod test_map {
DROP_VECTOR.with(|v| { DROP_VECTOR.with(|v| {
assert_eq!(v.borrow()[i], 1); assert_eq!(v.borrow()[i], 1);
assert_eq!(v.borrow()[i+100], 1); assert_eq!(v.borrow()[i + 100], 1);
}); });
} }
DROP_VECTOR.with(|v| { DROP_VECTOR.with(|v| {
for i in 0..50 { for i in 0..50 {
assert_eq!(v.borrow()[i], 0); assert_eq!(v.borrow()[i], 0);
assert_eq!(v.borrow()[i+100], 0); assert_eq!(v.borrow()[i + 100], 0);
} }
for i in 50..100 { for i in 50..100 {
assert_eq!(v.borrow()[i], 1); assert_eq!(v.borrow()[i], 1);
assert_eq!(v.borrow()[i+100], 1); assert_eq!(v.borrow()[i + 100], 1);
} }
}); });
} }
@ -2388,13 +2423,9 @@ mod test_map {
for _ in half.by_ref() {} for _ in half.by_ref() {}
DROP_VECTOR.with(|v| { DROP_VECTOR.with(|v| {
let nk = (0..100).filter(|&i| v.borrow()[i] == 1).count();
let nv = (0..100).filter(|&i| v.borrow()[i + 100] == 1).count();
assert_eq!(nk, 50); assert_eq!(nk, 50);
assert_eq!(nv, 50); assert_eq!(nv, 50);
@ -2419,7 +2450,7 @@ mod test_map {
let mut m: HashMap<isize, bool> = HashMap::new(); let mut m: HashMap<isize, bool> = HashMap::new();
match m.entry(0) { match m.entry(0) {
Occupied(_) => panic!(), Occupied(_) => panic!(),
Vacant(_) => {} Vacant(_) => {},
} }
assert!(*m.entry(0).or_insert(true)); assert!(*m.entry(0).or_insert(true));
assert_eq!(m.len(), 1); assert_eq!(m.len(), 1);
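For reference, a short sketch of the entry()/or_insert flow this test exercises, written against std's HashMap:

use std::collections::HashMap;

fn main() {
    let mut m: HashMap<isize, bool> = HashMap::new();
    assert!(*m.entry(0).or_insert(true));   // vacant entry: inserts true
    assert!(*m.entry(0).or_insert(false));  // occupied entry: keeps the existing true
    assert_eq!(m.len(), 1);
}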
@ -2574,7 +2605,7 @@ mod test_map {
fn test_iterate() { fn test_iterate() {
let mut m = HashMap::with_capacity(4); let mut m = HashMap::with_capacity(4);
for i in 0..32 { for i in 0..32 {
assert!(m.insert(i, i*2).is_none()); assert!(m.insert(i, i * 2).is_none());
} }
assert_eq!(m.len(), 32); assert_eq!(m.len(), 32);
@ -2662,8 +2693,7 @@ mod test_map {
let map_str = format!("{:?}", map); let map_str = format!("{:?}", map);
assert!(map_str == "{1: 2, 3: 4}" || map_str == "{3: 4, 1: 2}");
assert_eq!(format!("{:?}", empty), "{}"); assert_eq!(format!("{:?}", empty), "{}");
} }
@ -2876,12 +2906,11 @@ mod test_map {
Occupied(mut view) => { Occupied(mut view) => {
assert_eq!(view.get(), &10); assert_eq!(view.get(), &10);
assert_eq!(view.insert(100), 10); assert_eq!(view.insert(100), 10);
} },
} }
assert_eq!(map.get(&1).unwrap(), &100); assert_eq!(map.get(&1).unwrap(), &100);
assert_eq!(map.len(), 6); assert_eq!(map.len(), 6);
// Existing key (update) // Existing key (update)
match map.entry(2) { match map.entry(2) {
Vacant(_) => unreachable!(), Vacant(_) => unreachable!(),
@ -2889,7 +2918,7 @@ mod test_map {
let v = view.get_mut(); let v = view.get_mut();
let new_v = (*v) * 10; let new_v = (*v) * 10;
*v = new_v; *v = new_v;
} },
} }
assert_eq!(map.get(&2).unwrap(), &200); assert_eq!(map.get(&2).unwrap(), &200);
assert_eq!(map.len(), 6); assert_eq!(map.len(), 6);
@ -2899,18 +2928,17 @@ mod test_map {
Vacant(_) => unreachable!(), Vacant(_) => unreachable!(),
Occupied(view) => { Occupied(view) => {
assert_eq!(view.remove(), 30); assert_eq!(view.remove(), 30);
} },
} }
assert_eq!(map.get(&3), None); assert_eq!(map.get(&3), None);
assert_eq!(map.len(), 5); assert_eq!(map.len(), 5);
// Inexistent key (insert) // Inexistent key (insert)
match map.entry(10) { match map.entry(10) {
Occupied(_) => unreachable!(), Occupied(_) => unreachable!(),
Vacant(view) => { Vacant(view) => {
assert_eq!(*view.insert(1000), 1000); assert_eq!(*view.insert(1000), 1000);
} },
} }
assert_eq!(map.get(&10).unwrap(), &1000); assert_eq!(map.get(&10).unwrap(), &1000);
assert_eq!(map.len(), 6); assert_eq!(map.len(), 6);
@ -2922,8 +2950,7 @@ mod test_map {
// Test for #19292 // Test for #19292
fn check(m: &HashMap<isize, ()>) { fn check(m: &HashMap<isize, ()>) {
for k in m.keys() { for k in m.keys() {
assert!(m.contains_key(k), "{} is in keys() but not in the map?", k);
} }
} }
@ -2939,11 +2966,11 @@ mod test_map {
for i in 0..1000 { for i in 0..1000 {
let x = rng.gen_range(-10, 10); let x = rng.gen_range(-10, 10);
match m.entry(x) { match m.entry(x) {
Vacant(_) => {} Vacant(_) => {},
Occupied(e) => { Occupied(e) => {
println!("{}: remove {}", i, x); println!("{}: remove {}", i, x);
e.remove(); e.remove();
} },
} }
check(&m); check(&m);
@ -3021,7 +3048,7 @@ mod test_map {
Vacant(e) => { Vacant(e) => {
assert_eq!(key, *e.key()); assert_eq!(key, *e.key());
e.insert(value.clone()); e.insert(value.clone());
} },
} }
assert_eq!(a.len(), 1); assert_eq!(a.len(), 1);
assert_eq!(a[key], value); assert_eq!(a[key], value);
@ -3029,7 +3056,7 @@ mod test_map {
#[test] #[test]
fn test_retain() { fn test_retain() {
let mut map: HashMap<isize, isize> = (0..100).map(|x|(x, x*10)).collect(); let mut map: HashMap<isize, isize> = (0..100).map(|x| (x, x * 10)).collect();
map.retain(|&k, _| k % 2 == 0); map.retain(|&k, _| k % 2 == 0);
assert_eq!(map.len(), 50); assert_eq!(map.len(), 50);


@ -122,8 +122,9 @@ pub struct HashSet<T, S = RandomState> {
} }
impl<T, S> HashSet<T, S> impl<T, S> HashSet<T, S>
where T: Eq + Hash, where
S: BuildHasher T: Eq + Hash,
S: BuildHasher,
{ {
/// Creates a new empty hash set which will use the given hasher to hash /// Creates a new empty hash set which will use the given hasher to hash
/// keys. /// keys.
@ -147,7 +148,9 @@ impl<T, S> HashSet<T, S>
/// ``` /// ```
#[inline] #[inline]
pub fn with_hasher(hasher: S) -> HashSet<T, S> { pub fn with_hasher(hasher: S) -> HashSet<T, S> {
HashSet { map: HashMap::with_hasher(hasher) } HashSet {
map: HashMap::with_hasher(hasher),
}
} }
/// Creates an empty `HashSet` with the specified capacity, using
@ -173,7 +176,9 @@ impl<T, S> HashSet<T, S>
/// ``` /// ```
#[inline] #[inline]
pub fn with_capacity_and_hasher(capacity: usize, hasher: S) -> HashSet<T, S> { pub fn with_capacity_and_hasher(capacity: usize, hasher: S) -> HashSet<T, S> {
HashSet { map: HashMap::with_capacity_and_hasher(capacity, hasher) } HashSet {
map: HashMap::with_capacity_and_hasher(capacity, hasher),
}
} }
/// Returns a reference to the set's [`BuildHasher`]. /// Returns a reference to the set's [`BuildHasher`].
@ -265,7 +270,9 @@ impl<T, S> HashSet<T, S>
/// } /// }
/// ``` /// ```
pub fn iter(&self) -> Iter<T> { pub fn iter(&self) -> Iter<T> {
Iter { iter: self.map.keys() } Iter {
iter: self.map.keys(),
}
} }
/// Visits the values representing the difference, /// Visits the values representing the difference,
@ -319,10 +326,13 @@ impl<T, S> HashSet<T, S>
/// assert_eq!(diff1, diff2); /// assert_eq!(diff1, diff2);
/// assert_eq!(diff1, [1, 4].iter().collect()); /// assert_eq!(diff1, [1, 4].iter().collect());
/// ``` /// ```
pub fn symmetric_difference<'a>(
&'a self,
other: &'a HashSet<T, S>,
) -> SymmetricDifference<'a, T, S> {
SymmetricDifference {
iter: self.difference(other).chain(other.difference(self)),
}
} }
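A quick sanity check of the construction above, using std's HashSet (which exposes the same operation): the symmetric difference is exactly the two one-way differences chained together.

use std::collections::HashSet;

fn main() {
    let a: HashSet<i32> = [1, 2, 3].iter().cloned().collect();
    let b: HashSet<i32> = [2, 3, 4].iter().cloned().collect();
    let via_method: HashSet<i32> = a.symmetric_difference(&b).cloned().collect();
    let via_chain: HashSet<i32> = a.difference(&b).chain(b.difference(&a)).cloned().collect();
    assert_eq!(via_method, via_chain);
    assert_eq!(via_method, [1, 4].iter().cloned().collect());
}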
/// Visits the values representing the intersection, /// Visits the values representing the intersection,
@ -369,7 +379,9 @@ impl<T, S> HashSet<T, S>
/// assert_eq!(union, [1, 2, 3, 4].iter().collect()); /// assert_eq!(union, [1, 2, 3, 4].iter().collect());
/// ``` /// ```
pub fn union<'a>(&'a self, other: &'a HashSet<T, S>) -> Union<'a, T, S> { pub fn union<'a>(&'a self, other: &'a HashSet<T, S>) -> Union<'a, T, S> {
Union { iter: self.iter().chain(other.difference(self)) } Union {
iter: self.iter().chain(other.difference(self)),
}
} }
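Likewise for union: every element of self, plus whatever in other is not already in self, mirroring iter().chain(other.difference(self)). A sketch with std's HashSet:

use std::collections::HashSet;

fn main() {
    let a: HashSet<&str> = ["x", "y"].iter().cloned().collect();
    let b: HashSet<&str> = ["y", "z"].iter().cloned().collect();
    let via_chain: HashSet<&str> = a.iter().chain(b.difference(&a)).cloned().collect();
    let via_method: HashSet<&str> = a.union(&b).cloned().collect();
    assert_eq!(via_chain, via_method);
}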
/// Returns the number of elements in the set. /// Returns the number of elements in the set.
@ -423,7 +435,9 @@ impl<T, S> HashSet<T, S>
/// ``` /// ```
#[inline] #[inline]
pub fn drain(&mut self) -> Drain<T> { pub fn drain(&mut self) -> Drain<T> {
Drain { iter: self.map.drain() } Drain {
iter: self.map.drain(),
}
} }
/// Clears the set, removing all values. /// Clears the set, removing all values.
@ -438,7 +452,10 @@ impl<T, S> HashSet<T, S>
/// v.clear(); /// v.clear();
/// assert!(v.is_empty()); /// assert!(v.is_empty());
/// ``` /// ```
pub fn clear(&mut self)
where
T: 'static,
{
self.map.clear() self.map.clear()
} }
@ -461,8 +478,9 @@ impl<T, S> HashSet<T, S>
/// [`Eq`]: ../../std/cmp/trait.Eq.html /// [`Eq`]: ../../std/cmp/trait.Eq.html
/// [`Hash`]: ../../std/hash/trait.Hash.html /// [`Hash`]: ../../std/hash/trait.Hash.html
pub fn contains<Q: ?Sized>(&self, value: &Q) -> bool pub fn contains<Q: ?Sized>(&self, value: &Q) -> bool
where T: Borrow<Q>, where
Q: Hash + Eq T: Borrow<Q>,
Q: Hash + Eq,
{ {
self.map.contains_key(value) self.map.contains_key(value)
} }
@ -476,8 +494,9 @@ impl<T, S> HashSet<T, S>
/// [`Eq`]: ../../std/cmp/trait.Eq.html /// [`Eq`]: ../../std/cmp/trait.Eq.html
/// [`Hash`]: ../../std/hash/trait.Hash.html /// [`Hash`]: ../../std/hash/trait.Hash.html
pub fn get<Q: ?Sized>(&self, value: &Q) -> Option<&T> pub fn get<Q: ?Sized>(&self, value: &Q) -> Option<&T>
where T: Borrow<Q>, where
Q: Hash + Eq T: Borrow<Q>,
Q: Hash + Eq,
{ {
Recover::get(&self.map, value) Recover::get(&self.map, value)
} }
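The Borrow<Q> bounds above are what let a set of owned keys be queried with a borrowed form of the key; a small illustration with std's HashSet (String keys looked up by &str):

use std::collections::HashSet;

fn main() {
    let mut s: HashSet<String> = HashSet::new();
    s.insert("servo".to_string());
    assert!(s.contains("servo"));                         // &str query, no String allocated
    assert_eq!(s.get("servo").map(|k| k.len()), Some(5)); // returns the stored key
}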
@ -598,8 +617,9 @@ impl<T, S> HashSet<T, S>
/// [`Eq`]: ../../std/cmp/trait.Eq.html /// [`Eq`]: ../../std/cmp/trait.Eq.html
/// [`Hash`]: ../../std/hash/trait.Hash.html /// [`Hash`]: ../../std/hash/trait.Hash.html
pub fn remove<Q: ?Sized>(&mut self, value: &Q) -> bool pub fn remove<Q: ?Sized>(&mut self, value: &Q) -> bool
where T: Borrow<Q>, where
Q: Hash + Eq T: Borrow<Q>,
Q: Hash + Eq,
{ {
self.map.remove(value).is_some() self.map.remove(value).is_some()
} }
@ -613,8 +633,9 @@ impl<T, S> HashSet<T, S>
/// [`Eq`]: ../../std/cmp/trait.Eq.html /// [`Eq`]: ../../std/cmp/trait.Eq.html
/// [`Hash`]: ../../std/hash/trait.Hash.html /// [`Hash`]: ../../std/hash/trait.Hash.html
pub fn take<Q: ?Sized>(&mut self, value: &Q) -> Option<T> pub fn take<Q: ?Sized>(&mut self, value: &Q) -> Option<T>
where T: Borrow<Q>, where
Q: Hash + Eq T: Borrow<Q>,
Q: Hash + Eq,
{ {
Recover::take(&mut self.map, value) Recover::take(&mut self.map, value)
} }
@ -634,15 +655,17 @@ impl<T, S> HashSet<T, S>
/// assert_eq!(set.len(), 3); /// assert_eq!(set.len(), 3);
/// ``` /// ```
pub fn retain<F>(&mut self, mut f: F) pub fn retain<F>(&mut self, mut f: F)
where F: FnMut(&T) -> bool where
F: FnMut(&T) -> bool,
{ {
self.map.retain(|k, _| f(k)); self.map.retain(|k, _| f(k));
} }
} }
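A short usage sketch of retain (std's HashSet behaves the same way): only elements for which the predicate returns true survive.

use std::collections::HashSet;

fn main() {
    let mut set: HashSet<i32> = (1..=6).collect();
    set.retain(|&x| x % 2 == 0);
    assert_eq!(set, [2, 4, 6].iter().cloned().collect());
}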
impl<T, S> PartialEq for HashSet<T, S> impl<T, S> PartialEq for HashSet<T, S>
where T: Eq + Hash, where
S: BuildHasher T: Eq + Hash,
S: BuildHasher,
{ {
fn eq(&self, other: &HashSet<T, S>) -> bool { fn eq(&self, other: &HashSet<T, S>) -> bool {
if self.len() != other.len() { if self.len() != other.len() {
@ -654,14 +677,16 @@ impl<T, S> PartialEq for HashSet<T, S>
} }
impl<T, S> Eq for HashSet<T, S> impl<T, S> Eq for HashSet<T, S>
where T: Eq + Hash, where
S: BuildHasher T: Eq + Hash,
S: BuildHasher,
{ {
} }
impl<T, S> fmt::Debug for HashSet<T, S> impl<T, S> fmt::Debug for HashSet<T, S>
where T: Eq + Hash + fmt::Debug, where
S: BuildHasher T: Eq + Hash + fmt::Debug,
S: BuildHasher,
{ {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_set().entries(self.iter()).finish() f.debug_set().entries(self.iter()).finish()
@ -669,8 +694,9 @@ impl<T, S> fmt::Debug for HashSet<T, S>
} }
impl<T, S> FromIterator<T> for HashSet<T, S> impl<T, S> FromIterator<T> for HashSet<T, S>
where T: Eq + Hash, where
S: BuildHasher + Default T: Eq + Hash,
S: BuildHasher + Default,
{ {
fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> HashSet<T, S> { fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> HashSet<T, S> {
let mut set = HashSet::with_hasher(Default::default()); let mut set = HashSet::with_hasher(Default::default());
@ -680,8 +706,9 @@ impl<T, S> FromIterator<T> for HashSet<T, S>
} }
impl<T, S> Extend<T> for HashSet<T, S> impl<T, S> Extend<T> for HashSet<T, S>
where T: Eq + Hash, where
S: BuildHasher T: Eq + Hash,
S: BuildHasher,
{ {
fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) { fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
self.map.extend(iter.into_iter().map(|k| (k, ()))); self.map.extend(iter.into_iter().map(|k| (k, ())));
@ -689,8 +716,9 @@ impl<T, S> Extend<T> for HashSet<T, S>
} }
impl<'a, T, S> Extend<&'a T> for HashSet<T, S> impl<'a, T, S> Extend<&'a T> for HashSet<T, S>
where T: 'a + Eq + Hash + Copy, where
S: BuildHasher T: 'a + Eq + Hash + Copy,
S: BuildHasher,
{ {
fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) { fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
self.extend(iter.into_iter().cloned()); self.extend(iter.into_iter().cloned());
@ -698,18 +726,22 @@ impl<'a, T, S> Extend<&'a T> for HashSet<T, S>
} }
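The FromIterator and Extend impls above simply feed the backing map (key, ()) pairs; in use they look like this (sketch with std's HashSet):

use std::collections::HashSet;

fn main() {
    let mut s: HashSet<u32> = (0..3).collect(); // FromIterator
    s.extend(&[3u32, 4]);                       // Extend<&T> for Copy element types
    assert_eq!(s.len(), 5);
}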
impl<T, S> Default for HashSet<T, S> impl<T, S> Default for HashSet<T, S>
where T: Eq + Hash, where
S: BuildHasher + Default T: Eq + Hash,
S: BuildHasher + Default,
{ {
/// Creates an empty `HashSet<T, S>` with the `Default` value for the hasher. /// Creates an empty `HashSet<T, S>` with the `Default` value for the hasher.
fn default() -> HashSet<T, S> { fn default() -> HashSet<T, S> {
HashSet { map: HashMap::default() } HashSet {
map: HashMap::default(),
}
} }
} }
impl<'a, 'b, T, S> BitOr<&'b HashSet<T, S>> for &'a HashSet<T, S> impl<'a, 'b, T, S> BitOr<&'b HashSet<T, S>> for &'a HashSet<T, S>
where T: Eq + Hash + Clone, where
S: BuildHasher + Default T: Eq + Hash + Clone,
S: BuildHasher + Default,
{ {
type Output = HashSet<T, S>; type Output = HashSet<T, S>;
@ -739,8 +771,9 @@ impl<'a, 'b, T, S> BitOr<&'b HashSet<T, S>> for &'a HashSet<T, S>
} }
impl<'a, 'b, T, S> BitAnd<&'b HashSet<T, S>> for &'a HashSet<T, S> impl<'a, 'b, T, S> BitAnd<&'b HashSet<T, S>> for &'a HashSet<T, S>
where T: Eq + Hash + Clone, where
S: BuildHasher + Default T: Eq + Hash + Clone,
S: BuildHasher + Default,
{ {
type Output = HashSet<T, S>; type Output = HashSet<T, S>;
@ -770,8 +803,9 @@ impl<'a, 'b, T, S> BitAnd<&'b HashSet<T, S>> for &'a HashSet<T, S>
} }
impl<'a, 'b, T, S> BitXor<&'b HashSet<T, S>> for &'a HashSet<T, S> impl<'a, 'b, T, S> BitXor<&'b HashSet<T, S>> for &'a HashSet<T, S>
where T: Eq + Hash + Clone, where
S: BuildHasher + Default T: Eq + Hash + Clone,
S: BuildHasher + Default,
{ {
type Output = HashSet<T, S>; type Output = HashSet<T, S>;
@ -801,8 +835,9 @@ impl<'a, 'b, T, S> BitXor<&'b HashSet<T, S>> for &'a HashSet<T, S>
} }
impl<'a, 'b, T, S> Sub<&'b HashSet<T, S>> for &'a HashSet<T, S> impl<'a, 'b, T, S> Sub<&'b HashSet<T, S>> for &'a HashSet<T, S>
where T: Eq + Hash + Clone, where
S: BuildHasher + Default T: Eq + Hash + Clone,
S: BuildHasher + Default,
{ {
type Output = HashSet<T, S>; type Output = HashSet<T, S>;
@ -915,8 +950,9 @@ pub struct Union<'a, T: 'a, S: 'a> {
} }
impl<'a, T, S> IntoIterator for &'a HashSet<T, S> impl<'a, T, S> IntoIterator for &'a HashSet<T, S>
where T: Eq + Hash, where
S: BuildHasher T: Eq + Hash,
S: BuildHasher,
{ {
type Item = &'a T; type Item = &'a T;
type IntoIter = Iter<'a, T>; type IntoIter = Iter<'a, T>;
@ -927,8 +963,9 @@ impl<'a, T, S> IntoIterator for &'a HashSet<T, S>
} }
impl<T, S> IntoIterator for HashSet<T, S> impl<T, S> IntoIterator for HashSet<T, S>
where T: Eq + Hash, where
S: BuildHasher T: Eq + Hash,
S: BuildHasher,
{ {
type Item = T; type Item = T;
type IntoIter = IntoIter<T>; type IntoIter = IntoIter<T>;
@ -954,13 +991,17 @@ impl<T, S> IntoIterator for HashSet<T, S>
/// } /// }
/// ``` /// ```
fn into_iter(self) -> IntoIter<T> { fn into_iter(self) -> IntoIter<T> {
IntoIter { iter: self.map.into_iter() } IntoIter {
iter: self.map.into_iter(),
}
} }
} }
impl<'a, K> Clone for Iter<'a, K> { impl<'a, K> Clone for Iter<'a, K> {
fn clone(&self) -> Iter<'a, K> { fn clone(&self) -> Iter<'a, K> {
Iter { iter: self.iter.clone() } Iter {
iter: self.iter.clone(),
}
} }
} }
impl<'a, K> Iterator for Iter<'a, K> { impl<'a, K> Iterator for Iter<'a, K> {
@ -1003,10 +1044,7 @@ impl<K> ExactSizeIterator for IntoIter<K> {
impl<K: fmt::Debug> fmt::Debug for IntoIter<K> { impl<K: fmt::Debug> fmt::Debug for IntoIter<K> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let entries_iter = self.iter.inner.iter().map(|(k, _)| k);
f.debug_list().entries(entries_iter).finish() f.debug_list().entries(entries_iter).finish()
} }
} }
@ -1029,23 +1067,24 @@ impl<'a, K> ExactSizeIterator for Drain<'a, K> {
impl<'a, K: fmt::Debug> fmt::Debug for Drain<'a, K> { impl<'a, K: fmt::Debug> fmt::Debug for Drain<'a, K> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let entries_iter = self.iter.inner.iter().map(|(k, _)| k);
f.debug_list().entries(entries_iter).finish() f.debug_list().entries(entries_iter).finish()
} }
} }
impl<'a, T, S> Clone for Intersection<'a, T, S> { impl<'a, T, S> Clone for Intersection<'a, T, S> {
fn clone(&self) -> Intersection<'a, T, S> { fn clone(&self) -> Intersection<'a, T, S> {
Intersection { iter: self.iter.clone(), ..*self } Intersection {
iter: self.iter.clone(),
..*self
}
} }
} }
impl<'a, T, S> Iterator for Intersection<'a, T, S> impl<'a, T, S> Iterator for Intersection<'a, T, S>
where T: Eq + Hash, where
S: BuildHasher T: Eq + Hash,
S: BuildHasher,
{ {
type Item = &'a T; type Item = &'a T;
@ -1065,8 +1104,9 @@ impl<'a, T, S> Iterator for Intersection<'a, T, S>
} }
impl<'a, T, S> fmt::Debug for Intersection<'a, T, S> impl<'a, T, S> fmt::Debug for Intersection<'a, T, S>
where T: fmt::Debug + Eq + Hash, where
S: BuildHasher T: fmt::Debug + Eq + Hash,
S: BuildHasher,
{ {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_list().entries(self.clone()).finish() f.debug_list().entries(self.clone()).finish()
@ -1075,13 +1115,17 @@ impl<'a, T, S> fmt::Debug for Intersection<'a, T, S>
impl<'a, T, S> Clone for Difference<'a, T, S> { impl<'a, T, S> Clone for Difference<'a, T, S> {
fn clone(&self) -> Difference<'a, T, S> { fn clone(&self) -> Difference<'a, T, S> {
Difference { iter: self.iter.clone(), ..*self } Difference {
iter: self.iter.clone(),
..*self
}
} }
} }
impl<'a, T, S> Iterator for Difference<'a, T, S> impl<'a, T, S> Iterator for Difference<'a, T, S>
where T: Eq + Hash, where
S: BuildHasher T: Eq + Hash,
S: BuildHasher,
{ {
type Item = &'a T; type Item = &'a T;
@ -1101,8 +1145,9 @@ impl<'a, T, S> Iterator for Difference<'a, T, S>
} }
impl<'a, T, S> fmt::Debug for Difference<'a, T, S> impl<'a, T, S> fmt::Debug for Difference<'a, T, S>
where T: fmt::Debug + Eq + Hash, where
S: BuildHasher T: fmt::Debug + Eq + Hash,
S: BuildHasher,
{ {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_list().entries(self.clone()).finish() f.debug_list().entries(self.clone()).finish()
@ -1111,13 +1156,16 @@ impl<'a, T, S> fmt::Debug for Difference<'a, T, S>
impl<'a, T, S> Clone for SymmetricDifference<'a, T, S> { impl<'a, T, S> Clone for SymmetricDifference<'a, T, S> {
fn clone(&self) -> SymmetricDifference<'a, T, S> { fn clone(&self) -> SymmetricDifference<'a, T, S> {
SymmetricDifference { iter: self.iter.clone() } SymmetricDifference {
iter: self.iter.clone(),
}
} }
} }
impl<'a, T, S> Iterator for SymmetricDifference<'a, T, S> impl<'a, T, S> Iterator for SymmetricDifference<'a, T, S>
where T: Eq + Hash, where
S: BuildHasher T: Eq + Hash,
S: BuildHasher,
{ {
type Item = &'a T; type Item = &'a T;
@ -1130,8 +1178,9 @@ impl<'a, T, S> Iterator for SymmetricDifference<'a, T, S>
} }
impl<'a, T, S> fmt::Debug for SymmetricDifference<'a, T, S> impl<'a, T, S> fmt::Debug for SymmetricDifference<'a, T, S>
where T: fmt::Debug + Eq + Hash, where
S: BuildHasher T: fmt::Debug + Eq + Hash,
S: BuildHasher,
{ {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_list().entries(self.clone()).finish() f.debug_list().entries(self.clone()).finish()
@ -1140,13 +1189,16 @@ impl<'a, T, S> fmt::Debug for SymmetricDifference<'a, T, S>
impl<'a, T, S> Clone for Union<'a, T, S> { impl<'a, T, S> Clone for Union<'a, T, S> {
fn clone(&self) -> Union<'a, T, S> { fn clone(&self) -> Union<'a, T, S> {
Union { iter: self.iter.clone() } Union {
iter: self.iter.clone(),
}
} }
} }
impl<'a, T, S> fmt::Debug for Union<'a, T, S> impl<'a, T, S> fmt::Debug for Union<'a, T, S>
where T: fmt::Debug + Eq + Hash, where
S: BuildHasher T: fmt::Debug + Eq + Hash,
S: BuildHasher,
{ {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_list().entries(self.clone()).finish() f.debug_list().entries(self.clone()).finish()
@ -1154,8 +1206,9 @@ impl<'a, T, S> fmt::Debug for Union<'a, T, S>
} }
impl<'a, T, S> Iterator for Union<'a, T, S> impl<'a, T, S> Iterator for Union<'a, T, S>
where T: Eq + Hash, where
S: BuildHasher T: Eq + Hash,
S: BuildHasher,
{ {
type Item = &'a T; type Item = &'a T;
@ -1178,20 +1231,24 @@ fn assert_covariance() {
fn into_iter<'new>(v: IntoIter<&'static str>) -> IntoIter<&'new str> { fn into_iter<'new>(v: IntoIter<&'static str>) -> IntoIter<&'new str> {
v v
} }
fn difference<'a, 'new>(v: Difference<'a, &'static str, RandomState>) fn difference<'a, 'new>(
-> Difference<'a, &'new str, RandomState> { v: Difference<'a, &'static str, RandomState>,
) -> Difference<'a, &'new str, RandomState> {
v v
} }
fn symmetric_difference<'a, 'new>(v: SymmetricDifference<'a, &'static str, RandomState>) fn symmetric_difference<'a, 'new>(
-> SymmetricDifference<'a, &'new str, RandomState> { v: SymmetricDifference<'a, &'static str, RandomState>,
) -> SymmetricDifference<'a, &'new str, RandomState> {
v v
} }
fn intersection<'a, 'new>(v: Intersection<'a, &'static str, RandomState>) fn intersection<'a, 'new>(
-> Intersection<'a, &'new str, RandomState> { v: Intersection<'a, &'static str, RandomState>,
) -> Intersection<'a, &'new str, RandomState> {
v v
} }
fn union<'a, 'new>(v: Union<'a, &'static str, RandomState>) fn union<'a, 'new>(
-> Union<'a, &'new str, RandomState> { v: Union<'a, &'static str, RandomState>,
) -> Union<'a, &'new str, RandomState> {
v v
} }
fn drain<'new>(d: Drain<'static, &'static str>) -> Drain<'new, &'new str> { fn drain<'new>(d: Drain<'static, &'static str>) -> Drain<'new, &'new str> {


@ -44,7 +44,10 @@ pub struct FailedAllocationError {
impl FailedAllocationError { impl FailedAllocationError {
#[inline] #[inline]
pub fn new(reason: &'static str) -> Self { pub fn new(reason: &'static str) -> Self {
Self { reason, allocation_info: None } Self {
reason,
allocation_info: None,
}
} }
} }
@ -57,9 +60,11 @@ impl error::Error for FailedAllocationError {
impl fmt::Display for FailedAllocationError { impl fmt::Display for FailedAllocationError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self.allocation_info { match self.allocation_info {
Some(ref info) => write!(
f,
"{}, allocation: (size: {}, alignment: {})",
self.reason, info.size, info.alignment
),
None => self.reason.fmt(f), None => self.reason.fmt(f),
} }
} }


@ -29,9 +29,9 @@ impl<T: 'static> Unique<T> {
} }
} }
unsafe impl<T: Send + 'static> Send for Unique<T> { } unsafe impl<T: Send + 'static> Send for Unique<T> {}
unsafe impl<T: Sync + 'static> Sync for Unique<T> { } unsafe impl<T: Sync + 'static> Sync for Unique<T> {}
pub struct Shared<T: 'static> { pub struct Shared<T: 'static> {
ptr: NonZeroPtr<T>, ptr: NonZeroPtr<T>,


@ -203,7 +203,9 @@ impl SafeHash {
// //
// Truncate hash to fit in `HashUint`. // Truncate hash to fit in `HashUint`.
let hash_bits = size_of::<HashUint>() * 8; let hash_bits = size_of::<HashUint>() * 8;
SafeHash { hash: (1 << (hash_bits - 1)) | (hash as HashUint) } SafeHash {
hash: (1 << (hash_bits - 1)) | (hash as HashUint),
}
} }
} }
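A minimal sketch of the tag trick above (the type and constant names are copied for illustration, the free function is hypothetical): forcing the top bit on means a stored hash can never collide with the EMPTY_BUCKET sentinel, whatever the hasher produced.

type HashUint = usize;
const EMPTY_BUCKET: HashUint = 0;

// Hypothetical helper with the same body as SafeHash::new above.
fn make_safe(hash: u64) -> HashUint {
    let hash_bits = std::mem::size_of::<HashUint>() * 8;
    (1 << (hash_bits - 1)) | (hash as HashUint)
}

fn main() {
    assert_ne!(make_safe(0), EMPTY_BUCKET); // even a zero hash is tagged
    assert_eq!(make_safe(7) & 1, 1);        // low bits are preserved
}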
@ -211,8 +213,9 @@ impl SafeHash {
/// This function wraps up `hash_keyed` to be the only way outside this /// This function wraps up `hash_keyed` to be the only way outside this
/// module to generate a SafeHash. /// module to generate a SafeHash.
pub fn make_hash<T: ?Sized, S>(hash_state: &S, t: &T) -> SafeHash pub fn make_hash<T: ?Sized, S>(hash_state: &S, t: &T) -> SafeHash
where T: Hash, where
S: BuildHasher T: Hash,
S: BuildHasher,
{ {
let mut state = hash_state.build_hasher(); let mut state = hash_state.build_hasher();
t.hash(&mut state); t.hash(&mut state);
@ -294,7 +297,8 @@ impl<K, V, M> Bucket<K, V, M> {
} }
impl<K, V, M> Deref for FullBucket<K, V, M> impl<K, V, M> Deref for FullBucket<K, V, M>
where M: Deref<Target = RawTable<K, V>> where
M: Deref<Target = RawTable<K, V>>,
{ {
type Target = RawTable<K, V>; type Target = RawTable<K, V>;
fn deref(&self) -> &RawTable<K, V> { fn deref(&self) -> &RawTable<K, V> {
@ -308,7 +312,6 @@ pub trait Put<K, V> {
unsafe fn borrow_table_mut(&mut self) -> &mut RawTable<K, V>; unsafe fn borrow_table_mut(&mut self) -> &mut RawTable<K, V>;
} }
impl<'t, K, V> Put<K, V> for &'t mut RawTable<K, V> { impl<'t, K, V> Put<K, V> for &'t mut RawTable<K, V> {
unsafe fn borrow_table_mut(&mut self) -> &mut RawTable<K, V> { unsafe fn borrow_table_mut(&mut self) -> &mut RawTable<K, V> {
*self *self
@ -316,7 +319,8 @@ impl<'t, K, V> Put<K, V> for &'t mut RawTable<K, V> {
} }
impl<K, V, M> Put<K, V> for Bucket<K, V, M> impl<K, V, M> Put<K, V> for Bucket<K, V, M>
where M: Put<K, V> where
M: Put<K, V>,
{ {
unsafe fn borrow_table_mut(&mut self) -> &mut RawTable<K, V> { unsafe fn borrow_table_mut(&mut self) -> &mut RawTable<K, V> {
self.table.borrow_table_mut() self.table.borrow_table_mut()
@ -324,7 +328,8 @@ impl<K, V, M> Put<K, V> for Bucket<K, V, M>
} }
impl<K, V, M> Put<K, V> for FullBucket<K, V, M> impl<K, V, M> Put<K, V> for FullBucket<K, V, M>
where M: Put<K, V> where
M: Put<K, V>,
{ {
unsafe fn borrow_table_mut(&mut self) -> &mut RawTable<K, V> { unsafe fn borrow_table_mut(&mut self) -> &mut RawTable<K, V> {
self.table.borrow_table_mut() self.table.borrow_table_mut()
@ -336,20 +341,17 @@ impl<K, V, M: Deref<Target = RawTable<K, V>>> Bucket<K, V, M> {
Bucket::at_index(table, hash.inspect() as usize) Bucket::at_index(table, hash.inspect() as usize)
} }
pub fn new_from(r: RawBucket<K, V>, t: M) pub fn new_from(r: RawBucket<K, V>, t: M) -> Bucket<K, V, M> {
-> Bucket<K, V, M> Bucket { raw: r, table: t }
{
Bucket {
raw: r,
table: t,
}
} }
pub fn at_index(table: M, ib_index: usize) -> Bucket<K, V, M> { pub fn at_index(table: M, ib_index: usize) -> Bucket<K, V, M> {
// if capacity is 0, then the RawBucket will be populated with bogus pointers. // if capacity is 0, then the RawBucket will be populated with bogus pointers.
// This is an uncommon case though, so avoid it in release builds. // This is an uncommon case though, so avoid it in release builds.
debug_assert!(table.capacity() > 0, debug_assert!(
"Table should have capacity at this point"); table.capacity() > 0,
"Table should have capacity at this point"
);
let ib_index = ib_index & table.capacity_mask; let ib_index = ib_index & table.capacity_mask;
Bucket { Bucket {
raw: table.raw_bucket_at(ib_index), raw: table.raw_bucket_at(ib_index),
@ -387,11 +389,11 @@ impl<K, V, M: Deref<Target = RawTable<K, V>>> Bucket<K, V, M> {
} }
// Leaving this bucket in the last cluster for later. // Leaving this bucket in the last cluster for later.
full.into_bucket() full.into_bucket()
} },
Empty(b) => { Empty(b) => {
// Encountered a hole between clusters. // Encountered a hole between clusters.
b.into_bucket() b.into_bucket()
} },
}; };
bucket.next(); bucket.next();
} }
@ -404,18 +406,14 @@ impl<K, V, M: Deref<Target = RawTable<K, V>>> Bucket<K, V, M> {
/// this module. /// this module.
pub fn peek(self) -> BucketState<K, V, M> { pub fn peek(self) -> BucketState<K, V, M> {
match unsafe { *self.raw.hash() } { match unsafe { *self.raw.hash() } {
EMPTY_BUCKET => Empty(EmptyBucket {
raw: self.raw,
table: self.table,
}),
_ => Full(FullBucket {
raw: self.raw,
table: self.table,
}),
} }
} }
@ -453,19 +451,15 @@ impl<K, V, M: Deref<Target = RawTable<K, V>>> EmptyBucket<K, V, M> {
}; };
match self.next().peek() { match self.next().peek() {
Full(bucket) => Ok(GapThenFull { gap, full: bucket }),
Empty(e) => Err(e.into_bucket()), Empty(e) => Err(e.into_bucket()),
} }
} }
} }
impl<K, V, M> EmptyBucket<K, V, M> impl<K, V, M> EmptyBucket<K, V, M>
where M: Put<K, V> where
M: Put<K, V>,
{ {
/// Puts given key and value pair, along with the key's hash, /// Puts given key and value pair, along with the key's hash,
/// into this bucket in the hashtable. Note how `self` is 'moved' into /// into this bucket in the hashtable. Note how `self` is 'moved' into
@ -528,7 +522,11 @@ impl<K, V, M: Deref<Target = RawTable<K, V>>> FullBucket<K, V, M> {
#[inline] #[inline]
pub fn hash(&self) -> SafeHash { pub fn hash(&self) -> SafeHash {
unsafe { SafeHash { hash: *self.raw.hash() } } unsafe {
SafeHash {
hash: *self.raw.hash(),
}
}
} }
/// Gets references to the key and value at a given index. /// Gets references to the key and value at a given index.
@ -554,12 +552,14 @@ impl<'t, K, V> FullBucket<K, V, &'t mut RawTable<K, V>> {
unsafe { unsafe {
*self.raw.hash() = EMPTY_BUCKET; *self.raw.hash() = EMPTY_BUCKET;
let (k, v) = ptr::read(self.raw.pair()); let (k, v) = ptr::read(self.raw.pair());
(
EmptyBucket {
raw: self.raw,
table: self.table,
},
k,
v,
)
} }
} }
} }
@ -567,7 +567,8 @@ impl<'t, K, V> FullBucket<K, V, &'t mut RawTable<K, V>> {
// This use of `Put` is misleading and restrictive, but safe and sufficient for our use cases // This use of `Put` is misleading and restrictive, but safe and sufficient for our use cases
// where `M` is a full bucket or table reference type with mutable access to the table. // where `M` is a full bucket or table reference type with mutable access to the table.
impl<K, V, M> FullBucket<K, V, M> impl<K, V, M> FullBucket<K, V, M>
where M: Put<K, V> where
M: Put<K, V>,
{ {
pub fn replace(&mut self, h: SafeHash, k: K, v: V) -> (SafeHash, K, V) { pub fn replace(&mut self, h: SafeHash, k: K, v: V) -> (SafeHash, K, V) {
unsafe { unsafe {
@ -580,7 +581,8 @@ impl<K, V, M> FullBucket<K, V, M>
} }
impl<K, V, M> FullBucket<K, V, M> impl<K, V, M> FullBucket<K, V, M>
where M: Deref<Target = RawTable<K, V>> + DerefMut where
M: Deref<Target = RawTable<K, V>> + DerefMut,
{ {
/// Gets mutable references to the key and value at a given index. /// Gets mutable references to the key and value at a given index.
pub fn read_mut(&mut self) -> (&mut K, &mut V) { pub fn read_mut(&mut self) -> (&mut K, &mut V) {
@ -592,7 +594,8 @@ impl<K, V, M> FullBucket<K, V, M>
} }
impl<'t, K, V, M> FullBucket<K, V, M> impl<'t, K, V, M> FullBucket<K, V, M>
where M: Deref<Target = RawTable<K, V>> + 't where
M: Deref<Target = RawTable<K, V>> + 't,
{ {
/// Exchange a bucket state for immutable references into the table. /// Exchange a bucket state for immutable references into the table.
/// Because the underlying reference to the table is also consumed, /// Because the underlying reference to the table is also consumed,
@ -608,7 +611,8 @@ impl<'t, K, V, M> FullBucket<K, V, M>
} }
impl<'t, K, V, M> FullBucket<K, V, M> impl<'t, K, V, M> FullBucket<K, V, M>
where M: Deref<Target = RawTable<K, V>> + DerefMut + 't where
M: Deref<Target = RawTable<K, V>> + DerefMut + 't,
{ {
/// This works similarly to `into_refs`, exchanging a bucket state /// This works similarly to `into_refs`, exchanging a bucket state
/// for mutable references into the table. /// for mutable references into the table.
@ -621,7 +625,8 @@ impl<'t, K, V, M> FullBucket<K, V, M>
} }
impl<K, V, M> GapThenFull<K, V, M> impl<K, V, M> GapThenFull<K, V, M>
where M: Deref<Target = RawTable<K, V>> where
M: Deref<Target = RawTable<K, V>>,
{ {
#[inline] #[inline]
pub fn full(&self) -> &FullBucket<K, V, M> { pub fn full(&self) -> &FullBucket<K, V, M> {
@ -649,13 +654,12 @@ impl<K, V, M> GapThenFull<K, V, M>
self.full = bucket; self.full = bucket;
Ok(self) Ok(self)
} },
Empty(b) => Err(b.into_bucket()), Empty(b) => Err(b.into_bucket()),
} }
} }
} }
/// Rounds up to a multiple of a power of two. Returns the closest multiple /// Rounds up to a multiple of a power of two. Returns the closest multiple
/// of `target_alignment` that is higher or equal to `unrounded`. /// of `target_alignment` that is higher or equal to `unrounded`.
/// ///
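The body of round_up_to_next sits outside this hunk; assuming the usual power-of-two trick the doc comment describes, it amounts to the following sketch:

// Sketch only: assumes target_alignment is a power of two, as the real function requires.
fn round_up_to_next(unrounded: usize, target_alignment: usize) -> usize {
    assert!(target_alignment.is_power_of_two());
    (unrounded + target_alignment - 1) & !(target_alignment - 1)
}

fn main() {
    assert_eq!(round_up_to_next(5, 4), 8);
    assert_eq!(round_up_to_next(8, 4), 8);
    assert_eq!(round_up_to_next(0, 8), 0);
}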
@ -681,10 +685,11 @@ fn test_rounding() {
// Returns a tuple of (pairs_offset, end_of_pairs_offset), // Returns a tuple of (pairs_offset, end_of_pairs_offset),
// from the start of a mallocated array. // from the start of a mallocated array.
#[inline] #[inline]
fn calculate_offsets(
hashes_size: usize,
pairs_size: usize,
pairs_align: usize,
) -> (usize, usize, bool) {
let pairs_offset = round_up_to_next(hashes_size, pairs_align); let pairs_offset = round_up_to_next(hashes_size, pairs_align);
let (end_of_pairs, oflo) = pairs_offset.overflowing_add(pairs_size); let (end_of_pairs, oflo) = pairs_offset.overflowing_add(pairs_size);
@ -693,11 +698,12 @@ fn calculate_offsets(hashes_size: usize,
// Returns a tuple of (minimum required malloc alignment, hash_offset, // Returns a tuple of (minimum required malloc alignment, hash_offset,
// array_size), from the start of a mallocated array. // array_size), from the start of a mallocated array.
fn calculate_allocation(
hash_size: usize,
hash_align: usize,
pairs_size: usize,
pairs_align: usize,
) -> (usize, usize, usize, bool) {
let hash_offset = 0; let hash_offset = 0;
let (_, end_of_pairs, oflo) = calculate_offsets(hash_size, pairs_size, pairs_align); let (_, end_of_pairs, oflo) = calculate_offsets(hash_size, pairs_size, pairs_align);
@ -728,7 +734,9 @@ impl<K, V> RawTable<K, V> {
/// Does not initialize the buckets. The caller should ensure they, /// Does not initialize the buckets. The caller should ensure they,
/// at the very least, set every hash to EMPTY_BUCKET. /// at the very least, set every hash to EMPTY_BUCKET.
unsafe fn try_new_uninitialized(
capacity: usize,
) -> Result<RawTable<K, V>, FailedAllocationError> {
if capacity == 0 { if capacity == 0 {
return Ok(RawTable { return Ok(RawTable {
size: 0, size: 0,
@ -751,29 +759,38 @@ impl<K, V> RawTable<K, V> {
// This is great in theory, but in practice getting the alignment // This is great in theory, but in practice getting the alignment
// right is a little subtle. Therefore, calculating offsets has been // right is a little subtle. Therefore, calculating offsets has been
// factored out into a different function. // factored out into a different function.
let (alignment, hash_offset, size, oflo) = calculate_allocation(
hashes_size,
align_of::<HashUint>(),
pairs_size,
align_of::<(K, V)>(),
);
if oflo { if oflo {
return Err(FailedAllocationError::new("capacity overflow when allocating RawTable" )); return Err(FailedAllocationError::new(
"capacity overflow when allocating RawTable",
));
} }
// One check for overflow that covers calculation and rounding of size. // One check for overflow that covers calculation and rounding of size.
let size_of_bucket = size_of::<HashUint>()
.checked_add(size_of::<(K, V)>())
.unwrap();
let cap_bytes = capacity.checked_mul(size_of_bucket); let cap_bytes = capacity.checked_mul(size_of_bucket);
if let Some(cap_bytes) = cap_bytes { if let Some(cap_bytes) = cap_bytes {
if size < cap_bytes { if size < cap_bytes {
return Err(FailedAllocationError::new("capacity overflow when allocating RawTable")); return Err(FailedAllocationError::new(
"capacity overflow when allocating RawTable",
));
} }
} else { } else {
return Err(FailedAllocationError::new("capacity overflow when allocating RawTable")); return Err(FailedAllocationError::new(
"capacity overflow when allocating RawTable",
));
} }
// FORK NOTE: Uses alloc shim instead of Heap.alloc // FORK NOTE: Uses alloc shim instead of Heap.alloc
let buffer = alloc(size, alignment); let buffer = alloc(size, alignment);
@ -857,7 +874,9 @@ impl<K, V> RawTable<K, V> {
} }
pub fn into_iter(self) -> IntoIter<K, V> { pub fn into_iter(self) -> IntoIter<K, V> {
let RawBuckets { raw, elems_left, .. } = self.raw_buckets(); let RawBuckets {
raw, elems_left, ..
} = self.raw_buckets();
// Replace the marker regardless of lifetime bounds on parameters. // Replace the marker regardless of lifetime bounds on parameters.
IntoIter { IntoIter {
iter: RawBuckets { iter: RawBuckets {
@ -870,7 +889,9 @@ impl<K, V> RawTable<K, V> {
} }
pub fn drain(&mut self) -> Drain<K, V> { pub fn drain(&mut self) -> Drain<K, V> {
let RawBuckets { raw, elems_left, .. } = self.raw_buckets(); let RawBuckets {
raw, elems_left, ..
} = self.raw_buckets();
// Replace the marker regardless of lifetime bounds on parameters. // Replace the marker regardless of lifetime bounds on parameters.
Drain { Drain {
iter: RawBuckets { iter: RawBuckets {
@ -937,7 +958,6 @@ impl<'a, K, V> Clone for RawBuckets<'a, K, V> {
} }
} }
impl<'a, K, V> Iterator for RawBuckets<'a, K, V> { impl<'a, K, V> Iterator for RawBuckets<'a, K, V> {
type Item = RawBucket<K, V>; type Item = RawBucket<K, V>;
@ -1112,12 +1132,16 @@ impl<'a, K, V> Iterator for Drain<'a, K, V> {
#[inline] #[inline]
fn next(&mut self) -> Option<(SafeHash, K, V)> { fn next(&mut self) -> Option<(SafeHash, K, V)> {
self.iter.next().map(|raw| unsafe {
self.table.as_mut().size -= 1;
let (k, v) = ptr::read(raw.pair());
(
SafeHash {
hash: ptr::replace(&mut *raw.hash(), EMPTY_BUCKET),
},
k,
v,
)
}) })
} }
@ -1188,10 +1212,12 @@ impl<K, V> Drop for RawTable<K, V> {
let hashes_size = self.capacity() * size_of::<HashUint>(); let hashes_size = self.capacity() * size_of::<HashUint>();
let pairs_size = self.capacity() * size_of::<(K, V)>(); let pairs_size = self.capacity() * size_of::<(K, V)>();
let (align, _, _, oflo) = calculate_allocation(
hashes_size,
align_of::<HashUint>(),
pairs_size,
align_of::<(K, V)>(),
);
debug_assert!(!oflo, "should be impossible"); debug_assert!(!oflo, "should be impossible");


@ -3,15 +3,15 @@
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
extern crate quote; extern crate quote;
#[macro_use]
extern crate syn;
#[macro_use]
extern crate synstructure;
decl_derive!([JSTraceable] => js_traceable_derive); decl_derive!([JSTraceable] => js_traceable_derive);
fn js_traceable_derive(s: synstructure::Structure) -> quote::Tokens { fn js_traceable_derive(s: synstructure::Structure) -> quote::Tokens {
let match_body = s.each(|binding| Some(quote!(#binding.trace(tracer);)));
let ast = s.ast(); let ast = s.ast();
let name = ast.ident; let name = ast.ident;
@ -19,7 +19,9 @@ fn js_traceable_derive(s: synstructure::Structure) -> quote::Tokens {
let mut where_clause = where_clause.unwrap_or(&parse_quote!(where)).clone(); let mut where_clause = where_clause.unwrap_or(&parse_quote!(where)).clone();
for param in ast.generics.type_params() { for param in ast.generics.type_params() {
let ident = param.ident; let ident = param.ident;
where_clause
.predicates
.push(parse_quote!(#ident: ::dom::bindings::trace::JSTraceable))
} }
let tokens = quote! { let tokens = quote! {


@ -32,8 +32,7 @@ pub fn update_animation_state<E>(
new_animations_receiver: &Receiver<Animation>, new_animations_receiver: &Receiver<Animation>,
pipeline_id: PipelineId, pipeline_id: PipelineId,
timer: &Timer, timer: &Timer,
) ) where
where
E: TElement, E: TElement,
{ {
let mut new_running_animations = vec![]; let mut new_running_animations = vec![];
@ -66,7 +65,7 @@ where
if running_animations.is_empty() && new_running_animations.is_empty() { if running_animations.is_empty() && new_running_animations.is_empty() {
// Nothing to do. Return early so we don't flood the compositor with // Nothing to do. Return early so we don't flood the compositor with
// `ChangeRunningAnimationsState` messages. // `ChangeRunningAnimationsState` messages.
return return;
} }
let now = timer.seconds(); let now = timer.seconds();
@ -82,28 +81,30 @@ where
let still_running = !running_animation.is_expired() && match running_animation { let still_running = !running_animation.is_expired() && match running_animation {
Animation::Transition(_, started_at, ref frame, _expired) => { Animation::Transition(_, started_at, ref frame, _expired) => {
now < started_at + frame.duration now < started_at + frame.duration
} },
Animation::Keyframes(_, _, _, ref mut state) => { Animation::Keyframes(_, _, _, ref mut state) => {
// This animation is still running, or we need to keep // This animation is still running, or we need to keep
// iterating. // iterating.
now < state.started_at + state.duration || state.tick() now < state.started_at + state.duration || state.tick()
} },
}; };
if still_running { if still_running {
animations_still_running.push(running_animation); animations_still_running.push(running_animation);
continue continue;
} }
if let Animation::Transition(node, _, ref frame, _) = running_animation { if let Animation::Transition(node, _, ref frame, _) = running_animation {
script_chan
.send(ConstellationControlMsg::TransitionEnd(
node.to_untrusted_node_address(),
frame.property_animation.property_name().into(),
frame.duration,
)).unwrap();
} }
expired_animations.entry(*key) expired_animations
.entry(*key)
.or_insert_with(Vec::new) .or_insert_with(Vec::new)
.push(running_animation); .push(running_animation);
} }
@ -125,14 +126,15 @@ where
match newly_transitioning_nodes { match newly_transitioning_nodes {
Some(ref mut nodes) => { Some(ref mut nodes) => {
nodes.push(new_running_animation.node().to_untrusted_node_address()); nodes.push(new_running_animation.node().to_untrusted_node_address());
} },
None => { None => {
warn!("New transition encountered from compositor-initiated layout."); warn!("New transition encountered from compositor-initiated layout.");
} },
} }
} }
running_animations.entry(*new_running_animation.node()) running_animations
.entry(*new_running_animation.node())
.or_insert_with(Vec::new) .or_insert_with(Vec::new)
.push(new_running_animation) .push(new_running_animation)
} }
@ -143,9 +145,11 @@ where
AnimationState::AnimationsPresent AnimationState::AnimationsPresent
}; };
constellation_chan
.send(ConstellationMsg::ChangeRunningAnimationsState(
pipeline_id,
animation_state,
)).unwrap();
} }
/// Recalculates style for a set of animations. This does *not* run with the DOM /// Recalculates style for a set of animations. This does *not* run with the DOM
@ -154,8 +158,7 @@ pub fn recalc_style_for_animations<E>(
context: &LayoutContext, context: &LayoutContext,
flow: &mut Flow, flow: &mut Flow,
animations: &FxHashMap<OpaqueNode, Vec<Animation>>, animations: &FxHashMap<OpaqueNode, Vec<Animation>>,
) ) where
where
E: TElement, E: TElement,
{ {
let mut damage = RestyleDamage::empty(); let mut damage = RestyleDamage::empty();
@ -170,10 +173,7 @@ where
&ServoMetricsProvider, &ServoMetricsProvider,
); );
let difference = let difference =
RestyleDamage::compute_style_difference(&old_style, &fragment.style);
damage |= difference.damage; damage |= difference.damage;
} }
} }

File diff suppressed because it is too large

File diff suppressed because it is too large


@ -32,7 +32,8 @@ pub type LayoutFontContext = FontContext<FontCacheThread>;
thread_local!(static FONT_CONTEXT_KEY: RefCell<Option<LayoutFontContext>> = RefCell::new(None)); thread_local!(static FONT_CONTEXT_KEY: RefCell<Option<LayoutFontContext>> = RefCell::new(None));
pub fn with_thread_local_font_context<F, R>(layout_context: &LayoutContext, f: F) -> R pub fn with_thread_local_font_context<F, R>(layout_context: &LayoutContext, f: F) -> R
where F: FnOnce(&mut LayoutFontContext) -> R where
F: FnOnce(&mut LayoutFontContext) -> R,
{ {
FONT_CONTEXT_KEY.with(|k| { FONT_CONTEXT_KEY.with(|k| {
let mut font_context = k.borrow_mut(); let mut font_context = k.borrow_mut();
@ -69,9 +70,11 @@ pub struct LayoutContext<'a> {
pub font_cache_thread: Mutex<FontCacheThread>, pub font_cache_thread: Mutex<FontCacheThread>,
/// A cache of WebRender image info. /// A cache of WebRender image info.
pub webrender_image_cache: Arc<
RwLock<
HashMap<(ServoUrl, UsePlaceholder), WebRenderImageInfo, BuildHasherDefault<FnvHasher>>,
>,
>,
/// Paint worklets /// Paint worklets
pub registered_painters: &'a RegisteredPainters, pub registered_painters: &'a RegisteredPainters,
@ -101,11 +104,12 @@ impl<'a> LayoutContext<'a> {
&self.style_context &self.style_context
} }
pub fn get_or_request_image_or_meta(
&self,
node: OpaqueNode,
url: ServoUrl,
use_placeholder: UsePlaceholder,
) -> Option<ImageOrMetadataAvailable> {
//XXXjdm For cases where we do not request an image, we still need to //XXXjdm For cases where we do not request an image, we still need to
// ensure the node gets another script-initiated reflow or it // ensure the node gets another script-initiated reflow or it
// won't be requested at all. // won't be requested at all.
@ -116,9 +120,9 @@ impl<'a> LayoutContext<'a> {
}; };
// See if the image is already available // See if the image is already available
let result =
self.image_cache
.find_image_or_metadata(url.clone(), use_placeholder, can_request);
match result { match result {
Ok(image_or_metadata) => Some(image_or_metadata), Ok(image_or_metadata) => Some(image_or_metadata),
// Image failed to load, so just return nothing // Image failed to load, so just return nothing
@ -130,9 +134,14 @@ impl<'a> LayoutContext<'a> {
node: node.to_untrusted_node_address(), node: node.to_untrusted_node_address(),
id: id, id: id,
}; };
self.pending_images
.as_ref()
.unwrap()
.lock()
.unwrap()
.push(image);
None None
} },
// Image has been requested, is still pending. Return no image for this paint loop. // Image has been requested, is still pending. Return no image for this paint loop.
// When the image loads it will trigger a reflow and/or repaint. // When the image loads it will trigger a reflow and/or repaint.
Err(ImageState::Pending(id)) => { Err(ImageState::Pending(id)) => {
@ -148,19 +157,22 @@ impl<'a> LayoutContext<'a> {
pending_images.lock().unwrap().push(image); pending_images.lock().unwrap().push(image);
} }
None None
} },
} }
} }
pub fn get_webrender_image_for_url(
&self,
node: OpaqueNode,
url: ServoUrl,
use_placeholder: UsePlaceholder,
) -> Option<WebRenderImageInfo> {
if let Some(existing_webrender_image) = self
.webrender_image_cache
.read()
.get(&(url.clone(), use_placeholder))
{
return Some((*existing_webrender_image).clone());
} }
match self.get_or_request_image_or_meta(node, url.clone(), use_placeholder) { match self.get_or_request_image_or_meta(node, url.clone(), use_placeholder) {
@ -170,11 +182,10 @@ impl<'a> LayoutContext<'a> {
Some(image_info) Some(image_info)
} else { } else {
let mut webrender_image_cache = self.webrender_image_cache.write(); let mut webrender_image_cache = self.webrender_image_cache.write();
webrender_image_cache.insert((url, use_placeholder), image_info);
Some(image_info) Some(image_info)
} }
} },
None | Some(ImageOrMetadataAvailable::MetadataAvailable(_)) => None, None | Some(ImageOrMetadataAvailable::MetadataAvailable(_)) => None,
} }
} }
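The lookup above is the classic read-then-write cache: take a cheap shared read lock first, and only grab the exclusive write lock if the entry has to be inserted. A simplified sketch with std's RwLock (the real cache is keyed by (ServoUrl, UsePlaceholder) and its lock API differs slightly):

use std::collections::HashMap;
use std::sync::RwLock;

fn get_or_insert(cache: &RwLock<HashMap<String, u32>>, key: &str, compute: impl Fn() -> u32) -> u32 {
    if let Some(v) = cache.read().unwrap().get(key) {
        return *v; // fast path: shared lock only
    }
    let v = compute();
    cache.write().unwrap().insert(key.to_string(), v); // slow path: exclusive lock
    v
}

fn main() {
    let cache = RwLock::new(HashMap::new());
    assert_eq!(get_or_insert(&cache, "a", || 1), 1);
    assert_eq!(get_or_insert(&cache, "a", || 2), 1); // cached value wins
}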


@ -24,7 +24,6 @@ impl StyleAndLayoutData {
} }
} }
/// Data that layout associates with a node. /// Data that layout associates with a node.
#[repr(C)] #[repr(C)]
pub struct LayoutData { pub struct LayoutData {


@ -444,8 +444,7 @@ fn convert_gradient_stops(
.filter_map(|item| match *item { .filter_map(|item| match *item {
GenericGradientItem::ColorStop(ref stop) => Some(*stop), GenericGradientItem::ColorStop(ref stop) => Some(*stop),
_ => None, _ => None,
}) }).collect::<Vec<_>>();
.collect::<Vec<_>>();
assert!(stop_items.len() >= 2); assert!(stop_items.len() >= 2);


@ -1414,8 +1414,7 @@ impl FragmentDisplayListBuilding for Fragment {
url.clone(), url.clone(),
UsePlaceholder::No, UsePlaceholder::No,
) )
}) }).and_then(|image| {
.and_then(|image| {
build_image_border_details(image, border_style_struct, outset_layout) build_image_border_details(image, border_style_struct, outset_layout)
}), }),
}; };
@ -1957,8 +1956,7 @@ impl FragmentDisplayListBuilding for Fragment {
.send(CanvasMsg::FromLayout( .send(CanvasMsg::FromLayout(
FromLayoutMsg::SendData(sender), FromLayoutMsg::SendData(sender),
canvas_fragment_info.canvas_id.clone(), canvas_fragment_info.canvas_id.clone(),
)) )).unwrap();
.unwrap();
receiver.recv().unwrap().image_key receiver.recv().unwrap().image_key
}, },
None => return, None => return,
@ -2070,9 +2068,11 @@ impl FragmentDisplayListBuilding for Fragment {
// FIXME(pcwalton): Get the real container size. // FIXME(pcwalton): Get the real container size.
let container_size = Size2D::zero(); let container_size = Size2D::zero();
let metrics = &text_fragment.run.font_metrics; let metrics = &text_fragment.run.font_metrics;
let baseline_origin = stacking_relative_content_box.origin + let baseline_origin = stacking_relative_content_box.origin + LogicalPoint::new(
LogicalPoint::new(self.style.writing_mode, Au(0), metrics.ascent) self.style.writing_mode,
.to_physical(self.style.writing_mode, container_size) Au(0),
metrics.ascent,
).to_physical(self.style.writing_mode, container_size)
.to_vector(); .to_vector();
// Base item for all text/shadows // Base item for all text/shadows


@ -513,8 +513,8 @@ impl ClippingRegion {
/// This is a quick, not a precise, test; it can yield false positives. /// This is a quick, not a precise, test; it can yield false positives.
#[inline] #[inline]
pub fn might_intersect_point(&self, point: &LayoutPoint) -> bool { pub fn might_intersect_point(&self, point: &LayoutPoint) -> bool {
self.main.contains(point) && self.main.contains(point) && self
self.complex .complex
.iter() .iter()
.all(|complex| complex.rect.contains(point)) .all(|complex| complex.rect.contains(point))
} }
@ -523,8 +523,8 @@ impl ClippingRegion {
/// otherwise. This is a quick, not a precise, test; it can yield false positives. /// otherwise. This is a quick, not a precise, test; it can yield false positives.
#[inline] #[inline]
pub fn might_intersect_rect(&self, rect: &LayoutRect) -> bool { pub fn might_intersect_rect(&self, rect: &LayoutRect) -> bool {
self.main.intersects(rect) && self.main.intersects(rect) && self
self.complex .complex
.iter() .iter()
.all(|complex| complex.rect.intersects(rect)) .all(|complex| complex.rect.intersects(rect))
} }
@ -532,8 +532,10 @@ impl ClippingRegion {
/// Returns true if this clipping region completely surrounds the given rect. /// Returns true if this clipping region completely surrounds the given rect.
#[inline] #[inline]
pub fn does_not_clip_rect(&self, rect: &LayoutRect) -> bool { pub fn does_not_clip_rect(&self, rect: &LayoutRect) -> bool {
self.main.contains(&rect.origin) && self.main.contains(&rect.bottom_right()) && self.main.contains(&rect.origin) && self.main.contains(&rect.bottom_right()) && self
self.complex.iter().all(|complex| { .complex
.iter()
.all(|complex| {
complex.rect.contains(&rect.origin) && complex.rect.contains(&rect.bottom_right()) complex.rect.contains(&rect.origin) && complex.rect.contains(&rect.bottom_right())
}) })
} }
@ -588,8 +590,7 @@ impl ClippingRegion {
rect: complex.rect.translate(delta), rect: complex.rect.translate(delta),
radii: complex.radii, radii: complex.radii,
mode: complex.mode, mode: complex.mode,
}) }).collect(),
.collect(),
} }
} }
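The might_intersect_point/might_intersect_rect helpers reformatted above are conservative hit tests: the candidate must fall inside the main clip rect and inside the bounding rect of every complex (rounded) clip, so the test can return false positives but never misses a real intersection. A small sketch of that shape, with a hypothetical axis-aligned Rect standing in for LayoutRect:

#[derive(Clone, Copy)]
struct Rect {
    x: f32,
    y: f32,
    w: f32,
    h: f32,
}

impl Rect {
    fn contains(&self, px: f32, py: f32) -> bool {
        px >= self.x && px <= self.x + self.w && py >= self.y && py <= self.y + self.h
    }
}

struct ClippingRegion {
    main: Rect,
    // Bounding rects of rounded/complex clips; radii are ignored here,
    // which is exactly why the test is only conservative.
    complex: Vec<Rect>,
}

impl ClippingRegion {
    // Quick, not precise: may say "yes" for a point the rounded corners actually clip out.
    fn might_intersect_point(&self, px: f32, py: f32) -> bool {
        self.main.contains(px, py) && self.complex.iter().all(|c| c.contains(px, py))
    }
}

fn main() {
    let region = ClippingRegion {
        main: Rect { x: 0.0, y: 0.0, w: 100.0, h: 100.0 },
        complex: vec![Rect { x: 10.0, y: 10.0, w: 50.0, h: 50.0 }],
    };
    assert!(region.might_intersect_point(20.0, 20.0));
    assert!(!region.might_intersect_point(90.0, 90.0));
}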


@ -45,25 +45,25 @@ enum AxisSize {
impl AxisSize { impl AxisSize {
/// Generate a new available cross or main axis size from the specified size of the container, /// Generate a new available cross or main axis size from the specified size of the container,
/// containing block size, min constraint, and max constraint /// containing block size, min constraint, and max constraint
pub fn new(size: LengthOrPercentageOrAuto, content_size: Option<Au>, min: LengthOrPercentage, pub fn new(
max: LengthOrPercentageOrNone) -> AxisSize { size: LengthOrPercentageOrAuto,
content_size: Option<Au>,
min: LengthOrPercentage,
max: LengthOrPercentageOrNone,
) -> AxisSize {
match size { match size {
LengthOrPercentageOrAuto::Length(length) => AxisSize::Definite(Au::from(length)), LengthOrPercentageOrAuto::Length(length) => AxisSize::Definite(Au::from(length)),
LengthOrPercentageOrAuto::Percentage(percent) => { LengthOrPercentageOrAuto::Percentage(percent) => match content_size {
match content_size {
Some(size) => AxisSize::Definite(size.scale_by(percent.0)), Some(size) => AxisSize::Definite(size.scale_by(percent.0)),
None => AxisSize::Infinite None => AxisSize::Infinite,
} },
} LengthOrPercentageOrAuto::Calc(calc) => match calc.to_used_value(content_size) {
LengthOrPercentageOrAuto::Calc(calc) => {
match calc.to_used_value(content_size) {
Some(length) => AxisSize::Definite(length), Some(length) => AxisSize::Definite(length),
None => AxisSize::Infinite, None => AxisSize::Infinite,
} },
}
LengthOrPercentageOrAuto::Auto => { LengthOrPercentageOrAuto::Auto => {
AxisSize::MinMax(SizeConstraint::new(content_size, min, max, None)) AxisSize::MinMax(SizeConstraint::new(content_size, min, max, None))
} },
} }
} }
} }
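AxisSize::new, reformatted in the hunk above, classifies the used axis size into three cases: a definite length; a definite length only when a percentage or calc() value can resolve against a known containing size (otherwise infinite); and a min/max constraint when the size is auto. A condensed sketch of that classification, with hypothetical plain-f32 types in place of the style system's length enums (calc() is folded into the percentage case here):

#[derive(Debug)]
enum AxisSize {
    Definite(f32),
    MinMax { min: f32, max: f32 },
    Infinite,
}

enum Size {
    Length(f32),
    Percentage(f32), // 0.0 ..= 1.0
    Auto,
}

fn axis_size(size: Size, content_size: Option<f32>, min: f32, max: f32) -> AxisSize {
    match size {
        Size::Length(l) => AxisSize::Definite(l),
        // A percentage is only definite if the containing size is known.
        Size::Percentage(p) => match content_size {
            Some(s) => AxisSize::Definite(s * p),
            None => AxisSize::Infinite,
        },
        Size::Auto => AxisSize::MinMax { min, max },
    }
}

fn main() {
    println!("{:?}", axis_size(Size::Percentage(0.5), Some(600.0), 0.0, f32::MAX));
    println!("{:?}", axis_size(Size::Auto, None, 100.0, 400.0));
}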
@ -112,7 +112,7 @@ struct FlexItem {
/// Whether the main size has met its constraint. /// Whether the main size has met its constraint.
pub is_frozen: bool, pub is_frozen: bool,
/// True if this flow has property 'visibility::collapse'. /// True if this flow has property 'visibility::collapse'.
pub is_strut: bool pub is_strut: bool,
} }
impl FlexItem { impl FlexItem {
@ -133,7 +133,7 @@ impl FlexItem {
flex_shrink: flex_shrink.into(), flex_shrink: flex_shrink.into(),
order: order, order: order,
is_frozen: false, is_frozen: false,
is_strut: false is_strut: false,
} }
} }
@ -147,41 +147,61 @@ impl FlexItem {
// should change to LengthOrPercentageOrAuto for automatic implied minimal size. // should change to LengthOrPercentageOrAuto for automatic implied minimal size.
// https://drafts.csswg.org/css-flexbox-1/#min-size-auto // https://drafts.csswg.org/css-flexbox-1/#min-size-auto
Direction::Inline => { Direction::Inline => {
let basis = from_flex_basis(block.fragment.style.get_position().flex_basis, let basis = from_flex_basis(
block.fragment.style.get_position().flex_basis,
block.fragment.style.content_inline_size(), block.fragment.style.content_inline_size(),
containing_length); containing_length,
);
// These methods compute auto margins to zero length, which is exactly what we want. // These methods compute auto margins to zero length, which is exactly what we want.
block.fragment.compute_border_and_padding(containing_length); block.fragment.compute_border_and_padding(containing_length);
block.fragment.compute_inline_direction_margins(containing_length); block
block.fragment.compute_block_direction_margins(containing_length); .fragment
.compute_inline_direction_margins(containing_length);
block
.fragment
.compute_block_direction_margins(containing_length);
let (border_padding, margin) = block.fragment.surrounding_intrinsic_inline_size(); let (border_padding, margin) = block.fragment.surrounding_intrinsic_inline_size();
let content_size = block.base.intrinsic_inline_sizes.preferred_inline_size let content_size = block.base.intrinsic_inline_sizes.preferred_inline_size -
- border_padding border_padding -
- margin margin +
+ block.fragment.box_sizing_boundary(direction); block.fragment.box_sizing_boundary(direction);
self.base_size = basis.specified_or_default(content_size); self.base_size = basis.specified_or_default(content_size);
self.max_size = self.max_size = block
block.fragment.style.max_inline_size() .fragment
.style
.max_inline_size()
.to_used_value(containing_length) .to_used_value(containing_length)
.unwrap_or(MAX_AU); .unwrap_or(MAX_AU);
self.min_size = block.fragment.style.min_inline_size().to_used_value(containing_length); self.min_size = block
} .fragment
.style
.min_inline_size()
.to_used_value(containing_length);
},
Direction::Block => { Direction::Block => {
let basis = from_flex_basis(block.fragment.style.get_position().flex_basis, let basis = from_flex_basis(
block.fragment.style.get_position().flex_basis,
block.fragment.style.content_block_size(), block.fragment.style.content_block_size(),
containing_length); containing_length,
let content_size = block.fragment.border_box.size.block );
- block.fragment.border_padding.block_start_end() let content_size = block.fragment.border_box.size.block -
+ block.fragment.box_sizing_boundary(direction); block.fragment.border_padding.block_start_end() +
block.fragment.box_sizing_boundary(direction);
self.base_size = basis.specified_or_default(content_size); self.base_size = basis.specified_or_default(content_size);
self.max_size = self.max_size = block
block.fragment.style.max_block_size() .fragment
.style
.max_block_size()
.to_used_value(containing_length) .to_used_value(containing_length)
.unwrap_or(MAX_AU); .unwrap_or(MAX_AU);
self.min_size = block.fragment.style.min_block_size().to_used_value(containing_length); self.min_size = block
} .fragment
.style
.min_block_size()
.to_used_value(containing_length);
},
} }
} }
@ -192,13 +212,14 @@ impl FlexItem {
let outer_width = match direction { let outer_width = match direction {
Direction::Inline => { Direction::Inline => {
fragment.border_padding.inline_start_end() + fragment.margin.inline_start_end() fragment.border_padding.inline_start_end() + fragment.margin.inline_start_end()
} },
Direction::Block => { Direction::Block => {
fragment.border_padding.block_start_end() + fragment.margin.block_start_end() fragment.border_padding.block_start_end() + fragment.margin.block_start_end()
} },
}; };
max(self.min_size, min(self.base_size, self.max_size)) max(self.min_size, min(self.base_size, self.max_size)) -
- fragment.box_sizing_boundary(direction) + outer_width fragment.box_sizing_boundary(direction) +
outer_width
} }
/// Returns the number of auto margins in given direction. /// Returns the number of auto margins in given direction.
@ -213,7 +234,7 @@ impl FlexItem {
if margin.inline_end == LengthOrPercentageOrAuto::Auto { if margin.inline_end == LengthOrPercentageOrAuto::Auto {
margin_count += 1; margin_count += 1;
} }
} },
Direction::Block => { Direction::Block => {
if margin.block_start == LengthOrPercentageOrAuto::Auto { if margin.block_start == LengthOrPercentageOrAuto::Auto {
margin_count += 1; margin_count += 1;
@ -221,7 +242,7 @@ impl FlexItem {
if margin.block_end == LengthOrPercentageOrAuto::Auto { if margin.block_end == LengthOrPercentageOrAuto::Auto {
margin_count += 1; margin_count += 1;
} }
} },
} }
margin_count margin_count
} }
@ -247,7 +268,7 @@ impl FlexLine {
range: range, range: range,
auto_margin_count: auto_margin_count, auto_margin_count: auto_margin_count,
free_space: free_space, free_space: free_space,
cross_size: Au(0) cross_size: Au(0),
} }
} }
@ -265,8 +286,11 @@ impl FlexLine {
// https://drafts.csswg.org/css-flexbox/#resolve-flexible-lengths // https://drafts.csswg.org/css-flexbox/#resolve-flexible-lengths
for item in items.iter_mut().filter(|i| !(i.is_strut && collapse)) { for item in items.iter_mut().filter(|i| !(i.is_strut && collapse)) {
item.main_size = max(item.min_size, min(item.base_size, item.max_size)); item.main_size = max(item.min_size, min(item.base_size, item.max_size));
if (self.free_space > Au(0) && (item.flex_grow == 0.0 || item.base_size >= item.max_size)) || if (self.free_space > Au(0) &&
(self.free_space < Au(0) && (item.flex_shrink == 0.0 || item.base_size <= item.min_size)) { (item.flex_grow == 0.0 || item.base_size >= item.max_size)) ||
(self.free_space < Au(0) &&
(item.flex_shrink == 0.0 || item.base_size <= item.min_size))
{
item.is_frozen = true; item.is_frozen = true;
} else { } else {
item.is_frozen = false; item.is_frozen = false;
@ -291,12 +315,19 @@ impl FlexLine {
}; };
total_variation = Au(0); total_variation = Au(0);
for item in items.iter_mut().filter(|i| !i.is_frozen).filter(|i| !(i.is_strut && collapse)) { for item in items
.iter_mut()
.filter(|i| !i.is_frozen)
.filter(|i| !(i.is_strut && collapse))
{
// Use this and the 'abs()' below to make the code work in both grow and shrink scenarios. // Use this and the 'abs()' below to make the code work in both grow and shrink scenarios.
let (factor, end_size) = if self.free_space > Au(0) { let (factor, end_size) = if self.free_space > Au(0) {
(item.flex_grow / total_grow, item.max_size) (item.flex_grow / total_grow, item.max_size)
} else { } else {
(item.flex_shrink * item.base_size.0 as f32 / total_scaled, item.min_size) (
item.flex_shrink * item.base_size.0 as f32 / total_scaled,
item.min_size,
)
}; };
let variation = self.free_space.scale_by(factor); let variation = self.free_space.scale_by(factor);
if variation.0.abs() >= (end_size - item.main_size).0.abs() { if variation.0.abs() >= (end_size - item.main_size).0.abs() {
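The hunk above reflows part of the flexible-length resolution loop: items whose flex factor is zero, or that already sit at their limit in the direction of the free space, are frozen, and the remaining space is then split among unfrozen items in proportion to flex-grow (or to flex-shrink weighted by base size when shrinking), clamped so no item crosses its min/max bound. A simplified one-pass sketch of that distribution, using plain f32 sizes instead of Au and ignoring struts and the multi-pass re-freeze step:

#[derive(Debug)]
struct Item {
    base_size: f32,
    min_size: f32,
    max_size: f32,
    flex_grow: f32,
    flex_shrink: f32,
    main_size: f32,
    frozen: bool,
}

// One distribution pass, roughly following
// https://drafts.csswg.org/css-flexbox/#resolve-flexible-lengths.
fn distribute(items: &mut [Item], free_space: f32) {
    for item in items.iter_mut() {
        item.main_size = item.base_size.clamp(item.min_size, item.max_size);
        // Freeze items that cannot move in the direction of the free space.
        item.frozen = if free_space > 0.0 {
            item.flex_grow == 0.0 || item.base_size >= item.max_size
        } else {
            item.flex_shrink == 0.0 || item.base_size <= item.min_size
        };
    }
    let (total_grow, total_scaled): (f32, f32) = items
        .iter()
        .filter(|i| !i.frozen)
        .fold((0.0, 0.0), |(g, s), i| (g + i.flex_grow, s + i.flex_shrink * i.base_size));
    for item in items.iter_mut().filter(|i| !i.frozen) {
        let factor = if free_space > 0.0 {
            item.flex_grow / total_grow
        } else {
            item.flex_shrink * item.base_size / total_scaled
        };
        // Clamp the variation so the item never crosses its min/max bound.
        let target = item.main_size + free_space * factor;
        item.main_size = target.clamp(item.min_size, item.max_size);
    }
}

fn main() {
    let mut items = vec![
        Item { base_size: 100.0, min_size: 0.0, max_size: 400.0, flex_grow: 1.0, flex_shrink: 1.0, main_size: 0.0, frozen: false },
        Item { base_size: 100.0, min_size: 0.0, max_size: 400.0, flex_grow: 3.0, flex_shrink: 1.0, main_size: 0.0, frozen: false },
    ];
    distribute(&mut items, 200.0); // 200px of free space: +50px and +150px.
    println!("{:?}", items.iter().map(|i| i.main_size).collect::<Vec<_>>());
}

The real loop repeats until the total variation reaches zero, re-freezing items that hit a bound; this sketch shows a single pass only.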
@ -343,13 +374,11 @@ pub struct FlexFlow {
/// True if this flex container can be multiline. /// True if this flex container can be multiline.
is_wrappable: bool, is_wrappable: bool,
/// True if the cross direction is reversed. /// True if the cross direction is reversed.
cross_reverse: bool cross_reverse: bool,
} }
impl FlexFlow { impl FlexFlow {
pub fn from_fragment(fragment: Fragment, pub fn from_fragment(fragment: Fragment, flotation: Option<FloatKind>) -> FlexFlow {
flotation: Option<FloatKind>)
-> FlexFlow {
let main_mode; let main_mode;
let main_reverse; let main_reverse;
let is_wrappable; let is_wrappable;
@ -363,8 +392,7 @@ impl FlexFlow {
FlexDirection::ColumnReverse => (Direction::Block, true), FlexDirection::ColumnReverse => (Direction::Block, true),
}; };
main_mode = mode; main_mode = mode;
main_reverse = main_reverse = reverse == style.writing_mode.is_bidi_ltr();
reverse == style.writing_mode.is_bidi_ltr();
let (wrappable, reverse) = match fragment.style.get_position().flex_wrap { let (wrappable, reverse) = match fragment.style.get_position().flex_wrap {
FlexWrap::Nowrap => (false, false), FlexWrap::Nowrap => (false, false),
FlexWrap::Wrap => (true, false), FlexWrap::Wrap => (true, false),
@ -384,7 +412,7 @@ impl FlexFlow {
items: Vec::new(), items: Vec::new(),
main_reverse: main_reverse, main_reverse: main_reverse,
is_wrappable: is_wrappable, is_wrappable: is_wrappable,
cross_reverse: cross_reverse cross_reverse: cross_reverse,
} }
} }
@ -414,7 +442,10 @@ impl FlexFlow {
let kid = children.get(item.index); let kid = children.get(item.index);
item.init_sizes(kid, container_size, self.main_mode); item.init_sizes(kid, container_size, self.main_mode);
let outer_main_size = item.outer_main_size(kid, self.main_mode); let outer_main_size = item.outer_main_size(kid, self.main_mode);
if total_line_size + outer_main_size > container_size && end != start && self.is_wrappable { if total_line_size + outer_main_size > container_size &&
end != start &&
self.is_wrappable
{
break; break;
} }
margin_count += item.auto_margin_count(kid, self.main_mode); margin_count += item.auto_margin_count(kid, self.main_mode);
@ -439,7 +470,8 @@ impl FlexFlow {
if !fixed_width { if !fixed_width {
for kid in self.block_flow.base.children.iter_mut() { for kid in self.block_flow.base.children.iter_mut() {
let base = kid.mut_base(); let base = kid.mut_base();
let is_absolutely_positioned = base.flags.contains(FlowFlags::IS_ABSOLUTELY_POSITIONED); let is_absolutely_positioned =
base.flags.contains(FlowFlags::IS_ABSOLUTELY_POSITIONED);
if !is_absolutely_positioned { if !is_absolutely_positioned {
let flex_item_inline_sizes = IntrinsicISizes { let flex_item_inline_sizes = IntrinsicISizes {
minimum_inline_size: base.intrinsic_inline_sizes.minimum_inline_size, minimum_inline_size: base.intrinsic_inline_sizes.minimum_inline_size,
@ -465,15 +497,18 @@ impl FlexFlow {
if !fixed_width { if !fixed_width {
for kid in self.block_flow.base.children.iter_mut() { for kid in self.block_flow.base.children.iter_mut() {
let base = kid.mut_base(); let base = kid.mut_base();
let is_absolutely_positioned = base.flags.contains(FlowFlags::IS_ABSOLUTELY_POSITIONED); let is_absolutely_positioned =
base.flags.contains(FlowFlags::IS_ABSOLUTELY_POSITIONED);
if !is_absolutely_positioned { if !is_absolutely_positioned {
computation.content_intrinsic_sizes.minimum_inline_size = computation.content_intrinsic_sizes.minimum_inline_size = max(
max(computation.content_intrinsic_sizes.minimum_inline_size, computation.content_intrinsic_sizes.minimum_inline_size,
base.intrinsic_inline_sizes.minimum_inline_size); base.intrinsic_inline_sizes.minimum_inline_size,
);
computation.content_intrinsic_sizes.preferred_inline_size = computation.content_intrinsic_sizes.preferred_inline_size = max(
max(computation.content_intrinsic_sizes.preferred_inline_size, computation.content_intrinsic_sizes.preferred_inline_size,
base.intrinsic_inline_sizes.preferred_inline_size); base.intrinsic_inline_sizes.preferred_inline_size,
);
} }
} }
} }
@ -483,11 +518,13 @@ impl FlexFlow {
// TODO(zentner): This function needs to be radically different for multi-line flexbox. // TODO(zentner): This function needs to be radically different for multi-line flexbox.
// Currently, this is the core of BlockFlow::propagate_assigned_inline_size_to_children() with // Currently, this is the core of BlockFlow::propagate_assigned_inline_size_to_children() with
// all float and table logic stripped out. // all float and table logic stripped out.
fn block_mode_assign_inline_sizes(&mut self, fn block_mode_assign_inline_sizes(
&mut self,
_layout_context: &LayoutContext, _layout_context: &LayoutContext,
inline_start_content_edge: Au, inline_start_content_edge: Au,
inline_end_content_edge: Au, inline_end_content_edge: Au,
content_inline_size: Au) { content_inline_size: Au,
) {
let _scope = layout_debug_scope!("flex::block_mode_assign_inline_sizes"); let _scope = layout_debug_scope!("flex::block_mode_assign_inline_sizes");
debug!("flex::block_mode_assign_inline_sizes"); debug!("flex::block_mode_assign_inline_sizes");
@ -496,19 +533,22 @@ impl FlexFlow {
let container_block_size = match self.available_main_size { let container_block_size = match self.available_main_size {
AxisSize::Definite(length) => Some(length), AxisSize::Definite(length) => Some(length),
_ => None _ => None,
}; };
let container_inline_size = match self.available_cross_size { let container_inline_size = match self.available_cross_size {
AxisSize::Definite(length) => length, AxisSize::Definite(length) => length,
AxisSize::MinMax(ref constraint) => constraint.clamp(content_inline_size), AxisSize::MinMax(ref constraint) => constraint.clamp(content_inline_size),
AxisSize::Infinite => content_inline_size AxisSize::Infinite => content_inline_size,
}; };
let mut children = self.block_flow.base.children.random_access_mut(); let mut children = self.block_flow.base.children.random_access_mut();
for kid in &mut self.items { for kid in &mut self.items {
let kid_base = children.get(kid.index).mut_base(); let kid_base = children.get(kid.index).mut_base();
kid_base.block_container_explicit_block_size = container_block_size; kid_base.block_container_explicit_block_size = container_block_size;
if kid_base.flags.contains(FlowFlags::INLINE_POSITION_IS_STATIC) { if kid_base
.flags
.contains(FlowFlags::INLINE_POSITION_IS_STATIC)
{
// The inline-start margin edge of the child flow is at our inline-start content // The inline-start margin edge of the child flow is at our inline-start content
// edge, and its inline-size is our content inline-size. // edge, and its inline-size is our content inline-size.
kid_base.position.start.i = kid_base.position.start.i =
@ -525,11 +565,13 @@ impl FlexFlow {
} }
} }
fn inline_mode_assign_inline_sizes(&mut self, fn inline_mode_assign_inline_sizes(
&mut self,
layout_context: &LayoutContext, layout_context: &LayoutContext,
inline_start_content_edge: Au, inline_start_content_edge: Au,
_inline_end_content_edge: Au, _inline_end_content_edge: Au,
content_inline_size: Au) { content_inline_size: Au,
) {
let _scope = layout_debug_scope!("flex::inline_mode_assign_inline_sizes"); let _scope = layout_debug_scope!("flex::inline_mode_assign_inline_sizes");
debug!("inline_mode_assign_inline_sizes"); debug!("inline_mode_assign_inline_sizes");
@ -551,17 +593,25 @@ impl FlexFlow {
self.block_flow.base.position.size.inline = inline_size; self.block_flow.base.position.size.inline = inline_size;
// Calculate non-auto block size to pass to children. // Calculate non-auto block size to pass to children.
let box_border = self.block_flow.fragment.box_sizing_boundary(Direction::Block); let box_border = self
.block_flow
.fragment
.box_sizing_boundary(Direction::Block);
let parent_container_size = let parent_container_size = self
self.block_flow.explicit_block_containing_size(layout_context.shared_context()); .block_flow
.explicit_block_containing_size(layout_context.shared_context());
// https://drafts.csswg.org/css-ui-3/#box-sizing // https://drafts.csswg.org/css-ui-3/#box-sizing
let explicit_content_size = self let explicit_content_size = self
.block_flow .block_flow
.explicit_block_size(parent_container_size) .explicit_block_size(parent_container_size)
.map(|x| max(x - box_border, Au(0))); .map(|x| max(x - box_border, Au(0)));
let containing_block_text_align = let containing_block_text_align = self
self.block_flow.fragment.style().get_inherited_text().text_align; .block_flow
.fragment
.style()
.get_inherited_text()
.text_align;
while let Some(mut line) = self.get_flex_line(inline_size) { while let Some(mut line) = self.get_flex_line(inline_size) {
let items = &mut self.items[line.range.clone()]; let items = &mut self.items[line.range.clone()];
@ -572,32 +622,42 @@ impl FlexFlow {
let item_count = items.len() as i32; let item_count = items.len() as i32;
let mut cur_i = inline_start_content_edge; let mut cur_i = inline_start_content_edge;
let item_interval = if line.free_space >= Au(0) && line.auto_margin_count == 0 { let item_interval = if line.free_space >= Au(0) && line.auto_margin_count == 0 {
match self.block_flow.fragment.style().get_position().justify_content { match self
.block_flow
.fragment
.style()
.get_position()
.justify_content
{
JustifyContent::SpaceBetween => { JustifyContent::SpaceBetween => {
if item_count == 1 { if item_count == 1 {
Au(0) Au(0)
} else { } else {
line.free_space / (item_count - 1) line.free_space / (item_count - 1)
} }
} },
JustifyContent::SpaceAround => { JustifyContent::SpaceAround => line.free_space / item_count,
line.free_space / item_count
}
_ => Au(0), _ => Au(0),
} }
} else { } else {
Au(0) Au(0)
}; };
match self.block_flow.fragment.style().get_position().justify_content { match self
.block_flow
.fragment
.style()
.get_position()
.justify_content
{
// Overflow equally in both ends of line. // Overflow equally in both ends of line.
JustifyContent::Center | JustifyContent::SpaceAround => { JustifyContent::Center | JustifyContent::SpaceAround => {
cur_i += (line.free_space - item_interval * (item_count - 1)) / 2; cur_i += (line.free_space - item_interval * (item_count - 1)) / 2;
} },
JustifyContent::FlexEnd => { JustifyContent::FlexEnd => {
cur_i += line.free_space; cur_i += line.free_space;
} },
_ => {} _ => {},
} }
let mut children = self.block_flow.base.children.random_access_mut(); let mut children = self.block_flow.base.children.random_access_mut();
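The inline-mode pass above spaces a line's items according to justify-content: SpaceBetween divides positive free space into item_count - 1 gaps, SpaceAround into item_count gaps with half a gap at each end, while Center and FlexEnd only shift the starting offset. A unit-free sketch of that arithmetic (the enum and f32 sizes are simplified stand-ins for the style types and Au):

#[derive(Clone, Copy)]
enum JustifyContent {
    FlexStart,
    FlexEnd,
    Center,
    SpaceBetween,
    SpaceAround,
}

// Returns (gap between consecutive items, offset of the first item's main-start edge).
fn spacing(justify: JustifyContent, free_space: f32, item_count: usize) -> (f32, f32) {
    let n = item_count as f32;
    if free_space <= 0.0 || item_count == 0 {
        return (0.0, 0.0);
    }
    match justify {
        JustifyContent::SpaceBetween if item_count > 1 => (free_space / (n - 1.0), 0.0),
        JustifyContent::SpaceBetween => (0.0, 0.0),
        JustifyContent::SpaceAround => (free_space / n, free_space / (2.0 * n)),
        JustifyContent::Center => (0.0, free_space / 2.0),
        JustifyContent::FlexEnd => (0.0, free_space),
        JustifyContent::FlexStart => (0.0, 0.0),
    }
}

fn main() {
    // Three items with 60px of free space on the line.
    let (gap, start) = spacing(JustifyContent::SpaceAround, 60.0, 3);
    assert_eq!((gap, start), (20.0, 10.0));
    let (gap, start) = spacing(JustifyContent::SpaceBetween, 60.0, 3);
    assert_eq!((gap, start), (30.0, 0.0));
}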
@ -613,8 +673,7 @@ impl FlexFlow {
block.base.flags.set_text_align(containing_block_text_align); block.base.flags.set_text_align(containing_block_text_align);
let margin = block.fragment.style().logical_margin(); let margin = block.fragment.style().logical_margin();
let auto_len = let auto_len = if line.auto_margin_count == 0 || line.free_space <= Au(0) {
if line.auto_margin_count == 0 || line.free_space <= Au(0) {
Au(0) Au(0)
} else { } else {
line.free_space / line.auto_margin_count line.free_space / line.auto_margin_count
@ -623,9 +682,9 @@ impl FlexFlow {
.specified_or_default(auto_len); .specified_or_default(auto_len);
let margin_inline_end = MaybeAuto::from_style(margin.inline_end, inline_size) let margin_inline_end = MaybeAuto::from_style(margin.inline_end, inline_size)
.specified_or_default(auto_len); .specified_or_default(auto_len);
let item_inline_size = item.main_size let item_inline_size = item.main_size -
- block.fragment.box_sizing_boundary(self.main_mode) block.fragment.box_sizing_boundary(self.main_mode) +
+ block.fragment.border_padding.inline_start_end(); block.fragment.border_padding.inline_start_end();
let item_outer_size = item_inline_size + block.fragment.margin.inline_start_end(); let item_outer_size = item_inline_size + block.fragment.margin.inline_start_end();
block.fragment.margin.inline_start = margin_inline_start; block.fragment.margin.inline_start = margin_inline_start;
@ -669,7 +728,12 @@ impl FlexFlow {
let _scope = layout_debug_scope!("flex::inline_mode_assign_block_size"); let _scope = layout_debug_scope!("flex::inline_mode_assign_block_size");
let line_count = self.lines.len() as i32; let line_count = self.lines.len() as i32;
let line_align = self.block_flow.fragment.style().get_position().align_content; let line_align = self
.block_flow
.fragment
.style()
.get_position()
.align_content;
let mut cur_b = self.block_flow.fragment.border_padding.block_start; let mut cur_b = self.block_flow.fragment.border_padding.block_start;
let mut total_cross_size = Au(0); let mut total_cross_size = Au(0);
let mut line_interval = Au(0); let mut line_interval = Au(0);
@ -679,17 +743,22 @@ impl FlexFlow {
for line in self.lines.iter_mut() { for line in self.lines.iter_mut() {
for item in &self.items[line.range.clone()] { for item in &self.items[line.range.clone()] {
let fragment = &children.get(item.index).as_block().fragment; let fragment = &children.get(item.index).as_block().fragment;
line.cross_size = max(line.cross_size, line.cross_size = max(
fragment.border_box.size.block + line.cross_size,
fragment.margin.block_start_end()); fragment.border_box.size.block + fragment.margin.block_start_end(),
);
} }
total_cross_size += line.cross_size; total_cross_size += line.cross_size;
} }
} }
let box_border = self.block_flow.fragment.box_sizing_boundary(Direction::Block); let box_border = self
let parent_container_size = .block_flow
self.block_flow.explicit_block_containing_size(layout_context.shared_context()); .fragment
.box_sizing_boundary(Direction::Block);
let parent_container_size = self
.block_flow
.explicit_block_containing_size(layout_context.shared_context());
// https://drafts.csswg.org/css-ui-3/#box-sizing // https://drafts.csswg.org/css-ui-3/#box-sizing
let explicit_content_size = self let explicit_content_size = self
.block_flow .block_flow
@ -713,25 +782,25 @@ impl FlexFlow {
} else { } else {
free_space / (line_count - 1) free_space / (line_count - 1)
} }
} },
AlignContent::SpaceAround => { AlignContent::SpaceAround => {
if line_count == 0 { if line_count == 0 {
Au(0) Au(0)
} else { } else {
free_space / line_count free_space / line_count
} }
} },
_ => Au(0), _ => Au(0),
}; };
match line_align { match line_align {
AlignContent::Center | AlignContent::SpaceAround => { AlignContent::Center | AlignContent::SpaceAround => {
cur_b += (free_space - line_interval * (line_count - 1)) / 2; cur_b += (free_space - line_interval * (line_count - 1)) / 2;
} },
AlignContent::FlexEnd => { AlignContent::FlexEnd => {
cur_b += free_space; cur_b += free_space;
} },
_ => {} _ => {},
} }
} }
@ -744,8 +813,9 @@ impl FlexFlow {
let mut margin_block_start = block.fragment.margin.block_start; let mut margin_block_start = block.fragment.margin.block_start;
let mut margin_block_end = block.fragment.margin.block_end; let mut margin_block_end = block.fragment.margin.block_end;
let mut free_space = line.cross_size - block.base.position.size.block let mut free_space = line.cross_size -
- block.fragment.margin.block_start_end(); block.base.position.size.block -
block.fragment.margin.block_start_end();
// The spec is a little vague here, but if I understand it correctly, the outer // The spec is a little vague here, but if I understand it correctly, the outer
// cross size of item should equal to the line size if any auto margin exists. // cross size of item should equal to the line size if any auto margin exists.
@ -758,13 +828,15 @@ impl FlexFlow {
free_space / auto_margin_count free_space / auto_margin_count
}; };
} }
margin_block_end = line.cross_size - margin_block_start - block.base.position.size.block; margin_block_end =
line.cross_size - margin_block_start - block.base.position.size.block;
free_space = Au(0); free_space = Au(0);
} }
let self_align = block.fragment.style().get_position().align_self; let self_align = block.fragment.style().get_position().align_self;
if self_align == AlignSelf::Stretch && if self_align == AlignSelf::Stretch &&
block.fragment.style().content_block_size() == LengthOrPercentageOrAuto::Auto { block.fragment.style().content_block_size() == LengthOrPercentageOrAuto::Auto
{
free_space = Au(0); free_space = Au(0);
block.base.block_container_explicit_block_size = Some(line.cross_size); block.base.block_container_explicit_block_size = Some(line.cross_size);
block.base.position.size.block = block.base.position.size.block =
@ -774,12 +846,12 @@ impl FlexFlow {
// as if it has a fixed cross size, all child blocks should resolve against it. // as if it has a fixed cross size, all child blocks should resolve against it.
// block.assign_block_size(layout_context); // block.assign_block_size(layout_context);
} }
block.base.position.start.b = margin_block_start + block.base.position.start.b = margin_block_start + if !self.cross_reverse {
if !self.cross_reverse {
cur_b cur_b
} else { } else {
self.block_flow.fragment.border_padding.block_start * 2 self.block_flow.fragment.border_padding.block_start * 2 + total_cross_size -
+ total_cross_size - cur_b - line.cross_size cur_b -
line.cross_size
}; };
// TODO(stshine): support baseline alignment. // TODO(stshine): support baseline alignment.
if free_space != Au(0) { if free_space != Au(0) {
@ -788,8 +860,7 @@ impl FlexFlow {
AlignSelf::Center => free_space / 2, AlignSelf::Center => free_space / 2,
_ => Au(0), _ => Au(0),
}; };
block.base.position.start.b += block.base.position.start.b += if !self.cross_reverse {
if !self.cross_reverse {
flex_cross flex_cross
} else { } else {
free_space - flex_cross free_space - flex_cross
@ -798,7 +869,8 @@ impl FlexFlow {
} }
cur_b += line_interval + line.cross_size; cur_b += line_interval + line.cross_size;
} }
let total_block_size = total_cross_size + self.block_flow.fragment.border_padding.block_start_end(); let total_block_size =
total_cross_size + self.block_flow.fragment.border_padding.block_start_end();
self.block_flow.fragment.border_box.size.block = total_block_size; self.block_flow.fragment.border_box.size.block = total_block_size;
self.block_flow.base.position.size.block = total_block_size; self.block_flow.base.position.size.block = total_block_size;
} }
@ -830,24 +902,29 @@ impl Flow for FlexFlow {
} }
fn bubble_inline_sizes(&mut self) { fn bubble_inline_sizes(&mut self) {
let _scope = layout_debug_scope!("flex::bubble_inline_sizes {:x}", let _scope = layout_debug_scope!(
self.block_flow.base.debug_id()); "flex::bubble_inline_sizes {:x}",
self.block_flow.base.debug_id()
);
// Flexbox Section 9.0: Generate anonymous flex items: // Flexbox Section 9.0: Generate anonymous flex items:
// This part was handled in the flow constructor. // This part was handled in the flow constructor.
// Flexbox Section 9.1: Re-order flex items according to their order. // Flexbox Section 9.1: Re-order flex items according to their order.
// FIXME(stshine): This should be done during flow construction. // FIXME(stshine): This should be done during flow construction.
let mut items: Vec<FlexItem> = let mut items: Vec<FlexItem> = self
self.block_flow .block_flow
.base .base
.children .children
.iter() .iter()
.enumerate() .enumerate()
.filter(|&(_, flow)| { .filter(|&(_, flow)| {
!flow.as_block().base.flags.contains(FlowFlags::IS_ABSOLUTELY_POSITIONED) !flow
}) .as_block()
.map(|(index, flow)| FlexItem::new(index, flow)) .base
.flags
.contains(FlowFlags::IS_ABSOLUTELY_POSITIONED)
}).map(|(index, flow)| FlexItem::new(index, flow))
.collect(); .collect();
items.sort_by_key(|item| item.order); items.sort_by_key(|item| item.order);
@ -855,47 +932,66 @@ impl Flow for FlexFlow {
match self.main_mode { match self.main_mode {
Direction::Inline => self.inline_mode_bubble_inline_sizes(), Direction::Inline => self.inline_mode_bubble_inline_sizes(),
Direction::Block => self.block_mode_bubble_inline_sizes() Direction::Block => self.block_mode_bubble_inline_sizes(),
} }
} }
fn assign_inline_sizes(&mut self, layout_context: &LayoutContext) { fn assign_inline_sizes(&mut self, layout_context: &LayoutContext) {
let _scope = layout_debug_scope!("flex::assign_inline_sizes {:x}", self.block_flow.base.debug_id()); let _scope = layout_debug_scope!(
"flex::assign_inline_sizes {:x}",
self.block_flow.base.debug_id()
);
debug!("assign_inline_sizes"); debug!("assign_inline_sizes");
if !self.block_flow.base.restyle_damage.intersects(ServoRestyleDamage::REFLOW_OUT_OF_FLOW | if !self
ServoRestyleDamage::REFLOW) { .block_flow
return .base
.restyle_damage
.intersects(ServoRestyleDamage::REFLOW_OUT_OF_FLOW | ServoRestyleDamage::REFLOW)
{
return;
} }
self.block_flow.initialize_container_size_for_root(layout_context.shared_context()); self.block_flow
.initialize_container_size_for_root(layout_context.shared_context());
// Our inline-size was set to the inline-size of the containing block by the flow's parent. // Our inline-size was set to the inline-size of the containing block by the flow's parent.
// Now compute the real value. // Now compute the real value.
let containing_block_inline_size = self.block_flow.base.block_container_inline_size; let containing_block_inline_size = self.block_flow.base.block_container_inline_size;
self.block_flow.compute_used_inline_size(layout_context.shared_context(), self.block_flow.compute_used_inline_size(
containing_block_inline_size); layout_context.shared_context(),
containing_block_inline_size,
);
if self.block_flow.base.flags.is_float() { if self.block_flow.base.flags.is_float() {
self.block_flow.float.as_mut().unwrap().containing_inline_size = containing_block_inline_size self.block_flow
.float
.as_mut()
.unwrap()
.containing_inline_size = containing_block_inline_size
} }
let (available_block_size, available_inline_size) = { let (available_block_size, available_inline_size) = {
let style = &self.block_flow.fragment.style; let style = &self.block_flow.fragment.style;
let (specified_block_size, specified_inline_size) = if style.writing_mode.is_vertical() { let (specified_block_size, specified_inline_size) = if style.writing_mode.is_vertical()
{
(style.get_position().width, style.get_position().height) (style.get_position().width, style.get_position().height)
} else { } else {
(style.get_position().height, style.get_position().width) (style.get_position().height, style.get_position().width)
}; };
let available_inline_size = AxisSize::new(specified_inline_size, let available_inline_size = AxisSize::new(
specified_inline_size,
Some(self.block_flow.base.block_container_inline_size), Some(self.block_flow.base.block_container_inline_size),
style.min_inline_size(), style.min_inline_size(),
style.max_inline_size()); style.max_inline_size(),
);
let available_block_size = AxisSize::new(specified_block_size, let available_block_size = AxisSize::new(
specified_block_size,
self.block_flow.base.block_container_explicit_block_size, self.block_flow.base.block_container_explicit_block_size,
style.min_block_size(), style.min_block_size(),
style.max_block_size()); style.max_block_size(),
);
(available_block_size, available_inline_size) (available_block_size, available_inline_size)
}; };
@ -903,37 +999,46 @@ impl Flow for FlexFlow {
let inline_start_content_edge = self.block_flow.fragment.border_box.start.i + let inline_start_content_edge = self.block_flow.fragment.border_box.start.i +
self.block_flow.fragment.border_padding.inline_start; self.block_flow.fragment.border_padding.inline_start;
debug!("inline_start_content_edge = {:?}", inline_start_content_edge); debug!(
"inline_start_content_edge = {:?}",
inline_start_content_edge
);
let padding_and_borders = self.block_flow.fragment.border_padding.inline_start_end(); let padding_and_borders = self.block_flow.fragment.border_padding.inline_start_end();
// Distance from the inline-end margin edge to the inline-end content edge. // Distance from the inline-end margin edge to the inline-end content edge.
let inline_end_content_edge = let inline_end_content_edge = self.block_flow.fragment.margin.inline_end +
self.block_flow.fragment.margin.inline_end +
self.block_flow.fragment.border_padding.inline_end; self.block_flow.fragment.border_padding.inline_end;
debug!("padding_and_borders = {:?}", padding_and_borders); debug!("padding_and_borders = {:?}", padding_and_borders);
debug!("self.block_flow.fragment.border_box.size.inline = {:?}", debug!(
self.block_flow.fragment.border_box.size.inline); "self.block_flow.fragment.border_box.size.inline = {:?}",
let content_inline_size = self.block_flow.fragment.border_box.size.inline - padding_and_borders; self.block_flow.fragment.border_box.size.inline
);
let content_inline_size =
self.block_flow.fragment.border_box.size.inline - padding_and_borders;
match self.main_mode { match self.main_mode {
Direction::Inline => { Direction::Inline => {
self.available_main_size = available_inline_size; self.available_main_size = available_inline_size;
self.available_cross_size = available_block_size; self.available_cross_size = available_block_size;
self.inline_mode_assign_inline_sizes(layout_context, self.inline_mode_assign_inline_sizes(
layout_context,
inline_start_content_edge, inline_start_content_edge,
inline_end_content_edge, inline_end_content_edge,
content_inline_size) content_inline_size,
} )
},
Direction::Block => { Direction::Block => {
self.available_main_size = available_block_size; self.available_main_size = available_block_size;
self.available_cross_size = available_inline_size; self.available_cross_size = available_inline_size;
self.block_mode_assign_inline_sizes(layout_context, self.block_mode_assign_inline_sizes(
layout_context,
inline_start_content_edge, inline_start_content_edge,
inline_end_content_edge, inline_end_content_edge,
content_inline_size) content_inline_size,
} )
},
} }
} }
@ -941,31 +1046,37 @@ impl Flow for FlexFlow {
match self.main_mode { match self.main_mode {
Direction::Inline => { Direction::Inline => {
self.inline_mode_assign_block_size(layout_context); self.inline_mode_assign_block_size(layout_context);
let block_start = AdjoiningMargins::from_margin(self.block_flow.fragment.margin.block_start); let block_start =
let block_end = AdjoiningMargins::from_margin(self.block_flow.fragment.margin.block_end); AdjoiningMargins::from_margin(self.block_flow.fragment.margin.block_start);
self.block_flow.base.collapsible_margins = CollapsibleMargins::Collapse(block_start, block_end); let block_end =
AdjoiningMargins::from_margin(self.block_flow.fragment.margin.block_end);
self.block_flow.base.collapsible_margins =
CollapsibleMargins::Collapse(block_start, block_end);
// TODO(stshine): assign proper static position for absolute descendants. // TODO(stshine): assign proper static position for absolute descendants.
if (&*self as &Flow).contains_roots_of_absolute_flow_tree() { if (&*self as &Flow).contains_roots_of_absolute_flow_tree() {
// Assign block-sizes for all flows in this absolute flow tree. // Assign block-sizes for all flows in this absolute flow tree.
// This is preorder because the block-size of an absolute flow may depend on // This is preorder because the block-size of an absolute flow may depend on
// the block-size of its containing block, which may also be an absolute flow. // the block-size of its containing block, which may also be an absolute flow.
let assign_abs_b_sizes = AbsoluteAssignBSizesTraversal(layout_context.shared_context()); let assign_abs_b_sizes =
AbsoluteAssignBSizesTraversal(layout_context.shared_context());
assign_abs_b_sizes.traverse_absolute_flows(&mut *self); assign_abs_b_sizes.traverse_absolute_flows(&mut *self);
} }
} },
Direction::Block =>{ Direction::Block => {
self.block_flow self.block_flow.assign_block_size_block_base(
.assign_block_size_block_base(layout_context, layout_context,
None, None,
MarginsMayCollapseFlag::MarginsMayNotCollapse); MarginsMayCollapseFlag::MarginsMayNotCollapse,
);
self.block_mode_assign_block_size(); self.block_mode_assign_block_size();
} },
} }
} }
fn compute_stacking_relative_position(&mut self, layout_context: &LayoutContext) { fn compute_stacking_relative_position(&mut self, layout_context: &LayoutContext) {
self.block_flow.compute_stacking_relative_position(layout_context) self.block_flow
.compute_stacking_relative_position(layout_context)
} }
fn place_float_if_applicable<'a>(&mut self) { fn place_float_if_applicable<'a>(&mut self) {
@ -973,11 +1084,13 @@ impl Flow for FlexFlow {
} }
fn update_late_computed_inline_position_if_necessary(&mut self, inline_position: Au) { fn update_late_computed_inline_position_if_necessary(&mut self, inline_position: Au) {
self.block_flow.update_late_computed_inline_position_if_necessary(inline_position) self.block_flow
.update_late_computed_inline_position_if_necessary(inline_position)
} }
fn update_late_computed_block_position_if_necessary(&mut self, block_position: Au) { fn update_late_computed_block_position_if_necessary(&mut self, block_position: Au) {
self.block_flow.update_late_computed_block_position_if_necessary(block_position) self.block_flow
.update_late_computed_block_position_if_necessary(block_position)
} }
fn build_display_list(&mut self, state: &mut DisplayListBuildState) { fn build_display_list(&mut self, state: &mut DisplayListBuildState) {
@ -1008,11 +1121,17 @@ impl Flow for FlexFlow {
self.block_flow.generated_containing_block_size(flow) self.block_flow.generated_containing_block_size(flow)
} }
fn iterate_through_fragment_border_boxes(&self, fn iterate_through_fragment_border_boxes(
&self,
iterator: &mut FragmentBorderBoxIterator, iterator: &mut FragmentBorderBoxIterator,
level: i32, level: i32,
stacking_context_position: &Point2D<Au>) { stacking_context_position: &Point2D<Au>,
self.block_flow.iterate_through_fragment_border_boxes(iterator, level, stacking_context_position); ) {
self.block_flow.iterate_through_fragment_border_boxes(
iterator,
level,
stacking_context_position,
);
} }
fn mutate_fragments(&mut self, mutator: &mut FnMut(&mut Fragment)) { fn mutate_fragments(&mut self, mutator: &mut FnMut(&mut Fragment)) {


@ -16,7 +16,7 @@ use style::values::computed::LengthOrPercentageOrAuto;
#[derive(Clone, Copy, Debug, Serialize)] #[derive(Clone, Copy, Debug, Serialize)]
pub enum FloatKind { pub enum FloatKind {
Left, Left,
Right Right,
} }
impl FloatKind { impl FloatKind {
@ -78,7 +78,12 @@ impl FloatList {
impl fmt::Debug for FloatList { impl fmt::Debug for FloatList {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "max_block_start={:?} floats={}", self.max_block_start, self.floats.len())?; write!(
f,
"max_block_start={:?} floats={}",
self.max_block_start,
self.floats.len()
)?;
for float in self.floats.iter() { for float in self.floats.iter() {
write!(f, " {:?}", float)?; write!(f, " {:?}", float)?;
} }
@ -95,22 +100,29 @@ pub struct PlacementInfo {
/// The maximum inline-end position of the float, generally determined by the containing block. /// The maximum inline-end position of the float, generally determined by the containing block.
pub max_inline_size: Au, pub max_inline_size: Au,
/// The kind of float. /// The kind of float.
pub kind: FloatKind pub kind: FloatKind,
} }
impl fmt::Debug for PlacementInfo { impl fmt::Debug for PlacementInfo {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, write!(
f,
"size={:?} ceiling={:?} max_inline_size={:?} kind={:?}", "size={:?} ceiling={:?} max_inline_size={:?} kind={:?}",
self.size, self.size, self.ceiling, self.max_inline_size, self.kind
self.ceiling, )
self.max_inline_size,
self.kind)
} }
} }
fn range_intersect(block_start_1: Au, block_end_1: Au, block_start_2: Au, block_end_2: Au) -> (Au, Au) { fn range_intersect(
(max(block_start_1, block_start_2), min(block_end_1, block_end_2)) block_start_1: Au,
block_end_1: Au,
block_start_2: Au,
block_end_2: Au,
) -> (Au, Au) {
(
max(block_start_1, block_start_2),
min(block_end_1, block_end_2),
)
} }
/// Encapsulates information about floats. This is optimized to avoid allocation if there are /// Encapsulates information about floats. This is optimized to avoid allocation if there are
@ -162,8 +174,12 @@ impl Floats {
/// with inline-size small enough that it doesn't collide with any floats. max_x is the /// with inline-size small enough that it doesn't collide with any floats. max_x is the
/// inline-size beyond which floats have no effect. (Generally this is the containing block /// inline-size beyond which floats have no effect. (Generally this is the containing block
/// inline-size.) /// inline-size.)
pub fn available_rect(&self, block_start: Au, block_size: Au, max_x: Au) pub fn available_rect(
-> Option<LogicalRect<Au>> { &self,
block_start: Au,
block_size: Au,
max_x: Au,
) -> Option<LogicalRect<Au>> {
let list = &self.list; let list = &self.list;
let block_start = block_start - self.offset.block; let block_start = block_start - self.offset.block;
@ -186,30 +202,38 @@ impl Floats {
debug!("float_pos: {:?}, float_size: {:?}", float_pos, float_size); debug!("float_pos: {:?}, float_size: {:?}", float_pos, float_size);
match float.kind { match float.kind {
FloatKind::Left if float_pos.i + float_size.inline > max_inline_start && FloatKind::Left
if float_pos.i + float_size.inline > max_inline_start &&
float_pos.b + float_size.block > block_start && float_pos.b + float_size.block > block_start &&
float_pos.b < block_start + block_size => { float_pos.b < block_start + block_size =>
{
max_inline_start = float_pos.i + float_size.inline; max_inline_start = float_pos.i + float_size.inline;
l_block_start = Some(float_pos.b); l_block_start = Some(float_pos.b);
l_block_end = Some(float_pos.b + float_size.block); l_block_end = Some(float_pos.b + float_size.block);
debug!("available_rect: collision with inline_start float: new \ debug!(
"available_rect: collision with inline_start float: new \
max_inline_start is {:?}", max_inline_start is {:?}",
max_inline_start); max_inline_start
);
} }
FloatKind::Right if float_pos.i < min_inline_end && FloatKind::Right
if float_pos.i < min_inline_end &&
float_pos.b + float_size.block > block_start && float_pos.b + float_size.block > block_start &&
float_pos.b < block_start + block_size => { float_pos.b < block_start + block_size =>
{
min_inline_end = float_pos.i; min_inline_end = float_pos.i;
r_block_start = Some(float_pos.b); r_block_start = Some(float_pos.b);
r_block_end = Some(float_pos.b + float_size.block); r_block_end = Some(float_pos.b + float_size.block);
debug!("available_rect: collision with inline_end float: new min_inline_end \ debug!(
"available_rect: collision with inline_end float: new min_inline_end \
is {:?}", is {:?}",
min_inline_end); min_inline_end
);
} }
FloatKind::Left | FloatKind::Right => {} FloatKind::Left | FloatKind::Right => {},
} }
} }
@ -217,24 +241,27 @@ impl Floats {
// If there are floats on both sides, take the intersection of the // If there are floats on both sides, take the intersection of the
// two areas. Also make sure we never return a block-start smaller than the // two areas. Also make sure we never return a block-start smaller than the
// given upper bound. // given upper bound.
let (block_start, block_end) = match (r_block_start, let (block_start, block_end) =
r_block_end, match (r_block_start, r_block_end, l_block_start, l_block_end) {
l_block_start, (
l_block_end) { Some(r_block_start),
(Some(r_block_start), Some(r_block_end), Some(l_block_start), Some(l_block_end)) => { Some(r_block_end),
range_intersect(max(block_start, r_block_start), Some(l_block_start),
Some(l_block_end),
) => range_intersect(
max(block_start, r_block_start),
r_block_end, r_block_end,
max(block_start, l_block_start), max(block_start, l_block_start),
l_block_end) l_block_end,
} ),
(None, None, Some(l_block_start), Some(l_block_end)) => { (None, None, Some(l_block_start), Some(l_block_end)) => {
(max(block_start, l_block_start), l_block_end) (max(block_start, l_block_start), l_block_end)
} },
(Some(r_block_start), Some(r_block_end), None, None) => { (Some(r_block_start), Some(r_block_end), None, None) => {
(max(block_start, r_block_start), r_block_end) (max(block_start, r_block_start), r_block_end)
} },
(None, None, None, None) => return None, (None, None, None, None) => return None,
_ => panic!("Reached unreachable state when computing float area") _ => panic!("Reached unreachable state when computing float area"),
}; };
// FIXME(eatkinson): This assertion is too strong and fails in some cases. It is OK to // FIXME(eatkinson): This assertion is too strong and fails in some cases. It is OK to
@ -244,11 +271,13 @@ impl Floats {
assert!(block_start <= block_end, "Float position error"); assert!(block_start <= block_end, "Float position error");
Some(LogicalRect::new(self.writing_mode, Some(LogicalRect::new(
self.writing_mode,
max_inline_start + self.offset.inline, max_inline_start + self.offset.inline,
block_start + self.offset.block, block_start + self.offset.block,
min_inline_end - max_inline_start, min_inline_end - max_inline_start,
block_end - block_start)) block_end - block_start,
))
} }
/// Adds a new float to the list. /// Adds a new float to the list.
@ -260,7 +289,7 @@ impl Floats {
Some(max_block_start) => max(info.ceiling, max_block_start + self.offset.block), Some(max_block_start) => max(info.ceiling, max_block_start + self.offset.block),
}, },
max_inline_size: info.max_inline_size, max_inline_size: info.max_inline_size,
kind: info.kind kind: info.kind,
}; };
debug!("add_float: added float with info {:?}", new_info); debug!("add_float: added float with info {:?}", new_info);
@ -271,7 +300,7 @@ impl Floats {
self.place_between_floats(&new_info).start - self.offset, self.place_between_floats(&new_info).start - self.offset,
info.size, info.size,
), ),
kind: info.kind kind: info.kind,
}; };
self.list.floats = self.list.floats.prepend_elem(new_float); self.list.floats = self.list.floats.prepend_elem(new_float);
@ -284,8 +313,12 @@ impl Floats {
/// Given the three sides of the bounding rectangle in the block-start direction, finds the /// Given the three sides of the bounding rectangle in the block-start direction, finds the
/// largest block-size that will result in the rectangle not colliding with any floats. Returns /// largest block-size that will result in the rectangle not colliding with any floats. Returns
/// `None` if that block-size is infinite. /// `None` if that block-size is infinite.
fn max_block_size_for_bounds(&self, inline_start: Au, block_start: Au, inline_size: Au) fn max_block_size_for_bounds(
-> Option<Au> { &self,
inline_start: Au,
block_start: Au,
inline_size: Au,
) -> Option<Au> {
let list = &self.list; let list = &self.list;
let block_start = block_start - self.offset.block; let block_start = block_start - self.offset.block;
@ -295,7 +328,8 @@ impl Floats {
for float in list.floats.iter() { for float in list.floats.iter() {
if float.bounds.start.b + float.bounds.size.block > block_start && if float.bounds.start.b + float.bounds.size.block > block_start &&
float.bounds.start.i + float.bounds.size.inline > inline_start && float.bounds.start.i + float.bounds.size.inline > inline_start &&
float.bounds.start.i < inline_start + inline_size { float.bounds.start.i < inline_start + inline_size
{
let new_y = float.bounds.start.b; let new_y = float.bounds.start.b;
max_block_size = Some(min(max_block_size.unwrap_or(new_y), new_y)); max_block_size = Some(min(max_block_size.unwrap_or(new_y), new_y));
} }
@ -318,85 +352,88 @@ impl Floats {
Au(0), Au(0),
info.ceiling, info.ceiling,
info.max_inline_size, info.max_inline_size,
MAX_AU) MAX_AU,
} )
},
FloatKind::Right => { FloatKind::Right => {
return LogicalRect::new( return LogicalRect::new(
self.writing_mode, self.writing_mode,
info.max_inline_size - info.size.inline, info.max_inline_size - info.size.inline,
info.ceiling, info.ceiling,
info.max_inline_size, info.max_inline_size,
MAX_AU) MAX_AU,
} )
},
} }
} }
// Can't go any higher than previous floats or previous elements in the document. // Can't go any higher than previous floats or previous elements in the document.
let mut float_b = info.ceiling; let mut float_b = info.ceiling;
loop { loop {
let maybe_location = self.available_rect(float_b, let maybe_location =
info.size.block, self.available_rect(float_b, info.size.block, info.max_inline_size);
info.max_inline_size); debug!(
debug!("place_float: got available rect: {:?} for block-pos: {:?}", "place_float: got available rect: {:?} for block-pos: {:?}",
maybe_location, maybe_location, float_b
float_b); );
match maybe_location { match maybe_location {
// If there are no floats blocking us, return the current location // If there are no floats blocking us, return the current location
// TODO(eatkinson): integrate with overflow // TODO(eatkinson): integrate with overflow
None => { None => {
return match info.kind { return match info.kind {
FloatKind::Left => { FloatKind::Left => LogicalRect::new(
LogicalRect::new(
self.writing_mode, self.writing_mode,
Au(0), Au(0),
float_b, float_b,
info.max_inline_size, info.max_inline_size,
MAX_AU) MAX_AU,
} ),
FloatKind::Right => { FloatKind::Right => LogicalRect::new(
LogicalRect::new(
self.writing_mode, self.writing_mode,
info.max_inline_size - info.size.inline, info.max_inline_size - info.size.inline,
float_b, float_b,
info.max_inline_size, info.max_inline_size,
MAX_AU) MAX_AU,
} ),
}
} }
},
Some(rect) => { Some(rect) => {
assert_ne!(rect.start.b + rect.size.block, float_b, assert_ne!(
"Non-terminating float placement"); rect.start.b + rect.size.block,
float_b,
"Non-terminating float placement"
);
// Place here if there is enough room // Place here if there is enough room
if rect.size.inline >= info.size.inline { if rect.size.inline >= info.size.inline {
let block_size = self.max_block_size_for_bounds(rect.start.i, let block_size = self.max_block_size_for_bounds(
rect.start.i,
rect.start.b, rect.start.b,
rect.size.inline); rect.size.inline,
);
let block_size = block_size.unwrap_or(MAX_AU); let block_size = block_size.unwrap_or(MAX_AU);
return match info.kind { return match info.kind {
FloatKind::Left => { FloatKind::Left => LogicalRect::new(
LogicalRect::new(
self.writing_mode, self.writing_mode,
rect.start.i, rect.start.i,
float_b, float_b,
rect.size.inline, rect.size.inline,
block_size) block_size,
} ),
FloatKind::Right => { FloatKind::Right => LogicalRect::new(
LogicalRect::new(
self.writing_mode, self.writing_mode,
rect.start.i + rect.size.inline - info.size.inline, rect.start.i + rect.size.inline - info.size.inline,
float_b, float_b,
rect.size.inline, rect.size.inline,
block_size) block_size,
} ),
} };
} }
// Try to place at the next-lowest location. // Try to place at the next-lowest location.
// Need to be careful of fencepost errors. // Need to be careful of fencepost errors.
float_b = rect.start.b + rect.size.block; float_b = rect.start.b + rect.size.block;
} },
} }
} }
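place_between_floats, reformatted in the hunk above, walks downward from the float's ceiling: it queries the available rect at the current block position, places the float there if the gap is wide enough, and otherwise drops to just below the blocking float and retries (the assert_ne! guards against a non-terminating descent). A much-simplified sketch of that descent over a list of already-placed left floats, with plain f32 geometry instead of LogicalRect and Au, and assuming the new float is narrower than the container:

#[derive(Clone, Copy, Debug)]
struct Rect {
    i: f32,
    b: f32,
    inline: f32,
    block: f32,
}

// Inline-start edge still free at block position `b` for a band of height `block`,
// given the floats already placed (left floats only, for brevity).
fn available_inline_start(placed: &[Rect], b: f32, block: f32) -> f32 {
    placed
        .iter()
        .filter(|f| f.b < b + block && f.b + f.block > b)
        .map(|f| f.i + f.inline)
        .fold(0.0, f32::max)
}

// Descend from `ceiling` until the new float fits, mirroring the shape of
// Floats::place_between_floats (without the Right-float and max-size cases).
fn place_left_float(placed: &[Rect], size: (f32, f32), ceiling: f32, max_inline: f32) -> Rect {
    let (inline, block) = size;
    let mut b = ceiling;
    loop {
        let start = available_inline_start(placed, b, block);
        if start + inline <= max_inline {
            return Rect { i: start, b, inline, block };
        }
        // Not enough room: drop below the lowest float that blocks this band.
        let next_b = placed
            .iter()
            .filter(|f| f.b < b + block && f.b + f.block > b)
            .map(|f| f.b + f.block)
            .fold(b, f32::max);
        b = next_b;
    }
}

fn main() {
    let placed = vec![Rect { i: 0.0, b: 0.0, inline: 80.0, block: 40.0 }];
    // A 50px-wide float does not fit next to the 80px one in a 100px container,
    // so it is pushed below it.
    let placed_at = place_left_float(&placed, (50.0, 20.0), 0.0, 100.0);
    assert_eq!((placed_at.i, placed_at.b), (0.0, 40.0));
}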
} }
@ -411,8 +448,8 @@ impl Floats {
(ClearType::Both, _) => { (ClearType::Both, _) => {
let b = self.offset.block + float.bounds.start.b + float.bounds.size.block; let b = self.offset.block + float.bounds.start.b + float.bounds.size.block;
clearance = max(clearance, b); clearance = max(clearance, b);
} },
_ => {} _ => {},
} }
} }
clearance clearance
@ -486,14 +523,20 @@ impl SpeculatedFloatPlacement {
} }
} }
self.left = max(self.left, block_flow.base.speculated_float_placement_in.left); self.left = max(
self.right = max(self.right, block_flow.base.speculated_float_placement_in.right); self.left,
block_flow.base.speculated_float_placement_in.left,
);
self.right = max(
self.right,
block_flow.base.speculated_float_placement_in.right,
);
} }
} }
let base_flow = flow.base(); let base_flow = flow.base();
if !base_flow.flags.is_float() { if !base_flow.flags.is_float() {
return return;
} }
let mut float_inline_size = base_flow.intrinsic_inline_sizes.preferred_inline_size; let mut float_inline_size = base_flow.intrinsic_inline_sizes.preferred_inline_size;
@ -504,7 +547,8 @@ impl SpeculatedFloatPlacement {
// that the layout traversal logic will know that objects later in the document // that the layout traversal logic will know that objects later in the document
// might flow around this float. // might flow around this float.
if let LengthOrPercentageOrAuto::Percentage(percentage) = if let LengthOrPercentageOrAuto::Percentage(percentage) =
flow.as_block().fragment.style.content_inline_size() { flow.as_block().fragment.style.content_inline_size()
{
if percentage.0 > 0.0 { if percentage.0 > 0.0 {
float_inline_size = Au::from_px(1) float_inline_size = Au::from_px(1)
} }
@ -513,7 +557,7 @@ impl SpeculatedFloatPlacement {
} }
match base_flow.flags.float_kind() { match base_flow.flags.float_kind() {
StyleFloat::None => {} StyleFloat::None => {},
StyleFloat::Left => self.left = self.left + float_inline_size, StyleFloat::Left => self.left = self.left + float_inline_size,
StyleFloat::Right => self.right = self.right + float_inline_size, StyleFloat::Right => self.right = self.right + float_inline_size,
} }
@ -522,17 +566,18 @@ impl SpeculatedFloatPlacement {
/// Given a flow, computes the speculated inline size of the floats in of its first child. /// Given a flow, computes the speculated inline size of the floats in of its first child.
pub fn compute_floats_in_for_first_child(parent_flow: &mut Flow) -> SpeculatedFloatPlacement { pub fn compute_floats_in_for_first_child(parent_flow: &mut Flow) -> SpeculatedFloatPlacement {
if !parent_flow.is_block_like() { if !parent_flow.is_block_like() {
return parent_flow.base().speculated_float_placement_in return parent_flow.base().speculated_float_placement_in;
} }
let parent_block_flow = parent_flow.as_block(); let parent_block_flow = parent_flow.as_block();
if parent_block_flow.formatting_context_type() != FormattingContextType::None { if parent_block_flow.formatting_context_type() != FormattingContextType::None {
return SpeculatedFloatPlacement::zero() return SpeculatedFloatPlacement::zero();
} }
let mut placement = parent_block_flow.base.speculated_float_placement_in; let mut placement = parent_block_flow.base.speculated_float_placement_in;
let speculated_inline_content_edge_offsets = let speculated_inline_content_edge_offsets = parent_block_flow
parent_block_flow.fragment.guess_inline_content_edge_offsets(); .fragment
.guess_inline_content_edge_offsets();
if speculated_inline_content_edge_offsets.start > Au(0) { if speculated_inline_content_edge_offsets.start > Au(0) {
placement.left = if placement.left > speculated_inline_content_edge_offsets.start { placement.left = if placement.left > speculated_inline_content_edge_offsets.start {
@ -552,4 +597,3 @@ impl SpeculatedFloatPlacement {
placement placement
} }
} }
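`SpeculatedFloatPlacement`, touched in the hunks above, tracks how much inline space floats are expected to occupy on each side: incoming speculation is folded in with a per-side max, and each float then adds its preferred inline size to its own side. A toy version with plain `i32` values in place of `Au`:

```rust
/// Speculated inline space taken up by floats on each side
/// (modelled here as plain i32 rather than app units).
#[derive(Clone, Copy, Debug, Default, PartialEq)]
struct SpeculatedFloatPlacement {
    left: i32,
    right: i32,
}

#[derive(Clone, Copy)]
enum Float {
    None,
    Left,
    Right,
}

impl SpeculatedFloatPlacement {
    /// Fold a child's incoming speculation into this one (component-wise max).
    fn union(&mut self, other: SpeculatedFloatPlacement) {
        self.left = self.left.max(other.left);
        self.right = self.right.max(other.right);
    }

    /// Account for a float of the given preferred inline size.
    fn add_float(&mut self, kind: Float, inline_size: i32) {
        match kind {
            Float::None => {},
            Float::Left => self.left += inline_size,
            Float::Right => self.right += inline_size,
        }
    }
}

fn main() {
    let mut placement = SpeculatedFloatPlacement::default();
    placement.union(SpeculatedFloatPlacement { left: 20, right: 0 });
    placement.add_float(Float::Left, 10);
    placement.add_float(Float::Right, 35);
    assert_eq!(placement, SpeculatedFloatPlacement { left: 30, right: 35 });
}
```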
@ -251,11 +251,15 @@ pub trait Flow: HasBaseFlow + fmt::Debug + Sync + Send + 'static {
/// and return a new flow similar to `self` with the rest of the content. /// and return a new flow similar to `self` with the rest of the content.
/// ///
/// The default is to make a flow "atomic": it can not be fragmented. /// The default is to make a flow "atomic": it can not be fragmented.
fn fragment(&mut self, fn fragment(
&mut self,
layout_context: &LayoutContext, layout_context: &LayoutContext,
_fragmentation_context: Option<FragmentationContext>) _fragmentation_context: Option<FragmentationContext>,
-> Option<Arc<Flow>> { ) -> Option<Arc<Flow>> {
fn recursive_assign_block_size<F: ?Sized + Flow + GetBaseFlow>(flow: &mut F, ctx: &LayoutContext) { fn recursive_assign_block_size<F: ?Sized + Flow + GetBaseFlow>(
flow: &mut F,
ctx: &LayoutContext,
) {
for child in flow.mut_base().child_iter_mut() { for child in flow.mut_base().child_iter_mut() {
recursive_assign_block_size(child, ctx) recursive_assign_block_size(child, ctx)
} }
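The `fragment` hunk above only re-wraps the signature; per the doc comment, the default implementation makes a flow "atomic", meaning it lays out everything and never returns a continuation flow. A toy illustration of that trait-default pattern, with an invented `FragmentationContext` stand-in rather than Servo's real context types:

```rust
use std::sync::Arc;

struct FragmentationContext {
    /// Block space left in the current fragmentainer (e.g. a printed page).
    remaining_block_size: i32,
}

trait Flow {
    /// Lay out `self` within the given fragmentation context and, if the content
    /// does not fit, return a continuation flow holding the rest.
    /// The default makes the flow "atomic": it never produces a continuation.
    fn fragment(&mut self, _ctx: Option<&mut FragmentationContext>) -> Option<Arc<dyn Flow>> {
        None
    }
}

struct ImageFlow;
impl Flow for ImageFlow {} // images are atomic: rely on the default

fn main() {
    let mut image = ImageFlow;
    let mut ctx = FragmentationContext { remaining_block_size: 100 };
    assert!(image.fragment(Some(&mut ctx)).is_none());
    assert_eq!(ctx.remaining_block_size, 100);
}
```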
@ -277,17 +281,20 @@ pub trait Flow: HasBaseFlow + fmt::Debug + Sync + Send + 'static {
/// `parent_thread_id` is the thread ID of the parent. This is used for the layout tinting /// `parent_thread_id` is the thread ID of the parent. This is used for the layout tinting
/// debug mode; if the block size of this flow was determined by its parent, we should treat /// debug mode; if the block size of this flow was determined by its parent, we should treat
/// it as laid out by its parent. /// it as laid out by its parent.
fn assign_block_size_for_inorder_child_if_necessary(&mut self, fn assign_block_size_for_inorder_child_if_necessary(
&mut self,
layout_context: &LayoutContext, layout_context: &LayoutContext,
parent_thread_id: u8, parent_thread_id: u8,
_content_box: LogicalRect<Au>) _content_box: LogicalRect<Au>,
-> bool { ) -> bool {
        let might_have_floats_in_or_out =
            self.base().might_have_floats_in() || self.base().might_have_floats_out();
        if might_have_floats_in_or_out {
            self.mut_base().thread_id = parent_thread_id;
            self.assign_block_size(layout_context);
            self.mut_base()
                .restyle_damage
                .remove(ServoRestyleDamage::REFLOW_OUT_OF_FLOW | ServoRestyleDamage::REFLOW);
} }
might_have_floats_in_or_out might_have_floats_in_or_out
} }
@ -295,23 +302,32 @@ pub trait Flow: HasBaseFlow + fmt::Debug + Sync + Send + 'static {
fn get_overflow_in_parent_coordinates(&self) -> Overflow { fn get_overflow_in_parent_coordinates(&self) -> Overflow {
// FIXME(#2795): Get the real container size. // FIXME(#2795): Get the real container size.
let container_size = Size2D::zero(); let container_size = Size2D::zero();
let position = self.base().position.to_physical(self.base().writing_mode, container_size); let position = self
.base()
.position
.to_physical(self.base().writing_mode, container_size);
let mut overflow = self.base().overflow; let mut overflow = self.base().overflow;
match self.class() { match self.class() {
FlowClass::Block | FlowClass::TableCaption | FlowClass::TableCell => {} FlowClass::Block | FlowClass::TableCaption | FlowClass::TableCell => {},
_ => { _ => {
overflow.translate(&position.origin.to_vector()); overflow.translate(&position.origin.to_vector());
return overflow; return overflow;
} },
} }
let border_box = self.as_block().fragment.stacking_relative_border_box( let border_box = self.as_block().fragment.stacking_relative_border_box(
&self.base().stacking_relative_position, &self.base().stacking_relative_position,
&self.base().early_absolute_position_info.relative_containing_block_size, &self
self.base().early_absolute_position_info.relative_containing_block_mode, .base()
CoordinateSystem::Own); .early_absolute_position_info
.relative_containing_block_size,
self.base()
.early_absolute_position_info
.relative_containing_block_mode,
CoordinateSystem::Own,
);
if StyleOverflow::Visible != self.as_block().fragment.style.get_box().overflow_x { if StyleOverflow::Visible != self.as_block().fragment.style.get_box().overflow_x {
overflow.paint.origin.x = Au(0); overflow.paint.origin.x = Au(0);
overflow.paint.size.width = border_box.size.width; overflow.paint.size.width = border_box.size.width;
@ -325,24 +341,35 @@ pub trait Flow: HasBaseFlow + fmt::Debug + Sync + Send + 'static {
overflow.scroll.size.height = border_box.size.height; overflow.scroll.size.height = border_box.size.height;
} }
if !self.as_block().fragment.establishes_stacking_context() || if !self.as_block().fragment.establishes_stacking_context() || self
self.as_block().fragment.style.get_box().transform.0.is_empty() { .as_block()
.fragment
.style
.get_box()
.transform
.0
.is_empty()
{
overflow.translate(&position.origin.to_vector()); overflow.translate(&position.origin.to_vector());
return overflow; return overflow;
} }
// TODO: Take into account 3d transforms, even though it's a fairly // TODO: Take into account 3d transforms, even though it's a fairly
// uncommon case. // uncommon case.
let transform_2d = self.as_block() let transform_2d = self
.as_block()
.fragment .fragment
.transform_matrix(&position) .transform_matrix(&position)
.unwrap_or(LayoutTransform::identity()) .unwrap_or(LayoutTransform::identity())
.to_2d().to_untyped(); .to_2d()
.to_untyped();
let transformed_overflow = Overflow { let transformed_overflow = Overflow {
paint: f32_rect_to_au_rect(transform_2d.transform_rect( paint: f32_rect_to_au_rect(
&au_rect_to_f32_rect(overflow.paint))), transform_2d.transform_rect(&au_rect_to_f32_rect(overflow.paint)),
scroll: f32_rect_to_au_rect(transform_2d.transform_rect( ),
&au_rect_to_f32_rect(overflow.scroll))), scroll: f32_rect_to_au_rect(
transform_2d.transform_rect(&au_rect_to_f32_rect(overflow.scroll)),
),
}; };
// TODO: We are taking the union of the overflow and transformed overflow here, which // TODO: We are taking the union of the overflow and transformed overflow here, which
@ -369,14 +396,12 @@ pub trait Flow: HasBaseFlow + fmt::Debug + Sync + Send + 'static {
// Calculate overflow on a per-fragment basis. // Calculate overflow on a per-fragment basis.
let mut overflow = self.compute_overflow(); let mut overflow = self.compute_overflow();
match self.class() { match self.class() {
FlowClass::Block | FlowClass::Block | FlowClass::TableCaption | FlowClass::TableCell => {
FlowClass::TableCaption |
FlowClass::TableCell => {
for kid in self.mut_base().children.iter_mut() { for kid in self.mut_base().children.iter_mut() {
overflow.union(&kid.get_overflow_in_parent_coordinates()); overflow.union(&kid.get_overflow_in_parent_coordinates());
} }
} },
_ => {} _ => {},
} }
self.mut_base().overflow = overflow self.mut_base().overflow = overflow
} }
@ -396,17 +421,21 @@ pub trait Flow: HasBaseFlow + fmt::Debug + Sync + Send + 'static {
/// Iterates through border boxes of all of this flow's fragments. /// Iterates through border boxes of all of this flow's fragments.
/// Level provides a zero based index indicating the current /// Level provides a zero based index indicating the current
/// depth of the flow tree during fragment iteration. /// depth of the flow tree during fragment iteration.
fn iterate_through_fragment_border_boxes(&self, fn iterate_through_fragment_border_boxes(
&self,
iterator: &mut FragmentBorderBoxIterator, iterator: &mut FragmentBorderBoxIterator,
level: i32, level: i32,
stacking_context_position: &Point2D<Au>); stacking_context_position: &Point2D<Au>,
);
/// Mutably iterates through fragments in this flow. /// Mutably iterates through fragments in this flow.
fn mutate_fragments(&mut self, mutator: &mut FnMut(&mut Fragment)); fn mutate_fragments(&mut self, mutator: &mut FnMut(&mut Fragment));
fn compute_collapsible_block_start_margin(&mut self, fn compute_collapsible_block_start_margin(
&mut self,
_layout_context: &mut LayoutContext, _layout_context: &mut LayoutContext,
_margin_collapse_info: &mut MarginCollapseInfo) { _margin_collapse_info: &mut MarginCollapseInfo,
) {
// The default implementation is a no-op. // The default implementation is a no-op.
} }
@ -436,8 +465,10 @@ pub trait Flow: HasBaseFlow + fmt::Debug + Sync + Send + 'static {
} }
fn contains_positioned_fragments(&self) -> bool { fn contains_positioned_fragments(&self) -> bool {
self.contains_relatively_positioned_fragments() || self.contains_relatively_positioned_fragments() || self
self.base().flags.contains(FlowFlags::IS_ABSOLUTELY_POSITIONED) .base()
.flags
.contains(FlowFlags::IS_ABSOLUTELY_POSITIONED)
} }
fn contains_relatively_positioned_fragments(&self) -> bool { fn contains_relatively_positioned_fragments(&self) -> bool {
@ -476,7 +507,7 @@ pub trait Flow: HasBaseFlow + fmt::Debug + Sync + Send + 'static {
/// Print any extra children (such as fragments) contained in this Flow /// Print any extra children (such as fragments) contained in this Flow
/// for debugging purposes. Any items inserted into the tree will become /// for debugging purposes. Any items inserted into the tree will become
/// children of this flow. /// children of this flow.
fn print_extra_flow_children(&self, _: &mut PrintTree) { } fn print_extra_flow_children(&self, _: &mut PrintTree) {}
fn clipping_and_scrolling(&self) -> ClippingAndScrolling { fn clipping_and_scrolling(&self) -> ClippingAndScrolling {
match self.base().clipping_and_scrolling { match self.base().clipping_and_scrolling {
@ -566,8 +597,10 @@ pub trait MutableOwnedFlowUtils {
/// </span> /// </span>
/// </div> /// </div>
/// ``` /// ```
fn take_applicable_absolute_descendants(&mut self, fn take_applicable_absolute_descendants(
absolute_descendants: &mut AbsoluteDescendants); &mut self,
absolute_descendants: &mut AbsoluteDescendants,
);
} }
#[derive(Clone, Copy, Debug, PartialEq, Serialize)] #[derive(Clone, Copy, Debug, PartialEq, Serialize)]
@ -590,9 +623,15 @@ pub enum FlowClass {
impl FlowClass { impl FlowClass {
fn is_block_like(self) -> bool { fn is_block_like(self) -> bool {
match self { match self {
FlowClass::Block | FlowClass::ListItem | FlowClass::Table | FlowClass::TableRowGroup | FlowClass::Block |
FlowClass::TableRow | FlowClass::TableCaption | FlowClass::TableCell | FlowClass::ListItem |
FlowClass::TableWrapper | FlowClass::Flex => true, FlowClass::Table |
FlowClass::TableRowGroup |
FlowClass::TableRow |
FlowClass::TableCaption |
FlowClass::TableCell |
FlowClass::TableWrapper |
FlowClass::Flex => true,
_ => false, _ => false,
} }
} }
@ -776,7 +815,9 @@ pub struct AbsoluteDescendantIter<'a> {
impl<'a> Iterator for AbsoluteDescendantIter<'a> { impl<'a> Iterator for AbsoluteDescendantIter<'a> {
type Item = &'a mut Flow; type Item = &'a mut Flow;
fn next(&mut self) -> Option<&'a mut Flow> { fn next(&mut self) -> Option<&'a mut Flow> {
self.iter.next().map(|info| FlowRef::deref_mut(&mut info.flow)) self.iter
.next()
.map(|info| FlowRef::deref_mut(&mut info.flow))
} }
fn size_hint(&self) -> (usize, Option<usize>) { fn size_hint(&self) -> (usize, Option<usize>) {
@ -953,7 +994,8 @@ impl fmt::Debug for BaseFlow {
"".to_owned() "".to_owned()
}; };
write!(f, write!(
f,
"\nsc={:?}\ "\nsc={:?}\
\npos={:?}{}{}\ \npos={:?}{}{}\
\nfloatspec-in={:?}\ \nfloatspec-in={:?}\
@ -961,14 +1003,23 @@ impl fmt::Debug for BaseFlow {
\noverflow={:?}{}{}{}", \noverflow={:?}{}{}{}",
self.stacking_context_id, self.stacking_context_id,
self.position, self.position,
            if self.flags.contains(FlowFlags::FLOATS_LEFT) {
                "FL"
            } else {
                ""
            },
            if self.flags.contains(FlowFlags::FLOATS_RIGHT) {
                "FR"
            } else {
                ""
            },
self.speculated_float_placement_in, self.speculated_float_placement_in,
self.speculated_float_placement_out, self.speculated_float_placement_out,
self.overflow, self.overflow,
child_count_string, child_count_string,
absolute_descendants_string, absolute_descendants_string,
damage_string) damage_string
)
} }
} }
@ -976,7 +1027,10 @@ impl Serialize for BaseFlow {
fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
let mut serializer = serializer.serialize_struct("base", 5)?; let mut serializer = serializer.serialize_struct("base", 5)?;
serializer.serialize_field("id", &self.debug_id())?; serializer.serialize_field("id", &self.debug_id())?;
serializer.serialize_field("stacking_relative_position", &self.stacking_relative_position)?; serializer.serialize_field(
"stacking_relative_position",
&self.stacking_relative_position,
)?;
serializer.serialize_field("intrinsic_inline_sizes", &self.intrinsic_inline_sizes)?; serializer.serialize_field("intrinsic_inline_sizes", &self.intrinsic_inline_sizes)?;
serializer.serialize_field("position", &self.position)?; serializer.serialize_field("position", &self.position)?;
serializer.serialize_field("children", &self.children)?; serializer.serialize_field("children", &self.children)?;
@ -996,10 +1050,11 @@ pub enum ForceNonfloatedFlag {
impl BaseFlow { impl BaseFlow {
#[inline] #[inline]
pub fn new(style: Option<&ComputedValues>, pub fn new(
style: Option<&ComputedValues>,
writing_mode: WritingMode, writing_mode: WritingMode,
force_nonfloated: ForceNonfloatedFlag) force_nonfloated: ForceNonfloatedFlag,
-> BaseFlow { ) -> BaseFlow {
let mut flags = FlowFlags::empty(); let mut flags = FlowFlags::empty();
match style { match style {
Some(style) => { Some(style) => {
@ -1013,41 +1068,47 @@ impl BaseFlow {
let logical_position = style.logical_position(); let logical_position = style.logical_position();
if logical_position.inline_start == LengthOrPercentageOrAuto::Auto && if logical_position.inline_start == LengthOrPercentageOrAuto::Auto &&
logical_position.inline_end == LengthOrPercentageOrAuto::Auto { logical_position.inline_end == LengthOrPercentageOrAuto::Auto
{
flags.insert(FlowFlags::INLINE_POSITION_IS_STATIC); flags.insert(FlowFlags::INLINE_POSITION_IS_STATIC);
} }
if logical_position.block_start == LengthOrPercentageOrAuto::Auto && if logical_position.block_start == LengthOrPercentageOrAuto::Auto &&
logical_position.block_end == LengthOrPercentageOrAuto::Auto { logical_position.block_end == LengthOrPercentageOrAuto::Auto
{
flags.insert(FlowFlags::BLOCK_POSITION_IS_STATIC); flags.insert(FlowFlags::BLOCK_POSITION_IS_STATIC);
} }
} },
_ => flags.insert(FlowFlags::BLOCK_POSITION_IS_STATIC | FlowFlags::INLINE_POSITION_IS_STATIC), _ => flags.insert(
FlowFlags::BLOCK_POSITION_IS_STATIC | FlowFlags::INLINE_POSITION_IS_STATIC,
),
} }
if force_nonfloated == ForceNonfloatedFlag::FloatIfNecessary { if force_nonfloated == ForceNonfloatedFlag::FloatIfNecessary {
match style.get_box().float { match style.get_box().float {
Float::None => {} Float::None => {},
Float::Left => flags.insert(FlowFlags::FLOATS_LEFT), Float::Left => flags.insert(FlowFlags::FLOATS_LEFT),
Float::Right => flags.insert(FlowFlags::FLOATS_RIGHT), Float::Right => flags.insert(FlowFlags::FLOATS_RIGHT),
} }
} }
match style.get_box().clear { match style.get_box().clear {
Clear::None => {} Clear::None => {},
Clear::Left => flags.insert(FlowFlags::CLEARS_LEFT), Clear::Left => flags.insert(FlowFlags::CLEARS_LEFT),
Clear::Right => flags.insert(FlowFlags::CLEARS_RIGHT), Clear::Right => flags.insert(FlowFlags::CLEARS_RIGHT),
Clear::Both => { Clear::Both => {
flags.insert(FlowFlags::CLEARS_LEFT); flags.insert(FlowFlags::CLEARS_LEFT);
flags.insert(FlowFlags::CLEARS_RIGHT); flags.insert(FlowFlags::CLEARS_RIGHT);
} },
} }
if !style.get_counters().counter_reset.is_empty() || if !style.get_counters().counter_reset.is_empty() ||
!style.get_counters().counter_increment.is_empty() { !style.get_counters().counter_increment.is_empty()
{
flags.insert(FlowFlags::AFFECTS_COUNTERS) flags.insert(FlowFlags::AFFECTS_COUNTERS)
} }
} },
None => flags.insert(FlowFlags::BLOCK_POSITION_IS_STATIC | FlowFlags::INLINE_POSITION_IS_STATIC), None => flags
.insert(FlowFlags::BLOCK_POSITION_IS_STATIC | FlowFlags::INLINE_POSITION_IS_STATIC),
} }
// New flows start out as fully damaged. // New flows start out as fully damaged.
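`BaseFlow::new` above derives `FlowFlags` bits from the computed style: static-position bits when both offsets are `auto`, and float/clear bits from the box style. A condensed sketch of that derivation using the `bitflags` crate; the concrete flag values and the `Style` type here are illustrative, not Servo's definitions:

```rust
use bitflags::bitflags;

bitflags! {
    struct FlowFlags: u8 {
        const INLINE_POSITION_IS_STATIC = 1 << 0;
        const BLOCK_POSITION_IS_STATIC  = 1 << 1;
        const FLOATS_LEFT               = 1 << 2;
        const FLOATS_RIGHT              = 1 << 3;
        const CLEARS_LEFT               = 1 << 4;
        const CLEARS_RIGHT              = 1 << 5;
    }
}

#[derive(Clone, Copy)]
enum Float { None, Left, Right }

#[derive(Clone, Copy)]
enum Clear { None, Left, Right, Both }

struct Style {
    float: Float,
    clear: Clear,
}

fn flags_for_style(style: &Style) -> FlowFlags {
    let mut flags = FlowFlags::empty();
    match style.float {
        Float::None => {},
        Float::Left => flags.insert(FlowFlags::FLOATS_LEFT),
        Float::Right => flags.insert(FlowFlags::FLOATS_RIGHT),
    }
    match style.clear {
        Clear::None => {},
        Clear::Left => flags.insert(FlowFlags::CLEARS_LEFT),
        Clear::Right => flags.insert(FlowFlags::CLEARS_RIGHT),
        Clear::Both => {
            flags.insert(FlowFlags::CLEARS_LEFT);
            flags.insert(FlowFlags::CLEARS_RIGHT);
        },
    }
    flags
}

fn main() {
    let style = Style { float: Float::Left, clear: Clear::Both };
    let flags = flags_for_style(&style);
    assert!(flags.contains(FlowFlags::FLOATS_LEFT | FlowFlags::CLEARS_RIGHT));
}
```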
@ -1089,17 +1150,24 @@ impl BaseFlow {
pub fn update_flags_if_needed(&mut self, style: &ComputedValues) { pub fn update_flags_if_needed(&mut self, style: &ComputedValues) {
// For absolutely-positioned flows, changes to top/bottom/left/right can cause these flags // For absolutely-positioned flows, changes to top/bottom/left/right can cause these flags
// to get out of date: // to get out of date:
if self.restyle_damage.contains(ServoRestyleDamage::REFLOW_OUT_OF_FLOW) { if self
.restyle_damage
.contains(ServoRestyleDamage::REFLOW_OUT_OF_FLOW)
{
// Note: We don't need to check whether IS_ABSOLUTELY_POSITIONED has changed, because // Note: We don't need to check whether IS_ABSOLUTELY_POSITIONED has changed, because
// changes to the 'position' property trigger flow reconstruction. // changes to the 'position' property trigger flow reconstruction.
if self.flags.contains(FlowFlags::IS_ABSOLUTELY_POSITIONED) { if self.flags.contains(FlowFlags::IS_ABSOLUTELY_POSITIONED) {
let logical_position = style.logical_position(); let logical_position = style.logical_position();
self.flags.set(FlowFlags::INLINE_POSITION_IS_STATIC, self.flags.set(
FlowFlags::INLINE_POSITION_IS_STATIC,
logical_position.inline_start == LengthOrPercentageOrAuto::Auto && logical_position.inline_start == LengthOrPercentageOrAuto::Auto &&
logical_position.inline_end == LengthOrPercentageOrAuto::Auto); logical_position.inline_end == LengthOrPercentageOrAuto::Auto,
self.flags.set(FlowFlags::BLOCK_POSITION_IS_STATIC, );
self.flags.set(
FlowFlags::BLOCK_POSITION_IS_STATIC,
logical_position.block_start == LengthOrPercentageOrAuto::Auto && logical_position.block_start == LengthOrPercentageOrAuto::Auto &&
logical_position.block_end == LengthOrPercentageOrAuto::Auto); logical_position.block_end == LengthOrPercentageOrAuto::Auto,
);
} }
} }
} }
@ -1108,8 +1176,10 @@ impl BaseFlow {
pub fn clone_with_children(&self, children: FlowList) -> BaseFlow { pub fn clone_with_children(&self, children: FlowList) -> BaseFlow {
BaseFlow { BaseFlow {
children: children, children: children,
restyle_damage: self.restyle_damage | ServoRestyleDamage::REPAINT | restyle_damage: self.restyle_damage |
ServoRestyleDamage::REFLOW_OUT_OF_FLOW | ServoRestyleDamage::REFLOW, ServoRestyleDamage::REPAINT |
ServoRestyleDamage::REFLOW_OUT_OF_FLOW |
ServoRestyleDamage::REFLOW,
parallel: FlowParallelInfo::new(), parallel: FlowParallelInfo::new(),
floats: self.floats.clone(), floats: self.floats.clone(),
abs_descendants: self.abs_descendants.clone(), abs_descendants: self.abs_descendants.clone(),
@ -1138,8 +1208,10 @@ impl BaseFlow {
return self as *const BaseFlow as usize; return self as *const BaseFlow as usize;
} }
pub fn collect_stacking_contexts_for_children(&mut self, pub fn collect_stacking_contexts_for_children(
state: &mut StackingContextCollectionState) { &mut self,
state: &mut StackingContextCollectionState,
) {
for kid in self.children.iter_mut() { for kid in self.children.iter_mut() {
kid.collect_stacking_contexts(state); kid.collect_stacking_contexts(state);
} }
@ -1157,15 +1229,17 @@ impl BaseFlow {
self.speculated_float_placement_out.right > Au(0) self.speculated_float_placement_out.right > Au(0)
} }
/// Compute the fragment position relative to the parent stacking context. If the fragment /// Compute the fragment position relative to the parent stacking context. If the fragment
/// itself establishes a stacking context, then the origin of its position will be (0, 0) /// itself establishes a stacking context, then the origin of its position will be (0, 0)
/// for the purposes of this computation. /// for the purposes of this computation.
pub fn stacking_relative_border_box_for_display_list(&self, fragment: &Fragment) -> Rect<Au> { pub fn stacking_relative_border_box_for_display_list(&self, fragment: &Fragment) -> Rect<Au> {
fragment.stacking_relative_border_box( fragment.stacking_relative_border_box(
&self.stacking_relative_position, &self.stacking_relative_position,
&self.early_absolute_position_info.relative_containing_block_size, &self
self.early_absolute_position_info.relative_containing_block_mode, .early_absolute_position_info
.relative_containing_block_size,
self.early_absolute_position_info
.relative_containing_block_mode,
CoordinateSystem::Own, CoordinateSystem::Own,
) )
} }
@ -1182,8 +1256,10 @@ impl<'a> ImmutableFlowUtils for &'a Flow {
/// table-column-group flow, or table-caption flow. /// table-column-group flow, or table-caption flow.
fn is_proper_table_child(self) -> bool { fn is_proper_table_child(self) -> bool {
match self.class() { match self.class() {
FlowClass::TableRow | FlowClass::TableRowGroup | FlowClass::TableRow |
FlowClass::TableColGroup | FlowClass::TableCaption => true, FlowClass::TableRowGroup |
FlowClass::TableColGroup |
FlowClass::TableCaption => true,
_ => false, _ => false,
} }
} }
@ -1239,9 +1315,13 @@ impl<'a> ImmutableFlowUtils for &'a Flow {
/// Returns true if this flow is one of table-related flows. /// Returns true if this flow is one of table-related flows.
fn is_table_kind(self) -> bool { fn is_table_kind(self) -> bool {
match self.class() { match self.class() {
FlowClass::TableWrapper | FlowClass::Table | FlowClass::TableWrapper |
FlowClass::TableColGroup | FlowClass::TableRowGroup | FlowClass::Table |
FlowClass::TableRow | FlowClass::TableCaption | FlowClass::TableCell => true, FlowClass::TableColGroup |
FlowClass::TableRowGroup |
FlowClass::TableRow |
FlowClass::TableCaption |
FlowClass::TableCell => true,
_ => false, _ => false,
} }
} }
@ -1268,7 +1348,7 @@ impl<'a> ImmutableFlowUtils for &'a Flow {
FlowClass::Block | FlowClass::TableCaption | FlowClass::TableCell => { FlowClass::Block | FlowClass::TableCaption | FlowClass::TableCell => {
// FIXME: Actually check the type of the node // FIXME: Actually check the type of the node
self.child_count() != 0 self.child_count() != 0
} },
_ => false, _ => false,
} }
} }
@ -1307,13 +1387,13 @@ impl<'a> ImmutableFlowUtils for &'a Flow {
fn floats_might_flow_through(self) -> bool { fn floats_might_flow_through(self) -> bool {
if !self.base().might_have_floats_in() && !self.base().might_have_floats_out() { if !self.base().might_have_floats_in() && !self.base().might_have_floats_out() {
return false return false;
} }
if self.is_root() { if self.is_root() {
return false return false;
} }
if !self.is_block_like() { if !self.is_block_like() {
return true return true;
} }
self.as_block().formatting_context_type() == FormattingContextType::None self.as_block().formatting_context_type() == FormattingContextType::None
} }
@ -1322,12 +1402,16 @@ impl<'a> ImmutableFlowUtils for &'a Flow {
for kid in self.base().children.iter().rev() { for kid in self.base().children.iter().rev() {
if kid.is_inline_flow() { if kid.is_inline_flow() {
if let Some(baseline_offset) = kid.as_inline().baseline_offset_of_last_line() { if let Some(baseline_offset) = kid.as_inline().baseline_offset_of_last_line() {
return Some(kid.base().position.start.b + baseline_offset) return Some(kid.base().position.start.b + baseline_offset);
} }
} }
if kid.is_block_like() && !kid.base().flags.contains(FlowFlags::IS_ABSOLUTELY_POSITIONED) { if kid.is_block_like() && !kid
.base()
.flags
.contains(FlowFlags::IS_ABSOLUTELY_POSITIONED)
{
if let Some(baseline_offset) = kid.baseline_offset_of_last_line_box_in_flow() { if let Some(baseline_offset) = kid.baseline_offset_of_last_line_box_in_flow() {
return Some(kid.base().position.start.b + baseline_offset) return Some(kid.base().position.start.b + baseline_offset);
} }
} }
} }
@ -1374,17 +1458,19 @@ impl MutableOwnedFlowUtils for FlowRef {
/// </span> /// </span>
/// </div> /// </div>
/// ``` /// ```
fn take_applicable_absolute_descendants(&mut self, fn take_applicable_absolute_descendants(
absolute_descendants: &mut AbsoluteDescendants) { &mut self,
absolute_descendants: &mut AbsoluteDescendants,
) {
let mut applicable_absolute_descendants = AbsoluteDescendants::new(); let mut applicable_absolute_descendants = AbsoluteDescendants::new();
for absolute_descendant in absolute_descendants.descendant_links.iter() { for absolute_descendant in absolute_descendants.descendant_links.iter() {
if absolute_descendant.has_reached_containing_block { if absolute_descendant.has_reached_containing_block {
applicable_absolute_descendants.push(absolute_descendant.flow.clone()); applicable_absolute_descendants.push(absolute_descendant.flow.clone());
} }
} }
absolute_descendants.descendant_links.retain(|descendant| { absolute_descendants
!descendant.has_reached_containing_block .descendant_links
}); .retain(|descendant| !descendant.has_reached_containing_block);
let this = self.clone(); let this = self.clone();
let base = FlowRef::deref_mut(self).mut_base(); let base = FlowRef::deref_mut(self).mut_base();
@ -1412,9 +1498,7 @@ pub struct ContainingBlockLink {
impl ContainingBlockLink { impl ContainingBlockLink {
fn new() -> ContainingBlockLink { fn new() -> ContainingBlockLink {
ContainingBlockLink { ContainingBlockLink { link: None }
link: None,
}
} }
fn set(&mut self, link: FlowRef) { fn set(&mut self, link: FlowRef) {
@ -1424,34 +1508,38 @@ impl ContainingBlockLink {
#[inline] #[inline]
pub fn generated_containing_block_size(&self, for_flow: OpaqueFlow) -> LogicalSize<Au> { pub fn generated_containing_block_size(&self, for_flow: OpaqueFlow) -> LogicalSize<Au> {
match self.link { match self.link {
None => { None => panic!(
panic!("Link to containing block not established; perhaps you forgot to call \ "Link to containing block not established; perhaps you forgot to call \
`set_absolute_descendants`?") `set_absolute_descendants`?"
} ),
Some(ref link) => { Some(ref link) => {
let flow = link.upgrade().unwrap(); let flow = link.upgrade().unwrap();
flow.generated_containing_block_size(for_flow) flow.generated_containing_block_size(for_flow)
} },
} }
} }
#[inline] #[inline]
pub fn explicit_block_containing_size(&self, shared_context: &SharedStyleContext) -> Option<Au> { pub fn explicit_block_containing_size(
&self,
shared_context: &SharedStyleContext,
) -> Option<Au> {
match self.link { match self.link {
None => { None => panic!(
panic!("Link to containing block not established; perhaps you forgot to call \ "Link to containing block not established; perhaps you forgot to call \
`set_absolute_descendants`?") `set_absolute_descendants`?"
} ),
Some(ref link) => { Some(ref link) => {
let flow = link.upgrade().unwrap(); let flow = link.upgrade().unwrap();
if flow.is_block_like() { if flow.is_block_like() {
flow.as_block().explicit_block_containing_size(shared_context) flow.as_block()
.explicit_block_containing_size(shared_context)
} else if flow.is_inline_flow() { } else if flow.is_inline_flow() {
Some(flow.as_inline().minimum_line_metrics.space_above_baseline) Some(flow.as_inline().minimum_line_metrics.space_above_baseline)
} else { } else {
None None
} }
} },
} }
} }
} }
@ -38,10 +38,13 @@ impl Serialize for FlowList {
FlowClass::TableRow => to_value(f.as_table_row()).unwrap(), FlowClass::TableRow => to_value(f.as_table_row()).unwrap(),
FlowClass::TableCell => to_value(f.as_table_cell()).unwrap(), FlowClass::TableCell => to_value(f.as_table_cell()).unwrap(),
FlowClass::Flex => to_value(f.as_flex()).unwrap(), FlowClass::Flex => to_value(f.as_flex()).unwrap(),
FlowClass::ListItem | FlowClass::TableColGroup | FlowClass::TableCaption | FlowClass::ListItem |
FlowClass::Multicol | FlowClass::MulticolColumn => { FlowClass::TableColGroup |
FlowClass::TableCaption |
FlowClass::Multicol |
FlowClass::MulticolColumn => {
Value::Null // Not implemented yet Value::Null // Not implemented yet
} },
}; };
flow_val.insert("data".to_owned(), data); flow_val.insert("data".to_owned(), data);
serializer.serialize_element(&flow_val)?; serializer.serialize_element(&flow_val)?;
@ -152,7 +155,7 @@ impl FlowList {
#[inline] #[inline]
pub fn split_off(&mut self, i: usize) -> Self { pub fn split_off(&mut self, i: usize) -> Self {
FlowList { FlowList {
flows: self.flows.split_off(i) flows: self.flows.split_off(i),
} }
} }
} }
@ -8,7 +8,6 @@
//! be superfluous. This design is largely duplicating logic of Arc<T> and //! be superfluous. This design is largely duplicating logic of Arc<T> and
//! Weak<T>; please see comments there for details. //! Weak<T>; please see comments there for details.
use flow::Flow; use flow::Flow;
use std::ops::Deref; use std::ops::Deref;
use std::sync::{Arc, Weak}; use std::sync::{Arc, Weak};
@ -63,4 +62,3 @@ impl WeakFlowRef {
self.0.upgrade().map(FlowRef) self.0.upgrade().map(FlowRef)
} }
} }
File diff suppressed because it is too large.
@ -25,73 +25,103 @@ use text::TextRunScanner;
use traversal::InorderFlowTraversal; use traversal::InorderFlowTraversal;
// Decimal styles per CSS-COUNTER-STYLES § 6.1: // Decimal styles per CSS-COUNTER-STYLES § 6.1:
static DECIMAL: [char; 10] = [ '0', '1', '2', '3', '4', '5', '6', '7', '8', '9' ]; static DECIMAL: [char; 10] = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9'];
// TODO(pcwalton): `decimal-leading-zero` // TODO(pcwalton): `decimal-leading-zero`
static ARABIC_INDIC: [char; 10] = [ '٠', '١', '٢', '٣', '٤', '٥', '٦', '٧', '٨', '٩' ]; static ARABIC_INDIC: [char; 10] = ['٠', '١', '٢', '٣', '٤', '٥', '٦', '٧', '٨', '٩'];
// TODO(pcwalton): `armenian`, `upper-armenian`, `lower-armenian` // TODO(pcwalton): `armenian`, `upper-armenian`, `lower-armenian`
static BENGALI: [char; 10] = [ '', '১', '২', '৩', '', '৫', '৬', '', '৮', '৯' ]; static BENGALI: [char; 10] = [
static CAMBODIAN: [char; 10] = [ '០', '១', '២', '៣', '៤', '៥', '៦', '៧', '៨', '៩' ]; '', '১', '২', '৩', '', '৫', '৬', '', '৮', '৯',
];
static CAMBODIAN: [char; 10] = [
'០', '១', '២', '៣', '៤', '៥', '៦', '៧', '៨', '៩',
];
// TODO(pcwalton): Suffix for CJK decimal. // TODO(pcwalton): Suffix for CJK decimal.
static CJK_DECIMAL: [char; 10] = [ '', '一', '二', '三', '四', '五', '六', '七', '八', '九' ]; static CJK_DECIMAL: [char; 10] = [
static DEVANAGARI: [char; 10] = [ '', '१', '२', '३', '४', '५', '६', '७', '८', '९' ]; '', '一', '二', '三', '四', '五', '六', '七', '八', '九',
];
static DEVANAGARI: [char; 10] = [
'', '१', '२', '३', '४', '५', '६', '७', '८', '९',
];
// TODO(pcwalton): `georgian` // TODO(pcwalton): `georgian`
static GUJARATI: [char; 10] = ['', '૧', '૨', '૩', '૪', '૫', '૬', '૭', '૮', '૯']; static GUJARATI: [char; 10] = [
static GURMUKHI: [char; 10] = ['', '', '੨', '੩', '', '੫', '੬', '੭', '੮', '੯']; '', '૧', '૨', '૩', '૪', '૫', '૬', '૭', '૮', '૯',
];
static GURMUKHI: [char; 10] = [
'', '', '੨', '੩', '', '੫', '੬', '੭', '੮', '੯',
];
// TODO(pcwalton): `hebrew` // TODO(pcwalton): `hebrew`
static KANNADA: [char; 10] = ['', '೧', '೨', '೩', '೪', '೫', '೬', '೭', '೮', '೯']; static KANNADA: [char; 10] = [
static LAO: [char; 10] = ['', '໑', '໒', '໓', '໔', '໕', '໖', '໗', '໘', '໙']; '', '೧', '೨', '೩', '೪', '೫', '೬', '೭', '೮', '೯',
static MALAYALAM: [char; 10] = ['', '൧', '൨', '൩', '൪', '൫', '൬', '', '൮', '൯']; ];
static MONGOLIAN: [char; 10] = ['᠐', '᠑', '᠒', '᠓', '᠔', '᠕', '᠖', '᠗', '᠘', '᠙']; static LAO: [char; 10] = [
static MYANMAR: [char; 10] = ['', '၁', '၂', '၃', '၄', '၅', '၆', '၇', '၈', '၉']; '', '໑', '໒', '໓', '໔', '໕', '໖', '໗', '໘', '໙',
static ORIYA: [char; 10] = ['', '୧', '', '୩', '୪', '୫', '୬', '୭', '୮', '୯']; ];
static MALAYALAM: [char; 10] = [
'', '൧', '൨', '൩', '൪', '൫', '൬', '', '൮', '൯',
];
static MONGOLIAN: [char; 10] = [
'᠐', '᠑', '᠒', '᠓', '᠔', '᠕', '᠖', '᠗', '᠘', '᠙',
];
static MYANMAR: [char; 10] = [
'', '၁', '၂', '၃', '၄', '၅', '၆', '၇', '၈', '၉',
];
static ORIYA: [char; 10] = [
'', '୧', '', '୩', '୪', '୫', '୬', '୭', '୮', '୯',
];
static PERSIAN: [char; 10] = ['۰', '۱', '۲', '۳', '۴', '۵', '۶', '۷', '۸', '۹']; static PERSIAN: [char; 10] = ['۰', '۱', '۲', '۳', '۴', '۵', '۶', '۷', '۸', '۹'];
// TODO(pcwalton): `lower-roman`, `upper-roman` // TODO(pcwalton): `lower-roman`, `upper-roman`
static TELUGU: [char; 10] = ['', '౧', '౨', '౩', '౪', '౫', '౬', '౭', '౮', '౯']; static TELUGU: [char; 10] = [
static THAI: [char; 10] = ['', '๑', '๒', '๓', '๔', '๕', '๖', '๗', '๘', '๙']; '', '౧', '౨', '౩', '౪', '౫', '౬', '౭', '౮', '౯',
static TIBETAN: [char; 10] = ['༠', '༡', '༢', '༣', '༤', '༥', '༦', '༧', '༨', '༩']; ];
static THAI: [char; 10] = [
'', '๑', '๒', '๓', '๔', '๕', '๖', '๗', '๘', '๙',
];
static TIBETAN: [char; 10] = [
'༠', '༡', '༢', '༣', '༤', '༥', '༦', '༧', '༨', '༩',
];
// Alphabetic styles per CSS-COUNTER-STYLES § 6.2: // Alphabetic styles per CSS-COUNTER-STYLES § 6.2:
static LOWER_ALPHA: [char; 26] = [ static LOWER_ALPHA: [char; 26] = [
'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's',
't', 'u', 'v', 'w', 'x', 'y', 'z' 't', 'u', 'v', 'w', 'x', 'y', 'z',
]; ];
static UPPER_ALPHA: [char; 26] = [ static UPPER_ALPHA: [char; 26] = [
'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S',
'T', 'U', 'V', 'W', 'X', 'Y', 'Z' 'T', 'U', 'V', 'W', 'X', 'Y', 'Z',
]; ];
static CJK_EARTHLY_BRANCH: [char; 12] = [ static CJK_EARTHLY_BRANCH: [char; 12] = [
'子', '丑', '寅', '卯', '辰', '巳', '午', '未', '申', '酉', '戌', '亥' '子', '丑', '寅', '卯', '辰', '巳', '午', '未', '申', '酉', '戌', '亥',
]; ];
static CJK_HEAVENLY_STEM: [char; 10] = [ static CJK_HEAVENLY_STEM: [char; 10] = [
'甲', '乙', '丙', '丁', '戊', '己', '庚', '辛', '壬', '癸' '甲', '乙', '丙', '丁', '戊', '己', '庚', '辛', '壬', '癸',
]; ];
static LOWER_GREEK: [char; 24] = [ static LOWER_GREEK: [char; 24] = [
'α', 'β', 'γ', 'δ', 'ε', 'ζ', 'η', 'θ', 'ι', 'κ', 'λ', 'μ', 'ν', 'ξ', 'ο', 'π', 'ρ', 'σ', 'τ', 'α', 'β', 'γ', 'δ', 'ε', 'ζ', 'η', 'θ', 'ι', 'κ', 'λ', 'μ', 'ν', 'ξ', 'ο', 'π',
'υ', 'φ', 'χ', 'ψ', 'ω' 'ρ', 'σ', 'τ', 'υ', 'φ', 'χ', 'ψ', 'ω',
]; ];
static HIRAGANA: [char; 48] = [ static HIRAGANA: [char; 48] = [
'あ', 'い', 'う', 'え', 'お', 'か', 'き', 'く', 'け', 'こ', 'さ', 'し', 'す', 'せ', 'そ', 'あ', 'い', 'う', 'え', 'お', 'か', 'き', 'く', 'け', 'こ', 'さ', 'し', 'す',
'た', 'ち', 'つ', 'て', 'と', 'な', 'に', 'ぬ', 'ね', 'の', 'は', 'ひ', 'ふ', 'へ', 'ほ', 'せ', 'そ', 'た', 'ち', 'つ', 'て', 'と', 'な', 'に', 'ぬ', 'ね', 'の', 'は',
'ま', 'み', 'む', 'め', 'も', 'や', 'ゆ', 'よ', 'ら', 'り', 'る', 'れ', 'ろ', 'ひ', 'ふ', 'へ', 'ほ', 'ま', 'み', 'む', 'め', 'も', 'や', 'ゆ', 'よ', 'ら',
'わ', 'ゐ', 'ゑ', 'を', 'ん' 'り', 'る', 'れ', 'ろ', 'わ', 'ゐ', 'ゑ', 'を', 'ん',
]; ];
static HIRAGANA_IROHA: [char; 47] = [ static HIRAGANA_IROHA: [char; 47] = [
'い', 'ろ', 'は', 'に', 'ほ', 'へ', 'と', 'ち', 'り', 'ぬ', 'る', 'を', 'わ', 'か', 'よ', 'い', 'ろ', 'は', 'に', 'ほ', 'へ', 'と', 'ち', 'り', 'ぬ', 'る', 'を', 'わ',
'た', 'れ', 'そ', 'つ', 'ね', 'な', 'ら', 'む', 'う', 'ゐ', 'の', 'お', 'く', 'や', 'ま', 'か', 'よ', 'た', 'れ', 'そ', 'つ', 'ね', 'な', 'ら', 'む', 'う', 'ゐ', 'の',
'け', 'ふ', 'こ', 'え', 'て', 'あ', 'さ', 'き', 'ゆ', 'め', 'み', 'し', 'ゑ', 'お', 'く', 'や', 'ま', 'け', 'ふ', 'こ', 'え', 'て', 'あ', 'さ', 'き', 'ゆ',
'ひ', 'も', 'せ', 'す' 'め', 'み', 'し', 'ゑ', 'ひ', 'も', 'せ', 'す',
]; ];
static KATAKANA: [char; 48] = [ static KATAKANA: [char; 48] = [
'ア', 'イ', 'ウ', 'エ', 'オ', 'カ', 'キ', 'ク', 'ケ', 'コ', 'サ', 'シ', 'ス', 'セ', 'ソ', 'ア', 'イ', 'ウ', 'エ', 'オ', 'カ', 'キ', 'ク', 'ケ', 'コ', 'サ', 'シ', 'ス',
'タ', 'チ', 'ツ', 'テ', 'ト', 'ナ', 'ニ', 'ヌ', 'ネ', '', 'ハ', 'ヒ', 'フ', 'ヘ', 'ホ', 'セ', 'ソ', 'タ', 'チ', 'ツ', 'テ', 'ト', 'ナ', 'ニ', 'ヌ', 'ネ', '', 'ハ',
'マ', 'ミ', 'ム', 'メ', 'モ', 'ヤ', 'ユ', 'ヨ', 'ラ', 'リ', 'ル', 'レ', 'ロ', 'ヒ', 'フ', 'ヘ', 'ホ', 'マ', 'ミ', 'ム', 'メ', 'モ', 'ヤ', 'ユ', 'ヨ', 'ラ',
'ワ', 'ヰ', 'ヱ', 'ヲ', 'ン' 'リ', 'ル', 'レ', 'ロ', 'ワ', 'ヰ', 'ヱ', 'ヲ', 'ン',
]; ];
static KATAKANA_IROHA: [char; 47] = [ static KATAKANA_IROHA: [char; 47] = [
'イ', 'ロ', 'ハ', 'ニ', 'ホ', 'ヘ', 'ト', 'チ', 'リ', 'ヌ', 'ル', 'ヲ', 'ワ', 'カ', 'ヨ', 'イ', 'ロ', 'ハ', 'ニ', 'ホ', 'ヘ', 'ト', 'チ', 'リ', 'ヌ', 'ル', 'ヲ', 'ワ',
'タ', 'レ', 'ソ', 'ツ', 'ネ', 'ナ', 'ラ', 'ム', 'ウ', 'ヰ', '', 'オ', 'ク', 'ヤ', 'マ', 'カ', 'ヨ', 'タ', 'レ', 'ソ', 'ツ', 'ネ', 'ナ', 'ラ', 'ム', 'ウ', 'ヰ', '',
'ケ', 'フ', 'コ', 'エ', 'テ', 'ア', 'サ', 'キ', 'ユ', 'メ', 'ミ', 'シ', 'ヱ', 'オ', 'ク', 'ヤ', 'マ', 'ケ', 'フ', 'コ', 'エ', 'テ', 'ア', 'サ', 'キ', 'ユ',
'ヒ', 'モ', 'セ', 'ス' 'メ', 'ミ', 'シ', 'ヱ', 'ヒ', 'モ', 'セ', 'ス',
]; ];
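These symbol tables drive the alphabetic algorithm of CSS Counter Styles § 6.2, which is bijective base-N over the table (there is no zero symbol, so 26 maps to "z" and 27 to "aa" for lower-alpha). A standalone sketch of what `push_alphabetic_representation` computes, under that spec reading:

```rust
static LOWER_ALPHA: [char; 26] = [
    'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's',
    't', 'u', 'v', 'w', 'x', 'y', 'z',
];

/// Alphabetic counter representation (CSS Counter Styles § 6.2): bijective
/// base-N over the symbol table, so 1 => "a", 26 => "z", 27 => "aa", ...
fn alphabetic_representation(mut value: i32, system: &[char]) -> String {
    let n = system.len() as i32;
    let mut reversed = Vec::new();
    while value > 0 {
        value -= 1;
        reversed.push(system[(value % n) as usize]);
        value /= n;
    }
    reversed.iter().rev().collect()
}

fn main() {
    assert_eq!(alphabetic_representation(1, &LOWER_ALPHA), "a");
    assert_eq!(alphabetic_representation(26, &LOWER_ALPHA), "z");
    assert_eq!(alphabetic_representation(28, &LOWER_ALPHA), "ab");
}
```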
/// The generated content resolution traversal. /// The generated content resolution traversal.
@ -132,8 +162,12 @@ impl<'a> InorderFlowTraversal for ResolveGeneratedContent<'a> {
#[inline] #[inline]
fn should_process_subtree(&mut self, flow: &mut Flow) -> bool { fn should_process_subtree(&mut self, flow: &mut Flow) -> bool {
flow.base().restyle_damage.intersects(ServoRestyleDamage::RESOLVE_GENERATED_CONTENT) || flow.base()
flow.base().flags.intersects(FlowFlags::AFFECTS_COUNTERS | FlowFlags::HAS_COUNTER_AFFECTING_CHILDREN) .restyle_damage
.intersects(ServoRestyleDamage::RESOLVE_GENERATED_CONTENT) ||
flow.base().flags.intersects(
FlowFlags::AFFECTS_COUNTERS | FlowFlags::HAS_COUNTER_AFFECTING_CHILDREN,
)
} }
} }
@ -168,81 +202,97 @@ impl<'a, 'b> ResolveGeneratedContentFragmentMutator<'a, 'b> {
if let SpecificFragmentInfo::GeneratedContent(ref mut info) = fragment.specific { if let SpecificFragmentInfo::GeneratedContent(ref mut info) = fragment.specific {
info info
} else { } else {
return return;
}; };
match **info { match **info {
GeneratedContentInfo::ListItem => { GeneratedContentInfo::ListItem => {
new_info = self.traversal.list_item.render(self.traversal.layout_context, new_info = self.traversal.list_item.render(
self.traversal.layout_context,
fragment.node, fragment.node,
fragment.pseudo.clone(), fragment.pseudo.clone(),
fragment.style.clone(), fragment.style.clone(),
list_style_type, list_style_type,
RenderingMode::Suffix(".\u{00a0}")) RenderingMode::Suffix(".\u{00a0}"),
} )
},
GeneratedContentInfo::Empty | GeneratedContentInfo::Empty |
GeneratedContentInfo::ContentItem(ContentItem::String(_)) => { GeneratedContentInfo::ContentItem(ContentItem::String(_)) => {
// Nothing to do here. // Nothing to do here.
} },
GeneratedContentInfo::ContentItem(ContentItem::Counter(ref counter_name, GeneratedContentInfo::ContentItem(ContentItem::Counter(
counter_style)) => { ref counter_name,
counter_style,
)) => {
let temporary_counter = Counter::new(); let temporary_counter = Counter::new();
let counter = self.traversal let counter = self
.traversal
.counters .counters
.get(&*counter_name.0) .get(&*counter_name.0)
.unwrap_or(&temporary_counter); .unwrap_or(&temporary_counter);
new_info = counter.render(self.traversal.layout_context, new_info = counter.render(
self.traversal.layout_context,
fragment.node, fragment.node,
fragment.pseudo.clone(), fragment.pseudo.clone(),
fragment.style.clone(), fragment.style.clone(),
counter_style, counter_style,
RenderingMode::Plain) RenderingMode::Plain,
} )
GeneratedContentInfo::ContentItem(ContentItem::Counters(ref counter_name, },
GeneratedContentInfo::ContentItem(ContentItem::Counters(
ref counter_name,
ref separator, ref separator,
counter_style)) => { counter_style,
)) => {
let temporary_counter = Counter::new(); let temporary_counter = Counter::new();
let counter = self.traversal let counter = self
.traversal
.counters .counters
.get(&*counter_name.0) .get(&*counter_name.0)
.unwrap_or(&temporary_counter); .unwrap_or(&temporary_counter);
new_info = counter.render(self.traversal.layout_context, new_info = counter.render(
self.traversal.layout_context,
fragment.node, fragment.node,
fragment.pseudo, fragment.pseudo,
fragment.style.clone(), fragment.style.clone(),
counter_style, counter_style,
RenderingMode::All(&separator)); RenderingMode::All(&separator),
} );
},
GeneratedContentInfo::ContentItem(ContentItem::OpenQuote) => { GeneratedContentInfo::ContentItem(ContentItem::OpenQuote) => {
new_info = render_text(self.traversal.layout_context, new_info = render_text(
self.traversal.layout_context,
fragment.node, fragment.node,
fragment.pseudo, fragment.pseudo,
fragment.style.clone(), fragment.style.clone(),
self.quote(&*fragment.style, false)); self.quote(&*fragment.style, false),
);
self.traversal.quote += 1 self.traversal.quote += 1
} },
GeneratedContentInfo::ContentItem(ContentItem::CloseQuote) => { GeneratedContentInfo::ContentItem(ContentItem::CloseQuote) => {
if self.traversal.quote >= 1 { if self.traversal.quote >= 1 {
self.traversal.quote -= 1 self.traversal.quote -= 1
} }
new_info = render_text(self.traversal.layout_context, new_info = render_text(
self.traversal.layout_context,
fragment.node, fragment.node,
fragment.pseudo, fragment.pseudo,
fragment.style.clone(), fragment.style.clone(),
self.quote(&*fragment.style, true)); self.quote(&*fragment.style, true),
} );
},
GeneratedContentInfo::ContentItem(ContentItem::NoOpenQuote) => { GeneratedContentInfo::ContentItem(ContentItem::NoOpenQuote) => {
self.traversal.quote += 1 self.traversal.quote += 1
} },
GeneratedContentInfo::ContentItem(ContentItem::NoCloseQuote) => { GeneratedContentInfo::ContentItem(ContentItem::NoCloseQuote) => {
if self.traversal.quote >= 1 { if self.traversal.quote >= 1 {
self.traversal.quote -= 1 self.traversal.quote -= 1
} }
} },
GeneratedContentInfo::ContentItem(ContentItem::Url(..)) => { GeneratedContentInfo::ContentItem(ContentItem::Url(..)) => {
unreachable!("Servo doesn't parse content: url(..) yet") unreachable!("Servo doesn't parse content: url(..) yet")
} },
} }
}; };
@ -252,7 +302,7 @@ impl<'a, 'b> ResolveGeneratedContentFragmentMutator<'a, 'b> {
// so that it isn't processed again on the next layout. FIXME (mbrubeck): When // so that it isn't processed again on the next layout. FIXME (mbrubeck): When
// processing an inline flow, this traversal should be allowed to insert or remove // processing an inline flow, this traversal should be allowed to insert or remove
// fragments. Then we can just remove these fragments rather than adding placeholders. // fragments. Then we can just remove these fragments rather than adding placeholders.
None => SpecificFragmentInfo::GeneratedContent(Box::new(GeneratedContentInfo::Empty)) None => SpecificFragmentInfo::GeneratedContent(Box::new(GeneratedContentInfo::Empty)),
}; };
} }
@ -263,9 +313,12 @@ impl<'a, 'b> ResolveGeneratedContentFragmentMutator<'a, 'b> {
} }
match list_style_type { match list_style_type {
ListStyleType::Disc | ListStyleType::None | ListStyleType::Circle | ListStyleType::Disc |
ListStyleType::Square | ListStyleType::DisclosureOpen | ListStyleType::None |
ListStyleType::DisclosureClosed => {} ListStyleType::Circle |
ListStyleType::Square |
ListStyleType::DisclosureOpen |
ListStyleType::DisclosureClosed => {},
_ => self.traversal.list_item.increment(self.level, 1), _ => self.traversal.list_item.increment(self.level, 1),
} }
@ -279,24 +332,28 @@ impl<'a, 'b> ResolveGeneratedContentFragmentMutator<'a, 'b> {
let counter_name = &*pair.name.0; let counter_name = &*pair.name.0;
if let Some(ref mut counter) = self.traversal.counters.get_mut(counter_name) { if let Some(ref mut counter) = self.traversal.counters.get_mut(counter_name) {
counter.reset(self.level, pair.value); counter.reset(self.level, pair.value);
continue continue;
} }
let mut counter = Counter::new(); let mut counter = Counter::new();
counter.reset(self.level, pair.value); counter.reset(self.level, pair.value);
self.traversal.counters.insert(counter_name.to_owned(), counter); self.traversal
.counters
.insert(counter_name.to_owned(), counter);
} }
for pair in &*fragment.style().get_counters().counter_increment { for pair in &*fragment.style().get_counters().counter_increment {
let counter_name = &*pair.name.0; let counter_name = &*pair.name.0;
if let Some(ref mut counter) = self.traversal.counters.get_mut(counter_name) { if let Some(ref mut counter) = self.traversal.counters.get_mut(counter_name) {
counter.increment(self.level, pair.value); counter.increment(self.level, pair.value);
continue continue;
} }
let mut counter = Counter::new(); let mut counter = Counter::new();
counter.increment(self.level, pair.value); counter.increment(self.level, pair.value);
self.traversal.counters.insert(counter_name.to_owned(), counter); self.traversal
.counters
.insert(counter_name.to_owned(), counter);
} }
self.incremented = true self.incremented = true
@ -305,10 +362,10 @@ impl<'a, 'b> ResolveGeneratedContentFragmentMutator<'a, 'b> {
fn quote(&self, style: &ComputedValues, close: bool) -> String { fn quote(&self, style: &ComputedValues, close: bool) -> String {
let quotes = &style.get_list().quotes; let quotes = &style.get_list().quotes;
if quotes.0.is_empty() { if quotes.0.is_empty() {
return String::new() return String::new();
} }
let &(ref open_quote, ref close_quote) = let &(ref open_quote, ref close_quote) = if self.traversal.quote as usize >= quotes.0.len()
if self.traversal.quote as usize >= quotes.0.len() { {
quotes.0.last().unwrap() quotes.0.last().unwrap()
} else { } else {
&quotes.0[self.traversal.quote as usize] &quotes.0[self.traversal.quote as usize]
@ -329,9 +386,7 @@ struct Counter {
impl Counter { impl Counter {
fn new() -> Counter { fn new() -> Counter {
Counter { Counter { values: Vec::new() }
values: Vec::new(),
}
} }
fn reset(&mut self, level: u32, value: i32) { fn reset(&mut self, level: u32, value: i32) {
@ -339,7 +394,7 @@ impl Counter {
if let Some(ref mut existing_value) = self.values.last_mut() { if let Some(ref mut existing_value) = self.values.last_mut() {
if level == existing_value.level { if level == existing_value.level {
existing_value.value = value; existing_value.value = value;
return return;
} }
} }
@ -359,7 +414,7 @@ impl Counter {
fn increment(&mut self, level: u32, amount: i32) { fn increment(&mut self, level: u32, amount: i32) {
if let Some(ref mut value) = self.values.last_mut() { if let Some(ref mut value) = self.values.last_mut() {
value.value += amount; value.value += amount;
return return;
} }
self.values.push(CounterValue { self.values.push(CounterValue {
@ -368,14 +423,15 @@ impl Counter {
}) })
} }
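`Counter::reset` and `Counter::increment` above keep a small stack of per-nesting-level values, which is how CSS counter scoping works: `counter-reset` overwrites the innermost value at the same level or opens a new scope, while `counter-increment` bumps the innermost value. A simplified sketch:

```rust
#[derive(Debug, PartialEq)]
struct CounterValue {
    level: u32,
    value: i32,
}

#[derive(Default)]
struct Counter {
    values: Vec<CounterValue>,
}

impl Counter {
    /// `counter-reset`: overwrite the value at this level if it is the
    /// innermost one, otherwise open a new scope.
    fn reset(&mut self, level: u32, value: i32) {
        if let Some(existing) = self.values.last_mut() {
            if existing.level == level {
                existing.value = value;
                return;
            }
        }
        self.values.push(CounterValue { level, value });
    }

    /// `counter-increment`: bump the innermost value, creating one if needed.
    fn increment(&mut self, level: u32, amount: i32) {
        if let Some(last) = self.values.last_mut() {
            last.value += amount;
            return;
        }
        self.values.push(CounterValue { level, value: amount });
    }
}

fn main() {
    let mut counter = Counter::default();
    counter.reset(0, 0);      // e.g. `counter-reset: item` on an <ol>
    counter.increment(1, 1);  // first <li>
    counter.increment(1, 1);  // second <li>
    assert_eq!(counter.values.last(), Some(&CounterValue { level: 0, value: 2 }));
}
```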
fn render(&self, fn render(
&self,
layout_context: &LayoutContext, layout_context: &LayoutContext,
node: OpaqueNode, node: OpaqueNode,
pseudo: PseudoElementType, pseudo: PseudoElementType,
style: ::ServoArc<ComputedValues>, style: ::ServoArc<ComputedValues>,
list_style_type: ListStyleType, list_style_type: ListStyleType,
mode: RenderingMode) mode: RenderingMode,
-> Option<SpecificFragmentInfo> { ) -> Option<SpecificFragmentInfo> {
let mut string = String::new(); let mut string = String::new();
match mode { match mode {
RenderingMode::Plain => { RenderingMode::Plain => {
@ -384,7 +440,7 @@ impl Counter {
None => 0, None => 0,
}; };
push_representation(value, list_style_type, &mut string) push_representation(value, list_style_type, &mut string)
} },
RenderingMode::Suffix(suffix) => { RenderingMode::Suffix(suffix) => {
let value = match self.values.last() { let value = match self.values.last() {
Some(ref value) => value.value, Some(ref value) => value.value,
@ -392,7 +448,7 @@ impl Counter {
}; };
push_representation(value, list_style_type, &mut string); push_representation(value, list_style_type, &mut string);
string.push_str(suffix) string.push_str(suffix)
} },
RenderingMode::All(separator) => { RenderingMode::All(separator) => {
let mut first = true; let mut first = true;
for value in &self.values { for value in &self.values {
@ -402,7 +458,7 @@ impl Counter {
first = false; first = false;
push_representation(value.value, list_style_type, &mut string) push_representation(value.value, list_style_type, &mut string)
} }
} },
} }
if string.is_empty() { if string.is_empty() {
@ -432,22 +488,26 @@ struct CounterValue {
} }
/// Creates fragment info for a literal string. /// Creates fragment info for a literal string.
fn render_text(layout_context: &LayoutContext, fn render_text(
layout_context: &LayoutContext,
node: OpaqueNode, node: OpaqueNode,
pseudo: PseudoElementType, pseudo: PseudoElementType,
style: ::ServoArc<ComputedValues>, style: ::ServoArc<ComputedValues>,
string: String) string: String,
-> Option<SpecificFragmentInfo> { ) -> Option<SpecificFragmentInfo> {
let mut fragments = LinkedList::new(); let mut fragments = LinkedList::new();
let info = SpecificFragmentInfo::UnscannedText( let info = SpecificFragmentInfo::UnscannedText(Box::new(UnscannedTextFragmentInfo::new(
Box::new(UnscannedTextFragmentInfo::new(string.into_boxed_str(), None)) string.into_boxed_str(),
); None,
fragments.push_back(Fragment::from_opaque_node_and_style(node, )));
fragments.push_back(Fragment::from_opaque_node_and_style(
node,
pseudo, pseudo,
style.clone(), style.clone(),
style, style,
RestyleDamage::rebuild_and_reflow(), RestyleDamage::rebuild_and_reflow(),
info)); info,
));
// FIXME(pcwalton): This should properly handle multiple marker fragments. This could happen // FIXME(pcwalton): This should properly handle multiple marker fragments. This could happen
// due to text run splitting. // due to text run splitting.
let fragments = with_thread_local_font_context(layout_context, |font_context| { let fragments = with_thread_local_font_context(layout_context, |font_context| {
@ -464,39 +524,28 @@ fn render_text(layout_context: &LayoutContext,
/// `list-style-type` onto the given string. /// `list-style-type` onto the given string.
fn push_representation(value: i32, list_style_type: ListStyleType, accumulator: &mut String) { fn push_representation(value: i32, list_style_type: ListStyleType, accumulator: &mut String) {
match list_style_type { match list_style_type {
ListStyleType::None => {} ListStyleType::None => {},
ListStyleType::Disc | ListStyleType::Disc |
ListStyleType::Circle | ListStyleType::Circle |
ListStyleType::Square | ListStyleType::Square |
ListStyleType::DisclosureOpen | ListStyleType::DisclosureOpen |
ListStyleType::DisclosureClosed => { ListStyleType::DisclosureClosed => accumulator.push(static_representation(list_style_type)),
accumulator.push(static_representation(list_style_type))
}
ListStyleType::Decimal => push_numeric_representation(value, &DECIMAL, accumulator), ListStyleType::Decimal => push_numeric_representation(value, &DECIMAL, accumulator),
ListStyleType::ArabicIndic => { ListStyleType::ArabicIndic => {
push_numeric_representation(value, &ARABIC_INDIC, accumulator) push_numeric_representation(value, &ARABIC_INDIC, accumulator)
} },
ListStyleType::Bengali => push_numeric_representation(value, &BENGALI, accumulator), ListStyleType::Bengali => push_numeric_representation(value, &BENGALI, accumulator),
ListStyleType::Cambodian | ListStyleType::Cambodian | ListStyleType::Khmer => {
ListStyleType::Khmer => {
push_numeric_representation(value, &CAMBODIAN, accumulator) push_numeric_representation(value, &CAMBODIAN, accumulator)
} },
ListStyleType::CjkDecimal => { ListStyleType::CjkDecimal => push_numeric_representation(value, &CJK_DECIMAL, accumulator),
push_numeric_representation(value, &CJK_DECIMAL, accumulator) ListStyleType::Devanagari => push_numeric_representation(value, &DEVANAGARI, accumulator),
}
ListStyleType::Devanagari => {
push_numeric_representation(value, &DEVANAGARI, accumulator)
}
ListStyleType::Gujarati => push_numeric_representation(value, &GUJARATI, accumulator), ListStyleType::Gujarati => push_numeric_representation(value, &GUJARATI, accumulator),
ListStyleType::Gurmukhi => push_numeric_representation(value, &GURMUKHI, accumulator), ListStyleType::Gurmukhi => push_numeric_representation(value, &GURMUKHI, accumulator),
ListStyleType::Kannada => push_numeric_representation(value, &KANNADA, accumulator), ListStyleType::Kannada => push_numeric_representation(value, &KANNADA, accumulator),
ListStyleType::Lao => push_numeric_representation(value, &LAO, accumulator), ListStyleType::Lao => push_numeric_representation(value, &LAO, accumulator),
ListStyleType::Malayalam => { ListStyleType::Malayalam => push_numeric_representation(value, &MALAYALAM, accumulator),
push_numeric_representation(value, &MALAYALAM, accumulator) ListStyleType::Mongolian => push_numeric_representation(value, &MONGOLIAN, accumulator),
}
ListStyleType::Mongolian => {
push_numeric_representation(value, &MONGOLIAN, accumulator)
}
ListStyleType::Myanmar => push_numeric_representation(value, &MYANMAR, accumulator), ListStyleType::Myanmar => push_numeric_representation(value, &MYANMAR, accumulator),
ListStyleType::Oriya => push_numeric_representation(value, &ORIYA, accumulator), ListStyleType::Oriya => push_numeric_representation(value, &ORIYA, accumulator),
ListStyleType::Persian => push_numeric_representation(value, &PERSIAN, accumulator), ListStyleType::Persian => push_numeric_representation(value, &PERSIAN, accumulator),
@@ -505,31 +554,27 @@ fn push_representation(value: i32, list_style_type: ListStyleType, accumulator:
         ListStyleType::Tibetan => push_numeric_representation(value, &TIBETAN, accumulator),
         ListStyleType::LowerAlpha => {
             push_alphabetic_representation(value, &LOWER_ALPHA, accumulator)
-        }
+        },
         ListStyleType::UpperAlpha => {
             push_alphabetic_representation(value, &UPPER_ALPHA, accumulator)
-        }
+        },
         ListStyleType::CjkEarthlyBranch => {
             push_alphabetic_representation(value, &CJK_EARTHLY_BRANCH, accumulator)
-        }
+        },
         ListStyleType::CjkHeavenlyStem => {
             push_alphabetic_representation(value, &CJK_HEAVENLY_STEM, accumulator)
-        }
+        },
         ListStyleType::LowerGreek => {
             push_alphabetic_representation(value, &LOWER_GREEK, accumulator)
-        }
+        },
-        ListStyleType::Hiragana => {
-            push_alphabetic_representation(value, &HIRAGANA, accumulator)
-        }
+        ListStyleType::Hiragana => push_alphabetic_representation(value, &HIRAGANA, accumulator),
         ListStyleType::HiraganaIroha => {
             push_alphabetic_representation(value, &HIRAGANA_IROHA, accumulator)
-        }
+        },
-        ListStyleType::Katakana => {
-            push_alphabetic_representation(value, &KATAKANA, accumulator)
-        }
+        ListStyleType::Katakana => push_alphabetic_representation(value, &KATAKANA, accumulator),
         ListStyleType::KatakanaIroha => {
             push_alphabetic_representation(value, &KATAKANA_IROHA, accumulator)
-        }
+        },
     }
 }
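
The alphabetic arms above (lower-alpha, hiragana, katakana, and friends) use counter systems with no zero digit: counting works like spreadsheet column names rather than positional numerals. Below is a minimal sketch of that conversion, reusing the call shape visible in this diff; the function body is illustrative and is not copied from Servo.

    /// Bijective (no-zero) conversion: 1 -> first symbol, N -> last symbol,
    /// N + 1 -> first symbol repeated ("aa"), as in CSS alphabetic counter styles.
    fn push_alphabetic_representation(value: i32, system: &[char], accumulator: &mut String) {
        let radix = system.len() as i64;
        let mut value = i64::from(value);
        // Alphabetic systems are only defined for positive counter values;
        // implementations typically fall back to a plain decimal representation otherwise.
        if value < 1 {
            accumulator.push_str(&value.to_string());
            return;
        }
        let mut letters = Vec::new();
        while value > 0 {
            value -= 1;
            letters.push(system[(value % radix) as usize]);
            value /= radix;
        }
        accumulator.extend(letters.into_iter().rev());
    }

    fn main() {
        const LOWER_ALPHA: [char; 26] = [
            'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm',
            'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z',
        ];
        let mut out = String::new();
        push_alphabetic_representation(28, &LOWER_ALPHA, &mut out);
        assert_eq!(out, "ab"); // 26 = "z", 27 = "aa", 28 = "ab"
        println!("{}", out);
    }

The `value -= 1` before each division is what makes the system bijective, so a 26-symbol table yields "z" for 26 and "aa" for 27 instead of introducing a zero digit.
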
@@ -572,7 +617,7 @@ fn push_numeric_representation(value: i32, system: &[char], accumulator: &mut St
     // Step 1.
     if abs_value == 0 {
         accumulator.push(system[0]);
-        return
+        return;
     }
 
     // Step 2.
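
The numeric systems (decimal, Devanagari, Tibetan, and so on), by contrast, do have a zero digit, and Step 1 above is the zero shortcut of an otherwise ordinary radix conversion. A self-contained sketch along those lines follows; it matches the signature shown in the hunk header, but the remaining steps and the negative-sign handling are assumptions rather than Servo's exact code.

    /// Minimal sketch of a CSS "numeric" counter system: divide the absolute
    /// value by the radix (the length of the digit table) and map each
    /// remainder to a digit, most significant first. Prefixing negatives with
    /// a hyphen-minus is purely illustrative here.
    fn push_numeric_representation(value: i32, system: &[char], accumulator: &mut String) {
        let mut abs_value = i64::from(value).abs() as u64;

        // Step 1: zero maps directly to the first digit of the system.
        if abs_value == 0 {
            accumulator.push(system[0]);
            return;
        }

        // Step 2: collect digits least-significant first, then emit them reversed.
        let radix = system.len() as u64;
        let mut digits = Vec::new();
        while abs_value > 0 {
            digits.push(system[(abs_value % radix) as usize]);
            abs_value /= radix;
        }

        if value < 0 {
            accumulator.push('-');
        }
        accumulator.extend(digits.into_iter().rev());
    }

    fn main() {
        const DECIMAL: [char; 10] = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9'];
        let mut out = String::new();
        push_numeric_representation(-42, &DECIMAL, &mut out);
        assert_eq!(out, "-42");
        println!("{}", out);
    }

With a ten-symbol table this reduces to ordinary decimal formatting; swapping in a table such as &DEVANAGARI or &TIBETAN only changes the digit glyphs, not the algorithm.
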
View file
@@ -11,7 +11,7 @@ use style::servo::restyle_damage::ServoRestyleDamage;
 #[derive(Clone, Copy, PartialEq)]
 pub enum RelayoutMode {
     Incremental,
-    Force
+    Force,
 }
 
 bitflags! {
@@ -30,7 +30,10 @@ pub trait LayoutDamageComputation {
 impl<'a> LayoutDamageComputation for &'a mut Flow {
     fn compute_layout_damage(self) -> SpecialRestyleDamage {
         let mut special_damage = SpecialRestyleDamage::empty();
-        let is_absolutely_positioned = self.base().flags.contains(FlowFlags::IS_ABSOLUTELY_POSITIONED);
+        let is_absolutely_positioned = self
+            .base()
+            .flags
+            .contains(FlowFlags::IS_ABSOLUTELY_POSITIONED);
 
         // In addition to damage, we use this phase to compute whether nodes affect CSS counters.
         let mut has_counter_affecting_children = false;
@@ -41,35 +44,47 @@ impl<'a> LayoutDamageComputation for &'a mut Flow {
             let parent_damage = self_base.restyle_damage;
 
             for kid in self_base.children.iter_mut() {
-                let child_is_absolutely_positioned =
-                    kid.base().flags.contains(FlowFlags::IS_ABSOLUTELY_POSITIONED);
+                let child_is_absolutely_positioned = kid
+                    .base()
+                    .flags
+                    .contains(FlowFlags::IS_ABSOLUTELY_POSITIONED);
                 kid.mut_base().restyle_damage.insert(
-                    parent_damage.damage_for_child(is_absolutely_positioned,
-                                                   child_is_absolutely_positioned));
+                    parent_damage
+                        .damage_for_child(is_absolutely_positioned, child_is_absolutely_positioned),
+                );
                 {
                     let kid: &mut Flow = kid;
                     special_damage.insert(kid.compute_layout_damage());
                 }
-                self_base.restyle_damage
-                    .insert(kid.base().restyle_damage.damage_for_parent(
-                        child_is_absolutely_positioned));
+                self_base.restyle_damage.insert(
+                    kid.base()
+                        .restyle_damage
+                        .damage_for_parent(child_is_absolutely_positioned),
+                );
 
-                has_counter_affecting_children = has_counter_affecting_children ||
-                    kid.base().flags.intersects(FlowFlags::AFFECTS_COUNTERS |
-                        FlowFlags::HAS_COUNTER_AFFECTING_CHILDREN);
+                has_counter_affecting_children =
+                    has_counter_affecting_children || kid.base().flags.intersects(
+                        FlowFlags::AFFECTS_COUNTERS | FlowFlags::HAS_COUNTER_AFFECTING_CHILDREN,
+                    );
             }
         }
 
         let self_base = self.mut_base();
-        if self_base.flags.float_kind() != Float::None &&
-            self_base.restyle_damage.intersects(ServoRestyleDamage::REFLOW) {
+        if self_base.flags.float_kind() != Float::None && self_base
+            .restyle_damage
+            .intersects(ServoRestyleDamage::REFLOW)
+        {
             special_damage.insert(SpecialRestyleDamage::REFLOW_ENTIRE_DOCUMENT);
         }
 
         if has_counter_affecting_children {
-            self_base.flags.insert(FlowFlags::HAS_COUNTER_AFFECTING_CHILDREN)
+            self_base
+                .flags
+                .insert(FlowFlags::HAS_COUNTER_AFFECTING_CHILDREN)
         } else {
-            self_base.flags.remove(FlowFlags::HAS_COUNTER_AFFECTING_CHILDREN)
+            self_base
+                .flags
+                .remove(FlowFlags::HAS_COUNTER_AFFECTING_CHILDREN)
         }
 
         special_damage
@@ -77,8 +92,12 @@ impl<'a> LayoutDamageComputation for &'a mut Flow {
     fn reflow_entire_document(self) {
         let self_base = self.mut_base();
-        self_base.restyle_damage.insert(RestyleDamage::rebuild_and_reflow());
-        self_base.restyle_damage.remove(ServoRestyleDamage::RECONSTRUCT_FLOW);
+        self_base
+            .restyle_damage
+            .insert(RestyleDamage::rebuild_and_reflow());
+        self_base
+            .restyle_damage
+            .remove(ServoRestyleDamage::RECONSTRUCT_FLOW);
 
         for kid in self_base.children.iter_mut() {
             kid.reflow_entire_document();
         }
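
Stripped of Servo's types, compute_layout_damage above is a two-way propagation over the flow tree: the parent's damage is pushed down to each child via damage_for_child, the child is recursed into, and whatever the child accumulated is folded back into the parent via damage_for_parent. A toy model of that traversal is sketched below with made-up damage bits; the flag names and the down/up filtering rules are illustrative, not Servo's.

    /// Toy damage bits; Servo's real RestyleDamage/FlowFlags are far richer.
    #[derive(Clone, Copy)]
    struct Damage(u8);

    const REPAINT: u8 = 0b01;
    const REFLOW: u8 = 0b10;

    impl Damage {
        fn insert(&mut self, other: Damage) {
            self.0 |= other.0;
        }
        /// What a child inherits from its parent (illustrative: everything).
        fn damage_for_child(self) -> Damage {
            self
        }
        /// What bubbles back up to the parent (illustrative: reflow only).
        fn damage_for_parent(self) -> Damage {
            Damage(self.0 & REFLOW)
        }
    }

    struct Node {
        damage: Damage,
        children: Vec<Node>,
    }

    /// Mirrors the traversal order in compute_layout_damage: push damage down,
    /// recurse, then fold the child's damage back into the parent.
    fn propagate(node: &mut Node) {
        let parent_damage = node.damage;
        for kid in &mut node.children {
            kid.damage.insert(parent_damage.damage_for_child());
            propagate(kid);
            node.damage.insert(kid.damage.damage_for_parent());
        }
    }

    fn main() {
        let mut root = Node {
            damage: Damage(REPAINT),
            children: vec![Node { damage: Damage(REFLOW), children: vec![] }],
        };
        propagate(&mut root);
        assert_eq!(root.damage.0, REPAINT | REFLOW); // the child's reflow bubbled up
        println!("root damage bits: {:#04b}", root.damage.0);
    }

One property the real code shares with this toy is that parent_damage is snapshotted before the loop, so damage bubbled up from one child is not immediately re-pushed onto its siblings within the same pass.
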
File diff suppressed because it is too large

View file
@@ -47,7 +47,7 @@ impl ScopeData {
             name: name,
             pre: pre,
             post: Value::Null,
-            children: vec!(),
+            children: vec![],
         }
     }
 }
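
The vec!() to vec![] change is purely cosmetic: the vec! macro accepts parentheses, brackets, or braces, and square brackets are simply the idiomatic delimiter. A quick equivalence check:

    fn main() {
        let with_brackets: Vec<i32> = vec![];
        let with_parens: Vec<i32> = vec!();
        assert_eq!(with_brackets, with_parens); // identical empty vectors
    }
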
Some files were not shown because too many files have changed in this diff.