Merge inbound to mozilla-central. a=merge
author: Brindusan Cristian <cbrindusan@mozilla.com>
Sun, 14 Oct 2018 12:25:38 +0300
changeset 499600 d49587f5ccd37180d1f0d980c9dd076e7afa1bcb
parent 499570 b6abd17c078bae35faac3aa50682a7f6a107d490 (current diff)
parent 499599 ec0b10e9a616588eb248a72dd117582b00211fab (diff)
child 499601 4b02380c0bbb5151f1a1f4606c29f2a1cbb70225
child 499608 298dcf923f48ff967efcab0a41c33046bed3817a
child 499620 4ac8ee3c801972a1c47e8270dccc801612c3aa21
push id: 1864
push user: ffxbld-merge
push date: Mon, 03 Dec 2018 15:51:40 +0000
treeherder: mozilla-release@f040763d99ad [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: merge
milestone: 64.0a1
first release with
nightly linux32
d49587f5ccd3 / 64.0a1 / 20181014100140 / files
nightly linux64
d49587f5ccd3 / 64.0a1 / 20181014100140 / files
nightly mac
d49587f5ccd3 / 64.0a1 / 20181014100140 / files
nightly win32
d49587f5ccd3 / 64.0a1 / 20181014100140 / files
nightly win64
d49587f5ccd3 / 64.0a1 / 20181014100140 / files
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
releases
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Merge inbound to mozilla-central. a=merge
third_party/rust/atty-0.1.2/.cargo-checksum.json
third_party/rust/atty-0.1.2/.travis.yml
third_party/rust/atty-0.1.2/CHANGELOG.md
third_party/rust/atty-0.1.2/Cargo.toml
third_party/rust/atty-0.1.2/LICENSE
third_party/rust/atty-0.1.2/README.md
third_party/rust/atty-0.1.2/appveyor.yml
third_party/rust/atty-0.1.2/src/lib.rs
third_party/rust/ena/src/cc/mod.rs
third_party/rust/ena/src/cc/test.rs
third_party/rust/ena/src/constraint/mod.rs
third_party/rust/ena/src/constraint/test.rs
third_party/rust/ena/src/debug.rs
third_party/rust/ena/src/graph/mod.rs
third_party/rust/ena/src/graph/tests.rs
third_party/rust/gleam/.travis.yml
third_party/rust/gleam/COPYING
third_party/rust/gleam/LICENSE-APACHE
third_party/rust/gleam/LICENSE-MIT
third_party/rust/gleam/README.md
third_party/rust/gleam/build.rs
third_party/rust/gleam/src/gl.rs
third_party/rust/gleam/src/gl_fns.rs
third_party/rust/gleam/src/gles_fns.rs
third_party/rust/gleam/src/lib.rs
third_party/rust/lalrpop-intern/.cargo-checksum.json
third_party/rust/lalrpop-intern/Cargo.toml
third_party/rust/lalrpop-intern/src/lib.rs
third_party/rust/lalrpop-intern/src/test.rs
third_party/rust/lalrpop-snap/src/parser/lrgrammar.rs
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -69,26 +69,16 @@ dependencies = [
 
 [[package]]
 name = "atomic_refcell"
 version = "0.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
 name = "atty"
-version = "0.1.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "atty"
 version = "0.2.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
  "termion 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
@@ -196,16 +186,29 @@ dependencies = [
 ]
 
 [[package]]
 name = "binary-space-partition"
 version = "0.1.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
+name = "binast"
+version = "0.1.1"
+dependencies = [
+ "binjs_meta 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "clap 2.31.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "env_logger 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "itertools 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "webidl 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "yaml-rust 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
 name = "bincode"
 version = "1.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "byteorder 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "serde 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
@@ -224,49 +227,36 @@ dependencies = [
  "proc-macro2 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
  "quote 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "regex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "which 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "binjs_meta"
-version = "0.3.10"
+version = "0.4.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "Inflector 0.11.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "itertools 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "log 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "webidl 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "binsource"
-version = "0.1.0"
-dependencies = [
- "binjs_meta 0.3.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "clap 2.31.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "env_logger 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "itertools 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "log 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "webidl 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "yaml-rust 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "webidl 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "bit-set"
-version = "0.4.0"
+version = "0.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "bit-vec 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "bit-vec 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "bit-vec"
-version = "0.4.4"
+version = "0.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
 name = "bitflags"
 version = "1.0.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
@@ -279,26 +269,40 @@ name = "blake2-rfc"
 version = "0.2.18"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "arrayvec 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "constant_time_eq 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
+name = "block-buffer"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "arrayref 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "byte-tools 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
 name = "boxfnonce"
 version = "0.0.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
 name = "build_const"
 version = "0.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
+name = "byte-tools"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
 name = "byteorder"
 version = "1.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
 name = "bytes"
 version = "0.4.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -707,16 +711,24 @@ dependencies = [
 ]
 
 [[package]]
 name = "diff"
 version = "0.1.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
+name = "digest"
+version = "0.7.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "generic-array 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
 name = "dirs"
 version = "1.0.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
  "redox_users 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@@ -767,18 +779,21 @@ dependencies = [
 
 [[package]]
 name = "either"
 version = "1.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
 name = "ena"
-version = "0.5.0"
+version = "0.9.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "log 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
+]
 
 [[package]]
 name = "encoding_c"
 version = "0.9.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "encoding_rs 0.8.9 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@@ -843,16 +858,21 @@ source = "registry+https://github.com/ru
 dependencies = [
  "proc-macro2 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)",
  "quote 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "syn 0.14.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "synstructure 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
+name = "fake-simd"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
 name = "fallible"
 version = "0.0.1"
 dependencies = [
  "hashglobe 0.1.0",
  "smallvec 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
@@ -975,16 +995,24 @@ dependencies = [
  "selectors 0.20.0",
  "servo_arc 0.1.1",
  "smallvec 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)",
  "style 0.0.1",
  "style_traits 0.0.1",
 ]
 
 [[package]]
+name = "generic-array"
+version = "0.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "typenum 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
 name = "gkrust"
 version = "0.1.0"
 dependencies = [
  "gkrust-shared 0.1.0",
  "stylo_tests 0.0.1",
 ]
 
 [[package]]
@@ -1035,17 +1063,17 @@ source = "registry+https://github.com/ru
 dependencies = [
  "khronos_api 2.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "log 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
  "xml-rs 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "gleam"
-version = "0.6.2"
+version = "0.6.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "gl_generator 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "glob"
 version = "0.2.11"
@@ -1216,67 +1244,63 @@ dependencies = [
 
 [[package]]
 name = "khronos_api"
 version = "2.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
 name = "lalrpop"
-version = "0.15.1"
+version = "0.16.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "ascii-canvas 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "atty 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "bit-set 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "atty 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
+ "bit-set 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "diff 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
+ "digest 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "docopt 0.8.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "ena 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "ena 0.9.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "itertools 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "lalrpop-snap 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "lalrpop-util 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "petgraph 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)",
- "regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "regex-syntax 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lalrpop-snap 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lalrpop-util 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "petgraph 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex-syntax 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "serde 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)",
  "serde_derive 1.0.66 (git+https://github.com/servo/serde?branch=deserialize_from_enums8)",
+ "sha2 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "string_cache 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
-name = "lalrpop-intern"
-version = "0.15.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
 name = "lalrpop-snap"
-version = "0.15.1"
+version = "0.16.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "ascii-canvas 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "atty 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "bit-set 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "atty 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
+ "bit-set 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "diff 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
- "ena 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "ena 0.9.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "itertools 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "lalrpop-intern 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "lalrpop-util 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "petgraph 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)",
- "regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lalrpop-util 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "petgraph 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "regex-syntax 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "string_cache 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "lalrpop-util"
-version = "0.15.1"
+version = "0.16.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
 name = "lazy_static"
 version = "1.0.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
@@ -1768,17 +1792,17 @@ source = "registry+https://github.com/ru
 
 [[package]]
 name = "percent-encoding"
 version = "1.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
 name = "petgraph"
-version = "0.4.11"
+version = "0.4.13"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "fixedbitset 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)",
  "ordermap 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "phf"
@@ -2210,16 +2234,27 @@ dependencies = [
 name = "servo_arc"
 version = "0.1.1"
 dependencies = [
  "nodrop 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
  "stable_deref_trait 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
+name = "sha2"
+version = "0.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "block-buffer 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "byte-tools 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "digest 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "fake-simd 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
 name = "simd"
 version = "0.2.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
 name = "siphasher"
 version = "0.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2710,16 +2745,21 @@ dependencies = [
 ]
 
 [[package]]
 name = "try-lock"
 version = "0.2.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
+name = "typenum"
+version = "1.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
 name = "u2fhid"
 version = "0.2.2"
 dependencies = [
  "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
  "boxfnonce 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "core-foundation 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "core-foundation-sys 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "devd-rs 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -2868,21 +2908,21 @@ dependencies = [
  "time 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)",
  "tokio 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
  "unicode-segmentation 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "url 1.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "webidl"
-version = "0.6.0"
+version = "0.8.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "lalrpop 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "lalrpop-util 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lalrpop 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lalrpop-util 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "webrender"
 version = "0.57.2"
 dependencies = [
  "app_units 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "bincode 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -2891,17 +2931,17 @@ dependencies = [
  "cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "core-foundation 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "core-graphics 0.17.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "core-text 13.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "dwrote 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "euclid 0.19.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "freetype 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "fxhash 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "gleam 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gleam 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "lazy_static 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "log 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
  "num-traits 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "plane-split 0.13.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "rayon 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "ron 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
  "serde 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)",
  "smallvec 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -2935,17 +2975,17 @@ dependencies = [
  "app_units 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "bincode 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "core-foundation 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "core-graphics 0.17.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "dwrote 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "euclid 0.19.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "foreign-types 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "fxhash 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "gleam 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gleam 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "log 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
  "nsstring 0.1.0",
  "rayon 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "thread_profiler 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "uuid 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "webrender 0.57.2",
 ]
 
@@ -3073,32 +3113,33 @@ dependencies = [
 "checksum aho-corasick 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)" = "68f56c7353e5a9547cbd76ed90f7bb5ffc3ba09d4ea9bd1d8c06c8b1142eeb5a"
 "checksum ansi_term 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b"
 "checksum app_units 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9dadc668390b373e73e4abbfc1f07238b09a25858f2f39c06cebc6d8e141d774"
 "checksum argon2rs 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "3f67b0b6a86dae6e67ff4ca2b6201396074996379fba2b92ff649126f37cb392"
 "checksum arrayref 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "0fd1479b7c29641adbd35ff3b5c293922d696a92f25c8c975da3e0acbc87258f"
 "checksum arrayvec 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "2f0ef4a9820019a0c91d918918c93dc71d469f581a49b47ddc1d285d4270bbe2"
 "checksum ascii-canvas 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b385d69402821a1c254533a011a312531cbcc0e3e24f19bbb4747a5a2daf37e2"
 "checksum atomic_refcell 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fb2dcb6e6d35f20276943cc04bb98e538b348d525a04ac79c10021561d202f21"
-"checksum atty 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d0fd4c0631f06448cc45a6bbb3b710ebb7ff8ccb96a0800c994afe23a70d5df2"
 "checksum atty 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "9a7d5b8723950951411ee34d271d99dddcc2035a16ab25310ea2c8cfd4369652"
 "checksum backtrace 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "89a47830402e9981c5c41223151efcced65a0510c13097c769cede7efb34782a"
 "checksum backtrace-sys 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)" = "c66d56ac8dabd07f6aacdaf633f4b8262f5b3601a810a0dcddffd5c22c69daa0"
 "checksum base64 0.9.2 (registry+https://github.com/rust-lang/crates.io-index)" = "85415d2594767338a74a30c1d370b2f3262ec1b4ed2d7bba5b3faf4de40467d9"
 "checksum binary-space-partition 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "88ceb0d16c4fd0e42876e298d7d3ce3780dd9ebdcbe4199816a32c77e08597ff"
 "checksum bincode 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bda13183df33055cbb84b847becce220d392df502ebe7a4a78d7021771ed94d0"
 "checksum bindgen 0.39.0 (registry+https://github.com/rust-lang/crates.io-index)" = "eac4ed5f2de9efc3c87cb722468fa49d0763e98f999d539bfc5e452c13d85c91"
-"checksum binjs_meta 0.3.10 (registry+https://github.com/rust-lang/crates.io-index)" = "cc0956bac41c458cf38340699dbb54c2220c91cdbfa33be19670fe69e0a6ac9b"
-"checksum bit-set 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d9bf6104718e80d7b26a68fdbacff3481cfc05df670821affc7e9cbc1884400c"
-"checksum bit-vec 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "02b4ff8b16e6076c3e14220b39fbc1fabb6737522281a388998046859400895f"
+"checksum binjs_meta 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "430239e4551e42b80fa5d92322ac80ea38c9dda56e5d5582e057e2288352b71a"
+"checksum bit-set 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6f1efcc46c18245a69c38fcc5cc650f16d3a59d034f3106e9ed63748f695730a"
+"checksum bit-vec 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4440d5cb623bb7390ae27fec0bb6c61111969860f8e3ae198bfa0663645e67cf"
 "checksum bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "228047a76f468627ca71776ecdebd732a3423081fcf5125585bcd7c49886ce12"
 "checksum bitreader 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "80b13e2ab064ff3aa0bdbf1eff533f9822dc37899821f5f98c67f263eab51707"
 "checksum blake2-rfc 0.2.18 (registry+https://github.com/rust-lang/crates.io-index)" = "5d6d530bdd2d52966a6d03b7a964add7ae1a288d25214066fd4b600f0f796400"
+"checksum block-buffer 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a076c298b9ecdb530ed9d967e74a6027d6a7478924520acddcddc24c1c8ab3ab"
 "checksum boxfnonce 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "8380105befe91099e6f69206164072c05bc92427ff6aa8a5171388317346dd75"
 "checksum build_const 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e90dc84f5e62d2ebe7676b83c22d33b6db8bd27340fb6ffbff0a364efa0cb9c9"
+"checksum byte-tools 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "560c32574a12a89ecd91f5e742165893f86e3ab98d21f8ea548658eb9eef5f40"
 "checksum byteorder 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "652805b7e73fada9d85e9a6682a4abd490cb52d96aeecc12e33a0de34dfd0d23"
 "checksum bytes 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)" = "e178b8e0e239e844b083d5a0d4a156b2654e67f9f80144d48398fcd736a24fb8"
 "checksum bzip2 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c3eafc42c44e0d827de6b1c131175098fe7fb53b8ce8a47e65cb3ea94688be24"
 "checksum bzip2-sys 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "2c5162604199bbb17690ede847eaa6120a3f33d5ab4dcc8e7c25b16d849ae79b"
 "checksum cc 1.0.23 (registry+https://github.com/rust-lang/crates.io-index)" = "c37f0efaa4b9b001fa6f02d4b644dee4af97d3414df07c51e3e4f015f3a3e131"
 "checksum cexpr 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "42aac45e9567d97474a834efdee3081b3c942b2205be932092f53354ce503d6c"
 "checksum cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d4c819a1287eb618df47cc647173c5c4c66ba19d888a6e50d605672aed3140de"
 "checksum chrono 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "45912881121cb26fad7c38c17ba7daa18764771836b34fab7d3fbd93ed633878"
@@ -3134,63 +3175,65 @@ dependencies = [
 "checksum cubeb-backend 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fdcac95519416d9ec814db2dc40e6293e7da25b906023d93f48b87f0587ab138"
 "checksum cubeb-core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "37f7b20f757a4e4b6aa28863236551bff77682dc6db192eba15af615492b5445"
 "checksum cubeb-sys 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "653b9e245d35dbe2a2da7c4586275cee75ff656ddeb02d4a73b4afdfa6d67502"
 "checksum darling 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2a78af487e4eb8f4421a1770687b328af6bb4494ca93435210678c6eea875c11"
 "checksum darling_core 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b315f49c7b6db3708bca6e6913c194581a44ec619b7a39e131d4dd63733a3698"
 "checksum darling_macro 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "eb69a38fdeaeaf3db712e1df170de67ee9dfc24fb88ca3e9d21e703ec25a4d8e"
 "checksum devd-rs 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e7c9ac481c38baf400d3b732e4a06850dfaa491d1b6379a249d9d40d14c2434c"
 "checksum diff 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "3c2b69f912779fbb121ceb775d74d51e915af17aaebc38d28a592843a2dd0a3a"
+"checksum digest 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)" = "03b072242a8cbaf9c145665af9d250c59af3b958f83ed6824e13533cf76d5b90"
 "checksum dirs 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "88972de891f6118092b643d85a0b28e0678e0f948d7f879aa32f2d5aafe97d2a"
 "checksum docopt 0.8.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d8acd393692c503b168471874953a2531df0e9ab77d0b6bbc582395743300a4a"
 "checksum dtoa 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "09c3753c3db574d215cba4ea76018483895d7bff25a31b49ba45db21c48e50ab"
 "checksum dtoa-short 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "068d4026697c1a18f0b0bb8cfcad1b0c151b90d8edb9bf4c235ad68128920d1d"
 "checksum dwrote 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "30a998e9ff70cd208ccdc4f864e998688bf61d7b897dccec8e17a884d17358bf"
 "checksum either 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "18785c1ba806c258137c937e44ada9ee7e69a37e3c72077542cd2f069d78562a"
-"checksum ena 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "cabe5a5078ac8c506d3e4430763b1ba9b609b1286913e7d08e581d1c2de9b7e5"
+"checksum ena 0.9.3 (registry+https://github.com/rust-lang/crates.io-index)" = "88dc8393b3c7352f94092497f6b52019643e493b6b890eb417cdb7c46117e621"
 "checksum encoding_c 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "769ecb8b33323998e482b218c0d13cd64c267609023b4b7ec3ee740714c318ee"
 "checksum encoding_rs 0.8.9 (registry+https://github.com/rust-lang/crates.io-index)" = "f1a79fa56c329a5b087de13748054fb3b974c4a672c12c71f0b66e35c5addec5"
 "checksum env_logger 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0561146661ae44c579e993456bc76d11ce1e0c7d745e57b2fa7146b6e49fa2ad"
 "checksum error-chain 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ff511d5dc435d703f4971bc399647c9bc38e20cb41452e3b9feb4765419ed3f3"
 "checksum euclid 0.19.0 (registry+https://github.com/rust-lang/crates.io-index)" = "70a2ebdf55fb9d6329046e026329a55ef8fbaae5ea833f56e170beb3125a8a5f"
 "checksum failure 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7efb22686e4a466b1ec1a15c2898f91fa9cb340452496dca654032de20ff95b9"
 "checksum failure_derive 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "946d0e98a50d9831f5d589038d2ca7f8f455b1c21028c0db0e84116a12696426"
+"checksum fake-simd 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e88a8acf291dafb59c2d96e8f59828f3838bb1a70398823ade51a84de6a6deed"
 "checksum fixedbitset 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "85cb8fec437468d86dc7c83ca7cfc933341d561873275f22dd5eedefa63a6478"
 "checksum flate2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9fac2277e84e5e858483756647a9d0aa8d9a2b7cba517fd84325a0aaa69a0909"
 "checksum fnv 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "6cc484842f1e2884faf56f529f960cc12ad8c71ce96cc7abba0a067c98fee344"
 "checksum foreign-types 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5ebc04f19019fff1f2d627b5581574ead502f80c48c88900575a46e0840fe5d0"
 "checksum freetype 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b659e75b7a7338fe75afd7f909fc2b71937845cffb6ebe54ba2e50f13d8e903d"
 "checksum fs2 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "9564fc758e15025b46aa6643b1b77d047d1a56a1aea6e01002ac0c7026876213"
 "checksum fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82"
 "checksum fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7"
 "checksum futures 0.1.23 (registry+https://github.com/rust-lang/crates.io-index)" = "884dbe32a6ae4cd7da5c6db9b78114449df9953b8d490c9d7e1b51720b922c62"
 "checksum futures-cpupool 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "ab90cde24b3319636588d0c35fe03b1333857621051837ed769faefb4c2162e4"
 "checksum fxhash 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c"
 "checksum gcc 0.3.54 (registry+https://github.com/rust-lang/crates.io-index)" = "5e33ec290da0d127825013597dbdfc28bee4964690c7ce1166cbc2a7bd08b1bb"
+"checksum generic-array 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ef25c5683767570c2bbd7deba372926a55eaae9982d7726ee2a1050239d45b9d"
 "checksum gl_generator 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7a795170cbd85b5a7baa58d6d7525cae6a03e486859860c220f7ebbbdd379d0a"
-"checksum gleam 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "24ba6539d49223f6bca4f076d9490c001bdbfe07d59cb0ad4079033c75bdc92d"
+"checksum gleam 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2260952cc0393ca6f183e1a91a035c65c85ddb02505f3d53e5a775eb05946f44"
 "checksum glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "8be18de09a56b60ed0edf84bc9df007e30040691af7acd1c41874faac5895bfb"
 "checksum goblin 0.0.17 (registry+https://github.com/rust-lang/crates.io-index)" = "5911d7df7b8f65ab676c5327b50acea29d3c6a1a4ad05e444cf5dce321b26db2"
 "checksum h2 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "a27e7ed946e8335bdf9a191bc1b9b14a03ba822d013d2f58437f4fabcbd7fc2c"
 "checksum http 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "dca621d0fa606a5ff2850b6e337b57ad6137ee4d67e940449643ff45af6874c6"
 "checksum httparse 1.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "af2f2dd97457e8fb1ae7c5a420db346af389926e36f43768b96f101546b04a07"
 "checksum humantime 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0484fda3e7007f2a4a0d9c3a703ca38c71c54c55602ce4660c419fd32e188c9e"
 "checksum hyper 0.12.7 (registry+https://github.com/rust-lang/crates.io-index)" = "c087746de95e20e4dabe86606c3a019964a8fde2d5f386152939063c116c5971"
 "checksum ident_case 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3c9826188e666f2ed92071d2dadef6edc430b11b158b5b2b3f4babbcc891eaaa"
 "checksum idna 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "014b298351066f1512874135335d62a789ffe78a9974f94b43ed5621951eaf7d"
 "checksum indexmap 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "08173ba1e906efb6538785a8844dd496f5d34f0a2d88038e95195172fc667220"
 "checksum iovec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "dbe6e417e7d0975db6512b90796e8ce223145ac4e33c377e4a42882a0e88bb08"
 "checksum itertools 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)" = "b07332223953b5051bceb67e8c4700aa65291535568e1f12408c43c4a42c0394"
 "checksum itoa 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c069bbec61e1ca5a596166e55dfe4773ff745c3d16b700013bcaff9a6df2c682"
 "checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
 "checksum khronos_api 2.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "037ab472c33f67b5fbd3e9163a2645319e5356fcd355efa6d4eb7fff4bbcb554"
-"checksum lalrpop 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)" = "88035943c3cfbb897a499a556212b2b053574f32b4238b71b61625bc470f80aa"
-"checksum lalrpop-intern 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cc4fd87be4a815fd373e02773983940f0d75fb26fde8c098e9e45f7af03154c0"
-"checksum lalrpop-snap 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)" = "5f244285324e4e33d486910b66fd3b7cb37e2072c5bf63319f506fe99ed72650"
-"checksum lalrpop-util 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)" = "de408fd50dea8ad7a77107144983a25c7fdabf5f8faf707a6e020d68874ed06c"
+"checksum lalrpop 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9f7014afd5642680074fd5dcc624d544f9eabfa281cba2c3ac56c3db6d21ad1b"
+"checksum lalrpop-snap 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0b85aa455529344133d7ecaaac04c01ed87f459deeaa0fe5422885e2095d8cdc"
+"checksum lalrpop-util 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2400aeebcd11259370d038c24821b93218dd2f33a53f53e9c8fcccca70be6696"
 "checksum lazy_static 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e6412c5e2ad9584b0b8e979393122026cdd6d2a80b933f890dcd694ddbe73739"
 "checksum lazycell 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ce12306c4739d86ee97c23139f3a34ddf0387bbf181bc7929d287025a8c3ef6b"
 "checksum lazycell 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a6f08839bc70ef4a3fe1d566d5350f519c5912ea86be0df1740a7d247c7fc0ef"
 "checksum libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)" = "76e3a3ef172f1a0b9a9ff0dd1491ae5e6c948b94479a3021819ba7d860c8645d"
 "checksum libloading 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9c3ad660d7cb8c5822cd83d10897b0f1f1526792737a179e73896152f85b88c2"
 "checksum libudev 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ea626d3bdf40a1c5aee3bcd4f40826970cae8d80a8fec934c82a63840094dcfe"
 "checksum libz-sys 1.0.16 (registry+https://github.com/rust-lang/crates.io-index)" = "3fdd64ef8ee652185674455c1d450b83cbc8ad895625d543b5324d923f82e4d8"
 "checksum linked-hash-map 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "70fb39025bc7cdd76305867c4eccf2f2dcf6e9a57f5b21a93e1c2d86cd03ec9e"
@@ -3226,17 +3269,17 @@ dependencies = [
 "checksum object 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6cca6ad89d0801138cb4ef606908ae12d83edc4c790ef5178fc7b4c72d959e90"
 "checksum ordered-float 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2f0015e9e8e28ee20c581cfbfe47c650cedeb9ed0721090e0b7ebb10b9cdbcc2"
 "checksum ordermap 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "a86ed3f5f244b372d6b1a00b72ef7f8876d0bc6a78a4c9985c53614041512063"
 "checksum owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "cdf84f41639e037b484f93433aa3897863b561ed65c6e59c7073d7c561710f37"
 "checksum parking_lot 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)" = "69376b761943787ebd5cc85a5bc95958651a22609c5c1c2b65de21786baec72b"
 "checksum parking_lot_core 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)" = "4db1a8ccf734a7bce794cc19b3df06ed87ab2f3907036b693c68f56b4d4537fa"
 "checksum peeking_take_while 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099"
 "checksum percent-encoding 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "de154f638187706bde41d9b4738748933d64e6b37bdbffc0b47a97d16a6ae356"
-"checksum petgraph 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)" = "7a7e5234c228fbfa874c86a77f685886127f82e0aef602ad1d48333fcac6ad61"
+"checksum petgraph 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)" = "9c3659d1ee90221741f65dd128d9998311b0e40c5d3c23a62445938214abce4f"
 "checksum phf 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "cb325642290f28ee14d8c6201159949a872f220c62af6e110a56ea914fbe42fc"
 "checksum phf_codegen 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "d62594c0bb54c464f633175d502038177e90309daf2e0158be42ed5f023ce88f"
 "checksum phf_generator 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "6b07ffcc532ccc85e3afc45865469bf5d9e4ef5bfcf9622e3cfe80c2d275ec03"
 "checksum phf_shared 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "07e24b0ca9643bdecd0632f2b3da6b1b89bbb0030e0b992afc1113b23a7bc2f2"
 "checksum pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "3a8b4c6b8165cd1a1cd4b9b120978131389f64bdaf456435caa41e630edba903"
 "checksum plain 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "b4596b6d070b27117e987119b4dac604f3c58cfb0b191112e24771b2faeac1a6"
 "checksum plane-split 0.13.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d252db71f3d2109c4936e87d9f29f3c737e89f9ac239999d78866bdd60b9deda"
 "checksum podio 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "e5422a1ee1bc57cc47ae717b0137314258138f38fd5f3cea083f43a9725383a0"
@@ -3274,16 +3317,17 @@ dependencies = [
 "checksum scroll 0.9.2 (registry+https://github.com/rust-lang/crates.io-index)" = "2f84d114ef17fd144153d608fba7c446b0145d038985e7a8cc5d08bb0ce20383"
 "checksum scroll_derive 0.9.5 (registry+https://github.com/rust-lang/crates.io-index)" = "8f1aa96c45e7f5a91cb7fabe7b279f02fea7126239fc40b732316e8b6a2d0fcb"
 "checksum semver 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7a3186ec9e65071a2095434b1f5bb24838d4e8e130f584c790f6033c79943537"
 "checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
 "checksum serde 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)" = "e9a2d9a9ac5120e0f768801ca2b58ad6eec929dc9d1d616c162f208869c2ce95"
 "checksum serde_bytes 0.10.4 (registry+https://github.com/rust-lang/crates.io-index)" = "adb6e51a6b3696b301bc221d785f898b4457c619b51d7ce195a6d20baecb37b3"
 "checksum serde_derive 1.0.66 (git+https://github.com/servo/serde?branch=deserialize_from_enums8)" = "<none>"
 "checksum serde_json 1.0.26 (registry+https://github.com/rust-lang/crates.io-index)" = "44dd2cfde475037451fa99b7e5df77aa3cfd1536575fa8e7a538ab36dcde49ae"
+"checksum sha2 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9eb6be24e4c23a84d7184280d2722f7f2731fcdd4a9d886efbfe4413e4847ea0"
 "checksum simd 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "0048b17eb9577ac545c61d85c3559b41dfb4cbea41c9bd9ca6a4f73ff05fda84"
 "checksum siphasher 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2ffc669b726f2bc9a3bcff66e5e23b56ba6bf70e22a34c3d7b6d0b3450b65b84"
 "checksum slab 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "17b4fcaed89ab08ef143da37bc52adbcc04d4a69014f4c1208d6b51f0c47bc23"
 "checksum slab 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "5f9776d6b986f77b35c6cf846c11ad986ff128fe0b2b63a3628e3755e8d3102d"
 "checksum smallbitvec 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "5c63726029f0069f88467873e47f392575f28f9f16b72ac65465263db4b3a13c"
 "checksum smallvec 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = "153ffa32fd170e9944f7e0838edf824a754ec4c1fc64746fcc9fe1f8fa602e5d"
 "checksum stable_deref_trait 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "15132e0e364248108c5e2c02e3ab539be8d6f5d52a01ca9bbf27ed657316f02b"
 "checksum string 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "00caf261d6f90f588f8450b8e1230fa0d5be49ee6140fdfbcb55335aff350970"
@@ -3318,16 +3362,17 @@ dependencies = [
 "checksum tokio-reactor 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "8703a5762ff6913510dc64272c714c4389ffd8c4b3cf602879b8bd14ff06b604"
 "checksum tokio-tcp 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "5b4c329b47f071eb8a746040465fa751bd95e4716e98daef6a9b4e434c17d565"
 "checksum tokio-threadpool 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "24ab84f574027b0e875378f31575cf175360891919e93a3490f07e76e00e4efb"
 "checksum tokio-timer 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "1c76b4e97a4f61030edff8bd272364e4f731b9f54c7307eb4eb733c3926eb96a"
 "checksum tokio-udp 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "43eb534af6e8f37d43ab1b612660df14755c42bd003c5f8d2475ee78cc4600c0"
 "checksum tokio-uds 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "65ae5d255ce739e8537221ed2942e0445f4b3b813daebac1c0050ddaaa3587f9"
 "checksum toml 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "a7540f4ffc193e0d3c94121edb19b055670d369f77d5804db11ae053a45b6e7e"
 "checksum try-lock 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e604eb7b43c06650e854be16a2a03155743d3752dd1c943f6829e26b7a36e382"
+"checksum typenum 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "612d636f949607bdf9b123b4a6f6d966dedf3ff669f7f045890d3a4a73948169"
 "checksum ucd-util 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "fd2be2d6639d0f8fe6cdda291ad456e23629558d466e2789d2c3e9892bda285d"
 "checksum uluru 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d2606e9192f308ddc4f0b3c5d1bf3400e28a70fff956e9d9f46d23b094746d9f"
 "checksum unicode-bidi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5"
 "checksum unicode-normalization 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "51ccda9ef9efa3f7ef5d91e8f9b83bbe6955f9bf86aec89d5cce2c874625920f"
 "checksum unicode-segmentation 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "aa6024fc12ddfd1c6dbc14a80fa2324d4568849869b779f6bd37e5e4c03344d1"
 "checksum unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "bf3a113775714a22dcb774d8ea3655c53a32debae63a063acc00a91cc586245f"
 "checksum unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc"
 "checksum unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "382810877fe448991dfc7f0dd6e3ae5d58088fd0ea5e35189655f84e6814fa56"
@@ -3336,17 +3381,17 @@ dependencies = [
 "checksum uuid 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "bcc7e3b898aa6f6c08e5295b6c89258d1331e9ac578cc992fb818759951bdc22"
 "checksum uuid 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = "e1436e58182935dcd9ce0add9ea0b558e8a87befe01c1a301e6020aeb0876363"
 "checksum vcpkg 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9e0a7d8bed3178a8fb112199d466eeca9ed09a14ba8ad67718179b4fd5487d0b"
 "checksum vec_map 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "887b5b631c2ad01628bbbaa7dd4c869f80d3186688f8d0b6f58774fbe324988c"
 "checksum void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d"
 "checksum walkdir 2.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "63636bd0eb3d00ccb8b9036381b526efac53caf112b7783b730ab3f8e44da369"
 "checksum want 0.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "797464475f30ddb8830cc529aaaae648d581f99e2036a928877dfde027ddf6b3"
 "checksum wasmparser 0.17.2 (registry+https://github.com/rust-lang/crates.io-index)" = "fed18a63a6796175be2254fccca1da4e8b8fec0abca37ad155aea345feb50798"
-"checksum webidl 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "dc14e4b71f94b5bb4c6d696e3b3be4d2e9ee6750a60870ecae09ff7138a131a7"
+"checksum webidl 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d0f807f7488d680893f7188aa09d7672a3a0a8461975a098a2edf0a52e3fee29"
 "checksum which 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "4be6cfa54dab45266e98b5d7be2f8ce959ddd49abd141a05d52dce4b07f803bb"
 "checksum winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a"
 "checksum winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "92c1eb33641e276cfa214a0522acad57be5c56b10cb348b3c5117db75f3ac4b0"
 "checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc"
 "checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
 "checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
 "checksum wincolor 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "eeb06499a3a4d44302791052df005d5232b927ed1a9658146d842165c4de7767"
 "checksum winreg 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a27a759395c1195c4cc5cda607ef6f8f6498f64e78f7900f5de0a127a424704a"
--- a/browser/components/extensions/test/browser/browser-common.ini
+++ b/browser/components/extensions/test/browser/browser-common.ini
@@ -90,16 +90,17 @@ skip-if = (verify && (os == 'linux' || o
 [browser_ext_currentWindow.js]
 [browser_ext_devtools_inspectedWindow.js]
 [browser_ext_devtools_inspectedWindow_eval_bindings.js]
 [browser_ext_devtools_inspectedWindow_reload.js]
 [browser_ext_devtools_network.js]
 [browser_ext_devtools_page.js]
 [browser_ext_devtools_panel.js]
 [browser_ext_devtools_panels_elements.js]
+skip-if = true # Bug 1393760
 [browser_ext_devtools_panels_elements_sidebar.js]
 support-files =
   ../../../../../devtools/client/inspector/extensions/test/head_devtools_inspector_sidebar.js
 [browser_ext_find.js]
 skip-if = (verify && (os == 'linux' || os == 'mac'))
 [browser_ext_getViews.js]
 [browser_ext_history_redirect.js]
 [browser_ext_identity_indication.js]
--- a/devtools/client/inspector/test/browser.ini
+++ b/devtools/client/inspector/test/browser.ini
@@ -77,16 +77,17 @@ skip-if = os == "mac" # Full keyboard na
 [browser_inspector_highlighter-cancel.js]
 [browser_inspector_highlighter-comments.js]
 [browser_inspector_highlighter-cssgrid_01.js]
 [browser_inspector_highlighter-cssgrid_02.js]
 [browser_inspector_highlighter-cssshape_01.js]
 [browser_inspector_highlighter-cssshape_02.js]
 [browser_inspector_highlighter-cssshape_03.js]
 [browser_inspector_highlighter-cssshape_04.js]
+skip-if = (os == 'win' && (debug||asan)) || (os == 'linux' && bits == 64 && !debug) # Bug 1453214
 [browser_inspector_highlighter-cssshape_05.js]
 [browser_inspector_highlighter-cssshape_06-scale.js]
 [browser_inspector_highlighter-cssshape_06-translate.js]
 [browser_inspector_highlighter-cssshape_07.js]
 [browser_inspector_highlighter-cssshape_iframe_01.js]
 skip-if = (verify && debug)
 [browser_inspector_highlighter-csstransform_01.js]
 [browser_inspector_highlighter-csstransform_02.js]
--- a/devtools/shared/css/generated/properties-db.js
+++ b/devtools/shared/css/generated/properties-db.js
@@ -8846,16 +8846,17 @@ exports.CSS_PROPERTIES = {
   "text-transform": {
     "isInherited": true,
     "subproperties": [
       "text-transform"
     ],
     "supports": [],
     "values": [
       "capitalize",
+      "full-size-kana",
       "full-width",
       "inherit",
       "initial",
       "lowercase",
       "none",
       "unset",
       "uppercase"
     ]
--- a/gfx/webrender/Cargo.toml
+++ b/gfx/webrender/Cargo.toml
@@ -22,17 +22,17 @@ serialize_program = ["serde"]
 app_units = "0.7"
 base64 = { optional = true, version = "0.6" }
 bincode = "1.0"
 bitflags = "1.0"
 byteorder = "1.0"
 cfg-if = "0.1.2"
 euclid = "0.19"
 fxhash = "0.2.1"
-gleam = "0.6.2"
+gleam = "0.6.3"
 image = { optional = true, version = "0.19" }
 lazy_static = "1"
 log = "0.4"
 num-traits = "0.2"
 plane-split = "0.13.2"
 png = { optional = true, version = "0.12" }
 rayon = "1"
 ron = { optional = true, version = "0.1.7" }
--- a/gfx/webrender/src/debug_render.rs
+++ b/gfx/webrender/src/debug_render.rs
@@ -118,25 +118,28 @@ impl DebugRenderer {
             "",
             &DESC_COLOR,
         )?;
 
         let font_vao = device.create_vao(&DESC_FONT);
         let line_vao = device.create_vao(&DESC_COLOR);
         let tri_vao = device.create_vao(&DESC_COLOR);
 
-        let font_texture = device.create_texture::<u8>(
+        let font_texture = device.create_texture(
             TextureTarget::Array,
             ImageFormat::R8,
             debug_font_data::BMP_WIDTH,
             debug_font_data::BMP_HEIGHT,
             TextureFilter::Linear,
             None,
             1,
-            Some(&debug_font_data::FONT_BITMAP),
+        );
+        device.upload_texture_immediate(
+            &font_texture,
+            &debug_font_data::FONT_BITMAP
         );
 
         Ok(DebugRenderer {
             font_vertices: Vec::new(),
             font_indices: Vec::new(),
             line_vertices: Vec::new(),
             tri_vao,
             tri_vertices: Vec::new(),
--- a/gfx/webrender/src/device/gl.rs
+++ b/gfx/webrender/src/device/gl.rs
@@ -9,16 +9,17 @@ use api::TextureTarget;
 #[cfg(any(feature = "debug_renderer", feature="capture"))]
 use api::ImageDescriptor;
 use euclid::Transform3D;
 use gleam::gl;
 use internal_types::{FastHashMap, RenderTargetInfo};
 use log::Level;
 use smallvec::SmallVec;
 use std::cell::RefCell;
+use std::cmp;
 use std::fs::File;
 use std::io::Read;
 use std::marker::PhantomData;
 use std::mem;
 use std::ops::Add;
 use std::path::PathBuf;
 use std::ptr;
 use std::rc::Rc;
@@ -40,22 +41,16 @@ impl FrameId {
 impl Add<usize> for FrameId {
     type Output = FrameId;
 
     fn add(self, other: usize) -> FrameId {
         FrameId(self.0 + other)
     }
 }
 
-const GL_FORMAT_RGBA: gl::GLuint = gl::RGBA;
-
-const GL_FORMAT_BGRA_GL: gl::GLuint = gl::BGRA;
-
-const GL_FORMAT_BGRA_GLES: gl::GLuint = gl::BGRA_EXT;
-
 const SHADER_VERSION_GL: &str = "#version 150\n";
 const SHADER_VERSION_GLES: &str = "#version 300 es\n";
 
 const SHADER_KIND_VERTEX: &str = "#define WR_VERTEX_SHADER\n";
 const SHADER_KIND_FRAGMENT: &str = "#define WR_FRAGMENT_SHADER\n";
 const SHADER_IMPORT: &str = "#include ";
 
 pub struct TextureSlot(pub usize);
@@ -732,32 +727,39 @@ pub struct Device {
 
     device_pixel_ratio: f32,
     upload_method: UploadMethod,
 
     // HW or API capabilities
     #[cfg(feature = "debug_renderer")]
     capabilities: Capabilities,
 
-    bgra_format: gl::GLuint,
+    bgra_format_internal: gl::GLuint,
+    bgra_format_external: gl::GLuint,
 
     // debug
     inside_frame: bool,
 
     // resources
     resource_override_path: Option<PathBuf>,
 
     max_texture_size: u32,
     renderer_name: String,
     cached_programs: Option<Rc<ProgramCache>>,
 
     // Frame counter. This is used to map between CPU
     // frames and GPU frames.
     frame_id: FrameId,
 
+    /// Whether glTexStorage* is supported. We prefer this over glTexImage*
+    /// because it guarantees that mipmaps won't be generated (which they
+    /// otherwise are on some drivers, particularly ANGLE), If it's not
+    /// supported, we fall back to glTexImage*.
+    supports_texture_storage: bool,
+
     // GL extensions
     extensions: Vec<String>,
 }
 
 impl Device {
     pub fn new(
         gl: Rc<gl::Gl>,
         resource_override_path: Option<PathBuf>,
@@ -776,56 +778,77 @@ impl Device {
             gl.get_integer_v(gl::NUM_EXTENSIONS, &mut extension_count);
         }
         let extension_count = extension_count[0] as gl::GLuint;
         let mut extensions = Vec::new();
         for i in 0 .. extension_count {
             extensions.push(gl.get_string_i(gl::EXTENSIONS, i));
         }
 
+        // Our common-case image data in Firefox is BGRA, so we make an effort
+        // to use BGRA as the internal texture storage format to avoid the need
+        // to swizzle during upload. Currently we only do this on GLES (and thus
+        // for Windows, via ANGLE).
+        //
+        // On Mac, Apple docs [1] claim that BGRA is a more efficient internal
+        // format, so we may want to consider doing that at some point, since it
+        // would give us both a more efficient internal format and avoid the
+        // swizzling in the common case.
+        //
+        // We also need our internal format types to be sized, since glTexStorage*
+        // will reject non-sized internal format types.
+        //
+        // [1] https://developer.apple.com/library/archive/documentation/
+        //     GraphicsImaging/Conceptual/OpenGL-MacProgGuide/opengl_texturedata/
+        //     opengl_texturedata.html#//apple_ref/doc/uid/TP40001987-CH407-SW22
         let supports_bgra = supports_extension(&extensions, "GL_EXT_texture_format_BGRA8888");
-        let bgra_format = match gl.get_type() {
-            gl::GlType::Gl => GL_FORMAT_BGRA_GL,
-            gl::GlType::Gles => if supports_bgra {
-                GL_FORMAT_BGRA_GLES
-            } else {
-                GL_FORMAT_RGBA
-            }
+        let (bgra_format_internal, bgra_format_external) = if supports_bgra {
+            assert_eq!(gl.get_type(), gl::GlType::Gles, "gleam only detects bgra on gles");
+            (gl::BGRA8_EXT, gl::BGRA_EXT)
+        } else {
+            (gl::RGBA8, gl::BGRA)
+        };
+
+        let supports_texture_storage = match gl.get_type() {
+            gl::GlType::Gl => supports_extension(&extensions, "GL_ARB_texture_storage"),
+            gl::GlType::Gles => true,
         };
 
         Device {
             gl,
             resource_override_path,
             // This is initialized to 1 by default, but it is reset
             // at the beginning of each frame in `Renderer::bind_frame_data`.
             device_pixel_ratio: 1.0,
             upload_method,
             inside_frame: false,
 
             #[cfg(feature = "debug_renderer")]
             capabilities: Capabilities {
                 supports_multisampling: false, //TODO
             },
 
-            bgra_format,
+            bgra_format_internal,
+            bgra_format_external,
 
             bound_textures: [0; 16],
             bound_program: 0,
             bound_vao: 0,
             bound_read_fbo: FBOId(0),
             bound_draw_fbo: FBOId(0),
             program_mode_id: UniformLocation::INVALID,
             default_read_fbo: 0,
             default_draw_fbo: 0,
 
             max_texture_size,
             renderer_name,
             cached_programs,
             frame_id: FrameId(0),
             extensions,
+            supports_texture_storage,
         }
     }
 
     pub fn gl(&self) -> &gl::Gl {
         &*self.gl
     }
 
     pub fn rc_gl(&self) -> &Rc<gl::Gl> {
@@ -1177,26 +1200,25 @@ impl Device {
 
         if self.bound_program != program.id {
             self.gl.use_program(program.id);
             self.bound_program = program.id;
             self.program_mode_id = UniformLocation(program.u_mode);
         }
     }
 
-    pub fn create_texture<T: Texel>(
+    pub fn create_texture(
         &mut self,
         target: TextureTarget,
         format: ImageFormat,
         mut width: u32,
         mut height: u32,
         filter: TextureFilter,
         render_target: Option<RenderTargetInfo>,
         layer_count: i32,
-        pixels: Option<&[T]>,
     ) -> Texture {
         debug_assert!(self.inside_frame);
 
         if width > self.max_texture_size || height > self.max_texture_size {
             error!("Attempting to allocate a texture of size {}x{} above the limit, trimming", width, height);
             width = width.min(self.max_texture_size);
             height = height.min(self.max_texture_size);
         }
@@ -1215,46 +1237,79 @@ impl Device {
             depth_rb: None,
             last_frame_used: self.frame_id,
         };
         self.bind_texture(DEFAULT_TEXTURE, &texture);
         self.set_texture_parameters(texture.target, filter);
 
         // Allocate storage.
         let desc = self.gl_describe_format(texture.format);
-        match texture.target {
-            gl::TEXTURE_2D_ARRAY => {
+        let is_array = match texture.target {
+            gl::TEXTURE_2D_ARRAY => true,
+            gl::TEXTURE_2D | gl::TEXTURE_RECTANGLE | gl::TEXTURE_EXTERNAL_OES => false,
+            _ => panic!("BUG: Unexpected texture target!"),
+        };
+        assert!(is_array || texture.layer_count == 1);
+
+        // Firefox doesn't use mipmaps, but Servo uses them for standalone image
+        // textures images larger than 512 pixels. This is the only case where
+        // we set the filter to trilinear.
+        let mipmap_levels =  if texture.filter == TextureFilter::Trilinear {
+            let max_dimension = cmp::max(width, height);
+            ((max_dimension) as f64).log2() as gl::GLint + 1
+        } else {
+            1
+        };
+
+        // Use glTexStorage where available, since it avoids allocating
+        // unnecessary mipmap storage and generally improves performance with
+        // stronger invariants.
+        match (self.supports_texture_storage, is_array) {
+            (true, true) =>
+                self.gl.tex_storage_3d(
+                    gl::TEXTURE_2D_ARRAY,
+                    mipmap_levels,
+                    desc.internal,
+                    texture.width as gl::GLint,
+                    texture.height as gl::GLint,
+                    texture.layer_count,
+                ),
+            (true, false) =>
+                self.gl.tex_storage_2d(
+                    texture.target,
+                    mipmap_levels,
+                    desc.internal,
+                    texture.width as gl::GLint,
+                    texture.height as gl::GLint,
+                ),
+            (false, true) =>
                 self.gl.tex_image_3d(
                     gl::TEXTURE_2D_ARRAY,
                     0,
-                    desc.internal,
-                    texture.width as _,
-                    texture.height as _,
+                    desc.internal as gl::GLint,
+                    texture.width as gl::GLint,
+                    texture.height as gl::GLint,
                     texture.layer_count,
                     0,
                     desc.external,
                     desc.pixel_type,
-                    pixels.map(texels_to_u8_slice),
-                )
-            }
-            gl::TEXTURE_2D | gl::TEXTURE_RECTANGLE | gl::TEXTURE_EXTERNAL_OES => {
-                assert_eq!(texture.layer_count, 1);
+                    None,
+                ),
+            (false, false) =>
                 self.gl.tex_image_2d(
                     texture.target,
                     0,
-                    desc.internal,
-                    texture.width as _,
-                    texture.height as _,
+                    desc.internal as gl::GLint,
+                    texture.width as gl::GLint,
+                    texture.height as gl::GLint,
                     0,
                     desc.external,
                     desc.pixel_type,
-                    pixels.map(texels_to_u8_slice),
-                )
-            },
-            _ => panic!("BUG: Unexpected texture target!"),
+                    None,
+                ),
         }
 
         // Set up FBOs, if required.
         if let Some(rt_info) = render_target {
             self.init_fbos(&mut texture, rt_info);
         }
 
         texture
@@ -1297,16 +1352,36 @@ impl Device {
         for (read_fbo, draw_fbo) in src.fbo_ids.iter().zip(&dst.fbo_ids) {
             self.bind_read_target_impl(*read_fbo);
             self.bind_draw_target_impl(*draw_fbo);
             self.blit_render_target(rect, rect);
         }
         self.bind_read_target(None);
     }
 
+    /// Notifies the device that the contents of a render target are no longer
+    /// needed.
+    pub fn invalidate_render_target(&mut self, texture: &Texture) {
+        let attachments: &[gl::GLenum] = if texture.has_depth() {
+            &[gl::COLOR_ATTACHMENT0, gl::DEPTH_ATTACHMENT]
+        } else {
+            &[gl::COLOR_ATTACHMENT0]
+        };
+
+        let original_bound_fbo = self.bound_draw_fbo;
+        for fbo_id in texture.fbo_ids.iter() {
+            // Note: The invalidate extension may not be supported, in which
+            // case this is a no-op. That's ok though, because it's just a
+            // hint.
+            self.bind_external_draw_target(*fbo_id);
+            self.gl.invalidate_framebuffer(gl::FRAMEBUFFER, attachments);
+        }
+        self.bind_external_draw_target(original_bound_fbo);
+    }
+
     /// Notifies the device that a render target is about to be reused.
     ///
     /// This method adds or removes a depth target as necessary.
     pub fn reuse_render_target<T: Texel>(
         &mut self,
         texture: &mut Texture,
         rt_info: RenderTargetInfo,
     ) {
@@ -1574,24 +1649,63 @@ impl Device {
                 }
                 Some(PixelBuffer::new(hint.to_gl(), upload_size))
             },
         };
 
         TextureUploader {
             target: UploadTarget {
                 gl: &*self.gl,
-                bgra_format: self.bgra_format,
+                bgra_format: self.bgra_format_external,
                 texture,
             },
             buffer,
             marker: PhantomData,
         }
     }
 
+    /// Performs an immediate (non-PBO) texture upload.
+    pub fn upload_texture_immediate<T: Texel>(
+        &mut self,
+        texture: &Texture,
+        pixels: &[T]
+    ) {
+        self.bind_texture(DEFAULT_TEXTURE, texture);
+        let desc = self.gl_describe_format(texture.format);
+        match texture.target {
+            gl::TEXTURE_2D | gl::TEXTURE_RECTANGLE | gl::TEXTURE_EXTERNAL_OES =>
+                self.gl.tex_sub_image_2d(
+                    texture.target,
+                    0,
+                    0,
+                    0,
+                    texture.width as gl::GLint,
+                    texture.height as gl::GLint,
+                    desc.external,
+                    desc.pixel_type,
+                    texels_to_u8_slice(pixels),
+                ),
+            gl::TEXTURE_2D_ARRAY =>
+                self.gl.tex_sub_image_3d(
+                    texture.target,
+                    0,
+                    0,
+                    0,
+                    0,
+                    texture.width as gl::GLint,
+                    texture.height as gl::GLint,
+                    texture.layer_count as gl::GLint,
+                    desc.external,
+                    desc.pixel_type,
+                    texels_to_u8_slice(pixels),
+                ),
+            _ => panic!("BUG: Unexpected texture target!"),
+        }
+    }
+
     #[cfg(any(feature = "debug_renderer", feature = "capture"))]
     pub fn read_pixels(&mut self, img_desc: &ImageDescriptor) -> Vec<u8> {
         let desc = self.gl_describe_format(img_desc.format);
         self.gl.read_pixels(
             0, 0,
             img_desc.size.width as i32,
             img_desc.size.height as i32,
             desc.external,
@@ -1608,17 +1722,17 @@ impl Device {
     ) {
         let (bytes_per_pixel, desc) = match format {
             ReadPixelsFormat::Standard(imf) => {
                 (imf.bytes_per_pixel(), self.gl_describe_format(imf))
             }
             ReadPixelsFormat::Rgba8 => {
                 (4, FormatDesc {
                     external: gl::RGBA,
-                    internal: gl::RGBA8 as _,
+                    internal: gl::RGBA8,
                     pixel_type: gl::UNSIGNED_BYTE,
                 })
             }
         };
         let size_in_bytes = (bytes_per_pixel * rect.size.width * rect.size.height) as usize;
         assert_eq!(output.len(), size_in_bytes);
 
         self.gl.flush();
@@ -2165,57 +2279,53 @@ impl Device {
             };
             log!(level, "({}) {}", ty, msg.message);
         }
     }
 
     fn gl_describe_format(&self, format: ImageFormat) -> FormatDesc {
         match format {
             ImageFormat::R8 => FormatDesc {
-                internal: gl::R8 as _,
+                internal: gl::R8,
                 external: gl::RED,
                 pixel_type: gl::UNSIGNED_BYTE,
             },
             ImageFormat::R16 => FormatDesc {
-                internal: gl::R16 as _,
+                internal: gl::R16,
                 external: gl::RED,
                 pixel_type: gl::UNSIGNED_SHORT,
             },
             ImageFormat::BGRA8 => {
-                let external = self.bgra_format;
                 FormatDesc {
-                    internal: match self.gl.get_type() {
-                        gl::GlType::Gl => gl::RGBA as _,
-                        gl::GlType::Gles => external as _,
-                    },
-                    external,
+                    internal: self.bgra_format_internal,
+                    external: self.bgra_format_external,
                     pixel_type: gl::UNSIGNED_BYTE,
                 }
             },
             ImageFormat::RGBAF32 => FormatDesc {
-                internal: gl::RGBA32F as _,
+                internal: gl::RGBA32F,
                 external: gl::RGBA,
                 pixel_type: gl::FLOAT,
             },
             ImageFormat::RGBAI32 => FormatDesc {
-                internal: gl::RGBA32I as _,
+                internal: gl::RGBA32I,
                 external: gl::RGBA_INTEGER,
                 pixel_type: gl::INT,
             },
             ImageFormat::RG8 => FormatDesc {
-                internal: gl::RG8 as _,
+                internal: gl::RG8,
                 external: gl::RG,
                 pixel_type: gl::UNSIGNED_BYTE,
             },
         }
     }
 }
 
 struct FormatDesc {
-    internal: gl::GLint,
+    internal: gl::GLenum,
     external: gl::GLuint,
     pixel_type: gl::GLuint,
 }
 
 struct UploadChunk {
     rect: DeviceUintRect,
     layer_index: i32,
     stride: Option<u32>,
--- a/gfx/webrender/src/gpu_glyph_renderer.rs
+++ b/gfx/webrender/src/gpu_glyph_renderer.rs
@@ -57,18 +57,18 @@ impl GpuGlyphRenderer {
         let area_lut_texture = device.create_texture(
             TextureTarget::Default,
             ImageFormat::R8,
             area_lut_width,
             area_lut_height,
             TextureFilter::Linear,
             None,
             1,
-            Some(area_lut_pixels)
         );
+        device.upload_texture_immediate(&area_lut_texture, area_lut_pixels);
 
         let vector_stencil_vao =
             device.create_vao_with_new_instances(&renderer::desc::VECTOR_STENCIL, prim_vao);
         let vector_cover_vao = device.create_vao_with_new_instances(&renderer::desc::VECTOR_COVER,
                                                                     prim_vao);
 
         // Load Pathfinder vector graphics shaders.
         let vector_stencil = try!{
@@ -105,27 +105,26 @@ impl Renderer {
                           stats: &mut RendererStats)
                           -> Option<StenciledGlyphPage> {
         if glyphs.is_empty() {
             return None
         }
 
         let _timer = self.gpu_profile.start_timer(GPU_TAG_GLYPH_STENCIL);
 
-        let texture = self.device.create_texture::<f32>(
+        let texture = self.device.create_texture(
             TextureTarget::Default,
             ImageFormat::RGBAF32,
             target_size.width,
             target_size.height,
             TextureFilter::Nearest,
             Some(RenderTargetInfo {
                 has_depth: false,
             }),
             1,
-            None
         );
 
         // Initialize temporary framebuffer.
         // FIXME(pcwalton): Cache this!
         // FIXME(pcwalton): Use RF32, not RGBAF32!
         let mut current_page = StenciledGlyphPage {
             texture,
             glyphs: vec![],
@@ -183,18 +182,18 @@ impl Renderer {
         let path_info_texture = self.device.create_texture(
             TextureTarget::Default,
             ImageFormat::RGBAF32,
             3,
             glyphs.len() as u32,
             TextureFilter::Nearest,
             None,
             1,
-            Some(&path_info_texels)
         );
+        self.device.upload_texture_immediate(&path_info_texture, &path_info_texels);
 
         self.gpu_glyph_renderer.vector_stencil.bind(&mut self.device,
                                                     projection,
                                                     &mut self.renderer_errors);
 
         self.device.bind_draw_target(Some((&current_page.texture, 0)), Some(*target_size));
         self.device.clear_target(Some([0.0, 0.0, 0.0, 0.0]), None, None);
 
--- a/gfx/webrender/src/renderer.rs
+++ b/gfx/webrender/src/renderer.rs
@@ -808,25 +808,24 @@ struct TextureResolver {
     /// See the comments in `allocate_target_texture` for more insight on why
     /// reuse is a win.
     render_target_pool: Vec<Texture>,
 }
 
 impl TextureResolver {
     fn new(device: &mut Device) -> TextureResolver {
         let dummy_cache_texture = device
-            .create_texture::<u8>(
+            .create_texture(
                 TextureTarget::Array,
                 ImageFormat::BGRA8,
                 1,
                 1,
                 TextureFilter::Linear,
                 None,
                 1,
-                None,
             );
 
         TextureResolver {
             texture_cache_map: FastHashMap::default(),
             external_images: FastHashMap::default(),
             dummy_cache_texture,
             prev_pass_alpha: None,
             prev_pass_color: None,
@@ -850,69 +849,78 @@ impl TextureResolver {
     fn begin_frame(&mut self) {
         assert!(self.prev_pass_color.is_none());
         assert!(self.prev_pass_alpha.is_none());
         assert!(self.saved_targets.is_empty());
     }
 
     fn end_frame(&mut self, device: &mut Device, frame_id: FrameId) {
         // return the cached targets to the pool
-        self.end_pass(None, None);
+        self.end_pass(device, None, None);
         // return the saved targets as well
-        self.render_target_pool.extend(self.saved_targets.drain(..));
+        while let Some(target) = self.saved_targets.pop() {
+            self.return_to_pool(device, target);
+        }
 
         // GC the render target pool.
         //
         // We use a simple scheme whereby we drop any texture that hasn't been used
         // in the last 30 frames. This should generally prevent any sustained build-
         // up of unused textures, unless we don't generate frames for a long period.
         // This can happen when the window is minimized, and we probably want to
         // flush all the WebRender caches in that case [1].
         //
         // [1] https://bugzilla.mozilla.org/show_bug.cgi?id=1494099
         self.retain_targets(device, |texture| texture.used_recently(frame_id, 30));
     }
 
+    /// Transfers ownership of a render target back to the pool.
+    fn return_to_pool(&mut self, device: &mut Device, target: Texture) {
+        device.invalidate_render_target(&target);
+        self.render_target_pool.push(target);
+    }
+
     /// Drops all targets from the render target pool that do not satisfy the predicate.
     pub fn retain_targets<F: Fn(&Texture) -> bool>(&mut self, device: &mut Device, f: F) {
         // We can't just use retain() because `Texture` requires manual cleanup.
         let mut tmp = SmallVec::<[Texture; 8]>::new();
         for target in self.render_target_pool.drain(..) {
             if f(&target) {
                 tmp.push(target);
             } else {
                 device.delete_texture(target);
             }
         }
         self.render_target_pool.extend(tmp);
     }
 
     fn end_pass(
         &mut self,
+        device: &mut Device,
         a8_texture: Option<ActiveTexture>,
         rgba8_texture: Option<ActiveTexture>,
     ) {
         // If we have cache textures from previous pass, return them to the pool.
         // Also assign the pool index of those cache textures to last pass's index because this is
         // the result of last pass.
         // Note: the order here is important, needs to match the logic in `RenderPass::build()`.
         if let Some(at) = self.prev_pass_color.take() {
             if let Some(index) = at.saved_index {
                 assert_eq!(self.saved_targets.len(), index.0);
                 self.saved_targets.push(at.texture);
             } else {
-                self.render_target_pool.push(at.texture);
+                self.return_to_pool(device, at.texture);
             }
         }
         if let Some(at) = self.prev_pass_alpha.take() {
             if let Some(index) = at.saved_index {
                 assert_eq!(self.saved_targets.len(), index.0);
                 self.saved_targets.push(at.texture);
             } else {
-                self.render_target_pool.push(at.texture);
+                self.return_to_pool(device, at.texture);
             }
         }
 
         // We have another pass to process, make these textures available
         // as inputs to the next pass.
         self.prev_pass_color = rgba8_texture;
         self.prev_pass_alpha = a8_texture;
     }
@@ -1098,25 +1106,24 @@ impl GpuCacheTexture {
             if rt_info.is_some() {
                 blit_source = Some(t);
             } else {
                 device.delete_texture(t);
             }
         }
 
         // Create the new texture.
-        let mut texture = device.create_texture::<u8>(
+        let mut texture = device.create_texture(
             TextureTarget::Default,
             ImageFormat::RGBAF32,
             new_size.width,
             new_size.height,
             TextureFilter::Nearest,
             rt_info,
             1,
-            None,
         );
 
         // Blit the contents of the previous texture, if applicable.
         if let Some(blit_source) = blit_source {
             device.blit_renderable_texture(&mut texture, &blit_source);
             device.delete_texture(blit_source);
         }
 
@@ -1395,25 +1402,24 @@ impl VertexDataTexture {
         // Create a new texture if needed.
         if needed_height > existing_height {
             // Drop the existing texture, if any.
             if let Some(t) = self.texture.take() {
                 device.delete_texture(t);
             }
             let new_height = (needed_height + 127) & !127;
 
-            let texture = device.create_texture::<u8>(
+            let texture = device.create_texture(
                 TextureTarget::Default,
                 self.format,
                 width,
                 new_height,
                 TextureFilter::Nearest,
                 None,
                 1,
-                None,
             );
             self.texture = Some(texture);
         }
 
         let rect = DeviceUintRect::new(
             DeviceUintPoint::zero(),
             DeviceUintSize::new(width, needed_height),
         );
@@ -1751,26 +1757,26 @@ impl Renderer {
                 38,
                 22,
                 41,
                 25,
                 37,
                 21,
             ];
 
-            let mut texture = device.create_texture::<u8>(
+            let mut texture = device.create_texture(
                 TextureTarget::Default,
                 ImageFormat::R8,
                 8,
                 8,
                 TextureFilter::Nearest,
                 None,
                 1,
-                Some(&dither_matrix),
             );
+            device.upload_texture_immediate(&texture, &dither_matrix);
 
             Some(texture)
         } else {
             None
         };
 
         let x0 = 0.0;
         let y0 = 0.0;
@@ -2769,25 +2775,24 @@ impl Renderer {
                         format,
                         filter,
                         render_target,
                     } => {
                         // Create a new native texture, as requested by the texture cache.
                         //
                         // Ensure no PBO is bound when creating the texture storage,
                         // or GL will attempt to read data from there.
-                        let texture = self.device.create_texture::<u8>(
+                        let texture = self.device.create_texture(
                             TextureTarget::Array,
                             format,
                             width,
                             height,
                             filter,
                             render_target,
                             layer_count,
-                            None,
                         );
                         self.texture_resolver.texture_cache_map.insert(update.id, texture);
                     }
                     TextureUpdateOp::Update {
                         rect,
                         source,
                         stride,
                         layer_index,
@@ -3810,27 +3815,25 @@ impl Renderer {
 
         let rt_info = RenderTargetInfo { has_depth: list.needs_depth() };
         let texture = if let Some(idx) = index {
             let mut t = self.texture_resolver.render_target_pool.swap_remove(idx);
             self.device.reuse_render_target::<u8>(&mut t, rt_info);
             t
         } else {
             counters.targets_created.inc();
-            let mut t = self.device.create_texture::<u8>(
+            self.device.create_texture(
                 TextureTarget::Array,
                 list.format,
                 list.max_size.width,
                 list.max_size.height,
                 TextureFilter::Linear,
                 Some(rt_info),
                 list.targets.len() as _,
-                None,
-            );
-            t
+            )
         };
 
         list.check_ready(&texture);
         Some(ActiveTexture {
             texture,
             saved_index: list.saved_index.clone(),
         })
     }
@@ -4011,16 +4014,17 @@ impl Renderer {
                         );
                     }
 
                     (alpha_tex, color_tex)
                 }
             };
 
             self.texture_resolver.end_pass(
+                &mut self.device,
                 cur_alpha,
                 cur_color,
             );
         }
 
         self.texture_resolver.end_frame(&mut self.device, frame_id);
 
         #[cfg(feature = "debug_renderer")]
@@ -4758,18 +4762,18 @@ impl Renderer {
         let texture = device.create_texture(
             target,
             plain.format,
             plain.size.0,
             plain.size.1,
             plain.filter,
             plain.render_target,
             plain.size.2,
-            Some(texels.as_slice()),
         );
+        device.upload_texture_immediate(&texture, &texels);
 
         (texture, texels)
     }
 
     #[cfg(feature = "capture")]
     fn save_capture(
         &mut self,
         config: CaptureConfig,
--- a/gfx/webrender_bindings/Cargo.toml
+++ b/gfx/webrender_bindings/Cargo.toml
@@ -4,17 +4,17 @@ version = "0.1.0"
 authors = ["The Mozilla Project Developers"]
 license = "MPL-2.0"
 
 [dependencies]
 rayon = "1"
 thread_profiler = "0.1.1"
 euclid = { version = "0.19", features = ["serde"] }
 app_units = "0.7"
-gleam = "0.6.2"
+gleam = "0.6.3"
 log = "0.4"
 nsstring = { path = "../../servo/support/gecko/nsstring" }
 bincode = "1.0"
 uuid = { version = "0.5", features = ["v4"] }
 fxhash = "0.2.1"
 
 [dependencies.webrender]
 path = "../webrender"
--- a/gfx/webrender_bindings/revision.txt
+++ b/gfx/webrender_bindings/revision.txt
@@ -1,1 +1,1 @@
-7aa1d42ad41097b68e8026e3384127242601c95b
+5adc86c19cbef6697975ea078fa0d10635e5d660
--- a/image/MultipartImage.cpp
+++ b/image/MultipartImage.cpp
@@ -36,24 +36,26 @@ public:
     mImage = aImage;
 
     RefPtr<ProgressTracker> tracker = mImage->GetProgressTracker();
     tracker->AddObserver(this);
   }
 
   void BlockUntilDecodedAndFinishObserving()
   {
-    // Use GetFrame() to block until our image finishes decoding.
-    RefPtr<SourceSurface> surface =
-      mImage->GetFrame(imgIContainer::FRAME_CURRENT,
-                       imgIContainer::FLAG_SYNC_DECODE);
+    // Use RequestDecodeForSize() to block until our image finishes decoding.
+    // The size is ignored because we don't pass the FLAG_HIGH_QUALITY_SCALING
+    // flag.
+    mImage->RequestDecodeForSize(gfx::IntSize(0, 0),
+                                 imgIContainer::FLAG_SYNC_DECODE);
 
-    // GetFrame() should've sent synchronous notifications that would have
-    // caused us to call FinishObserving() (and null out mImage) already. If for
-    // some reason it didn't, we should do so here.
+
+    // RequestDecodeForSize() should've sent synchronous notifications that
+    // would have caused us to call FinishObserving() (and null out mImage)
+    // already. If for some reason it didn't, we should do so here.
     if (mImage) {
       FinishObserving();
     }
   }
 
   virtual void Notify(int32_t aType,
                       const nsIntRect* aRect = nullptr) override
   {
--- a/js/src/frontend/BinSource-auto.cpp
+++ b/js/src/frontend/BinSource-auto.cpp
@@ -2082,17 +2082,17 @@ BinASTParser<Tok>::parseInterfaceAsserte
     }
     auto result = Ok();
     return result;
 }
 
 
 /*
  interface AssertedBoundName : Node {
-    IdentifierName name;
+    [IdentifierName] string name;
     bool isCaptured;
  }
 */
 template<typename Tok> JS::Result<Ok>
 BinASTParser<Tok>::parseAssertedBoundName(
         AssertedScopeKind scopeKind)
 {
     BinKind kind;
@@ -2119,17 +2119,17 @@ BinASTParser<Tok>::parseInterfaceAsserte
     BINJS_TRY(CheckRecursionLimit(cx_));
 
 #if defined(DEBUG)
     const BinField expected_fields[2] = { BinField::Name, BinField::IsCaptured };
     MOZ_TRY(tokenizer_->checkFields(kind, fields, expected_fields));
 #endif // defined(DEBUG)
 
     RootedAtom name(cx_);
-    MOZ_TRY_VAR(name, tokenizer_->readAtom());
+    MOZ_TRY_VAR(name, tokenizer_->readIdentifierName());
 
     BINJS_MOZ_TRY_DECL(isCaptured, tokenizer_->readBool());
     ParseContext::Scope* scope;
     DeclarationKind declKind;
     MOZ_TRY(getBoundScope(scopeKind, scope, declKind));
     MOZ_TRY(addScopeName(scopeKind, name, scope, declKind, isCaptured));
     auto result = Ok();
     return result;
@@ -2187,17 +2187,17 @@ BinASTParser<Tok>::parseInterfaceAsserte
     }
     auto result = Ok();
     return result;
 }
 
 
 /*
  interface AssertedDeclaredName : Node {
-    IdentifierName name;
+    [IdentifierName] string name;
     AssertedDeclaredKind kind;
     bool isCaptured;
  }
 */
 template<typename Tok> JS::Result<Ok>
 BinASTParser<Tok>::parseAssertedDeclaredName(
         AssertedScopeKind scopeKind)
 {
@@ -2225,17 +2225,17 @@ BinASTParser<Tok>::parseInterfaceAsserte
     BINJS_TRY(CheckRecursionLimit(cx_));
 
 #if defined(DEBUG)
     const BinField expected_fields[3] = { BinField::Name, BinField::Kind, BinField::IsCaptured };
     MOZ_TRY(tokenizer_->checkFields(kind, fields, expected_fields));
 #endif // defined(DEBUG)
 
     RootedAtom name(cx_);
-    MOZ_TRY_VAR(name, tokenizer_->readAtom());
+    MOZ_TRY_VAR(name, tokenizer_->readIdentifierName());
 
     BINJS_MOZ_TRY_DECL(kind_, parseAssertedDeclaredKind());
 
     BINJS_MOZ_TRY_DECL(isCaptured, tokenizer_->readBool());
     ParseContext::Scope* scope;
     DeclarationKind declKind;
     MOZ_TRY(getDeclaredScope(scopeKind, kind_, scope, declKind));
     MOZ_TRY(addScopeName(scopeKind, name, scope, declKind, isCaptured));
@@ -2252,17 +2252,17 @@ BinASTParser<Tok>::parseInterfaceAsserte
     BINJS_TRY(CheckRecursionLimit(cx_));
 
 #if defined(DEBUG)
     const BinField expected_fields[2] = { BinField::Name, BinField::IsCaptured };
     MOZ_TRY(tokenizer_->checkFields(kind, fields, expected_fields));
 #endif // defined(DEBUG)
 
     RootedAtom name(cx_);
-    MOZ_TRY_VAR(name, tokenizer_->readAtom());
+    MOZ_TRY_VAR(name, tokenizer_->readIdentifierName());
 
     BINJS_MOZ_TRY_DECL(isCaptured, tokenizer_->readBool());
     ParseContext::Scope* scope;
     DeclarationKind declKind;
     MOZ_TRY(getBoundScope(scopeKind, scope, declKind));
     MOZ_TRY(addScopeName(scopeKind, name, scope, declKind, isCaptured));
     auto result = Ok();
     return result;
@@ -2339,17 +2339,17 @@ BinASTParser<Tok>::parseInterfaceAsserte
 #if defined(DEBUG)
     const BinField expected_fields[3] = { BinField::Index, BinField::Name, BinField::IsCaptured };
     MOZ_TRY(tokenizer_->checkFields(kind, fields, expected_fields));
 #endif // defined(DEBUG)
 
     BINJS_MOZ_TRY_DECL(index, tokenizer_->readUnsignedLong());
 
     RootedAtom name(cx_);
-    MOZ_TRY_VAR(name, tokenizer_->readAtom());
+    MOZ_TRY_VAR(name, tokenizer_->readIdentifierName());
     // FIXME: The following checks should be performed inside
     // checkPositionalParameterIndices to match the spec's order
     // (bug 1490976).
     if (index >= positionalParams.get().length()) {
         return raiseError("AssertedPositionalParameterName.length out of range");
     }
     if (positionalParams.get()[index]) {
         return raiseError("AssertedPositionalParameterName has duplicate entry for the same index");
@@ -2373,17 +2373,17 @@ BinASTParser<Tok>::parseInterfaceAsserte
     BINJS_TRY(CheckRecursionLimit(cx_));
 
 #if defined(DEBUG)
     const BinField expected_fields[2] = { BinField::Name, BinField::IsCaptured };
     MOZ_TRY(tokenizer_->checkFields(kind, fields, expected_fields));
 #endif // defined(DEBUG)
 
     RootedAtom name(cx_);
-    MOZ_TRY_VAR(name, tokenizer_->readAtom());
+    MOZ_TRY_VAR(name, tokenizer_->readIdentifierName());
 
     BINJS_MOZ_TRY_DECL(isCaptured, tokenizer_->readBool());
     ParseContext::Scope* scope;
     DeclarationKind declKind;
     MOZ_TRY(getBoundScope(scopeKind, scope, declKind));
     MOZ_TRY(addScopeName(scopeKind, name, scope, declKind, isCaptured));
     auto result = Ok();
     return result;
@@ -2514,17 +2514,17 @@ BinASTParser<Tok>::parseInterfaceAssignm
 
     BINJS_TRY_DECL(result, factory_.newAssignment(ParseNodeKind::Assign, binding, expression));
     return result;
 }
 
 
 /*
  interface AssignmentTargetIdentifier : Node {
-    Identifier name;
+    [IdentifierName] string name;
  }
 */
 template<typename Tok> JS::Result<ParseNode*>
 BinASTParser<Tok>::parseAssignmentTargetIdentifier()
 {
     BinKind kind;
     BinFields fields(cx_);
     AutoTaggedTuple guard(*tokenizer_);
@@ -2547,17 +2547,17 @@ BinASTParser<Tok>::parseInterfaceAssignm
     BINJS_TRY(CheckRecursionLimit(cx_));
 
 #if defined(DEBUG)
     const BinField expected_fields[1] = { BinField::Name };
     MOZ_TRY(tokenizer_->checkFields(kind, fields, expected_fields));
 #endif // defined(DEBUG)
 
     RootedAtom name(cx_);
-    MOZ_TRY_VAR(name, tokenizer_->readAtom());
+    MOZ_TRY_VAR(name, tokenizer_->readIdentifierName());
 
     if (!IsIdentifier(name)) {
         return raiseError("Invalid identifier");
     }
     BINJS_TRY(usedNames_.noteUse(cx_, name, parseContext_->scriptId(), parseContext_->innermostScope()->id()));
     BINJS_TRY_DECL(result, factory_.newName(name->asPropertyName(), tokenizer_->pos(start), cx_));
     return result;
 }
@@ -2697,17 +2697,17 @@ BinASTParser<Tok>::parseInterfaceBinaryE
         result = list;
     }
     return result;
 }
 
 
 /*
  interface BindingIdentifier : Node {
-    Identifier name;
+    [IdentifierName] string name;
  }
 */
 template<typename Tok> JS::Result<ParseNode*>
 BinASTParser<Tok>::parseBindingIdentifier()
 {
     BinKind kind;
     BinFields fields(cx_);
     AutoTaggedTuple guard(*tokenizer_);
@@ -2730,17 +2730,17 @@ BinASTParser<Tok>::parseInterfaceBinding
     BINJS_TRY(CheckRecursionLimit(cx_));
 
 #if defined(DEBUG)
     const BinField expected_fields[1] = { BinField::Name };
     MOZ_TRY(tokenizer_->checkFields(kind, fields, expected_fields));
 #endif // defined(DEBUG)
 
     RootedAtom name(cx_);
-    MOZ_TRY_VAR(name, tokenizer_->readAtom());
+    MOZ_TRY_VAR(name, tokenizer_->readIdentifierName());
 
     if (!IsIdentifier(name)) {
         return raiseError("Invalid identifier");
     }
     BINJS_TRY_DECL(result, factory_.newName(name->asPropertyName(), tokenizer_->pos(start), cx_));
     return result;
 }
 
@@ -3536,18 +3536,18 @@ template<typename Tok> JS::Result<ParseN
 BinASTParser<Tok>::parseInterfaceExportFrom(const size_t start, const BinKind kind, const BinFields& fields)
 {
     return raiseError("FIXME: Not implemented yet (ExportFrom)");
 }
 
 
 /*
  interface ExportFromSpecifier : Node {
-    IdentifierName name;
-    IdentifierName? exportedName;
+    [IdentifierName] string name;
+    [IdentifierName] string? exportedName;
  }
 */
 template<typename Tok> JS::Result<ParseNode*>
 BinASTParser<Tok>::parseExportFromSpecifier()
 {
     BinKind kind;
     BinFields fields(cx_);
     AutoTaggedTuple guard(*tokenizer_);
@@ -3568,17 +3568,17 @@ BinASTParser<Tok>::parseInterfaceExportF
 {
     return raiseError("FIXME: Not implemented yet (ExportFromSpecifier)");
 }
 
 
 /*
  interface ExportLocalSpecifier : Node {
     IdentifierExpression name;
-    IdentifierName? exportedName;
+    [PropertyKey] string? exportedName;
  }
 */
 template<typename Tok> JS::Result<ParseNode*>
 BinASTParser<Tok>::parseExportLocalSpecifier()
 {
     BinKind kind;
     BinFields fields(cx_);
     AutoTaggedTuple guard(*tokenizer_);
@@ -3975,17 +3975,17 @@ BinASTParser<Tok>::parseInterfaceGetterC
     *bodyOut = body;
     auto result = Ok();
     return result;
 }
 
 
 /*
  interface IdentifierExpression : Node {
-    Identifier name;
+    [IdentifierName] string name;
  }
 */
 template<typename Tok> JS::Result<ParseNode*>
 BinASTParser<Tok>::parseIdentifierExpression()
 {
     BinKind kind;
     BinFields fields(cx_);
     AutoTaggedTuple guard(*tokenizer_);
@@ -4008,17 +4008,17 @@ BinASTParser<Tok>::parseInterfaceIdentif
     BINJS_TRY(CheckRecursionLimit(cx_));
 
 #if defined(DEBUG)
     const BinField expected_fields[1] = { BinField::Name };
     MOZ_TRY(tokenizer_->checkFields(kind, fields, expected_fields));
 #endif // defined(DEBUG)
 
     RootedAtom name(cx_);
-    MOZ_TRY_VAR(name, tokenizer_->readAtom());
+    MOZ_TRY_VAR(name, tokenizer_->readIdentifierName());
 
     if (!IsIdentifier(name)) {
         return raiseError("Invalid identifier");
     }
     BINJS_TRY(usedNames_.noteUse(cx_, name, parseContext_->scriptId(), parseContext_->innermostScope()->id()));
     BINJS_TRY_DECL(result, factory_.newName(name->asPropertyName(), tokenizer_->pos(start), cx_));
     return result;
 }
@@ -4054,17 +4054,17 @@ template<typename Tok> JS::Result<ParseN
 BinASTParser<Tok>::parseInterfaceImportNamespace(const size_t start, const BinKind kind, const BinFields& fields)
 {
     return raiseError("FIXME: Not implemented yet (ImportNamespace)");
 }
 
 
 /*
  interface ImportSpecifier : Node {
-    IdentifierName? name;
+    [PropertyKey] string? name;
     BindingIdentifier binding;
  }
 */
 template<typename Tok> JS::Result<ParseNode*>
 BinASTParser<Tok>::parseImportSpecifier()
 {
     BinKind kind;
     BinFields fields(cx_);
@@ -4595,17 +4595,17 @@ BinASTParser<Tok>::parseInterfaceStaticM
     MOZ_TRY(tokenizer_->checkFields(kind, fields, expected_fields));
 #endif // defined(DEBUG)
     size_t nameStart;
 
     BINJS_MOZ_TRY_DECL(object, parseExpressionOrSuper());
     RootedAtom property(cx_);
     {
         nameStart = tokenizer_->offset();
-        MOZ_TRY_VAR(property, tokenizer_->readAtom());
+        MOZ_TRY_VAR(property, tokenizer_->readPropertyKey());
 
     }
 
     BINJS_TRY_DECL(name, factory_.newPropertyName(property->asPropertyName(), tokenizer_->pos(nameStart)));
     BINJS_TRY_DECL(result, factory_.newPropertyAccess(object, name));
     return result;
 }
 
@@ -4620,17 +4620,17 @@ BinASTParser<Tok>::parseInterfaceStaticM
     MOZ_TRY(tokenizer_->checkFields(kind, fields, expected_fields));
 #endif // defined(DEBUG)
     size_t nameStart;
 
     BINJS_MOZ_TRY_DECL(object, parseExpressionOrSuper());
     RootedAtom property(cx_);
     {
         nameStart = tokenizer_->offset();
-        MOZ_TRY_VAR(property, tokenizer_->readAtom());
+        MOZ_TRY_VAR(property, tokenizer_->readPropertyKey());
 
     }
 
     BINJS_TRY_DECL(name, factory_.newPropertyName(property->asPropertyName(), tokenizer_->pos(nameStart)));
     BINJS_TRY_DECL(result, factory_.newPropertyAccess(object, name));
     return result;
 }
 
--- a/js/src/frontend/BinSource.webidl_
+++ b/js/src/frontend/BinSource.webidl_
@@ -1,14 +1,15 @@
 // Type aliases and enums.
 
 typedef FrozenArray<(SpreadElement or Expression)> Arguments;
 typedef DOMString string;
 typedef string Identifier;
 typedef string IdentifierName;
+typedef string PropertyKey;
 typedef string Label;
 
 enum VariableDeclarationKind {
   "var",
   "let",
   "const"
 };
 
--- a/js/src/frontend/BinToken.h
+++ b/js/src/frontend/BinToken.h
@@ -213,21 +213,23 @@ namespace frontend {
     F(OptionalAssignmentTarget, "OptionalAssignmentTarget") \
     F(OptionalBinding, "OptionalBinding") \
     F(OptionalBindingIdentifier, "OptionalBindingIdentifier") \
     F(OptionalBindingOrBindingWithInitializer, "OptionalBindingOrBindingWithInitializer") \
     F(OptionalCatchClause, "OptionalCatchClause") \
     F(OptionalExpression, "OptionalExpression") \
     F(OptionalIdentifierName, "OptionalIdentifierName") \
     F(OptionalLabel, "OptionalLabel") \
+    F(OptionalPropertyKey, "OptionalPropertyKey") \
     F(OptionalSpreadElementOrExpression, "OptionalSpreadElementOrExpression") \
     F(OptionalStatement, "OptionalStatement") \
     F(OptionalVariableDeclarationOrExpression, "OptionalVariableDeclarationOrExpression") \
     F(Parameter, "Parameter") \
     F(Program, "Program") \
+    F(PropertyKey, "PropertyKey") \
     F(PropertyName, "PropertyName") \
     F(ReturnStatement, "ReturnStatement") \
     F(Script, "Script") \
     F(Setter, "Setter") \
     F(SetterContents, "SetterContents") \
     F(ShorthandProperty, "ShorthandProperty") \
     F(SimpleAssignmentTarget, "SimpleAssignmentTarget") \
     F(SpreadElement, "SpreadElement") \
@@ -262,17 +264,17 @@ namespace frontend {
 
 enum class BinKind {
 #define EMIT_ENUM(name, _) name,
     FOR_EACH_BIN_KIND(EMIT_ENUM)
 #undef EMIT_ENUM
 };
 
 // The number of distinct values of BinKind.
-const size_t BINKIND_LIMIT = 198;
+const size_t BINKIND_LIMIT = 200;
 
 
 
 
 /**
  * The different variants of Binary AST string enums, as per
  * the specifications of Binary AST, as a single macro and
  * `enum class`.
--- a/js/src/frontend/BinTokenReaderMultipart.cpp
+++ b/js/src/frontend/BinTokenReaderMultipart.cpp
@@ -281,16 +281,36 @@ BinTokenReaderMultipart::readAtom()
 
     if (!maybe) {
         return raiseError("Empty string");
     }
 
     return maybe;
 }
 
+JS::Result<JSAtom*>
+BinTokenReaderMultipart::readMaybeIdentifierName() {
+    return readMaybeAtom();
+}
+
+JS::Result<JSAtom*>
+BinTokenReaderMultipart::readIdentifierName() {
+    return readAtom();
+}
+
+JS::Result<JSAtom*>
+BinTokenReaderMultipart::readMaybePropertyKey() {
+    return readMaybeAtom();
+}
+
+JS::Result<JSAtom*>
+BinTokenReaderMultipart::readPropertyKey() {
+    return readAtom();
+}
+
 JS::Result<Ok>
 BinTokenReaderMultipart::readChars(Chars& out)
 {
     updateLatestKnownGood();
     BINJS_MOZ_TRY_DECL(index, readInternalUint32());
 
     if (index >= metadata_->numStrings()) {
         return raiseError("Invalid index to strings table for string enum");
--- a/js/src/frontend/BinTokenReaderMultipart.h
+++ b/js/src/frontend/BinTokenReaderMultipart.h
@@ -92,16 +92,27 @@ class MOZ_STACK_CLASS BinTokenReaderMult
     /**
      * Read a single `string | null` value.
      *
      * Fails if that string is not valid UTF-8.
      */
     MOZ_MUST_USE JS::Result<JSAtom*> readMaybeAtom();
     MOZ_MUST_USE JS::Result<JSAtom*> readAtom();
 
+    /**
+     * Read a single IdentifierName value.
+     */
+    MOZ_MUST_USE JS::Result<JSAtom*> readMaybeIdentifierName();
+    MOZ_MUST_USE JS::Result<JSAtom*> readIdentifierName();
+
+    /**
+     * Read a single PropertyKey value.
+     */
+    MOZ_MUST_USE JS::Result<JSAtom*> readMaybePropertyKey();
+    MOZ_MUST_USE JS::Result<JSAtom*> readPropertyKey();
 
     /**
      * Read a single `string | null` value.
      *
      * MAY check if that string is not valid UTF-8.
      */
     MOZ_MUST_USE JS::Result<Ok> readChars(Chars&);
 
--- a/js/src/frontend/BinTokenReaderTester.cpp
+++ b/js/src/frontend/BinTokenReaderTester.cpp
@@ -135,16 +135,36 @@ BinTokenReaderTester::readMaybeAtom()
         BINJS_TRY_VAR(result, Atomize(cx_, (const char*)current_, byteLen));
     }
 
     current_ += byteLen;
     MOZ_TRY(readConst("</string>"));
     return result.get();
 }
 
+JS::Result<JSAtom*>
+BinTokenReaderTester::readMaybeIdentifierName() {
+    return readMaybeAtom();
+}
+
+JS::Result<JSAtom*>
+BinTokenReaderTester::readIdentifierName() {
+    return readAtom();
+}
+
+JS::Result<JSAtom*>
+BinTokenReaderTester::readMaybePropertyKey() {
+    return readMaybeAtom();
+}
+
+JS::Result<JSAtom*>
+BinTokenReaderTester::readPropertyKey() {
+    return readAtom();
+}
+
 
 // Nullable strings:
 // - "<string>" (not counted in byte length)
 // - byte length (not counted in byte length)
 // - bytes (UTF-8)
 // - "</string>" (not counted in byte length)
 //
 // The special sequence of bytes `[255, 0]` (which is an invalid UTF-8 sequence)
--- a/js/src/frontend/BinTokenReaderTester.h
+++ b/js/src/frontend/BinTokenReaderTester.h
@@ -122,16 +122,28 @@ class MOZ_STACK_CLASS BinTokenReaderTest
      * Read a single `string` value.
      *
      * Fails if that string is not valid UTF-8 or in case of `null` string.
      *
      * The returned `JSAtom*` is never `nullptr`.
      */
     MOZ_MUST_USE JS::Result<JSAtom*> readAtom();
 
+    /**
+     * Read a single IdentifierName value.
+     */
+    MOZ_MUST_USE JS::Result<JSAtom*> readMaybeIdentifierName();
+    MOZ_MUST_USE JS::Result<JSAtom*> readIdentifierName();
+
+    /**
+     * Read a single PropertyKey value.
+     */
+    MOZ_MUST_USE JS::Result<JSAtom*> readMaybePropertyKey();
+    MOZ_MUST_USE JS::Result<JSAtom*> readPropertyKey();
+
 
     /**
      * Read a single `string | null` value.
      *
      * There is no guarantee that the string is valid UTF-8.
      */
     MOZ_MUST_USE JS::Result<Ok> readChars(Chars&);
 
--- a/js/src/frontend/binsource/Cargo.toml
+++ b/js/src/frontend/binsource/Cargo.toml
@@ -1,13 +1,13 @@
 [package]
-name = "binsource"
-version = "0.1.0"
+name = "binast"
+version = "0.1.1"
 authors = ["David Teller <D.O.Teller@gmail.com>"]
 
 [dependencies]
-binjs_meta = "^0.3.10"
+binjs_meta = "^0.4.3"
 clap = "^2"
 env_logger = "^0.5.6"
 itertools = "^0.7.6"
 log = "0.4"
 yaml-rust = "^0.4"
-webidl = "^0.6.0"
+webidl = "^0.8"
--- a/js/src/frontend/binsource/src/main.rs
+++ b/js/src/frontend/binsource/src/main.rs
@@ -544,17 +544,17 @@ struct CPPExporter {
 impl CPPExporter {
     fn new(syntax: Spec, rules: GlobalRules) -> Self {
         let mut list_parsers_to_generate = vec![];
         let mut option_parsers_to_generate = vec![];
         for (parser_node_name, typedef) in syntax.typedefs_by_name() {
             if typedef.is_optional() {
                 let content_name = TypeName::type_spec(typedef.spec());
                 let content_node_name = syntax.get_node_name(&content_name)
-                    .unwrap_or_else(|| panic!("While generating an option parser, could not find node name {}", content_name))
+                    .unwrap_or_else(|| panic!("While generating an option parser, could not find node name \"{}\"", content_name))
                     .clone();
                 debug!(target: "generate_spidermonkey", "CPPExporter::new adding optional typedef {:?} => {:?} => {:?}",
                     parser_node_name,
                     content_name,
                     content_node_name);
                 option_parsers_to_generate.push(OptionParserData {
                     name: parser_node_name.clone(),
                     elements: content_node_name
@@ -708,16 +708,20 @@ impl CPPExporter {
                         _ => {}
                     }
                 },
                 _ => {}
             }
             refgraph.insert(string_from_nodename(&parser.name), edges);
         }
 
+        // 6. Primitive values.
+        refgraph.insert(Rc::new("IdentifierName".to_string()), HashSet::new());
+        refgraph.insert(Rc::new("PropertyKey".to_string()), HashSet::new());
+
         self.refgraph = refgraph;
     }
 
     /// Trace the reference graph from the node with `name and mark all nodes
     /// as used. `name` is the name of the method, without leading "parse".
     fn trace(&mut self, name: Rc<String>) {
         self.refgraph.trace(name)
     }
@@ -1471,16 +1475,75 @@ impl CPPExporter {
 }}
 
 ",
                                 first_line = first_line,
                                 build = build_result,
                             ));
                         }
                     }
+                    &TypeSpec::IdentifierName => {
+                        let build_result = rules_for_this_node.init.reindent("    ");
+                        let first_line = self.get_method_definition_start(&parser.name, "", "",
+                                                                          &extra_params);
+                        if build_result.len() == 0 {
+                            buffer.push_str(&format!("{first_line}
+{{
+    return raiseError(\"FIXME: Not implemented yet ({kind})\");
+}}
+
+",
+                                first_line = first_line,
+                                kind = parser.name.to_str()));
+                        } else {
+                            buffer.push_str(&format!("{first_line}
+{{
+    BINJS_MOZ_TRY_DECL(result, tokenizer_->readMaybeIdentifierName());
+
+{build}
+
+    return result;
+}}
+
+",
+                                first_line = first_line,
+                                build = build_result,
+                            ));
+                        }
+                    }
+                    &TypeSpec::PropertyKey => {
+                        debug!(target: "generate_spidermonkey", "Generating method for PropertyKey: {:?}", parser.name);
+                        let build_result = rules_for_this_node.init.reindent("    ");
+                        let first_line = self.get_method_definition_start(&parser.name, "", "",
+                                                                          &extra_params);
+                        if build_result.len() == 0 {
+                            buffer.push_str(&format!("{first_line}
+{{
+    return raiseError(\"FIXME: Not implemented yet ({kind})\");
+}}
+
+",
+                                first_line = first_line,
+                                kind = parser.name.to_str()));
+                        } else {
+                            buffer.push_str(&format!("{first_line}
+{{
+    BINJS_MOZ_TRY_DECL(result, tokenizer_->readMaybePropertyKey());
+
+{build}
+
+    return result;
+}}
+
+",
+                                first_line = first_line,
+                                build = build_result,
+                            ));
+                        }
+                    }
                     _else => unimplemented!("{:?}", _else)
                 }
             }
             NamedType::StringEnum(_) => {
                 unimplemented!()
             }
         }
     }
@@ -1599,16 +1662,36 @@ impl CPPExporter {
                     warn!("Internal error: We shouldn't have any `void` types at this stage.");
                     (Some(format!("// Skipping void field {}", field.name().to_str())),
                         None)
                 }
                 Some(IsNullable { is_nullable: false, content: Primitive::String }) => {
                     (Some(format!("RootedAtom {var_name}(cx_);", var_name = var_name)),
                         Some(format!("MOZ_TRY_VAR({var_name}, tokenizer_->readAtom());", var_name = var_name)))
                 }
+                Some(IsNullable { is_nullable: false, content: Primitive::IdentifierName }) => {
+                    (Some(format!("RootedAtom {var_name}(cx_);", var_name = var_name)),
+                        Some(format!("MOZ_TRY_VAR({var_name}, tokenizer_->readIdentifierName());", var_name = var_name)))
+                }
+                Some(IsNullable { is_nullable: false, content: Primitive::PropertyKey }) => {
+                    (Some(format!("RootedAtom {var_name}(cx_);", var_name = var_name)),
+                        Some(format!("MOZ_TRY_VAR({var_name}, tokenizer_->readPropertyKey());", var_name = var_name)))
+                }
+                Some(IsNullable { is_nullable: true, content: Primitive::String }) => {
+                    (Some(format!("RootedAtom {var_name}(cx_);", var_name = var_name)),
+                        Some(format!("MOZ_TRY_VAR({var_name}, tokenizer_->readMaybeAtom());", var_name = var_name)))
+                }
+                Some(IsNullable { is_nullable: true, content: Primitive::IdentifierName }) => {
+                    (Some(format!("RootedAtom {var_name}(cx_);", var_name = var_name)),
+                        Some(format!("MOZ_TRY_VAR({var_name}, tokenizer_->readMaybeIdentifierName());", var_name = var_name)))
+                }
+                Some(IsNullable { is_nullable: true, content: Primitive::PropertyKey }) => {
+                    (Some(format!("RootedAtom {var_name}(cx_);", var_name = var_name)),
+                        Some(format!("MOZ_TRY_VAR({var_name}, tokenizer_->readMaybePropertyKey());", var_name = var_name)))
+                }
                 _else => {
                     let typename = TypeName::type_(field.type_());
                     let name = self.syntax.get_node_name(typename.to_str())
                         .expect("NodeName for the field type should exist.");
                     let field_extra_args = rules_for_this_field.extra_args;
 
                     let (decl_var, call_kind) = if needs_block {
                         (Some(format!("{typename} {var_name};",
@@ -1621,17 +1704,16 @@ impl CPPExporter {
                     };
 
                     (decl_var,
                      Some(self.get_method_call(var_name.to_str(),
                                                &name, "", "", &field_extra_args,
                                                call_kind)))
                 }
             };
-
             let rendered = {
                 if rules_for_this_field.replace.is_some() {
                     for &(condition, rule_name) in &[
                         (rules_for_this_field.before_field.is_some(), "before:"),
                         (rules_for_this_field.after_field.is_some(), "after:"),
                         (rules_for_this_field.declare.is_some(), "declare:"),
                     ] {
                         if condition {
--- a/layout/generic/nsTextRunTransformations.cpp
+++ b/layout/generic/nsTextRunTransformations.cpp
@@ -602,16 +602,58 @@ nsCaseTransformTextRunFactory::Transform
         }
       }
       break;
 
     case NS_STYLE_TEXT_TRANSFORM_FULL_WIDTH:
       ch = mozilla::unicode::GetFullWidth(ch);
       break;
 
+    case NS_STYLE_TEXT_TRANSFORM_FULL_SIZE_KANA: {
+      static const uint16_t kSmallKanas[] = {
+        // ぁ   ぃ      ぅ      ぇ      ぉ      っ      ゃ      ゅ      ょ
+        0x3041, 0x3043, 0x3045, 0x3047, 0x3049, 0x3063, 0x3083, 0x3085, 0x3087,
+        // ゎ   ゕ      ゖ
+        0x308E, 0x3095, 0x3096,
+        // ァ   ィ      ゥ      ェ      ォ      ッ      ャ      ュ      ョ
+        0x30A1, 0x30A3, 0x30A5, 0x30A7, 0x30A9, 0x30C3, 0x30E3, 0x30E5, 0x30E7,
+        // ヮ   ヵ      ヶ      ㇰ      ㇱ      ㇲ      ㇳ      ㇴ      ㇵ
+        0x30EE, 0x30F5, 0x30F6, 0x31F0, 0x31F1, 0x31F2, 0x31F3, 0x31F4, 0x31F5,
+        // ㇶ   ㇷ      ㇸ      ㇹ      ㇺ      ㇻ      ㇼ      ㇽ      ㇾ
+        0x31F6, 0x31F7, 0x31F8, 0x31F9, 0x31FA, 0x31FB, 0x31FC, 0x31FD, 0x31FE,
+        // ㇿ
+        0x31FF,
+        // ァ    ィ       ゥ       ェ       ォ       ャ       ュ       ョ       ッ
+        0xFF67, 0xFF68, 0xFF69, 0xFF6A, 0xFF6B, 0xFF6C, 0xFF6D, 0xFF6E, 0xFF6F
+      };
+      static const uint16_t kFullSizeKanas[] = {
+        // あ   い      う      え      お      つ      や      ゆ      よ
+        0x3042, 0x3044, 0x3046, 0x3048, 0x304A, 0x3064, 0x3084, 0x3086, 0x3088,
+        // わ   か      け
+        0x308F, 0x304B, 0x3051,
+        // ア   イ      ウ      エ      オ      ツ      ヤ      ユ      ヨ
+        0x30A2, 0x30A4, 0x30A6, 0x30A8, 0x30AA, 0x30C4, 0x30E4, 0x30E6, 0x30E8,
+        // ワ   カ      ケ      ク      シ      ス      ト      ヌ      ハ
+        0x30EF, 0x30AB, 0x30B1, 0x30AF, 0x30B7, 0x30B9, 0x30C8, 0x30CC, 0x30CF,
+        // ヒ   フ      ヘ      ホ      ム      ラ      リ      ル      レ
+        0x30D2, 0x30D5, 0x30D8, 0x30DB, 0x30E0, 0x30E9, 0x30EA, 0x30EB, 0x30EC,
+        // ロ
+        0x30ED,
+        // ア    イ       ウ       エ       オ       ヤ       ユ       ヨ        ツ
+        0xFF71, 0xFF72, 0xFF73, 0xFF74, 0xFF75, 0xFF94, 0xFF95, 0xFF96, 0xFF82
+      };
+
+      size_t index;
+      const uint16_t len = MOZ_ARRAY_LENGTH(kSmallKanas);
+      if (mozilla::BinarySearch(kSmallKanas, 0, len, ch, &index)) {
+        ch = kFullSizeKanas[index];
+      }
+      break;
+    }
+
     default:
       break;
     }
 
     if (forceNonFullWidth) {
       ch = mozilla::unicode::GetFullWidthInverse(ch);
     }
 
--- a/layout/style/nsStyleConsts.h
+++ b/layout/style/nsStyleConsts.h
@@ -801,16 +801,17 @@ enum class StyleGridTrackBreadth : uint8
 #define NS_STYLE_TEXT_OVERFLOW_STRING   2
 
 // See nsStyleText
 #define NS_STYLE_TEXT_TRANSFORM_NONE            0
 #define NS_STYLE_TEXT_TRANSFORM_CAPITALIZE      1
 #define NS_STYLE_TEXT_TRANSFORM_LOWERCASE       2
 #define NS_STYLE_TEXT_TRANSFORM_UPPERCASE       3
 #define NS_STYLE_TEXT_TRANSFORM_FULL_WIDTH      4
+#define NS_STYLE_TEXT_TRANSFORM_FULL_SIZE_KANA  5
 
 // See nsStyleDisplay
 #define NS_STYLE_TOUCH_ACTION_NONE            (1 << 0)
 #define NS_STYLE_TOUCH_ACTION_AUTO            (1 << 1)
 #define NS_STYLE_TOUCH_ACTION_PAN_X           (1 << 2)
 #define NS_STYLE_TOUCH_ACTION_PAN_Y           (1 << 3)
 #define NS_STYLE_TOUCH_ACTION_MANIPULATION    (1 << 4)
 
--- a/layout/style/test/property_database.js
+++ b/layout/style/test/property_database.js
@@ -4580,17 +4580,17 @@ var gCSSProperties = {
   "text-transform": {
     domProp: "textTransform",
     inherited: true,
     type: CSS_TYPE_LONGHAND,
     applies_to_first_letter: true,
     applies_to_first_line: true,
     applies_to_placeholder: true,
     initial_values: [ "none" ],
-    other_values: [ "capitalize", "uppercase", "lowercase", "full-width" ],
+    other_values: [ "capitalize", "uppercase", "lowercase", "full-width", "full-size-kana" ],
     invalid_values: []
   },
   "top": {
     domProp: "top",
     inherited: false,
     type: CSS_TYPE_LONGHAND,
     /* FIXME: run tests with multiple prerequisites */
     prerequisites: { "position": "relative" },
--- a/servo/components/style/properties/longhands/inherited_text.mako.rs
+++ b/servo/components/style/properties/longhands/inherited_text.mako.rs
@@ -18,17 +18,17 @@
 )}
 
 // CSS Text Module Level 3
 
 // TODO(pcwalton): `full-width`
 ${helpers.single_keyword(
     "text-transform",
     "none capitalize uppercase lowercase",
-    extra_gecko_values="full-width",
+    extra_gecko_values="full-width full-size-kana",
     animation_value_type="discrete",
     flags="APPLIES_TO_FIRST_LETTER APPLIES_TO_FIRST_LINE APPLIES_TO_PLACEHOLDER",
     spec="https://drafts.csswg.org/css-text/#propdef-text-transform",
     servo_restyle_damage="rebuild_and_reflow",
 )}
 
 ${helpers.single_keyword(
     "hyphens",
--- a/testing/mozharness/configs/android/androidx86_7_0.py
+++ b/testing/mozharness/configs/android/androidx86_7_0.py
@@ -3,20 +3,20 @@
 # This configuration should be combined with suite definitions and other
 # mozharness configuration from android_common.py, or similar.
 
 config = {
     "tooltool_manifest_path": "testing/config/tooltool-manifests/androidx86_7_0/releng.manifest",
     "emulator_manifest": """
         [
         {
-        "size": 135064025,
-        "digest": "125678c5b0d93ead8bbf01ba94253e532909417b40637460624cfca34e92f431534fc77a0225e9c4728dcbcf2884a8f7fa1ee059efdfa82d827ca20477d41705",
+        "size": 131698372,
+        "digest": "2f62e4f39e2bd858f640b53bbb6cd33de6646f21419d1a9531d9ab5528a7ca6ab6f4cfe370cbb72c4fd475cb9db842a89acdbb9b647d9c0861ee85bc5901dfed",
         "algorithm": "sha512",
-        "filename": "android-sdk_r27.1.12-linux-x86emu.tar.gz",
+        "filename": "android-sdk_r27.3.10-linux-x86emu.tar.gz",
         "unpack": "True"
         }
         ] """,
     "emulator_avd_name": "test-1",
     "emulator_process_name": "emulator64-x86",
     "emulator_extra_args": "-gpu swiftshader_indirect -skip-adb-auth -verbose -show-kernel -use-system-libs -ranchu -selinux permissive -memory 3072 -cores 4",
     "exes": {
         'adb': '%(abs_work_dir)s/android-sdk-linux/platform-tools/adb',
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/meta/mediacapture-streams/MediaStream-MediaElement-srcObject.https.html.ini
@@ -0,0 +1,4 @@
+[MediaStream-MediaElement-srcObject.https.html]
+  [Tests that a MediaStream can be assigned to a video element with srcObject]
+    expected: FAIL
+
--- a/testing/web-platform/meta/mozilla-sync
+++ b/testing/web-platform/meta/mozilla-sync
@@ -1,2 +1,2 @@
-local: 1e4ae874837c1012d1d4c982008196cc3bb73e17
-upstream: f6bca7b6218f591edc1bcb87c9ab0837ca41970b
+local: 05a38ce912d57ee269bd0af8f0b3ced1bb5f068b
+upstream: 9f2daa2a5d08d0d44f680501af2b62292f6264f0
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/meta/webaudio/the-audio-api/the-audioworklet-interface/audioworklet-postmessage-sharedarraybuffer.https.html.ini
@@ -0,0 +1,4 @@
+[audioworklet-postmessage-sharedarraybuffer.https.html]
+  [\n      Test passing SharedArrayBuffer to an AudioWorklet\n    ]
+    expected: FAIL
+
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/tests/css/css-position/position-sticky-child-multicolumn-ref.html
@@ -0,0 +1,50 @@
+<!DOCTYPE html>
+<title>Reference for multicolumn under position:sticky should be positioned correctly</title>
+<style>
+  body {
+    margin: 0;
+  }
+  #scroller {
+    overflow-y: scroll;
+    width: 200px;
+    height: 200px;
+  }
+  #relative {
+    position: relative;
+    top: 100px;
+    margin: 10px;
+  }
+  #child {
+    width: 100px;
+    height: 100px;
+    background: green;
+  }
+  #contents {
+    position: relative;
+    top: 10%;
+    left: 10%;
+    width: 80%;
+    height: 80%;
+    background: lightgreen;
+  }
+  #spacer {
+    height: 400px;
+  }
+</style>
+
+<div id="scroller">
+  <div id="relative">
+    <div id="child">
+      <div id="contents"></div>
+    </div>
+  </div>
+  <div id="spacer"></div>
+</div>
+
+<div>You should see a light green box above with a dark green border.</div>
+
+<script>
+  window.addEventListener('load', function() {
+    scroller.scrollTop = 100;
+  });
+</script>
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/tests/css/css-position/position-sticky-child-multicolumn.html
@@ -0,0 +1,55 @@
+<!DOCTYPE html>
+<title>Multicolumn under position:sticky should be positioned correctly</title>
+<link rel="help" href="https://www.w3.org/TR/css-position-3/#sticky-pos" />
+<link rel="match" href="position-sticky-child-multicolumn-ref.html" />
+<link rel="author" title="Philip Rogers" href="mailto:pdr@chromium.org" />
+<meta name="assert" content="This test checks that a multicolumn element is positioned relative to a sticky position" />
+
+<style>
+  body {
+    margin: 0;
+  }
+  #scroller {
+    overflow-y: scroll;
+    width: 200px;
+    height: 200px;
+  }
+  #sticky {
+    position: sticky;
+    top: 10px;
+    margin: 10px;
+  }
+  #multicolumn {
+    width: 100px;
+    height: 100px;
+    background: green;
+    columns: 1;
+  }
+  #contents {
+    margin-left: 10%;
+    margin-top: 10%;
+    width: 80%;
+    height: 80%;
+    background: lightgreen;
+  }
+  #spacer {
+    height: 400px;
+  }
+</style>
+
+<div id="scroller">
+  <div id="sticky">
+    <div id="multicolumn">
+      <div id="contents"></div>
+    </div>
+  </div>
+  <div id="spacer"></div>
+</div>
+
+<div>You should see a light green box above with a dark green border.</div>
+
+<script>
+  window.addEventListener('load', function() {
+    scroller.scrollTop = 100;
+  });
+</script>
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/tests/css/css-text/text-transform/reference/text-transform-full-size-kana-001-ref.html
@@ -0,0 +1,72 @@
+<!DOCTYPE html>
+<html lang="en">
+<head>
+<meta charset="utf-8"/>
+<title>CSS3 Text, text transform: full-size-kana, small kanas</title>
+<meta name="assert" content="For small kanas, text-transform: full-size-kana puts all kanas in full-size kanas.">
+<link rel='author' title='Diego Pino Garcia' href='mailto:dpino@igalia.com'>
+<link rel='help' href='https://drafts.csswg.org/css-text-3/#text-transform'>
+<style type='text/css'>
+.test, .ref { font-size: 200%; line-height: 2.5em; }
+.test span, .ref span { margin-right: 1em; white-space: nowrap; }
+/* the CSS above is not part of the test */
+</style>
+</head>
+<body>
+<p class="instructions">Test passes if both characters in each pair match. If you are missing a font glyph for a character, ignore that pair, but report which characters were ignored.</p>
+<div class="test">
+  <span title="U+3041">&#x3042; &#x3042;</span>
+  <span title="U+3043">&#x3044; &#x3044;</span>
+  <span title="U+3045">&#x3046; &#x3046;</span>
+  <span title="U+3047">&#x3048; &#x3048;</span>
+  <span title="U+3049">&#x304A; &#x304A;</span>
+  <span title="U+3095">&#x304B; &#x304B;</span>
+  <span title="U+3096">&#x3051; &#x3051;</span>
+  <span title="U+3063">&#x3064; &#x3064;</span>
+  <span title="U+3083">&#x3084; &#x3084;</span>
+  <span title="U+3085">&#x3086; &#x3086;</span>
+  <span title="U+3087">&#x3088; &#x3088;</span>
+  <span title="U+308E">&#x308F; &#x308F;</span>
+  <span title="U+30A1">&#x30A2; &#x30A2;</span>
+  <span title="U+30A3">&#x30A4; &#x30A4;</span>
+  <span title="U+30A5">&#x30A6; &#x30A6;</span>
+  <span title="U+30A7">&#x30A8; &#x30A8;</span>
+  <span title="U+30A9">&#x30AA; &#x30AA;</span>
+  <span title="U+30F5">&#x30AB; &#x30AB;</span>
+  <span title="U+31F0">&#x30AF; &#x30AF;</span>
+  <span title="U+30F6">&#x30B1; &#x30B1;</span>
+  <span title="U+31F1">&#x30B7; &#x30B7;</span>
+  <span title="U+31F2">&#x30B9; &#x30B9;</span>
+  <span title="U+30C3">&#x30C4; &#x30C4;</span>
+  <span title="U+31F3">&#x30C8; &#x30C8;</span>
+  <span title="U+31F4">&#x30CC; &#x30CC;</span>
+  <span title="U+31F5">&#x30CF; &#x30CF;</span>
+  <span title="U+31F6">&#x30D2; &#x30D2;</span>
+  <span title="U+31F7">&#x30D5; &#x30D5;</span>
+  <span title="U+31F8">&#x30D8; &#x30D8;</span>
+  <span title="U+31F9">&#x30DB; &#x30DB;</span>
+  <span title="U+31FA">&#x30E0; &#x30E0;</span>
+  <span title="U+30E3">&#x30E4; &#x30E4;</span>
+  <span title="U+30E5">&#x30E6; &#x30E6;</span>
+  <span title="U+30E7">&#x30E8; &#x30E8;</span>
+  <span title="U+31FB">&#x30E9; &#x30E9;</span>
+  <span title="U+31FC">&#x30EA; &#x30EA;</span>
+  <span title="U+31FD">&#x30EB; &#x30EB;</span>
+  <span title="U+31FE">&#x30EC; &#x30EC;</span>
+  <span title="U+31FF">&#x30ED; &#x30ED;</span>
+  <span title="U+30EE">&#x30EF; &#x30EF;</span>
+  <span title="U+FF67">&#xFF71; &#xFF71;</span>
+  <span title="U+FF68">&#xFF72; &#xFF72;</span>
+  <span title="U+FF69">&#xFF73; &#xFF73;</span>
+  <span title="U+FF6A">&#xFF74; &#xFF74;</span>
+  <span title="U+FF6B">&#xFF75; &#xFF75;</span>
+  <span title="U+FF6F">&#xFF82; &#xFF82;</span>
+  <span title="U+FF6C">&#xFF94; &#xFF94;</span>
+  <span title="U+FF6D">&#xFF95; &#xFF95;</span>
+  <span title="U+FF6E">&#xFF96; &#xFF96;</span>
+</div>
+<!--Notes:
+Tip: To identify the characters where differences occur, in order to report problem characters, either mouse over to reveal a tooltip, or copy and paste the sequence into a tool such as <a href='http://r12a.github.io/uniview/' target='_blank'>UniView</a> or the <a href='http://r12a.github.io/apps/conversion/' target='_blank'>Unicode Conversion Tool</a>.
+-->
+</body>
+</html>
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/tests/css/css-text/text-transform/reference/text-transform-full-size-kana-002-ref.html
@@ -0,0 +1,228 @@
+<!DOCTYPE html>
+<html lang="en">
+<head>
+<meta charset="utf-8"/>
+<title>CSS3 Text, text transform: full-size-kana, full-size kanas</title>
+<meta name="assert" content="For full-size kanas, text-transform: full-size-kana leaves all kanas unaltered.">
+<link rel='author' title='Diego Pino Garcia' href='mailto:dpino@igalia.com'>
+<link rel='help' href='https://drafts.csswg.org/css-text-3/#text-transform'>
+<style type='text/css'>
+.test, .ref { font-size: 200%; line-height: 2.5em; }
+.test span, .ref span { margin-right: 1em; white-space: nowrap; }
+/* the CSS above is not part of the test */
+</style>
+</head>
+<body>
+<p class="instructions">Test passes if both characters in each pair match. If you are missing a font glyph for a character, ignore that pair, but report which characters were ignored.</p>
+<div class="test">
+  <span title="U+30FC">&#x30FC; &#x30FC;</span>
+  <span title="U+3042">&#x3042; &#x3042;</span>
+  <span title="U+3044">&#x3044; &#x3044;</span>
+  <span title="U+3046">&#x3046; &#x3046;</span>
+  <span title="U+3048">&#x3048; &#x3048;</span>
+  <span title="U+304A">&#x304A; &#x304A;</span>
+  <span title="U+304B">&#x304B; &#x304B;</span>
+  <span title="U+304C">&#x304C; &#x304C;</span>
+  <span title="U+304D">&#x304D; &#x304D;</span>
+  <span title="U+304E">&#x304E; &#x304E;</span>
+  <span title="U+304F">&#x304F; &#x304F;</span>
+  <span title="U+3050">&#x3050; &#x3050;</span>
+  <span title="U+3051">&#x3051; &#x3051;</span>
+  <span title="U+3052">&#x3052; &#x3052;</span>
+  <span title="U+3053">&#x3053; &#x3053;</span>
+  <span title="U+3054">&#x3054; &#x3054;</span>
+  <span title="U+3055">&#x3055; &#x3055;</span>
+  <span title="U+3056">&#x3056; &#x3056;</span>
+  <span title="U+3057">&#x3057; &#x3057;</span>
+  <span title="U+3058">&#x3058; &#x3058;</span>
+  <span title="U+3059">&#x3059; &#x3059;</span>
+  <span title="U+305A">&#x305A; &#x305A;</span>
+  <span title="U+305B">&#x305B; &#x305B;</span>
+  <span title="U+305C">&#x305C; &#x305C;</span>
+  <span title="U+305D">&#x305D; &#x305D;</span>
+  <span title="U+305E">&#x305E; &#x305E;</span>
+  <span title="U+305F">&#x305F; &#x305F;</span>
+  <span title="U+3060">&#x3060; &#x3060;</span>
+  <span title="U+3061">&#x3061; &#x3061;</span>
+  <span title="U+3062">&#x3062; &#x3062;</span>
+  <span title="U+3064">&#x3064; &#x3064;</span>
+  <span title="U+3065">&#x3065; &#x3065;</span>
+  <span title="U+3066">&#x3066; &#x3066;</span>
+  <span title="U+3067">&#x3067; &#x3067;</span>
+  <span title="U+3068">&#x3068; &#x3068;</span>
+  <span title="U+3069">&#x3069; &#x3069;</span>
+  <span title="U+306A">&#x306A; &#x306A;</span>
+  <span title="U+306B">&#x306B; &#x306B;</span>
+  <span title="U+306C">&#x306C; &#x306C;</span>
+  <span title="U+306D">&#x306D; &#x306D;</span>
+  <span title="U+306E">&#x306E; &#x306E;</span>
+  <span title="U+306F">&#x306F; &#x306F;</span>
+  <span title="U+3070">&#x3070; &#x3070;</span>
+  <span title="U+3071">&#x3071; &#x3071;</span>
+  <span title="U+3072">&#x3072; &#x3072;</span>
+  <span title="U+3073">&#x3073; &#x3073;</span>
+  <span title="U+3074">&#x3074; &#x3074;</span>
+  <span title="U+3075">&#x3075; &#x3075;</span>
+  <span title="U+3076">&#x3076; &#x3076;</span>
+  <span title="U+3077">&#x3077; &#x3077;</span>
+  <span title="U+3078">&#x3078; &#x3078;</span>
+  <span title="U+3079">&#x3079; &#x3079;</span>
+  <span title="U+307A">&#x307A; &#x307A;</span>
+  <span title="U+307B">&#x307B; &#x307B;</span>
+  <span title="U+307C">&#x307C; &#x307C;</span>
+  <span title="U+307D">&#x307D; &#x307D;</span>
+  <span title="U+307E">&#x307E; &#x307E;</span>
+  <span title="U+307F">&#x307F; &#x307F;</span>
+  <span title="U+3080">&#x3080; &#x3080;</span>
+  <span title="U+3081">&#x3081; &#x3081;</span>
+  <span title="U+3082">&#x3082; &#x3082;</span>
+  <span title="U+3084">&#x3084; &#x3084;</span>
+  <span title="U+3086">&#x3086; &#x3086;</span>
+  <span title="U+3088">&#x3088; &#x3088;</span>
+  <span title="U+3089">&#x3089; &#x3089;</span>
+  <span title="U+308A">&#x308A; &#x308A;</span>
+  <span title="U+308B">&#x308B; &#x308B;</span>
+  <span title="U+308C">&#x308C; &#x308C;</span>
+  <span title="U+308D">&#x308D; &#x308D;</span>
+  <span title="U+308F">&#x308F; &#x308F;</span>
+  <span title="U+3090">&#x3090; &#x3090;</span>
+  <span title="U+3091">&#x3091; &#x3091;</span>
+  <span title="U+3092">&#x3092; &#x3092;</span>
+  <span title="U+3093">&#x3093; &#x3093;</span>
+  <span title="U+30A2">&#x30A2; &#x30A2;</span>
+  <span title="U+30A4">&#x30A4; &#x30A4;</span>
+  <span title="U+30A6">&#x30A6; &#x30A6;</span>
+  <span title="U+30A8">&#x30A8; &#x30A8;</span>
+  <span title="U+30AA">&#x30AA; &#x30AA;</span>
+  <span title="U+30AB">&#x30AB; &#x30AB;</span>
+  <span title="U+30AC">&#x30AC; &#x30AC;</span>
+  <span title="U+30AD">&#x30AD; &#x30AD;</span>
+  <span title="U+30AE">&#x30AE; &#x30AE;</span>
+  <span title="U+30AF">&#x30AF; &#x30AF;</span>
+  <span title="U+30B0">&#x30B0; &#x30B0;</span>
+  <span title="U+30B1">&#x30B1; &#x30B1;</span>
+  <span title="U+30B2">&#x30B2; &#x30B2;</span>
+  <span title="U+30B3">&#x30B3; &#x30B3;</span>
+  <span title="U+30B4">&#x30B4; &#x30B4;</span>
+  <span title="U+30B5">&#x30B5; &#x30B5;</span>
+  <span title="U+30B6">&#x30B6; &#x30B6;</span>
+  <span title="U+30B7">&#x30B7; &#x30B7;</span>
+  <span title="U+30B8">&#x30B8; &#x30B8;</span>
+  <span title="U+30B9">&#x30B9; &#x30B9;</span>
+  <span title="U+30BA">&#x30BA; &#x30BA;</span>
+  <span title="U+30BB">&#x30BB; &#x30BB;</span>
+  <span title="U+30BC">&#x30BC; &#x30BC;</span>
+  <span title="U+30BD">&#x30BD; &#x30BD;</span>
+  <span title="U+30BE">&#x30BE; &#x30BE;</span>
+  <span title="U+30BF">&#x30BF; &#x30BF;</span>
+  <span title="U+30C0">&#x30C0; &#x30C0;</span>
+  <span title="U+30C1">&#x30C1; &#x30C1;</span>
+  <span title="U+30C2">&#x30C2; &#x30C2;</span>
+  <span title="U+30C4">&#x30C4; &#x30C4;</span>
+  <span title="U+30C5">&#x30C5; &#x30C5;</span>
+  <span title="U+30C6">&#x30C6; &#x30C6;</span>
+  <span title="U+30C7">&#x30C7; &#x30C7;</span>
+  <span title="U+30C8">&#x30C8; &#x30C8;</span>
+  <span title="U+30C9">&#x30C9; &#x30C9;</span>
+  <span title="U+30CA">&#x30CA; &#x30CA;</span>
+  <span title="U+30CB">&#x30CB; &#x30CB;</span>
+  <span title="U+30CC">&#x30CC; &#x30CC;</span>
+  <span title="U+30CD">&#x30CD; &#x30CD;</span>
+  <span title="U+30CE">&#x30CE; &#x30CE;</span>
+  <span title="U+30CF">&#x30CF; &#x30CF;</span>
+  <span title="U+30D0">&#x30D0; &#x30D0;</span>
+  <span title="U+30D1">&#x30D1; &#x30D1;</span>
+  <span title="U+30D2">&#x30D2; &#x30D2;</span>
+  <span title="U+30D3">&#x30D3; &#x30D3;</span>
+  <span title="U+30D4">&#x30D4; &#x30D4;</span>
+  <span title="U+30D5">&#x30D5; &#x30D5;</span>
+  <span title="U+30D6">&#x30D6; &#x30D6;</span>
+  <span title="U+30D7">&#x30D7; &#x30D7;</span>
+  <span title="U+30D8">&#x30D8; &#x30D8;</span>
+  <span title="U+30D9">&#x30D9; &#x30D9;</span>
+  <span title="U+30DA">&#x30DA; &#x30DA;</span>
+  <span title="U+30DB">&#x30DB; &#x30DB;</span>
+  <span title="U+30DC">&#x30DC; &#x30DC;</span>
+  <span title="U+30DD">&#x30DD; &#x30DD;</span>
+  <span title="U+30DE">&#x30DE; &#x30DE;</span>
+  <span title="U+30DF">&#x30DF; &#x30DF;</span>
+  <span title="U+30E0">&#x30E0; &#x30E0;</span>
+  <span title="U+30E1">&#x30E1; &#x30E1;</span>
+  <span title="U+30E2">&#x30E2; &#x30E2;</span>
+  <span title="U+30E4">&#x30E4; &#x30E4;</span>
+  <span title="U+30E6">&#x30E6; &#x30E6;</span>
+  <span title="U+30E8">&#x30E8; &#x30E8;</span>
+  <span title="U+30E9">&#x30E9; &#x30E9;</span>
+  <span title="U+30EA">&#x30EA; &#x30EA;</span>
+  <span title="U+30EB">&#x30EB; &#x30EB;</span>
+  <span title="U+30EC">&#x30EC; &#x30EC;</span>
+  <span title="U+30ED">&#x30ED; &#x30ED;</span>
+  <span title="U+30EF">&#x30EF; &#x30EF;</span>
+  <span title="U+30F0">&#x30F0; &#x30F0;</span>
+  <span title="U+30F1">&#x30F1; &#x30F1;</span>
+  <span title="U+30F2">&#x30F2; &#x30F2;</span>
+  <span title="U+30F3">&#x30F3; &#x30F3;</span>
+  <span title="U+30F4">&#x30F4; &#x30F4;</span>
+  <span title="U+309B">&#x309B; &#x309B;</span>
+  <span title="U+309C">&#x309C; &#x309C;</span>
+  <span title="U+FF60">&#xFF60; &#xFF60;</span>
+  <span title="U+FF61">&#xFF61; &#xFF61;</span>
+  <span title="U+FF62">&#xFF62; &#xFF62;</span>
+  <span title="U+FF63">&#xFF63; &#xFF63;</span>
+  <span title="U+FF64">&#xFF64; &#xFF64;</span>
+  <span title="U+FF65">&#xFF65; &#xFF65;</span>
+  <span title="U+FF66">&#xFF66; &#xFF66;</span>
+  <span title="U+FF70">&#xFF70; &#xFF70;</span>
+  <span title="U+FF71">&#xFF71; &#xFF71;</span>
+  <span title="U+FF72">&#xFF72; &#xFF72;</span>
+  <span title="U+FF73">&#xFF73; &#xFF73;</span>
+  <span title="U+FF74">&#xFF74; &#xFF74;</span>
+  <span title="U+FF75">&#xFF75; &#xFF75;</span>
+  <span title="U+FF76">&#xFF76; &#xFF76;</span>
+  <span title="U+FF77">&#xFF77; &#xFF77;</span>
+  <span title="U+FF78">&#xFF78; &#xFF78;</span>
+  <span title="U+FF79">&#xFF79; &#xFF79;</span>
+  <span title="U+FF7A">&#xFF7A; &#xFF7A;</span>
+  <span title="U+FF7B">&#xFF7B; &#xFF7B;</span>
+  <span title="U+FF7C">&#xFF7C; &#xFF7C;</span>
+  <span title="U+FF7D">&#xFF7D; &#xFF7D;</span>
+  <span title="U+FF7E">&#xFF7E; &#xFF7E;</span>
+  <span title="U+FF7F">&#xFF7F; &#xFF7F;</span>
+  <span title="U+FF80">&#xFF80; &#xFF80;</span>
+  <span title="U+FF81">&#xFF81; &#xFF81;</span>
+  <span title="U+FF82">&#xFF82; &#xFF82;</span>
+  <span title="U+FF83">&#xFF83; &#xFF83;</span>
+  <span title="U+FF84">&#xFF84; &#xFF84;</span>
+  <span title="U+FF85">&#xFF85; &#xFF85;</span>
+  <span title="U+FF86">&#xFF86; &#xFF86;</span>
+  <span title="U+FF87">&#xFF87; &#xFF87;</span>
+  <span title="U+FF88">&#xFF88; &#xFF88;</span>
+  <span title="U+FF89">&#xFF89; &#xFF89;</span>
+  <span title="U+FF8A">&#xFF8A; &#xFF8A;</span>
+  <span title="U+FF8B">&#xFF8B; &#xFF8B;</span>
+  <span title="U+FF8C">&#xFF8C; &#xFF8C;</span>
+  <span title="U+FF8D">&#xFF8D; &#xFF8D;</span>
+  <span title="U+FF8E">&#xFF8E; &#xFF8E;</span>
+  <span title="U+FF8F">&#xFF8F; &#xFF8F;</span>
+  <span title="U+FF90">&#xFF90; &#xFF90;</span>
+  <span title="U+FF91">&#xFF91; &#xFF91;</span>
+  <span title="U+FF92">&#xFF92; &#xFF92;</span>
+  <span title="U+FF93">&#xFF93; &#xFF93;</span>
+  <span title="U+FF94">&#xFF94; &#xFF94;</span>
+  <span title="U+FF95">&#xFF95; &#xFF95;</span>
+  <span title="U+FF96">&#xFF96; &#xFF96;</span>
+  <span title="U+FF97">&#xFF97; &#xFF97;</span>
+  <span title="U+FF98">&#xFF98; &#xFF98;</span>
+  <span title="U+FF99">&#xFF99; &#xFF99;</span>
+  <span title="U+FF9A">&#xFF9A; &#xFF9A;</span>
+  <span title="U+FF9B">&#xFF9B; &#xFF9B;</span>
+  <span title="U+FF9C">&#xFF9C; &#xFF9C;</span>
+  <span title="U+FF9D">&#xFF9D; &#xFF9D;</span>
+  <span title="U+FF9E">&#xFF9E; &#xFF9E;</span>
+  <span title="U+FF9F">&#xFF9F; &#xFF9F;</span>
+</div>
+<!--Notes:
+Tip: To identify the characters where differences occur, in order to report problem characters, either mouse over to reveal a tooltip, or copy and paste the sequence into a tool such as <a href='http://r12a.github.io/uniview/' target='_blank'>UniView</a> or the <a href='http://r12a.github.io/apps/conversion/' target='_blank'>Unicode Conversion Tool</a>.
+-->
+</body>
+</html>
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/tests/css/css-text/text-transform/text-transform-full-size-kana-001.html
@@ -0,0 +1,74 @@
+<!DOCTYPE html>
+<html lang="en">
+<head>
+<meta charset="utf-8"/>
+<title>CSS3 Text, text transform: full-size-kana, small kanas</title>
+<meta name="assert" content="For small kanas, text-transform: full-size-kana puts all kanas in full-size kanas.">
+<link rel='author' title='Diego Pino Garcia' href='mailto:dpino@igalia.com'>
+<link rel='help' href='https://drafts.csswg.org/css-text-3/#text-transform'>
+<link rel="match" href="reference/text-transform-full-size-kana-001-ref.html">
+<style type='text/css'>
+.test, .ref { font-size: 200%; line-height: 2.5em; }
+.test span, .ref span { margin-right: 1em; white-space: nowrap; }
+/* the CSS above is not part of the test */
+.test { text-transform: full-size-kana; }
+</style>
+</head>
+<body>
+<p class="instructions">Test passes if both characters in each pair match. If you are missing a font glyph for a character, ignore that pair, but report which characters were ignored.</p>
+<div class="test">
+  <span title="U+3041">&#x3041; &#x3042;</span>
+  <span title="U+3043">&#x3043; &#x3044;</span>
+  <span title="U+3045">&#x3045; &#x3046;</span>
+  <span title="U+3047">&#x3047; &#x3048;</span>
+  <span title="U+3049">&#x3049; &#x304A;</span>
+  <span title="U+3095">&#x3095; &#x304B;</span>
+  <span title="U+3096">&#x3096; &#x3051;</span>
+  <span title="U+3063">&#x3063; &#x3064;</span>
+  <span title="U+3083">&#x3083; &#x3084;</span>
+  <span title="U+3085">&#x3085; &#x3086;</span>
+  <span title="U+3087">&#x3087; &#x3088;</span>
+  <span title="U+308E">&#x308E; &#x308F;</span>
+  <span title="U+30A1">&#x30A1; &#x30A2;</span>
+  <span title="U+30A3">&#x30A3; &#x30A4;</span>
+  <span title="U+30A5">&#x30A5; &#x30A6;</span>
+  <span title="U+30A7">&#x30A7; &#x30A8;</span>
+  <span title="U+30A9">&#x30A9; &#x30AA;</span>
+  <span title="U+30F5">&#x30F5; &#x30AB;</span>
+  <span title="U+31F0">&#x31F0; &#x30AF;</span>
+  <span title="U+30F6">&#x30F6; &#x30B1;</span>
+  <span title="U+31F1">&#x31F1; &#x30B7;</span>
+  <span title="U+31F2">&#x31F2; &#x30B9;</span>
+  <span title="U+30C3">&#x30C3; &#x30C4;</span>
+  <span title="U+31F3">&#x31F3; &#x30C8;</span>
+  <span title="U+31F4">&#x31F4; &#x30CC;</span>
+  <span title="U+31F5">&#x31F5; &#x30CF;</span>
+  <span title="U+31F6">&#x31F6; &#x30D2;</span>
+  <span title="U+31F7">&#x31F7; &#x30D5;</span>
+  <span title="U+31F8">&#x31F8; &#x30D8;</span>
+  <span title="U+31F9">&#x31F9; &#x30DB;</span>
+  <span title="U+31FA">&#x31FA; &#x30E0;</span>
+  <span title="U+30E3">&#x30E3; &#x30E4;</span>
+  <span title="U+30E5">&#x30E5; &#x30E6;</span>
+  <span title="U+30E7">&#x30E7; &#x30E8;</span>
+  <span title="U+31FB">&#x31FB; &#x30E9;</span>
+  <span title="U+31FC">&#x31FC; &#x30EA;</span>
+  <span title="U+31FD">&#x31FD; &#x30EB;</span>
+  <span title="U+31FE">&#x31FE; &#x30EC;</span>
+  <span title="U+31FF">&#x31FF; &#x30ED;</span>
+  <span title="U+30EE">&#x30EE; &#x30EF;</span>
+  <span title="U+FF67">&#xFF67; &#xFF71;</span>
+  <span title="U+FF68">&#xFF68; &#xFF72;</span>
+  <span title="U+FF69">&#xFF69; &#xFF73;</span>
+  <span title="U+FF6A">&#xFF6A; &#xFF74;</span>
+  <span title="U+FF6B">&#xFF6B; &#xFF75;</span>
+  <span title="U+FF6F">&#xFF6F; &#xFF82;</span>
+  <span title="U+FF6C">&#xFF6C; &#xFF94;</span>
+  <span title="U+FF6D">&#xFF6D; &#xFF95;</span>
+  <span title="U+FF6E">&#xFF6E; &#xFF96;</span>
+</div>
+<!--Notes:
+Tip: To identify the characters where differences occur, in order to report problem characters, either mouse over to reveal a tooltip, or copy and paste the sequence into a tool such as <a href='http://r12a.github.io/uniview/' target='_blank'>UniView</a> or the <a href='http://r12a.github.io/apps/conversion/' target='_blank'>Unicode Conversion Tool</a>.
+-->
+</body>
+</html>
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/tests/css/css-text/text-transform/text-transform-full-size-kana-002.html
@@ -0,0 +1,230 @@
+<!DOCTYPE html>
+<html lang="en">
+<head>
+<meta charset="utf-8"/>
+<title>CSS3 Text, text transform: full-size-kana, full-size kanas</title>
+<meta name="assert" content="For full-size kanas, text-transform: full-size-kana leaves all kanas unaltered.">
+<link rel='author' title='Diego Pino Garcia' href='mailto:dpino@igalia.com'>
+<link rel='help' href='https://drafts.csswg.org/css-text-3/#text-transform'>
+<link rel="match" href="reference/text-transform-full-size-kana-002-ref.html">
+<style type='text/css'>
+.test, .ref { font-size: 200%; line-height: 2.5em; }
+.test span, .ref span { margin-right: 1em; white-space: nowrap; }
+/* the CSS above is not part of the test */
+.test { text-transform: full-size-kana; }
+</style>
+</head>
+<body>
+<p class="instructions">Test passes if both characters in each pair match. If you are missing a font glyph for a character, ignore that pair, but report which characters were ignored.</p>
+<div class="test">
+  <span title="U+30FC">&#x30FC; &#x30FC;</span>
+  <span title="U+3042">&#x3042; &#x3042;</span>
+  <span title="U+3044">&#x3044; &#x3044;</span>
+  <span title="U+3046">&#x3046; &#x3046;</span>
+  <span title="U+3048">&#x3048; &#x3048;</span>
+  <span title="U+304A">&#x304A; &#x304A;</span>
+  <span title="U+304B">&#x304B; &#x304B;</span>
+  <span title="U+304C">&#x304C; &#x304C;</span>
+  <span title="U+304D">&#x304D; &#x304D;</span>
+  <span title="U+304E">&#x304E; &#x304E;</span>
+  <span title="U+304F">&#x304F; &#x304F;</span>
+  <span title="U+3050">&#x3050; &#x3050;</span>
+  <span title="U+3051">&#x3051; &#x3051;</span>
+  <span title="U+3052">&#x3052; &#x3052;</span>
+  <span title="U+3053">&#x3053; &#x3053;</span>
+  <span title="U+3054">&#x3054; &#x3054;</span>
+  <span title="U+3055">&#x3055; &#x3055;</span>
+  <span title="U+3056">&#x3056; &#x3056;</span>
+  <span title="U+3057">&#x3057; &#x3057;</span>
+  <span title="U+3058">&#x3058; &#x3058;</span>
+  <span title="U+3059">&#x3059; &#x3059;</span>
+  <span title="U+305A">&#x305A; &#x305A;</span>
+  <span title="U+305B">&#x305B; &#x305B;</span>
+  <span title="U+305C">&#x305C; &#x305C;</span>
+  <span title="U+305D">&#x305D; &#x305D;</span>
+  <span title="U+305E">&#x305E; &#x305E;</span>
+  <span title="U+305F">&#x305F; &#x305F;</span>
+  <span title="U+3060">&#x3060; &#x3060;</span>
+  <span title="U+3061">&#x3061; &#x3061;</span>
+  <span title="U+3062">&#x3062; &#x3062;</span>
+  <span title="U+3064">&#x3064; &#x3064;</span>
+  <span title="U+3065">&#x3065; &#x3065;</span>
+  <span title="U+3066">&#x3066; &#x3066;</span>
+  <span title="U+3067">&#x3067; &#x3067;</span>
+  <span title="U+3068">&#x3068; &#x3068;</span>
+  <span title="U+3069">&#x3069; &#x3069;</span>
+  <span title="U+306A">&#x306A; &#x306A;</span>
+  <span title="U+306B">&#x306B; &#x306B;</span>
+  <span title="U+306C">&#x306C; &#x306C;</span>
+  <span title="U+306D">&#x306D; &#x306D;</span>
+  <span title="U+306E">&#x306E; &#x306E;</span>
+  <span title="U+306F">&#x306F; &#x306F;</span>
+  <span title="U+3070">&#x3070; &#x3070;</span>
+  <span title="U+3071">&#x3071; &#x3071;</span>
+  <span title="U+3072">&#x3072; &#x3072;</span>
+  <span title="U+3073">&#x3073; &#x3073;</span>
+  <span title="U+3074">&#x3074; &#x3074;</span>
+  <span title="U+3075">&#x3075; &#x3075;</span>
+  <span title="U+3076">&#x3076; &#x3076;</span>
+  <span title="U+3077">&#x3077; &#x3077;</span>
+  <span title="U+3078">&#x3078; &#x3078;</span>
+  <span title="U+3079">&#x3079; &#x3079;</span>
+  <span title="U+307A">&#x307A; &#x307A;</span>
+  <span title="U+307B">&#x307B; &#x307B;</span>
+  <span title="U+307C">&#x307C; &#x307C;</span>
+  <span title="U+307D">&#x307D; &#x307D;</span>
+  <span title="U+307E">&#x307E; &#x307E;</span>
+  <span title="U+307F">&#x307F; &#x307F;</span>
+  <span title="U+3080">&#x3080; &#x3080;</span>
+  <span title="U+3081">&#x3081; &#x3081;</span>
+  <span title="U+3082">&#x3082; &#x3082;</span>
+  <span title="U+3084">&#x3084; &#x3084;</span>
+  <span title="U+3086">&#x3086; &#x3086;</span>
+  <span title="U+3088">&#x3088; &#x3088;</span>
+  <span title="U+3089">&#x3089; &#x3089;</span>
+  <span title="U+308A">&#x308A; &#x308A;</span>
+  <span title="U+308B">&#x308B; &#x308B;</span>
+  <span title="U+308C">&#x308C; &#x308C;</span>
+  <span title="U+308D">&#x308D; &#x308D;</span>
+  <span title="U+308F">&#x308F; &#x308F;</span>
+  <span title="U+3090">&#x3090; &#x3090;</span>
+  <span title="U+3091">&#x3091; &#x3091;</span>
+  <span title="U+3092">&#x3092; &#x3092;</span>
+  <span title="U+3093">&#x3093; &#x3093;</span>
+  <span title="U+30A2">&#x30A2; &#x30A2;</span>
+  <span title="U+30A4">&#x30A4; &#x30A4;</span>
+  <span title="U+30A6">&#x30A6; &#x30A6;</span>
+  <span title="U+30A8">&#x30A8; &#x30A8;</span>
+  <span title="U+30AA">&#x30AA; &#x30AA;</span>
+  <span title="U+30AB">&#x30AB; &#x30AB;</span>
+  <span title="U+30AC">&#x30AC; &#x30AC;</span>
+  <span title="U+30AD">&#x30AD; &#x30AD;</span>
+  <span title="U+30AE">&#x30AE; &#x30AE;</span>
+  <span title="U+30AF">&#x30AF; &#x30AF;</span>
+  <span title="U+30B0">&#x30B0; &#x30B0;</span>
+  <span title="U+30B1">&#x30B1; &#x30B1;</span>
+  <span title="U+30B2">&#x30B2; &#x30B2;</span>
+  <span title="U+30B3">&#x30B3; &#x30B3;</span>
+  <span title="U+30B4">&#x30B4; &#x30B4;</span>
+  <span title="U+30B5">&#x30B5; &#x30B5;</span>
+  <span title="U+30B6">&#x30B6; &#x30B6;</span>
+  <span title="U+30B7">&#x30B7; &#x30B7;</span>
+  <span title="U+30B8">&#x30B8; &#x30B8;</span>
+  <span title="U+30B9">&#x30B9; &#x30B9;</span>
+  <span title="U+30BA">&#x30BA; &#x30BA;</span>
+  <span title="U+30BB">&#x30BB; &#x30BB;</span>
+  <span title="U+30BC">&#x30BC; &#x30BC;</span>
+  <span title="U+30BD">&#x30BD; &#x30BD;</span>
+  <span title="U+30BE">&#x30BE; &#x30BE;</span>
+  <span title="U+30BF">&#x30BF; &#x30BF;</span>
+  <span title="U+30C0">&#x30C0; &#x30C0;</span>
+  <span title="U+30C1">&#x30C1; &#x30C1;</span>
+  <span title="U+30C2">&#x30C2; &#x30C2;</span>
+  <span title="U+30C4">&#x30C4; &#x30C4;</span>
+  <span title="U+30C5">&#x30C5; &#x30C5;</span>
+  <span title="U+30C6">&#x30C6; &#x30C6;</span>
+  <span title="U+30C7">&#x30C7; &#x30C7;</span>
+  <span title="U+30C8">&#x30C8; &#x30C8;</span>
+  <span title="U+30C9">&#x30C9; &#x30C9;</span>
+  <span title="U+30CA">&#x30CA; &#x30CA;</span>
+  <span title="U+30CB">&#x30CB; &#x30CB;</span>
+  <span title="U+30CC">&#x30CC; &#x30CC;</span>
+  <span title="U+30CD">&#x30CD; &#x30CD;</span>
+  <span title="U+30CE">&#x30CE; &#x30CE;</span>
+  <span title="U+30CF">&#x30CF; &#x30CF;</span>
+  <span title="U+30D0">&#x30D0; &#x30D0;</span>
+  <span title="U+30D1">&#x30D1; &#x30D1;</span>
+  <span title="U+30D2">&#x30D2; &#x30D2;</span>
+  <span title="U+30D3">&#x30D3; &#x30D3;</span>
+  <span title="U+30D4">&#x30D4; &#x30D4;</span>
+  <span title="U+30D5">&#x30D5; &#x30D5;</span>
+  <span title="U+30D6">&#x30D6; &#x30D6;</span>
+  <span title="U+30D7">&#x30D7; &#x30D7;</span>
+  <span title="U+30D8">&#x30D8; &#x30D8;</span>
+  <span title="U+30D9">&#x30D9; &#x30D9;</span>
+  <span title="U+30DA">&#x30DA; &#x30DA;</span>
+  <span title="U+30DB">&#x30DB; &#x30DB;</span>
+  <span title="U+30DC">&#x30DC; &#x30DC;</span>
+  <span title="U+30DD">&#x30DD; &#x30DD;</span>
+  <span title="U+30DE">&#x30DE; &#x30DE;</span>
+  <span title="U+30DF">&#x30DF; &#x30DF;</span>
+  <span title="U+30E0">&#x30E0; &#x30E0;</span>
+  <span title="U+30E1">&#x30E1; &#x30E1;</span>
+  <span title="U+30E2">&#x30E2; &#x30E2;</span>
+  <span title="U+30E4">&#x30E4; &#x30E4;</span>
+  <span title="U+30E6">&#x30E6; &#x30E6;</span>
+  <span title="U+30E8">&#x30E8; &#x30E8;</span>
+  <span title="U+30E9">&#x30E9; &#x30E9;</span>
+  <span title="U+30EA">&#x30EA; &#x30EA;</span>
+  <span title="U+30EB">&#x30EB; &#x30EB;</span>
+  <span title="U+30EC">&#x30EC; &#x30EC;</span>
+  <span title="U+30ED">&#x30ED; &#x30ED;</span>
+  <span title="U+30EF">&#x30EF; &#x30EF;</span>
+  <span title="U+30F0">&#x30F0; &#x30F0;</span>
+  <span title="U+30F1">&#x30F1; &#x30F1;</span>
+  <span title="U+30F2">&#x30F2; &#x30F2;</span>
+  <span title="U+30F3">&#x30F3; &#x30F3;</span>
+  <span title="U+30F4">&#x30F4; &#x30F4;</span>
+  <span title="U+309B">&#x309B; &#x309B;</span>
+  <span title="U+309C">&#x309C; &#x309C;</span>
+  <span title="U+FF60">&#xFF60; &#xFF60;</span>
+  <span title="U+FF61">&#xFF61; &#xFF61;</span>
+  <span title="U+FF62">&#xFF62; &#xFF62;</span>
+  <span title="U+FF63">&#xFF63; &#xFF63;</span>
+  <span title="U+FF64">&#xFF64; &#xFF64;</span>
+  <span title="U+FF65">&#xFF65; &#xFF65;</span>
+  <span title="U+FF66">&#xFF66; &#xFF66;</span>
+  <span title="U+FF70">&#xFF70; &#xFF70;</span>
+  <span title="U+FF71">&#xFF71; &#xFF71;</span>
+  <span title="U+FF72">&#xFF72; &#xFF72;</span>
+  <span title="U+FF73">&#xFF73; &#xFF73;</span>
+  <span title="U+FF74">&#xFF74; &#xFF74;</span>
+  <span title="U+FF75">&#xFF75; &#xFF75;</span>
+  <span title="U+FF76">&#xFF76; &#xFF76;</span>
+  <span title="U+FF77">&#xFF77; &#xFF77;</span>
+  <span title="U+FF78">&#xFF78; &#xFF78;</span>
+  <span title="U+FF79">&#xFF79; &#xFF79;</span>
+  <span title="U+FF7A">&#xFF7A; &#xFF7A;</span>
+  <span title="U+FF7B">&#xFF7B; &#xFF7B;</span>
+  <span title="U+FF7C">&#xFF7C; &#xFF7C;</span>
+  <span title="U+FF7D">&#xFF7D; &#xFF7D;</span>
+  <span title="U+FF7E">&#xFF7E; &#xFF7E;</span>
+  <span title="U+FF7F">&#xFF7F; &#xFF7F;</span>
+  <span title="U+FF80">&#xFF80; &#xFF80;</span>
+  <span title="U+FF81">&#xFF81; &#xFF81;</span>
+  <span title="U+FF82">&#xFF82; &#xFF82;</span>
+  <span title="U+FF83">&#xFF83; &#xFF83;</span>
+  <span title="U+FF84">&#xFF84; &#xFF84;</span>
+  <span title="U+FF85">&#xFF85; &#xFF85;</span>
+  <span title="U+FF86">&#xFF86; &#xFF86;</span>
+  <span title="U+FF87">&#xFF87; &#xFF87;</span>
+  <span title="U+FF88">&#xFF88; &#xFF88;</span>
+  <span title="U+FF89">&#xFF89; &#xFF89;</span>
+  <span title="U+FF8A">&#xFF8A; &#xFF8A;</span>
+  <span title="U+FF8B">&#xFF8B; &#xFF8B;</span>
+  <span title="U+FF8C">&#xFF8C; &#xFF8C;</span>
+  <span title="U+FF8D">&#xFF8D; &#xFF8D;</span>
+  <span title="U+FF8E">&#xFF8E; &#xFF8E;</span>
+  <span title="U+FF8F">&#xFF8F; &#xFF8F;</span>
+  <span title="U+FF90">&#xFF90; &#xFF90;</span>
+  <span title="U+FF91">&#xFF91; &#xFF91;</span>
+  <span title="U+FF92">&#xFF92; &#xFF92;</span>
+  <span title="U+FF93">&#xFF93; &#xFF93;</span>
+  <span title="U+FF94">&#xFF94; &#xFF94;</span>
+  <span title="U+FF95">&#xFF95; &#xFF95;</span>
+  <span title="U+FF96">&#xFF96; &#xFF96;</span>
+  <span title="U+FF97">&#xFF97; &#xFF97;</span>
+  <span title="U+FF98">&#xFF98; &#xFF98;</span>
+  <span title="U+FF99">&#xFF99; &#xFF99;</span>
+  <span title="U+FF9A">&#xFF9A; &#xFF9A;</span>
+  <span title="U+FF9B">&#xFF9B; &#xFF9B;</span>
+  <span title="U+FF9C">&#xFF9C; &#xFF9C;</span>
+  <span title="U+FF9D">&#xFF9D; &#xFF9D;</span>
+  <span title="U+FF9E">&#xFF9E; &#xFF9E;</span>
+  <span title="U+FF9F">&#xFF9F; &#xFF9F;</span>
+</div>
+<!--Notes:
+Tip: To identify the characters where differences occur, in order to report problem characters, either mouse over to reveal a tooltip, or copy and paste the sequence into a tool such as <a href='http://r12a.github.io/uniview/' target='_blank'>UniView</a> or the <a href='http://r12a.github.io/apps/conversion/' target='_blank'>Unicode Conversion Tool</a>.
+-->
+</body>
+</html>
--- a/testing/web-platform/tests/docs/_running-tests/safari.md
+++ b/testing/web-platform/tests/docs/_running-tests/safari.md
@@ -17,12 +17,12 @@ To run Safari on macOS, some manual setu
     [macOS High Sierra issue](https://github.com/web-platform-tests/wpt/issues/9007).
 
 Now, run the tests using the `safari` product:
 ```
 ./wpt run safari [test_list]
 ```
 
 This will use the `safaridriver` found on the path, which will be stable Safari.
-To run Safari Technology Preview instead, use the `--webdriver-binary` argument:
+To run Safari Technology Preview instead, use the `--channel=preview` argument:
 ```
-./wpt run --webdriver-binary "/Applications/Safari Technology Preview.app/Contents/MacOS/safaridriver" safari [test_list]
+./wpt run --channel=preview safari [test_list]
 ```
--- a/testing/web-platform/tests/html/browsers/the-window-object/apis-for-creating-and-navigating-browsing-contexts-by-name/open-features-tokenization-noopener.html
+++ b/testing/web-platform/tests/html/browsers/the-window-object/apis-for-creating-and-navigating-browsing-contexts-by-name/open-features-tokenization-noopener.html
@@ -35,18 +35,20 @@ test (t => {
   // Each of these variants should tokenize as feature ('noopener', '')
   // except where indicated
   // Note also that `value` is lowercased during tokenization
   var featureVariants = [
     'NOOPENER',
     'noOpenER',
     '  NOopener',
     '=NOOPENER',
-    'noopener=NOOPENER', // => ('noopener', 'noopener')
-    'NOOPENER=noopener' // => ('noopener', 'noopener')
+    'noopener=1',
+    'NOOPENER=1',
+    'NOOPENER=yes',
+    'noopener=YES',
   ];
   featureVariants.forEach(feature => {
     var win = window.open(windowURL, '', feature);
     assert_equals(win, null, `"${feature}" should activate feature "noopener"`);
   });
 }, 'feature `name` should be converted to ASCII lowercase');
 
 test (t => {
@@ -77,62 +79,72 @@ test (t => {
   // before collecting `value`
   // Each of these variants should tokenize as feature ('noopener', '')
   // Except where indicated
   var featureVariants = [
     'noopener=  yes', // => ('noopener', 'yes')
     'noopener==,',
     'noopener=\n ,',
     'noopener = \t ,',
-    'noopener\n=\r noopener,', // => ('noopener', 'noopener')
+    'noopener\n=\r 1,', // => ('noopener', '1')
     'noopener=,yes', // => ('noopener'), ('yes')
-    'noopener= foo=,', // => ('noopener', 'foo')
+    'noopener= yes=,', // => ('noopener', 'yes')
     'noopener = \u000Cyes' // => ('noopener', 'yes')
   ];
   featureVariants.forEach(feature => {
     var win = window.open(windowURL, '', feature);
     assert_equals(win, null, `"${feature}" should activate feature "noopener"`);
   });
 }, 'Tokenizing should ignore window feature separators except "," after initial "=" and before value');
 
 test (t => {
   // Tokenizing `value` should collect any non-separator code points until first separator
   var featureVariants = [
-    'noopener=noopener', // => ('noopener', 'noopener')
+    'noopener=1', // => ('noopener', '1')
     'noopener=yes', // => ('noopener', 'yes')
     'noopener = yes ,', // => ('noopener', 'yes')
     'noopener=\nyes  ,', // => ('noopener', 'yes')
     'noopener=yes yes', // => ('noopener', 'yes'), ('yes', '')
     'noopener=yes\ts', // => ('noopener', 'yes'), ('s', '')
     'noopener==', // => ('noopener', '')
-    'noopener=0\n,', // => ('noopener', '0')
+    'noopener=1\n,', // => ('noopener', '1')
     '==noopener===', // => ('noopener', '')
     'noopener==\u000C' // => ('noopener', '')
   ];
   featureVariants.forEach(feature => {
     var win = window.open(windowURL, '', feature);
     assert_equals(win, null, `"${feature}" should set "noopener"`);
   });
 }, 'Tokenizing should read characters until first window feature separator as `value`');
 
 test (t => {
-  // If tokenizedFeatures contains an entry with the key "noopener"...disown opener
-  // i.e. `value` should be irrelevant
   var featureVariants = [
-    'noopener=false',
-    ',noopener=0, ',
-    'foo=bar,noopener=noopener,',
-    'noopener=true',
-    'noopener=foo\nbar\t'
+    'noopener=1',
+    'noopener=2',
+    'noopener=12345',
+    'noopener=1.5',
+    'noopener=-1',
   ];
   featureVariants.forEach(feature => {
     var win = window.open(windowURL, '', feature);
     assert_equals(win, null, `"${feature}" should activate feature "noopener"`);
   });
-}, '"noopener" should be based on name (key), not value');
+}, 'Values that parse as a non-zero integer should activate the feature');
+
+test (t => {
+  var featureVariants = [
+    'noopener=0',
+    'noopener=0.5',
+    'noopener=error',
+  ];
+  featureVariants.forEach(feature => {
+    var win = window.open(windowURL, '', feature);
+    assert_not_equals(win, null, `"${feature}" should NOT activate feature "noopener"`);
+  });
+}, 'Values that do not parse as a non-zero integer should not activate the feature');
 
 test (t => {
   var invalidFeatureVariants = [
     '-noopener', //     => ('-noopener', '')
     'NOOPENERRRR', //   => ('noopenerrr', '')
     'noOpenErR', //     => ('noopenerr', '')
     'no_opener', //     => ('no_opener', '')
     ' no opener', //    => ('no', ''), ('opener', '')
--- a/testing/web-platform/tests/interfaces/web-animations.idl
+++ b/testing/web-platform/tests/interfaces/web-animations.idl
@@ -1,12 +1,12 @@
 // GENERATED CONTENT - DO NOT EDIT
 // Content was automatically extracted by Reffy into reffy-reports
 // (https://github.com/tidoust/reffy-reports)
-// Source: Web Animations (https://w3c.github.io/web-animations/)
+// Source: Web Animations (https://drafts.csswg.org/web-animations-1/)
 
 [Exposed=Window]
 interface AnimationTimeline {
     readonly attribute double? currentTime;
 };
 
 dictionary DocumentTimelineOptions {
   DOMHighResTimeStamp originTime = 0;
--- a/testing/web-platform/tests/interfaces/web-share.idl
+++ b/testing/web-platform/tests/interfaces/web-share.idl
@@ -1,12 +1,12 @@
 // GENERATED CONTENT - DO NOT EDIT
 // Content was automatically extracted by Reffy into reffy-reports
 // (https://github.com/tidoust/reffy-reports)
-// Source: Web Share API (https://wicg.github.io/web-share/)
+// Source: Web Share API - Level 1 (https://wicg.github.io/web-share/)
 
 partial interface Navigator {
   [SecureContext] Promise<void> share(optional ShareData data);
 };
 
 dictionary ShareData {
   USVString title;
   USVString text;
--- a/testing/web-platform/tests/mediacapture-streams/MediaStream-MediaElement-srcObject.https.html
+++ b/testing/web-platform/tests/mediacapture-streams/MediaStream-MediaElement-srcObject.https.html
@@ -23,25 +23,36 @@ const vid = document.getElementById("vid
 
 promise_test(async t => {
   const wait = ms => new Promise(r => t.step_timeout(r, ms));
   const timeout = (promise, time, msg) =>
     Promise.race([promise, wait(time).then(() => Promise.reject(new Error(msg)))]);
 
   const stream = await timeout(navigator.mediaDevices.getUserMedia({video: true}), 10000, "getUserMedia timeout");
   t.add_cleanup(() => stream.getTracks().forEach(track => track.stop()));
+  vid.defaultPlaybackRate = 0.4;
+  vid.playbackRate = 0.4;
+  vid.preload = "metadata";
   vid.srcObject = stream;
+  vid.onratechange = t.unreached_func('ratechange event must not be fired');
   vid.play();
   assert_true(!vid.seeking, "A MediaStream is not seekable");
   assert_equals(vid.seekable.length, 0, "A MediaStream is not seekable");
   assert_equals(vid.defaultPlaybackRate, 1, "playback rate is always 1");
+  vid.defaultPlaybackRate = 0.5;
+  assert_equals(vid.defaultPlaybackRate, 1, "Setting defaultPlaybackRate must be ignored");
   assert_equals(vid.playbackRate, 1, "playback rate is always 1");
+  vid.playbackRate = 0.5;
+  assert_equals(vid.playbackRate, 1, "Setting playbackRate must be ignored");
   assert_equals(vid.buffered.length, 0, "A MediaStream cannot be preloaded.  Therefore, there is no buffered timeranges");
   assert_equals(vid.readyState, vid.HAVE_NOTHING, "readyState is HAVE_NOTHING initially");
   assert_equals(vid.duration, NaN, "A MediaStream does not have any duration initially.");
+  assert_equals(vid.preload, "none", "preload must always be none");
+  vid.preload = "metadata";
+  assert_equals(vid.preload, "none", "Setting preload must be ignored");
 
   const haveLoadedData = new Promise(r => vid.addEventListener("loadeddata", r, {once: true}));
 
   await new Promise(r => vid.addEventListener("timeupdate", r, {once: true}));
   assert_equals(vid.played.length, 1, "A MediaStream's timeline always consists of a single range");
   assert_equals(vid.played.start(0), 0, "A MediaStream's timeline always starts at zero");
   assert_equals(vid.played.end(0), vid.currentTime, "A MediaStream's end MUST return the last known currentTime, says mediacapture-main");
   assert_equals(vid.duration, Infinity, "A MediaStream does not have a pre-defined duration. ");
--- a/testing/web-platform/tests/tools/wpt/browser.py
+++ b/testing/web-platform/tests/tools/wpt/browser.py
@@ -39,17 +39,17 @@ class Browser(object):
 
         If the WebDriver for the browser is able to find the binary itself, this
         method doesn't need to be implemented, in which case NotImplementedError
         is suggested to be raised to prevent accidental use.
         """
         return NotImplemented
 
     @abstractmethod
-    def find_webdriver(self):
+    def find_webdriver(self, channel=None):
         """Find the binary of the WebDriver."""
         return NotImplemented
 
     @abstractmethod
     def version(self, binary=None):
         """Retrieve the release version of the installed browser."""
         return NotImplemented
 
@@ -209,17 +209,17 @@ class Firefox(Browser):
     def find_certutil(self):
         path = find_executable("certutil")
         if path is None:
             return None
         if os.path.splitdrive(path)[1].split(os.path.sep) == ["", "Windows", "system32", "certutil.exe"]:
             return None
         return path
 
-    def find_webdriver(self):
+    def find_webdriver(self, channel=None):
         return find_executable("geckodriver")
 
     def get_version_and_channel(self, binary):
         version_string = call(binary, "--version").strip()
         m = re.match(r"Mozilla Firefox (\d+\.\d+(?:\.\d+)?)(a|b)?", version_string)
         if not m:
             return None, "nightly"
         version, status = m.groups()
@@ -385,17 +385,17 @@ class Fennec(Browser):
     requirements = "requirements_firefox.txt"
 
     def install(self, dest=None, channel=None):
         raise NotImplementedError
 
     def find_binary(self, venv_path=None, channel=None):
         raise NotImplementedError
 
-    def find_webdriver(self):
+    def find_webdriver(self, channel=None):
         raise NotImplementedError
 
     def install_webdriver(self, dest=None, channel=None):
         raise NotImplementedError
 
     def version(self, binary=None):
         return None
 
@@ -439,17 +439,17 @@ class Chrome(Browser):
         elif platform == "win":
             bits = "32"
 
         return "%s%s" % (platform, bits)
 
     def find_binary(self, venv_path=None, channel=None):
         raise NotImplementedError
 
-    def find_webdriver(self):
+    def find_webdriver(self, channel=None):
         return find_executable("chromedriver")
 
     def install_webdriver(self, dest=None, channel=None):
         if dest is None:
             dest = os.pwd
         latest = get("http://chromedriver.storage.googleapis.com/LATEST_RELEASE").text.strip()
         url = "http://chromedriver.storage.googleapis.com/%s/chromedriver_%s.zip" % (latest,
                                                                                      self.platform_string())
@@ -487,17 +487,17 @@ class ChromeAndroid(Browser):
     requirements = "requirements_chrome_android.txt"
 
     def install(self, dest=None, channel=None):
         raise NotImplementedError
 
     def find_binary(self, venv_path=None, channel=None):
         raise NotImplementedError
 
-    def find_webdriver(self):
+    def find_webdriver(self, channel=None):
         return find_executable("chromedriver")
 
     def install_webdriver(self, dest=None, channel=None):
         chrome = Chrome()
         return chrome.install_webdriver(dest, channel)
 
     def version(self, binary):
         return None
@@ -540,17 +540,17 @@ class Opera(Browser):
         elif platform == "win":
             bits = "32"
 
         return "%s%s" % (platform, bits)
 
     def find_binary(self, venv_path=None, channel=None):
         raise NotImplementedError
 
-    def find_webdriver(self):
+    def find_webdriver(self, channel=None):
         return find_executable("operadriver")
 
     def install_webdriver(self, dest=None, channel=None):
         if dest is None:
             dest = os.pwd
         latest = get("https://api.github.com/repos/operasoftware/operachromiumdriver/releases/latest").json()["tag_name"]
         url = "https://github.com/operasoftware/operachromiumdriver/releases/download/%s/operadriver_%s.zip" % (latest,
                                                                                                                 self.platform_string())
@@ -583,17 +583,17 @@ class Edge(Browser):
     requirements = "requirements_edge.txt"
 
     def install(self, dest=None, channel=None):
         raise NotImplementedError
 
     def find_binary(self, venv_path=None, channel=None):
         raise NotImplementedError
 
-    def find_webdriver(self):
+    def find_webdriver(self, channel=None):
         return find_executable("MicrosoftWebDriver")
 
     def install_webdriver(self, dest=None, channel=None):
         raise NotImplementedError
 
     def version(self, binary):
         return None
 
@@ -609,17 +609,17 @@ class InternetExplorer(Browser):
     requirements = "requirements_ie.txt"
 
     def install(self, dest=None, channel=None):
         raise NotImplementedError
 
     def find_binary(self, venv_path=None, channel=None):
         raise NotImplementedError
 
-    def find_webdriver(self):
+    def find_webdriver(self, channel=None):
         return find_executable("IEDriverServer.exe")
 
     def install_webdriver(self, dest=None, channel=None):
         raise NotImplementedError
 
     def version(self, binary):
         return None
 
@@ -634,18 +634,21 @@ class Safari(Browser):
     requirements = "requirements_safari.txt"
 
     def install(self, dest=None, channel=None):
         raise NotImplementedError
 
     def find_binary(self, venv_path=None, channel=None):
         raise NotImplementedError
 
-    def find_webdriver(self):
-        return find_executable("safaridriver")
+    def find_webdriver(self, channel=None):
+        path = None
+        if channel == "preview":
+            path = "/Applications/Safari Technology Preview.app/Contents/MacOS"
+        return find_executable("safaridriver", path)
 
     def install_webdriver(self, dest=None, channel=None):
         raise NotImplementedError
 
     def version(self, binary):
         return None
 
 
@@ -698,17 +701,17 @@ class Servo(Browser):
         return path
 
     def find_binary(self, venv_path=None, channel=None):
         path = find_executable("servo", os.path.join(venv_path, "servo"))
         if path is None:
             path = find_executable("servo")
         return path
 
-    def find_webdriver(self):
+    def find_webdriver(self, channel=None):
         return None
 
     def install_webdriver(self, dest=None, channel=None):
         raise NotImplementedError
 
     def version(self, binary):
         """Retrieve the release version of the installed browser."""
         output = call(binary, "--version")
@@ -722,17 +725,17 @@ class Sauce(Browser):
     requirements = "requirements_sauce.txt"
 
     def install(self, dest=None, channel=None):
         raise NotImplementedError
 
     def find_binary(self, venev_path=None, channel=None):
         raise NotImplementedError
 
-    def find_webdriver(self):
+    def find_webdriver(self, channel=None):
         raise NotImplementedError
 
     def install_webdriver(self, dest=None, channel=None):
         raise NotImplementedError
 
     def version(self, binary):
         return None
 
@@ -744,16 +747,16 @@ class WebKit(Browser):
     requirements = "requirements_webkit.txt"
 
     def install(self, dest=None, channel=None):
         raise NotImplementedError
 
     def find_binary(self, venv_path=None, channel=None):
         return None
 
-    def find_webdriver(self):
+    def find_webdriver(self, channel=None):
         return None
 
     def install_webdriver(self, dest=None, channel=None):
         raise NotImplementedError
 
     def version(self, binary):
         return None
--- a/testing/web-platform/tests/tools/wpt/install.py
+++ b/testing/web-platform/tests/tools/wpt/install.py
@@ -1,16 +1,18 @@
 import argparse
 import browser
 import sys
 
 
 latest_channels = {
     'firefox': 'nightly',
     'chrome': 'dev',
+    'safari': 'preview',
+    'safari_webdriver': 'preview',
     'servo': 'nightly'
 }
 
 channel_by_name = {
     'stable': 'stable',
     'release': 'stable',
     'beta': 'beta',
     'nightly': latest_channels,
--- a/testing/web-platform/tests/tools/wpt/run.py
+++ b/testing/web-platform/tests/tools/wpt/run.py
@@ -378,17 +378,17 @@ class Safari(BrowserSetup):
     name = "safari"
     browser_cls = browser.Safari
 
     def install(self, venv, channel=None):
         raise NotImplementedError
 
     def setup_kwargs(self, kwargs):
         if kwargs["webdriver_binary"] is None:
-            webdriver_binary = self.browser.find_webdriver()
+            webdriver_binary = self.browser.find_webdriver(channel=kwargs["browser_channel"])
 
             if webdriver_binary is None:
                 raise WptrunError("Unable to locate safaridriver binary")
 
             kwargs["webdriver_binary"] = webdriver_binary
 
 
 class SafariWebDriver(Safari):
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/tests/webaudio/the-audio-api/the-audioworklet-interface/audioworklet-postmessage-sharedarraybuffer.https.html
@@ -0,0 +1,85 @@
+<!DOCTYPE html>
+<html>
+  <head>
+    <title>
+      Test passing SharedArrayBuffer to an AudioWorklet
+    </title>
+    <script src="/resources/testharness.js"></script>
+    <script src="/resources/testharnessreport.js"></script>
+    <script src="/webaudio/resources/audit.js"></script>
+  </head>
+  <body>
+    <script id="layout-test-code">
+      let audit = Audit.createTaskRunner();
+
+      let context = new AudioContext();
+
+      let filePath = 'processors/sharedarraybuffer-processor.js';
+
+      if (window.SharedArrayBuffer) {
+        audit.define(
+            'Test postMessage from AudioWorkletProcessor to AudioWorkletNode',
+            (task, should) => {
+              let workletNode =
+                  new AudioWorkletNode(context, 'sharedarraybuffer-processor');
+
+              // After it is created, the worklet will send a new
+              // SharedArrayBuffer to the main thread.
+              //
+              // The worklet will then wait to receive a message from the main
+              // thread.
+              //
+              // When it receives the message, it will check whether it is a
+              // SharedArrayBuffer, and send this information back to the main
+              // thread.
+
+              workletNode.port.onmessage = (event) => {
+                let data = event.data;
+                switch (data.state) {
+                  case 'created':
+                    should(
+                        data.sab instanceof SharedArrayBuffer,
+                        'event.data.sab from worklet is an instance of SharedArrayBuffer')
+                        .beTrue();
+
+                    // Send a SharedArrayBuffer back to the worklet.
+                    let sab = new SharedArrayBuffer(8);
+                    workletNode.port.postMessage(sab);
+                    break;
+
+                  case 'received message':
+                    should(data.isSab, 'event.data from main thread is an instance of SharedArrayBuffer')
+                        .beTrue();
+                    task.done();
+                    break;
+
+                  default:
+                    should(false,
+                           `Got unexpected message from worklet: ${data.state}`)
+                        .beTrue();
+                    task.done();
+                    break;
+                }
+              };
+
+              workletNode.port.onmessageerror = (event) => {
+                should(false, 'Got messageerror from worklet').beTrue();
+                task.done();
+              };
+            });
+      } else {
+        // NOTE(binji): SharedArrayBuffer is only enabled where we have site
+        // isolation.
+        audit.define('Skipping test because SharedArrayBuffer is not defined',
+          (task, should) => {
+            task.done();
+          });
+      }
+
+      context.audioWorklet.addModule(filePath).then(() => {
+        audit.run();
+      });
+    </script>
+  </body>
+</html>
+
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/tests/webaudio/the-audio-api/the-audioworklet-interface/processors/sharedarraybuffer-processor.js
@@ -0,0 +1,35 @@
+/**
+ * @class SharedArrayBufferProcessor
+ * @extends AudioWorkletProcessor
+ *
+ * This processor class demonstrates passing SharedArrayBuffers to and from
+ * workers.
+ */
+class SharedArrayBufferProcessor extends AudioWorkletProcessor {
+  constructor() {
+    super();
+    this.port.onmessage = this.handleMessage.bind(this);
+    this.port.onmessageerror = this.handleMessageError.bind(this);
+    let sab = new SharedArrayBuffer(8);
+    this.port.postMessage({state: 'created', sab});
+  }
+
+  handleMessage(event) {
+    this.port.postMessage({
+      state: 'received message',
+      isSab: event.data instanceof SharedArrayBuffer
+    });
+  }
+
+  handleMessageError(event) {
+    this.port.postMessage({
+      state: 'received messageerror'
+    });
+  }
+
+  process() {
+    return true;
+  }
+}
+
+registerProcessor('sharedarraybuffer-processor', SharedArrayBufferProcessor);
deleted file mode 100644
--- a/third_party/rust/atty-0.1.2/.cargo-checksum.json
+++ /dev/null
@@ -1,1 +0,0 @@
-{"files":{".travis.yml":"71e232ec96a9f11083a6ac2e3de7d3482032a4a9ed02c0e1be30b46da47cacef","CHANGELOG.md":"6c8e98f58fc7c4c3b7039027ff521a070b511f2882eb9985e32f118aff4ee4c0","Cargo.toml":"c87fbd92db7e1f7ace5b082a4168161e21e5ac76320ad44e01b7e3ea88aeee6e","LICENSE":"643adba34cf48432ba1bac872fdd5686d129c64e06246399bacf20142820620b","README.md":"3768d87584c808a133df7547996900d7574801f2021b6e6bc8c94cd0040b3cf8","appveyor.yml":"ab80c6004eeccda11d3e10284c7cd1bc8ecc87765204dfbf9c1dc4eb3843b86a","src/lib.rs":"16610a89cc5b9f0682a08507b4aea6b1e50ed6e78bc9a63acb6317e23a84477b"},"package":"d0fd4c0631f06448cc45a6bbb3b710ebb7ff8ccb96a0800c994afe23a70d5df2"}
\ No newline at end of file
deleted file mode 100644
--- a/third_party/rust/atty-0.1.2/.travis.yml
+++ /dev/null
@@ -1,43 +0,0 @@
-sudo: false
-language: rust
-matrix:
-  fast_finish: true
-  include:
-    - rust: nightly
-    - rust: beta
-    - rust: stable
-os:
- - linux
- - osx
-script:
-  - cargo build
-  - cargo test
-cache:
-  apt: true
-  directories:
-  - target/debug/deps
-  - target/debug/build
-addons:
-  apt:
-    packages:
-    - libcurl4-openssl-dev
-    - libelf-dev
-    - libdw-dev
-    - binutils-dev # required for `kcov --verify`
-    - libbfd-dev # required for `kcov --verify`
-after_success: |
-  [ $TRAVIS_RUST_VERSION = stable ] &&
-  wget https://github.com/SimonKagstrom/kcov/archive/master.tar.gz &&
-  tar xzf master.tar.gz && mkdir kcov-master/build && cd kcov-master/build && cmake .. && make && make install DESTDIR=../tmp && cd ../.. &&
-  ls target/debug &&
-  ./kcov-master/tmp/usr/local/bin/kcov --verify --coveralls-id=$TRAVIS_JOB_ID --exclude-pattern=/.cargo target/kcov target/debug/atty-* &&
-  [ $TRAVIS_BRANCH = master ] &&
-  [ $TRAVIS_PULL_REQUEST = false ] &&
-  cargo doc --no-deps &&
-  echo "<meta http-equiv=refresh content=0;url=`echo $TRAVIS_REPO_SLUG | cut -d '/' -f 2`/index.html>" > target/doc/index.html &&
-  sudo pip install --user ghp-import &&
-  /home/travis/.local/bin/ghp-import -n target/doc &&
-  git push -fq https://${GH_TOKEN}@github.com/${TRAVIS_REPO_SLUG}.git gh-pages
-env:
-  global:
-    secure: acjXoBFG4yFklz/iW4q9PLaMmTgug0c8hOov4uiaXYjDkVGhnEePBozGc8ctKuFv2BVlwBSzvE1neE9dHcCS6il0x+G79sVTekfVN5dERja3UpwrC0/QodJuDmErIUpb6zylupPnUGq5pzZabRPNKyAnsFS5wYhLMSLxGPu4pfYdW0Eu8CEPIgPYsI6o2pfKgNpXbeizdHRLMeZCN4cbEPohO1odc+Z6WJvgKn2xEkpAcfhAuaroqGGxRtmDiJZ/JaBijAKY/O9Q3Xq1GSGOPT5lmwJSp3Fxw5dgmeX6LmN0ZODASdnEoYfoqUDUFzkCON3Sk4a7hugxlkZ7cx1tfqXxMg+0BgYIUdGQNloDJnuusWvXPBFdB2jxMsfcbrCjNsrJ8kjN6uBsW9yy0kqN7a8eOJckwh5fYRWfNta0R+BrveNXWmGp4u4aBq/85jEiHi30XKTzaEUbF0Y3cIONweWeWwBOcAvPBhO63Y07TRRe+SSk1NYm7QHGW9RsHhz89OSbaIXqn+r/o+6DZcw5XaO73DtZ62Kx48NErej9kVqcIJ6HnyvCJ/fJoT7h1ixSRI/WmS30l2S/q33Q2G4C/IZ4ZZRD/1thSltAxeA6OAUnr8ITZyW47CqOmyL1IUptrdAb9OLEedYV/QrOhcg2RJLXyP66xnItOwMp014bEp4=
deleted file mode 100644
--- a/third_party/rust/atty-0.1.2/CHANGELOG.md
+++ /dev/null
@@ -1,12 +0,0 @@
-# 0.1.2
-
-* windows support (with automated testing)
-* automated code coverage
-
-# 0.1.1
-
-* bumped libc dep from `0.1` to `0.2`
-
-# 0.1.0
-
-* initial release
deleted file mode 100644
--- a/third_party/rust/atty-0.1.2/Cargo.toml
+++ /dev/null
@@ -1,15 +0,0 @@
-[package]
-name = "atty"
-version = "0.1.2"
-authors = ["softprops <d.tangren@gmail.com>"]
-description = "A simple interface for querying atty"
-documentation = "http://softprops.github.io/atty"
-homepage = "https://github.com/softprops/atty"
-repository = "https://github.com/softprops/atty"
-keywords = ["terminal", "tty"]
-license = "MIT"
-
-[dependencies]
-libc = "0.2"
-winapi = "0.2"
-kernel32-sys = "0.2"
\ No newline at end of file
deleted file mode 100644
--- a/third_party/rust/atty-0.1.2/LICENSE
+++ /dev/null
@@ -1,20 +0,0 @@
-Copyright (c) 2015 Doug Tangren
-
-Permission is hereby granted, free of charge, to any person obtaining
-a copy of this software and associated documentation files (the
-"Software"), to deal in the Software without restriction, including
-without limitation the rights to use, copy, modify, merge, publish,
-distribute, sublicense, and/or sell copies of the Software, and to
-permit persons to whom the Software is furnished to do so, subject to
-the following conditions:
-
-The above copyright notice and this permission notice shall be
-included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
-LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
-WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
\ No newline at end of file
deleted file mode 100644
--- a/third_party/rust/atty-0.1.2/README.md
+++ /dev/null
@@ -1,20 +0,0 @@
-# atty
-
-[![Build Status](https://travis-ci.org/softprops/atty.svg?branch=master)](https://travis-ci.org/softprops/atty) [![Build status](https://ci.appveyor.com/api/projects/status/geggrsnsjsuse8cv?svg=true)](https://ci.appveyor.com/project/softprops/atty) [![Coverage Status](https://coveralls.io/repos/softprops/atty/badge.svg?branch=master&service=github)](https://coveralls.io/github/softprops/atty?branch=master)
-
-> are you or are you not a tty?
-
-## docs
-
-Find them [here](http://softprops.github.io/atty)
-
-## install
-
-Add the following to your `Cargo.toml`
-
-```toml
-[dependencies]
-atty = "0.1"
-```
-
-Doug Tangren (softprops) 2015
deleted file mode 100644
--- a/third_party/rust/atty-0.1.2/appveyor.yml
+++ /dev/null
@@ -1,19 +0,0 @@
-environment:
-  matrix:
-  - TARGET: nightly-x86_64-pc-windows-msvc
-    VCVARS: "C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\bin\\amd64\\vcvars64.bat"
-  - TARGET: nightly-i686-pc-windows-msvc
-    VCVARS: "C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\bin\\vcvars32.bat"
-  - TARGET: nightly-x86_64-pc-windows-gnu
-  - TARGET: nightly-i686-pc-windows-gnu
-  - TARGET: 1.2.0-x86_64-pc-windows-gnu
-install:
-  - ps: Start-FileDownload "https://static.rust-lang.org/dist/rust-${env:TARGET}.exe" -FileName "rust-install.exe"
-  - ps: .\rust-install.exe /VERYSILENT /NORESTART /DIR="C:\rust" | Out-Null
-  - ps: $env:PATH="$env:PATH;C:\rust\bin"
-  - call "%VCVARS%" || ver>nul
-  - rustc -vV
-  - cargo -vV
-build: false
-test_script:
-  - cargo test --verbose
\ No newline at end of file
deleted file mode 100644
--- a/third_party/rust/atty-0.1.2/src/lib.rs
+++ /dev/null
@@ -1,62 +0,0 @@
-//! atty is a simple utility that answers one question
-//! > is this a tty?
-//!
-//! usage is just as simple
-//!
-//! ```
-//! if atty::is() {
-//!   println!("i'm a tty")
-//! }
-//! ```
-//!
-//! ```
-//! if atty::isnt() {
-//!   println!("i'm not a tty")
-//! }
-//! ```
-
-extern crate libc;
-
-/// returns true if this is a tty
-#[cfg(unix)]
-pub fn is() -> bool {
-    let r = unsafe { libc::isatty(libc::STDOUT_FILENO) };
-    r != 0
-}
-
-/// returns true if this is a tty
-#[cfg(windows)]
-pub fn is() -> bool {
-    extern crate kernel32;
-    extern crate winapi;
-    use std::ptr;
-    let handle: winapi::HANDLE = unsafe {
-        kernel32::CreateFileA(b"CONOUT$\0".as_ptr() as *const i8,
-                              winapi::GENERIC_READ | winapi::GENERIC_WRITE,
-                              winapi::FILE_SHARE_WRITE,
-                              ptr::null_mut(),
-                              winapi::OPEN_EXISTING,
-                              0,
-                              ptr::null_mut())
-    };
-    if handle == winapi::INVALID_HANDLE_VALUE {
-        return false;
-    }
-    let mut out = 0;
-    unsafe { kernel32::GetConsoleMode(handle, &mut out) != 0 }
-}
-
-/// returns true if this is _not_ a tty
-pub fn isnt() -> bool {
-    !is()
-}
-
-#[cfg(test)]
-mod tests {
-    use super::is;
-
-    #[test]
-    fn is_test() {
-        assert!(is())
-    }
-}
--- a/third_party/rust/binjs_meta/.cargo-checksum.json
+++ b/third_party/rust/binjs_meta/.cargo-checksum.json
@@ -1,1 +1,1 @@
-{"files":{".cargo_vcs_info.json":"5a666f68ab005317d058d78c58936ebf66086a242f6a4b8415230649bbce768d","Cargo.toml":"04c87832069d5462b4b87c935fa448213e00a804fcf827334a02beda1fd7f971","README.md":"17e5ed3a3bd9b898e73c3056711daabe1238fe9682d24d255f8263fae4eb783d","examples/generate_spidermonkey.rs":"a831abf8d7a1ab73c5d70a9e8517b8af1df492589a2f180698145ac5d46d7102","src/export.rs":"e889c2f45f00c1787e2270a50fc6d9628446d620c3c0d2ac6ba3f031c561197d","src/import.rs":"7a8525aa55ff0c6c266edfb69a351345ab0c36176deeb0fb91901d4a4e6bd9d6","src/lib.rs":"d4ea18ec850054a817c6b91ed52412a2f2f39639628e5918dee688d829d3ed4b","src/spec.rs":"8f442a5d218360681ad3a5b4c4740b7ae227e087eb745df38cca07a88d8484c4","src/util.rs":"1d934eec75d9dee44289f9a9a9e67c96dd6205367430b9bcf9fc66e730bf6eb0"},"package":"cc0956bac41c458cf38340699dbb54c2220c91cdbfa33be19670fe69e0a6ac9b"}
\ No newline at end of file
+{"files":{".cargo_vcs_info.json":"13d7d8f1c677eb54c2b0005b8e048b79461b91176796088fc70f5d40ffbefd0a","Cargo.toml":"eee9b8c9f05e442ed41ee986b07b443cb89465346dce4aae4f73f13fa7243492","README.md":"17e5ed3a3bd9b898e73c3056711daabe1238fe9682d24d255f8263fae4eb783d","examples/generate_spidermonkey.rs":"a831abf8d7a1ab73c5d70a9e8517b8af1df492589a2f180698145ac5d46d7102","src/export.rs":"56910e257a000cac963b9ac377558767d05076b677e83a7e75e570ecbd4b35f6","src/import.rs":"366bada1b19c608ffe7dc4761f1db1a1dae616f4ed99685e4260a00d5c0125d0","src/lib.rs":"d4ea18ec850054a817c6b91ed52412a2f2f39639628e5918dee688d829d3ed4b","src/spec.rs":"7cfb4705d9cfa72ba0a34c5d5beab7e23ac54d8e9fa125317364535d5aa7496a","src/util.rs":"1d934eec75d9dee44289f9a9a9e67c96dd6205367430b9bcf9fc66e730bf6eb0"},"package":"430239e4551e42b80fa5d92322ac80ea38c9dda56e5d5582e057e2288352b71a"}
\ No newline at end of file
--- a/third_party/rust/binjs_meta/.cargo_vcs_info.json
+++ b/third_party/rust/binjs_meta/.cargo_vcs_info.json
@@ -1,5 +1,5 @@
 {
   "git": {
-    "sha1": "4c24254cdcfba7a929573f34e5ac12686a86bb60"
+    "sha1": "da502c023e7c92bff0003109935a8767d9176637"
   }
 }
--- a/third_party/rust/binjs_meta/Cargo.toml
+++ b/third_party/rust/binjs_meta/Cargo.toml
@@ -7,17 +7,17 @@
 #
 # If you believe there's an error in this file please file an
 # issue against the rust-lang/cargo repository. If you're
 # editing this file be aware that the upstream Cargo.toml
 # will likely look very different (and much more reasonable)
 
 [package]
 name = "binjs_meta"
-version = "0.3.10"
+version = "0.4.3"
 authors = ["David Teller <D.O.Teller@gmail.com>"]
 description = "Part of binjs-ref. Tools for manipulating grammars. You probably do not want to use this crate directly unless you're writing an encoder, decoder or parser generator for binjs."
 homepage = "https://binast.github.io/ecmascript-binary-ast/"
 readme = "README.md"
 keywords = ["javascript", "js", "binjs", "ast"]
 categories = ["compression", "parsing", "web-programming"]
 license = "MIT"
 repository = "https://github.com/binast/binjs-ref"
@@ -26,17 +26,17 @@ version = "^0.11"
 
 [dependencies.itertools]
 version = "^0.7"
 
 [dependencies.log]
 version = "^0.4"
 
 [dependencies.webidl]
-version = "^0.6"
+version = "^0.8"
 [dev-dependencies.clap]
 version = "^2"
 
 [dev-dependencies.env_logger]
 version = "^0.5"
 
 [dev-dependencies.yaml-rust]
 version = "^0.4"
--- a/third_party/rust/binjs_meta/src/export.rs
+++ b/third_party/rust/binjs_meta/src/export.rs
@@ -110,21 +110,20 @@ impl TypeDeanonymizer {
             // Copy the declaration.
             let mut declaration = result.builder.add_interface(name)
                 .unwrap();
             for field in fields.drain(..) {
                 // Create *_skip field just before the lazy field.
                 // See also tagged_tuple in write.rs.
                 if field.is_lazy() {
                     declaration.with_field(skip_name_map.get(field.name()).unwrap(),
-                                           Type::offset().required(),
-                                           Laziness::Eager);
+                                           Type::offset().required());
                 }
-                declaration.with_field(field.name(), field.type_().clone(),
-                                       field.laziness());
+                declaration.with_field_laziness(field.name(), field.type_().clone(),
+                                                field.laziness());
             }
         }
         // Copy and deanonymize typedefs
         for (name, definition) in spec.typedefs_by_name() {
             result.builder.import_node_name(name);
             if result.builder.get_typedef(name).is_some() {
                 // Already imported by following links.
                 continue
@@ -186,28 +185,37 @@ impl TypeDeanonymizer {
         }
     }
     fn import_typespec(&mut self, spec: &Spec, type_spec: &TypeSpec, public_name: Option<NodeName>) -> (Option<HashSet<NodeName>>, NodeName) {
         debug!(target: "export_utils", "import_typespec {:?} => {:?}", public_name, type_spec);
         match *type_spec {
             TypeSpec::Boolean |
             TypeSpec::Number |
             TypeSpec::UnsignedLong |
+            TypeSpec::PropertyKey |
+            TypeSpec::IdentifierName |
             TypeSpec::String |
             TypeSpec::Offset |
             TypeSpec::Void    => {
                 if let Some(ref my_name) = public_name {
                     if let Some(ref mut typedef) = self.builder.add_typedef(&my_name) {
                         debug!(target: "export_utils", "import_typespec: Defining {name} (primitive)", name = my_name.to_str());
                         typedef.with_type(type_spec.clone().required());
                     } else {
                         debug!(target: "export_utils", "import_typespec: Attempting to redefine typedef {name}", name = my_name.to_str());
                     }
                 }
-                (None, self.builder.node_name("@@"))
+                // This is a workaround for typedefs in the webidl that are not truly typedefs.
+                // See https://github.com/Yoric/ecmascript-binary-ast/pull/1
+                let name = match *type_spec {
+                    TypeSpec::PropertyKey => self.builder.node_name("PropertyKey"),
+                    TypeSpec::IdentifierName => self.builder.node_name("IdentifierName"),
+                    _ => self.builder.node_name(&format!("@@{:?}", type_spec)),
+                };
+                (None, name)
             }
             TypeSpec::NamedType(ref link) => {
                 let resolved = spec.get_type_by_name(link)
                     .unwrap_or_else(|| panic!("While deanonymizing, could not find the definition of {} in the original spec.", link.to_str()));
                 let (sum, rewrite, primitive) = match resolved {
                     NamedType::StringEnum(_) => {
                         // - Can't use in a sum
                         // - No rewriting happened.
@@ -233,21 +241,23 @@ impl TypeDeanonymizer {
                 if let Some(ref my_name) = public_name {
                     // If we have a public name, alias it to `content`
                     if let Some(content) = rewrite {
                         let deanonymized = match primitive {
                             None |
                             Some(IsNullable { is_nullable: true, .. }) |
                             Some(IsNullable { content: Primitive::Interface(_), .. }) => Type::named(&content).required(),
                             Some(IsNullable { content: Primitive::String, .. }) => Type::string().required(),
+                            Some(IsNullable { content: Primitive::IdentifierName, .. }) => Type::identifier_name().required(),
+                            Some(IsNullable { content: Primitive::PropertyKey, .. }) => Type::property_key().required(),
                             Some(IsNullable { content: Primitive::Number, .. }) => Type::number().required(),
                             Some(IsNullable { content: Primitive::UnsignedLong, .. }) => Type::unsigned_long().required(),
                             Some(IsNullable { content: Primitive::Boolean, .. }) => Type::bool().required(),
                             Some(IsNullable { content: Primitive::Offset, .. }) => Type::offset().required(),
-                            Some(IsNullable { content: Primitive::Void, .. }) => Type::void().required()
+                            Some(IsNullable { content: Primitive::Void, .. }) => Type::void().required(),
                         };
                         debug!(target: "export_utils", "import_typespec aliasing {:?} => {:?}",
                             my_name, deanonymized);
                         if let Some(ref mut typedef) = self.builder.add_typedef(&my_name) {
                             debug!(target: "export_utils", "import_typespec: Defining {name} (name to content)", name = my_name.to_str());
                             typedef.with_type(deanonymized.clone());
                         } else {
                             debug!(target: "export_utils", "import_typespec: Attempting to redefine typedef {name}", name = my_name.to_str());
@@ -370,16 +380,20 @@ impl TypeName {
             TypeSpec::Number =>
                 "_Number".to_string(),
             TypeSpec::UnsignedLong =>
                 "_UnsignedLong".to_string(),
             TypeSpec::String =>
                 "_String".to_string(),
             TypeSpec::Void =>
                 "_Void".to_string(),
+            TypeSpec::IdentifierName =>
+                "IdentifierName".to_string(),
+            TypeSpec::PropertyKey =>
+                "PropertyKey".to_string(),
             TypeSpec::TypeSum(ref sum) => {
                 format!("{}", sum.types()
                     .iter()
                     .map(Self::type_spec)
                     .format("Or"))
             }
         }
     }
@@ -403,16 +417,20 @@ impl ToWebidl {
                         description,
                         emptiness = if *supports_empty { "" } else {"[NonEmpty] "} ),
                 }
             }
             TypeSpec::Boolean =>
                 "bool".to_string(),
             TypeSpec::String =>
                 "string".to_string(),
+            TypeSpec::PropertyKey =>
+                "[PropertyKey] string".to_string(),
+            TypeSpec::IdentifierName =>
+                "[IdentifierName] string".to_string(),
             TypeSpec::Number =>
                 "number".to_string(),
             TypeSpec::UnsignedLong =>
                 "unsigned long".to_string(),
             TypeSpec::NamedType(ref name) =>
                 name.to_str().to_string(),
             TypeSpec::TypeSum(ref sum) => {
                 format!("({})", sum.types()
--- a/third_party/rust/binjs_meta/src/import.rs
+++ b/third_party/rust/binjs_meta/src/import.rs
@@ -1,14 +1,16 @@
-use spec::{ self, SpecBuilder, TypeSum, Laziness };
+use spec::{ self, Laziness, SpecBuilder, TypeSum };
 
 use webidl::ast::*;
 
 pub struct Importer {
     builder: SpecBuilder,
+    /// The interfaces we have traversed so far.
+    path: Vec<String>,
 }
 impl Importer {
     /// Import an AST into a SpecBuilder.
     ///
     /// ```
     /// extern crate binjs_meta;
     /// extern crate webidl;
     /// use webidl;
@@ -57,16 +59,17 @@ impl Importer {
     ///     let contents_field =
     ///         interface_lazy.get_field_by_name(&name_contents)
     ///         .expect("Missing field contents");
     ///     assert_eq!(contents_field.is_lazy(), true);
     /// }
     /// ```
     pub fn import(ast: &AST) -> SpecBuilder {
         let mut importer = Importer {
+            path: Vec::with_capacity(256),
             builder: SpecBuilder::new()
         };
         importer.import_ast(ast);
         importer.builder
     }
     fn import_ast(&mut self, ast: &AST) {
         for definition in ast {
             self.import_definition(&definition)
@@ -85,71 +88,125 @@ impl Importer {
         let mut node = self.builder.add_string_enum(&name)
             .expect("Name already present");
         for variant in &enum_.variants {
             node.with_string(variant);
         }
     }
     fn import_typedef(&mut self, typedef: &Typedef) {
         let name = self.builder.node_name(&typedef.name);
-        let type_ = self.convert_type(&*typedef.type_);
+        // The following are, unfortunately, not true typedefs.
+        // Ignore their definition.
+        let type_ = match typedef.name.as_ref() {
+            "Identifier"  => spec::TypeSpec::IdentifierName
+                .required(),
+            "IdentifierName" => spec::TypeSpec::IdentifierName
+                .required(),
+            "PropertyKey" => spec::TypeSpec::PropertyKey
+                .required(),
+            _ => self.convert_type(&*typedef.type_)
+        };
+        debug!(target: "meta::import", "Importing typedef {type_:?} {name:?}",
+            type_ = type_,
+            name = name);
         let mut node = self.builder.add_typedef(&name)
-            .expect("Name already present");
+            .unwrap_or_else(|| panic!("Error: Name {} is defined more than once in the spec.", name));
         assert!(!type_.is_optional());
         node.with_spec(type_.spec);
     }
     fn import_interface(&mut self, interface: &Interface) {
         let interface = if let &Interface::NonPartial(ref interface) = interface {
             interface
         } else {
             panic!("Expected a non-partial interface, got {:?}", interface);
         };
-        if interface.name == "Node" {
-            // We're not interested in the root interface.
-            return;
+
+        // Handle special, hardcoded, interfaces.
+        match interface.name.as_ref() {
+            "Node" => {
+                // We're not interested in the root interface.
+                return;
+            }
+            "IdentifierName" => {
+                unimplemented!()
+            }
+            _ => {
+
+            }
         }
         if let Some(ref parent) = interface.inherits {
             assert_eq!(parent, "Node");
         }
+
+        self.path.push(interface.name.clone());
+
+        // Now handle regular stuff.
         let mut fields = Vec::new();
         for member in &interface.members {
             if let InterfaceMember::Attribute(Attribute::Regular(ref attribute)) = *member {
+                use webidl::ast::ExtendedAttribute::NoArguments;
+                use webidl::ast::Other::Identifier;
+
                 let name = self.builder.field_name(&attribute.name);
                 let type_ = self.convert_type(&*attribute.type_);
-                let mut laziness = Laziness::Eager;
 
-                for extended_attribute in &attribute.extended_attributes {
-                    use webidl::ast::ExtendedAttribute::NoArguments;
-                    use webidl::ast::Other::Identifier;
-                    if let &NoArguments(Identifier(ref id)) = extended_attribute.as_ref() {
-                        if &*id == "Lazy" {
-                            laziness = Laziness::Lazy;
+                let is_lazy = attribute.extended_attributes.iter()
+                    .find(|attribute| {
+                        if let &NoArguments(Identifier(ref id)) = attribute.as_ref() {
+                            if &*id == "Lazy" {
+                                return true;
+                            }
                         }
-                    }
-                }
-
-                fields.push((name, type_, laziness));
+                        false
+                    })
+                    .is_some();
+                fields.push((name, type_, if is_lazy { Laziness::Lazy } else { Laziness:: Eager }));
             } else {
                 panic!("Expected an attribute, got {:?}", member);
             }
         }
         let name = self.builder.node_name(&interface.name);
         let mut node = self.builder.add_interface(&name)
             .expect("Name already present");
-        for (field_name, field_type, field_laziness) in fields.drain(..) {
-            node.with_field(&field_name, field_type, field_laziness);
+        for (field_name, field_type, laziness) in fields.drain(..) {
+            node.with_field_laziness(&field_name, field_type, laziness);
         }
+
+        for extended_attribute in &interface.extended_attributes {
+            use webidl::ast::ExtendedAttribute::NoArguments;
+            use webidl::ast::Other::Identifier;
+            if let &NoArguments(Identifier(ref id)) = extended_attribute.as_ref() {
+                if &*id == "Skippable" {
+                    panic!("Encountered deprecated attribute [Skippable]");
+                }
+                if &*id == "Scope" {
+                    node.with_scope(true);
+                }
+            }
+        }
+        self.path.pop();
     }
     fn convert_type(&mut self, t: &Type) -> spec::Type {
         let spec = match t.kind {
             TypeKind::Boolean => spec::TypeSpec::Boolean,
             TypeKind::Identifier(ref id) => {
                 let name = self.builder.node_name(id);
-                spec::TypeSpec::NamedType(name.clone())
+                // Sadly, some identifiers are not truly `typedef`s.
+                match name.to_str() {
+                    "IdentifierName" if self.is_at_interface("StaticMemberAssignmentTarget") => spec::TypeSpec::PropertyKey,
+                    "IdentifierName" if self.is_at_interface("StaticMemberExpression") => spec::TypeSpec::PropertyKey,
+                    "IdentifierName" if self.is_at_interface("ImportSpecifier") => spec::TypeSpec::PropertyKey,
+                    "IdentifierName" if self.is_at_interface("ExportSpecifier") => spec::TypeSpec::PropertyKey,
+                    "IdentifierName" if self.is_at_interface("ExportLocalSpecifier") => spec::TypeSpec::PropertyKey,
+                    "IdentifierName" => spec::TypeSpec::IdentifierName,
+                    "Identifier" => spec::TypeSpec::IdentifierName,
+                    _ => spec::TypeSpec::NamedType(name.clone())
+                }
             }
+            TypeKind::DOMString if self.is_at_interface("LiteralPropertyName") => spec::TypeSpec::PropertyKey,
             TypeKind::DOMString => spec::TypeSpec::String,
             TypeKind::Union(ref types) => {
                 let mut dest = Vec::with_capacity(types.len());
                 for typ in types {
                     dest.push(self.convert_type(&*typ).spec)
                 }
                 spec::TypeSpec::TypeSum(TypeSum::new(dest))
             }
@@ -169,9 +226,16 @@ impl Importer {
         };
         if t.nullable {
             spec.optional()
                 .unwrap_or_else(|| panic!("This type could not be made optional {:?}", t))
         } else {
             spec.required()
         }
     }
+
+    fn is_at_interface(&self, name: &str) -> bool {
+        if self.path.len() == 0 {
+            return false;
+        }
+        self.path[0].as_str() == name
+    }
 }
--- a/third_party/rust/binjs_meta/src/spec.rs
+++ b/third_party/rust/binjs_meta/src/spec.rs
@@ -1,27 +1,43 @@
 //! Definition of a spec for a version (or subset) of JavaScript.
 
 pub use util::ToStr;
 
+use itertools::Itertools;
+
 use std;
 use std::cell::*;
 use std::collections::{ HashMap, HashSet };
 use std::fmt::{ Debug, Display };
 use std::hash::*;
 use std::rc::*;
 
+/// Whether an attribute is eager or lazy.
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum Laziness {
+    /// An eager attribute is designed to be parsed immediately.
+    Eager,
+    /// A lazy attribute is designed for deferred parsing.
+    Lazy
+}
 
 /// The name of an interface or enum.
 #[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
 pub struct NodeName(Rc<String>);
 impl NodeName {
     pub fn to_string(&self) -> &String {
         self.0.as_ref()
     }
+    pub fn to_str(&self) -> &str {
+        self.0.as_ref()
+    }
+    pub fn to_rc_string(&self) -> &Rc<String> {
+        &self.0
+    }
 }
 impl Debug for NodeName {
     fn fmt(&self, formatter: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
         Debug::fmt(self.to_str(), formatter)
     }
 }
 impl Display for NodeName {
     fn fmt(&self, formatter: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
@@ -37,16 +53,19 @@ impl ToStr for NodeName {
 
 /// The name of a field in an interface.
 #[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
 pub struct FieldName(Rc<String>);
 impl FieldName {
     pub fn to_string(&self) -> &String {
         self.0.as_ref()
     }
+    pub fn to_rc_string(&self) -> &Rc<String> {
+        &self.0
+    }
 }
 impl Debug for FieldName {
     fn fmt(&self, formatter: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
         Debug::fmt(self.to_str(), formatter)
     }
 }
 impl ToStr for FieldName {
     fn to_str(&self) -> &str {
@@ -95,30 +114,29 @@ impl TypeSum {
             if result.is_some() {
                 return result
             }
         }
         None
     }
 }
 
-/// Lazy for a field with [lazy] attribute. Eager for others.
-#[derive(Clone, Debug, PartialEq, Eq)]
-pub enum Laziness {
-    Eager,
-    Lazy,
-}
-
 /// Representation of a field in an interface.
 #[derive(Clone, PartialEq, Eq, Debug)]
 pub struct Field {
+    /// The name of the field.
     name: FieldName,
+
+    /// The type of the field.
     type_: Type,
+
+    /// Documentation for the field. Ignored for the time being.
     documentation: Option<String>,
-    laziness: Laziness,
+
+    laziness: Laziness
 }
 impl Hash for Field {
     fn hash<H>(&self, state: &mut H) where H: Hasher {
         self.name.hash(state)
     }
 }
 impl Field {
     pub fn new(name: FieldName, type_: Type) -> Self {
@@ -136,22 +154,30 @@ impl Field {
         &self.type_
     }
     pub fn is_lazy(&self) -> bool {
         self.laziness == Laziness::Lazy
     }
     pub fn laziness(&self) -> Laziness {
         self.laziness.clone()
     }
+    pub fn with_laziness(mut self, laziness: Laziness) -> Self {
+        self.laziness = laziness;
+        self
+    }
     pub fn doc(&self) -> Option<&str> {
         match self.documentation {
             None => None,
             Some(ref s) => Some(&*s)
         }
     }
+    pub fn with_doc(mut self, doc: Option<String>) -> Self {
+        self.documentation = doc;
+        self
+    }
 }
 
 /// The contents of a type, typically that of a field.
 ///
 /// Note that we generally use `Type`, to represent
 /// the fact that some fields accept `null` while
 /// others do not.
 #[derive(Clone, Debug, PartialEq, Eq)]
@@ -186,16 +212,26 @@ pub enum TypeSpec {
     /// field injected by deanonymization, to represent
     /// lazy fields.
     Offset,
 
     /// Nothing.
     ///
     /// For the moment, this spec is used only internally.
     Void,
+
+    /// A string used to represent something bound in a scope (i.e. a variable, but not a property).
+    /// At this level, we make no distinction between `Identifier` and `IdentifierName`.
+    ///
+    /// Actually maps to a subset of `IdentifierName` in webidl.
+    IdentifierName,
+
+    /// A key for a property. For the time being, we make no distinction between variants such
+    /// as `LiteralPropertyName` and `IdentifierName`-as-property-keys.
+    PropertyKey,
 }
 
 #[derive(Clone, Debug)]
 pub enum NamedType {
     Interface(Rc<Interface>),
     Typedef(Rc<Type>), // FIXME: Check that there are no cycles.
     StringEnum(Rc<StringEnum>),
 }
@@ -290,16 +326,18 @@ impl TypeSpec {
     pub fn get_primitive(&self, spec: &Spec) -> Option<IsNullable<Primitive>> {
         match *self {
             TypeSpec::Boolean => Some(IsNullable::non_nullable(Primitive::Boolean)),
             TypeSpec::Void => Some(IsNullable::non_nullable(Primitive::Void)),
             TypeSpec::Number => Some(IsNullable::non_nullable(Primitive::Number)),
             TypeSpec::UnsignedLong => Some(IsNullable::non_nullable(Primitive::UnsignedLong)),
             TypeSpec::String => Some(IsNullable::non_nullable(Primitive::String)),
             TypeSpec::Offset => Some(IsNullable::non_nullable(Primitive::Offset)),
+            TypeSpec::IdentifierName => Some(IsNullable::non_nullable(Primitive::IdentifierName)),
+            TypeSpec::PropertyKey => Some(IsNullable::non_nullable(Primitive::PropertyKey)),
             TypeSpec::NamedType(ref name) => {
                 match spec.get_type_by_name(name).unwrap() {
                     NamedType::Interface(ref interface) =>
                         Some(IsNullable::non_nullable(Primitive::Interface(interface.clone()))),
                     NamedType::Typedef(ref type_) =>
                         type_.get_primitive(spec),
                     NamedType::StringEnum(_) => None
                 }
@@ -327,16 +365,18 @@ impl<T> IsNullable<T> {
 pub enum Primitive {
     String,
     Boolean,
     Void,
     Number,
     UnsignedLong,
     Offset,
     Interface(Rc<Interface>),
+    IdentifierName,
+    PropertyKey,
 }
 
 #[derive(Clone, Debug, PartialEq)]
 pub struct Type {
     pub spec: TypeSpec,
     or_null: bool,
 }
 impl Eq for Type {}
@@ -388,16 +428,22 @@ impl Type {
         TypeSpec::UnsignedLong
     }
     pub fn bool() -> TypeSpec {
         TypeSpec::Boolean
     }
     pub fn void() -> TypeSpec {
         TypeSpec::Void
     }
+    pub fn identifier_name() -> TypeSpec {
+        TypeSpec::IdentifierName
+    }
+    pub fn property_key() -> TypeSpec {
+        TypeSpec::PropertyKey
+    }
 
     /// An `offset` type, holding a number of bytes in the binary file.
     pub fn offset() -> TypeSpec {
         TypeSpec::Offset
     }
 
     pub fn array(self) -> TypeSpec {
         TypeSpec::Array {
@@ -469,35 +515,36 @@ impl Obj {
 
     fn with_field_aux(self, name: &FieldName, type_: Type, laziness: Laziness,
                       doc: Option<&str>) -> Self {
         if self.field(name).is_some() {
             warn!("Field: attempting to overwrite {:?}", name);
             return self
         }
         let mut fields = self.fields;
-        fields.push(Field {
-            name: name.clone(),
-            type_,
-            documentation: doc.map(str::to_string),
-            laziness,
-        });
+        fields.push(Field::new(name.clone(), type_)
+            .with_doc(doc.map(str::to_string))
+            .with_laziness(laziness));
         Obj {
             fields
         }
 
     }
 
     /// Extend a structure with a field.
-    pub fn with_field(self, name: &FieldName, type_: Type, laziness: Laziness) -> Self {
-        self.with_field_aux(name, type_, laziness, None)
+    pub fn with_field(self, name: &FieldName, type_: Type) -> Self {
+        self.with_field_aux(name, type_, Laziness::Eager, None)
     }
 
-    pub fn with_field_doc(self, name: &FieldName, type_: Type, laziness: Laziness, doc: &str) -> Self {
-        self.with_field_aux(name, type_, laziness, Some(doc))
+    pub fn with_field_doc(self, name: &FieldName, type_: Type, doc: &str) -> Self {
+        self.with_field_aux(name, type_, Laziness::Eager, Some(doc))
+    }
+
+    pub fn with_field_lazy(self, name: &FieldName, type_: Type) -> Self {
+        self.with_field_aux(name, type_, Laziness::Lazy, None)
     }
 }
 
 impl StringEnum {
     pub fn name(&self) -> &NodeName {
         &self.name
     }
 
@@ -524,35 +571,47 @@ impl StringEnum {
 
 #[derive(Clone, Debug)]
 pub struct InterfaceDeclaration {
     /// The name of the interface, e.g. `Node`.
     name: NodeName,
 
     /// The contents of this interface, excluding the contents of parent interfaces.
     contents: Obj,
+
+    is_scope: bool,
 }
 
 impl InterfaceDeclaration {
     pub fn with_full_field(&mut self, contents: Field) -> &mut Self {
         let _ = self.contents.with_full_field(contents);
         self
     }
-    pub fn with_field(&mut self, name: &FieldName, type_: Type, laziness: Laziness) -> &mut Self {
-        self.with_field_aux(name, type_, laziness, None)
+    pub fn with_field(&mut self, name: &FieldName, type_: Type) -> &mut Self {
+        self.with_field_aux(name, type_, None, Laziness::Eager)
+    }
+    pub fn with_field_lazy(&mut self, name: &FieldName, type_: Type) -> &mut Self {
+        self.with_field_aux(name, type_, None, Laziness::Eager)
     }
-    pub fn with_field_doc(&mut self, name: &FieldName, type_: Type, laziness: Laziness, doc: &str) -> &mut Self {
-        self.with_field_aux(name, type_, laziness, Some(doc))
+    pub fn with_field_laziness(&mut self, name: &FieldName, type_: Type, laziness: Laziness) -> &mut Self {
+        self.with_field_aux(name, type_, None, laziness)
     }
-    fn with_field_aux(&mut self, name: &FieldName, type_: Type, laziness: Laziness, doc: Option<&str>) -> &mut Self {
+    pub fn with_field_doc(&mut self, name: &FieldName, type_: Type, doc: &str) -> &mut Self {
+        self.with_field_aux(name, type_, Some(doc), Laziness::Eager)
+    }
+    fn with_field_aux(&mut self, name: &FieldName, type_: Type, doc: Option<&str>, laziness: Laziness) -> &mut Self {
         let mut contents = Obj::new();
         std::mem::swap(&mut self.contents, &mut contents);
         self.contents = contents.with_field_aux(name, type_, laziness, doc);
         self
     }
+    pub fn with_scope(&mut self, value: bool) -> &mut Self {
+        self.is_scope = value;
+        self
+    }
 }
 
 /// A data structure used to progressively construct the `Spec`.
 pub struct SpecBuilder {
     /// All the interfaces entered so far.
     interfaces_by_name: HashMap<NodeName, RefCell<InterfaceDeclaration>>,
 
     /// All the enums entered so far.
@@ -612,16 +671,17 @@ impl SpecBuilder {
 
     pub fn add_interface(&mut self, name: &NodeName) -> Option<RefMut<InterfaceDeclaration>> {
         if self.interfaces_by_name.get(name).is_some() {
             return None;
         }
         let result = RefCell::new(InterfaceDeclaration {
             name: name.clone(),
             contents: Obj::new(),
+            is_scope: false,
         });
         self.interfaces_by_name.insert(name.clone(), result);
         self.interfaces_by_name.get(name)
             .map(RefCell::borrow_mut)
     }
     pub fn get_interface(&mut self, name: &NodeName) -> Option<RefMut<InterfaceDeclaration>> {
         self.interfaces_by_name.get(name)
             .map(RefCell::borrow_mut)
@@ -667,20 +727,30 @@ impl SpecBuilder {
         let string_enums_by_name : HashMap<_, _> = string_enums_by_name.drain()
             .map(|(k, v)| (k, Rc::new(RefCell::into_inner(v))))
             .collect();
         let mut typedefs_by_name = self.typedefs_by_name;
         let typedefs_by_name : HashMap<_, _> = typedefs_by_name.drain()
             .map(|(k, v)| (k, Rc::new(RefCell::into_inner(v))))
             .collect();
 
-        let mut node_names = HashMap::new();
-        for name in interfaces_by_name.keys().chain(string_enums_by_name.keys()).chain(typedefs_by_name.keys()) {
-            node_names.insert(name.to_string().clone(), name.clone());
-        }
+        let node_names: HashMap<_, _> = interfaces_by_name
+            .keys()
+            .chain(string_enums_by_name
+                .keys())
+            .chain(typedefs_by_name
+                .keys())
+            .map(|name| {
+                (name.to_string().clone(), name.clone())
+            })
+            .collect();
+        debug!(target: "spec", "Established list of node names: {:?} ({})",
+            node_names.keys()
+                .sorted(),
+            node_names.len());
 
         // 2. Collect all field names.
         let mut fields = HashMap::new();
         for interface in interfaces_by_name.values() {
             for field in &interface.declaration.contents.fields {
                 fields.insert(field.name.to_string().clone(), field.name.clone());
             }
         }
@@ -713,16 +783,20 @@ impl SpecBuilder {
             for interface in interfaces_by_name.values() {
                 for field in interface.declaration.contents.fields() {
                     for name in field.type_().spec().typenames() {
                         used_typenames.insert(name);
                     }
                 }
             }
             for name in &used_typenames {
+                // Built-in types
+                if name.to_str() == "IdentifierName" || name.to_str() == "Identifier" || name.to_str() == "PropertyKey" {
+                    continue;
+                }
                 if typedefs_by_name.contains_key(name) {
                     continue;
                 }
                 if interfaces_by_name.contains_key(name) {
                     continue;
                 }
                 if string_enums_by_name.contains_key(name) {
                     continue;
@@ -758,42 +832,52 @@ impl SpecBuilder {
                 match *type_ {
                     TypeSpec::Array { ref contents, .. } => {
                         // Check that the contents are correct.
                         let _ = classify_type(typedefs_by_name, string_enums_by_name, interfaces_by_name, cache, contents.spec(), name);
                         // Regardless, the result is bad for a sum of interfaces.
                         debug!(target: "spec", "classify_type => don't put me in an interface");
                         TypeClassification::Array
                     },
-                    TypeSpec::Boolean | TypeSpec::Number | TypeSpec::UnsignedLong | TypeSpec::String | TypeSpec::Void | TypeSpec::Offset => {
+                    TypeSpec::Boolean
+                        | TypeSpec::Number
+                        | TypeSpec::String
+                        | TypeSpec::Void
+                        | TypeSpec::Offset
+                        | TypeSpec::UnsignedLong
+                        | TypeSpec::IdentifierName
+                        | TypeSpec::PropertyKey => {
                         debug!(target: "spec", "classify_type => don't put me in an interface");
                         TypeClassification::Primitive
                     }
                     TypeSpec::NamedType(ref name) => {
                         if let Some(fetch) = cache.get(name) {
                             if let Some(ref result) = *fetch {
                                 debug!(target: "spec", "classify_type {:?} => (cached) {:?}", name, result);
                                 return result.clone();
                             } else {
                                 panic!("Cycle detected while examining {}", name.to_str());
                             }
                         }
                         // Start lookup for this name.
                         cache.insert(name.clone(), None);
-                        let result = if interfaces_by_name.contains_key(name) {
-                            let mut names = HashSet::new();
-                            names.insert(name.clone());
-                            TypeClassification::SumOfInterfaces(names)
-                        } else if string_enums_by_name.contains_key(name) {
-                            TypeClassification::StringEnum
-                        } else {
-                            let type_ = typedefs_by_name.get(name)
-                                .unwrap(); // Completeness checked abover in this method.
-                            classify_type(typedefs_by_name, string_enums_by_name, interfaces_by_name, cache, type_.spec(), name)
-                        };
+                        let result =
+                            if name.to_str() == "IdentifierName" || name.to_str() == "Identifier" || name.to_str() == "PropertyKey" {
+                                TypeClassification::Primitive
+                            } else if interfaces_by_name.contains_key(name) {
+                                let mut names = HashSet::new();
+                                names.insert(name.clone());
+                                TypeClassification::SumOfInterfaces(names)
+                            } else if string_enums_by_name.contains_key(name) {
+                                TypeClassification::StringEnum
+                            } else {
+                                let type_ = typedefs_by_name.get(name)
+                                    .unwrap_or_else(|| panic!("Type {} not found", name)); // Completeness checked abover in this method.
+                                classify_type(typedefs_by_name, string_enums_by_name, interfaces_by_name, cache, type_.spec(), name)
+                            };
                         debug!(target: "spec", "classify_type {:?} => (inserting in cache) {:?}", name, result);
                         cache.insert(name.clone(), Some(result.clone()));
                         result
                     }
                     TypeSpec::TypeSum(ref sum) => {
                         let mut names = HashSet::new();
                         for type_ in sum.types() {
                             match classify_type(typedefs_by_name, string_enums_by_name, interfaces_by_name, cache, type_, name) {
@@ -897,16 +981,20 @@ impl Interface {
     pub fn get_field_by_name(&self, name: &FieldName) -> Option<&Field> {
         for field in self.contents().fields() {
             if name == field.name() {
                 return Some(field)
             }
         }
         None
     }
+
+   pub fn is_scope(&self) -> bool {
+        self.declaration.is_scope
+    }
 }
 
 /// Immutable representation of the spec.
 pub struct Spec {
     interfaces_by_name: HashMap<NodeName, Rc<Interface>>,
     string_enums_by_name: HashMap<NodeName, Rc<StringEnum>>,
     typedefs_by_name: HashMap<NodeName, Rc<Type>>,
 
--- a/third_party/rust/bit-set/.cargo-checksum.json
+++ b/third_party/rust/bit-set/.cargo-checksum.json
@@ -1,1 +1,1 @@
-{"files":{".travis.yml":"e684c9479b485343f5b932e8f9de7ac046accfb4c1e3c534e6e0fb9e0c8d919b","Cargo.toml":"a30078c3db5bccf6a567ad9ae78a6258d18b990034eda7e4ce8f4b3041ff2aa9","LICENSE-APACHE":"8173d5c29b4f956d532781d2b86e4e30f83e6b7878dce18c919451d6ba707c90","LICENSE-MIT":"c9a75f18b9ab2927829a208fc6aa2cf4e63b8420887ba29cdb265d6619ae82d5","README.md":"d3a2993cd15ac201b30c86fe69f2bb692b386875eace571715007637d7ca7abf","deploy-docs.sh":"7b66111b124c1c7e59cb84cf110d98b5cb783bd35a676e970d9b3035e55f7dfd","src/lib.rs":"7276279f7008dd633d0bb90cc0ff73de170b89d69644fb21c35728c94e913c4d"},"package":"d9bf6104718e80d7b26a68fdbacff3481cfc05df670821affc7e9cbc1884400c"}
\ No newline at end of file
+{"files":{".travis.yml":"e684c9479b485343f5b932e8f9de7ac046accfb4c1e3c534e6e0fb9e0c8d919b","Cargo.toml":"3342b785a96c022128627c03d66f701ff8f5fa3b1088f1a6282bbd7fab94d99d","LICENSE-APACHE":"8173d5c29b4f956d532781d2b86e4e30f83e6b7878dce18c919451d6ba707c90","LICENSE-MIT":"c9a75f18b9ab2927829a208fc6aa2cf4e63b8420887ba29cdb265d6619ae82d5","README.md":"49741b792be0800387a30bf6300d5ad4d306e15b63510301e377670489620f40","deploy-docs.sh":"7b66111b124c1c7e59cb84cf110d98b5cb783bd35a676e970d9b3035e55f7dfd","src/lib.rs":"51809e3f8799d712a740f5bd37b658fbda44a5c7e62bf33a69c255866afa61b1"},"package":"6f1efcc46c18245a69c38fcc5cc650f16d3a59d034f3106e9ed63748f695730a"}
\ No newline at end of file
--- a/third_party/rust/bit-set/Cargo.toml
+++ b/third_party/rust/bit-set/Cargo.toml
@@ -1,20 +1,33 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g. crates.io) dependencies
+#
+# If you believe there's an error in this file please file an
+# issue against the rust-lang/cargo repository. If you're
+# editing this file be aware that the upstream Cargo.toml
+# will likely look very different (and much more reasonable)
+
 [package]
 name = "bit-set"
-version = "0.4.0"
+version = "0.5.0"
 authors = ["Alexis Beingessner <a.beingessner@gmail.com>"]
-license = "MIT/Apache-2.0"
 description = "A set of bits"
-repository = "https://github.com/contain-rs/bit-set"
 homepage = "https://github.com/contain-rs/bit-set"
 documentation = "https://contain-rs.github.io/bit-set/bit_set"
+readme = "README.md"
 keywords = ["data-structures", "bitset"]
-readme = "README.md"
-
-[dev-dependencies]
-rand = "0.3"
-
-[dependencies]
-bit-vec = "0.4"
+license = "MIT/Apache-2.0"
+repository = "https://github.com/contain-rs/bit-set"
+[dependencies.bit-vec]
+version = "0.5.0"
+default-features = false
+[dev-dependencies.rand]
+version = "0.3"
 
 [features]
-nightly = []
+default = ["std"]
+nightly = ["bit-vec/nightly"]
+std = ["bit-vec/std"]
--- a/third_party/rust/bit-set/README.md
+++ b/third_party/rust/bit-set/README.md
@@ -1,6 +1,17 @@
+**WARNING: THIS PROJECT IS IN MAINTENANCE MODE, DUE TO INSUFFICIENT MAINTAINER RESOURCES**
+
+It works fine, but will generally no longer be improved.
+
+We are currently only accepting changes which:
+
+* keep this compiling with the latest versions of Rust or its dependencies.
+* have minimal review requirements, such as documentation changes (so not totally new APIs).
+
+------
+
 A Set of bits.
 
 Documentation is available at https://contain-rs.github.io/bit-set/bit_set.
 
 [![Build Status](https://travis-ci.org/contain-rs/bit-set.svg?branch=master)](https://travis-ci.org/contain-rs/bit-set)
 [![crates.io](http://meritbadge.herokuapp.com/bit-set)](https://crates.io/crates/bit-set)
--- a/third_party/rust/bit-set/src/lib.rs
+++ b/third_party/rust/bit-set/src/lib.rs
@@ -42,27 +42,33 @@
 //!     println!("{}", x);
 //! }
 //!
 //! // Can convert back to a `BitVec`
 //! let bv = s.into_bit_vec();
 //! assert!(bv[3]);
 //! ```
 
+#![no_std]
+
 #![cfg_attr(all(test, feature = "nightly"), feature(test))]
 #[cfg(all(test, feature = "nightly"))] extern crate test;
 #[cfg(all(test, feature = "nightly"))] extern crate rand;
 extern crate bit_vec;
 
+#[cfg(test)]
+#[macro_use]
+extern crate std;
+
 use bit_vec::{BitVec, Blocks, BitBlock};
-use std::cmp::Ordering;
-use std::cmp;
-use std::fmt;
-use std::hash;
-use std::iter::{self, Chain, Enumerate, FromIterator, Repeat, Skip, Take};
+use core::cmp::Ordering;
+use core::cmp;
+use core::fmt;
+use core::hash;
+use core::iter::{self, Chain, Enumerate, FromIterator, Repeat, Skip, Take};
 
 type MatchWords<'a, B> = Chain<Enumerate<Blocks<'a, B>>, Skip<Take<Enumerate<Repeat<B>>>>>;
 
 /// Computes how many blocks are needed to store that many bits
 fn blocks_for_bits<B: BitBlock>(bits: usize) -> usize {
     // If we want 17 bits, dividing by 32 will produce 0. So we add 1 to make sure we
     // reserve enough. But if we want exactly a multiple of 32, this will actually allocate
     // one too many. So we need to check if that's the case. We can do that by computing if
@@ -936,16 +942,17 @@ impl<'a, B: BitBlock> IntoIterator for &
     }
 }
 
 #[cfg(test)]
 mod tests {
     use std::cmp::Ordering::{Equal, Greater, Less};
     use super::BitSet;
     use bit_vec::BitVec;
+    use std::vec::Vec;
 
     #[test]
     fn test_bit_set_show() {
         let mut s = BitSet::new();
         s.insert(1);
         s.insert(10);
         s.insert(50);
         s.insert(2);
--- a/third_party/rust/bit-vec/.cargo-checksum.json
+++ b/third_party/rust/bit-vec/.cargo-checksum.json
@@ -1,1 +1,1 @@
-{"files":{".travis.yml":"26dbdd3f33aeefa6216804c025626b8e2bef5c05103410faa5e6e93f20331cbe","Cargo.toml":"6376bd862fc4827a77190427180ccf86cda76907bf3bd935601840cd03ab48da","LICENSE-APACHE":"8173d5c29b4f956d532781d2b86e4e30f83e6b7878dce18c919451d6ba707c90","LICENSE-MIT":"7b63ecd5f1902af1b63729947373683c32745c16a10e8e6292e2e2dcd7e90ae0","README.md":"2a42423b7acd5af0ee7f47dcc430b267cfe4661ced77131af2d6e97e6a15377a","benches/extern.rs":"30152d15cc55493d06396baf9eebb90c8f32b314f0dc77398ac8a121bd5ff917","crusader.sh":"e656dcb62d5122a64d55f837992e63cfd3beee37cf74c5ab6ff178a3c7ef943e","deploy-docs.sh":"7b66111b124c1c7e59cb84cf110d98b5cb783bd35a676e970d9b3035e55f7dfd","src/bench.rs":"a24345464fdbc70b5b877d13fa1b9da809ba4917e592d5de69f01b8b1340e8bb","src/lib.rs":"b784632ce3f6a16314d1d759310f297941fb5577192ba48a10ae3c6893dd5e24"},"package":"02b4ff8b16e6076c3e14220b39fbc1fabb6737522281a388998046859400895f"}
\ No newline at end of file
+{"files":{".travis.yml":"26dbdd3f33aeefa6216804c025626b8e2bef5c05103410faa5e6e93f20331cbe","Cargo.toml":"0c1d447fdcff050a2c1f9e3267bdf5b2d3373e080603a5f9127167f31a169b7d","LICENSE-APACHE":"8173d5c29b4f956d532781d2b86e4e30f83e6b7878dce18c919451d6ba707c90","LICENSE-MIT":"7b63ecd5f1902af1b63729947373683c32745c16a10e8e6292e2e2dcd7e90ae0","README.md":"c9d3313c3cc0d55496d8c17bf950b96accd751fc67342e3b3dd3ce7756605092","benches/extern.rs":"30152d15cc55493d06396baf9eebb90c8f32b314f0dc77398ac8a121bd5ff917","crusader.sh":"e656dcb62d5122a64d55f837992e63cfd3beee37cf74c5ab6ff178a3c7ef943e","deploy-docs.sh":"7b66111b124c1c7e59cb84cf110d98b5cb783bd35a676e970d9b3035e55f7dfd","src/bench.rs":"a24345464fdbc70b5b877d13fa1b9da809ba4917e592d5de69f01b8b1340e8bb","src/lib.rs":"5162fc2658cce4d388453e73740eb1d74fbb64b0a5d714c8e7bc9a29671bbfa5"},"package":"4440d5cb623bb7390ae27fec0bb6c61111969860f8e3ae198bfa0663645e67cf"}
\ No newline at end of file
--- a/third_party/rust/bit-vec/Cargo.toml
+++ b/third_party/rust/bit-vec/Cargo.toml
@@ -1,17 +1,30 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g. crates.io) dependencies
+#
+# If you believe there's an error in this file please file an
+# issue against the rust-lang/cargo repository. If you're
+# editing this file be aware that the upstream Cargo.toml
+# will likely look very different (and much more reasonable)
+
 [package]
 name = "bit-vec"
-version = "0.4.4"
+version = "0.5.0"
 authors = ["Alexis Beingessner <a.beingessner@gmail.com>"]
-license = "MIT/Apache-2.0"
 description = "A vector of bits"
-repository = "https://github.com/contain-rs/bit-vec"
 homepage = "https://github.com/contain-rs/bit-vec"
 documentation = "https://contain-rs.github.io/bit-vec/bit_vec"
+readme = "README.md"
 keywords = ["data-structures", "bitvec", "bitmask", "bitmap", "bit"]
-readme = "README.md"
-
-[dev-dependencies]
-rand = "0.3.15"
+license = "MIT/Apache-2.0"
+repository = "https://github.com/contain-rs/bit-vec"
+[dev-dependencies.rand]
+version = "0.3.15"
 
 [features]
+default = ["std"]
 nightly = []
+std = []
--- a/third_party/rust/bit-vec/README.md
+++ b/third_party/rust/bit-vec/README.md
@@ -1,6 +1,18 @@
+**WARNING: THIS PROJECT IS IN MAINTENANCE MODE, DUE TO INSUFFICIENT MAINTAINER RESOURCES**
+
+It works fine, but will generally no longer be improved.
+
+We are currently only accepting changes which:
+
+* keep this compiling with the latest versions of Rust or its dependencies.
+* have minimal review requirements, such as documentation changes (so not totally new APIs).
+
+------
+
+
 A Vec of bits.
 
 Documentation is available at https://contain-rs.github.io/bit-vec/bit_vec.
 
 [![Build Status](https://travis-ci.org/contain-rs/bit-vec.svg?branch=master)](https://travis-ci.org/contain-rs/bit-vec)
 [![crates.io](http://meritbadge.herokuapp.com/bit-vec)](https://crates.io/crates/bit-vec)
--- a/third_party/rust/bit-vec/src/lib.rs
+++ b/third_party/rust/bit-vec/src/lib.rs
@@ -78,33 +78,48 @@
 //! }
 //! println!("");
 //!
 //! let num_primes = primes.iter().filter(|x| *x).count();
 //! println!("There are {} primes below {}", num_primes, max_prime);
 //! assert_eq!(num_primes, 1_229);
 //! ```
 
+#![no_std]
+#![cfg_attr(not(feature="std"), feature(alloc))]
+
 #![cfg_attr(all(test, feature = "nightly"), feature(test))]
 #[cfg(all(test, feature = "nightly"))] extern crate test;
 #[cfg(all(test, feature = "nightly"))] extern crate rand;
 
-use std::cmp::Ordering;
-use std::cmp;
-use std::fmt;
-use std::hash;
-use std::iter::{Chain, Enumerate, Repeat, Skip, Take, repeat};
-use std::iter::FromIterator;
-use std::slice;
-use std::{u8, usize};
+#[cfg(any(test, feature = "std"))]
+#[macro_use]
+extern crate std;
+#[cfg(feature="std")]
+use std::vec::Vec;
+
+#[cfg(not(feature="std"))]
+#[macro_use]
+extern crate alloc;
+#[cfg(not(feature="std"))]
+use alloc::Vec;
+
+use core::cmp::Ordering;
+use core::cmp;
+use core::fmt;
+use core::hash;
+use core::iter::{Chain, Enumerate, Repeat, Skip, Take, repeat};
+use core::iter::FromIterator;
+use core::slice;
+use core::{u8, usize};
 
 type MutBlocks<'a, B> = slice::IterMut<'a, B>;
 type MatchWords<'a, B> = Chain<Enumerate<Blocks<'a, B>>, Skip<Take<Enumerate<Repeat<B>>>>>;
 
-use std::ops::*;
+use core::ops::*;
 
 /// Abstracts over a pile of bits (basically unsigned primitives)
 pub trait BitBlock:
 	Copy +
 	Add<Self, Output=Self> +
 	Sub<Self, Output=Self> +
 	Shl<usize, Output=Self> +
 	Shr<usize, Output=Self> +
@@ -149,17 +164,17 @@ macro_rules! bit_block_impl {
     )*)
 }
 
 bit_block_impl!{
     (u8, 8),
     (u16, 16),
     (u32, 32),
     (u64, 64),
-    (usize, std::mem::size_of::<usize>() * 8)
+    (usize, core::mem::size_of::<usize>() * 8)
 }
 
 
 fn reverse_bits(byte: u8) -> u8 {
     let mut result = 0;
     for i in 0..u8::bits() {
         result = result | ((byte >> i) & 1) << (u8::bits() - 1 - i);
     }
@@ -1086,16 +1101,26 @@ impl<B: BitBlock> BitVec<B> {
     #[inline]
     pub fn is_empty(&self) -> bool { self.len() == 0 }
 
     /// Clears all bits in this vector.
     #[inline]
     pub fn clear(&mut self) {
         for w in &mut self.storage { *w = B::zero(); }
     }
+
+    /// Shrinks the capacity of the underlying storage as much as
+    /// possible.
+    ///
+    /// It will drop down as close as possible to the length but the
+    /// allocator may still inform the underlying storage that there
+    /// is space for a few more elements/bits.
+    pub fn shrink_to_fit(&mut self) {
+        self.storage.shrink_to_fit();
+    }
 }
 
 impl<B: BitBlock> Default for BitVec<B> {
     #[inline]
     fn default() -> Self { BitVec { storage: Vec::new(), nbits: 0 } }
 }
 
 impl<B: BitBlock> FromIterator<bool> for BitVec<B> {
@@ -1303,16 +1328,17 @@ impl<'a, B: BitBlock> ExactSizeIterator 
 
 
 
 
 
 #[cfg(test)]
 mod tests {
     use super::{BitVec, Iter};
+    use std::vec::Vec;
 
     // This is stupid, but I want to differentiate from a "random" 32
     const U32_BITS: usize = 32;
 
     #[test]
     fn test_to_str() {
         let zerolen = BitVec::new();
         assert_eq!(format!("{:?}", zerolen), "");
new file mode 100644
--- /dev/null
+++ b/third_party/rust/block-buffer/.cargo-checksum.json
@@ -0,0 +1,1 @@
+{"files":{"Cargo.toml":"373908618d7bdf561f84ddc5add92f69dab295c97ab0908d3a4ec428fad23bad","LICENSE-APACHE":"a9040321c3712d8fd0b09cf52b17445de04a23a10165049ae187cd39e5c86be5","LICENSE-MIT":"9e0dfd2dd4173a530e238cb6adb37aa78c34c6bc7444e0e10c1ab5d8881f63ba","src/lib.rs":"bdf23c8a00fb4d51beabeb6600fe45ebf1be618632db885013b6f60a5666c124","src/paddings.rs":"7a18850dab9dca0a3e6cc49d6a94a9566ea2473628f42f726a69f8e07f95872a"},"package":"a076c298b9ecdb530ed9d967e74a6027d6a7478924520acddcddc24c1c8ab3ab"}
\ No newline at end of file
new file mode 100644
--- /dev/null
+++ b/third_party/rust/block-buffer/Cargo.toml
@@ -0,0 +1,27 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g. crates.io) dependencies
+#
+# If you believe there's an error in this file please file an
+# issue against the rust-lang/cargo repository. If you're
+# editing this file be aware that the upstream Cargo.toml
+# will likely look very different (and much more reasonable)
+
+[package]
+name = "block-buffer"
+version = "0.3.3"
+authors = ["RustCrypto Developers"]
+description = "Fixed size buffer for block processing of data"
+documentation = "https://docs.rs/block-buffer"
+keywords = ["block", "padding", "pkcs7", "ansix923", "iso7816"]
+categories = ["cryptography", "no-std"]
+license = "MIT/Apache-2.0"
+repository = "https://github.com/RustCrypto/utils"
+[dependencies.arrayref]
+version = "0.3"
+
+[dependencies.byte-tools]
+version = "0.2"
new file mode 100644
--- /dev/null
+++ b/third_party/rust/block-buffer/LICENSE-APACHE
@@ -0,0 +1,201 @@
+                              Apache License
+                        Version 2.0, January 2004
+                     http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+   "License" shall mean the terms and conditions for use, reproduction,
+   and distribution as defined by Sections 1 through 9 of this document.
+
+   "Licensor" shall mean the copyright owner or entity authorized by
+   the copyright owner that is granting the License.
+
+   "Legal Entity" shall mean the union of the acting entity and all
+   other entities that control, are controlled by, or are under common
+   control with that entity. For the purposes of this definition,
+   "control" means (i) the power, direct or indirect, to cause the
+   direction or management of such entity, whether by contract or
+   otherwise, or (ii) ownership of fifty percent (50%) or more of the
+   outstanding shares, or (iii) beneficial ownership of such entity.
+
+   "You" (or "Your") shall mean an individual or Legal Entity
+   exercising permissions granted by this License.
+
+   "Source" form shall mean the preferred form for making modifications,
+   including but not limited to software source code, documentation
+   source, and configuration files.
+
+   "Object" form shall mean any form resulting from mechanical
+   transformation or translation of a Source form, including but
+   not limited to compiled object code, generated documentation,
+   and conversions to other media types.
+
+   "Work" shall mean the work of authorship, whether in Source or
+   Object form, made available under the License, as indicated by a
+   copyright notice that is included in or attached to the work
+   (an example is provided in the Appendix below).
+
+   "Derivative Works" shall mean any work, whether in Source or Object
+   form, that is based on (or derived from) the Work and for which the
+   editorial revisions, annotations, elaborations, or other modifications
+   represent, as a whole, an original work of authorship. For the purposes
+   of this License, Derivative Works shall not include works that remain
+   separable from, or merely link (or bind by name) to the interfaces of,
+   the Work and Derivative Works thereof.
+
+   "Contribution" shall mean any work of authorship, including
+   the original version of the Work and any modifications or additions
+   to that Work or Derivative Works thereof, that is intentionally
+   submitted to Licensor for inclusion in the Work by the copyright owner
+   or by an individual or Legal Entity authorized to submit on behalf of
+   the copyright owner. For the purposes of this definition, "submitted"
+   means any form of electronic, verbal, or written communication sent
+   to the Licensor or its representatives, including but not limited to
+   communication on electronic mailing lists, source code control systems,
+   and issue tracking systems that are managed by, or on behalf of, the
+   Licensor for the purpose of discussing and improving the Work, but
+   excluding communication that is conspicuously marked or otherwise
+   designated in writing by the copyright owner as "Not a Contribution."
+
+   "Contributor" shall mean Licensor and any individual or Legal Entity
+   on behalf of whom a Contribution has been received by Licensor and
+   subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+   this License, each Contributor hereby grants to You a perpetual,
+   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+   copyright license to reproduce, prepare Derivative Works of,
+   publicly display, publicly perform, sublicense, and distribute the
+   Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+   this License, each Contributor hereby grants to You a perpetual,
+   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+   (except as stated in this section) patent license to make, have made,
+   use, offer to sell, sell, import, and otherwise transfer the Work,
+   where such license applies only to those patent claims licensable
+   by such Contributor that are necessarily infringed by their
+   Contribution(s) alone or by combination of their Contribution(s)
+   with the Work to which such Contribution(s) was submitted. If You
+   institute patent litigation against any entity (including a
+   cross-claim or counterclaim in a lawsuit) alleging that the Work
+   or a Contribution incorporated within the Work constitutes direct
+   or contributory patent infringement, then any patent licenses
+   granted to You under this License for that Work shall terminate
+   as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+   Work or Derivative Works thereof in any medium, with or without
+   modifications, and in Source or Object form, provided that You
+   meet the following conditions:
+
+   (a) You must give any other recipients of the Work or
+       Derivative Works a copy of this License; and
+
+   (b) You must cause any modified files to carry prominent notices
+       stating that You changed the files; and
+
+   (c) You must retain, in the Source form of any Derivative Works
+       that You distribute, all copyright, patent, trademark, and
+       attribution notices from the Source form of the Work,
+       excluding those notices that do not pertain to any part of
+       the Derivative Works; and
+
+   (d) If the Work includes a "NOTICE" text file as part of its
+       distribution, then any Derivative Works that You distribute must
+       include a readable copy of the attribution notices contained
+       within such NOTICE file, excluding those notices that do not
+       pertain to any part of the Derivative Works, in at least one
+       of the following places: within a NOTICE text file distributed
+       as part of the Derivative Works; within the Source form or
+       documentation, if provided along with the Derivative Works; or,
+       within a display generated by the Derivative Works, if and
+       wherever such third-party notices normally appear. The contents
+       of the NOTICE file are for informational purposes only and
+       do not modify the License. You may add Your own attribution
+       notices within Derivative Works that You distribute, alongside
+       or as an addendum to the NOTICE text from the Work, provided
+       that such additional attribution notices cannot be construed
+       as modifying the License.
+
+   You may add Your own copyright statement to Your modifications and
+   may provide additional or different license terms and conditions
+   for use, reproduction, or distribution of Your modifications, or
+   for any such Derivative Works as a whole, provided Your use,
+   reproduction, and distribution of the Work otherwise complies with
+   the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+   any Contribution intentionally submitted for inclusion in the Work
+   by You to the Licensor shall be under the terms and conditions of
+   this License, without any additional terms or conditions.
+   Notwithstanding the above, nothing herein shall supersede or modify
+   the terms of any separate license agreement you may have executed
+   with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+   names, trademarks, service marks, or product names of the Licensor,
+   except as required for reasonable and customary use in describing the
+   origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+   agreed to in writing, Licensor provides the Work (and each
+   Contributor provides its Contributions) on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+   implied, including, without limitation, any warranties or conditions
+   of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+   PARTICULAR PURPOSE. You are solely responsible for determining the
+   appropriateness of using or redistributing the Work and assume any
+   risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+   whether in tort (including negligence), contract, or otherwise,
+   unless required by applicable law (such as deliberate and grossly
+   negligent acts) or agreed to in writing, shall any Contributor be
+   liable to You for damages, including any direct, indirect, special,
+   incidental, or consequential damages of any character arising as a
+   result of this License or out of the use or inability to use the
+   Work (including but not limited to damages for loss of goodwill,
+   work stoppage, computer failure or malfunction, or any and all
+   other commercial damages or losses), even if such Contributor
+   has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+   the Work or Derivative Works thereof, You may choose to offer,
+   and charge a fee for, acceptance of support, warranty, indemnity,
+   or other liability obligations and/or rights consistent with this
+   License. However, in accepting such obligations, You may act only
+   on Your own behalf and on Your sole responsibility, not on behalf
+   of any other Contributor, and only if You agree to indemnify,
+   defend, and hold each Contributor harmless for any liability
+   incurred by, or claims asserted against, such Contributor by reason
+   of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+   To apply the Apache License to your work, attach the following
+   boilerplate notice, with the fields enclosed by brackets "[]"
+   replaced with your own identifying information. (Don't include
+   the brackets!)  The text should be enclosed in the appropriate
+   comment syntax for the file format. We also recommend that a
+   file or class name and description of purpose be included on the
+   same "printed page" as the copyright notice for easier
+   identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+   http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
new file mode 100644
--- /dev/null
+++ b/third_party/rust/block-buffer/LICENSE-MIT
@@ -0,0 +1,25 @@
+Copyright (c) 2017 Artyom Pavlov
+
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
new file mode 100644
--- /dev/null
+++ b/third_party/rust/block-buffer/src/lib.rs
@@ -0,0 +1,144 @@
+#![no_std]
+#[macro_use]
+extern crate arrayref;
+extern crate byte_tools;
+
+use byte_tools::{zero, write_u64_le};
+
+mod paddings;
+
+pub use paddings::*;
+
+macro_rules! impl_buffer {
+    ($name:ident, $len:expr) => {
+
+        pub struct $name {
+            buffer: [u8; $len],
+            pos: usize,
+        }
+
+        impl Copy for $name {}
+
+        impl Clone for $name {
+            fn clone(&self) -> Self {
+                *self
+            }
+        }
+
+        impl Default for $name {
+            fn default() -> Self {
+                $name {buffer: [0; $len], pos: 0}
+            }
+        }
+
+        impl $name {
+            #[inline]
+            pub fn input<F: FnMut(&[u8; $len])>(&mut self, mut input: &[u8], mut func: F) {
+                // If there is already data in the buffer, copy as much as we can
+                // into it and process the data if the buffer becomes full.
+                if self.pos != 0 {
+                    let rem = self.remaining();
+
+                    if input.len() >= rem {
+                        let (l, r) = input.split_at(rem);
+                        input = r;
+                        self.buffer[self.pos..].copy_from_slice(l);
+                        self.pos = 0;
+                        func(&self.buffer);
+                    } else {
+                        let end = self.pos + input.len();
+                        self.buffer[self.pos..end].copy_from_slice(input);
+                        self.pos = end;
+                        return;
+                    }
+                }
+
+                // While we have at least a full buffer size chunks's worth of data,
+                // process that data without copying it into the buffer
+                while input.len() >= self.size() {
+                    let (l, r) = input.split_at(self.size());
+                    input = r;
+                    func(array_ref!(l, 0, $len));
+                }
+
+                // Copy any input data into the buffer. At this point in the method,
+                // the ammount of data left in the input vector will be less than
+                // the buffer size and the buffer will be empty.
+                self.buffer[..input.len()].copy_from_slice(input);
+                self.pos = input.len();
+            }
+
+            #[inline]
+            fn digest_pad<F>(&mut self, up_to: usize, func: &mut F)
+                where F: FnMut(&[u8; $len])
+            {
+                self.buffer[self.pos] = 0x80;
+                self.pos += 1;
+
+                zero(&mut self.buffer[self.pos..]);
+
+                if self.remaining() < up_to {
+                    func(&self.buffer);
+                    zero(&mut self.buffer[..self.pos]);
+                }
+            }
+
+            #[inline]
+            /// Will pad message with message length in big-endian format
+            pub fn len_padding<F>(&mut self, data_len: u64, mut func: F)
+                where F: FnMut(&[u8; $len])
+            {
+                self.digest_pad(8, &mut func);
+                let s = self.size();
+                write_u64_le(&mut self.buffer[s-8..], data_len);
+                func(&self.buffer);
+                self.pos = 0;
+            }
+
+            #[inline]
+            pub fn len_padding_u128<F>(&mut self, hi: u64, lo: u64, mut func: F)
+                where F: FnMut(&[u8; $len])
+            {
+                self.digest_pad(16, &mut func);
+                let s = self.size();
+                write_u64_le(&mut self.buffer[s-16..s-8], hi);
+                write_u64_le(&mut self.buffer[s-8..], lo);
+                func(&self.buffer);
+                self.pos = 0;
+            }
+
+            #[inline]
+            pub fn pad_with<P: Padding>(&mut self) -> &mut [u8; $len] {
+                P::pad(&mut self.buffer[..], self.pos);
+                self.pos = 0;
+                &mut self.buffer
+            }
+
+            #[inline]
+            pub fn size(&self) -> usize {
+                $len
+            }
+
+            #[inline]
+            pub fn position(&self) -> usize {
+                self.pos
+            }
+
+            #[inline]
+            pub fn remaining(&self) -> usize {
+                self.size() - self.pos
+            }
+        }
+    }
+}
+
+impl_buffer!(BlockBuffer128, 16);
+impl_buffer!(BlockBuffer256, 32);
+impl_buffer!(BlockBuffer512, 64);
+impl_buffer!(BlockBuffer1024, 128);
+
+impl_buffer!(BlockBuffer576, 72);
+impl_buffer!(BlockBuffer832, 104);
+impl_buffer!(BlockBuffer1088, 136);
+impl_buffer!(BlockBuffer1152, 144);
+impl_buffer!(BlockBuffer1344, 168);
new file mode 100644
--- /dev/null
+++ b/third_party/rust/block-buffer/src/paddings.rs
@@ -0,0 +1,129 @@
+use byte_tools::{zero, set};
+
+/// Trait for padding messages divided into blocks
+pub trait Padding {
+    /// Pads `block` filled with data up to `pos`
+    fn pad(block: &mut [u8], pos: usize);
+}
+
+#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
+/// Error for indicating failed unpadding process
+pub struct UnpadError;
+
+/// Trait for extracting oringinal message from padded medium
+pub trait Unpadding {
+    /// Unpad given `data` by truncating it according to the used padding.
+    /// In case of the malformed padding will return `UnpadError`
+    fn unpad(data: &[u8]) -> Result<&[u8], UnpadError>;
+}
+
+
+#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
+pub enum ZeroPadding{}
+
+impl Padding for ZeroPadding {
+    #[inline]
+    fn pad(block: &mut [u8], pos: usize) {
+        zero(&mut block[pos..])
+    }
+}
+
+impl Unpadding for ZeroPadding {
+    #[inline]
+    fn unpad(data: &[u8]) -> Result<&[u8], UnpadError> {
+        let mut n = data.len() - 1;
+        while n != 0 {
+            if data[n] != 0 {
+                break;
+            }
+            n -= 1;
+        }
+        Ok(&data[..n+1])
+    }
+}
+
+#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
+pub enum Pkcs7{}
+
+impl Padding for Pkcs7 {
+    #[inline]
+    fn pad(block: &mut [u8], pos: usize) {
+        let n = block.len() - pos;
+        set(&mut block[pos..], n as u8);
+    }
+}
+
+impl Unpadding for Pkcs7 {
+    #[inline]
+    fn unpad(data: &[u8]) -> Result<&[u8], UnpadError> {
+        if data.is_empty() { return Err(UnpadError); }
+        let l = data.len();
+        let n = data[l-1];
+        if n == 0 {
+            return Err(UnpadError)
+        }
+        for v in &data[l-n as usize..l-1] {
+            if *v != n { return Err(UnpadError); }
+        }
+        Ok(&data[..l-n as usize])
+    }
+}
+
+#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
+pub enum AnsiX923{}
+
+impl Padding for AnsiX923 {
+    #[inline]
+    fn pad(block: &mut [u8], pos: usize) {
+        let n = block.len() - 1;
+        zero(&mut block[pos..n]);
+        block[n] = (n - pos) as u8;
+    }
+}
+
+impl Unpadding for AnsiX923 {
+    #[inline]
+    fn unpad(data: &[u8]) -> Result<&[u8], UnpadError> {
+        if data.is_empty() { return Err(UnpadError); }
+        let l = data.len();
+        let n = data[l-1] as usize;
+        if n == 0 {
+            return Err(UnpadError)
+        }
+        for v in &data[l-n..l-1] {
+            if *v != 0 { return Err(UnpadError); }
+        }
+        Ok(&data[..l-n])
+    }
+}
+
+
+
+#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
+pub enum Iso7816{}
+
+impl Padding for Iso7816 {
+    #[inline]
+    fn pad(block: &mut [u8], pos: usize) {
+        let n = block.len() - pos;
+        block[pos] = 0x80;
+        for b in block[pos+1..].iter_mut() {
+            *b = n as u8;
+        }
+    }
+}
+
+impl Unpadding for Iso7816 {
+    fn unpad(data: &[u8]) -> Result<&[u8], UnpadError> {
+        if data.is_empty() { return Err(UnpadError); }
+        let mut n = data.len() - 1;
+        while n != 0 {
+            if data[n] != 0 {
+                break;
+            }
+            n -= 1;
+        }
+        if data[n] != 0x80 { return Err(UnpadError); }
+        Ok(&data[..n])
+    }
+}
new file mode 100644
--- /dev/null
+++ b/third_party/rust/byte-tools/.cargo-checksum.json
@@ -0,0 +1,1 @@
+{"files":{"Cargo.toml":"af6af6ea1dfa296af5dc58986d1afb46952328588069ec0b08723db439e9972d","LICENSE-APACHE":"a9040321c3712d8fd0b09cf52b17445de04a23a10165049ae187cd39e5c86be5","LICENSE-MIT":"52232c2cee3bb7d8cabe47ef367f1bf8bb607c22bdfca0219d6156cb7f446e9d","src/lib.rs":"9c96cffef7458fc7bd9e4e61270b69d539ff3a9225a0319b7996155c25ff96ab","src/read_single.rs":"3ab78b15754c2a7848a1be871ff6ee2a31a099f8f4f89be44ad210cda0dbcc9a","src/read_slice.rs":"b3790f2fd080db97e239c05c63da123ea375fb9b354dc9cacb859ed9c44f552e","src/write_single.rs":"1cee4f2f5d8690e47840ea7017539ead417a26abc0717137442a6d9d2875afe4","src/write_slice.rs":"de90e6b9cfca67125871bee7cef55c63574b1871a6584e51fc00a97e5877fe69"},"package":"560c32574a12a89ecd91f5e742165893f86e3ab98d21f8ea548658eb9eef5f40"}
\ No newline at end of file
new file mode 100644
--- /dev/null
+++ b/third_party/rust/byte-tools/Cargo.toml
@@ -0,0 +1,21 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g. crates.io) dependencies
+#
+# If you believe there's an error in this file please file an
+# issue against the rust-lang/cargo repository. If you're
+# editing this file be aware that the upstream Cargo.toml
+# will likely look very different (and much more reasonable)
+
+[package]
+name = "byte-tools"
+version = "0.2.0"
+authors = ["The Rust-Crypto Project Developers"]
+description = "Utility functions for working with bytes"
+documentation = "https://docs.rs/byte-tools"
+keywords = ["bytes"]
+license = "MIT/Apache-2.0"
+repository = "https://github.com/RustCrypto/utils"
new file mode 100644
--- /dev/null
+++ b/third_party/rust/byte-tools/LICENSE-APACHE
@@ -0,0 +1,201 @@
+                              Apache License
+                        Version 2.0, January 2004
+                     http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+   "License" shall mean the terms and conditions for use, reproduction,
+   and distribution as defined by Sections 1 through 9 of this document.
+
+   "Licensor" shall mean the copyright owner or entity authorized by
+   the copyright owner that is granting the License.
+
+   "Legal Entity" shall mean the union of the acting entity and all
+   other entities that control, are controlled by, or are under common
+   control with that entity. For the purposes of this definition,
+   "control" means (i) the power, direct or indirect, to cause the
+   direction or management of such entity, whether by contract or
+   otherwise, or (ii) ownership of fifty percent (50%) or more of the
+   outstanding shares, or (iii) beneficial ownership of such entity.
+
+   "You" (or "Your") shall mean an individual or Legal Entity
+   exercising permissions granted by this License.
+
+   "Source" form shall mean the preferred form for making modifications,
+   including but not limited to software source code, documentation
+   source, and configuration files.
+
+   "Object" form shall mean any form resulting from mechanical
+   transformation or translation of a Source form, including but
+   not limited to compiled object code, generated documentation,
+   and conversions to other media types.
+
+   "Work" shall mean the work of authorship, whether in Source or
+   Object form, made available under the License, as indicated by a
+   copyright notice that is included in or attached to the work
+   (an example is provided in the Appendix below).
+
+   "Derivative Works" shall mean any work, whether in Source or Object
+   form, that is based on (or derived from) the Work and for which the
+   editorial revisions, annotations, elaborations, or other modifications
+   represent, as a whole, an original work of authorship. For the purposes
+   of this License, Derivative Works shall not include works that remain
+   separable from, or merely link (or bind by name) to the interfaces of,
+   the Work and Derivative Works thereof.
+
+   "Contribution" shall mean any work of authorship, including
+   the original version of the Work and any modifications or additions
+   to that Work or Derivative Works thereof, that is intentionally
+   submitted to Licensor for inclusion in the Work by the copyright owner
+   or by an individual or Legal Entity authorized to submit on behalf of
+   the copyright owner. For the purposes of this definition, "submitted"
+   means any form of electronic, verbal, or written communication sent
+   to the Licensor or its representatives, including but not limited to
+   communication on electronic mailing lists, source code control systems,
+   and issue tracking systems that are managed by, or on behalf of, the
+   Licensor for the purpose of discussing and improving the Work, but
+   excluding communication that is conspicuously marked or otherwise
+   designated in writing by the copyright owner as "Not a Contribution."
+
+   "Contributor" shall mean Licensor and any individual or Legal Entity
+   on behalf of whom a Contribution has been received by Licensor and
+   subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+   this License, each Contributor hereby grants to You a perpetual,
+   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+   copyright license to reproduce, prepare Derivative Works of,
+   publicly display, publicly perform, sublicense, and distribute the
+   Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+   this License, each Contributor hereby grants to You a perpetual,
+   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+   (except as stated in this section) patent license to make, have made,
+   use, offer to sell, sell, import, and otherwise transfer the Work,
+   where such license applies only to those patent claims licensable
+   by such Contributor that are necessarily infringed by their
+   Contribution(s) alone or by combination of their Contribution(s)
+   with the Work to which such Contribution(s) was submitted. If You
+   institute patent litigation against any entity (including a
+   cross-claim or counterclaim in a lawsuit) alleging that the Work
+   or a Contribution incorporated within the Work constitutes direct
+   or contributory patent infringement, then any patent licenses
+   granted to You under this License for that Work shall terminate
+   as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+   Work or Derivative Works thereof in any medium, with or without
+   modifications, and in Source or Object form, provided that You
+   meet the following conditions:
+
+   (a) You must give any other recipients of the Work or
+       Derivative Works a copy of this License; and
+
+   (b) You must cause any modified files to carry prominent notices
+       stating that You changed the files; and
+
+   (c) You must retain, in the Source form of any Derivative Works
+       that You distribute, all copyright, patent, trademark, and
+       attribution notices from the Source form of the Work,
+       excluding those notices that do not pertain to any part of
+       the Derivative Works; and
+
+   (d) If the Work includes a "NOTICE" text file as part of its
+       distribution, then any Derivative Works that You distribute must
+       include a readable copy of the attribution notices contained
+       within such NOTICE file, excluding those notices that do not
+       pertain to any part of the Derivative Works, in at least one
+       of the following places: within a NOTICE text file distributed
+       as part of the Derivative Works; within the Source form or
+       documentation, if provided along with the Derivative Works; or,
+       within a display generated by the Derivative Works, if and
+       wherever such third-party notices normally appear. The contents
+       of the NOTICE file are for informational purposes only and
+       do not modify the License. You may add Your own attribution
+       notices within Derivative Works that You distribute, alongside
+       or as an addendum to the NOTICE text from the Work, provided
+       that such additional attribution notices cannot be construed
+       as modifying the License.
+
+   You may add Your own copyright statement to Your modifications and
+   may provide additional or different license terms and conditions
+   for use, reproduction, or distribution of Your modifications, or
+   for any such Derivative Works as a whole, provided Your use,
+   reproduction, and distribution of the Work otherwise complies with
+   the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+   any Contribution intentionally submitted for inclusion in the Work
+   by You to the Licensor shall be under the terms and conditions of
+   this License, without any additional terms or conditions.
+   Notwithstanding the above, nothing herein shall supersede or modify
+   the terms of any separate license agreement you may have executed
+   with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+   names, trademarks, service marks, or product names of the Licensor,
+   except as required for reasonable and customary use in describing the
+   origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+   agreed to in writing, Licensor provides the Work (and each
+   Contributor provides its Contributions) on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+   implied, including, without limitation, any warranties or conditions
+   of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+   PARTICULAR PURPOSE. You are solely responsible for determining the
+   appropriateness of using or redistributing the Work and assume any
+   risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+   whether in tort (including negligence), contract, or otherwise,
+   unless required by applicable law (such as deliberate and grossly
+   negligent acts) or agreed to in writing, shall any Contributor be
+   liable to You for damages, including any direct, indirect, special,
+   incidental, or consequential damages of any character arising as a
+   result of this License or out of the use or inability to use the
+   Work (including but not limited to damages for loss of goodwill,
+   work stoppage, computer failure or malfunction, or any and all
+   other commercial damages or losses), even if such Contributor
+   has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+   the Work or Derivative Works thereof, You may choose to offer,
+   and charge a fee for, acceptance of support, warranty, indemnity,
+   or other liability obligations and/or rights consistent with this
+   License. However, in accepting such obligations, You may act only
+   on Your own behalf and on Your sole responsibility, not on behalf
+   of any other Contributor, and only if You agree to indemnify,
+   defend, and hold each Contributor harmless for any liability
+   incurred by, or claims asserted against, such Contributor by reason
+   of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+   To apply the Apache License to your work, attach the following
+   boilerplate notice, with the fields enclosed by brackets "[]"
+   replaced with your own identifying information. (Don't include
+   the brackets!)  The text should be enclosed in the appropriate
+   comment syntax for the file format. We also recommend that a
+   file or class name and description of purpose be included on the
+   same "printed page" as the copyright notice for easier
+   identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+   http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
new file mode 100644
--- /dev/null
+++ b/third_party/rust/byte-tools/LICENSE-MIT
@@ -0,0 +1,26 @@
+Copyright (c) 2006-2009 Graydon Hoare
+Copyright (c) 2009-2013 Mozilla Foundation
+
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
new file mode 100644
--- /dev/null
+++ b/third_party/rust/byte-tools/src/lib.rs
@@ -0,0 +1,37 @@
+#![no_std]
+use core::ptr;
+
+mod read_single;
+mod write_single;
+mod read_slice;
+mod write_slice;
+
+pub use read_single::*;
+pub use write_single::*;
+pub use read_slice::*;
+pub use write_slice::*;
+
/// Copy all bytes of `src` into the front of `dst`.
///
/// Panics if `dst` is shorter than `src`; bytes of `dst` beyond
/// `src.len()` are left untouched.
#[inline]
pub fn copy_memory(src: &[u8], dst: &mut [u8]) {
    assert!(dst.len() >= src.len());
    // Safe equivalent of the previous `ptr::copy_nonoverlapping`;
    // `copy_from_slice` compiles down to the same memcpy.
    dst[..src.len()].copy_from_slice(src);
}
+
/// Overwrite every byte of `dst` with zero.
#[inline]
pub fn zero(dst: &mut [u8]) {
    for byte in dst.iter_mut() {
        *byte = 0;
    }
}
+
/// Sets all bytes in `dst` equal to `value`.
#[inline]
pub fn set(dst: &mut [u8], value: u8) {
    // Safe replacement for the previous unsafe `ptr::write_bytes`; a plain
    // byte-fill loop optimizes to the same memset.
    for b in dst.iter_mut() {
        *b = value;
    }
}
\ No newline at end of file
new file mode 100644
--- /dev/null
+++ b/third_party/rust/byte-tools/src/read_single.rs
@@ -0,0 +1,38 @@
+use core::{mem, ptr};
+
// Read one `$ty` integer from `$src` (which must be exactly `$size` bytes),
// interpreting the bytes in the order fixed by `$which` (`to_le`/`to_be`
// double as from-conversions, since byte-swapping is an involution).
macro_rules! read_single {
    ($src:expr, $size:expr, $ty:ty, $which:ident) => ({
        assert!($size == mem::size_of::<$ty>());
        assert!($size == $src.len());
        // Zero-initialize instead of `mem::uninitialized()`: producing an
        // uninitialized integer is undefined behavior, and zeroing a
        // machine word is free — the bytes are fully overwritten below.
        let mut tmp: $ty = 0;
        unsafe {
            // SAFETY: `tmp` is `$size` bytes (asserted above) and does not
            // overlap `$src`.
            let p = &mut tmp as *mut _ as *mut u8;
            ptr::copy_nonoverlapping($src.as_ptr(), p, $size);
        }
        tmp.$which()
    });
}
+
/// Read the value of a vector of bytes as a u32 value in little-endian format.
///
/// Panics if `src` is not exactly 4 bytes long.
#[inline]
pub fn read_u32_le(src: &[u8]) -> u32 {
    assert!(src.len() == 4);
    // Endian-explicit safe assembly; avoids the unsafe macro built on
    // `mem::uninitialized` and optimizes to a single (swapped) load.
    (src[0] as u32)
        | ((src[1] as u32) << 8)
        | ((src[2] as u32) << 16)
        | ((src[3] as u32) << 24)
}
+
/// Read the value of a vector of bytes as a u32 value in big-endian format.
///
/// Panics if `src` is not exactly 4 bytes long.
#[inline]
pub fn read_u32_be(src: &[u8]) -> u32 {
    assert!(src.len() == 4);
    // Safe, endian-explicit replacement for the unsafe macro path.
    ((src[0] as u32) << 24)
        | ((src[1] as u32) << 16)
        | ((src[2] as u32) << 8)
        | (src[3] as u32)
}
+
/// Read the value of a vector of bytes as a u64 value in little-endian format.
///
/// Panics if `src` is not exactly 8 bytes long.
#[inline]
pub fn read_u64_le(src: &[u8]) -> u64 {
    assert!(src.len() == 8);
    // Little-endian: byte i carries bits 8*i .. 8*i+8. Safe replacement
    // for the unsafe macro built on `mem::uninitialized`.
    let mut out = 0u64;
    for (i, &b) in src.iter().enumerate() {
        out |= (b as u64) << (8 * i);
    }
    out
}
+
/// Read the value of a vector of bytes as a u64 value in big-endian format.
///
/// Panics if `src` is not exactly 8 bytes long.
#[inline]
pub fn read_u64_be(src: &[u8]) -> u64 {
    assert!(src.len() == 8);
    // Most-significant byte first; safe replacement for the unsafe macro.
    src.iter().fold(0u64, |acc, &b| (acc << 8) | b as u64)
}
new file mode 100644
--- /dev/null
+++ b/third_party/rust/byte-tools/src/read_slice.rs
@@ -0,0 +1,44 @@
+use core::ptr;
+
// Bulk read: copy `$src`'s bytes over `$dst`'s elements, then normalize
// each element from the stored byte order to native via `$which`
// (`to_le`/`to_be` double as from-conversions, since byte-swapping is an
// involution). `$size` must be the byte width of `$dst`'s element type.
macro_rules! read_slice {
    ($src:expr, $dst:expr, $size:expr, $which:ident) => ({
        assert_eq!($size*$dst.len(), $src.len());
        unsafe {
            // Byte-for-byte copy into the element buffer; the length was
            // checked by the assert above, and the slices cannot overlap
            // (`$src` is shared, `$dst` exclusive).
            ptr::copy_nonoverlapping(
                $src.as_ptr(),
                $dst.as_mut_ptr() as *mut u8,
                $src.len());
        }
        // In-place byte-order fixup of every element.
        for v in $dst.iter_mut() {
            *v = v.$which();
        }
    });
}
+
/// Read a vector of bytes into a vector of u32s. The values are read in
/// little-endian format.
///
/// Panics unless `src.len() == 4 * dst.len()`.
#[inline]
pub fn read_u32v_le(dst: &mut [u32], src: &[u8]) {
    assert_eq!(4*dst.len(), src.len());
    // Safe chunk-wise decode; replaces the unsafe raw-pointer macro.
    for (v, c) in dst.iter_mut().zip(src.chunks(4)) {
        *v = (c[0] as u32)
            | ((c[1] as u32) << 8)
            | ((c[2] as u32) << 16)
            | ((c[3] as u32) << 24);
    }
}
+
/// Read a vector of bytes into a vector of u32s. The values are read in
/// big-endian format.
///
/// Panics unless `src.len() == 4 * dst.len()`.
#[inline]
pub fn read_u32v_be(dst: &mut [u32], src: &[u8]) {
    assert_eq!(4*dst.len(), src.len());
    // Safe chunk-wise decode; replaces the unsafe raw-pointer macro.
    for (v, c) in dst.iter_mut().zip(src.chunks(4)) {
        *v = ((c[0] as u32) << 24)
            | ((c[1] as u32) << 16)
            | ((c[2] as u32) << 8)
            | (c[3] as u32);
    }
}
+
/// Read a vector of bytes into a vector of u64s. The values are read in
/// little-endian format.
///
/// Panics unless `src.len() == 8 * dst.len()`.
#[inline]
pub fn read_u64v_le(dst: &mut [u64], src: &[u8]) {
    assert_eq!(8*dst.len(), src.len());
    // Safe chunk-wise decode; byte i of each chunk carries bits 8*i..8*i+8.
    for (v, c) in dst.iter_mut().zip(src.chunks(8)) {
        *v = c.iter()
            .enumerate()
            .fold(0u64, |acc, (i, &b)| acc | ((b as u64) << (8 * i)));
    }
}
+
/// Read a vector of bytes into a vector of u64s. The values are read in
/// big-endian format.
///
/// Panics unless `src.len() == 8 * dst.len()`.
#[inline]
pub fn read_u64v_be(dst: &mut [u64], src: &[u8]) {
    assert_eq!(8*dst.len(), src.len());
    // Safe chunk-wise decode, most-significant byte first.
    for (v, c) in dst.iter_mut().zip(src.chunks(8)) {
        *v = c.iter().fold(0u64, |acc, &b| (acc << 8) | b as u64);
    }
}
new file mode 100644
--- /dev/null
+++ b/third_party/rust/byte-tools/src/write_single.rs
@@ -0,0 +1,39 @@
+use core::{mem, ptr};
+
// Write one integer: convert `$n` to the requested byte order via `$which`,
// reinterpret the result as `[u8; $size]`, and copy it into `$dst`.
macro_rules! write_single {
    ($dst:expr, $n:expr, $size:expr, $which:ident) => ({
        assert!($size == $dst.len());
        unsafe {
            // By-value transmute of an integer to its native byte array is
            // well-defined; `$which` has already fixed the byte order.
            let bytes = mem::transmute::<_, [u8; $size]>($n.$which());
            ptr::copy_nonoverlapping((&bytes).as_ptr(), $dst.as_mut_ptr(), $size);
        }
    });
}
+
/// Write a u32 into a vector, which must be 4 bytes long. The value is written
/// in little-endian format.
///
/// Panics if `dst` is not exactly 4 bytes long.
#[inline]
pub fn write_u32_le(dst: &mut [u8], n: u32) {
    assert!(4 == dst.len());
    // Safe shift-based encode; replaces the unsafe transmute macro.
    for (i, b) in dst.iter_mut().enumerate() {
        *b = (n >> (8 * i)) as u8;
    }
}
+
/// Write a u32 into a vector, which must be 4 bytes long. The value is written
/// in big-endian format.
///
/// Panics if `dst` is not exactly 4 bytes long.
#[inline]
pub fn write_u32_be(dst: &mut [u8], n: u32) {
    assert!(4 == dst.len());
    // Safe shift-based encode, most-significant byte first.
    for (i, b) in dst.iter_mut().enumerate() {
        *b = (n >> (8 * (3 - i))) as u8;
    }
}
+
/// Write a u64 into a vector, which must be 8 bytes long. The value is written
/// in little-endian format.
///
/// Panics if `dst` is not exactly 8 bytes long.
#[inline]
pub fn write_u64_le(dst: &mut [u8], n: u64) {
    assert!(8 == dst.len());
    // Safe shift-based encode; replaces the unsafe transmute macro.
    for (i, b) in dst.iter_mut().enumerate() {
        *b = (n >> (8 * i)) as u8;
    }
}
+
/// Write a u64 into a vector, which must be 8 bytes long. The value is written
/// in big-endian format.
///
/// Panics if `dst` is not exactly 8 bytes long.
#[inline]
pub fn write_u64_be(dst: &mut [u8], n: u64) {
    assert!(8 == dst.len());
    // Safe shift-based encode, most-significant byte first.
    for (i, b) in dst.iter_mut().enumerate() {
        *b = (n >> (8 * (7 - i))) as u8;
    }
}
new file mode 100644
--- /dev/null
+++ b/third_party/rust/byte-tools/src/write_slice.rs
@@ -0,0 +1,46 @@
+use core::{ptr, mem};
+
// Bulk write: store each `$src` element's bytes into the matching `$size`-byte
// chunk of `$dst`, in the byte order fixed by `$which` (`to_le`/`to_be`).
macro_rules! write_slice {
    ($src:expr, $dst:expr, $ty:ty, $size:expr, $which:ident) => ({
        assert!($size == mem::size_of::<$ty>());
        assert_eq!($dst.len(), $size*$src.len());
        // Convert each value first, then copy its native bytes into place.
        // Unlike the previous implementation, this never transmutes the
        // `&mut [u8]` into `&mut [$ty]` — that reinterpretation was
        // undefined behavior when `$dst` was not aligned for `$ty` (and the
        // transmuted slice's length field still counted bytes, not
        // elements).
        for (chunk, v) in $dst.chunks_mut($size).zip($src.iter()) {
            let tmp = v.$which();
            unsafe {
                // SAFETY: `chunk` is exactly `$size` bytes (asserted above)
                // and cannot overlap the local `tmp`.
                ptr::copy_nonoverlapping(
                    &tmp as *const $ty as *const u8,
                    chunk.as_mut_ptr(),
                    $size);
            }
        }
    });
}
+
/// Write a vector of u32s into a vector of bytes. The values are written in
/// little-endian format.
///
/// Panics unless `dst.len() == 4 * src.len()`.
#[inline]
pub fn write_u32v_le(dst: &mut [u8], src: &[u32]) {
    assert_eq!(dst.len(), 4*src.len());
    // Safe chunk-wise encode; replaces the unsafe transmute-based macro.
    for (c, &n) in dst.chunks_mut(4).zip(src.iter()) {
        for (i, b) in c.iter_mut().enumerate() {
            *b = (n >> (8 * i)) as u8;
        }
    }
}
+
/// Write a vector of u32s into a vector of bytes. The values are written in
/// big-endian format.
///
/// Panics unless `dst.len() == 4 * src.len()`.
#[inline]
pub fn write_u32v_be(dst: &mut [u8], src: &[u32]) {
    assert_eq!(dst.len(), 4*src.len());
    // Safe chunk-wise encode, most-significant byte first.
    for (c, &n) in dst.chunks_mut(4).zip(src.iter()) {
        for (i, b) in c.iter_mut().enumerate() {
            *b = (n >> (8 * (3 - i))) as u8;
        }
    }
}
+
/// Write a vector of u64s into a vector of bytes. The values are written in
/// little-endian format.
///
/// Panics unless `dst.len() == 8 * src.len()`.
#[inline]
pub fn write_u64v_le(dst: &mut [u8], src: &[u64]) {
    assert_eq!(dst.len(), 8*src.len());
    // Safe chunk-wise encode; replaces the unsafe transmute-based macro.
    for (c, &n) in dst.chunks_mut(8).zip(src.iter()) {
        for (i, b) in c.iter_mut().enumerate() {
            *b = (n >> (8 * i)) as u8;
        }
    }
}
+
/// Write a vector of u64s into a vector of bytes. The values are written in
/// big-endian format.
///
/// Panics unless `dst.len() == 8 * src.len()`.
/// (The previous doc comment incorrectly said "little-endian".)
#[inline]
pub fn write_u64v_be(dst: &mut [u8], src: &[u64]) {
    assert_eq!(dst.len(), 8*src.len());
    // Safe chunk-wise encode, most-significant byte first.
    for (c, &n) in dst.chunks_mut(8).zip(src.iter()) {
        for (i, b) in c.iter_mut().enumerate() {
            *b = (n >> (8 * (7 - i))) as u8;
        }
    }
}
new file mode 100644
--- /dev/null
+++ b/third_party/rust/digest/.cargo-checksum.json
@@ -0,0 +1,1 @@
+{"files":{".cargo_vcs_info.json":"5c4d89b9b833bb5681c04817ef4e799012a6252ba90021c6482010c8871b87a6","Cargo.toml":"b3667b1e1a3985dd2c9e7873f6945c2d7163ed7da95569f40c2097285a325ec4","LICENSE-APACHE":"a9040321c3712d8fd0b09cf52b17445de04a23a10165049ae187cd39e5c86be5","LICENSE-MIT":"9e0dfd2dd4173a530e238cb6adb37aa78c34c6bc7444e0e10c1ab5d8881f63ba","src/dev.rs":"c824f834fa8b8c729024e4ec61138e89c26a56bfb6b50295600dddb5ff8fff62","src/digest.rs":"6710ac33c80e6159a2396839794fc76a61b94ab573516a69486457b3e291c793","src/errors.rs":"cff5bf2350bc109ad4f08caacf6780ff1e7016d9995f0847e84e96a8e31ab9d5","src/lib.rs":"bf4e93ebd066513001f3d6d77024ae8addf4df4fd89f76549fd1b73df386f3e4"},"package":"03b072242a8cbaf9c145665af9d250c59af3b958f83ed6824e13533cf76d5b90"}
\ No newline at end of file
new file mode 100644
--- /dev/null
+++ b/third_party/rust/digest/.cargo_vcs_info.json
@@ -0,0 +1,5 @@
+{
+  "git": {
+    "sha1": "c02ab3d77605b540fd5dc2ea1a45c184f7d9e7d8"
+  }
+}
new file mode 100644
--- /dev/null
+++ b/third_party/rust/digest/Cargo.toml
@@ -0,0 +1,32 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g. crates.io) dependencies
+#
+# If you believe there's an error in this file please file an
+# issue against the rust-lang/cargo repository. If you're
+# editing this file be aware that the upstream Cargo.toml
+# will likely look very different (and much more reasonable)
+
+[package]
+name = "digest"
+version = "0.7.6"
+authors = ["RustCrypto Developers"]
+description = "Traits for cryptographic hash functions"
+documentation = "https://docs.rs/digest"
+keywords = ["digest", "crypto", "hash"]
+categories = ["cryptography", "no-std"]
+license = "MIT/Apache-2.0"
+repository = "https://github.com/RustCrypto/traits"
+[package.metadata.docs.rs]
+features = ["std"]
+[dependencies.generic-array]
+version = "0.9"
+
+[features]
+dev = []
+std = []
+[badges.travis-ci]
+repository = "RustCrypto/traits"
new file mode 100644
--- /dev/null
+++ b/third_party/rust/digest/LICENSE-APACHE
@@ -0,0 +1,201 @@
+                              Apache License
+                        Version 2.0, January 2004
+                     http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+   "License" shall mean the terms and conditions for use, reproduction,
+   and distribution as defined by Sections 1 through 9 of this document.
+
+   "Licensor" shall mean the copyright owner or entity authorized by
+   the copyright owner that is granting the License.
+
+   "Legal Entity" shall mean the union of the acting entity and all
+   other entities that control, are controlled by, or are under common
+   control with that entity. For the purposes of this definition,
+   "control" means (i) the power, direct or indirect, to cause the
+   direction or management of such entity, whether by contract or
+   otherwise, or (ii) ownership of fifty percent (50%) or more of the
+   outstanding shares, or (iii) beneficial ownership of such entity.
+
+   "You" (or "Your") shall mean an individual or Legal Entity
+   exercising permissions granted by this License.
+
+   "Source" form shall mean the preferred form for making modifications,
+   including but not limited to software source code, documentation
+   source, and configuration files.
+
+   "Object" form shall mean any form resulting from mechanical
+   transformation or translation of a Source form, including but
+   not limited to compiled object code, generated documentation,
+   and conversions to other media types.
+
+   "Work" shall mean the work of authorship, whether in Source or
+   Object form, made available under the License, as indicated by a
+   copyright notice that is included in or attached to the work
+   (an example is provided in the Appendix below).
+
+   "Derivative Works" shall mean any work, whether in Source or Object
+   form, that is based on (or derived from) the Work and for which the
+   editorial revisions, annotations, elaborations, or other modifications
+   represent, as a whole, an original work of authorship. For the purposes
+   of this License, Derivative Works shall not include works that remain
+   separable from, or merely link (or bind by name) to the interfaces of,
+   the Work and Derivative Works thereof.
+
+   "Contribution" shall mean any work of authorship, including
+   the original version of the Work and any modifications or additions
+   to that Work or Derivative Works thereof, that is intentionally
+   submitted to Licensor for inclusion in the Work by the copyright owner
+   or by an individual or Legal Entity authorized to submit on behalf of
+   the copyright owner. For the purposes of this definition, "submitted"
+   means any form of electronic, verbal, or written communication sent
+   to the Licensor or its representatives, including but not limited to
+   communication on electronic mailing lists, source code control systems,
+   and issue tracking systems that are managed by, or on behalf of, the
+   Licensor for the purpose of discussing and improving the Work, but
+   excluding communication that is conspicuously marked or otherwise
+   designated in writing by the copyright owner as "Not a Contribution."
+
+   "Contributor" shall mean Licensor and any individual or Legal Entity
+   on behalf of whom a Contribution has been received by Licensor and
+   subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+   this License, each Contributor hereby grants to You a perpetual,
+   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+   copyright license to reproduce, prepare Derivative Works of,
+   publicly display, publicly perform, sublicense, and distribute the
+   Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+   this License, each Contributor hereby grants to You a perpetual,
+   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+   (except as stated in this section) patent license to make, have made,
+   use, offer to sell, sell, import, and otherwise transfer the Work,
+   where such license applies only to those patent claims licensable
+   by such Contributor that are necessarily infringed by their
+   Contribution(s) alone or by combination of their Contribution(s)
+   with the Work to which such Contribution(s) was submitted. If You
+   institute patent litigation against any entity (including a
+   cross-claim or counterclaim in a lawsuit) alleging that the Work
+   or a Contribution incorporated within the Work constitutes direct
+   or contributory patent infringement, then any patent licenses
+   granted to You under this License for that Work shall terminate
+   as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+   Work or Derivative Works thereof in any medium, with or without
+   modifications, and in Source or Object form, provided that You
+   meet the following conditions:
+
+   (a) You must give any other recipients of the Work or
+       Derivative Works a copy of this License; and
+
+   (b) You must cause any modified files to carry prominent notices
+       stating that You changed the files; and
+
+   (c) You must retain, in the Source form of any Derivative Works
+       that You distribute, all copyright, patent, trademark, and
+       attribution notices from the Source form of the Work,
+       excluding those notices that do not pertain to any part of
+       the Derivative Works; and
+
+   (d) If the Work includes a "NOTICE" text file as part of its
+       distribution, then any Derivative Works that You distribute must
+       include a readable copy of the attribution notices contained
+       within such NOTICE file, excluding those notices that do not
+       pertain to any part of the Derivative Works, in at least one
+       of the following places: within a NOTICE text file distributed
+       as part of the Derivative Works; within the Source form or
+       documentation, if provided along with the Derivative Works; or,
+       within a display generated by the Derivative Works, if and
+       wherever such third-party notices normally appear. The contents
+       of the NOTICE file are for informational purposes only and
+       do not modify the License. You may add Your own attribution
+       notices within Derivative Works that You distribute, alongside
+       or as an addendum to the NOTICE text from the Work, provided
+       that such additional attribution notices cannot be construed
+       as modifying the License.
+
+   You may add Your own copyright statement to Your modifications and
+   may provide additional or different license terms and conditions
+   for use, reproduction, or distribution of Your modifications, or
+   for any such Derivative Works as a whole, provided Your use,
+   reproduction, and distribution of the Work otherwise complies with
+   the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+   any Contribution intentionally submitted for inclusion in the Work
+   by You to the Licensor shall be under the terms and conditions of
+   this License, without any additional terms or conditions.
+   Notwithstanding the above, nothing herein shall supersede or modify
+   the terms of any separate license agreement you may have executed
+   with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+   names, trademarks, service marks, or product names of the Licensor,
+   except as required for reasonable and customary use in describing the
+   origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+   agreed to in writing, Licensor provides the Work (and each
+   Contributor provides its Contributions) on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+   implied, including, without limitation, any warranties or conditions
+   of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+   PARTICULAR PURPOSE. You are solely responsible for determining the
+   appropriateness of using or redistributing the Work and assume any
+   risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+   whether in tort (including negligence), contract, or otherwise,
+   unless required by applicable law (such as deliberate and grossly
+   negligent acts) or agreed to in writing, shall any Contributor be
+   liable to You for damages, including any direct, indirect, special,
+   incidental, or consequential damages of any character arising as a
+   result of this License or out of the use or inability to use the
+   Work (including but not limited to damages for loss of goodwill,
+   work stoppage, computer failure or malfunction, or any and all
+   other commercial damages or losses), even if such Contributor
+   has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+   the Work or Derivative Works thereof, You may choose to offer,
+   and charge a fee for, acceptance of support, warranty, indemnity,
+   or other liability obligations and/or rights consistent with this
+   License. However, in accepting such obligations, You may act only
+   on Your own behalf and on Your sole responsibility, not on behalf
+   of any other Contributor, and only if You agree to indemnify,
+   defend, and hold each Contributor harmless for any liability
+   incurred by, or claims asserted against, such Contributor by reason
+   of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+   To apply the Apache License to your work, attach the following
+   boilerplate notice, with the fields enclosed by brackets "[]"
+   replaced with your own identifying information. (Don't include
+   the brackets!)  The text should be enclosed in the appropriate
+   comment syntax for the file format. We also recommend that a
+   file or class name and description of purpose be included on the
+   same "printed page" as the copyright notice for easier
+   identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+   http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
new file mode 100644
--- /dev/null
+++ b/third_party/rust/digest/LICENSE-MIT
@@ -0,0 +1,25 @@
+Copyright (c) 2017 Artyom Pavlov
+
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
new file mode 100644
--- /dev/null
+++ b/third_party/rust/digest/src/dev.rs
@@ -0,0 +1,171 @@
+use super::{Digest, Input, VariableOutput, ExtendableOutput, XofReader};
+use core::fmt::Debug;
+
+pub struct Test {
+    pub name: &'static str,
+    pub input: &'static [u8],
+    pub output: &'static [u8],
+}
+
+#[macro_export]
+macro_rules! new_tests {
+    ( $( $name:expr ),*  ) => {
+        [$(
+            Test {
+                name: $name,
+                input: include_bytes!(concat!("data/", $name, ".input.bin")),
+                output: include_bytes!(concat!("data/", $name, ".output.bin")),
+            },
+        )*]
+    };
+    ( $( $name:expr ),+, ) => (new_tests!($($name),+))
+}
+
+pub fn main_test<D: Digest + Debug + Clone>(tests: &[Test]) {
+    // Test that it works when accepting the message all at once
+    for t in tests.iter() {
+        let mut sh = D::default();
+        sh.input(t.input);
+
+        let out = sh.result();
+
+        assert_eq!(out[..], t.output[..]);
+    }
+
+    // Test that it works when accepting the message in pieces
+    for t in tests.iter() {
+        let mut sh = D::default();
+        let len = t.input.len();
+        let mut left = len;
+        while left > 0 {
+            let take = (left + 1) / 2;
+            sh.input(&t.input[len - left..take + len - left]);
+            left = left - take;
+        }
+
+        let out = sh.result();
+
+        assert_eq!(out[..], t.output[..]);
+    }
+}
+
+pub fn variable_test<D>(tests: &[Test])
+    where D: Input + VariableOutput + Clone + Debug
+{
+    let mut buf = [0u8; 1024];
+    // Test that it works when accepting the message all at once
+    for t in tests.iter() {
+        let mut sh = D::new(t.output.len()).unwrap();
+        sh.process(t.input);
+
+        let out = sh.variable_result(&mut buf[..t.output.len()]).unwrap();
+
+        assert_eq!(out[..], t.output[..]);
+    }
+
+    // Test that it works when accepting the message in pieces
+    for t in tests.iter() {
+        let mut sh = D::new(t.output.len()).unwrap();
+        let len = t.input.len();
+        let mut left = len;
+        while left > 0 {
+            let take = (left + 1) / 2;
+            sh.process(&t.input[len - left..take + len - left]);
+            left = left - take;
+        }
+
+        let out = sh.variable_result(&mut buf[..t.output.len()]).unwrap();
+
+        assert_eq!(out[..], t.output[..]);
+    }
+}
+
+
+pub fn xof_test<D>(tests: &[Test])
+    where D: Input + ExtendableOutput + Default + Debug + Clone
+{
+    let mut buf = [0u8; 1024];
+    // Test that it works when accepting the message all at once
+    for t in tests.iter() {
+        let mut sh = D::default();
+        sh.process(t.input);
+
+        let out = &mut buf[..t.output.len()];
+        sh.xof_result().read(out);
+
+        assert_eq!(out[..], t.output[..]);
+    }
+
+    // Test that it works when accepting the message in pieces
+    for t in tests.iter() {
+        let mut sh = D::default();
+        let len = t.input.len();
+        let mut left = len;
+        while left > 0 {
+            let take = (left + 1) / 2;
+            sh.process(&t.input[len - left..take + len - left]);
+            left = left - take;
+        }
+
+        let out = &mut buf[..t.output.len()];
+        sh.xof_result().read(out);
+
+        assert_eq!(out[..], t.output[..]);
+    }
+
+    // Test reading from the reader byte by byte
+    for t in tests.iter() {
+        let mut sh = D::default();
+        sh.process(t.input);
+
+        let mut reader = sh.xof_result();
+        let out = &mut buf[..t.output.len()];
+        for chunk in out.chunks_mut(1) {
+            reader.read(chunk);
+        }
+
+        assert_eq!(out[..], t.output[..]);
+    }
+}
+
+pub fn one_million_a<D: Digest + Default + Debug + Clone>(expected: &[u8]) {
+    let mut sh = D::default();
+    for _ in 0..50000 {
+        sh.input(&[b'a'; 10]);
+    }
+    sh.input(&[b'a'; 500000]);
+    let out = sh.result();
+    assert_eq!(out[..], expected[..]);
+}
+
+
+#[macro_export]
+macro_rules! bench_digest {
+    ($name:ident, $engine:path, $bs:expr) => {
+        #[bench]
+        fn $name(b: &mut Bencher) {
+            let mut d = <$engine>::default();
+            let data = [0; $bs];
+
+            b.iter(|| {
+                d.input(&data);
+            });
+
+            b.bytes = $bs;
+        }
+    };
+
+    ($engine:path) => {
+        extern crate test;
+
+        use test::Bencher;
+        use digest::Digest;
+
+        bench_digest!(bench1_16, $engine, 1<<4);
+        bench_digest!(bench2_64, $engine, 1<<6);
+        bench_digest!(bench3_256, $engine, 1<<8);
+        bench_digest!(bench4_1k, $engine, 1<<10);
+        bench_digest!(bench5_4k, $engine, 1<<12);
+        bench_digest!(bench6_16k, $engine, 1<<14);
+    }
+}
new file mode 100644
--- /dev/null
+++ b/third_party/rust/digest/src/digest.rs
@@ -0,0 +1,86 @@
+use super::{Input, BlockInput, FixedOutput};
+use generic_array::GenericArray;
+#[cfg(feature = "std")]
+use std::io;
+
+type Output<N> = GenericArray<u8, N>;
+
+/// The `Digest` trait specifies an interface common for digest functions.
+///
+/// It's a convenience wrapper around `Input`, `FixedOutput`, `BlockInput` and
+/// `Default` traits. It also provides additional convenience methods.
+pub trait Digest: Input + BlockInput + FixedOutput + Default {
+    /// Create new hasher instance
+    fn new() -> Self {
+        Self::default()
+    }
+
+    /// Digest input data. This method can be called repeatedly
+    /// for use with streaming messages.
+    fn input(&mut self, input: &[u8]) {
+        self.process(input);
+    }
+
+    /// Retrieve the digest result. This method consumes digest instance.
+    fn result(self) -> Output<Self::OutputSize> {
+        self.fixed_result()
+    }
+
+    /// Convenience function to compute hash of the `data`. It will handle
+    /// hasher creation, data feeding and finalization.
+    ///
+    /// Example:
+    ///
+    /// ```rust,ignore
+    /// println!("{:x}", sha2::Sha256::digest(b"Hello world"));
+    /// ```
+    #[inline]
+    fn digest(data: &[u8]) -> Output<Self::OutputSize> {
+        let mut hasher = Self::default();
+        hasher.process(data);
+        hasher.fixed_result()
+    }
+
+    /// Convenience function to compute hash of the string. It's equivalent to
+    /// `digest(input_string.as_bytes())`.
+    #[inline]
+    fn digest_str(str: &str) -> Output<Self::OutputSize> {
+        Self::digest(str.as_bytes())
+    }
+
+    /// Convenience function which takes `std::io::Read` as a source and computes
+    /// value of digest function `D`, e.g. SHA-2, SHA-3, BLAKE2, etc. using 1 KB
+    /// blocks.
+    ///
+    /// Usage example:
+    ///
+    /// ```rust,ignore
+    /// use std::fs;
+    /// use sha2::{Sha256, Digest};
+    ///
+    /// let mut file = fs::File::open("Cargo.toml")?;
+    /// let result = Sha256::digest_reader(&mut file)?;
+    /// println!("{:x}", result);
+    /// ```
+    #[cfg(feature = "std")]
+    #[inline]
+    fn digest_reader(source: &mut io::Read)
+        -> io::Result<Output<Self::OutputSize>>
+    {
+        let mut hasher = Self::default();
+
+        let mut buf = [0u8; 8 * 1024];
+
+        loop {
+            let len = match source.read(&mut buf) {
+                Ok(0) => return Ok(hasher.result()),
+                Ok(len) => len,
+                Err(ref e) if e.kind() == io::ErrorKind::Interrupted => continue,
+                Err(e) => Err(e)?,
+            };
+            hasher.process(&buf[..len]);
+        }
+    }
+}
+
+impl<D: Input + FixedOutput + BlockInput + Default> Digest for D {}
new file mode 100644
--- /dev/null
+++ b/third_party/rust/digest/src/errors.rs
@@ -0,0 +1,37 @@
+use core::fmt;
+#[cfg(feature = "std")]
+use std::error;
+
+/// The error type for variable hasher initialization
+#[derive(Clone, Copy, Debug, Default)]
+pub struct InvalidOutputSize;
+
+/// The error type for variable hasher result
+#[derive(Clone, Copy, Debug, Default)]
+pub struct InvalidBufferLength;
+
+impl fmt::Display for InvalidOutputSize {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        f.write_str("invalid output size")
+    }
+}
+
+impl fmt::Display for InvalidBufferLength {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        f.write_str("invalid buffer length")
+    }
+}
+
+#[cfg(feature = "std")]
+impl error::Error for InvalidOutputSize {
+    fn description(&self) -> &str {
+        "invalid output size"
+    }
+}
+
+#[cfg(feature = "std")]
+impl error::Error for InvalidBufferLength {
+    fn description(&self) -> &str {
+        "invalid buffer size"
+    }
+}
new file mode 100644
--- /dev/null
+++ b/third_party/rust/digest/src/lib.rs
@@ -0,0 +1,98 @@
+//! This crate provides traits for describing functionality of cryptographic hash
+//! functions.
+//!
+//! By default, std functionality in this crate is disabled (e.g. methods for
+//! hashing `Read`ers). To enable it, turn on the `std` feature in your `Cargo.toml`
+//! for this crate.
+#![cfg_attr(not(feature = "std"), no_std)]
+pub extern crate generic_array;
+
+#[cfg(feature = "std")]
+use std as core;
+use generic_array::{GenericArray, ArrayLength};
+
+mod digest;
+mod errors;
+#[cfg(feature = "dev")]
+pub mod dev;
+
+pub use errors::{InvalidOutputSize, InvalidBufferLength};
+pub use digest::Digest;
+
+// `process` is chosen to not overlap with `input` method in the digest trait
+// change it on trait alias stabilization
+
+/// Trait for processing input data
+pub trait Input {
+    /// Digest input data. This method can be called repeatedly
+    /// for use with streaming messages.
+    fn process(&mut self, input: &[u8]);
+}
+
+/// Trait to indicate that digest function processes data in blocks of size
+/// `BlockSize`. Main usage of this trait is for implementing HMAC generically.
+pub trait BlockInput {
+    type BlockSize: ArrayLength<u8>;
+}
+
+/// Trait for returning digest result with the fixed size
+pub trait FixedOutput {
+    type OutputSize: ArrayLength<u8>;
+
+    /// Retrieve the digest result. This method consumes digest instance.
+    fn fixed_result(self) -> GenericArray<u8, Self::OutputSize>;
+}
+
+/// The error type for variable digest output
+#[derive(Clone, Copy, Debug, Default, Eq, Hash, Ord, PartialEq, PartialOrd)]
+pub struct InvalidLength;
+
+/// Trait for returning digest result with the variable size
+pub trait VariableOutput: core::marker::Sized {
+    /// Create new hasher instance with given output size. Will return
+    /// `Err(InvalidLength)` in case if hasher can not work with the given
+    /// output size. Will always return an error if output size equals to zero.
+    fn new(output_size: usize) -> Result<Self, InvalidLength>;
+
+    /// Get output size of the hasher instance provided to the `new` method
+    fn output_size(&self) -> usize;
+
+    /// Retrieve the digest result into provided buffer. Length of the buffer
+    /// must be equal to output size provided to the `new` method, otherwise
+    /// `Err(InvalidLength)` will be returned
+    fn variable_result(self, buffer: &mut [u8]) -> Result<&[u8], InvalidLength>;
+}
+
+/// Trait for describing readers which are used to extract extendable output
+/// from the resulting state of hash function.
+pub trait XofReader {
+    /// Read output into the `buffer`. Can be called unlimited number of times.
+    fn read(&mut self, buffer: &mut [u8]);
+}
+
+/// Trait which describes extendable output (XOF) of hash functions. Using this
+/// trait you first need to get structure which implements `XofReader`, using
+/// which you can read extendable output.
+pub trait ExtendableOutput {
+    type Reader: XofReader;
+
+    /// Finalize hash function and return XOF reader
+    fn xof_result(self) -> Self::Reader;
+}
+
+/// Macro for defining opaque `Debug` implementation. It will use the following
+/// format: "HasherName { ... }". While it's convenient to have it
+/// (e.g. for including in other structs), it could be undesirable to leak
+/// internal state, which can happen for example through careless logging.
+#[macro_export]
+macro_rules! impl_opaque_debug {
+    ($state:ty) => {
+        impl ::core::fmt::Debug for $state {
+            fn fmt(&self, f: &mut ::core::fmt::Formatter)
+                -> Result<(), ::core::fmt::Error>
+            {
+                write!(f, concat!(stringify!($state), " {{ ... }}"))
+            }
+        }
+    }
+}
--- a/third_party/rust/ena/.cargo-checksum.json
+++ b/third_party/rust/ena/.cargo-checksum.json
@@ -1,1 +1,1 @@
-{"files":{".travis.yml":"f8e54ea908a294d46381a1bd608da3fcc7fb0a87cb15f546b93b74ee9c97bb2b","Cargo.toml":"63ff1e6e9d93ec6a81fb28f199ccbf9299e177152cd751f568623717e85ed83a","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"0621878e61f0d0fda054bcbe02df75192c28bde1ecc8289cbd86aeba2dd72720","README.md":"11d2194be1dc7460ee631a32884516f78d5d95dc6e5efa9115767a8f55f55a06","measurements.txt":"46606bc04662362369479bce5c31b109984c1a3446d7f0566556257af91b86e2","src/bitvec.rs":"c6c66c348776ff480b7ff6e4a3e0f64554a4194266f614408b45b5e3c324ec0a","src/cc/mod.rs":"fc486ba406d5761b1bd63621c37981c2b43966d269f8a596595fca36f8b395a4","src/cc/test.rs":"b6805fd4f22b3a3214c9759a674647e8b1dc83118f81c83955949a7414298423","src/constraint/mod.rs":"7df86d708ba692edd5bdb26b1da20720ee5bf51f741985c8193eb54db9365b4b","src/constraint/test.rs":"6666ec1411a61462777c88e7edf73f4bf7c04d4021007cf3340fd7ee22cece95","src/debug.rs":"0c24b9d2302c66e8f3e615c2a6689d88bc1eeac8844ae1f239fd3244c7f2ce6f","src/graph/mod.rs":"3a98ddddb4650744d5462ee442405551272e6c0ff820fd85c26dfec133974671","src/graph/tests.rs":"e2afc7912203e158d37d1f951cb76e6f67eb63890573649b3b2e9ea3afe5ba01","src/lib.rs":"d4584bb7efa3269a328d1ef373fef02e177efb8874f81556a124a58ea18fad87","src/snapshot_vec.rs":"0654cf102f05e98694b74076d5b2fcb7d52cfcbd1771853db22784ea7ad50cb1","src/unify/mod.rs":"0f8a78332c43d6776c2afa93aef174d5d10fb83a5046f0b7081262b754a31da3","src/unify/tests.rs":"9dfc23f77c6fc0565d90b0f74eceeadf666cd9c728aac388b33f138fbc30b50c"},"package":"cabe5a5078ac8c506d3e4430763b1ba9b609b1286913e7d08e581d1c2de9b7e5"}
\ No newline at end of file
+{"files":{".travis.yml":"8effd1577dc503149f0f829c9291d844ec44d155fd253aa6b644c4ccc25e8bc8","Cargo.toml":"13e445b6bc53bf1ea2379fd2ec33205daa9b1b74d5a41e4dd9ea8cb966185c5a","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"0621878e61f0d0fda054bcbe02df75192c28bde1ecc8289cbd86aeba2dd72720","README.md":"4b02d7ebfb188b1f2cbef20ade3082197046ccaa89e49d2bcdef6102d48919e3","measurements.txt":"b209f98f2bc696904a48829e86952f4f09b59e4e685f7c12087c59d05ed31829","src/bitvec.rs":"c6c66c348776ff480b7ff6e4a3e0f64554a4194266f614408b45b5e3c324ec0a","src/lib.rs":"294aabf6fb846dbe35bba837d70ea9115f20cd808995a318c0fccb05f91d096f","src/snapshot_vec.rs":"abc649bb42dc8592741b02d53ba1ed5f6ad64710b971070872b0c42665d73c93","src/unify/backing_vec.rs":"7d57036ce671169893d069f94454f1c4b95104517ffd62859f180d80cbe490e5","src/unify/mod.rs":"9fc90951778be635fbbf4fba8b3a0a4eb21e2c955660f019377465ac773b9563","src/unify/tests.rs":"b18974faeebdf2c03e82035fe7281bf4db3360ab10ce34b1d3441547836b19f2"},"package":"88dc8393b3c7352f94092497f6b52019643e493b6b890eb417cdb7c46117e621"}
\ No newline at end of file
--- a/third_party/rust/ena/.travis.yml
+++ b/third_party/rust/ena/.travis.yml
@@ -1,5 +1,9 @@
 language: rust
 rust:
+  - stable
   - nightly
 script:
-  - cargo test
\ No newline at end of file
+  - cargo test
+  - |
+    [ $TRAVIS_RUST_VERSION != nightly ] ||
+    cargo test --all-features
--- a/third_party/rust/ena/Cargo.toml
+++ b/third_party/rust/ena/Cargo.toml
@@ -1,8 +1,37 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g. crates.io) dependencies
+#
+# If you believe there's an error in this file please file an
+# issue against the rust-lang/cargo repository. If you're
+# editing this file be aware that the upstream Cargo.toml
+# will likely look very different (and much more reasonable)
+
 [package]
 name = "ena"
+version = "0.9.3"
+authors = ["Niko Matsakis <niko@alum.mit.edu>"]
 description = "Union-find, congruence closure, and other unification code. Based on code from rustc."
-license = "MIT/Apache-2.0"
 homepage = "https://github.com/nikomatsakis/ena"
+readme = "README.md"
+keywords = ["unification", "union-find"]
+license = "MIT/Apache-2.0"
 repository = "https://github.com/nikomatsakis/ena"
-version = "0.5.0"
-authors = ["Niko Matsakis <niko@alum.mit.edu>"]
+[dependencies.dogged]
+version = "0.2.0"
+optional = true
+
+[dependencies.log]
+version = "0.4"
+
+[dependencies.petgraph]
+version = "0.4.5"
+optional = true
+
+[features]
+bench = []
+congruence-closure = ["petgraph"]
+persistent = ["dogged"]
--- a/third_party/rust/ena/README.md
+++ b/third_party/rust/ena/README.md
@@ -1,19 +1,23 @@
-[![Build Status](https://travis-ci.org/nikomatsakis/rayon.svg?branch=master)](https://travis-ci.org/nikomatsakis/ena)
+[![Build Status](https://travis-ci.org/nikomatsakis/ena.svg?branch=master)](https://travis-ci.org/nikomatsakis/ena)
 
-An implementation of union-find / congruence-closure in Rust. Forked
-from rustc for independent experimentation. My intention is to iterate
-and improve this code and gradually bring back changes into rustc
-itself, but also to enable other crates.io packages to use the same
-code.
+An implementation of union-find in Rust; extracted from (and used by)
+rustc.
 
 ### Name
 
 The name "ena" comes from the Greek word for "one".
 
+### Features
+
+By default, you just get the union-find implementation. You can also
+opt-in to the following experimental features:
+
+- `bench`: use to run benchmarks (`cargo bench --features bench`)
+
 ### License
 
 Like rustc itself, this code is dual-licensed under the MIT and Apache
 licenses. Pull requests, comments, and other contributions are assumed
 to imply consent to those terms. Moreover, it is understood that any
 changes here may well be used in rustc itself under the same terms.
 
--- a/third_party/rust/ena/measurements.txt
+++ b/third_party/rust/ena/measurements.txt
@@ -1,21 +1,6 @@
 base
-test unify::test::big_array_bench ... bench:   1,416,793 ns/iter (+/- 216,475)
-
-assert -> debug_assert
-test unify::test::big_array_bench ... bench:   1,420,368 ns/iter (+/- 144,433)
-test unify::test::big_array_bench ... bench:   1,414,448 ns/iter (+/- 219,137)
+test unify::tests::big_array_bench ... bench:     740,192 ns/iter (+/- 35,823)
+test unify::tests::big_array_bench ... bench:     745,031 ns/iter (+/- 240,463)
+test unify::tests::big_array_bench ... bench:     762,031 ns/iter (+/- 240,463)
+test unify::tests::big_array_bench ... bench:     756,234 ns/iter (+/- 264,710)
 
-don't copy for redirects
-test unify::test::big_array_bench ... bench:   1,349,796 ns/iter (+/- 233,931)
-test unify::test::big_array_bench ... bench:   1,367,082 ns/iter (+/- 301,644)
-test unify::test::big_array_bench ... bench:   1,358,154 ns/iter (+/- 348,796)
-
-copy less
-test unify::test::big_array_bench ... bench:     744,775 ns/iter (+/- 51,865)
-test unify::test::big_array_bench ... bench:     750,939 ns/iter (+/- 146,417)
-test unify::test::big_array_bench ... bench:     754,104 ns/iter (+/- 121,968)
-
-s/set-value/update-value/
-test unify::test::big_array_bench ... bench:     731,531 ns/iter (+/- 125,685)
-test unify::test::big_array_bench ... bench:     725,162 ns/iter (+/- 99,013)
-test unify::test::big_array_bench ... bench:     735,473 ns/iter (+/- 121,156)
deleted file mode 100644
--- a/third_party/rust/ena/src/cc/mod.rs
+++ /dev/null
@@ -1,436 +0,0 @@
-//! An implementation of the Congruence Closure algorithm based on the
-//! paper "Fast Decision Procedures Based on Congruence Closure" by Nelson
-//! and Oppen, JACM 1980.
-
-use graph::{self, Graph, NodeIndex};
-use std::collections::HashMap;
-use std::fmt::Debug;
-use std::hash::Hash;
-use std::iter;
-use unify::{UnifyKey, UnifyValue, InfallibleUnifyValue, UnificationTable, UnionedKeys};
-
-#[cfg(test)]
-mod test;
-
-pub struct CongruenceClosure<K: Key> {
-    map: HashMap<K, Token>,
-    table: UnificationTable<Token>,
-    graph: Graph<K, ()>,
-}
-
-pub trait Key: Hash + Eq + Clone + Debug {
-    // If this Key has some efficient way of converting itself into a
-    // congruence closure `Token`, then it shold return `Some(token)`.
-    // Otherwise, return `None`, in which case the CC will internally
-    // map the key to a token. Typically, this is used by layers that
-    // wrap the CC, where inference variables are mapped directly to
-    // particular tokens.
-    fn to_token(&self) -> Option<Token> {
-        None
-    }
-    fn key_kind(&self) -> KeyKind;
-    fn shallow_eq(&self, key: &Self) -> bool;
-    fn successors(&self) -> Vec<Self>;
-}
-
-#[derive(Copy, Clone, Debug, PartialEq, Eq)]
-pub enum KeyKind {
-    Applicative,
-    Generative,
-}
-use self::KeyKind::*;
-
-#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
-pub struct Token {
-    // this is the index both for the graph and the unification table,
-    // since for every node there is also a slot in the unification
-    // table
-    index: u32,
-}
-
-impl Token {
-    fn new(index: u32) -> Token {
-        Token { index: index }
-    }
-
-    fn from_node(node: NodeIndex) -> Token {
-        Token { index: node.0 as u32 }
-    }
-
-    fn node(&self) -> NodeIndex {
-        NodeIndex(self.index as usize)
-    }
-}
-
-impl UnifyKey for Token {
-    type Value = KeyKind;
-    fn index(&self) -> u32 {
-        self.index
-    }
-    fn from_index(i: u32) -> Token {
-        Token::new(i)
-    }
-    fn tag() -> &'static str {
-        "CongruenceClosure"
-    }
-    fn order_roots(a: Self,
-                   &a_value: &KeyKind,
-                   b: Self,
-                   &b_value: &KeyKind)
-                   -> Option<(Self, Self)> {
-        if a_value == b_value {
-            None
-        } else if a_value == Generative {
-            Some((a, b))
-        } else {
-            debug_assert!(b_value == Generative);
-            Some((b, a))
-        }
-    }
-}
-
-impl UnifyValue for KeyKind {
-    fn unify_values(&kind1: &Self, &kind2: &Self) -> Result<Self, (Self, Self)> {
-        match (kind1, kind2) {
-            (Generative, _) => Ok(Generative),
-            (_, Generative) => Ok(Generative),
-            (Applicative, Applicative) => Ok(Applicative),
-        }
-    }
-}
-
-impl InfallibleUnifyValue for KeyKind {}
-
-impl<K: Key> CongruenceClosure<K> {
-    pub fn new() -> CongruenceClosure<K> {
-        CongruenceClosure {
-            map: HashMap::new(),
-            table: UnificationTable::new(),
-            graph: Graph::new(),
-        }
-    }
-
-    /// Manually create a new CC token. You don't normally need to do
-    /// this, as CC tokens are automatically created for each key when
-    /// we first observe it. However, if you wish to have keys that
-    /// make use of the `to_token` method to bypass the `key -> token`
-    /// map, then you can use this function to make a new-token.  The
-    /// callback `key_op` will be invoked to create the key for the
-    /// fresh token (typically, it will wrap the token in some kind of
-    /// enum indicating an inference variable).
-    ///
-    /// **WARNING:** The new key **must** be a leaf (no successor
-    /// keys) or else things will not work right. This invariant is
-    /// not currently checked.
-    pub fn new_token<OP>(&mut self, key_kind: KeyKind, key_op: OP) -> Token
-        where OP: FnOnce(Token) -> K
-    {
-        let token = self.table.new_key(key_kind);
-        let key = key_op(token);
-        let node = self.graph.add_node(key);
-        assert_eq!(token.node(), node);
-        token
-    }
-
-    /// Return the key for a given token
-    pub fn key(&self, token: Token) -> &K {
-        self.graph.node_data(token.node())
-    }
-
-    /// Indicates they `key1` and `key2` are equivalent.
-    pub fn merge(&mut self, key1: K, key2: K) {
-        let token1 = self.add(key1);
-        let token2 = self.add(key2);
-        self.algorithm().merge(token1, token2);
-    }
-
-    /// Indicates whether `key1` and `key2` are equivalent.
-    pub fn merged(&mut self, key1: K, key2: K) -> bool {
-        // Careful: you cannot naively remove the `add` calls
-        // here. The reason is because of patterns like the test
-        // `struct_union_no_add`. If we unify X and Y, and then unify
-        // F(X) and F(Z), we need to be sure to figure out that F(Y)
-        // == F(Z). This requires a non-trivial deduction step, so
-        // just checking if the arguments are congruent will fail,
-        // because `Y == Z` does not hold.
-
-        debug!("merged: called({:?}, {:?})", key1, key2);
-
-        let token1 = self.add(key1);
-        let token2 = self.add(key2);
-        self.algorithm().unioned(token1, token2)
-    }
-
-    /// Returns an iterator over all keys that are known to have been
-    /// merged with `key`. This is a bit dubious, since the set of
-    /// merged keys will be dependent on what has been added, and is
-    /// not the full set of equivalencies that one might imagine. See the
-    /// test `merged_keys` for an example.
-    pub fn merged_keys(&mut self, key: K) -> MergedKeys<K> {
-        let token = self.add(key);
-        MergedKeys {
-            graph: &self.graph,
-            iterator: self.table.unioned_keys(token),
-        }
-    }
-
-    /// Add a key into the CC table, returning the corresponding
-    /// token. This is not part of the public API, though it could be
-    /// if we wanted.
-    fn add(&mut self, key: K) -> Token {
-        debug!("add(): key={:?}", key);
-
-        let (is_new, token) = self.get_or_add(&key);
-        debug!("add: key={:?} is_new={:?} token={:?}", key, is_new, token);
-
-        // if this node is already in the graph, we are done
-        if !is_new {
-            return token;
-        }
-
-        // Otherwise, we want to add the 'successors' also. So, for
-        // example, if we are adding `Box<Foo>`, the successor would
-        // be `Foo`.  So go ahead and recursively add `Foo` if it
-        // doesn't already exist.
-        let successors: Vec<_> = key.successors()
-            .into_iter()
-            .map(|s| self.add(s))
-            .collect();
-
-        debug!("add: key={:?} successors={:?}", key, successors);
-
-        // Now we have to be a bit careful. It might be that we are
-        // adding `Box<Foo>`, but `Foo` was already present, and in
-        // fact equated with `Bar`. That is, maybe we had a graph like:
-        //
-        //      Box<Bar> -> Bar == Foo
-        //
-        // Now we just added `Box<Foo>`, but we need to equate
-        // `Box<Foo>` and `Box<Bar>`.
-        for successor in successors {
-            // get set of predecessors for each successor BEFORE we add the new node;
-            // this would be `Box<Bar>` in the above example.
-            let predecessors: Vec<_> = self.algorithm().all_preds(successor);
-
-            debug!("add: key={:?} successor={:?} predecessors={:?}",
-                   key,
-                   successor,
-                   predecessors);
-
-            // add edge from new node `Box<Foo>` to its successor `Foo`
-            self.graph.add_edge(token.node(), successor.node(), ());
-
-            // Now we have to consider merging the old predecessors,
-            // like `Box<Bar>`, with this new node `Box<Foo>`.
-            //
-            // Note that in other cases it might be that no merge will
-            // occur. For example, if we were adding `(A1, B1)` to a
-            // graph like this:
-            //
-            //     (A, B) -> A == A1
-            //        |
-            //        v
-            //        B
-            //
-            // In this case, the predecessor would be `(A, B)`; but we don't
-            // know that `B == B1`, so we can't merge that with `(A1, B1)`.
-            for predecessor in predecessors {
-                self.algorithm().maybe_merge(token, predecessor);
-            }
-        }
-
-        token
-    }
-
-    /// Gets the token for a key, if any.
-    fn get(&self, key: &K) -> Option<Token> {
-        key.to_token()
-            .or_else(|| self.map.get(key).cloned())
-    }
-
-    /// Gets the token for a key, adding one if none exists. Returns the token
-    /// and a boolean indicating whether it had to be added.
-    fn get_or_add(&mut self, key: &K) -> (bool, Token) {
-        if let Some(token) = self.get(key) {
-            return (false, token);
-        }
-
-        let token = self.new_token(key.key_kind(), |_| key.clone());
-        self.map.insert(key.clone(), token);
-        (true, token)
-    }
-
-    fn algorithm(&mut self) -> Algorithm<K> {
-        Algorithm {
-            graph: &self.graph,
-            table: &mut self.table,
-        }
-    }
-}
-
-// # Walking merged keys
-
-pub struct MergedKeys<'cc, K: Key + 'cc> {
-    graph: &'cc Graph<K, ()>,
-    iterator: UnionedKeys<'cc, Token>,
-}
-
-impl<'cc, K: Key> Iterator for MergedKeys<'cc, K> {
-    type Item = K;
-
-    fn next(&mut self) -> Option<Self::Item> {
-        self.iterator
-            .next()
-            .map(|token| self.graph.node_data(token.node()).clone())
-    }
-}
-
-// # The core algorithm
-
-struct Algorithm<'a, K: Key + 'a> {
-    graph: &'a Graph<K, ()>,
-    table: &'a mut UnificationTable<Token>,
-}
-
-impl<'a, K: Key> Algorithm<'a, K> {
-    fn merge(&mut self, u: Token, v: Token) {
-        debug!("merge(): u={:?} v={:?}", u, v);
-
-        if self.unioned(u, v) {
-            return;
-        }
-
-        let u_preds = self.all_preds(u);
-        let v_preds = self.all_preds(v);
-
-        self.union(u, v);
-
-        for &p_u in &u_preds {
-            for &p_v in &v_preds {
-                self.maybe_merge(p_u, p_v);
-            }
-        }
-    }
-
-    fn all_preds(&mut self, u: Token) -> Vec<Token> {
-        let graph = self.graph;
-        self.table
-            .unioned_keys(u)
-            .flat_map(|k| graph.predecessor_nodes(k.node()))
-            .map(|i| Token::from_node(i))
-            .collect()
-    }
-
-    fn maybe_merge(&mut self, p_u: Token, p_v: Token) {
-        debug!("maybe_merge(): p_u={:?} p_v={:?}",
-               self.key(p_u),
-               self.key(p_v));
-
-        if !self.unioned(p_u, p_v) && self.shallow_eq(p_u, p_v) && self.congruent(p_u, p_v) {
-            self.merge(p_u, p_v);
-        }
-    }
-
-    // Check whether each of the successors are unioned. So if you
-    // have `Box<X1>` and `Box<X2>`, this is true if `X1 == X2`. (The
-    // result of this fn is not really meaningful unless the two nodes
-    // are shallow equal here.)
-    fn congruent(&mut self, p_u: Token, p_v: Token) -> bool {
-        debug_assert!(self.shallow_eq(p_u, p_v));
-        debug!("congruent({:?}, {:?})", self.key(p_u), self.key(p_v));
-        let succs_u = self.successors(p_u);
-        let succs_v = self.successors(p_v);
-        let r = succs_u.zip(succs_v).all(|(s_u, s_v)| {
-            debug!("congruent: s_u={:?} s_v={:?}", s_u, s_v);
-            self.unioned(s_u, s_v)
-        });
-        debug!("congruent({:?}, {:?}) = {:?}",
-               self.key(p_u),
-               self.key(p_v),
-               r);
-        r
-    }
-
-    fn key(&self, u: Token) -> &'a K {
-        self.graph.node_data(u.node())
-    }
-
-    // Compare the local data, not considering successor nodes. So e.g
-    // `Box<X>` and `Box<Y>` are shallow equal for any `X` and `Y`.
-    fn shallow_eq(&self, u: Token, v: Token) -> bool {
-        let r = self.key(u).shallow_eq(self.key(v));
-        debug!("shallow_eq({:?}, {:?}) = {:?}", self.key(u), self.key(v), r);
-        r
-    }
-
-    fn token_kind(&self, u: Token) -> KeyKind {
-        self.graph.node_data(u.node()).key_kind()
-    }
-
-    fn unioned(&mut self, u: Token, v: Token) -> bool {
-        let r = self.table.unioned(u, v);
-        debug!("unioned(u={:?}, v={:?}) = {:?}",
-               self.key(u),
-               self.key(v),
-               r);
-        r
-    }
-
-    fn union(&mut self, u: Token, v: Token) {
-        debug!("union(u={:?}, v={:?})", self.key(u), self.key(v));
-
-        // find the roots of `u` and `v`; if `u` and `v` have been unioned
-        // with anything generative, these will be generative.
-        let u = self.table.find(u);
-        let v = self.table.find(v);
-
-        // u and v are now union'd
-        self.table.union(u, v);
-
-        // if both `u` and `v` were generative, we can now propagate
-        // the constraint that their successors must also be the same
-        if self.token_kind(u) == Generative && self.token_kind(v) == Generative {
-            if self.shallow_eq(u, v) {
-                let mut succs_u = self.successors(u);
-                let mut succs_v = self.successors(v);
-                for (succ_u, succ_v) in succs_u.by_ref().zip(succs_v.by_ref()) {
-                    // assume # of succ is equal because types are WF (asserted below)
-                    self.merge(succ_u, succ_v);
-                }
-                debug_assert!(succs_u.next().is_none());
-                debug_assert!(succs_v.next().is_none());
-            } else {
-                // error: user asked us to union i32/u32 or Vec<T>/Vec<U>;
-                // for now just panic.
-                panic!("inconsistent conclusion: {:?} vs {:?}",
-                       self.key(u),
-                       self.key(v));
-            }
-        }
-    }
-
-    fn successors(&self, token: Token) -> iter::Map<graph::AdjacentTargets<'a, K, ()>,
-                                                    fn(NodeIndex) -> Token> {
-        self.graph
-            .successor_nodes(token.node())
-            .map(Token::from_node)
-    }
-
-    fn predecessors(&self, token: Token) -> iter::Map<graph::AdjacentSources<'a, K, ()>,
-                                                      fn(NodeIndex) -> Token> {
-        self.graph
-            .predecessor_nodes(token.node())
-            .map(Token::from_node)
-    }
-
-    /// If `token` has been unioned with something generative, returns
-    /// `Ok(u)` where `u` is the generative token. Otherwise, returns
-    /// `Err(v)` where `v` is the root of `token`.
-    fn normalize_to_generative(&mut self, token: Token) -> Result<Token, Token> {
-        let token = self.table.find(token);
-        match self.token_kind(token) {
-            Generative => Ok(token),
-            Applicative => Err(token),
-        }
-    }
-}
deleted file mode 100644
--- a/third_party/rust/ena/src/cc/test.rs
+++ /dev/null
@@ -1,349 +0,0 @@
-// use debug::Logger;
-use cc::{CongruenceClosure, Key, KeyKind, Token};
-use self::TypeStruct::*;
-
-#[derive(Clone, Debug, PartialEq, Eq, Hash)]
-enum TypeStruct {
-    // e.g., `<T as Iterator>::Item` would be `Assoc(Iterator::Item, vec![T])`
-    Assoc(&'static str, Vec<Type>),
-
-    // skolemized version of in-scope generic, e.g., the `T` when checking `fn foo<T>`
-    Skolem(u32),
-
-    // inference variable (existentially quantified)
-    Variable(Token),
-
-    // a nominal type applied to arguments, e.g. `i32` or `Vec<T>`
-    Nominal(&'static str, Vec<Type>),
-}
-
-type Type = Box<TypeStruct>;
-
-impl Key for Type {
-    fn to_token(&self) -> Option<Token> {
-        match **self {
-            TypeStruct::Variable(t) => Some(t),
-            _ => None,
-        }
-    }
-
-    fn key_kind(&self) -> KeyKind {
-        match **self {
-            TypeStruct::Assoc(..) |
-            TypeStruct::Variable(_) |
-            TypeStruct::Skolem(_) =>
-                KeyKind::Applicative,
-
-            TypeStruct::Nominal(..) =>
-                KeyKind::Generative,
-        }
-    }
-
-    fn shallow_eq(&self, key: &Type) -> bool {
-        match (&**self, &**key) {
-            (&Assoc(i, _), &Assoc(j, _)) => i == j,
-            (&Skolem(i), &Skolem(j)) => i == j,
-            (&Nominal(i, _), &Nominal(j, _)) => i == j,
-            _ => false,
-        }
-    }
-
-    fn successors(&self) -> Vec<Self> {
-        match **self {
-            Assoc(_, ref s) => s.clone(),
-            Skolem(_) => vec![],
-            Variable(_) => vec![],
-            Nominal(_, ref s) => s.clone(),
-        }
-    }
-}
-
-fn skolem(x: u32) -> Type {
-    Box::new(Skolem(x))
-}
-
-fn iterator_item(t: Type) -> Type {
-    Box::new(Assoc("Iterator::Item", vec![t]))
-}
-
-fn integer() -> Type {
-    Box::new(Nominal("integer", vec![]))
-}
-
-fn character() -> Type {
-    Box::new(Nominal("char", vec![]))
-}
-
-fn vec(t: Type) -> Type {
-    Box::new(Nominal("Vec", vec![t]))
-}
-
-fn inference_var<'tcx>(cc: &mut CongruenceClosure<Type>) -> Type {
-    let token = cc.new_token(KeyKind::Applicative,
-                             move |token| Box::new(TypeStruct::Variable(token)));
-    cc.key(token).clone()
-}
-
-#[test]
-fn simple_as_it_gets() {
-    let mut cc: CongruenceClosure<Type> = CongruenceClosure::new();
-    assert!(cc.merged(skolem(0), skolem(0)));
-    assert!(!cc.merged(skolem(0), skolem(1)));
-    assert!(cc.merged(skolem(1), skolem(1)));
-    assert!(cc.merged(iterator_item(skolem(0)), iterator_item(skolem(0))));
-    assert!(!cc.merged(iterator_item(skolem(0)), iterator_item(skolem(1))));
-    assert!(cc.merged(iterator_item(skolem(1)), iterator_item(skolem(1))));
-}
-
-#[test]
-fn union_vars() {
-    let mut cc: CongruenceClosure<Type> = CongruenceClosure::new();
-    cc.merge(skolem(0), skolem(1));
-    assert!(cc.merged(skolem(0), skolem(1)));
-}
-
-#[test]
-fn union_iterator_item_then_test_var() {
-    let mut cc: CongruenceClosure<Type> = CongruenceClosure::new();
-    cc.merge(skolem(0), skolem(1));
-    assert!(cc.merged(skolem(0), skolem(1)));
-}
-
-#[test]
-fn union_direct() {
-    let mut cc: CongruenceClosure<Type> = CongruenceClosure::new();
-
-    cc.add(iterator_item(skolem(0)));
-    cc.add(iterator_item(skolem(1)));
-    cc.add(skolem(0));
-    cc.add(skolem(1));
-
-    cc.merge(skolem(0), skolem(1));
-    assert!(cc.merged(iterator_item(skolem(0)), iterator_item(skolem(1))));
-}
-
-macro_rules! indirect_test {
-    ($test_name:ident: $a:expr, $b:expr; $c:expr, $d:expr) => {
-        #[test]
-        fn $test_name() {
-            // Variant 1: call `add` explicitly
-            //
-            // This caused bugs because nodes were pre-existing.
-            {
-                let mut cc: CongruenceClosure<Type> = CongruenceClosure::new();
-
-                cc.add(iterator_item(skolem(0)));
-                cc.add(iterator_item(skolem(2)));
-                cc.add(skolem(0));
-                cc.add(skolem(1));
-                cc.add(skolem(2));
-
-                cc.merge($a, $b);
-                cc.merge($c, $d);
-                assert!(cc.merged(iterator_item(skolem(0)), iterator_item(skolem(2))));
-            }
-
-            // Variant 2: never call `add` explicitly
-            //
-            // This is more how we expect library to be used in practice.
-            {
-                let mut cc2: CongruenceClosure<Type> = CongruenceClosure::new();
-                cc2.merge($a, $b);
-                cc2.merge($c, $d);
-                assert!(cc2.merged(iterator_item(skolem(0)), iterator_item(skolem(2))));
-            }
-        }
-    }
-}
-
-// The indirect tests test for the case where we merge V0 and V1, and
-// we merged V1 and V2, and we want to use this to conclude that
-// Assoc(V0) and Assoc(V2) are merged -- but there is no node created for
-// Assoc(V1).
-indirect_test! { indirect_test_1: skolem(1), skolem(2); skolem(1), skolem(0) }
-indirect_test! { indirect_test_2: skolem(2), skolem(1); skolem(1), skolem(0) }
-indirect_test! { indirect_test_3: skolem(1), skolem(2); skolem(0), skolem(1) }
-indirect_test! { indirect_test_4: skolem(2), skolem(1); skolem(0), skolem(1) }
-
-// Here we determine that `Assoc(V0) == Assoc(V1)` because `V0==V1`,
-// but we never add nodes for `Assoc(_)`.
-#[test]
-fn merged_no_add() {
-    let mut cc: CongruenceClosure<Type> = CongruenceClosure::new();
-
-    cc.merge(skolem(0), skolem(1));
-
-    assert!(cc.merged(iterator_item(skolem(0)), iterator_item(skolem(1))));
-}
-
-// Here we determine that `Assoc(V0) == Assoc(V2)` because `V0==V1==V2`,
-// but we never add nodes for `Assoc(_)`.
-#[test]
-fn merged_no_add_indirect() {
-    let mut cc: CongruenceClosure<Type> = CongruenceClosure::new();
-
-    cc.merge(skolem(0), skolem(1));
-    cc.merge(skolem(1), skolem(2));
-
-    assert!(cc.merged(iterator_item(skolem(0)), iterator_item(skolem(2))));
-}
-
-// Here we determine that `Assoc(V0) == Assoc(V2)` because `V0==V1==V2`,
-// but we never add nodes for `Assoc(_)`.
-#[test]
-fn iterator_item_not_merged() {
-    let mut cc: CongruenceClosure<Type> = CongruenceClosure::new();
-
-    cc.merge(iterator_item(skolem(0)), iterator_item(skolem(1)));
-
-    assert!(!cc.merged(skolem(0), skolem(1)));
-    assert!(cc.merged(iterator_item(skolem(0)), iterator_item(skolem(1))));
-}
-
-// Here we show that merging `Assoc(V1) == Assoc(V2)` does NOT imply that
-// `V1 == V2`.
-#[test]
-fn merge_fns_not_inputs() {
-    let mut cc: CongruenceClosure<Type> = CongruenceClosure::new();
-
-    cc.merge(iterator_item(skolem(0)), iterator_item(skolem(1)));
-
-    assert!(!cc.merged(skolem(0), skolem(1)));
-    assert!(cc.merged(iterator_item(skolem(0)), iterator_item(skolem(1))));
-}
-
-#[test]
-fn inf_var_union() {
-    let mut cc: CongruenceClosure<Type> = CongruenceClosure::new();
-
-    let v0 = inference_var(&mut cc);
-    let v1 = inference_var(&mut cc);
-    let v2 = inference_var(&mut cc);
-    let iterator_item_v0 = iterator_item(v0.clone());
-    let iterator_item_v1 = iterator_item(v1.clone());
-    let iterator_item_v2 = iterator_item(v2.clone());
-
-    cc.merge(v0.clone(), v1.clone());
-
-    assert!(cc.map.is_empty()); // inf variables don't take up map slots
-
-    assert!(cc.merged(iterator_item_v0.clone(), iterator_item_v1.clone()));
-    assert!(!cc.merged(iterator_item_v0.clone(), iterator_item_v2.clone()));
-
-    cc.merge(iterator_item_v0.clone(), iterator_item_v2.clone());
-    assert!(cc.merged(iterator_item_v0.clone(), iterator_item_v2.clone()));
-    assert!(cc.merged(iterator_item_v1.clone(), iterator_item_v2.clone()));
-
-    assert_eq!(cc.map.len(), 3); // each iterator_item needs an entry
-}
-
-#[test]
-fn skolem_union_no_add() {
-
-    // This particular pattern of unifications exploits a potentially
-    // subtle bug:
-    // - We merge `skolem(0)` and `skolem(1)`
-    //   and then merge `Assoc(skolem(0))` and `Assoc(skolem(2))`.
-    // - From this we should be able to deduce that `Assoc(skolem(1)) == Assoc(skolem(2))`.
-    // - However, if we are not careful with accounting for
-    //   predecessors and so forth, this fails. For example, when
-    //   adding `Assoc(skolem(1))`, we have to consider `Assoc(skolem(0))`
-    //   to be a predecessor of `skolem(1)`.
-
-    let mut cc: CongruenceClosure<Type> = CongruenceClosure::new();
-
-    cc.merge(skolem(0), skolem(1));
-    assert!(cc.merged(iterator_item(skolem(0)), iterator_item(skolem(1))));
-    assert!(!cc.merged(iterator_item(skolem(0)), iterator_item(skolem(2))));
-
-    cc.merge(iterator_item(skolem(0)), iterator_item(skolem(2)));
-    assert!(cc.merged(iterator_item(skolem(0)), iterator_item(skolem(2))));
-    assert!(cc.merged(iterator_item(skolem(1)), iterator_item(skolem(2))));
-}
-
-#[test]
-fn merged_keys() {
-    let mut cc: CongruenceClosure<Type> = CongruenceClosure::new();
-
-    cc.merge(skolem(0), skolem(1));
-    cc.merge(iterator_item(skolem(0)), iterator_item(skolem(2)));
-
-    // Here we don't yet see `iterator_item(skolem(1))` because it has no
-    // corresponding node:
-    let keys: Vec<Type> = cc.merged_keys(iterator_item(skolem(2))).collect();
-    assert_eq!(&keys[..], &[iterator_item(skolem(2)), iterator_item(skolem(0))]);
-
-    // But of course `merged` returns true (and adds a node):
-    assert!(cc.merged(iterator_item(skolem(1)), iterator_item(skolem(2))));
-
-    // So now we see it:
-    let keys: Vec<Type> = cc.merged_keys(iterator_item(skolem(2))).collect();
-    assert_eq!(&keys[..], &[iterator_item(skolem(2)),
-                            iterator_item(skolem(1)),
-                            iterator_item(skolem(0))]);
-}
-
-// Here we show that merging `Vec<V1> == Vec<V2>` DOES imply that
-// `V1 == V2`.
-#[test]
-fn merge_vecs() {
-    let mut cc: CongruenceClosure<Type> = CongruenceClosure::new();
-
-    cc.merge(vec(skolem(0)), vec(skolem(1)));
-
-    assert!(cc.merged(skolem(0), skolem(1)));
-    assert!(cc.merged(vec(skolem(0)), vec(skolem(1))));
-    assert!(cc.merged(iterator_item(skolem(0)), iterator_item(skolem(1))));
-}
-
-// Here we show that merging `Vec<V1::Item> == Vec<V2::Item>` does NOT imply that
-// `V1 == V2`.
-#[test]
-fn merge_vecs_of_items() {
-    let mut cc: CongruenceClosure<Type> = CongruenceClosure::new();
-
-    cc.merge(vec(iterator_item(skolem(0))),
-             vec(iterator_item(skolem(1))));
-
-    assert!(!cc.merged(skolem(0), skolem(1)));
-    assert!(!cc.merged(vec(skolem(0)), vec(skolem(1))));
-    assert!(cc.merged(vec(iterator_item(skolem(0))),
-                      vec(iterator_item(skolem(1)))));
-    assert!(cc.merged(iterator_item(vec(iterator_item(skolem(0)))),
-                      iterator_item(vec(iterator_item(skolem(1))))));
-    assert!(cc.merged(iterator_item(iterator_item(vec(iterator_item(skolem(0))))),
-                      iterator_item(iterator_item(vec(iterator_item(skolem(1)))))));
-    assert!(cc.merged(iterator_item(skolem(0)), iterator_item(skolem(1))));
-}
-
-// Here we merge `Vec<Int>::Item` with `Int` and then merge that later
-// with an inference variable, and show that we concluded that the
-// variable is (indeed) `Int`.
-#[test]
-fn merge_iterator_item_generative() {
-    let mut cc: CongruenceClosure<Type> = CongruenceClosure::new();
-    cc.merge(iterator_item(vec(integer())), integer());
-    let v0 = inference_var(&mut cc);
-    cc.merge(iterator_item(vec(integer())), v0.clone());
-    assert!(cc.merged(v0.clone(), integer()));
-    assert!(cc.merged(vec(iterator_item(vec(integer()))), vec(integer())));
-}
-
-#[test]
-fn merge_ripple() {
-    let mut cc: CongruenceClosure<Type> = CongruenceClosure::new();
-
-    cc.merge(iterator_item(skolem(1)), vec(skolem(0)));
-    cc.merge(iterator_item(skolem(2)), vec(integer()));
-
-    assert!(!cc.merged(iterator_item(skolem(1)), iterator_item(skolem(2))));
-
-    println!("------------------------------");
-    cc.merge(skolem(0), integer());
-
-    println!("------------------------------");
-    assert!(cc.merged(iterator_item(skolem(1)),
-                      iterator_item(skolem(2))));
-    assert!(cc.merged(iterator_item(iterator_item(skolem(1))),
-                      iterator_item(iterator_item(skolem(2)))));
-}
deleted file mode 100644
--- a/third_party/rust/ena/src/constraint/mod.rs
+++ /dev/null
@@ -1,160 +0,0 @@
-//! Constraint graph.
-
-#![allow(dead_code)]
-
-use graph::{Graph, NodeIndex};
-use std::collections::VecDeque;
-use std::u32;
-
-#[cfg(test)]
-mod test;
-
-pub trait Lattice {
-    type Element: Clone + Eq;
-
-    fn lub(&self, elem1: &Self::Element, elem2: &Self::Element) -> Option<Self::Element>;
-}
-
-pub struct ConstraintGraph<L: Lattice> {
-    graph: Graph<(), ()>,
-    values: Vec<L::Element>,
-    lattice: L,
-}
-
-#[derive(Copy, Clone)]
-pub struct Var {
-    index: u32,
-}
-
-impl Var {
-    pub fn index(&self) -> usize {
-        self.index as usize
-    }
-
-    fn to_node_index(self) -> NodeIndex {
-        NodeIndex(self.index as usize)
-    }
-
-    fn from_node_index(ni: NodeIndex) -> Var {
-        assert!(ni.0 < (u32::MAX as usize));
-        Var { index: ni.0 as u32 }
-    }
-}
-
-impl<L> ConstraintGraph<L>
-    where L: Lattice
-{
-    fn new(lattice: L) -> ConstraintGraph<L> {
-        ConstraintGraph {
-            graph: Graph::new(),
-            values: Vec::new(),
-            lattice: lattice,
-        }
-    }
-
-    fn new_var(&mut self, initial_value: L::Element) -> Var {
-        assert_eq!(self.graph.all_nodes().len(), self.values.len());
-        let node_index = self.graph.add_node(());
-        self.values.push(initial_value);
-        Var::from_node_index(node_index)
-    }
-
-    pub fn constrain_var(&mut self, var: Var, value: L::Element) -> Vec<PropagationError<L>> {
-        let propagation = Propagation::new(&self.lattice, &self.graph, &mut self.values);
-        propagation.propagate(value, var)
-    }
-
-    pub fn add_edge(&mut self, source: Var, target: Var) -> Vec<PropagationError<L>> {
-        let source_node = source.to_node_index();
-        let target_node = target.to_node_index();
-
-        if self.graph
-               .successor_nodes(source_node)
-               .any(|n| n == target_node) {
-            return vec![];
-        }
-
-        self.graph.add_edge(source_node, target_node, ());
-        let value = self.current_value(source);
-        self.constrain_var(target, value)
-    }
-
-    pub fn current_value(&self, node: Var) -> L::Element {
-        self.values[node.index()].clone()
-    }
-}
-
-/// ////////////////////////////////////////////////////////////////////////
-
-struct Propagation<'p, L>
-    where L: Lattice + 'p,
-          L::Element: 'p
-{
-    lattice: &'p L,
-    graph: &'p Graph<(), ()>,
-    values: &'p mut Vec<L::Element>,
-    queue: VecDeque<Var>,
-    errors: Vec<PropagationError<L>>,
-}
-
-pub struct PropagationError<L>
-    where L: Lattice
-{
-    var: Var,
-    old_value: L::Element,
-    new_value: L::Element,
-}
-
-impl<'p, L> Propagation<'p, L>
-    where L: Lattice,
-          L::Element: 'p
-{
-    fn new(lattice: &'p L,
-           graph: &'p Graph<(), ()>,
-           values: &'p mut Vec<L::Element>)
-           -> Propagation<'p, L> {
-        Propagation {
-            lattice: lattice,
-            graph: graph,
-            values: values,
-            queue: VecDeque::new(),
-            errors: Vec::new(),
-        }
-    }
-
-    fn propagate(mut self, value: L::Element, var: Var) -> Vec<PropagationError<L>> {
-        self.update_node(value, var);
-
-        while let Some(dirty) = self.queue.pop_front() {
-            let value = self.values[dirty.index()].clone();
-
-            for succ_node_index in self.graph.successor_nodes(dirty.to_node_index()) {
-                let succ_var = Var::from_node_index(succ_node_index);
-                self.update_node(value.clone(), succ_var);
-            }
-        }
-
-        self.errors
-    }
-
-    fn update_node(&mut self, value: L::Element, var: Var) {
-        let cur_value = self.values[var.index()].clone();
-        match self.lattice.lub(&cur_value, &value) {
-            Some(new_value) => {
-                if cur_value != new_value {
-                    self.values[var.index()] = value;
-                    self.queue.push_back(var);
-                }
-            }
-
-            None => {
-                // Error. Record for later.
-                self.errors.push(PropagationError::<L> {
-                    var: var,
-                    old_value: cur_value,
-                    new_value: value,
-                });
-            }
-        }
-    }
-}
deleted file mode 100644
--- a/third_party/rust/ena/src/constraint/test.rs
+++ /dev/null
@@ -1,69 +0,0 @@
-use super::*;
-
-use std::cmp;
-
-struct MaxLattice;
-
-impl Lattice for MaxLattice {
-    type Element = u32;
-
-    fn lub(&self, elem1: &u32, elem2: &u32) -> Option<u32> {
-        Some(cmp::max(*elem1, *elem2))
-    }
-}
-
-#[test]
-fn basic() {
-    // v1 --+--> v2
-    //      |
-    // v3 --+
-
-    let mut graph = ConstraintGraph::new(MaxLattice);
-    let v1 = graph.new_var(3);
-    let v2 = graph.new_var(0);
-    graph.add_edge(v1, v2);
-    assert_eq!(graph.current_value(v1), 3);
-    assert_eq!(graph.current_value(v2), 3);
-
-    let v3 = graph.new_var(5);
-    graph.add_edge(v3, v2);
-    assert_eq!(graph.current_value(v1), 3);
-    assert_eq!(graph.current_value(v2), 5);
-    assert_eq!(graph.current_value(v3), 5);
-
-    graph.constrain_var(v1, 10);
-    assert_eq!(graph.current_value(v1), 10);
-    assert_eq!(graph.current_value(v2), 10);
-    assert_eq!(graph.current_value(v3), 5);
-}
-
-
-#[test]
-fn cycle() {
-    // v1 ----> v2
-    // ^        |
-    // |        v
-    // v3 <---- v3
-
-    let mut graph = ConstraintGraph::new(MaxLattice);
-    let vars = [graph.new_var(0), graph.new_var(0), graph.new_var(0), graph.new_var(0)];
-
-    for i in 0..4 {
-        graph.add_edge(vars[i], vars[(i + 1) % vars.len()]);
-    }
-
-    graph.constrain_var(vars[1], 3);
-    assert!(vars.iter().all(|&var| graph.current_value(var) == 3));
-
-    graph.constrain_var(vars[2], 5);
-    assert!(vars.iter().all(|&var| graph.current_value(var) == 5));
-
-    graph.constrain_var(vars[3], 2);
-    assert!(vars.iter().all(|&var| graph.current_value(var) == 5));
-
-    graph.constrain_var(vars[3], 6);
-    assert!(vars.iter().all(|&var| graph.current_value(var) == 6));
-
-    graph.constrain_var(vars[0], 10);
-    assert!(vars.iter().all(|&var| graph.current_value(var) == 10));
-}
deleted file mode 100644
--- a/third_party/rust/ena/src/debug.rs
+++ /dev/null
@@ -1,43 +0,0 @@
-#[cfg(test)]
-use std::cell::Cell;
-
-#[cfg(test)]
-thread_local!(pub static ENABLED: Cell<u32> = Cell::new(0));
-
-#[cfg(test)]
-#[macro_export]
-macro_rules! debug {
-    ($($arg:tt)*) => (
-        ::debug::ENABLED.with(|slot| {
-            if slot.get() != 0 {
-                println!("{}", format_args!($($arg)+));
-            }
-        })
-    )
-}
-
-#[cfg(not(test))]
-#[macro_export]
-macro_rules! debug {
-    ($($arg:tt)*) => ( () )
-}
-
-#[cfg(test)]
-pub struct Logger {
-    _x: (),
-}
-
-#[cfg(test)]
-impl Logger {
-    pub fn new() -> Logger {
-        ENABLED.with(|slot| slot.set(slot.get() + 1));
-        Logger { _x: () }
-    }
-}
-
-#[cfg(test)]
-impl Drop for Logger {
-    fn drop(&mut self) {
-        ENABLED.with(|slot| slot.set(slot.get() - 1));
-    }
-}
deleted file mode 100644
--- a/third_party/rust/ena/src/graph/mod.rs
+++ /dev/null
@@ -1,427 +0,0 @@
-// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! A graph module for use in dataflow, region resolution, and elsewhere.
-//!
-//! # Interface details
-//!
-//! You customize the graph by specifying a "node data" type `N` and an
-//! "edge data" type `E`. You can then later gain access (mutable or
-//! immutable) to these "user-data" bits. Currently, you can only add
-//! nodes or edges to the graph. You cannot remove or modify them once
-//! added. This could be changed if we have a need.
-//!
-//! # Implementation details
-//!
-//! The main tricky thing about this code is the way that edges are
-//! stored. The edges are stored in a central array, but they are also
-//! threaded onto two linked lists for each node, one for incoming edges
-//! and one for outgoing edges. Note that every edge is a member of some
-//! incoming list and some outgoing list.  Basically you can load the
-//! first index of the linked list from the node data structures (the
-//! field `first_edge`) and then, for each edge, load the next index from
-//! the field `next_edge`). Each of those fields is an array that should
-//! be indexed by the direction (see the type `Direction`).
-
-use bitvec::BitVector;
-use std::fmt::{Formatter, Error, Debug};
-use std::usize;
-use snapshot_vec::{SnapshotVec, SnapshotVecDelegate};
-
-#[cfg(test)]
-mod tests;
-
-pub struct Graph<N, E> {
-    nodes: SnapshotVec<Node<N>>,
-    edges: SnapshotVec<Edge<E>>,
-}
-
-pub struct Node<N> {
-    first_edge: [EdgeIndex; 2], // see module comment
-    pub data: N,
-}
-
-pub struct Edge<E> {
-    next_edge: [EdgeIndex; 2], // see module comment
-    source: NodeIndex,
-    target: NodeIndex,
-    pub data: E,
-}
-
-impl<N> SnapshotVecDelegate for Node<N> {
-    type Value = Node<N>;
-    type Undo = ();
-
-    fn reverse(_: &mut Vec<Node<N>>, _: ()) {}
-}
-
-impl<N> SnapshotVecDelegate for Edge<N> {
-    type Value = Edge<N>;
-    type Undo = ();
-
-    fn reverse(_: &mut Vec<Edge<N>>, _: ()) {}
-}
-
-impl<E: Debug> Debug for Edge<E> {
-    fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
-        write!(f,
-               "Edge {{ next_edge: [{:?}, {:?}], source: {:?}, target: {:?}, data: {:?} }}",
-               self.next_edge[0],
-               self.next_edge[1],
-               self.source,
-               self.target,
-               self.data)
-    }
-}
-
-#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
-pub struct NodeIndex(pub usize);
-
-#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
-pub struct EdgeIndex(pub usize);
-
-pub const INVALID_EDGE_INDEX: EdgeIndex = EdgeIndex(usize::MAX);
-
-// Use a private field here to guarantee no more instances are created:
-#[derive(Copy, Clone, Debug, PartialEq)]
-pub struct Direction {
-    repr: usize,
-}
-
-pub const OUTGOING: Direction = Direction { repr: 0 };
-
-pub const INCOMING: Direction = Direction { repr: 1 };
-
-impl NodeIndex {
-    /// Returns unique id (unique with respect to the graph holding associated node).
-    pub fn node_id(&self) -> usize {
-        self.0
-    }
-}
-
-impl EdgeIndex {
-    /// Returns unique id (unique with respect to the graph holding associated edge).
-    pub fn edge_id(&self) -> usize {
-        self.0
-    }
-}
-
-impl<N: Debug, E: Debug> Graph<N, E> {
-    pub fn new() -> Graph<N, E> {
-        Graph {
-            nodes: SnapshotVec::new(),
-            edges: SnapshotVec::new(),
-        }
-    }
-
-    // # Simple accessors
-
-    #[inline]
-    pub fn all_nodes(&self) -> &[Node<N>] {
-        &self.nodes
-    }
-
-    #[inline]
-    pub fn len_nodes(&self) -> usize {
-        self.nodes.len()
-    }
-
-    #[inline]
-    pub fn all_edges(&self) -> &[Edge<E>] {
-        &self.edges
-    }
-
-    #[inline]
-    pub fn len_edges(&self) -> usize {
-        self.edges.len()
-    }
-
-    // # Node construction
-
-    pub fn next_node_index(&self) -> NodeIndex {
-        NodeIndex(self.nodes.len())
-    }
-
-    pub fn add_node(&mut self, data: N) -> NodeIndex {
-        let idx = self.next_node_index();
-        self.nodes.push(Node {
-            first_edge: [INVALID_EDGE_INDEX, INVALID_EDGE_INDEX],
-            data: data,
-        });
-        idx
-    }
-
-    pub fn mut_node_data(&mut self, idx: NodeIndex) -> &mut N {
-        &mut self.nodes[idx.0].data
-    }
-
-    pub fn node_data(&self, idx: NodeIndex) -> &N {
-        &self.nodes[idx.0].data
-    }
-
-    pub fn node(&self, idx: NodeIndex) -> &Node<N> {
-        &self.nodes[idx.0]
-    }
-
-    // # Edge construction and queries
-
-    pub fn next_edge_index(&self) -> EdgeIndex {
-        EdgeIndex(self.edges.len())
-    }
-
-    pub fn add_edge(&mut self, source: NodeIndex, target: NodeIndex, data: E) -> EdgeIndex {
-        debug!("graph: add_edge({:?}, {:?}, {:?})", source, target, data);
-
-        let idx = self.next_edge_index();
-
-        // read current first of the list of edges from each node
-        let source_first = self.nodes[source.0].first_edge[OUTGOING.repr];
-        let target_first = self.nodes[target.0].first_edge[INCOMING.repr];
-
-        // create the new edge, with the previous firsts from each node
-        // as the next pointers
-        self.edges.push(Edge {
-            next_edge: [source_first, target_first],
-            source: source,
-            target: target,
-            data: data,
-        });
-
-        // adjust the firsts for each node target be the next object.
-        self.nodes[source.0].first_edge[OUTGOING.repr] = idx;
-        self.nodes[target.0].first_edge[INCOMING.repr] = idx;
-
-        return idx;
-    }
-
-    pub fn mut_edge_data(&mut self, idx: EdgeIndex) -> &mut E {
-        &mut self.edges[idx.0].data
-    }
-
-    pub fn edge_data(&self, idx: EdgeIndex) -> &E {
-        &self.edges[idx.0].data
-    }
-
-    pub fn edge(&self, idx: EdgeIndex) -> &Edge<E> {
-        &self.edges[idx.0]
-    }
-
-    pub fn first_adjacent(&self, node: NodeIndex, dir: Direction) -> EdgeIndex {
-        //! Accesses the index of the first edge adjacent to `node`.
-        //! This is useful if you wish to modify the graph while walking
-        //! the linked list of edges.
-
-        self.nodes[node.0].first_edge[dir.repr]
-    }
-
-    pub fn next_adjacent(&self, edge: EdgeIndex, dir: Direction) -> EdgeIndex {
-        //! Accesses the next edge in a given direction.
-        //! This is useful if you wish to modify the graph while walking
-        //! the linked list of edges.
-
-        self.edges[edge.0].next_edge[dir.repr]
-    }
-
-    // # Iterating over nodes, edges
-
-    pub fn each_node<'a, F>(&'a self, mut f: F) -> bool
-        where F: FnMut(NodeIndex, &'a Node<N>) -> bool
-    {
-        //! Iterates over all edges defined in the graph.
-        self.nodes.iter().enumerate().all(|(i, node)| f(NodeIndex(i), node))
-    }
-
-    pub fn each_edge<'a, F>(&'a self, mut f: F) -> bool
-        where F: FnMut(EdgeIndex, &'a Edge<E>) -> bool
-    {
-        //! Iterates over all edges defined in the graph
-        self.edges.iter().enumerate().all(|(i, edge)| f(EdgeIndex(i), edge))
-    }
-
-    pub fn outgoing_edges(&self, source: NodeIndex) -> AdjacentEdges<N, E> {
-        self.adjacent_edges(source, OUTGOING)
-    }
-
-    pub fn incoming_edges(&self, source: NodeIndex) -> AdjacentEdges<N, E> {
-        self.adjacent_edges(source, INCOMING)
-    }
-
-    pub fn adjacent_edges(&self, source: NodeIndex, direction: Direction) -> AdjacentEdges<N, E> {
-        let first_edge = self.node(source).first_edge[direction.repr];
-        AdjacentEdges {
-            graph: self,
-            direction: direction,
-            next: first_edge,
-        }
-    }
-
-    pub fn successor_nodes(&self, source: NodeIndex) -> AdjacentTargets<N, E> {
-        self.outgoing_edges(source).targets()
-    }
-
-    pub fn predecessor_nodes(&self, target: NodeIndex) -> AdjacentSources<N, E> {
-        self.incoming_edges(target).sources()
-    }
-
-    // # Fixed-point iteration
-    //
-    // A common use for graphs in our compiler is to perform
-    // fixed-point iteration. In this case, each edge represents a
-    // constraint, and the nodes themselves are associated with
-    // variables or other bitsets. This method facilitates such a
-    // computation.
-
-    pub fn iterate_until_fixed_point<'a, F>(&'a self, mut op: F)
-        where F: FnMut(usize, EdgeIndex, &'a Edge<E>) -> bool
-    {
-        let mut iteration = 0;
-        let mut changed = true;
-        while changed {
-            changed = false;
-            iteration += 1;
-            for (i, edge) in self.edges.iter().enumerate() {
-                changed |= op(iteration, EdgeIndex(i), edge);
-            }
-        }
-    }
-
-    pub fn depth_traverse<'a>(&'a self, start: NodeIndex) -> DepthFirstTraversal<'a, N, E> {
-        DepthFirstTraversal {
-            graph: self,
-            stack: vec![start],
-            visited: BitVector::new(self.nodes.len()),
-        }
-    }
-}
-
-// # Iterators
-
-pub struct AdjacentEdges<'g, N, E>
-    where N: 'g,
-          E: 'g
-{
-    graph: &'g Graph<N, E>,
-    direction: Direction,
-    next: EdgeIndex,
-}
-
-impl<'g, N, E> AdjacentEdges<'g, N, E> {
-    fn targets(self) -> AdjacentTargets<'g, N, E> {
-        AdjacentTargets { edges: self }
-    }
-
-    fn sources(self) -> AdjacentSources<'g, N, E> {
-        AdjacentSources { edges: self }
-    }
-}
-
-impl<'g, N: Debug, E: Debug> Iterator for AdjacentEdges<'g, N, E> {
-    type Item = (EdgeIndex, &'g Edge<E>);
-
-    fn next(&mut self) -> Option<(EdgeIndex, &'g Edge<E>)> {
-        let edge_index = self.next;
-        if edge_index == INVALID_EDGE_INDEX {
-            return None;
-        }
-
-        let edge = self.graph.edge(edge_index);
-        self.next = edge.next_edge[self.direction.repr];
-        Some((edge_index, edge))
-    }
-}
-
-pub struct AdjacentTargets<'g, N: 'g, E: 'g>
-    where N: 'g,
-          E: 'g
-{
-    edges: AdjacentEdges<'g, N, E>,
-}
-
-impl<'g, N: Debug, E: Debug> Iterator for AdjacentTargets<'g, N, E> {
-    type Item = NodeIndex;
-
-    fn next(&mut self) -> Option<NodeIndex> {
-        self.edges.next().map(|(_, edge)| edge.target)
-    }
-}
-
-pub struct AdjacentSources<'g, N: 'g, E: 'g>
-    where N: 'g,
-          E: 'g
-{
-    edges: AdjacentEdges<'g, N, E>,
-}
-
-impl<'g, N: Debug, E: Debug> Iterator for AdjacentSources<'g, N, E> {
-    type Item = NodeIndex;
-
-    fn next(&mut self) -> Option<NodeIndex> {
-        self.edges.next().map(|(_, edge)| edge.source)
-    }
-}
-
-pub struct DepthFirstTraversal<'g, N: 'g, E: 'g> {
-    graph: &'g Graph<N, E>,
-    stack: Vec<NodeIndex>,
-    visited: BitVector,
-}
-
-impl<'g, N: Debug, E: Debug> Iterator for DepthFirstTraversal<'g, N, E> {
-    type Item = NodeIndex;
-
-    fn next(&mut self) -> Option<NodeIndex> {
-        while let Some(idx) = self.stack.pop() {
-            if !self.visited.insert(idx.node_id()) {
-                continue;
-            }
-
-            for (_, edge) in self.graph.outgoing_edges(idx) {
-                if !self.visited.contains(edge.target().node_id()) {
-                    self.stack.push(edge.target());
-                }
-            }
-
-            return Some(idx);
-        }
-
-        return None;
-    }
-}
-
-pub fn each_edge_index<F>(max_edge_index: EdgeIndex, mut f: F)
-    where F: FnMut(EdgeIndex) -> bool
-{
-    let mut i = 0;
-    let n = max_edge_index.0;
-    while i < n {
-        if !f(EdgeIndex(i)) {
-            return;
-        }
-        i += 1;
-    }
-}
-
-impl<E> Edge<E> {
-    pub fn source(&self) -> NodeIndex {
-        self.source
-    }
-
-    pub fn target(&self) -> NodeIndex {
-        self.target
-    }
-
-    pub fn source_or_target(&self, direction: Direction) -> NodeIndex {
-        if direction == OUTGOING {
-            self.target
-        } else {
-            self.source
-        }
-    }
-}
deleted file mode 100644
--- a/third_party/rust/ena/src/graph/tests.rs
+++ /dev/null
@@ -1,141 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use graph::*;
-use std::fmt::Debug;
-
-type TestNode = Node<&'static str>;
-type TestEdge = Edge<&'static str>;
-type TestGraph = Graph<&'static str, &'static str>;
-
-fn create_graph() -> TestGraph {
-    let mut graph = Graph::new();
-
-    // Create a simple graph
-    //
-    //    A -+> B --> C
-    //       |  |     ^
-    //       |  v     |
-    //       F  D --> E
-
-    let a = graph.add_node("A");
-    let b = graph.add_node("B");
-    let c = graph.add_node("C");
-    let d = graph.add_node("D");
-    let e = graph.add_node("E");
-    let f = graph.add_node("F");
-
-    graph.add_edge(a, b, "AB");
-    graph.add_edge(b, c, "BC");
-    graph.add_edge(b, d, "BD");
-    graph.add_edge(d, e, "DE");
-    graph.add_edge(e, c, "EC");
-    graph.add_edge(f, b, "FB");
-
-    return graph;
-}
-
-#[test]
-fn each_node() {
-    let graph = create_graph();
-    let expected = ["A", "B", "C", "D", "E", "F"];
-    graph.each_node(|idx, node| {
-        assert_eq!(&expected[idx.0], graph.node_data(idx));
-        assert_eq!(expected[idx.0], node.data);
-        true
-    });
-}
-
-#[test]
-fn each_edge() {
-    let graph = create_graph();
-    let expected = ["AB", "BC", "BD", "DE", "EC", "FB"];
-    graph.each_edge(|idx, edge| {
-        assert_eq!(&expected[idx.0], graph.edge_data(idx));
-        assert_eq!(expected[idx.0], edge.data);
-        true
-    });
-}
-
-fn test_adjacent_edges<N: PartialEq + Debug, E: PartialEq + Debug>(graph: &Graph<N, E>,
-                                                                   start_index: NodeIndex,
-                                                                   start_data: N,
-                                                                   expected_incoming: &[(E, N)],
-                                                                   expected_outgoing: &[(E, N)]) {
-    assert!(graph.node_data(start_index) == &start_data);
-
-    let mut counter = 0;
-    for (edge_index, edge) in graph.incoming_edges(start_index) {
-        assert!(graph.edge_data(edge_index) == &edge.data);
-        assert!(counter < expected_incoming.len());
-        debug!("counter={:?} expected={:?} edge_index={:?} edge={:?}",
-               counter,
-               expected_incoming[counter],
-               edge_index,
-               edge);
-        match expected_incoming[counter] {
-            (ref e, ref n) => {
-                assert!(e == &edge.data);
-                assert!(n == graph.node_data(edge.source()));
-                assert!(start_index == edge.target);
-            }
-        }
-        counter += 1;
-    }
-    assert_eq!(counter, expected_incoming.len());
-
-    let mut counter = 0;
-    for (edge_index, edge) in graph.outgoing_edges(start_index) {
-        assert!(graph.edge_data(edge_index) == &edge.data);
-        assert!(counter < expected_outgoing.len());
-        debug!("counter={:?} expected={:?} edge_index={:?} edge={:?}",
-               counter,
-               expected_outgoing[counter],
-               edge_index,
-               edge);
-        match expected_outgoing[counter] {
-            (ref e, ref n) => {
-                assert!(e == &edge.data);
-                assert!(start_index == edge.source);
-                assert!(n == graph.node_data(edge.target));
-            }
-        }
-        counter += 1;
-    }
-    assert_eq!(counter, expected_outgoing.len());
-}
-
-#[test]
-fn each_adjacent_from_a() {
-    let graph = create_graph();
-    test_adjacent_edges(&graph, NodeIndex(0), "A", &[], &[("AB", "B")]);
-}
-
-#[test]
-fn each_adjacent_from_b() {
-    let graph = create_graph();
-    test_adjacent_edges(&graph,
-                        NodeIndex(1),
-                        "B",
-                        &[("FB", "F"), ("AB", "A")],
-                        &[("BD", "D"), ("BC", "C")]);
-}
-
-#[test]
-fn each_adjacent_from_c() {
-    let graph = create_graph();
-    test_adjacent_edges(&graph, NodeIndex(2), "C", &[("EC", "E"), ("BC", "B")], &[]);
-}
-
-#[test]
-fn each_adjacent_from_d() {
-    let graph = create_graph();
-    test_adjacent_edges(&graph, NodeIndex(3), "D", &[("BD", "B")], &[("DE", "E")]);
-}
--- a/third_party/rust/ena/src/lib.rs
+++ b/third_party/rust/ena/src/lib.rs
@@ -3,21 +3,21 @@
 // http://rust-lang.org/COPYRIGHT.
 //
 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-#![cfg_attr(all(feature = "unstable", test), feature(test))]
-#![allow(dead_code)]
+//! An implementation of union-find. See the `unify` module for more
+//! details.
+
+#![cfg_attr(feature = "bench", feature(test))]
 
 #[macro_use]
-mod debug;
+extern crate log;
 
-pub mod constraint;
-pub mod graph;
+#[cfg(feature = "persistent")]
+extern crate dogged;
+
 pub mod snapshot_vec;
-#[cfg(feature = "unstable")]
-pub mod cc;
 pub mod unify;
-pub mod bitvec;
--- a/third_party/rust/ena/src/snapshot_vec.rs
+++ b/third_party/rust/ena/src/snapshot_vec.rs
@@ -13,21 +13,24 @@
 //! to rollback to the start of the snapshot or commit those changes.
 //!
 //! This vector is intended to be used as part of an abstraction, not serve as a complete
 //! abstraction on its own. As such, while it will roll back most changes on its own, it also
 //! supports a `get_mut` operation that gives you an arbitrary mutable pointer into the vector. To
 //! ensure that any changes you make this with this pointer are rolled back, you must invoke
 //! `record` to record any changes you make and also supplying a delegate capable of reversing
 //! those changes.
+
 use self::UndoLog::*;
 
+use std::fmt;
 use std::mem;
 use std::ops;
 
+#[derive(Debug)]
 pub enum UndoLog<D: SnapshotVecDelegate> {
     /// Indicates where a snapshot started.
     OpenSnapshot,
 
     /// Indicates a snapshot that has been committed.
     CommittedSnapshot,
 
     /// New variable with given index was created.
@@ -40,16 +43,30 @@ pub enum UndoLog<D: SnapshotVecDelegate>
     Other(D::Undo),
 }
 
 pub struct SnapshotVec<D: SnapshotVecDelegate> {
     values: Vec<D::Value>,
     undo_log: Vec<UndoLog<D>>,
 }
 
+impl<D> fmt::Debug for SnapshotVec<D>
+    where D: SnapshotVecDelegate,
+          D: fmt::Debug,
+          D::Undo: fmt::Debug,
+          D::Value: fmt::Debug
+{
+    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+        fmt.debug_struct("SnapshotVec")
+            .field("values", &self.values)
+            .field("undo_log", &self.undo_log)
+            .finish()
+    }
+}
+
 // Snapshots are tokens that should be created/consumed linearly.
 pub struct Snapshot {
     // Length of the undo log at the time the snapshot was taken.
     length: usize,
 }
 
 pub trait SnapshotVecDelegate {
     type Value;
@@ -61,16 +78,23 @@ pub trait SnapshotVecDelegate {
 impl<D: SnapshotVecDelegate> SnapshotVec<D> {
     pub fn new() -> SnapshotVec<D> {
         SnapshotVec {
             values: Vec::new(),
             undo_log: Vec::new(),
         }
     }
 
+    pub fn with_capacity(c: usize) -> SnapshotVec<D> {
+        SnapshotVec {
+            values: Vec::with_capacity(c),
+            undo_log: Vec::new(),
+        }
+    }
+
     fn in_snapshot(&self) -> bool {
         !self.undo_log.is_empty()
     }
 
     pub fn record(&mut self, action: D::Undo) {
         if self.in_snapshot() {
             self.undo_log.push(Other(action));
         }
@@ -90,34 +114,56 @@ impl<D: SnapshotVecDelegate> SnapshotVec
 
         len
     }
 
     pub fn get(&self, index: usize) -> &D::Value {
         &self.values[index]
     }
 
+    /// Reserve space for new values, just like an ordinary vec.
+    pub fn reserve(&mut self, additional: usize) {
+        // This is not affected by snapshots or anything.
+        self.values.reserve(additional);
+    }
+
     /// Returns a mutable pointer into the vec; whatever changes you make here cannot be undone
     /// automatically, so you should be sure call `record()` with some sort of suitable undo
     /// action.
     pub fn get_mut(&mut self, index: usize) -> &mut D::Value {
         &mut self.values[index]
     }
 
     /// Updates the element at the given index. The old value will saved (and perhaps restored) if
     /// a snapshot is active.
     pub fn set(&mut self, index: usize, new_elem: D::Value) {
         let old_elem = mem::replace(&mut self.values[index], new_elem);
         if self.in_snapshot() {
             self.undo_log.push(SetElem(index, old_elem));
         }
     }
 
+    /// Updates all elements. Potentially more efficient -- but
+    /// otherwise equivalent to -- invoking `set` for each element.
+    pub fn set_all(&mut self, mut new_elems: impl FnMut(usize) -> D::Value) {
+        if !self.in_snapshot() {
+            for (slot, index) in self.values.iter_mut().zip(0..) {
+                *slot = new_elems(index);
+            }
+        } else {
+            for i in 0..self.values.len() {
+                self.set(i, new_elems(i));
+            }
+        }
+    }
+
     pub fn update<OP>(&mut self, index: usize, op: OP)
-        where OP: FnOnce(&mut D::Value), D::Value: Clone
+    where
+        OP: FnOnce(&mut D::Value),
+        D::Value: Clone,
     {
         if self.in_snapshot() {
             let old_elem = self.values[index].clone();
             self.undo_log.push(SetElem(index, old_elem));
         }
         op(&mut self.values[index]);
     }
 
@@ -219,33 +265,47 @@ impl<D: SnapshotVecDelegate> ops::Index<
 }
 
 impl<D: SnapshotVecDelegate> ops::IndexMut<usize> for SnapshotVec<D> {
     fn index_mut(&mut self, index: usize) -> &mut D::Value {
         self.get_mut(index)
     }
 }
 
+impl<D: SnapshotVecDelegate> Extend<D::Value> for SnapshotVec<D> {
+    fn extend<T>(&mut self, iterable: T)
+    where
+        T: IntoIterator<Item = D::Value>,
+    {
+        for item in iterable {
+            self.push(item);
+        }
+    }
+}
+
 impl<D: SnapshotVecDelegate> Clone for SnapshotVec<D>
-    where D::Value: Clone, D::Undo: Clone,
+where
+    D::Value: Clone,
+    D::Undo: Clone,
 {
     fn clone(&self) -> Self {
         SnapshotVec {
             values: self.values.clone(),
             undo_log: self.undo_log.clone(),
         }
     }
 }
 
 impl<D: SnapshotVecDelegate> Clone for UndoLog<D>
-    where D::Value: Clone, D::Undo: Clone,
+where
+    D::Value: Clone,
+    D::Undo: Clone,
 {
     fn clone(&self) -> Self {
         match *self {
             OpenSnapshot => OpenSnapshot,
             CommittedSnapshot => CommittedSnapshot,
             NewElem(i) => NewElem(i),
             SetElem(i, ref v) => SetElem(i, v.clone()),
             Other(ref u) => Other(u.clone()),
         }
     }
 }
-
new file mode 100644
--- /dev/null
+++ b/third_party/rust/ena/src/unify/backing_vec.rs
@@ -0,0 +1,205 @@
+#[cfg(feature = "persistent")]
+use dogged::DVec;
+use snapshot_vec as sv;
+use std::ops;
+use std::marker::PhantomData;
+
+use super::{VarValue, UnifyKey, UnifyValue};
+
+#[allow(dead_code)] // rustc BUG
+type Key<S> = <S as UnificationStore>::Key;
+
+/// Largely internal trait implemented by the unification table
+/// backing store types. The most common such type is `InPlace`,
+/// which indicates a standard, mutable unification table.
+pub trait UnificationStore: ops::Index<usize, Output = VarValue<Key<Self>>> + Clone {
+    type Key: UnifyKey<Value = Self::Value>;
+    type Value: UnifyValue;
+    type Snapshot;
+
+    fn new() -> Self;
+
+    fn start_snapshot(&mut self) -> Self::Snapshot;
+
+    fn rollback_to(&mut self, snapshot: Self::Snapshot);
+
+    fn commit(&mut self, snapshot: Self::Snapshot);
+
+    fn reset_unifications(
+        &mut self,
+        value: impl FnMut(u32) -> VarValue<Self::Key>,
+    );
+
+    fn len(&self) -> usize;
+
+    fn push(&mut self, value: VarValue<Self::Key>);
+
+    fn reserve(&mut self, num_new_values: usize);
+
+    fn update<F>(&mut self, index: usize, op: F)
+        where F: FnOnce(&mut VarValue<Self::Key>);
+
+    fn tag() -> &'static str {
+        Self::Key::tag()
+    }
+}
+
+/// Backing store for an in-place unification table.
+/// Not typically used directly.
+#[derive(Clone, Debug)]
+pub struct InPlace<K: UnifyKey> {
+    values: sv::SnapshotVec<Delegate<K>>
+}
+
+impl<K: UnifyKey> UnificationStore for InPlace<K> {
+    type Key = K;
+    type Value = K::Value;
+    type Snapshot = sv::Snapshot;
+
+    #[inline]
+    fn new() -> Self {
+        InPlace { values: sv::SnapshotVec::new() }
+    }
+
+    #[inline]
+    fn start_snapshot(&mut self) -> Self::Snapshot {
+        self.values.start_snapshot()
+    }
+
+    #[inline]
+    fn rollback_to(&mut self, snapshot: Self::Snapshot) {
+        self.values.rollback_to(snapshot);
+    }
+
+    #[inline]
+    fn commit(&mut self, snapshot: Self::Snapshot) {
+        self.values.commit(snapshot);
+    }
+
+    #[inline]
+    fn reset_unifications(
+        &mut self,
+        mut value: impl FnMut(u32) -> VarValue<Self::Key>,
+    ) {
+        self.values.set_all(|i| value(i as u32));
+    }
+
+    #[inline]
+    fn len(&self) -> usize {
+        self.values.len()
+    }
+
+    #[inline]
+    fn push(&mut self, value: VarValue<Self::Key>) {
+        self.values.push(value);
+    }
+
+    #[inline]
+    fn reserve(&mut self, num_new_values: usize) {
+        self.values.reserve(num_new_values);
+    }
+
+    #[inline]
+    fn update<F>(&mut self, index: usize, op: F)
+        where F: FnOnce(&mut VarValue<Self::Key>)
+    {
+        self.values.update(index, op)
+    }
+}
+
+impl<K> ops::Index<usize> for InPlace<K>
+    where K: UnifyKey
+{
+    type Output = VarValue<K>;
+    fn index(&self, index: usize) -> &VarValue<K> {
+        &self.values[index]
+    }
+}
+
+#[derive(Copy, Clone, Debug)]
+struct Delegate<K>(PhantomData<K>);
+
+impl<K: UnifyKey> sv::SnapshotVecDelegate for Delegate<K> {
+    type Value = VarValue<K>;
+    type Undo = ();
+
+    fn reverse(_: &mut Vec<VarValue<K>>, _: ()) {}
+}
+
+#[cfg(feature = "persistent")]
+#[derive(Clone, Debug)]
+pub struct Persistent<K: UnifyKey> {
+    values: DVec<VarValue<K>>
+}
+
+#[cfg(feature = "persistent")]
+impl<K: UnifyKey> UnificationStore for Persistent<K> {
+    type Key = K;
+    type Value = K::Value;
+    type Snapshot = Self;
+
+    #[inline]
+    fn new() -> Self {
+        Persistent { values: DVec::new() }
+    }
+
+    #[inline]
+    fn start_snapshot(&mut self) -> Self::Snapshot {
+        self.clone()
+    }
+
+    #[inline]
+    fn rollback_to(&mut self, snapshot: Self::Snapshot) {
+        *self = snapshot;
+    }
+
+    #[inline]
+    fn commit(&mut self, _snapshot: Self::Snapshot) {
+    }
+
+    #[inline]
+    fn reset_unifications(
+        &mut self,
+        mut value: impl FnMut(u32) -> VarValue<Self::Key>,
+    ) {
+        // Without extending dogged, there isn't obviously a more
+        // efficient way to do this. But it's pretty dumb. Maybe
+        // dogged needs a `map`.
+        for i in 0 .. self.values.len() {
+            self.values[i] = value(i as u32);
+        }
+    }
+
+    #[inline]
+    fn len(&self) -> usize {
+        self.values.len()
+    }
+
+    #[inline]
+    fn push(&mut self, value: VarValue<Self::Key>) {
+        self.values.push(value);
+    }
+
+    #[inline]
+    fn reserve(&mut self, _num_new_values: usize) {
+        // not obviously relevant to DVec.
+    }
+
+    #[inline]
+    fn update<F>(&mut self, index: usize, op: F)
+        where F: FnOnce(&mut VarValue<Self::Key>)
+    {
+        let p = &mut self.values[index];
+        op(p);
+    }
+}
+
+#[cfg(feature = "persistent")]
+impl<K> ops::Index<usize> for Persistent<K>
+    where K: UnifyKey
+{
+    type Output = VarValue<K>;
+    fn index(&self, index: usize) -> &VarValue<K> {
+        &self.values[index]
+    }
+}
--- a/third_party/rust/ena/src/unify/mod.rs
+++ b/third_party/rust/ena/src/unify/mod.rs
@@ -3,36 +3,64 @@
 // http://rust-lang.org/COPYRIGHT.
 //
 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
+//! Union-find implementation. The main type is `UnificationTable`.
+//!
+//! You can define your own type for the *keys* in the table, but you
+//! must implement `UnifyKey` for that type. The assumption is that
+//! keys will be newtyped integers, hence we require that they
+//! implement `Copy`.
+//!
+//! Keys can have values associated with them. The assumption is that
+//! these values are cheaply cloneable (ideally, `Copy`), and some of
+//! the interfaces are oriented around that assumption. If you just
+//! want the classical "union-find" algorithm where you group things
+//! into sets, use the `Value` type of `()`.
+//!
+//! When you have keys with non-trivial values, you must also define
+//! how those values can be merged. As part of doing this, you can
+//! define the "error" type to return on error; if errors are not
+//! possible, use `NoError` (an uninstantiable struct). Using this
+//! type also unlocks various more ergonomic methods (e.g., `union()`
+//! in place of `unify_var_var()`).
+//!
+//! The best way to see how it is used is to read the `tests.rs` file;
+//! search for e.g. `UnitKey`.
+
 use std::marker;
 use std::fmt::Debug;
-use std::marker::PhantomData;
-use snapshot_vec as sv;
+
+mod backing_vec;
+pub use self::backing_vec::{InPlace, UnificationStore};
+
+#[cfg(feature = "persistent")]
+pub use self::backing_vec::Persistent;
+
 
 #[cfg(test)]
 mod tests;
 
 /// This trait is implemented by any type that can serve as a type
 /// variable. We call such variables *unification keys*. For example,
 /// this trait is implemented by `IntVid`, which represents integral
 /// variables.
 ///
 /// Each key type has an associated value type `V`. For example, for
 /// `IntVid`, this is `Option<IntVarValue>`, representing some
 /// (possibly not yet known) sort of integer.
 ///
 /// Clients are expected to provide implementations of this trait; you
 /// can see some examples in the `test` module.
-pub trait UnifyKey : Copy + Clone + Debug + PartialEq {
+pub trait UnifyKey: Copy + Clone + Debug + PartialEq {
     type Value: UnifyValue;
 
     fn index(&self) -> u32;
 
     fn from_index(u: u32) -> Self;
 
     fn tag() -> &'static str;
 
@@ -45,415 +73,465 @@ pub trait UnifyKey : Copy + Clone + Debu
     /// root in an optimal way.
     ///
     /// NB. The only reason to implement this method is if you want to
     /// control what value is returned from `find()`. In general, it
     /// is better to let the unification table determine the root,
     /// since overriding the rank can cause execution time to increase
     /// dramatically.
     #[allow(unused_variables)]
-    fn order_roots(a: Self, a_value: &Self::Value,
-                   b: Self, b_value: &Self::Value)
-                   -> Option<(Self, Self)> {
+    fn order_roots(
+        a: Self,
+        a_value: &Self::Value,
+        b: Self,
+        b_value: &Self::Value,
+    ) -> Option<(Self, Self)> {
         None
     }
 }
 
+/// Trait implemented for **values** associated with a unification
+/// key. This trait defines how to merge the values from two keys that
+/// are unioned together. This merging can be fallible. If you attempt
+/// to union two keys whose values cannot be merged, then the error is
+/// propagated up and the two keys are not unioned.
+///
+/// This crate provides implementations of `UnifyValue` for `()`
+/// (which is infallible) and `Option<T>` (where `T: UnifyValue`). The
+/// option implementation merges two sum-values using the `UnifyValue`
+/// implementation of `T`.
+///
+/// See also `EqUnifyValue`, which is a convenience trait for cases
+/// where the "merge" operation succeeds only if the two values are
+/// equal.
 pub trait UnifyValue: Clone + Debug {
+    /// Defines the type to return when merging of two values fails.
+    /// If merging is infallible, use the special struct `NoError`
+    /// found in this crate, which unlocks various more convenient
+    /// methods on the unification table.
+    type Error;
+
     /// Given two values, produce a new value that combines them.
     /// If that is not possible, produce an error.
-    fn unify_values(value1: &Self, value2: &Self) -> Result<Self, (Self, Self)>;
+    fn unify_values(value1: &Self, value2: &Self) -> Result<Self, Self::Error>;
 }
 
-/// Marker trait which indicates that `UnifyValues::unify_values` will never return `Err`.
-pub trait InfallibleUnifyValue: UnifyValue {
+/// A convenient helper for unification values which must be equal or
+/// else an error occurs. For example, if you are unifying types in a
+/// simple functional language, this may be appropriate, since (e.g.)
+/// you can't unify a type variable bound to `int` with one bound to
+/// `float` (but you can unify two type variables both bound to
+/// `int`).
+///
+/// Any type which implements `EqUnifyValue` automatially implements
+/// `UnifyValue`; if the two values are equal, merging is permitted.
+/// Otherwise, the error `(v1, v2)` is returned, where `v1` and `v2`
+/// are the two unequal values.
+pub trait EqUnifyValue: Eq + Clone + Debug {}
+
+impl<T: EqUnifyValue> UnifyValue for T {
+    type Error = (T, T);
+
+    fn unify_values(value1: &Self, value2: &Self) -> Result<Self, Self::Error> {
+        if value1 == value2 {
+            Ok(value1.clone())
+        } else {
+            Err((value1.clone(), value2.clone()))
+        }
+    }
+}
+
+/// A struct which can never be instantiated. Used
+/// for the error type for infallible cases.
+#[derive(Debug)]
+pub struct NoError {
+    _dummy: (),
 }
 
 /// Value of a unification key. We implement Tarjan's union-find
 /// algorithm: when two keys are unified, one of them is converted
 /// into a "redirect" pointing at the other. These redirects form a
 /// DAG: the roots of the DAG (nodes that are not redirected) are each
 /// associate