Backed out changeset 4d220064bcf8 (bug 1529681) for causing Windows 2012 AArch64 build bustages. CLOSED TREE
author: Cosmin Sabou <csabou@mozilla.com>
date: Mon, 25 Feb 2019 20:31:13 +0200
changeset: 518828 d59858e71dee882e1975b947037a77aca05d239b
parent: 518827 8e4102596afd16d32bfef539acb909443d29d689
child: 518829 73f63e74eaa1b62e0d702c21c1284757e51d6c08
push id: 10862
push user: ffxbld-merge
push date: Mon, 11 Mar 2019 13:01:11 +0000
treeherder: mozilla-beta@a2e7f5c935da
bugs: 1529681
milestone: 67.0a1
backs out: 4d220064bcf8acbb47a26c9d622c45d3df67231d
files:
Cargo.lock
js/rust/Cargo.toml
js/src/wasm/cranelift/Cargo.toml
servo/components/style/Cargo.toml
third_party/rust/bindgen/.cargo-checksum.json
third_party/rust/bindgen/Cargo.toml
third_party/rust/bindgen/README.md
third_party/rust/bindgen/build.rs
third_party/rust/bindgen/src/callbacks.rs
third_party/rust/bindgen/src/clang.rs
third_party/rust/bindgen/src/codegen/helpers.rs
third_party/rust/bindgen/src/codegen/impl_debug.rs
third_party/rust/bindgen/src/codegen/impl_partialeq.rs
third_party/rust/bindgen/src/codegen/mod.rs
third_party/rust/bindgen/src/codegen/struct_layout.rs
third_party/rust/bindgen/src/extra_assertions.rs
third_party/rust/bindgen/src/features.rs
third_party/rust/bindgen/src/ir/analysis/derive_copy.rs
third_party/rust/bindgen/src/ir/analysis/derive_debug.rs
third_party/rust/bindgen/src/ir/analysis/derive_default.rs
third_party/rust/bindgen/src/ir/analysis/derive_hash.rs
third_party/rust/bindgen/src/ir/analysis/derive_partialeq_or_partialord.rs
third_party/rust/bindgen/src/ir/analysis/has_destructor.rs
third_party/rust/bindgen/src/ir/analysis/has_float.rs
third_party/rust/bindgen/src/ir/analysis/has_type_param_in_array.rs
third_party/rust/bindgen/src/ir/analysis/has_vtable.rs
third_party/rust/bindgen/src/ir/analysis/mod.rs
third_party/rust/bindgen/src/ir/analysis/sizedness.rs
third_party/rust/bindgen/src/ir/analysis/template_params.rs
third_party/rust/bindgen/src/ir/annotations.rs
third_party/rust/bindgen/src/ir/comment.rs
third_party/rust/bindgen/src/ir/comp.rs
third_party/rust/bindgen/src/ir/context.rs
third_party/rust/bindgen/src/ir/dot.rs
third_party/rust/bindgen/src/ir/function.rs
third_party/rust/bindgen/src/ir/item.rs
third_party/rust/bindgen/src/ir/objc.rs
third_party/rust/bindgen/src/ir/var.rs
third_party/rust/bindgen/src/lib.rs
third_party/rust/bindgen/src/options.rs
third_party/rust/bindgen/src/regex_set.rs
third_party/rust/clang-sys/.cargo-checksum.json
third_party/rust/clang-sys/CHANGELOG.md
third_party/rust/clang-sys/Cargo.toml
third_party/rust/clang-sys/build/dynamic.rs
third_party/rust/clang-sys/ci/before_install.sh
third_party/rust/clang-sys/ci/install.bat
third_party/rust/clang-sys/src/link.rs
third_party/rust/hashbrown/.cargo-checksum.json
third_party/rust/hashbrown/CHANGELOG.md
third_party/rust/hashbrown/Cargo.toml
third_party/rust/hashbrown/LICENSE-APACHE
third_party/rust/hashbrown/LICENSE-MIT
third_party/rust/hashbrown/README.md
third_party/rust/hashbrown/benches/bench.rs
third_party/rust/hashbrown/bors.toml
third_party/rust/hashbrown/src/external_trait_impls/mod.rs
third_party/rust/hashbrown/src/external_trait_impls/rayon/helpers.rs
third_party/rust/hashbrown/src/external_trait_impls/rayon/map.rs
third_party/rust/hashbrown/src/external_trait_impls/rayon/mod.rs
third_party/rust/hashbrown/src/external_trait_impls/rayon/raw.rs
third_party/rust/hashbrown/src/external_trait_impls/rayon/set.rs
third_party/rust/hashbrown/src/external_trait_impls/serde.rs
third_party/rust/hashbrown/src/fx.rs
third_party/rust/hashbrown/src/lib.rs
third_party/rust/hashbrown/src/map.rs
third_party/rust/hashbrown/src/raw/bitmask.rs
third_party/rust/hashbrown/src/raw/generic.rs
third_party/rust/hashbrown/src/raw/mod.rs
third_party/rust/hashbrown/src/raw/sse2.rs
third_party/rust/hashbrown/src/set.rs
third_party/rust/hashbrown/tests/rayon.rs
third_party/rust/hashbrown/tests/serde.rs
third_party/rust/hashbrown/tests/set.rs
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1,8 +1,10 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
 # This file is automatically @generated by Cargo.
 # It is not intended for manual editing.
 [[package]]
 name = "Inflector"
 version = "0.11.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -157,17 +159,17 @@ dependencies = [
  "cc 1.0.23 (git+https://github.com/glandium/cc-rs?branch=1.0.23-clang-cl-aarch64)",
  "libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "baldrdash"
 version = "0.1.0"
 dependencies = [
- "bindgen 0.47.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "bindgen 0.43.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "cranelift-codegen 0.28.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "cranelift-wasm 0.28.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "env_logger 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "target-lexicon 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
@@ -210,29 +212,28 @@ version = "1.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "byteorder 1.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
  "serde 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "bindgen"
-version = "0.47.3"
+version = "0.43.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
  "cexpr 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "clang-sys 0.26.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "clang-sys 0.26.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "clap 2.31.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "hashbrown 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)",
  "lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "peeking_take_while 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "regex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "which 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "binjs_meta"
 version = "0.4.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -385,17 +386,17 @@ source = "registry+https://github.com/ru
 dependencies = [
  "num-integer 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)",
  "num-traits 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "time 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "clang-sys"
-version = "0.26.4"
+version = "0.26.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
  "libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
  "libloading 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
@@ -1196,25 +1197,16 @@ dependencies = [
  "indexmap 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "slab 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "string 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "tokio-io 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
-name = "hashbrown"
-version = "0.1.8"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "byteorder 1.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "scopeguard 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
 name = "hashglobe"
 version = "0.1.0"
 dependencies = [
  "libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "http"
@@ -1305,17 +1297,17 @@ dependencies = [
 name = "itoa"
 version = "0.4.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
 name = "js"
 version = "0.1.4"
 dependencies = [
- "bindgen 0.47.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "bindgen 0.43.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "cmake 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)",
  "env_logger 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
  "lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
  "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "mozjs_sys 0.0.0",
  "num-traits 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -2511,17 +2503,17 @@ source = "registry+https://github.com/ru
 
 [[package]]
 name = "style"
 version = "0.0.1"
 dependencies = [
  "app_units 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "arrayvec 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "atomic_refcell 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "bindgen 0.47.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "bindgen 0.43.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
  "byteorder 1.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
  "cssparser 0.25.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "derive_more 0.13.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "euclid 0.19.5 (registry+https://github.com/rust-lang/crates.io-index)",
  "fallible 0.0.1",
  "fxhash 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "hashglobe 0.1.0",
@@ -3306,17 +3298,17 @@ dependencies = [
 "checksum ascii-canvas 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b385d69402821a1c254533a011a312531cbcc0e3e24f19bbb4747a5a2daf37e2"
 "checksum atomic_refcell 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fb2dcb6e6d35f20276943cc04bb98e538b348d525a04ac79c10021561d202f21"
 "checksum atty 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "9a7d5b8723950951411ee34d271d99dddcc2035a16ab25310ea2c8cfd4369652"
 "checksum backtrace 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "89a47830402e9981c5c41223151efcced65a0510c13097c769cede7efb34782a"
 "checksum backtrace-sys 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)" = "c66d56ac8dabd07f6aacdaf633f4b8262f5b3601a810a0dcddffd5c22c69daa0"
 "checksum base64 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "621fc7ecb8008f86d7fb9b95356cd692ce9514b80a86d85b397f32a22da7b9e2"
 "checksum binary-space-partition 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "88ceb0d16c4fd0e42876e298d7d3ce3780dd9ebdcbe4199816a32c77e08597ff"
 "checksum bincode 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bda13183df33055cbb84b847becce220d392df502ebe7a4a78d7021771ed94d0"
-"checksum bindgen 0.47.3 (registry+https://github.com/rust-lang/crates.io-index)" = "df683a55b54b41d5ea8ebfaebb5aa7e6b84e3f3006a78f010dadc9ca88469260"
+"checksum bindgen 0.43.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6d52d263eacd15d26cbcf215d254b410bd58212aaa2d3c453a04b2d3b3adcf41"
 "checksum binjs_meta 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "430239e4551e42b80fa5d92322ac80ea38c9dda56e5d5582e057e2288352b71a"
 "checksum bit-set 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6f1efcc46c18245a69c38fcc5cc650f16d3a59d034f3106e9ed63748f695730a"
 "checksum bit-vec 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4440d5cb623bb7390ae27fec0bb6c61111969860f8e3ae198bfa0663645e67cf"
 "checksum bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "228047a76f468627ca71776ecdebd732a3423081fcf5125585bcd7c49886ce12"
 "checksum bitreader 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "80b13e2ab064ff3aa0bdbf1eff533f9822dc37899821f5f98c67f263eab51707"
 "checksum blake2-rfc 0.2.18 (registry+https://github.com/rust-lang/crates.io-index)" = "5d6d530bdd2d52966a6d03b7a964add7ae1a288d25214066fd4b600f0f796400"
 "checksum block-buffer 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a076c298b9ecdb530ed9d967e74a6027d6a7478924520acddcddc24c1c8ab3ab"
 "checksum block-buffer 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "49665c62e0e700857531fa5d3763e91b539ff1abeebd56808d378b495870d60d"
@@ -3329,17 +3321,17 @@ dependencies = [
 "checksum bytes 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)" = "e178b8e0e239e844b083d5a0d4a156b2654e67f9f80144d48398fcd736a24fb8"
 "checksum bzip2 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c3eafc42c44e0d827de6b1c131175098fe7fb53b8ce8a47e65cb3ea94688be24"
 "checksum bzip2-sys 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "2c5162604199bbb17690ede847eaa6120a3f33d5ab4dcc8e7c25b16d849ae79b"
 "checksum cast 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "926013f2860c46252efceabb19f4a6b308197505082c609025aa6706c011d427"
 "checksum cc 1.0.23 (git+https://github.com/glandium/cc-rs?branch=1.0.23-clang-cl-aarch64)" = "<none>"
 "checksum cexpr 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "8fc0086be9ca82f7fc89fc873435531cb898b86e850005850de1f820e2db6e9b"
 "checksum cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d4c819a1287eb618df47cc647173c5c4c66ba19d888a6e50d605672aed3140de"
 "checksum chrono 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "45912881121cb26fad7c38c17ba7daa18764771836b34fab7d3fbd93ed633878"
-"checksum clang-sys 0.26.4 (registry+https://github.com/rust-lang/crates.io-index)" = "6ef0c1bcf2e99c649104bd7a7012d8f8802684400e03db0ec0af48583c6fa0e4"
+"checksum clang-sys 0.26.1 (registry+https://github.com/rust-lang/crates.io-index)" = "481e42017c1416b1c0856ece45658ecbb7c93d8a93455f7e5fa77f3b35455557"
 "checksum clap 2.31.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f0f16b89cbb9ee36d87483dc939fe9f1e13c05898d56d7b230a0d4dff033a536"
 "checksum cmake 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)" = "56d741ea7a69e577f6d06b36b7dff4738f680593dc27a701ffa8506b73ce28bb"
 "checksum constant_time_eq 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "8ff012e225ce166d4422e0e78419d901719760f62ae2b7969ca6b564d1b54a9e"
 "checksum cookie 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1465f8134efa296b4c19db34d909637cb2bf0f7aaf21299e23e18fa29ac557cf"
 "checksum core-foundation 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)" = "4e2640d6d0bf22e82bed1b73c6aef8d5dd31e5abe6666c57e6d45e2649f4f887"
 "checksum core-foundation-sys 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e7ca8a5221364ef15ce201e8ed2f609fc312682a8f4e0e3d4aa5879764e0fa3b"
 "checksum core-graphics 0.17.1 (registry+https://github.com/rust-lang/crates.io-index)" = "62ceafe1622ffc9a332199096841d0ff9912ec8cf8f9cde01e254a7d5217cd10"
 "checksum core-text 13.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f3f46450d6f2397261af420b4ccce23807add2e45fa206410a03d66fb7f050ae"
@@ -3405,17 +3397,16 @@ dependencies = [
 "checksum gcc 0.3.54 (registry+https://github.com/rust-lang/crates.io-index)" = "5e33ec290da0d127825013597dbdfc28bee4964690c7ce1166cbc2a7bd08b1bb"
 "checksum generic-array 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3c0f28c2f5bfb5960175af447a2da7c18900693738343dc896ffbcabd9839592"
 "checksum generic-array 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ef25c5683767570c2bbd7deba372926a55eaae9982d7726ee2a1050239d45b9d"
 "checksum gl_generator 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a0ffaf173cf76c73a73e080366bf556b4776ece104b06961766ff11449f38604"
 "checksum gleam 0.6.9 (registry+https://github.com/rust-lang/crates.io-index)" = "369e326d40628f4013f5754fbcf4b01eb999b9d0f13795a1b9d20f3288ab799f"
 "checksum glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "8be18de09a56b60ed0edf84bc9df007e30040691af7acd1c41874faac5895bfb"
 "checksum goblin 0.0.17 (registry+https://github.com/rust-lang/crates.io-index)" = "5911d7df7b8f65ab676c5327b50acea29d3c6a1a4ad05e444cf5dce321b26db2"
 "checksum h2 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "a27e7ed946e8335bdf9a191bc1b9b14a03ba822d013d2f58437f4fabcbd7fc2c"
-"checksum hashbrown 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "3bae29b6653b3412c2e71e9d486db9f9df5d701941d86683005efb9f2d28e3da"
 "checksum http 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "dca621d0fa606a5ff2850b6e337b57ad6137ee4d67e940449643ff45af6874c6"
 "checksum httparse 1.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "af2f2dd97457e8fb1ae7c5a420db346af389926e36f43768b96f101546b04a07"
 "checksum humantime 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0484fda3e7007f2a4a0d9c3a703ca38c71c54c55602ce4660c419fd32e188c9e"
 "checksum hyper 0.12.7 (registry+https://github.com/rust-lang/crates.io-index)" = "c087746de95e20e4dabe86606c3a019964a8fde2d5f386152939063c116c5971"
 "checksum ident_case 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3c9826188e666f2ed92071d2dadef6edc430b11b158b5b2b3f4babbcc891eaaa"
 "checksum idna 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "014b298351066f1512874135335d62a789ffe78a9974f94b43ed5621951eaf7d"
 "checksum indexmap 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "08173ba1e906efb6538785a8844dd496f5d34f0a2d88038e95195172fc667220"
 "checksum iovec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "dbe6e417e7d0975db6512b90796e8ce223145ac4e33c377e4a42882a0e88bb08"
--- a/js/rust/Cargo.toml
+++ b/js/rust/Cargo.toml
@@ -2,17 +2,17 @@
 name = "js"
 version = "0.1.4"
 authors = ["The Servo Project Developers"]
 build = "build.rs"
 license = "MPL-2.0"
 
 [build-dependencies]
 env_logger = {version = "0.5", default-features = false} # disable `regex` to reduce code size
-bindgen = {version = "0.47", default-features = false} # disable `logging` to reduce code size
+bindgen = {version = "0.43", default-features = false} # disable `logging` to reduce code size
 cmake = "0.1"
 glob = "0.2.11"
 
 [[test]]
 name = "bigint"
 required-features = ["bigint"]
 [[test]]
 name = "callback"
--- a/js/src/wasm/cranelift/Cargo.toml
+++ b/js/src/wasm/cranelift/Cargo.toml
@@ -10,13 +10,13 @@ name = "baldrdash"
 [dependencies]
 cranelift-codegen = "0.28.0"
 cranelift-wasm = "0.28.0"
 target-lexicon = "0.2.0"
 log = { version = "0.4.6", default-features = false, features = ["release_max_level_info"] }
 env_logger = "0.5.6"
 
 [build-dependencies]
-bindgen = {version = "0.47", default-features = false} # disable `logging` to reduce code size
+bindgen = {version = "0.43", default-features = false} # disable `logging` to reduce code size
 
 # Uncomment this to enable perf support in release mode.
 #[profile.release]
 #debug = true
--- a/servo/components/style/Cargo.toml
+++ b/servo/components/style/Cargo.toml
@@ -73,12 +73,12 @@ time = "0.1"
 uluru = "0.3"
 unicode-bidi = "0.3"
 unicode-segmentation = "1.0"
 void = "1.0.2"
 
 [build-dependencies]
 lazy_static = "1"
 log = "0.4"
-bindgen = {version = "0.47", optional = true, default-features = false}
+bindgen = { version = "0.43", optional = true, default-features = false }
 regex = {version = "1.0", optional = true}
 walkdir = "2.1.4"
 toml = {version = "0.4.5", optional = true, default-features = false}
--- a/third_party/rust/bindgen/.cargo-checksum.json
+++ b/third_party/rust/bindgen/.cargo-checksum.json
@@ -1,1 +1,1 @@
-{"files":{"Cargo.toml":"8e23b856806a715d854751af498327a3e174420845852e634bf1fa4f81f9a299","LICENSE":"c23953d9deb0a3312dbeaf6c128a657f3591acee45067612fa68405eaa4525db","README.md":"0b50adc1da2d15211d61cab2ff8b9f1e8eccc37ae25695ba7a0c21b77389aa4c","build.rs":"a9f6915c54d75f357ce32f96327bf4df53dc81a505b70831978f9dac6f43841d","src/callbacks.rs":"b24d7982332c6a35928f134184ddf4072fe4545a45546b97b9b0e0c1fbb77c08","src/clang.rs":"4afb2865ac815c72b613a5ca4cb1289218d4e08abc6345b3e250023d7d23c0fb","src/codegen/bitfield_unit.rs":"88b0604322dc449fc9284850eadc1f5d14b42fa747d4258bae0b6b9535f52dfd","src/codegen/bitfield_unit_tests.rs":"2073ac6a36e0bc9afaef5b1207966817c8fb7a1a9f6368c3b1b8f79822efbfba","src/codegen/error.rs":"2613af1d833377fd4a70719f4a09951d9d45dc9227827b9a2a938a1bcaaea2dd","src/codegen/helpers.rs":"58cd5ad72425f766eb0560528a3953b05c58bc35b6637a331cd7403fdc0b3116","src/codegen/impl_debug.rs":"19a8f75a1513efb523f33fd02246976e81842d38f249261b0d1a671365f78caf","src/codegen/impl_partialeq.rs":"d40d9ee2849c4d3d557b033c4d3af5e6de4a44347f67c0f016198086338811af","src/codegen/mod.rs":"1a3f1fd56572e1f7257995c0f3fc92c6b0565a7feb35df509f0f9c92349cf64c","src/codegen/struct_layout.rs":"43132726f981b2d90f957fa6a0909fe48c07ca3e19b83886f24c876a33c61848","src/extra_assertions.rs":"494534bd4f18b80d89b180c8a93733e6617edcf7deac413e9a73fd6e7bc9ced7","src/features.rs":"288202e0d8a330a0fc7be624a0bce586d00f00733ccff608e61a71a08f37a2ae","src/ir/analysis/derive_copy.rs":"a4c0db650162ac9353051dd6718edab9ed8f67348c8c67e3202cc3cf05b84cac","src/ir/analysis/derive_debug.rs":"395912b60c701e5214425ee9c371da1af51c4c7934cb2d795a262752553b5caa","src/ir/analysis/derive_default.rs":"c2e44d72b53eb61376ba281696f2d12e9dea30dee4c5655c538b20f3927d1ee8","src/ir/analysis/derive_hash.rs":"08972280c609a5018b2fc318f71b0542b2e1a0e05d4978add3edb25852744705","src/ir/analysis/derive_partialeq_or_partialord.rs":"cace3eb52fabb2c6441e2b91697caa785b40360e0719417138b46f5005900941","src/ir/analysis/has_destructor.rs":"63644f479738df35e531d3324ff892614083c3656e0747aa34d9f20dada878ec","src/ir/analysis/has_float.rs":"76162a309e4285a806755a08c687a3e7bc894a100a63da4e88584035e215b11d","src/ir/analysis/has_type_param_in_array.rs":"fdbc0af28a144c88ea2de83e6e6da5e1ffb40e3dd63fd7a708095d085bb06f94","src/ir/analysis/has_vtable.rs":"5788372d27bdbaaf0454bc17be31a5480918bc41a8a1c4832e8c61185c07f9cd","src/ir/analysis/mod.rs":"532213b1ac2f995b41e44e9c2dbd607eeacebfdf2543d45251df3b4b7c32477f","src/ir/analysis/sizedness.rs":"8dc10043d872e68e660ef96edca4d9733f95be45cdad4893462fa929b335014f","src/ir/analysis/template_params.rs":"6312c008bbc80f50e72a766756c8daddea0b6eeb31ec924b83a231df931e170e","src/ir/annotations.rs":"39a5ab19f4d5dfa617577e4a0d0d2b67b5369d480c7cca4b14d172458c9843f0","src/ir/comment.rs":"c48abe01c5af0f09f583a89f1394bc6c161b40f6c8f0f600bbfe3c907b47969b","src/ir/comp.rs":"67bb94ab81c731739295a7ae91f29326c909c4635abc41c09c714ebfca887494","src/ir/context.rs":"6c382f28d4d5e5019fc328c289a66d36c739833419e6a6a7112c35b293bb6ee7","src/ir/derive.rs":"19601e76528d6cce8e04a66572e75da4e9efdecc4d60a983fc68c11958e9f3ec","src/ir/dot.rs":"95ed2968fc3239d87892e9f1edf1ed6dd18630d949564961765967ea1d16960c","src/ir/enum_ty.rs":"9cc242d6b3c1866665594e8b306860ee39c0ea42d22198d46b7fded473fe3e84","src/ir/function.rs":"b0b7355b5ad5fb6e5bf783ae0984ddbcf6d9c16bbbb63334f21e3649957d60b9","src/ir/int.rs":"07e0c7dbd2dd977177fae3acd2a14adf271c6cf9ff4b57cddc11d50734fd4801","src/ir/item.rs":"4dd447d43ff09bd424e518395340ebc2cb3b1819cef318124a359ca2ce59481a","src/ir/item_kind.rs":"dbeae8c4
fd0e5c9485d325aea040e056a1f2cd6d43fc927dee8fe1c0c59a7197","src/ir/layout.rs":"e722edffcd34914b534813da5af6fe8ba69927a54e0ec88ae1733f5ddf0e50b1","src/ir/mod.rs":"2eae90f207fad2e45957ec9287064992a419e3fc916aba84faff2ea25cbeb5ee","src/ir/module.rs":"c4d90bf38fe3672e01923734ccbdb7951ea929949d5f413a9c2aee12395a5094","src/ir/objc.rs":"758aa955a0c5d6ad82606c88a1f4cd1d93e666b71e82d43b18b1aaae96cf888a","src/ir/template.rs":"c0f8570b927dfd6a421fc4ce3094ec837a3ed936445225dbfac961e8e0842ae5","src/ir/traversal.rs":"ea751379a5aec02f93f8d2c61e18232776b1f000dbeae64b9a7195ba21a19dd6","src/ir/ty.rs":"1068a7e4916d69b5034a76c47b67e6257db906cc16dad6d8af4bdb39ad52cd84","src/ir/var.rs":"8bdafb6d02f2c55ae11c28d88b19fb7a65ba8466da12ff039ae4c16c790b291e","src/lib.rs":"11afe51d093bce1cedfcdfc45d150816f4d6379f30dafe190104b9a2f106f9cf","src/log_stubs.rs":"6dfdd908b7c6453da416cf232893768f9480e551ca4add0858ef88bf71ee6ceb","src/main.rs":"e519053bcdde6bc88f60f955246a02d53b3db1cc5ccd1612e6675b790b7460b0","src/options.rs":"4aeef74c25fbb1ed0f12b81f3e340078359168d396755458fd4a4bdb0a2b22c0","src/parse.rs":"be7d13cc84fae79ec7b3aa9e77063fa475a48d74a854423e2c72d75006a25202","src/regex_set.rs":"5cb72fc3714c0d79e9e942d003349c0775fafd7cd0c9603c65f5261883bbf9cf","src/time.rs":"3b763e6fee51d0eb01228dfe28bc28a9f692aff73b2a7b90a030902e0238fca6"},"package":"df683a55b54b41d5ea8ebfaebb5aa7e6b84e3f3006a78f010dadc9ca88469260"}
\ No newline at end of file
+{"files":{"Cargo.toml":"e0559de35f6564bbfc4779f43d9104d604befb7ff7de5baf591379c285544d3c","LICENSE":"c23953d9deb0a3312dbeaf6c128a657f3591acee45067612fa68405eaa4525db","README.md":"630d1a1d123c131bad0fec23173e263ba8ecc064b5cd8446d4cab7ffd197db45","build.rs":"032a1c51963894a421b0535f9227796d88768ac5f665a81d2edced69dc6d106a","src/callbacks.rs":"936198c967ca4205ab043ce2264d8188d0716ad7c294cebdaacde2b486224450","src/clang.rs":"b25f8d455e3cd89d416a4c5e55d828db9691f4def82109c1dd12457e5ca2c13c","src/codegen/bitfield_unit.rs":"88b0604322dc449fc9284850eadc1f5d14b42fa747d4258bae0b6b9535f52dfd","src/codegen/bitfield_unit_tests.rs":"2073ac6a36e0bc9afaef5b1207966817c8fb7a1a9f6368c3b1b8f79822efbfba","src/codegen/error.rs":"2613af1d833377fd4a70719f4a09951d9d45dc9227827b9a2a938a1bcaaea2dd","src/codegen/helpers.rs":"2c890c96a1a6b72ada63593cb544f005476fb176d7181553713e83710dc8eefd","src/codegen/impl_debug.rs":"43b977b8d16073d021977ce57f3c22eb5b1083493905ae19a171e2271939f574","src/codegen/impl_partialeq.rs":"671dd0eac712bf8281e11a7b3e545a443c6e9e2c8ee7fbebeb03c76667ca206b","src/codegen/mod.rs":"57a6c0dc52af70b08f54e744b629df67c5528a8d63ccb9485cc1af91d02dadc0","src/codegen/struct_layout.rs":"b77f03dfbbed408a5fa6e693560aea8dc902fe7d10d847ce39122e6961078515","src/extra_assertions.rs":"449549c4a7a50c3f0b06332452b2fb6c9b23f31ca8e5e1656fe6c7f21e8ef7fa","src/features.rs":"be74e03d4f00582fa8970439da52057b04204b450193833953ed84772933bd46","src/ir/analysis/derive_copy.rs":"b7e12cdc74937909529e4cefe9f43b3ee0a5590f07392b73481811ac9fddedd2","src/ir/analysis/derive_debug.rs":"cf9346ecb3afd4e94094a2723e4d76c76c55f42a13dc1d5ec6564d25d3a46cf4","src/ir/analysis/derive_default.rs":"87332eccd5accbfbf7fad2e1511be4f8945b0538ae3e0628c8af17d16068691f","src/ir/analysis/derive_hash.rs":"521ea1dbe221755042a95e8e8dcb594e427e54be2eb869c61ebbdb27fec5aa77","src/ir/analysis/derive_partialeq_or_partialord.rs":"3c5d051f69401fe50b56143143eca3e71674d6a87d0013c31745b75d0f3d584f","src/ir/analysis/has_destructor.rs":"d9aaaceba580b48eb0df4e5537b34b417c51ccdfeb8f6b72484f3bf4992317fe","src/ir/analysis/has_float.rs":"5f7ee1b834978817041d884fee4648b31ecb66c62aafb8e7a9a17e5ac434bfe5","src/ir/analysis/has_type_param_in_array.rs":"abf74468b923c015aaf67599e50857267516010472819a79ca494fe02dd6ac93","src/ir/analysis/has_vtable.rs":"37765e954ef792e369a58ccfe1d827a00fe9bce680466da1d6523671b94b6c92","src/ir/analysis/mod.rs":"ea5ace45c77e855674bb565ba0fef556f60e3293b0ddcf11d3a5a6ec15ab0648","src/ir/analysis/sizedness.rs":"4f788bff0ceb0e008d70145510340ab636e5203787316f0be41f789ce9b2f73d","src/ir/analysis/template_params.rs":"6554dd1240142ec0e7299e678b696725f5cba99243d1c3d1cbf58d4764082fd6","src/ir/annotations.rs":"ef106afcbe6084c18bd13a37ee3c1cdc9596bfb055db8c773d81f8f15fec3208","src/ir/comment.rs":"1b068d5834da7360aec4cb80d9c55219cedbb2ae8b9727a39ec7d156c88fe0b5","src/ir/comp.rs":"7b22f3ff19ca45a6fbfe7ea015109d43f4ddf65b33b47b1c37829fcb87cdff9b","src/ir/context.rs":"925ba08ad614c40b0578be524206a662aec53f959d47d3b2cc84389718fc485b","src/ir/derive.rs":"19601e76528d6cce8e04a66572e75da4e9efdecc4d60a983fc68c11958e9f3ec","src/ir/dot.rs":"d01f1621ab67e368d854a82bd6bb0b8dd52f3c2c733de8eaf81aece9543818cb","src/ir/enum_ty.rs":"9cc242d6b3c1866665594e8b306860ee39c0ea42d22198d46b7fded473fe3e84","src/ir/function.rs":"c497a6e07e95dc65be73f12396e344929973243d5cf7808a97c5309b0b090ef8","src/ir/int.rs":"07e0c7dbd2dd977177fae3acd2a14adf271c6cf9ff4b57cddc11d50734fd4801","src/ir/item.rs":"d626a0054df8254a504b44019dc531a933ec1bd3961b1465a602f0d767e0ad4e","src/ir/item_kind.rs":"dbeae8c4
fd0e5c9485d325aea040e056a1f2cd6d43fc927dee8fe1c0c59a7197","src/ir/layout.rs":"e722edffcd34914b534813da5af6fe8ba69927a54e0ec88ae1733f5ddf0e50b1","src/ir/mod.rs":"2eae90f207fad2e45957ec9287064992a419e3fc916aba84faff2ea25cbeb5ee","src/ir/module.rs":"c4d90bf38fe3672e01923734ccbdb7951ea929949d5f413a9c2aee12395a5094","src/ir/objc.rs":"828a890acdc8b10c44e69e2ed4a4f5d8c0e734606d3a8cc71658dcf43a49acf4","src/ir/template.rs":"c0f8570b927dfd6a421fc4ce3094ec837a3ed936445225dbfac961e8e0842ae5","src/ir/traversal.rs":"ea751379a5aec02f93f8d2c61e18232776b1f000dbeae64b9a7195ba21a19dd6","src/ir/ty.rs":"1068a7e4916d69b5034a76c47b67e6257db906cc16dad6d8af4bdb39ad52cd84","src/ir/var.rs":"5c0caaa505faef18e334c6198b3634b6f390d14cf9da629226cd78617fd3594b","src/lib.rs":"994d8495557cadc8c4a748e2643b35c6850f2c7130e35c8abf4ae02b83cfeff7","src/log_stubs.rs":"6dfdd908b7c6453da416cf232893768f9480e551ca4add0858ef88bf71ee6ceb","src/main.rs":"e519053bcdde6bc88f60f955246a02d53b3db1cc5ccd1612e6675b790b7460b0","src/options.rs":"a4b4028542d6292363fc97621c704bf1b4e7eb149e9cb86b52e30aad0be13b99","src/parse.rs":"be7d13cc84fae79ec7b3aa9e77063fa475a48d74a854423e2c72d75006a25202","src/regex_set.rs":"c417889726b5e3325f9375551bf23fd54c9b40020151c364741ea6126ede386b","src/time.rs":"3b763e6fee51d0eb01228dfe28bc28a9f692aff73b2a7b90a030902e0238fca6"},"package":"6d52d263eacd15d26cbcf215d254b410bd58212aaa2d3c453a04b2d3b3adcf41"}
\ No newline at end of file
--- a/third_party/rust/bindgen/Cargo.toml
+++ b/third_party/rust/bindgen/Cargo.toml
@@ -7,28 +7,28 @@
 #
 # If you believe there's an error in this file please file an
 # issue against the rust-lang/cargo repository. If you're
 # editing this file be aware that the upstream Cargo.toml
 # will likely look very different (and much more reasonable)
 
 [package]
 name = "bindgen"
-version = "0.47.3"
+version = "0.43.2"
 authors = ["Jyun-Yan You <jyyou.tw@gmail.com>", "Emilio Cobos Álvarez <emilio@crisal.io>", "Nick Fitzgerald <fitzgen@gmail.com>", "The Servo project developers"]
 build = "build.rs"
 include = ["LICENSE", "README.md", "Cargo.toml", "build.rs", "src/*.rs", "src/**/*.rs"]
 description = "Automatically generates Rust FFI bindings to C and C++ libraries."
-homepage = "https://rust-lang.github.io/rust-bindgen/"
+homepage = "https://rust-lang-nursery.github.io/rust-bindgen/"
 documentation = "https://docs.rs/bindgen"
 readme = "README.md"
 keywords = ["bindings", "ffi", "code-generation"]
 categories = ["external-ffi-bindings", "development-tools::ffi"]
 license = "BSD-3-Clause"
-repository = "https://github.com/rust-lang/rust-bindgen"
+repository = "https://github.com/rust-lang-nursery/rust-bindgen"
 
 [lib]
 path = "src/lib.rs"
 
 [[bin]]
 name = "bindgen"
 path = "src/main.rs"
 doc = false
@@ -37,52 +37,49 @@ version = "1.0.3"
 
 [dependencies.cexpr]
 version = "0.3.3"
 
 [dependencies.cfg-if]
 version = "0.1.0"
 
 [dependencies.clang-sys]
-version = "0.26.4"
+version = "0.26"
 features = ["runtime", "clang_6_0"]
 
 [dependencies.clap]
 version = "2"
 
 [dependencies.env_logger]
 version = "0.6"
 optional = true
 
-[dependencies.hashbrown]
-version = "0.1"
-
 [dependencies.lazy_static]
 version = "1"
 
 [dependencies.log]
 version = "0.4"
 optional = true
 
 [dependencies.peeking_take_while]
 version = "0.1.2"
 
 [dependencies.proc-macro2]
-version = "0.4"
+version = "0.3.2, < 0.3.6"
 default-features = false
 
 [dependencies.quote]
-version = "0.6"
+version = "0.5"
 default-features = false
 
 [dependencies.regex]
 version = "1.0"
 
 [dependencies.which]
-version = ">=1.0, <3.0"
+version = "1.0.2"
 [dev-dependencies.clap]
 version = "2"
 
 [dev-dependencies.diff]
 version = "0.1"
 
 [dev-dependencies.shlex]
 version = "0.1"
@@ -93,9 +90,9 @@ logging = ["env_logger", "log"]
 static = []
 testing_only_docs = []
 testing_only_extra_assertions = []
 testing_only_libclang_3_8 = []
 testing_only_libclang_3_9 = []
 testing_only_libclang_4 = []
 testing_only_libclang_5 = []
 [badges.travis-ci]
-repository = "rust-lang/rust-bindgen"
+repository = "rust-lang-nursery/rust-bindgen"
--- a/third_party/rust/bindgen/README.md
+++ b/third_party/rust/bindgen/README.md
@@ -1,10 +1,12 @@
 # `bindgen`
 
+[`impl period`](https://blog.rust-lang.org/2017/09/18/impl-future-for-rust.html) has been started! Join us at [Gitter.im](https://gitter.im/rust-impl-period/WG-dev-tools-bindgen).
+
 **`bindgen` automatically generates Rust FFI bindings to C (and some C++) libraries.**
 
 For example, given the C header `doggo.h`:
 
 ```c
 typedef struct Doggo {
     int many;
     char wow;
@@ -27,17 +29,17 @@ pub struct Doggo {
 
 extern "C" {
     pub fn eleven_out_of_ten_majestic_af(pupper: *mut Doggo);
 }
 ```
 
 ## Users Guide
 
-[📚 Read the `bindgen` users guide here! 📚](https://rust-lang.github.io/rust-bindgen)
+[📚 Read the `bindgen` users guide here! 📚](https://rust-lang-nursery.github.io/rust-bindgen)
 
 ## API Reference
 
 [API reference documentation is on docs.rs](https://docs.rs/bindgen)
 
 ## Contributing
 
 [See `CONTRIBUTING.md` for hacking on `bindgen`!](./CONTRIBUTING.md)
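
For orientation, both the 0.43.2 code being restored and the 0.47.3 code being backed out are driven the same way from a consumer crate's build.rs via the `Builder` API; the sketch below is illustrative (the header name and output path are not taken from this tree):

```rust
// build.rs — minimal bindgen invocation (illustrative; not from this tree).
use std::env;
use std::path::PathBuf;

fn main() {
    // Parse the C header and generate Rust declarations for it.
    let bindings = bindgen::Builder::default()
        .header("doggo.h")
        .generate()
        .expect("bindgen failed to generate bindings");

    // Write the generated code where the crate can include!() it.
    let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap());
    bindings
        .write_to_file(out_dir.join("bindings.rs"))
        .expect("failed to write bindings.rs");
}
```

The in-tree consumers listed above (js/rust, js/src/wasm/cranelift, servo/components/style) drive bindgen from their build scripts in essentially this way, just with many more builder options.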
--- a/third_party/rust/bindgen/build.rs
+++ b/third_party/rust/bindgen/build.rs
@@ -51,17 +51,17 @@ mod testgen {
                         .unwrap()
                         .replace(|c| !char::is_alphanumeric(c), "_")
                         .replace("__", "_")
                         .to_lowercase();
                     writeln!(
                         dst,
                         "test_header!(header_{}, {:?});",
                         func,
-                        entry.path(),
+                        entry.path()
                     ).unwrap();
                 }
                 _ => {}
             }
         }
 
         dst.flush().unwrap();
     }
--- a/third_party/rust/bindgen/src/callbacks.rs
+++ b/third_party/rust/bindgen/src/callbacks.rs
@@ -30,21 +30,16 @@ pub trait ParseCallbacks: fmt::Debug + U
     }
 
     /// The integer kind an integer macro should have, given a name and the
     /// value of that macro, or `None` if you want the default to be chosen.
     fn int_macro(&self, _name: &str, _value: i64) -> Option<IntKind> {
         None
     }
 
-    /// This will be run on every string macro. The callback can not influence the further
-    /// treatment of the macro, but may use the value to generate additional code or configuration.
-    fn str_macro(&self, _name: &str, _value: &[u8]) {
-    }
-
     /// This function should return whether, given an enum variant
     /// name, and value, this enum variant will forcibly be a constant.
     fn enum_variant_behavior(
         &self,
         _enum_name: Option<&str>,
         _original_variant_name: &str,
         _variant_value: EnumVariantValue,
     ) -> Option<EnumVariantCustomBehavior> {
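
The hunk above removes the `str_macro` hook that bindgen 0.47 added; the remaining `ParseCallbacks` methods, such as `int_macro`, exist in both 0.43 and 0.47. A minimal usage sketch (the struct name and the `_FLAGS` naming convention are invented for illustration):

```rust
use bindgen::callbacks::{IntKind, ParseCallbacks};

// Callbacks must be Debug + UnwindSafe; a unit struct satisfies both.
#[derive(Debug)]
struct ForceFlagWidth;

impl ParseCallbacks for ForceFlagWidth {
    // Ask bindgen to emit integer macros whose names end in `_FLAGS` as u32
    // instead of letting it infer a type from the value.
    fn int_macro(&self, name: &str, _value: i64) -> Option<IntKind> {
        if name.ends_with("_FLAGS") {
            Some(IntKind::U32)
        } else {
            None
        }
    }
}

// Registered on the builder as:
//   bindgen::Builder::default()
//       .header("doggo.h")
//       .parse_callbacks(Box::new(ForceFlagWidth))
//       .generate();
```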
--- a/third_party/rust/bindgen/src/clang.rs
+++ b/third_party/rust/bindgen/src/clang.rs
@@ -494,39 +494,16 @@ impl Cursor {
             if self.kind() == CXCursor_EnumConstantDecl {
                 Some(clang_getEnumConstantDeclUnsignedValue(self.x) as u64)
             } else {
                 None
             }
         }
     }
 
-    /// Does this cursor have the given simple attribute?
-    ///
-    /// Note that this will only work for attributes that don't have an existing libclang
-    /// CursorKind, e.g. pure, const, etc.
-    pub fn has_simple_attr(&self, attr: &str) -> bool {
-        let mut found_attr = false;
-        self.visit(|cur| {
-            if cur.kind() == CXCursor_UnexposedAttr {
-                found_attr = cur.tokens().iter().any(|t| {
-                    t.kind == CXToken_Identifier && t.spelling() == attr.as_bytes()
-                });
-
-                if found_attr {
-                    return CXChildVisit_Break;
-                }
-            }
-
-            CXChildVisit_Continue
-        });
-
-        found_attr
-    }
-
     /// Given that this cursor's referent is a `typedef`, get the `Type` that is
     /// being aliased.
     pub fn typedef_type(&self) -> Option<Type> {
         let inner = Type {
             x: unsafe { clang_getTypedefDeclUnderlyingType(self.x) },
         };
 
         if inner.is_valid() { Some(inner) } else { None }
@@ -545,37 +522,43 @@ impl Cursor {
             unsafe { clang_getCursorVisibility(self.x) }
         } else {
             CXVisibility_Default
         }
     }
 
     /// Given that this cursor's referent is a function, return cursors to its
     /// parameters.
-    ///
-    /// Returns None if the cursor's referent is not a function/method call or
-    /// declaration.
     pub fn args(&self) -> Option<Vec<Cursor>> {
+        // XXX: We might want to use and keep num_args
         // match self.kind() {
         // CXCursor_FunctionDecl |
         // CXCursor_CXXMethod => {
-        self.num_args().ok().map(|num| {
-            (0..num).map(|i| {
-                Cursor {
-                    x: unsafe { clang_Cursor_getArgument(self.x, i as c_uint) },
+        unsafe {
+            let w = clang_Cursor_getNumArguments(self.x);
+            if w == -1 {
+                None
+            } else {
+                let num = w as u32;
+
+                let mut args = vec![];
+                for i in 0..num {
+                    args.push(Cursor {
+                        x: clang_Cursor_getArgument(self.x, i as c_uint),
+                    });
                 }
-            })
-            .collect()
-        })
+                Some(args)
+            }
+        }
     }
 
     /// Given that this cursor's referent is a function/method call or
     /// declaration, return the number of arguments it takes.
     ///
-    /// Returns Err if the cursor's referent is not a function/method call or
+    /// Returns -1 if the cursor's referent is not a function/method call or
     /// declaration.
     pub fn num_args(&self) -> Result<u32, ()> {
         unsafe {
             let w = clang_Cursor_getNumArguments(self.x);
             if w == -1 { Err(()) } else { Ok(w as u32) }
         }
     }
 
@@ -640,136 +623,74 @@ impl Cursor {
     pub fn ret_type(&self) -> Option<Type> {
         let rt = Type {
             x: unsafe { clang_getCursorResultType(self.x) },
         };
         if rt.is_valid() { Some(rt) } else { None }
     }
 
     /// Gets the tokens that correspond to that cursor.
-    pub fn tokens(&self) -> RawTokens {
-        RawTokens::new(self)
+    pub fn tokens(&self) -> Option<Vec<Token>> {
+        let range = self.extent();
+        let mut tokens = vec![];
+        unsafe {
+            let tu = clang_Cursor_getTranslationUnit(self.x);
+            let mut token_ptr = ptr::null_mut();
+            let mut num_tokens: c_uint = 0;
+            clang_tokenize(tu, range, &mut token_ptr, &mut num_tokens);
+            if token_ptr.is_null() {
+                return None;
+            }
+
+            let token_array =
+                slice::from_raw_parts(token_ptr, num_tokens as usize);
+            for &token in token_array.iter() {
+                let kind = clang_getTokenKind(token);
+                let spelling =
+                    cxstring_into_string(clang_getTokenSpelling(tu, token));
+
+                tokens.push(Token {
+                    kind: kind,
+                    spelling: spelling,
+                });
+            }
+            clang_disposeTokens(tu, token_ptr, num_tokens);
+        }
+        Some(tokens)
     }
 
     /// Gets the tokens that correspond to that cursor as  `cexpr` tokens.
-    pub fn cexpr_tokens(self) -> Vec<cexpr::token::Token> {
+    pub fn cexpr_tokens(self) -> Option<Vec<cexpr::token::Token>> {
         use cexpr::token;
 
-        self.tokens().iter().filter_map(|token| {
-            let kind = match token.kind {
-                CXToken_Punctuation => token::Kind::Punctuation,
-                CXToken_Literal => token::Kind::Literal,
-                CXToken_Identifier => token::Kind::Identifier,
-                CXToken_Keyword => token::Kind::Keyword,
-                // NB: cexpr is not too happy about comments inside
-                // expressions, so we strip them down here.
-                CXToken_Comment => return None,
-                _ => {
-                    error!("Found unexpected token kind: {:?}", token);
-                    return None;
-                }
-            };
-
-            Some(token::Token {
-                kind,
-                raw: token.spelling().to_vec().into_boxed_slice(),
-            })
-        }).collect()
-    }
-}
-
-/// A struct that owns the tokenizer result from a given cursor.
-pub struct RawTokens<'a> {
-    cursor: &'a Cursor,
-    tu: CXTranslationUnit,
-    tokens: *mut CXToken,
-    token_count: c_uint,
-}
-
-impl<'a> RawTokens<'a> {
-    fn new(cursor: &'a Cursor) -> Self {
-        let mut tokens = ptr::null_mut();
-        let mut token_count = 0;
-        let range = cursor.extent();
-        let tu = unsafe {
-            clang_Cursor_getTranslationUnit(cursor.x)
-        };
-        unsafe { clang_tokenize(tu, range, &mut tokens, &mut token_count) };
-        Self { cursor, tu, tokens, token_count }
-    }
-
-    fn as_slice(&self) -> &[CXToken] {
-        if self.tokens.is_null() {
-            return &[];
-        }
-        unsafe { slice::from_raw_parts(self.tokens, self.token_count as usize) }
-    }
+        self.tokens().map(|tokens| {
+            tokens
+                .into_iter()
+                .filter_map(|token| {
+                    let kind = match token.kind {
+                        CXToken_Punctuation => token::Kind::Punctuation,
+                        CXToken_Literal => token::Kind::Literal,
+                        CXToken_Identifier => token::Kind::Identifier,
+                        CXToken_Keyword => token::Kind::Keyword,
+                        // NB: cexpr is not too happy about comments inside
+                        // expressions, so we strip them down here.
+                        CXToken_Comment => return None,
+                        _ => {
+                            error!("Found unexpected token kind: {:?}", token);
+                            return None;
+                        }
+                    };
 
-    /// Get an iterator over these tokens.
-    pub fn iter(&self) -> ClangTokenIterator {
-        ClangTokenIterator {
-            tu: self.tu,
-            raw: self.as_slice().iter(),
-        }
-    }
-}
-
-impl<'a> Drop for RawTokens<'a> {
-    fn drop(&mut self) {
-        if !self.tokens.is_null() {
-            unsafe {
-                clang_disposeTokens(self.tu, self.tokens, self.token_count as c_uint);
-            }
-        }
-    }
-}
-
-/// A raw clang token, that exposes only the kind and spelling. This is a
-/// slightly more convenient version of `CXToken` which owns the spelling
-/// string.
-#[derive(Debug)]
-pub struct ClangToken {
-    spelling: CXString,
-    /// The kind of token, this is the same as the relevant member from
-    /// `CXToken`.
-    pub kind: CXTokenKind,
-}
-
-impl ClangToken {
-    /// Get the token spelling, without being converted to utf-8.
-    pub fn spelling(&self) -> &[u8] {
-        let c_str = unsafe {
-            CStr::from_ptr(clang_getCString(self.spelling) as *const _)
-        };
-        c_str.to_bytes()
-    }
-}
-
-impl Drop for ClangToken {
-    fn drop(&mut self) {
-        unsafe { clang_disposeString(self.spelling) }
-    }
-}
-
-/// An iterator over a set of Tokens.
-pub struct ClangTokenIterator<'a> {
-    tu: CXTranslationUnit,
-    raw: slice::Iter<'a, CXToken>,
-}
-
-impl<'a> Iterator for ClangTokenIterator<'a> {
-    type Item = ClangToken;
-
-    fn next(&mut self) -> Option<Self::Item> {
-        let raw = self.raw.next()?;
-        unsafe {
-            let kind = clang_getTokenKind(*raw);
-            let spelling = clang_getTokenSpelling(self.tu, *raw);
-            Some(ClangToken { kind, spelling })
-        }
+                    Some(token::Token {
+                        kind: kind,
+                        raw: token.spelling.into_bytes().into_boxed_slice(),
+                    })
+                })
+                .collect::<Vec<_>>()
+        })
     }
 }
 
 /// Checks whether the name looks like an identifier, i.e. is alphanumeric
 /// (including '_') and does not start with a digit.
 pub fn is_valid_identifier(name: &str) -> bool {
     let mut chars = name.chars();
     let first_valid = chars
@@ -926,68 +847,53 @@ impl Type {
         s
     }
 
     /// Is this type const qualified?
     pub fn is_const(&self) -> bool {
         unsafe { clang_isConstQualifiedType(self.x) != 0 }
     }
 
-    #[inline]
-    fn is_non_deductible_auto_type(&self) -> bool {
-        self.kind() == CXType_Auto && self.canonical_type() == *self
-    }
-
-    #[inline]
-    fn clang_size_of(&self) -> c_longlong {
-        if self.is_non_deductible_auto_type() {
-            return -6; // Work-around https://bugs.llvm.org/show_bug.cgi?id=40813
-        }
-        unsafe { clang_Type_getSizeOf(self.x) }
-    }
-
-    #[inline]
-    fn clang_align_of(&self) -> c_longlong {
-        if self.is_non_deductible_auto_type() {
-            return -6; // Work-around https://bugs.llvm.org/show_bug.cgi?id=40813
-        }
-        unsafe { clang_Type_getAlignOf(self.x) }
-    }
-
     /// What is the size of this type? Paper over invalid types by returning `0`
     /// for them.
     pub fn size(&self) -> usize {
-        let val = self.clang_size_of();
-        if val < 0 { 0 } else { val as usize }
+        unsafe {
+            let val = clang_Type_getSizeOf(self.x);
+            if val < 0 { 0 } else { val as usize }
+        }
     }
 
     /// What is the size of this type?
     pub fn fallible_size(&self) -> Result<usize, LayoutError> {
-        let val = self.clang_size_of();
+        let val = unsafe { clang_Type_getSizeOf(self.x) };
         if val < 0 {
             Err(LayoutError::from(val as i32))
         } else {
             Ok(val as usize)
         }
     }
 
     /// What is the alignment of this type? Paper over invalid types by
     /// returning `0`.
     pub fn align(&self) -> usize {
-        let val = self.clang_align_of();
-        if val < 0 { 0 } else { val as usize }
+        unsafe {
+            let val = clang_Type_getAlignOf(self.x);
+            if val < 0 { 0 } else { val as usize }
+        }
     }
 
     /// What is the alignment of this type?
     pub fn fallible_align(&self) -> Result<usize, LayoutError> {
-        let val = self.clang_align_of();
-        if val < 0 {
-            Err(LayoutError::from(val as i32))
-        } else {
-            Ok(val as usize)
+        unsafe {
+            let val = clang_Type_getAlignOf(self.x);
+            if val < 0 {
+                Err(LayoutError::from(val as i32))
+            } else {
+                Ok(val as usize)
+            }
         }
     }
 
     /// Get the layout for this type, or an error describing why it does not
     /// have a valid layout.
     pub fn fallible_layout(&self) -> Result<::ir::layout::Layout, LayoutError> {
         use ir::layout::Layout;
         let size = self.fallible_size()?;
@@ -1021,41 +927,16 @@ impl Type {
             TypeTemplateArgIterator {
                 x: self.x,
                 length: n,
                 index: 0,
             }
         })
     }
 
-    /// Given that this type is a function prototype, return the types of its parameters.
-    ///
-    /// Returns None if the type is not a function prototype.
-    pub fn args(&self) -> Option<Vec<Type>> {
-        self.num_args().ok().map(|num| {
-            (0..num).map(|i| {
-                Type {
-                    x: unsafe { clang_getArgType(self.x, i as c_uint) },
-                }
-            })
-            .collect()
-        })
-    }
-
-    /// Given that this type is a function prototype, return the number of arguments it takes.
-    ///
-    /// Returns Err if the type is not a function prototype.
-    pub fn num_args(&self) -> Result<u32, ()> {
-        unsafe {
-            let w = clang_getNumArgTypes(self.x);
-            if w == -1 { Err(()) } else { Ok(w as u32) }
-        }
-    }
-
-
     /// Given that this type is a pointer type, return the type that it points
     /// to.
     pub fn pointee_type(&self) -> Option<Type> {
         match self.kind() {
             CXType_Pointer |
             CXType_RValueReference |
             CXType_LValueReference |
             CXType_MemberPointer |
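
The 0.47 version of `args()` in the hunk above is built on top of `num_args()` by mapping its `Result` into an `Option` and collecting the cursors, rather than repeating the raw libclang call. The same idiom in plain Rust, detached from libclang (the two helper functions are stand-ins for shape only, not real clang-sys calls):

```rust
// Plain-Rust shape of the 0.47 `args()`: turn a fallible count into an
// optional vector by mapping over the Ok case.
fn count() -> Result<u32, ()> {
    Ok(3)
}

fn make(i: u32) -> String {
    format!("arg{}", i)
}

fn args() -> Option<Vec<String>> {
    count().ok().map(|n| (0..n).map(make).collect())
}

fn main() {
    assert_eq!(
        args(),
        Some(vec!["arg0".into(), "arg1".into(), "arg2".into()])
    );
}
```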
--- a/third_party/rust/bindgen/src/codegen/helpers.rs
+++ b/third_party/rust/bindgen/src/codegen/helpers.rs
@@ -1,111 +1,108 @@
 //! Helpers for code generation that don't need macro expansion.
 
 use ir::context::BindgenContext;
 use ir::layout::Layout;
-use proc_macro2::{Ident, Span, TokenStream};
-use quote::TokenStreamExt;
+use quote;
+use proc_macro2::{Term, Span};
 
 pub mod attributes {
-    use proc_macro2::{Ident, Span, TokenStream};
-    use std::str::FromStr;
+    use quote;
+    use proc_macro2::{Term, Span};
 
-    pub fn repr(which: &str) -> TokenStream {
-        let which = Ident::new(which, Span::call_site());
+    pub fn repr(which: &str) -> quote::Tokens {
+        let which = Term::new(which, Span::call_site());
         quote! {
             #[repr( #which )]
         }
     }
 
-    pub fn repr_list(which_ones: &[&str]) -> TokenStream {
-        let which_ones = which_ones.iter().cloned().map(|one| TokenStream::from_str(one).expect("repr to be valid"));
+    pub fn repr_list(which_ones: &[&str]) -> quote::Tokens {
+        let which_ones = which_ones.iter().cloned().map(|one| Term::new(one, Span::call_site()));
         quote! {
             #[repr( #( #which_ones ),* )]
         }
     }
 
-    pub fn derives(which_ones: &[&str]) -> TokenStream {
-        let which_ones = which_ones.iter().cloned().map(|one| Ident::new(one, Span::call_site()));
+    pub fn derives(which_ones: &[&str]) -> quote::Tokens {
+        let which_ones = which_ones.iter().cloned().map(|one| Term::new(one, Span::call_site()));
         quote! {
             #[derive( #( #which_ones ),* )]
         }
     }
 
-    pub fn inline() -> TokenStream {
+    pub fn inline() -> quote::Tokens {
         quote! {
             #[inline]
         }
     }
 
-    pub fn must_use() -> TokenStream {
-        quote! {
-            #[must_use]
-        }
+    pub fn doc(comment: String) -> quote::Tokens {
+        // Doc comments are already preprocessed into nice `///` formats by the
+        // time they get here. Just make sure that we have newlines around it so
+        // that nothing else gets wrapped into the comment.
+        let mut tokens = quote! {};
+        tokens.append(Term::new("\n", Span::call_site()));
+        tokens.append(Term::new(&comment, Span::call_site()));
+        tokens.append(Term::new("\n", Span::call_site()));
+        tokens
     }
 
-    pub fn doc(comment: String) -> TokenStream {
-        use std::str::FromStr;
-
-        // NOTE(emilio): By this point comments are already preprocessed and in
-        // `///` form. Quote turns them into `#[doc]` comments, but oh well.
-        TokenStream::from_str(&comment).unwrap()
-    }
-
-    pub fn link_name(name: &str) -> TokenStream {
+    pub fn link_name(name: &str) -> quote::Tokens {
         // LLVM mangles the name by default but it's already mangled.
         // Prefixing the name with \u{1} should tell LLVM to not mangle it.
         let name = format!("\u{1}{}", name);
         quote! {
             #[link_name = #name]
         }
     }
 }
 
 /// Generates a proper type for a field or type with a given `Layout`, that is,
 /// a type with the correct size and alignment restrictions.
-pub fn blob(ctx: &BindgenContext, layout: Layout) -> TokenStream {
+pub fn blob(ctx: &BindgenContext, layout: Layout) -> quote::Tokens {
     let opaque = layout.opaque();
 
     // FIXME(emilio, #412): We fall back to byte alignment, but there are
     // some things that legitimately are more than 8-byte aligned.
     //
     // Eventually we should be able to `unwrap` here, but...
     let ty_name = match opaque.known_rust_type_for_array(ctx) {
         Some(ty) => ty,
         None => {
             warn!("Found unknown alignment on code generation!");
             "u8"
         }
     };
 
-    let ty_name = Ident::new(ty_name, Span::call_site());
+    let ty_name = Term::new(ty_name, Span::call_site());
 
     let data_len = opaque.array_size(ctx).unwrap_or(layout.size);
 
     if data_len == 1 {
         quote! {
             #ty_name
         }
     } else {
         quote! {
             [ #ty_name ; #data_len ]
         }
     }
 }
 
 /// Integer type of the same size as the given `Layout`.
-pub fn integer_type(ctx: &BindgenContext, layout: Layout) -> Option<TokenStream> {
+pub fn integer_type(ctx: &BindgenContext, layout: Layout) -> Option<quote::Tokens> {
     let name = Layout::known_type_for_size(ctx, layout.size)?;
-    let name = Ident::new(name, Span::call_site());
+    let name = Term::new(name, Span::call_site());
     Some(quote! { #name })
 }
 
 /// Generates a bitfield allocation unit type for a type with the given `Layout`.
-pub fn bitfield_unit(ctx: &BindgenContext, layout: Layout) -> TokenStream {
+pub fn bitfield_unit(ctx: &BindgenContext, layout: Layout) -> quote::Tokens {
     let mut tokens = quote! {};
 
     if ctx.options().enable_cxx_namespaces {
         tokens.append_all(quote! { root:: });
     }
 
     let align = match layout.align {
         n if n >= 8 => quote! { u64 },
@@ -122,39 +119,39 @@ pub fn bitfield_unit(ctx: &BindgenContex
     tokens
 }
 
 pub mod ast_ty {
     use ir::context::BindgenContext;
     use ir::function::FunctionSig;
     use ir::layout::Layout;
     use ir::ty::FloatKind;
-    use std::str::FromStr;
-    use proc_macro2::{self, TokenStream};
+    use quote;
+    use proc_macro2;
 
-    pub fn raw_type(ctx: &BindgenContext, name: &str) -> TokenStream {
+    pub fn raw_type(ctx: &BindgenContext, name: &str) -> quote::Tokens {
         let ident = ctx.rust_ident_raw(name);
         match ctx.options().ctypes_prefix {
             Some(ref prefix) => {
-                let prefix = TokenStream::from_str(prefix.as_str()).unwrap();
+                let prefix = ctx.rust_ident_raw(prefix.as_str());
                 quote! {
                     #prefix::#ident
                 }
             }
             None => quote! {
                 ::std::os::raw::#ident
             },
         }
     }
 
     pub fn float_kind_rust_type(
         ctx: &BindgenContext,
         fk: FloatKind,
         layout: Option<Layout>,
-    ) -> TokenStream {
+    ) -> quote::Tokens {
         // TODO: we probably should take the type layout into account more
         // often?
         //
         // Also, maybe this one shouldn't be the default?
         match (fk, ctx.options().convert_floats) {
             (FloatKind::Float, true) => quote! { f32 },
             (FloatKind::Double, true) => quote! { f64 },
             (FloatKind::Float, false) => raw_type(ctx, "c_float"),
@@ -184,46 +181,46 @@ pub mod ast_ty {
                     quote! { u128 }
                 } else {
                     quote! { [u64; 2] }
                 }
             }
         }
     }
 
-    pub fn int_expr(val: i64) -> TokenStream {
+    pub fn int_expr(val: i64) -> quote::Tokens {
         // Don't use quote! { #val } because that adds the type suffix.
         let val = proc_macro2::Literal::i64_unsuffixed(val);
         quote!(#val)
     }
 
-    pub fn uint_expr(val: u64) -> TokenStream {
+    pub fn uint_expr(val: u64) -> quote::Tokens {
         // Don't use quote! { #val } because that adds the type suffix.
         let val = proc_macro2::Literal::u64_unsuffixed(val);
         quote!(#val)
     }
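
// Illustrative sketch of the suffix problem the two helpers above avoid
// (relies only on the quote/proc-macro2 APIs already used in this file):
fn unsuffixed_literal_demo() {
    let val: i64 = 64;
    // Interpolating the integer directly renders a suffixed literal, `64i64`.
    let suffixed = quote!(#val);
    // Going through an unsuffixed Literal renders plain `64`, which lets the
    // emitted constant adopt whatever integer type its context expects.
    let lit = proc_macro2::Literal::i64_unsuffixed(val);
    let unsuffixed = quote!(#lit);
    assert_ne!(suffixed.to_string(), unsuffixed.to_string());
}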
 
-    pub fn byte_array_expr(bytes: &[u8]) -> TokenStream {
+    pub fn byte_array_expr(bytes: &[u8]) -> quote::Tokens {
         let mut bytes: Vec<_> = bytes.iter().cloned().collect();
         bytes.push(0);
         quote! { [ #(#bytes),* ] }
     }
 
-    pub fn cstr_expr(mut string: String) -> TokenStream {
+    pub fn cstr_expr(mut string: String) -> quote::Tokens {
         string.push('\0');
         let b = proc_macro2::Literal::byte_string(&string.as_bytes());
         quote! {
             #b
         }
     }
 
     pub fn float_expr(
         ctx: &BindgenContext,
         f: f64,
-    ) -> Result<TokenStream, ()> {
+    ) -> Result<quote::Tokens, ()> {
         if f.is_finite() {
             let val = proc_macro2::Literal::f64_unsuffixed(f);
 
             return Ok(quote!(#val));
         }
 
         let prefix = ctx.trait_prefix();
 
@@ -247,17 +244,17 @@ pub mod ast_ty {
 
         warn!("Unknown non-finite float number: {:?}", f);
         return Err(());
     }
 
     pub fn arguments_from_signature(
         signature: &FunctionSig,
         ctx: &BindgenContext,
-    ) -> Vec<TokenStream> {
+    ) -> Vec<quote::Tokens> {
         let mut unnamed_arguments = 0;
         signature
             .argument_types()
             .iter()
             .map(|&(ref name, _ty)| {
                 match *name {
                     Some(ref name) => {
                         let name = ctx.rust_ident(name);
--- a/third_party/rust/bindgen/src/codegen/impl_debug.rs
+++ b/third_party/rust/bindgen/src/codegen/impl_debug.rs
@@ -1,21 +1,21 @@
 use ir::comp::{BitfieldUnit, CompKind, Field, FieldData, FieldMethods};
 use ir::context::BindgenContext;
 use ir::derive::CanTriviallyDeriveDebug;
 use ir::item::{HasTypeParamInArray, IsOpaque, Item, ItemCanonicalName};
 use ir::ty::{RUST_DERIVE_IN_ARRAY_LIMIT, TypeKind};
-use proc_macro2;
+use quote;
 
 pub fn gen_debug_impl(
     ctx: &BindgenContext,
     fields: &[Field],
     item: &Item,
     kind: CompKind,
-) -> proc_macro2::TokenStream {
+) -> quote::Tokens {
     let struct_name = item.canonical_name(ctx);
     let mut format_string = format!("{} {{{{ ", struct_name);
     let mut tokens = vec![];
 
     if item.is_opaque(ctx, &()) {
         format_string.push_str("opaque");
     } else {
         match kind {
@@ -58,43 +58,43 @@ pub trait ImplDebug<'a> {
     type Extra;
 
     /// Generate a format string snippet to be included in the larger `impl Debug`
     /// format string, and the code to get the format string's interpolation values.
     fn impl_debug(
         &self,
         ctx: &BindgenContext,
         extra: Self::Extra,
-    ) -> Option<(String, Vec<proc_macro2::TokenStream>)>;
+    ) -> Option<(String, Vec<quote::Tokens>)>;
 }
 
 impl<'a> ImplDebug<'a> for FieldData {
     type Extra = ();
 
     fn impl_debug(
         &self,
         ctx: &BindgenContext,
         _: Self::Extra,
-    ) -> Option<(String, Vec<proc_macro2::TokenStream>)> {
+    ) -> Option<(String, Vec<quote::Tokens>)> {
         if let Some(name) = self.name() {
             ctx.resolve_item(self.ty()).impl_debug(ctx, name)
         } else {
             None
         }
     }
 }
 
 impl<'a> ImplDebug<'a> for BitfieldUnit {
     type Extra = ();
 
     fn impl_debug(
         &self,
         ctx: &BindgenContext,
         _: Self::Extra,
-    ) -> Option<(String, Vec<proc_macro2::TokenStream>)> {
+    ) -> Option<(String, Vec<quote::Tokens>)> {
         let mut format_string = String::new();
         let mut tokens = vec![];
         for (i, bitfield) in self.bitfields().iter().enumerate() {
             if i > 0 {
                 format_string.push_str(", ");
             }
 
             if let Some(bitfield_name) = bitfield.name() {
@@ -113,17 +113,17 @@ impl<'a> ImplDebug<'a> for BitfieldUnit 
 
 impl<'a> ImplDebug<'a> for Item {
     type Extra = &'a str;
 
     fn impl_debug(
         &self,
         ctx: &BindgenContext,
         name: &str,
-    ) -> Option<(String, Vec<proc_macro2::TokenStream>)> {
+    ) -> Option<(String, Vec<quote::Tokens>)> {
         let name_ident = ctx.rust_ident(name);
 
         // We don't know if blacklisted items `impl Debug` or not, so we can't
         // add them to the format string we're building up.
         if !ctx.whitelisted_items().contains(&self.id()) {
             return None;
         }
 
@@ -131,18 +131,18 @@ impl<'a> ImplDebug<'a> for Item {
             Some(ty) => ty,
             None => {
                 return None;
             }
         };
 
         fn debug_print(
             name: &str,
-            name_ident: proc_macro2::TokenStream,
-        ) -> Option<(String, Vec<proc_macro2::TokenStream>)> {
+            name_ident: quote::Tokens,
+        ) -> Option<(String, Vec<quote::Tokens>)> {
             Some((
                 format!("{}: {{:?}}", name),
                 vec![quote! {
                     self.#name_ident
                 }],
             ))
         }
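
// Illustrative sketch (hypothetical type) of the impl this module ultimately
// emits: one format string assembled from the per-field snippets above, plus
// one `self.<field>` interpolation value per field.
pub struct Point {
    pub x: ::std::os::raw::c_int,
    pub y: ::std::os::raw::c_int,
}

impl ::std::fmt::Debug for Point {
    fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
        write!(f, "Point {{ x: {:?}, y: {:?} }}", self.x, self.y)
    }
}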
 
--- a/third_party/rust/bindgen/src/codegen/impl_partialeq.rs
+++ b/third_party/rust/bindgen/src/codegen/impl_partialeq.rs
@@ -1,23 +1,24 @@
 
 use ir::comp::{CompInfo, CompKind, Field, FieldMethods};
 use ir::context::BindgenContext;
 use ir::item::{IsOpaque, Item};
 use ir::ty::{TypeKind, RUST_DERIVE_IN_ARRAY_LIMIT};
+use quote;
 use proc_macro2;
 
 /// Generate a manual implementation of `PartialEq` trait for the
 /// specified compound type.
 pub fn gen_partialeq_impl(
     ctx: &BindgenContext,
     comp_info: &CompInfo,
     item: &Item,
-    ty_for_impl: &proc_macro2::TokenStream,
-) -> Option<proc_macro2::TokenStream> {
+    ty_for_impl: &quote::Tokens,
+) -> Option<quote::Tokens> {
     let mut tokens = vec![];
 
     if item.is_opaque(ctx, &()) {
         tokens.push(quote! {
             &self._bindgen_opaque_blob[..] == &other._bindgen_opaque_blob[..]
         });
     } else if comp_info.kind() == CompKind::Union {
         assert!(!ctx.options().rust_features().untagged_union);
@@ -65,18 +66,18 @@ pub fn gen_partialeq_impl(
 
     Some(quote! {
         fn eq(&self, other: & #ty_for_impl) -> bool {
             #( #tokens )&&*
         }
     })
 }
 
-fn gen_field(ctx: &BindgenContext, ty_item: &Item, name: &str) -> proc_macro2::TokenStream {
-    fn quote_equals(name_ident: proc_macro2::Ident) -> proc_macro2::TokenStream {
+fn gen_field(ctx: &BindgenContext, ty_item: &Item, name: &str) -> quote::Tokens {
+    fn quote_equals(name_ident: proc_macro2::Term) -> quote::Tokens {
         quote! { self.#name_ident == other.#name_ident }
     }
 
     let name_ident = ctx.rust_ident(name);
     let ty = ty_item.expect_type();
 
     match *ty.kind() {
         TypeKind::Void |
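
// Illustrative sketch (hypothetical type) of the manual impl generated here
// for types that can't simply `#[derive(PartialEq)]`, e.g. because they hold
// an array past the derive limit: each field contributes one `&&`-joined
// clause, and large arrays are compared as slices.
#[repr(C)]
pub struct big_record {
    pub tag: ::std::os::raw::c_int,
    pub data: [u8; 64usize],
}

impl PartialEq for big_record {
    fn eq(&self, other: &big_record) -> bool {
        self.tag == other.tag &&
            &self.data[..] == &other.data[..]
    }
}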
--- a/third_party/rust/bindgen/src/codegen/mod.rs
+++ b/third_party/rust/bindgen/src/codegen/mod.rs
@@ -32,74 +32,76 @@ use ir::item::{IsOpaque, Item, ItemCanon
 use ir::item_kind::ItemKind;
 use ir::layout::Layout;
 use ir::module::Module;
 use ir::objc::{ObjCInterface, ObjCMethod};
 use ir::template::{AsTemplateParam, TemplateInstantiation, TemplateParameters};
 use ir::ty::{Type, TypeKind};
 use ir::var::Var;
 
-use quote::TokenStreamExt;
-use proc_macro2::{self, Ident, Span};
+use quote;
+use proc_macro2::{self, Term, Span};
 
 use std;
 use std::borrow::Cow;
 use std::cell::Cell;
-use std::collections::VecDeque;
+use std::collections::{HashSet, VecDeque};
+use std::collections::hash_map::{Entry, HashMap};
 use std::fmt::Write;
 use std::iter;
 use std::ops;
-use std::str::FromStr;
-use {HashMap, HashSet, Entry};
 
 // Name of type defined in constified enum module
 pub static CONSTIFIED_ENUM_MODULE_REPR_NAME: &'static str = "Type";
 
-fn top_level_path(ctx: &BindgenContext, item: &Item) -> Vec<proc_macro2::TokenStream> {
+fn top_level_path(ctx: &BindgenContext, item: &Item) -> Vec<quote::Tokens> {
     let mut path = vec![quote! { self }];
 
     if ctx.options().enable_cxx_namespaces {
         for _ in 0..item.codegen_depth(ctx) {
             path.push(quote! { super });
         }
     }
 
     path
 }
 
-fn root_import(ctx: &BindgenContext, module: &Item) -> proc_macro2::TokenStream {
+fn root_import(ctx: &BindgenContext, module: &Item) -> quote::Tokens {
     assert!(ctx.options().enable_cxx_namespaces, "Somebody messed it up");
     assert!(module.is_module());
 
     let mut path = top_level_path(ctx, module);
 
     let root = ctx.root_module().canonical_name(ctx);
     let root_ident = ctx.rust_ident(&root);
     path.push(quote! { #root_ident });
 
 
     let mut tokens = quote! {};
-    tokens.append_separated(path, quote!(::));
+    tokens.append_separated(path, Term::new("::", Span::call_site()));
 
     quote! {
         #[allow(unused_imports)]
         use #tokens ;
     }
 }
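
// Illustrative sketch of the emitted import (module names hypothetical): one
// `super` per level of nesting, so a module one level below `root` gets:
pub mod root {
    pub mod ns {
        #[allow(unused_imports)]
        use self::super::super::root;
    }
}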
 
 struct CodegenResult<'a> {
-    items: Vec<proc_macro2::TokenStream>,
+    items: Vec<quote::Tokens>,
 
     /// A monotonic counter used to add stable unique id's to stuff that doesn't
     /// need to be referenced by anything.
     codegen_id: &'a Cell<usize>,
 
     /// Whether a bindgen union has been generated at least once.
     saw_bindgen_union: bool,
 
+    /// Whether a union has been generated at least once.
+    saw_union: bool,
+
     /// Whether an incomplete array has been generated at least once.
     saw_incomplete_array: bool,
 
     /// Whether Objective C types have been seen at least once.
     saw_objc: bool,
 
     /// Whether Apple block types have been seen at least once.
     saw_block: bool,
@@ -132,30 +134,36 @@ struct CodegenResult<'a> {
     /// that name. This lets us give each overload a unique suffix.
     overload_counters: HashMap<String, u32>,
 }
 
 impl<'a> CodegenResult<'a> {
     fn new(codegen_id: &'a Cell<usize>) -> Self {
         CodegenResult {
             items: vec![],
+            saw_union: false,
             saw_bindgen_union: false,
             saw_incomplete_array: false,
             saw_objc: false,
             saw_block: false,
             saw_bitfield_unit: false,
             codegen_id: codegen_id,
             items_seen: Default::default(),
             functions_seen: Default::default(),
             vars_seen: Default::default(),
             overload_counters: Default::default(),
         }
     }
 
+    fn saw_union(&mut self) {
+        self.saw_union = true;
+    }
+
     fn saw_bindgen_union(&mut self) {
+        self.saw_union();
         self.saw_bindgen_union = true;
     }
 
     fn saw_incomplete_array(&mut self) {
         self.saw_incomplete_array = true;
     }
 
     fn saw_objc(&mut self) {
@@ -199,75 +207,75 @@ impl<'a> CodegenResult<'a> {
     fn seen_var(&self, name: &str) -> bool {
         self.vars_seen.contains(name)
     }
 
     fn saw_var(&mut self, name: &str) {
         self.vars_seen.insert(name.into());
     }
 
-    fn inner<F>(&mut self, cb: F) -> Vec<proc_macro2::TokenStream>
+    fn inner<F>(&mut self, cb: F) -> Vec<quote::Tokens>
     where
         F: FnOnce(&mut Self),
     {
         let mut new = Self::new(self.codegen_id);
 
         cb(&mut new);
 
+        self.saw_union |= new.saw_union;
         self.saw_incomplete_array |= new.saw_incomplete_array;
         self.saw_objc |= new.saw_objc;
         self.saw_block |= new.saw_block;
         self.saw_bitfield_unit |= new.saw_bitfield_unit;
-        self.saw_bindgen_union |= new.saw_bindgen_union;
 
         new.items
     }
 }
 
 impl<'a> ops::Deref for CodegenResult<'a> {
-    type Target = Vec<proc_macro2::TokenStream>;
+    type Target = Vec<quote::Tokens>;
 
     fn deref(&self) -> &Self::Target {
         &self.items
     }
 }
 
 impl<'a> ops::DerefMut for CodegenResult<'a> {
     fn deref_mut(&mut self) -> &mut Self::Target {
         &mut self.items
     }
 }
 
 /// A trait to convert a rust type into a pointer, optionally const, to the same
 /// type.
 trait ToPtr {
-    fn to_ptr(self, is_const: bool) -> proc_macro2::TokenStream;
+    fn to_ptr(self, is_const: bool) -> quote::Tokens;
 }
 
-impl ToPtr for proc_macro2::TokenStream {
-    fn to_ptr(self, is_const: bool) -> proc_macro2::TokenStream {
+impl ToPtr for quote::Tokens {
+    fn to_ptr(self, is_const: bool) -> quote::Tokens {
         if is_const {
             quote! { *const #self }
         } else {
             quote! { *mut #self }
         }
     }
 }
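
// Illustrative: `to_ptr` only wraps an already-built type in a raw-pointer
// token, so e.g. `const char *` and `char *` parameters end up as below
// (function names hypothetical):
extern "C" {
    pub fn takes_cstr(s: *const ::std::os::raw::c_char);
    pub fn fills_buf(buf: *mut ::std::os::raw::c_char);
}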
 
-/// An extension trait for `proc_macro2::TokenStream` that lets us append any implicit
+/// An extension trait for `quote::Tokens` that lets us append any implicit
 /// template parameters that exist for some type, if necessary.
 trait AppendImplicitTemplateParams {
     fn append_implicit_template_params(
         &mut self,
         ctx: &BindgenContext,
         item: &Item,
     );
 }
 
-impl AppendImplicitTemplateParams for proc_macro2::TokenStream {
+impl AppendImplicitTemplateParams for quote::Tokens {
     fn append_implicit_template_params(
         &mut self,
         ctx: &BindgenContext,
         item: &Item,
     ) {
         let item = item.id()
             .into_resolver()
             .through_type_refs()
@@ -426,17 +434,20 @@ impl CodeGenerator for Module {
         let mut found_any = false;
         let inner_items = result.inner(|result| {
             result.push(root_import(ctx, item));
 
             let path = item.namespace_aware_canonical_path(ctx).join("::");
             if let Some(raw_lines) = ctx.options().module_lines.get(&path) {
                 for raw_line in raw_lines {
                     found_any = true;
-                    result.push(proc_macro2::TokenStream::from_str(raw_line).unwrap());
+                    // FIXME(emilio): The use of `Term` is an abuse, but we abuse it
+                    // in a bunch more places.
+                    let line = Term::new(raw_line, Span::call_site());
+                    result.push(quote! { #line });
                 }
             }
 
             codegen_self(result, &mut found_any);
         });
 
         // Don't bother creating an empty module.
         if !found_any {
@@ -740,17 +751,17 @@ impl CodeGenerator for Type {
                     }) &&
                     outer_params.is_empty() &&
                     inner_item.expect_type().canonical_type(ctx).is_enum()
                 {
                     tokens.append_all(quote! {
                         pub use
                     });
                     let path = top_level_path(ctx, item);
-                    tokens.append_separated(path, quote!(::));
+                    tokens.append_separated(path, Term::new("::", Span::call_site()));
                     tokens.append_all(quote! {
                         :: #inner_rust_type  as #rust_name ;
                     });
                     result.push(tokens);
                     return;
                 }
 
                 tokens.append_all(quote! {
@@ -852,17 +863,17 @@ impl<'a> ItemCanonicalName for Vtable<'a
 
 impl<'a> TryToRustTy for Vtable<'a> {
     type Extra = ();
 
     fn try_to_rust_ty(
         &self,
         ctx: &BindgenContext,
         _: &(),
-    ) -> error::Result<proc_macro2::TokenStream> {
+    ) -> error::Result<quote::Tokens> {
         let name = ctx.rust_ident(self.canonical_name(ctx));
         Ok(quote! {
             #name
         })
     }
 }
 
 impl CodeGenerator for TemplateInstantiation {
@@ -947,18 +958,18 @@ trait FieldCodegen<'a> {
         accessor_kind: FieldAccessorKind,
         parent: &CompInfo,
         result: &mut CodegenResult,
         struct_layout: &mut StructLayoutTracker,
         fields: &mut F,
         methods: &mut M,
         extra: Self::Extra,
     ) where
-        F: Extend<proc_macro2::TokenStream>,
-        M: Extend<proc_macro2::TokenStream>;
+        F: Extend<quote::Tokens>,
+        M: Extend<quote::Tokens>;
 }
 
 impl<'a> FieldCodegen<'a> for Field {
     type Extra = ();
 
     fn codegen<F, M>(
         &self,
         ctx: &BindgenContext,
@@ -967,18 +978,18 @@ impl<'a> FieldCodegen<'a> for Field {
         accessor_kind: FieldAccessorKind,
         parent: &CompInfo,
         result: &mut CodegenResult,
         struct_layout: &mut StructLayoutTracker,
         fields: &mut F,
         methods: &mut M,
         _: (),
     ) where
-        F: Extend<proc_macro2::TokenStream>,
-        M: Extend<proc_macro2::TokenStream>,
+        F: Extend<quote::Tokens>,
+        M: Extend<quote::Tokens>,
     {
         match *self {
             Field::DataMember(ref data) => {
                 data.codegen(
                     ctx,
                     fields_should_be_private,
                     codegen_depth,
                     accessor_kind,
@@ -1019,31 +1030,30 @@ impl<'a> FieldCodegen<'a> for FieldData 
         accessor_kind: FieldAccessorKind,
         parent: &CompInfo,
         result: &mut CodegenResult,
         struct_layout: &mut StructLayoutTracker,
         fields: &mut F,
         methods: &mut M,
         _: (),
     ) where
-        F: Extend<proc_macro2::TokenStream>,
-        M: Extend<proc_macro2::TokenStream>,
+        F: Extend<quote::Tokens>,
+        M: Extend<quote::Tokens>,
     {
         // Bitfields are handled by `FieldCodegen` implementations for
         // `BitfieldUnit` and `Bitfield`.
         assert!(self.bitfield_width().is_none());
 
         let field_item = self.ty().into_resolver().through_type_refs().resolve(ctx);
         let field_ty = field_item.expect_type();
         let mut ty = self.ty().to_rust_ty_or_opaque(ctx, &());
         ty.append_implicit_template_params(ctx, field_item);
 
         // NB: If supported, we use proper `union` types.
         let ty = if parent.is_union() && !parent.can_be_rust_union(ctx) {
-            result.saw_bindgen_union();
             if ctx.options().enable_cxx_namespaces {
                 quote! {
                     root::__BindgenUnionField<#ty>
                 }
             } else {
                 quote! {
                     __BindgenUnionField<#ty>
                 }
@@ -1155,18 +1165,18 @@ impl<'a> FieldCodegen<'a> for FieldData 
                 }
             }
         }));
     }
 }
 
 impl BitfieldUnit {
     /// Get the constructor name for this bitfield unit.
-    fn ctor_name(&self) -> proc_macro2::TokenStream {
-        let ctor_name = Ident::new(&format!("new_bitfield_{}", self.nth()), Span::call_site());
+    fn ctor_name(&self) -> quote::Tokens {
+        let ctor_name = Term::new(&format!("new_bitfield_{}", self.nth()), Span::call_site());
         quote! {
             #ctor_name
         }
     }
 }
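
// Heavily simplified sketch of the constructor this name belongs to: the real
// generated `new_bitfield_N` packs every bitfield of the unit at its bit
// offset via the generated setters; the widths, offsets and names below are
// hypothetical.
pub struct bitfield_demo;

impl bitfield_demo {
    #[inline]
    pub fn new_bitfield_1(a: u8, b: u8) -> u8 {
        let mut unit: u8 = 0;
        unit |= a & 0x07;         // 3-bit field at bit offset 0
        unit |= (b & 0x1f) << 3;  // 5-bit field at bit offset 3
        unit
    }
}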
 
 impl Bitfield {
     /// Extend an under construction bitfield unit constructor with this
@@ -1174,19 +1184,19 @@ impl Bitfield {
     ///
     /// 1. Adding a parameter with this bitfield's name and its type.
     ///
     /// 2. Setting the relevant bits on the `__bindgen_bitfield_unit` variable
     ///    that's being constructed.
     fn extend_ctor_impl(
         &self,
         ctx: &BindgenContext,
-        param_name: proc_macro2::TokenStream,
-        mut ctor_impl: proc_macro2::TokenStream,
-    ) -> proc_macro2::TokenStream {
+        param_name: quote::Tokens,
+        mut ctor_impl: quote::Tokens,
+    ) -> quote::Tokens {
         let bitfield_ty = ctx.resolve_type(self.ty());
         let bitfield_ty_layout = bitfield_ty.layout(ctx).expect(
             "Bitfield without layout? Gah!",
         );
         let bitfield_int_ty = helpers::blob(ctx, bitfield_ty_layout);
 
         let offset = self.offset_into_unit();
         let width = self.width() as u8;
@@ -1220,25 +1230,24 @@ impl<'a> FieldCodegen<'a> for BitfieldUn
         accessor_kind: FieldAccessorKind,
         parent: &CompInfo,
         result: &mut CodegenResult,
         struct_layout: &mut StructLayoutTracker,
         fields: &mut F,
         methods: &mut M,
         _: (),
     ) where
-        F: Extend<proc_macro2::TokenStream>,
-        M: Extend<proc_macro2::TokenStream>,
+        F: Extend<quote::Tokens>,
+        M: Extend<quote::Tokens>,
     {
         result.saw_bitfield_unit();
 
         let field_ty = {
             let ty = helpers::bitfield_unit(ctx, self.layout());
             if parent.is_union() && !parent.can_be_rust_union(ctx) {
-                result.saw_bindgen_union();
                 if ctx.options().enable_cxx_namespaces {
                     quote! {
                         root::__BindgenUnionField<#ty>
                     }
                 } else {
                     quote! {
                         __BindgenUnionField<#ty>
                     }
@@ -1319,26 +1328,26 @@ impl<'a> FieldCodegen<'a> for BitfieldUn
 
         struct_layout.saw_bitfield_unit(self.layout());
     }
 }
 
 fn bitfield_getter_name(
     ctx: &BindgenContext,
     bitfield: &Bitfield,
-) -> proc_macro2::TokenStream {
+) -> quote::Tokens {
     let name = bitfield.getter_name();
     let name = ctx.rust_ident_raw(name);
     quote! { #name }
 }
 
 fn bitfield_setter_name(
     ctx: &BindgenContext,
     bitfield: &Bitfield,
-) -> proc_macro2::TokenStream {
+) -> quote::Tokens {
     let setter = bitfield.setter_name();
     let setter = ctx.rust_ident_raw(setter);
     quote! { #setter }
 }
 
 impl<'a> FieldCodegen<'a> for Bitfield {
     type Extra = (&'a str, &'a mut bool);
 
@@ -1350,23 +1359,23 @@ impl<'a> FieldCodegen<'a> for Bitfield {
         _accessor_kind: FieldAccessorKind,
         parent: &CompInfo,
         _result: &mut CodegenResult,
         _struct_layout: &mut StructLayoutTracker,
         _fields: &mut F,
         methods: &mut M,
         (unit_field_name, bitfield_representable_as_int): (&'a str, &mut bool),
     ) where
-        F: Extend<proc_macro2::TokenStream>,
-        M: Extend<proc_macro2::TokenStream>,
+        F: Extend<quote::Tokens>,
+        M: Extend<quote::Tokens>,
     {
         let prefix = ctx.trait_prefix();
         let getter_name = bitfield_getter_name(ctx, self);
         let setter_name = bitfield_setter_name(ctx, self);
-        let unit_field_ident = Ident::new(unit_field_name, Span::call_site());
+        let unit_field_ident = Term::new(unit_field_name, Span::call_site());
 
         let bitfield_ty_item = ctx.resolve_item(self.ty());
         let bitfield_ty = bitfield_ty_item.expect_type();
 
         let bitfield_ty_layout = bitfield_ty.layout(ctx).expect(
             "Bitfield without layout? Gah!",
         );
         let bitfield_int_ty = match helpers::integer_type(ctx, bitfield_ty_layout) {
@@ -1499,25 +1508,24 @@ impl CodeGenerator for CompInfo {
                 struct_layout.saw_vtable();
             }
 
             for base in self.base_members() {
                 if !base.requires_storage(ctx) {
                     continue;
                 }
 
-                let inner_item = ctx.resolve_item(base.ty);
-                let mut inner = inner_item.to_rust_ty_or_opaque(ctx, &());
-                inner.append_implicit_template_params(ctx, &inner_item);
+                let inner = base.ty.to_rust_ty_or_opaque(ctx, &());
                 let field_name = ctx.rust_ident(&base.field_name);
 
-                struct_layout.saw_base(inner_item.expect_type());
+                let base_ty = ctx.resolve_type(base.ty);
+                struct_layout.saw_base(base_ty);
 
                 fields.push(quote! {
-                    pub #field_name: #inner,
+                    pub #field_name : #inner ,
                 });
             }
         }
 
         let mut methods = vec![];
         if !is_opaque {
             let codegen_depth = item.codegen_depth(ctx);
             let fields_should_be_private =
@@ -1581,16 +1589,21 @@ impl CodeGenerator for CompInfo {
                             fields.push(quote! {
                                 pub __bindgen_align: #ty ,
                             });
                         }
                     }
                 }
             }
         } else if is_union && !self.is_forward_declaration() {
+            result.saw_union();
+            if !self.can_be_rust_union(ctx) {
+                result.saw_bindgen_union();
+            }
+
             // TODO(emilio): It'd be nice to unify this with the struct path
             // above somehow.
             let layout = layout.expect("Unable to get layout information?");
 
             if struct_layout.requires_explicit_align(layout) {
                 explicit_align = Some(layout.align);
             }
 
@@ -1669,20 +1682,17 @@ impl CodeGenerator for CompInfo {
         let mut needs_clone_impl = false;
         let mut needs_default_impl = false;
         let mut needs_debug_impl = false;
         let mut needs_partialeq_impl = false;
         if let Some(comment) = item.comment(ctx) {
             attributes.push(attributes::doc(comment));
         }
         if packed && !is_opaque {
-            let n = layout.map_or(1, |l| l.align);
-            assert!(ctx.options().rust_features().repr_packed_n || n == 1);
-            let packed_repr = if n == 1 { "packed".to_string() } else { format!("packed({})", n) };
-            attributes.push(attributes::repr_list(&["C", &packed_repr]));
+            attributes.push(attributes::repr_list(&["C", "packed"]));
         } else {
             attributes.push(attributes::repr("C"));
         }
 
         if ctx.options().rust_features().repr_align {
             if let Some(explicit) = explicit_align {
                 // Ensure that the struct has the correct alignment even in
                 // presence of alignas.
@@ -1749,18 +1759,16 @@ impl CodeGenerator for CompInfo {
                 ctx.options().impl_partialeq &&
                 ctx.lookup_can_derive_partialeq_or_partialord(item.id()) == CanDerive::ArrayTooLarge;
         }
 
         if item.can_derive_eq(ctx) {
             derives.push("Eq");
         }
 
-        derives.extend(item.annotations().derives().iter().map(String::as_str));
-
         if !derives.is_empty() {
             attributes.push(attributes::derives(&derives))
         }
 
         let mut tokens = if is_union && self.can_be_rust_union(ctx) {
             quote! {
                 #( #attributes )*
                 pub union #canonical_ident
@@ -1790,31 +1798,31 @@ impl CodeGenerator for CompInfo {
         }
 
         // NOTE: Some unexposed attributes (like alignment attributes) may
         // affect layout, so we're bad and pray to the gods to avoid sending
         // all the tests to shit when parsing things like max_align_t.
         if self.found_unknown_attr() {
             warn!(
                 "Type {} has an unknown attribute that may affect layout",
-                canonical_ident
+                canonical_ident.as_str()
             );
         }
 
         if all_template_params.is_empty() {
             if !is_opaque {
                 for var in self.inner_vars() {
                     ctx.resolve_item(*var).codegen(ctx, result, &());
                 }
             }
 
             if ctx.options().layout_tests && !self.is_forward_declaration() {
                 if let Some(layout) = layout {
                     let fn_name =
-                        format!("bindgen_test_layout_{}", canonical_ident);
+                        format!("bindgen_test_layout_{}", canonical_ident.as_str());
                     let fn_name = ctx.rust_ident_raw(fn_name);
                     let prefix = ctx.trait_prefix();
                     let size_of_expr = quote! {
                         ::#prefix::mem::size_of::<#canonical_ident>()
                     };
                     let align_of_expr = quote! {
                         ::#prefix::mem::align_of::<#canonical_ident>()
                     };
@@ -1866,17 +1874,17 @@ impl CodeGenerator for CompInfo {
                                                     &(*(::#prefix::ptr::null::<#canonical_ident>())).#field_name as *const _ as usize
                                                 },
                                                 #field_offset,
                                                 concat!("Offset of field: ", stringify!(#canonical_ident), "::", stringify!(#field_name))
                                             );
                                         })
                                     })
                                 })
-                                .collect::<Vec<proc_macro2::TokenStream>>();
+                                .collect::<Vec<quote::Tokens>>();
 
                             asserts
                         };
 
                     let item = quote! {
                         #[test]
                         fn #fn_name() {
                             assert_eq!(#size_of_expr,
@@ -2006,28 +2014,28 @@ impl CodeGenerator for CompInfo {
         }
     }
 }
 
 trait MethodCodegen {
     fn codegen_method<'a>(
         &self,
         ctx: &BindgenContext,
-        methods: &mut Vec<proc_macro2::TokenStream>,
+        methods: &mut Vec<quote::Tokens>,
         method_names: &mut HashMap<String, usize>,
         result: &mut CodegenResult<'a>,
         parent: &CompInfo,
     );
 }
 
 impl MethodCodegen for Method {
     fn codegen_method<'a>(
         &self,
         ctx: &BindgenContext,
-        methods: &mut Vec<proc_macro2::TokenStream>,
+        methods: &mut Vec<quote::Tokens>,
         method_names: &mut HashMap<String, usize>,
         result: &mut CodegenResult<'a>,
         _parent: &CompInfo,
     ) {
         assert!({
             let cc = &ctx.options().codegen_config;
             match self.kind() {
                 MethodKind::Constructor => cc.constructors(),
@@ -2141,23 +2149,19 @@ impl MethodCodegen for Method {
 
         let block = quote! {
             #( #stmts );*
         };
 
         let mut attrs = vec![];
         attrs.push(attributes::inline());
 
-        if signature.must_use() && ctx.options().rust_features().must_use_function {
-            attrs.push(attributes::must_use());
-        }
-
         let name = ctx.rust_ident(&name);
         methods.push(quote! {
-            #(#attrs)*
+            #[inline]
             pub unsafe fn #name ( #( #args ),* ) #ret {
                 #block
             }
         });
     }
 }
 
 /// A helper type that represents different enum variations.
@@ -2222,34 +2226,34 @@ impl std::str::FromStr for EnumVariation
     }
 }
 
 
 /// A helper type to construct different enum variations.
 enum EnumBuilder<'a> {
     Rust {
         codegen_depth: usize,
-        attrs: Vec<proc_macro2::TokenStream>,
-        ident: Ident,
-        tokens: proc_macro2::TokenStream,
+        attrs: Vec<quote::Tokens>,
+        ident: Term,
+        tokens: quote::Tokens,
         emitted_any_variants: bool,
     },
     Bitfield {
         codegen_depth: usize,
         canonical_name: &'a str,
-        tokens: proc_macro2::TokenStream,
+        tokens: quote::Tokens,
     },
     Consts {
-        variants: Vec<proc_macro2::TokenStream>,
+        variants: Vec<quote::Tokens>,
         codegen_depth: usize,
     },
     ModuleConsts {
         codegen_depth: usize,
         module_name: &'a str,
-        module_items: Vec<proc_macro2::TokenStream>,
+        module_items: Vec<quote::Tokens>,
     },
 }
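
// Illustrative sketch of the shapes these variations correspond to for a C
// `enum Fruit { APPLE, BANANA };` (identifiers hypothetical; the Bitfield
// variation, roughly a newtype wrapper with bitwise-operator impls, is
// omitted):

// EnumBuilder::Rust
#[repr(u32)]
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum Fruit { APPLE = 0, BANANA = 1 }

// EnumBuilder::Consts
pub type Fruit_c = u32;
pub const Fruit_c_APPLE: Fruit_c = 0;
pub const Fruit_c_BANANA: Fruit_c = 1;

// EnumBuilder::ModuleConsts (the `Type` alias comes from
// CONSTIFIED_ENUM_MODULE_REPR_NAME above)
pub mod Fruit_m {
    pub type Type = u32;
    pub const APPLE: Type = 0;
    pub const BANANA: Type = 1;
}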
 
 impl<'a> EnumBuilder<'a> {
     /// Returns the depth of the code generation for a variant of this enum.
     fn codegen_depth(&self) -> usize {
         match *self {
             EnumBuilder::Rust { codegen_depth, .. } |
@@ -2258,22 +2262,22 @@ impl<'a> EnumBuilder<'a> {
             EnumBuilder::Consts { codegen_depth, .. } => codegen_depth,
         }
     }
 
     /// Create a new enum given an item builder, a canonical name, a name for
     /// the representation, and which variation it should be generated as.
     fn new(
         name: &'a str,
-        attrs: Vec<proc_macro2::TokenStream>,
-        repr: proc_macro2::TokenStream,
+        attrs: Vec<quote::Tokens>,
+        repr: quote::Tokens,
         enum_variation: EnumVariation,
         enum_codegen_depth: usize,
     ) -> Self {
-        let ident = Ident::new(name, Span::call_site());
+        let ident = Term::new(name, Span::call_site());
 
         match enum_variation {
             EnumVariation::Bitfield => {
                 EnumBuilder::Bitfield {
                     codegen_depth: enum_codegen_depth,
                     canonical_name: name,
                     tokens: quote! {
                         #( #attrs )*
@@ -2301,17 +2305,17 @@ impl<'a> EnumBuilder<'a> {
                             pub type #ident = #repr;
                         }
                     ],
                     codegen_depth: enum_codegen_depth,
                 }
             }
 
             EnumVariation::ModuleConsts => {
-                let ident = Ident::new(CONSTIFIED_ENUM_MODULE_REPR_NAME, Span::call_site());
+                let ident = Term::new(CONSTIFIED_ENUM_MODULE_REPR_NAME, Span::call_site());
                 let type_definition = quote! {
                     #( #attrs )*
                     pub type #ident = #repr;
                 };
 
                 EnumBuilder::ModuleConsts {
                     codegen_depth: enum_codegen_depth + 1,
                     module_name: name,
@@ -2322,17 +2326,17 @@ impl<'a> EnumBuilder<'a> {
     }
 
     /// Add a variant to this enum.
     fn with_variant<'b>(
         self,
         ctx: &BindgenContext,
         variant: &EnumVariant,
         mangling_prefix: Option<&str>,
-        rust_ty: proc_macro2::TokenStream,
+        rust_ty: quote::Tokens,
         result: &mut CodegenResult<'b>,
         is_ty_named: bool,
     ) -> Self {
         let variant_name = ctx.rust_mangle(variant.name());
         let expr = match variant.val() {
             EnumVariantValue::Signed(v) => helpers::ast_ty::int_expr(v),
             EnumVariantValue::Unsigned(v) => helpers::ast_ty::uint_expr(v),
         };
@@ -2424,19 +2428,19 @@ impl<'a> EnumBuilder<'a> {
                 }
             }
         }
     }
 
     fn build<'b>(
         self,
         ctx: &BindgenContext,
-        rust_ty: proc_macro2::TokenStream,
+        rust_ty: quote::Tokens,
         result: &mut CodegenResult<'b>,
-    ) -> proc_macro2::TokenStream {
+    ) -> quote::Tokens {
         match self {
             EnumBuilder::Rust { attrs, ident, tokens, emitted_any_variants, .. } => {
                 let variants = if !emitted_any_variants {
                     quote!(__bindgen_cannot_repr_c_on_empty_enum = 0)
                 } else {
                     tokens
                 };
 
@@ -2572,21 +2576,17 @@ impl CodeGenerator for Enum {
         let mut attrs = vec![];
 
         let variation = self.computed_enum_variation(ctx, item);
 
         // TODO(emilio): Delegate this to the builders?
         if variation.is_rust() {
             attrs.push(attributes::repr(repr_name));
         } else if variation.is_bitfield() {
-            if ctx.options().rust_features.repr_transparent {
-                attrs.push(attributes::repr("transparent"));
-            } else {
-                attrs.push(attributes::repr("C"));
-            }
+            attrs.push(attributes::repr("C"));
         }
 
         if let Some(comment) = item.comment(ctx) {
             attrs.push(attributes::doc(comment));
         }
 
         if !variation.is_const() {
             let mut derives = vec!["Debug", "Copy", "Clone", "PartialEq", "Eq", "Hash"];
@@ -2601,33 +2601,33 @@ impl CodeGenerator for Enum {
 
             attrs.push(attributes::derives(&derives));
         }
 
         fn add_constant<'a>(
             ctx: &BindgenContext,
             enum_: &Type,
             // Only to avoid recomputing every time.
-            enum_canonical_name: &Ident,
+            enum_canonical_name: &Term,
             // May be the same as "variant" when the enum is unnamed and we
             // still haven't seen the value.
-            variant_name: &Ident,
-            referenced_name: &Ident,
-            enum_rust_ty: proc_macro2::TokenStream,
+            variant_name: &str,
+            referenced_name: &Term,
+            enum_rust_ty: quote::Tokens,
             result: &mut CodegenResult<'a>,
         ) {
             let constant_name = if enum_.name().is_some() {
                 if ctx.options().prepend_enum_name {
-                    format!("{}_{}", enum_canonical_name, variant_name)
+                    format!("{}_{}", enum_canonical_name.as_str(), variant_name)
                 } else {
-                    format!("{}", variant_name)
+                    variant_name.into()
                 }
             } else {
-                format!("{}", variant_name)
+                variant_name.into()
             };
             let constant_name = ctx.rust_ident(constant_name);
 
             result.push(quote! {
                 pub const #constant_name : #enum_rust_ty =
                     #enum_canonical_name :: #referenced_name ;
             });
         }
@@ -2641,17 +2641,17 @@ impl CodeGenerator for Enum {
             &name,
             attrs,
             repr,
             variation,
             item.codegen_depth(ctx),
         );
 
         // A map where we keep a value -> variant relation.
-        let mut seen_values = HashMap::<_, Ident>::default();
+        let mut seen_values = HashMap::<_, Term>::new();
         let enum_rust_ty = item.to_rust_ty_or_opaque(ctx, &());
         let is_toplevel = item.is_toplevel(ctx);
 
         // Used to mangle the constants we generate in the unnamed-enum case.
         let parent_canonical_name = if is_toplevel {
             None
         } else {
             Some(item.parent_id().canonical_name(ctx))
@@ -2713,17 +2713,17 @@ impl CodeGenerator for Enum {
                                         #enum_canonical_name :: #existing_variant_name ;
                                 }
                             });
                         } else {
                             add_constant(
                                 ctx,
                                 enum_ty,
                                 &ident,
-                                &Ident::new(&*mangled_name, Span::call_site()),
+                                &*mangled_name,
                                 existing_variant_name,
                                 enum_rust_ty.clone(),
                                 result,
                             );
                         }
                     } else {
                         builder = builder.with_variant(
                             ctx,
@@ -2754,31 +2754,31 @@ impl CodeGenerator for Enum {
                         variant.force_constification()
                     {
                         let mangled_name = if is_toplevel {
                             variant_name.clone()
                         } else {
                             let parent_name =
                                 parent_canonical_name.as_ref().unwrap();
 
-                            Ident::new(
+                            Term::new(
                                 &format!(
                                     "{}_{}",
                                     parent_name,
-                                    variant_name
+                                    variant_name.as_str()
                                 ),
                                 Span::call_site()
                             )
                         };
 
                         add_constant(
                             ctx,
                             enum_ty,
                             &ident,
-                            &mangled_name,
+                            mangled_name.as_str(),
                             &variant_name,
                             enum_rust_ty.clone(),
                             result,
                         );
                     }
 
                     entry.insert(variant_name);
                 }
@@ -2805,17 +2805,17 @@ trait TryToOpaque {
         extra: &Self::Extra,
     ) -> error::Result<Layout>;
 
     /// Do not override this provided trait method.
     fn try_to_opaque(
         &self,
         ctx: &BindgenContext,
         extra: &Self::Extra,
-    ) -> error::Result<proc_macro2::TokenStream> {
+    ) -> error::Result<quote::Tokens> {
         self.try_get_layout(ctx, extra).map(|layout| {
             helpers::blob(ctx, layout)
         })
     }
 }
 
 /// Infallible conversion of an IR thing to an opaque blob.
 ///
@@ -2832,17 +2832,17 @@ trait ToOpaque: TryToOpaque {
         self.try_get_layout(ctx, extra)
             .unwrap_or_else(|_| Layout::for_size(ctx, 1))
     }
 
     fn to_opaque(
         &self,
         ctx: &BindgenContext,
         extra: &Self::Extra,
-    ) -> proc_macro2::TokenStream {
+    ) -> quote::Tokens {
         let layout = self.get_layout(ctx, extra);
         helpers::blob(ctx, layout)
     }
 }
 
 impl<T> ToOpaque for T
 where
     T: TryToOpaque,
@@ -2858,46 +2858,46 @@ where
 /// alignment. That is the responsibility of the `TryToOpaque` trait.
 trait TryToRustTy {
     type Extra;
 
     fn try_to_rust_ty(
         &self,
         ctx: &BindgenContext,
         extra: &Self::Extra,
-    ) -> error::Result<proc_macro2::TokenStream>;
+    ) -> error::Result<quote::Tokens>;
 }
 
 /// Fallible conversion to a Rust type or an opaque blob with the correct size
 /// and alignment.
 ///
 /// Don't implement this directly. Instead implement `TryToRustTy` and
 /// `TryToOpaque`, and then leverage the blanket impl for this trait below.
 trait TryToRustTyOrOpaque: TryToRustTy + TryToOpaque {
     type Extra;
 
     fn try_to_rust_ty_or_opaque(
         &self,
         ctx: &BindgenContext,
         extra: &<Self as TryToRustTyOrOpaque>::Extra,
-    ) -> error::Result<proc_macro2::TokenStream>;
+    ) -> error::Result<quote::Tokens>;
 }
 
 impl<E, T> TryToRustTyOrOpaque for T
 where
     T: TryToRustTy<Extra = E>
         + TryToOpaque<Extra = E>,
 {
     type Extra = E;
 
     fn try_to_rust_ty_or_opaque(
         &self,
         ctx: &BindgenContext,
         extra: &E,
-    ) -> error::Result<proc_macro2::TokenStream> {
+    ) -> error::Result<quote::Tokens> {
         self.try_to_rust_ty(ctx, extra).or_else(
             |_| if let Ok(layout) =
                 self.try_get_layout(ctx, extra)
             {
                 Ok(helpers::blob(ctx, layout))
             } else {
                 Err(error::Error::NoLayoutForOpaqueBlob)
             },
@@ -2924,30 +2924,30 @@ where
 /// type for a C++ construct.
 trait ToRustTyOrOpaque: TryToRustTy + ToOpaque {
     type Extra;
 
     fn to_rust_ty_or_opaque(
         &self,
         ctx: &BindgenContext,
         extra: &<Self as ToRustTyOrOpaque>::Extra,
-    ) -> proc_macro2::TokenStream;
+    ) -> quote::Tokens;
 }
 
 impl<E, T> ToRustTyOrOpaque for T
 where
     T: TryToRustTy<Extra = E> + ToOpaque<Extra = E>,
 {
     type Extra = E;
 
     fn to_rust_ty_or_opaque(
         &self,
         ctx: &BindgenContext,
         extra: &E,
-    ) -> proc_macro2::TokenStream {
+    ) -> quote::Tokens {
         self.try_to_rust_ty(ctx, extra).unwrap_or_else(|_| {
             self.to_opaque(ctx, extra)
         })
     }
 }
 
 impl<T> TryToOpaque for T
 where
@@ -2969,17 +2969,17 @@ where
     T: Copy + Into<ItemId>
 {
     type Extra = ();
 
     fn try_to_rust_ty(
         &self,
         ctx: &BindgenContext,
         _: &(),
-    ) -> error::Result<proc_macro2::TokenStream> {
+    ) -> error::Result<quote::Tokens> {
         ctx.resolve_item((*self).into()).try_to_rust_ty(ctx, &())
     }
 }
 
 impl TryToOpaque for Item {
     type Extra = ();
 
     fn try_get_layout(
@@ -2993,17 +2993,17 @@ impl TryToOpaque for Item {
 
 impl TryToRustTy for Item {
     type Extra = ();
 
     fn try_to_rust_ty(
         &self,
         ctx: &BindgenContext,
         _: &(),
-    ) -> error::Result<proc_macro2::TokenStream> {
+    ) -> error::Result<quote::Tokens> {
         self.kind().expect_type().try_to_rust_ty(ctx, self)
     }
 }
 
 impl TryToOpaque for Type {
     type Extra = Item;
 
     fn try_get_layout(
@@ -3017,17 +3017,17 @@ impl TryToOpaque for Type {
 
 impl TryToRustTy for Type {
     type Extra = Item;
 
     fn try_to_rust_ty(
         &self,
         ctx: &BindgenContext,
         item: &Item,
-    ) -> error::Result<proc_macro2::TokenStream> {
+    ) -> error::Result<quote::Tokens> {
         use self::helpers::ast_ty::*;
 
         match *self.kind() {
             TypeKind::Void => Ok(raw_type(ctx, "c_void")),
             // TODO: we should do something smart with nullptr, or maybe *const
             // c_void is enough?
             TypeKind::NullPtr => {
                 Ok(raw_type(ctx, "c_void").to_ptr(true))
@@ -3119,17 +3119,17 @@ impl TryToRustTy for Type {
             TypeKind::Array(item, len) | TypeKind::Vector(item, len) => {
                 let ty = item.try_to_rust_ty(ctx, &())?;
                 Ok(quote! {
                     [ #ty ; #len ]
                 })
             }
             TypeKind::Enum(..) => {
                 let path = item.namespace_aware_canonical_path(ctx);
-                let path = proc_macro2::TokenStream::from_str(&path.join("::")).unwrap();
+                let path = Term::new(&path.join("::"), Span::call_site());
                 Ok(quote!(#path))
             }
             TypeKind::TemplateInstantiation(ref inst) => {
                 inst.try_to_rust_ty(ctx, item)
             }
             TypeKind::ResolvedTypeRef(inner) => inner.try_to_rust_ty(ctx, &()),
             TypeKind::TemplateAlias(..) |
             TypeKind::Alias(..) |
@@ -3222,29 +3222,29 @@ impl TryToOpaque for TemplateInstantiati
 
 impl TryToRustTy for TemplateInstantiation {
     type Extra = Item;
 
     fn try_to_rust_ty(
         &self,
         ctx: &BindgenContext,
         item: &Item,
-    ) -> error::Result<proc_macro2::TokenStream> {
+    ) -> error::Result<quote::Tokens> {
         if self.is_opaque(ctx, item) {
             return Err(error::Error::InstantiationOfOpaqueType);
         }
 
         let def = self.template_definition()
             .into_resolver()
             .through_type_refs()
             .resolve(ctx);
 
         let mut ty = quote! {};
         let def_path = def.namespace_aware_canonical_path(ctx);
-        ty.append_separated(def_path.into_iter().map(|p| ctx.rust_ident(p)), quote!(::));
+        ty.append_separated(def_path.into_iter().map(|p| ctx.rust_ident(p)), Term::new("::", Span::call_site()));
 
         let def_params = def.self_template_params(ctx);
         if def_params.is_empty() {
             // This can happen if we generated an opaque type for a partial
             // template specialization, and we've hit an instantiation of
             // that partial specialization.
             extra_assert!(
                 def.is_opaque(ctx, &())
@@ -3286,26 +3286,26 @@ impl TryToRustTy for TemplateInstantiati
 
 impl TryToRustTy for FunctionSig {
     type Extra = ();
 
     fn try_to_rust_ty(
         &self,
         ctx: &BindgenContext,
         _: &(),
-    ) -> error::Result<proc_macro2::TokenStream> {
+    ) -> error::Result<quote::Tokens> {
         // TODO: we might want to consider ignoring the reference return value.
         let ret = utils::fnsig_return_ty(ctx, &self);
         let arguments = utils::fnsig_arguments(ctx, &self);
         let abi = self.abi();
 
         match abi {
             Abi::ThisCall if !ctx.options().rust_features().thiscall_abi => {
                 warn!("Skipping function with thiscall ABI that isn't supported by the configured Rust target");
-                Ok(proc_macro2::TokenStream::new())
+                Ok(quote::Tokens::new())
             }
             _ => {
                 Ok(quote! {
                     unsafe extern #abi fn ( #( #arguments ),* ) #ret
                 })
             }
         }
     }
@@ -3369,20 +3369,16 @@ impl CodeGenerator for Function {
             _ => panic!("Signature kind is not a Function: {:?}", signature),
         };
 
         let args = utils::fnsig_arguments(ctx, signature);
         let ret = utils::fnsig_return_ty(ctx, signature);
 
         let mut attributes = vec![];
 
-        if signature.must_use() && ctx.options().rust_features().must_use_function {
-            attributes.push(attributes::must_use());
-        }
-
         if let Some(comment) = item.comment(ctx) {
             attributes.push(attributes::doc(comment));
         }
 
         // Handle overloaded functions by giving each overload its own unique
         // suffix.
         let times_seen = result.overload_number(&canonical_name);
         if times_seen > 0 {
@@ -3427,17 +3423,17 @@ impl CodeGenerator for Function {
 }
 
 
 fn objc_method_codegen(
     ctx: &BindgenContext,
     method: &ObjCMethod,
     class_name: Option<&str>,
     prefix: &str,
-) -> (proc_macro2::TokenStream, proc_macro2::TokenStream) {
+) -> (quote::Tokens, quote::Tokens) {
     let signature = method.signature();
     let fn_args = utils::fnsig_arguments(ctx, signature);
     let fn_ret = utils::fnsig_return_ty(ctx, signature);
 
     let sig = if method.is_class_method() {
         let fn_args = fn_args.clone();
         quote! {
             ( #( #fn_args ),* ) #fn_ret
@@ -3541,30 +3537,29 @@ impl CodeGenerator for ObjCInterface {
         };
 
         result.push(trait_block);
         result.push(impl_block);
         result.saw_objc();
     }
 }
 
-pub(crate) fn codegen(context: BindgenContext) -> (Vec<proc_macro2::TokenStream>, BindgenOptions) {
+pub(crate) fn codegen(context: BindgenContext) -> (Vec<quote::Tokens>, BindgenOptions) {
     context.gen(|context| {
         let _t = context.timer("codegen");
         let counter = Cell::new(0);
         let mut result = CodegenResult::new(&counter);
 
         debug!("codegen: {:?}", context.options());
 
+        let codegen_items = context.codegen_items();
         if context.options().emit_ir {
-            let codegen_items = context.codegen_items();
-            for (id, item) in context.items() {
-                if codegen_items.contains(&id) {
-                    println!("ir: {:?} = {:#?}", id, item);
-                }
+            for &id in codegen_items {
+                let item = context.resolve_item(id);
+                println!("ir: {:?} = {:#?}", id, item);
             }
         }
 
         if let Some(path) = context.options().emit_ir_graphviz.as_ref() {
             match dot::write_dot_file(context, path) {
                 Ok(()) => info!("Your dot file was generated successfully into: {}", path),
                 Err(e) => error!("{}", e),
             }
@@ -3578,32 +3573,32 @@ pub(crate) fn codegen(context: BindgenCo
 }
 
 mod utils {
     use super::{ToRustTyOrOpaque, error};
     use ir::context::BindgenContext;
     use ir::function::FunctionSig;
     use ir::item::{Item, ItemCanonicalPath};
     use ir::ty::TypeKind;
-    use proc_macro2;
+    use quote;
+    use proc_macro2::{Term, Span};
     use std::mem;
-    use std::str::FromStr;
-
-    pub fn prepend_bitfield_unit_type(result: &mut Vec<proc_macro2::TokenStream>) {
-        let bitfield_unit_type = proc_macro2::TokenStream::from_str(include_str!("./bitfield_unit.rs")).unwrap();
+
+    pub fn prepend_bitfield_unit_type(result: &mut Vec<quote::Tokens>) {
+        let bitfield_unit_type = Term::new(include_str!("./bitfield_unit.rs"), Span::call_site());
         let bitfield_unit_type = quote!(#bitfield_unit_type);
 
         let items = vec![bitfield_unit_type];
         let old_items = mem::replace(result, items);
         result.extend(old_items);
     }
 
     pub fn prepend_objc_header(
         ctx: &BindgenContext,
-        result: &mut Vec<proc_macro2::TokenStream>,
+        result: &mut Vec<quote::Tokens>,
     ) {
         let use_objc = if ctx.options().objc_extern_crate {
             quote! {
                 #[macro_use]
                 extern crate objc;
             }
         } else {
             quote! {
@@ -3618,17 +3613,17 @@ mod utils {
 
         let items = vec![use_objc, id_type];
         let old_items = mem::replace(result, items);
         result.extend(old_items.into_iter());
     }
 
     pub fn prepend_block_header(
         ctx: &BindgenContext,
-        result: &mut Vec<proc_macro2::TokenStream>,
+        result: &mut Vec<quote::Tokens>,
     ) {
         let use_block = if ctx.options().block_extern_crate {
             quote! {
                 extern crate block;
             }
         } else {
             quote! {
                 use block;
@@ -3637,17 +3632,17 @@ mod utils {
 
         let items = vec![use_block];
         let old_items = mem::replace(result, items);
         result.extend(old_items.into_iter());
     }
 
     pub fn prepend_union_types(
         ctx: &BindgenContext,
-        result: &mut Vec<proc_macro2::TokenStream>,
+        result: &mut Vec<quote::Tokens>,
     ) {
         let prefix = ctx.trait_prefix();
 
         // TODO(emilio): The fmt::Debug impl could be way nicer with
         // std::intrinsics::type_name, but...
         let union_field_decl = quote! {
             #[repr(C)]
             pub struct __BindgenUnionField<T>(::#prefix::marker::PhantomData<T>);
@@ -3736,32 +3731,32 @@ mod utils {
                          union_field_eq_impl];
 
         let old_items = mem::replace(result, items);
         result.extend(old_items.into_iter());
     }
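
// Illustrative sketch of where __BindgenUnionField lands (names hypothetical,
// wrapper re-declared so the sketch stands alone): a C union that can't be a
// Rust `union` becomes a struct of zero-sized wrappers plus one plain field
// that pins the size and alignment.
#[repr(C)]
pub struct __BindgenUnionField<T>(::std::marker::PhantomData<T>);

#[repr(C)]
pub struct float_or_bits {
    pub as_float: __BindgenUnionField<f32>,
    pub as_bits: __BindgenUnionField<u32>,
    pub bindgen_union_field: u32,
}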
 
     pub fn prepend_incomplete_array_types(
         ctx: &BindgenContext,
-        result: &mut Vec<proc_macro2::TokenStream>,
+        result: &mut Vec<quote::Tokens>,
     ) {
         let prefix = ctx.trait_prefix();
 
         let incomplete_array_decl = quote! {
             #[repr(C)]
             #[derive(Default)]
             pub struct __IncompleteArrayField<T>(
-                ::#prefix::marker::PhantomData<T>, [T; 0]);
+                ::#prefix::marker::PhantomData<T>);
         };
 
         let incomplete_array_impl = quote! {
             impl<T> __IncompleteArrayField<T> {
                 #[inline]
                 pub fn new() -> Self {
-                    __IncompleteArrayField(::#prefix::marker::PhantomData, [])
+                    __IncompleteArrayField(::#prefix::marker::PhantomData)
                 }
 
                 #[inline]
                 pub unsafe fn as_ptr(&self) -> *const T {
                     ::#prefix::mem::transmute(self)
                 }
 
                 #[inline]
@@ -3794,27 +3789,32 @@ mod utils {
             impl<T> ::#prefix::clone::Clone for __IncompleteArrayField<T> {
                 #[inline]
                 fn clone(&self) -> Self {
                     Self::new()
                 }
             }
         };
 
+        let incomplete_array_copy_impl = quote! {
+            impl<T> ::#prefix::marker::Copy for __IncompleteArrayField<T> {}
+        };
+
         let items = vec![incomplete_array_decl,
                          incomplete_array_impl,
                          incomplete_array_debug_impl,
-                         incomplete_array_clone_impl];
+                         incomplete_array_clone_impl,
+                         incomplete_array_copy_impl];
 
         let old_items = mem::replace(result, items);
         result.extend(old_items.into_iter());
     }
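
// Illustrative sketch of where __IncompleteArrayField lands (names
// hypothetical, wrapper re-declared so the sketch stands alone): a C struct
// with a flexible array member such as `struct msg { int len; char data[]; }`
// comes out roughly as:
#[repr(C)]
#[derive(Default)]
pub struct __IncompleteArrayField<T>(::std::marker::PhantomData<T>);

#[repr(C)]
pub struct msg {
    pub len: ::std::os::raw::c_int,
    pub data: __IncompleteArrayField<::std::os::raw::c_char>,
}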
 
     pub fn prepend_complex_type(
-        result: &mut Vec<proc_macro2::TokenStream>,
+        result: &mut Vec<quote::Tokens>,
     ) {
         let complex_type = quote! {
             #[derive(PartialEq, Copy, Clone, Hash, Debug, Default)]
             #[repr(C)]
             pub struct __BindgenComplex<T> {
                 pub re: T,
                 pub im: T
             }
@@ -3823,34 +3823,38 @@ mod utils {
         let items = vec![complex_type];
         let old_items = mem::replace(result, items);
         result.extend(old_items.into_iter());
     }
 
     pub fn build_path(
         item: &Item,
         ctx: &BindgenContext,
-    ) -> error::Result<proc_macro2::TokenStream> {
+    ) -> error::Result<quote::Tokens> {
+        use proc_macro2::{Term, Span};
+
         let path = item.namespace_aware_canonical_path(ctx);
-        let tokens = proc_macro2::TokenStream::from_str(&path.join("::")).unwrap();
+        let path = Term::new(&path.join("::"), Span::call_site());
+        let tokens = quote! {#path};
+        //tokens.append_separated(path, "::");
 
         Ok(tokens)
     }
 
-    fn primitive_ty(ctx: &BindgenContext, name: &str) -> proc_macro2::TokenStream {
+    fn primitive_ty(ctx: &BindgenContext, name: &str) -> quote::Tokens {
         let ident = ctx.rust_ident_raw(name);
         quote! {
             #ident
         }
     }
 
     pub fn type_from_named(
         ctx: &BindgenContext,
         name: &str,
-    ) -> Option<proc_macro2::TokenStream> {
+    ) -> Option<quote::Tokens> {
         // FIXME: We could use the inner item to check this is really a
         // primitive type but, who the heck overrides these anyway?
         Some(match name {
             "int8_t" => primitive_ty(ctx, "i8"),
             "uint8_t" => primitive_ty(ctx, "u8"),
             "int16_t" => primitive_ty(ctx, "i16"),
             "uint16_t" => primitive_ty(ctx, "u16"),
             "int32_t" => primitive_ty(ctx, "i32"),
@@ -3863,32 +3867,32 @@ mod utils {
             "intptr_t" | "ptrdiff_t" | "ssize_t" => primitive_ty(ctx, "isize"),
             _ => return None,
         })
     }
 
     pub fn fnsig_return_ty(
         ctx: &BindgenContext,
         sig: &FunctionSig,
-    ) -> proc_macro2::TokenStream {
+    ) -> quote::Tokens {
         let return_item = ctx.resolve_item(sig.return_type());
         if let TypeKind::Void = *return_item.kind().expect_type().kind() {
             quote! { }
         } else {
             let ret_ty = return_item.to_rust_ty_or_opaque(ctx, &());
             quote! {
                 -> #ret_ty
             }
         }
     }
 
     pub fn fnsig_arguments(
         ctx: &BindgenContext,
         sig: &FunctionSig,
-    ) -> Vec<proc_macro2::TokenStream> {
+    ) -> Vec<quote::Tokens> {
         use super::ToPtr;
 
         let mut unnamed_arguments = 0;
         let mut args = sig.argument_types().iter().map(|&(ref name, ty)| {
             let arg_item = ctx.resolve_item(ty);
             let arg_ty = arg_item.kind().expect_type();
 
             // From the C90 standard[1]:
@@ -3941,27 +3945,27 @@ mod utils {
         }
 
         args
     }
 
     pub fn fnsig_block(
         ctx: &BindgenContext,
         sig: &FunctionSig,
-    ) -> proc_macro2::TokenStream {
+    ) -> quote::Tokens {
         let args = sig.argument_types().iter().map(|&(_, ty)| {
             let arg_item = ctx.resolve_item(ty);
 
             arg_item.to_rust_ty_or_opaque(ctx, &())
         });
 
         let return_item = ctx.resolve_item(sig.return_type());
         let ret_ty = if let TypeKind::Void = *return_item.kind().expect_type().kind() {
             quote! { () }
         } else {
             return_item.to_rust_ty_or_opaque(ctx, &())
         };
 
         quote! {
-            *const ::block::Block<(#(#args,)*), #ret_ty>
+            *const ::block::Block<(#(#args),*), #ret_ty>
         }
     }
 }
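
A minimal, self-contained sketch of how generated bindings use the __IncompleteArrayField helper reverted above, assuming the PhantomData-only layout this patch restores; the PacketHeader type and the as_slice convenience are illustrative and not part of the hunk:

    use std::marker::PhantomData;
    use std::slice;

    #[repr(C)]
    #[derive(Default)]
    pub struct __IncompleteArrayField<T>(PhantomData<T>);

    impl<T> __IncompleteArrayField<T> {
        pub fn new() -> Self {
            __IncompleteArrayField(PhantomData)
        }
        // Pointer to the storage that follows the struct in memory.
        pub unsafe fn as_ptr(&self) -> *const T {
            self as *const _ as *const T
        }
        // View `len` trailing elements as a slice; the caller must know `len`.
        pub unsafe fn as_slice(&self, len: usize) -> &[T] {
            slice::from_raw_parts(self.as_ptr(), len)
        }
    }

    // Roughly what bindings look like for a C struct ending in a flexible
    // array member, e.g. `struct PacketHeader { unsigned len; unsigned char data[]; }`.
    #[repr(C)]
    pub struct PacketHeader {
        pub len: u32,
        pub data: __IncompleteArrayField<u8>,
    }

    fn main() {
        let header = PacketHeader { len: 0, data: __IncompleteArrayField::new() };
        // With len == 0 this is a valid, empty view of the trailing storage.
        let bytes = unsafe { header.data.as_slice(header.len as usize) };
        assert!(bytes.is_empty());
    }
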
--- a/third_party/rust/bindgen/src/codegen/struct_layout.rs
+++ b/third_party/rust/bindgen/src/codegen/struct_layout.rs
@@ -1,17 +1,18 @@
 //! Helpers for code generation that need struct layout
 
 use super::helpers;
 
 use ir::comp::CompInfo;
 use ir::context::BindgenContext;
 use ir::layout::Layout;
 use ir::ty::{Type, TypeKind};
-use proc_macro2::{self, Ident, Span};
+use quote;
+use proc_macro2::{Term, Span};
 use std::cmp;
 
 /// Trace the layout of a struct.
 #[derive(Debug)]
 pub struct StructLayoutTracker<'a> {
     name: &'a str,
     ctx: &'a BindgenContext,
     comp: &'a CompInfo,
@@ -148,17 +149,17 @@ impl<'a> StructLayoutTracker<'a> {
 
     /// Add a padding field if necessary for a given new field _before_ adding
     /// that field.
     pub fn pad_field(
         &mut self,
         field_name: &str,
         field_ty: &Type,
         field_offset: Option<usize>,
-    ) -> Option<proc_macro2::TokenStream> {
+    ) -> Option<quote::Tokens> {
         let mut field_layout = field_ty.layout(self.ctx)?;
 
         if let TypeKind::Array(inner, len) =
             *field_ty.canonical_type(self.ctx).kind()
         {
             // FIXME(emilio): As an _ultra_ hack, we correct the layout returned
             // by arrays of structs that have a bigger alignment than what we
             // can support.
@@ -230,17 +231,17 @@ impl<'a> StructLayoutTracker<'a> {
             field_name,
             self.latest_offset - field_layout.size,
             self.latest_offset
         );
 
         padding_layout.map(|layout| self.padding_field(layout))
     }
 
-    pub fn pad_struct(&mut self, layout: Layout) -> Option<proc_macro2::TokenStream> {
+    pub fn pad_struct(&mut self, layout: Layout) -> Option<quote::Tokens> {
         debug!(
             "pad_struct:\n\tself = {:#?}\n\tlayout = {:#?}",
             self,
             layout
         );
 
         if layout.size < self.latest_offset {
             error!(
@@ -304,23 +305,23 @@ impl<'a> StructLayoutTracker<'a> {
         // repr(align).
         repr_align || layout.align <= self.ctx.target_pointer_size()
     }
 
     fn padding_bytes(&self, layout: Layout) -> usize {
         align_to(self.latest_offset, layout.align) - self.latest_offset
     }
 
-    fn padding_field(&mut self, layout: Layout) -> proc_macro2::TokenStream {
+    fn padding_field(&mut self, layout: Layout) -> quote::Tokens {
         let ty = helpers::blob(self.ctx, layout);
         let padding_count = self.padding_count;
 
         self.padding_count += 1;
 
-        let padding_field_name = Ident::new(&format!("__bindgen_padding_{}", padding_count), Span::call_site());
+        let padding_field_name = Term::new(&format!("__bindgen_padding_{}", padding_count), Span::call_site());
 
         self.max_field_align = cmp::max(self.max_field_align, layout.align);
 
         quote! {
             pub #padding_field_name : #ty ,
         }
     }
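
The pad_field/pad_struct helpers above boil down to rounding the running offset up to the next field's alignment and emitting a [u8; N] padding field for the difference. A small standalone sketch of that arithmetic (function names follow struct_layout.rs, but this is a simplified re-statement, not the patched code):

    // Round `size` up to the next multiple of `align`.
    fn align_to(size: usize, align: usize) -> usize {
        let rem = size % align;
        if rem == 0 { size } else { size + align - rem }
    }

    // How many padding bytes a __bindgen_padding_N field would need before a
    // field with the given alignment, starting from the current offset.
    fn padding_bytes(latest_offset: usize, align: usize) -> usize {
        align_to(latest_offset, align) - latest_offset
    }

    fn main() {
        // A field needing 4-byte alignment at offset 5 gets 3 padding bytes.
        assert_eq!(padding_bytes(5, 4), 3);
        // Already aligned: no padding field is emitted.
        assert_eq!(padding_bytes(8, 4), 0);
    }
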
 
--- a/third_party/rust/bindgen/src/extra_assertions.rs
+++ b/third_party/rust/bindgen/src/extra_assertions.rs
@@ -1,29 +1,25 @@
 //! Macros for defining extra assertions that should only be checked in testing
 //! and/or CI when the `testing_only_extra_assertions` feature is enabled.
 
-/// Simple macro that forwards to assert! when using
-/// testing_only_extra_assertions.
 #[macro_export]
 macro_rules! extra_assert {
     ( $cond:expr ) => {
         if cfg!(feature = "testing_only_extra_assertions") {
             assert!($cond);
         }
     };
     ( $cond:expr , $( $arg:tt )+ ) => {
         if cfg!(feature = "testing_only_extra_assertions") {
             assert!($cond, $( $arg )* )
         }
     };
 }
 
-/// Simple macro that forwards to assert_eq! when using
-/// testing_only_extra_assertions.
 #[macro_export]
 macro_rules! extra_assert_eq {
     ( $lhs:expr , $rhs:expr ) => {
         if cfg!(feature = "testing_only_extra_assertions") {
             assert_eq!($lhs, $rhs);
         }
     };
     ( $lhs:expr , $rhs:expr , $( $arg:tt )+ ) => {
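
These macros simply forward to assert!/assert_eq! when the crate is built with the testing_only_extra_assertions feature and compile to nothing otherwise. A minimal standalone sketch of the pattern (feature name as in the file above; the surrounding program is illustrative):

    macro_rules! extra_assert {
        ( $cond:expr ) => {
            if cfg!(feature = "testing_only_extra_assertions") {
                assert!($cond);
            }
        };
    }

    fn main() {
        let items: Vec<u32> = vec![1, 2, 3];
        // The condition is only evaluated when the feature is enabled;
        // otherwise the branch is statically false and compiles away.
        extra_assert!(items.iter().all(|&i| i > 0));
    }
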
--- a/third_party/rust/bindgen/src/features.rs
+++ b/third_party/rust/bindgen/src/features.rs
@@ -93,22 +93,16 @@ macro_rules! rust_target_base {
             /// Rust stable 1.20
             => Stable_1_20 => 1.20;
             /// Rust stable 1.21
             => Stable_1_21 => 1.21;
             /// Rust stable 1.25
             => Stable_1_25 => 1.25;
             /// Rust stable 1.26
             => Stable_1_26 => 1.26;
-            /// Rust stable 1.27
-            => Stable_1_27 => 1.27;
-            /// Rust stable 1.28
-            => Stable_1_28 => 1.28;
-            /// Rust stable 1.33
-            => Stable_1_33 => 1.33;
             /// Nightly rust
             => Nightly => nightly;
         );
     }
 }
 
 rust_target_base!(rust_target_def);
 rust_target_base!(rust_target_values_def);
@@ -179,28 +173,16 @@ rust_feature_def!(
     Stable_1_25 {
         /// repr(align) ([PR](https://github.com/rust-lang/rust/pull/47006))
         => repr_align;
     }
     Stable_1_26 {
         /// [i128 / u128 support](https://doc.rust-lang.org/std/primitive.i128.html)
         => i128_and_u128;
     }
-    Stable_1_27 {
-        /// `must_use` attribute on functions ([PR](https://github.com/rust-lang/rust/pull/48925))
-        => must_use_function;
-    }
-    Stable_1_28 {
-        /// repr(transparent) ([PR](https://github.com/rust-lang/rust/pull/51562))
-        => repr_transparent;
-    }
-    Stable_1_33 {
-        /// repr(packed(N)) ([PR](https://github.com/rust-lang/rust/pull/57049))
-        => repr_packed_n;
-    }
     Nightly {
         /// `thiscall` calling convention ([Tracking issue](https://github.com/rust-lang/rust/issues/42202))
         => thiscall_abi;
     }
 );
 
 impl Default for RustFeatures {
     fn default() -> Self {
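
The rust_target_base!/rust_feature_def! macros above pair each supported Rust target with the set of feature flags codegen may rely on (this backout drops the 1.27/1.28/1.33 targets again). A hedged sketch of that pattern using hand-rolled, simplified types, not bindgen's real API:

    #[allow(non_camel_case_types)]
    #[derive(Clone, Copy, PartialEq, PartialOrd)]
    enum RustTarget {
        Stable_1_25,
        Stable_1_26,
        Nightly,
    }

    struct RustFeatures {
        repr_align: bool,
        i128_and_u128: bool,
        thiscall_abi: bool,
    }

    impl From<RustTarget> for RustFeatures {
        fn from(target: RustTarget) -> Self {
            // Each newer target unlocks everything older targets already had.
            RustFeatures {
                repr_align: target >= RustTarget::Stable_1_25,
                i128_and_u128: target >= RustTarget::Stable_1_26,
                thiscall_abi: target >= RustTarget::Nightly,
            }
        }
    }

    fn main() {
        let features = RustFeatures::from(RustTarget::Stable_1_26);
        assert!(features.repr_align && features.i128_and_u128);
        assert!(!features.thiscall_abi);
    }
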
--- a/third_party/rust/bindgen/src/ir/analysis/derive_copy.rs
+++ b/third_party/rust/bindgen/src/ir/analysis/derive_copy.rs
@@ -6,17 +6,18 @@ use ir::comp::Field;
 use ir::comp::FieldMethods;
 use ir::context::{BindgenContext, ItemId};
 use ir::derive::CanTriviallyDeriveCopy;
 use ir::item::IsOpaque;
 use ir::template::TemplateParameters;
 use ir::traversal::EdgeKind;
 use ir::ty::RUST_DERIVE_IN_ARRAY_LIMIT;
 use ir::ty::TypeKind;
-use {HashMap, HashSet};
+use std::collections::HashMap;
+use std::collections::HashSet;
 
 /// An analysis that finds for each IR item whether copy cannot be derived.
 ///
 /// We use the monotone constraint function `cannot_derive_copy`, defined as
 /// follows:
 ///
 /// * If T is Opaque and layout of the type is known, get this layout as opaque
 ///   type and check whether it can be derived using trivial checks.
@@ -97,17 +98,17 @@ impl<'ctx> CannotDeriveCopy<'ctx> {
 }
 
 impl<'ctx> MonotoneFramework for CannotDeriveCopy<'ctx> {
     type Node = ItemId;
     type Extra = &'ctx BindgenContext;
     type Output = HashSet<ItemId>;
 
     fn new(ctx: &'ctx BindgenContext) -> CannotDeriveCopy<'ctx> {
-        let cannot_derive_copy = HashSet::default();
+        let cannot_derive_copy = HashSet::new();
         let dependencies = generate_dependencies(ctx, Self::consider_edge);
 
         CannotDeriveCopy {
             ctx,
             cannot_derive_copy,
             dependencies,
         }
     }
--- a/third_party/rust/bindgen/src/ir/analysis/derive_debug.rs
+++ b/third_party/rust/bindgen/src/ir/analysis/derive_debug.rs
@@ -5,17 +5,18 @@ use ir::comp::CompKind;
 use ir::comp::Field;
 use ir::comp::FieldMethods;
 use ir::context::{BindgenContext, ItemId};
 use ir::derive::CanTriviallyDeriveDebug;
 use ir::item::IsOpaque;
 use ir::traversal::EdgeKind;
 use ir::ty::RUST_DERIVE_IN_ARRAY_LIMIT;
 use ir::ty::TypeKind;
-use {HashMap, HashSet};
+use std::collections::HashMap;
+use std::collections::HashSet;
 
 /// An analysis that finds for each IR item whether debug cannot be derived.
 ///
 /// We use the monotone constraint function `cannot_derive_debug`, defined as
 /// follows:
 ///
 /// * If T is Opaque and layout of the type is known, get this layout as opaque
 ///   type and check whether it can be derived using trivial checks.
@@ -98,17 +99,17 @@ impl<'ctx> CannotDeriveDebug<'ctx> {
 }
 
 impl<'ctx> MonotoneFramework for CannotDeriveDebug<'ctx> {
     type Node = ItemId;
     type Extra = &'ctx BindgenContext;
     type Output = HashSet<ItemId>;
 
     fn new(ctx: &'ctx BindgenContext) -> CannotDeriveDebug<'ctx> {
-        let cannot_derive_debug = HashSet::default();
+        let cannot_derive_debug = HashSet::new();
         let dependencies = generate_dependencies(ctx, Self::consider_edge);
 
         CannotDeriveDebug {
             ctx,
             cannot_derive_debug,
             dependencies,
         }
     }
--- a/third_party/rust/bindgen/src/ir/analysis/derive_default.rs
+++ b/third_party/rust/bindgen/src/ir/analysis/derive_default.rs
@@ -7,17 +7,18 @@ use ir::comp::FieldMethods;
 use ir::context::{BindgenContext, ItemId};
 use ir::derive::CanTriviallyDeriveDefault;
 use ir::item::IsOpaque;
 use ir::item::ItemSet;
 use ir::traversal::EdgeKind;
 use ir::traversal::Trace;
 use ir::ty::RUST_DERIVE_IN_ARRAY_LIMIT;
 use ir::ty::TypeKind;
-use {HashMap, HashSet};
+use std::collections::HashMap;
+use std::collections::HashSet;
 
 /// An analysis that finds for each IR item whether default cannot be derived.
 ///
 /// We use the monotone constraint function `cannot_derive_default`, defined as
 /// follows:
 ///
 /// * If T is Opaque and layout of the type is known, get this layout as opaque
 ///   type and check whether it can be derived using trivial checks.
@@ -93,18 +94,18 @@ impl<'ctx> CannotDeriveDefault<'ctx> {
 }
 
 impl<'ctx> MonotoneFramework for CannotDeriveDefault<'ctx> {
     type Node = ItemId;
     type Extra = &'ctx BindgenContext;
     type Output = HashSet<ItemId>;
 
     fn new(ctx: &'ctx BindgenContext) -> CannotDeriveDefault<'ctx> {
-        let mut dependencies = HashMap::default();
-        let cannot_derive_default = HashSet::default();
+        let mut dependencies = HashMap::new();
+        let cannot_derive_default = HashSet::new();
 
         let whitelisted_items: HashSet<_> =
             ctx.whitelisted_items().iter().cloned().collect();
 
         let whitelisted_and_blacklisted_items: ItemSet = whitelisted_items
             .iter()
             .cloned()
             .flat_map(|i| {
--- a/third_party/rust/bindgen/src/ir/analysis/derive_hash.rs
+++ b/third_party/rust/bindgen/src/ir/analysis/derive_hash.rs
@@ -5,17 +5,18 @@ use ir::comp::CompKind;
 use ir::comp::Field;
 use ir::comp::FieldMethods;
 use ir::context::{BindgenContext, ItemId};
 use ir::derive::CanTriviallyDeriveHash;
 use ir::item::IsOpaque;
 use ir::traversal::EdgeKind;
 use ir::ty::RUST_DERIVE_IN_ARRAY_LIMIT;
 use ir::ty::TypeKind;
-use {HashMap, HashSet};
+use std::collections::HashMap;
+use std::collections::HashSet;
 
 /// An analysis that finds for each IR item whether hash cannot be derived.
 ///
 /// We use the monotone constraint function `cannot_derive_hash`, defined as
 /// follows:
 ///
 /// * If T is Opaque and layout of the type is known, get this layout as opaque
 ///   type and check whether it can be derived using trivial checks.
@@ -90,17 +91,17 @@ impl<'ctx> CannotDeriveHash<'ctx> {
 }
 
 impl<'ctx> MonotoneFramework for CannotDeriveHash<'ctx> {
     type Node = ItemId;
     type Extra = &'ctx BindgenContext;
     type Output = HashSet<ItemId>;
 
     fn new(ctx: &'ctx BindgenContext) -> CannotDeriveHash<'ctx> {
-        let cannot_derive_hash = HashSet::default();
+        let cannot_derive_hash = HashSet::new();
         let dependencies = generate_dependencies(ctx, Self::consider_edge);
 
         CannotDeriveHash {
             ctx,
             cannot_derive_hash,
             dependencies,
         }
     }
--- a/third_party/rust/bindgen/src/ir/analysis/derive_partialeq_or_partialord.rs
+++ b/third_party/rust/bindgen/src/ir/analysis/derive_partialeq_or_partialord.rs
@@ -4,17 +4,18 @@
 use super::{ConstrainResult, MonotoneFramework, generate_dependencies};
 use ir::comp::CompKind;
 use ir::context::{BindgenContext, ItemId};
 use ir::derive::{CanTriviallyDerivePartialEqOrPartialOrd, CanDerive};
 use ir::item::{Item, IsOpaque};
 use ir::traversal::{EdgeKind, Trace};
 use ir::ty::RUST_DERIVE_IN_ARRAY_LIMIT;
 use ir::ty::{TypeKind, Type};
-use {HashMap, Entry};
+use std::collections::HashMap;
+use std::collections::hash_map::Entry;
 
 /// An analysis that finds for each IR item whether `PartialEq`/`PartialOrd`
 /// cannot be derived.
 ///
 /// We use the monotone constraint function
 /// `cannot_derive_partialeq_or_partialord`, defined as follows:
 ///
 /// * If T is Opaque and layout of the type is known, get this layout as opaque
@@ -320,17 +321,17 @@ impl<'ctx> CannotDerivePartialEqOrPartia
 impl<'ctx> MonotoneFramework for CannotDerivePartialEqOrPartialOrd<'ctx> {
     type Node = ItemId;
     type Extra = &'ctx BindgenContext;
     type Output = HashMap<ItemId, CanDerive>;
 
     fn new(
         ctx: &'ctx BindgenContext,
     ) -> CannotDerivePartialEqOrPartialOrd<'ctx> {
-        let can_derive_partialeq_or_partialord = HashMap::default();
+        let can_derive_partialeq_or_partialord = HashMap::new();
         let dependencies = generate_dependencies(ctx, Self::consider_edge);
 
         CannotDerivePartialEqOrPartialOrd {
             ctx,
             can_derive_partialeq_or_partialord,
             dependencies,
         }
     }
--- a/third_party/rust/bindgen/src/ir/analysis/has_destructor.rs
+++ b/third_party/rust/bindgen/src/ir/analysis/has_destructor.rs
@@ -1,16 +1,17 @@
 //! Determining which types have destructors
 
 use super::{ConstrainResult, MonotoneFramework, generate_dependencies};
 use ir::context::{BindgenContext, ItemId};
 use ir::traversal::EdgeKind;
 use ir::comp::{CompKind, Field, FieldMethods};
 use ir::ty::TypeKind;
-use {HashMap, HashSet};
+use std::collections::HashMap;
+use std::collections::HashSet;
 
 /// An analysis that finds for each IR item whether it has a destructor or not
 ///
 /// We use the monotone function `has destructor`, defined as follows:
 ///
 /// * If T is a type alias, a templated alias, or an indirection to another type,
 ///   T has a destructor if the type T refers to has a destructor.
 /// * If T is a compound type, T has a destructor if we saw a destructor when parsing it,
@@ -67,17 +68,17 @@ impl<'ctx> HasDestructorAnalysis<'ctx> {
 }
 
 impl<'ctx> MonotoneFramework for HasDestructorAnalysis<'ctx> {
     type Node = ItemId;
     type Extra = &'ctx BindgenContext;
     type Output = HashSet<ItemId>;
 
     fn new(ctx: &'ctx BindgenContext) -> Self {
-        let have_destructor = HashSet::default();
+        let have_destructor = HashSet::new();
         let dependencies = generate_dependencies(ctx, Self::consider_edge);
 
         HasDestructorAnalysis {
             ctx,
             have_destructor,
             dependencies,
         }
     }
--- a/third_party/rust/bindgen/src/ir/analysis/has_float.rs
+++ b/third_party/rust/bindgen/src/ir/analysis/has_float.rs
@@ -1,12 +1,13 @@
 //! Determining which types have a float.
 
 use super::{ConstrainResult, MonotoneFramework, generate_dependencies};
-use {HashSet, HashMap};
+use std::collections::HashSet;
+use std::collections::HashMap;
 use ir::context::{BindgenContext, ItemId};
 use ir::traversal::EdgeKind;
 use ir::ty::TypeKind;
 use ir::comp::Field;
 use ir::comp::FieldMethods;
 
 /// An analysis that finds for each IR item whether it has a float or not.
 ///
@@ -78,17 +79,17 @@ impl<'ctx> HasFloat<'ctx> {
 }
 
 impl<'ctx> MonotoneFramework for HasFloat<'ctx> {
     type Node = ItemId;
     type Extra = &'ctx BindgenContext;
     type Output = HashSet<ItemId>;
 
     fn new(ctx: &'ctx BindgenContext) -> HasFloat<'ctx> {
-        let has_float = HashSet::default();
+        let has_float = HashSet::new();
         let dependencies = generate_dependencies(ctx, Self::consider_edge);
 
         HasFloat {
             ctx,
             has_float,
             dependencies,
         }
     }
--- a/third_party/rust/bindgen/src/ir/analysis/has_type_param_in_array.rs
+++ b/third_party/rust/bindgen/src/ir/analysis/has_type_param_in_array.rs
@@ -1,17 +1,18 @@
 //! Determining which types have type parameters in arrays.
 
 use super::{ConstrainResult, MonotoneFramework, generate_dependencies};
 use ir::comp::Field;
 use ir::comp::FieldMethods;
 use ir::context::{BindgenContext, ItemId};
 use ir::traversal::EdgeKind;
 use ir::ty::TypeKind;
-use {HashMap, HashSet};
+use std::collections::HashMap;
+use std::collections::HashSet;
 
 /// An analysis that finds for each IR item whether it has an array or not.
 ///
 /// We use the monotone constraint function `has_type_parameter_in_array`,
 /// defined as follows:
 ///
 /// * If T is an array type with a type parameter, it trivially has one.
 /// * If T is a type alias, a templated alias or an indirection to another type,
@@ -86,17 +87,17 @@ impl<'ctx> HasTypeParameterInArray<'ctx>
 impl<'ctx> MonotoneFramework for HasTypeParameterInArray<'ctx> {
     type Node = ItemId;
     type Extra = &'ctx BindgenContext;
     type Output = HashSet<ItemId>;
 
     fn new(
         ctx: &'ctx BindgenContext,
     ) -> HasTypeParameterInArray<'ctx> {
-        let has_type_parameter_in_array = HashSet::default();
+        let has_type_parameter_in_array = HashSet::new();
         let dependencies = generate_dependencies(ctx, Self::consider_edge);
 
         HasTypeParameterInArray {
             ctx,
             has_type_parameter_in_array,
             dependencies,
         }
     }
--- a/third_party/rust/bindgen/src/ir/analysis/has_vtable.rs
+++ b/third_party/rust/bindgen/src/ir/analysis/has_vtable.rs
@@ -1,17 +1,18 @@
 //! Determining which types have a vtable
 
 use super::{ConstrainResult, MonotoneFramework, generate_dependencies};
 use ir::context::{BindgenContext, ItemId};
 use ir::traversal::EdgeKind;
 use ir::ty::TypeKind;
 use std::cmp;
+use std::collections::HashMap;
+use std::collections::hash_map::Entry;
 use std::ops;
-use {HashMap, Entry};
 
 /// The result of the `HasVtableAnalysis` for an individual item.
 #[derive(Copy, Clone, Debug, PartialEq, Eq, Ord)]
 pub enum HasVtableResult {
     /// The item has a vtable, but the actual vtable pointer is in a base
     /// member.
     BaseHasVtable,
 
@@ -142,17 +143,17 @@ impl<'ctx> HasVtableAnalysis<'ctx> {
 }
 
 impl<'ctx> MonotoneFramework for HasVtableAnalysis<'ctx> {
     type Node = ItemId;
     type Extra = &'ctx BindgenContext;
     type Output = HashMap<ItemId, HasVtableResult>;
 
     fn new(ctx: &'ctx BindgenContext) -> HasVtableAnalysis<'ctx> {
-        let have_vtable = HashMap::default();
+        let have_vtable = HashMap::new();
         let dependencies = generate_dependencies(ctx, Self::consider_edge);
 
         HasVtableAnalysis {
             ctx,
             have_vtable,
             dependencies,
         }
     }
--- a/third_party/rust/bindgen/src/ir/analysis/mod.rs
+++ b/third_party/rust/bindgen/src/ir/analysis/mod.rs
@@ -59,17 +59,17 @@ pub use self::derive_partialeq_or_partia
 mod has_float;
 pub use self::has_float::HasFloat;
 mod sizedness;
 pub use self::sizedness::{Sizedness, SizednessAnalysis, SizednessResult};
 
 use ir::context::{BindgenContext, ItemId};
 
 use ir::traversal::{EdgeKind, Trace};
-use HashMap;
+use std::collections::HashMap;
 use std::fmt;
 use std::ops;
 
 /// An analysis in the monotone framework.
 ///
 /// Implementors of this trait must maintain the following two invariants:
 ///
 /// 1. The concrete data must be a member of a finite-height lattice.
@@ -185,17 +185,17 @@ where
 /// Generate the dependency map for analysis
 pub fn generate_dependencies<F>(
     ctx: &BindgenContext,
     consider_edge: F,
 ) -> HashMap<ItemId, Vec<ItemId>>
 where
     F: Fn(EdgeKind) -> bool,
 {
-    let mut dependencies = HashMap::default();
+    let mut dependencies = HashMap::new();
 
     for &item in ctx.whitelisted_items() {
         dependencies.entry(item).or_insert(vec![]);
 
         {
             // We reverse our natural IR graph edges to find dependencies
             // between nodes.
             item.trace(
@@ -214,17 +214,17 @@ where
         }
     }
     dependencies
 }
 
 #[cfg(test)]
 mod tests {
     use super::*;
-    use {HashMap, HashSet};
+    use std::collections::{HashMap, HashSet};
 
     // Here we find the set of nodes that are reachable from any given
     // node. This is a lattice mapping nodes to subsets of all nodes. Our join
     // function is set union.
     //
     // This is our test graph:
     //
     //     +---+                    +---+
@@ -329,24 +329,24 @@ mod tests {
             //     reachable(x) = s_0 U s_1 U ... U reachable(s_0) U reachable(s_1) U ...
             //
             // where there exist edges from `x` to each of `s_0, s_1, ...`.
             //
             // Yes, what follows is a **terribly** inefficient set union
             // implementation. Don't copy this code outside of this test!
 
             let original_size =
-                self.reachable.entry(node).or_insert(HashSet::default()).len();
+                self.reachable.entry(node).or_insert(HashSet::new()).len();
 
             for sub_node in self.graph.0[&node].iter() {
                 self.reachable.get_mut(&node).unwrap().insert(*sub_node);
 
                 let sub_reachable = self.reachable
                     .entry(*sub_node)
-                    .or_insert(HashSet::default())
+                    .or_insert(HashSet::new())
                     .clone();
 
                 for transitive in sub_reachable {
                     self.reachable.get_mut(&node).unwrap().insert(transitive);
                 }
             }
 
             let new_size = self.reachable[&node].len();
@@ -381,17 +381,17 @@ mod tests {
 
         fn nodes<A>(nodes: A) -> HashSet<Node>
         where
             A: AsRef<[usize]>,
         {
             nodes.as_ref().iter().cloned().map(Node).collect()
         }
 
-        let mut expected = HashMap::default();
+        let mut expected = HashMap::new();
         expected.insert(Node(1), nodes([3, 4, 5, 6, 7, 8]));
         expected.insert(Node(2), nodes([2]));
         expected.insert(Node(3), nodes([3, 4, 5, 6, 7, 8]));
         expected.insert(Node(4), nodes([3, 4, 5, 6, 7, 8]));
         expected.insert(Node(5), nodes([3, 4, 5, 6, 7, 8]));
         expected.insert(Node(6), nodes([8]));
         expected.insert(Node(7), nodes([3, 4, 5, 6, 7, 8]));
         expected.insert(Node(8), nodes([]));
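
The derive_*/has_* passes in this directory all instantiate the MonotoneFramework trait described above: start every node at the bottom of a finite-height lattice and keep applying the constrain function until nothing changes. A compact standalone sketch of the same fixed-point idea, computing graph reachability like the test module above (naive iteration instead of a worklist; types simplified):

    use std::collections::{HashMap, HashSet};

    // For each node, the set of nodes reachable from it. Assumes every node
    // appears as a key of `graph`.
    fn reachable(graph: &HashMap<u32, Vec<u32>>) -> HashMap<u32, HashSet<u32>> {
        let mut out: HashMap<u32, HashSet<u32>> =
            graph.keys().map(|&n| (n, HashSet::new())).collect();
        loop {
            let mut changed = false;
            for (&node, succs) in graph {
                for &succ in succs {
                    // Join: node reaches succ and everything succ reaches.
                    let mut add = out[&succ].clone();
                    add.insert(succ);
                    let entry = out.get_mut(&node).unwrap();
                    for n in add {
                        changed |= entry.insert(n);
                    }
                }
            }
            // Monotone updates over a finite lattice: this terminates at a
            // fixed point once a full pass makes no set grow.
            if !changed {
                return out;
            }
        }
    }

    fn main() {
        let mut graph = HashMap::new();
        graph.insert(1, vec![2]);
        graph.insert(2, vec![3]);
        graph.insert(3, vec![]);
        let result = reachable(&graph);
        assert!(result[&1].contains(&3));
    }
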
--- a/third_party/rust/bindgen/src/ir/analysis/sizedness.rs
+++ b/third_party/rust/bindgen/src/ir/analysis/sizedness.rs
@@ -1,17 +1,19 @@
 //! Determining the sizedness of types (as base classes and otherwise).
 
 use super::{ConstrainResult, MonotoneFramework, HasVtable, generate_dependencies};
 use ir::context::{BindgenContext, TypeId};
 use ir::item::IsOpaque;
 use ir::traversal::EdgeKind;
 use ir::ty::TypeKind;
-use std::{cmp, ops};
-use {HashMap, Entry};
+use std::cmp;
+use std::collections::HashMap;
+use std::collections::hash_map::Entry;
+use std::ops;
 
 /// The result of the `Sizedness` analysis for an individual item.
 ///
 /// This is a chain lattice of the form:
 ///
 /// ```ignore
 ///                   NonZeroSized
 ///                        |
@@ -187,17 +189,17 @@ impl<'ctx> MonotoneFramework for Sizedne
                             sub_ids.into_iter()
                                 .filter_map(|s| s.as_type_id(ctx))
                                 .collect::<Vec<_>>()
                         )
                     })
             })
             .collect();
 
-        let sized = HashMap::default();
+        let sized = HashMap::new();
 
         SizednessAnalysis {
             ctx,
             dependencies,
             sized,
         }
     }
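
Unlike the boolean has_*/cannot_derive_* analyses, sizedness works over the three-point chain lattice sketched in the doc comment above, and its join takes the larger value so results only ever move up the chain. A small hedged sketch of that lattice (variant names follow the file's documentation; the join helper here is illustrative):

    use std::cmp;

    #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
    enum SizednessResult {
        // Bottom of the chain: the type is zero-sized.
        ZeroSized,
        // Sizedness depends on an unbound template parameter.
        DependsOnTypeParameter,
        // Top of the chain: definitely has a nonzero size.
        NonZeroSized,
    }

    impl SizednessResult {
        // The join of a chain lattice is just the maximum of the two values.
        fn join(self, rhs: Self) -> Self {
            cmp::max(self, rhs)
        }
    }

    fn main() {
        use SizednessResult::*;
        assert_eq!(ZeroSized.join(NonZeroSized), NonZeroSized);
        assert_eq!(DependsOnTypeParameter.join(ZeroSized), DependsOnTypeParameter);
    }
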
 
--- a/third_party/rust/bindgen/src/ir/analysis/template_params.rs
+++ b/third_party/rust/bindgen/src/ir/analysis/template_params.rs
@@ -89,17 +89,17 @@
 //! See `src/ir/analysis.rs` for more.
 
 use super::{ConstrainResult, MonotoneFramework};
 use ir::context::{BindgenContext, ItemId};
 use ir::item::{Item, ItemSet};
 use ir::template::{TemplateInstantiation, TemplateParameters};
 use ir::traversal::{EdgeKind, Trace};
 use ir::ty::TypeKind;
-use {HashMap, HashSet};
+use std::collections::{HashMap, HashSet};
 
 /// An analysis that finds for each IR item its set of template parameters that
 /// it uses.
 ///
 /// We use the monotone constraint function `template_param_usage`, defined as
 /// follows:
 ///
 /// * If `T` is a named template type parameter, it trivially uses itself:
@@ -368,18 +368,18 @@ impl<'ctx> UsedTemplateParameters<'ctx> 
 impl<'ctx> MonotoneFramework for UsedTemplateParameters<'ctx> {
     type Node = ItemId;
     type Extra = &'ctx BindgenContext;
     type Output = HashMap<ItemId, ItemSet>;
 
     fn new(
         ctx: &'ctx BindgenContext,
     ) -> UsedTemplateParameters<'ctx> {
-        let mut used = HashMap::default();
-        let mut dependencies = HashMap::default();
+        let mut used = HashMap::new();
+        let mut dependencies = HashMap::new();
         let whitelisted_items: HashSet<_> =
             ctx.whitelisted_items().iter().cloned().collect();
 
         let whitelisted_and_blacklisted_items: ItemSet = whitelisted_items
             .iter()
             .cloned()
             .flat_map(|i| {
                 let mut reachable = vec![i];
--- a/third_party/rust/bindgen/src/ir/annotations.rs
+++ b/third_party/rust/bindgen/src/ir/annotations.rs
@@ -53,18 +53,16 @@ pub struct Annotations {
     ///     Bar = 0, /**< <div rustbindgen constant></div> */
     ///     Baz = 0,
     /// };
     /// ```
     ///
     /// In that case, bindgen will generate a constant for `Bar` instead of
     /// `Baz`.
     constify_enum_variant: bool,
-    /// List of explicit derives for this type.
-    derives: Vec<String>,
 }
 
 fn parse_accessor(s: &str) -> FieldAccessorKind {
     match s {
         "false" => FieldAccessorKind::None,
         "unsafe" => FieldAccessorKind::Unsafe,
         "immutable" => FieldAccessorKind::Immutable,
         _ => FieldAccessorKind::Regular,
@@ -76,17 +74,16 @@ impl Default for Annotations {
         Annotations {
             opaque: false,
             hide: false,
             use_instead_of: None,
             disallow_copy: false,
             private_fields: None,
             accessor_kind: None,
             constify_enum_variant: false,
-            derives: vec![],
         }
     }
 }
 
 impl Annotations {
     /// Construct new annotations for the given cursor and its bindgen comments
     /// (if any).
     pub fn new(cursor: &clang::Cursor) -> Option<Annotations> {
@@ -128,21 +125,16 @@ impl Annotations {
     /// };
     /// ```
     ///
     /// That is, code for `Foo` is used to generate `Bar`.
     pub fn use_instead_of(&self) -> Option<&[String]> {
         self.use_instead_of.as_ref().map(|s| &**s)
     }
 
-    /// The list of derives that have been specified in this annotation.
-    pub fn derives(&self) -> &[String] {
-        &self.derives
-    }
-
     /// Should we avoid implementing the `Copy` trait?
     pub fn disallow_copy(&self) -> bool {
         self.disallow_copy
     }
 
     /// Should the fields be private?
     pub fn private_fields(&self) -> Option<bool> {
         self.private_fields
@@ -168,19 +160,16 @@ impl Annotations {
                     "hide" => self.hide = true,
                     "nocopy" => self.disallow_copy = true,
                     "replaces" => {
                         self.use_instead_of =
                             Some(
                                 attr.value.split("::").map(Into::into).collect(),
                             )
                     }
-                    "derive" => {
-                        self.derives.push(attr.value)
-                    }
                     "private" => {
                         self.private_fields = Some(attr.value != "false")
                     }
                     "accessor" => {
                         self.accessor_kind = Some(parse_accessor(&attr.value))
                     }
                     "constant" => self.constify_enum_variant = true,
                     _ => {}
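
The loop above walks the attributes found in a `<div rustbindgen ...></div>` comment and records each recognised key (this backout drops the `derive` key again). The accessor mapping it relies on is small enough to restate as a standalone sketch (enum and test values mirror parse_accessor above):

    #[derive(Debug, PartialEq)]
    enum FieldAccessorKind {
        None,
        Unsafe,
        Immutable,
        Regular,
    }

    // Same mapping as parse_accessor in annotations.rs: anything that is not
    // an explicitly recognised value falls back to a regular accessor.
    fn parse_accessor(s: &str) -> FieldAccessorKind {
        match s {
            "false" => FieldAccessorKind::None,
            "unsafe" => FieldAccessorKind::Unsafe,
            "immutable" => FieldAccessorKind::Immutable,
            _ => FieldAccessorKind::Regular,
        }
    }

    fn main() {
        assert_eq!(parse_accessor("false"), FieldAccessorKind::None);
        assert_eq!(parse_accessor("unsafe"), FieldAccessorKind::Unsafe);
        assert_eq!(parse_accessor("immutable"), FieldAccessorKind::Immutable);
        assert_eq!(parse_accessor("anything-else"), FieldAccessorKind::Regular);
    }
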
--- a/third_party/rust/bindgen/src/ir/comment.rs
+++ b/third_party/rust/bindgen/src/ir/comment.rs
@@ -43,37 +43,37 @@ fn make_indent(indent: usize) -> String 
 /// Handles lines starting with both `//` and `///`.
 fn preprocess_single_lines(comment: &str, indent: usize) -> String {
     debug_assert!(comment.starts_with("//"), "comment is not single line");
 
     let indent = make_indent(indent);
     let mut is_first = true;
     let lines: Vec<_> = comment
         .lines()
-        .map(|l| l.trim().trim_start_matches('/'))
+        .map(|l| l.trim().trim_left_matches('/'))
         .map(|l| {
             let indent = if is_first { "" } else { &*indent };
             is_first = false;
             format!("{}///{}", indent, l)
         })
         .collect();
     lines.join("\n")
 }
 
 fn preprocess_multi_line(comment: &str, indent: usize) -> String {
     let comment = comment
-        .trim_start_matches('/')
-        .trim_end_matches('/')
-        .trim_end_matches('*');
+        .trim_left_matches('/')
+        .trim_right_matches('/')
+        .trim_right_matches('*');
 
     let indent = make_indent(indent);
     // Strip any potential `*` characters preceding each line.
     let mut is_first = true;
     let mut lines: Vec<_> = comment.lines()
-        .map(|line| line.trim().trim_start_matches('*').trim_start_matches('!'))
+        .map(|line| line.trim().trim_left_matches('*').trim_left_matches('!'))
         .skip_while(|line| line.trim().is_empty()) // Skip the first empty lines.
         .map(|line| {
             let indent = if is_first { "" } else { &*indent };
             is_first = false;
             format!("{}///{}", indent, line)
         })
         .collect();
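
preprocess_multi_line strips the `/** ... */` framing and the leading `*` decoration from each line, then re-emits the text as Rust `///` doc comments. A standalone sketch of the same transformation (using the non-deprecated trim_start_matches/trim_end_matches names and ignoring the indentation handling shown above):

    fn doc_comment_lines(comment: &str) -> String {
        comment
            .trim_start_matches('/')
            .trim_end_matches('/')
            .trim_end_matches('*')
            .lines()
            // Drop the `*`/`!` decoration C-style doc comments put on each line.
            .map(|line| line.trim().trim_start_matches('*').trim_start_matches('!'))
            // Skip any leading blank lines left over from the `/**` opener.
            .skip_while(|line| line.trim().is_empty())
            .map(|line| format!("///{}", line))
            .collect::<Vec<_>>()
            .join("\n")
    }

    fn main() {
        let c = "/** A C struct.\n *  It has two fields.\n */";
        assert_eq!(doc_comment_lines(c), "/// A C struct.\n///  It has two fields.\n///");
    }
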
 
--- a/third_party/rust/bindgen/src/ir/comp.rs
+++ b/third_party/rust/bindgen/src/ir/comp.rs
@@ -12,17 +12,17 @@ use super::traversal::{EdgeKind, Trace, 
 use clang;
 use codegen::struct_layout::{align_to, bytes_from_bits_pow2};
 use ir::derive::CanDeriveCopy;
 use parse::{ClangItemParser, ParseError};
 use peeking_take_while::PeekableExt;
 use std::cmp;
 use std::io;
 use std::mem;
-use HashMap;
+use std::collections::HashMap;
 
 /// The kind of compound type.
 #[derive(Debug, Copy, Clone, PartialEq)]
 pub enum CompKind {
     /// A struct.
     Struct,
     /// A union.
     Union,
@@ -1512,20 +1512,16 @@ impl CompInfo {
                     let field_ty = ctx.resolve_type(data.ty());
                     field_ty.layout(ctx).map_or(false, |field_ty_layout| {
                         field_ty_layout.align > parent_layout.align
                     })
                 }
             }) {
                 info!("Found a struct that was defined within `#pragma packed(...)`");
                 return true;
-            } else if self.has_own_virtual_method {
-                if parent_layout.align == 1 {
-                    return true;
-                }
             }
         }
 
         false
     }
 
     /// Returns true if compound type has been forward declared
     pub fn is_forward_declaration(&self) -> bool {
@@ -1648,29 +1644,26 @@ impl IsOpaque for CompInfo {
                         .expect("Bitfield without layout? Gah!");
                     bf.width() / 8 > bitfield_layout.size as u32
                 })
             }
         }) {
             return true;
         }
 
-        if !ctx.options().rust_features().repr_packed_n {
-            // If we don't have `#[repr(packed(N)]`, the best we can
-            // do is make this struct opaque.
-            //
-            // See https://github.com/rust-lang-nursery/rust-bindgen/issues/537 and
-            // https://github.com/rust-lang/rust/issues/33158
-            if self.is_packed(ctx, layout) && layout.map_or(false, |l| l.align > 1) {
-                warn!("Found a type that is both packed and aligned to greater than \
-                       1; Rust before version 1.33 doesn't have `#[repr(packed(N))]`, so we \
-                       are treating it as opaque. You may wish to set bindgen's rust target \
-                       version to 1.33 or later to enable `#[repr(packed(N))]` support.");
-                return true;
-            }
+        // We don't have `#[repr(packed = "N")]` in Rust yet, so the best we can
+        // do is make this struct opaque.
+        //
+        // See https://github.com/rust-lang-nursery/rust-bindgen/issues/537 and
+        // https://github.com/rust-lang/rust/issues/33158
+        if self.is_packed(ctx, layout) && layout.map_or(false, |l| l.align > 1) {
+            warn!("Found a type that is both packed and aligned to greater than \
+                   1; Rust doesn't have `#[repr(packed = \"N\")]` yet, so we \
+                   are treating it as opaque");
+            return true;
         }
 
         false
     }
 }
 
 impl TemplateParameters for CompInfo {
     fn self_template_params(
--- a/third_party/rust/bindgen/src/ir/context.rs
+++ b/third_party/rust/bindgen/src/ir/context.rs
@@ -5,37 +5,37 @@ use super::analysis::{CannotDeriveCopy, 
                       HasTypeParameterInArray, HasVtableAnalysis,
                       HasVtableResult, HasDestructorAnalysis,
                       UsedTemplateParameters, HasFloat, SizednessAnalysis,
                       SizednessResult, analyze};
 use super::derive::{CanDeriveCopy, CanDeriveDebug, CanDeriveDefault,
                     CanDeriveHash, CanDerivePartialOrd, CanDeriveOrd,
                     CanDerivePartialEq, CanDeriveEq, CanDerive};
 use super::int::IntKind;
-use super::item::{IsOpaque, Item, ItemAncestors, ItemSet};
+use super::item::{IsOpaque, Item, ItemAncestors, ItemCanonicalPath, ItemSet};
 use super::item_kind::ItemKind;
 use super::module::{Module, ModuleKind};
 use super::template::{TemplateInstantiation, TemplateParameters};
 use super::traversal::{self, Edge, ItemTraversal};
 use super::ty::{FloatKind, Type, TypeKind};
 use super::function::Function;
 use super::super::time::Timer;
 use BindgenOptions;
 use callbacks::ParseCallbacks;
 use cexpr;
 use clang::{self, Cursor};
 use clang_sys;
 use parse::ClangItemParser;
-use proc_macro2::{Ident, Span};
+use proc_macro2::{Term, Span};
 use std::borrow::Cow;
 use std::cell::Cell;
+use std::collections::{HashMap, HashSet, hash_map};
+use std::collections::btree_map::{self, BTreeMap};
 use std::iter::IntoIterator;
 use std::mem;
-use std::collections::HashMap as StdHashMap;
-use {HashMap, HashSet, Entry};
 
 /// An identifier for some kind of IR item.
 #[derive(Debug, Copy, Clone, Eq, PartialOrd, Ord, Hash)]
 pub struct ItemId(usize);
 
 macro_rules! item_id_newtype {
     (
         $( #[$attr:meta] )*
@@ -296,18 +296,24 @@ where
 enum TypeKey {
     USR(String),
     Declaration(Cursor),
 }
 
 /// A context used during parsing and generation of structs.
 #[derive(Debug)]
 pub struct BindgenContext {
-    /// The map of all the items parsed so far, keyed off ItemId.
-    items: Vec<Option<Item>>,
+    /// The map of all the items parsed so far.
+    ///
+    /// It's a BTreeMap because we want the keys to be sorted to have consistent
+    /// output.
+    items: BTreeMap<ItemId, Item>,
+
+    /// The next item id to use during this bindings regeneration.
+    next_item_id: ItemId,
 
     /// Clang USR to type map. This is needed to be able to associate types with
     /// item ids during parsing.
     types: HashMap<TypeKey, TypeId>,
 
     /// Maps from a cursor to the item id of the named template type parameter
     /// for that cursor.
     type_params: HashMap<clang::Cursor, TypeId>,
@@ -337,22 +343,20 @@ pub struct BindgenContext {
     /// This means effectively, that a type has a potential ID before knowing if
     /// it's a correct type. But that's not important in practice.
     ///
     /// We could also use the `types` HashMap, but my intention with it is that
     /// only valid types and declarations end up there, and this could
     /// potentially break that assumption.
     currently_parsed_types: Vec<PartialType>,
 
-    /// A map with all the already parsed macro names. This is done to avoid
+    /// A HashSet with all the already parsed macro names. This is done to avoid
     /// hard errors while parsing duplicated macros, as well to allow macro
     /// expression parsing.
-    ///
-    /// This needs to be an std::HashMap because the cexpr API requires it.
-    parsed_macros: StdHashMap<Vec<u8>, cexpr::expr::EvalResult>,
+    parsed_macros: HashMap<Vec<u8>, cexpr::expr::EvalResult>,
 
     /// The active replacements collected from replaces="xxx" annotations.
     replacements: HashMap<Vec<String>, ItemId>,
 
     collected_typerefs: bool,
 
     in_codegen: bool,
 
@@ -585,21 +589,22 @@ If you encounter an error missing from t
                     );
                 }
             }
         }
 
         let root_module = Self::build_root_module(ItemId(0));
         let root_module_id = root_module.id().as_module_id_unchecked();
 
-        BindgenContext {
-            items: vec![Some(root_module)],
+        let mut me = BindgenContext {
+            items: Default::default(),
             types: Default::default(),
             type_params: Default::default(),
             modules: Default::default(),
+            next_item_id: ItemId(1),
             root_module: root_module_id,
             current_module: root_module_id,
             semantic_parents: Default::default(),
             currently_parsed_types: vec![],
             parsed_macros: Default::default(),
             replacements: Default::default(),
             collected_typerefs: false,
             in_codegen: false,
@@ -618,17 +623,21 @@ If you encounter an error missing from t
             cannot_derive_copy_in_array: None,
             cannot_derive_hash: None,
             cannot_derive_partialeq_or_partialord: None,
             sizedness: None,
             have_vtable: None,
             have_destructor: None,
             has_type_param_in_array: None,
             has_float: None,
-        }
+        };
+
+        me.add_item(root_module, None, None);
+
+        me
     }
 
     /// Creates a timer for the current bindgen phase. If time_phases is `true`,
     /// the timer will print to stderr when it is dropped, otherwise it will do
     /// nothing.
     pub fn timer<'a>(&self, name: &'a str) -> Timer<'a> {
         Timer::new(name).with_output(self.options.time_phases)
     }
@@ -701,17 +710,17 @@ If you encounter an error missing from t
         if item.id() != self.root_module {
             self.add_item_to_module(&item);
         }
 
         if is_type && item.expect_type().is_comp() {
             self.need_bitfield_allocation.push(id);
         }
 
-        let old_item = mem::replace(&mut self.items[id.0], Some(item));
+        let old_item = self.items.insert(id, item);
         assert!(
             old_item.is_none(),
             "should not have already associated an item with the given id"
         );
 
         // Unnamed items can have an USR, but they can't be referenced from
         // other sites explicitly and the USR can match if the unnamed items are
         // nested, so don't bother tracking them.
@@ -729,17 +738,17 @@ If you encounter an error missing from t
                 // This could happen, for example, with types like `int*` or
                 // similar.
                 //
                 // Fortunately, we don't care about those types being
                 // duplicated, so we can just ignore them.
                 debug!(
                     "Invalid declaration {:?} found for type {:?}",
                     declaration,
-                    self.resolve_item_fallible(id).unwrap().kind().expect_type()
+                    self.items.get(&id).unwrap().kind().expect_type()
                 );
                 return;
             }
 
             let key = if is_unnamed {
                 TypeKey::Declaration(declaration)
             } else if let Some(usr) = declaration.usr() {
                 TypeKey::USR(usr)
@@ -758,19 +767,19 @@ If you encounter an error missing from t
     }
 
     /// Ensure that every item (other than the root module) is in a module's
     /// children list. This is to make sure that every whitelisted item gets
     /// codegen'd, even if its parent is not whitelisted. See issue #769 for
     /// details.
     fn add_item_to_module(&mut self, item: &Item) {
         assert!(item.id() != self.root_module);
-        assert!(self.resolve_item_fallible(item.id()).is_none());
+        assert!(!self.items.contains_key(&item.id()));
 
-        if let Some(ref mut parent) = self.items[item.parent_id().0] {
+        if let Some(parent) = self.items.get_mut(&item.parent_id()) {
             if let Some(module) = parent.as_module_mut() {
                 debug!(
                     "add_item_to_module: adding {:?} as child of parent module {:?}",
                     item.id(),
                     item.parent_id()
                 );
 
                 module.children_mut().insert(item.id());
@@ -779,18 +788,18 @@ If you encounter an error missing from t
         }
 
         debug!(
             "add_item_to_module: adding {:?} as child of current module {:?}",
             item.id(),
             self.current_module
         );
 
-        self.items[(self.current_module.0).0]
-            .as_mut()
+        self.items
+            .get_mut(&self.current_module.into())
             .expect("Should always have an item for self.current_module")
             .as_module_mut()
             .expect("self.current_module should always be a module")
             .children_mut()
             .insert(item.id());
     }
 
     /// Add a new named template type parameter to this context's item set.
@@ -808,17 +817,17 @@ If you encounter an error missing from t
         assert_eq!(
             definition.kind(),
             clang_sys::CXCursor_TemplateTypeParameter
         );
 
         self.add_item_to_module(&item);
 
         let id = item.id();
-        let old_item = mem::replace(&mut self.items[id.0], Some(item));
+        let old_item = self.items.insert(id, item);
         assert!(
             old_item.is_none(),
             "should not have already associated an item with the given id"
         );
 
         let old_named_ty = self.type_params.insert(definition, id.as_type_id_unchecked());
         assert!(
             old_named_ty.is_none(),
@@ -908,65 +917,58 @@ If you encounter an error missing from t
             s = s.replace("$", "_");
             s.push_str("_");
             return Cow::Owned(s);
         }
         Cow::Borrowed(name)
     }
 
     /// Returns a mangled name as a rust identifier.
-    pub fn rust_ident<S>(&self, name: S) -> Ident
+    pub fn rust_ident<S>(&self, name: S) -> Term
     where
         S: AsRef<str>
     {
         self.rust_ident_raw(self.rust_mangle(name.as_ref()))
     }
 
     /// Returns a mangled name as a rust identifier.
-    pub fn rust_ident_raw<T>(&self, name: T) -> Ident
+    pub fn rust_ident_raw<T>(&self, name: T) -> Term
     where
         T: AsRef<str>
     {
-        Ident::new(name.as_ref(), Span::call_site())
+        Term::new(name.as_ref(), Span::call_site())
     }
 
     /// Iterate over all items that have been defined.
-    pub fn items(&self) -> impl Iterator<Item = (ItemId, &Item)> {
-        self.items
-            .iter()
-            .enumerate()
-            .filter_map(|(index, item)| {
-                let item = item.as_ref()?;
-                Some((ItemId(index), item))
-            })
+    pub fn items<'a>(&'a self) -> btree_map::Iter<'a, ItemId, Item> {
+        self.items.iter()
     }
 
     /// Have we collected all unresolved type references yet?
     pub fn collected_typerefs(&self) -> bool {
         self.collected_typerefs
     }
 
     /// Gather all the unresolved type references.
     fn collect_typerefs(
         &mut self,
     ) -> Vec<(ItemId, clang::Type, clang::Cursor, Option<ItemId>)> {
         debug_assert!(!self.collected_typerefs);
         self.collected_typerefs = true;
         let mut typerefs = vec![];
-
-        for (id, item) in self.items() {
+        for (id, ref mut item) in &mut self.items {
             let kind = item.kind();
             let ty = match kind.as_type() {
                 Some(ty) => ty,
                 None => continue,
             };
 
             match *ty.kind() {
                 TypeKind::UnresolvedTypeRef(ref ty, loc, parent_id) => {
-                    typerefs.push((id, ty.clone(), loc, parent_id));
+                    typerefs.push((*id, ty.clone(), loc, parent_id));
                 }
                 _ => {}
             };
         }
         typerefs
     }
 
     /// Collect all of our unresolved type references and resolve them.
@@ -977,17 +979,17 @@ If you encounter an error missing from t
             let _resolved = {
                 let resolved = Item::from_ty(&ty, loc, parent_id, self)
                     .unwrap_or_else(|_| {
                         warn!("Could not resolve type reference, falling back \
                                to opaque blob");
                         Item::new_opaque_type(self.next_item_id(), &ty, self)
                     });
 
-                let item = self.items[id.0].as_mut().unwrap();
+                let item = self.items.get_mut(&id).unwrap();
                 *item.kind_mut().as_type_mut().unwrap().kind_mut() =
                     TypeKind::ResolvedTypeRef(resolved);
                 resolved
             };
 
             // Something in the STL is trolling me. I don't need this assertion
             // right now, but worth investigating properly once this lands.
             //
@@ -1008,21 +1010,21 @@ If you encounter an error missing from t
     /// # Panics
     ///
     /// Panics if attempt to resolve given `ItemId` inside the given
     /// closure is made.
     fn with_loaned_item<F, T>(&mut self, id: ItemId, f: F) -> T
     where
         F: (FnOnce(&BindgenContext, &mut Item) -> T)
     {
-        let mut item = self.items[id.0].take().unwrap();
+        let mut item = self.items.remove(&id).unwrap();
 
         let result = f(self, &mut item);
 
-        let existing = mem::replace(&mut self.items[id.0], Some(item));
+        let existing = self.items.insert(id, item);
         assert!(existing.is_none());
 
         result
     }
 
     /// Compute the bitfield allocation units for all `TypeKind::Comp` items we
     /// parsed.
     fn compute_bitfield_units(&mut self) {
@@ -1041,23 +1043,25 @@ If you encounter an error missing from t
             });
         }
     }
 
     /// Assign a new generated name for each anonymous field.
     fn deanonymize_fields(&mut self) {
         let _t = self.timer("deanonymize_fields");
 
-        let comp_item_ids: Vec<ItemId> = self.items()
+        let comp_item_ids: Vec<ItemId> = self.items
+            .iter()
             .filter_map(|(id, item)| {
                 if item.kind().as_type()?.is_comp() {
                     return Some(id);
                 }
                 None
             })
+            .cloned()
             .collect();
 
         for id in comp_item_ids {
             self.with_loaned_item(id, |ctx, item| {
                 item.kind_mut()
                     .as_type_mut()
                     .unwrap()
                     .as_comp_mut()
@@ -1078,17 +1082,17 @@ If you encounter an error missing from t
 
         // FIXME: This is linear, but the replaces="xxx" annotation was already
         // there, and for better or worse it's useful, sigh...
         //
         // We leverage the ResolvedTypeRef thing, though, which is cool :P.
 
         let mut replacements = vec![];
 
-        for (id, item) in self.items() {
+        for (id, item) in self.items.iter() {
             if item.annotations().use_instead_of().is_some() {
                 continue;
             }
 
             // Calls to `canonical_name` are expensive, so eagerly filter out
             // items that cannot be replaced.
             let ty = match item.kind().as_type() {
                 Some(ty) => ty,
@@ -1098,35 +1102,35 @@ If you encounter an error missing from t
             match *ty.kind() {
                 TypeKind::Comp(..) |
                 TypeKind::TemplateAlias(..) |
                 TypeKind::Enum(..) |
                 TypeKind::Alias(..) => {}
                 _ => continue,
             }
 
-            let path = item.path_for_whitelisting(self);
+            let path = item.canonical_path(self);
             let replacement = self.replacements.get(&path[1..]);
 
             if let Some(replacement) = replacement {
-                if *replacement != id {
+                if replacement != id {
                     // We set this just after parsing the annotation. It's
                     // very unlikely, but this can happen.
-                    if self.resolve_item_fallible(*replacement).is_some() {
+                    if self.items.get(replacement).is_some() {
                         replacements.push((id.expect_type_id(self), replacement.expect_type_id(self)));
                     }
                 }
             }
         }
 
         for (id, replacement_id) in replacements {
             debug!("Replacing {:?} with {:?}", id, replacement_id);
+
             let new_parent = {
-                let item_id: ItemId = id.into();
-                let item = self.items[item_id.0].as_mut().unwrap();
+                let item = self.items.get_mut(&id.into()).unwrap();
                 *item.kind_mut().as_type_mut().unwrap().kind_mut() =
                     TypeKind::ResolvedTypeRef(replacement_id);
                 item.parent_id()
             };
 
             // Relocate the replacement item from where it was declared, to
             // where the thing it is replacing was declared.
             //
@@ -1134,19 +1138,18 @@ If you encounter an error missing from t
 
             let old_parent = self.resolve_item(replacement_id).parent_id();
             if new_parent == old_parent {
                 // Same parent and therefore also same containing
                 // module. Nothing to do here.
                 continue;
             }
 
-            let replacement_item_id: ItemId = replacement_id.into();
-            self.items[replacement_item_id.0]
-                .as_mut()
+            self.items
+                .get_mut(&replacement_id.into())
                 .unwrap()
                 .set_parent_for_replacement(new_parent);
 
             // Second, make sure that it is in the correct module's children
             // set.
 
             let old_module = {
                 let immut_self = &*self;
@@ -1172,26 +1175,26 @@ If you encounter an error missing from t
             };
             let new_module = new_module.unwrap_or(self.root_module.into());
 
             if new_module == old_module {
                 // Already in the correct module.
                 continue;
             }
 
-            self.items[old_module.0]
-                .as_mut()
+            self.items
+                .get_mut(&old_module)
                 .unwrap()
                 .as_module_mut()
                 .unwrap()
                 .children_mut()
                 .remove(&replacement_id.into());
 
-            self.items[new_module.0]
-                .as_mut()
+            self.items
+                .get_mut(&new_module)
                 .unwrap()
                 .as_module_mut()
                 .unwrap()
                 .children_mut()
                 .insert(replacement_id.into());
         }
     }
 
@@ -1249,33 +1252,33 @@ If you encounter an error missing from t
     }
 
     fn assert_no_dangling_item_traversal(
         &self,
     ) -> traversal::AssertNoDanglingItemsTraversal {
         assert!(self.in_codegen_phase());
         assert!(self.current_module == self.root_module);
 
-        let roots = self.items().map(|(id, _)| id);
+        let roots = self.items().map(|(&id, _)| id);
         traversal::AssertNoDanglingItemsTraversal::new(
             self,
             roots,
             traversal::all_edges,
         )
     }
 
     /// When the `testing_only_extra_assertions` feature is enabled, walk over
     /// every item and ensure that it is in the children set of one of its
     /// module ancestors.
     fn assert_every_item_in_a_module(&self) {
         if cfg!(feature = "testing_only_extra_assertions") {
             assert!(self.in_codegen_phase());
             assert!(self.current_module == self.root_module);
 
-            for (id, _item) in self.items() {
+            for (&id, _item) in self.items() {
                 if id == self.root_module {
                     continue;
                 }
 
                 assert!(
                     {
                         let id = id.into_resolver()
                             .through_type_refs()
@@ -1372,17 +1375,17 @@ If you encounter an error missing from t
     fn find_used_template_parameters(&mut self) {
         let _t = self.timer("find_used_template_parameters");
         if self.options.whitelist_recursively {
             let used_params = analyze::<UsedTemplateParameters>(self);
             self.used_template_parameters = Some(used_params);
         } else {
             // If you aren't recursively whitelisting, then we can't really make
             // any sense of template parameter usage, and you're on your own.
-            let mut used_params = HashMap::default();
+            let mut used_params = HashMap::new();
             for &id in self.whitelisted_items() {
                 used_params.entry(id).or_insert(
                     id.self_template_params(self).into_iter().map(|p| p.into()).collect()
                 );
             }
             self.used_template_parameters = Some(used_params);
         }
     }
@@ -1456,17 +1459,17 @@ If you encounter an error missing from t
     //
     // If at some point we care about the memory here, probably a map TypeKind
     // -> builtin type ItemId would be the best to improve that.
     fn add_builtin_item(&mut self, item: Item) {
         debug!("add_builtin_item: item = {:?}", item);
         debug_assert!(item.kind().is_type());
         self.add_item_to_module(&item);
         let id = item.id();
-        let old_item = mem::replace(&mut self.items[id.0], Some(item));
+        let old_item = self.items.insert(id, item);
         assert!(old_item.is_none(), "Inserted type twice?");
     }
 
     fn build_root_module(id: ItemId) -> Item {
         let module = Module::new(Some("root".into()), ModuleKind::Normal);
         Item::new(id, None, None, id, ItemKind::Module(module))
     }
 
@@ -1491,31 +1494,31 @@ If you encounter an error missing from t
         self.resolve_item(func_id).kind().expect_function()
     }
 
     /// Resolve the given `ItemId` as a type, or `None` if there is no item with
     /// the given id.
     ///
     /// Panics if the id resolves to an item that is not a type.
     pub fn safe_resolve_type(&self, type_id: TypeId) -> Option<&Type> {
-        self.resolve_item_fallible(type_id).map(|t| t.kind().expect_type())
+        self.items.get(&type_id.into()).map(|t| t.kind().expect_type())
     }
 
     /// Resolve the given `ItemId` into an `Item`, or `None` if no such item
     /// exists.
     pub fn resolve_item_fallible<Id: Into<ItemId>>(&self, id: Id) -> Option<&Item> {
-        self.items.get(id.into().0)?.as_ref()
+        self.items.get(&id.into())
     }
 
     /// Resolve the given `ItemId` into an `Item`.
     ///
     /// Panics if the given id does not resolve to any item.
     pub fn resolve_item<Id: Into<ItemId>>(&self, item_id: Id) -> &Item {
         let item_id = item_id.into();
-        match self.resolve_item_fallible(item_id) {
+        match self.items.get(&item_id) {
             Some(item) => item,
             None => panic!("Not an item: {:?}", item_id),
         }
     }
 
     /// Get the current module.
     pub fn current_module(&self) -> ModuleId {
         self.current_module
@@ -1771,18 +1774,18 @@ If you encounter an error missing from t
 
                         // Bypass all the validations in add_item explicitly.
                         debug!(
                             "instantiate_template: inserting nested \
                                 instantiation item: {:?}",
                             sub_item
                         );
                         self.add_item_to_module(&sub_item);
-                        debug_assert_eq!(sub_id, sub_item.id());
-                        self.items[sub_id.0] = Some(sub_item);
+                        debug_assert!(sub_id == sub_item.id());
+                        self.items.insert(sub_id, sub_item);
                         args.push(sub_id.as_type_id_unchecked());
                     }
                 }
                 _ => {
                     warn!(
                         "Found template arg cursor we can't handle: {:?}",
                         child
                     );
@@ -1831,18 +1834,18 @@ If you encounter an error missing from t
             None,
             self.current_module.into(),
             ItemKind::Type(ty),
         );
 
         // Bypass all the validations in add_item explicitly.
         debug!("instantiate_template: inserting item: {:?}", item);
         self.add_item_to_module(&item);
-        debug_assert_eq!(with_id, item.id());
-        self.items[with_id.0] = Some(item);
+        debug_assert!(with_id == item.id());
+        self.items.insert(with_id, item);
         Some(with_id.as_type_id_unchecked())
     }
 
     /// If we have already resolved the type for the given type declaration,
     /// return its `ItemId`. Otherwise, return `None`.
     pub fn get_resolved_type(
         &self,
         decl: &clang::CanonicalTypeDeclaration,
@@ -1988,18 +1991,18 @@ If you encounter an error missing from t
             ItemKind::Type(ty),
         );
         self.add_builtin_item(item);
         with_id.as_type_id_unchecked()
     }
 
     /// Returns the next item id to be used for an item.
     pub fn next_item_id(&mut self) -> ItemId {
-        let ret = ItemId(self.items.len());
-        self.items.push(None);
+        let ret = self.next_item_id;
+        self.next_item_id = ItemId(self.next_item_id.0 + 1);
         ret
     }
 
     fn build_builtin_ty(&mut self, ty: &clang::Type) -> Option<TypeId> {
         use clang_sys::*;
         let type_kind = match ty.kind() {
             CXType_NullPtr => TypeKind::NullPtr,
             CXType_Void => TypeKind::Void,
@@ -2071,17 +2074,17 @@ If you encounter an error missing from t
     }
 
     /// Have we parsed the macro named `macro_name` already?
     pub fn parsed_macro(&self, macro_name: &[u8]) -> bool {
         self.parsed_macros.contains_key(macro_name)
     }
 
     /// Get the currently parsed macros.
-    pub fn parsed_macros(&self) -> &StdHashMap<Vec<u8>, cexpr::expr::EvalResult> {
+    pub fn parsed_macros(&self) -> &HashMap<Vec<u8>, cexpr::expr::EvalResult> {
         debug_assert!(!self.in_codegen_phase());
         &self.parsed_macros
     }
 
     /// Mark the macro named `macro_name` as parsed.
     pub fn note_parsed_macro(
         &mut self,
         id: Vec<u8>,
@@ -2097,25 +2100,25 @@ If you encounter an error missing from t
 
     /// Mark the type with the given `name` as replaced by the type with id
     /// `potential_ty`.
     ///
     /// Replacement types are declared using the `replaces="xxx"` annotation,
     /// and implies that the original type is hidden.
     pub fn replace(&mut self, name: &[String], potential_ty: ItemId) {
         match self.replacements.entry(name.into()) {
-            Entry::Vacant(entry) => {
+            hash_map::Entry::Vacant(entry) => {
                 debug!(
                     "Defining replacement for {:?} as {:?}",
                     name,
                     potential_ty
                 );
                 entry.insert(potential_ty);
             }
-            Entry::Occupied(occupied) => {
+            hash_map::Entry::Occupied(occupied) => {
                 warn!(
                     "Replacement for {:?} already defined as {:?}; \
                        ignoring duplicate replacement definition as {:?}",
                     name,
                     occupied.get(),
                     potential_ty
                 );
             }
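
Only the import path changes in `replace` above (the crate-local `Entry` re-export from hashbrown versus `std::collections::hash_map::Entry`); the vacant/occupied pattern itself is unchanged. A self-contained sketch with hypothetical names (the real code logs through `debug!`/`warn!` rather than printing):

use std::collections::hash_map::Entry;
use std::collections::HashMap;

fn define_replacement(map: &mut HashMap<String, u32>, name: &str, id: u32) {
    match map.entry(name.to_owned()) {
        Entry::Vacant(entry) => {
            println!("Defining replacement for {:?} as {:?}", name, id);
            entry.insert(id);
        }
        Entry::Occupied(occupied) => {
            println!(
                "Replacement for {:?} already defined as {:?}; ignoring {:?}",
                name,
                occupied.get(),
                id
            );
        }
    }
}

fn main() {
    let mut replacements = HashMap::new();
    define_replacement(&mut replacements, "Foo", 1);
    define_replacement(&mut replacements, "Foo", 2); // duplicate is ignored
    assert_eq!(replacements["Foo"], 1);
}
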
@@ -2155,51 +2158,55 @@ If you encounter an error missing from t
         assert_eq!(
             cursor.kind(),
             ::clang_sys::CXCursor_Namespace,
             "Be a nice person"
         );
 
         let mut module_name = None;
         let spelling = cursor.spelling();
-        if !spelling.is_empty() {
+        if !spelling.is_empty()
+        {
             module_name = Some(spelling)
         }
 
-        let tokens = cursor.tokens();
+        let tokens = match cursor.tokens() {
+            Some(tokens) => tokens,
+            None => return (module_name, ModuleKind::Normal),
+        };
         let mut iter = tokens.iter();
         let mut kind = ModuleKind::Normal;
         let mut found_namespace_keyword = false;
         while let Some(token) = iter.next() {
-            match token.spelling() {
-                b"inline" => {
+            match &*token.spelling {
+                "inline" => {
                     assert!(!found_namespace_keyword);
                     assert!(kind != ModuleKind::Inline);
                     kind = ModuleKind::Inline;
                 }
                 // The double colon allows us to handle nested namespaces like
                 // namespace foo::bar { }
                 //
                 // libclang still gives us two namespace cursors, which is cool,
                 // but the tokenization of the second begins with the double
                 // colon. That's ok, so we only need to handle the weird
                 // tokenization here.
                 //
                 // Fortunately enough, inline nested namespace specifiers aren't
                 // a thing, and are invalid C++ :)
-                b"namespace" | b"::" => {
+                "namespace" | "::" => {
                     found_namespace_keyword = true;
                 }
-                b"{" => {
+                "{" => {
                     assert!(found_namespace_keyword);
                     break;
                 }
                 name if found_namespace_keyword => {
                     if module_name.is_none() {
-                        module_name = Some(String::from_utf8_lossy(name).into_owned());
+                        module_name = Some(name.to_owned());
                     }
                     break;
                 }
                 _ => {
                     panic!(
                         "Unknown token while processing namespace: {:?}",
                         token
                     );
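
In the namespace scan above, the restored code treats `cursor.tokens()` as fallible and matches string spellings, while the backed-out code matched byte spellings (`b"inline"`) from an infallible tokenizer. A toy version of the same token walk over plain `&str` tokens; the token source here is made up, not libclang's:

#[derive(Debug, PartialEq)]
enum ModuleKind { Normal, Inline }

fn scan(tokens: &[&str]) -> (Option<String>, ModuleKind) {
    let mut kind = ModuleKind::Normal;
    let mut name = None;
    let mut saw_namespace = false;
    for tok in tokens {
        match *tok {
            "inline" => kind = ModuleKind::Inline,
            "namespace" | "::" => saw_namespace = true,
            "{" => break,
            other if saw_namespace => {
                name = Some(other.to_owned());
                break;
            }
            // The real code asserts or panics on unexpected tokens.
            _ => {}
        }
    }
    (name, kind)
}

fn main() {
    assert_eq!(
        scan(&["inline", "namespace", "detail", "{"]),
        (Some("detail".to_owned()), ModuleKind::Inline)
    );
    assert_eq!(scan(&["namespace", "{"]), (None, ModuleKind::Normal));
}
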
@@ -2295,65 +2302,60 @@ If you encounter an error missing from t
                         }
 
                     // If this is a type that explicitly replaces another, we assume
                     // you know what you're doing.
                     if item.annotations().use_instead_of().is_some() {
                         return true;
                     }
 
-                    let name = item.path_for_whitelisting(self)[1..].join("::");
+                    let name = item.canonical_path(self)[1..].join("::");
                     debug!("whitelisted_items: testing {:?}", name);
                     match *item.kind() {
                         ItemKind::Module(..) => true,
                         ItemKind::Function(_) => {
                             self.options().whitelisted_functions.matches(&name)
                         }
                         ItemKind::Var(_) => {
                             self.options().whitelisted_vars.matches(&name)
                         }
                         ItemKind::Type(ref ty) => {
                             if self.options().whitelisted_types.matches(&name) {
                                 return true;
                             }
 
-                            // Unnamed top-level enums are special and we
-                            // whitelist them via the `whitelisted_vars` filter,
-                            // since they're effectively top-level constants,
-                            // and there's no way for them to be referenced
-                            // consistently.
                             let parent = self.resolve_item(item.parent_id());
-                            if !parent.is_module() {
-                                return false;
+                            if parent.is_module() {
+                                let mut prefix_path = parent.canonical_path(self);
+
+                                // Unnamed top-level enums are special and we
+                                // whitelist them via the `whitelisted_vars` filter,
+                                // since they're effectively top-level constants,
+                                // and there's no way for them to be referenced
+                                // consistently.
+                                if let TypeKind::Enum(ref enum_) = *ty.kind() {
+                                    if ty.name().is_none() &&
+                                        enum_.variants().iter().any(|variant| {
+                                            prefix_path.push(variant.name().into());
+                                            let name = prefix_path[1..].join("::");
+                                            prefix_path.pop().unwrap();
+                                            self.options()
+                                                .whitelisted_vars
+                                                .matches(&name)
+                                        }) {
+                                            return true;
+                                        }
+                                }
                             }
 
-
-                            let enum_ = match *ty.kind() {
-                                TypeKind::Enum(ref e) => e,
-                                _ => return false,
-                            };
-
-                            if ty.name().is_some() {
-                                return false;
-                            }
-
-                            let mut prefix_path =
-                                parent.path_for_whitelisting(self);
-                            enum_.variants().iter().any(|variant| {
-                                prefix_path.push(variant.name().into());
-                                let name = prefix_path[1..].join("::");
-                                prefix_path.pop().unwrap();
-                                self.options()
-                                    .whitelisted_vars
-                                    .matches(&name)
-                            })
+                            false
                         }
                     }
                 })
-                .map(|(id, _)| id)
+                .map(|(&id, _)| id)
                 .collect::<Vec<_>>();
 
             // The reversal preserves the expected ordering of traversal,
             // resulting in more stable-ish bindgen-generated names for
             // anonymous types (like unions).
             roots.reverse();
             roots
         };
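
Both versions in the hunk above implement the same special case: an unnamed top-level enum is whitelisted when any `prefix::VARIANT` name matches the variable whitelist; they differ only in control flow and in using `canonical_path` versus `path_for_whitelisting`. A sketch of the shared name-building check, with a plain string set standing in for bindgen's RegexSet and hypothetical names:

use std::collections::HashSet;

fn enum_whitelisted_via_vars(
    prefix_path: &mut Vec<String>,   // e.g. ["root", "ns"]
    variants: &[&str],               // variant names of the unnamed enum
    whitelisted_vars: &HashSet<String>,
) -> bool {
    variants.iter().any(|variant| {
        prefix_path.push((*variant).to_owned());
        let name = prefix_path[1..].join("::"); // drop the root module segment
        prefix_path.pop().unwrap();
        whitelisted_vars.contains(&name)
    })
}

fn main() {
    let mut path = vec!["root".to_owned(), "ns".to_owned()];
    let vars: HashSet<String> =
        vec!["ns::FOO_BAR".to_owned()].into_iter().collect();
    assert!(enum_whitelisted_via_vars(&mut path, &["FOO_BAR", "FOO_BAZ"], &vars));
}
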
@@ -2382,33 +2384,21 @@ If you encounter an error missing from t
                 traversal::codegen_edges,
             ).collect::<ItemSet>()
         } else {
             whitelisted.clone()
         };
 
         self.whitelisted = Some(whitelisted);
         self.codegen_items = Some(codegen_items);
-
-        for item in self.options().whitelisted_functions.unmatched_items() {
-            error!("unused option: --whitelist-function {}", item);
-        }
-
-        for item in self.options().whitelisted_vars.unmatched_items() {
-            error!("unused option: --whitelist-var {}", item);
-        }
-
-        for item in self.options().whitelisted_types.unmatched_items() {
-            error!("unused option: --whitelist-type {}", item);
-        }
     }
 
     /// Convenient method for getting the prefix to use for most traits in
     /// codegen depending on the `use_core` option.
-    pub fn trait_prefix(&self) -> Ident {
+    pub fn trait_prefix(&self) -> Term {
         if self.options().use_core {
             self.rust_ident_raw("core")
         } else {
             self.rust_ident_raw("std")
         }
     }
 
     /// Call if a bindgen complex is generated
@@ -2575,29 +2565,29 @@ If you encounter an error missing from t
 
         // Look up the computed value for whether the item with `id` has
         // float or not.
         self.has_float.as_ref().unwrap().contains(&id.into())
     }
 
     /// Check if `--no-partialeq` flag is enabled for this item.
     pub fn no_partialeq_by_name(&self, item: &Item) -> bool {
-        let name = item.path_for_whitelisting(self)[1..].join("::");
+        let name = item.canonical_path(self)[1..].join("::");
         self.options().no_partialeq_types.matches(&name)
     }
 
     /// Check if `--no-copy` flag is enabled for this item.
     pub fn no_copy_by_name(&self, item: &Item) -> bool {
-        let name = item.path_for_whitelisting(self)[1..].join("::");
+        let name = item.canonical_path(self)[1..].join("::");
         self.options().no_copy_types.matches(&name)
     }
 
     /// Check if `--no-hash` flag is enabled for this item.
     pub fn no_hash_by_name(&self, item: &Item) -> bool {
-        let name = item.path_for_whitelisting(self)[1..].join("::");
+        let name = item.canonical_path(self)[1..].join("::");
         self.options().no_hash_types.matches(&name)
     }
 }
 
 /// A builder struct for configuring item resolution options.
 #[derive(Debug, Copy, Clone)]
 pub struct ItemResolver {
     id: ItemId,
--- a/third_party/rust/bindgen/src/ir/dot.rs
+++ b/third_party/rust/bindgen/src/ir/dot.rs
@@ -27,17 +27,17 @@ where
 {
     let file = File::create(path)?;
     let mut dot_file = io::BufWriter::new(file);
     writeln!(&mut dot_file, "digraph {{")?;
 
     let mut err: Option<io::Result<_>> = None;
 
     for (id, item) in ctx.items() {
-        let is_whitelisted = ctx.whitelisted_items().contains(&id);
+        let is_whitelisted = ctx.whitelisted_items().contains(id);
 
         writeln!(
             &mut dot_file,
             r#"{} [fontname="courier", color={}, label=< <table border="0" align="left">"#,
             id.as_usize(),
             if is_whitelisted {
                 "black"
             } else {
--- a/third_party/rust/bindgen/src/ir/function.rs
+++ b/third_party/rust/bindgen/src/ir/function.rs
@@ -7,18 +7,16 @@ use super::item::Item;
 use super::traversal::{EdgeKind, Trace, Tracer};
 use super::ty::TypeKind;
 use clang;
 use clang_sys::{self, CXCallingConv};
 use ir::derive::{CanTriviallyDeriveDebug, CanTriviallyDeriveHash,
                  CanTriviallyDerivePartialEqOrPartialOrd, CanDerive};
 use parse::{ClangItemParser, ClangSubItemParser, ParseError, ParseResult};
 use quote;
-use quote::TokenStreamExt;
-use proc_macro2;
 use std::io;
 
 const RUST_DERIVE_FUNPTR_LIMIT: usize = 12;
 
 /// What kind of a function are we looking at?
 #[derive(Debug, Copy, Clone, PartialEq)]
 pub enum FunctionKind {
     /// A plain, free function.
@@ -189,17 +187,17 @@ impl Abi {
         match *self {
             Abi::Unknown(..) => true,
             _ => false,
         }
     }
 }
 
 impl quote::ToTokens for Abi {
-    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
+    fn to_tokens(&self, tokens: &mut quote::Tokens) {
         tokens.append_all(match *self {
             Abi::C => quote! { "C" },
             Abi::Stdcall => quote! { "stdcall" },
             Abi::Fastcall => quote! { "fastcall" },
             Abi::ThisCall => quote! { "thiscall" },
             Abi::Aapcs => quote! { "aapcs" },
             Abi::Win64 => quote! { "win64" },
             Abi::Unknown(cc) => panic!(
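
The `ToTokens` change above is an API migration: the backed-out code writes into `proc_macro2::TokenStream`, the restored code into the older `quote::Tokens`. For reference, a minimal impl of the same shape, assuming present-day `quote = "1"` and `proc-macro2 = "1"` as dependencies, so this is a sketch of the pattern rather than the exact 2019-era crate versions:

use proc_macro2::TokenStream;
use quote::{quote, ToTokens, TokenStreamExt};

enum Abi {
    C,
    Stdcall,
}

impl ToTokens for Abi {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        tokens.append_all(match *self {
            Abi::C => quote! { "C" },
            Abi::Stdcall => quote! { "stdcall" },
        });
    }
}

fn main() {
    for abi in [Abi::C, Abi::Stdcall] {
        // Each renders as a string literal, ready to splice into an
        // `extern #abi fn(...)` type in the generated bindings.
        println!("{}", quote! { #abi });
    }
}
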
@@ -218,19 +216,16 @@ pub struct FunctionSig {
 
     /// The type of the arguments, optionally with the name of the argument when
     /// declared.
     argument_types: Vec<(Option<String>, TypeId)>,
 
     /// Whether this function is variadic.
     is_variadic: bool,
 
-    /// Whether this function's return value must be used.
-    must_use: bool,
-
     /// The ABI of this function.
     abi: Abi,
 }
 
 fn get_abi(cc: CXCallingConv) -> Abi {
     use clang_sys::*;
     match cc {
         CXCallingConv_Default => Abi::C,
@@ -304,103 +299,80 @@ pub fn cursor_mangling(
             mangling.truncate(new_len);
             mangling.push_str("D1Ev");
         }
     }
 
     Some(mangling)
 }
 
-fn args_from_ty_and_cursor(
-    ty: &clang::Type,
-    cursor: &clang::Cursor,
-    ctx: &mut BindgenContext,
-) -> Vec<(Option<String>, TypeId)> {
-    match (cursor.args(), ty.args()) {
-        (Some(cursor_args), Some(ty_args)) => {
-            ty_args.iter().enumerate().map(|(i, ty)| {
-                let name = cursor_args.get(i)
-                    .map(|c| c.spelling())
-                    .and_then(|name| if name.is_empty() { None } else { Some(name) });
-                (name, Item::from_ty_or_ref(*ty, *cursor, None, ctx))
-            }).collect()
-        }
-        (Some(cursor_args), None) => {
-            cursor_args.iter().map(|cursor| {
-                let name = cursor.spelling();
-                let name = if name.is_empty() { None } else { Some(name) };
-                (name, Item::from_ty_or_ref(cursor.cur_type(), *cursor, None, ctx))
-            }).collect()
-        }
-        _ => panic!()
-    }
-}
-
 impl FunctionSig {
     /// Construct a new function signature.
     pub fn new(
         return_type: TypeId,
-        argument_types: Vec<(Option<String>, TypeId)>,
+        arguments: Vec<(Option<String>, TypeId)>,
         is_variadic: bool,
-        must_use: bool,
         abi: Abi,
     ) -> Self {
         FunctionSig {
-            return_type,
-            argument_types,
-            is_variadic,
-            must_use,
+            return_type: return_type,
+            argument_types: arguments,
+            is_variadic: is_variadic,
             abi: abi,
         }
     }
 
     /// Construct a new function signature from the given Clang type.
     pub fn from_ty(
         ty: &clang::Type,
         cursor: &clang::Cursor,
         ctx: &mut BindgenContext,
     ) -> Result<Self, ParseError> {
         use clang_sys::*;
         debug!("FunctionSig::from_ty {:?} {:?}", ty, cursor);
 
         // Skip function templates
-        let kind = cursor.kind();
-        if kind == CXCursor_FunctionTemplate {
+        if cursor.kind() == CXCursor_FunctionTemplate {
             return Err(ParseError::Continue);
         }
 
         // Don't parse operatorxx functions in C++
         let spelling = cursor.spelling();
         if spelling.starts_with("operator") {
             return Err(ParseError::Continue);
         }
 
-        // Constructors of non-type template parameter classes for some reason
-        // include the template parameter in their name. Just skip them, since
-        // we don't handle well non-type template parameters anyway.
-        if (kind == CXCursor_Constructor || kind == CXCursor_Destructor) &&
-            spelling.contains('<')
-        {
-            return Err(ParseError::Continue);
-        }
-
         let cursor = if cursor.is_valid() {
             *cursor
         } else {
             ty.declaration()
         };
 
-        let mut args = match kind {
+        let mut args: Vec<_> = match cursor.kind() {
             CXCursor_FunctionDecl |
             CXCursor_Constructor |
             CXCursor_CXXMethod |
             CXCursor_ObjCInstanceMethodDecl |
             CXCursor_ObjCClassMethodDecl => {
-                args_from_ty_and_cursor(&ty, &cursor, ctx)
-            },
+                // For CXCursor_FunctionDecl, cursor.args() is the reliable way
+                // to get parameter names and types.
+                cursor
+                    .args()
+                    .unwrap()
+                    .iter()
+                    .map(|arg| {
+                        let arg_ty = arg.cur_type();
+                        let name = arg.spelling();
+                        let name =
+                            if name.is_empty() { None } else { Some(name) };
+                        let ty = Item::from_ty_or_ref(arg_ty, *arg, None, ctx);
+                        (name, ty)
+                    })
+                    .collect()
+            }
             _ => {
                 // For non-CXCursor_FunctionDecl, visiting the cursor's children
                 // is the only reliable way to get parameter names.
                 let mut args = vec![];
                 cursor.visit(|c| {
                     if c.kind() == CXCursor_ParmDecl {
                         let ty =
                             Item::from_ty_or_ref(c.cur_type(), c, None, ctx);
@@ -410,22 +382,19 @@ impl FunctionSig {
                         args.push((name, ty));
                     }
                     CXChildVisit_Continue
                 });
                 args
             }
         };
 
-        let must_use =
-            ctx.options().enable_function_attribute_detection &&
-            cursor.has_simple_attr("warn_unused_result");
-        let is_method = kind == CXCursor_CXXMethod;
-        let is_constructor = kind == CXCursor_Constructor;
-        let is_destructor = kind == CXCursor_Destructor;
+        let is_method = cursor.kind() == CXCursor_CXXMethod;
+        let is_constructor = cursor.kind() == CXCursor_Constructor;
+        let is_destructor = cursor.kind() == CXCursor_Destructor;
         if (is_constructor || is_destructor || is_method) &&
             cursor.lexical_parent() != cursor.semantic_parent()
         {
             // Only parse constructors once.
             return Err(ParseError::Continue);
         }
 
         if is_method || is_constructor || is_destructor {
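
The removed `must_use` detection above fed the `--enable-function-attribute-detection` option (see the lib.rs hunks later in this patch): when clang reports `warn_unused_result`, the newer bindgen can emit `#[must_use]` on the generated function. A minimal illustration of the attribute that ends up in the bindings (illustrative only, not output of the code in this diff):

#[must_use = "check the status code"]
fn do_work() -> i32 {
    0
}

fn main() {
    // A bare `do_work();` would trigger the `unused_must_use` lint;
    // binding or checking the value does not.
    let status = do_work();
    assert_eq!(status, 0);
}
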
@@ -458,18 +427,18 @@ impl FunctionSig {
             } else if is_virtual {
                 let void = Item::builtin_type(TypeKind::Void, false, ctx);
                 let ptr =
                     Item::builtin_type(TypeKind::Pointer(void), false, ctx);
                 args.insert(0, (Some("this".into()), ptr));
             }
         }
 
-        let ty_ret_type = if kind == CXCursor_ObjCInstanceMethodDecl ||
-            kind == CXCursor_ObjCClassMethodDecl
+        let ty_ret_type = if cursor.kind() == CXCursor_ObjCInstanceMethodDecl ||
+            cursor.kind() == CXCursor_ObjCClassMethodDecl
         {
             ty.ret_type().or_else(|| cursor.ret_type()).ok_or(
                 ParseError::Continue,
             )?
         } else {
             ty.ret_type().ok_or(ParseError::Continue)?
         };
         let ret = Item::from_ty_or_ref(ty_ret_type, cursor, None, ctx);
@@ -484,17 +453,17 @@ impl FunctionSig {
             }
         }
         let abi = get_abi(call_conv);
 
         if abi.is_unknown() {
             warn!("Unknown calling convention: {:?}", call_conv);
         }
 
-        Ok(Self::new(ret.into(), args, ty.is_variadic(), must_use, abi))
+        Ok(Self::new(ret.into(), args, ty.is_variadic(), abi))
     }
 
     /// Get this function signature's return type.
     pub fn return_type(&self) -> TypeId {
         self.return_type
     }
 
     /// Get this function signature's argument (name, type) pairs.
@@ -510,21 +479,16 @@ impl FunctionSig {
     /// Is this function signature variadic?
     pub fn is_variadic(&self) -> bool {
         // Clang reports some functions as variadic when they *might* be
         // variadic. We do the argument check because rust doesn't codegen well
         // variadic functions without an initial argument.
         self.is_variadic && !self.argument_types.is_empty()
     }
 
-    /// Must this function's return value be used?
-    pub fn must_use(&self) -> bool {
-        self.must_use
-    }
-
     /// Are function pointers with this signature able to derive Rust traits?
     /// Rust only supports deriving traits for function pointers with a limited
     /// number of parameters and a couple ABIs.
     ///
     /// For more details, see:
     ///
     /// * https://github.com/rust-lang-nursery/rust-bindgen/issues/547,
     /// * https://github.com/rust-lang/rust/issues/38848,
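
The `is_variadic` comment above reflects a Rust-side limitation: a C-variadic foreign function can only be declared with at least one named parameter before the `...`, so a signature clang reports as variadic but with no fixed arguments cannot be expressed directly. A small illustration against libc's `printf` (a platform C runtime is assumed to be linked):

use std::os::raw::{c_char, c_int};

extern "C" {
    // Fine: one named parameter, then the variadic tail.
    fn printf(fmt: *const c_char, ...) -> c_int;
    // Not valid Rust, which is why the extra argument check is needed:
    // fn mystery(...) -> c_int;
}

fn main() {
    let fmt = b"bindgen %d\n\0";
    unsafe {
        printf(fmt.as_ptr() as *const c_char, 42 as c_int);
    }
}
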
--- a/third_party/rust/bindgen/src/ir/item.rs
+++ b/third_party/rust/bindgen/src/ir/item.rs
@@ -630,17 +630,17 @@ impl Item {
         debug_assert!(
             ctx.in_codegen_phase(),
             "You're not supposed to call this yet"
         );
         if self.annotations.hide() {
             return true;
         }
 
-        let path = self.path_for_whitelisting(ctx);
+        let path = self.canonical_path(ctx);
         let name = path[1..].join("::");
         ctx.options().blacklisted_items.matches(&name) ||
         match self.kind {
             ItemKind::Type(..) => {
                 ctx.options().blacklisted_types.matches(&name) ||
                     ctx.is_replaced_type(&path, self.id)
             }
             ItemKind::Function(..) => {
@@ -870,23 +870,20 @@ impl Item {
         names.reverse();
 
         if !base_name.is_empty() {
             names.push(base_name);
         }
 
         let name = names.join("_");
 
-        let name = if opt.user_mangled == UserMangled::Yes {
-            ctx.parse_callbacks()
-                .and_then(|callbacks| callbacks.item_name(&name))
-                .unwrap_or(name)
-        } else {
-            name
-        };
+        let name = ctx
+            .parse_callbacks()
+            .and_then(|callbacks| callbacks.item_name(&name))
+            .unwrap_or(name);
 
         ctx.rust_mangle(&name).into_owned()
     }
 
     /// The exposed id that represents an unique id among the siblings of a
     /// given item.
     pub fn exposed_id(&self, ctx: &BindgenContext) -> String {
         // Only use local ids for enums, classes, structs and union types.  All
@@ -970,54 +967,16 @@ impl Item {
                     FunctionKind::Method(MethodKind::VirtualDestructor { ..  }) => cc.destructors(),
                     FunctionKind::Method(MethodKind::Static) |
                     FunctionKind::Method(MethodKind::Normal) |
                     FunctionKind::Method(MethodKind::Virtual { .. }) => cc.methods(),
                 }
             }
         }
     }
-
-    /// Returns the path we should use for whitelisting / blacklisting, which
-    /// doesn't include user-mangling.
-    pub fn path_for_whitelisting(&self, ctx: &BindgenContext) -> Vec<String> {
-        self.compute_path(ctx, UserMangled::No)
-    }
-
-    fn compute_path(&self, ctx: &BindgenContext, mangled: UserMangled) -> Vec<String> {
-        if let Some(path) = self.annotations().use_instead_of() {
-            let mut ret =
-                vec![ctx.resolve_item(ctx.root_module()).name(ctx).get()];
-            ret.extend_from_slice(path);
-            return ret;
-        }
-
-        let target = ctx.resolve_item(self.name_target(ctx));
-        let mut path: Vec<_> = target
-            .ancestors(ctx)
-            .chain(iter::once(ctx.root_module().into()))
-            .map(|id| ctx.resolve_item(id))
-            .filter(|item| {
-                item.id() == target.id() ||
-                    item.as_module().map_or(false, |module| {
-                        !module.is_inline() ||
-                            ctx.options().conservative_inline_namespaces
-                    })
-            })
-            .map(|item| {
-                ctx.resolve_item(item.name_target(ctx))
-                    .name(ctx)
-                    .within_namespaces()
-                    .user_mangled(mangled)
-                    .get()
-            })
-            .collect();
-        path.reverse();
-        path
-    }
 }
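
The methods removed above separate two notions of an item's path: `canonical_path` (run through the user's `item_name` parse callback) and `path_for_whitelisting` (raw, so whitelist regexes keep matching the original C/C++ names). The restored code keeps a single path and, as the earlier `real_canonical_name` hunk shows, applies the callback unconditionally. A sketch of that callback step with a stand-in trait, not bindgen's actual ParseCallbacks:

trait NameCallbacks {
    fn item_name(&self, original: &str) -> Option<String>;
}

struct Prefixer;

impl NameCallbacks for Prefixer {
    fn item_name(&self, original: &str) -> Option<String> {
        Some(format!("my_{}", original))
    }
}

// Ask the callback first, fall back to the computed name.
fn mangled_name(callbacks: Option<&dyn NameCallbacks>, name: String) -> String {
    callbacks
        .and_then(|c| c.item_name(&name))
        .unwrap_or(name)
}

fn main() {
    assert_eq!(mangled_name(None, "foo_bar".into()), "foo_bar");
    assert_eq!(mangled_name(Some(&Prefixer), "foo_bar".into()), "my_foo_bar");
}
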
 
 impl<T> IsOpaque for T
 where
     T: Copy + Into<ItemId>
 {
     type Extra = ();
 
@@ -1035,17 +994,17 @@ impl IsOpaque for Item {
 
     fn is_opaque(&self, ctx: &BindgenContext, _: &()) -> bool {
         debug_assert!(
             ctx.in_codegen_phase(),
             "You're not supposed to call this yet"
         );
         self.annotations.opaque() ||
             self.as_type().map_or(false, |ty| ty.is_opaque(ctx, self)) ||
-            ctx.opaque_by_name(&self.path_for_whitelisting(ctx))
+            ctx.opaque_by_name(&self.canonical_path(ctx))
     }
 }
 
 impl<T> HasVtable for T
 where
     T: Copy + Into<ItemId>
 {
     fn has_vtable(&self, ctx: &BindgenContext) -> bool {
@@ -1444,17 +1403,16 @@ impl ClangItemParser for Item {
             return ty;
         }
 
         debug!("New unresolved type reference: {:?}, {:?}", ty, location);
 
         let is_const = ty.is_const();
         let kind = TypeKind::UnresolvedTypeRef(ty, location, parent_id);
         let current_module = ctx.current_module();
-
         ctx.add_item(
             Item::new(
                 potential_id,
                 None,
                 None,
                 parent_id.unwrap_or(current_module.into()),
                 ItemKind::Type(Type::new(None, None, kind, is_const)),
             ),
@@ -1858,61 +1816,70 @@ impl ItemCanonicalPath for Item {
         if self.is_constified_enum_module(ctx) {
             path.push(CONSTIFIED_ENUM_MODULE_REPR_NAME.into());
         }
 
         return path;
     }
 
     fn canonical_path(&self, ctx: &BindgenContext) -> Vec<String> {
-        self.compute_path(ctx, UserMangled::Yes)
-    }
-}
+        if let Some(path) = self.annotations().use_instead_of() {
+            let mut ret =
+                vec![ctx.resolve_item(ctx.root_module()).name(ctx).get()];
+            ret.extend_from_slice(path);
+            return ret;
+        }
 
-/// Whether to use the user-mangled name (mangled by the `item_name` callback or
-/// not.
-///
-/// Most of the callers probably want just yes, but the ones dealing with
-/// whitelisting and blacklisting don't.
-#[derive(Copy, Clone, Debug, PartialEq)]
-enum UserMangled {
-    No,
-    Yes,
+        let target = ctx.resolve_item(self.name_target(ctx));
+        let mut path: Vec<_> = target
+            .ancestors(ctx)
+            .chain(iter::once(ctx.root_module().into()))
+            .map(|id| ctx.resolve_item(id))
+            .filter(|item| {
+                item.id() == target.id() ||
+                    item.as_module().map_or(false, |module| {
+                        !module.is_inline() ||
+                            ctx.options().conservative_inline_namespaces
+                    })
+            })
+            .map(|item| {
+                ctx.resolve_item(item.name_target(ctx))
+                    .name(ctx)
+                    .within_namespaces()
+                    .get()
+            })
+            .collect();
+        path.reverse();
+        path
+    }
 }
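
The restored `canonical_path` above walks from the item's name target through its ancestors up to the root module, filtering inline namespaces, and then reverses the collected names so the root comes first. The ancestor walk itself is easy to see in a toy arena (hypothetical `Item`, not bindgen's):

struct Item {
    name: &'static str,
    parent: Option<usize>, // index into the arena; None for the root module
}

fn canonical_path(items: &[Item], mut id: usize) -> Vec<String> {
    let mut path = Vec::new();
    loop {
        let item = &items[id];
        path.push(item.name.to_owned());
        match item.parent {
            Some(parent) => id = parent,
            None => break,
        }
    }
    path.reverse();
    path
}

fn main() {
    let items = vec![
        Item { name: "root", parent: None },
        Item { name: "ns", parent: Some(0) },
        Item { name: "Foo", parent: Some(1) },
    ];
    assert_eq!(canonical_path(&items, 2), vec!["root", "ns", "Foo"]);
}
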
 
 /// Builder struct for naming variations, which hold inside different
 /// flags for naming options.
 #[derive(Debug)]
 pub struct NameOptions<'a> {
     item: &'a Item,
     ctx: &'a BindgenContext,
     within_namespaces: bool,
-    user_mangled: UserMangled,
 }
 
 impl<'a> NameOptions<'a> {
     /// Construct a new `NameOptions`
     pub fn new(item: &'a Item, ctx: &'a BindgenContext) -> Self {
         NameOptions {
             item: item,
             ctx: ctx,
             within_namespaces: false,
-            user_mangled: UserMangled::Yes,
         }
     }
 
     /// Construct the name without the item's containing C++ namespaces mangled
     /// into it. In other words, the item's name within the item's namespace.
     pub fn within_namespaces(&mut self) -> &mut Self {
         self.within_namespaces = true;
         self
     }
 
-    fn user_mangled(&mut self, user_mangled: UserMangled) -> &mut Self {
-        self.user_mangled = user_mangled;
-        self
-    }
-
     /// Construct a name `String`
     pub fn get(&self) -> String {
         self.item.real_canonical_name(self.ctx, self)
     }
 }
--- a/third_party/rust/bindgen/src/ir/objc.rs
+++ b/third_party/rust/bindgen/src/ir/objc.rs
@@ -7,17 +7,18 @@ use super::ty::TypeKind;
 use clang;
 use clang_sys::CXChildVisit_Continue;
 use clang_sys::CXCursor_ObjCCategoryDecl;
 use clang_sys::CXCursor_ObjCClassMethodDecl;
 use clang_sys::CXCursor_ObjCClassRef;
 use clang_sys::CXCursor_ObjCInstanceMethodDecl;
 use clang_sys::CXCursor_ObjCProtocolDecl;
 use clang_sys::CXCursor_ObjCProtocolRef;
-use proc_macro2::{TokenStream, Ident, Span};
+use quote;
+use proc_macro2::{Term, Span};
 
 /// Objective C interface as used in TypeKind
 ///
 /// Also protocols and categories are parsed as this type
 #[derive(Debug)]
 pub struct ObjCInterface {
     /// The name
     /// like, NSObject
@@ -126,19 +127,20 @@ impl ObjCInterface {
                     for (id, item) in items_map
                     {
                        if let Some(ty) = item.as_type() {
                             match *ty.kind() {
                                 TypeKind::ObjCInterface(ref protocol) => {
                                     if protocol.is_protocol
                                     {
                                         debug!("Checking protocol {}, ty.name {:?}", protocol.name, ty.name());
-                                        if Some(needle.as_ref()) == ty.name() {
+                                        if Some(needle.as_ref()) == ty.name()
+                                        {
                                             debug!("Found conforming protocol {:?}", item);
-                                            interface.conforms_to.push(id);
+                                            interface.conforms_to.push(*id);
                                             break;
                                         }
                                     }
                                 }
                                 _ => {}
                             }
                         }
                     }
@@ -206,21 +208,21 @@ impl ObjCMethod {
     }
 
     /// Is this a class method?
     pub fn is_class_method(&self) -> bool {
         self.is_class_method
     }
 
     /// Formats the method call
-    pub fn format_method_call(&self, args: &[TokenStream]) -> TokenStream {
+    pub fn format_method_call(&self, args: &[quote::Tokens]) -> quote::Tokens {
         let split_name: Vec<_> = self.name
             .split(':')
             .filter(|p| !p.is_empty())
-            .map(|name| Ident::new(name, Span::call_site()))
+            .map(|name| Term::new(name, Span::call_site()))
             .collect();
 
         // No arguments
         if args.len() == 0 && split_name.len() == 1 {
             let name = &split_name[0];
             return quote! {
                 #name
             };
@@ -236,17 +238,17 @@ impl ObjCMethod {
         }
 
         // Get arguments without type signatures to pass to `msg_send!`
         let mut args_without_types = vec![];
         for arg in args.iter() {
             let arg = arg.to_string();
             let name_and_sig: Vec<&str> = arg.split(' ').collect();
             let name = name_and_sig[0];
-            args_without_types.push(Ident::new(name, Span::call_site()))
+            args_without_types.push(Term::new(name, Span::call_site()))
         };
 
         let args = split_name
             .into_iter()
             .zip(args_without_types)
             .map(|(arg, arg_val)| quote! { #arg : #arg_val });
 
         quote! {
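
`format_method_call` above splits an Objective-C selector such as `initWithFrame:style:` on `:` and zips each piece with its argument before building the `msg_send!` tokens; the Term/Ident difference is again just the proc-macro2 API rename. The same splitting logic over plain strings, as a runnable sketch:

fn format_method_call(selector: &str, args: &[&str]) -> String {
    let split_name: Vec<_> =
        selector.split(':').filter(|p| !p.is_empty()).collect();

    // No arguments: the selector is a single bare name.
    if args.is_empty() && split_name.len() == 1 {
        return split_name[0].to_owned();
    }
    assert_eq!(split_name.len(), args.len(), "selector/argument count mismatch");

    split_name
        .iter()
        .zip(args)
        .map(|(part, arg)| format!("{}: {}", part, arg))
        .collect::<Vec<_>>()
        .join(" ")
}

fn main() {
    assert_eq!(format_method_call("description", &[]), "description");
    assert_eq!(
        format_method_call("initWithFrame:style:", &["frame", "style"]),
        "initWithFrame: frame style: style"
    );
}
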
--- a/third_party/rust/bindgen/src/ir/var.rs
+++ b/third_party/rust/bindgen/src/ir/var.rs
@@ -194,19 +194,16 @@ impl ClangSubItemParser for Var {
                         (TypeKind::Int(IntKind::U8), VarType::Char(c))
                     }
                     EvalResult::Str(val) => {
                         let char_ty = Item::builtin_type(
                             TypeKind::Int(IntKind::U8),
                             true,
                             ctx,
                         );
-                        if let Some(callbacks) = ctx.parse_callbacks() {
-                            callbacks.str_macro(&name, &val);
-                        }
                         (TypeKind::Pointer(char_ty), VarType::String(val))
                     }
                     EvalResult::Int(Wrapping(value)) => {
                         let kind = ctx.parse_callbacks()
                             .and_then(|c| c.int_macro(&name, value))
                             .unwrap_or_else(|| default_macro_constant_type(value));
 
                         (TypeKind::Int(kind), VarType::Int(value))
@@ -304,17 +301,17 @@ impl ClangSubItemParser for Var {
 
 /// Try and parse a macro using all the macros parsed until now.
 fn parse_macro(
     ctx: &BindgenContext,
     cursor: &clang::Cursor,
 ) -> Option<(Vec<u8>, cexpr::expr::EvalResult)> {
     use cexpr::expr;
 
-    let mut cexpr_tokens = cursor.cexpr_tokens();
+    let mut cexpr_tokens = cursor.cexpr_tokens()?;
 
     let parser = expr::IdentifierParser::new(ctx.parsed_macros());
 
     match parser.macro_definition(&cexpr_tokens) {
         Ok((_, (id, val))) => {
             return Some((id.into(), val));
         }
         _ => {}
@@ -333,17 +330,17 @@ fn parse_macro(
         _ => None,
     }
 }
 
 fn parse_int_literal_tokens(cursor: &clang::Cursor) -> Option<i64> {
     use cexpr::expr;
     use cexpr::expr::EvalResult;
 
-    let cexpr_tokens = cursor.cexpr_tokens();
+    let cexpr_tokens = cursor.cexpr_tokens()?;
 
     // TODO(emilio): We can try to parse other kinds of literals.
     match expr::expr(&cexpr_tokens) {
         Ok((_, EvalResult::Int(Wrapping(val)))) => Some(val),
         _ => None,
     }
 }
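
Both macro parsers above switch to treating tokenization as fallible: the restored `cexpr_tokens()` returns an `Option`, so the helpers bail out early with `?` instead of assuming they always get a token vector. A stand-in sketch of that shape (the tokenizer here is invented):

fn cexpr_tokens(raw: &str) -> Option<Vec<String>> {
    if raw.is_empty() {
        None // e.g. libclang gave us nothing to tokenize
    } else {
        Some(raw.split_whitespace().map(str::to_owned).collect())
    }
}

fn parse_int_literal(raw: &str) -> Option<i64> {
    let tokens = cexpr_tokens(raw)?; // early return on None
    tokens.first()?.parse().ok()
}

fn main() {
    assert_eq!(parse_int_literal("42"), Some(42));
    assert_eq!(parse_int_literal(""), None);
    assert_eq!(parse_int_literal("abc"), None);
}
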
 
--- a/third_party/rust/bindgen/src/lib.rs
+++ b/third_party/rust/bindgen/src/lib.rs
@@ -1,16 +1,16 @@
 //! Generate Rust bindings for C and C++ libraries.
 //!
 //! Provide a C/C++ header file, receive Rust FFI code to call into C/C++
 //! functions and use types defined in the header.
 //!
 //! See the [`Builder`](./struct.Builder.html) struct for usage.
 //!
-//! See the [Users Guide](https://rust-lang.github.io/rust-bindgen/) for
+//! See the [Users Guide](https://rust-lang-nursery.github.io/rust-bindgen/) for
 //! additional documentation.
 #![deny(missing_docs)]
 #![deny(warnings)]
 #![deny(unused_extern_crates)]
 // To avoid rather annoying warnings when matching with CXCursor_xxx as a
 // constant.
 #![allow(non_upper_case_globals)]
 // `quote!` nests quite deeply.
@@ -18,17 +18,16 @@
 
 #[macro_use]
 extern crate bitflags;
 extern crate cexpr;
 #[macro_use]
 #[allow(unused_extern_crates)]
 extern crate cfg_if;
 extern crate clang_sys;
-extern crate hashbrown;
 #[macro_use]
 extern crate lazy_static;
 extern crate peeking_take_while;
 #[macro_use]
 extern crate quote;
 extern crate proc_macro2;
 extern crate regex;
 extern crate which;
@@ -84,28 +83,24 @@ pub use features::{LATEST_STABLE_RUST, R
 use features::RustFeatures;
 use ir::context::{BindgenContext, ItemId};
 use ir::item::Item;
 use parse::{ClangItemParser, ParseError};
 use regex_set::RegexSet;
 pub use codegen::EnumVariation;
 
 use std::borrow::Cow;
+use std::collections::HashMap;
 use std::fs::{File, OpenOptions};
 use std::io::{self, Write};
 use std::iter;
 use std::path::{Path, PathBuf};
 use std::process::{Command, Stdio};
 use std::sync::Arc;
 
-// Some convenient typedefs for a fast hash map and hash set.
-type HashMap<K, V> = ::hashbrown::HashMap<K, V>;
-type HashSet<K> = ::hashbrown::HashSet<K>;
-pub(crate) use ::hashbrown::hash_map::Entry;
-
 fn args_are_cpp(clang_args: &[String]) -> bool {
     return clang_args
         .windows(2)
         .any(|w| w[0] == "-xc++" || w[1] == "-xc++" || w == &["-x", "c++"]);
 }
 
 bitflags! {
     /// A type used to indicate which kind of items we have to generate.
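
The removed block above is the crate-local aliasing that pointed `HashMap`, `HashSet`, and `Entry` at hashbrown; after the backout the crate imports `std::collections::HashMap` directly. The aliasing trick itself, shown here backed by std so the sketch has no extra dependency:

// In the backed-out code these pointed at ::hashbrown::HashMap / HashSet and
// hashbrown's hash_map::Entry.
pub(crate) type HashMap<K, V> = std::collections::HashMap<K, V>;
pub(crate) type HashSet<K> = std::collections::HashSet<K>;
pub(crate) use std::collections::hash_map::Entry;

fn main() {
    let mut m: HashMap<&str, u32> = HashMap::default();
    if let Entry::Vacant(slot) = m.entry("answer") {
        slot.insert(42);
    }
    let mut s: HashSet<&str> = HashSet::default();
    s.insert("answer");
    assert_eq!(m.get("answer"), Some(&42));
    assert!(s.contains("answer"));
}
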
@@ -233,77 +228,105 @@ impl Builder {
         }
 
         self.options
             .bitfield_enums
             .get_items()
             .iter()
             .map(|item| {
                 output_vector.push("--bitfield-enum".into());
-                output_vector.push(item.to_owned());
+                output_vector.push(
+                    item.trim_left_matches("^")
+                        .trim_right_matches("$")
+                        .into(),
+                );
             })
             .count();
 
         self.options
             .rustified_enums
             .get_items()
             .iter()
             .map(|item| {
                 output_vector.push("--rustified-enum".into());
-                output_vector.push(item.to_owned());
+                output_vector.push(
+                    item.trim_left_matches("^")
+                        .trim_right_matches("$")
+                        .into(),
+                );
             })
             .count();
 
         self.options
             .constified_enum_modules
             .get_items()
             .iter()
             .map(|item| {
                 output_vector.push("--constified-enum-module".into());
-                output_vector.push(item.to_owned());
+                output_vector.push(
+                    item.trim_left_matches("^")
+                        .trim_right_matches("$")
+                        .into(),
+                );
             })
             .count();
 
         self.options
             .constified_enums
             .get_items()
             .iter()
             .map(|item| {
                 output_vector.push("--constified-enum".into());
-                output_vector.push(item.to_owned());
+                output_vector.push(
+                    item.trim_left_matches("^")
+                        .trim_right_matches("$")
+                        .into(),
+                );
             })
             .count();
 
         self.options
             .blacklisted_types
             .get_items()
             .iter()
             .map(|item| {
                 output_vector.push("--blacklist-type".into());
-                output_vector.push(item.to_owned());
+                output_vector.push(
+                    item.trim_left_matches("^")
+                        .trim_right_matches("$")
+                        .into(),
+                );
             })
             .count();
 
         self.options
             .blacklisted_functions
             .get_items()
             .iter()
             .map(|item| {
                 output_vector.push("--blacklist-function".into());
-                output_vector.push(item.to_owned());
+                output_vector.push(
+                    item.trim_left_matches("^")
+                        .trim_right_matches("$")
+                        .into(),
+                );
             })
             .count();
 
         self.options
             .blacklisted_items
             .get_items()
             .iter()
             .map(|item| {
                 output_vector.push("--blacklist-item".into());
-                output_vector.push(item.to_owned());
+                output_vector.push(
+                    item.trim_left_matches("^")
+                        .trim_right_matches("$")
+                        .into(),
+                );
             })
             .count();
 
         if !self.options.layout_tests {
             output_vector.push("--no-layout-tests".into());
         }
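
The restored `command_line_flags` above strips the `^`/`$` anchors before echoing each stored pattern back as a flag, presumably because the backed-out RegexSet keeps the unanchored item around and can push it as-is. A small sketch of that round-trip; `trim_start_matches`/`trim_end_matches` are the current names for the deprecated `trim_left_matches`/`trim_right_matches` used in the diff:

fn flag_pairs(flag: &str, items: &[&str]) -> Vec<String> {
    let mut out = Vec::new();
    for item in items {
        out.push(flag.to_owned());
        // Stored patterns are anchored ("^Foo$"); the echoed flag is not.
        out.push(item.trim_start_matches('^').trim_end_matches('$').to_owned());
    }
    out
}

fn main() {
    assert_eq!(
        flag_pairs("--whitelist-type", &["^FooBar$"]),
        vec!["--whitelist-type", "FooBar"]
    );
}
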
 
         if self.options.impl_debug {
@@ -390,19 +413,16 @@ impl Builder {
         }
         if let Some(ref graph) = self.options.emit_ir_graphviz {
             output_vector.push("--emit-ir-graphviz".into());
             output_vector.push(graph.clone())
         }
         if self.options.enable_cxx_namespaces {
             output_vector.push("--enable-cxx-namespaces".into());
         }
-        if self.options.enable_function_attribute_detection {
-            output_vector.push("--enable-function-attribute-detection".into());
-        }
         if self.options.disable_name_namespacing {
             output_vector.push("--disable-name-namespacing".into());
         }
 
         if !self.options.codegen_config.functions() {
             output_vector.push("--ignore-functions".into());
         }
 
@@ -444,26 +464,34 @@ impl Builder {
         }
 
         self.options
             .opaque_types
             .get_items()
             .iter()
             .map(|item| {
                 output_vector.push("--opaque-type".into());
-                output_vector.push(item.to_owned());
+                output_vector.push(
+                    item.trim_left_matches("^")
+                        .trim_right_matches("$")
+                        .into(),
+                );
             })
             .count();
 
         self.options
             .raw_lines
             .iter()
             .map(|item| {
                 output_vector.push("--raw-line".into());
-                output_vector.push(item.to_owned());
+                output_vector.push(
+                    item.trim_left_matches("^")
+                        .trim_right_matches("$")
+                        .into(),
+                );
             })
             .count();
 
         if self.options.use_core {
             output_vector.push("--use-core".into());
         }
 
         if self.options.conservative_inline_namespaces {
@@ -471,37 +499,49 @@ impl Builder {
         }
 
         self.options
             .whitelisted_functions
             .get_items()
             .iter()
             .map(|item| {
                 output_vector.push("--whitelist-function".into());
-                output_vector.push(item.to_owned());
+                output_vector.push(
+                    item.trim_left_matches("^")
+                        .trim_right_matches("$")
+                        .into(),
+                );
             })
             .count();
 
         self.options
             .whitelisted_types
             .get_items()
             .iter()
             .map(|item| {
                 output_vector.push("--whitelist-type".into());
-                output_vector.push(item.to_owned());
+                output_vector.push(
+                    item.trim_left_matches("^")
+                        .trim_right_matches("$")
+                        .into(),
+                );
             })
             .count();
 
         self.options
             .whitelisted_vars
             .get_items()
             .iter()
             .map(|item| {
                 output_vector.push("--whitelist-var".into());
-                output_vector.push(item.to_owned());
+                output_vector.push(
+                    item.trim_left_matches("^")
+                        .trim_right_matches("$")
+                        .into(),
+                );
             })
             .count();
 
         output_vector.push("--".into());
 
         if !self.options.clang_args.is_empty() {
             output_vector.extend(self.options.clang_args.iter().cloned());
         }
@@ -509,20 +549,16 @@ impl Builder {
         if self.input_headers.len() > 1 {
             output_vector.extend(
                 self.input_headers[..self.input_headers.len() - 1]
                     .iter()
                     .cloned(),
             );
         }
 
-        if !self.options.record_matches {
-            output_vector.push("--no-record-matches".into());
-        }
-
         if !self.options.rustfmt_bindings {
             output_vector.push("--no-rustfmt-bindings".into());
         }
 
         if let Some(path) = self.options
             .rustfmt_configuration_file
             .as_ref()
             .and_then(|f| f.to_str())
@@ -532,37 +568,49 @@ impl Builder {
         }
 
         self.options
             .no_partialeq_types
             .get_items()
             .iter()
             .map(|item| {
                 output_vector.push("--no-partialeq".into());
-                output_vector.push(item.to_owned());
+                output_vector.push(
+                    item.trim_left_matches("^")
+                        .trim_right_matches("$")
+                        .into(),
+                );
             })
             .count();
 
         self.options
             .no_copy_types
             .get_items()
             .iter()
             .map(|item| {
                 output_vector.push("--no-copy".into());
-                output_vector.push(item.to_owned());
+                output_vector.push(
+                    item.trim_left_matches("^")
+                        .trim_right_matches("$")
+                        .into(),
+                );
             })
             .count();
 
         self.options
             .no_hash_types
             .get_items()
             .iter()
             .map(|item| {
                 output_vector.push("--no-hash".into());
-                output_vector.push(item.to_owned());
+                output_vector.push(
+                    item.trim_left_matches("^")
+                        .trim_right_matches("$")
+                        .into(),
+                );
             })
             .count();
 
         output_vector
     }
 
     /// Add an input C/C++ header to generate bindings for.
     ///
@@ -1004,28 +1052,16 @@ impl Builder {
     }
 
     /// Enable C++ namespaces.
     pub fn enable_cxx_namespaces(mut self) -> Builder {
         self.options.enable_cxx_namespaces = true;
         self
     }
 
-    /// Enable detecting must_use attributes on C functions.
-    ///
-    /// This is quite slow in some cases (see #1465), so it's disabled by
-    /// default.
-    ///
-    /// Note that for this to do something meaningful for now at least, the rust
-    /// target version has to have support for `#[must_use]`.
-    pub fn enable_function_attribute_detection(mut self) -> Self {
-        self.options.enable_function_attribute_detection = true;
-        self
-    }
-
     /// Disable name auto-namespacing.
     ///
     /// By default, bindgen mangles names like `foo::bar::Baz` to look like
     /// `foo_bar_Baz` instead of just `Baz`.
     ///
     /// This method disables that behavior.
     ///
     /// Note that this intentionally does not change the names used for
@@ -1141,22 +1177,16 @@ impl Builder {
     }
 
     /// Set whether rustfmt should format the generated bindings.
     pub fn rustfmt_bindings(mut self, doit: bool) -> Self {
         self.options.rustfmt_bindings = doit;
         self
     }
 
-    /// Set whether we should record matched items in our regex sets.
-    pub fn record_matches(mut self, doit: bool) -> Self {
-        self.options.record_matches = doit;
-        self
-    }
-
     /// Set the absolute path to the rustfmt configuration file, if None, the standard rustfmt
     /// options are used.
     pub fn rustfmt_configuration_file(mut self, path: Option<PathBuf>) -> Self {
         self = self.rustfmt_bindings(true);
         self.options.rustfmt_configuration_file = path;
         self
     }
 
@@ -1356,20 +1386,16 @@ struct BindgenOptions {
 
     /// Output graphviz dot file.
     emit_ir_graphviz: Option<String>,
 
     /// True if we should emulate C++ namespaces with Rust modules in the
     /// generated bindings.
     enable_cxx_namespaces: bool,
 
-    /// True if we should try to find unexposed attributes in functions, in
-    /// order to be able to generate #[must_use] attributes in Rust.
-    enable_function_attribute_detection: bool,
-
     /// True if we should avoid mangling names with namespaces.
     disable_name_namespacing: bool,
 
     /// True if we should generate layout tests for generated structures.
     layout_tests: bool,
 
     /// True if we should implement the Debug trait for C/C++ structures and types
     /// that do not support automatically deriving Debug.
@@ -1496,22 +1522,16 @@ struct BindgenOptions {
     prepend_enum_name: bool,
 
     /// Version of the Rust compiler to target
     rust_target: RustTarget,
 
     /// Features to enable, derived from `rust_target`
     rust_features: RustFeatures,
 
-    /// Whether we should record which items in the regex sets ever matched.
-    ///
-    /// This may be a bit slower, but will enable reporting of unused whitelist
-    /// items via the `error!` log.
-    record_matches: bool,
-
     /// Whether rustfmt should format the generated bindings.
     rustfmt_bindings: bool,
 
     /// The absolute path to the rustfmt configuration file, if None, the standard rustfmt
     /// options are used.
 
     rustfmt_configuration_file: Option<PathBuf>,
 
@@ -1527,36 +1547,30 @@ struct BindgenOptions {
 
 /// TODO(emilio): This is sort of a lie (see the error message that results from
 /// removing this), but since we don't share references across panic boundaries
 /// it's ok.
 impl ::std::panic::UnwindSafe for BindgenOptions {}
 
 impl BindgenOptions {
     fn build(&mut self) {
-        let mut regex_sets = [
-            &mut self.whitelisted_vars,
-            &mut self.whitelisted_types,
-            &mut self.whitelisted_functions,
-            &mut self.blacklisted_types,
-            &mut self.blacklisted_functions,
-            &mut self.blacklisted_items,
-            &mut self.opaque_types,
-            &mut self.bitfield_enums,
-            &mut self.constified_enums,
-            &mut self.constified_enum_modules,
-            &mut self.rustified_enums,
-            &mut self.no_partialeq_types,
-            &mut self.no_copy_types,
-            &mut self.no_hash_types,
-        ];
-        let record_matches = self.record_matches;
-        for regex_set in &mut regex_sets {
-            regex_set.build(record_matches);
-        }
+        self.whitelisted_vars.build();
+        self.whitelisted_types.build();
+        self.whitelisted_functions.build();
+        self.blacklisted_types.build();
+        self.blacklisted_functions.build();
+        self.blacklisted_items.build();
+        self.opaque_types.build();
+        self.bitfield_enums.build();
+        self.constified_enums.build();
+        self.constified_enum_modules.build();
+        self.rustified_enums.build();
+        self.no_partialeq_types.build();
+        self.no_copy_types.build();
+        self.no_hash_types.build();
     }
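
The backed-out `build` above gathers every RegexSet into an array of `&mut` references and threads a shared `record_matches` flag through one loop; the restored version simply calls `.build()` on each field. The borrow pattern of the newer form, with a stand-in RegexSet type and only two of the fields:

#[derive(Default)]
struct RegexSet {
    built: bool,
}

impl RegexSet {
    fn build(&mut self, record_matches: bool) {
        // Real bindgen compiles the regexes here; record_matches additionally
        // tracks which patterns ever matched, so unused ones can be reported.
        let _ = record_matches;
        self.built = true;
    }
}

#[derive(Default)]
struct Options {
    whitelisted_types: RegexSet,
    whitelisted_functions: RegexSet,
    record_matches: bool,
}

impl Options {
    fn build(&mut self) {
        // Copy the flag out first; the array below mutably borrows the fields.
        let record_matches = self.record_matches;
        let mut regex_sets = [
            &mut self.whitelisted_types,
            &mut self.whitelisted_functions,
        ];
        for set in &mut regex_sets {
            set.build(record_matches);
        }
    }
}

fn main() {
    let mut opts = Options { record_matches: true, ..Default::default() };
    opts.build();
    assert!(opts.whitelisted_types.built && opts.whitelisted_functions.built);
}
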
 
     /// Update rust target version
     pub fn set_rust_target(&mut self, rust_target: RustTarget) {
         self.rust_target = rust_target;
 
         // Keep rust_features synced with rust_target
         self.rust_features = rust_target.into();
@@ -1599,17 +1613,16 @@ impl Default for BindgenOptions {
             derive_debug: true,
             derive_default: false,
             derive_hash: false,
             derive_partialord: false,
             derive_ord: false,
             derive_partialeq: false,
             derive_eq: false,
             enable_cxx_namespaces: false,
-            enable_function_attribute_detection: false,
             disable_name_namespacing: false,
             use_core: false,
             ctypes_prefix: None,
             namespaced_constants: true,
             msvc_mangling: false,
             convert_floats: true,
             raw_lines: vec![],
             module_lines: HashMap::default(),
@@ -1623,17 +1636,16 @@ impl Default for BindgenOptions {
             generate_inline_functions: false,
             whitelist_recursively: true,
             generate_block: false,
             objc_extern_crate: false,
             block_extern_crate: false,
             enable_mangling: true,
             prepend_enum_name: true,
             time_phases: false,
-            record_matches: true,
             rustfmt_bindings: true,
             rustfmt_configuration_file: None,
             no_partialeq_types: Default::default(),
             no_copy_types: Default::default(),
             no_hash_types: Default::default(),
         }
     }
 }
@@ -1658,28 +1670,26 @@ fn ensure_libclang_is_loaded() {
 
     clang_sys::set_library(Some(LIBCLANG.clone()));
 }
 
 /// Generated Rust bindings.
 #[derive(Debug)]
 pub struct Bindings {
     options: BindgenOptions,
-    module: proc_macro2::TokenStream,
+    module: quote::Tokens,
 }
 
 impl Bindings {
     /// Generate bindings for the given options.
     pub(crate) fn generate(
         mut options: BindgenOptions,
     ) -> Result<Bindings, ()> {
         ensure_libclang_is_loaded();
 
-        debug!("Generating bindings, libclang at {}", clang_sys::get_library().unwrap().path().display());
-
         options.build();
 
         // Filter out include paths and similar stuff, so we don't incorrectly
         // promote them to `-isystem`.
         let clang_args_for_clang_sys = {
             let mut last_was_include_prefix = false;
             options.clang_args.iter().filter(|arg| {
                 if last_was_include_prefix {
@@ -1699,25 +1709,21 @@ impl Bindings {
                 if arg.starts_with("-I") || arg.starts_with("--include-directory=") {
                     return false;
                 }
 
                 true
             }).cloned().collect::<Vec<_>>()
         };
 
-        debug!("Trying to find clang with flags: {:?}", clang_args_for_clang_sys);
-
         // TODO: Make this path fixup configurable?
         if let Some(clang) = clang_sys::support::Clang::find(
             None,
             &clang_args_for_clang_sys,
         ) {
-            debug!("Found clang: {:?}", clang);
-
             // If --target is specified, assume caller knows what they're doing
             // and don't mess with include paths for them
             let has_target_arg = options
                 .clang_args
                 .iter()
                 .rposition(|arg| arg.starts_with("--target"))
                 .is_some();
             if !has_target_arg {
@@ -1767,18 +1773,16 @@ impl Bindings {
                 return Err(());
             }
         }
 
         for f in options.input_unsaved_files.iter() {
             options.clang_args.push(f.name.to_str().unwrap().to_owned())
         }
 
-        debug!("Fixed-up options: {:?}", options);
-
         let time_phases = options.time_phases;
         let mut context = BindgenContext::new(options);
 
         {
             let _t = time::Timer::new("parse")
                                   .with_output(time_phases);
             parse(&mut context)?;
         }
@@ -1830,17 +1834,17 @@ impl Bindings {
 
         let bindings = self.module.to_string();
 
         match self.rustfmt_generated_string(&bindings) {
             Ok(rustfmt_bindings) => {
                 writer.write(rustfmt_bindings.as_bytes())?;
             },
             Err(err) => {
-                eprintln!("Failed to run rustfmt: {} (non-fatal, continuing)", err);
+                eprintln!("{:?}", err);
                 writer.write(bindings.as_bytes())?;
             },
         }
         Ok(())
     }
 
     /// Checks if rustfmt_bindings is set and runs rustfmt on the string
     fn rustfmt_generated_string<'a>(
@@ -1854,17 +1858,17 @@ impl Bindings {
             return Ok(Cow::Borrowed(source));
         }
 
         let rustfmt = match self.options.rustfmt_path {
             Some(ref p) => Cow::Borrowed(p),
             None => {
                 let path = which::which("rustfmt")
                     .map_err(|e| {
-                        io::Error::new(io::ErrorKind::Other, format!("{}", e))
+                        io::Error::new(io::ErrorKind::Other, e.to_owned())
                     })?;
 
                 Cow::Owned(path)
             }
         };
 
         let mut cmd = Command::new(&*rustfmt);
 
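The lib.rs hunks above restore the older rustfmt handling in `Bindings::write` and `rustfmt_generated_string`: locate a `rustfmt` binary (via `options.rustfmt_path` or `which::which`), pipe the generated bindings through it, and fall back to the unformatted text if anything goes wrong. A self-contained sketch of that fallback pattern follows; it is not the crate's actual code, `format_or_passthrough` is a made-up helper, and `rustfmt` is assumed to be on `PATH`.

```rust
use std::borrow::Cow;
use std::io::{self, Write};
use std::process::{Command, Stdio};

/// Pipe `source` through an external `rustfmt`; on any failure, hand back
/// the input unchanged, mirroring the fallback in `Bindings::write`.
fn format_or_passthrough(source: &str) -> Cow<str> {
    let run = || -> io::Result<String> {
        // Note: for large inputs the real code feeds stdin from a separate
        // thread to avoid pipe deadlock; a tiny string is fine inline.
        let mut child = Command::new("rustfmt")
            .stdin(Stdio::piped())
            .stdout(Stdio::piped())
            .stderr(Stdio::null())
            .spawn()?;
        child
            .stdin
            .as_mut()
            .expect("stdin was piped above")
            .write_all(source.as_bytes())?;
        let output = child.wait_with_output()?;
        if !output.status.success() {
            return Err(io::Error::new(io::ErrorKind::Other, "rustfmt exited with an error"));
        }
        String::from_utf8(output.stdout)
            .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))
    };

    match run() {
        Ok(formatted) => Cow::Owned(formatted),
        Err(err) => {
            eprintln!("{:?}", err); // same terse report the restored code uses
            Cow::Borrowed(source)
        }
    }
}

fn main() {
    let bindings = "fn  main( ){ println!(\"hi\"); }";
    print!("{}", format_or_passthrough(bindings));
}
```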
--- a/third_party/rust/bindgen/src/options.rs
+++ b/third_party/rust/bindgen/src/options.rs
@@ -272,20 +272,16 @@ where
                 .long("verbose")
                 .help("Print verbose error messages."),
             Arg::with_name("dump-preprocessed-input")
                 .long("dump-preprocessed-input")
                 .help("Preprocess and dump the input header files to disk. \
                        Useful when debugging bindgen, using C-Reduce, or when \
                        filing issues. The resulting file will be named \
                        something like `__bindgen.i` or `__bindgen.ii`."),
-            Arg::with_name("no-record-matches")
-                .long("no-record-matches")
-                .help("Do not record matching items in the regex sets. \
-                      This disables reporting of unused items."),
             Arg::with_name("no-rustfmt-bindings")
                 .long("no-rustfmt-bindings")
                 .help("Do not format the generated bindings with rustfmt."),
             Arg::with_name("rustfmt-bindings")
                 .long("rustfmt-bindings")
                 .help("Format the generated bindings with rustfmt. DEPRECATED: \
                        --rustfmt-bindings is now enabled by default. Disable \
                        with --no-rustfmt-bindings."),
@@ -314,20 +310,16 @@ where
                 .number_of_values(1),
             Arg::with_name("no-hash")
                 .long("no-hash")
                 .help("Avoid deriving Hash for types matching <regex>.")
                 .value_name("regex")
                 .takes_value(true)
                 .multiple(true)
                 .number_of_values(1),
-            Arg::with_name("enable-function-attribute-detection")
-                .long("enable-function-attribute-detection")
-                .help("Enables detecting unexposed attributes in functions (slow).
-                       Used to generate #[must_use] annotations."),
         ]) // .args()
         .get_matches_from(args);
 
     let mut builder = builder();
 
     if let Some(header) = matches.value_of("header") {
         builder = builder.header(header);
     } else {
@@ -487,20 +479,16 @@ where
     if let Some(path) = matches.value_of("emit-ir-graphviz") {
         builder = builder.emit_ir_graphviz(path);
     }
 
     if matches.is_present("enable-cxx-namespaces") {
         builder = builder.enable_cxx_namespaces();
     }
 
-    if matches.is_present("enable-function-attribute-detection") {
-        builder = builder.enable_function_attribute_detection();
-    }
-
     if matches.is_present("disable-name-namespacing") {
         builder = builder.disable_name_namespacing();
     }
 
     if matches.is_present("ignore-functions") {
         builder = builder.ignore_functions();
     }
 
@@ -590,20 +578,16 @@ where
     } else {
         Box::new(io::BufWriter::new(io::stdout())) as Box<io::Write>
     };
 
     if matches.is_present("dump-preprocessed-input") {
         builder.dump_preprocessed_input()?;
     }
 
-    if matches.is_present("no-record-matches") {
-        builder = builder.record_matches(false);
-    }
-
     let no_rustfmt_bindings = matches.is_present("no-rustfmt-bindings");
     if no_rustfmt_bindings {
         builder = builder.rustfmt_bindings(false);
     }
 
     if let Some(path_str) = matches.value_of("rustfmt-configuration-file") {
         let path = PathBuf::from(path_str);
 
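The options.rs changes above drop the `--no-record-matches` and `--enable-function-attribute-detection` flags; the surviving flags keep the same pattern of declaring an `Arg`, then translating `matches.is_present(...)` into a builder call. A minimal sketch of that pattern, assuming clap 2.x (an assumption; the exact vendored clap version is not shown in this diff) and a hypothetical `example` binary name:

```rust
// Cargo.toml (assumed): clap = "2"
extern crate clap;

use clap::{App, Arg};

fn main() {
    // Declare a boolean flag the same way options.rs does...
    let matches = App::new("example")
        .args(&[Arg::with_name("no-rustfmt-bindings")
            .long("no-rustfmt-bindings")
            .help("Do not format the generated bindings with rustfmt.")])
        .get_matches_from(vec!["example", "--no-rustfmt-bindings"]);

    // ...then turn its presence into an option/builder toggle.
    let rustfmt_bindings = !matches.is_present("no-rustfmt-bindings");
    assert!(!rustfmt_bindings);
    println!("rustfmt_bindings = {}", rustfmt_bindings);
}
```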
--- a/third_party/rust/bindgen/src/regex_set.rs
+++ b/third_party/rust/bindgen/src/regex_set.rs
@@ -1,92 +1,65 @@
 //! A type that represents the union of a set of regular expressions.
 
 use regex::RegexSet as RxSet;
-use std::cell::Cell;
 
 /// A dynamic set of regular expressions.
-#[derive(Debug, Default)]
+#[derive(Debug)]
 pub struct RegexSet {
     items: Vec<String>,
-    /// Whether any of the items in the set was ever matched. The length of this
-    /// vector is exactly the length of `items`.
-    matched: Vec<Cell<bool>>,
     set: Option<RxSet>,
-    /// Whether we should record matching items in the `matched` vector or not.
-    record_matches: bool,
 }
 
 impl RegexSet {
     /// Is this set empty?
     pub fn is_empty(&self) -> bool {
         self.items.is_empty()
     }
 
     /// Insert a new regex into this set.
     pub fn insert<S>(&mut self, string: S)
     where
         S: AsRef<str>,
     {
-        self.items.push(string.as_ref().to_owned());
-        self.matched.push(Cell::new(false));
+        self.items.push(format!("^{}$", string.as_ref()));
         self.set = None;
     }
 
     /// Returns slice of String from its field 'items'
     pub fn get_items(&self) -> &[String] {
         &self.items[..]
     }
 
-    /// Returns an iterator over regexes in the set which didn't match any
-    /// strings yet.
-    pub fn unmatched_items(&self) -> impl Iterator<Item = &String> {
-        self.items.iter().enumerate().filter_map(move |(i, item)| {
-            if !self.record_matches || self.matched[i].get() {
-                return None;
-            }
-
-            Some(item)
-        })
-    }
-
     /// Construct a RegexSet from the set of entries we've accumulated.
     ///
     /// Must be called before calling `matches()`, or it will always return
     /// false.
-    pub fn build(&mut self, record_matches: bool) {
-        let items = self.items.iter().map(|item| format!("^{}$", item));
-        self.record_matches = record_matches;
-        self.set = match RxSet::new(items) {
+    pub fn build(&mut self) {
+        self.set = match RxSet::new(&self.items) {
             Ok(x) => Some(x),
             Err(e) => {
                 error!("Invalid regex in {:?}: {:?}", self.items, e);
                 None
             }
         }
     }
 
     /// Does the given `string` match any of the regexes in this set?
     pub fn matches<S>(&self, string: S) -> bool
     where
         S: AsRef<str>,
     {
         let s = string.as_ref();
-        let set = match self.set {
-            Some(ref set) => set,
-            None => return false,
-        };
-
-        if !self.record_matches {
-            return set.is_match(s);
-        }
-
-        let matches = set.matches(s);
-        if !matches.matched_any() {
-            return false;
-        }
-        for i in matches.iter() {
-            self.matched[i].set(true);
-        }
-
-        true
+        self.set.as_ref().map(|set| set.is_match(s)).unwrap_or(
+            false,
+        )
     }
 }
+
+impl Default for RegexSet {
+    fn default() -> Self {
+        RegexSet {
+            items: vec![],
+            set: None,
+        }
+    }
+}
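The restored `RegexSet` wrapper anchors every pattern with `^...$` at insertion time and compiles the union lazily in `build()`, so `matches()` silently returns false until `build()` has run. The anchoring behaviour can be reproduced directly on `regex::RegexSet`; a sketch assuming the regex crate, not bindgen's own wrapper type:

```rust
// Cargo.toml (assumed): regex = "1"
extern crate regex;

use regex::RegexSet;

fn main() {
    // Anchor each whitelist-style pattern exactly as the restored
    // `RegexSet::insert` does, then compile the union once.
    let patterns = ["Foo.*", "Bar"].iter().map(|p| format!("^{}$", p));
    let set = RegexSet::new(patterns).expect("patterns are valid");

    assert!(set.is_match("FooBaz"));     // "^Foo.*$" matches the whole string
    assert!(!set.is_match("PrefixBar")); // anchoring rejects partial matches
    println!("ok");
}
```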
--- a/third_party/rust/clang-sys/.cargo-checksum.json
+++ b/third_party/rust/clang-sys/.cargo-checksum.json
@@ -1,1 +1,1 @@
-{"files":{"CHANGELOG.md":"e6c5ac39705b3eda6359b294d138e316156b6b8e0212077557930c47073d47e2","Cargo.toml":"ba1d946f62796dd8723dcefccfef073d3cb50fc2313c89e3de29d936480aab02","LICENSE.txt":"3ddf9be5c28fe27dad143a5dc76eea25222ad1dd68934a047064e56ed2fa40c5","README.md":"21ff1488c29d612cee0d10fc48dab7efbd0a8a24158ee709b88e312e939008a0","appveyor.yml":"1a5d6953fb6e373dc760d50659628f04d48f68bd3f3f8e434800b31e74ef1497","build.rs":"06ef3732108d09118f50294fbca2b90857085fc3364777fca77eb022866399ac","build/common.rs":"4532706a124c0ff40332d5a72cd1e465bee72cd118f8071fbb6e70dde00c68f8","build/dynamic.rs":"0a498915b6a171a8d42072989a6155690a1286ff8783d58bb28346f735e2aea5","build/static.rs":"ff8de756b33efff75770a5552ff4573fe1bbb68ec8bd40d57854a05adb7e9d5c","ci/before_install.sh":"cb6de6f230066004f568d0b5e9d940b3793ff8ee7eb5d35a4f9ec777ee369725","ci/install.bat":"342ce7596152b00c8217364e9475534f6c50a4e597a31fee03205eaf2a1603f0","ci/script.sh":"52db533df970f1b44c0b2663f3bfac4476f2150e94fc392b2bab4145325f418b","ci/test_script.bat":"901609adc59dab2730e16dd374d0351d6406e7559fe4d86ddd9a857ad9c84d2a","clippy.toml":"fcf54943ba571514b244cc098ce08671b4117167733e8107e799d533a12a2195","src/lib.rs":"d9952a832909e5490fca27b08d349a0d36c4004cd04fdb024ddf246cb81503d1","src/link.rs":"c4bc39dad6476d36737aece37979c50e20f6c4724cab5d97f5d1f7374a922967","src/support.rs":"3eae21722287a462921825929c48802642d28ca194d1bc43aee47739350ecd17","tests/header.h":"1b15a686d1c06561960045a26c25a34d840f26c8246f2f5e630f993b69c7492c","tests/lib.rs":"d5d39e3ffbdc7303c2f1b9ae09f60ebf546b7c2c3599ec5d0c99d23332456908"},"package":"6ef0c1bcf2e99c649104bd7a7012d8f8802684400e03db0ec0af48583c6fa0e4"}
\ No newline at end of file
+{"files":{"CHANGELOG.md":"ed5eb852120b184fda36d06a920080e5bd377b018548f2d37eb575dee32a20b1","Cargo.toml":"ae115d5ba8ac3e9074f6f8e64a89ec88579b388db75bbbd655d010610661b19a","LICENSE.txt":"3ddf9be5c28fe27dad143a5dc76eea25222ad1dd68934a047064e56ed2fa40c5","README.md":"21ff1488c29d612cee0d10fc48dab7efbd0a8a24158ee709b88e312e939008a0","appveyor.yml":"1a5d6953fb6e373dc760d50659628f04d48f68bd3f3f8e434800b31e74ef1497","build.rs":"06ef3732108d09118f50294fbca2b90857085fc3364777fca77eb022866399ac","build/common.rs":"4532706a124c0ff40332d5a72cd1e465bee72cd118f8071fbb6e70dde00c68f8","build/dynamic.rs":"544e5cb9fe364165a641192fc3f1ab916cb1d632a92536b8ce77a941fbb3082b","build/static.rs":"ff8de756b33efff75770a5552ff4573fe1bbb68ec8bd40d57854a05adb7e9d5c","ci/before_install.sh":"efb85403bedbfc6db19a8c41c61be98eac9f6e09ac6a33c0bdaf2828b5ea73ba","ci/install.bat":"bb02414d81dd23c8597f82f390769b084c2d32129ed197475a769a25ee97249a","ci/script.sh":"52db533df970f1b44c0b2663f3bfac4476f2150e94fc392b2bab4145325f418b","ci/test_script.bat":"901609adc59dab2730e16dd374d0351d6406e7559fe4d86ddd9a857ad9c84d2a","clippy.toml":"fcf54943ba571514b244cc098ce08671b4117167733e8107e799d533a12a2195","src/lib.rs":"d9952a832909e5490fca27b08d349a0d36c4004cd04fdb024ddf246cb81503d1","src/link.rs":"625ac2a7c5d0b85122e4a68542f235f09a069e7743f67f94c433f0f4313c09db","src/support.rs":"3eae21722287a462921825929c48802642d28ca194d1bc43aee47739350ecd17","tests/header.h":"1b15a686d1c06561960045a26c25a34d840f26c8246f2f5e630f993b69c7492c","tests/lib.rs":"d5d39e3ffbdc7303c2f1b9ae09f60ebf546b7c2c3599ec5d0c99d23332456908"},"package":"481e42017c1416b1c0856ece45658ecbb7c93d8a93455f7e5fa77f3b35455557"}
\ No newline at end of file
--- a/third_party/rust/clang-sys/CHANGELOG.md
+++ b/third_party/rust/clang-sys/CHANGELOG.md
@@ -1,23 +1,8 @@
-## [0.26.4] - 2018-12-29
-
-### Changed
-- Added shared library path to `SharedLibrary` struct
-
-## [0.26.3] - 2018-11-14
-
-### Changed
-- Disable default features of `libc` dependency
-
-## [0.26.2] - 2018-11-03
-
-### Fixed
-- Fixed dynamic linking on macOS
-
 ## [0.26.1] - 2018-10-10
 
 ### Fixed
 - Fixed support for finding libraries in `bin` directories on Windows
 
 ## [0.26.0] - 2018-10-07
 
 ### Changed
--- a/third_party/rust/clang-sys/Cargo.toml
+++ b/third_party/rust/clang-sys/Cargo.toml
@@ -7,31 +7,30 @@
 #
 # If you believe there's an error in this file please file an
 # issue against the rust-lang/cargo repository. If you're
 # editing this file be aware that the upstream Cargo.toml
 # will likely look very different (and much more reasonable)
 
 [package]
 name = "clang-sys"
-version = "0.26.4"
+version = "0.26.1"
 authors = ["Kyle Mayes <kyle@mayeses.com>"]
 build = "build.rs"
 links = "clang"
 description = "Rust bindings for libclang."
 documentation = "https://kylemayes.github.io/clang-sys/3_5/clang_sys"
 readme = "README.md"
 license = "Apache-2.0"
 repository = "https://github.com/KyleMayes/clang-sys"
 [dependencies.glob]
 version = "0.2.11"
 
 [dependencies.libc]
 version = "0.2.39"
-default-features = false
 
 [dependencies.libloading]
 version = "0.5.0"
 optional = true
 [build-dependencies.glob]
 version = "0.2.11"
 
 [features]
--- a/third_party/rust/clang-sys/build/dynamic.rs
+++ b/third_party/rust/clang-sys/build/dynamic.rs
@@ -195,19 +195,19 @@ pub fn link() {
                 "using '{}', so 'libclang.lib' or 'libclang.dll.a' must be available in {}",
                 filename,
                 lib.display(),
             );
         }
 
         println!("cargo:rustc-link-lib=dylib=libclang");
     } else {
-        let name = filename.trim_left_matches("lib");
+        let name = filename.replace("lib", "");
 
-        // Strip extensions and trailing version numbers (e.g., the `.so.7.0` in `libclang.so.7.0`).
-        let name = match name.find(".dylib").or(name.find(".so")) {
+        // Strip trailing version numbers (e.g., the `.7.0` in `libclang.so.7.0`).
+        let name = match name.find(".so") {
+            None => &name,
             Some(index) => &name[0..index],
-            None => &name,
         };
 
         println!("cargo:rustc-link-lib=dylib={}", name);
     }
 }
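The dynamic.rs hunk above reinstates the simpler link-name derivation: strip `lib` and cut the name at the first `.so`, without the newer `.dylib` handling or `trim_left_matches`. A self-contained sketch of the restored behaviour; the `link_name` helper is illustrative, not part of the crate:

```rust
/// Derive the `-l` name from a libclang file name the way the restored
/// build/dynamic.rs does: drop "lib" and everything from the first ".so"
/// onwards, e.g. "libclang.so.7.0" -> "clang".
fn link_name(filename: &str) -> String {
    let name = filename.replace("lib", "");
    match name.find(".so") {
        None => name,
        Some(index) => name[..index].to_string(),
    }
}

fn main() {
    assert_eq!(link_name("libclang.so.7.0"), "clang");
    assert_eq!(link_name("libclang.so"), "clang");
    println!("cargo:rustc-link-lib=dylib={}", link_name("libclang.so.7.0"));
}
```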
--- a/third_party/rust/clang-sys/ci/before_install.sh
+++ b/third_party/rust/clang-sys/ci/before_install.sh
@@ -1,35 +1,39 @@
 set -e
 pushd ~
 
 # Workaround for Travis CI macOS bug (https://github.com/travis-ci/travis-ci/issues/6307)
 if [ "${TRAVIS_OS_NAME}" == "osx" ]; then
     rvm get head || true
 fi
 
+function llvm_linux_target_triple() {
+    if [ "$1" == "5.0" ]; then
+        echo "linux-x86_64-ubuntu14.04"
+    else
+        echo "x86_64-linux-gnu-ubuntu-14.04"
+    fi
+}
+
 function llvm_version_triple() {
     if [ "$1" == "3.5" ]; then
         echo "3.5.2"
     elif [ "$1" == "3.6" ]; then
         echo "3.6.2"
     elif [ "$1" == "3.7" ]; then
         echo "3.7.1"
     elif [ "$1" == "3.8" ]; then
         echo "3.8.1"
     elif [ "$1" == "3.9" ]; then
         echo "3.9.0"
     elif [ "$1" == "4.0" ]; then
         echo "4.0.1"
     elif [ "$1" == "5.0" ]; then
-        echo "5.0.2"
-    elif [ "$1" == "6.0" ]; then
-        echo "6.0.1"
-    elif [ "$1" == "7.0" ]; then
-        echo "7.0.0"
+        echo "5.0.0"
     fi
 }
 
 function llvm_download() {
     export LLVM_VERSION_TRIPLE=`llvm_version_triple ${LLVM_VERSION}`
     export LLVM=clang+llvm-${LLVM_VERSION_TRIPLE}-$1
     export LLVM_DIRECTORY="$HOME/.llvm/${LLVM}"
 
@@ -40,17 +44,17 @@ function llvm_download() {
         mkdir -p "${LLVM_DIRECTORY}"
         tar xf ${LLVM}.tar.xz -C "${LLVM_DIRECTORY}" --strip-components=1
     fi
 
     export LLVM_CONFIG_PATH="${LLVM_DIRECTORY}/bin/llvm-config"
 }
 
 if [ "${TRAVIS_OS_NAME}" == "linux" ]; then
-    llvm_download x86_64-linux-gnu-ubuntu-14.04
+    llvm_download `llvm_linux_target_triple ${LLVM_VERSION}`
     export LD_LIBRARY_PATH="${LLVM_DIRECTORY}/lib":$LD_LIBRARY_PATH
 else
     llvm_download x86_64-apple-darwin
     cp "${LLVM_DIRECTORY}/lib/libclang.dylib" /usr/local/lib/libclang.dylib
     export DYLD_LIBRARY_PATH="${LLVM_DIRECTORY}/lib":$DYLD_LIBRARY_PATH
 fi
 
 popd
--- a/third_party/rust/clang-sys/ci/install.bat
+++ b/third_party/rust/clang-sys/ci/install.bat
@@ -1,8 +1,8 @@
-curl -sSf https://static.rust-lang.org/dist/rust-1.30.0-i686-pc-windows-msvc.exe -o rust.exe
+curl -sSf https://static.rust-lang.org/dist/rust-1.24.0-i686-pc-windows-msvc.exe -o rust.exe
 rust.exe /VERYSILENT /NORESTART /DIR="C:\Rust"
 set PATH=%PATH%;C:\Rust\bin
 
 curl -sSf http://releases.llvm.org/%LLVM_VERSION%/LLVM-%LLVM_VERSION%-win32.exe -o LLVM.exe
 7z x LLVM.exe -oC:\LLVM
 set PATH=%PATH%;C:\LLVM\bin
 set LIBCLANG_PATH=C:\LLVM\bin
--- a/third_party/rust/clang-sys/src/link.rs
+++ b/third_party/rust/clang-sys/src/link.rs
@@ -36,41 +36,35 @@ macro_rules! link {
 
     (@LOAD: fn $name:ident($($pname:ident: $pty:ty), *) $(-> $ret:ty)*) => (
         link!(@LOAD: #[cfg(feature="runtime")] fn $name($($pname: $pty), *) $(-> $ret)*);
     );
 
     ($($(#[cfg($cfg:meta)])* pub fn $name:ident($($pname:ident: $pty:ty), *) $(-> $ret:ty)*;)+) => (
         use std::cell::{RefCell};
         use std::sync::{Arc};
-        use std::path::{Path, PathBuf};
 
         /// The set of functions loaded dynamically.
         #[derive(Debug, Default)]
         pub struct Functions {
             $($(#[cfg($cfg)])* pub $name: Option<unsafe extern fn($($pname: $pty), *) $(-> $ret)*>,)+
         }
 
         /// A dynamically loaded instance of the `libclang` library.
         #[derive(Debug)]
         pub struct SharedLibrary {
             library: libloading::Library,
-            path: PathBuf,
             pub functions: Functions,
         }
 
         impl SharedLibrary {
             //- Constructors -----------------------------
 
-            fn new(library: libloading::Library, path: PathBuf) -> SharedLibrary {
-                SharedLibrary { library, path, functions: Functions::default() }
-            }
-
-            pub fn path(&self) -> &Path {
-                &self.path
+            fn new(library: libloading::Library) -> SharedLibrary {
+                SharedLibrary { library: library, functions: Functions::default() }
             }
         }
 
         thread_local!(static LIBRARY: RefCell<Option<Arc<SharedLibrary>>> = RefCell::new(None));
 
         /// Returns whether a `libclang` shared library is loaded on this thread.
         pub fn is_loaded() -> bool {
             LIBRARY.with(|l| l.borrow().is_some())
@@ -131,17 +125,17 @@ macro_rules! link {
             let library = libloading::Library::new(&path).map_err(|e| {
                 format!(
                     "the `libclang` shared library at {} could not be opened: {}",
                     path.display(),
                     e,
                 )
             });
 
-            let mut library = SharedLibrary::new(try!(library), path);
+            let mut library = SharedLibrary::new(try!(library));
             $(load::$name(&mut library);)+
             Ok(library)
         }
 
         /// Loads a `libclang` shared library for use in the current thread.
         ///
         /// This functions attempts to load all the functions in the shared library. Whether a
         /// function has been loaded can be tested by calling the `is_loaded` function on the
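The link.rs hunks restore a `SharedLibrary` that owns only the loaded library and its function table; the `path` field and `path()` accessor from the newer clang-sys are gone again. A stripped-down sketch of that shape, assuming libloading 0.5 and with `Functions` reduced to an empty placeholder instead of the macro-generated table:

```rust
// Cargo.toml (assumed): libloading = "0.5"
extern crate libloading;

/// Placeholder for the macro-generated table of optional libclang functions.
#[derive(Debug, Default)]
pub struct Functions;

/// The restored shape: no `path` field, no `path()` accessor.
#[derive(Debug)]
pub struct SharedLibrary {
    library: libloading::Library,
    pub functions: Functions,
}

impl SharedLibrary {
    fn new(library: libloading::Library) -> SharedLibrary {
        SharedLibrary { library: library, functions: Functions::default() }
    }
}

fn load(path: &str) -> Result<SharedLibrary, String> {
    let library = libloading::Library::new(path).map_err(|e| {
        format!("the `libclang` shared library at {} could not be opened: {}", path, e)
    })?;
    Ok(SharedLibrary::new(library))
}

fn main() {
    match load("libclang.so") {
        Ok(lib) => println!("loaded: {:?}", lib),
        Err(e) => eprintln!("{}", e),
    }
}
```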
deleted file mode 100644
--- a/third_party/rust/hashbrown/.cargo-checksum.json
+++ /dev/null
@@ -1,1 +0,0 @@
-{"files":{"CHANGELOG.md":"3a67f210ecaa8765910557cdb10dec83a0005119fc6f6a7ef3a7e1a642cfba42","Cargo.toml":"a4de88ffca37c01a019dfe5caff8654542d457b5354399f980a9aaf1769518c1","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"ff8f68cb076caf8cefe7a6430d4ac086ce6af2ca8ce2c4e5a2004d4552ef52a2","README.md":"0195956b2c2433a702a16e9334ec7b2152c89df24d1f65daa6595aa9153ccdde","benches/bench.rs":"7c54f2ea943daa0699df29f3bd5a69b63dcf361f487c77909847b547e3ce709a","bors.toml":"1c81ede536a37edd30fe4e622ff0531b25372403ac9475a5d6c50f14156565a2","src/external_trait_impls/mod.rs":"d69528827794524cfd9acbeacc1ac4f6131e3c7574311e6d919f818f65fbff07","src/external_trait_impls/rayon/helpers.rs":"d4fbca4db924925548f8dab8eb94cf4a3955a53c5e1ff15f59c460546c394034","src/external_trait_impls/rayon/map.rs":"f49e7c1348d4d4a8a2b88f75619cb817bca1f7ededb63202aa2176a8865cb583","src/external_trait_impls/rayon/mod.rs":"b48139960a89ee84ed3b5e10ee5abb0259b40f2bb0ef867d0dd65784252a47c0","src/external_trait_impls/rayon/raw.rs":"bfa25de5e4184f2ab23493e72285187395bb1d81a1d0a55c03fc9fc542d91e27","src/external_trait_impls/rayon/set.rs":"cc70fb41704861d0ec58e3118305a9b80cedbdce4eeca7c96d382cb0f956cea8","src/external_trait_impls/serde.rs":"3611fd340cc18b57510823d5bba0222e98d43eee075e5157fe37f415ece909e3","src/fx.rs":"5f375fff21c94f3a7ed81bb78981d69354168e7808775d6beda20f6596e5998a","src/lib.rs":"8868a2701988f2fef82f7cd3c22f91e3717cd1cda7ff77959869c012cdbd50c4","src/map.rs":"973f217ada3698227eedca27c96955e5d540beb011281811350de7458e114857","src/raw/bitmask.rs":"67927708ecfd9f290cc1160c84d8227bdcaabd9abde70a1e5dcfe7c203545f6f","src/raw/generic.rs":"7ac56ebd531cf49df187cf6ab60e15e65bdd81be97fbea790660c1e4966ed8f3","src/raw/mod.rs":"b675b5b001ed4e922a06d1a00228b0f906e534db3f3af547866bc1a2d151ea75","src/raw/sse2.rs":"1f86578c912fd12088f5aa129ce1f73b29c4b5504b14375828789bc6fb90e8cd","src/set.rs":"8139210e9ebeaaffaf080f92b51360b7dbac02c8b250ae84f5be7dd9b9018b0e","tests/rayon.rs":"d6b5c538a7d8fac9c8dc66021a1aa7cbc67547d22d2f1aa4d8dfc34fd0b9e3f3","tests/serde.rs":"9af7e218f9a13479e60fd6b6c889da6e2d95c430e409179ea398886458a764f8","tests/set.rs":"ae3a9efe51339372d6420f9cd681141ce5c24065b3d63677b4e2ce561d3dd8c1"},"package":"3bae29b6653b3412c2e71e9d486db9f9df5d701941d86683005efb9f2d28e3da"}
\ No newline at end of file
deleted file mode 100644
--- a/third_party/rust/hashbrown/CHANGELOG.md
+++ /dev/null
@@ -1,74 +0,0 @@
-# Change Log
-
-All notable changes to this project will be documented in this file.
-
-The format is based on [Keep a Changelog](http://keepachangelog.com/)
-and this project adheres to [Semantic Versioning](http://semver.org/).
-
-## [Unreleased]
-
-## [v0.1.8] - 2019-01-14
-
-### Added
-- Rayon parallel iterator support (#37)
-- `raw_entry` support (#31)
-- `#[may_dangle]` on nightly (#31)
-- `try_reserve` support (#31)
-
-### Fixed
-- Fixed variance on `IterMut`. (#31)
-
-## [v0.1.7] - 2018-12-05
-
-### Fixed
-- Fixed non-SSE version of convert_special_to_empty_and_full_to_deleted. (#32)
-- Fixed overflow in rehash_in_place. (#33)
-
-## [v0.1.6] - 2018-11-17
-
-### Fixed
-- Fixed compile error on nightly. (#29)
-
-## [v0.1.5] - 2018-11-08
-
-### Fixed
-- Fixed subtraction overflow in generic::Group::match_byte. (#28)
-
-## [v0.1.4] - 2018-11-04
-
-### Fixed
-- Fixed a bug in the `erase_no_drop` implementation. (#26)
-
-## [v0.1.3] - 2018-11-01
-
-### Added
-- Serde support. (#14)
-
-### Fixed
-- Make the compiler inline functions more aggressively. (#20)
-
-## [v0.1.2] - 2018-10-31
-
-### Fixed
-- `clear` segfaults when called on an empty table. (#13)
-
-## [v0.1.1] - 2018-10-30
-
-### Fixed
-- `erase_no_drop` optimization not triggering in the SSE2 implementation. (#3)
-- Missing `Send` and `Sync` for hash map and iterator types. (#7)
-- Bug when inserting into a table smaller than the group width. (#5)
-
-## v0.1.0 - 2018-10-29
-
-- Initial release
-
-[Unreleased]: https://github.com/Amanieu/hashbrown/compare/v0.1.8...HEAD
-[v0.1.8]: https://github.com/Amanieu/hashbrown/compare/v0.1.7...v0.1.8
-[v0.1.7]: https://github.com/Amanieu/hashbrown/compare/v0.1.6...v0.1.7
-[v0.1.6]: https://github.com/Amanieu/hashbrown/compare/v0.1.5...v0.1.6
-[v0.1.5]: https://github.com/Amanieu/hashbrown/compare/v0.1.4...v0.1.5
-[v0.1.4]: https://github.com/Amanieu/hashbrown/compare/v0.1.3...v0.1.4
-[v0.1.3]: https://github.com/Amanieu/hashbrown/compare/v0.1.2...v0.1.3
-[v0.1.2]: https://github.com/Amanieu/hashbrown/compare/v0.1.1...v0.1.2
-[v0.1.1]: https://github.com/Amanieu/hashbrown/compare/v0.1.0...v0.1.1
deleted file mode 100644
--- a/third_party/rust/hashbrown/Cargo.toml
+++ /dev/null
@@ -1,55 +0,0 @@
-# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
-#
-# When uploading crates to the registry Cargo will automatically
-# "normalize" Cargo.toml files for maximal compatibility
-# with all versions of Cargo and also rewrite `path` dependencies
-# to registry (e.g. crates.io) dependencies
-#
-# If you believe there's an error in this file please file an
-# issue against the rust-lang/cargo repository. If you're
-# editing this file be aware that the upstream Cargo.toml
-# will likely look very different (and much more reasonable)
-
-[package]
-name = "hashbrown"
-version = "0.1.8"
-authors = ["Amanieu d'Antras <amanieu@gmail.com>"]
-description = "A Rust port of Google's SwissTable hash map"
-readme = "README.md"
-keywords = ["hash", "no_std", "hashmap", "swisstable"]
-categories = ["data-structures", "no-std"]
-license = "Apache-2.0/MIT"
-repository = "https://github.com/Amanieu/hashbrown"
-[dependencies.byteorder]
-version = "1.0"
-default-features = false
-
-[dependencies.rayon]
-version = "1.0"
-optional = true
-
-[dependencies.scopeguard]
-version = "0.3"
-default-features = false
-
-[dependencies.serde]
-version = "1.0"
-optional = true
-default-features = false
-[dev-dependencies.lazy_static]
-version = "~1.2"
-
-[dev-dependencies.rand]
-version = "0.5.1"
-
-[dev-dependencies.rayon]
-version = "1.0"
-
-[dev-dependencies.rustc-hash]
-version = "1.0"
-
-[dev-dependencies.serde_test]
-version = "1.0"
-
-[features]
-nightly = []
deleted file mode 100644
--- a/third_party/rust/hashbrown/LICENSE-APACHE
+++ /dev/null
@@ -1,201 +0,0 @@
-                              Apache License
-                        Version 2.0, January 2004
-                     http://www.apache.org/licenses/
-
-TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-1. Definitions.
-
-   "License" shall mean the terms and conditions for use, reproduction,
-   and distribution as defined by Sections 1 through 9 of this document.
-
-   "Licensor" shall mean the copyright owner or entity authorized by
-   the copyright owner that is granting the License.
-
-   "Legal Entity" shall mean the union of the acting entity and all
-   other entities that control, are controlled by, or are under common
-   control with that entity. For the purposes of this definition,
-   "control" means (i) the power, direct or indirect, to cause the
-   direction or management of such entity, whether by contract or
-   otherwise, or (ii) ownership of fifty percent (50%) or more of the
-   outstanding shares, or (iii) beneficial ownership of such entity.
-
-   "You" (or "Your") shall mean an individual or Legal Entity
-   exercising permissions granted by this License.
-
-   "Source" form shall mean the preferred form for making modifications,
-   including but not limited to software source code, documentation
-   source, and configuration files.
-
-   "Object" form shall mean any form resulting from mechanical
-   transformation or translation of a Source form, including but
-   not limited to compiled object code, generated documentation,
-   and conversions to other media types.
-
-   "Work" shall mean the work of authorship, whether in Source or
-   Object form, made available under the License, as indicated by a
-   copyright notice that is included in or attached to the work
-   (an example is provided in the Appendix below).
-
-   "Derivative Works" shall mean any work, whether in Source or Object
-   form, that is based on (or derived from) the Work and for which the
-   editorial revisions, annotations, elaborations, or other modifications
-   represent, as a whole, an original work of authorship. For the purposes
-   of this License, Derivative Works shall not include works that remain
-   separable from, or merely link (or bind by name) to the interfaces of,
-   the Work and Derivative Works thereof.
-
-   "Contribution" shall mean any work of authorship, including
-   the original version of the Work and any modifications or additions
-   to that Work or Derivative Works thereof, that is intentionally
-   submitted to Licensor for inclusion in the Work by the copyright owner
-   or by an individual or Legal Entity authorized to submit on behalf of
-   the copyright owner. For the purposes of this definition, "submitted"
-   means any form of electronic, verbal, or written communication sent
-   to the Licensor or its representatives, including but not limited to
-   communication on electronic mailing lists, source code control systems,
-   and issue tracking systems that are managed by, or on behalf of, the
-   Licensor for the purpose of discussing and improving the Work, but
-   excluding communication that is conspicuously marked or otherwise
-   designated in writing by the copyright owner as "Not a Contribution."
-
-   "Contributor" shall mean Licensor and any individual or Legal Entity
-   on behalf of whom a Contribution has been received by Licensor and
-   subsequently incorporated within the Work.
-
-2. Grant of Copyright License. Subject to the terms and conditions of
-   this License, each Contributor hereby grants to You a perpetual,
-   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-   copyright license to reproduce, prepare Derivative Works of,
-   publicly display, publicly perform, sublicense, and distribute the
-   Work and such Derivative Works in Source or Object form.
-
-3. Grant of Patent License. Subject to the terms and conditions of
-   this License, each Contributor hereby grants to You a perpetual,
-   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-   (except as stated in this section) patent license to make, have made,
-   use, offer to sell, sell, import, and otherwise transfer the Work,
-   where such license applies only to those patent claims licensable
-   by such Contributor that are necessarily infringed by their
-   Contribution(s) alone or by combination of their Contribution(s)
-   with the Work to which such Contribution(s) was submitted. If You
-   institute patent litigation against any entity (including a
-   cross-claim or counterclaim in a lawsuit) alleging that the Work
-   or a Contribution incorporated within the Work constitutes direct
-   or contributory patent infringement, then any patent licenses
-   granted to You under this License for that Work shall terminate
-   as of the date such litigation is filed.
-
-4. Redistribution. You may reproduce and distribute copies of the
-   Work or Derivative Works thereof in any medium, with or without
-   modifications, and in Source or Object form, provided that You
-   meet the following conditions:
-
-   (a) You must give any other recipients of the Work or
-       Derivative Works a copy of this License; and
-
-   (b) You must cause any modified files to carry prominent notices
-       stating that You changed the files; and
-
-   (c) You must retain, in the Source form of any Derivative Works
-       that You distribute, all copyright, patent, trademark, and
-       attribution notices from the Source form of the Work,
-       excluding those notices that do not pertain to any part of
-       the Derivative Works; and
-
-   (d) If the Work includes a "NOTICE" text file as part of its
-       distribution, then any Derivative Works that You distribute must
-       include a readable copy of the attribution notices contained
-       within such NOTICE file, excluding those notices that do not
-       pertain to any part of the Derivative Works, in at least one
-       of the following places: within a NOTICE text file distributed
-       as part of the Derivative Works; within the Source form or
-       documentation, if provided along with the Derivative Works; or,
-       within a display generated by the Derivative Works, if and
-       wherever such third-party notices normally appear. The contents
-       of the NOTICE file are for informational purposes only and
-       do not modify the License. You may add Your own attribution
-       notices within Derivative Works that You distribute, alongside
-       or as an addendum to the NOTICE text from the Work, provided
-       that such additional attribution notices cannot be construed
-       as modifying the License.
-
-   You may add Your own copyright statement to Your modifications and
-   may provide additional or different license terms and conditions
-   for use, reproduction, or distribution of Your modifications, or
-   for any such Derivative Works as a whole, provided Your use,
-   reproduction, and distribution of the Work otherwise complies with
-   the conditions stated in this License.
-
-5. Submission of Contributions. Unless You explicitly state otherwise,
-   any Contribution intentionally submitted for inclusion in the Work
-   by You to the Licensor shall be under the terms and conditions of
-   this License, without any additional terms or conditions.
-   Notwithstanding the above, nothing herein shall supersede or modify
-   the terms of any separate license agreement you may have executed
-   with Licensor regarding such Contributions.
-
-6. Trademarks. This License does not grant permission to use the trade
-   names, trademarks, service marks, or product names of the Licensor,
-   except as required for reasonable and customary use in describing the
-   origin of the Work and reproducing the content of the NOTICE file.
-
-7. Disclaimer of Warranty. Unless required by applicable law or
-   agreed to in writing, Licensor provides the Work (and each
-   Contributor provides its Contributions) on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-   implied, including, without limitation, any warranties or conditions
-   of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-   PARTICULAR PURPOSE. You are solely responsible for determining the
-   appropriateness of using or redistributing the Work and assume any
-   risks associated with Your exercise of permissions under this License.
-
-8. Limitation of Liability. In no event and under no legal theory,
-   whether in tort (including negligence), contract, or otherwise,
-   unless required by applicable law (such as deliberate and grossly
-   negligent acts) or agreed to in writing, shall any Contributor be
-   liable to You for damages, including any direct, indirect, special,
-   incidental, or consequential damages of any character arising as a
-   result of this License or out of the use or inability to use the
-   Work (including but not limited to damages for loss of goodwill,
-   work stoppage, computer failure or malfunction, or any and all
-   other commercial damages or losses), even if such Contributor
-   has been advised of the possibility of such damages.
-
-9. Accepting Warranty or Additional Liability. While redistributing
-   the Work or Derivative Works thereof, You may choose to offer,
-   and charge a fee for, acceptance of support, warranty, indemnity,
-   or other liability obligations and/or rights consistent with this
-   License. However, in accepting such obligations, You may act only
-   on Your own behalf and on Your sole responsibility, not on behalf
-   of any other Contributor, and only if You agree to indemnify,
-   defend, and hold each Contributor harmless for any liability
-   incurred by, or claims asserted against, such Contributor by reason
-   of your accepting any such warranty or additional liability.
-
-END OF TERMS AND CONDITIONS
-
-APPENDIX: How to apply the Apache License to your work.
-
-   To apply the Apache License to your work, attach the following
-   boilerplate notice, with the fields enclosed by brackets "[]"
-   replaced with your own identifying information. (Don't include
-   the brackets!)  The text should be enclosed in the appropriate
-   comment syntax for the file format. We also recommend that a
-   file or class name and description of purpose be included on the
-   same "printed page" as the copyright notice for easier
-   identification within third-party archives.
-
-Copyright [yyyy] [name of copyright owner]
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-	http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
deleted file mode 100644
--- a/third_party/rust/hashbrown/LICENSE-MIT
+++ /dev/null
@@ -1,25 +0,0 @@
-Copyright (c) 2016 Amanieu d'Antras
-
-Permission is hereby granted, free of charge, to any
-person obtaining a copy of this software and associated
-documentation files (the "Software"), to deal in the
-Software without restriction, including without
-limitation the rights to use, copy, modify, merge,
-publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software
-is furnished to do so, subject to the following
-conditions:
-
-The above copyright notice and this permission notice
-shall be included in all copies or substantial portions
-of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
-ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
-TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
-PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
-IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
deleted file mode 100644
--- a/third_party/rust/hashbrown/README.md
+++ /dev/null
@@ -1,91 +0,0 @@
-hashbrown
-=========
-
-[![Build Status](https://travis-ci.com/Amanieu/hashbrown.svg?branch=master)](https://travis-ci.com/Amanieu/hashbrown) [![Crates.io](https://img.shields.io/crates/v/hashbrown.svg)](https://crates.io/crates/hashbrown)
-
-This crate is a Rust port of Google's high-performance [SwissTable] hash
-map, adapted to make it a drop-in replacement for Rust's standard `HashMap`
-and `HashSet` types.
-
-The original C++ version of SwissTable can be found [here], and this
-[CppCon talk] gives an overview of how the algorithm works.
-
-[SwissTable]: https://abseil.io/blog/20180927-swisstables
-[here]: https://github.com/abseil/abseil-cpp/blob/master/absl/container/internal/raw_hash_set.h
-[CppCon talk]: https://www.youtube.com/watch?v=ncHmEUmJZf4
-
-## [Documentation](https://docs.rs/hashbrown)
-
-## [Change log](CHANGELOG.md)
-
-## Features
-
-- Drop-in replacement for the standard library `HashMap` and `HashSet` types.
-- Uses `FxHash` as the default hasher, which is much faster than SipHash.
-- Around 2x faster than `FxHashMap` and 8x faster than the standard `HashMap`.
-- Lower memory usage: only 1 byte of overhead per entry instead of 8.
-- Compatible with `#[no_std]` (currently requires nightly for the `alloc` crate).
-- Empty hash maps do not allocate any memory.
-- SIMD lookups to scan multiple hash entries in parallel.
-
-## Performance
-
-Compared to `std::collections::HashMap`:
-
-```
- name               stdhash ns/iter  hashbrown ns/iter  diff ns/iter    diff %  speedup
- find_existing      23,831           2,935                   -20,896   -87.68%   x 8.12
- find_nonexisting   25,326           2,283                   -23,043   -90.99%  x 11.09
- get_remove_insert  124              25                          -99   -79.84%   x 4.96
- grow_by_insertion  197              177                         -20   -10.15%   x 1.11
- hashmap_as_queue   72               18                          -54   -75.00%   x 4.00
- new_drop           14               0                           -14  -100.00%    x inf
- new_insert_drop    78               55                          -23   -29.49%   x 1.42
-```
-
-Compared to `rustc_hash::FxHashMap` (standard `HashMap` using `FxHash` instead of `SipHash`):
-
-```
- name               fxhash ns/iter  hashbrown ns/iter  diff ns/iter    diff %  speedup
- find_existing      5,951           2,935                    -3,016   -50.68%   x 2.03
- find_nonexisting   4,637           2,283                    -2,354   -50.77%   x 2.03
- get_remove_insert  29              25                           -4   -13.79%   x 1.16
- grow_by_insertion  160             177                          17    10.62%   x 0.90
- hashmap_as_queue   22              18                           -4   -18.18%   x 1.22
- new_drop           9               0                            -9  -100.00%    x inf
- new_insert_drop    64              55                           -9   -14.06%   x 1.16
-```
-
-## Usage
-
-Add this to your `Cargo.toml`:
-
-```toml
-[dependencies]
-hashbrown = "0.1"
-```
-
-and this to your crate root:
-
-```rust
-extern crate hashbrown;
-```
-
-This crate has the following Cargo features:
-
-- `nightly`: Enables nightly-only features: `no_std` support, `#[may_dangle]` and ~10% speedup from branch hint intrinsics.
-
-## License
-
-Licensed under either of:
-
- * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)
- * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)
-
-at your option.
-
-### Contribution
-
-Unless you explicitly state otherwise, any contribution intentionally submitted
-for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any
-additional terms or conditions.
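The README deleted above pitches hashbrown as a drop-in replacement for the standard `HashMap`/`HashSet` with an FxHash default hasher. For illustration, a minimal usage sketch against the 0.1 API the README describes, pulling the crate from crates.io rather than the vendored copy being removed here:

```rust
// Cargo.toml (assumed): hashbrown = "0.1"
extern crate hashbrown;

use hashbrown::HashMap;

fn main() {
    // Same surface as std::collections::HashMap, backed by SwissTable-style
    // probing with the crate's default hasher.
    let mut map: HashMap<&str, u32> = HashMap::new();
    map.insert("swisstable", 1);
    assert_eq!(map.get("swisstable"), Some(&1));
    println!("ok");
}
```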
deleted file mode 100644
--- a/third_party/rust/hashbrown/benches/bench.rs
+++ /dev/null
@@ -1,115 +0,0 @@
-#![feature(test)]
-
-extern crate hashbrown;
-extern crate rustc_hash;
-extern crate test;
-
-use std::hash::Hash;
-use test::Bencher;
-
-use hashbrown::HashMap;
-//use rustc_hash::FxHashMap as HashMap;
-//use std::collections::HashMap;
-
-fn new_map<K: Eq + Hash, V>() -> HashMap<K, V> {
-    HashMap::default()
-}
-
-#[bench]
-fn new_drop(b: &mut Bencher) {
-    b.iter(|| {
-        let m: HashMap<i32, i32> = new_map();
-        assert_eq!(m.len(), 0);
-    })
-}
-
-#[bench]
-fn new_insert_drop(b: &mut Bencher) {
-    b.iter(|| {
-        let mut m = new_map();
-        m.insert(0, 0);
-        assert_eq!(m.len(), 1);
-    })
-}
-
-#[bench]
-fn grow_by_insertion(b: &mut Bencher) {
-    let mut m = new_map();
-
-    for i in 1..1001 {
-        m.insert(i, i);
-    }
-
-    let mut k = 1001;
-
-    b.iter(|| {
-        m.insert(k, k);
-        k += 1;
-    });
-}
-
-#[bench]
-fn find_existing(b: &mut Bencher) {
-    let mut m = new_map();
-
-    for i in 1..1001 {
-        m.insert(i, i);
-    }
-
-    b.iter(|| {
-        for i in 1..1001 {
-            m.contains_key(&i);
-        }
-    });
-}
-
-#[bench]
-fn find_nonexisting(b: &mut Bencher) {
-    let mut m = new_map();
-
-    for i in 1..1001 {
-        m.insert(i, i);
-    }
-
-    b.iter(|| {
-        for i in 1001..2001 {
-            m.contains_key(&i);
-        }
-    });
-}
-
-#[bench]
-fn hashmap_as_queue(b: &mut Bencher) {
-    let mut m = new_map();
-
-    for i in 1..1001 {
-        m.insert(i, i);
-    }
-
-    let mut k = 1;
-
-    b.iter(|| {
-        m.remove(&k);
-        m.insert(k + 1000, k + 1000);
-        k += 1;
-    });
-}
-
-#[bench]
-fn get_remove_insert(b: &mut Bencher) {
-    let mut m = new_map();
-
-    for i in 1..1001 {
-        m.insert(i, i);
-    }
-
-    let mut k = 1;
-
-    b.iter(|| {
-        m.get(&(k + 400));
-        m.get(&(k + 2000));
-        m.remove(&k);
-        m.insert(k + 1000, k + 1000);
-        k += 1;
-    })
-}
deleted file mode 100644
--- a/third_party/rust/hashbrown/bors.toml
+++ /dev/null
@@ -1,3 +0,0 @@
-status = [
-  "continuous-integration/travis-ci/push",
-]
deleted file mode 100644
--- a/third_party/rust/hashbrown/src/external_trait_impls/mod.rs
+++ /dev/null
@@ -1,4 +0,0 @@
-#[cfg(feature = "rayon")]
-pub(crate) mod rayon;
-#[cfg(feature = "serde")]
-mod serde;
deleted file mode 100644
--- a/third_party/rust/hashbrown/src/external_trait_impls/rayon/helpers.rs
+++ /dev/null
@@ -1,26 +0,0 @@
-use alloc::collections::LinkedList;
-use alloc::vec::Vec;
-
-use rayon::iter::{IntoParallelIterator, ParallelIterator};
-
-/// Helper for collecting parallel iterators to an intermediary
-pub(super) fn collect<I: IntoParallelIterator>(iter: I) -> (LinkedList<Vec<I::Item>>, usize) {
-    let list = iter
-        .into_par_iter()
-        .fold(Vec::new, |mut vec, elem| {
-            vec.push(elem);
-            vec
-        })
-        .map(|vec| {
-            let mut list = LinkedList::new();
-            list.push_back(vec);
-            list
-        })
-        .reduce(LinkedList::new, |mut list1, mut list2| {
-            list1.append(&mut list2);
-            list1
-        });
-
-    let len = list.iter().map(Vec::len).sum();
-    (list, len)
-}
deleted file mode 100644
--- a/third_party/rust/hashbrown/src/external_trait_impls/rayon/map.rs
+++ /dev/null
@@ -1,680 +0,0 @@
-//! Rayon extensions for `HashMap`.
-
-use core::fmt;
-use core::hash::{BuildHasher, Hash};
-use hash_map::HashMap;
-use rayon::iter::plumbing::UnindexedConsumer;
-use rayon::iter::{FromParallelIterator, IntoParallelIterator, ParallelExtend, ParallelIterator};
-
-/// Parallel iterator over shared references to entries in a map.
-///
-/// This iterator is created by the [`par_iter`] method on [`HashMap`]
-/// (provided by the [`IntoParallelRefIterator`] trait).
-/// See its documentation for more.
-///
-/// [`par_iter`]: /hashbrown/struct.HashMap.html#method.par_iter
-/// [`HashMap`]: /hashbrown/struct.HashMap.html
-/// [`IntoParallelRefIterator`]: https://docs.rs/rayon/1.0/rayon/iter/trait.IntoParallelRefIterator.html
-pub struct ParIter<'a, K: 'a, V: 'a, S: 'a> {
-    map: &'a HashMap<K, V, S>,
-}
-
-impl<'a, K: Sync, V: Sync, S: Sync> ParallelIterator for ParIter<'a, K, V, S> {
-    type Item = (&'a K, &'a V);
-
-    #[inline]
-    fn drive_unindexed<C>(self, consumer: C) -> C::Result
-    where
-        C: UnindexedConsumer<Self::Item>,
-    {
-        self.map
-            .table
-            .par_iter()
-            .map(|x| unsafe {
-                let r = x.as_ref();
-                (&r.0, &r.1)
-            })
-            .drive_unindexed(consumer)
-    }
-}
-
-impl<'a, K, V, S> Clone for ParIter<'a, K, V, S> {
-    #[inline]
-    fn clone(&self) -> Self {
-        ParIter { map: self.map }
-    }
-}
-
-impl<'a, K: fmt::Debug + Eq + Hash, V: fmt::Debug, S: BuildHasher> fmt::Debug
-    for ParIter<'a, K, V, S>
-{
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        self.map.iter().fmt(f)
-    }
-}
-
-/// Parallel iterator over shared references to keys in a map.
-///
-/// This iterator is created by the [`par_keys`] method on [`HashMap`].
-/// See its documentation for more.
-///
-/// [`par_keys`]: /hashbrown/struct.HashMap.html#method.par_keys
-/// [`HashMap`]: /hashbrown/struct.HashMap.html
-pub struct ParKeys<'a, K: 'a, V: 'a, S: 'a> {
-    map: &'a HashMap<K, V, S>,
-}
-
-impl<'a, K: Sync, V: Sync, S: Sync> ParallelIterator for ParKeys<'a, K, V, S> {
-    type Item = &'a K;
-
-    #[inline]
-    fn drive_unindexed<C>(self, consumer: C) -> C::Result
-    where
-        C: UnindexedConsumer<Self::Item>,
-    {
-        self.map
-            .table
-            .par_iter()
-            .map(|x| unsafe { &x.as_ref().0 })
-            .drive_unindexed(consumer)
-    }
-}
-
-impl<'a, K, V, S> Clone for ParKeys<'a, K, V, S> {
-    #[inline]
-    fn clone(&self) -> Self {
-        ParKeys { map: self.map }
-    }
-}
-
-impl<'a, K: fmt::Debug + Eq + Hash, V, S: BuildHasher> fmt::Debug for ParKeys<'a, K, V, S> {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        self.map.keys().fmt(f)
-    }
-}
-
-/// Parallel iterator over shared references to values in a map.
-///
-/// This iterator is created by the [`par_values`] method on [`HashMap`].
-/// See its documentation for more.
-///
-/// [`par_values`]: /hashbrown/struct.HashMap.html#method.par_values
-/// [`HashMap`]: /hashbrown/struct.HashMap.html
-pub struct ParValues<'a, K: 'a, V: 'a, S: 'a> {
-    map: &'a HashMap<K, V, S>,
-}
-
-impl<'a, K: Sync, V: Sync, S: Sync> ParallelIterator for ParValues<'a, K, V, S> {
-    type Item = &'a V;
-
-    #[inline]
-    fn drive_unindexed<C>(self, consumer: C) -> C::Result
-    where
-        C: UnindexedConsumer<Self::Item>,
-    {
-        self.map
-            .table
-            .par_iter()
-            .map(|x| unsafe { &x.as_ref().1 })
-            .drive_unindexed(consumer)
-    }
-}
-
-impl<'a, K, V, S> Clone for ParValues<'a, K, V, S> {
-    #[inline]
-    fn clone(&self) -> Self {
-        ParValues { map: self.map }
-    }
-}
-
-impl<'a, K: Eq + Hash, V: fmt::Debug, S: BuildHasher> fmt::Debug for ParValues<'a, K, V, S> {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        self.map.values().fmt(f)
-    }
-}
-
-/// Parallel iterator over mutable references to entries in a map.
-///
-/// This iterator is created by the [`par_iter_mut`] method on [`HashMap`]
-/// (provided by the [`IntoParallelRefMutIterator`] trait).
-/// See its documentation for more.
-///
-/// [`par_iter_mut`]: /hashbrown/struct.HashMap.html#method.par_iter_mut
-/// [`HashMap`]: /hashbrown/struct.HashMap.html
-/// [`IntoParallelRefMutIterator`]: https://docs.rs/rayon/1.0/rayon/iter/trait.IntoParallelRefMutIterator.html
-pub struct ParIterMut<'a, K: 'a, V: 'a, S: 'a> {
-    map: &'a mut HashMap<K, V, S>,
-}
-
-impl<'a, K: Send + Sync, V: Send, S: Send> ParallelIterator for ParIterMut<'a, K, V, S> {
-    type Item = (&'a K, &'a mut V);
-
-    #[inline]
-    fn drive_unindexed<C>(self, consumer: C) -> C::Result
-    where
-        C: UnindexedConsumer<Self::Item>,
-    {
-        self.map
-            .table
-            .par_iter()
-            .map(|x| unsafe {
-                let r = x.as_mut();
-                (&r.0, &mut r.1)
-            })
-            .drive_unindexed(consumer)
-    }
-}
-
-impl<'a, K: fmt::Debug + Eq + Hash, V: fmt::Debug, S: BuildHasher> fmt::Debug
-    for ParIterMut<'a, K, V, S>
-{
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        self.map.iter().fmt(f)
-    }
-}
-
-/// Parallel iterator over mutable references to values in a map.
-///
-/// This iterator is created by the [`par_values_mut`] method on [`HashMap`].
-/// See its documentation for more.
-///
-/// [`par_values_mut`]: /hashbrown/struct.HashMap.html#method.par_values_mut
-/// [`HashMap`]: /hashbrown/struct.HashMap.html
-pub struct ParValuesMut<'a, K: 'a, V: 'a, S: 'a> {
-    map: &'a mut HashMap<K, V, S>,
-}
-
-impl<'a, K: Send, V: Send, S: Send> ParallelIterator for ParValuesMut<'a, K, V, S> {
-    type Item = &'a mut V;
-
-    #[inline]
-    fn drive_unindexed<C>(self, consumer: C) -> C::Result
-    where
-        C: UnindexedConsumer<Self::Item>,
-    {
-        self.map
-            .table
-            .par_iter()
-            .map(|x| unsafe { &mut x.as_mut().1 })
-            .drive_unindexed(consumer)
-    }
-}
-
-impl<'a, K: Eq + Hash, V: fmt::Debug, S: BuildHasher> fmt::Debug for ParValuesMut<'a, K, V, S> {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        self.map.values().fmt(f)
-    }
-}
-
-/// Parallel iterator over entries of a consumed map.
-///
-/// This iterator is created by the [`into_par_iter`] method on [`HashMap`]
-/// (provided by the [`IntoParallelIterator`] trait).
-/// See its documentation for more.
-///
-/// [`into_par_iter`]: /hashbrown/struct.HashMap.html#method.into_par_iter
-/// [`HashMap`]: /hashbrown/struct.HashMap.html
-/// [`IntoParallelIterator`]: https://docs.rs/rayon/1.0/rayon/iter/trait.IntoParallelIterator.html
-pub struct IntoParIter<K, V, S> {
-    map: HashMap<K, V, S>,
-}
-
-impl<K: Send, V: Send, S: Send> ParallelIterator for IntoParIter<K, V, S> {
-    type Item = (K, V);
-
-    #[inline]
-    fn drive_unindexed<C>(self, consumer: C) -> C::Result
-    where
-        C: UnindexedConsumer<Self::Item>,
-    {
-        self.map.table.into_par_iter().drive_unindexed(consumer)
-    }
-}
-
-impl<'a, K: fmt::Debug + Eq + Hash, V: fmt::Debug, S: BuildHasher> fmt::Debug
-    for IntoParIter<K, V, S>
-{
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        self.map.iter().fmt(f)
-    }
-}
-
-/// Parallel draining iterator over entries of a map.
-///
-/// This iterator is created by the [`par_drain`] method on [`HashMap`].
-/// See its documentation for more.
-///
-/// [`par_drain`]: /hashbrown/struct.HashMap.html#method.par_drain
-/// [`HashMap`]: /hashbrown/struct.HashMap.html
-pub struct ParDrain<'a, K: 'a, V: 'a, S: 'a> {
-    map: &'a mut HashMap<K, V, S>,
-}
-
-impl<'a, K: Send, V: Send, S: Send> ParallelIterator for ParDrain<'a, K, V, S> {
-    type Item = (K, V);
-
-    #[inline]
-    fn drive_unindexed<C>(self, consumer: C) -> C::Result
-    where
-        C: UnindexedConsumer<Self::Item>,
-    {
-        self.map.table.par_drain().drive_unindexed(consumer)
-    }
-}
-
-impl<'a, K: fmt::Debug + Eq + Hash, V: fmt::Debug, S: BuildHasher> fmt::Debug
-    for ParDrain<'a, K, V, S>
-{
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        self.map.iter().fmt(f)
-    }
-}
-
-impl<K: Sync, V: Sync, S: Sync> HashMap<K, V, S> {
-    /// Visits (potentially in parallel) immutably borrowed keys in an arbitrary order.
-    #[inline]
-    pub fn par_keys(&self) -> ParKeys<K, V, S> {
-        ParKeys { map: self }
-    }
-
-    /// Visits (potentially in parallel) immutably borrowed values in an arbitrary order.
-    #[inline]
-    pub fn par_values(&self) -> ParValues<K, V, S> {
-        ParValues { map: self }
-    }
-}
-
-impl<K: Send, V: Send, S: Send> HashMap<K, V, S> {
-    /// Visits (potentially in parallel) mutably borrowed values in an arbitrary order.
-    #[inline]
-    pub fn par_values_mut(&mut self) -> ParValuesMut<K, V, S> {
-        ParValuesMut { map: self }
-    }
-
-    /// Consumes (potentially in parallel) all entries in an arbitrary order,
-    /// while preserving the map's allocated memory for reuse.
-    #[inline]
-    pub fn par_drain(&mut self) -> ParDrain<K, V, S> {
-        ParDrain { map: self }
-    }
-}
-
-impl<K, V, S> HashMap<K, V, S>
-where
-    K: Eq + Hash + Sync,
-    V: PartialEq + Sync,
-    S: BuildHasher + Sync,
-{
-    /// Returns `true` if the map is equal to another,
-    /// i.e. both maps contain the same keys mapped to the same values.
-    ///
-    /// This method runs in a potentially parallel fashion.
-    pub fn par_eq(&self, other: &Self) -> bool {
-        self.len() == other.len()
-            && self
-                .into_par_iter()
-                .all(|(key, value)| other.get(key).map_or(false, |v| *value == *v))
-    }
-}
-
-impl<K: Send, V: Send, S: Send> IntoParallelIterator for HashMap<K, V, S> {
-    type Item = (K, V);
-    type Iter = IntoParIter<K, V, S>;
-
-    #[inline]
-    fn into_par_iter(self) -> Self::Iter {
-        IntoParIter { map: self }
-    }
-}
-
-impl<'a, K: Sync, V: Sync, S: Sync> IntoParallelIterator for &'a HashMap<K, V, S> {
-    type Item = (&'a K, &'a V);
-    type Iter = ParIter<'a, K, V, S>;
-
-    #[inline]
-    fn into_par_iter(self) -> Self::Iter {
-        ParIter { map: self }
-    }
-}
-
-impl<'a, K: Send + Sync, V: Send, S: Send> IntoParallelIterator for &'a mut HashMap<K, V, S> {
-    type Item = (&'a K, &'a mut V);
-    type Iter = ParIterMut<'a, K, V, S>;
-
-    #[inline]
-    fn into_par_iter(self) -> Self::Iter {
-        ParIterMut { map: self }
-    }
-}
-
-/// Collect (key, value) pairs from a parallel iterator into a
-/// hashmap. If multiple pairs correspond to the same key, then the
-/// ones produced earlier in the parallel iterator will be
-/// overwritten, just as with a sequential iterator.
-impl<K, V, S> FromParallelIterator<(K, V)> for HashMap<K, V, S>
-where
-    K: Eq + Hash + Send,
-    V: Send,
-    S: BuildHasher + Default,
-{
-    fn from_par_iter<P>(par_iter: P) -> Self
-    where
-        P: IntoParallelIterator<Item = (K, V)>,
-    {
-        let mut map = HashMap::default();
-        map.par_extend(par_iter);
-        map
-    }
-}
-
-/// Extend a hash map with items from a parallel iterator.
-impl<K, V, S> ParallelExtend<(K, V)> for HashMap<K, V, S>
-where
-    K: Eq + Hash + Send,
-    V: Send,
-    S: BuildHasher,
-{
-    fn par_extend<I>(&mut self, par_iter: I)
-    where
-        I: IntoParallelIterator<Item = (K, V)>,
-    {
-        extend(self, par_iter);
-    }
-}
-
-/// Extend a hash map with copied items from a parallel iterator.
-impl<'a, K, V, S> ParallelExtend<(&'a K, &'a V)> for HashMap<K, V, S>
-where
-    K: Copy + Eq + Hash + Sync,
-    V: Copy + Sync,
-    S: BuildHasher,
-{
-    fn par_extend<I>(&mut self, par_iter: I)
-    where
-        I: IntoParallelIterator<Item = (&'a K, &'a V)>,
-    {
-        extend(self, par_iter);
-    }
-}
-
-// This is equal to the normal `HashMap` -- no custom advantage.
-fn extend<K, V, S, I>(map: &mut HashMap<K, V, S>, par_iter: I)
-where
-    K: Eq + Hash,
-    S: BuildHasher,
-    I: IntoParallelIterator,
-    HashMap<K, V, S>: Extend<I::Item>,
-{
-    let (list, len) = super::helpers::collect(par_iter);
-
-    // Keys may already be present or may appear multiple times in the iterator.
-    // Reserve the entire length if the map is empty.
-    // Otherwise reserve half the length (rounded up), so the map
-    // will only resize twice in the worst case.
-    let reserve = if map.is_empty() { len } else { (len + 1) / 2 };
-    map.reserve(reserve);
-    for vec in list {
-        map.extend(vec);
-    }
-}
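A minimal usage sketch of the two `ParallelExtend` impls that funnel into the helper above, assuming a downstream crate built with hashbrown's `rayon` feature (the function name is illustrative, not part of the vendored file):

use hashbrown::HashMap;
use rayon::prelude::*;

fn parallel_extend_demo() {
    let mut map: HashMap<u32, u32> = HashMap::new();

    // Extend with owned pairs produced by a parallel iterator.
    map.par_extend((0..10u32).into_par_iter().map(|i| (i, i * i)));

    // Extend with copied pairs borrowed from another map
    // (uses the `(&K, &V)` impl, so K and V must be Copy).
    let other: HashMap<u32, u32> = (10..20u32).into_par_iter().map(|i| (i, i)).collect();
    map.par_extend(&other);

    assert_eq!(map.len(), 20);
    assert_eq!(map.get(&3), Some(&9));
}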
-
-#[cfg(test)]
-mod test_par_map {
-    use alloc::vec::Vec;
-    use core::hash::{Hash, Hasher};
-    use core::sync::atomic::{AtomicUsize, Ordering};
-
-    use rayon::prelude::*;
-
-    use hash_map::HashMap;
-
-    struct Dropable<'a> {
-        k: usize,
-        counter: &'a AtomicUsize,
-    }
-
-    impl<'a> Dropable<'a> {
-        fn new(k: usize, counter: &AtomicUsize) -> Dropable {
-            counter.fetch_add(1, Ordering::Relaxed);
-
-            Dropable { k, counter }
-        }
-    }
-
-    impl<'a> Drop for Dropable<'a> {
-        fn drop(&mut self) {
-            self.counter.fetch_sub(1, Ordering::Relaxed);
-        }
-    }
-
-    impl<'a> Clone for Dropable<'a> {
-        fn clone(&self) -> Dropable<'a> {
-            Dropable::new(self.k, self.counter)
-        }
-    }
-
-    impl<'a> Hash for Dropable<'a> {
-        fn hash<H>(&self, state: &mut H)
-        where
-            H: Hasher,
-        {
-            self.k.hash(state)
-        }
-    }
-
-    impl<'a> PartialEq for Dropable<'a> {
-        fn eq(&self, other: &Self) -> bool {
-            self.k == other.k
-        }
-    }
-
-    impl<'a> Eq for Dropable<'a> {}
-
-    #[test]
-    fn test_into_iter_drops() {
-        let key = AtomicUsize::new(0);
-        let value = AtomicUsize::new(0);
-
-        let hm = {
-            let mut hm = HashMap::new();
-
-            assert_eq!(key.load(Ordering::Relaxed), 0);
-            assert_eq!(value.load(Ordering::Relaxed), 0);
-
-            for i in 0..100 {
-                let d1 = Dropable::new(i, &key);
-                let d2 = Dropable::new(i + 100, &value);
-                hm.insert(d1, d2);
-            }
-
-            assert_eq!(key.load(Ordering::Relaxed), 100);
-            assert_eq!(value.load(Ordering::Relaxed), 100);
-
-            hm
-        };
-
-        // By the way, ensure that cloning doesn't screw up the dropping.
-        drop(hm.clone());
-
-        assert_eq!(key.load(Ordering::Relaxed), 100);
-        assert_eq!(value.load(Ordering::Relaxed), 100);
-
-        // Ensure that dropping the iterator does not leak anything.
-        drop(hm.clone().into_par_iter());
-
-        {
-            assert_eq!(key.load(Ordering::Relaxed), 100);
-            assert_eq!(value.load(Ordering::Relaxed), 100);
-
-            // retain only half
-            let _v: Vec<_> = hm
-                .into_par_iter()
-                .filter(|&(ref key, _)| key.k < 50)
-                .collect();
-
-            assert_eq!(key.load(Ordering::Relaxed), 50);
-            assert_eq!(value.load(Ordering::Relaxed), 50);
-        };
-
-        assert_eq!(key.load(Ordering::Relaxed), 0);
-        assert_eq!(value.load(Ordering::Relaxed), 0);
-    }
-
-    #[test]
-    fn test_drain_drops() {
-        let key = AtomicUsize::new(0);
-        let value = AtomicUsize::new(0);
-
-        let mut hm = {
-            let mut hm = HashMap::new();
-
-            assert_eq!(key.load(Ordering::Relaxed), 0);
-            assert_eq!(value.load(Ordering::Relaxed), 0);
-
-            for i in 0..100 {
-                let d1 = Dropable::new(i, &key);
-                let d2 = Dropable::new(i + 100, &value);
-                hm.insert(d1, d2);
-            }
-
-            assert_eq!(key.load(Ordering::Relaxed), 100);
-            assert_eq!(value.load(Ordering::Relaxed), 100);
-
-            hm
-        };
-
-        // By the way, ensure that cloning doesn't screw up the dropping.
-        drop(hm.clone());
-
-        assert_eq!(key.load(Ordering::Relaxed), 100);
-        assert_eq!(value.load(Ordering::Relaxed), 100);
-
-        // Ensure that dropping the drain iterator does not leak anything.
-        drop(hm.clone().par_drain());
-
-        {
-            assert_eq!(key.load(Ordering::Relaxed), 100);
-            assert_eq!(value.load(Ordering::Relaxed), 100);
-
-            // retain only half
-            let _v: Vec<_> = hm.drain().filter(|&(ref key, _)| key.k < 50).collect();
-            assert!(hm.is_empty());
-
-            assert_eq!(key.load(Ordering::Relaxed), 50);
-            assert_eq!(value.load(Ordering::Relaxed), 50);
-        };
-
-        assert_eq!(key.load(Ordering::Relaxed), 0);
-        assert_eq!(value.load(Ordering::Relaxed), 0);
-    }
-
-    #[test]
-    fn test_empty_iter() {
-        let mut m: HashMap<isize, bool> = HashMap::new();
-        assert_eq!(m.par_drain().count(), 0);
-        assert_eq!(m.par_keys().count(), 0);
-        assert_eq!(m.par_values().count(), 0);
-        assert_eq!(m.par_values_mut().count(), 0);
-        assert_eq!(m.par_iter().count(), 0);
-        assert_eq!(m.par_iter_mut().count(), 0);
-        assert_eq!(m.len(), 0);
-        assert!(m.is_empty());
-        assert_eq!(m.into_par_iter().count(), 0);
-    }
-
-    #[test]
-    fn test_iterate() {
-        let mut m = HashMap::with_capacity(4);
-        for i in 0..32 {
-            assert!(m.insert(i, i * 2).is_none());
-        }
-        assert_eq!(m.len(), 32);
-
-        let observed = AtomicUsize::new(0);
-
-        m.par_iter().for_each(|(k, v)| {
-            assert_eq!(*v, *k * 2);
-            observed.fetch_or(1 << *k, Ordering::Relaxed);
-        });
-        assert_eq!(observed.into_inner(), 0xFFFF_FFFF);
-    }
-
-    #[test]
-    fn test_keys() {
-        let vec = vec![(1, 'a'), (2, 'b'), (3, 'c')];
-        let map: HashMap<_, _> = vec.into_par_iter().collect();
-        let keys: Vec<_> = map.par_keys().cloned().collect();
-        assert_eq!(keys.len(), 3);
-        assert!(keys.contains(&1));
-        assert!(keys.contains(&2));
-        assert!(keys.contains(&3));
-    }
-
-    #[test]
-    fn test_values() {
-        let vec = vec![(1, 'a'), (2, 'b'), (3, 'c')];
-        let map: HashMap<_, _> = vec.into_par_iter().collect();
-        let values: Vec<_> = map.par_values().cloned().collect();
-        assert_eq!(values.len(), 3);
-        assert!(values.contains(&'a'));
-        assert!(values.contains(&'b'));
-        assert!(values.contains(&'c'));
-    }
-
-    #[test]
-    fn test_values_mut() {
-        let vec = vec![(1, 1), (2, 2), (3, 3)];
-        let mut map: HashMap<_, _> = vec.into_par_iter().collect();
-        map.par_values_mut().for_each(|value| *value = (*value) * 2);
-        let values: Vec<_> = map.par_values().cloned().collect();
-        assert_eq!(values.len(), 3);
-        assert!(values.contains(&2));
-        assert!(values.contains(&4));
-        assert!(values.contains(&6));
-    }
-
-    #[test]
-    fn test_eq() {
-        let mut m1 = HashMap::new();
-        m1.insert(1, 2);
-        m1.insert(2, 3);
-        m1.insert(3, 4);
-
-        let mut m2 = HashMap::new();
-        m2.insert(1, 2);
-        m2.insert(2, 3);
-
-        assert!(!m1.par_eq(&m2));
-
-        m2.insert(3, 4);
-
-        assert!(m1.par_eq(&m2));
-    }
-
-    #[test]
-    fn test_from_iter() {
-        let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
-
-        let map: HashMap<_, _> = xs.par_iter().cloned().collect();
-
-        for &(k, v) in &xs {
-            assert_eq!(map.get(&k), Some(&v));
-        }
-    }
-
-    #[test]
-    fn test_extend_ref() {
-        let mut a = HashMap::new();
-        a.insert(1, "one");
-        let mut b = HashMap::new();
-        b.insert(2, "two");
-        b.insert(3, "three");
-
-        a.par_extend(&b);
-
-        assert_eq!(a.len(), 3);
-        assert_eq!(a[&1], "one");
-        assert_eq!(a[&2], "two");
-        assert_eq!(a[&3], "three");
-    }
-}
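A minimal usage sketch of the parallel `HashMap` API removed above, again assuming a downstream crate with the `rayon` feature enabled (names are illustrative):

use hashbrown::HashMap;
use rayon::prelude::*;

fn parallel_map_demo() {
    // Build a map from a parallel iterator (FromParallelIterator).
    let mut map: HashMap<u32, u32> = (0..1_000u32).into_par_iter().map(|i| (i, i * 2)).collect();

    // Borrowing parallel iteration over keys and values.
    let key_sum: u32 = map.par_keys().cloned().sum();
    let value_sum: u32 = map.par_values().cloned().sum();
    assert_eq!(value_sum, key_sum * 2);

    // Mutable parallel iteration over values.
    map.par_values_mut().for_each(|v| *v += 1);

    // Parallel equality check against a clone.
    let other = map.clone();
    assert!(map.par_eq(&other));

    // Drain in parallel; the map ends up empty but keeps its allocation.
    let drained: usize = map.par_drain().count();
    assert_eq!(drained, 1_000);
    assert!(map.is_empty());
}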
deleted file mode 100644
--- a/third_party/rust/hashbrown/src/external_trait_impls/rayon/mod.rs
+++ /dev/null
@@ -1,5 +0,0 @@
-mod helpers;
-mod raw;
-
-pub(crate) mod map;
-pub(crate) mod set;
deleted file mode 100644
--- a/third_party/rust/hashbrown/src/external_trait_impls/rayon/raw.rs
+++ /dev/null
@@ -1,201 +0,0 @@
-use alloc::alloc::dealloc;
-use core::marker::PhantomData;
-use core::mem;
-use core::ptr::NonNull;
-use raw::Bucket;
-use raw::{RawIterRange, RawTable};
-use rayon::iter::{
-    plumbing::{self, Folder, UnindexedConsumer, UnindexedProducer},
-    ParallelIterator,
-};
-use scopeguard::guard;
-
-/// Parallel iterator which returns a raw pointer to every full bucket in the table.
-pub struct RawParIter<T> {
-    iter: RawIterRange<T>,
-}
-
-unsafe impl<T> Send for RawParIter<T> {}
-
-impl<T> ParallelIterator for RawParIter<T> {
-    type Item = Bucket<T>;
-
-    #[inline]
-    fn drive_unindexed<C>(self, consumer: C) -> C::Result
-    where
-        C: UnindexedConsumer<Self::Item>,
-    {
-        let producer = ParIterProducer { iter: self.iter };
-        plumbing::bridge_unindexed(producer, consumer)
-    }
-}
-
-/// Producer which returns a `Bucket<T>` for every element.
-struct ParIterProducer<T> {
-    iter: RawIterRange<T>,
-}
-
-unsafe impl<T> Send for ParIterProducer<T> {}
-
-impl<T> UnindexedProducer for ParIterProducer<T> {
-    type Item = Bucket<T>;
-
-    #[inline]
-    fn split(self) -> (Self, Option<Self>) {
-        let (left, right) = self.iter.split();
-        let left = ParIterProducer { iter: left };
-        let right = right.map(|right| ParIterProducer { iter: right });
-        (left, right)
-    }
-
-    #[inline]
-    fn fold_with<F>(self, folder: F) -> F
-    where
-        F: Folder<Self::Item>,
-    {
-        folder.consume_iter(self.iter)
-    }
-}
-
-/// Parallel iterator which consumes a table and returns elements.
-pub struct RawIntoParIter<T> {
-    table: RawTable<T>,
-}
-
-unsafe impl<T> Send for RawIntoParIter<T> {}
-
-impl<T: Send> ParallelIterator for RawIntoParIter<T> {
-    type Item = T;
-
-    #[inline]
-    fn drive_unindexed<C>(self, consumer: C) -> C::Result
-    where
-        C: UnindexedConsumer<Self::Item>,
-    {
-        let iter = unsafe { self.table.iter().iter };
-        let _guard = guard(self.table.into_alloc(), |alloc| {
-            if let Some((ptr, layout)) = *alloc {
-                unsafe {
-                    dealloc(ptr.as_ptr(), layout);
-                }
-            }
-        });
-        let producer = ParDrainProducer { iter };
-        plumbing::bridge_unindexed(producer, consumer)
-    }
-}
-
-/// Parallel iterator which consumes elements without freeing the table storage.
-pub struct RawParDrain<'a, T> {
-    // We don't use a &'a RawTable<T> because we want RawParDrain to be
-    // covariant over 'a.
-    table: NonNull<RawTable<T>>,
-    _marker: PhantomData<&'a RawTable<T>>,
-}
-
-unsafe impl<'a, T> Send for RawParDrain<'a, T> {}
-
-impl<'a, T: Send> ParallelIterator for RawParDrain<'a, T> {
-    type Item = T;
-
-    #[inline]
-    fn drive_unindexed<C>(self, consumer: C) -> C::Result
-    where
-        C: UnindexedConsumer<Self::Item>,
-    {
-        let _guard = guard(self.table, |table| unsafe {
-            table.as_mut().clear_no_drop()
-        });
-        let iter = unsafe { self.table.as_ref().iter().iter };
-        mem::forget(self);
-        let producer = ParDrainProducer { iter };
-        plumbing::bridge_unindexed(producer, consumer)
-    }
-}
-
-impl<'a, T> Drop for RawParDrain<'a, T> {
-    fn drop(&mut self) {
-        // If drive_unindexed is not called then simply clear the table.
-        unsafe { self.table.as_mut().clear() }
-    }
-}
-
-/// Producer which will consume all elements in the range, even if it is dropped
-/// halfway through.
-struct ParDrainProducer<T> {
-    iter: RawIterRange<T>,
-}
-
-unsafe impl<T: Send> Send for ParDrainProducer<T> {}
-
-impl<T: Send> UnindexedProducer for ParDrainProducer<T> {
-    type Item = T;
-
-    #[inline]
-    fn split(self) -> (Self, Option<Self>) {
-        let (left, right) = self.iter.clone().split();
-        mem::forget(self);
-        let left = ParDrainProducer { iter: left };
-        let right = right.map(|right| ParDrainProducer { iter: right });
-        (left, right)
-    }
-
-    #[inline]
-    fn fold_with<F>(mut self, mut folder: F) -> F
-    where
-        F: Folder<Self::Item>,
-    {
-        // Make sure to modify the iterator in-place so that any remaining
-        // elements are processed in our Drop impl.
-        while let Some(item) = self.iter.next() {
-            folder = folder.consume(unsafe { item.read() });
-            if folder.full() {
-                return folder;
-            }
-        }
-
-        // If we processed all elements then we don't need to run the drop.
-        mem::forget(self);
-        folder
-    }
-}
-
-impl<T> Drop for ParDrainProducer<T> {
-    #[inline]
-    fn drop(&mut self) {
-        // Drop all remaining elements
-        if mem::needs_drop::<T>() {
-            while let Some(item) = self.iter.next() {
-                unsafe {
-                    item.drop();
-                }
-            }
-        }
-    }
-}
-
-impl<T> RawTable<T> {
-    /// Returns a parallel iterator over the elements in a `RawTable`.
-    #[inline]
-    pub fn par_iter(&self) -> RawParIter<T> {
-        RawParIter {
-            iter: unsafe { self.iter().iter },
-        }
-    }
-
-    /// Returns a parallel iterator over the elements in a `RawTable`.
-    #[inline]
-    pub fn into_par_iter(self) -> RawIntoParIter<T> {
-        RawIntoParIter { table: self }
-    }
-
-    /// Returns a parallel iterator which consumes all elements of a `RawTable`
-    /// without freeing its memory allocation.
-    #[inline]
-    pub fn par_drain(&mut self) -> RawParDrain<T> {
-        RawParDrain {
-            table: NonNull::from(self),
-            _marker: PhantomData,
-        }
-    }
-}
deleted file mode 100644
--- a/third_party/rust/hashbrown/src/external_trait_impls/rayon/set.rs
+++ /dev/null
@@ -1,642 +0,0 @@
-//! Rayon extensions for `HashSet`.
-
-use core::hash::{BuildHasher, Hash};
-use hash_set::HashSet;
-use rayon::iter::plumbing::UnindexedConsumer;
-use rayon::iter::{FromParallelIterator, IntoParallelIterator, ParallelExtend, ParallelIterator};
-
-/// Parallel iterator over elements of a consumed set.
-///
-/// This iterator is created by the [`into_par_iter`] method on [`HashSet`]
-/// (provided by the [`IntoParallelIterator`] trait).
-/// See its documentation for more.
-///
-/// [`into_par_iter`]: /hashbrown/struct.HashSet.html#method.into_par_iter
-/// [`HashSet`]: /hashbrown/struct.HashSet.html
-/// [`IntoParallelIterator`]: https://docs.rs/rayon/1.0/rayon/iter/trait.IntoParallelIterator.html
-pub struct IntoParIter<T, S> {
-    set: HashSet<T, S>,
-}
-
-impl<T: Send, S: Send> ParallelIterator for IntoParIter<T, S> {
-    type Item = T;
-
-    fn drive_unindexed<C>(self, consumer: C) -> C::Result
-    where
-        C: UnindexedConsumer<Self::Item>,
-    {
-        self.set
-            .map
-            .into_par_iter()
-            .map(|(k, _)| k)
-            .drive_unindexed(consumer)
-    }
-}
-
-/// Parallel draining iterator over entries of a set.
-///
-/// This iterator is created by the [`par_drain`] method on [`HashSet`].
-/// See its documentation for more.
-///
-/// [`par_drain`]: /hashbrown/struct.HashSet.html#method.par_drain
-/// [`HashSet`]: /hashbrown/struct.HashSet.html
-pub struct ParDrain<'a, T, S> {
-    set: &'a mut HashSet<T, S>,
-}
-
-impl<'a, T: Send, S: Send> ParallelIterator for ParDrain<'a, T, S> {
-    type Item = T;
-
-    fn drive_unindexed<C>(self, consumer: C) -> C::Result
-    where
-        C: UnindexedConsumer<Self::Item>,
-    {
-        self.set
-            .map
-            .par_drain()
-            .map(|(k, _)| k)
-            .drive_unindexed(consumer)
-    }
-}
-
-/// Parallel iterator over shared references to elements in a set.
-///
-/// This iterator is created by the [`par_iter`] method on [`HashSet`]
-/// (provided by the [`IntoParallelRefIterator`] trait).
-/// See its documentation for more.
-///
-/// [`par_iter`]: /hashbrown/struct.HashSet.html#method.par_iter
-/// [`HashSet`]: /hashbrown/struct.HashSet.html
-/// [`IntoParallelRefIterator`]: https://docs.rs/rayon/1.0/rayon/iter/trait.IntoParallelRefIterator.html
-pub struct ParIter<'a, T: 'a, S: 'a> {
-    set: &'a HashSet<T, S>,
-}
-
-impl<'a, T: Sync, S: Sync> ParallelIterator for ParIter<'a, T, S> {
-    type Item = &'a T;
-
-    fn drive_unindexed<C>(self, consumer: C) -> C::Result
-    where
-        C: UnindexedConsumer<Self::Item>,
-    {
-        self.set.map.par_keys().drive_unindexed(consumer)
-    }
-}
-
-/// Parallel iterator over shared references to elements in the difference of
-/// sets.
-///
-/// This iterator is created by the [`par_difference`] method on [`HashSet`].
-/// See its documentation for more.
-///
-/// [`par_difference`]: /hashbrown/struct.HashSet.html#method.par_difference
-/// [`HashSet`]: /hashbrown/struct.HashSet.html
-pub struct ParDifference<'a, T: 'a, S: 'a> {
-    a: &'a HashSet<T, S>,
-    b: &'a HashSet<T, S>,
-}
-
-impl<'a, T, S> ParallelIterator for ParDifference<'a, T, S>
-where
-    T: Eq + Hash + Sync,
-    S: BuildHasher + Sync,
-{
-    type Item = &'a T;
-
-    fn drive_unindexed<C>(self, consumer: C) -> C::Result
-    where
-        C: UnindexedConsumer<Self::Item>,
-    {
-        self.a
-            .into_par_iter()
-            .filter(|&x| !self.b.contains(x))
-            .drive_unindexed(consumer)
-    }
-}
-
-/// Parallel iterator over shared references to elements in the symmetric
-/// difference of sets.
-///
-/// This iterator is created by the [`par_symmetric_difference`] method on
-/// [`HashSet`].
-/// See its documentation for more.
-///
-/// [`par_symmetric_difference`]: /hashbrown/struct.HashSet.html#method.par_symmetric_difference
-/// [`HashSet`]: /hashbrown/struct.HashSet.html
-pub struct ParSymmetricDifference<'a, T: 'a, S: 'a> {
-    a: &'a HashSet<T, S>,
-    b: &'a HashSet<T, S>,
-}
-
-impl<'a, T, S> ParallelIterator for ParSymmetricDifference<'a, T, S>
-where
-    T: Eq + Hash + Sync,
-    S: BuildHasher + Sync,
-{
-    type Item = &'a T;
-
-    fn drive_unindexed<C>(self, consumer: C) -> C::Result
-    where
-        C: UnindexedConsumer<Self::Item>,
-    {
-        self.a
-            .par_difference(self.b)
-            .chain(self.b.par_difference(self.a))
-            .drive_unindexed(consumer)
-    }
-}
-
-/// Parallel iterator over shared references to elements in the intersection of
-/// sets.
-///
-/// This iterator is created by the [`par_intersection`] method on [`HashSet`].
-/// See its documentation for more.
-///
-/// [`par_intersection`]: /hashbrown/struct.HashSet.html#method.par_intersection
-/// [`HashSet`]: /hashbrown/struct.HashSet.html
-pub struct ParIntersection<'a, T: 'a, S: 'a> {
-    a: &'a HashSet<T, S>,
-    b: &'a HashSet<T, S>,
-}
-
-impl<'a, T, S> ParallelIterator for ParIntersection<'a, T, S>
-where
-    T: Eq + Hash + Sync,
-    S: BuildHasher + Sync,
-{
-    type Item = &'a T;
-
-    fn drive_unindexed<C>(self, consumer: C) -> C::Result
-    where
-        C: UnindexedConsumer<Self::Item>,
-    {
-        self.a
-            .into_par_iter()
-            .filter(|&x| self.b.contains(x))
-            .drive_unindexed(consumer)
-    }
-}
-
-/// Parallel iterator over shared references to elements in the union of sets.
-///
-/// This iterator is created by the [`par_union`] method on [`HashSet`].
-/// See its documentation for more.
-///
-/// [`par_union`]: /hashbrown/struct.HashSet.html#method.par_union
-/// [`HashSet`]: /hashbrown/struct.HashSet.html
-pub struct ParUnion<'a, T: 'a, S: 'a> {
-    a: &'a HashSet<T, S>,
-    b: &'a HashSet<T, S>,
-}
-
-impl<'a, T, S> ParallelIterator for ParUnion<'a, T, S>
-where
-    T: Eq + Hash + Sync,
-    S: BuildHasher + Sync,
-{
-    type Item = &'a T;
-
-    fn drive_unindexed<C>(self, consumer: C) -> C::Result
-    where
-        C: UnindexedConsumer<Self::Item>,
-    {
-        self.a
-            .into_par_iter()
-            .chain(self.b.par_difference(self.a))
-            .drive_unindexed(consumer)
-    }
-}
-
-impl<T, S> HashSet<T, S>
-where
-    T: Eq + Hash + Sync,
-    S: BuildHasher + Sync,
-{
-    /// Visits (potentially in parallel) the values representing the difference,
-    /// i.e. the values that are in `self` but not in `other`.
-    #[inline]
-    pub fn par_difference<'a>(&'a self, other: &'a Self) -> ParDifference<'a, T, S> {
-        ParDifference { a: self, b: other }
-    }
-
-    /// Visits (potentially in parallel) the values representing the symmetric
-    /// difference, i.e. the values that are in `self` or in `other` but not in both.
-    #[inline]
-    pub fn par_symmetric_difference<'a>(
-        &'a self,
-        other: &'a Self,
-    ) -> ParSymmetricDifference<'a, T, S> {
-        ParSymmetricDifference { a: self, b: other }
-    }
-
-    /// Visits (potentially in parallel) the values representing the
-    /// intersection, i.e. the values that are both in `self` and `other`.
-    #[inline]
-    pub fn par_intersection<'a>(&'a self, other: &'a Self) -> ParIntersection<'a, T, S> {
-        ParIntersection { a: self, b: other }
-    }
-
-    /// Visits (potentially in parallel) the values representing the union,
-    /// i.e. all the values in `self` or `other`, without duplicates.
-    #[inline]
-    pub fn par_union<'a>(&'a self, other: &'a Self) -> ParUnion<'a, T, S> {
-        ParUnion { a: self, b: other }
-    }
-
-    /// Returns `true` if `self` has no elements in common with `other`.
-    /// This is equivalent to checking for an empty intersection.
-    ///
-    /// This method runs in a potentially parallel fashion.
-    pub fn par_is_disjoint(&self, other: &Self) -> bool {
-        self.into_par_iter().all(|x| !other.contains(x))
-    }
-
-    /// Returns `true` if the set is a subset of another,
-    /// i.e. `other` contains at least all the values in `self`.
-    ///
-    /// This method runs in a potentially parallel fashion.
-    pub fn par_is_subset(&self, other: &Self) -> bool {
-        self.into_par_iter().all(|x| other.contains(x))
-    }
-
-    /// Returns `true` if the set is a superset of another,
-    /// i.e. `self` contains at least all the values in `other`.
-    ///
-    /// This method runs in a potentially parallel fashion.
-    pub fn par_is_superset(&self, other: &Self) -> bool {
-        other.par_is_subset(self)
-    }
-
-    /// Returns `true` if the set is equal to another,
-    /// i.e. both sets contain the same values.
-    ///
-    /// This method runs in a potentially parallel fashion.
-    pub fn par_eq(&self, other: &Self) -> bool {
-        self.len() == other.len() && self.par_is_subset(other)
-    }
-}
-
-impl<T, S> HashSet<T, S>
-where
-    T: Eq + Hash + Send,
-    S: BuildHasher + Send,
-{
-    /// Consumes (potentially in parallel) all values in an arbitrary order,
-    /// while preserving the set's allocated memory for reuse.
-    #[inline]
-    pub fn par_drain(&mut self) -> ParDrain<T, S> {
-        ParDrain { set: self }
-    }
-}
-
-impl<T: Send, S: Send> IntoParallelIterator for HashSet<T, S> {
-    type Item = T;
-    type Iter = IntoParIter<T, S>;
-
-    #[inline]
-    fn into_par_iter(self) -> Self::Iter {
-        IntoParIter { set: self }
-    }
-}
-
-impl<'a, T: Sync, S: Sync> IntoParallelIterator for &'a HashSet<T, S> {
-    type Item = &'a T;
-    type Iter = ParIter<'a, T, S>;
-
-    #[inline]
-    fn into_par_iter(self) -> Self::Iter {
-        ParIter { set: self }
-    }
-}
-
-/// Collect values from a parallel iterator into a hashset.
-impl<T, S> FromParallelIterator<T> for HashSet<T, S>
-where
-    T: Eq + Hash + Send,
-    S: BuildHasher + Default,
-{
-    fn from_par_iter<P>(par_iter: P) -> Self
-    where
-        P: IntoParallelIterator<Item = T>,
-    {
-        let mut set = HashSet::default();
-        set.par_extend(par_iter);
-        set
-    }
-}
-
-/// Extend a hash set with items from a parallel iterator.
-impl<T, S> ParallelExtend<T> for HashSet<T, S>
-where
-    T: Eq + Hash + Send,
-    S: BuildHasher,
-{
-    fn par_extend<I>(&mut self, par_iter: I)
-    where
-        I: IntoParallelIterator<Item = T>,
-    {
-        extend(self, par_iter);
-    }
-}
-
-/// Extend a hash set with copied items from a parallel iterator.
-impl<'a, T, S> ParallelExtend<&'a T> for HashSet<T, S>
-where
-    T: 'a + Copy + Eq + Hash + Sync,
-    S: BuildHasher,
-{
-    fn par_extend<I>(&mut self, par_iter: I)
-    where
-        I: IntoParallelIterator<Item = &'a T>,
-    {
-        extend(self, par_iter);
-    }
-}
-
-// This is equal to the normal `HashSet` -- no custom advantage.
-fn extend<T, S, I>(set: &mut HashSet<T, S>, par_iter: I)
-where
-    T: Eq + Hash,
-    S: BuildHasher,
-    I: IntoParallelIterator,
-    HashSet<T, S>: Extend<I::Item>,
-{
-    let (list, len) = super::helpers::collect(par_iter);
-
-    // Values may already be present or may appear multiple times in the iterator.
-    // Reserve the entire length if the set is empty.
-    // Otherwise reserve half the length (rounded up), so the set
-    // will only resize twice in the worst case.
-    let reserve = if set.is_empty() { len } else { (len + 1) / 2 };
-    set.reserve(reserve);
-    for vec in list {
-        set.extend(vec);
-    }
-}
-
-#[cfg(test)]
-mod test_par_set {
-    use alloc::vec::Vec;
-    use core::sync::atomic::{AtomicUsize, Ordering};
-
-    use rayon::prelude::*;
-
-    use hash_set::HashSet;
-
-    #[test]
-    fn test_disjoint() {
-        let mut xs = HashSet::new();
-        let mut ys = HashSet::new();
-        assert!(xs.par_is_disjoint(&ys));
-        assert!(ys.par_is_disjoint(&xs));
-        assert!(xs.insert(5));
-        assert!(ys.insert(11));
-        assert!(xs.par_is_disjoint(&ys));
-        assert!(ys.par_is_disjoint(&xs));
-        assert!(xs.insert(7));
-        assert!(xs.insert(19));
-        assert!(xs.insert(4));
-        assert!(ys.insert(2));
-        assert!(ys.insert(-11));
-        assert!(xs.par_is_disjoint(&ys));
-        assert!(ys.par_is_disjoint(&xs));
-        assert!(ys.insert(7));
-        assert!(!xs.par_is_disjoint(&ys));
-        assert!(!ys.par_is_disjoint(&xs));
-    }
-
-    #[test]
-    fn test_subset_and_superset() {
-        let mut a = HashSet::new();
-        assert!(a.insert(0));
-        assert!(a.insert(5));
-        assert!(a.insert(11));
-        assert!(a.insert(7));
-
-        let mut b = HashSet::new();
-        assert!(b.insert(0));
-        assert!(b.insert(7));
-        assert!(b.insert(19));
-        assert!(b.insert(250));
-        assert!(b.insert(11));
-        assert!(b.insert(200));
-
-        assert!(!a.par_is_subset(&b));
-        assert!(!a.par_is_superset(&b));
-        assert!(!b.par_is_subset(&a));
-        assert!(!b.par_is_superset(&a));
-
-        assert!(b.insert(5));
-
-        assert!(a.par_is_subset(&b));
-        assert!(!a.par_is_superset(&b));
-        assert!(!b.par_is_subset(&a));
-        assert!(b.par_is_superset(&a));
-    }
-
-    #[test]
-    fn test_iterate() {
-        let mut a = HashSet::new();
-        for i in 0..32 {
-            assert!(a.insert(i));
-        }
-        let observed = AtomicUsize::new(0);
-        a.par_iter().for_each(|k| {
-            observed.fetch_or(1 << *k, Ordering::Relaxed);
-        });
-        assert_eq!(observed.into_inner(), 0xFFFF_FFFF);
-    }
-
-    #[test]
-    fn test_intersection() {
-        let mut a = HashSet::new();
-        let mut b = HashSet::new();
-
-        assert!(a.insert(11));
-        assert!(a.insert(1));
-        assert!(a.insert(3));
-        assert!(a.insert(77));
-        assert!(a.insert(103));
-        assert!(a.insert(5));
-        assert!(a.insert(-5));
-
-        assert!(b.insert(2));
-        assert!(b.insert(11));
-        assert!(b.insert(77));
-        assert!(b.insert(-9));
-        assert!(b.insert(-42));
-        assert!(b.insert(5));
-        assert!(b.insert(3));
-
-        let expected = [3, 5, 11, 77];
-        let i = a
-            .par_intersection(&b)
-            .map(|x| {
-                assert!(expected.contains(x));
-                1
-            })
-            .sum::<usize>();
-        assert_eq!(i, expected.len());
-    }
-
-    #[test]
-    fn test_difference() {
-        let mut a = HashSet::new();
-        let mut b = HashSet::new();
-
-        assert!(a.insert(1));
-        assert!(a.insert(3));
-        assert!(a.insert(5));
-        assert!(a.insert(9));
-        assert!(a.insert(11));
-
-        assert!(b.insert(3));
-        assert!(b.insert(9));
-
-        let expected = [1, 5, 11];
-        let i = a
-            .par_difference(&b)
-            .map(|x| {
-                assert!(expected.contains(x));
-                1
-            })
-            .sum::<usize>();
-        assert_eq!(i, expected.len());
-    }
-
-    #[test]
-    fn test_symmetric_difference() {
-        let mut a = HashSet::new();
-        let mut b = HashSet::new();
-
-        assert!(a.insert(1));
-        assert!(a.insert(3));
-        assert!(a.insert(5));
-        assert!(a.insert(9));
-        assert!(a.insert(11));
-
-        assert!(b.insert(-2));
-        assert!(b.insert(3));
-        assert!(b.insert(9));
-        assert!(b.insert(14));
-        assert!(b.insert(22));
-
-        let expected = [-2, 1, 5, 11, 14, 22];
-        let i = a
-            .par_symmetric_difference(&b)
-            .map(|x| {
-                assert!(expected.contains(x));
-                1
-            })
-            .sum::<usize>();
-        assert_eq!(i, expected.len());
-    }
-
-    #[test]
-    fn test_union() {
-        let mut a = HashSet::new();
-        let mut b = HashSet::new();
-
-        assert!(a.insert(1));
-        assert!(a.insert(3));
-        assert!(a.insert(5));
-        assert!(a.insert(9));
-        assert!(a.insert(11));
-        assert!(a.insert(16));
-        assert!(a.insert(19));
-        assert!(a.insert(24));
-
-        assert!(b.insert(-2));
-        assert!(b.insert(1));
-        assert!(b.insert(5));
-        assert!(b.insert(9));
-        assert!(b.insert(13));
-        assert!(b.insert(19));
-
-        let expected = [-2, 1, 3, 5, 9, 11, 13, 16, 19, 24];
-        let i = a
-            .par_union(&b)
-            .map(|x| {
-                assert!(expected.contains(x));
-                1
-            })
-            .sum::<usize>();
-        assert_eq!(i, expected.len());
-    }
-
-    #[test]
-    fn test_from_iter() {
-        let xs = [1, 2, 3, 4, 5, 6, 7, 8, 9];
-
-        let set: HashSet<_> = xs.par_iter().cloned().collect();
-
-        for x in &xs {
-            assert!(set.contains(x));
-        }
-    }
-
-    #[test]
-    fn test_move_iter() {
-        let hs = {
-            let mut hs = HashSet::new();
-
-            hs.insert('a');
-            hs.insert('b');
-
-            hs
-        };
-
-        let v = hs.into_par_iter().collect::<Vec<char>>();
-        assert!(v == ['a', 'b'] || v == ['b', 'a']);
-    }
-
-    #[test]
-    fn test_eq() {
-        // These constants once happened to expose a bug in insert().
-        // I'm keeping them around to prevent a regression.
-        let mut s1 = HashSet::new();
-
-        s1.insert(1);
-        s1.insert(2);
-        s1.insert(3);
-
-        let mut s2 = HashSet::new();
-
-        s2.insert(1);
-        s2.insert(2);
-
-        assert!(!s1.par_eq(&s2));
-
-        s2.insert(3);
-
-        assert!(s1.par_eq(&s2));
-    }
-
-    #[test]
-    fn test_extend_ref() {
-        let mut a = HashSet::new();
-        a.insert(1);
-
-        a.par_extend(&[2, 3, 4][..]);
-
-        assert_eq!(a.len(), 4);
-        assert!(a.contains(&1));
-        assert!(a.contains(&2));
-        assert!(a.contains(&3));
-        assert!(a.contains(&4));
-
-        let mut b = HashSet::new();
-        b.insert(5);
-        b.insert(6);
-
-        a.par_extend(&b);
-
-        assert_eq!(a.len(), 6);
-        assert!(a.contains(&1));
-        assert!(a.contains(&2));
-        assert!(a.contains(&3));
-        assert!(a.contains(&4));
-        assert!(a.contains(&5));
-        assert!(a.contains(&6));
-    }
-}
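A minimal usage sketch of the parallel `HashSet` operations removed above, assuming the same `rayon` feature in a downstream crate (names are illustrative):

use hashbrown::HashSet;
use rayon::prelude::*;

fn parallel_set_demo() {
    let a: HashSet<i32> = (0..100).into_par_iter().collect();
    let b: HashSet<i32> = (50..150).into_par_iter().collect();

    // The set-operation iterators are lazy; count() drives them in parallel.
    assert_eq!(a.par_intersection(&b).count(), 50);
    assert_eq!(a.par_union(&b).count(), 150);
    assert_eq!(a.par_difference(&b).count(), 50);
    assert_eq!(a.par_symmetric_difference(&b).count(), 100);

    // Containment predicates run as parallel reductions.
    assert!(!a.par_is_disjoint(&b));
    assert!(!a.par_is_subset(&b));
    assert!(!a.par_eq(&b));
}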
deleted file mode 100644
--- a/third_party/rust/hashbrown/src/external_trait_impls/serde.rs
+++ /dev/null
@@ -1,200 +0,0 @@
-mod size_hint {
-    use core::cmp;
-
-    /// This presumably exists to prevent denial of service attacks.
-    ///
-    /// Original discussion: https://github.com/serde-rs/serde/issues/1114.
-    #[inline]
-    pub(super) fn cautious(hint: Option<usize>) -> usize {
-        cmp::min(hint.unwrap_or(0), 4096)
-    }
-}
-
-mod map {
-    use core::fmt;
-    use core::hash::{BuildHasher, Hash};
-    use core::marker::PhantomData;
-    use serde::de::{Deserialize, Deserializer, MapAccess, Visitor};
-    use serde::ser::{Serialize, Serializer};
-
-    use hash_map::HashMap;
-
-    use super::size_hint;
-
-    impl<K, V, H> Serialize for HashMap<K, V, H>
-    where
-        K: Serialize + Eq + Hash,
-        V: Serialize,
-        H: BuildHasher,
-    {
-        #[inline]
-        fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-        where
-            S: Serializer,
-        {
-            serializer.collect_map(self)
-        }
-    }
-
-    impl<'de, K, V, S> Deserialize<'de> for HashMap<K, V, S>
-    where
-        K: Deserialize<'de> + Eq + Hash,
-        V: Deserialize<'de>,
-        S: BuildHasher + Default,
-    {
-        fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
-        where
-            D: Deserializer<'de>,
-        {
-            struct MapVisitor<K, V, S> {
-                marker: PhantomData<HashMap<K, V, S>>,
-            }
-
-            impl<'de, K, V, S> Visitor<'de> for MapVisitor<K, V, S>
-            where
-                K: Deserialize<'de> + Eq + Hash,
-                V: Deserialize<'de>,
-                S: BuildHasher + Default,
-            {
-                type Value = HashMap<K, V, S>;
-
-                fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
-                    formatter.write_str("a map")
-                }
-
-                #[inline]
-                fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>
-                where
-                    A: MapAccess<'de>,
-                {
-                    let mut values = HashMap::with_capacity_and_hasher(
-                        size_hint::cautious(map.size_hint()),
-                        S::default(),
-                    );
-
-                    while let Some((key, value)) = map.next_entry()? {
-                        values.insert(key, value);
-                    }
-
-                    Ok(values)
-                }
-            }
-
-            let visitor = MapVisitor {
-                marker: PhantomData,
-            };
-            deserializer.deserialize_map(visitor)
-        }
-    }
-}
-
-mod set {
-    use core::fmt;
-    use core::hash::{BuildHasher, Hash};
-    use core::marker::PhantomData;
-    use serde::de::{Deserialize, Deserializer, SeqAccess, Visitor};
-    use serde::ser::{Serialize, Serializer};
-
-    use hash_set::HashSet;
-
-    use super::size_hint;
-
-    impl<T, H> Serialize for HashSet<T, H>
-    where
-        T: Serialize + Eq + Hash,
-        H: BuildHasher,
-    {
-        #[inline]
-        fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-        where
-            S: Serializer,
-        {
-            serializer.collect_seq(self)
-        }
-    }
-
-    impl<'de, T, S> Deserialize<'de> for HashSet<T, S>
-    where
-        T: Deserialize<'de> + Eq + Hash,
-        S: BuildHasher + Default,
-    {
-        fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
-        where
-            D: Deserializer<'de>,
-        {
-            struct SeqVisitor<T, S> {
-                marker: PhantomData<HashSet<T, S>>,
-            }
-
-            impl<'de, T, S> Visitor<'de> for SeqVisitor<T, S>
-            where
-                T: Deserialize<'de> + Eq + Hash,
-                S: BuildHasher + Default,
-            {
-                type Value = HashSet<T, S>;
-
-                fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
-                    formatter.write_str("a sequence")
-                }
-
-                #[inline]
-                fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
-                where
-                    A: SeqAccess<'de>,
-                {
-                    let mut values = HashSet::with_capacity_and_hasher(
-                        size_hint::cautious(seq.size_hint()),
-                        S::default(),
-                    );
-
-                    while let Some(value) = seq.next_element()? {
-                        values.insert(value);
-                    }
-
-                    Ok(values)
-                }
-            }
-
-            let visitor = SeqVisitor {
-                marker: PhantomData,
-            };
-            deserializer.deserialize_seq(visitor)
-        }
-
-        fn deserialize_in_place<D>(deserializer: D, place: &mut Self) -> Result<(), D::Error>
-        where
-            D: Deserializer<'de>,
-        {
-            struct SeqInPlaceVisitor<'a, T: 'a, S: 'a>(&'a mut HashSet<T, S>);
-
-            impl<'a, 'de, T, S> Visitor<'de> for SeqInPlaceVisitor<'a, T, S>
-            where
-                T: Deserialize<'de> + Eq + Hash,
-                S: BuildHasher + Default,
-            {
-                type Value = ();
-
-                fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
-                    formatter.write_str("a sequence")
-                }
-
-                #[inline]
-                fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
-                where
-                    A: SeqAccess<'de>,
-                {
-                    self.0.clear();
-                    self.0.reserve(size_hint::cautious(seq.size_hint()));
-
-                    while let Some(value) = seq.next_element()? {
-                        self.0.insert(value);
-                    }
-
-                    Ok(())
-                }
-            }
-
-            deserializer.deserialize_seq(SeqInPlaceVisitor(place))
-        }
-    }
-}
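A minimal round-trip sketch of the serde support removed above, assuming hashbrown's `serde` feature plus the `serde_json` crate as the wire format (serde_json is not part of this changeset; it is used here only for illustration):

use hashbrown::HashMap;

fn serde_demo() -> Result<(), serde_json::Error> {
    let mut map: HashMap<String, u32> = HashMap::new();
    map.insert("answer".to_string(), 42);

    // Serialize goes through Serializer::collect_map (see the impl above).
    let json = serde_json::to_string(&map)?;

    // Deserialize pre-reserves using the cautious size hint (capped at 4096).
    let back: HashMap<String, u32> = serde_json::from_str(&json)?;
    assert_eq!(back.get("answer"), Some(&42));
    Ok(())
}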
deleted file mode 100644
--- a/third_party/rust/hashbrown/src/fx.rs
+++ /dev/null
@@ -1,112 +0,0 @@
-//! Fast, non-cryptographic hash used by rustc and Firefox.
-
-use core::default::Default;
-use core::hash::{BuildHasherDefault, Hasher};
-use core::mem::size_of;
-use core::ops::BitXor;
-
-use byteorder::{ByteOrder, NativeEndian};
-
-/// Type alias for a `HashBuilder` using the `fx` hash algorithm.
-pub type FxHashBuilder = BuildHasherDefault<FxHasher>;
-
-/// A speedy hash algorithm for use within rustc. The hashmap in liballoc
-/// by default uses SipHash which isn't quite as speedy as we want. In the
-/// compiler we're not really worried about DOS attempts, so we use a fast
-/// non-cryptographic hash.
-///
-/// This is the same as the algorithm used by Firefox -- which is a homespun
-/// one not based on any widely-known algorithm -- though modified to produce
-/// 64-bit hash values instead of 32-bit hash values. It consistently
-/// out-performs an FNV-based hash within rustc itself -- the collision rate is
-/// similar to or slightly worse than FNV's, but the speed of the hash function
-/// itself is much higher because it works on up to 8 bytes at a time.
-pub struct FxHasher {
-    hash: usize,
-}
-
-#[cfg(target_pointer_width = "32")]
-const K: usize = 0x9e3779b9;
-#[cfg(target_pointer_width = "64")]
-const K: usize = 0x517cc1b727220a95;
-
-impl Default for FxHasher {
-    #[inline]
-    fn default() -> FxHasher {
-        FxHasher { hash: 0 }
-    }
-}
-
-impl FxHasher {
-    #[inline]
-    fn add_to_hash(&mut self, i: usize) {
-        self.hash = self.hash.rotate_left(5).bitxor(i).wrapping_mul(K);
-    }
-}
-
-impl Hasher for FxHasher {
-    #[inline]
-    fn write(&mut self, mut bytes: &[u8]) {
-        #[cfg(target_pointer_width = "32")]
-        let read_usize = |bytes| NativeEndian::read_u32(bytes);
-        #[cfg(target_pointer_width = "64")]
-        let read_usize = |bytes| NativeEndian::read_u64(bytes);
-
-        let mut hash = FxHasher { hash: self.hash };
-        assert!(size_of::<usize>() <= 8);
-        while bytes.len() >= size_of::<usize>() {
-            hash.add_to_hash(read_usize(bytes) as usize);
-            bytes = &bytes[size_of::<usize>()..];
-        }
-        if (size_of::<usize>() > 4) && (bytes.len() >= 4) {
-            hash.add_to_hash(NativeEndian::read_u32(bytes) as usize);
-            bytes = &bytes[4..];
-        }
-        if (size_of::<usize>() > 2) && bytes.len() >= 2 {
-            hash.add_to_hash(NativeEndian::read_u16(bytes) as usize);
-            bytes = &bytes[2..];
-        }
-        if (size_of::<usize>() > 1) && bytes.len() >= 1 {
-            hash.add_to_hash(bytes[0] as usize);
-        }
-        self.hash = hash.hash;
-    }
-
-    #[inline]
-    fn write_u8(&mut self, i: u8) {
-        self.add_to_hash(i as usize);
-    }
-
-    #[inline]
-    fn write_u16(&mut self, i: u16) {
-        self.add_to_hash(i as usize);
-    }
-
-    #[inline]
-    fn write_u32(&mut self, i: u32) {
-        self.add_to_hash(i as usize);
-    }
-
-    #[cfg(target_pointer_width = "32")]
-    #[inline]
-    fn write_u64(&mut self, i: u64) {
-        self.add_to_hash(i as usize);
-        self.add_to_hash((i >> 32) as usize);
-    }
-
-    #[cfg(target_pointer_width = "64")]
-    #[inline]
-    fn write_u64(&mut self, i: u64) {
-        self.add_to_hash(i as usize);
-    }
-
-    #[inline]
-    fn write_usize(&mut self, i: usize) {
-        self.add_to_hash(i);
-    }
-
-    #[inline]
-    fn finish(&self) -> u64 {
-        self.hash as u64
-    }
-}
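A minimal sketch showing how the `FxHasher` above is consumed in practice: through the `FxHashBuilder` alias, which map.rs (below) re-exports as `DefaultHashBuilder`. Assumes a downstream crate; the function name is illustrative.

use hashbrown::hash_map::DefaultHashBuilder;
use hashbrown::HashMap;

fn fx_demo() {
    // Explicitly supplying the fx build hasher is equivalent to HashMap::new(),
    // since DefaultHashBuilder is BuildHasherDefault<FxHasher>.
    let mut map: HashMap<&str, i32> = HashMap::with_hasher(DefaultHashBuilder::default());
    map.insert("a", 1);
    assert_eq!(map.get("a"), Some(&1));
}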
deleted file mode 100644
--- a/third_party/rust/hashbrown/src/lib.rs
+++ /dev/null
@@ -1,92 +0,0 @@
-//! This crate is a Rust port of Google's high-performance [SwissTable] hash
-//! map, adapted to make it a drop-in replacement for Rust's standard `HashMap`
-//! and `HashSet` types.
-//!
-//! The original C++ version of SwissTable can be found [here], and this
-//! [CppCon talk] gives an overview of how the algorithm works.
-//!
-//! [SwissTable]: https://abseil.io/blog/20180927-swisstables
-//! [here]: https://github.com/abseil/abseil-cpp/blob/master/absl/container/internal/raw_hash_set.h
-//! [CppCon talk]: https://www.youtube.com/watch?v=ncHmEUmJZf4
-
-#![no_std]
-#![cfg_attr(
-    feature = "nightly",
-    feature(
-        alloc,
-        alloc_layout_extra,
-        allocator_api,
-        ptr_offset_from,
-        test,
-        core_intrinsics,
-        dropck_eyepatch
-    )
-)]
-#![warn(missing_docs)]
-
-#[cfg(test)]
-#[macro_use]
-extern crate std;
-#[cfg(test)]
-extern crate rand;
-
-#[cfg(feature = "nightly")]
-#[cfg_attr(test, macro_use)]
-extern crate alloc;
-extern crate byteorder;
-#[cfg(feature = "rayon")]
-extern crate rayon;
-extern crate scopeguard;
-#[cfg(feature = "serde")]
-extern crate serde;
-#[cfg(not(feature = "nightly"))]
-#[cfg_attr(test, macro_use)]
-extern crate std as alloc;
-
-mod external_trait_impls;
-mod fx;
-mod map;
-mod raw;
-mod set;
-
-pub mod hash_map {
-    //! A hash map implemented with quadratic probing and SIMD lookup.
-    pub use map::*;
-
-    #[cfg(feature = "rayon")]
-    /// [rayon]-based parallel iterator types for hash maps.
-    /// You will rarely need to interact with it directly unless you have need
-    /// to name one of the iterator types.
-    ///
-    /// [rayon]: https://docs.rs/rayon/1.0/rayon
-    pub mod rayon {
-        pub use external_trait_impls::rayon::map::*;
-    }
-}
-pub mod hash_set {
-    //! A hash set implemented as a `HashMap` where the value is `()`.
-    pub use set::*;
-
-    #[cfg(feature = "rayon")]
-    /// [rayon]-based parallel iterator types for hash sets.
-    /// You will rarely need to interact with it directly unless you have need
-    /// to name one of the iterator types.
-    ///
-    /// [rayon]: https://docs.rs/rayon/1.0/rayon
-    pub mod rayon {
-        pub use external_trait_impls::rayon::set::*;
-    }
-}
-
-pub use map::HashMap;
-pub use set::HashSet;
-
-/// Augments `AllocErr` with a CapacityOverflow variant.
-#[derive(Clone, PartialEq, Eq, Debug)]
-pub enum CollectionAllocErr {
-    /// Error due to the computed capacity exceeding the collection's maximum
-    /// (usually `isize::MAX` bytes).
-    CapacityOverflow,
-    /// Error due to the allocator (see the `AllocErr` type's docs).
-    AllocErr,
-}
deleted file mode 100644
--- a/third_party/rust/hashbrown/src/map.rs
+++ /dev/null
@@ -1,3496 +0,0 @@
-use self::Entry::*;
-
-use core::borrow::Borrow;
-use core::fmt::{self, Debug};
-use core::hash::{BuildHasher, Hash, Hasher};
-use core::iter::{FromIterator, FusedIterator};
-use core::marker::PhantomData;
-use core::mem;
-use core::ops::Index;
-use raw::{Bucket, RawDrain, RawIntoIter, RawIter, RawTable};
-use CollectionAllocErr;
-
-pub use fx::FxHashBuilder as DefaultHashBuilder;
-
-/// A hash map implemented with quadratic probing and SIMD lookup.
-///
-/// The default hashing algorithm is currently `fx`, though this is
-/// subject to change at any point in the future. This hash function is very
-/// fast for all types of keys, but this algorithm will typically *not* protect
-/// against attacks such as HashDoS.
-///
-/// The hashing algorithm can be replaced on a per-`HashMap` basis using the
-/// [`default`], [`with_hasher`], and [`with_capacity_and_hasher`] methods. Many
-/// alternative algorithms are available on crates.io, such as the [`fnv`] crate.
-///
-/// It is required that the keys implement the [`Eq`] and [`Hash`] traits, although
-/// this can frequently be achieved by using `#[derive(PartialEq, Eq, Hash)]`.
-/// If you implement these yourself, it is important that the following
-/// property holds:
-///
-/// ```text
-/// k1 == k2 -> hash(k1) == hash(k2)
-/// ```
-///
-/// In other words, if two keys are equal, their hashes must be equal.
-///
-/// It is a logic error for a key to be modified in such a way that the key's
-/// hash, as determined by the [`Hash`] trait, or its equality, as determined by
-/// the [`Eq`] trait, changes while it is in the map. This is normally only
-/// possible through [`Cell`], [`RefCell`], global state, I/O, or unsafe code.
-///
-/// It is also a logic error for the [`Hash`] implementation of a key to panic.
-/// This is generally only possible if the trait is implemented manually. If a
-/// panic does occur then the contents of the `HashMap` may become corrupted and
-/// some items may be dropped from the table.
-///
-/// # Examples
-///
-/// ```
-/// use hashbrown::HashMap;
-///
-/// // Type inference lets us omit an explicit type signature (which
-/// // would be `HashMap<String, String>` in this example).
-/// let mut book_reviews = HashMap::new();
-///
-/// // Review some books.
-/// book_reviews.insert(
-///     "Adventures of Huckleberry Finn".to_string(),
-///     "My favorite book.".to_string(),
-/// );
-/// book_reviews.insert(
-///     "Grimms' Fairy Tales".to_string(),
-///     "Masterpiece.".to_string(),
-/// );
-/// book_reviews.insert(
-///     "Pride and Prejudice".to_string(),
-///     "Very enjoyable.".to_string(),
-/// );
-/// book_reviews.insert(
-///     "The Adventures of Sherlock Holmes".to_string(),
-///     "Eye lyked it alot.".to_string(),
-/// );
-///
-/// // Check for a specific one.
-/// // When collections store owned values (String), they can still be
-/// // queried using references (&str).
-/// if !book_reviews.contains_key("Les Misérables") {
-///     println!("We've got {} reviews, but Les Misérables ain't one.",
-///              book_reviews.len());
-/// }
-///
-/// // oops, this review has a lot of spelling mistakes, let's delete it.
-/// book_reviews.remove("The Adventures of Sherlock Holmes");
-///
-/// // Look up the values associated with some keys.
-/// let to_find = ["Pride and Prejudice", "Alice's Adventure in Wonderland"];
-/// for &book in &to_find {
-///     match book_reviews.get(book) {
-///         Some(review) => println!("{}: {}", book, review),
-///         None => println!("{} is unreviewed.", book)
-///     }
-/// }
-///
-/// // Look up the value for a key (will panic if the key is not found).
-/// println!("Review for Jane: {}", book_reviews["Pride and Prejudice"]);
-///
-/// // Iterate over everything.
-/// for (book, review) in &book_reviews {
-///     println!("{}: \"{}\"", book, review);
-/// }
-/// ```
-///
-/// `HashMap` also implements an [`Entry API`](#method.entry), which allows
-/// for more complex methods of getting, setting, updating and removing keys and
-/// their values:
-///
-/// ```
-/// use hashbrown::HashMap;
-///
-/// // type inference lets us omit an explicit type signature (which
-/// // would be `HashMap<&str, u8>` in this example).
-/// let mut player_stats = HashMap::new();
-///
-/// fn random_stat_buff() -> u8 {
-///     // could actually return some random value here - let's just return
-///     // some fixed value for now
-///     42
-/// }
-///
-/// // insert a key only if it doesn't already exist
-/// player_stats.entry("health").or_insert(100);
-///
-/// // insert a key using a function that provides a new value only if it
-/// // doesn't already exist
-/// player_stats.entry("defence").or_insert_with(random_stat_buff);
-///
-/// // update a key, guarding against the key possibly not being set
-/// let stat = player_stats.entry("attack").or_insert(100);
-/// *stat += random_stat_buff();
-/// ```
-///
-/// The easiest way to use `HashMap` with a custom key type is to derive [`Eq`] and [`Hash`].
-/// We must also derive [`PartialEq`].
-///
-/// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
-/// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
-/// [`PartialEq`]: https://doc.rust-lang.org/std/cmp/trait.PartialEq.html
-/// [`RefCell`]: https://doc.rust-lang.org/std/cell/struct.RefCell.html
-/// [`Cell`]: https://doc.rust-lang.org/std/cell/struct.Cell.html
-/// [`default`]: #method.default
-/// [`with_hasher`]: #method.with_hasher
-/// [`with_capacity_and_hasher`]: #method.with_capacity_and_hasher
-/// [`fnv`]: https://crates.io/crates/fnv
-///
-/// ```
-/// use hashbrown::HashMap;
-///
-/// #[derive(Hash, Eq, PartialEq, Debug)]
-/// struct Viking {
-///     name: String,
-///     country: String,
-/// }
-///
-/// impl Viking {
-///     /// Create a new Viking.
-///     fn new(name: &str, country: &str) -> Viking {
-///         Viking { name: name.to_string(), country: country.to_string() }
-///     }
-/// }
-///
-/// // Use a HashMap to store the vikings' health points.
-/// let mut vikings = HashMap::new();
-///
-/// vikings.insert(Viking::new("Einar", "Norway"), 25);
-/// vikings.insert(Viking::new("Olaf", "Denmark"), 24);
-/// vikings.insert(Viking::new("Harald", "Iceland"), 12);
-///
-/// // Use derived implementation to print the status of the vikings.
-/// for (viking, health) in &vikings {
-///     println!("{:?} has {} hp", viking, health);
-/// }
-/// ```
-///
-/// A `HashMap` with fixed list of elements can be initialized from an array:
-///
-/// ```
-/// use hashbrown::HashMap;
-///
-/// fn main() {
-///     let timber_resources: HashMap<&str, i32> =
-///     [("Norway", 100),
-///      ("Denmark", 50),
-///      ("Iceland", 10)]
-///      .iter().cloned().collect();
-///     // use the values stored in map
-/// }
-/// ```
-
-#[derive(Clone)]
-pub struct HashMap<K, V, S = DefaultHashBuilder> {
-    hash_builder: S,
-    pub(crate) table: RawTable<(K, V)>,
-}
-
-#[inline]
-fn make_hash<K: Hash + ?Sized>(hash_builder: &impl BuildHasher, val: &K) -> u64 {
-    let mut state = hash_builder.build_hasher();
-    val.hash(&mut state);
-    state.finish()
-}
-
-impl<K: Hash + Eq, V> HashMap<K, V, DefaultHashBuilder> {
-    /// Creates an empty `HashMap`.
-    ///
-    /// The hash map is initially created with a capacity of 0, so it will not allocate until it
-    /// is first inserted into.
-    ///
-    /// # Examples
-    ///
-    /// ```
-    /// use hashbrown::HashMap;
-    /// let mut map: HashMap<&str, i32> = HashMap::new();
-    /// ```
-    #[inline]
-    pub fn new() -> HashMap<K, V, DefaultHashBuilder> {
-        Default::default()
-    }
-
-    /// Creates an empty `HashMap` with the specified capacity.
-    ///
-    /// The hash map will be able to hold at least `capacity` elements without
-    /// reallocating. If `capacity` is 0, the hash map will not allocate.
-    ///
-    /// # Examples
-    ///
-    /// ```
-    /// use hashbrown::HashMap;
-    /// let mut map: HashMap<&str, i32> = HashMap::with_capacity(10);
-    /// ```
-    #[inline]
-    pub fn with_capacity(capacity: usize) -> HashMap<K, V, DefaultHashBuilder> {
-        HashMap::with_capacity_and_hasher(capacity, Default::default())
-    }
-}
-
-impl<K, V, S> HashMap<K, V, S>
-where
-    K: Eq + Hash,
-    S: BuildHasher,
-{
-    /// Creates an empty `HashMap` which will use the given hash builder to hash
-    /// keys.
-    ///
-    /// The created map has the default initial capacity.
-    ///
-    /// Warning: `hash_builder` is normally randomly generated, and
-    /// is designed to allow HashMaps to be resistant to attacks that
-    /// cause many collisions and very poor performance. Setting it
-    /// manually using this function can expose a DoS attack vector.
-    ///
-    /// # Examples
-    ///
-    /// ```
-    /// use hashbrown::HashMap;
-    /// use hashbrown::hash_map::DefaultHashBuilder;
-    ///
-    /// let s = DefaultHashBuilder::default();
-    /// let mut map = HashMap::with_hasher(s);
-    /// map.insert(1, 2);
-    /// ```
-    #[inline]
-    pub fn with_hasher(hash_builder: S) -> HashMap<K, V, S> {
-        HashMap {
-            hash_builder,
-            table: RawTable::new(),
-        }
-    }
-
-    /// Creates an empty `HashMap` with the specified capacity, using `hash_builder`
-    /// to hash the keys.
-    ///
-    /// The hash map will be able to hold at least `capacity` elements without
-    /// reallocating. If `capacity` is 0, the hash map will not allocate.
-    ///
-    /// Warning: `hash_builder` is normally randomly generated, and
-    /// is designed to allow HashMaps to be resistant to attacks that
-    /// cause many collisions and very poor performance. Setting it
-    /// manually using this function can expose a DoS attack vector.
-    ///
-    /// # Examples
-    ///
-    /// ```
-    /// use hashbrown::HashMap;
-    /// use hashbrown::hash_map::DefaultHashBuilder;
-    ///
-    /// let s = DefaultHashBuilder::default();
-    /// let mut map = HashMap::with_capacity_and_hasher(10, s);
-    /// map.insert(1, 2);
-    /// ```
-    #[inline]
-    pub fn with_capacity_and_hasher(capacity: usize, hash_builder: S) -> HashMap<K, V, S> {
-        HashMap {
-            hash_builder,
-            table: RawTable::with_capacity(capacity),
-        }
-    }
-
-    /// Returns a reference to the map's [`BuildHasher`].
-    ///
-    /// [`BuildHasher`]: https://doc.rust-lang.org/std/hash/trait.BuildHasher.html
-    ///
-    /// # Examples
-    ///
-    /// ```
-    /// use hashbrown::HashMap;
-    /// use hashbrown::hash_map::DefaultHashBuilder;
-    ///
-    /// let hasher = DefaultHashBuilder::default();
-    /// let map: HashMap<i32, i32> = HashMap::with_hasher(hasher);
-    /// let hasher: &DefaultHashBuilder = map.hasher();
-    /// ```
-    #[inline]
-    pub fn hasher(&self) -> &S {
-        &self.hash_builder
-    }
-
-    /// Returns the number of elements the map can hold without reallocating.
-    ///
-    /// This number is a lower bound; the `HashMap<K, V>` might be able to hold
-    /// more, but is guaranteed to be able to hold at least this many.
-    ///
-    /// # Examples
-    ///
-    /// ```
-    /// use hashbrown::HashMap;
-    /// let map: HashMap<i32, i32> = HashMap::with_capacity(100);
-    /// assert!(map.capacity() >= 100);
-    /// ```
-    #[inline]
-    pub fn capacity(&self) -> usize {
-        self.table.capacity()
-    }
-
-    /// Reserves capacity for at least `additional` more elements to be inserted
-    /// in the `HashMap`. The collection may reserve more space to avoid
-    /// frequent reallocations.
-    ///
-    /// # Panics
-    ///
-    /// Panics if the new allocation size overflows [`usize`].
-    ///
-    /// [`usize`]: https://doc.rust-lang.org/std/primitive.usize.html
-    ///
-    /// # Examples
-    ///
-    /// ```
-    /// use hashbrown::HashMap;
-    /// let mut map: HashMap<&str, i32> = HashMap::new();
-    /// map.reserve(10);
-    /// ```
-    #[inline]
-    pub fn reserve(&mut self, additional: usize) {
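-        // The raw table cannot hash keys itself, so pass it a closure that
-        // recomputes each element's hash in case the reserve triggers a resize.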
-        let hash_builder = &self.hash_builder;
-        self.table
-            .reserve(additional, |x| make_hash(hash_builder, &x.0));
-    }
-
-    /// Tries to reserve capacity for at least `additional` more elements to be inserted
-    /// in the given `HashMap<K,V>`. The collection may reserve more space to avoid
-    /// frequent reallocations.
-    ///
-    /// # Errors
-    ///
-    /// If the capacity overflows, or the allocator reports a failure, then an error
-    /// is returned.
-    ///
-    /// # Examples
-    ///
-    /// ```
-    /// use hashbrown::HashMap;
-    /// let mut map: HashMap<&str, isize> = HashMap::new();
-    /// map.try_reserve(10).expect("why is the test harness OOMing on 10 bytes?");
-    /// ```
-    #[inline]
-    pub fn try_reserve(&mut self, additional: usize) -> Result<(), CollectionAllocErr> {
-        let hash_builder = &self.hash_builder;
-        self.table
-            .try_reserve(additional, |x| make_hash(hash_builder, &x.0))
-    }
-
-    /// Shrinks the capacity of the map as much as possible. It will drop
-    /// down as much as possible while maintaining the internal rules
-    /// and possibly leaving some space in accordance with the resize policy.
-    ///
-    /// # Examples
-    ///
-    /// ```
-    /// use hashbrown::HashMap;
-    ///
-    /// let mut map: HashMap<i32, i32> = HashMap::with_capacity(100);
-    /// map.insert(1, 2);
-    /// map.insert(3, 4);
-    /// assert!(map.capacity() >= 100);
-    /// map.shrink_to_fit();
-    /// assert!(map.capacity() >= 2);
-    /// ```
-    #[inline]
-    pub fn shrink_to_fit(&mut self) {
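-        // A minimum capacity of 0 lets the table pick the smallest capacity
-        // that still holds the current elements.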
-        let hash_builder = &self.hash_builder;
-        self.table.shrink_to(0, |x| make_hash(hash_builder, &x.0));
-    }
-
-    /// Shrinks the capacity of the map with a lower limit. It will drop
-    /// down no lower than the supplied limit while maintaining the internal rules
-    /// and possibly leaving some space in accordance with the resize policy.
-    ///
-    /// # Panics
-    ///
-    /// Panics if the current capacity is smaller than the supplied
-    /// minimum capacity.
-    ///
-    /// # Examples
-    ///
-    /// ```
-    /// use hashbrown::HashMap;
-    ///
-    /// let mut map: HashMap<i32, i32> = HashMap::with_capacity(100);
-    /// map.insert(1, 2);
-    /// map.insert(3, 4);
-    /// assert!(map.capacity() >= 100);
-    /// map.shrink_to(10);
-    /// assert!(map.capacity() >= 10);
-    /// map.shrink_to(0);
-    /// assert!(map.capacity() >= 2);
-    /// ```
-    #[inline]
-    pub fn shrink_to(&mut self, min_capacity: usize) {
-        assert!(
-            self.capacity() >= min_capacity,
-            "Tried to shrink to a larger capacity"
-        );
-
-        let hash_builder = &self.hash_builder;
-        self.table
-            .shrink_to(min_capacity, |x| make_hash(hash_builder, &x.0));
-    }
-
-    /// An iterator visiting all keys in arbitrary order.
-    /// The iterator element type is `&'a K`.
-    ///
-    /// # Examples
-    ///
-    /// ```
-    /// use hashbrown::HashMap;
-    ///
-    /// let mut map = HashMap::new();
-    /// map.insert("a", 1);
-    /// map.insert("b", 2);
-    /// map.insert("c", 3);
-    ///
-    /// for key in map.keys() {
-    ///     println!("{}", key);
-    /// }
-    /// ```
-    #[inline]
-    pub fn keys(&self) -> Keys<K, V> {
-        Keys { inner: self.iter() }
-    }
-
-    /// An iterator visiting all values in arbitrary order.
-    /// The iterator element type is `&'a V`.
-    ///
-    /// # Examples
-    ///
-    /// ```
-    /// use hashbrown::HashMap;
-    ///
-    /// let mut map = HashMap::new();
-    /// map.insert("a", 1);
-    /// map.insert("b", 2);
-    /// map.insert("c", 3);
-    ///
-    /// for val in map.values() {
-    ///     println!("{}", val);
-    /// }
-    /// ```
-    #[inline]
-    pub fn values(&self) -> Values<K, V> {
-        Values { inner: self.iter() }
-    }
-
-    /// An iterator visiting all values mutably in arbitrary order.
-    /// The iterator element type is `&'a mut V`.
-    ///
-    /// # Examples
-    ///
-    /// ```
-    /// use hashbrown::HashMap;
-    ///
-    /// let mut map = HashMap::new();
-    ///
-    /// map.insert("a", 1);
-    /// map.insert("b", 2);
-    /// map.insert("c", 3);
-    ///
-    /// for val in map.values_mut() {
-    ///     *val += 10;
-    /// }
-    ///
-    /// for val in map.values() {
-    ///     println!("{}", val);
-    /// }
-    /// ```
-    #[inline]
-    pub fn values_mut(&mut self) -> ValuesMut<K, V> {
-        ValuesMut {
-            inner: self.iter_mut(),
-        }
-    }
-
-    /// An iterator visiting all key-value pairs in arbitrary order.
-    /// The iterator element type is `(&'a K, &'a V)`.
-    ///
-    /// # Examples
-    ///
-    /// ```
-    /// use hashbrown::HashMap;
-    ///
-    /// let mut map = HashMap::new();
-    /// map.insert("a", 1);
-    /// map.insert("b", 2);
-    /// map.insert("c", 3);
-    ///
-    /// for (key, val) in map.iter() {
-    ///     println!("key: {} val: {}", key, val);
-    /// }
-    /// ```
-    #[inline]
-    pub fn iter(&self) -> Iter<K, V> {
-        // Here we tie the lifetime of self to the iter.
-        unsafe {
-            Iter {
-                inner: self.table.iter(),
-                _marker: PhantomData,
-            }
-        }
-    }
-
-    /// An iterator visiting all key-value pairs in arbitrary order,
-    /// with mutable references to the values.
-    /// The iterator element type is `(&'a K, &'a mut V)`.
-    ///
-    /// # Examples
-    ///
-    /// ```
-    /// use hashbrown::HashMap;
-    ///
-    /// let mut map = HashMap::new();
-    /// map.insert("a", 1);
-    /// map.insert("b", 2);
-    /// map.insert("c", 3);
-    ///
-    /// // Update all values
-    /// for (_, val) in map.iter_mut() {
-    ///     *val *= 2;
-    /// }
-    ///
-    /// for (key, val) in &map {
-    ///     println!("key: {} val: {}", key, val);
-    /// }
-    /// ```
-    #[inline]
-    pub fn iter_mut(&mut self) -> IterMut<K, V> {
-        // Here we tie the lifetime of self to the iter.
-        unsafe {
-            IterMut {
-                inner: self.table.iter(),
-                _marker: PhantomData,
-            }
-        }
-    }
-
-    /// Gets the given key's corresponding entry in the map for in-place manipulation.
-    ///
-    /// # Examples
-    ///
-    /// ```
-    /// use hashbrown::HashMap;
-    ///
-    /// let mut letters = HashMap::new();
-    ///
-    /// for ch in "a short treatise on fungi".chars() {
-    ///     let counter = letters.entry(ch).or_insert(0);
-    ///     *counter += 1;
-    /// }
-    ///
-    /// assert_eq!(letters[&'s'], 2);
-    /// assert_eq!(letters[&'t'], 3);
-    /// assert_eq!(letters[&'u'], 1);
-    /// assert_eq!(letters.get(&'y'), None);
-    /// ```
-    #[inline]
-    pub fn entry(&mut self, key: K) -> Entry<K, V, S> {
-        let hash = make_hash(&self.hash_builder, &key);
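-        // If a matching bucket exists, return an occupied entry pointing at it;
-        // otherwise return a vacant entry that keeps the computed hash so a
-        // later insertion does not have to rehash the key.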
-        if let Some(elem) = self.table.find(hash, |q| q.0.eq(&key)) {
-            Entry::Occupied(OccupiedEntry {
-                key: Some(key),
-                elem,
-                table: self,
-            })
-        } else {
-            Entry::Vacant(VacantEntry {
-                hash,
-                key,
-                table: self,
-            })
-        }
-    }
-
-    #[cfg(test)]
-    #[inline]
-    fn raw_capacity(&self) -> usize {
-        self.table.buckets()
-    }
-
-    /// Returns the number of elements in the map.
-    ///
-    /// # Examples
-    ///
-    /// ```
-    /// use hashbrown::HashMap;
-    ///
-    /// let mut a = HashMap::new();
-    /// assert_eq!(a.len(), 0);
-    /// a.insert(1, "a");
-    /// assert_eq!(a.len(), 1);
-    /// ```
-    #[inline]
-    pub fn len(&self) -> usize {
-        self.table.len()
-    }
-
-    /// Returns true if the map contains no elements.
-    ///
-    /// # Examples
-    ///
-    /// ```
-    /// use hashbrown::HashMap;
-    ///
-    /// let mut a = HashMap::new();
-    /// assert!(a.is_empty());
-    /// a.insert(1, "a");
-    /// assert!(!a.is_empty());
-    /// ```
-    #[inline]
-    pub fn is_empty(&self) -> bool {
-        self.len() == 0
-    }
-
-    /// Clears the map, returning all key-value pairs as an iterator. Keeps the
-    /// allocated memory for reuse.
-    ///
-    /// # Examples
-    ///
-    /// ```
-    /// use hashbrown::HashMap;
-    ///
-    /// let mut a = HashMap::new();
-    /// a.insert(1, "a");
-    /// a.insert(2, "b");
-    ///
-    /// for (k, v) in a.drain().take(1) {
-    ///     assert!(k == 1 || k == 2);
-    ///     assert!(v == "a" || v == "b");
-    /// }
-    ///
-    /// assert!(a.is_empty());
-    /// ```
-    #[inline]
-    pub fn drain(&mut self) -> Drain<K, V> {
-        // Here we tie the lifetime of self to the iter.
-        unsafe {
-            Drain {
-                inner: self.table.drain(),
-            }
-        }
-    }
-
-    /// Clears the map, removing all key-value pairs. Keeps the allocated memory
-    /// for reuse.
-    ///
-    /// # Examples
-    ///
-    /// ```
-    /// use hashbrown::HashMap;
-    ///
-    /// let mut a = HashMap::new();
-    /// a.insert(1, "a");
-    /// a.clear();
-    /// assert!(a.is_empty());
-    /// ```
-    #[inline]
-    pub fn clear(&mut self) {
-        self.table.clear();
-    }
-
-    /// Returns a reference to the value corresponding to the key.
-    ///
-    /// The key may be any borrowed form of the map's key type, but
-    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
-    /// the key type.
-    ///
-    /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
-    /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
-    ///
-    /// # Examples
-    ///
-    /// ```
-    /// use hashbrown::HashMap;
-    ///
-    /// let mut map = HashMap::new();
-    /// map.insert(1, "a");
-    /// assert_eq!(map.get(&1), Some(&"a"));
-    /// assert_eq!(map.get(&2), None);
-    /// ```
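-    ///
-    /// For example, since `String` implements `Borrow<str>` in the standard
-    /// library, a map keyed by `String` can be looked up with a plain `&str`:
-    ///
-    /// ```
-    /// use hashbrown::HashMap;
-    ///
-    /// let mut map = HashMap::new();
-    /// map.insert("key".to_string(), 1);
-    /// // `String: Borrow<str>` lets a `&str` serve as the lookup key.
-    /// assert_eq!(map.get("key"), Some(&1));
-    /// ```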
-    #[inline]
-    pub fn get<Q: ?Sized>(&self, k: &Q) -> Option<&V>
-    where
-        K: Borrow<Q>,
-        Q: Hash + Eq,
-    {
-        self.get_key_value(k).map(|(_, v)| v)
-    }
-
-    /// Returns the key-value pair corresponding to the supplied key.
-    ///
-    /// The supplied key may be any borrowed form of the map's key type, but
-    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
-    /// the key type.
-    ///
-    /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
-    /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
-    ///
-    /// # Examples
-    ///
-    /// ```
-    /// use hashbrown::HashMap;
-    ///
-    /// let mut map = HashMap::new();
-    /// map.insert(1, "a");
-    /// assert_eq!(map.get_key_value(&1), Some((&1, &"a")));
-    /// assert_eq!(map.get_key_value(&2), None);
-    /// ```
-    #[inline]
-    pub fn get_key_value<Q: ?Sized>(&self, k: &Q) -> Option<(&K, &V)>
-    where
-        K: Borrow<Q>,
-        Q: Hash + Eq,
-    {
-        let hash = make_hash(&self.hash_builder, k);
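-        // `find` yields a raw bucket; dereferencing it below is safe because
-        // the bucket stays valid for as long as `&self` keeps the table borrowed.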
-        self.table
-            .find(hash, |x| k.eq(x.0.borrow()))
-            .map(|item| unsafe {
-                let &(ref key, ref value) = item.as_ref();
-                (key, value)
-            })
-    }
-
-    /// Returns true if the map contains a value for the specified key.
-    ///
-    /// The key may be any borrowed form of the map's key type, but
-    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
-    /// the key type.
-    ///
-    /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
-    /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
-    ///
-    /// # Examples
-    ///
-    /// ```
-    /// use hashbrown::HashMap;
-    ///
-    /// let mut map = HashMap::new();
-    /// map.insert(1, "a");
-    /// assert_eq!(map.contains_key(&1), true);
-    /// assert_eq!(map.contains_key(&2), false);
-    /// ```
-    #[inline]
-    pub fn contains_key<Q: ?Sized>(&self, k: &Q) -> bool
-    where
-        K: Borrow<Q>,
-        Q: Hash + Eq,
-    {
-        self.get(k).is_some()
-    }
-
-    /// Returns a mutable reference to the value corresponding to the key.
-    ///
-    /// The key may be any borrowed form of the map's key type, but
-    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
-    /// the key type.
-    ///
-    /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
-    /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
-    ///
-    /// # Examples
-    ///
-    /// ```
-    /// use hashbrown::HashMap;
-    ///
-    /// let mut map = HashMap::new();
-    /// map.insert(1, "a");
-    /// if let Some(x) = map.get_mut(&1) {
-    ///     *x = "b";
-    /// }
-    /// assert_eq!(map[&1], "b");
-    /// ```
-    #[inline]
-    pub fn get_mut<Q: ?Sized>(&mut self, k: &Q) -> Option<&mut V>
-    where
-        K: Borrow<Q>,
-        Q: Hash + Eq,
-    {
-        let hash = make_hash(&self.hash_builder, k);
-        self.table
-            .find(hash, |x| k.eq(x.0.borrow()))
-            .map(|item| unsafe { &mut item.as_mut().1 })
-    }
-
-    /// Inserts a key-value pair into the map.
-    ///
-    /// If the map did not have this key present, [`None`] is returned.
-    ///
-    /// If the map did have this key present, the value is updated, and the old
-    /// value is returned. The key is not updated, though; this matters for
-    /// types that can be `==` without being identical. See the [module-level
-    /// documentation] for more.
-    ///
-    /// [`None`]: https://doc.rust-lang.org/std/option/enum.Option.html#variant.None
-    /// [module-level documentation]: index.html#insert-and-complex-keys
-    ///
-    /// # Examples
-    ///
-    /// ```