No bug: Update bindgen to 0.33. r=nox on a CLOSED TREE
author: Emilio Cobos Álvarez <emilio@crisal.io>
date: Fri, 02 Mar 2018 20:28:26 +0100
changeset: 461379 14a28deeba4dadfbdec9adec0e2ff59bc65527f5
parent: 461378 d301d4626305b8db02a62f57426cdc781c89325c
child: 461380 e2d5ccd7274fa8ba578e413b0452217058f0b885
push id: 1683
push user: sfraser@mozilla.com
push date: Thu, 26 Apr 2018 16:43:40 +0000
treeherder: mozilla-release@5af6cb21869d
reviewers: nox
milestone: 60.0a1
No bug: Update bindgen to 0.33. r=nox on a CLOSED TREE MozReview-Commit-ID: 4PgOmtt3zrg
js/rust/Cargo.lock
js/rust/Cargo.toml
third_party/rust/bindgen/.cargo-checksum.json
third_party/rust/bindgen/Cargo.toml
third_party/rust/bindgen/src/callbacks.rs
third_party/rust/bindgen/src/codegen/helpers.rs
third_party/rust/bindgen/src/codegen/impl_partialeq.rs
third_party/rust/bindgen/src/codegen/mod.rs
third_party/rust/bindgen/src/codegen/struct_layout.rs
third_party/rust/bindgen/src/ir/analysis/derive_copy.rs
third_party/rust/bindgen/src/ir/analysis/derive_default.rs
third_party/rust/bindgen/src/ir/analysis/derive_hash.rs
third_party/rust/bindgen/src/ir/analysis/derive_partialeq_or_partialord.rs
third_party/rust/bindgen/src/ir/comp.rs
third_party/rust/bindgen/src/ir/context.rs
third_party/rust/bindgen/src/ir/enum_ty.rs
third_party/rust/bindgen/src/ir/function.rs
third_party/rust/bindgen/src/ir/objc.rs
third_party/rust/bindgen/src/ir/var.rs
third_party/rust/bindgen/src/lib.rs
third_party/rust/bindgen/src/main.rs
third_party/rust/bindgen/src/options.rs
third_party/rust/cfg-if-0.1.1/.cargo-checksum.json
third_party/rust/cfg-if-0.1.1/.travis.yml
third_party/rust/cfg-if-0.1.1/Cargo.toml
third_party/rust/cfg-if-0.1.1/LICENSE-APACHE
third_party/rust/cfg-if-0.1.1/LICENSE-MIT
third_party/rust/cfg-if-0.1.1/README.md
third_party/rust/cfg-if-0.1.1/src/lib.rs
third_party/rust/cfg-if-0.1.1/tests/xcrate.rs
third_party/rust/env_logger-0.4.3/.cargo-checksum.json
third_party/rust/env_logger-0.4.3/Cargo.toml
third_party/rust/env_logger-0.4.3/LICENSE-APACHE
third_party/rust/env_logger-0.4.3/LICENSE-MIT
third_party/rust/env_logger-0.4.3/src/lib.rs
third_party/rust/env_logger-0.4.3/src/regex.rs
third_party/rust/env_logger-0.4.3/src/string.rs
third_party/rust/env_logger-0.4.3/tests/regexp_filter.rs
third_party/rust/env_logger/.cargo-checksum.json
third_party/rust/env_logger/.travis.yml
third_party/rust/env_logger/Cargo.toml
third_party/rust/env_logger/README.md
third_party/rust/env_logger/examples/custom_format.rs
third_party/rust/env_logger/examples/custom_logger.rs
third_party/rust/env_logger/examples/default.rs
third_party/rust/env_logger/examples/direct_logger.rs
third_party/rust/env_logger/src/filter/mod.rs
third_party/rust/env_logger/src/filter/regex.rs
third_party/rust/env_logger/src/filter/string.rs
third_party/rust/env_logger/src/fmt.rs
third_party/rust/env_logger/src/lib.rs
third_party/rust/env_logger/src/regex.rs
third_party/rust/env_logger/src/string.rs
third_party/rust/env_logger/tests/log-in-log.rs
third_party/rust/env_logger/tests/regexp_filter.rs
third_party/rust/humantime/.cargo-checksum.json
third_party/rust/humantime/.travis.yml
third_party/rust/humantime/Cargo.toml
third_party/rust/humantime/LICENSE-APACHE
third_party/rust/humantime/LICENSE-MIT
third_party/rust/humantime/README.md
third_party/rust/humantime/benches/datetime_format.rs
third_party/rust/humantime/benches/datetime_parse.rs
third_party/rust/humantime/bulk.yaml
third_party/rust/humantime/src/date.rs
third_party/rust/humantime/src/duration.rs
third_party/rust/humantime/src/lib.rs
third_party/rust/humantime/src/wrapper.rs
third_party/rust/humantime/vagga.yaml
third_party/rust/proc-macro2-0.2.2/.cargo-checksum.json
third_party/rust/proc-macro2-0.2.2/.travis.yml
third_party/rust/proc-macro2-0.2.2/Cargo.toml
third_party/rust/proc-macro2-0.2.2/LICENSE-APACHE
third_party/rust/proc-macro2-0.2.2/LICENSE-MIT
third_party/rust/proc-macro2-0.2.2/README.md
third_party/rust/proc-macro2-0.2.2/src/lib.rs
third_party/rust/proc-macro2-0.2.2/src/macros.rs
third_party/rust/proc-macro2-0.2.2/src/stable.rs
third_party/rust/proc-macro2-0.2.2/src/strnom.rs
third_party/rust/proc-macro2-0.2.2/src/unstable.rs
third_party/rust/proc-macro2-0.2.2/tests/test.rs
third_party/rust/proc-macro2/.cargo-checksum.json
third_party/rust/proc-macro2/.travis.yml
third_party/rust/proc-macro2/Cargo.toml
third_party/rust/proc-macro2/src/lib.rs
third_party/rust/proc-macro2/src/stable.rs
third_party/rust/proc-macro2/src/strnom.rs
third_party/rust/proc-macro2/tests/test.rs
third_party/rust/quick-error/.cargo-checksum.json
third_party/rust/quick-error/.travis.yml
third_party/rust/quick-error/Cargo.toml
third_party/rust/quick-error/LICENSE-APACHE
third_party/rust/quick-error/LICENSE-MIT
third_party/rust/quick-error/README.rst
third_party/rust/quick-error/bulk.yaml
third_party/rust/quick-error/examples/context.rs
third_party/rust/quick-error/src/lib.rs
third_party/rust/quick-error/vagga.yaml
third_party/rust/termcolor/.cargo-checksum.json
third_party/rust/termcolor/COPYING
third_party/rust/termcolor/Cargo.toml
third_party/rust/termcolor/LICENSE-MIT
third_party/rust/termcolor/README.md
third_party/rust/termcolor/UNLICENSE
third_party/rust/termcolor/src/lib.rs
third_party/rust/wincolor/.cargo-checksum.json
third_party/rust/wincolor/COPYING
third_party/rust/wincolor/Cargo.toml
third_party/rust/wincolor/LICENSE-MIT
third_party/rust/wincolor/README.md
third_party/rust/wincolor/UNLICENSE
third_party/rust/wincolor/src/lib.rs
third_party/rust/wincolor/src/win.rs
toolkit/library/gtest/rust/Cargo.lock
toolkit/library/rust/Cargo.lock
--- a/js/rust/Cargo.lock
+++ b/js/rust/Cargo.lock
@@ -18,29 +18,28 @@ source = "registry+https://github.com/ru
 dependencies = [
  "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "libc 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)",
  "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "bindgen"
-version = "0.32.3"
+version = "0.33.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "cexpr 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "cfg-if 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "clang-sys 0.21.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "clap 2.29.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "env_logger 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)",
  "lazy_static 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "peeking_take_while 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "proc-macro2 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)",
  "regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "which 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "bitflags"
 version = "1.0.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -50,17 +49,17 @@ name = "cexpr"
 version = "0.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "nom 1.2.4 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "cfg-if"
-version = "0.1.1"
+version = "0.1.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
 name = "clang-sys"
 version = "0.21.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -95,30 +94,50 @@ name = "env_logger"
 version = "0.4.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
  "regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
+name = "env_logger"
+version = "0.5.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "atty 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "humantime 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "termcolor 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
 name = "gcc"
 version = "0.3.54"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
 name = "glob"
 version = "0.2.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
+name = "humantime"
+version = "1.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "quick-error 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
 name = "js"
 version = "0.1.4"
 dependencies = [
- "bindgen 0.32.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "bindgen 0.33.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "cmake 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)",
  "env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
  "lazy_static 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
  "libc 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)",
  "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
  "mozjs_sys 0.0.0",
  "num-traits 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -170,16 +189,24 @@ dependencies = [
 ]
 
 [[package]]
 name = "log"
 version = "0.3.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
+name = "log"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
 name = "memchr"
 version = "1.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "libc 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
@@ -215,30 +242,24 @@ version = "0.1.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
 name = "pkg-config"
 version = "0.3.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
-name = "proc-macro2"
-version = "0.2.3"
+name = "quick-error"
+version = "1.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
-]
 
 [[package]]
 name = "quote"
-version = "0.4.2"
+version = "0.3.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "proc-macro2 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
-]
 
 [[package]]
 name = "regex"
 version = "0.2.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "aho-corasick 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "memchr 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -253,16 +274,24 @@ version = "0.4.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
 name = "strsim"
 version = "0.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
+name = "termcolor"
+version = "0.3.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "wincolor 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
 name = "textwrap"
 version = "0.9.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
@@ -284,21 +313,16 @@ dependencies = [
 ]
 
 [[package]]
 name = "unicode-width"
 version = "0.1.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
-name = "unicode-xid"
-version = "0.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
 name = "unreachable"
 version = "0.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
@@ -330,57 +354,91 @@ dependencies = [
 ]
 
 [[package]]
 name = "winapi"
 version = "0.2.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
+name = "winapi"
+version = "0.3.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
 name = "winapi-build"
 version = "0.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
+[[package]]
+name = "winapi-i686-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "winapi-x86_64-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "wincolor"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "winapi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [metadata]
 "checksum aho-corasick 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)" = "500909c4f87a9e52355b26626d890833e9e1d53ac566db76c36faa984b889699"
 "checksum ansi_term 0.10.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6b3568b48b7cefa6b8ce125f9bb4989e52fbcc29ebea88df04cc7c5f12f70455"
 "checksum atty 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d912da0db7fa85514874458ca3651fe2cddace8d0b0505571dbdcd41ab490159"
-"checksum bindgen 0.32.3 (registry+https://github.com/rust-lang/crates.io-index)" = "8b242e11a8f446f5fc7b76b37e81d737cabca562a927bd33766dac55b5f1177f"
+"checksum bindgen 0.33.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1657d607dd7a8e10b3181149f60f3b27ea0eac81058c09a1c791b8f6ead91f19"
 "checksum bitflags 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b3c30d3802dfb7281680d6285f2ccdaa8c2d8fee41f93805dba5c4cf50dc23cf"
 "checksum cexpr 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "393a5f0088efbe41f9d1fcd062f24e83c278608420e62109feb2c8abee07de7d"
-"checksum cfg-if 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d0c47d456a36ebf0536a6705c83c1cbbcb9255fbc1d905a6ded104f479268a29"
+"checksum cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d4c819a1287eb618df47cc647173c5c4c66ba19d888a6e50d605672aed3140de"
 "checksum clang-sys 0.21.1 (registry+https://github.com/rust-lang/crates.io-index)" = "00048189ee171715296dfe3b2fcfd439563c7bfec0d98d3976ce3402d62c8f07"
 "checksum clap 2.29.0 (registry+https://github.com/rust-lang/crates.io-index)" = "110d43e343eb29f4f51c1db31beb879d546db27998577e5715270a54bcf41d3f"
 "checksum cmake 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)" = "b8ebbb35d3dc9cd09497168f33de1acb79b265d350ab0ac34133b98f8509af1f"
 "checksum env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3ddf21e73e016298f5cb37d6ef8e8da8e39f91f9ec8b0df44b7deb16a9f8cd5b"
+"checksum env_logger 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)" = "f3cc21490995c841d68e00276eba02071ebb269ec24011d5728bd00eabd39e31"
 "checksum gcc 0.3.54 (registry+https://github.com/rust-lang/crates.io-index)" = "5e33ec290da0d127825013597dbdfc28bee4964690c7ce1166cbc2a7bd08b1bb"
 "checksum glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "8be18de09a56b60ed0edf84bc9df007e30040691af7acd1c41874faac5895bfb"
+"checksum humantime 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0484fda3e7007f2a4a0d9c3a703ca38c71c54c55602ce4660c419fd32e188c9e"
 "checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
 "checksum lazy_static 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "76f033c7ad61445c5b347c7382dd1237847eb1bce590fe50365dcb33d546be73"
 "checksum lazy_static 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c8f31047daa365f19be14b47c29df4f7c3b581832407daabe6ae77397619237d"
 "checksum libc 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)" = "5ba3df4dcb460b9dfbd070d41c94c19209620c191b0340b929ce748a2bcd42d2"
 "checksum libloading 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "be99f814beb3e9503a786a592c909692bb6d4fc5a695f6ed7987223acfbd5194"
 "checksum libz-sys 1.0.16 (registry+https://github.com/rust-lang/crates.io-index)" = "3fdd64ef8ee652185674455c1d450b83cbc8ad895625d543b5324d923f82e4d8"
 "checksum log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)" = "880f77541efa6e5cc74e76910c9884d9859683118839d6a1dc3b11e63512565b"
+"checksum log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "89f010e843f2b1a31dbd316b3b8d443758bc634bed37aabade59c686d644e0a2"
 "checksum memchr 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "148fab2e51b4f1cfc66da2a7c32981d1d3c083a803978268bb11fe4b86925e7a"
 "checksum nom 1.2.4 (registry+https://github.com/rust-lang/crates.io-index)" = "a5b8c256fd9471521bcb84c3cdba98921497f1a331cbc15b8030fc63b82050ce"
 "checksum num-traits 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)" = "cacfcab5eb48250ee7d0c7896b51a2c5eec99c1feea5f32025635f5ae4b00070"
 "checksum num_cpus 1.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "514f0d73e64be53ff320680ca671b64fe3fb91da01e1ae2ddc99eb51d453b20d"
 "checksum peeking_take_while 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099"
 "checksum pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "3a8b4c6b8165cd1a1cd4b9b120978131389f64bdaf456435caa41e630edba903"
-"checksum proc-macro2 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "cd07deb3c6d1d9ff827999c7f9b04cdfd66b1b17ae508e14fe47b620f2282ae0"
-"checksum quote 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1eca14c727ad12702eb4b6bfb5a232287dcf8385cb8ca83a3eeaf6519c44c408"
+"checksum quick-error 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "eda5fe9b71976e62bc81b781206aaa076401769b2143379d3eb2118388babac4"
+"checksum quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e920b65c65f10b2ae65c831a81a073a89edd28c7cce89475bff467ab4167a"
 "checksum regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1731164734096285ec2a5ec7fea5248ae2f5485b3feeb0115af4fda2183b2d1b"
 "checksum regex-syntax 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ad890a5eef7953f55427c50575c680c42841653abd2b028b68cd223d157f62db"
 "checksum strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b4d15c810519a91cf877e7e36e63fe068815c678181439f2f29e2562147c3694"
+"checksum termcolor 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "56c456352e44f9f91f774ddeeed27c1ec60a2455ed66d692059acfb1d731bda1"
 "checksum textwrap 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c0b59b6b4b44d867f1370ef1bd91bfb262bf07bf0ae65c202ea2fbc16153b693"
 "checksum thread-id 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8df7875b676fddfadffd96deea3b1124e5ede707d4884248931077518cf1f773"
 "checksum thread_local 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c85048c6260d17cf486ceae3282d9fb6b90be220bf5b28c400f5485ffc29f0c7"
 "checksum unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "bf3a113775714a22dcb774d8ea3655c53a32debae63a063acc00a91cc586245f"
-"checksum unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc"
 "checksum unreachable 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1f2ae5ddb18e1c92664717616dd9549dde73f539f01bd7b77c2edb2446bdff91"
 "checksum utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "662fab6525a98beff2921d7f61a39e7d59e0b425ebc7d0d9e66d316e55124122"
 "checksum vcpkg 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9e0a7d8bed3178a8fb112199d466eeca9ed09a14ba8ad67718179b4fd5487d0b"
 "checksum vec_map 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "887b5b631c2ad01628bbbaa7dd4c869f80d3186688f8d0b6f58774fbe324988c"
 "checksum void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d"
 "checksum which 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "4be6cfa54dab45266e98b5d7be2f8ce959ddd49abd141a05d52dce4b07f803bb"
 "checksum winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a"
+"checksum winapi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "04e3bd221fcbe8a271359c04f21a76db7d0c6028862d1bb5512d85e1e2eb5bb3"
 "checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc"
+"checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
+"checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
+"checksum wincolor 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "eeb06499a3a4d44302791052df005d5232b927ed1a9658146d842165c4de7767"
--- a/js/rust/Cargo.toml
+++ b/js/rust/Cargo.toml
@@ -3,17 +3,17 @@ name = "js"
 version = "0.1.4"
 authors = ["The Servo Project Developers"]
 build = "build.rs"
 license = "MPL-2.0"
 
 [build-dependencies]
 env_logger = "0.4"
 log = "0.3"
-bindgen = "0.32.3"
+bindgen = "0.33.1"
 cmake = "0.1"
 glob = "0.2.11"
 
 [[test]]
 name = "callback"
 [[test]]
 name = "enumerate"
 [[test]]
--- a/third_party/rust/bindgen/.cargo-checksum.json
+++ b/third_party/rust/bindgen/.cargo-checksum.json
@@ -1,1 +1,1 @@
-{"files":{"Cargo.toml":"9194dcdbb9477b1f4311debaad3ad044f721be054574416734ba09d2d7194a81","build.rs":"032a1c51963894a421b0535f9227796d88768ac5f665a81d2edced69dc6d106a","src/callbacks.rs":"c5c4e5bc8c49cb191d1b100339772fdc7dd1dbf5025a9de1ecaafb70f86cb48f","src/clang.rs":"c816d5291ccdc1d4ba028ae817ef385048e1fb8443911d026af5c721a6d57854","src/codegen/bitfield_unit.rs":"bd1a19701f1766d0bae3bcb97d7c3cb3881d4b182c56b8f4dfd24b7cc87b5338","src/codegen/bitfield_unit_tests.rs":"2073ac6a36e0bc9afaef5b1207966817c8fb7a1a9f6368c3b1b8f79822efbfba","src/codegen/error.rs":"2613af1d833377fd4a70719f4a09951d9d45dc9227827b9a2a938a1bcaaea2dd","src/codegen/helpers.rs":"15407de2a521043d456fddc0934757a8da0a5decac3ecdf93b669b27fb6a6be2","src/codegen/impl_debug.rs":"e2ffd5b6ed936698aa4b9e7e3459d353383792707ad51f829a18a822f69cab0e","src/codegen/impl_partialeq.rs":"9f3c6aaa8da01ecad0c4d4d8996f6d20c00b8cf68d7620654b7d42d6a23725d9","src/codegen/mod.rs":"c7d933cab870d798bfa8fb6ea3d5768832c6e5fbae741994a3b6ce2c96b159ec","src/codegen/struct_layout.rs":"9edf0bbd0a3d17a93e188f9aba42bdf5bd938eca9c50e9c509977923eb16ef11","src/extra_assertions.rs":"449549c4a7a50c3f0b06332452b2fb6c9b23f31ca8e5e1656fe6c7f21e8ef7fa","src/features.rs":"8c2148a6f922ca9cb0de2dd3ad77c4dd5734c4c219a5bea9d6b22c4367acb187","src/ir/analysis/derive_copy.rs":"c8d700dcde2419d2687377a7cb1024ad3722bbcb83d0b1031618f57d484793b4","src/ir/analysis/derive_debug.rs":"1d6621c0fa5d899310cc175cb99703606ed34fd7f7ad77bb60f012f25ba504af","src/ir/analysis/derive_default.rs":"3af97eaa9bdc91a0bded060b393d0bb23ef9dcf59a7a6ed7d79814f35f73b66a","src/ir/analysis/derive_hash.rs":"c8a95040352b3d10f8edb46f6ae513d4f15ec87197668b1fc1d7cf2cb416054f","src/ir/analysis/derive_partialeq_or_partialord.rs":"1f7c097bde85ebb4082a3a318a753db13e8b4733fa1433dcc054f137dc0e6c2d","src/ir/analysis/has_destructor.rs":"d9aaaceba580b48eb0df4e5537b34b417c51ccdfeb8f6b72484f3bf4992317fe","src/ir/analysis/has_float.rs":"2a0465503d2c8247eaf916bd6a03594f3dc0370533d9a7c58cc5afb86693816c","src/ir/analysis/has_type_param_in_array.rs":"fcb1c78b6000f1f5eb8d8147e2afdaba9eb0e3a81b61e72537048dfdbeea7bcd","src/ir/analysis/has_vtable.rs":"37765e954ef792e369a58ccfe1d827a00fe9bce680466da1d6523671b94b6c92","src/ir/analysis/mod.rs":"ea5ace45c77e855674bb565ba0fef556f60e3293b0ddcf11d3a5a6ec15ab0648","src/ir/analysis/sizedness.rs":"3d3c8bde40604d53bb64273a3cbd8c55936a7dfe1de9b2ba92fc2c45572624b4","src/ir/analysis/template_params.rs":"5c6ee7a251a321ef5733e2e7ac3264621b4181268babcc008b69dbfc37691fb1","src/ir/annotations.rs":"ef106afcbe6084c18bd13a37ee3c1cdc9596bfb055db8c773d81f8f15fec3208","src/ir/comment.rs":"36f2a1d3970fdbf3d72c1f094043902747cde395215bdf7e9103926d9df011fd","src/ir/comp.rs":"461ae606b0f49638c1f78c627da4837949dd367d3589d92a7f7d05eca7028796","src/ir/context.rs":"870e2743cc7b547c04b20205354aa1c3b8c70f4d393db37288dd72ab6997104d","src/ir/derive.rs":"1fd6ad621e3c60b950acbd51fbe386d1f0fadb7c1889c723245afff45e42e143","src/ir/dot.rs":"eca03a493a7fe48044b832c37c71d51f1bd4e8689feb6f1cb8bc0422541c7284","src/ir/enum_ty.rs":"63cf8ba48858e2f02b58cd437d763cc18ddf6a841044cafdd51687c28ace1862","src/ir/function.rs":"2eb054ae04ef8067b020717c82d055c9f9853c8b31e14de6319be85656d88cf2","src/ir/int.rs":"1f61a472288afe489d9320bc8b13920333ece57891ae8570b4c4f25ab50688e6","src/ir/item.rs":"527b9a006dd1865cb77bac7e0e3826904c71b3b77679bcf7cd4ea63791dfa5ec","src/ir/item_kind.rs":"13048962657436a757ff9483d96f2ce351ec5f827ecab4921ed643f6f115c497","src/ir/layout.rs":"e3d1adf1ad2fa5bd96530cdd5097db3d9cc7b44d33ec23a04fcfccecd9cf4469","src/ir/mod.
rs":"2eae90f207fad2e45957ec9287064992a419e3fc916aba84faff2ea25cbeb5ee","src/ir/module.rs":"c4d90bf38fe3672e01923734ccbdb7951ea929949d5f413a9c2aee12395a5094","src/ir/objc.rs":"6516f60b71a8b652e618c64cd4eac6e8bfaa561a3c159cfc44d15bf216b1224f","src/ir/template.rs":"bcd750450a4df0200a6e7958f9c96a09b91e3ccd29c60712f2b9d3458f1234aa","src/ir/traversal.rs":"a108f2b2a1b3d2e679274eead8cf6f8fc78454e88943527d56bb30b5f071f104","src/ir/ty.rs":"4d5f1b2e11f6fb7df3348bdab7775cdfa209e136afe84a7b0e862068e53e7183","src/ir/var.rs":"f6eb7a29b262c2eb8615612f207f251097e4bcbcb523f53dcad174ec6d11f4ce","src/lib.rs":"439d0f5c4e2c6df76c71856fdbddf03657c677fe341760a4305f4a240b6fee2b","src/log_stubs.rs":"6dfdd908b7c6453da416cf232893768f9480e551ca4add0858ef88bf71ee6ceb","src/main.rs":"fa0a21b4aca4df72896f911de3ee252bec87abb4c871e39b53e90923181a1658","src/options.rs":"c31da7f76035a7831073e44773bfbe796840403b88db7ae58a49b688cd90aef6","src/parse.rs":"be7d13cc84fae79ec7b3aa9e77063fa475a48d74a854423e2c72d75006a25202","src/regex_set.rs":"a55241f2117f15729d174790f386e255fcb224b692325bbe6716dbb1d6874881","src/time.rs":"a02befb48d10dcc31e3f9571b2fa1c40f97fafe6f6ae7d7fc4f8fd01f1a169ba"},"package":"8b242e11a8f446f5fc7b76b37e81d737cabca562a927bd33766dac55b5f1177f"}
\ No newline at end of file
+{"files":{"Cargo.toml":"88154517e146758d019c015d3f194f364be77e927686c8018f0ff766cff9a756","build.rs":"032a1c51963894a421b0535f9227796d88768ac5f665a81d2edced69dc6d106a","src/callbacks.rs":"a8b6c8cc227d5a01406a431109520c0a0467777bae430c4b769a1305b820926c","src/clang.rs":"c816d5291ccdc1d4ba028ae817ef385048e1fb8443911d026af5c721a6d57854","src/codegen/bitfield_unit.rs":"bd1a19701f1766d0bae3bcb97d7c3cb3881d4b182c56b8f4dfd24b7cc87b5338","src/codegen/bitfield_unit_tests.rs":"2073ac6a36e0bc9afaef5b1207966817c8fb7a1a9f6368c3b1b8f79822efbfba","src/codegen/error.rs":"2613af1d833377fd4a70719f4a09951d9d45dc9227827b9a2a938a1bcaaea2dd","src/codegen/helpers.rs":"bcb951f320fd0948e341d8eabdb58567296d77bf1ae5040c05d967e6435a15d5","src/codegen/impl_debug.rs":"e2ffd5b6ed936698aa4b9e7e3459d353383792707ad51f829a18a822f69cab0e","src/codegen/impl_partialeq.rs":"e86050b98f57fa4496dbde0beea319a89e46290309d274f626361779549b95bd","src/codegen/mod.rs":"e154f6e30b29ce8fd075813a0052de363157a975446013b4032d4080dfd65cf3","src/codegen/struct_layout.rs":"3c4b2b4543c6aa1da03f7522ad7f831e95dc146cf5edd08ebd5facb1305daddb","src/extra_assertions.rs":"449549c4a7a50c3f0b06332452b2fb6c9b23f31ca8e5e1656fe6c7f21e8ef7fa","src/features.rs":"8c2148a6f922ca9cb0de2dd3ad77c4dd5734c4c219a5bea9d6b22c4367acb187","src/ir/analysis/derive_copy.rs":"14b53c53b337be00d59424371c07c6712d7cfc9a6735f9d913cda043fddce797","src/ir/analysis/derive_debug.rs":"1d6621c0fa5d899310cc175cb99703606ed34fd7f7ad77bb60f012f25ba504af","src/ir/analysis/derive_default.rs":"4fac04fc3019562cd213586680ecdcf8a3b3544ca3a5c5117f68e5c26e7ee0d9","src/ir/analysis/derive_hash.rs":"a50e849b4388115264c2d6afef5ab07e309d2469f4c3342fb683c799451e9e19","src/ir/analysis/derive_partialeq_or_partialord.rs":"46611c7f3caa0fe78243187742c4a36003dbc266de4c4390642e136bb889c43f","src/ir/analysis/has_destructor.rs":"d9aaaceba580b48eb0df4e5537b34b417c51ccdfeb8f6b72484f3bf4992317fe","src/ir/analysis/has_float.rs":"2a0465503d2c8247eaf916bd6a03594f3dc0370533d9a7c58cc5afb86693816c","src/ir/analysis/has_type_param_in_array.rs":"fcb1c78b6000f1f5eb8d8147e2afdaba9eb0e3a81b61e72537048dfdbeea7bcd","src/ir/analysis/has_vtable.rs":"37765e954ef792e369a58ccfe1d827a00fe9bce680466da1d6523671b94b6c92","src/ir/analysis/mod.rs":"ea5ace45c77e855674bb565ba0fef556f60e3293b0ddcf11d3a5a6ec15ab0648","src/ir/analysis/sizedness.rs":"3d3c8bde40604d53bb64273a3cbd8c55936a7dfe1de9b2ba92fc2c45572624b4","src/ir/analysis/template_params.rs":"5c6ee7a251a321ef5733e2e7ac3264621b4181268babcc008b69dbfc37691fb1","src/ir/annotations.rs":"ef106afcbe6084c18bd13a37ee3c1cdc9596bfb055db8c773d81f8f15fec3208","src/ir/comment.rs":"36f2a1d3970fdbf3d72c1f094043902747cde395215bdf7e9103926d9df011fd","src/ir/comp.rs":"d6811204a5cf68b4e9f1ef1dac80c7178394a18bc5165765e479925921671fa4","src/ir/context.rs":"7d28388430ad0587fab282ba50d3c536e58c53d84746ab557fe6d9dc0e917a01","src/ir/derive.rs":"1fd6ad621e3c60b950acbd51fbe386d1f0fadb7c1889c723245afff45e42e143","src/ir/dot.rs":"eca03a493a7fe48044b832c37c71d51f1bd4e8689feb6f1cb8bc0422541c7284","src/ir/enum_ty.rs":"05bfe0bc9abfee0f9692a443a9493b45b20ebf8275d15b1b4cba5c9cb63182b9","src/ir/function.rs":"a3ecac28cc75a61926a62f2082999c0ffe6ffeef2beb157367ade865cd899a9a","src/ir/int.rs":"1f61a472288afe489d9320bc8b13920333ece57891ae8570b4c4f25ab50688e6","src/ir/item.rs":"527b9a006dd1865cb77bac7e0e3826904c71b3b77679bcf7cd4ea63791dfa5ec","src/ir/item_kind.rs":"13048962657436a757ff9483d96f2ce351ec5f827ecab4921ed643f6f115c497","src/ir/layout.rs":"e3d1adf1ad2fa5bd96530cdd5097db3d9cc7b44d33ec23a04fcfccecd9cf4469","src/ir/mod.
rs":"2eae90f207fad2e45957ec9287064992a419e3fc916aba84faff2ea25cbeb5ee","src/ir/module.rs":"c4d90bf38fe3672e01923734ccbdb7951ea929949d5f413a9c2aee12395a5094","src/ir/objc.rs":"05068c4fbf42429c4ac2a233c874f18ffcf7dc1744398e400a5a48d0e7a972f2","src/ir/template.rs":"bcd750450a4df0200a6e7958f9c96a09b91e3ccd29c60712f2b9d3458f1234aa","src/ir/traversal.rs":"a108f2b2a1b3d2e679274eead8cf6f8fc78454e88943527d56bb30b5f071f104","src/ir/ty.rs":"4d5f1b2e11f6fb7df3348bdab7775cdfa209e136afe84a7b0e862068e53e7183","src/ir/var.rs":"96cca085d33d75493ae2b4465272a044d96a0e98f7732f13456313af3cf5beb8","src/lib.rs":"8a2a28797d2c7a014082a48a91361ed8090941de204e786ac107f2adae5d9cda","src/log_stubs.rs":"6dfdd908b7c6453da416cf232893768f9480e551ca4add0858ef88bf71ee6ceb","src/main.rs":"e519053bcdde6bc88f60f955246a02d53b3db1cc5ccd1612e6675b790b7460b0","src/options.rs":"54b9fb74c8a998e3b3272f282493214371fbf42a9af6a54455824ef8b76d2445","src/parse.rs":"be7d13cc84fae79ec7b3aa9e77063fa475a48d74a854423e2c72d75006a25202","src/regex_set.rs":"a55241f2117f15729d174790f386e255fcb224b692325bbe6716dbb1d6874881","src/time.rs":"a02befb48d10dcc31e3f9571b2fa1c40f97fafe6f6ae7d7fc4f8fd01f1a169ba"},"package":"1657d607dd7a8e10b3181149f60f3b27ea0eac81058c09a1c791b8f6ead91f19"}
\ No newline at end of file
--- a/third_party/rust/bindgen/Cargo.toml
+++ b/third_party/rust/bindgen/Cargo.toml
@@ -7,17 +7,17 @@
 #
 # If you believe there's an error in this file please file an
 # issue against the rust-lang/cargo repository. If you're
 # editing this file be aware that the upstream Cargo.toml
 # will likely look very different (and much more reasonable)
 
 [package]
 name = "bindgen"
-version = "0.32.3"
+version = "0.33.1"
 authors = ["Jyun-Yan You <jyyou.tw@gmail.com>", "Emilio Cobos Álvarez <emilio@crisal.io>", "Nick Fitzgerald <fitzgen@gmail.com>", "The Servo project developers"]
 build = "build.rs"
 include = ["Cargo.toml", "build.rs", "src/*.rs", "src/**/*.rs"]
 description = "Automatically generates Rust FFI bindings to C and C++ libraries."
 documentation = "https://docs.rs/bindgen"
 readme = "README.md"
 keywords = ["bindings", "ffi", "code-generation"]
 categories = ["external-ffi-bindings", "development-tools::ffi"]
@@ -40,34 +40,31 @@ version = "0.1.0"
 [dependencies.clang-sys]
 version = "0.21.0"
 features = ["runtime", "clang_3_9"]
 
 [dependencies.clap]
 version = "2"
 
 [dependencies.env_logger]
-version = "0.4"
+version = "0.5"
 optional = true
 
 [dependencies.lazy_static]
 version = "1"
 
 [dependencies.log]
-version = "0.3"
+version = "0.4"
 optional = true
 
 [dependencies.peeking_take_while]
 version = "0.1.2"
 
-[dependencies.proc-macro2]
-version = "0.2"
-
 [dependencies.quote]
-version = "0.4"
+version = "0.3.15"
 
 [dependencies.regex]
 version = "0.2"
 
 [dependencies.which]
 version = "1.0.2"
 [dev-dependencies.clap]
 version = "2"
--- a/third_party/rust/bindgen/src/callbacks.rs
+++ b/third_party/rust/bindgen/src/callbacks.rs
@@ -1,20 +1,38 @@
 //! A public API for more fine-grained customization of bindgen behavior.
 
 pub use ir::enum_ty::{EnumVariantCustomBehavior, EnumVariantValue};
 pub use ir::int::IntKind;
 use std::fmt;
 use std::panic::UnwindSafe;
 
+/// An enum to allow ignoring parsing of macros.
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum MacroParsingBehavior {
+    /// Ignore the macro, generating no code for it, or anything that depends on
+    /// it.
+    Ignore,
+    /// The default behavior bindgen would have otherwise.
+    Default,
+}
+
+impl Default for MacroParsingBehavior {
+    fn default() -> Self {
+        MacroParsingBehavior::Default
+    }
+}
+
 /// A trait to allow configuring different kinds of types in different
 /// situations.
 pub trait ParseCallbacks: fmt::Debug + UnwindSafe {
-    /// This function will be run on every macro that is identified
-    fn parsed_macro(&self, _name: &str) {}
+    /// This function will be run on every macro that is identified.
+    fn will_parse_macro(&self, _name: &str) -> MacroParsingBehavior {
+        MacroParsingBehavior::Default
+    }
 
     /// The integer kind an integer macro should have, given a name and the
     /// value of that macro, or `None` if you want the default to be chosen.
     fn int_macro(&self, _name: &str, _value: i64) -> Option<IntKind> {
         None
     }
 
     /// This function should return whether, given the a given enum variant
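For reference, a minimal sketch of how a downstream ParseCallbacks implementation adapts to the hunk above: bindgen 0.33 replaces parsed_macro() with will_parse_macro(), which returns a MacroParsingBehavior so a callback can skip a macro entirely. The struct name and the "FOO_" prefix below are hypothetical, not taken from this tree.

extern crate bindgen;

use bindgen::callbacks::{MacroParsingBehavior, ParseCallbacks};

#[derive(Debug)]
struct SkipFooMacros;

impl ParseCallbacks for SkipFooMacros {
    fn will_parse_macro(&self, name: &str) -> MacroParsingBehavior {
        if name.starts_with("FOO_") {
            // Emit nothing for this macro, or for anything that depends on it.
            MacroParsingBehavior::Ignore
        } else {
            // Keep bindgen's default handling for everything else.
            MacroParsingBehavior::Default
        }
    }
}

Such a callback would typically be installed via Builder::parse_callbacks(Box::new(SkipFooMacros)); the existing hooks like int_macro are unchanged by this update.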
--- a/third_party/rust/bindgen/src/codegen/helpers.rs
+++ b/third_party/rust/bindgen/src/codegen/helpers.rs
@@ -1,53 +1,54 @@
 //! Helpers for code generation that don't need macro expansion.
 
 use ir::context::BindgenContext;
 use ir::layout::Layout;
 use quote;
-use proc_macro2;
 use std::mem;
 
 pub mod attributes {
     use quote;
-    use proc_macro2;
 
     pub fn repr(which: &str) -> quote::Tokens {
-        let which = proc_macro2::Term::intern(which);
+        let which = quote::Ident::new(which);
         quote! {
             #[repr( #which )]
         }
     }
 
     pub fn repr_list(which_ones: &[&str]) -> quote::Tokens {
-        let which_ones = which_ones.iter().cloned().map(proc_macro2::Term::intern);
+        let which_ones = which_ones.iter().cloned().map(quote::Ident::new);
         quote! {
             #[repr( #( #which_ones ),* )]
         }
     }
 
     pub fn derives(which_ones: &[&str]) -> quote::Tokens {
-        let which_ones = which_ones.iter().cloned().map(proc_macro2::Term::intern);
+        let which_ones = which_ones.iter().cloned().map(quote::Ident::new);
         quote! {
             #[derive( #( #which_ones ),* )]
         }
     }
 
     pub fn inline() -> quote::Tokens {
         quote! {
             #[inline]
         }
     }
 
     pub fn doc(comment: String) -> quote::Tokens {
         // Doc comments are already preprocessed into nice `///` formats by the
         // time they get here. Just make sure that we have newlines around it so
         // that nothing else gets wrapped into the comment.
-        let comment = proc_macro2::Literal::doccomment(&comment);
-        quote! {#comment}
+        let mut tokens = quote! {};
+        tokens.append("\n");
+        tokens.append(comment);
+        tokens.append("\n");
+        tokens
     }
 
     pub fn link_name(name: &str) -> quote::Tokens {
         // LLVM mangles the name by default but it's already mangled.
         // Prefixing the name with \u{1} should tell LLVM to not mangle it.
         let name = format!("\u{1}{}", name);
         quote! {
             #[link_name = #name]
@@ -67,17 +68,17 @@ pub fn blob(layout: Layout) -> quote::To
     let ty_name = match opaque.known_rust_type_for_array() {
         Some(ty) => ty,
         None => {
             warn!("Found unknown alignment on code generation!");
             "u8"
         }
     };
 
-    let ty_name = proc_macro2::Term::intern(ty_name);
+    let ty_name = quote::Ident::new(ty_name);
 
     let data_len = opaque.array_size().unwrap_or(layout.size);
 
     if data_len == 1 {
         quote! {
             #ty_name
         }
     } else {
@@ -97,40 +98,39 @@ pub fn integer_type(layout: Layout) -> O
     }
 }
 
 /// Generates a bitfield allocation unit type for a type with the given `Layout`.
 pub fn bitfield_unit(ctx: &BindgenContext, layout: Layout) -> quote::Tokens {
     let mut tokens = quote! {};
 
     if ctx.options().enable_cxx_namespaces {
-        tokens.append_all(quote! { root:: });
+        tokens.append(quote! { root:: });
     }
 
     let align = match layout.align {
         n if n >= 8 => quote! { u64 },
         4 => quote! { u32 },
         2 => quote! { u16 },
         _ => quote! { u8  },
     };
 
     let size = layout.size;
-    tokens.append_all(quote! {
+    tokens.append(quote! {
         __BindgenBitfieldUnit<[u8; #size], #align>
     });
 
     tokens
 }
 
 pub mod ast_ty {
     use ir::context::BindgenContext;
     use ir::function::FunctionSig;
     use ir::ty::FloatKind;
     use quote;
-    use proc_macro2;
 
     pub fn raw_type(ctx: &BindgenContext, name: &str) -> quote::Tokens {
         let ident = ctx.rust_ident_raw(name);
         match ctx.options().ctypes_prefix {
             Some(ref prefix) => {
                 let prefix = ctx.rust_ident_raw(prefix.as_str());
                 quote! {
                     #prefix::#ident
@@ -161,48 +161,59 @@ pub mod ast_ty {
             (FloatKind::Double, false) |
             (FloatKind::LongDouble, false) => raw_type(ctx, "c_double"),
             (FloatKind::Float128, _) => quote! { [u8; 16] },
         }
     }
 
     pub fn int_expr(val: i64) -> quote::Tokens {
         // Don't use quote! { #val } because that adds the type suffix.
-        let val = proc_macro2::Literal::integer(val);
-        quote!(#val)
+        let mut tokens = quote! {};
+        tokens.append(val.to_string());
+        tokens
     }
 
     pub fn uint_expr(val: u64) -> quote::Tokens {
         // Don't use quote! { #val } because that adds the type suffix.
-        let val = proc_macro2::Term::intern(&val.to_string());
-        quote!(#val)
+        let mut tokens = quote! {};
+        tokens.append(val.to_string());
+        tokens
     }
 
     pub fn byte_array_expr(bytes: &[u8]) -> quote::Tokens {
         let mut bytes: Vec<_> = bytes.iter().cloned().collect();
         bytes.push(0);
-        quote! { [ #(#bytes),* ] }
+        quote! {
+            #bytes
+        }
     }
 
     pub fn cstr_expr(mut string: String) -> quote::Tokens {
         string.push('\0');
-        let b = proc_macro2::Literal::byte_string(&string.as_bytes());
+        let b = quote::ByteStr(&string);
         quote! {
             #b
         }
     }
 
     pub fn float_expr(
         ctx: &BindgenContext,
         f: f64,
     ) -> Result<quote::Tokens, ()> {
         if f.is_finite() {
-            let val = proc_macro2::Literal::float(f);
+            let mut string = f.to_string();
 
-            return Ok(quote!(#val));
+            // So it gets properly recognised as a floating point constant.
+            if !string.contains('.') {
+                string.push('.');
+            }
+
+            let mut tokens = quote! {};
+            tokens.append(string);
+            return Ok(tokens);
         }
 
         let prefix = ctx.trait_prefix();
 
         if f.is_nan() {
             return Ok(quote! {
                 ::#prefix::f64::NAN
             });
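As context for the helpers.rs changes above, a minimal sketch of the quote 0.3 style this update moves back to (the function name and arguments are illustrative, not from the tree): identifiers are built with quote::Ident::new and further tokens are pushed with Tokens::append, instead of going through proc_macro2::Term::intern and proc_macro2::Literal as bindgen 0.32 did.

#[macro_use]
extern crate quote;

// Build the tokens for an array type like `[u8; 16]`, mirroring the
// blob()/bitfield_unit() patterns in the hunk above.
fn array_of(elem: &str, len: usize) -> quote::Tokens {
    let elem = quote::Ident::new(elem);
    let mut tokens = quote! {};
    tokens.append(quote! { [ #elem ; #len ] });
    tokens
}

fn main() {
    // Prints roughly: [ u8 ; 16 ]
    println!("{}", array_of("u8", 16).as_str());
}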
--- a/third_party/rust/bindgen/src/codegen/impl_partialeq.rs
+++ b/third_party/rust/bindgen/src/codegen/impl_partialeq.rs
@@ -1,15 +1,14 @@
 
 use ir::comp::{CompInfo, CompKind, Field, FieldMethods};
 use ir::context::BindgenContext;
 use ir::item::{IsOpaque, Item};
 use ir::ty::{TypeKind, RUST_DERIVE_IN_ARRAY_LIMIT};
 use quote;
-use proc_macro2;
 
 /// Generate a manual implementation of `PartialEq` trait for the
 /// specified compound type.
 pub fn gen_partialeq_impl(
     ctx: &BindgenContext,
     comp_info: &CompInfo,
     item: &Item,
     ty_for_impl: &quote::Tokens,
@@ -67,17 +66,17 @@ pub fn gen_partialeq_impl(
     Some(quote! {
         fn eq(&self, other: & #ty_for_impl) -> bool {
             #( #tokens )&&*
         }
     })
 }
 
 fn gen_field(ctx: &BindgenContext, ty_item: &Item, name: &str) -> quote::Tokens {
-    fn quote_equals(name_ident: proc_macro2::Term) -> quote::Tokens {
+    fn quote_equals(name_ident: quote::Ident) -> quote::Tokens {
         quote! { self.#name_ident == other.#name_ident }
     }
 
     let name_ident = ctx.rust_ident(name);
     let ty = ty_item.expect_type();
 
     match *ty.kind() {
         TypeKind::Void |
--- a/third_party/rust/bindgen/src/codegen/mod.rs
+++ b/third_party/rust/bindgen/src/codegen/mod.rs
@@ -33,17 +33,16 @@ use ir::item_kind::ItemKind;
 use ir::layout::Layout;
 use ir::module::Module;
 use ir::objc::{ObjCInterface, ObjCMethod};
 use ir::template::{AsTemplateParam, TemplateInstantiation, TemplateParameters};
 use ir::ty::{Type, TypeKind};
 use ir::var::Var;
 
 use quote;
-use proc_macro2;
 
 use std::borrow::Cow;
 use std::cell::Cell;
 use std::collections::{HashSet, VecDeque};
 use std::collections::hash_map::{Entry, HashMap};
 use std::fmt::Write;
 use std::iter;
 use std::mem;
@@ -71,17 +70,17 @@ fn root_import(ctx: &BindgenContext, mod
     let mut path = top_level_path(ctx, module);
 
     let root = ctx.root_module().canonical_name(ctx);
     let root_ident = ctx.rust_ident(&root);
     path.push(quote! { #root_ident });
 
 
     let mut tokens = quote! {};
-    tokens.append_separated(path, proc_macro2::Term::intern("::"));
+    tokens.append_separated(path, "::");
 
     quote! {
         #[allow(unused_imports)]
         use #tokens ;
     }
 }
 
 struct CodegenResult<'a> {
@@ -305,17 +304,17 @@ impl AppendImplicitTemplateParams for qu
                 return;
             }
 
             let params = params.into_iter().map(|p| {
                 p.try_to_rust_ty(ctx, &())
                     .expect("template params cannot fail to be a rust type")
             });
 
-            self.append_all(quote! {
+            self.append(quote! {
                 < #( #params ),* >
             });
         }
     }
 }
 
 trait CodeGenerator {
     /// Extra information from the caller.
@@ -609,24 +608,30 @@ impl CodeGenerator for Type {
             TypeKind::Comp(ref ci) => ci.codegen(ctx, result, item),
             TypeKind::TemplateAlias(inner, _) |
             TypeKind::Alias(inner) => {
                 let inner_item = inner.into_resolver()
                     .through_type_refs()
                     .resolve(ctx);
                 let name = item.canonical_name(ctx);
 
-                // Try to catch the common pattern:
-                //
-                // typedef struct foo { ... } foo;
-                //
-                // here.
-                //
-                if inner_item.canonical_name(ctx) == name {
-                    return;
+                {
+                    let through_type_aliases = inner.into_resolver()
+                        .through_type_refs()
+                        .through_type_aliases()
+                        .resolve(ctx);
+
+                    // Try to catch the common pattern:
+                    //
+                    // typedef struct foo { ... } foo;
+                    //
+                    // here, and also other more complex cases like #946.
+                    if through_type_aliases.canonical_name(ctx) == name {
+                        return;
+                    }
                 }
 
                 // If this is a known named type, disallow generating anything
                 // for it too.
                 let spelling = self.name().expect("Unnamed alias?");
                 if utils::type_from_named(ctx, spelling).is_some() {
                     return;
                 }
@@ -681,40 +686,40 @@ impl CodeGenerator for Type {
                 let mut tokens = if let Some(comment) = item.comment(ctx) {
                     attributes::doc(comment)
                 } else {
                     quote! {}
                 };
 
                 // We prefer using `pub use` over `pub type` because of:
                 // https://github.com/rust-lang/rust/issues/26264
-                if inner_rust_type.to_string()
+                if inner_rust_type.as_str()
                     .chars()
                     .all(|c| match c {
                         // These are the only characters allowed in simple
                         // paths, eg `good::dogs::Bront`.
                         'A'...'Z' | 'a'...'z' | '0'...'9' | ':' | '_' | ' ' => true,
                         _ => false,
                     }) &&
                     outer_params.is_none() &&
                     inner_item.expect_type().canonical_type(ctx).is_enum()
                 {
-                    tokens.append_all(quote! {
+                    tokens.append(quote! {
                         pub use
                     });
                     let path = top_level_path(ctx, item);
-                    tokens.append_separated(path, proc_macro2::Term::intern("::"));
-                    tokens.append_all(quote! {
+                    tokens.append_separated(path, "::");
+                    tokens.append(quote! {
                         :: #inner_rust_type  as #rust_name ;
                     });
                     result.push(tokens);
                     return;
                 }
 
-                tokens.append_all(quote! {
+                tokens.append(quote! {
                     pub type #rust_name
                 });
 
                 if let Some(params) = outer_params {
                     let params: Vec<_> = params.into_iter()
                         .filter_map(|p| p.as_template_param(ctx, &()))
                         .collect();
                     if params.iter().any(|p| ctx.resolve_type(*p).is_invalid_type_param()) {
@@ -727,22 +732,22 @@ impl CodeGenerator for Type {
                     }
 
                     let params = params.iter()
                         .map(|p| {
                             p.try_to_rust_ty(ctx, &())
                                 .expect("type parameters can always convert to rust ty OK")
                         });
 
-                    tokens.append_all(quote! {
+                    tokens.append(quote! {
                         < #( #params ),* >
                     });
                 }
 
-                tokens.append_all(quote! {
+                tokens.append(quote! {
                     = #inner_rust_type ;
                 });
 
                 result.push(tokens);
             }
             TypeKind::Enum(ref ei) => ei.codegen(ctx, result, item),
             TypeKind::ObjCId | TypeKind::ObjCSel => {
                 result.saw_objc();
@@ -1049,21 +1054,21 @@ impl<'a> FieldCodegen<'a> for FieldData 
         let is_private = self.annotations().private_fields().unwrap_or(
             fields_should_be_private,
         );
 
         let accessor_kind =
             self.annotations().accessor_kind().unwrap_or(accessor_kind);
 
         if is_private {
-            field.append_all(quote! {
+            field.append(quote! {
                 #field_ident : #ty ,
             });
         } else {
-            field.append_all(quote! {
+            field.append(quote! {
                 pub #field_ident : #ty ,
             });
         }
 
         fields.extend(Some(field));
 
         // TODO: Factor the following code out, please!
         if accessor_kind == FieldAccessorKind::None {
@@ -1113,17 +1118,17 @@ impl<'a> FieldCodegen<'a> for FieldData 
             }
         }));
     }
 }
 
 impl BitfieldUnit {
     /// Get the constructor name for this bitfield unit.
     fn ctor_name(&self) -> quote::Tokens {
-        let ctor_name = proc_macro2::Term::intern(&format!("new_bitfield_{}", self.nth()));
+        let ctor_name = quote::Ident::new(format!("new_bitfield_{}", self.nth()));
         quote! {
             #ctor_name
         }
     }
 }
 
 impl Bitfield {
     /// Extend an under construction bitfield unit constructor with this
@@ -1144,17 +1149,17 @@ impl Bitfield {
             "Bitfield without layout? Gah!",
         );
         let bitfield_int_ty = helpers::blob(bitfield_ty_layout);
 
         let offset = self.offset_into_unit();
         let width = self.width() as u8;
         let prefix = ctx.trait_prefix();
 
-        ctor_impl.append_all(quote! {
+        ctor_impl.append(quote! {
             __bindgen_bitfield_unit.set(
                 #offset,
                 #width,
                 {
                     let #param_name: #bitfield_int_ty = unsafe {
                         ::#prefix::mem::transmute(#param_name)
                     };
                     #param_name as u64
@@ -1312,17 +1317,17 @@ impl<'a> FieldCodegen<'a> for Bitfield {
         (unit_field_name, bitfield_representable_as_int): (&'a str, &mut bool),
     ) where
         F: Extend<quote::Tokens>,
         M: Extend<quote::Tokens>,
     {
         let prefix = ctx.trait_prefix();
         let getter_name = bitfield_getter_name(ctx, self);
         let setter_name = bitfield_setter_name(ctx, self);
-        let unit_field_ident = proc_macro2::Term::intern(unit_field_name);
+        let unit_field_ident = quote::Ident::new(unit_field_name);
 
         let bitfield_ty_item = ctx.resolve_item(self.ty());
         let bitfield_ty = bitfield_ty_item.expect_type();
 
         let bitfield_ty_layout = bitfield_ty.layout(ctx).expect(
             "Bitfield without layout? Gah!",
         );
         let bitfield_int_ty = match helpers::integer_type(bitfield_ty_layout) {
@@ -1414,33 +1419,16 @@ impl CodeGenerator for CompInfo {
         }
 
         let used_template_params = item.used_template_params(ctx);
 
         let ty = item.expect_type();
         let layout = ty.layout(ctx);
         let mut packed = self.is_packed(ctx, &layout);
 
-        // generate tuple struct if struct or union is a forward declaration,
-        // skip for now if template parameters are needed.
-        //
-        // NB: We generate a proper struct to avoid struct/function name
-        // collisions.
-        if self.is_forward_declaration() && used_template_params.is_none() {
-            let struct_name = item.canonical_name(ctx);
-            let struct_name = ctx.rust_ident_raw(struct_name);
-            let tuple_struct = quote! {
-                #[repr(C)]
-                #[derive(Debug, Copy, Clone)]
-                pub struct #struct_name { _unused: [u8; 0] }
-            };
-            result.push(tuple_struct);
-            return;
-        }
-
         let canonical_name = item.canonical_name(ctx);
         let canonical_ident = ctx.rust_ident(&canonical_name);
 
         // Generate the vtable from the method list if appropriate.
         //
         // TODO: I don't know how this could play with virtual methods that are
         // not in the list of methods found by us, we'll see. Also, could the
         // order of the vtable pointers vary?
@@ -1486,24 +1474,16 @@ impl CodeGenerator for CompInfo {
                 struct_layout.saw_base(base_ty);
 
                 fields.push(quote! {
                     pub #field_name : #inner ,
                 });
             }
         }
 
-        let is_union = self.kind() == CompKind::Union;
-        if is_union {
-            result.saw_union();
-            if !self.can_be_rust_union(ctx) {
-                result.saw_bindgen_union();
-            }
-        }
-
         let mut methods = vec![];
         if !is_opaque {
             let codegen_depth = item.codegen_depth(ctx);
             let fields_should_be_private =
                 item.annotations().private_fields().unwrap_or(false);
             let struct_accessor_kind = item.annotations()
                 .accessor_kind()
                 .unwrap_or(FieldAccessorKind::None);
@@ -1518,18 +1498,24 @@ impl CodeGenerator for CompInfo {
                     &mut struct_layout,
                     &mut fields,
                     &mut methods,
                     (),
                 );
             }
         }
 
+        let is_union = self.kind() == CompKind::Union;
         let layout = item.kind().expect_type().layout(ctx);
-        if is_union && !is_opaque {
+        if is_union && !is_opaque && !self.is_forward_declaration() {
+            result.saw_union();
+            if !self.can_be_rust_union(ctx) {
+                result.saw_bindgen_union();
+            }
+
             let layout = layout.expect("Unable to get layout information?");
             let ty = helpers::blob(layout);
 
             fields.push(if self.can_be_rust_union(ctx) {
                 quote! {
                     _bindgen_union_align: #ty ,
                 }
             } else {
@@ -1583,17 +1569,21 @@ impl CodeGenerator for CompInfo {
         //
         // This is apparently not the case for C, see:
         // https://github.com/rust-lang-nursery/rust-bindgen/issues/551
         //
         // Just get the layout, and assume C++ if not.
         //
         // NOTE: This check is conveniently here to avoid the dummy fields we
         // may add for unused template parameters.
-        if item.is_zero_sized(ctx) {
+        if self.is_forward_declaration() {
+            fields.push(quote! {
+                _unused: [u8; 0],
+            });
+        } else if item.is_zero_sized(ctx) {
             let has_address = if is_opaque {
                 // Generate the address field if it's an opaque type and
                 // couldn't determine the layout of the blob.
                 layout.is_none()
             } else {
                 layout.map_or(true, |l| l.size != 0)
             };
 
@@ -1653,22 +1643,21 @@ impl CodeGenerator for CompInfo {
         } else {
             needs_debug_impl = ctx.options().derive_debug &&
                 ctx.options().impl_debug
         }
 
         if item.can_derive_default(ctx) {
             derives.push("Default");
         } else {
-            needs_default_impl = ctx.options().derive_default;
+            needs_default_impl =
+                ctx.options().derive_default && !self.is_forward_declaration();
         }
 
-        if item.can_derive_copy(ctx) && !item.annotations().disallow_copy() &&
-            ctx.options().derive_copy
-        {
+        if item.can_derive_copy(ctx) && !item.annotations().disallow_copy() {
             derives.push("Copy");
 
             if ctx.options().rust_features().builtin_clone_impls() ||
                 used_template_params.is_some()
             {
                 // FIXME: This requires extra logic if you have a big array in a
                 // templated struct. The reason for this is that the magic:
                 //     fn clone(&self) -> Self { *self }
@@ -1717,17 +1706,17 @@ impl CodeGenerator for CompInfo {
             }
         } else {
             quote! {
                 #( #attributes )*
                 pub struct #canonical_ident
             }
         };
 
-        tokens.append_all(quote! {
+        tokens.append(quote! {
             #generics {
                 #( #fields )*
             }
         });
         result.push(tokens);
 
         // Generate the inner types and all that stuff.
         //
@@ -1740,31 +1729,31 @@ impl CodeGenerator for CompInfo {
         }
 
         // NOTE: Some unexposed attributes (like alignment attributes) may
         // affect layout, so we're bad and pray to the gods to avoid sending
         // all the tests to shit when parsing things like max_align_t.
         if self.found_unknown_attr() {
             warn!(
                 "Type {} has an unkown attribute that may affect layout",
-                canonical_ident.as_str()
+                canonical_ident
             );
         }
 
         if used_template_params.is_none() {
             if !is_opaque {
                 for var in self.inner_vars() {
                     ctx.resolve_item(*var).codegen(ctx, result, &());
                 }
             }
 
-            if ctx.options().layout_tests {
+            if ctx.options().layout_tests && !self.is_forward_declaration() {
                 if let Some(layout) = layout {
                     let fn_name =
-                        format!("bindgen_test_layout_{}", canonical_ident.as_str());
+                        format!("bindgen_test_layout_{}", canonical_ident);
                     let fn_name = ctx.rust_ident_raw(fn_name);
                     let prefix = ctx.trait_prefix();
                     let size_of_expr = quote! {
                         ::#prefix::mem::size_of::<#canonical_ident>()
                     };
                     let align_of_expr = quote! {
                         ::#prefix::mem::align_of::<#canonical_ident>()
                     };
@@ -1980,16 +1969,17 @@ impl MethodCodegen for Method {
                 MethodKind::Constructor => cc.constructors,
                 MethodKind::Destructor => cc.destructors,
                 MethodKind::VirtualDestructor { .. } => cc.destructors,
                 MethodKind::Static | MethodKind::Normal |
                 MethodKind::Virtual { .. } => cc.methods,
             }
         });
 
+        // TODO(emilio): We could generate final stuff at least.
         if self.is_virtual() {
             return; // FIXME
         }
 
         // First of all, output the actual function.
         let function_item = ctx.resolve_item(self.signature());
         function_item.codegen(ctx, result, &());
 
@@ -2133,79 +2123,104 @@ impl EnumVariation {
             _ => false
         }
     }
 }
 
 /// A helper type to construct different enum variations.
 enum EnumBuilder<'a> {
     Rust {
+        codegen_depth: usize,
         attrs: Vec<quote::Tokens>,
-        ident: proc_macro2::Term,
+        ident: quote::Ident,
         tokens: quote::Tokens,
         emitted_any_variants: bool,
     },
     Bitfield {
+        codegen_depth: usize,
         canonical_name: &'a str,
         tokens: quote::Tokens,
     },
-    Consts(Vec<quote::Tokens>),
+    Consts {
+        variants: Vec<quote::Tokens>,
+        codegen_depth: usize,
+    },
     ModuleConsts {
+        codegen_depth: usize,
         module_name: &'a str,
         module_items: Vec<quote::Tokens>,
     },
 }
 
 impl<'a> EnumBuilder<'a> {
+    /// Returns the depth of the code generation for a variant of this enum.
+    fn codegen_depth(&self) -> usize {
+        match *self {
+            EnumBuilder::Rust { codegen_depth, .. } |
+            EnumBuilder::Bitfield { codegen_depth, .. } |
+            EnumBuilder::ModuleConsts { codegen_depth, .. } |
+            EnumBuilder::Consts { codegen_depth, .. } => codegen_depth,
+        }
+    }
+
     /// Create a new enum given an item builder, a canonical name, a name for
     /// the representation, and which variation it should be generated as.
     fn new(
         name: &'a str,
         attrs: Vec<quote::Tokens>,
         repr: quote::Tokens,
-        enum_variation: EnumVariation
+        enum_variation: EnumVariation,
+        enum_codegen_depth: usize,
     ) -> Self {
-        let ident = proc_macro2::Term::intern(name);
+        let ident = quote::Ident::new(name);
 
         match enum_variation {
             EnumVariation::Bitfield => {
                 EnumBuilder::Bitfield {
+                    codegen_depth: enum_codegen_depth,
                     canonical_name: name,
                     tokens: quote! {
                         #( #attrs )*
                         pub struct #ident (pub #repr);
                     },
                 }
             }
 
             EnumVariation::Rust => {
-                let tokens = quote!();
+                let tokens = quote!{};
                 EnumBuilder::Rust {
+                    codegen_depth: enum_codegen_depth + 1,
                     attrs,
                     ident,
                     tokens,
                     emitted_any_variants: false,
                 }
             }
 
             EnumVariation::Consts => {
-                EnumBuilder::Consts(vec![
-                    quote! {
-                        pub type #ident = #repr;
-                    }
-                ])
+                EnumBuilder::Consts {
+                    variants: vec![
+                        quote! {
+                            #( #attrs )*
+                            pub type #ident = #repr;
+                        }
+                    ],
+                    codegen_depth: enum_codegen_depth,
+                }
             }
 
             EnumVariation::ModuleConsts => {
-                let ident = proc_macro2::Term::intern(CONSTIFIED_ENUM_MODULE_REPR_NAME);
+                let ident = quote::Ident::new(CONSTIFIED_ENUM_MODULE_REPR_NAME);
                 let type_definition = quote! {
+                    #( #attrs )*
                     pub type #ident = #repr;
                 };
 
                 EnumBuilder::ModuleConsts {
+                    codegen_depth: enum_codegen_depth + 1,
                     module_name: name,
                     module_items: vec![type_definition],
                 }
             }
         }
     }
 
     /// Add a variant to this enum.
@@ -2218,40 +2233,51 @@ impl<'a> EnumBuilder<'a> {
         result: &mut CodegenResult<'b>,
     ) -> Self {
         let variant_name = ctx.rust_mangle(variant.name());
         let expr = match variant.val() {
             EnumVariantValue::Signed(v) => helpers::ast_ty::int_expr(v),
             EnumVariantValue::Unsigned(v) => helpers::ast_ty::uint_expr(v),
         };
 
+        let mut doc = quote! {};
+        if ctx.options().generate_comments {
+            if let Some(raw_comment) = variant.comment() {
+                let comment = comment::preprocess(raw_comment, self.codegen_depth());
+                doc = attributes::doc(comment);
+            }
+        }
+
         match self {
-            EnumBuilder::Rust { attrs, ident, tokens, emitted_any_variants: _ } => {
+            EnumBuilder::Rust { attrs, ident, tokens, emitted_any_variants: _, codegen_depth } => {
                 let name = ctx.rust_ident(variant_name);
                 EnumBuilder::Rust {
                     attrs,
                     ident,
+                    codegen_depth,
                     tokens: quote! {
                         #tokens
+                        #doc
                         #name = #expr,
                     },
                     emitted_any_variants: true,
                 }
             }
 
             EnumBuilder::Bitfield { .. } => {
                 let constant_name = match mangling_prefix {
                     Some(prefix) => {
                         Cow::Owned(format!("{}_{}", prefix, variant_name))
                     }
                     None => variant_name,
                 };
 
                 let ident = ctx.rust_ident(constant_name);
                 result.push(quote! {
+                    #doc
                     pub const #ident : #rust_ty = #rust_ty ( #expr );
                 });
 
                 self
             }
 
             EnumBuilder::Consts {
                 ..
@@ -2260,63 +2286,68 @@ impl<'a> EnumBuilder<'a> {
                     Some(prefix) => {
                         Cow::Owned(format!("{}_{}", prefix, variant_name))
                     }
                     None => variant_name,
                 };
 
                 let ident = ctx.rust_ident(constant_name);
                 result.push(quote! {
+                    #doc
                     pub const #ident : #rust_ty = #expr ;
                 });
 
                 self
             }
             EnumBuilder::ModuleConsts {
+                codegen_depth,
                 module_name,
                 mut module_items,
             } => {
                 let name = ctx.rust_ident(variant_name);
                 let ty = ctx.rust_ident(CONSTIFIED_ENUM_MODULE_REPR_NAME);
                 module_items.push(quote! {
+                    #doc
                     pub const #name : #ty = #expr ;
                 });
 
                 EnumBuilder::ModuleConsts {
                     module_name,
                     module_items,
+                    codegen_depth,
                 }
             }
         }
     }
 
     fn build<'b>(
         self,
         ctx: &BindgenContext,
         rust_ty: quote::Tokens,
         result: &mut CodegenResult<'b>,
     ) -> quote::Tokens {
         match self {
-            EnumBuilder::Rust { attrs, ident, tokens, emitted_any_variants } => {
+            EnumBuilder::Rust { attrs, ident, tokens, emitted_any_variants, .. } => {
                 let variants = if !emitted_any_variants {
                     quote!(__bindgen_cannot_repr_c_on_empty_enum = 0)
                 } else {
                     tokens
                 };
 
-                quote! (
+                quote! {
                     #( #attrs )*
                     pub enum #ident {
                         #variants
                     }
-                )
+                }
             }
             EnumBuilder::Bitfield {
                 canonical_name,
                 tokens,
+                ..
             } => {
                 let rust_ty_name = ctx.rust_ident_raw(canonical_name);
                 let prefix = ctx.trait_prefix();
 
                 result.push(quote! {
                     impl ::#prefix::ops::BitOr<#rust_ty> for #rust_ty {
                         type Output = Self;
 
@@ -2353,20 +2384,21 @@ impl<'a> EnumBuilder<'a> {
                         fn bitand_assign(&mut self, rhs: #rust_ty) {
                             self.0 &= rhs.0;
                         }
                     }
                 });
 
                 tokens
             }
-            EnumBuilder::Consts(tokens) => quote! { #( #tokens )* },
+            EnumBuilder::Consts { variants, .. } => quote! { #( #variants )* },
             EnumBuilder::ModuleConsts {
                 module_items,
                 module_name,
+                ..
             } => {
                 let ident = ctx.rust_ident(module_name);
                 quote! {
                     pub mod #ident {
                         #( #module_items )*
                     }
                 }
             }
@@ -2458,28 +2490,28 @@ impl CodeGenerator for Enum {
                 &["Debug", "Copy", "Clone", "PartialEq", "Eq", "Hash"],
             ));
         }
 
         fn add_constant<'a>(
             ctx: &BindgenContext,
             enum_: &Type,
             // Only to avoid recomputing every time.
-            enum_canonical_name: &proc_macro2::Term,
+            enum_canonical_name: &quote::Ident,
             // May be the same as "variant" if the enum is unnamed
             // and we still haven't seen the value.
             variant_name: &str,
-            referenced_name: &proc_macro2::Term,
+            referenced_name: &quote::Ident,
             enum_rust_ty: quote::Tokens,
             result: &mut CodegenResult<'a>,
         ) {
             let constant_name = if enum_.name().is_some() {
                 if ctx.options().prepend_enum_name {
-                    format!("{}_{}", enum_canonical_name.as_str(), variant_name)
+                    format!("{}_{}", enum_canonical_name, variant_name)
                 } else {
                     variant_name.into()
                 }
             } else {
                 variant_name.into()
             };
             let constant_name = ctx.rust_ident(constant_name);
 
@@ -2493,21 +2525,22 @@ impl CodeGenerator for Enum {
             let repr_name = ctx.rust_ident_raw(repr_name);
             quote! { #repr_name }
         };
 
         let mut builder = EnumBuilder::new(
             &name,
             attrs,
             repr,
-            variation
+            variation,
+            item.codegen_depth(ctx),
         );
 
         // A map where we keep a value -> variant relation.
-        let mut seen_values = HashMap::<_, proc_macro2::Term>::new();
+        let mut seen_values = HashMap::<_, quote::Ident>::new();
         let enum_rust_ty = item.to_rust_ty_or_opaque(ctx, &());
         let is_toplevel = item.is_toplevel(ctx);
 
         // Used to mangle the constants we generate in the unnamed-enum case.
         let parent_canonical_name = if is_toplevel {
             None
         } else {
             Some(item.parent_id().canonical_name(ctx))
@@ -2526,18 +2559,17 @@ impl CodeGenerator for Enum {
         // NB: We defer the creation of constified variants, in case we find
         // another variant with the same value (which is the common thing to
         // do).
         let mut constified_variants = VecDeque::new();
 
         let mut iter = self.variants().iter().peekable();
         while let Some(variant) = iter.next().or_else(|| {
             constified_variants.pop_front()
-        })
-        {
+        }) {
             if variant.hidden() {
                 continue;
             }
 
             if variant.force_constification() && iter.peek().is_some() {
                 constified_variants.push_back(variant);
                 continue;
             }
@@ -2596,37 +2628,37 @@ impl CodeGenerator for Enum {
                         variant.force_constification()
                     {
                         let mangled_name = if is_toplevel {
                             variant_name.clone()
                         } else {
                             let parent_name =
                                 parent_canonical_name.as_ref().unwrap();
 
-                            proc_macro2::Term::intern(
-                                &format!(
+                            quote::Ident::new(
+                                format!(
                                     "{}_{}",
                                     parent_name,
-                                    variant_name.as_str()
+                                    variant_name
                                 )
                             )
                         };
 
                         add_constant(
                             ctx,
                             enum_ty,
                             &ident,
-                            mangled_name.as_str(),
+                            mangled_name.as_ref(),
                             &variant_name,
                             enum_rust_ty.clone(),
                             result,
                         );
                     }
 
-                    entry.insert(variant_name);
+                    entry.insert(quote::Ident::new(variant_name));
                 }
             }
         }
 
         let item = builder.build(ctx, enum_rust_ty, result);
         result.push(item);
     }
 }
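
For orientation, the `#doc` attributes threaded through `EnumBuilder` above attach a C enum's variant comments to whatever the chosen variation emits. Under the `Consts` variation the generated output has roughly this shape (a hand-written sketch; the names, repr and values are illustrative, not taken from this changeset):

```rust
// Sketch of constified-enum output for a hypothetical `enum Foo { A, B = 5 };`.
pub type Foo = ::std::os::raw::c_uint;
/// Any comment on `A` lands here via the `#doc` attribute.
pub const Foo_A: Foo = 0;
pub const Foo_B: Foo = 5;
```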
@@ -2944,19 +2976,20 @@ impl TryToRustTy for Type {
             }
             TypeKind::Array(item, len) => {
                 let ty = item.try_to_rust_ty(ctx, &())?;
                 Ok(quote! {
                     [ #ty ; #len ]
                 })
             }
             TypeKind::Enum(..) => {
+                let mut tokens = quote! {};
                 let path = item.namespace_aware_canonical_path(ctx);
-                let path = proc_macro2::Term::intern(&path.join("::"));
-                Ok(quote!(#path))
+                tokens.append_separated(path.into_iter().map(quote::Ident::new), "::");
+                Ok(tokens)
             }
             TypeKind::TemplateInstantiation(ref inst) => {
                 inst.try_to_rust_ty(ctx, item)
             }
             TypeKind::ResolvedTypeRef(inner) => inner.try_to_rust_ty(ctx, &()),
             TypeKind::TemplateAlias(..) |
             TypeKind::Alias(..) => {
                 let template_params = item.used_template_params(ctx)
@@ -3068,17 +3101,17 @@ impl TryToRustTy for TemplateInstantiati
 
         let def = self.template_definition()
             .into_resolver()
             .through_type_refs()
             .resolve(ctx);
 
         let mut ty = quote! {};
         let def_path = def.namespace_aware_canonical_path(ctx);
-        ty.append_separated(def_path.into_iter().map(|p| ctx.rust_ident(p)), proc_macro2::Term::intern("::"));
+        ty.append_separated(def_path.into_iter().map(|p| ctx.rust_ident(p)), "::");
 
         let def_params = match def.self_template_params(ctx) {
             Some(params) => params,
             None => {
                 // This can happen if we generated an opaque type for a partial
                 // template specialization, and we've hit an instantiation of
                 // that partial specialization.
                 extra_assert!(
@@ -3281,17 +3314,17 @@ fn objc_method_codegen(
     };
 
     let methods_and_args = method.format_method_call(&fn_args);
 
     let body = if method.is_class_method() {
         let class_name = class_name
             .expect("Generating a class method without class name?")
             .to_owned();
-        let expect_msg = proc_macro2::Literal::string(&format!("Couldn't find {}", class_name));
+        let expect_msg = format!("Couldn't find {}", class_name);
         quote! {
             msg_send!(objc::runtime::Class::get(#class_name).expect(#expect_msg), #methods_and_args)
         }
     } else {
         quote! {
             msg_send!(self, #methods_and_args)
         }
     };
@@ -3407,22 +3440,21 @@ pub(crate) fn codegen(context: BindgenCo
 
 mod utils {
     use super::{ToRustTyOrOpaque, error};
     use ir::context::BindgenContext;
     use ir::function::FunctionSig;
     use ir::item::{Item, ItemCanonicalPath};
     use ir::ty::TypeKind;
     use quote;
-    use proc_macro2;
     use std::mem;
 
     pub fn prepend_bitfield_unit_type(result: &mut Vec<quote::Tokens>) {
-        let bitfield_unit_type = proc_macro2::Term::intern(include_str!("./bitfield_unit.rs"));
-        let bitfield_unit_type = quote!(#bitfield_unit_type);
+        let mut bitfield_unit_type = quote! {};
+        bitfield_unit_type.append(include_str!("./bitfield_unit.rs"));
 
         let items = vec![bitfield_unit_type];
         let old_items = mem::replace(result, items);
         result.extend(old_items);
     }
 
     pub fn prepend_objc_header(
         ctx: &BindgenContext,
@@ -3638,21 +3670,20 @@ mod utils {
         let old_items = mem::replace(result, items);
         result.extend(old_items.into_iter());
     }
 
     pub fn build_path(
         item: &Item,
         ctx: &BindgenContext,
     ) -> error::Result<quote::Tokens> {
-        use proc_macro2;
         let path = item.namespace_aware_canonical_path(ctx);
-        let path = proc_macro2::Term::intern(&path.join("::"));
-        let tokens = quote! {#path};
-        //tokens.append_separated(path, "::");
+
+        let mut tokens = quote! {};
+        tokens.append_separated(path.into_iter().map(quote::Ident::new), "::");
 
         Ok(tokens)
     }
 
     fn primitive_ty(ctx: &BindgenContext, name: &str) -> quote::Tokens {
         let ident = ctx.rust_ident_raw(name);
         quote! {
             #ident
--- a/third_party/rust/bindgen/src/codegen/struct_layout.rs
+++ b/third_party/rust/bindgen/src/codegen/struct_layout.rs
@@ -2,17 +2,16 @@
 
 use super::helpers;
 
 use ir::comp::CompInfo;
 use ir::context::BindgenContext;
 use ir::layout::Layout;
 use ir::ty::{Type, TypeKind};
 use quote;
-use proc_macro2;
 use std::cmp;
 use std::mem;
 
 /// Trace the layout of struct.
 #[derive(Debug)]
 pub struct StructLayoutTracker<'a> {
     name: &'a str,
     ctx: &'a BindgenContext,
@@ -302,17 +301,17 @@ impl<'a> StructLayoutTracker<'a> {
     }
 
     fn padding_field(&mut self, layout: Layout) -> quote::Tokens {
         let ty = helpers::blob(layout);
         let padding_count = self.padding_count;
 
         self.padding_count += 1;
 
-        let padding_field_name = proc_macro2::Term::intern(&format!("__bindgen_padding_{}", padding_count));
+        let padding_field_name = quote::Ident::new(format!("__bindgen_padding_{}", padding_count));
 
         self.max_field_align = cmp::max(self.max_field_align, layout.align);
 
         quote! {
             pub #padding_field_name : #ty ,
         }
     }
 
--- a/third_party/rust/bindgen/src/ir/analysis/derive_copy.rs
+++ b/third_party/rust/bindgen/src/ir/analysis/derive_copy.rs
@@ -230,19 +230,20 @@ impl<'ctx> MonotoneFramework for CannotD
                 // defer this check just for the union case.
                 if self.ctx.lookup_has_destructor(id.expect_type_id(self.ctx)) {
                     trace!("    comp has destructor which cannot derive copy");
                     return self.insert(id);
                 }
 
                 if info.kind() == CompKind::Union {
                     if !self.ctx.options().rust_features().untagged_union() {
-                        // NOTE: If there's no template parameters we can derive copy
-                        // unconditionally, since arrays are magical for rustc, and
-                        // __BindgenUnionField always implements copy.
+                        // NOTE: If there's no template parameters we can derive
+                        // copy unconditionally, since arrays are magical for
+                        // rustc, and __BindgenUnionField always implements
+                        // copy.
                         trace!(
                             "    comp can always derive debug if it's a Union and no template parameters"
                         );
                         return ConstrainResult::Same;
                     }
 
                     // https://github.com/rust-lang/rust/issues/36640
                     if info.self_template_params(self.ctx).is_some() ||
--- a/third_party/rust/bindgen/src/ir/analysis/derive_default.rs
+++ b/third_party/rust/bindgen/src/ir/analysis/derive_default.rs
@@ -260,16 +260,21 @@ impl<'ctx> MonotoneFramework for CannotD
             }
 
             TypeKind::Comp(ref info) => {
                 assert!(
                     !info.has_non_type_template_params(),
                     "The early ty.is_opaque check should have handled this case"
                 );
 
+                if info.is_forward_declaration() {
+                    trace!("    cannot derive Default for forward decls");
+                    return self.insert(id);
+                }
+
                 if info.kind() == CompKind::Union {
                     if self.ctx.options().rust_features().untagged_union() {
                         trace!("    cannot derive Default for Rust unions");
                         return self.insert(id);
                     }
 
                     if ty.layout(self.ctx).map_or(true, |l| {
                         l.opaque().can_trivially_derive_default()
--- a/third_party/rust/bindgen/src/ir/analysis/derive_hash.rs
+++ b/third_party/rust/bindgen/src/ir/analysis/derive_hash.rs
@@ -246,16 +246,21 @@ impl<'ctx> MonotoneFramework for CannotD
             }
 
             TypeKind::Comp(ref info) => {
                 assert!(
                     !info.has_non_type_template_params(),
                     "The early ty.is_opaque check should have handled this case"
                 );
 
+                if info.is_forward_declaration() {
+                    trace!("    cannot derive Hash for forward decls");
+                    return self.insert(id);
+                }
+
                 if info.kind() == CompKind::Union {
                     if self.ctx.options().rust_features().untagged_union() {
                         trace!("    cannot derive Hash for Rust unions");
                         return self.insert(id);
                     }
 
                     if ty.layout(self.ctx).map_or(true, |l| {
                         l.opaque().can_trivially_derive_hash()
--- a/third_party/rust/bindgen/src/ir/analysis/derive_partialeq_or_partialord.rs
+++ b/third_party/rust/bindgen/src/ir/analysis/derive_partialeq_or_partialord.rs
@@ -231,16 +231,21 @@ impl<'ctx> CannotDerivePartialEqOrPartia
             }
 
             TypeKind::Comp(ref info) => {
                 assert!(
                     !info.has_non_type_template_params(),
                     "The early ty.is_opaque check should have handled this case"
                 );
 
+                if info.is_forward_declaration() {
+                    trace!("    cannot derive for forward decls");
+                    return CanDerive::No;
+                }
+
                 if info.kind() == CompKind::Union {
                     if self.ctx.options().rust_features().untagged_union() {
                         trace!(
                             "    cannot derive `PartialEq`/`PartialOrd` for Rust unions"
                         );
                         return CanDerive::No;
                     }
 
--- a/third_party/rust/bindgen/src/ir/comp.rs
+++ b/third_party/rust/bindgen/src/ir/comp.rs
@@ -1538,23 +1538,30 @@ impl CompInfo {
     }
 
     /// Returns whether the current union can be represented as a Rust `union`
     ///
     /// Requirements:
     ///     1. Current RustTarget allows for `untagged_union`
     ///     2. Each field can derive `Copy`
     pub fn can_be_rust_union(&self, ctx: &BindgenContext) -> bool {
-        ctx.options().rust_features().untagged_union() &&
-            self.fields().iter().all(|f| match *f {
-                Field::DataMember(ref field_data) => {
-                    field_data.ty().can_derive_copy(ctx)
-                }
-                Field::Bitfields(_) => true,
-            })
+        if !ctx.options().rust_features().untagged_union() {
+            return false;
+        }
+
+        if self.is_forward_declaration() {
+            return false;
+        }
+
+        self.fields().iter().all(|f| match *f {
+            Field::DataMember(ref field_data) => {
+                field_data.ty().can_derive_copy(ctx)
+            }
+            Field::Bitfields(_) => true,
+        })
     }
 }
 
 impl DotAttributes for CompInfo {
     fn dot_attributes<W>(
         &self,
         ctx: &BindgenContext,
         out: &mut W,
--- a/third_party/rust/bindgen/src/ir/context.rs
+++ b/third_party/rust/bindgen/src/ir/context.rs
@@ -19,17 +19,17 @@ use super::ty::{FloatKind, Type, TypeKin
 use super::function::Function;
 use super::super::time::Timer;
 use BindgenOptions;
 use callbacks::ParseCallbacks;
 use cexpr;
 use clang::{self, Cursor};
 use clang_sys;
 use parse::ClangItemParser;
-use proc_macro2;
+use quote;
 use std::borrow::Cow;
 use std::cell::Cell;
 use std::collections::{HashMap, HashSet, hash_map};
 use std::collections::btree_map::{self, BTreeMap};
 use std::iter::IntoIterator;
 use std::mem;
 
 /// An identifier for some kind of IR item.
@@ -217,27 +217,26 @@ where
     }
 }
 
 impl<T> CanDeriveDefault for T
 where
     T: Copy + Into<ItemId>
 {
     fn can_derive_default(&self, ctx: &BindgenContext) -> bool {
-        ctx.options().derive_default &&
-            ctx.lookup_can_derive_default(*self)
+        ctx.options().derive_default && ctx.lookup_can_derive_default(*self)
     }
 }
 
 impl<'a, T> CanDeriveCopy<'a> for T
 where
     T: Copy + Into<ItemId>
 {
     fn can_derive_copy(&self, ctx: &BindgenContext) -> bool {
-        ctx.lookup_can_derive_copy(*self)
+        ctx.options().derive_copy && ctx.lookup_can_derive_copy(*self)
     }
 }
 
 impl<T> CanDeriveHash for T
 where
     T: Copy + Into<ItemId>
 {
     fn can_derive_hash(&self, ctx: &BindgenContext) -> bool {
@@ -876,29 +875,29 @@ impl BindgenContext {
             s = s.replace("$", "_");
             s.push_str("_");
             return Cow::Owned(s);
         }
         Cow::Borrowed(name)
     }
 
     /// Returns a mangled name as a rust identifier.
-    pub fn rust_ident<S>(&self, name: S) -> proc_macro2::Term
+    pub fn rust_ident<S>(&self, name: S) -> quote::Ident
     where
         S: AsRef<str>
     {
         self.rust_ident_raw(self.rust_mangle(name.as_ref()))
     }
 
     /// Returns a mangled name as a rust identifier.
-    pub fn rust_ident_raw<T>(&self, name: T) -> proc_macro2::Term
+    pub fn rust_ident_raw<T>(&self, name: T) -> quote::Ident
     where
-        T: AsRef<str>
+        T: Into<quote::Ident>
     {
-        proc_macro2::Term::intern(name.as_ref())
+        name.into()
     }
 
     /// Iterate over all items that have been defined.
     pub fn items<'a>(&'a self) -> btree_map::Iter<'a, ItemId, Item> {
         self.items.iter()
     }
 
     /// Have we collected all unresolved type references yet?
@@ -2316,17 +2315,17 @@ impl BindgenContext {
         };
 
         self.whitelisted = Some(whitelisted);
         self.codegen_items = Some(codegen_items);
     }
 
     /// Convenient method for getting the prefix to use for most traits in
     /// codegen depending on the `use_core` option.
-    pub fn trait_prefix(&self) -> proc_macro2::Term {
+    pub fn trait_prefix(&self) -> quote::Ident {
         if self.options().use_core {
             self.rust_ident_raw("core")
         } else {
             self.rust_ident_raw("std")
         }
     }
 
     /// Call if a bindgen complex is generated
@@ -2447,16 +2446,17 @@ impl BindgenContext {
         assert!(
             self.in_codegen_phase(),
             "We only compute can_derive_debug when we enter codegen"
         );
 
         // Look up the computed value for whether the item with `id` can
         // derive `Copy` or not.
         let id = id.into();
+
         !self.lookup_has_type_param_in_array(id) &&
             !self.cannot_derive_copy.as_ref().unwrap().contains(&id)
     }
 
     /// Compute whether the type has type parameter in array.
     fn compute_has_type_param_in_array(&mut self) {
         let _t = self.timer("compute_has_type_param_in_array");
         assert!(self.has_type_param_in_array.is_none());
--- a/third_party/rust/bindgen/src/ir/enum_ty.rs
+++ b/third_party/rust/bindgen/src/ir/enum_ty.rs
@@ -216,16 +216,21 @@ impl EnumVariant {
         &self.name
     }
 
     /// Get this variant's value.
     pub fn val(&self) -> EnumVariantValue {
         self.val
     }
 
+    /// Get this variant's documentation.
+    pub fn comment(&self) -> Option<&str> {
+        self.comment.as_ref().map(|s| &**s)
+    }
+
     /// Returns whether this variant should be enforced to be a constant by code
     /// generation.
     pub fn force_constification(&self) -> bool {
         self.custom_behavior.map_or(false, |b| {
             b == EnumVariantCustomBehavior::Constify
         })
     }
 
--- a/third_party/rust/bindgen/src/ir/function.rs
+++ b/third_party/rust/bindgen/src/ir/function.rs
@@ -188,17 +188,17 @@ impl Abi {
             Abi::Unknown(..) => true,
             _ => false,
         }
     }
 }
 
 impl quote::ToTokens for Abi {
     fn to_tokens(&self, tokens: &mut quote::Tokens) {
-        tokens.append_all(match *self {
+        tokens.append(match *self {
             Abi::C => quote! { "C" },
             Abi::Stdcall => quote! { "stdcall" },
             Abi::Fastcall => quote! { "fastcall" },
             Abi::ThisCall => quote! { "thiscall" },
             Abi::Aapcs => quote! { "aapcs" },
             Abi::Win64 => quote! { "win64" },
             Abi::Unknown(cc) => panic!(
                 "Cannot turn unknown calling convention to tokens: {:?}",
@@ -240,39 +240,46 @@ fn get_abi(cc: CXCallingConv) -> Abi {
 }
 
 /// Get the mangled name for the cursor's referent.
 pub fn cursor_mangling(
     ctx: &BindgenContext,
     cursor: &clang::Cursor,
 ) -> Option<String> {
     use clang_sys;
+
     if !ctx.options().enable_mangling {
         return None;
     }
 
     // We early return here because libclang may crash in some cases
     // if we pass in a variable inside a partially specialized template.
     // See rust-lang-nursery/rust-bindgen#67, and rust-lang-nursery/rust-bindgen#462.
     if cursor.is_in_non_fully_specialized_template() {
         return None;
     }
 
+    let is_destructor = cursor.kind() == clang_sys::CXCursor_Destructor;
     if let Ok(mut manglings) = cursor.cxx_manglings() {
-        if let Some(m) = manglings.pop() {
+        while let Some(m) = manglings.pop() {
+            // Only generate the destructor group 1, see below.
+            if is_destructor && !m.ends_with("D1Ev") {
+                continue;
+            }
+
             return Some(m);
         }
     }
 
     let mut mangling = cursor.mangling();
     if mangling.is_empty() {
         return None;
     }
 
-    if cursor.kind() == clang_sys::CXCursor_Destructor {
+    if is_destructor {
         // With old (3.8-) libclang versions, and the Itanium ABI, clang returns
         // the "destructor group 0" symbol, which means that it'll try to free
         // memory, which definitely isn't what we want.
         //
         // Explicitly force the destructor group 1 symbol.
         //
         // See http://refspecs.linuxbase.org/cxxabi-1.83.html#mangling-special
         // for the reference, and http://stackoverflow.com/a/6614369/1091587 for
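For context on the `D1Ev` filter above: under the Itanium C++ ABI a destructor for a hypothetical class `Foo` is emitted under several mangled symbols, e.g. `_ZN3FooD0Ev` (deleting destructor), `_ZN3FooD1Ev` (complete-object destructor) and `_ZN3FooD2Ev` (base-object destructor). The loop keeps only the group-1 symbol, so calling the generated binding never frees the object's memory.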
--- a/third_party/rust/bindgen/src/ir/objc.rs
+++ b/third_party/rust/bindgen/src/ir/objc.rs
@@ -8,17 +8,16 @@ use clang;
 use clang_sys::CXChildVisit_Continue;
 use clang_sys::CXCursor_ObjCCategoryDecl;
 use clang_sys::CXCursor_ObjCClassMethodDecl;
 use clang_sys::CXCursor_ObjCClassRef;
 use clang_sys::CXCursor_ObjCInstanceMethodDecl;
 use clang_sys::CXCursor_ObjCProtocolDecl;
 use clang_sys::CXCursor_ObjCProtocolRef;
 use quote;
-use proc_macro2;
 
 /// Objective C interface as used in TypeKind
 ///
 /// Also protocols and categories are parsed as this type
 #[derive(Debug)]
 pub struct ObjCInterface {
     /// The name
     /// like, NSObject
@@ -212,17 +211,17 @@ impl ObjCMethod {
         self.is_class_method
     }
 
     /// Formats the method call
     pub fn format_method_call(&self, args: &[quote::Tokens]) -> quote::Tokens {
         let split_name: Vec<_> = self.name
             .split(':')
             .filter(|p| !p.is_empty())
-            .map(proc_macro2::Term::intern)
+            .map(quote::Ident::new)
             .collect();
 
         // No arguments
         if args.len() == 0 && split_name.len() == 1 {
             let name = &split_name[0];
             return quote! {
                 #name
             };
@@ -235,20 +234,19 @@ impl ObjCMethod {
                 args,
                 split_name
             );
         }
 
         // Get arguments without type signatures to pass to `msg_send!`
         let mut args_without_types = vec![];
         for arg in args.iter() {
-            let arg = arg.to_string();
-            let name_and_sig: Vec<&str> = arg.split(' ').collect();
+            let name_and_sig: Vec<&str> = arg.as_str().split(' ').collect();
             let name = name_and_sig[0];
-            args_without_types.push(proc_macro2::Term::intern(name))
+            args_without_types.push(quote::Ident::new(name))
         };
 
         let args = split_name
             .into_iter()
             .zip(args_without_types)
             .map(|(arg, arg_val)| quote! { #arg : #arg_val });
 
         quote! {
--- a/third_party/rust/bindgen/src/ir/var.rs
+++ b/third_party/rust/bindgen/src/ir/var.rs
@@ -1,10 +1,11 @@
 //! Intermediate representation of variables.
 
+use callbacks::MacroParsingBehavior;
 use super::context::{BindgenContext, TypeId};
 use super::dot::DotAttributes;
 use super::function::cursor_mangling;
 use super::int::IntKind;
 use super::item::Item;
 use super::ty::{FloatKind, TypeKind};
 use cexpr;
 use clang;
@@ -107,29 +108,48 @@ impl DotAttributes for Var {
                 mangled
             ));
         }
 
         Ok(())
     }
 }
 
+// TODO(emilio): we could make this more (or less) granular, I guess.
+fn default_macro_constant_type(value: i64) -> IntKind {
+    if value < 0 {
+        if value < i32::min_value() as i64 {
+            IntKind::I64
+        } else {
+            IntKind::I32
+        }
+    } else if value > u32::max_value() as i64 {
+        IntKind::U64
+    } else {
+        IntKind::U32
+    }
+}
+
 impl ClangSubItemParser for Var {
     fn parse(
         cursor: clang::Cursor,
         ctx: &mut BindgenContext,
     ) -> Result<ParseResult<Self>, ParseError> {
         use clang_sys::*;
         use cexpr::expr::EvalResult;
         use cexpr::literal::CChar;
         match cursor.kind() {
             CXCursor_MacroDefinition => {
-
-                if let Some(visitor) = ctx.parse_callbacks() {
-                    visitor.parsed_macro(&cursor.spelling());
+                if let Some(callbacks) = ctx.parse_callbacks() {
+                    match callbacks.will_parse_macro(&cursor.spelling()) {
+                        MacroParsingBehavior::Ignore => {
+                            return Err(ParseError::Continue);
+                        }
+                        MacroParsingBehavior::Default => {}
+                    }
                 }
 
                 let value = parse_macro(ctx, &cursor);
 
                 let (id, value) = match value {
                     Some(v) => v,
                     None => return Err(ParseError::Continue),
                 };
@@ -179,27 +199,17 @@ impl ClangSubItemParser for Var {
                             true,
                             ctx,
                         );
                         (TypeKind::Pointer(char_ty), VarType::String(val))
                     }
                     EvalResult::Int(Wrapping(value)) => {
                         let kind = ctx.parse_callbacks()
                             .and_then(|c| c.int_macro(&name, value))
-                            .unwrap_or_else(|| if value < 0 {
-                                if value < i32::min_value() as i64 {
-                                    IntKind::LongLong
-                                } else {
-                                    IntKind::Int
-                                }
-                            } else if value > u32::max_value() as i64 {
-                                IntKind::ULongLong
-                            } else {
-                                IntKind::UInt
-                            });
+                            .unwrap_or_else(|| default_macro_constant_type(value));
 
                         (TypeKind::Int(kind), VarType::Int(value))
                     }
                 };
 
                 let ty = Item::builtin_type(type_kind, true, ctx);
 
                 Ok(ParseResult::New(
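
The hunk above replaces the old `parsed_macro` notification with the new `will_parse_macro` hook, which lets a callback veto macro parsing before `parse_macro` runs. A minimal consumer-side sketch (the struct name and the `NULL` check are hypothetical; `ParseCallbacks`, `MacroParsingBehavior::Ignore` and `MacroParsingBehavior::Default` are the items used in the diff):

```rust
use bindgen::callbacks::{MacroParsingBehavior, ParseCallbacks};

/// Hypothetical callback that tells bindgen to skip a troublesome macro.
#[derive(Debug)]
struct SkipNullMacro;

impl ParseCallbacks for SkipNullMacro {
    fn will_parse_macro(&self, name: &str) -> MacroParsingBehavior {
        if name == "NULL" {
            // Var::parse turns this into ParseError::Continue, so no
            // constant is generated for the macro.
            MacroParsingBehavior::Ignore
        } else {
            MacroParsingBehavior::Default
        }
    }
}
```

It would be registered through the existing `Builder::parse_callbacks(Box::new(SkipNullMacro))` hook (assuming that builder method, which this diff does not touch).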
--- a/third_party/rust/bindgen/src/lib.rs
+++ b/third_party/rust/bindgen/src/lib.rs
@@ -18,17 +18,16 @@ extern crate cexpr;
 #[allow(unused_extern_crates)]
 extern crate cfg_if;
 extern crate clang_sys;
 #[macro_use]
 extern crate lazy_static;
 extern crate peeking_take_while;
 #[macro_use]
 extern crate quote;
-extern crate proc_macro2;
 extern crate regex;
 extern crate which;
 
 #[cfg(feature = "logging")]
 #[macro_use]
 extern crate log;
 
 #[cfg(not(feature = "logging"))]
@@ -1106,51 +1105,55 @@ impl Builder {
     }
 
     /// Preprocess and dump the input header files to disk.
     ///
     /// This is useful when debugging bindgen, using C-Reduce, or when filing
     /// issues. The resulting file will be named something like `__bindgen.i` or
     /// `__bindgen.ii`
     pub fn dump_preprocessed_input(&self) -> io::Result<()> {
+        fn check_is_cpp(name_file: &str) -> bool {
+            name_file.ends_with(".hpp") || name_file.ends_with(".hxx")
+                || name_file.ends_with(".hh")
+                || name_file.ends_with(".h++")
+        }
+
         let clang = clang_sys::support::Clang::find(None, &[]).ok_or_else(|| {
             io::Error::new(io::ErrorKind::Other, "Cannot find clang executable")
         })?;
 
         // The contents of a wrapper file that includes all the input header
         // files.
         let mut wrapper_contents = String::new();
 
         // Whether we are working with C or C++ inputs.
-        let mut is_cpp = false;
+        let mut is_cpp = self.options.clang_args.windows(2).any(|w| {
+            w[0] == "-x=c++" || w[1] == "-x=c++" || w == &["-x", "c++"]
+        });
 
         // For each input header, add `#include "$header"`.
         for header in &self.input_headers {
-            is_cpp |= header.ends_with(".hpp");
+            is_cpp |= check_is_cpp(header);
 
             wrapper_contents.push_str("#include \"");
             wrapper_contents.push_str(header);
             wrapper_contents.push_str("\"\n");
         }
 
         // For each input header content, add a prefix line of `#line 0 "$name"`
         // followed by the contents.
         for &(ref name, ref contents) in &self.input_header_contents {
-            is_cpp |= name.ends_with(".hpp");
+            is_cpp |= check_is_cpp(name);
 
             wrapper_contents.push_str("#line 0 \"");
             wrapper_contents.push_str(name);
             wrapper_contents.push_str("\"\n");
             wrapper_contents.push_str(contents);
         }
 
-        is_cpp |= self.options.clang_args.windows(2).any(|w| {
-            w[0] == "-x=c++" || w[1] == "-x=c++" || w == &["-x", "c++"]
-        });
-
         let wrapper_path = PathBuf::from(if is_cpp {
             "__bindgen.cpp"
         } else {
             "__bindgen.c"
         });
 
         {
             let mut wrapper_file = File::create(&wrapper_path)?;
@@ -1186,32 +1189,32 @@ impl Builder {
                 io::ErrorKind::Other,
                 "clang exited with non-zero status",
             ))
         }
     }
 
     /// Don't derive `PartialEq` for a given type. Regular
     /// expressions are supported.
-    pub fn no_partialeq(mut self, arg: String) -> Builder {
-        self.options.no_partialeq_types.insert(arg);
+    pub fn no_partialeq<T: Into<String>>(mut self, arg: T) -> Builder {
+        self.options.no_partialeq_types.insert(arg.into());
         self
     }
 
     /// Don't derive `Copy` for a given type. Regular
     /// expressions are supported.
-    pub fn no_copy(mut self, arg: String) -> Self {
-        self.options.no_copy_types.insert(arg);
+    pub fn no_copy<T: Into<String>>(mut self, arg: T) -> Self {
+        self.options.no_copy_types.insert(arg.into());
         self
     }
 
     /// Don't derive `Hash` for a given type. Regular
     /// expressions are supported.
-    pub fn no_hash(mut self, arg: String) -> Builder {
-        self.options.no_hash_types.insert(arg);
+    pub fn no_hash<T: Into<String>>(mut self, arg: T) -> Builder {
+        self.options.no_hash_types.insert(arg.into());
         self
     }
 }
 
 /// Configuration options for generated bindings.
 #[derive(Debug)]
 struct BindgenOptions {
     /// The set of types that have been blacklisted and should not appear
@@ -1688,25 +1691,25 @@ impl Bindings {
             writer.write(line.as_bytes())?;
             writer.write("\n".as_bytes())?;
         }
 
         if !self.options.raw_lines.is_empty() {
             writer.write("\n".as_bytes())?;
         }
 
-        let bindings = self.module.to_string();
+        let bindings = self.module.as_str().to_string();
 
         match self.rustfmt_generated_string(&bindings) {
             Ok(rustfmt_bindings) => {
                 writer.write(rustfmt_bindings.as_bytes())?;
             },
             Err(err) => {
                 eprintln!("{:?}", err);
-                writer.write(bindings.as_bytes())?;
+                writer.write(bindings.as_str().as_bytes())?;
             },
         }
         Ok(())
     }
 
     /// Checks if rustfmt_bindings is set and runs rustfmt on the string
     fn rustfmt_generated_string<'a>(
         &self,
--- a/third_party/rust/bindgen/src/main.rs
+++ b/third_party/rust/bindgen/src/main.rs
@@ -14,22 +14,17 @@ use std::panic;
 #[cfg(not(feature = "logging"))]
 mod log_stubs;
 
 mod options;
 use options::builder_from_flags;
 
 pub fn main() {
     #[cfg(feature = "logging")]
-    log::set_logger(|max_log_level| {
-        use env_logger::Logger;
-        let env_logger = Logger::new();
-        max_log_level.set(env_logger.filter());
-        Box::new(env_logger)
-    }).expect("Failed to set logger.");
+    env_logger::init();
 
     let bind_args: Vec<_> = env::args().collect();
 
     let version = clang_version();
     let expected_version = if cfg!(feature = "testing_only_libclang_4") {
         (4, 0)
     } else if cfg!(feature = "testing_only_libclang_3_8") {
         (3, 8)
--- a/third_party/rust/bindgen/src/options.rs
+++ b/third_party/rust/bindgen/src/options.rs
@@ -584,28 +584,28 @@ where
             ));
         }
 
         builder = builder.rustfmt_configuration_file(Some(path));
     }
 
     if let Some(no_partialeq) = matches.values_of("no-partialeq") {
         for regex in no_partialeq {
-            builder = builder.no_partialeq(String::from(regex));
+            builder = builder.no_partialeq(regex);
         }
     }
 
     if let Some(no_copy) = matches.values_of("no-copy") {
         for regex in no_copy {
-            builder = builder.no_copy(String::from(regex));
+            builder = builder.no_copy(regex);
         }
     }
 
     if let Some(no_hash) = matches.values_of("no-hash") {
         for regex in no_hash {
-            builder = builder.no_hash(String::from(regex));
+            builder = builder.no_hash(regex);
         }
     }
 
     let verbose = matches.is_present("verbose");
 
     Ok((builder, output, verbose))
 }
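
These CLI hunks lean on the relaxed `T: Into<String>` signatures for `no_partialeq`, `no_copy` and `no_hash` shown in the lib.rs diff above, so the raw `&str` regexes from clap can be passed straight through. For a library consumer the equivalent call looks roughly like this (the header name and the type-name regexes are placeholder values):

```rust
// Builder-side sketch; "wrapper.h", "Foo", "Bar.*" and "Baz" are illustrative.
let bindings = bindgen::Builder::default()
    .header("wrapper.h")
    .no_partialeq("Foo")
    .no_copy("Bar.*")
    .no_hash("Baz")
    .generate()
    .expect("Unable to generate bindings");
```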
deleted file mode 100644
--- a/third_party/rust/cfg-if-0.1.1/.cargo-checksum.json
+++ /dev/null
@@ -1,1 +0,0 @@
-{"files":{".travis.yml":"50f67b20a376d437e0706d9bbb0343c677295529991370b2962f814c0b2efea3","Cargo.toml":"d217067290b088db7f696e02ecc25461a0277e5e5e3bca6c7c0412b604c3526b","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"3fa9368c60bc701dea294fbacae0469188c4be1de79f82e972bb9b321776cd52","src/lib.rs":"74b287a91a46dc9d0c179053b439fd40c43ad5389fbd5199fff1cce0094726a6","tests/xcrate.rs":"30dcb70fbb9c96fda2b7825592558279f534776f72e2a8a0a3e26df4dedb3caa"},"package":"d0c47d456a36ebf0536a6705c83c1cbbcb9255fbc1d905a6ded104f479268a29"}
\ No newline at end of file
deleted file mode 100644
--- a/third_party/rust/cfg-if-0.1.1/.travis.yml
+++ /dev/null
@@ -1,23 +0,0 @@
-language: rust
-rust:
-  - stable
-  - beta
-  - nightly
-sudo: false
-before_script:
-  - pip install 'travis-cargo<0.2' --user && export PATH=$HOME/.local/bin:$PATH
-script:
-  - cargo build --verbose
-  - cargo test --verbose
-  - rustdoc --test README.md -L target/debug -L target/debug/deps
-  - cargo doc --no-deps
-after_success:
-  - travis-cargo --only nightly doc-upload
-env:
-  global:
-    secure: "TyMGH+sbPmKs9lKCziKShxWr3G6im0owEchVrbUChWnQIQv1WydXftFoEoUsVl6qZspjehWK1b1AsnIgCXK0HtEi4DnqLsxs0s36bOjfg5yHBT/pETTr6kcq7KAL4Be4GmI331k6gT1Oi0TPFp7Sg9xpiWsQqKIHA5Szk2wpFQ8="
-
-
-notifications:
-  email:
-    on_success: never
deleted file mode 100644
--- a/third_party/rust/cfg-if-0.1.1/Cargo.toml
+++ /dev/null
@@ -1,14 +0,0 @@
-[package]
-name = "cfg-if"
-version = "0.1.1"
-authors = ["Alex Crichton <alex@alexcrichton.com>"]
-license = "MIT/Apache-2.0"
-readme = "README.md"
-repository = "https://github.com/alexcrichton/cfg-if"
-homepage = "https://github.com/alexcrichton/cfg-if"
-documentation = "http://alexcrichton.com/cfg-if"
-description = """
-A macro to ergonomically define an item depending on a large number of #[cfg]
-parameters. Structured like an if-else chain, the first matching branch is the
-item that gets emitted.
-"""
deleted file mode 100644
--- a/third_party/rust/cfg-if-0.1.1/LICENSE-MIT
+++ /dev/null
@@ -1,25 +0,0 @@
-Copyright (c) 2014 Alex Crichton
-
-Permission is hereby granted, free of charge, to any
-person obtaining a copy of this software and associated
-documentation files (the "Software"), to deal in the
-Software without restriction, including without
-limitation the rights to use, copy, modify, merge,
-publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software
-is furnished to do so, subject to the following
-conditions:
-
-The above copyright notice and this permission notice
-shall be included in all copies or substantial portions
-of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
-ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
-TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
-PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
-IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
deleted file mode 100644
--- a/third_party/rust/cfg-if-0.1.1/README.md
+++ /dev/null
@@ -1,44 +0,0 @@
-# cfg-if
-
-[![Build Status](https://travis-ci.org/alexcrichton/cfg-if.svg?branch=master)](https://travis-ci.org/alexcrichton/cfg-if)
-
-[Documentation](http://alexcrichton.com/cfg-if)
-
-A macro to ergonomically define an item depending on a large number of #[cfg]
-parameters. Structured like an if-else chain, the first matching branch is the
-item that gets emitted.
-
-```toml
-[dependencies]
-cfg-if = "0.1"
-```
-
-## Example
-
-```rust
-#[macro_use]
-extern crate cfg_if;
-
-cfg_if! {
-    if #[cfg(unix)] {
-        fn foo() { /* unix specific functionality */ }
-    } else if #[cfg(target_pointer_width = "32")] {
-        fn foo() { /* non-unix, 32-bit functionality */ }
-    } else {
-        fn foo() { /* fallback implementation */ }
-    }
-}
-
-fn main() {
-    foo();
-}
-```
-
-# License
-
-`cfg-if` is primarily distributed under the terms of both the MIT license and
-the Apache License (Version 2.0), with portions covered by various BSD-like
-licenses.
-
-See LICENSE-APACHE, and LICENSE-MIT for details.
-
deleted file mode 100644
--- a/third_party/rust/cfg-if-0.1.1/src/lib.rs
+++ /dev/null
@@ -1,103 +0,0 @@
-//! A macro for defining #[cfg] if-else statements.
-//!
-//! The macro provided by this crate, `cfg_if`, is similar to the `if/elif` C
-//! preprocessor macro by allowing definition of a cascade of `#[cfg]` cases,
-//! emitting the implementation which matches first.
-//!
-//! This allows you to conveniently provide a long list #[cfg]'d blocks of code
-//! without having to rewrite each clause multiple times.
-//!
-//! # Example
-//!
-//! ```
-//! #[macro_use]
-//! extern crate cfg_if;
-//!
-//! cfg_if! {
-//!     if #[cfg(unix)] {
-//!         fn foo() { /* unix specific functionality */ }
-//!     } else if #[cfg(target_pointer_width = "32")] {
-//!         fn foo() { /* non-unix, 32-bit functionality */ }
-//!     } else {
-//!         fn foo() { /* fallback implementation */ }
-//!     }
-//! }
-//!
-//! # fn main() {}
-//! ```
-
-#![no_std]
-
-#![doc(html_root_url = "http://alexcrichton.com/cfg-if")]
-#![deny(missing_docs)]
-#![cfg_attr(test, deny(warnings))]
-
-#[macro_export]
-macro_rules! cfg_if {
-    ($(
-        if #[cfg($($meta:meta),*)] { $($it:item)* }
-    ) else * else {
-        $($it2:item)*
-    }) => {
-        __cfg_if_items! {
-            () ;
-            $( ( ($($meta),*) ($($it)*) ), )*
-            ( () ($($it2)*) ),
-        }
-    }
-}
-
-#[macro_export]
-#[doc(hidden)]
-macro_rules! __cfg_if_items {
-    (($($not:meta,)*) ; ) => {};
-    (($($not:meta,)*) ; ( ($($m:meta),*) ($($it:item)*) ), $($rest:tt)*) => {
-        __cfg_if_apply! { cfg(all($($m,)* not(any($($not),*)))), $($it)* }
-        __cfg_if_items! { ($($not,)* $($m,)*) ; $($rest)* }
-    }
-}
-
-#[macro_export]
-#[doc(hidden)]
-macro_rules! __cfg_if_apply {
-    ($m:meta, $($it:item)*) => {
-        $(#[$m] $it)*
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    cfg_if! {
-        if #[cfg(test)] {
-            use core::option::Option as Option2;
-            fn works1() -> Option2<u32> { Some(1) }
-        } else {
-            fn works1() -> Option<u32> { None }
-        }
-    }
-
-    cfg_if! {
-        if #[cfg(foo)] {
-            fn works2() -> bool { false }
-        } else if #[cfg(test)] {
-            fn works2() -> bool { true }
-        } else {
-            fn works2() -> bool { false }
-        }
-    }
-
-    cfg_if! {
-        if #[cfg(foo)] {
-            fn works3() -> bool { false }
-        } else {
-            fn works3() -> bool { true }
-        }
-    }
-
-    #[test]
-    fn it_works() {
-        assert!(works1().is_some());
-        assert!(works2());
-        assert!(works3());
-    }
-}
deleted file mode 100644
--- a/third_party/rust/cfg-if-0.1.1/tests/xcrate.rs
+++ /dev/null
@@ -1,17 +0,0 @@
-#[macro_use]
-extern crate cfg_if;
-
-cfg_if! {
-    if #[cfg(foo)] {
-        fn works() -> bool { false }
-    } else if #[cfg(test)] {
-        fn works() -> bool { true }
-    } else {
-        fn works() -> bool { false }
-    }
-}
-
-#[test]
-fn smoke() {
-    assert!(works());
-}
copy from third_party/rust/env_logger/.cargo-checksum.json
copy to third_party/rust/env_logger-0.4.3/.cargo-checksum.json
copy from third_party/rust/env_logger/Cargo.toml
copy to third_party/rust/env_logger-0.4.3/Cargo.toml
rename from third_party/rust/cfg-if-0.1.1/LICENSE-APACHE
rename to third_party/rust/env_logger-0.4.3/LICENSE-APACHE
new file mode 100644
--- /dev/null
+++ b/third_party/rust/env_logger-0.4.3/LICENSE-MIT
@@ -0,0 +1,25 @@
+Copyright (c) 2014 The Rust Project Developers
+
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
copy from third_party/rust/env_logger/src/lib.rs
copy to third_party/rust/env_logger-0.4.3/src/lib.rs
rename from third_party/rust/env_logger/src/regex.rs
rename to third_party/rust/env_logger-0.4.3/src/regex.rs
rename from third_party/rust/env_logger/src/string.rs
rename to third_party/rust/env_logger-0.4.3/src/string.rs
copy from third_party/rust/env_logger/tests/regexp_filter.rs
copy to third_party/rust/env_logger-0.4.3/tests/regexp_filter.rs
--- a/third_party/rust/env_logger/.cargo-checksum.json
+++ b/third_party/rust/env_logger/.cargo-checksum.json
@@ -1,1 +1,1 @@
-{"files":{"Cargo.toml":"3f4cf313ce063b0d3888a3acdc3a31ed99432415aa4204b143fd39de0d037996","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"6485b8ed310d3f0340bf1ad1f47645069ce4069dcc6bb46c7d5c6faf41de1fdb","src/lib.rs":"5fac719110645152e649993bfd7edb8ca755155844867cbfa8077b486ddde9e1","src/regex.rs":"d8e2a6958d4ed8084867063aae4b5c77ffc5d271dc2e17909d56c5a5e1552034","src/string.rs":"26ede9ab41a2673c3ad6001bc1802c005ce9a4f190f55860a24aa66b6b71bbc7","tests/regexp_filter.rs":"a3f9c01623e90e54b247a62c53b25caf5f502d054f28c0bdf92abbea486a95b5"},"package":"3ddf21e73e016298f5cb37d6ef8e8da8e39f91f9ec8b0df44b7deb16a9f8cd5b"}
\ No newline at end of file
+{"files":{".travis.yml":"5c47cc16744fb57728e07c5d76dc48182395b24866b602801fe2431db213a176","Cargo.toml":"4892cb24005bbf043a0e6fcaa2e5d78999d33ae10ed2a60e7db1590643f54f49","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"6485b8ed310d3f0340bf1ad1f47645069ce4069dcc6bb46c7d5c6faf41de1fdb","README.md":"9c510b6bf284e86af605d762f44bbfa6eeb312045520458b80bd4f38223ccb19","examples/custom_format.rs":"b0321c84bad849626caf5e92ac154537ee894c7fcf220e426ed0a41b3935a0fc","examples/custom_logger.rs":"565d722b190127b7cd8bd56a42eab79b417368c3c370b717c75037ebc4b26106","examples/default.rs":"21a67d1a70a3f3888ccf6c5d4723a12b374d2340ac0321c7748d6a8cbb8c0711","examples/direct_logger.rs":"549f6a10e0903d06aca2cc7ba82415b07a23392676101c9bc7aa72b4a9b0b9e2","src/filter/mod.rs":"1bb0b6b90bafbceb5554c372fd2c61ff83b9aab45eda972f9eeed4f23f665aee","src/filter/regex.rs":"5fff47d1d4d0aa3f2bab90636127d3e72aebf800c3b78faba99637220ffdf865","src/filter/string.rs":"52bbd047c31a1afdb3cd1c11629b956f21b3f47bf22e06421baf3d693a045e59","src/fmt.rs":"d7f3cc571cb3b75c146b80a482f355d35a18ffba1ae0d959ef5813a4c45e2042","src/lib.rs":"9899d72c97f688260f61dee7e44ce95705fb8382c3eb33697c3c0ae12153fbb0","tests/log-in-log.rs":"41126910998adfbac771c2a1237fecbc5437344f8e4dfc2f93235bab764a087e","tests/regexp_filter.rs":"44aa6c39de894be090e37083601e501cfffb15e3c0cd36209c48abdf3e2cb120"},"package":"f3cc21490995c841d68e00276eba02071ebb269ec24011d5728bd00eabd39e31"}
\ No newline at end of file
new file mode 100644
--- /dev/null
+++ b/third_party/rust/env_logger/.travis.yml
@@ -0,0 +1,20 @@
+language: rust
+sudo: false
+rust:
+  - 1.18.0
+  - stable
+  - beta
+  - nightly
+before_script:
+  - pip install 'travis-cargo<0.2' --user && export PATH=$HOME/.local/bin:$PATH
+script:
+  - cargo build --verbose
+  - cargo build --verbose --no-default-features
+  - cargo test --verbose
+  - cargo test --verbose --no-default-features
+after_success:
+  - travis-cargo --only nightly doc-upload
+
+notifications:
+  email:
+    on_success: never
--- a/third_party/rust/env_logger/Cargo.toml
+++ b/third_party/rust/env_logger/Cargo.toml
@@ -7,29 +7,44 @@
 #
 # If you believe there's an error in this file please file an
 # issue against the rust-lang/cargo repository. If you're
 # editing this file be aware that the upstream Cargo.toml
 # will likely look very different (and much more reasonable)
 
 [package]
 name = "env_logger"
-version = "0.4.3"
+version = "0.5.4"
 authors = ["The Rust Project Developers"]
 description = "A logging implementation for `log` which is configured via an environment\nvariable.\n"
-homepage = "https://github.com/rust-lang/log"
-documentation = "http://doc.rust-lang.org/log/env_logger"
+documentation = "https://docs.rs/env_logger"
+readme = "README.md"
+keywords = ["logging", "log", "logger"]
 categories = ["development-tools::debugging"]
 license = "MIT/Apache-2.0"
-repository = "https://github.com/rust-lang/log"
+repository = "https://github.com/sebasmagri/env_logger/"
 
 [[test]]
 name = "regexp_filter"
 harness = false
+
+[[test]]
+name = "log-in-log"
+harness = false
+[dependencies.atty]
+version = "0.2"
+
+[dependencies.humantime]
+version = "1.1.0"
+
+[dependencies.log]
+version = "0.4.0"
+features = ["std"]
+
 [dependencies.regex]
 version = "0.2"
 optional = true
 
-[dependencies.log]
+[dependencies.termcolor]
 version = "0.3"
 
 [features]
 default = ["regex"]
new file mode 100644
--- /dev/null
+++ b/third_party/rust/env_logger/README.md
@@ -0,0 +1,142 @@
+env_logger [![Build Status](https://travis-ci.org/sebasmagri/env_logger.svg?branch=master)](https://travis-ci.org/sebasmagri/env_logger)
+==========
+
+Implements a logger that can be configured via an environment variable.
+
+* [`env_logger` documentation](https://docs.rs/env_logger)
+
+## Usage
+
+### In libraries
+
+`env_logger` makes sense when used in executables (binary projects). Libraries should use the [`log`](https://doc.rust-lang.org/log) crate instead.
+
+### In executables
+
+It must be added along with `log` to the project dependencies:
+
+```toml
+[dependencies]
+log = "0.4.0"
+env_logger = "0.5.4"
+```
+
+`env_logger` must be initialized as early as possible in the project. After it's initialized, you can use the `log` macros to do actual logging.
+
+```rust
+#[macro_use]
+extern crate log;
+extern crate env_logger;
+
+fn main() {
+    env_logger::init();
+
+    info!("starting up");
+
+    // ...
+}
+```
+
+Then when running the executable, specify a value for the `RUST_LOG`
+environment variable that corresponds with the log messages you want to show.
+
+```bash
+$ RUST_LOG=info ./main
+INFO: 2017-11-09T02:12:24Z: main: starting up
+```
+
+### In tests
+
+Tests can use the `env_logger` crate to see log messages generated during that test:
+
+```toml
+[dependencies]
+log = "0.4.0"
+
+[dev-dependencies]
+env_logger = "0.5.4"
+```
+
+```rust
+#[macro_use]
+extern crate log;
+
+fn add_one(num: i32) -> i32 {
+    info!("add_one called with {}", num);
+    num + 1
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    extern crate env_logger;
+
+    #[test]
+    fn it_adds_one() {
+        let _ = env_logger::try_init();
+        info!("can log from the test too");
+        assert_eq!(3, add_one(2));
+    }
+
+    #[test]
+    fn it_handles_negative_numbers() {
+        let _ = env_logger::try_init();
+        info!("logging from another test");
+        assert_eq!(-7, add_one(-8));
+    }
+}
+```
+
+Assuming the module under test is called `my_lib`, running the tests with
+`RUST_LOG` set to show info messages from this module looks like:
+
+```bash
+$ RUST_LOG=my_lib=info cargo test
+     Running target/debug/my_lib-...
+
+running 2 tests
+INFO: 2017-11-09T02:12:24Z: my_lib::tests: logging from another test
+INFO: 2017-11-09T02:12:24Z: my_lib: add_one called with -8
+test tests::it_handles_negative_numbers ... ok
+INFO: 2017-11-09T02:12:24Z: my_lib::tests: can log from the test too
+INFO: 2017-11-09T02:12:24Z: my_lib: add_one called with 2
+test tests::it_adds_one ... ok
+
+test result: ok. 2 passed; 0 failed; 0 ignored; 0 measured
+```
+
+Note that `env_logger::try_init()` needs to be called in each test in which you
+want to enable logging. Additionally, because tests run in parallel by default,
+logging output may be interleaved with test output. Either run the tests on a
+single thread by setting `RUST_TEST_THREADS=1`, or run a single test by passing
+its name as an argument to the test binary, as described in the `cargo test`
+help docs:
+
+```bash
+$ RUST_LOG=my_lib=info cargo test it_adds_one
+     Running target/debug/my_lib-...
+
+running 1 test
+INFO: 2017-11-09T02:12:24Z: my_lib::tests: can log from the test too
+INFO: 2017-11-09T02:12:24Z: my_lib: add_one called with 2
+test tests::it_adds_one ... ok
+
+test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
+```
+
+## Configuring log target
+
+By default, `env_logger` logs to stderr. If you want to log to stdout instead,
+you can use the `Builder` to change the log target:
+
+```rust
+use std::env;
+use env_logger::{Builder, Target};
+
+let mut builder = Builder::new();
+builder.target(Target::Stdout);
+if env::var("RUST_LOG").is_ok() {
+    builder.parse(&env::var("RUST_LOG").unwrap());
+}
+builder.init();
+```
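
The same stdout setup can also be expressed through the `Env` helper that this release introduces. The following is only a minimal sketch, assuming the `Builder::from_env`, `Env::default`, and `Target` items re-exported by env_logger 0.5; it is not part of the vendored README:

```rust
extern crate env_logger;

use env_logger::{Builder, Env, Target};

fn main() {
    // Read RUST_LOG / RUST_LOG_STYLE via `Env` and send records to stdout.
    Builder::from_env(Env::default())
        .target(Target::Stdout)
        .init();
}
```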
new file mode 100644
--- /dev/null
+++ b/third_party/rust/env_logger/examples/custom_format.rs
@@ -0,0 +1,52 @@
+/*!
+Changing the default logging format.
+
+Before running this example, try setting the `MY_LOG_LEVEL` environment variable to `info`:
+
+```no_run,shell
+$ export MY_LOG_LEVEL='info'
+```
+
+Also try setting the `MY_LOG_STYLE` environment variable to `never` to disable colors
+or `auto` to enable them:
+
+```no_run,shell
+$ export MY_LOG_STYLE=never
+```
+
+If you want to control the logging output completely, see the `custom_logger` example.
+*/
+
+#[macro_use]
+extern crate log;
+extern crate env_logger;
+
+use std::io::Write;
+
+use env_logger::{Env, Builder, fmt};
+
+fn init_logger() {
+    let env = Env::default()
+        .filter("MY_LOG_LEVEL")
+        .write_style("MY_LOG_STYLE");
+
+    let mut builder = Builder::from_env(env);
+
+    // Use a different format for writing log records
+    builder.format(|buf, record| {
+        let mut style = buf.style();
+        style.set_bg(fmt::Color::Yellow).set_bold(true);
+
+        let timestamp = buf.timestamp();
+
+        writeln!(buf, "My formatted log ({}): {}", timestamp, style.value(record.args()))
+    });
+
+    builder.init();
+}
+
+fn main() {
+    init_logger();
+
+    info!("a log from `MyLogger`");
+}
new file mode 100644
--- /dev/null
+++ b/third_party/rust/env_logger/examples/custom_logger.rs
@@ -0,0 +1,67 @@
+/*!
+Using `env_logger` to drive a custom logger.
+
+Before running this example, try setting the `MY_LOG_LEVEL` environment variable to `info`:
+
+```no_run,shell
+$ export MY_LOG_LEVEL='info'
+```
+
+Also try setting the `MY_LOG_STYLE` environment variable to `never` to disable colors
+or `auto` to enable them:
+
+```no_run,shell
+$ export MY_LOG_STYLE=never
+```
+
+If you only want to change the way logs are formatted, look at the `custom_format` example.
+*/
+
+#[macro_use]
+extern crate log;
+extern crate env_logger;
+use env_logger::filter::Filter;
+use log::{Log, Metadata, Record, SetLoggerError};
+
+struct MyLogger {
+    inner: Filter
+}
+
+impl MyLogger {
+    fn new() -> MyLogger {
+        use env_logger::filter::Builder;
+        let mut builder = Builder::from_env("MY_LOG_LEVEL");
+
+        MyLogger {
+            inner: builder.build()
+        }
+    }
+
+    fn init() -> Result<(), SetLoggerError> {
+        let logger = Self::new();
+
+        log::set_max_level(logger.inner.filter());
+        log::set_boxed_logger(Box::new(logger))
+    }
+}
+
+impl Log for MyLogger {
+    fn enabled(&self, metadata: &Metadata) -> bool {
+        self.inner.enabled(metadata)
+    }
+
+    fn log(&self, record: &Record) {
+        // Check if the record is matched by the logger before logging
+        if self.inner.matches(record) {
+            println!("{} - {}", record.level(), record.args());
+        }
+    }
+
+    fn flush(&self) { }
+}
+
+fn main() {
+    MyLogger::init().unwrap();
+
+    info!("a log from `MyLogger`");
+}
new file mode 100644
--- /dev/null
+++ b/third_party/rust/env_logger/examples/default.rs
@@ -0,0 +1,30 @@
+/*!
+Using `env_logger`.
+
+Before running this example, try setting the `MY_LOG_LEVEL` environment variable to `info`:
+
+```no_run,shell
+$ export MY_LOG_LEVEL='info'
+```
+
+Also try setting the `MY_LOG_STYLE` environment variable to `never` to disable colors
+or `auto` to enable them:
+
+```no_run,shell
+$ export MY_LOG_STYLE=never
+```
+*/
+
+#[macro_use]
+extern crate log;
+extern crate env_logger;
+
+fn main() {
+    env_logger::init_from_env("MY_LOG_LEVEL");
+
+    trace!("some trace log");
+    debug!("some debug log");
+    info!("some information log");
+    warn!("some warning log");
+    error!("some error log");
+}
new file mode 100644
--- /dev/null
+++ b/third_party/rust/env_logger/examples/direct_logger.rs
@@ -0,0 +1,40 @@
+/*!
+Using `env_logger::Logger` and the `log::Log` trait directly.
+
+This example doesn't rely on environment variables, or having a static logger installed.
+*/
+
+extern crate log;
+extern crate env_logger;
+
+fn record() -> log::Record<'static> {
+    let error_metadata = log::MetadataBuilder::new()
+        .target("myApp")
+        .level(log::Level::Error)
+        .build();
+
+    log::Record::builder()
+        .metadata(error_metadata)
+        .args(format_args!("Error!"))
+        .line(Some(433))
+        .file(Some("app.rs"))
+        .module_path(Some("server"))
+        .build()
+}
+
+fn main() {
+    use log::Log;
+
+    let stylish_logger = env_logger::Builder::new()
+        .filter(None, log::LevelFilter::Error)
+        .write_style(env_logger::WriteStyle::Always)
+        .build();
+
+    let unstylish_logger = env_logger::Builder::new()
+        .filter(None, log::LevelFilter::Error)
+        .write_style(env_logger::WriteStyle::Never)
+        .build();
+    
+    stylish_logger.log(&record());
+    unstylish_logger.log(&record());
+}
\ No newline at end of file
new file mode 100644
--- /dev/null
+++ b/third_party/rust/env_logger/src/filter/mod.rs
@@ -0,0 +1,558 @@
+//! Filtering for log records.
+//! 
+//! This module contains the log filtering used by `env_logger` to match records.
+//! You can use the `Filter` type in your own logger implementation to use the same 
+//! filter parsing and matching as `env_logger`. For more details about the format 
+//! for directive strings see [Enabling Logging].
+//! 
+//! ## Using `env_logger` in your own logger
+//!
+//! You can use `env_logger`'s filtering functionality with your own logger.
+//! Call [`Builder::parse`] to parse directives from a string when constructing 
+//! your logger. Call [`Filter::matches`] to check whether a record should be 
+//! logged based on the parsed filters when log records are received.
+//!
+//! ```
+//! extern crate log;
+//! extern crate env_logger;
+//! use env_logger::filter::Filter;
+//! use log::{Log, Metadata, Record};
+//!
+//! struct MyLogger {
+//!     filter: Filter
+//! }
+//!
+//! impl MyLogger {
+//!     fn new() -> MyLogger {
+//!         use env_logger::filter::Builder;
+//!         let mut builder = Builder::new();
+//!
+//!         // Parse a directives string from an environment variable
+//!         if let Ok(ref filter) = std::env::var("MY_LOG_LEVEL") {
+//!            builder.parse(filter);
+//!         }
+//!
+//!         MyLogger {
+//!             filter: builder.build()
+//!         }
+//!     }
+//! }
+//!
+//! impl Log for MyLogger {
+//!     fn enabled(&self, metadata: &Metadata) -> bool {
+//!         self.filter.enabled(metadata)
+//!     }
+//!
+//!     fn log(&self, record: &Record) {
+//!         // Check if the record is matched by the filter
+//!         if self.filter.matches(record) {
+//!             println!("{:?}", record);
+//!         }
+//!     }
+//!
+//!     fn flush(&self) {}
+//! }
+//! # fn main() {}
+//! ```
+//! 
+//! [Enabling Logging]: ../index.html#enabling-logging
+//! [`Builder::parse`]: struct.Builder.html#method.parse
+//! [`Filter::matches`]: struct.Filter.html#method.matches
+
+use std::env;
+use std::mem;
+use std::fmt;
+use log::{Level, LevelFilter, Record, Metadata};
+
+#[cfg(feature = "regex")]
+#[path = "regex.rs"]
+mod inner;
+
+#[cfg(not(feature = "regex"))]
+#[path = "string.rs"]
+mod inner;
+
+/// A log filter.
+/// 
+/// This struct can be used to determine whether or not a log record
+/// should be written to the output.
+/// Use the [`Builder`] type to parse and construct a `Filter`.
+/// 
+/// [`Builder`]: struct.Builder.html
+pub struct Filter {
+    directives: Vec<Directive>,
+    filter: Option<inner::Filter>,
+}
+
+/// A builder for a log filter.
+/// 
+/// It can be used to parse a set of directives from a string before building
+/// a [`Filter`] instance.
+/// 
+/// ## Example
+///
+/// ```
+/// #[macro_use]
+/// extern crate log;
+/// extern crate env_logger;
+///
+/// use std::env;
+/// use std::io;
+/// use env_logger::filter::Builder;
+///
+/// fn main() {
+///     let mut builder = Builder::new();
+///
+///     // Parse a logging filter from an environment variable.
+///     if let Ok(rust_log) = env::var("RUST_LOG") {
+///        builder.parse(&rust_log);
+///     }
+///
+///     let filter = builder.build();
+/// }
+/// ```
+/// 
+/// [`Filter`]: struct.Filter.html
+pub struct Builder {
+    directives: Vec<Directive>,
+    filter: Option<inner::Filter>,
+}
+
+#[derive(Debug)]
+struct Directive {
+    name: Option<String>,
+    level: LevelFilter,
+}
+
+impl Filter {
+    /// Returns the maximum `LevelFilter` that this filter instance is
+    /// configured to output.
+    ///
+    /// # Example
+    ///
+    /// ```rust
+    /// extern crate log;
+    /// extern crate env_logger;
+    ///
+    /// use log::LevelFilter;
+    /// use env_logger::filter::Builder;
+    ///
+    /// fn main() {
+    ///     let mut builder = Builder::new();
+    ///     builder.filter(Some("module1"), LevelFilter::Info);
+    ///     builder.filter(Some("module2"), LevelFilter::Error);
+    ///
+    ///     let filter = builder.build();
+    ///     assert_eq!(filter.filter(), LevelFilter::Info);
+    /// }
+    /// ```
+    pub fn filter(&self) -> LevelFilter {
+        self.directives.iter()
+            .map(|d| d.level)
+            .max()
+            .unwrap_or(LevelFilter::Off)
+    }
+
+    /// Checks if this record matches the configured filter.
+    pub fn matches(&self, record: &Record) -> bool {
+        if !self.enabled(record.metadata()) {
+            return false;
+        }
+
+        if let Some(filter) = self.filter.as_ref() {
+            if !filter.is_match(&*record.args().to_string()) {
+                return false;
+            }
+        }
+
+        true
+    }
+
+    /// Determines if a log message with the specified metadata would be logged.
+    pub fn enabled(&self, metadata: &Metadata) -> bool {
+        let level = metadata.level();
+        let target = metadata.target();
+
+        enabled(&self.directives, level, target)
+    }
+}
+
+impl Builder {
+    /// Initializes the filter builder with defaults.
+    pub fn new() -> Builder {
+        Builder {
+            directives: Vec::new(),
+            filter: None,
+        }
+    }
+
+    /// Initializes the filter builder from an environment.
+    pub fn from_env(env: &str) -> Builder {
+        let mut builder = Builder::new();
+
+        if let Ok(s) = env::var(env) {
+            builder.parse(&s);
+        }
+
+        builder
+    }
+
+    /// Adds a directive to the filter.
+    ///
+    /// The given module (if any) will log at most the specified level provided.
+    /// If no module is provided then the filter will apply to all log messages.
+    pub fn filter(&mut self,
+                  module: Option<&str>,
+                  level: LevelFilter) -> &mut Self {
+        self.directives.push(Directive {
+            name: module.map(|s| s.to_string()),
+            level: level,
+        });
+        self
+    }
+
+    /// Parses the directives string.
+    ///
+    /// See the [Enabling Logging] section for more details.
+    /// 
+    /// [Enabling Logging]: ../index.html#enabling-logging
+    pub fn parse(&mut self, filters: &str) -> &mut Self {
+        let (directives, filter) = parse_spec(filters);
+
+        self.filter = filter;
+
+        for directive in directives {
+            self.directives.push(directive);
+        }
+        self
+    }
+
+    /// Build a log filter.
+    pub fn build(&mut self) -> Filter {
+        if self.directives.is_empty() {
+            // Adds the default filter if none exist
+            self.directives.push(Directive {
+                name: None,
+                level: LevelFilter::Error,
+            });
+        } else {
+            // Sort the directives by the length of their name; this allows a
+            // slightly more efficient lookup at runtime.
+            self.directives.sort_by(|a, b| {
+                let alen = a.name.as_ref().map(|a| a.len()).unwrap_or(0);
+                let blen = b.name.as_ref().map(|b| b.len()).unwrap_or(0);
+                alen.cmp(&blen)
+            });
+        }
+
+        Filter {
+            directives: mem::replace(&mut self.directives, Vec::new()),
+            filter: mem::replace(&mut self.filter, None),
+        }
+    }
+}
+
+impl Default for Builder {
+    fn default() -> Self {
+        Builder::new()
+    }
+}
+
+impl fmt::Debug for Filter {
+    fn fmt(&self, f: &mut fmt::Formatter)->fmt::Result {
+        f.debug_struct("Filter")
+            .field("filter", &self.filter)
+            .field("directives", &self.directives)
+            .finish()
+    }
+}
+
+impl fmt::Debug for Builder {
+    fn fmt(&self, f: &mut fmt::Formatter)->fmt::Result {
+        f.debug_struct("Filter")
+            .field("filter", &self.filter)
+            .field("directives", &self.directives)
+            .finish()
+    }
+}
+
+/// Parse a logging specification string (e.g. "crate1,crate2::mod3,crate3::x=error/foo")
+/// and return a vector with log directives.
+fn parse_spec(spec: &str) -> (Vec<Directive>, Option<inner::Filter>) {
+    let mut dirs = Vec::new();
+
+    let mut parts = spec.split('/');
+    let mods = parts.next();
+    let filter = parts.next();
+    if parts.next().is_some() {
+        println!("warning: invalid logging spec '{}', \
+                 ignoring it (too many '/'s)", spec);
+        return (dirs, None);
+    }
+    mods.map(|m| { for s in m.split(',') {
+        if s.len() == 0 { continue }
+        let mut parts = s.split('=');
+        let (log_level, name) = match (parts.next(), parts.next().map(|s| s.trim()), parts.next()) {
+            (Some(part0), None, None) => {
+                // if the single argument is a log-level string or number,
+                // treat that as a global fallback
+                match part0.parse() {
+                    Ok(num) => (num, None),
+                    Err(_) => (LevelFilter::max(), Some(part0)),
+                }
+            }
+            (Some(part0), Some(""), None) => (LevelFilter::max(), Some(part0)),
+            (Some(part0), Some(part1), None) => {
+                match part1.parse() {
+                    Ok(num) => (num, Some(part0)),
+                    _ => {
+                        println!("warning: invalid logging spec '{}', \
+                                 ignoring it", part1);
+                        continue
+                    }
+                }
+            },
+            _ => {
+                println!("warning: invalid logging spec '{}', \
+                         ignoring it", s);
+                continue
+            }
+        };
+        dirs.push(Directive {
+            name: name.map(|s| s.to_string()),
+            level: log_level,
+        });
+    }});
+
+    let filter = filter.map_or(None, |filter| {
+        match inner::Filter::new(filter) {
+            Ok(re) => Some(re),
+            Err(e) => {
+                println!("warning: invalid regex filter - {}", e);
+                None
+            }
+        }
+    });
+
+    return (dirs, filter);
+}
+
+
+// Check whether a level and target are enabled by the set of directives.
+fn enabled(directives: &[Directive], level: Level, target: &str) -> bool {
+    // Search for the longest match, the vector is assumed to be pre-sorted.
+    for directive in directives.iter().rev() {
+        match directive.name {
+            Some(ref name) if !target.starts_with(&**name) => {},
+            Some(..) | None => {
+                return level <= directive.level
+            }
+        }
+    }
+    false
+}
+
+#[cfg(test)]
+mod tests {
+    use log::{Level, LevelFilter};
+
+    use super::{Builder, Filter, Directive, parse_spec, enabled};
+
+    fn make_logger_filter(dirs: Vec<Directive>) -> Filter {
+        let mut logger = Builder::new().build();
+        logger.directives = dirs;
+        logger
+    }
+
+    #[test]
+    fn filter_info() {
+        let logger = Builder::new().filter(None, LevelFilter::Info).build();
+        assert!(enabled(&logger.directives, Level::Info, "crate1"));
+        assert!(!enabled(&logger.directives, Level::Debug, "crate1"));
+    }
+
+    #[test]
+    fn filter_beginning_longest_match() {
+        let logger = Builder::new()
+                        .filter(Some("crate2"), LevelFilter::Info)
+                        .filter(Some("crate2::mod"), LevelFilter::Debug)
+                        .filter(Some("crate1::mod1"), LevelFilter::Warn)
+                        .build();
+        assert!(enabled(&logger.directives, Level::Debug, "crate2::mod1"));
+        assert!(!enabled(&logger.directives, Level::Debug, "crate2"));
+    }
+
+    #[test]
+    fn parse_default() {
+        let logger = Builder::new().parse("info,crate1::mod1=warn").build();
+        assert!(enabled(&logger.directives, Level::Warn, "crate1::mod1"));
+        assert!(enabled(&logger.directives, Level::Info, "crate2::mod2"));
+    }
+
+    #[test]
+    fn match_full_path() {
+        let logger = make_logger_filter(vec![
+            Directive {
+                name: Some("crate2".to_string()),
+                level: LevelFilter::Info
+            },
+            Directive {
+                name: Some("crate1::mod1".to_string()),
+                level: LevelFilter::Warn
+            }
+        ]);
+        assert!(enabled(&logger.directives, Level::Warn, "crate1::mod1"));
+        assert!(!enabled(&logger.directives, Level::Info, "crate1::mod1"));
+        assert!(enabled(&logger.directives, Level::Info, "crate2"));
+        assert!(!enabled(&logger.directives, Level::Debug, "crate2"));
+    }
+
+    #[test]
+    fn no_match() {
+        let logger = make_logger_filter(vec![
+            Directive { name: Some("crate2".to_string()), level: LevelFilter::Info },
+            Directive { name: Some("crate1::mod1".to_string()), level: LevelFilter::Warn }
+        ]);
+        assert!(!enabled(&logger.directives, Level::Warn, "crate3"));
+    }
+
+    #[test]
+    fn match_beginning() {
+        let logger = make_logger_filter(vec![
+            Directive { name: Some("crate2".to_string()), level: LevelFilter::Info },
+            Directive { name: Some("crate1::mod1".to_string()), level: LevelFilter::Warn }
+        ]);
+        assert!(enabled(&logger.directives, Level::Info, "crate2::mod1"));
+    }
+
+    #[test]
+    fn match_beginning_longest_match() {
+        let logger = make_logger_filter(vec![
+            Directive { name: Some("crate2".to_string()), level: LevelFilter::Info },
+            Directive { name: Some("crate2::mod".to_string()), level: LevelFilter::Debug },
+            Directive { name: Some("crate1::mod1".to_string()), level: LevelFilter::Warn }
+        ]);
+        assert!(enabled(&logger.directives, Level::Debug, "crate2::mod1"));
+        assert!(!enabled(&logger.directives, Level::Debug, "crate2"));
+    }
+
+    #[test]
+    fn match_default() {
+        let logger = make_logger_filter(vec![
+            Directive { name: None, level: LevelFilter::Info },
+            Directive { name: Some("crate1::mod1".to_string()), level: LevelFilter::Warn }
+        ]);
+        assert!(enabled(&logger.directives, Level::Warn, "crate1::mod1"));
+        assert!(enabled(&logger.directives, Level::Info, "crate2::mod2"));
+    }
+
+    #[test]
+    fn zero_level() {
+        let logger = make_logger_filter(vec![
+            Directive { name: None, level: LevelFilter::Info },
+            Directive { name: Some("crate1::mod1".to_string()), level: LevelFilter::Off }
+        ]);
+        assert!(!enabled(&logger.directives, Level::Error, "crate1::mod1"));
+        assert!(enabled(&logger.directives, Level::Info, "crate2::mod2"));
+    }
+
+    #[test]
+    fn parse_spec_valid() {
+        let (dirs, filter) = parse_spec("crate1::mod1=error,crate1::mod2,crate2=debug");
+        assert_eq!(dirs.len(), 3);
+        assert_eq!(dirs[0].name, Some("crate1::mod1".to_string()));
+        assert_eq!(dirs[0].level, LevelFilter::Error);
+
+        assert_eq!(dirs[1].name, Some("crate1::mod2".to_string()));
+        assert_eq!(dirs[1].level, LevelFilter::max());
+
+        assert_eq!(dirs[2].name, Some("crate2".to_string()));
+        assert_eq!(dirs[2].level, LevelFilter::Debug);
+        assert!(filter.is_none());
+    }
+
+    #[test]
+    fn parse_spec_invalid_crate() {
+        // test parse_spec with multiple = in specification
+        let (dirs, filter) = parse_spec("crate1::mod1=warn=info,crate2=debug");
+        assert_eq!(dirs.len(), 1);
+        assert_eq!(dirs[0].name, Some("crate2".to_string()));
+        assert_eq!(dirs[0].level, LevelFilter::Debug);
+        assert!(filter.is_none());
+    }
+
+    #[test]
+    fn parse_spec_invalid_level() {
+        // test parse_spec with 'noNumber' as log level
+        let (dirs, filter) = parse_spec("crate1::mod1=noNumber,crate2=debug");
+        assert_eq!(dirs.len(), 1);
+        assert_eq!(dirs[0].name, Some("crate2".to_string()));
+        assert_eq!(dirs[0].level, LevelFilter::Debug);
+        assert!(filter.is_none());
+    }
+
+    #[test]
+    fn parse_spec_string_level() {
+        // test parse_spec with 'warn' as log level
+        let (dirs, filter) = parse_spec("crate1::mod1=wrong,crate2=warn");
+        assert_eq!(dirs.len(), 1);
+        assert_eq!(dirs[0].name, Some("crate2".to_string()));
+        assert_eq!(dirs[0].level, LevelFilter::Warn);
+        assert!(filter.is_none());
+    }
+
+    #[test]
+    fn parse_spec_empty_level() {
+        // test parse_spec with '' as log level
+        let (dirs, filter) = parse_spec("crate1::mod1=wrong,crate2=");
+        assert_eq!(dirs.len(), 1);
+        assert_eq!(dirs[0].name, Some("crate2".to_string()));
+        assert_eq!(dirs[0].level, LevelFilter::max());
+        assert!(filter.is_none());
+    }
+
+    #[test]
+    fn parse_spec_global() {
+        // test parse_spec with no crate
+        let (dirs, filter) = parse_spec("warn,crate2=debug");
+        assert_eq!(dirs.len(), 2);
+        assert_eq!(dirs[0].name, None);
+        assert_eq!(dirs[0].level, LevelFilter::Warn);
+        assert_eq!(dirs[1].name, Some("crate2".to_string()));
+        assert_eq!(dirs[1].level, LevelFilter::Debug);
+        assert!(filter.is_none());
+    }
+
+    #[test]
+    fn parse_spec_valid_filter() {
+        let (dirs, filter) = parse_spec("crate1::mod1=error,crate1::mod2,crate2=debug/abc");
+        assert_eq!(dirs.len(), 3);
+        assert_eq!(dirs[0].name, Some("crate1::mod1".to_string()));
+        assert_eq!(dirs[0].level, LevelFilter::Error);
+
+        assert_eq!(dirs[1].name, Some("crate1::mod2".to_string()));
+        assert_eq!(dirs[1].level, LevelFilter::max());
+
+        assert_eq!(dirs[2].name, Some("crate2".to_string()));
+        assert_eq!(dirs[2].level, LevelFilter::Debug);
+        assert!(filter.is_some() && filter.unwrap().to_string() == "abc");
+    }
+
+    #[test]
+    fn parse_spec_invalid_crate_filter() {
+        let (dirs, filter) = parse_spec("crate1::mod1=error=warn,crate2=debug/a.c");
+        assert_eq!(dirs.len(), 1);
+        assert_eq!(dirs[0].name, Some("crate2".to_string()));
+        assert_eq!(dirs[0].level, LevelFilter::Debug);
+        assert!(filter.is_some() && filter.unwrap().to_string() == "a.c");
+    }
+
+    #[test]
+    fn parse_spec_empty_with_filter() {
+        let (dirs, filter) = parse_spec("crate1/a*c");
+        assert_eq!(dirs.len(), 1);
+        assert_eq!(dirs[0].name, Some("crate1".to_string()));
+        assert_eq!(dirs[0].level, LevelFilter::max());
+        assert!(filter.is_some() && filter.unwrap().to_string() == "a*c");
+    }
+}
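
To make the directive grammar handled by `parse_spec` concrete, here is a small sketch using the public `filter::Builder` API added above; the spec string and assertions are illustrative only, not taken from the vendored tests:

```rust
extern crate env_logger;
extern crate log;

use env_logger::filter::Builder;
use log::LevelFilter;

fn main() {
    // "info,crate1::mod1=warn/foo": a global `info` directive, a per-module
    // `warn` directive, and a message filter ("foo") after the '/'.
    let filter = Builder::new()
        .parse("info,crate1::mod1=warn/foo")
        .build();

    // The overall level is the most verbose directive (here `info`).
    assert_eq!(filter.filter(), LevelFilter::Info);
}
```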
new file mode 100644
--- /dev/null
+++ b/third_party/rust/env_logger/src/filter/regex.rs
@@ -0,0 +1,29 @@
+extern crate regex;
+
+use std::fmt;
+
+use self::regex::Regex;
+
+#[derive(Debug)]
+pub struct Filter {
+    inner: Regex,
+}
+
+impl Filter {
+    pub fn new(spec: &str) -> Result<Filter, String> {
+        match Regex::new(spec){
+            Ok(r) => Ok(Filter { inner: r }),
+            Err(e) => Err(e.to_string()),
+        }
+    }
+
+    pub fn is_match(&self, s: &str) -> bool {
+        self.inner.is_match(s)
+    }
+}
+
+impl fmt::Display for Filter {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        self.inner.fmt(f)
+    }
+}
new file mode 100644
--- /dev/null
+++ b/third_party/rust/env_logger/src/filter/string.rs
@@ -0,0 +1,22 @@
+use std::fmt;
+
+#[derive(Debug)]
+pub struct Filter {
+    inner: String,
+}
+
+impl Filter {
+    pub fn new(spec: &str) -> Result<Filter, String> {
+        Ok(Filter { inner: spec.to_string() })
+    }
+
+    pub fn is_match(&self, s: &str) -> bool {
+        s.contains(&self.inner)
+    }
+}
+
+impl fmt::Display for Filter {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        self.inner.fmt(f)
+    }
+}
new file mode 100644
--- /dev/null
+++ b/third_party/rust/env_logger/src/fmt.rs
@@ -0,0 +1,617 @@
+//! Formatting for log records.
+//!
+//! This module contains a [`Formatter`] that log records can be formatted into
+//! without needing temporary allocations. Usually you won't need to worry
+//! about the contents of this module and can use the `Formatter` like an ordinary
+//! [`Write`].
+//!
+//! # Formatting log records
+//!
+//! The format used to print log records can be customised using the [`Builder::format`]
+//! method.
+//! Custom formats can apply different color and weight to printed values using
+//! [`Style`] builders.
+//!
+//! ```
+//! use std::io::Write;
+//! use env_logger::fmt::Color;
+//!
+//! let mut builder = env_logger::Builder::new();
+//!
+//! builder.format(|buf, record| {
+//!     let mut level_style = buf.style();
+//!
+//!     level_style.set_color(Color::Red).set_bold(true);
+//!
+//!     writeln!(buf, "{}: {}",
+//!         level_style.value(record.level()),
+//!         record.args())
+//! });
+//! ```
+//!
+//! [`Formatter`]: struct.Formatter.html
+//! [`Style`]: struct.Style.html
+//! [`Builder::format`]: ../struct.Builder.html#method.format
+//! [`Write`]: https://doc.rust-lang.org/stable/std/io/trait.Write.html
+
+use std::io::prelude::*;
+use std::{io, fmt};
+use std::rc::Rc;
+use std::cell::RefCell;
+use std::time::SystemTime;
+
+use termcolor::{ColorSpec, ColorChoice, Buffer, BufferWriter, WriteColor};
+use atty;
+use humantime::format_rfc3339_seconds;
+
+pub use termcolor::Color;
+
+/// A formatter to write logs into.
+///
+/// `Formatter` implements the standard [`Write`] trait for writing log records.
+/// It also supports terminal colors, through the [`style`] method.
+///
+/// # Examples
+///
+/// Use the [`writeln`] macro to easily format a log record:
+///
+/// ```
+/// use std::io::Write;
+///
+/// let mut builder = env_logger::Builder::new();
+///
+/// builder.format(|buf, record| writeln!(buf, "{}: {}", record.level(), record.args()));
+/// ```
+///
+/// [`Write`]: https://doc.rust-lang.org/stable/std/io/trait.Write.html
+/// [`writeln`]: https://doc.rust-lang.org/stable/std/macro.writeln.html
+/// [`style`]: #method.style
+pub struct Formatter {
+    buf: Rc<RefCell<Buffer>>,
+    write_style: WriteStyle,
+}
+
+/// A set of styles to apply to the terminal output.
+///
+/// Call [`Formatter::style`] to get a `Style` and use the builder methods to
+/// set styling properties, like [color] and [weight].
+/// To print a value using the style, wrap it in a call to [`value`] when the log
+/// record is formatted.
+///
+/// # Examples
+///
+/// Create a bold, red colored style and use it to print the log level:
+///
+/// ```
+/// use std::io::Write;
+/// use env_logger::fmt::Color;
+///
+/// let mut builder = env_logger::Builder::new();
+///
+/// builder.format(|buf, record| {
+///     let mut level_style = buf.style();
+///
+///     level_style.set_color(Color::Red).set_bold(true);
+///
+///     writeln!(buf, "{}: {}",
+///         level_style.value(record.level()),
+///         record.args())
+/// });
+/// ```
+///
+/// Styles can be re-used to output multiple values:
+///
+/// ```
+/// use std::io::Write;
+/// use env_logger::fmt::Color;
+///
+/// let mut builder = env_logger::Builder::new();
+///
+/// builder.format(|buf, record| {
+///     let mut bold = buf.style();
+///
+///     bold.set_bold(true);
+///
+///     writeln!(buf, "{}: {} {}",
+///         bold.value(record.level()),
+///         bold.value("some bold text"),
+///         record.args())
+/// });
+/// ```
+///
+/// [`Formatter::style`]: struct.Formatter.html#method.style
+/// [color]: #method.set_color
+/// [weight]: #method.set_bold
+/// [`value`]: #method.value
+#[derive(Clone)]
+pub struct Style {
+    buf: Rc<RefCell<Buffer>>,
+    spec: ColorSpec,
+}
+
+/// A value that can be printed using the given styles.
+///
+/// It is the result of calling [`Style::value`].
+///
+/// [`Style::value`]: struct.Style.html#method.value
+pub struct StyledValue<'a, T> {
+    style: &'a Style,
+    value: T,
+}
+
+/// An [RFC3339] formatted timestamp.
+///
+/// The timestamp implements [`Display`] and can be written to a [`Formatter`].
+///
+/// [RFC3339]: https://www.ietf.org/rfc/rfc3339.txt
+/// [`Display`]: https://doc.rust-lang.org/stable/std/fmt/trait.Display.html
+/// [`Formatter`]: struct.Formatter.html
+pub struct Timestamp(SystemTime);
+
+/// Log target, either `stdout` or `stderr`.
+#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
+pub enum Target {
+    /// Logs will be sent to standard output.
+    Stdout,
+    /// Logs will be sent to standard error.
+    Stderr,
+}
+
+impl Default for Target {
+    fn default() -> Self {
+        Target::Stderr
+    }
+}
+
+/// Whether or not to print styles to the target.
+#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
+pub enum WriteStyle {
+    /// Try to print styles, but don't force the issue.
+    Auto,
+    /// Try very hard to print styles.
+    Always,
+    /// Never print styles.
+    Never,
+}
+
+impl Default for WriteStyle {
+    fn default() -> Self {
+        WriteStyle::Auto
+    }
+}
+
+/// A terminal target with color awareness.
+pub(crate) struct Writer {
+    inner: BufferWriter,
+    write_style: WriteStyle,
+}
+
+impl Writer {
+    pub(crate) fn write_style(&self) -> WriteStyle {
+        self.write_style
+    }
+}
+
+/// A builder for a terminal writer.
+///
+/// The target and style choice can be configured before building.
+pub(crate) struct Builder {
+    target: Target,
+    write_style: WriteStyle,
+}
+
+impl Builder {
+    /// Initialize the writer builder with defaults.
+    pub fn new() -> Self {
+        Builder {
+            target: Default::default(),
+            write_style: Default::default(),
+        }
+    }
+
+    /// Set the target to write to.
+    pub fn target(&mut self, target: Target) -> &mut Self {
+        self.target = target;
+        self
+    }
+
+    /// Parses a style choice string.
+    ///
+    /// See the [Disabling colors] section for more details.
+    ///
+    /// [Disabling colors]: ../index.html#disabling-colors
+    pub fn parse(&mut self, write_style: &str) -> &mut Self {
+        self.write_style(parse_write_style(write_style))
+    }
+
+    /// Whether or not to print style characters when writing.
+    pub fn write_style(&mut self, write_style: WriteStyle) -> &mut Self {
+        self.write_style = write_style;
+        self
+    }
+
+    /// Build a terminal writer.
+    pub fn build(&mut self) -> Writer {
+        let color_choice = match self.write_style {
+            WriteStyle::Auto => {
+                if atty::is(match self.target {
+                    Target::Stderr => atty::Stream::Stderr,
+                    Target::Stdout => atty::Stream::Stdout,
+                }) {
+                    ColorChoice::Auto
+                } else {
+                    ColorChoice::Never
+                }
+            },
+            WriteStyle::Always => ColorChoice::Always,
+            WriteStyle::Never => ColorChoice::Never,
+        };
+
+        let writer = match self.target {
+            Target::Stderr => BufferWriter::stderr(color_choice),
+            Target::Stdout => BufferWriter::stdout(color_choice),
+        };
+
+        Writer {
+            inner: writer,
+            write_style: self.write_style,
+        }
+    }
+}
+
+impl Default for Builder {
+    fn default() -> Self {
+        Builder::new()
+    }
+}
+
+impl Style {
+    /// Set the text color.
+    ///
+    /// # Examples
+    ///
+    /// Create a style with red text:
+    ///
+    /// ```
+    /// use std::io::Write;
+    /// use env_logger::fmt::Color;
+    ///
+    /// let mut builder = env_logger::Builder::new();
+    ///
+    /// builder.format(|buf, record| {
+    ///     let mut style = buf.style();
+    ///
+    ///     style.set_color(Color::Red);
+    ///
+    ///     writeln!(buf, "{}", style.value(record.args()))
+    /// });
+    /// ```
+    pub fn set_color(&mut self, color: Color) -> &mut Style {
+        self.spec.set_fg(Some(color));
+        self
+    }
+
+    /// Set the text weight.
+    ///
+    /// If `yes` is true then text will be written in bold.
+    /// If `yes` is false then text will be written in the default weight.
+    ///
+    /// # Examples
+    ///
+    /// Create a style with bold text:
+    ///
+    /// ```
+    /// use std::io::Write;
+    ///
+    /// let mut builder = env_logger::Builder::new();
+    ///
+    /// builder.format(|buf, record| {
+    ///     let mut style = buf.style();
+    ///
+    ///     style.set_bold(true);
+    ///
+    ///     writeln!(buf, "{}", style.value(record.args()))
+    /// });
+    /// ```
+    pub fn set_bold(&mut self, yes: bool) -> &mut Style {
+        self.spec.set_bold(yes);
+        self
+    }
+
+    /// Set the text intensity.
+    ///
+    /// If `yes` is true then text will be written in a brighter color.
+    /// If `yes` is false then text will be written in the default color.
+    ///
+    /// # Examples
+    ///
+    /// Create a style with intense text:
+    ///
+    /// ```
+    /// use std::io::Write;
+    ///
+    /// let mut builder = env_logger::Builder::new();
+    ///
+    /// builder.format(|buf, record| {
+    ///     let mut style = buf.style();
+    ///
+    ///     style.set_intense(true);
+    ///
+    ///     writeln!(buf, "{}", style.value(record.args()))
+    /// });
+    /// ```
+    pub fn set_intense(&mut self, yes: bool) -> &mut Style {
+        self.spec.set_intense(yes);
+        self
+    }
+
+    /// Set the background color.
+    ///
+    /// # Examples
+    ///
+    /// Create a style with a yellow background:
+    ///
+    /// ```
+    /// use std::io::Write;
+    /// use env_logger::fmt::Color;
+    ///
+    /// let mut builder = env_logger::Builder::new();
+    ///
+    /// builder.format(|buf, record| {
+    ///     let mut style = buf.style();
+    ///
+    ///     style.set_bg(Color::Yellow);
+    ///
+    ///     writeln!(buf, "{}", style.value(record.args()))
+    /// });
+    /// ```
+    pub fn set_bg(&mut self, color: Color) -> &mut Style {
+        self.spec.set_bg(Some(color));
+        self
+    }
+
+    /// Wrap a value in the style.
+    ///
+    /// The same `Style` can be used to print multiple different values.
+    ///
+    /// # Examples
+    ///
+    /// Create a bold, red colored style and use it to print the log level:
+    ///
+    /// ```
+    /// use std::io::Write;
+    /// use env_logger::fmt::Color;
+    ///
+    /// let mut builder = env_logger::Builder::new();
+    ///
+    /// builder.format(|buf, record| {
+    ///     let mut style = buf.style();
+    ///
+    ///     style.set_color(Color::Red).set_bold(true);
+    ///
+    ///     writeln!(buf, "{}: {}",
+    ///         style.value(record.level()),
+    ///         record.args())
+    /// });
+    /// ```
+    pub fn value<T>(&self, value: T) -> StyledValue<T> {
+        StyledValue {
+            style: &self,
+            value
+        }
+    }
+}
+
+impl Formatter {
+    pub(crate) fn new(writer: &Writer) -> Self {
+        Formatter {
+            buf: Rc::new(RefCell::new(writer.inner.buffer())),
+            write_style: writer.write_style(),
+        }
+    }
+
+    pub(crate) fn write_style(&self) -> WriteStyle {
+        self.write_style
+    }
+
+    /// Begin a new [`Style`].
+    ///
+    /// # Examples
+    ///
+    /// Create a bold, red colored style and use it to print the log level:
+    ///
+    /// ```
+    /// use std::io::Write;
+    /// use env_logger::fmt::Color;
+    ///
+    /// let mut builder = env_logger::Builder::new();
+    ///
+    /// builder.format(|buf, record| {
+    ///     let mut level_style = buf.style();
+    ///
+    ///     level_style.set_color(Color::Red).set_bold(true);
+    ///
+    ///     writeln!(buf, "{}: {}",
+    ///         level_style.value(record.level()),
+    ///         record.args())
+    /// });
+    /// ```
+    ///
+    /// [`Style`]: struct.Style.html
+    pub fn style(&self) -> Style {
+        Style {
+            buf: self.buf.clone(),
+            spec: ColorSpec::new(),
+        }
+    }
+
+    /// Get a [`Timestamp`] for the current date and time in UTC.
+    ///
+    /// # Examples
+    ///
+    /// Include the current timestamp with the log record:
+    ///
+    /// ```
+    /// use std::io::Write;
+    ///
+    /// let mut builder = env_logger::Builder::new();
+    ///
+    /// builder.format(|buf, record| {
+    ///     let ts = buf.timestamp();
+    ///
+    ///     writeln!(buf, "{}: {}: {}", ts, record.level(), record.args())
+    /// });
+    /// ```
+    ///
+    /// [`Timestamp`]: struct.Timestamp.html
+    pub fn timestamp(&self) -> Timestamp {
+        Timestamp(SystemTime::now())
+    }
+
+    pub(crate) fn print(&self, writer: &Writer) -> io::Result<()> {
+        writer.inner.print(&self.buf.borrow())
+    }
+
+    pub(crate) fn clear(&mut self) {
+        self.buf.borrow_mut().clear()
+    }
+}
+
+impl Write for Formatter {
+    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
+        self.buf.borrow_mut().write(buf)
+    }
+
+    fn flush(&mut self) -> io::Result<()> {
+        self.buf.borrow_mut().flush()
+    }
+}
+
+impl<'a, T> StyledValue<'a, T> {
+    fn write_fmt<F>(&self, f: F) -> fmt::Result
+    where
+        F: FnOnce() -> fmt::Result,
+    {
+        self.style.buf.borrow_mut().set_color(&self.style.spec).map_err(|_| fmt::Error)?;
+
+        // Always try to reset the terminal style, even if writing failed
+        let write = f();
+        let reset = self.style.buf.borrow_mut().reset().map_err(|_| fmt::Error);
+
+        write.and(reset)
+    }
+}
+
+impl fmt::Debug for Timestamp {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        /// A `Debug` wrapper for `Timestamp` that uses the `Display` implementation.
+        struct TimestampValue<'a>(&'a Timestamp);
+
+        impl<'a> fmt::Debug for TimestampValue<'a> {
+            fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+                fmt::Display::fmt(&self.0, f)
+            }
+        }
+
+        f.debug_tuple("Timestamp")
+         .field(&TimestampValue(&self))
+         .finish()
+    }
+}
+
+impl fmt::Debug for Writer {
+    fn fmt(&self, f: &mut fmt::Formatter)->fmt::Result {
+        f.debug_struct("Writer").finish()
+    }
+}
+
+impl fmt::Debug for Formatter {
+    fn fmt(&self, f: &mut fmt::Formatter)->fmt::Result {
+        f.debug_struct("Formatter").finish()
+    }
+}
+
+impl fmt::Debug for Builder {
+    fn fmt(&self, f: &mut fmt::Formatter)->fmt::Result {
+        f.debug_struct("Logger")
+        .field("target", &self.target)
+        .field("write_style", &self.write_style)
+        .finish()
+    }
+}
+
+impl fmt::Debug for Style {
+    fn fmt(&self, f: &mut fmt::Formatter)->fmt::Result {
+        f.debug_struct("Style").field("spec", &self.spec).finish()
+    }
+}
+
+macro_rules! impl_styled_value_fmt {
+    ($($fmt_trait:path),*) => {
+        $(
+            impl<'a, T: $fmt_trait> $fmt_trait for StyledValue<'a, T> {
+                fn fmt(&self, f: &mut fmt::Formatter)->fmt::Result {
+                    self.write_fmt(|| T::fmt(&self.value, f))
+                }
+            }
+        )*
+    };
+}
+
+impl_styled_value_fmt!(
+    fmt::Debug,
+    fmt::Display,
+    fmt::Pointer,
+    fmt::Octal,
+    fmt::Binary,
+    fmt::UpperHex,
+    fmt::LowerHex,
+    fmt::UpperExp,
+    fmt::LowerExp);
+
+impl fmt::Display for Timestamp {
+    fn fmt(&self, f: &mut fmt::Formatter)->fmt::Result {
+        format_rfc3339_seconds(self.0).fmt(f)
+    }
+}
+
+fn parse_write_style(spec: &str) -> WriteStyle {
+    match spec {
+        "auto" => WriteStyle::Auto,
+        "always" => WriteStyle::Always,
+        "never" => WriteStyle::Never,
+        _ => Default::default(),
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn parse_write_style_valid() {
+        let inputs = vec![
+            ("auto", WriteStyle::Auto),
+            ("always", WriteStyle::Always),
+            ("never", WriteStyle::Never),
+        ];
+
+        for (input, expected) in inputs {
+            assert_eq!(expected, parse_write_style(input));
+        }
+    }
+
+    #[test]
+    fn parse_write_style_invalid() {
+        let inputs = vec![
+            "",
+            "true",
+            "false",
+            "NEVER!!"
+        ];
+
+        for input in inputs {
+            assert_eq!(WriteStyle::Auto, parse_write_style(input));
+        }
+    }
+}
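
The `WriteStyle` handling above is normally driven by the `RUST_LOG_STYLE` variable, but it can also be set programmatically. A minimal sketch, assuming the top-level `Builder::write_style` and `WriteStyle` re-exports shown elsewhere in this patch:

```rust
extern crate env_logger;

use env_logger::{Builder, WriteStyle};

fn main() {
    // Equivalent to RUST_LOG_STYLE=always: emit style characters even when
    // the output is not detected as a terminal.
    Builder::new()
        .write_style(WriteStyle::Always)
        .init();
}
```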
--- a/third_party/rust/env_logger/src/lib.rs
+++ b/third_party/rust/env_logger/src/lib.rs
@@ -3,650 +3,674 @@
 // http://rust-lang.org/COPYRIGHT.
 //
 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-//! A logger configured via an environment variable which writes to standard
-//! error.
+//! A simple logger configured via environment variables which writes
+//! to stdout or stderr, for use with the logging facade exposed by the
+//! [`log` crate][log-crate-url].
 //!
 //! ## Example
 //!
 //! ```
 //! #[macro_use] extern crate log;
 //! extern crate env_logger;
 //!
-//! use log::LogLevel;
+//! use log::Level;
 //!
 //! fn main() {
-//!     env_logger::init().unwrap();
+//!     env_logger::init();
 //!
 //!     debug!("this is a debug {}", "message");
 //!     error!("this is printed by default");
 //!
-//!     if log_enabled!(LogLevel::Info) {
+//!     if log_enabled!(Level::Info) {
 //!         let x = 3 * 4; // expensive computation
 //!         info!("the answer was: {}", x);
 //!     }
 //! }
 //! ```
 //!
 //! Assumes the binary is `main`:
 //!
 //! ```{.bash}
 //! $ RUST_LOG=error ./main
-//! ERROR:main: this is printed by default
+//! ERROR: 2017-11-09T02:12:24Z: main: this is printed by default
 //! ```
 //!
 //! ```{.bash}
 //! $ RUST_LOG=info ./main
-//! ERROR:main: this is printed by default
-//! INFO:main: the answer was: 12
+//! ERROR: 2017-11-09T02:12:24Z: main: this is printed by default
+//! INFO: 2017-11-09T02:12:24Z: main: the answer was: 12
 //! ```
 //!
 //! ```{.bash}
 //! $ RUST_LOG=debug ./main
-//! DEBUG:main: this is a debug message
-//! ERROR:main: this is printed by default
-//! INFO:main: the answer was: 12
+//! DEBUG: 2017-11-09T02:12:24Z: main: this is a debug message
+//! ERROR: 2017-11-09T02:12:24Z: main: this is printed by default
+//! INFO: 2017-11-09T02:12:24Z: main: the answer was: 12
 //! ```
 //!
 //! You can also set the log level on a per module basis:
 //!
 //! ```{.bash}
 //! $ RUST_LOG=main=info ./main
-//! ERROR:main: this is printed by default
-//! INFO:main: the answer was: 12
+//! ERROR: 2017-11-09T02:12:24Z: main: this is printed by default
+//! INFO: 2017-11-09T02:12:24Z: main: the answer was: 12
 //! ```
 //!
 //! And enable all logging:
 //!
 //! ```{.bash}
 //! $ RUST_LOG=main ./main
-//! DEBUG:main: this is a debug message
-//! ERROR:main: this is printed by default
-//! INFO:main: the answer was: 12
+//! DEBUG: 2017-11-09T02:12:24Z: main: this is a debug message
+//! ERROR: 2017-11-09T02:12:24Z: main: this is printed by default
+//! INFO: 2017-11-09T02:12:24Z: main: the answer was: 12
 //! ```
 //!
-//! See the documentation for the log crate for more information about its API.
+//! See the documentation for the [`log` crate][log-crate-url] for more
+//! information about its API.
 //!
 //! ## Enabling logging
 //!
 //! Log levels are controlled on a per-module basis, and by default all logging
 //! is disabled except for `error!`. Logging is controlled via the `RUST_LOG`
 //! environment variable. The value of this environment variable is a
 //! comma-separated list of logging directives. A logging directive is of the
 //! form:
 //!
 //! ```text
-//! path::to::module=log_level
+//! path::to::module=level
 //! ```
 //!
 //! The path to the module is rooted in the name of the crate it was compiled
 //! for, so if your program is contained in a file `hello.rs`, for example, to
 //! turn on logging for this file you would use a value of `RUST_LOG=hello`.
 //! Furthermore, this path is a prefix-search, so all modules nested in the
 //! specified module will also have logging enabled.
 //!
-//! The actual `log_level` is optional to specify. If omitted, all logging will
+//! The actual `level` is optional to specify. If omitted, all logging will
 //! be enabled. If specified, it must be one of the strings `debug`, `error`,
 //! `info`, `warn`, or `trace`.
 //!
 //! As the log level for a module is optional, the module to enable logging for
-//! is also optional. If only a `log_level` is provided, then the global log
+//! is also optional. If only a `level` is provided, then the global log
 //! level for all modules is set to this value.
 //!
 //! Some examples of valid values of `RUST_LOG` are:
 //!
 //! * `hello` turns on all logging for the 'hello' module
 //! * `info` turns on all info logging
 //! * `hello=debug` turns on debug logging for 'hello'
 //! * `hello,std::option` turns on hello, and std's option logging
 //! * `error,hello=warn` turn on global error logging and also warn for hello
 //!
 //! ## Filtering results
 //!
-//! A RUST_LOG directive may include a regex filter. The syntax is to append `/`
+//! A `RUST_LOG` directive may include a regex filter. The syntax is to append `/`
 //! followed by a regex. Each message is checked against the regex, and is only
 //! logged if it matches. Note that the matching is done after formatting the
 //! log string but before adding any logging meta-data. There is a single filter
 //! for all modules.
 //!
 //! Some examples:
 //!
 //! * `hello/foo` turns on all logging for the 'hello' module where the log
 //!   message includes 'foo'.
 //! * `info/f.o` turns on all info logging where the log message includes 'foo',
 //!   'f1o', 'fao', etc.
 //! * `hello=debug/foo*foo` turns on debug logging for 'hello' where the log
 //!   message includes 'foofoo' or 'fofoo' or 'fooooooofoo', etc.
-//! * `error,hello=warn/[0-9] scopes` turn on global error logging and also
+//! * `error,hello=warn/[0-9]scopes` turn on global error logging and also
 //!   warn for hello. In both cases the log message must include a single digit
 //!   number followed by 'scopes'.
+//!
+//! ## Disabling colors
+//!
+//! Colors and other styles can be configured with the `RUST_LOG_STYLE`
+//! environment variable. It accepts the following values:
+//!
+//! * `auto` (default) will attempt to print style characters, but won't force the issue.
+//! If the console isn't available on Windows, or if TERM=dumb, for example, colors won't be printed.
+//! * `always` will always print style characters even if they aren't supported by the terminal.
+//! This includes emitting ANSI colors on Windows if the console API is unavailable.
+//! * `never` will never print style characters.
+//!
+//! [log-crate-url]: https://docs.rs/log/
 
 #![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
        html_favicon_url = "http://www.rust-lang.org/favicon.ico",
-       html_root_url = "http://doc.rust-lang.org/env_logger/")]
+       html_root_url = "https://docs.rs/env_logger/0.5.4")]
 #![cfg_attr(test, deny(warnings))]
 
 // When compiled for the rustc compiler itself we want to make sure that this is
 // an unstable crate
 #![cfg_attr(rustbuild, feature(staged_api, rustc_private))]
 #![cfg_attr(rustbuild, unstable(feature = "rustc_private", issue = "27812"))]
 
+#![deny(missing_debug_implementations, missing_docs, warnings)]
+
 extern crate log;
+extern crate termcolor;
+extern crate humantime;
+extern crate atty;
 
 use std::env;
+use std::borrow::Cow;
 use std::io::prelude::*;
 use std::io;
 use std::mem;
+use std::cell::RefCell;
 
-use log::{Log, LogLevel, LogLevelFilter, LogRecord, SetLoggerError, LogMetadata};
+use log::{Log, LevelFilter, Level, Record, SetLoggerError, Metadata};
 
-#[cfg(feature = "regex")]
-#[path = "regex.rs"]
-mod filter;
+pub mod filter;
+pub mod fmt;
+
+pub use self::fmt::{Target, WriteStyle, Color, Formatter};
+
+const DEFAULT_FILTER_ENV: &'static str = "RUST_LOG";
+const DEFAULT_WRITE_STYLE_ENV: &'static str = "RUST_LOG_STYLE";
 
-#[cfg(not(feature = "regex"))]
-#[path = "string.rs"]
-mod filter;
-
-/// Log target, either stdout or stderr.
+/// Set of environment variables to configure from.
+///
+/// By default, the `Env` will read the following environment variables:
+///
+/// - `RUST_LOG`: the level filter
+/// - `RUST_LOG_STYLE`: whether or not to print styles with records.
+///
+/// These sources can be configured using the builder methods on `Env`.
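+///
+/// For example, a short sketch that reads the filter from a custom
+/// `MY_APP_LOG` variable (an illustrative name) while keeping the default
+/// style variable:
+///
+/// ```
+/// use env_logger::{Builder, Env};
+///
+/// let env = Env::new().filter("MY_APP_LOG");
+///
+/// let mut builder = Builder::from_env(env);
+/// builder.init();
+/// ```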
 #[derive(Debug)]
-pub enum LogTarget {
-    Stdout,
-    Stderr,
+pub struct Env<'a> {
+    filter: Cow<'a, str>,
+    write_style: Cow<'a, str>,
 }
 
-/// The logger.
+/// The env logger.
+///
+/// This struct implements the `Log` trait from the [`log` crate][log-crate-url],
+/// which allows it to act as a logger.
+///
+/// The [`init()`], [`try_init()`], [`Builder::init()`] and [`Builder::try_init()`]
+/// methods will each construct a `Logger` and immediately initialize it as the
+/// default global logger.
+///
+/// If you instead need access to the constructed `Logger`, you can use
+/// the associated [`Builder`] and install it with the
+/// [`log` crate][log-crate-url] directly.
+///
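+/// For example, a minimal sketch that builds a `Logger` with a [`Builder`] and
+/// installs it by hand using the `log` crate:
+///
+/// ```
+/// # extern crate log;
+/// # extern crate env_logger;
+/// use env_logger::Builder;
+///
+/// # fn main() {
+/// let logger = Builder::new().build();
+///
+/// log::set_max_level(logger.filter());
+/// log::set_boxed_logger(Box::new(logger)).unwrap();
+/// # }
+/// ```
+///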
+/// [log-crate-url]: https://docs.rs/log/
+/// [`init()`]: fn.init.html
+/// [`try_init()`]: fn.try_init.html
+/// [`Builder::init()`]: struct.Builder.html#method.init
+/// [`Builder::try_init()`]: struct.Builder.html#method.try_init
+/// [`Builder`]: struct.Builder.html
 pub struct Logger {
-    directives: Vec<LogDirective>,
-    filter: Option<filter::Filter>,
-    format: Box<Fn(&LogRecord) -> String + Sync + Send>,
-    target: LogTarget,
+    writer: fmt::Writer,
+    filter: filter::Filter,
+    format: Box<Fn(&mut Formatter, &Record) -> io::Result<()> + Sync + Send>,
 }
 
-/// LogBuilder acts as builder for initializing the Logger.
-/// It can be used to customize the log format, change the enviromental variable used
+/// `Builder` acts as builder for initializing a `Logger`.
+///
+/// It can be used to customize the log format, change the environment variable used
 /// to provide the logging directives and also set the default log level filter.
 ///
 /// ## Example
 ///
 /// ```
 /// #[macro_use]
 /// extern crate log;
 /// extern crate env_logger;
 ///
 /// use std::env;
-/// use log::{LogRecord, LogLevelFilter};
-/// use env_logger::LogBuilder;
+/// use std::io::Write;
+/// use log::LevelFilter;
+/// use env_logger::Builder;
 ///
 /// fn main() {
-///     let format = |record: &LogRecord| {
-///         format!("{} - {}", record.level(), record.args())
-///     };
+///     let mut builder = Builder::new();
 ///
-///     let mut builder = LogBuilder::new();
-///     builder.format(format).filter(None, LogLevelFilter::Info);
+///     builder.format(|buf, record| writeln!(buf, "{} - {}", record.level(), record.args()))
+///            .filter(None, LevelFilter::Info);
 ///
-///     if env::var("RUST_LOG").is_ok() {
-///        builder.parse(&env::var("RUST_LOG").unwrap());
+///     if let Ok(rust_log) = env::var("RUST_LOG") {
+///        builder.parse(&rust_log);
 ///     }
 ///
-///     builder.init().unwrap();
+///     builder.init();
 ///
 ///     error!("error message");
 ///     info!("info message");
 /// }
 /// ```
-pub struct LogBuilder {
-    directives: Vec<LogDirective>,
-    filter: Option<filter::Filter>,
-    format: Box<Fn(&LogRecord) -> String + Sync + Send>,
-    target: LogTarget,
+pub struct Builder {
+    filter: filter::Builder,
+    writer: fmt::Builder,
+    format: Box<Fn(&mut Formatter, &Record) -> io::Result<()> + Sync + Send>,
 }
 
-impl LogBuilder {
-    /// Initializes the log builder with defaults
-    pub fn new() -> LogBuilder {
-        LogBuilder {
-            directives: Vec::new(),
-            filter: None,
-            format: Box::new(|record: &LogRecord| {
-                format!("{}:{}: {}", record.level(),
-                        record.location().module_path(), record.args())
+impl Builder {
+    /// Initializes the log builder with defaults.
+    pub fn new() -> Builder {
+        Builder {
+            filter: Default::default(),
+            writer: Default::default(),
+            format: Box::new(|buf, record| {
+                let ts = buf.timestamp();
+                let level = record.level();
+                let mut level_style = buf.style();
+
+                match level {
+                    Level::Trace => level_style.set_color(Color::White),
+                    Level::Debug => level_style.set_color(Color::Blue),
+                    Level::Info => level_style.set_color(Color::Green),
+                    Level::Warn => level_style.set_color(Color::Yellow),
+                    Level::Error => level_style.set_color(Color::Red).set_bold(true),
+                };
+
+                if let Some(module_path) = record.module_path() {
+                    writeln!(buf, "{:>5} {}: {}: {}", level_style.value(level), ts, module_path, record.args())
+                }
+                else {
+                    writeln!(buf, "{:>5} {}: {}", level_style.value(level), ts, record.args())
+                }
             }),
-            target: LogTarget::Stderr,
         }
     }
 
-    /// Adds filters to the logger
+    /// Initializes the log builder from the environment.
+    ///
+    /// The variables used to read configuration from can be tweaked before
+    /// passing in.
+    ///
+    /// # Examples
+    ///
+    /// Initialise a logger using the default environment variables:
+    ///
+    /// ```
+    /// use env_logger::{Builder, Env};
+    ///
+    /// let mut builder = Builder::from_env(Env::default());
+    /// builder.init();
+    /// ```
+    ///
+    /// Initialise a logger using the `MY_LOG` variable for filtering and
+    /// `MY_LOG_STYLE` for whether or not to write styles:
+    ///
+    /// ```
+    /// use env_logger::{Builder, Env};
+    ///
+    /// let env = Env::new().filter("MY_LOG").write_style("MY_LOG_STYLE");
+    ///
+    /// let mut builder = Builder::from_env(env);
+    /// builder.init();
+    /// ```
+    pub fn from_env<'a, E>(env: E) -> Self
+    where
+        E: Into<Env<'a>>
+    {
+        let mut builder = Builder::new();
+        let env = env.into();
+
+        if let Some(s) = env.get_filter() {
+            builder.parse(&s);
+        }
+
+        if let Some(s) = env.get_write_style() {
+            builder.parse_write_style(&s);
+        }
+
+        builder
+    }
+
+    /// Adds filters to the logger.
     ///
     /// The given module (if any) will log at most the specified level provided.
     /// If no module is provided then the filter will apply to all log messages.
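+    ///
+    /// # Examples
+    ///
+    /// A short sketch that only allows messages at `Info` level and above for
+    /// a hypothetical `path::to::module`:
+    ///
+    /// ```
+    /// # extern crate log;
+    /// # extern crate env_logger;
+    /// use log::LevelFilter;
+    /// use env_logger::Builder;
+    ///
+    /// # fn main() {
+    /// let mut builder = Builder::new();
+    ///
+    /// builder.filter(Some("path::to::module"), LevelFilter::Info);
+    /// # }
+    /// ```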
     pub fn filter(&mut self,
                   module: Option<&str>,
-                  level: LogLevelFilter) -> &mut Self {
-        self.directives.push(LogDirective {
-            name: module.map(|s| s.to_string()),
-            level: level,
-        });
+                  level: LevelFilter) -> &mut Self {
+        self.filter.filter(module, level);
         self
     }
 
     /// Sets the format function for formatting the log output.
     ///
-    /// This function is called on each record logged to produce a string which
-    /// is actually printed out.
+    /// This function is called on each record logged and should format the
+    /// log record and output it to the given [`Formatter`].
+    ///
+    /// The format function is expected to output the string directly to the
+    /// `Formatter` so that implementations can use the [`std::fmt`] macros
+    /// to format and output without intermediate heap allocations. The default
+    /// `env_logger` formatter takes advantage of this.
+    ///
+    /// [`Formatter`]: fmt/struct.Formatter.html
+    /// [`std::fmt`]: https://doc.rust-lang.org/std/fmt/index.html
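+    ///
+    /// # Examples
+    ///
+    /// A minimal sketch of a custom format that writes only the level and the
+    /// message:
+    ///
+    /// ```
+    /// use std::io::Write;
+    ///
+    /// let mut builder = env_logger::Builder::new();
+    ///
+    /// builder.format(|buf, record| writeln!(buf, "{}: {}", record.level(), record.args()));
+    /// ```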
     pub fn format<F: 'static>(&mut self, format: F) -> &mut Self
-        where F: Fn(&LogRecord) -> String + Sync + Send
+        where F: Fn(&mut Formatter, &Record) -> io::Result<()> + Sync + Send
     {
         self.format = Box::new(format);
         self
     }
 
     /// Sets the target for the log output.
     ///
     /// Env logger can log to either stdout or stderr. The default is stderr.
-    pub fn target(&mut self, target: LogTarget) -> &mut Self {
-        self.target = target;
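+    ///
+    /// # Examples
+    ///
+    /// A short sketch that sends log output to `stdout` instead, assuming the
+    /// `Target::Stdout` variant from the `fmt` module:
+    ///
+    /// ```
+    /// use env_logger::{Builder, Target};
+    ///
+    /// let mut builder = Builder::new();
+    ///
+    /// builder.target(Target::Stdout);
+    /// ```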
+    pub fn target(&mut self, target: fmt::Target) -> &mut Self {
+        self.writer.target(target);
         self
     }
 
-    /// Parses the directives string in the same form as the RUST_LOG
+    /// Sets whether or not styles will be written.
+    ///
+    /// This can be useful in environments that don't support control characters
+    /// for setting colors.
+    pub fn write_style(&mut self, write_style: fmt::WriteStyle) -> &mut Self {
+        self.writer.write_style(write_style);
+        self
+    }
+
+    /// Parses the directives string in the same form as the `RUST_LOG`
     /// environment variable.
     ///
     /// See the module documentation for more details.
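+    ///
+    /// # Examples
+    ///
+    /// A short sketch that enables `info` logging globally and `debug` logging
+    /// for a hypothetical `hello` module:
+    ///
+    /// ```
+    /// let mut builder = env_logger::Builder::new();
+    ///
+    /// builder.parse("info,hello=debug");
+    /// ```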
     pub fn parse(&mut self, filters: &str) -> &mut Self {
-        let (directives, filter) = parse_logging_spec(filters);
-
-        self.filter = filter;
+        self.filter.parse(filters);
+        self
+    }
 
-        for directive in directives {
-            self.directives.push(directive);
-        }
+    /// Parses whether or not to write styles in the same form as the `RUST_LOG_STYLE`
+    /// environment variable.
+    ///
+    /// See the module documentation for more details.
+    pub fn parse_write_style(&mut self, write_style: &str) -> &mut Self {
+        self.writer.parse(write_style);
         self
     }
 
-    /// Initializes the global logger with an env logger.
+    /// Initializes the global logger with the built env logger.
+    ///
+    /// This should be called early in the execution of a Rust program. Any log
+    /// events that occur before initialization will be ignored.
+    ///
+    /// # Errors
     ///
-    /// This should be called early in the execution of a Rust program, and the
-    /// global logger may only be initialized once. Future initialization
-    /// attempts will return an error.
-    pub fn init(&mut self) -> Result<(), SetLoggerError> {
-        log::set_logger(|max_level| {
-            let logger = self.build();
-            max_level.set(logger.filter());
-            Box::new(logger)
-        })
+    /// This function will fail if it is called more than once, or if another
+    /// library has already initialized a global logger.
+    pub fn try_init(&mut self) -> Result<(), SetLoggerError> {
+        let logger = self.build();
+
+        log::set_max_level(logger.filter());
+        log::set_boxed_logger(Box::new(logger))
+    }
+
+    /// Initializes the global logger with the built env logger.
+    ///
+    /// This should be called early in the execution of a Rust program. Any log
+    /// events that occur before initialization will be ignored.
+    ///
+    /// # Panics
+    ///
+    /// This function will panic if it is called more than once, or if another
+    /// library has already initialized a global logger.
+    pub fn init(&mut self) {
+        self.try_init().expect("Builder::init should not be called after logger initialized");
     }
 
     /// Build an env logger.
+    ///
+    /// The returned logger implements the `Log` trait and can be installed
+    /// manually with the `log` crate or nested within another logger.
     pub fn build(&mut self) -> Logger {
-        if self.directives.is_empty() {
-            // Adds the default filter if none exist
-            self.directives.push(LogDirective {
-                name: None,
-                level: LogLevelFilter::Error,
-            });
-        } else {
-            // Sort the directives by length of their name, this allows a
-            // little more efficient lookup at runtime.
-            self.directives.sort_by(|a, b| {
-                let alen = a.name.as_ref().map(|a| a.len()).unwrap_or(0);
-                let blen = b.name.as_ref().map(|b| b.len()).unwrap_or(0);
-                alen.cmp(&blen)
-            });
-        }
-
         Logger {
-            directives: mem::replace(&mut self.directives, Vec::new()),
-            filter: mem::replace(&mut self.filter, None),
-            format: mem::replace(&mut self.format, Box::new(|_| String::new())),
-            target: mem::replace(&mut self.target, LogTarget::Stderr),
+            writer: self.writer.build(),
+            filter: self.filter.build(),
+            format: mem::replace(&mut self.format, Box::new(|_, _| Ok(()))),
         }
     }
 }
 
 impl Logger {
-    pub fn new() -> Logger {
-        let mut builder = LogBuilder::new();
-
-        if let Ok(s) = env::var("RUST_LOG") {
-            builder.parse(&s);
-        }
-
-        builder.build()
+    /// Returns the maximum `LevelFilter` that this env logger instance is
+    /// configured to output.
+    pub fn filter(&self) -> LevelFilter {
+        self.filter.filter()
     }
 
-    pub fn filter(&self) -> LogLevelFilter {
-        self.directives.iter()
-            .map(|d| d.level).max()
-            .unwrap_or(LogLevelFilter::Off)
-    }
-
-    fn enabled(&self, level: LogLevel, target: &str) -> bool {
-        // Search for the longest match, the vector is assumed to be pre-sorted.
-        for directive in self.directives.iter().rev() {
-            match directive.name {
-                Some(ref name) if !target.starts_with(&**name) => {},
-                Some(..) | None => {
-                    return level <= directive.level
-                }
-            }
-        }
-        false
+    /// Checks if this record matches the configured filter.
+    pub fn matches(&self, record: &Record) -> bool {
+        self.filter.matches(record)
     }
 }
 
 impl Log for Logger {
-    fn enabled(&self, metadata: &LogMetadata) -> bool {
-        self.enabled(metadata.level(), metadata.target())
+    fn enabled(&self, metadata: &Metadata) -> bool {
+        self.filter.enabled(metadata)
     }
 
-    fn log(&self, record: &LogRecord) {
-        if !Log::enabled(self, record.metadata()) {
-            return;
-        }
+    fn log(&self, record: &Record) {
+        if self.matches(record) {
+            // Log records are written to a thread-local buffer before being printed
+            // to the terminal. We clear these buffers afterwards, but they aren't shrunk,
+            // so they will always have at least enough capacity for the largest log record
+            // formatted on that thread.
+            //
+            // If multiple `Logger`s are used by the same threads then the thread-local
+            // formatter might have different color support. If this is the case the
+            // formatter and its buffer are discarded and recreated.
 
-        if let Some(filter) = self.filter.as_ref() {
-            if !filter.is_match(&*record.args().to_string()) {
-                return;
+            thread_local! {
+                static FORMATTER: RefCell<Option<Formatter>> = RefCell::new(None);
             }
-        }
+
+            FORMATTER.with(|tl_buf| {
+                // It's possible for implementations to sometimes
+                // log-while-logging (e.g. a `std::fmt` implementation logs
+                // internally) but it's super rare. If this happens make sure we
+                // at least don't panic and ship some output to the screen.
+                let mut a;
+                let mut b = None;
+                let tl_buf = match tl_buf.try_borrow_mut() {
+                    Ok(f) => {
+                        a = f;
+                        &mut *a
+                    }
+                    Err(_) => &mut b,
+                };
+
+                // Check the buffer style. If it's different from the logger's
+                // style then drop the buffer and recreate it.
+                match *tl_buf {
+                    Some(ref mut formatter) => {
+                        if formatter.write_style() != self.writer.write_style() {
+                            *formatter = Formatter::new(&self.writer)
+                        }
+                    },
+                    ref mut tl_buf => *tl_buf = Some(Formatter::new(&self.writer))
+                }
+
+                // The formatter is guaranteed to be `Some` by this point
+                let mut formatter = tl_buf.as_mut().unwrap();
+
+                let _ = (self.format)(&mut formatter, record).and_then(|_| formatter.print(&self.writer));
 
-        match self.target {
-            LogTarget::Stdout => println!("{}", (self.format)(record)),
-            LogTarget::Stderr => {
-                let _ = writeln!(&mut io::stderr(), "{}", (self.format)(record));
-            },
-        };
+                // Always clear the buffer afterwards
+                formatter.clear();
+            });
+        }
+    }
+
+    fn flush(&self) {}
+}
+
+impl<'a> Env<'a> {
+    /// Get a default set of environment variables.
+    pub fn new() -> Self {
+        Self::default()
+    }
+
+    /// Specify an environment variable to read the filter from.
+    pub fn filter<E>(mut self, filter_env: E) -> Self
+    where
+        E: Into<Cow<'a, str>>
+    {
+        self.filter = filter_env.into();
+        self
+    }
+
+    fn get_filter(&self) -> Option<String> {
+        env::var(&*self.filter).ok()
+    }
+
+    /// Specify an environment variable to read the style from.
+    pub fn write_style<E>(mut self, write_style_env: E) -> Self
+    where
+        E: Into<Cow<'a, str>>
+    {
+        self.write_style = write_style_env.into();
+        self
+    }
+
+    fn get_write_style(&self) -> Option<String> {
+        env::var(&*self.write_style).ok()
     }
 }
 
-struct LogDirective {
-    name: Option<String>,
-    level: LogLevelFilter,
+impl<'a, T> From<T> for Env<'a>
+where
+    T: Into<Cow<'a, str>>
+{
+    fn from(filter_env: T) -> Self {
+        Env::default().filter(filter_env.into())
+    }
+}
+
+impl<'a> Default for Env<'a> {
+    fn default() -> Self {
+        Env {
+            filter: DEFAULT_FILTER_ENV.into(),
+            write_style: DEFAULT_WRITE_STYLE_ENV.into()
+        }
+    }
+}
+
+mod std_fmt_impls {
+    use std::fmt;
+    use super::*;
+
+    impl fmt::Debug for Logger {
+        fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+            f.debug_struct("Logger")
+                .field("filter", &self.filter)
+                .finish()
+        }
+    }
+
+    impl fmt::Debug for Builder {
+        fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+            f.debug_struct("Builder")
+                .field("filter", &self.filter)
+                .field("writer", &self.writer)
+                .finish()
+        }
+    }
+}
+
+/// Attempts to initialize the global logger with an env logger.
+///
+/// This should be called early in the execution of a Rust program. Any log
+/// events that occur before initialization will be ignored.
+///
+/// # Errors
+///
+/// This function will fail if it is called more than once, or if another
+/// library has already initialized a global logger.
+pub fn try_init() -> Result<(), SetLoggerError> {
+    try_init_from_env(Env::default())
 }
 
 /// Initializes the global logger with an env logger.
 ///
-/// This should be called early in the execution of a Rust program, and the
-/// global logger may only be initialized once. Future initialization attempts
-/// will return an error.
-pub fn init() -> Result<(), SetLoggerError> {
-    let mut builder = LogBuilder::new();
-
-    if let Ok(s) = env::var("RUST_LOG") {
-        builder.parse(&s);
-    }
-
-    builder.init()
-}
-
-/// Parse a logging specification string (e.g: "crate1,crate2::mod3,crate3::x=error/foo")
-/// and return a vector with log directives.
-fn parse_logging_spec(spec: &str) -> (Vec<LogDirective>, Option<filter::Filter>) {
-    let mut dirs = Vec::new();
-
-    let mut parts = spec.split('/');
-    let mods = parts.next();
-    let filter = parts.next();
-    if parts.next().is_some() {
-        println!("warning: invalid logging spec '{}', \
-                 ignoring it (too many '/'s)", spec);
-        return (dirs, None);
-    }
-    mods.map(|m| { for s in m.split(',') {
-        if s.len() == 0 { continue }
-        let mut parts = s.split('=');
-        let (log_level, name) = match (parts.next(), parts.next().map(|s| s.trim()), parts.next()) {
-            (Some(part0), None, None) => {
-                // if the single argument is a log-level string or number,
-                // treat that as a global fallback
-                match part0.parse() {
-                    Ok(num) => (num, None),
-                    Err(_) => (LogLevelFilter::max(), Some(part0)),
-                }
-            }
-            (Some(part0), Some(""), None) => (LogLevelFilter::max(), Some(part0)),
-            (Some(part0), Some(part1), None) => {
-                match part1.parse() {
-                    Ok(num) => (num, Some(part0)),
-                    _ => {
-                        println!("warning: invalid logging spec '{}', \
-                                 ignoring it", part1);
-                        continue
-                    }
-                }
-            },
-            _ => {
-                println!("warning: invalid logging spec '{}', \
-                         ignoring it", s);
-                continue
-            }
-        };
-        dirs.push(LogDirective {
-            name: name.map(|s| s.to_string()),
-            level: log_level,
-        });
-    }});
-
-    let filter = filter.map_or(None, |filter| {
-        match filter::Filter::new(filter) {
-            Ok(re) => Some(re),
-            Err(e) => {
-                println!("warning: invalid regex filter - {}", e);
-                None
-            }
-        }
-    });
-
-    return (dirs, filter);
+/// This should be called early in the execution of a Rust program. Any log
+/// events that occur before initialization will be ignored.
+///
+/// # Panics
+///
+/// This function will panic if it is called more than once, or if another
+/// library has already initialized a global logger.
+pub fn init() {
+    try_init().expect("env_logger::init should not be called after logger initialized");
 }
 
-#[cfg(test)]
-mod tests {
-    use log::{LogLevel, LogLevelFilter};
-
-    use super::{LogBuilder, Logger, LogDirective, parse_logging_spec};
-
-    fn make_logger(dirs: Vec<LogDirective>) -> Logger {
-        let mut logger = LogBuilder::new().build();
-        logger.directives = dirs;
-        logger
-    }
-
-    #[test]
-    fn filter_info() {
-        let logger = LogBuilder::new().filter(None, LogLevelFilter::Info).build();
-        assert!(logger.enabled(LogLevel::Info, "crate1"));
-        assert!(!logger.enabled(LogLevel::Debug, "crate1"));
-    }
-
-    #[test]
-    fn filter_beginning_longest_match() {
-        let logger = LogBuilder::new()
-                        .filter(Some("crate2"), LogLevelFilter::Info)
-                        .filter(Some("crate2::mod"), LogLevelFilter::Debug)
-                        .filter(Some("crate1::mod1"), LogLevelFilter::Warn)
-                        .build();
-        assert!(logger.enabled(LogLevel::Debug, "crate2::mod1"));
-        assert!(!logger.enabled(LogLevel::Debug, "crate2"));
-    }
-
-    #[test]
-    fn parse_default() {
-        let logger = LogBuilder::new().parse("info,crate1::mod1=warn").build();
-        assert!(logger.enabled(LogLevel::Warn, "crate1::mod1"));
-        assert!(logger.enabled(LogLevel::Info, "crate2::mod2"));
-    }
-
-    #[test]
-    fn match_full_path() {
-        let logger = make_logger(vec![
-            LogDirective {
-                name: Some("crate2".to_string()),
-                level: LogLevelFilter::Info
-            },
-            LogDirective {
-                name: Some("crate1::mod1".to_string()),
-                level: LogLevelFilter::Warn
-            }
-        ]);
-        assert!(logger.enabled(LogLevel::Warn, "crate1::mod1"));
-        assert!(!logger.enabled(LogLevel::Info, "crate1::mod1"));
-        assert!(logger.enabled(LogLevel::Info, "crate2"));
-        assert!(!logger.enabled(LogLevel::Debug, "crate2"));
-    }
-
-    #[test]
-    fn no_match() {
-        let logger = make_logger(vec![
-            LogDirective { name: Some("crate2".to_string()), level: LogLevelFilter::Info },
-            LogDirective { name: Some("crate1::mod1".to_string()), level: LogLevelFilter::Warn }
-        ]);
-        assert!(!logger.enabled(LogLevel::Warn, "crate3"));
-    }
-
-    #[test]
-    fn match_beginning() {
-        let logger = make_logger(vec![
-            LogDirective { name: Some("crate2".to_string()), level: LogLevelFilter::Info },
-            LogDirective { name: Some("crate1::mod1".to_string()), level: LogLevelFilter::Warn }
-        ]);
-        assert!(logger.enabled(LogLevel::Info, "crate2::mod1"));
-    }
-
-    #[test]
-    fn match_beginning_longest_match() {
-        let logger = make_logger(vec![
-            LogDirective { name: Some("crate2".to_string()), level: LogLevelFilter::Info },
-            LogDirective { name: Some("crate2::mod".to_string()), level: LogLevelFilter::Debug },
-            LogDirective { name: Some("crate1::mod1".to_string()), level: LogLevelFilter::Warn }
-        ]);
-        assert!(logger.enabled(LogLevel::Debug, "crate2::mod1"));
-        assert!(!logger.enabled(LogLevel::Debug, "crate2"));
-    }
-
-    #[test]
-    fn match_default() {
-        let logger = make_logger(vec![
-            LogDirective { name: None, level: LogLevelFilter::Info },
-            LogDirective { name: Some("crate1::mod1".to_string()), level: LogLevelFilter::Warn }
-        ]);
-        assert!(logger.enabled(LogLevel::Warn, "crate1::mod1"));
-        assert!(logger.enabled(LogLevel::Info, "crate2::mod2"));
-    }
+/// Attempts to initialize the global logger with an env logger from the given
+/// environment variables.
+///
+/// This should be called early in the execution of a Rust program. Any log
+/// events that occur before initialization will be ignored.
+///
+/// # Examples
+///
+/// Initialise a logger using the `MY_LOG` environment variable for filters
+/// and `MY_LOG_STYLE` for writing colors:
+///
+/// ```
+/// # extern crate env_logger;
+/// use env_logger::{Builder, Env};
+///
+/// # fn run() -> Result<(), Box<::std::error::Error>> {
+/// let env = Env::new().filter("MY_LOG").write_style("MY_LOG_STYLE");
+///
+/// env_logger::try_init_from_env(env)?;
+///
+/// Ok(())
+/// # }
+/// # fn main() { run().unwrap(); }
+/// ```
+///
+/// # Errors
+///
+/// This function will fail if it is called more than once, or if another
+/// library has already initialized a global logger.
+pub fn try_init_from_env<'a, E>(env: E) -> Result<(), SetLoggerError>
+where
+    E: Into<Env<'a>>
+{
+    let mut builder = Builder::from_env(env);
 
-    #[test]
-    fn zero_level() {
-        let logger = make_logger(vec![
-            LogDirective { name: None, level: LogLevelFilter::Info },
-            LogDirective { name: Some("crate1::mod1".to_string()), level: LogLevelFilter::Off }
-        ]);
-        assert!(!logger.enabled(LogLevel::Error, "crate1::mod1"));
-        assert!(logger.enabled(LogLevel::Info, "crate2::mod2"));
-    }
-
-    #[test]
-    fn parse_logging_spec_valid() {
-        let (dirs, filter) = parse_logging_spec("crate1::mod1=error,crate1::mod2,crate2=debug");
-        assert_eq!(dirs.len(), 3);
-        assert_eq!(dirs[0].name, Some("crate1::mod1".to_string()));
-        assert_eq!(dirs[0].level, LogLevelFilter::Error);
-
-        assert_eq!(dirs[1].name, Some("crate1::mod2".to_string()));
-        assert_eq!(dirs[1].level, LogLevelFilter::max());
-
-        assert_eq!(dirs[2].name, Some("crate2".to_string()));
-        assert_eq!(dirs[2].level, LogLevelFilter::Debug);
-        assert!(filter.is_none());
-    }
-
-    #[test]
-    fn parse_logging_spec_invalid_crate() {
-        // test parse_logging_spec with multiple = in specification
-        let (dirs, filter) = parse_logging_spec("crate1::mod1=warn=info,crate2=debug");
-        assert_eq!(dirs.len(), 1);
-        assert_eq!(dirs[0].name, Some("crate2".to_string()));
-        assert_eq!(dirs[0].level, LogLevelFilter::Debug);
-        assert!(filter.is_none());
-    }
-
-    #[test]
-    fn parse_logging_spec_invalid_log_level() {
-        // test parse_logging_spec with 'noNumber' as log level
-        let (dirs, filter) = parse_logging_spec("crate1::mod1=noNumber,crate2=debug");
-        assert_eq!(dirs.len(), 1);
-        assert_eq!(dirs[0].name, Some("crate2".to_string()));
-        assert_eq!(dirs[0].level, LogLevelFilter::Debug);
-        assert!(filter.is_none());
-    }
-
-    #[test]
-    fn parse_logging_spec_string_log_level() {
-        // test parse_logging_spec with 'warn' as log level
-        let (dirs, filter) = parse_logging_spec("crate1::mod1=wrong,crate2=warn");
-        assert_eq!(dirs.len(), 1);
-        assert_eq!(dirs[0].name, Some("crate2".to_string()));
-        assert_eq!(dirs[0].level, LogLevelFilter::Warn);
-        assert!(filter.is_none());
-    }
+    builder.try_init()
+}
 
-    #[test]
-    fn parse_logging_spec_empty_log_level() {
-        // test parse_logging_spec with '' as log level
-        let (dirs, filter) = parse_logging_spec("crate1::mod1=wrong,crate2=");
-        assert_eq!(dirs.len(), 1);
-        assert_eq!(dirs[0].name, Some("crate2".to_string()));
-        assert_eq!(dirs[0].level, LogLevelFilter::max());
-        assert!(filter.is_none());
-    }
-
-    #[test]
-    fn parse_logging_spec_global() {
-        // test parse_logging_spec with no crate
-        let (dirs, filter) = parse_logging_spec("warn,crate2=debug");
-        assert_eq!(dirs.len(), 2);
-        assert_eq!(dirs[0].name, None);
-        assert_eq!(dirs[0].level, LogLevelFilter::Warn);
-        assert_eq!(dirs[1].name, Some("crate2".to_string()));
-        assert_eq!(dirs[1].level, LogLevelFilter::Debug);
-        assert!(filter.is_none());
-    }
-
-    #[test]
-    fn parse_logging_spec_valid_filter() {
-        let (dirs, filter) = parse_logging_spec("crate1::mod1=error,crate1::mod2,crate2=debug/abc");
-        assert_eq!(dirs.len(), 3);
-        assert_eq!(dirs[0].name, Some("crate1::mod1".to_string()));
-        assert_eq!(dirs[0].level, LogLevelFilter::Error);
-
-        assert_eq!(dirs[1].name, Some("crate1::mod2".to_string()));
-        assert_eq!(dirs[1].level, LogLevelFilter::max());
-
-        assert_eq!(dirs[2].name, Some("crate2".to_string()));
-        assert_eq!(dirs[2].level, LogLevelFilter::Debug);
-        assert!(filter.is_some() && filter.unwrap().to_string() == "abc");
-    }
-
-    #[test]
-    fn parse_logging_spec_invalid_crate_filter() {
-        let (dirs, filter) = parse_logging_spec("crate1::mod1=error=warn,crate2=debug/a.c");
-        assert_eq!(dirs.len(), 1);
-        assert_eq!(dirs[0].name, Some("crate2".to_string()));
-        assert_eq!(dirs[0].level, LogLevelFilter::Debug);
-        assert!(filter.is_some() && filter.unwrap().to_string() == "a.c");
-    }
-
-    #[test]
-    fn parse_logging_spec_empty_with_filter() {
-        let (dirs, filter) = parse_logging_spec("crate1/a*c");
-        assert_eq!(dirs.len(), 1);
-        assert_eq!(dirs[0].name, Some("crate1".to_string()));
-        assert_eq!(dirs[0].level, LogLevelFilter::max());
-        assert!(filter.is_some() && filter.unwrap().to_string() == "a*c");
-    }
+/// Initializes the global logger with an env logger from the given environment
+/// variables.
+///
+/// This should be called early in the execution of a Rust program. Any log
+/// events that occur before initialization will be ignored.
+///
+/// # Examples
+///
+/// Initialise a logger using the `MY_LOG` environment variable for filters
+/// and `MY_LOG_STYLE` for writing colors:
+///
+/// ```
+/// use env_logger::{Builder, Env};
+///
+/// let env = Env::new().filter("MY_LOG").write_style("MY_LOG_STYLE");
+///
+/// env_logger::init_from_env(env);
+/// ```
+///
+/// # Panics
+///
+/// This function will panic if it is called more than once, or if another
+/// library has already initialized a global logger.
+pub fn init_from_env<'a, E>(env: E)
+where
+    E: Into<Env<'a>>
+{
+    try_init_from_env(env).expect("env_logger::init_from_env should not be called after logger initialized");
 }
new file mode 100644
--- /dev/null
+++ b/third_party/rust/env_logger/tests/log-in-log.rs
@@ -0,0 +1,38 @@
+#[macro_use] extern crate log;
+extern crate env_logger;
+
+use std::process;
+use std::fmt;
+use std::env;
+use std::str;
+
+struct Foo;
+
+impl fmt::Display for Foo {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        info!("test");
+        f.write_str("bar")
+    }
+}
+
+fn main() {
+    env_logger::init();
+    if env::var("YOU_ARE_TESTING_NOW").is_ok() {
+        return info!("{}", Foo);
+    }
+
+    let exe = env::current_exe().unwrap();
+    let out = process::Command::new(exe)
+        .env("YOU_ARE_TESTING_NOW", "1")
+        .env("RUST_LOG", "debug")
+        .output()
+        .unwrap_or_else(|e| panic!("Unable to start child process: {}", e));
+    if out.status.success() {
+        return
+    }
+
+    println!("test failed: {}", out.status);
+    println!("--- stdout\n{}", str::from_utf8(&out.stdout).unwrap());
+    println!("--- stderr\n{}", str::from_utf8(&out.stderr).unwrap());
+    process::exit(1);
+}
--- a/third_party/rust/env_logger/tests/regexp_filter.rs
+++ b/third_party/rust/env_logger/tests/regexp_filter.rs
@@ -9,17 +9,17 @@ fn main() {
     if env::var("LOG_REGEXP_TEST").ok() == Some(String::from("1")) {
         child_main();
     } else {
         parent_main()
     }
 }
 
 fn child_main() {
-    env_logger::init().unwrap();
+    env_logger::init();
     info!("XYZ Message");
 }
 
 fn run_child(rust_log: String) -> bool {
     let exe = env::current_exe().unwrap();
     let out = process::Command::new(exe)
         .env("LOG_REGEXP_TEST", "1")
         .env("RUST_LOG", rust_log)
new file mode 100644
--- /dev/null
+++ b/third_party/rust/humantime/.cargo-checksum.json
@@ -0,0 +1,1 @@
+{"files":{".travis.yml":"d1e17a472064a752c46f13c93b9b171c62bd5c390d4990018b097e61617f67d9","Cargo.toml":"834d5faba5920aaf480fb085fed13244b16559aa8471b67c7412b105df369d1d","LICENSE-APACHE":"c6596eb7be8581c18be736c846fb9173b69eccf6ef94c5135893ec56bd92ba08","LICENSE-MIT":"f6deca8261a8f4a3403dc74c725c46051157fd36c27cd4b100277eb1f303ad11","README.md":"4ff0d6dacf16da81b2d856f96b2a4f1210d82fe43048899a7da9bb0dc5ecc693","benches/datetime_format.rs":"a44c7ffb3c9515e92828564df8f91a86470343a2fa7e1a08bc2bc7397ba2591c","benches/datetime_parse.rs":"336a241755ccfa546d1840e56a40218c4f98f22d7d274829c5102ec280f2d963","bulk.yaml":"17c2548388e0cd3a63473021a2f1e4ddedee082d79d9167cb31ad06a1890d3fc","src/date.rs":"2fef2fa1fdd30086557f44d2f4fae6be1ee65e21c61d9eb40f1e51a9148b970f","src/duration.rs":"300e01c4e61a3495ebb31fb3a752204c5955d58f53c283deb87782a867fd2f45","src/lib.rs":"9f6637ccc38c00b4a90ebf4fcc16a0af070984658c205f579518c355fdbeb5f6","src/wrapper.rs":"5bbdaf43256b445a8ca3a6b3eded9dd4fa8fb4dadf3683a5bac9648d91117396","vagga.yaml":"59761e5138a3015ef5654fec095ac8f1587cd23529b37822dd540b866fe4cd5b"},"package":"0484fda3e7007f2a4a0d9c3a703ca38c71c54c55602ce4660c419fd32e188c9e"}
\ No newline at end of file
new file mode 100644
--- /dev/null
+++ b/third_party/rust/humantime/.travis.yml
@@ -0,0 +1,36 @@
+sudo: false
+dist: trusty
+language: rust
+
+cache:
+- cargo
+
+before_cache:
+- rm -r $TRAVIS_BUILD_DIR/target/debug
+
+jobs:
+  include:
+  - os: linux
+    rust: 1.18.0
+  - os: linux
+    rust: stable
+  - os: linux
+    rust: beta
+  - os: linux
+    rust: nightly
+
+  # deploy
+  - stage: publish
+    os: linux
+    rust: stable
+    env:
+    # CARGO_TOKEN
+    - secure: "OLR1+9BgqV6eQB2el9rJAGyN1blKH77RcqMWZc1UAtBoT9/iiWCrsEVbD78Qgvei37QCqoa9f4BaQ20Eo4gaodo2f7NoqAYubxRoCNnDN1Mn7GA66YcPTwajhI41KEGXI2lE35zcNbCuJR9cSax2Lb+Gu6Et0tIhT5Vi2Rem5g7Id/v/sqD6FRpP/oTMKT1YcZTt81lvRPsOJHjImDKYWwTcxDmK6/FPPqrLVnXRdZeArAIgfE37KAZQPjrySC1JxESdQ0OhSkg84lC1mxFk83w4MFIY14Pi59tzN5ukI1nhNNIuvkjErKI+/rpp7obJ1do9/pesh2kVeiaR2XXdB4T6SR+oJo3VAfe+LmmBjBLTUXHM7Iwg6vc8Xors7HCsgyvxY0S0ppjwh/PQa9yjx5eKRw2tB26HjCS/pom8ffTyjTh1//+4rVvMs/qp3cpfYyYVLGaGNRMtXCsYdiURCAZwTtKKbk9P4dWHX9ceVpi/Ikbqu0C18FueerBEbAc+Rum2bhEkf0smUC4LTPXQgsb2l2DDCFnTAvfeGz+7rJ26g7UAG+ua4KDeVU+R3VUUNjBwU5GeWy08/bEQz64KX+pTcRxEpxWVs55CeOQPTVEjoyhUvK0BYvRCovri+3YSqznf4Vm42QluJ6H90uyQTmfI030fQLnabmZn4M9Um+U="
+    install: true
+    script: true
+
+    deploy:
+    - provider: script
+      script: 'cargo publish --verbose --token=$CARGO_TOKEN'
+      on:
+        tags: true
new file mode 100644
--- /dev/null
+++ b/third_party/rust/humantime/Cargo.toml
@@ -0,0 +1,37 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g. crates.io) dependencies
+#
+# If you believe there's an error in this file please file an
+# issue against the rust-lang/cargo repository. If you're
+# editing this file be aware that the upstream Cargo.toml
+# will likely look very different (and much more reasonable)
+
+[package]
+name = "humantime"
+version = "1.1.1"
+authors = ["Paul Colomiets <paul@colomiets.name>"]
+description = "    A parser and formatter for std::time::{Duration, SystemTime}\n"
+homepage = "https://github.com/tailhook/humantime"
+documentation = "https://docs.rs/humantime"
+readme = "README.md"
+keywords = ["time", "human", "human-friendly", "parser", "duration"]
+categories = ["date-and-time"]
+license = "MIT/Apache-2.0"
+
+[lib]
+name = "humantime"
+path = "src/lib.rs"
+[dependencies.quick-error]
+version = "1.0.0"
+[dev-dependencies.chrono]
+version = "0.4.0"
+
+[dev-dependencies.rand]
+version = "0.4.2"
+
+[dev-dependencies.time]
+version = "0.1.39"
new file mode 100644
--- /dev/null
+++ b/third_party/rust/humantime/LICENSE-APACHE
@@ -0,0 +1,202 @@
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "{}"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright {yyyy} {name of copyright owner}
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+
new file mode 100644
--- /dev/null
+++ b/third_party/rust/humantime/LICENSE-MIT
@@ -0,0 +1,26 @@
+Copyright (c) 2016 The humantime Developers
+
+Includes parts of http date with the following copyright:
+Copyright (c) 2016 Pyfisch
+
+Includes portions of musl libc with the following copyright:
+Copyright © 2005-2013 Rich Felker
+
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
new file mode 100644
--- /dev/null
+++ b/third_party/rust/humantime/README.md
@@ -0,0 +1,67 @@
+Human Time
+==========
+
+**Status: stable**
+
+[Documentation](https://docs.rs/humantime) |
+[Github](https://github.com/tailhook/humantime) |
+[Crate](https://crates.io/crates/humantime)
+
+
+Features:
+
+* Parses durations in free form like `15days 2min 2s`
+* Formats durations in a similar form `2years 2min 12us`
+* Parses and formats timestamps in `rfc3339` format: `2018-01-01T12:53:00Z`
+* Parses timestamps in a weaker format: `2018-01-01 12:53:00`
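+
+For example, a minimal sketch of round-tripping a duration, assuming the
+crate's `parse_duration` and `format_duration` functions:
+
+```rust
+extern crate humantime;
+
+fn main() {
+    // Parse a free-form duration, then format it back for display.
+    let duration = humantime::parse_duration("15days 2min 2s").unwrap();
+    println!("{}", humantime::format_duration(duration));
+}
+```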
+
+Timestamp parsing/formatting is super-fast because the format is basically
+fixed.
+
+Here are some micro-benchmarks:
+
+```
+test result: ok. 0 passed; 0 failed; 26 ignored; 0 measured; 0 filtered out
+
+     Running target/release/deps/datetime_format-8facb4ac832d9770
+
+running 2 tests
+test rfc3339_chrono            ... bench:         737 ns/iter (+/- 37)
+test rfc3339_humantime_seconds ... bench:          73 ns/iter (+/- 2)
+
+test result: ok. 0 passed; 0 failed; 0 ignored; 2 measured; 0 filtered out
+
+     Running target/release/deps/datetime_parse-342628f877d7867c
+
+running 6 tests
+test datetime_utc_parse_millis  ... bench:         228 ns/iter (+/- 11)
+test datetime_utc_parse_nanos   ... bench:         236 ns/iter (+/- 10)
+test datetime_utc_parse_seconds ... bench:         204 ns/iter (+/- 18)
+test rfc3339_humantime_millis   ... bench:          28 ns/iter (+/- 1)
+test rfc3339_humantime_nanos    ... bench:          36 ns/iter (+/- 2)
+test rfc3339_humantime_seconds  ... bench:          24 ns/iter (+/- 1)
+
+test result: ok. 0 passed; 0 failed; 0 ignored; 6 measured; 0 filtered out
+```
+
+See [serde-humantime] for serde integration.
+
+[serde-humantime]: https://docs.rs/serde-humantime/0.1.1/serde_humantime/
+
+License
+=======
+
+Licensed under either of
+
+* Apache License, Version 2.0, (./LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0)
+* MIT license (./LICENSE-MIT or http://opensource.org/licenses/MIT)
+
+at your option.
+
+Contribution
+------------
+
+Unless you explicitly state otherwise, any contribution intentionally
+submitted for inclusion in the work by you, as defined in the Apache-2.0
+license, shall be dual licensed as above, without any additional terms or
+conditions.
new file mode 100644
--- /dev/null
+++ b/third_party/rust/humantime/benches/datetime_format.rs
@@ -0,0 +1,58 @@
+#![feature(test)]
+extern crate chrono;
+extern crate humantime;
+extern crate test;
+
+use std::io::Write;
+use std::time::{Duration, UNIX_EPOCH};
+use humantime::format_rfc3339;
+
+
+#[bench]
+fn rfc3339_humantime_seconds(b: &mut test::Bencher) {
+    let time = UNIX_EPOCH + Duration::new(1483228799, 0);
+    let mut buf = Vec::with_capacity(100);
+    b.iter(|| {
+        buf.truncate(0);
+        write!(&mut buf, "{}", format_rfc3339(time)).unwrap()
+    });
+}
+
+#[bench]
+fn rfc3339_chrono(b: &mut test::Bencher) {
+    use chrono::{DateTime, NaiveDateTime, Utc};
+    use chrono::format::Item;
+    use chrono::format::Item::*;
+    use chrono::format::Numeric::*;
+    use chrono::format::Fixed::*;
+    use chrono::format::Pad::*;
+
+    let time = DateTime::<Utc>::from_utc(
+        NaiveDateTime::from_timestamp(1483228799, 0), Utc);
+    let mut buf = Vec::with_capacity(100);
+
+    // formatting code from env_logger
+    const ITEMS: &'static [Item<'static>] = {
+        &[
+            Numeric(Year, Zero),
+            Literal("-"),
+            Numeric(Month, Zero),
+            Literal("-"),
+            Numeric(Day, Zero),
+            Literal("T"),
+            Numeric(Hour, Zero),
+            Literal(":"),
+            Numeric(Minute, Zero),
+            Literal(":"),
+            Numeric(Second, Zero),
+            Fixed(TimezoneOffsetZ),
+        ]
+    };
+
+
+    b.iter(|| {
+        buf.truncate(0);
+        write!(&mut buf, "{}", time.format_with_items(ITEMS.iter().cloned()))
+            .unwrap()
+    });
+}
new file mode 100644
--- /dev/null
+++ b/third_party/rust/humantime/benches/datetime_parse.rs
@@ -0,0 +1,50 @@
+#![feature(test)]
+extern crate chrono;
+extern crate humantime;
+extern crate test;
+
+use chrono::{DateTime};
+use humantime::parse_rfc3339;
+
+
+#[bench]
+fn rfc3339_humantime_seconds(b: &mut test::Bencher) {
+    b.iter(|| {
+        parse_rfc3339("2018-02-13T23:08:32Z").unwrap()
+    });
+}
+
+#[bench]
+fn datetime_utc_parse_seconds(b: &mut test::Bencher) {
+    b.iter(|| {
+        DateTime::parse_from_rfc3339("2018-02-13T23:08:32Z").unwrap()
+    });
+}
+
+#[bench]
+fn rfc3339_humantime_millis(b: &mut test::Bencher) {
+    b.iter(|| {
+        parse_rfc3339("2018-02-13T23:08:32.123Z").unwrap()
+    });
+}
+
+#[bench]
+fn datetime_utc_parse_millis(b: &mut test::Bencher) {
+    b.iter(|| {
+        DateTime::parse_from_rfc3339("2018-02-13T23:08:32.123Z").unwrap()
+    });
+}
+
+#[bench]
+fn rfc3339_humantime_nanos(b: &mut test::Bencher) {
+    b.iter(|| {
+        parse_rfc3339("2018-02-13T23:08:32.123456983Z").unwrap()
+    });
+}
+
+#[bench]
+fn datetime_utc_parse_nanos(b: &mut test::Bencher) {
+    b.iter(|| {
+        DateTime::parse_from_rfc3339("2018-02-13T23:08:32.123456983Z").unwrap()
+    });
+}
new file mode 100644
--- /dev/null
+++ b/third_party/rust/humantime/bulk.yaml
@@ -0,0 +1,8 @@
+minimum-bulk: v0.4.5
+
+versions:
+
+- file: Cargo.toml
+  block-start: ^\[package\]
+  block-end: ^\[.*\]
+  regex: ^version\s*=\s*"(\S+)"
new file mode 100644
--- /dev/null
+++ b/third_party/rust/humantime/src/date.rs
@@ -0,0 +1,530 @@
+use std::fmt;
+use std::str;
+use std::time::{SystemTime, Duration, UNIX_EPOCH};
+
+#[cfg(target_os="cloudabi")]
+mod max {
+    pub const SECONDS: u64 = ::std::u64::MAX / 1_000_000_000;
+    #[allow(unused)]
+    pub const TIMESTAMP: &'static str = "2554-07-21T23:34:33Z";
+}
+#[cfg(all(
+    target_pointer_width="32",
+    not(target_os="cloudabi"),
+    not(target_os="windows"),
+    not(all(target_arch="wasm32", not(target_os="emscripten")))
+))]
+mod max {
+    pub const SECONDS: u64 = ::std::i32::MAX as u64;
+    #[allow(unused)]
+    pub const TIMESTAMP: &'static str = "2038-01-19T03:14:07Z";
+}
+
+#[cfg(any(
+    target_pointer_width="64",
+    target_os="windows",
+    all(target_arch="wasm32", not(target_os="emscripten")),
+))]
+mod max {
+    pub const SECONDS: u64 = 253402300800-1;  // last second of year 9999
+    #[allow(unused)]
+    pub const TIMESTAMP: &'static str = "9999-12-31T23:59:59Z";
+}
+
+quick_error! {
+    /// Error parsing datetime (timestamp)
+    #[derive(Debug, PartialEq, Clone, Copy)]
+    pub enum Error {
+        /// Numeric component is out of range
+        OutOfRange {
+            display("numeric component is out of range")
+        }
+        /// Bad character where digit is expected
+        InvalidDigit {
+            display("bad character where digit is expected")
+        }
+        /// Other formatting errors
+        InvalidFormat {
+            display("timestamp format is invalid")
+        }
+    }
+}
+
+#[derive(Debug, PartialEq, Eq)]
+enum Precision {
+    Smart,
+    Seconds,
+    Nanos,
+}
+
+/// A wrapper type that allows you to Display a SystemTime
+#[derive(Debug)]
+pub struct Rfc3339Timestamp(SystemTime, Precision);
+
+#[inline]
+fn two_digits(b1: u8, b2: u8) -> Result<u64, Error> {
+    if b1 < b'0' || b2 < b'0' || b1 > b'9' || b2 > b'9' {
+        return Err(Error::InvalidDigit);
+    }
+    Ok(((b1 - b'0')*10 + (b2 - b'0')) as u64)
+}
+
+/// Parse RFC3339 timestamp `2018-02-14T00:28:07Z`
+///
+/// Supported feature: any precision of fractional
+/// digits `2018-02-14T00:28:07.133Z`.
+///
+/// Unsupported feature: localized timestamps. Only UTC is supported.
+pub fn parse_rfc3339(s: &str) -> Result<SystemTime, Error> {
+    if s.len() < "2018-02-14T00:28:07Z".len() {
+        return Err(Error::InvalidFormat);
+    }
+    let b = s.as_bytes();
+    if b[10] != b'T' || b[b.len()-1] != b'Z' {
+        return Err(Error::InvalidFormat);
+    }
+    return parse_rfc3339_weak(s);
+}
+
+/// Parse RFC3339-like timestamp `2018-02-14 00:28:07`
+///
+/// Supported features:
+///
+/// 1. Any precision of fractional digits `2018-02-14 00:28:07.133`.
+/// 2. Accepts either `T` or a space as the date/time separator, with or
+///    without the trailing `Z`
+/// 3. Anything valid for `parse_rfc3339` is also valid for this function
+///
+/// Unsupported feature: localized timestamps. Only UTC is supported, even if
+/// `Z` is not specified.
+///
+/// This function is intended for parsing human input, whereas
+/// `parse_rfc3339` is for strings generated programmatically.
+pub fn parse_rfc3339_weak(s: &str) -> Result<SystemTime, Error> {
+    if s.len() < "2018-02-14T00:28:07".len() {
+        return Err(Error::InvalidFormat);
+    }
+    let b = s.as_bytes();  // for careless slicing
+    if b[4] != b'-' || b[7] != b'-' || (b[10] != b'T' && b[10] != b' ') ||
+       b[13] != b':' || b[16] != b':'
+    {
+        return Err(Error::InvalidFormat);
+    }
+    let year = two_digits(b[0], b[1])? * 100 + two_digits(b[2], b[3])?;
+    let month = two_digits(b[5], b[6])?;
+    let day = two_digits(b[8], b[9])?;
+    let hour = two_digits(b[11], b[12])?;
+    let minute = two_digits(b[14], b[15])?;
+    let mut second = two_digits(b[17], b[18])?;
+
+    if year < 1970 || hour > 23 || minute > 59 || second > 60 {
+        return Err(Error::OutOfRange);
+    }
+    // TODO(tailhook) should we check that a leap second only occurs at midnight?
+    if second == 60 {
+        second = 59
+    };
+    let leap_years = ((year - 1) - 1968) / 4 - ((year - 1) - 1900) / 100 +
+                     ((year - 1) - 1600) / 400;
+    let leap = is_leap_year(year);
+    let (mut ydays, mdays) = match month {
+        1 => (0, 31),
+        2 if leap => (31, 29),
+        2 => (31, 28),
+        3 => (59, 31),
+        4 => (90, 30),
+        5 => (120, 31),
+        6 => (151, 30),
+        7 => (181, 31),
+        8 => (212, 31),
+        9 => (243, 30),
+        10 => (273, 31),
+        11 => (304, 30),
+        12 => (334, 31),
+        _ => return Err(Error::OutOfRange),
+    };
+    if day > mdays || day == 0 {
+        return Err(Error::OutOfRange);
+    }
+    ydays += day - 1;
+    if leap && month > 2 {
+        ydays += 1;
+    }
+    let days = (year - 1970) * 365 + leap_years + ydays;
+
+    let time = second + minute * 60 + hour * 3600;
+
+    let mut nanos = 0;
+    let mut mult = 100_000_000;
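+    // Fractional part: each digit is scaled by a decreasing power of ten; once
+    // `mult` reaches zero, digits beyond nanosecond precision contribute nothing.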
+    if b.get(19) == Some(&b'.') {
+        for idx in 20..b.len() {
+            if b[idx] == b'Z' {
+                if idx == b.len()-1 {
+                    break;
+                } else {
+                    return Err(Error::InvalidDigit);
+                }
+            }
+            if b[idx] < b'0' || b[idx] > b'9' {
+                return Err(Error::InvalidDigit);
+            }
+            nanos += mult * (b[idx] - b'0') as u32;
+            mult /= 10;
+        }
+    } else {
+        if b.len() != 19 && (b.len() > 20 || b[19] != b'Z') {
+            return Err(Error::InvalidFormat);
+        }
+    }
+
+    let total_seconds = time + days * 86400;
+    if total_seconds > max::SECONDS {
+        return Err(Error::OutOfRange);
+    }
+
+    return Ok(UNIX_EPOCH + Duration::new(total_seconds, nanos));
+}
+
+fn is_leap_year(y: u64) -> bool {
+    y % 4 == 0 && (!(y % 100 == 0) || y % 400 == 0)
+}
+
+/// Format an RFC3339 timestamp `2018-02-14T00:28:07Z`
+///
+/// This function formats the timestamp with "smart" precision: if there are no
+/// fractional seconds they are not written at all; otherwise up to nine digits
+/// are written.
+///
+/// The value is always UTC and ignores system timezone.
+pub fn format_rfc3339(system_time: SystemTime) -> Rfc3339Timestamp {
+    return Rfc3339Timestamp(system_time, Precision::Smart);
+}
+
+/// Format an RFC3339 timestamp `2018-02-14T00:28:07Z`
+///
+/// This format always shows the timestamp without fractional seconds.
+///
+/// The value is always UTC and ignores system timezone.
+pub fn format_rfc3339_seconds(system_time: SystemTime) -> Rfc3339Timestamp {
+    return Rfc3339Timestamp(system_time, Precision::Seconds);
+}
+
+/// Format an RFC3339 timestamp `2018-02-14T00:28:07.000000000Z`
+///
+/// This format always shows nanoseconds, even if the nanosecond value is zero.
+///
+/// The value is always UTC and ignores system timezone.
+pub fn format_rfc3339_nanos(system_time: SystemTime) -> Rfc3339Timestamp {
+    return Rfc3339Timestamp(system_time, Precision::Nanos);
+}
+
+impl fmt::Display for Rfc3339Timestamp {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        use self::Precision::*;
+
+        let dur = self.0.duration_since(UNIX_EPOCH)
+            .expect("all times should be after the epoch");
+        let secs_since_epoch = dur.as_secs();
+        let nanos = dur.subsec_nanos();
+
+        if secs_since_epoch >= 253402300800 { // year 9999
+            return Err(fmt::Error);
+        }
+
+        /* 2000-03-01 (mod 400 year, immediately after Feb 29) */
+        const LEAPOCH: i64 = 11017;
+        const DAYS_PER_400Y: i64 = 365*400 + 97;
+        const DAYS_PER_100Y: i64 = 365*100 + 24;
+        const DAYS_PER_4Y: i64 = 365*4 + 1;
+
+        let days = (secs_since_epoch / 86400) as i64 - LEAPOCH;
+        let secs_of_day = secs_since_epoch % 86400;
+
+        let mut qc_cycles = days / DAYS_PER_400Y;
+        let mut remdays = days % DAYS_PER_400Y;
+
+        if remdays < 0 {
+            remdays += DAYS_PER_400Y;
+            qc_cycles -= 1;
+        }
+
+        let mut c_cycles = remdays / DAYS_PER_100Y;
+        if c_cycles == 4 { c_cycles -= 1; }
+        remdays -= c_cycles * DAYS_PER_100Y;
+
+        let mut q_cycles = remdays / DAYS_PER_4Y;
+        if q_cycles == 25 { q_cycles -= 1; }
+        remdays -= q_cycles * DAYS_PER_4Y;
+
+        let mut remyears = remdays / 365;
+        if remyears == 4 { remyears -= 1; }
+        remdays -= remyears * 365;
+
+        let mut year = 2000 +
+            remyears + 4*q_cycles + 100*c_cycles + 400*qc_cycles;
+
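+        // Month lengths counted from March (matching the "leapoch" convention
+        // above), so February, with its possible leap day, comes last.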
+        let months = [31,30,31,30,31,31,30,31,30,31,31,29];
+        let mut mon = 0;
+        for mon_len in months.iter() {
+            mon += 1;
+            if remdays < *mon_len {
+                break;
+            }
+            remdays -= *mon_len;
+        }
+        let mday = remdays+1;
+        let mon = if mon + 2 > 12 {
+            year += 1;
+            mon - 10
+        } else {
+            mon + 2
+        };
+
+        let mut buf: [u8; 30] = [
+            // Too long to write as: b"0000-00-00T00:00:00.000000000Z"
+            b'0', b'0', b'0', b'0', b'-', b'0', b'0', b'-', b'0', b'0', b'T',
+            b'0', b'0', b':', b'0', b'0', b':', b'0', b'0',
+            b'.', b'0', b'0', b'0', b'0', b'0', b'0', b'0', b'0', b'0', b'Z',
+        ];
+        buf[0] = b'0' + (year / 1000) as u8;
+        buf[1] = b'0' + (year / 100 % 10) as u8;
+        buf[2] = b'0' + (year / 10 % 10) as u8;
+        buf[3] = b'0' + (year % 10) as u8;
+        buf[5] = b'0' + (mon / 10) as u8;
+        buf[6] = b'0' + (mon % 10) as u8;
+        buf[8] = b'0' + (mday / 10) as u8;
+        buf[9] = b'0' + (mday % 10) as u8;
+        buf[11] = b'0' + (secs_of_day / 3600 / 10) as u8;
+        buf[12] = b'0' + (secs_of_day / 3600 % 10) as u8;
+        buf[14] = b'0' + (secs_of_day / 60 / 10 % 6) as u8;
+        buf[15] = b'0' + (secs_of_day / 60 % 10) as u8;
+        buf[17] = b'0' + (secs_of_day / 10 % 6) as u8;
+        buf[18] = b'0' + (secs_of_day % 10) as u8;
+
+        if self.1 == Seconds || nanos == 0 && self.1 == Smart {
+            buf[19] = b'Z';
+            f.write_str(unsafe { str::from_utf8_unchecked(&buf[..20]) })
+        } else {
+            buf[20] = b'0' + (nanos / 100_000_000) as u8;
+            buf[21] = b'0' + (nanos / 10_000_000 % 10) as u8;
+            buf[22] = b'0' + (nanos / 1_000_000 % 10) as u8;
+            buf[23] = b'0' + (nanos / 100_000 % 10) as u8;
+            buf[24] = b'0' + (nanos / 10_000 % 10) as u8;
+            buf[25] = b'0' + (nanos / 1_000 % 10) as u8;
+            buf[26] = b'0' + (nanos / 100 % 10) as u8;
+            buf[27] = b'0' + (nanos / 10 % 10) as u8;
+            buf[28] = b'0' + (nanos / 1 % 10) as u8;
+            // we know our chars are all ascii
+            f.write_str(unsafe { str::from_utf8_unchecked(&buf[..]) })
+        }
+    }
+}
+
+#[cfg(test)]
+mod test {
+    extern crate time;
+    extern crate rand;
+
+    use std::str::from_utf8;
+    use self::rand::Rng;
+    use std::time::{UNIX_EPOCH, SystemTime, Duration};
+    use super::{parse_rfc3339, parse_rfc3339_weak, format_rfc3339};
+    use super::max;
+
+    fn from_sec(sec: u64) -> (String, SystemTime) {
+        let s = time::at_utc(time::Timespec { sec: sec as i64, nsec: 0 })
+                  .rfc3339().to_string();
+        let time = UNIX_EPOCH + Duration::new(sec, 0);
+        return (s, time)
+    }
+
+    #[test]
+    #[cfg(all(target_pointer_width="32", target_os="linux"))]
+    fn year_after_2038_fails_gracefully() {
+        // next second
+        assert_eq!(parse_rfc3339("2038-01-19T03:14:08Z").unwrap_err(),
+                   super::Error::OutOfRange);
+        assert_eq!(parse_rfc3339("9999-12-31T23:59:59Z").unwrap_err(),
+                   super::Error::OutOfRange);
+    }
+
+    #[test]
+    fn smoke_tests_parse() {
+        assert_eq!(parse_rfc3339("1970-01-01T00:00:00Z").unwrap(),
+                   UNIX_EPOCH + Duration::new(0, 0));
+        assert_eq!(parse_rfc3339("1970-01-01T00:00:01Z").unwrap(),
+                   UNIX_EPOCH + Duration::new(1, 0));
+        assert_eq!(parse_rfc3339("2018-02-13T23:08:32Z").unwrap(),
+                   UNIX_EPOCH + Duration::new(1518563312, 0));
+        assert_eq!(parse_rfc3339("2012-01-01T00:00:00Z").unwrap(),
+                   UNIX_EPOCH + Duration::new(1325376000, 0));
+    }
+
+    #[test]
+    fn smoke_tests_format() {
+        assert_eq!(
+            format_rfc3339(UNIX_EPOCH + Duration::new(0, 0)).to_string(),
+            "1970-01-01T00:00:00Z");
+        assert_eq!(
+            format_rfc3339(UNIX_EPOCH + Duration::new(1, 0)).to_string(),
+            "1970-01-01T00:00:01Z");
+        assert_eq!(
+            format_rfc3339(UNIX_EPOCH + Duration::new(1518563312, 0)).to_string(),
+            "2018-02-13T23:08:32Z");
+        assert_eq!(
+            format_rfc3339(UNIX_EPOCH + Duration::new(1325376000, 0)).to_string(),
+            "2012-01-01T00:00:00Z");
+    }
+
+    #[test]
+    fn upper_bound() {
+        let max = UNIX_EPOCH + Duration::new(max::SECONDS, 0);
+        assert_eq!(parse_rfc3339(&max::TIMESTAMP).unwrap(), max);
+        assert_eq!(format_rfc3339(max).to_string(), max::TIMESTAMP);
+    }
+
+    #[test]
+    fn leap_second() {
+        assert_eq!(parse_rfc3339("2016-12-31T23:59:60Z").unwrap(),
+                   UNIX_EPOCH + Duration::new(1483228799, 0));
+    }
+
+    #[test]
+    fn first_731_days() {
+        let year_start = 0;  // 1970
+        for day in 0.. (365 * 2 + 1) {  // scan two consecutive years, day by day
+            let (s, time) = from_sec(year_start + day * 86400);
+            assert_eq!(parse_rfc3339(&s).unwrap(), time);
+            assert_eq!(format_rfc3339(time).to_string(), s);
+        }
+    }
+
+    #[test]
+    fn the_731_consecutive_days() {
+        let year_start = 1325376000;  // 2012
+        for day in 0.. (365 * 2 + 1) {  // scan leap year and non-leap year
+            let (s, time) = from_sec(year_start + day * 86400);
+            assert_eq!(parse_rfc3339(&s).unwrap(), time);
+            assert_eq!(format_rfc3339(time).to_string(), s);
+        }
+    }
+
+    #[test]
+    fn all_86400_seconds() {
+        let day_start = 1325376000;
+        for second in 0..86400 {  // scan every second of one full day
+            let (s, time) = from_sec(day_start + second);
+            assert_eq!(parse_rfc3339(&s).unwrap(), time);
+            assert_eq!(format_rfc3339(time).to_string(), s);
+        }
+    }
+
+    #[test]
+    fn random_past() {
+        let upper = SystemTime::now().duration_since(UNIX_EPOCH).unwrap()
+            .as_secs();
+        for _ in 0..10000 {
+            let sec = rand::thread_rng().gen_range(0, upper);
+            let (s, time) = from_sec(sec);
+            assert_eq!(parse_rfc3339(&s).unwrap(), time);
+            assert_eq!(format_rfc3339(time).to_string(), s);
+        }
+    }
+
+    #[test]
+    fn random_wide_range() {
+        for _ in 0..100000 {
+            let sec = rand::thread_rng().gen_range(0, max::SECONDS);
+            let (s, time) = from_sec(sec);
+            assert_eq!(parse_rfc3339(&s).unwrap(), time);
+            assert_eq!(format_rfc3339(time).to_string(), s);
+        }
+    }
+
+    #[test]
+    fn milliseconds() {
+        assert_eq!(parse_rfc3339("1970-01-01T00:00:00.123Z").unwrap(),
+                   UNIX_EPOCH + Duration::new(0, 123000000));
+        assert_eq!(format_rfc3339(UNIX_EPOCH + Duration::new(0, 123000000))
+            .to_string(), "1970-01-01T00:00:00.123000000Z");
+    }
+
+    #[test]
+    #[should_panic(expected="OutOfRange")]
+    fn zero_month() {
+        parse_rfc3339("1970-00-01T00:00:00Z").unwrap();
+    }
+
+    #[test]
+    #[should_panic(expected="OutOfRange")]
+    fn big_month() {
+        parse_rfc3339("1970-32-01T00:00:00Z").unwrap();
+    }
+
+    #[test]
+    #[should_panic(expected="OutOfRange")]
+    fn zero_day() {
+        parse_rfc3339("1970-01-00T00:00:00Z").unwrap();
+    }
+
+    #[test]
+    #[should_panic(expected="OutOfRange")]
+    fn big_day() {
+        parse_rfc3339("1970-12-35T00:00:00Z").unwrap();
+    }
+
+    #[test]
+    #[should_panic(expected="OutOfRange")]
+    fn big_day2() {
+        parse_rfc3339("1970-02-30T00:00:00Z").unwrap();
+    }
+
+    #[test]
+    #[should_panic(expected="OutOfRange")]
+    fn big_second() {
+        parse_rfc3339("1970-12-30T00:00:78Z").unwrap();
+    }
+
+    #[test]
+    #[should_panic(expected="OutOfRange")]
+    fn big_minute() {
+        parse_rfc3339("1970-12-30T00:78:00Z").unwrap();
+    }
+
+    #[test]
+    #[should_panic(expected="OutOfRange")]
+    fn big_hour() {
+        parse_rfc3339("1970-12-30T24:00:00Z").unwrap();
+    }
+
+    #[test]
+    fn break_data() {
+        for pos in 0.."2016-12-31T23:59:60Z".len() {
+            let mut s = b"2016-12-31T23:59:60Z".to_vec();
+            s[pos] = b'x';
+            parse_rfc3339(from_utf8(&s).unwrap()).unwrap_err();
+        }
+    }
+
+    #[test]
+    fn weak_smoke_tests() {
+        assert_eq!(parse_rfc3339_weak("1970-01-01 00:00:00").unwrap(),
+                   UNIX_EPOCH + Duration::new(0, 0));
+        parse_rfc3339("1970-01-01 00:00:00").unwrap_err();
+
+        assert_eq!(parse_rfc3339_weak("1970-01-01 00:00:00.000123").unwrap(),
+                   UNIX_EPOCH + Duration::new(0, 123000));
+        parse_rfc3339("1970-01-01 00:00:00.000123").unwrap_err();
+
+        assert_eq!(parse_rfc3339_weak("1970-01-01T00:00:00.000123").unwrap(),
+                   UNIX_EPOCH + Duration::new(0, 123000));
+        parse_rfc3339("1970-01-01T00:00:00.000123").unwrap_err();
+
+        assert_eq!(parse_rfc3339_weak("1970-01-01 00:00:00.000123Z").unwrap(),
+                   UNIX_EPOCH + Duration::new(0, 123000));
+        parse_rfc3339("1970-01-01 00:00:00.000123Z").unwrap_err();
+
+        assert_eq!(parse_rfc3339_weak("1970-01-01 00:00:00Z").unwrap(),
+                   UNIX_EPOCH + Duration::new(0, 0));
+        parse_rfc3339("1970-01-01 00:00:00Z").unwrap_err();
+    }
+}
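A minimal usage sketch of this module's public API (not part of the vendored file; the expected values are taken from the tests above):

```rust
use std::time::{Duration, UNIX_EPOCH};
use humantime::{format_rfc3339, format_rfc3339_nanos, parse_rfc3339, parse_rfc3339_weak};

fn main() {
    // Strict parser: requires the 'T' separator and the trailing 'Z'.
    let t = parse_rfc3339("2018-02-13T23:08:32Z").unwrap();
    assert_eq!(t, UNIX_EPOCH + Duration::new(1518563312, 0));

    // Weak parser: also accepts a space separator and a missing 'Z'.
    assert_eq!(parse_rfc3339_weak("1970-01-01 00:00:00").unwrap(), UNIX_EPOCH);

    // Smart precision omits zero fractional seconds...
    assert_eq!(format_rfc3339(UNIX_EPOCH).to_string(), "1970-01-01T00:00:00Z");
    // ...while the nanosecond formatter always prints nine digits.
    assert_eq!(
        format_rfc3339_nanos(UNIX_EPOCH + Duration::new(0, 123_000_000)).to_string(),
        "1970-01-01T00:00:00.123000000Z"
    );
}
```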
new file mode 100644
--- /dev/null
+++ b/third_party/rust/humantime/src/duration.rs
@@ -0,0 +1,411 @@
+use std::fmt;
+use std::str::Chars;
+use std::time::Duration;
+use std::error::Error as StdError;
+
+quick_error! {
+    /// Error parsing human-friendly duration
+    #[derive(Debug, PartialEq, Clone, Copy)]
+    pub enum Error {
+        /// Invalid character during parsing
+        ///
+        /// More specifically, anything that is not alphanumeric is prohibited.
+        ///
+        /// The field is the byte offset of the character in the string.
+        InvalidCharacter(offset: usize) {
+            display("invalid character at {}", offset)
+            description("invalid character")
+        }
+        /// Non-numeric value where number is expected
+        ///
+        /// This usually means that either a time unit is broken into words,
+        /// e.g. `m sec` instead of `msec`, or the number is omitted,
+        /// for example `2 hours min` instead of `2 hours 1 min`.
+        ///
+        /// The field is the byte offset of the erroneous character
+        /// in the string.
+        NumberExpected(offset: usize) {
+            display("expected number at {}", offset)
+            description("expected number")
+        }
+        /// Unit in the number is not one of allowed units
+        ///
+        /// See documentation of `parse_duration` for the list of supported
+        /// time units.
+        ///
+        /// The two fields are the start and end (exclusive) of the slice of
+        /// the original string containing the erroneous value.
+        UnknownUnit(start: usize, end: usize) {
+            display("unknown unit at {}-{}", start, end)
+            description("unknown unit")
+        }
+        /// The numeric value is too large
+        ///
+        /// Usually this means the value is too large to be useful. If the value
+        /// is written in subsecond units, the maximum is about 3,000 years; with
+        /// seconds or larger units the limit is even higher.
+        NumberOverflow {
+            display(self_) -> ("{}", self_.description())
+            description("number is too large")
+        }
+        /// The value was an empty string (or consisted only of whitespace)
+        Empty {
+            display(self_) -> ("{}", self_.description())
+            description("value was empty")
+        }
+    }
+
+}
+
+/// A wrapper type that allows you to Display a Duration
+#[derive(Debug)]
+pub struct FormattedDuration(Duration);
+
+trait OverflowOp: Sized {
+    fn mul(self, other: Self) -> Result<Self, Error>;
+    fn add(self, other: Self) -> Result<Self, Error>;
+}
+
+impl OverflowOp for u64 {
+    fn mul(self, other: Self) -> Result<Self, Error> {
+        self.checked_mul(other).ok_or(Error::NumberOverflow)
+    }
+    fn add(self, other: Self) -> Result<Self, Error> {
+        self.checked_add(other).ok_or(Error::NumberOverflow)
+    }
+}
+
+struct Parser<'a> {
+    iter: Chars<'a>,
+    src: &'a str,
+    current: (u64, u64),
+}
+
+impl<'a> Parser<'a> {
+    fn off(&self) -> usize {
+        self.src.len() - self.iter.as_str().len()
+    }
+
+    fn parse_first_char(&mut self) -> Result<Option<u64>, Error> {
+        let off = self.off();
+        for c in self.iter.by_ref() {
+            match c {
+                '0'...'9' => {
+                    return Ok(Some(c as u64 - '0' as u64));
+                }
+                c if c.is_whitespace() => continue,
+                _ => {
+                    return Err(Error::NumberExpected(off));
+                }
+            }
+        }
+        return Ok(None);
+    }
+    fn parse_unit(&mut self, n: u64, start: usize, end: usize)
+        -> Result<(), Error>
+    {
+        let (mut sec, nsec) = match &self.src[start..end] {
+            "nanos" | "nsec" | "ns" => (0u64, n),
+            "usec" | "us" => (0u64, try!(n.mul(1000))),
+            "millis" | "msec" | "ms" => (0u64, try!(n.mul(1000_000))),
+            "seconds" | "second" | "secs" | "sec" | "s" => (n, 0),
+            "minutes" | "minute" | "min" | "mins" | "m"
+            => (try!(n.mul(60)), 0),
+            "hours" | "hour" | "hr" | "hrs" | "h" => (try!(n.mul(3600)), 0),
+            "days" | "day" | "d" => (try!(n.mul(86400)), 0),
+            "weeks" | "week" | "w" => (try!(n.mul(86400*7)), 0),
+            "months" | "month" | "M" => (try!(n.mul(2630016)), 0), // 30.44d
+            "years" | "year" | "y" => (try!(n.mul(31557600)), 0), // 365.25d
+            _ => return Err(Error::UnknownUnit(start, end)),
+        };
+        let mut nsec = try!(self.current.1.add(nsec));
+        if nsec > 1000_000_000 {
+            sec = try!(sec.add(nsec / 1000_000_000));
+            nsec %= 1000_000_000;
+        }
+        sec = try!(self.current.0.add(sec));
+        self.current = (sec, nsec);
+        Ok(())
+    }
+
+    fn parse(mut self) -> Result<Duration, Error> {
+        let mut n = try!(try!(self.parse_first_char()).ok_or(Error::Empty));
+        'outer: loop {
+            let mut off = self.off();
+            while let Some(c) = self.iter.next() {
+                match c {
+                    '0'...'9' => {
+                        n = try!(n.checked_mul(10)
+                            .and_then(|x| x.checked_add(c as u64 - '0' as u64))
+                            .ok_or(Error::NumberOverflow));
+                    }
+                    c if c.is_whitespace() => {}
+                    'a'...'z' | 'A'...'Z' => {
+                        break;
+                    }
+                    _ => {
+                        return Err(Error::InvalidCharacter(off));
+                    }
+                }
+                off = self.off();
+            }
+            let start = off;
+            let mut off = self.off();
+            while let Some(c) = self.iter.next() {
+                match c {
+                    '0'...'9' => {
+                        try!(self.parse_unit(n, start, off));
+                        n = c as u64 - '0' as u64;
+                        continue 'outer;
+                    }
+                    c if c.is_whitespace() => break,
+                    'a'...'z' | 'A'...'Z' => {}
+                    _ => {
+                        return Err(Error::InvalidCharacter(off));
+                    }
+                }
+                off = self.off();
+            }
+            try!(self.parse_unit(n, start, off));
+            n = match try!(self.parse_first_char()) {
+                Some(n) => n,
+                None => return Ok(
+                    Duration::new(self.current.0, self.current.1 as u32)),
+            };
+        }
+    }
+
+}
+
+/// Parse duration object `1hour 12min 5s`
+///
+/// The duration object is a concatenation of time spans, where each time
+/// span is an integer number followed by a suffix. Supported suffixes:
+///
+/// * `nsec`, `ns` -- nanoseconds
+/// * `usec`, `us` -- microseconds
+/// * `msec`, `ms` -- milliseconds
+/// * `seconds`, `second`, `sec`, `s`
+/// * `minutes`, `minute`, `min`, `m`
+/// * `hours`, `hour`, `hr`, `h`
+/// * `days`, `day`, `d`
+/// * `weeks`, `week`, `w`
+/// * `months`, `month`, `M` -- defined as 30.44 days
+/// * `years`, `year`, `y` -- defined as 365.25 days
+///
+/// # Examples
+///
+/// ```
+/// use std::time::Duration;
+/// use humantime::parse_duration;
+///
+/// assert_eq!(parse_duration("2h 37min"), Ok(Duration::new(9420, 0)));
+/// assert_eq!(parse_duration("32ms"), Ok(Duration::new(0, 32_000_000)));
+/// ```
+pub fn parse_duration(s: &str) -> Result<Duration, Error> {
+    Parser {
+        iter: s.chars(),
+        src: s,
+        current: (0, 0),
+    }.parse()
+}
+
+/// Formats duration into a human-readable string
+///
+/// Note: the output of this format is guaranteed to round-trip through
+/// `parse_duration` to the same value, but the exact composition of the
+/// string may change in the future.
+///
+/// # Examples
+///
+/// ```
+/// use std::time::Duration;
+/// use humantime::format_duration;
+///
+/// let val1 = Duration::new(9420, 0);
+/// assert_eq!(format_duration(val1).to_string(), "2h 37m");
+/// let val2 = Duration::new(0, 32_000_000);
+/// assert_eq!(format_duration(val2).to_string(), "32ms");
+/// ```
+pub fn format_duration(val: Duration) -> FormattedDuration {
+    FormattedDuration(val)
+}
+
+fn item_plural(f: &mut fmt::Formatter, started: &mut bool,
+    name: &str, value: u64)
+    -> fmt::Result
+{
+    if value > 0 {
+        if *started {
+            f.write_str(" ")?;
+        }
+        write!(f, "{}{}", value, name)?;
+        if value > 1 {
+            f.write_str("s")?;
+        }
+        *started = true;
+    }
+    Ok(())
+}
+fn item(f: &mut fmt::Formatter, started: &mut bool, name: &str, value: u32)
+    -> fmt::Result
+{
+    if value > 0 {
+        if *started {
+            f.write_str(" ")?;
+        }
+        write!(f, "{}{}", value, name)?;
+        *started = true;
+    }
+    Ok(())
+}
+
+impl fmt::Display for FormattedDuration {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        let secs = self.0.as_secs();
+        let nanos = self.0.subsec_nanos();
+
+        if secs == 0 && nanos == 0 {
+            f.write_str("0s")?;
+            return Ok(());
+        }
+
+        let years = secs / 31557600;  // 365.25d
+        let ydays = secs % 31557600;
+        let months = ydays / 2630016;  // 30.44d
+        let mdays = ydays % 2630016;
+        let days = mdays / 86400;
+        let day_secs = mdays % 86400;
+        let hours = day_secs / 3600;
+        let minutes = day_secs % 3600 / 60;
+        let seconds = day_secs % 60;
+
+        let millis = nanos / 1_000_000;
+        let micros = nanos / 1000 % 1000;
+        let nanosec = nanos % 1000;
+
+        let ref mut started = false;
+        item_plural(f, started, "year", years)?;
+        item_plural(f, started, "month", months)?;
+        item_plural(f, started, "day", days)?;
+        item(f, started, "h", hours as u32)?;
+        item(f, started, "m", minutes as u32)?;
+        item(f, started, "s", seconds as u32)?;
+        item(f, started, "ms", millis)?;
+        item(f, started, "us", micros)?;
+        item(f, started, "ns", nanosec)?;
+        Ok(())
+    }
+}
+
+#[cfg(test)]
+mod test {
+    extern crate rand;
+
+    use std::time::Duration;
+    use self::rand::Rng;
+    use super::{parse_duration, format_duration};
+    use super::Error;
+
+    #[test]
+    fn test_units() {
+        assert_eq!(parse_duration("17nsec"), Ok(Duration::new(0, 17)));
+        assert_eq!(parse_duration("17nanos"), Ok(Duration::new(0, 17)));
+        assert_eq!(parse_duration("33ns"), Ok(Duration::new(0, 33)));
+        assert_eq!(parse_duration("3usec"), Ok(Duration::new(0, 3000)));
+        assert_eq!(parse_duration("78us"), Ok(Duration::new(0, 78000)));
+        assert_eq!(parse_duration("31msec"), Ok(Duration::new(0, 31000000)));
+        assert_eq!(parse_duration("31millis"), Ok(Duration::new(0, 31000000)));
+        assert_eq!(parse_duration("6ms"), Ok(Duration::new(0, 6000000)));
+        assert_eq!(parse_duration("3000s"), Ok(Duration::new(3000, 0)));
+        assert_eq!(parse_duration("300sec"), Ok(Duration::new(300, 0)));
+        assert_eq!(parse_duration("300secs"), Ok(Duration::new(300, 0)));
+        assert_eq!(parse_duration("50seconds"), Ok(Duration::new(50, 0)));
+        assert_eq!(parse_duration("1second"), Ok(Duration::new(1, 0)));
+        assert_eq!(parse_duration("100m"), Ok(Duration::new(6000, 0)));
+        assert_eq!(parse_duration("12min"), Ok(Duration::new(720, 0)));
+        assert_eq!(parse_duration("12mins"), Ok(Duration::new(720, 0)));
+        assert_eq!(parse_duration("1minute"), Ok(Duration::new(60, 0)));
+        assert_eq!(parse_duration("7minutes"), Ok(Duration::new(420, 0)));
+        assert_eq!(parse_duration("2h"), Ok(Duration::new(7200, 0)));
+        assert_eq!(parse_duration("7hr"), Ok(Duration::new(25200, 0)));
+        assert_eq!(parse_duration("7hrs"), Ok(Duration::new(25200, 0)));
+        assert_eq!(parse_duration("1hour"), Ok(Duration::new(3600, 0)));
+        assert_eq!(parse_duration("24hours"), Ok(Duration::new(86400, 0)));
+        assert_eq!(parse_duration("1day"), Ok(Duration::new(86400, 0)));
+        assert_eq!(parse_duration("2days"), Ok(Duration::new(172800, 0)));
+        assert_eq!(parse_duration("365d"), Ok(Duration::new(31536000, 0)));
+        assert_eq!(parse_duration("1week"), Ok(Duration::new(604800, 0)));
+        assert_eq!(parse_duration("7weeks"), Ok(Duration::new(4233600, 0)));
+        assert_eq!(parse_duration("52w"), Ok(Duration::new(31449600, 0)));
+        assert_eq!(parse_duration("1month"), Ok(Duration::new(2630016, 0)));
+        assert_eq!(parse_duration("3months"), Ok(Duration::new(3*2630016, 0)));
+        assert_eq!(parse_duration("12M"), Ok(Duration::new(31560192, 0)));
+        assert_eq!(parse_duration("1year"), Ok(Duration::new(31557600, 0)));
+        assert_eq!(parse_duration("7years"), Ok(Duration::new(7*31557600, 0)));
+        assert_eq!(parse_duration("17y"), Ok(Duration::new(536479200, 0)));
+    }
+
+    #[test]
+    fn test_combo() {
+        assert_eq!(parse_duration("20 min 17 nsec "), Ok(Duration::new(1200, 17)));
+        assert_eq!(parse_duration("2h 15m"), Ok(Duration::new(8100, 0)));
+    }
+
+    #[test]
+    fn all_86400_seconds() {
+        for second in 0..86400 {  // scan every second of one full day
+            let d = Duration::new(second, 0);
+            assert_eq!(d,
+                parse_duration(&format_duration(d).to_string()).unwrap());
+        }
+    }
+
+    #[test]
+    fn random_second() {
+        for _ in 0..10000 {
+            let sec = rand::thread_rng().gen_range(0, 253370764800);
+            let d = Duration::new(sec, 0);
+            assert_eq!(d,
+                parse_duration(&format_duration(d).to_string()).unwrap());
+        }
+    }
+
+    #[test]
+    fn random_any() {
+        for _ in 0..10000 {
+            let sec = rand::thread_rng().gen_range(0, 253370764800);
+            let nanos = rand::thread_rng().gen_range(0, 1_000_000_000);
+            let d = Duration::new(sec, nanos);
+            assert_eq!(d,
+                parse_duration(&format_duration(d).to_string()).unwrap());
+        }
+    }
+
+    #[test]
+    fn test_overflow() {
+        // Overflow on subsecond units happens earlier because of how the
+        // conversion is done; we could fix it, but there is no good reason to.
+        assert_eq!(parse_duration("100000000000000000000ns"),
+            Err(Error::NumberOverflow));
+        assert_eq!(parse_duration("100000000000000000us"),
+            Err(Error::NumberOverflow));
+        assert_eq!(parse_duration("100000000000000ms"),
+            Err(Error::NumberOverflow));
+
+        assert_eq!(parse_duration("100000000000000000000s"),
+            Err(Error::NumberOverflow));
+        assert_eq!(parse_duration("10000000000000000000m"),
+            Err(Error::NumberOverflow));
+        assert_eq!(parse_duration("1000000000000000000h"),
+            Err(Error::NumberOverflow));
+        assert_eq!(parse_duration("100000000000000000d"),
+            Err(Error::NumberOverflow));
+        assert_eq!(parse_duration("10000000000000000w"),
+            Err(Error::NumberOverflow));
+        assert_eq!(parse_duration("1000000000000000M"),
+            Err(Error::NumberOverflow));
+        assert_eq!(parse_duration("10000000000000y"),
+            Err(Error::NumberOverflow));
+    }
+}
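A small round-trip sketch (not part of the vendored file; assumes `humantime` as a dependency) of the guarantee described in the `format_duration` docs above:

```rust
use std::time::Duration;
use humantime::{format_duration, parse_duration};

fn main() {
    // 2 days, 3 hours, and 250 microseconds.
    let d = Duration::new(2 * 86_400 + 3 * 3_600, 250_000);
    let s = format_duration(d).to_string();
    // With the current implementation this renders as "2days 3h 250us"...
    assert_eq!(s, "2days 3h 250us");
    // ...and parsing it back always yields the original value.
    assert_eq!(parse_duration(&s), Ok(d));
}
```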
new file mode 100644
--- /dev/null
+++ b/third_party/rust/humantime/src/lib.rs
@@ -0,0 +1,30 @@
+//! Human-friendly time parser and formatter
+//!
+//! Features:
+//!
+//! * Parses durations in free form like `15days 2min 2s`
+//! * Formats durations in a similar form: `2years 2min 12us`
+//! * Parses and formats timestamps in RFC 3339 format: `2018-01-01T12:53:00Z`
+//! * Parses timestamps in a weaker, human-friendly format: `2018-01-01 12:53:00`
+//!
+//! Timestamp parsing and formatting are very fast because the format is
+//! essentially fixed.
+//!
+//! See [serde-humantime] for serde integration.
+//!
+//! [serde-humantime]: https://docs.rs/serde-humantime/0.1.1/serde_humantime/
+#![warn(missing_debug_implementations)]
+#![warn(missing_docs)]
+
+#[macro_use] extern crate quick_error;
+
+mod duration;
+mod wrapper;
+mod date;
+
+pub use duration::{parse_duration, Error as DurationError};
+pub use duration::{format_duration, FormattedDuration};
+pub use wrapper::{Duration, Timestamp};
+pub use date::{parse_rfc3339, parse_rfc3339_weak, Error as TimestampError};
+pub use date::{format_rfc3339, format_rfc3339_seconds, format_rfc3339_nanos};
+pub use date::{Rfc3339Timestamp};
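A quick sketch (illustrative, not part of the crate) exercising the features listed in the module docs through the re-exports above:

```rust
use std::time::Duration;
use humantime::{format_rfc3339, parse_duration, parse_rfc3339_weak};

fn main() {
    // Free-form durations, as advertised above.
    assert_eq!(
        parse_duration("15days 2min 2s"),
        Ok(Duration::new(15 * 86_400 + 2 * 60 + 2, 0))
    );
    // The weaker timestamp format round-trips through the RFC 3339 formatter.
    let t = parse_rfc3339_weak("2018-01-01 12:53:00").unwrap();
    assert_eq!(format_rfc3339(t).to_string(), "2018-01-01T12:53:00Z");
}
```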
new file mode 100644
--- /dev/null
+++ b/third_party/rust/humantime/src/wrapper.rs
@@ -0,0 +1,107 @@
+use std::str::FromStr;
+use std::ops::Deref;
+use std::fmt;
+use std::time::{Duration as StdDuration, SystemTime};
+
+use duration::{self, parse_duration, format_duration};
+use date::{self, parse_rfc3339_weak, format_rfc3339};
+
+/// A wrapper for duration that has `FromStr` implementation
+///
+/// This is useful if you want to use it somewhere where `FromStr` is
+/// expected.
+///
+/// See `parse_duration` for the description of the format.
+///
+/// # Example
+///
+/// ```
+/// use std::time::Duration;
+/// let x: Duration;
+/// x = "12h 5min 2ns".parse::<humantime::Duration>().unwrap().into();
+/// assert_eq!(x, Duration::new(12*3600 + 5*60, 2))
+/// ```
+///
+#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)]
+pub struct Duration(StdDuration);
+
+/// A wrapper for SystemTime that has `FromStr` implementation
+///
+/// This is useful if you want to use it somewhere where `FromStr` is
+/// expected.
+///
+/// See `parse_rfc3339_weak` for the description of the format. The "weak"
+/// format is used because it is more permissive for human input, which is the
+/// expected use of the type (e.g. command-line parsing).
+///
+/// # Example
+///
+/// ```
+/// use std::time::SystemTime;
+/// let x: SystemTime;
+/// x = "2018-02-16T00:31:37Z".parse::<humantime::Timestamp>().unwrap().into();
+/// assert_eq!(humantime::format_rfc3339(x).to_string(), "2018-02-16T00:31:37Z");
+/// ```
+///
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub struct Timestamp(SystemTime);
+
+impl AsRef<StdDuration> for Duration {
+    fn as_ref(&self) -> &StdDuration { &self.0 }
+}
+
+impl Deref for Duration {
+    type Target = StdDuration;
+    fn deref(&self) -> &StdDuration { &self.0 }
+}
+
+impl Into<StdDuration> for Duration {
+    fn into(self) -> StdDuration { self.0 }
+}
+
+impl From<StdDuration> for Duration {
+    fn from(dur: StdDuration) -> Duration { Duration(dur) }
+}
+
+impl FromStr for Duration {
+    type Err = duration::Error;
+    fn from_str(s: &str) -> Result<Duration, Self::Err> {
+        parse_duration(s).map(Duration)
+    }
+}
+
+impl fmt::Display for Duration {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        format_duration(self.0).fmt(f)
+    }
+}
+
+impl AsRef<SystemTime> for Timestamp {
+    fn as_ref(&self) -> &SystemTime { &self.0 }
+}
+
+impl Deref for Timestamp {
+    type Target = SystemTime;
+    fn deref(&self) -> &SystemTime { &self.0 }
+}
+
+impl Into<SystemTime> for Timestamp {
+    fn into(self) -> SystemTime { self.0 }
+}
+
+impl From<SystemTime> for Timestamp {
+    fn from(dur: SystemTime) -> Timestamp { Timestamp(dur) }
+}
+
+impl FromStr for Timestamp {
+    type Err = date::Error;
+    fn from_str(s: &str) -> Result<Timestamp, Self::Err> {
+        parse_rfc3339_weak(s).map(Timestamp)
+    }
+}
+
+impl fmt::Display for Timestamp {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        format_rfc3339(self.0).fmt(f)
+    }
+}
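A minimal sketch of where these `FromStr` wrappers are convenient, e.g. reading a timeout from an environment variable (the variable name and helper are hypothetical, not part of the crate):

```rust
use std::time::Duration;

// Hypothetical helper (MYAPP_TIMEOUT is a made-up name): accepts human-friendly
// values such as "30s" or "2min 30s" and falls back to a default.
fn timeout_from_env() -> Duration {
    std::env::var("MYAPP_TIMEOUT")
        .ok()
        .and_then(|s| s.parse::<humantime::Duration>().ok())
        // `*d` goes through the `Deref<Target = std::time::Duration>` impl above.
        .map(|d| *d)
        .unwrap_or(Duration::from_secs(30))
}

fn main() {
    println!("timeout = {:?}", timeout_from_env());
}
```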
new file mode 100644
--- /dev/null
+++ b/third_party/rust/humantime/vagga.yaml
@@ -0,0 +1,92 @@
+commands:
+
+  cargo: !Command
+    description: Run any cargo command
+    container: ubuntu
+    run: [cargo]
+
+  make: !Command
+    description: Build the library
+    container: ubuntu
+    run: [cargo, build]
+
+  test64: !Command
+    description: Test the 64bit library
+    container: ubuntu
+    environ: { RUST_BACKTRACE: 1 }
+    run: [cargo, test]
+
+  test32: !Command
+    description: Test the 32bit library
+    container: ubuntu32
+    environ: { RUST_BACKTRACE: 1 }
+    run: [cargo, test]
+
+  test: !Command
+    description: Test the 64bit library
+    container: ubuntu
+    environ: { RUST_BACKTRACE: 1 }
+    prerequisites: [test64, test32]
+    run: [echo, okay]
+
+  bench: !Command
+    description: Run benchmarks
+    container: bench
+    environ: { RUST_BACKTRACE: 1 }
+    run: [cargo, bench]
+
+  _bulk: !Command
+    description: Run `bulk` command (for version bookkeeping)
+    container: ubuntu
+    run: [bulk]
+
+containers:
+
+  ubuntu:
+    setup:
+    - !Ubuntu xenial
+    - !UbuntuUniverse
+    - !Install [ca-certificates, build-essential, vim]
+
+    - !TarInstall
+      url: "https://static.rust-lang.org/dist/rust-1.24.0-x86_64-unknown-linux-gnu.tar.gz"
+      script: "./install.sh --prefix=/usr \
+               --components=rustc,rust-std-x86_64-unknown-linux-gnu,cargo"
+    - &bulk !Tar
+      url: "https://github.com/tailhook/bulk/releases/download/v0.4.10/bulk-v0.4.10.tar.gz"
+      sha256: 481513f8a0306a9857d045497fb5b50b50a51e9ff748909ecf7d2bda1de275ab
+      path: /
+
+    environ:
+      HOME: /work/target
+      USER: pc
+
+  ubuntu32:
+    setup:
+    - !UbuntuRelease
+      codename: xenial
+      arch: i386
+    - !UbuntuUniverse
+    - !Install [ca-certificates, build-essential, vim]
+
+    - !TarInstall
+      url: "https://static.rust-lang.org/dist/rust-1.24.0-i686-unknown-linux-gnu.tar.gz"
+      script: "./install.sh --prefix=/usr \
+               --components=rustc,rust-std-i686-unknown-linux-gnu,cargo"
+
+    environ:
+      HOME: /work/target
+      USER: pc
+
+  bench:
+    setup:
+    - !Ubuntu xenial
+    - !Install [ca-certificates, wget, build-essential]
+    - !TarInstall
+      url: https://static.rust-lang.org/dist/rust-nightly-x86_64-unknown-linux-gnu.tar.gz
+      script: |
+        ./install.sh --prefix=/usr \
+          --components=rustc,rust-std-x86_64-unknown-linux-gnu,cargo
+    environ:
+      HOME: /work/target
+      USER: pc
deleted file mode 100644
--- a/third_party/rust/proc-macro2-0.2.2/.cargo-checksum.json
+++ /dev/null
@@ -1,1 +0,0 @@
-{"files":{".travis.yml":"e455a0ed5c3dd056d31f4c7be088bc94f21cab6595a23f2f015b1efc0ac2b55c","Cargo.toml":"0b700f1e7b8ba76ce4678d36b6906d38455e88f51085ea9f120d6ca63f13d5d7","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"adf2e822923878c2ebf4a0a782898c598fc6f57a3af905b85d57fc716c836687","src/lib.rs":"fbae25504264b185d877fb8784d4d88333ea34a7cbeddca3277dc8421f179933","src/macros.rs":"414505e520b8d705b4ce5a64ec2e82d6d1af0b88567454169486a668fbc1e9c8","src/stable.rs":"6363c4c0ef989c2ec81aa75be71c69a103d45a1de439f3f3bcb6806d8a78a172","src/strnom.rs":"1baded8543a9930798fb16092fe51e9074591902e327e0f94eb1c908a6370de9","src/unstable.rs":"110d27103e37427b3d1dcb45b6ba9dc9f5641a255766a43d5db0f4fd10a341ed","tests/test.rs":"9e75d5289abc1dc58c1df00ae051d8c3cd2c0d7830cca5ad689007c05acffe26"},"package":"d1cb7aaaa4bf022ec2b14ff2f2ba1643a22f3cee88df014a85e14b392282c61d"}
\ No newline at end of file
deleted file mode 100644
--- a/third_party/rust/proc-macro2-0.2.2/.travis.yml
+++ /dev/null
@@ -1,31 +0,0 @@
-language: rust
-sudo: false
-
-matrix:
-  include:
-    - rust: 1.15.0
-    - rust: stable
-    - rust: beta
-    - rust: nightly
-      before_script:
-        - pip install 'travis-cargo<0.2' --user && export PATH=$HOME/.local/bin:$PATH
-      script:
-        - cargo test
-        - cargo build --features nightly
-        - RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo test
-        - RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo build --features nightly
-        - RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo doc --no-deps
-      after_success:
-        - travis-cargo --only nightly doc-upload
-
-script:
-  - cargo test
-  - RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo test
-env:
-  global:
-    - TRAVIS_CARGO_NIGHTLY_FEATURE=""
-    - secure: "NAsZghAVTAksrm4WP4I66VmD2wW0eRbwB+ZKHUQfvbgUaCRvVdp4WBbWXGU/f/yHgDFWZwljWR4iPMiBwAK8nZsQFRuLFdHrOOHqbkj639LLdT9A07s1zLMB1GfR1fDttzrGhm903pbT2yxSyqqpahGYM7TaGDYYmKYIk4XyVNA5F5Sk7RI+rCecKraoYDeUEFbjWWYtU2FkEXsELEKj0emX5reWkR+wja3QokFcRZ25+Zd2dRC0K8W5QcY2UokLzKncBMCTC5q70H616S3r/9qW67Si1njsJ7RzP0NlZQUNQ/VCvwr4LCr9w+AD9i1SZtXxuux77tWEWSJvBzUc82dDMUv/floJuF7HTulSxxQoRm+fbzpXj9mgaJNiUHXru6ZRTCRVRUSXpcAco94bVoy/jnjrTe3jgAIZK5w14zA8yLw1Jxof31DlbcWORxgF+6fnY2nKPRN2oiQ50+jm1AuGDZX59/wMiu1QlkjOBHtikHp+u+7mp3SkkM04DvuQ/tWODQQnOOtrA0EB3i5H1zeTSnUcmbJufUljWWOvF1QYII08MccqwfG1KWbpobvdu+cV2iVhkq/lNCEL3Ai101CnmSCnMz+9oK/XxYOrx2TnaD9ootOKgnk7XWxF19GZecQx6O2hHTouxvB/0KcRPGWmMWl0H88f3T/Obql8bG8="
-
-notifications:
-  email:
-    on_success: never
deleted file mode 100644
--- a/third_party/rust/proc-macro2-0.2.2/Cargo.toml
+++ /dev/null
@@ -1,31 +0,0 @@
-# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
-#
-# When uploading crates to the registry Cargo will automatically
-# "normalize" Cargo.toml files for maximal compatibility
-# with all versions of Cargo and also rewrite `path` dependencies
-# to registry (e.g. crates.io) dependencies
-#
-# If you believe there's an error in this file please file an
-# issue against the rust-lang/cargo repository. If you're
-# editing this file be aware that the upstream Cargo.toml
-# will likely look very different (and much more reasonable)
-
-[package]
-name = "proc-macro2"
-version = "0.2.2"
-authors = ["Alex Crichton <alex@alexcrichton.com>"]
-description = "A stable implementation of the upcoming new `proc_macro` API. Comes with an\noption, off by default, to also reimplement itself in terms of the upstream\nunstable API.\n"
-homepage = "https://github.com/alexcrichton/proc-macro2"
-documentation = "https://docs.rs/proc-macro2"
-readme = "README.md"
-keywords = ["macros"]
-license = "MIT/Apache-2.0"
-repository = "https://github.com/alexcrichton/proc-macro2"
-
-[lib]
-doctest = false
-[dependencies.unicode-xid]
-version = "0.1"
-
-[features]
-nightly = []
deleted file mode 100644
--- a/third_party/rust/proc-macro2-0.2.2/LICENSE-APACHE
+++ /dev/null
@@ -1,201 +0,0 @@
-                              Apache License
-                        Version 2.0, January 2004
-                     http://www.apache.org/licenses/
-
-TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-1. Definitions.
-
-   "License" shall mean the terms and conditions for use, reproduction,
-   and distribution as defined by Sections 1 through 9 of this document.
-
-   "Licensor" shall mean the copyright owner or entity authorized by
-   the copyright owner that is granting the License.
-
-   "Legal Entity" shall mean the union of the acting entity and all
-   other entities that control, are controlled by, or are under common
-   control with that entity. For the purposes of this definition,
-   "control" means (i) the power, direct or indirect, to cause the
-   direction or management of such entity, whether by contract or
-   otherwise, or (ii) ownership of fifty percent (50%) or more of the
-   outstanding shares, or (iii) beneficial ownership of such entity.
-
-   "You" (or "Your") shall mean an individual or Legal Entity
-   exercising permissions granted by this License.
-
-   "Source" form shall mean the preferred form for making modifications,
-   including but not limited to software source code, documentation
-   source, and configuration files.
-
-   "Object" form shall mean any form resulting from mechanical
-   transformation or translation of a Source form, including but
-   not limited to compiled object code, generated documentation,
-   and conversions to other media types.
-
-   "Work" shall mean the work of authorship, whether in Source or
-   Object form, made available under the License, as indicated by a
-   copyright notice that is included in or attached to the work
-   (an example is provided in the Appendix below).
-
-   "Derivative Works" shall mean any work, whether in Source or Object
-   form, that is based on (or derived from) the Work and for which the
-   editorial revisions, annotations, elaborations, or other modifications
-   represent, as a whole, an original work of authorship. For the purposes
-   of this License, Derivative Works shall not include works that remain
-   separable from, or merely link (or bind by name) to the interfaces of,
-   the Work and Derivative Works thereof.
-
-   "Contribution" shall mean any work of authorship, including
-   the original version of the Work and any modifications or additions
-   to that Work or Derivative Works thereof, that is intentionally
-   submitted to Licensor for inclusion in the Work by the copyright owner
-   or by an individual or Legal Entity authorized to submit on behalf of
-   the copyright owner. For the purposes of this definition, "submitted"
-   means any form of electronic, verbal, or written communication sent
-   to the Licensor or its representatives, including but not limited to
-   communication on electronic mailing lists, source code control systems,
-   and issue tracking systems that are managed by, or on behalf of, the
-   Licensor for the purpose of discussing and improving the Work, but
-   excluding communication that is conspicuously marked or otherwise
-   designated in writing by the copyright owner as "Not a Contribution."
-
-   "Contributor" shall mean Licensor and any individual or Legal Entity
-   on behalf of whom a Contribution has been received by Licensor and
-   subsequently incorporated within the Work.
-
-2. Grant of Copyright License. Subject to the terms and conditions of
-   this License, each Contributor hereby grants to You a perpetual,
-   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-   copyright license to reproduce, prepare Derivative Works of,
-   publicly display, publicly perform, sublicense, and distribute the
-   Work and such Derivative Works in Source or Object form.
-
-3. Grant of Patent License. Subject to the terms and conditions of
-   this License, each Contributor hereby grants to You a perpetual,
-   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-   (except as stated in this section) patent license to make, have made,
-   use, offer to sell, sell, import, and otherwise transfer the Work,
-   where such license applies only to those patent claims licensable
-   by such Contributor that are necessarily infringed by their
-   Contribution(s) alone or by combination of their Contribution(s)
-   with the Work to which such Contribution(s) was submitted. If You
-   institute patent litigation against any entity (including a
-   cross-claim or counterclaim in a lawsuit) alleging that the Work
-   or a Contribution incorporated within the Work constitutes direct
-   or contributory patent infringement, then any patent licenses
-   granted to You under this License for that Work shall terminate
-   as of the date such litigation is filed.
-
-4. Redistribution. You may reproduce and distribute copies of the
-   Work or Derivative Works thereof in any medium, with or without
-   modifications, and in Source or Object form, provided that You
-   meet the following conditions:
-
-   (a) You must give any other recipients of the Work or
-       Derivative Works a copy of this License; and
-
-   (b) You must cause any modified files to carry prominent notices
-       stating that You changed the files; and
-
-   (c) You must retain, in the Source form of any Derivative Works
-       that You distribute, all copyright, patent, trademark, and
-       attribution notices from the Source form of the Work,
-       excluding those notices that do not pertain to any part of
-       the Derivative Works; and
-
-   (d) If the Work includes a "NOTICE" text file as part of its
-       distribution, then any Derivative Works that You distribute must
-       include a readable copy of the attribution notices contained
-       within such NOTICE file, excluding those notices that do not
-       pertain to any part of the Derivative Works, in at least one
-       of the following places: within a NOTICE text file distributed
-       as part of the Derivative Works; within the Source form or
-       documentation, if provided along with the Derivative Works; or,
-       within a display generated by the Derivative Works, if and
-       wherever such third-party notices normally appear. The contents
-       of the NOTICE file are for informational purposes only and
-       do not modify the License. You may add Your own attribution
-       notices within Derivative Works that You distribute, alongside
-       or as an addendum to the NOTICE text from the Work, provided
-       that such additional attribution notices cannot be construed
-       as modifying the License.
-
-   You may add Your own copyright statement to Your modifications and
-   may provide additional or different license terms and conditions
-   for use, reproduction, or distribution of Your modifications, or
-   for any such Derivative Works as a whole, provided Your use,
-   reproduction, and distribution of the Work otherwise complies with
-   the conditions stated in this License.
-
-5. Submission of Contributions. Unless You explicitly state otherwise,
-   any Contribution intentionally submitted for inclusion in the Work
-   by You to the Licensor shall be under the terms and conditions of
-   this License, without any additional terms or conditions.
-   Notwithstanding the above, nothing herein shall supersede or modify
-   the terms of any separate license agreement you may have executed
-   with Licensor regarding such Contributions.
-
-6. Trademarks. This License does not grant permission to use the trade
-   names, trademarks, service marks, or product names of the Licensor,
-   except as required for reasonable and customary use in describing the
-   origin of the Work and reproducing the content of the NOTICE file.
-
-7. Disclaimer of Warranty. Unless required by applicable law or
-   agreed to in writing, Licensor provides the Work (and each
-   Contributor provides its Contributions) on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-   implied, including, without limitation, any warranties or conditions
-   of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-   PARTICULAR PURPOSE. You are solely responsible for determining the
-   appropriateness of using or redistributing the Work and assume any
-   risks associated with Your exercise of permissions under this License.
-
-8. Limitation of Liability. In no event and under no legal theory,
-   whether in tort (including negligence), contract, or otherwise,
-   unless required by applicable law (such as deliberate and grossly
-   negligent acts) or agreed to in writing, shall any Contributor be
-   liable to You for damages, including any direct, indirect, special,
-   incidental, or consequential damages of any character arising as a
-   result of this License or out of the use or inability to use the
-   Work (including but not limited to damages for loss of goodwill,
-   work stoppage, computer failure or malfunction, or any and all
-   other commercial damages or losses), even if such Contributor
-   has been advised of the possibility of such damages.
-
-9. Accepting Warranty or Additional Liability. While redistributing
-   the Work or Derivative Works thereof, You may choose to offer,
-   and charge a fee for, acceptance of support, warranty, indemnity,
-   or other liability obligations and/or rights consistent with this
-   License. However, in accepting such obligations, You may act only
-   on Your own behalf and on Your sole responsibility, not on behalf
-   of any other Contributor, and only if You agree to indemnify,
-   defend, and hold each Contributor harmless for any liability
-   incurred by, or claims asserted against, such Contributor by reason
-   of your accepting any such warranty or additional liability.
-
-END OF TERMS AND CONDITIONS
-
-APPENDIX: How to apply the Apache License to your work.
-
-   To apply the Apache License to your work, attach the following
-   boilerplate notice, with the fields enclosed by brackets "[]"
-   replaced with your own identifying information. (Don't include
-   the brackets!)  The text should be enclosed in the appropriate
-   comment syntax for the file format. We also recommend that a
-   file or class name and description of purpose be included on the
-   same "printed page" as the copyright notice for easier
-   identification within third-party archives.
-
-Copyright [yyyy] [name of copyright owner]
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-	http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
deleted file mode 100644
--- a/third_party/rust/proc-macro2-0.2.2/LICENSE-MIT
+++ /dev/null
@@ -1,25 +0,0 @@
-Copyright (c) 2014 Alex Crichton
-
-Permission is hereby granted, free of charge, to any
-person obtaining a copy of this software and associated
-documentation files (the "Software"), to deal in the
-Software without restriction, including without
-limitation the rights to use, copy, modify, merge,
-publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software
-is furnished to do so, subject to the following
-conditions:
-
-The above copyright notice and this permission notice
-shall be included in all copies or substantial portions
-of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
-ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
-TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
-PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
-IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
deleted file mode 100644
--- a/third_party/rust/proc-macro2-0.2.2/README.md
+++ /dev/null
@@ -1,98 +0,0 @@
-# proc-macro2
-
-[![Build Status](https://api.travis-ci.org/alexcrichton/proc-macro2.svg?branch=master)](https://travis-ci.org/alexcrichton/proc-macro2)
-[![Latest Version](https://img.shields.io/crates/v/proc-macro2.svg)](https://crates.io/crates/proc-macro2)
-[![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/proc-macro2)
-
-A small shim over the `proc_macro` crate in the compiler intended to multiplex
-the current stable interface (as of 2017-07-05) and the [upcoming richer
-interface][upcoming].
-
-[upcoming]: https://github.com/rust-lang/rust/pull/40939
-
-The upcoming support has features like:
-
-* Span information on tokens
-* No need to go in/out through strings
-* Structured input/output
-
-The hope is that libraries ported to `proc_macro2` will be trivial to port to
-the real `proc_macro` crate once the support on nightly is stabilized.
-
-## Usage
-
-This crate by default compiles on the stable version of the compiler. It only
-uses the stable surface area of the `proc_macro` crate upstream in the compiler
-itself. Usage is done via:
-
-```toml
-[dependencies]
-proc-macro2 = "0.2"
-```
-
-followed by
-
-```rust
-extern crate proc_macro;
-extern crate proc_macro2;
-
-#[proc_macro_derive(MyDerive)]
-pub fn my_derive(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
-    let input: proc_macro2::TokenStream = input.into();
-
-    let output: proc_macro2::TokenStream = {
-        /* transform input */
-    };
-
-    output.into()
-}
-```
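
The `/* transform input */` body above is only a placeholder and will not
compile as written. A minimal pass-through sketch that does compile, assuming a
crate built with `crate-type = ["proc-macro"]` and relying only on the
`From`/`Into` conversions and `TokenStream::empty()` defined in this crate's
`lib.rs`, might look like:

```rust
extern crate proc_macro;
extern crate proc_macro2;

// A do-nothing derive: convert the compiler's tokens into
// `proc_macro2::TokenStream`, generate nothing, and hand an empty stream
// back. A real derive would build `output` by inspecting `input`.
#[proc_macro_derive(MyDerive)]
pub fn my_derive(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
    let _input: proc_macro2::TokenStream = input.into();

    let output = proc_macro2::TokenStream::empty();

    output.into()
}
```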
-
-If you'd like, you can enable the `nightly` feature in this crate. This will
-cause it to compile against the **unstable and nightly-only** features of the
-`proc_macro` crate, which in turn requires a nightly compiler. In exchange,
-this should help preserve span information coming in from the compiler itself.
-
-You can enable this feature via:
-
-```toml
-[dependencies]
-proc-macro2 = { version = "0.2", features = ["nightly"] }
-```
-
-
-## Unstable Features
-
-`proc-macro2` supports exporting some methods from `proc_macro` which are
-currently highly unstable, and may not be stabilized in the first pass of
-`proc_macro` stabilizations. These features are not exported by default. Minor
-versions of `proc-macro2` may make breaking changes to them at any time.
-
-To enable these features, the `procmacro2_semver_exempt` config flag must be
-passed to rustc.
-
-```
-RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo build
-```
-
-Note that this must not only be done for your crate, but for any crate that
-depends on your crate. This infectious nature is intentional, as it serves as a
-reminder that you are outside of the normal semver guarantees.
-
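As a sketch of one way to avoid exporting `RUSTFLAGS` by hand in every shell,
the same flag can be set through Cargo's standard `build.rustflags`
configuration key (assumed here to behave like `RUSTFLAGS` and therefore to
carry the same semver caveats described above):

```toml
# .cargo/config at the workspace root: pass the cfg flag to every rustc
# invocation, including dependent crates, mirroring the RUSTFLAGS example.
[build]
rustflags = ["--cfg", "procmacro2_semver_exempt"]
```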
-
-# License
-
-This project is licensed under either of
-
- * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or
-   http://www.apache.org/licenses/LICENSE-2.0)
- * MIT license ([LICENSE-MIT](LICENSE-MIT) or
-   http://opensource.org/licenses/MIT)
-
-at your option.
-
-### Contribution
-
-Unless you explicitly state otherwise, any contribution intentionally submitted
-for inclusion in proc-macro2 by you, as defined in the Apache-2.0 license,
-shall be dual licensed as above, without any additional terms or conditions.
deleted file mode 100644
--- a/third_party/rust/proc-macro2-0.2.2/src/lib.rs
+++ /dev/null
@@ -1,337 +0,0 @@
-//! A "shim crate" intended to multiplex the `proc_macro` API on to stable Rust.
-//!
-//! Procedural macros in Rust operate over the upstream
-//! `proc_macro::TokenStream` type. This type currently is quite conservative
-//! and exposes no internal implementation details. Nightly compilers, however,
-//! contain a much richer interface. This richer interface allows fine-grained
-//! inspection of the token stream which avoids stringification/re-lexing and
-//! also preserves span information.
-//!
-//! The upcoming APIs added to `proc_macro` upstream are the foundation for
-//! productive procedural macros in the ecosystem. To help prepare the ecosystem
-//! for using them this crate serves to both compile on stable and nightly and
-//! mirrors the API-to-be. The intention is that procedural macros which switch
-//! to use this crate will be trivially able to switch to the upstream
-//! `proc_macro` crate once its API stabilizes.
-//!
-//! In the meantime this crate also has a `nightly` Cargo feature which
-//! enables it to reimplement itself with the unstable API of `proc_macro`.
-//! This'll allow immediate usage of the beneficial upstream API, particularly
-//! around preserving span information.
-
-// Proc-macro2 types in rustdoc of other crates get linked to here.
-#![doc(html_root_url = "https://docs.rs/proc-macro2/0.2.2")]
-
-#![cfg_attr(feature = "nightly", feature(proc_macro))]
-
-extern crate proc_macro;
-
-#[cfg(not(feature = "nightly"))]
-extern crate unicode_xid;
-
-use std::fmt;
-use std::str::FromStr;
-use std::iter::FromIterator;
-
-#[macro_use]
-#[cfg(not(feature = "nightly"))]
-mod strnom;
-
-#[path = "stable.rs"]
-#[cfg(not(feature = "nightly"))]
-mod imp;
-#[path = "unstable.rs"]
-#[cfg(feature = "nightly")]
-mod imp;
-
-#[macro_use]
-mod macros;
-
-#[derive(Clone)]
-pub struct TokenStream(imp::TokenStream);
-
-pub struct LexError(imp::LexError);
-
-impl FromStr for TokenStream {
-    type Err = LexError;
-
-    fn from_str(src: &str) -> Result<TokenStream, LexError> {
-        match src.parse() {
-            Ok(e) => Ok(TokenStream(e)),
-            Err(e) => Err(LexError(e)),
-        }
-    }
-}
-
-impl From<proc_macro::TokenStream> for TokenStream {
-    fn from(inner: proc_macro::TokenStream) -> TokenStream {
-        TokenStream(inner.into())
-    }
-}
-
-impl From<TokenStream> for proc_macro::TokenStream {
-    fn from(inner: TokenStream) -> proc_macro::TokenStream {
-        inner.0.into()
-    }
-}
-
-impl From<TokenTree> for TokenStream {
-    fn from(tree: TokenTree) -> TokenStream {
-        TokenStream(tree.into())
-    }
-}
-
-impl<T: Into<TokenStream>> FromIterator<T> for TokenStream {
-    fn from_iter<I: IntoIterator<Item = T>>(streams: I) -> Self {
-        TokenStream(streams.into_iter().map(|t| t.into().0).collect())
-    }
-}
-
-impl IntoIterator for TokenStream {
-    type Item = TokenTree;
-    type IntoIter = TokenTreeIter;
-
-    fn into_iter(self) -> TokenTreeIter {
-        TokenTreeIter(self.0.into_iter())
-    }
-}
-
-impl TokenStream {
-    pub fn empty() -> TokenStream {
-        TokenStream(imp::TokenStream::empty())
-    }
-
-    pub fn is_empty(&self) -> bool {
-        self.0.is_empty()
-    }
-}
-
-// Returned by reference, so we can't easily wrap it.
-#[cfg(procmacro2_semver_exempt)]
-pub use imp::FileName;
-
-#[cfg(procmacro2_semver_exempt)]
-#[derive(Clone, PartialEq, Eq)]
-pub struct SourceFile(imp::SourceFile);
-
-#[cfg(procmacro2_semver_exempt)]
-impl SourceFile {
-    /// Get the path to this source file as a string.
-    pub fn path(&self) -> &FileName {
-        self.0.path()
-    }
-
-    pub fn is_real(&self) -> bool {
-        self.0.is_real()
-    }
-}
-
-#[cfg(procmacro2_semver_exempt)]
-impl AsRef<FileName> for SourceFile {
-    fn as_ref(&self) -> &FileName {
-        self.0.path()
-    }
-}
-
-#[cfg(procmacro2_semver_exempt)]
-impl fmt::Debug for SourceFile {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        self.0.fmt(f)
-    }
-}
-
-#[cfg(procmacro2_semver_exempt)]
-pub struct LineColumn {
-    pub line: usize,
-    pub column: usize,
-}
-
-#[derive(Copy, Clone)]
-pub struct Span(imp::Span);
-
-impl Span {
-    pub fn call_site() -> Span {
-        Span(imp::Span::call_site())
-    }
-
-    pub fn def_site() -> Span {
-        Span(imp::Span::def_site())
-    }
-
-    /// Creates a new span with the same line/column information as `self` but
-    /// that resolves symbols as though it were at `other`.
-    pub fn resolved_at(&self, other: Span) -> Span {
-        Span(self.0.resolved_at(other.0))
-    }
-
-    /// Creates a new span with the same name resolution behavior as `self` but
-    /// with the line/column information of `other`.
-    pub fn located_at(&self, other: Span) -> Span {
-        Span(self.0.located_at(other.0))
-    }
-
-    /// This method is only available when the `"nightly"` feature is enabled.
-    #[cfg(feature = "nightly")]
-    pub fn unstable(self) -> proc_macro::Span {
-        self.0.unstable()
-    }
-
-    #[cfg(procmacro2_semver_exempt)]
-    pub fn source_file(&self) -> SourceFile {
-        SourceFile(self.0.source_file())
-    }
-
-    #[cfg(procmacro2_semver_exempt)]
-    pub fn start(&self) -> LineColumn {
-        let imp::LineColumn{ line, column } = self.0.start();
-        LineColumn { line: line, column: column }
-    }
-
-    #[cfg(procmacro2_semver_exempt)]
-    pub fn end(&self) -> LineColumn {
-        let imp::LineColumn{ line, column } = self.0.end();
-        LineColumn { line: line, column: column }
-    }
-
-    #[cfg(procmacro2_semver_exempt)]
-    pub fn join(&self, other: Span) -> Option<Span> {
-        self.0.join(other.0).map(Span)
-    }
-}
-
-#[derive(Clone, Debug)]
-pub struct TokenTree {
-    pub span: Span,
-    pub kind: TokenNode,
-}
-
-impl From<TokenNode> for TokenTree {
-    fn from(kind: TokenNode) -> TokenTree {
-        TokenTree { span: Span::def_site(), kind: kind }
-    }
-}
-
-impl fmt::Display for TokenTree {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        TokenStream::from(self.clone()).fmt(f)
-    }
-}
-
-#[derive(Clone, Debug)]
-pub enum TokenNode {
-    Group(Delimiter, TokenStream),
-    Term(Term),
-    Op(char, Spacing),
-    Literal(Literal),
-}
-
-#[derive(Copy, Clone, Debug, Eq, PartialEq)]
-pub enum Delimiter {
-    Parenthesis,
-    Brace,
-    Bracket,
-    None,
-}
-
-#[derive(Copy, Clone)]
-pub struct Term(imp::Term);
-
-impl Term {
-    pub fn intern(string: &str) -> Term {
-        Term(imp::Term::intern(string))
-    }
-
-    pub fn as_str(&self) -> &str {
-        self.0.as_str()
-    }
-}
-
-#[derive(Copy, Clone, Debug, Eq, PartialEq)]
-pub enum Spacing {
-    Alone,
-    Joint,
-}
-
-#[derive(Clone)]
-pub struct Literal(imp::Literal);
-
-macro_rules! int_literals {
-    ($($kind:ident,)*) => ($(
-        pub fn $kind(n: $kind) -> Literal {
-            Literal(n.into())
-        }
-    )*)
-}
-
-impl Literal {
-    pub fn integer(s: i64) -> Literal {
-        Literal(imp::Literal::integer(s))
-    }
-
-    int_literals! {
-        u8, u16, u32, u64, usize,
-        i8, i16, i32, i64, isize,
-    }
-
-    pub fn float(f: f64) -> Literal {
-        Literal(imp::Literal::float(f))
-    }
-
-    pub fn f64(f: f64) -> Literal {
-        Literal(f.into())
-    }
-
-    pub fn f32(f: f32) -> Literal {
-        Literal(f.into())
-    }
-
-    pub fn string(string: &str) -> Literal {
-        Literal(string.into())
-    }
-
-    pub fn character(ch: char) -> Literal {
-        Literal(ch.into())
-    }
-
-    pub fn byte_string(s: &[u8]) -> Literal {
-        Literal(imp::Literal::byte_string(s))
-    }
-
-    // =======================================================================
-    // Not present upstream in proc_macro yet
-
-    pub fn byte_char(b: u8) -> Literal {
-        Literal(imp::Literal::byte_char(b))
-    }
-
-    pub fn doccomment(s: &str) -> Literal {
-        Literal(imp::Literal::doccomment(s))
-    }
-
-    pub fn raw_string(s: &str, pounds: usize) -> Literal {
-        Literal(imp::Literal::raw_string(s, pounds))
-    }
-
-    pub fn raw_byte_string(s: &str, pounds: usize) -> Literal {
-        Literal(imp::Literal::raw_byte_string(s, pounds))
-    }
-}
-
-pub struct TokenTreeIter(imp::TokenTreeIter);
-
-impl Iterator for TokenTreeIter {
-    type Item = TokenTree;
-
-    fn next(&mut self) -> Option<TokenTree> {
-        self.0.next()
-    }
-}
-
-forward_fmt!(Debug for LexError);
-forward_fmt!(Debug for Literal);
-forward_fmt!(Debug for Span);
-forward_fmt!(Debug for Term);
-forward_fmt!(Debug for TokenTreeIter);
-forward_fmt!(Debug for TokenStream);
-forward_fmt!(Display for Literal);
-forward_fmt!(Display for TokenStream);
deleted file mode 100644
--- a/third_party/rust/proc-macro2-0.2.2/src/macros.rs
+++ /dev/null
@@ -1,9 +0,0 @@
-macro_rules! forward_fmt {
-    ($tr:ident for $ty:ident) => {
-        impl ::std::fmt::$tr for $ty {
-            fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
-                ::std::fmt::$tr::fmt(&self.0, f)
-            }
-        }
-    }
-}
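
For reference, a hand-expanded sketch of what an invocation such as
`forward_fmt!(Display for Literal)` in `lib.rs` produces, with a plain
`String` standing in for the wrapped `imp::Literal` (the `String`-backed
representation used by the stable implementation in `stable.rs`, below) so
the snippet is self-contained:

```rust
use std::fmt;

// Stand-in for lib.rs's `pub struct Literal(imp::Literal);`, using the
// String-backed representation from the stable implementation.
struct Literal(String);

// What `forward_fmt!(Display for Literal)` expands to: forward straight to
// the wrapped value's own `Display` impl.
impl fmt::Display for Literal {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Display::fmt(&self.0, f)
    }
}

fn main() {
    println!("{}", Literal("1f64".to_string())); // prints: 1f64
}
```
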
deleted file mode 100644
--- a/third_party/rust/proc-macro2-0.2.2/src/stable.rs
+++ /dev/null
@@ -1,1206 +0,0 @@
-use std::ascii;
-use std::borrow::Borrow;
-use std::cell::RefCell;
-#[cfg(procmacro2_semver_exempt)]
-use std::cmp;
-use std::collections::HashMap;
-use std::fmt;
-use std::iter;
-use std::marker::PhantomData;
-use std::rc::Rc;
-use std::str::FromStr;
-use std::vec;
-
-use proc_macro;
-use unicode_xid::UnicodeXID;
-use strnom::{Cursor, PResult, skip_whitespace, block_comment, whitespace, word_break};
-
-use {TokenTree, TokenNode, Delimiter, Spacing};
-
-#[derive(Clone, Debug)]
-pub struct TokenStream {
-    inner: Vec<TokenTree>,
-}
-
-#[derive(Debug)]
-pub struct LexError;
-
-impl TokenStream {
-    pub fn empty() -> TokenStream {
-        TokenStream { inner: Vec::new() }
-    }
-
-    pub fn is_empty(&self) -> bool {
-        self.inner.len() == 0
-    }
-}
-
-#[cfg(procmacro2_semver_exempt)]
-fn get_cursor(src: &str) -> Cursor {
-    // Create a dummy file & add it to the codemap
-    CODEMAP.with(|cm| {
-        let mut cm = cm.borrow_mut();
-        let name = format!("<parsed string {}>", cm.files.len());
-        let span = cm.add_file(&name, src);
-        Cursor {
-            rest: src,
-            off: span.lo,
-        }
-    })
-}
-
-#[cfg(not(procmacro2_semver_exempt))]
-fn get_cursor(src: &str) -> Cursor {
-    Cursor {
-        rest: src,
-    }
-}
-
-impl FromStr for TokenStream {
-    type Err = LexError;
-
-    fn from_str(src: &str) -> Result<TokenStream, LexError> {
-        // Create a dummy file & add it to the codemap
-        let cursor = get_cursor(src);
-
-        match token_stream(cursor) {
-            Ok((input, output)) => {
-                if skip_whitespace(input).len() != 0 {
-                    Err(LexError)
-                } else {
-                    Ok(output.0)
-                }
-            }
-            Err(LexError) => Err(LexError),
-        }
-    }
-}
-
-impl fmt::Display for TokenStream {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        let mut joint = false;
-        for (i, tt) in self.inner.iter().enumerate() {
-            if i != 0 && !joint {
-                write!(f, " ")?;
-            }
-            joint = false;
-            match tt.kind {
-                TokenNode::Group(delim, ref stream) => {
-                    let (start, end) = match delim {
-                        Delimiter::Parenthesis => ("(", ")"),
-                        Delimiter::Brace => ("{", "}"),
-                        Delimiter::Bracket => ("[", "]"),
-                        Delimiter::None => ("", ""),
-                    };
-                    if stream.0.inner.len() == 0 {
-                        write!(f, "{} {}", start, end)?
-                    } else {
-                        write!(f, "{} {} {}", start, stream, end)?
-                    }
-                }
-                TokenNode::Term(ref sym) => write!(f, "{}", sym.as_str())?,
-                TokenNode::Op(ch, ref op) => {
-                    write!(f, "{}", ch)?;
-                    match *op {
-                        Spacing::Alone => {}
-                        Spacing::Joint => joint = true,
-                    }
-                }
-                TokenNode::Literal(ref literal) => {
-                    write!(f, "{}", literal)?;
-                    // handle comments
-                    if (literal.0).0.starts_with("/") {
-                        write!(f, "\n")?;
-                    }
-                }
-            }
-        }
-
-        Ok(())
-    }
-}
-
-impl From<proc_macro::TokenStream> for TokenStream {
-    fn from(inner: proc_macro::TokenStream) -> TokenStream {
-        inner.to_string().parse().expect("compiler token stream parse failed")
-    }
-}
-
-impl From<TokenStream> for proc_macro::TokenStream {
-    fn from(inner: TokenStream) -> proc_macro::TokenStream {
-        inner.to_string().parse().expect("failed to parse to compiler tokens")
-    }
-}
-
-
-impl From<TokenTree> for TokenStream {
-    fn from(tree: TokenTree) -> TokenStream {
-        TokenStream { inner: vec![tree] }
-    }
-}
-
-impl iter::FromIterator<TokenStream> for TokenStream {
-    fn from_iter<I: IntoIterator<Item=TokenStream>>(streams: I) -> Self {
-        let mut v = Vec::new();
-
-        for stream in streams.into_iter() {
-            v.extend(stream.inner);
-        }
-
-        TokenStream { inner: v }
-    }
-}
-
-pub type TokenTreeIter = vec::IntoIter<TokenTree>;
-
-impl IntoIterator for TokenStream {
-    type Item = TokenTree;
-    type IntoIter = TokenTreeIter;
-
-    fn into_iter(self) -> TokenTreeIter {
-        self.inner.into_iter()
-    }
-}
-
-#[cfg(procmacro2_semver_exempt)]
-#[derive(Clone, PartialEq, Eq, Debug)]
-pub struct FileName(String);
-
-#[cfg(procmacro2_semver_exempt)]
-impl fmt::Display for FileName {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        self.0.fmt(f)
-    }
-}
-
-#[cfg(procmacro2_semver_exempt)]
-#[derive(Clone, PartialEq, Eq)]
-pub struct SourceFile {
-    name: FileName,
-}
-
-#[cfg(procmacro2_semver_exempt)]
-impl SourceFile {
-    /// Get the path to this source file as a string.
-    pub fn path(&self) -> &FileName {
-        &self.name
-    }
-
-    pub fn is_real(&self) -> bool {
-        // XXX(nika): Support real files in the future?
-        false
-    }
-}
-
-#[cfg(procmacro2_semver_exempt)]
-impl AsRef<FileName> for SourceFile {
-    fn as_ref(&self) -> &FileName {
-        self.path()
-    }
-}
-
-#[cfg(procmacro2_semver_exempt)]
-impl fmt::Debug for SourceFile {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        f.debug_struct("SourceFile")
-            .field("path", &self.path())
-            .field("is_real", &self.is_real())
-            .finish()
-    }
-}
-
-#[cfg(procmacro2_semver_exempt)]
-#[derive(Clone, Copy, Debug, PartialEq, Eq)]
-pub struct LineColumn {
-    pub line: usize,
-    pub column: usize,
-}
-
-#[cfg(procmacro2_semver_exempt)]
-thread_local! {
-    static CODEMAP: RefCell<Codemap> = RefCell::new(Codemap {
-        // NOTE: We start with a single dummy file which all call_site() and
-        // def_site() spans reference.
-        files: vec![FileInfo {
-            name: "<unspecified>".to_owned(),
-            span: Span { lo: 0, hi: 0 },
-            lines: vec![0],
-        }],
-    });
-}
-
-#[cfg(procmacro2_semver_exempt)]
-struct FileInfo {
-    name: String,
-    span: Span,
-    lines: Vec<usize>,
-}
-
-#[cfg(procmacro2_semver_exempt)]
-impl FileInfo {
-    fn offset_line_column(&self, offset: usize) -> LineColumn {
-        assert!(self.span_within(Span { lo: offset as u32, hi: offset as u32 }));
-        let offset = offset - self.span.lo as usize;
-        match self.lines.binary_search(&offset) {
-            Ok(found) => LineColumn {
-                line: found + 1,
-                column: 0
-            },
-            Err(idx) => LineColumn {
-                line: idx,
-                column: offset - self.lines[idx - 1]
-            },
-        }
-    }
-
-    fn span_within(&self, span: Span) -> bool {
-        span.lo >= self.span.lo && span.hi <= self.span.hi
-    }
-}
-
-/// Computes the offsets of each line in the given source string.
-#[cfg(procmacro2_semver_exempt)]
-fn lines_offsets(s: &str) -> Vec<usize> {
-    let mut lines = vec![0];
-    let mut prev = 0;
-    while let Some(len) = s[prev..].find('\n') {
-        prev += len + 1;
-        lines.push(prev);
-    }
-    lines
-}
-
-#[cfg(procmacro2_semver_exempt)]
-struct Codemap {
-    files: Vec<FileInfo>,
-}
-
-#[cfg(procmacro2_semver_exempt)]
-impl Codemap {
-    fn next_start_pos(&self) -> u32 {
-        // Add 1 so there's always space between files.
-        //
-        // We'll always have at least 1 file, as we initialize our files list
-        // with a dummy file.
-        self.files.last().unwrap().span.hi + 1
-    }
-
-    fn add_file(&mut self, name: &str, src: &str) -> Span {
-        let lines = lines_offsets(src);
-        let lo = self.next_start_pos();
-        // XXX(nika): Should we bother doing a checked cast or checked add here?
-        let span = Span { lo: lo, hi: lo + (src.len() as u32) };
-
-        self.files.push(FileInfo {
-            name: name.to_owned(),
-            span: span,
-            lines: lines,
-        });
-
-        span
-    }
-
-    fn fileinfo(&self, span: Span) -> &FileInfo {
-        for file in &self.files {
-            if file.span_within(span) {
-                return file;
-            }
-        }
-        panic!("Invalid span with no related FileInfo!");
-    }
-}
-
-#[derive(Clone, Copy, Debug)]
-pub struct Span {
-    #[cfg(procmacro2_semver_exempt)]
-    lo: u32,
-    #[cfg(procmacro2_semver_exempt)]
-    hi: u32,
-}
-
-impl Span {
-    #[cfg(not(procmacro2_semver_exempt))]
-    pub fn call_site() -> Span {
-        Span {}
-    }
-
-    #[cfg(procmacro2_semver_exempt)]
-    pub fn call_site() -> Span {
-        Span { lo: 0, hi: 0 }
-    }
-
-    pub fn def_site() -> Span {
-        Span::call_site()
-    }
-
-    pub fn resolved_at(&self, _other: Span) -> Span {
-        // Stable spans consist only of line/column information, so
-        // `resolved_at` and `located_at` only select which span the
-        // caller wants line/column information from.
-        *self
-    }
-
-    pub fn located_at(&self, other: Span) -> Span {
-        other
-    }
-
-    #[cfg(procmacro2_semver_exempt)]
-    pub fn source_file(&self) -> SourceFile {
-        CODEMAP.with(|cm| {
-            let cm = cm.borrow();
-            let fi = cm.fileinfo(*self);
-            SourceFile {
-                name: FileName(fi.name.clone()),
-            }
-        })
-    }
-
-    #[cfg(procmacro2_semver_exempt)]
-    pub fn start(&self) -> LineColumn {
-        CODEMAP.with(|cm| {
-            let cm = cm.borrow();
-            let fi = cm.fileinfo(*self);
-            fi.offset_line_column(self.lo as usize)
-        })
-    }
-
-    #[cfg(procmacro2_semver_exempt)]
-    pub fn end(&self) -> LineColumn {
-        CODEMAP.with(|cm| {
-            let cm = cm.borrow();
-            let fi = cm.fileinfo(*self);
-            fi.offset_line_column(self.hi as usize)
-        })
-    }
-
-    #[cfg(procmacro2_semver_exempt)]
-    pub fn join(&self, other: Span) -> Option<Span> {
-        CODEMAP.with(|cm| {
-            let cm = cm.borrow();
-            // If `other` is not within the same FileInfo as us, return None.
-            if !cm.fileinfo(*self).span_within(other) {
-                return None;
-            }
-            Some(Span {
-                lo: cmp::min(self.lo, other.lo),
-                hi: cmp::max(self.hi, other.hi),
-            })
-        })
-    }
-}
-
-#[derive(Copy, Clone)]
-pub struct Term {
-    intern: usize,
-    not_send_sync: PhantomData<*const ()>,
-}
-
-thread_local!(static SYMBOLS: RefCell<Interner> = RefCell::new(Interner::new()));
-
-impl Term {
-    pub fn intern(string: &str) -> Term {
-        Term {
-            intern: SYMBOLS.with(|s| s.borrow_mut().intern(string)),
-            not_send_sync: PhantomData,
-        }
-    }
-
-    pub fn as_str(&self) -> &str {
-        SYMBOLS.with(|interner| {
-            let interner = interner.borrow();
-            let s = interner.get(self.intern);
-            unsafe {
-                &*(s as *const str)
-            }
-        })
-    }
-}
-
-impl fmt::Debug for Term {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        f.debug_tuple("Term").field(&self.as_str()).finish()
-    }
-}
-
-struct Interner {
-    string_to_index: HashMap<MyRc, usize>,
-    index_to_string: Vec<Rc<String>>,
-}
-
-#[derive(Hash, Eq, PartialEq)]
-struct MyRc(Rc<String>);
-
-impl Borrow<str> for MyRc {
-    fn borrow(&self) -> &str {
-        &self.0
-    }
-}
-
-impl Interner {
-    fn new() -> Interner {
-        Interner {
-            string_to_index: HashMap::new(),
-            index_to_string: Vec::new(),
-        }
-    }
-
-    fn intern(&mut self, s: &str) -> usize {
-        if let Some(&idx) = self.string_to_index.get(s) {
-            return idx
-        }
-        let s = Rc::new(s.to_string());
-        self.index_to_string.push(s.clone());
-        self.string_to_index.insert(MyRc(s), self.index_to_string.len() - 1);
-        self.index_to_string.len() - 1
-    }
-
-    fn get(&self, idx: usize) -> &str {
-        &self.index_to_string[idx]
-    }
-}
-
-#[derive(Clone, Debug)]
-pub struct Literal(String);
-
-impl Literal {
-    pub fn byte_char(byte: u8) -> Literal {
-        match byte {
-            0 => Literal(format!("b'\\0'")),
-            b'\"' => Literal(format!("b'\"'")),
-            n => {
-                let mut escaped = "b'".to_string();
-                escaped.extend(ascii::escape_default(n).map(|c| c as char));
-                escaped.push('\'');
-                Literal(escaped)
-            }
-        }
-    }
-
-    pub fn byte_string(bytes: &[u8]) -> Literal {
-        let mut escaped = "b\"".to_string();
-        for b in bytes {
-            match *b {
-                b'\0' => escaped.push_str(r"\0"),
-                b'\t' => escaped.push_str(r"\t"),
-                b'\n' => escaped.push_str(r"\n"),
-                b'\r' => escaped.push_str(r"\r"),
-                b'"' => escaped.push_str("\\\""),
-                b'\\' => escaped.push_str("\\\\"),
-                b'\x20' ... b'\x7E' => escaped.push(*b as char),
-                _ => escaped.push_str(&format!("\\x{:02X}", b)),
-            }
-        }
-        escaped.push('"');
-        Literal(escaped)
-    }
-
-    pub fn doccomment(s: &str) -> Literal {
-        Literal(s.to_string())
-    }
-
-    pub fn float(n: f64) -> Literal {
-        if !n.is_finite() {
-            panic!("Invalid float literal {}", n);
-        }
-        let mut s = n.to_string();
-        if !s.contains('.') {
-            s += ".0";
-        }
-        Literal(s)
-    }
-
-    pub fn integer(s: i64) -> Literal {
-        Literal(s.to_string())
-    }
-
-    pub fn raw_string(s: &str, pounds: usize) -> Literal {
-        let mut ret = format!("r");
-        ret.extend((0..pounds).map(|_| "#"));
-        ret.push('"');
-        ret.push_str(s);
-        ret.push('"');
-        ret.extend((0..pounds).map(|_| "#"));
-        Literal(ret)
-    }
-
-    pub fn raw_byte_string(s: &str, pounds: usize) -> Literal {
-        let mut ret = format!("br");
-        ret.extend((0..pounds).map(|_| "#"));
-        ret.push('"');
-        ret.push_str(s);
-        ret.push('"');
-        ret.extend((0..pounds).map(|_| "#"));
-        Literal(ret)
-    }
-}
-
-impl fmt::Display for Literal {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        self.0.fmt(f)
-    }
-}
-
-macro_rules! ints {
-    ($($t:ty,)*) => {$(
-        impl From<$t> for Literal {
-            fn from(t: $t) -> Literal {
-                Literal(format!(concat!("{}", stringify!($t)), t))
-            }
-        }
-    )*}
-}
-
-ints! {
-    u8, u16, u32, u64, usize,
-    i8, i16, i32, i64, isize,
-}
-
-macro_rules! floats {
-    ($($t:ty,)*) => {$(
-        impl From<$t> for Literal {
-            fn from(t: $t) -> Literal {
-                assert!(!t.is_nan());
-                assert!(!t.is_infinite());
-                Literal(format!(concat!("{}", stringify!($t)), t))
-            }
-        }
-    )*}
-}
-
-floats! {
-    f32, f64,
-}
-
-impl<'a> From<&'a str> for Literal {
-    fn from(t: &'a str) -> Literal {
-        let mut s = t.chars().flat_map(|c| c.escape_default()).collect::<String>();
-        s.push('"');
-        s.insert(0, '"');
-        Literal(s)
-    }
-}
-
-impl From<char> for Literal {
-    fn from(t: char) -> Literal {
-        Literal(format!("'{}'", t.escape_default().collect::<String>()))
-    }
-}
-
-named!(token_stream -> ::TokenStream, map!(
-    many0!(token_tree),
-    |trees| ::TokenStream(TokenStream { inner: trees })
-));
-
-#[cfg(not(procmacro2_semver_exempt))]
-fn token_tree(input: Cursor) -> PResult<TokenTree> {
-    let (input, kind) = token_kind(input)?;
-    Ok((input, TokenTree {
-        span: ::Span(Span {}),
-        kind: kind,
-    }))
-}
-
-#[cfg(procmacro2_semver_exempt)]
-fn token_tree(input: Cursor) -> PResult<TokenTree> {
-    let input = skip_whitespace(input);
-    let lo = input.off;
-    let (input, kind) = token_kind(input)?;
-    let hi = input.off;
-    Ok((input, TokenTree {
-        span: ::Span(Span {
-            lo: lo,
-            hi: hi,
-        }),
-        kind: kind,
-    }))
-}
-
-named!(token_kind -> TokenNode, alt!(
-    map!(delimited, |(d, s)| TokenNode::Group(d, s))
-    |
-    map!(literal, TokenNode::Literal) // must be before symbol
-    |
-    symbol
-    |
-    map!(op, |(op, kind)| TokenNode::Op(op, kind))
-));
-
-named!(delimited -> (Delimiter, ::TokenStream), alt!(
-    delimited!(
-        punct!("("),
-        token_stream,
-        punct!(")")
-    ) => { |ts| (Delimiter::Parenthesis, ts) }
-    |
-    delimited!(
-        punct!("["),
-        token_stream,
-        punct!("]")
-    ) => { |ts| (Delimiter::Bracket, ts) }
-    |
-    delimited!(
-        punct!("{"),
-        token_stream,
-        punct!("}")
-    ) => { |ts| (Delimiter::Brace, ts) }
-));
-
-fn symbol(mut input: Cursor) -> PResult<TokenNode> {
-    input = skip_whitespace(input);
-
-    let mut chars = input.char_indices();
-
-    let lifetime = input.starts_with("'");
-    if lifetime {
-        chars.next();
-    }
-
-    match chars.next() {
-        Some((_, ch)) if UnicodeXID::is_xid_start(ch) || ch == '_' => {}
-        _ => return Err(LexError),
-    }
-
-    let mut end = input.len();
-    for (i, ch) in chars {
-        if !UnicodeXID::is_xid_continue(ch) {
-            end = i;
-            break;
-        }
-    }
-
-    if lifetime && &input.rest[..end] != "'static" && KEYWORDS.contains(&&input.rest[1..end]) {
-        Err(LexError)
-    } else {
-        let a = &input.rest[..end];
-        if a == "_" {
-            Ok((input.advance(end), TokenNode::Op('_', Spacing::Alone)))
-        } else {
-            Ok((input.advance(end), TokenNode::Term(::Term::intern(a))))
-        }
-    }
-}
-
-// From https://github.com/rust-lang/rust/blob/master/src/libsyntax_pos/symbol.rs
-static KEYWORDS: &'static [&'static str] = &[
-    "abstract", "alignof", "as", "become", "box", "break", "const", "continue",
-    "crate", "do", "else", "enum", "extern", "false", "final", "fn", "for",
-    "if", "impl", "in", "let", "loop", "macro", "match", "mod", "move", "mut",
-    "offsetof", "override", "priv", "proc", "pub", "pure", "ref", "return",
-    "self", "Self", "sizeof", "static", "struct", "super", "trait", "true",
-    "type", "typeof", "unsafe", "unsized", "use", "virtual", "where", "while",
-    "yield",
-];
-
-fn literal(input: Cursor) -> PResult<::Literal> {
-    let input_no_ws = skip_whitespace(input);
-
-    match literal_nocapture(input_no_ws) {
-        Ok((a, ())) => {
-            let start = input.len() - input_no_ws.len();
-            let len = input_no_ws.len() - a.len();
-            let end = start + len;
-            Ok((a, ::Literal(Literal(input.rest[start..end].to_string()))))
-        }
-        Err(LexError) => Err(LexError),
-    }
-}
-
-named!(literal_nocapture -> (), alt!(
-    string
-    |
-    byte_string
-    |
-    byte
-    |
-    character
-    |
-    float
-    |
-    int
-    |
-    doc_comment
-));
-
-named!(string -> (), alt!(
-    quoted_string
-    |
-    preceded!(
-        punct!("r"),
-        raw_string
-    ) => { |_| () }
-));
-
-named!(quoted_string -> (), delimited!(
-    punct!("\""),
-    cooked_string,
-    tag!("\"")
-));
-
-fn cooked_string(input: Cursor) -> PResult<()> {
-    let mut chars = input.char_indices().peekable();
-    while let Some((byte_offset, ch)) = chars.next() {
-        match ch {
-            '"' => {
-                return Ok((input.advance(byte_offset), ()));
-            }
-            '\r' => {
-                if let Some((_, '\n')) = chars.next() {
-                    // ...
-                } else {
-                    break;
-                }
-            }
-            '\\' => {
-                match chars.next() {
-                    Some((_, 'x')) => {
-                        if !backslash_x_char(&mut chars) {
-                            break
-                        }
-                    }
-                    Some((_, 'n')) |
-                    Some((_, 'r')) |
-                    Some((_, 't')) |
-                    Some((_, '\\')) |
-                    Some((_, '\'')) |
-                    Some((_, '"')) |
-                    Some((_, '0')) => {}
-                    Some((_, 'u')) => {
-                        if !backslash_u(&mut chars) {
-                            break
-                        }
-                    }
-                    Some((_, '\n')) | Some((_, '\r')) => {
-                        while let Some(&(_, ch)) = chars.peek() {
-                            if ch.is_whitespace() {
-                                chars.next();
-                            } else {
-                                break;
-                            }
-                        }
-                    }
-                    _ => break,
-                }
-            }
-            _ch => {}
-        }
-    }
-    Err(LexError)
-}
-
-named!(byte_string -> (), alt!(
-    delimited!(
-        punct!("b\""),
-        cooked_byte_string,
-        tag!("\"")
-    ) => { |_| () }
-    |
-    preceded!(
-        punct!("br"),
-        raw_string
-    ) => { |_| () }
-));
-
-fn cooked_byte_string(mut input: Cursor) -> PResult<()> {
-    let mut bytes = input.bytes().enumerate();
-    'outer: while let Some((offset, b)) = bytes.next() {
-        match b {
-            b'"' => {
-                return Ok((input.advance(offset), ()));
-            }
-            b'\r' => {
-                if let Some((_, b'\n')) = bytes.next() {
-                    // ...
-                } else {
-                    break;
-                }
-            }
-            b'\\' => {
-                match bytes.next() {
-                    Some((_, b'x')) => {
-                        if !backslash_x_byte(&mut bytes) {
-                            break
-                        }
-                    }
-                    Some((_, b'n')) |
-                    Some((_, b'r')) |
-                    Some((_, b't')) |
-                    Some((_, b'\\')) |
-                    Some((_, b'0')) |
-                    Some((_, b'\'')) |
-                    Some((_, b'"'))  => {}
-                    Some((newline, b'\n')) |
-                    Some((newline, b'\r')) => {
-                        let rest = input.advance(newline + 1);
-                        for (offset, ch) in rest.char_indices() {
-                            if !ch.is_whitespace() {
-                                input = rest.advance(offset);
-                                bytes = input.bytes().enumerate();
-                                continue 'outer;
-                            }
-                        }
-                        break;
-                    }
-                    _ => break,
-                }
-            }
-            b if b < 0x80 => {}
-            _ => break,
-        }
-    }
-    Err(LexError)
-}
-
-fn raw_string(input: Cursor) -> PResult<()> {
-    let mut chars = input.char_indices();
-    let mut n = 0;
-    while let Some((byte_offset, ch)) = chars.next() {
-        match ch {
-            '"' => {
-                n = byte_offset;
-                break;
-            }
-            '#' => {}
-            _ => return Err(LexError),
-        }
-    }
-    for (byte_offset, ch) in chars {
-        match ch {
-            '"' if input.advance(byte_offset + 1).starts_with(&input.rest[..n]) => {
-                let rest = input.advance(byte_offset + 1 + n);
-                return Ok((rest, ()))
-            }
-            '\r' => {}
-            _ => {}
-        }
-    }
-    Err(LexError)
-}
-
-named!(byte -> (), do_parse!(
-    punct!("b") >>
-    tag!("'") >>
-    cooked_byte >>
-    tag!("'") >>
-    (())
-));
-
-fn cooked_byte(input: Cursor) -> PResult<()> {
-    let mut bytes = input.bytes().enumerate();
-    let ok = match bytes.next().map(|(_, b)| b) {
-        Some(b'\\') => {
-            match bytes.next().map(|(_, b)| b) {
-                Some(b'x') => backslash_x_byte(&mut bytes),
-                Some(b'n') |
-                Some(b'r') |
-                Some(b't') |
-                Some(b'\\') |
-                Some(b'0') |
-                Some(b'\'') |
-                Some(b'"') => true,
-                _ => false,
-            }
-        }
-        b => b.is_some(),
-    };
-    if ok {
-        match bytes.next() {
-            Some((offset, _)) => {
-                if input.chars().as_str().is_char_boundary(offset) {
-                    Ok((input.advance(offset), ()))
-                } else {
-                    Err(LexError)
-                }
-            }
-            None => Ok((input.advance(input.len()), ())),
-        }
-    } else {
-        Err(LexError)
-    }
-}
-
-named!(character -> (), do_parse!(
-    punct!("'") >>
-    cooked_char >>
-    tag!("'") >>
-    (())
-));
-
-fn cooked_char(input: Cursor) -> PResult<()> {
-    let mut chars = input.char_indices();
-    let ok = match chars.next().map(|(_, ch)| ch) {
-        Some('\\') => {
-            match chars.next().map(|(_, ch)| ch) {
-                Some('x') => backslash_x_char(&mut chars),
-                Some('u') => backslash_u(&mut chars),
-                Some('n') |
-                Some('r') |
-                Some('t') |
-                Some('\\') |
-                Some('0') |
-                Some('\'') |
-                Some('"') => true,
-                _ => false,
-            }
-        }
-        ch => ch.is_some(),
-    };
-    if ok {
-        match chars.next() {
-            Some((idx, _)) => Ok((input.advance(idx), ())),
-            None => Ok((input.advance(input.len()), ())),
-        }
-    } else {
-        Err(LexError)
-    }
-}
-
-macro_rules! next_ch {
-    ($chars:ident @ $pat:pat $(| $rest:pat)*) => {
-        match $chars.next() {
-            Some((_, ch)) => match ch {
-                $pat $(| $rest)*  => ch,
-                _ => return false,
-            },
-            None => return false
-        }
-    };
-}
-
-fn backslash_x_char<I>(chars: &mut I) -> bool
-    where I: Iterator<Item = (usize, char)>
-{
-    next_ch!(chars @ '0'...'7');
-    next_ch!(chars @ '0'...'9' | 'a'...'f' | 'A'...'F');
-    true
-}
-
-fn backslash_x_byte<I>(chars: &mut I) -> bool
-    where I: Iterator<Item = (usize, u8)>
-{
-    next_ch!(chars @ b'0'...b'9' | b'a'...b'f' | b'A'...b'F');
-    next_ch!(chars @ b'0'...b'9' | b'a'...b'f' | b'A'...b'F');
-    true
-}
-
-fn backslash_u<I>(chars: &mut I) -> bool
-    where I: Iterator<Item = (usize, char)>
-{
-    next_ch!(chars @ '{');
-    next_ch!(chars @ '0'...'9' | 'a'...'f' | 'A'...'F');
-    loop {
-        let c = next_ch!(chars @ '0'...'9' | 'a'...'f' | 'A'...'F' | '_' | '}');
-        if c == '}' {
-            return true;
-        }
-    }
-}
-
-fn float(input: Cursor) -> PResult<()> {
-    let (rest, ()) = float_digits(input)?;
-    for suffix in &["f32", "f64"] {
-        if rest.starts_with(suffix) {
-            return word_break(rest.advance(suffix.len()));
-        }
-    }
-    word_break(rest)
-}
-
-fn float_digits(input: Cursor) -> PResult<()> {
-    let mut chars = input.chars().peekable();
-    match chars.next() {
-        Some(ch) if ch >= '0' && ch <= '9' => {}
-        _ => return Err(LexError),
-    }
-
-    let mut len = 1;
-    let mut has_dot = false;
-    let mut has_exp = false;
-    while let Some(&ch) = chars.peek() {
-        match ch {
-            '0'...'9' | '_' => {
-                chars.next();
-                len += 1;
-            }
-            '.' => {
-                if has_dot {
-                    break;
-                }
-                chars.next();
-                if chars.peek()
-                       .map(|&ch| ch == '.' || UnicodeXID::is_xid_start(ch))
-                       .unwrap_or(false) {
-                    return Err(LexError);
-                }
-                len += 1;
-                has_dot = true;
-            }
-            'e' | 'E' => {
-                chars.next();
-                len += 1;
-                has_exp = true;
-                break;
-            }
-            _ => break,
-        }
-    }
-
-    let rest = input.advance(len);
-    if !(has_dot || has_exp || rest.starts_with("f32") || rest.starts_with("f64")) {
-        return Err(LexError);
-    }
-
-    if has_exp {
-        let mut has_exp_value = false;
-        while let Some(&ch) = chars.peek() {
-            match ch {
-                '+' | '-' => {
-                    if has_exp_value {
-                        break;
-                    }
-                    chars.next();
-                    len += 1;
-                }
-                '0'...'9' => {
-                    chars.next();
-                    len += 1;
-                    has_exp_value = true;
-                }
-                '_' => {
-                    chars.next();
-                    len += 1;
-                }
-                _ => break,
-            }
-        }
-        if !has_exp_value {
-            return Err(LexError);
-        }
-    }
-
-    Ok((input.advance(len), ()))
-}
-
-fn int(input: Cursor) -> PResult<()> {
-    let (rest, ()) = digits(input)?;
-    for suffix in &[
-        "isize",
-        "i8",
-        "i16",
-        "i32",
-        "i64&q