Merge mozilla-inbound to mozilla-central a=merge
author Razvan Maries <rmaries@mozilla.com>
date Wed, 09 Jan 2019 00:01:24 +0200
changeset 510049 e0e0d3ec5787906dcc10c615e4a4d45a26fc33dc
parent 510037 e08a0bb35c033607bdd78aa32d00e5debb218c41 (current diff)
parent 510048 e956eb6131305a6e9b4e1c581f2b35fe64baf61d (diff)
child 510066 9635e730e0be047e046656c6fa98142547e9e664
child 510085 6c61f3dce0d0953e8dea6bb444cddfa8fe39251b
push id 10547
push user ffxbld-merge
push date Mon, 21 Jan 2019 13:03:58 +0000
treeherder mozilla-beta@24ec1916bffe
reviewers merge
milestone 66.0a1
browser/base/content/tabbrowser.xml
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -617,19 +617,19 @@ name = "cssparser"
 version = "0.25.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "cssparser-macros 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "dtoa-short 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "itoa 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "matches 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "phf 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)",
- "proc-macro2 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
  "procedural-masquerade 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
  "smallvec 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "syn 0.14.6 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "cssparser-macros"
 version = "0.3.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -715,29 +715,29 @@ dependencies = [
 
 [[package]]
 name = "darling_core"
 version = "0.8.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "ident_case 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "proc-macro2 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 0.15.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.15.24 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "darling_macro"
 version = "0.8.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "darling_core 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 0.15.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.15.24 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "devd-rs"
 version = "0.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -893,19 +893,19 @@ dependencies = [
  "failure_derive 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "failure_derive"
 version = "0.1.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "proc-macro2 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 0.15.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.15.24 (registry+https://github.com/rust-lang/crates.io-index)",
  "synstructure 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "fake-simd"
 version = "0.1.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
@@ -1473,18 +1473,18 @@ dependencies = [
  "thin-slice 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "malloc_size_of_derive"
 version = "0.0.1"
 dependencies = [
- "proc-macro2 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 0.15.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.15.24 (registry+https://github.com/rust-lang/crates.io-index)",
  "synstructure 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "matches"
 version = "0.1.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
@@ -1761,18 +1761,18 @@ dependencies = [
 ]
 
 [[package]]
 name = "num-derive"
 version = "0.2.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "num-traits 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "proc-macro2 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
  "syn 0.14.6 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "num-integer"
 version = "0.1.39"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
@@ -1949,17 +1949,17 @@ name = "proc-macro2"
 version = "0.3.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "proc-macro2"
-version = "0.4.9"
+version = "0.4.24"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "procedural-masquerade"
 version = "0.1.1"
@@ -2000,20 +2000,20 @@ name = "quote"
 version = "0.5.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "proc-macro2 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "quote"
-version = "0.6.3"
+version = "0.6.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "proc-macro2 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "rand"
 version = "0.3.22"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -2216,19 +2216,19 @@ dependencies = [
  "scroll_derive 0.9.5 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "scroll_derive"
 version = "0.9.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "proc-macro2 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 0.15.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.15.24 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "selectors"
 version = "0.21.0"
 dependencies = [
  "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
  "cssparser 0.25.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -2272,19 +2272,19 @@ dependencies = [
  "serde 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "serde_derive"
 version = "1.0.80"
 source = "git+https://github.com/servo/serde?branch=deserialize_from_enums9#e0cc925c259cb74ce41377e4fe02713adfa6d836"
 dependencies = [
- "proc-macro2 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 0.15.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.15.24 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "serde_json"
 version = "1.0.26"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "itoa 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -2385,18 +2385,18 @@ dependencies = [
 
 [[package]]
 name = "string_cache_codegen"
 version = "0.4.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "phf_generator 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)",
  "phf_shared 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)",
- "proc-macro2 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
  "string_cache_shared 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "string_cache_shared"
 version = "0.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
@@ -2460,19 +2460,19 @@ dependencies = [
  "walkdir 2.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "style_derive"
 version = "0.0.1"
 dependencies = [
  "darling 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "proc-macro2 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 0.15.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.15.24 (registry+https://github.com/rust-lang/crates.io-index)",
  "synstructure 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "style_traits"
 version = "0.0.1"
 dependencies = [
  "app_units 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -2516,39 +2516,39 @@ dependencies = [
  "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "syn"
 version = "0.14.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "proc-macro2 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
  "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "syn"
-version = "0.15.7"
+version = "0.15.24"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "proc-macro2 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
  "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "synstructure"
 version = "0.10.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "proc-macro2 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 0.15.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.15.24 (registry+https://github.com/rust-lang/crates.io-index)",
  "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "target-lexicon"
 version = "0.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
@@ -3349,21 +3349,21 @@ dependencies = [
 "checksum phf_generator 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "6b07ffcc532ccc85e3afc45865469bf5d9e4ef5bfcf9622e3cfe80c2d275ec03"
 "checksum phf_shared 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "07e24b0ca9643bdecd0632f2b3da6b1b89bbb0030e0b992afc1113b23a7bc2f2"
 "checksum pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "3a8b4c6b8165cd1a1cd4b9b120978131389f64bdaf456435caa41e630edba903"
 "checksum plain 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "b4596b6d070b27117e987119b4dac604f3c58cfb0b191112e24771b2faeac1a6"
 "checksum plane-split 0.13.3 (registry+https://github.com/rust-lang/crates.io-index)" = "9b1d9a84aa3bbc2dafd06856bdb1dc333eb1d442ad8987b9d596c7344b3ed969"
 "checksum podio 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "e5422a1ee1bc57cc47ae717b0137314258138f38fd5f3cea083f43a9725383a0"
 "checksum precomputed-hash 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c"
 "checksum proc-macro2 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "77997c53ae6edd6d187fec07ec41b207063b5ee6f33680e9fa86d405cdd313d4"
-"checksum proc-macro2 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)" = "cccdc7557a98fe98453030f077df7f3a042052fae465bb61d2c2c41435cfd9b6"
+"checksum proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)" = "77619697826f31a02ae974457af0b29b723e5619e113e9397b8b82c6bd253f09"
 "checksum procedural-masquerade 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9f566249236c6ca4340f7ca78968271f0ed2b0f234007a61b66f9ecd0af09260"
 "checksum quick-error 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "eda5fe9b71976e62bc81b781206aaa076401769b2143379d3eb2118388babac4"
 "checksum quote 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9949cfe66888ffe1d53e6ec9d9f3b70714083854be20fd5e271b232a017401e8"
-"checksum quote 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)" = "e44651a0dc4cdd99f71c83b561e221f714912d11af1a4dff0631f923d53af035"
+"checksum quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)" = "53fa22a1994bd0f9372d7a816207d8a2677ad0325b073f5c5332760f0fb62b5c"
 "checksum rand 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)" = "15a732abf9d20f0ad8eeb6f909bf6868722d9a06e1e50802b6a70351f40b4eb1"
 "checksum rand 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "8356f47b32624fef5b3301c1be97e5944ecdd595409cc5da11d05f211db6cfbd"
 "checksum rayon 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "485541959c8ecc49865526fe6c4de9653dd6e60d829d6edf0be228167b60372d"
 "checksum rayon-core 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9d24ad214285a7729b174ed6d3bcfcb80177807f959d95fafd5bfc5c4f201ac8"
 "checksum redox_syscall 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)" = "ab105df655884ede59d45b7070c8a65002d921461ee813a024558ca16030eea0"
 "checksum redox_termios 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7e891cfe48e9100a70a3b6eb652fef28920c117d366339687bd5576160db0f76"
 "checksum redox_users 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "214a97e49be64fd2c86f568dd0cb2c757d2cc53de95b273b6ad0a1c908482f26"
 "checksum regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1731164734096285ec2a5ec7fea5248ae2f5485b3feeb0115af4fda2183b2d1b"
@@ -3401,17 +3401,17 @@ dependencies = [
 "checksum string 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "00caf261d6f90f588f8450b8e1230fa0d5be49ee6140fdfbcb55335aff350970"
 "checksum string_cache 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)" = "25d70109977172b127fe834e5449e5ab1740b9ba49fa18a2020f509174f25423"
 "checksum string_cache_codegen 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1eea1eee654ef80933142157fdad9dd8bc43cf7c74e999e369263496f04ff4da"
 "checksum string_cache_shared 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b1884d1bc09741d466d9b14e6d37ac89d6909cbcac41dd9ae982d4d063bbedfc"
 "checksum strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b4d15c810519a91cf877e7e36e63fe068815c678181439f2f29e2562147c3694"
 "checksum strsim 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bb4f380125926a99e52bc279241539c018323fab05ad6368b56f93d9369ff550"
 "checksum syn 0.13.1 (registry+https://github.com/rust-lang/crates.io-index)" = "91b52877572087400e83d24b9178488541e3d535259e04ff17a63df1e5ceff59"
 "checksum syn 0.14.6 (registry+https://github.com/rust-lang/crates.io-index)" = "4e4b5274d4a0a3d2749d5c158dc64d3403e60554dc61194648787ada5212473d"
-"checksum syn 0.15.7 (registry+https://github.com/rust-lang/crates.io-index)" = "455a6ec9b368f8c479b0ae5494d13b22dc00990d2f00d68c9dc6a2dc4f17f210"
+"checksum syn 0.15.24 (registry+https://github.com/rust-lang/crates.io-index)" = "734ecc29cd36e8123850d9bf21dfd62ef8300aaa8f879aabaa899721808be37c"
 "checksum synstructure 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "73687139bf99285483c96ac0add482c3776528beac1d97d444f6e91f203a2015"
 "checksum target-lexicon 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4af5e2227f0b887d591d3724b796a96eff04226104d872f5b3883fcd427d64b9"
 "checksum tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "87974a6f5c1dfb344d733055601650059a3363de2a6104819293baff662132d6"
 "checksum term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "fa63644f74ce96fbeb9b794f66aff2a52d601cbd5e80f4b97123e3899f4570f1"
 "checksum term_size 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2b6b55df3198cc93372e85dd2ed817f0e38ce8cc0f22eb32391bfad9c4bf209"
 "checksum termcolor 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "adc4587ead41bf016f11af03e55a624c06568b5a19db4e90fde573d805074f83"
 "checksum termion 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "689a3bdfaab439fd92bc87df5c4c78417d3cbe537487274e9b0b2dce76e92096"
 "checksum textwrap 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c0b59b6b4b44d867f1370ef1bd91bfb262bf07bf0ae65c202ea2fbc16153b693"
--- a/browser/base/content/tabbrowser.xml
+++ b/browser/base/content/tabbrowser.xml
@@ -29,17 +29,17 @@
 
     <handlers>
       <handler event="underflow" phase="capturing"><![CDATA[
         // Ignore underflow events:
         // - from nested scrollable elements
         // - for vertical orientation
         // - corresponding to an overflow event that we ignored
         let tabs = document.getBindingParent(this);
-        if (event.originalTarget != this._scrollbox ||
+        if (event.originalTarget != this.scrollbox ||
             event.detail == 0 ||
             !tabs.hasAttribute("overflow")) {
           return;
         }
 
         tabs.removeAttribute("overflow");
 
         if (tabs._lastTabClosedByMouse) {
@@ -51,17 +51,17 @@
         }
 
         tabs._positionPinnedTabs();
       ]]></handler>
       <handler event="overflow"><![CDATA[
         // Ignore overflow events:
         // - from nested scrollable elements
         // - for vertical orientation
-        if (event.originalTarget != this._scrollbox ||
+        if (event.originalTarget != this.scrollbox ||
             event.detail == 0) {
           return;
         }
 
         var tabs = document.getBindingParent(this);
         tabs.setAttribute("overflow", "true");
         tabs._positionPinnedTabs();
         tabs._handleTabSelect(true);
@@ -629,17 +629,17 @@
           // Move the dragged tab based on the mouse position.
 
           let leftTab = tabs[0];
           let rightTab = tabs[tabs.length - 1];
           let rightMovingTabScreenX = movingTabs[movingTabs.length - 1].boxObject.screenX;
           let leftMovingTabScreenX = movingTabs[0].boxObject.screenX;
           let translateX = screenX - draggedTab._dragData.screenX;
           if (!pinned) {
-            translateX += this.arrowScrollbox._scrollbox.scrollLeft - draggedTab._dragData.scrollX;
+            translateX += this.arrowScrollbox.scrollbox.scrollLeft - draggedTab._dragData.scrollX;
           }
           let leftBound = leftTab.boxObject.screenX - leftMovingTabScreenX;
           let rightBound = (rightTab.boxObject.screenX + rightTab.boxObject.width) -
                            (rightMovingTabScreenX + tabWidth);
           translateX = Math.min(Math.max(translateX, leftBound), rightBound);
 
           for (let tab of movingTabs) {
             tab.style.transform = "translateX(" + translateX + "px)";
@@ -1550,17 +1550,17 @@
         // relative to the corner of the dragged tab.
         function clientX(ele) {
           return ele.getBoundingClientRect().left;
         }
         let tabOffsetX = clientX(tab) - clientX(this);
         tab._dragData = {
           offsetX: event.screenX - window.screenX - tabOffsetX,
           offsetY: event.screenY - window.screenY,
-          scrollX: this.arrowScrollbox._scrollbox.scrollLeft,
+          scrollX: this.arrowScrollbox.scrollbox.scrollLeft,
           screenX: event.screenX,
           movingTabs: (tab.multiselected ? gBrowser.selectedTabs : [tab])
                       .filter(t => t.pinned == tab.pinned),
         };
 
         event.stopPropagation();
       ]]></handler>
 
--- a/browser/base/content/test/forms/browser_selectpopup.js
+++ b/browser/base/content/test/forms/browser_selectpopup.js
@@ -457,61 +457,74 @@ async function performLargePopupTests(wi
   });
 
   let selectPopup = win.document.getElementById("ContentSelectDropdown").menupopup;
   let browserRect = browser.getBoundingClientRect();
 
   // Check if a drag-select works and scrolls the list.
   await openSelectPopup(selectPopup, "mousedown", "select", win);
 
-  let scrollPos = selectPopup.scrollBox.scrollTop;
+  let getScrollPos = () => selectPopup.scrollBox.scrollbox.scrollTop;
+  let scrollPos = getScrollPos();
   let popupRect = selectPopup.getBoundingClientRect();
 
   // First, check that scrolling does not occur when the mouse is moved over the
   // anchor button but not the popup yet.
   EventUtils.synthesizeMouseAtPoint(popupRect.left + 5, popupRect.top - 10, { type: "mousemove" }, win);
-  is(selectPopup.scrollBox.scrollTop, scrollPos, "scroll position after mousemove over button should not change");
+  is(getScrollPos(), scrollPos, "scroll position after mousemove over button should not change");
 
   EventUtils.synthesizeMouseAtPoint(popupRect.left + 20, popupRect.top + 10, { type: "mousemove" }, win);
 
   // Dragging above the popup scrolls it up.
+  let scrolledPromise = BrowserTestUtils.waitForEvent(selectPopup, "scroll", false,
+    () => getScrollPos() < scrollPos - 5);
   EventUtils.synthesizeMouseAtPoint(popupRect.left + 20, popupRect.top - 20, { type: "mousemove" }, win);
-  ok(selectPopup.scrollBox.scrollTop < scrollPos - 5, "scroll position at drag up");
+  await scrolledPromise;
+  ok(true, "scroll position at drag up");
 
   // Dragging below the popup scrolls it down.
-  scrollPos = selectPopup.scrollBox.scrollTop;
+  scrollPos = getScrollPos();
+  scrolledPromise = BrowserTestUtils.waitForEvent(selectPopup, "scroll", false,
+    () => getScrollPos() > scrollPos + 5);
   EventUtils.synthesizeMouseAtPoint(popupRect.left + 20, popupRect.bottom + 20, { type: "mousemove" }, win);
-  ok(selectPopup.scrollBox.scrollTop > scrollPos + 5, "scroll position at drag down");
+  await scrolledPromise;
+  ok(true, "scroll position at drag down");
 
   // Releasing the mouse button and moving the mouse does not change the scroll position.
-  scrollPos = selectPopup.scrollBox.scrollTop;
+  scrollPos = getScrollPos();
   EventUtils.synthesizeMouseAtPoint(popupRect.left + 20, popupRect.bottom + 25, { type: "mouseup" }, win);
-  is(selectPopup.scrollBox.scrollTop, scrollPos, "scroll position at mouseup should not change");
+  is(getScrollPos(), scrollPos, "scroll position at mouseup should not change");
 
   EventUtils.synthesizeMouseAtPoint(popupRect.left + 20, popupRect.bottom + 20, { type: "mousemove" }, win);
-  is(selectPopup.scrollBox.scrollTop, scrollPos, "scroll position at mousemove after mouseup should not change");
+  is(getScrollPos(), scrollPos, "scroll position at mousemove after mouseup should not change");
 
   // Now check dragging with a mousedown on an item
   let menuRect = selectPopup.children[51].getBoundingClientRect();
   EventUtils.synthesizeMouseAtPoint(menuRect.left + 5, menuRect.top + 5, { type: "mousedown" }, win);
 
   // Dragging below the popup scrolls it down.
+  scrolledPromise = BrowserTestUtils.waitForEvent(selectPopup, "scroll", false,
+    () => getScrollPos() > scrollPos + 5);
   EventUtils.synthesizeMouseAtPoint(popupRect.left + 20, popupRect.bottom + 20, { type: "mousemove" }, win);
-  ok(selectPopup.scrollBox.scrollTop > scrollPos + 5, "scroll position at drag down from option");
+  await scrolledPromise;
+  ok(true, "scroll position at drag down from option");
 
   // Dragging above the popup scrolls it up.
+  scrolledPromise = BrowserTestUtils.waitForEvent(selectPopup, "scroll", false,
+    () => getScrollPos() < scrollPos - 5);
   EventUtils.synthesizeMouseAtPoint(popupRect.left + 20, popupRect.top - 20, { type: "mousemove" }, win);
-  is(selectPopup.scrollBox.scrollTop, scrollPos, "scroll position at drag up from option");
+  await scrolledPromise;
+  ok(true, "scroll position at drag up from option");
 
-  scrollPos = selectPopup.scrollBox.scrollTop;
+  scrollPos = getScrollPos();
   EventUtils.synthesizeMouseAtPoint(popupRect.left + 20, popupRect.bottom + 25, { type: "mouseup" }, win);
-  is(selectPopup.scrollBox.scrollTop, scrollPos, "scroll position at mouseup from option should not change");
+  is(getScrollPos(), scrollPos, "scroll position at mouseup from option should not change");
 
   EventUtils.synthesizeMouseAtPoint(popupRect.left + 20, popupRect.bottom + 20, { type: "mousemove" }, win);
-  is(selectPopup.scrollBox.scrollTop, scrollPos, "scroll position at mousemove after mouseup should not change");
+  is(getScrollPos(), scrollPos, "scroll position at mousemove after mouseup should not change");
 
   await hideSelectPopup(selectPopup, "escape", win);
 
   let positions = [
     "margin-top: 300px;",
     "position: fixed; bottom: 200px;",
     "width: 100%; height: 9999px;",
   ];
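
The selectpopup test above replaces synchronous scrollTop assertions with waits on real "scroll" events, which avoids racing against asynchronous popup scrolling. Stripped of the surrounding test, the pattern looks roughly like the sketch below; names such as selectPopup, getScrollPos, popupRect and win are the test's own locals, reused here for illustration only, so this is not a drop-in snippet.

```js
// Sketch of the event-driven wait introduced above.
let scrollPos = getScrollPos();

// Arm a promise that resolves only once the popup has really scrolled past the
// old position (BrowserTestUtils.waitForEvent's fourth argument is a check
// function evaluated for each matching event).
let scrolledPromise = BrowserTestUtils.waitForEvent(selectPopup, "scroll", false,
  () => getScrollPos() > scrollPos + 5);

// Synthesize the mouse drag below the popup that is expected to scroll it...
EventUtils.synthesizeMouseAtPoint(popupRect.left + 20, popupRect.bottom + 20,
                                  { type: "mousemove" }, win);

// ...then wait for the scroll to actually happen before asserting anything.
await scrolledPromise;
ok(true, "popup scrolled after dragging below it");
```
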
--- a/browser/base/content/test/tabs/browser_overflowScroll.js
+++ b/browser/base/content/test/tabs/browser_overflowScroll.js
@@ -3,17 +3,17 @@
 requestLongerTimeout(2);
 
 /**
  * Tests that scrolling the tab strip via the scroll buttons scrolls the right
  * amount in non-smoothscroll mode.
  */
 add_task(async function() {
   let arrowScrollbox = gBrowser.tabContainer.arrowScrollbox;
-  let scrollbox = arrowScrollbox._scrollbox;
+  let scrollbox = arrowScrollbox.scrollbox;
   let originalSmoothScroll = arrowScrollbox.smoothScroll;
   let tabs = gBrowser.tabs;
   let tabMinWidth = parseInt(getComputedStyle(gBrowser.selectedTab, null).minWidth);
 
   let rect = ele => ele.getBoundingClientRect();
   let width = ele => rect(ele).width;
 
   let tabCountForOverflow = Math.ceil(width(arrowScrollbox) / tabMinWidth * 3);
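
This overflow-scroll test, like the tabbrowser.xml changes above, follows the rename from the private _scrollbox field to the public scrollbox property on the arrowscrollbox binding. A minimal sketch of the new access pattern, assuming browser chrome code where gBrowser is available:

```js
// Sketch only: the tab strip's arrowscrollbox now exposes its inner <scrollbox> publicly.
let arrowScrollbox = gBrowser.tabContainer.arrowScrollbox;
let scrollbox = arrowScrollbox.scrollbox;   // previously reached via the private _scrollbox field

// Reading and restoring the tab strip's horizontal scroll position.
let savedScrollLeft = scrollbox.scrollLeft;
scrollbox.scrollLeft = savedScrollLeft;
```
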
--- a/dom/base/nsImageLoadingContent.h
+++ b/dom/base/nsImageLoadingContent.h
@@ -195,18 +195,16 @@ class nsImageLoadingContent : public nsI
    * DestroyImageLoadingContent from their destructor, or earlier.  It
    * does things that cannot be done in ~nsImageLoadingContent because
    * they rely on being able to QueryInterface to other derived classes,
    * which cannot happen once the derived class destructor has started
    * calling the base class destructors.
    */
   void DestroyImageLoadingContent();
 
-  void ClearBrokenState() { mBroken = false; }
-
   /**
    * Returns the CORS mode that will be used for all future image loads. The
    * default implementation returns CORS_NONE unconditionally.
    */
   virtual mozilla::CORSMode GetCORSMode();
 
   virtual mozilla::net::ReferrerPolicy GetImageReferrerPolicy();
 
--- a/dom/html/HTMLImageElement.cpp
+++ b/dom/html/HTMLImageElement.cpp
@@ -520,21 +520,16 @@ nsresult HTMLImageElement::BindToTree(Do
     // This isn't necessary for responsive mode, since creating the
     // image load task is asynchronous we don't need to take special
     // care to avoid doing so when being filled by the parser.
 
     // Mark channel as urgent-start before load image if the image load is
     // initaiated by a user interaction.
     mUseUrgentStartForChannel = EventStateManager::IsHandlingUserInput();
 
-    // FIXME: Bug 660963 it would be nice if we could just have
-    // ClearBrokenState update our state and do it fast...
-    ClearBrokenState();
-    RemoveStatesSilently(NS_EVENT_STATE_BROKEN);
-
     // We still act synchronously for the non-responsive case (Bug
     // 1076583), but still need to delay if it is unsafe to run
     // script.
 
     // If loading is temporarily disabled, don't even launch MaybeLoadImage.
     // Otherwise MaybeLoadImage may run later when someone has reenabled
     // loading.
     if (LoadingEnabled() && OwnerDoc()->ShouldLoadImages()) {
--- a/dom/html/HTMLInputElement.cpp
+++ b/dom/html/HTMLInputElement.cpp
@@ -4334,20 +4334,16 @@ nsresult HTMLInputElement::BindToTree(Do
   if (mType == NS_FORM_INPUT_IMAGE) {
     // Our base URI may have changed; claim that our URI changed, and the
     // nsImageLoadingContent will decide whether a new image load is warranted.
     if (HasAttr(kNameSpaceID_None, nsGkAtoms::src)) {
       // Mark channel as urgent-start before load image if the image load is
       // initaiated by a user interaction.
       mUseUrgentStartForChannel = EventStateManager::IsHandlingUserInput();
 
-      // FIXME: Bug 660963 it would be nice if we could just have
-      // ClearBrokenState update our state and do it fast...
-      ClearBrokenState();
-      RemoveStatesSilently(NS_EVENT_STATE_BROKEN);
       nsContentUtils::AddScriptRunner(
           NewRunnableMethod("dom::HTMLInputElement::MaybeLoadImage", this,
                             &HTMLInputElement::MaybeLoadImage));
     }
   }
 
   // Add radio to document if we don't have a form already (if we do it's
   // already been added into that group)
new file mode 100644
--- /dev/null
+++ b/dom/html/reftests/bug1512297-ref.html
@@ -0,0 +1,7 @@
+<!DOCTYPE html>
+<html>
+<head></head>
+<body>
+<div><img src="" alt="ALT"></div>
+</body>
+</html>
new file mode 100644
--- /dev/null
+++ b/dom/html/reftests/bug1512297.html
@@ -0,0 +1,14 @@
+<!DOCTYPE html>
+<html>
+<head class="reftest-wait"></head>
+<body>
+<div><img src="" alt="ALT"></div>
+<script>
+var img = document.querySelector('img');
+img.remove();
+
+var div = document.querySelector('div');
+div.appendChild(img);
+</script>
+</body>
+</html>
--- a/dom/html/reftests/reftest.list
+++ b/dom/html/reftests/reftest.list
@@ -59,14 +59,16 @@ fuzzy(0-3,0-640) fuzzy-if(skiaContent,0-
 
 # Test imageset is using permissions.default.image
 pref(permissions.default.image,1) HTTP == bug1196784-with-srcset.html bug1196784-no-srcset.html
 pref(permissions.default.image,2) HTTP == bug1196784-with-srcset.html bug1196784-no-srcset.html
 
 # Test video with rotation information can be rotated.
 == bug1228601-video-rotation-90.html bug1228601-video-rotated-ref.html
 
+== bug1512297.html bug1512297-ref.html
+
 # Test that dynamically setting body margin attributes updates style appropriately
 == body-topmargin-dynamic.html body-topmargin-ref.html
 
 # Test that dynamically removing a nonmargin mapped attribute does not
 # destroy margins inherited from the frame.
 == body-frame-margin-remove-other-pres-hint.html body-frame-margin-remove-other-pres-hint-ref.html
--- a/dom/svg/SVGFEImageElement.cpp
+++ b/dom/svg/SVGFEImageElement.cpp
@@ -137,20 +137,16 @@ nsresult SVGFEImageElement::BindToTree(D
   nsresult rv =
       SVGFEImageElementBase::BindToTree(aDocument, aParent, aBindingParent);
   NS_ENSURE_SUCCESS(rv, rv);
 
   nsImageLoadingContent::BindToTree(aDocument, aParent, aBindingParent);
 
   if (mStringAttributes[HREF].IsExplicitlySet() ||
       mStringAttributes[XLINK_HREF].IsExplicitlySet()) {
-    // FIXME: Bug 660963 it would be nice if we could just have
-    // ClearBrokenState update our state and do it fast...
-    ClearBrokenState();
-    RemoveStatesSilently(NS_EVENT_STATE_BROKEN);
     nsContentUtils::AddScriptRunner(
         NewRunnableMethod("dom::SVGFEImageElement::MaybeLoadSVGImage", this,
                           &SVGFEImageElement::MaybeLoadSVGImage));
   }
 
   return rv;
 }
 
--- a/dom/svg/SVGImageElement.cpp
+++ b/dom/svg/SVGImageElement.cpp
@@ -185,20 +185,16 @@ nsresult SVGImageElement::BindToTree(Doc
   nsresult rv =
       SVGImageElementBase::BindToTree(aDocument, aParent, aBindingParent);
   NS_ENSURE_SUCCESS(rv, rv);
 
   nsImageLoadingContent::BindToTree(aDocument, aParent, aBindingParent);
 
   if (mStringAttributes[HREF].IsExplicitlySet() ||
       mStringAttributes[XLINK_HREF].IsExplicitlySet()) {
-    // FIXME: Bug 660963 it would be nice if we could just have
-    // ClearBrokenState update our state and do it fast...
-    ClearBrokenState();
-    RemoveStatesSilently(NS_EVENT_STATE_BROKEN);
     nsContentUtils::AddScriptRunner(
         NewRunnableMethod("dom::SVGImageElement::MaybeLoadSVGImage", this,
                           &SVGImageElement::MaybeLoadSVGImage));
   }
 
   return rv;
 }
 
--- a/layout/xul/nsMenuFrame.cpp
+++ b/layout/xul/nsMenuFrame.cpp
@@ -1203,18 +1203,17 @@ bool nsMenuFrame::SizeToPopup(nsBoxLayou
       //      border-padding
       //  (3) there's enough room in the popup for the content and its
       //      scrollbar
       nsMargin borderPadding;
       GetXULBorderAndPadding(borderPadding);
 
       // if there is a scroll frame, add the desired width of the scrollbar as
       // well
-      nsIScrollableFrame* scrollFrame =
-          do_QueryFrame(popupFrame->PrincipalChildList().FirstChild());
+      nsIScrollableFrame* scrollFrame = popupFrame->GetScrollFrame(popupFrame);
       nscoord scrollbarWidth = 0;
       if (scrollFrame) {
         scrollbarWidth =
             scrollFrame->GetDesiredScrollbarSizes(&aState).LeftRight();
       }
 
       aSize.width =
           tmpSize.width + std::max(borderPadding.LeftRight(), scrollbarWidth);
--- a/layout/xul/nsMenuPopupFrame.cpp
+++ b/layout/xul/nsMenuPopupFrame.cpp
@@ -1043,18 +1043,17 @@ nsPoint nsMenuPopupFrame::AdjustPosition
 
     // Only adjust the popup if it just opened, otherwise the popup will move
     // around if its gets resized or the selection changed. Cache the value in
     // mPositionedOffset and use that instead for any future calculations.
     if (mIsOpenChanged || mReflowCallbackData.mIsOpenChanged) {
       nsIFrame* selectedItemFrame = GetSelectedItemForAlignment();
       if (selectedItemFrame) {
         int32_t scrolly = 0;
-        nsIScrollableFrame* scrollframe =
-            do_QueryFrame(nsBox::GetChildXULBox(this));
+        nsIScrollableFrame* scrollframe = GetScrollFrame(this);
         if (scrollframe) {
           scrolly = scrollframe->GetScrollPosition().y;
         }
 
         mPositionedOffset = originalAnchorRect.height +
                             selectedItemFrame->GetRect().y - scrolly;
       }
     }
--- a/third_party/rust/proc-macro2/.cargo-checksum.json
+++ b/third_party/rust/proc-macro2/.cargo-checksum.json
@@ -1,1 +1,1 @@
-{"files":{"Cargo.toml":"d271b6306e4b9e51b642d67ca9c35f6f32d582eb549da89085799aadc9bcc626","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"261fb7bbe050bbff8a8e33da68926b44cd1bbd2b1e8b655d19ae681b8fff3c6e","src/lib.rs":"b7483dd58c6defa21d68d163eeae8d03029fed83e96071edaacae3f694f2bd04","src/stable.rs":"c325eadc1f0a78c55117589e6bacb72dd295ccd02cb3e2dea13e1381ad2e972e","src/strnom.rs":"807c377bdb49b8b1c67d013089b8ff33fe93ffd3fa36b6440dbb1d6fe8cd9c17","src/unstable.rs":"a18f0fdb7c0670b73663dc708962959176ae5b1c2623e7f36c3767ed9c3bcfef","tests/test.rs":"428f4298e16a23db8f8fbb6101a30e993f08dc0befa2d95439dcefb364d7a7cf"},"package":"cccdc7557a98fe98453030f077df7f3a042052fae465bb61d2c2c41435cfd9b6"}
\ No newline at end of file
+{"files":{"Cargo.toml":"f020c87cba7dd2260861239307b2cb93e16c2bed6e2ef6c9178642b1dfcc43a3","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"538fd635d385b6a90ef4cc1e361aad717162a139e932a6192212cad8407aa8e1","build.rs":"7698abdd3087e0f3308916c37ade3349b6b000165186b80913013af18d36ecb6","src/lib.rs":"c5c276236d828189a5151c890a66f2b7d1c02beca98f08f2d9c01166df441eb2","src/stable.rs":"a1f29e850e5fc4c602ee1204847124e266087175695d77ec448016db910acb6b","src/strnom.rs":"807c377bdb49b8b1c67d013089b8ff33fe93ffd3fa36b6440dbb1d6fe8cd9c17","src/unstable.rs":"0b7f86862d8254104330d14837ea6ec89e7b3bf2ffe910b73629269f2bc282de","tests/marker.rs":"0227d07bbc7f2e2ad34662a6acb65668b7dc2f79141c4faa672703a04e27bea0","tests/test.rs":"427821bab498926aa56bfcea7d28c36fb24a7d63d7f59d3e7e097bcfc77fe95b"},"package":"77619697826f31a02ae974457af0b29b723e5619e113e9397b8b82c6bd253f09"}
\ No newline at end of file
--- a/third_party/rust/proc-macro2/Cargo.toml
+++ b/third_party/rust/proc-macro2/Cargo.toml
@@ -7,29 +7,32 @@
 #
 # If you believe there's an error in this file please file an
 # issue against the rust-lang/cargo repository. If you're
 # editing this file be aware that the upstream Cargo.toml
 # will likely look very different (and much more reasonable)
 
 [package]
 name = "proc-macro2"
-version = "0.4.9"
+version = "0.4.24"
 authors = ["Alex Crichton <alex@alexcrichton.com>"]
+build = "build.rs"
 description = "A stable implementation of the upcoming new `proc_macro` API. Comes with an\noption, off by default, to also reimplement itself in terms of the upstream\nunstable API.\n"
 homepage = "https://github.com/alexcrichton/proc-macro2"
 documentation = "https://docs.rs/proc-macro2"
 readme = "README.md"
 keywords = ["macros"]
 license = "MIT/Apache-2.0"
 repository = "https://github.com/alexcrichton/proc-macro2"
 [package.metadata.docs.rs]
+rustc-args = ["--cfg", "procmacro2_semver_exempt"]
 rustdoc-args = ["--cfg", "procmacro2_semver_exempt"]
-
-[lib]
-doctest = false
 [dependencies.unicode-xid]
 version = "0.1"
+[dev-dependencies.quote]
+version = "0.6"
 
 [features]
 default = ["proc-macro"]
 nightly = ["proc-macro"]
 proc-macro = []
+[badges.travis-ci]
+repository = "alexcrichton/proc-macro2"
--- a/third_party/rust/proc-macro2/README.md
+++ b/third_party/rust/proc-macro2/README.md
@@ -1,34 +1,29 @@
 # proc-macro2
 
 [![Build Status](https://api.travis-ci.org/alexcrichton/proc-macro2.svg?branch=master)](https://travis-ci.org/alexcrichton/proc-macro2)
 [![Latest Version](https://img.shields.io/crates/v/proc-macro2.svg)](https://crates.io/crates/proc-macro2)
 [![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/proc-macro2)
 
 A small shim over the `proc_macro` crate in the compiler intended to multiplex
-the current stable interface (as of 2017-07-05) and the [upcoming richer
-interface][upcoming].
+the stable interface as of 1.15.0 and the interface as of 1.30.0.
 
-[upcoming]: https://github.com/rust-lang/rust/pull/40939
-
-The upcoming support has features like:
+New features added in Rust 1.30.0 include:
 
 * Span information on tokens
 * No need to go in/out through strings
 * Structured input/output
 
-The hope is that libraries ported to `proc_macro2` will be trivial to port to
-the real `proc_macro` crate once the support on nightly is stabilized.
+Libraries ported to `proc_macro2` can retain support for older compilers while
+continuing to get all the nice benefits of using a 1.30.0+ compiler.
 
 ## Usage
 
-This crate by default compiles on the stable version of the compiler. It only
-uses the stable surface area of the `proc_macro` crate upstream in the compiler
-itself. Usage is done via:
+This crate compiles on all 1.15.0+ stable compilers and usage looks like:
 
 ```toml
 [dependencies]
 proc-macro2 = "0.4"
 ```
 
 followed by
 
@@ -43,48 +38,37 @@ pub fn my_derive(input: proc_macro::Toke
     let output: proc_macro2::TokenStream = {
         /* transform input */
     };
 
     output.into()
 }
 ```
 
-If you'd like you can enable the `nightly` feature in this crate. This will
-cause it to compile against the **unstable and nightly-only** features of the
-`proc_macro` crate. This in turn requires a nightly compiler. This should help
-preserve span information, however, coming in from the compiler itself.
-
-You can enable this feature via:
-
-```toml
-[dependencies]
-proc-macro2 = { version = "0.4", features = ["nightly"] }
-```
-
+The 1.30.0 compiler is automatically detected and its interfaces are used when
+available.
 
 ## Unstable Features
 
 `proc-macro2` supports exporting some methods from `proc_macro` which are
-currently highly unstable, and may not be stabilized in the first pass of
+currently highly unstable, and are not stabilized in the first pass of
 `proc_macro` stabilizations. These features are not exported by default. Minor
 versions of `proc-macro2` may make breaking changes to them at any time.
 
 To enable these features, the `procmacro2_semver_exempt` config flag must be
 passed to rustc.
 
 ```
 RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo build
 ```
 
 Note that this must not only be done for your crate, but for any crate that
 depends on your crate. This infectious nature is intentional, as it serves as a
 reminder that you are outside of the normal semver guarantees.
 
-
 # License
 
 This project is licensed under either of
 
  * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or
    http://www.apache.org/licenses/LICENSE-2.0)
  * MIT license ([LICENSE-MIT](LICENSE-MIT) or
    http://opensource.org/licenses/MIT)
new file mode 100644
--- /dev/null
+++ b/third_party/rust/proc-macro2/build.rs
@@ -0,0 +1,67 @@
+use std::env;
+use std::process::Command;
+use std::str;
+
+fn main() {
+    println!("cargo:rerun-if-changed=build.rs");
+
+    let target = env::var("TARGET").unwrap();
+
+    let minor = match rustc_minor_version() {
+        Some(n) => n,
+        None => return,
+    };
+
+    if minor >= 26 {
+        println!("cargo:rustc-cfg=u128");
+    }
+
+    if !enable_use_proc_macro(&target) {
+        return;
+    }
+    println!("cargo:rustc-cfg=use_proc_macro");
+
+    // Rust 1.29 stabilized the necessary APIs in the `proc_macro` crate
+    if (minor >= 29 && !cfg!(procmacro2_semver_exempt)) || cfg!(feature = "nightly") {
+        println!("cargo:rustc-cfg=wrap_proc_macro");
+
+        if cfg!(procmacro2_semver_exempt) {
+            println!("cargo:rustc-cfg=super_unstable");
+            // https://github.com/alexcrichton/proc-macro2/issues/147
+            println!("cargo:rustc-cfg=procmacro2_semver_exempt");
+        }
+    }
+
+    if minor == 29 {
+        println!("cargo:rustc-cfg=slow_extend");
+    }
+}
+
+fn enable_use_proc_macro(target: &str) -> bool {
+    // wasm targets don't have the `proc_macro` crate, disable this feature.
+    if target.contains("wasm32") {
+        return false;
+    }
+
+    // Otherwise, only enable it if our feature is actually enabled.
+    cfg!(feature = "proc-macro")
+}
+
+fn rustc_minor_version() -> Option<u32> {
+    macro_rules! otry {
+        ($e:expr) => {
+            match $e {
+                Some(e) => e,
+                None => return None,
+            }
+        };
+    }
+    let rustc = otry!(env::var_os("RUSTC"));
+    let output = otry!(Command::new(rustc).arg("--version").output().ok());
+    let version = otry!(str::from_utf8(&output.stdout).ok());
+    let mut pieces = version.split('.');
+    if pieces.next() != Some("rustc 1") {
+        return None;
+    }
+    otry!(pieces.next()).parse().ok()
+}
--- a/third_party/rust/proc-macro2/src/lib.rs
+++ b/third_party/rust/proc-macro2/src/lib.rs
@@ -38,39 +38,44 @@
 //! Note that this must not only be done for your crate, but for any crate that
 //! depends on your crate. This infectious nature is intentional, as it serves
 //! as a reminder that you are outside of the normal semver guarantees.
 //!
 //! [`proc_macro`]: https://doc.rust-lang.org/proc_macro/
 //! [ts]: https://doc.rust-lang.org/proc_macro/struct.TokenStream.html
 
 // Proc-macro2 types in rustdoc of other crates get linked to here.
-#![doc(html_root_url = "https://docs.rs/proc-macro2/0.4.9")]
-#![cfg_attr(feature = "nightly", feature(proc_macro_raw_ident, proc_macro_span))]
+#![doc(html_root_url = "https://docs.rs/proc-macro2/0.4.24")]
+#![cfg_attr(
+    super_unstable,
+    feature(proc_macro_raw_ident, proc_macro_span, proc_macro_def_site)
+)]
 
-#[cfg(feature = "proc-macro")]
+#[cfg(use_proc_macro)]
 extern crate proc_macro;
 extern crate unicode_xid;
 
 use std::cmp::Ordering;
 use std::fmt;
 use std::hash::{Hash, Hasher};
 use std::iter::FromIterator;
 use std::marker;
+#[cfg(procmacro2_semver_exempt)]
+use std::path::PathBuf;
 use std::rc::Rc;
 use std::str::FromStr;
 
 #[macro_use]
 mod strnom;
 mod stable;
 
-#[cfg(not(feature = "nightly"))]
+#[cfg(not(wrap_proc_macro))]
 use stable as imp;
 #[path = "unstable.rs"]
-#[cfg(feature = "nightly")]
+#[cfg(wrap_proc_macro)]
 mod imp;
 
 /// An abstract stream of tokens, or more concretely a sequence of token trees.
 ///
 /// This type provides interfaces for iterating over token trees and for
 /// collecting token trees into one stream.
 ///
 /// Token stream is both the input and output of `#[proc_macro]`,
@@ -141,42 +146,54 @@ impl FromStr for TokenStream {
         let e = src.parse().map_err(|e| LexError {
             inner: e,
             _marker: marker::PhantomData,
         })?;
         Ok(TokenStream::_new(e))
     }
 }
 
-#[cfg(feature = "proc-macro")]
+#[cfg(use_proc_macro)]
 impl From<proc_macro::TokenStream> for TokenStream {
     fn from(inner: proc_macro::TokenStream) -> TokenStream {
         TokenStream::_new(inner.into())
     }
 }
 
-#[cfg(feature = "proc-macro")]
+#[cfg(use_proc_macro)]
 impl From<TokenStream> for proc_macro::TokenStream {
     fn from(inner: TokenStream) -> proc_macro::TokenStream {
         inner.inner.into()
     }
 }
 
 impl Extend<TokenTree> for TokenStream {
     fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
         self.inner.extend(streams)
     }
 }
 
+impl Extend<TokenStream> for TokenStream {
+    fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
+        self.inner
+            .extend(streams.into_iter().map(|stream| stream.inner))
+    }
+}
+
 /// Collects a number of token trees into a single stream.
 impl FromIterator<TokenTree> for TokenStream {
     fn from_iter<I: IntoIterator<Item = TokenTree>>(streams: I) -> Self {
         TokenStream::_new(streams.into_iter().collect())
     }
 }
+impl FromIterator<TokenStream> for TokenStream {
+    fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+        TokenStream::_new(streams.into_iter().map(|i| i.inner).collect())
+    }
+}
 
 /// Prints the token stream as a string that is supposed to be losslessly
 /// convertible back into the same token stream (modulo spans), except for
 /// possibly `TokenTree::Group`s with `Delimiter::None` delimiters and negative
 /// numeric literals.
 impl fmt::Display for TokenStream {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         self.inner.fmt(f)
@@ -191,64 +208,63 @@ impl fmt::Debug for TokenStream {
 }
 
 impl fmt::Debug for LexError {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         self.inner.fmt(f)
     }
 }
 
-// Returned by reference, so we can't easily wrap it.
-#[cfg(procmacro2_semver_exempt)]
-pub use imp::FileName;
-
 /// The source file of a given `Span`.
 ///
 /// This type is semver exempt and not exposed by default.
 #[cfg(procmacro2_semver_exempt)]
 #[derive(Clone, PartialEq, Eq)]
-pub struct SourceFile(imp::SourceFile);
+pub struct SourceFile {
+    inner: imp::SourceFile,
+    _marker: marker::PhantomData<Rc<()>>,
+}
 
 #[cfg(procmacro2_semver_exempt)]
 impl SourceFile {
+    fn _new(inner: imp::SourceFile) -> Self {
+        SourceFile {
+            inner: inner,
+            _marker: marker::PhantomData,
+        }
+    }
+
     /// Get the path to this source file.
     ///
     /// ### Note
     ///
     /// If the code span associated with this `SourceFile` was generated by an
     /// external macro, this may not be an actual path on the filesystem. Use
     /// [`is_real`] to check.
     ///
     /// Also note that even if `is_real` returns `true`, if
     /// `--remap-path-prefix` was passed on the command line, the path as given
     /// may not actually be valid.
     ///
     /// [`is_real`]: #method.is_real
-    pub fn path(&self) -> &FileName {
-        self.0.path()
+    pub fn path(&self) -> PathBuf {
+        self.inner.path()
     }
 
     /// Returns `true` if this source file is a real source file, and not
     /// generated by an external macro's expansion.
     pub fn is_real(&self) -> bool {
-        self.0.is_real()
-    }
-}
-
-#[cfg(procmacro2_semver_exempt)]
-impl AsRef<FileName> for SourceFile {
-    fn as_ref(&self) -> &FileName {
-        self.0.path()
+        self.inner.is_real()
     }
 }
 
 #[cfg(procmacro2_semver_exempt)]
 impl fmt::Debug for SourceFile {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        self.0.fmt(f)
+        self.inner.fmt(f)
     }
 }
 
 /// A line-column pair representing the start or end of a `Span`.
 ///
 /// This type is semver exempt and not exposed by default.
 #[cfg(procmacro2_semver_exempt)]
 pub struct LineColumn {
@@ -313,27 +329,28 @@ impl Span {
     ///
     /// This method is semver exempt and not exposed by default.
     #[cfg(procmacro2_semver_exempt)]
     pub fn located_at(&self, other: Span) -> Span {
         Span::_new(self.inner.located_at(other.inner))
     }
 
     /// This method is only available when the `"nightly"` feature is enabled.
-    #[cfg(all(feature = "nightly", feature = "proc-macro"))]
+    #[doc(hidden)]
+    #[cfg(any(feature = "nightly", super_unstable))]
     pub fn unstable(self) -> proc_macro::Span {
         self.inner.unstable()
     }
 
     /// The original source file into which this span points.
     ///
     /// This method is semver exempt and not exposed by default.
     #[cfg(procmacro2_semver_exempt)]
     pub fn source_file(&self) -> SourceFile {
-        SourceFile(self.inner.source_file())
+        SourceFile::_new(self.inner.source_file())
     }
 
     /// Get the starting line/column in the source file for this span.
     ///
     /// This method is semver exempt and not exposed by default.
     #[cfg(procmacro2_semver_exempt)]
     pub fn start(&self) -> LineColumn {
         let imp::LineColumn { line, column } = self.inner.start();
@@ -481,19 +498,17 @@ impl fmt::Debug for TokenTree {
 }
 
 /// A delimited token stream.
 ///
 /// A `Group` internally contains a `TokenStream` which is surrounded by
 /// `Delimiter`s.
 #[derive(Clone)]
 pub struct Group {
-    delimiter: Delimiter,
-    stream: TokenStream,
-    span: Span,
+    inner: imp::Group,
 }
 
 /// Describes how a sequence of token trees is delimited.
 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
 pub enum Delimiter {
     /// `( ... )`
     Parenthesis,
     /// `{ ... }`
@@ -506,87 +521,108 @@ pub enum Delimiter {
     /// coming from a "macro variable" `$var`. It is important to preserve
     /// operator priorities in cases like `$var * 3` where `$var` is `1 + 2`.
     /// Implicit delimiters may not survive roundtrip of a token stream through
     /// a string.
     None,
 }
 
 impl Group {
+    fn _new(inner: imp::Group) -> Self {
+        Group {
+            inner: inner,
+        }
+    }
+
+    fn _new_stable(inner: stable::Group) -> Self {
+        Group {
+            inner: inner.into(),
+        }
+    }
+
     /// Creates a new `Group` with the given delimiter and token stream.
     ///
     /// This constructor will set the span for this group to
     /// `Span::call_site()`. To change the span you can use the `set_span`
     /// method below.
     pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group {
         Group {
-            delimiter: delimiter,
-            stream: stream,
-            span: Span::call_site(),
+            inner: imp::Group::new(delimiter, stream.inner),
         }
     }
 
     /// Returns the delimiter of this `Group`
     pub fn delimiter(&self) -> Delimiter {
-        self.delimiter
+        self.inner.delimiter()
     }
 
     /// Returns the `TokenStream` of tokens that are delimited in this `Group`.
     ///
     /// Note that the returned token stream does not include the delimiter
     /// returned above.
     pub fn stream(&self) -> TokenStream {
-        self.stream.clone()
+        TokenStream::_new(self.inner.stream())
     }
 
     /// Returns the span for the delimiters of this token stream, spanning the
     /// entire `Group`.
+    ///
+    /// ```text
+    /// pub fn span(&self) -> Span {
+    ///            ^^^^^^^
+    /// ```
     pub fn span(&self) -> Span {
-        self.span
+        Span::_new(self.inner.span())
+    }
+
+    /// Returns the span pointing to the opening delimiter of this group.
+    ///
+    /// ```text
+    /// pub fn span_open(&self) -> Span {
+    ///                 ^
+    /// ```
+    #[cfg(procmacro2_semver_exempt)]
+    pub fn span_open(&self) -> Span {
+        Span::_new(self.inner.span_open())
+    }
+
+    /// Returns the span pointing to the closing delimiter of this group.
+    ///
+    /// ```text
+    /// pub fn span_close(&self) -> Span {
+    ///                        ^
+    /// ```
+    #[cfg(procmacro2_semver_exempt)]
+    pub fn span_close(&self) -> Span {
+        Span::_new(self.inner.span_close())
     }
 
     /// Configures the span for this `Group`'s delimiters, but not its internal
     /// tokens.
     ///
     /// This method will **not** set the span of all the internal tokens spanned
     /// by this group, but rather it will only set the span of the delimiter
     /// tokens at the level of the `Group`.
     pub fn set_span(&mut self, span: Span) {
-        self.span = span;
+        self.inner.set_span(span.inner)
     }
 }
 
 /// Prints the group as a string that should be losslessly convertible back
 /// into the same group (modulo spans), except for possibly `TokenTree::Group`s
 /// with `Delimiter::None` delimiters.
 impl fmt::Display for Group {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        let (left, right) = match self.delimiter {
-            Delimiter::Parenthesis => ("(", ")"),
-            Delimiter::Brace => ("{", "}"),
-            Delimiter::Bracket => ("[", "]"),
-            Delimiter::None => ("", ""),
-        };
-
-        f.write_str(left)?;
-        self.stream.fmt(f)?;
-        f.write_str(right)?;
-
-        Ok(())
+    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+        fmt::Display::fmt(&self.inner, formatter)
     }
 }
 
 impl fmt::Debug for Group {
-    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
-        let mut debug = fmt.debug_struct("Group");
-        debug.field("delimiter", &self.delimiter);
-        debug.field("stream", &self.stream);
-        #[cfg(procmacro2_semver_exempt)]
-        debug.field("span", &self.span);
-        debug.finish()
+    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+        fmt::Debug::fmt(&self.inner, formatter)
     }
 }
 
 /// A `Punct` is a single punctuation character like `+`, `-` or `#`.
 ///
 /// Multicharacter operators like `+=` are represented as two instances of
 /// `Punct` with different forms of `Spacing` returned.
 #[derive(Clone)]
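As the doc comment above notes, a multicharacter operator such as `+=` becomes two `Punct` tokens whose `Spacing` records whether the operator continues. A small illustrative sketch (standalone program, not part of this diff):

```rust
extern crate proc_macro2;

use proc_macro2::{Punct, Spacing, TokenStream, TokenTree};

fn main() {
    // '+' is Joint because more of the operator follows; '=' is Alone.
    let plus = Punct::new('+', Spacing::Joint);
    let eq = Punct::new('=', Spacing::Alone);

    let stream: TokenStream = vec![TokenTree::Punct(plus), TokenTree::Punct(eq)]
        .into_iter()
        .collect();
    // Joint spacing suppresses the separating space when printed, e.g. `+=`.
    println!("{}", stream);
}
```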
@@ -674,21 +710,21 @@ impl fmt::Debug for Punct {
 /// An identifier consists of at least one Unicode code point, the first of
 /// which has the XID_Start property and the rest of which have the XID_Continue
 /// property.
 ///
 /// - The empty string is not an identifier. Use `Option<Ident>`.
 /// - A lifetime is not an identifier. Use `syn::Lifetime` instead.
 ///
 /// An identifier constructed with `Ident::new` is permitted to be a Rust
-/// keyword, though parsing one through its [`Synom`] implementation rejects
-/// Rust keywords. Use `call!(Ident::parse_any)` when parsing to match the
+/// keyword, though parsing one through its [`Parse`] implementation rejects
+/// Rust keywords. Use `input.call(Ident::parse_any)` when parsing to match the
 /// behaviour of `Ident::new`.
 ///
-/// [`Synom`]: https://docs.rs/syn/0.14/syn/synom/trait.Synom.html
+/// [`Parse`]: https://docs.rs/syn/0.15/syn/parse/trait.Parse.html
 ///
 /// # Examples
 ///
 /// A new ident can be created from a string using the `Ident::new` function.
 /// A span must be provided explicitly which governs the name resolution
 /// behavior of the resulting identifier.
 ///
 /// ```rust
@@ -806,26 +842,26 @@ impl Ident {
     /// context.
     pub fn set_span(&mut self, span: Span) {
         self.inner.set_span(span.inner);
     }
 }
 
 impl PartialEq for Ident {
     fn eq(&self, other: &Ident) -> bool {
-        self.to_string() == other.to_string()
+        self.inner == other.inner
     }
 }
 
 impl<T> PartialEq<T> for Ident
 where
     T: ?Sized + AsRef<str>,
 {
     fn eq(&self, other: &T) -> bool {
-        self.to_string() == other.as_ref()
+        self.inner == other
     }
 }
 
 impl Eq for Ident {}
 
 impl PartialOrd for Ident {
     fn partial_cmp(&self, other: &Ident) -> Option<Ordering> {
         Some(self.cmp(other))
@@ -933,29 +969,41 @@ impl Literal {
         usize_suffixed => usize,
         i8_suffixed => i8,
         i16_suffixed => i16,
         i32_suffixed => i32,
         i64_suffixed => i64,
         isize_suffixed => isize,
     }
 
+    #[cfg(u128)]
+    suffixed_int_literals! {
+        u128_suffixed => u128,
+        i128_suffixed => i128,
+    }
+
     unsuffixed_int_literals! {
         u8_unsuffixed => u8,
         u16_unsuffixed => u16,
         u32_unsuffixed => u32,
         u64_unsuffixed => u64,
         usize_unsuffixed => usize,
         i8_unsuffixed => i8,
         i16_unsuffixed => i16,
         i32_unsuffixed => i32,
         i64_unsuffixed => i64,
         isize_unsuffixed => isize,
     }
 
+    #[cfg(u128)]
+    unsuffixed_int_literals! {
+        u128_unsuffixed => u128,
+        i128_unsuffixed => i128,
+    }
+
     pub fn f64_unsuffixed(f: f64) -> Literal {
         assert!(f.is_finite());
         Literal::_new(imp::Literal::f64_unsuffixed(f))
     }
 
     pub fn f64_suffixed(f: f64) -> Literal {
         assert!(f.is_finite());
         Literal::_new(imp::Literal::f64_suffixed(f))
--- a/third_party/rust/proc-macro2/src/stable.rs
+++ b/third_party/rust/proc-macro2/src/stable.rs
@@ -1,23 +1,26 @@
 #![cfg_attr(not(procmacro2_semver_exempt), allow(dead_code))]
 
 #[cfg(procmacro2_semver_exempt)]
 use std::cell::RefCell;
 #[cfg(procmacro2_semver_exempt)]
 use std::cmp;
 use std::fmt;
 use std::iter;
+#[cfg(procmacro2_semver_exempt)]
+use std::path::Path;
+use std::path::PathBuf;
 use std::str::FromStr;
 use std::vec;
 
 use strnom::{block_comment, skip_whitespace, whitespace, word_break, Cursor, PResult};
 use unicode_xid::UnicodeXID;
 
-use {Delimiter, Group, Punct, Spacing, TokenTree};
+use {Delimiter, Punct, Spacing, TokenTree};
 
 #[derive(Clone)]
 pub struct TokenStream {
     inner: Vec<TokenTree>,
 }
 
 #[derive(Debug)]
 pub struct LexError;
@@ -111,27 +114,27 @@ impl fmt::Display for TokenStream {
 
 impl fmt::Debug for TokenStream {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         f.write_str("TokenStream ")?;
         f.debug_list().entries(self.clone()).finish()
     }
 }
 
-#[cfg(feature = "proc-macro")]
+#[cfg(use_proc_macro)]
 impl From<::proc_macro::TokenStream> for TokenStream {
     fn from(inner: ::proc_macro::TokenStream) -> TokenStream {
         inner
             .to_string()
             .parse()
             .expect("compiler token stream parse failed")
     }
 }
 
-#[cfg(feature = "proc-macro")]
+#[cfg(use_proc_macro)]
 impl From<TokenStream> for ::proc_macro::TokenStream {
     fn from(inner: TokenStream) -> ::proc_macro::TokenStream {
         inner
             .to_string()
             .parse()
             .expect("failed to parse to compiler tokens")
     }
 }
@@ -149,70 +152,69 @@ impl iter::FromIterator<TokenTree> for T
         for token in streams.into_iter() {
             v.push(token);
         }
 
         TokenStream { inner: v }
     }
 }
 
+impl iter::FromIterator<TokenStream> for TokenStream {
+    fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+        let mut v = Vec::new();
+
+        for stream in streams.into_iter() {
+            v.extend(stream.inner);
+        }
+
+        TokenStream { inner: v }
+    }
+}
+
 impl Extend<TokenTree> for TokenStream {
     fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
         self.inner.extend(streams);
     }
 }
 
+impl Extend<TokenStream> for TokenStream {
+    fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
+        self.inner
+            .extend(streams.into_iter().flat_map(|stream| stream));
+    }
+}
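These two impls let whole streams be spliced together without iterating over individual token trees. Through the public API this looks roughly like the sketch below (assuming the matching `FromIterator<TokenStream>`/`Extend<TokenStream>` impls on `proc_macro2::TokenStream` in this release):

```rust
extern crate proc_macro2;

use proc_macro2::TokenStream;

fn main() {
    // Collect several independently parsed streams into one.
    let pieces = ["struct Demo", "{ x: u32 }"];
    let mut combined: TokenStream = pieces
        .iter()
        .map(|s| s.parse::<TokenStream>().expect("valid tokens"))
        .collect();

    // Extend an existing stream with further streams.
    combined.extend(vec!["impl Demo {}".parse::<TokenStream>().unwrap()]);
    println!("{}", combined);
}
```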
+
 pub type TokenTreeIter = vec::IntoIter<TokenTree>;
 
 impl IntoIterator for TokenStream {
     type Item = TokenTree;
     type IntoIter = TokenTreeIter;
 
     fn into_iter(self) -> TokenTreeIter {
         self.inner.into_iter()
     }
 }
 
-#[derive(Clone, PartialEq, Eq, Debug)]
-pub struct FileName(String);
-
-#[allow(dead_code)]
-pub fn file_name(s: String) -> FileName {
-    FileName(s)
-}
-
-impl fmt::Display for FileName {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        self.0.fmt(f)
-    }
-}
-
 #[derive(Clone, PartialEq, Eq)]
 pub struct SourceFile {
-    name: FileName,
+    path: PathBuf,
 }
 
 impl SourceFile {
     /// Get the path to this source file.
-    pub fn path(&self) -> &FileName {
-        &self.name
+    pub fn path(&self) -> PathBuf {
+        self.path.clone()
     }
 
     pub fn is_real(&self) -> bool {
         // XXX(nika): Support real files in the future?
         false
     }
 }
 
-impl AsRef<FileName> for SourceFile {
-    fn as_ref(&self) -> &FileName {
-        self.path()
-    }
-}
-
 impl fmt::Debug for SourceFile {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         f.debug_struct("SourceFile")
             .field("path", &self.path())
             .field("is_real", &self.is_real())
             .finish()
     }
 }
@@ -358,17 +360,17 @@ impl Span {
     }
 
     #[cfg(procmacro2_semver_exempt)]
     pub fn source_file(&self) -> SourceFile {
         CODEMAP.with(|cm| {
             let cm = cm.borrow();
             let fi = cm.fileinfo(*self);
             SourceFile {
-                name: FileName(fi.name.clone()),
+                path: Path::new(&fi.name).to_owned(),
             }
         })
     }
 
     #[cfg(procmacro2_semver_exempt)]
     pub fn start(&self) -> LineColumn {
         CODEMAP.with(|cm| {
             let cm = cm.borrow();
@@ -408,16 +410,85 @@ impl fmt::Debug for Span {
         return write!(f, "bytes({}..{})", self.lo, self.hi);
 
         #[cfg(not(procmacro2_semver_exempt))]
         write!(f, "Span")
     }
 }
 
 #[derive(Clone)]
+pub struct Group {
+    delimiter: Delimiter,
+    stream: TokenStream,
+    span: Span,
+}
+
+impl Group {
+    pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group {
+        Group {
+            delimiter: delimiter,
+            stream: stream,
+            span: Span::call_site(),
+        }
+    }
+
+    pub fn delimiter(&self) -> Delimiter {
+        self.delimiter
+    }
+
+    pub fn stream(&self) -> TokenStream {
+        self.stream.clone()
+    }
+
+    pub fn span(&self) -> Span {
+        self.span
+    }
+
+    pub fn span_open(&self) -> Span {
+        self.span
+    }
+
+    pub fn span_close(&self) -> Span {
+        self.span
+    }
+
+    pub fn set_span(&mut self, span: Span) {
+        self.span = span;
+    }
+}
+
+impl fmt::Display for Group {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        let (left, right) = match self.delimiter {
+            Delimiter::Parenthesis => ("(", ")"),
+            Delimiter::Brace => ("{", "}"),
+            Delimiter::Bracket => ("[", "]"),
+            Delimiter::None => ("", ""),
+        };
+
+        f.write_str(left)?;
+        self.stream.fmt(f)?;
+        f.write_str(right)?;
+
+        Ok(())
+    }
+}
+
+impl fmt::Debug for Group {
+    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+        let mut debug = fmt.debug_struct("Group");
+        debug.field("delimiter", &self.delimiter);
+        debug.field("stream", &self.stream);
+        #[cfg(procmacro2_semver_exempt)]
+        debug.field("span", &self.span);
+        debug.finish()
+    }
+}
+
+#[derive(Clone)]
 pub struct Ident {
     sym: String,
     span: Span,
     raw: bool,
 }
 
 impl Ident {
     fn _new(string: &str, raw: bool, span: Span) -> Ident {
@@ -488,16 +559,36 @@ fn validate_term(string: &str) {
         true
     }
 
     if !ident_ok(validate) {
         panic!("{:?} is not a valid Ident", string);
     }
 }
 
+impl PartialEq for Ident {
+    fn eq(&self, other: &Ident) -> bool {
+        self.sym == other.sym && self.raw == other.raw
+    }
+}
+
+impl<T> PartialEq<T> for Ident
+where
+    T: ?Sized + AsRef<str>,
+{
+    fn eq(&self, other: &T) -> bool {
+        let other = other.as_ref();
+        if self.raw {
+            other.starts_with("r#") && self.sym == other[2..]
+        } else {
+            self.sym == other
+        }
+    }
+}
+
 impl fmt::Display for Ident {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         if self.raw {
             "r#".fmt(f)?;
         }
         self.sym.fmt(f)
     }
 }
@@ -565,29 +656,41 @@ impl Literal {
         i32_suffixed => i32,
         i64_suffixed => i64,
         isize_suffixed => isize,
 
         f32_suffixed => f32,
         f64_suffixed => f64,
     }
 
+    #[cfg(u128)]
+    suffixed_numbers! {
+        u128_suffixed => u128,
+        i128_suffixed => i128,
+    }
+
     unsuffixed_numbers! {
         u8_unsuffixed => u8,
         u16_unsuffixed => u16,
         u32_unsuffixed => u32,
         u64_unsuffixed => u64,
         usize_unsuffixed => usize,
         i8_unsuffixed => i8,
         i16_unsuffixed => i16,
         i32_unsuffixed => i32,
         i64_unsuffixed => i64,
         isize_unsuffixed => isize,
     }
 
+    #[cfg(u128)]
+    unsuffixed_numbers! {
+        u128_unsuffixed => u128,
+        i128_unsuffixed => i128,
+    }
+
     pub fn f32_unsuffixed(f: f32) -> Literal {
         let mut s = f.to_string();
         if !s.contains(".") {
             s.push_str(".0");
         }
         Literal::_new(s)
     }
 
@@ -703,43 +806,43 @@ fn spanned<'a, T>(
 
 fn token_tree(input: Cursor) -> PResult<TokenTree> {
     let (rest, (mut tt, span)) = spanned(input, token_kind)?;
     tt.set_span(span);
     Ok((rest, tt))
 }
 
 named!(token_kind -> TokenTree, alt!(
-    map!(group, TokenTree::Group)
+    map!(group, |g| TokenTree::Group(::Group::_new_stable(g)))
     |
     map!(literal, |l| TokenTree::Literal(::Literal::_new_stable(l))) // must be before symbol
     |
     map!(op, TokenTree::Punct)
     |
     symbol_leading_ws
 ));
 
 named!(group -> Group, alt!(
     delimited!(
         punct!("("),
         token_stream,
         punct!(")")
-    ) => { |ts| Group::new(Delimiter::Parenthesis, ::TokenStream::_new_stable(ts)) }
+    ) => { |ts| Group::new(Delimiter::Parenthesis, ts) }
     |
     delimited!(
         punct!("["),
         token_stream,
         punct!("]")
-    ) => { |ts| Group::new(Delimiter::Bracket, ::TokenStream::_new_stable(ts)) }
+    ) => { |ts| Group::new(Delimiter::Bracket, ts) }
     |
     delimited!(
         punct!("{"),
         token_stream,
         punct!("}")
-    ) => { |ts| Group::new(Delimiter::Brace, ::TokenStream::_new_stable(ts)) }
+    ) => { |ts| Group::new(Delimiter::Brace, ts) }
 ));
 
 fn symbol_leading_ws(input: Cursor) -> PResult<TokenTree> {
     symbol(skip_whitespace(input))
 }
 
 fn symbol(input: Cursor) -> PResult<TokenTree> {
     let mut chars = input.char_indices();
@@ -1244,17 +1347,18 @@ fn doc_comment(input: Cursor) -> PResult
     let mut stream = vec![
         TokenTree::Ident(::Ident::new("doc", span)),
         TokenTree::Punct(Punct::new('=', Spacing::Alone)),
         TokenTree::Literal(::Literal::string(comment)),
     ];
     for tt in stream.iter_mut() {
         tt.set_span(span);
     }
-    trees.push(Group::new(Delimiter::Bracket, stream.into_iter().collect()).into());
+    let group = Group::new(Delimiter::Bracket, stream.into_iter().collect());
+    trees.push(::Group::_new_stable(group).into());
     for tt in trees.iter_mut() {
         tt.set_span(span);
     }
     Ok((rest, trees))
 }
 
 named!(doc_comment_contents -> (&str, bool), alt!(
     do_parse!(
--- a/third_party/rust/proc-macro2/src/unstable.rs
+++ b/third_party/rust/proc-macro2/src/unstable.rs
@@ -1,43 +1,88 @@
-#![cfg_attr(not(procmacro2_semver_exempt), allow(dead_code))]
+#![cfg_attr(not(super_unstable), allow(dead_code))]
 
 use std::fmt;
 use std::iter;
-use std::panic;
+use std::panic::{self, PanicInfo};
+#[cfg(super_unstable)]
+use std::path::PathBuf;
 use std::str::FromStr;
 
 use proc_macro;
 use stable;
 
-use {Delimiter, Group, Punct, Spacing, TokenTree};
+use {Delimiter, Punct, Spacing, TokenTree};
 
 #[derive(Clone)]
 pub enum TokenStream {
     Nightly(proc_macro::TokenStream),
     Stable(stable::TokenStream),
 }
 
 pub enum LexError {
     Nightly(proc_macro::LexError),
     Stable(stable::LexError),
 }
 
 fn nightly_works() -> bool {
     use std::sync::atomic::*;
+    use std::sync::Once;
+
     static WORKS: AtomicUsize = ATOMIC_USIZE_INIT;
+    static INIT: Once = Once::new();
 
     match WORKS.load(Ordering::SeqCst) {
         1 => return false,
         2 => return true,
         _ => {}
     }
-    let works = panic::catch_unwind(|| proc_macro::Span::call_site()).is_ok();
-    WORKS.store(works as usize + 1, Ordering::SeqCst);
-    works
+
+    // Swap in a null panic hook to avoid printing "thread panicked" to stderr,
+    // then use catch_unwind to determine whether the compiler's proc_macro is
+    // working. When proc-macro2 is used from outside of a procedural macro all
+    // of the proc_macro crate's APIs currently panic.
+    //
+    // The Once is to prevent the possibility of this ordering:
+    //
+    //     thread 1 calls take_hook, gets the user's original hook
+    //     thread 1 calls set_hook with the null hook
+    //     thread 2 calls take_hook, thinks null hook is the original hook
+    //     thread 2 calls set_hook with the null hook
+    //     thread 1 calls set_hook with the actual original hook
+    //     thread 2 calls set_hook with what it thinks is the original hook
+    //
+    // in which the user's hook has been lost.
+    //
+    // There is still a race condition where a panic in a different thread can
+    // happen during the interval that the user's original panic hook is
+    // unregistered such that their hook is incorrectly not called. This is
+    // sufficiently unlikely and less bad than printing panic messages to stderr
+    // on correct use of this crate. Maybe there is a libstd feature request
+    // here. For now, if a user needs to guarantee that this failure mode does
+    // not occur, they need to call e.g. `proc_macro2::Span::call_site()` from
+    // the main thread before launching any other threads.
+    INIT.call_once(|| {
+        type PanicHook = Fn(&PanicInfo) + Sync + Send + 'static;
+
+        let null_hook: Box<PanicHook> = Box::new(|_panic_info| { /* ignore */ });
+        let sanity_check = &*null_hook as *const PanicHook;
+        let original_hook = panic::take_hook();
+        panic::set_hook(null_hook);
+
+        let works = panic::catch_unwind(|| proc_macro::Span::call_site()).is_ok();
+        WORKS.store(works as usize + 1, Ordering::SeqCst);
+
+        let hopefully_null_hook = panic::take_hook();
+        panic::set_hook(original_hook);
+        if sanity_check != &*hopefully_null_hook {
+            panic!("observed race condition in proc_macro2::nightly_works");
+        }
+    });
+    nightly_works()
 }
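The comment above ends with a concrete workaround: probe proc-macro2 once from the main thread so the brief window in which the panic hook is swapped cannot race with other threads. A hedged sketch of that pattern:

```rust
extern crate proc_macro2;

use std::thread;

use proc_macro2::{Span, TokenStream};

fn main() {
    // Run the one-time nightly_works() probe on the main thread, before any
    // other thread has a chance to panic while the hook is swapped out.
    let _ = Span::call_site();

    let workers: Vec<_> = (0..4)
        .map(|i| {
            thread::spawn(move || {
                // Each worker builds its own (non-Send) TokenStream locally.
                let tokens: TokenStream = format!("const N{}: u32 = {};", i, i)
                    .parse()
                    .expect("valid tokens");
                tokens.to_string()
            })
        })
        .collect();

    for worker in workers {
        println!("{}", worker.join().unwrap());
    }
}
```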
 
 fn mismatch() -> ! {
     panic!("stable/nightly mismatch")
 }
 
 impl TokenStream {
     pub fn new() -> TokenStream {
@@ -56,16 +101,23 @@ impl TokenStream {
     }
 
     fn unwrap_nightly(self) -> proc_macro::TokenStream {
         match self {
             TokenStream::Nightly(s) => s,
             TokenStream::Stable(_) => mismatch(),
         }
     }
+
+    fn unwrap_stable(self) -> stable::TokenStream {
+        match self {
+            TokenStream::Nightly(_) => mismatch(),
+            TokenStream::Stable(s) => s,
+        }
+    }
 }
 
 impl FromStr for TokenStream {
     type Err = LexError;
 
     fn from_str(src: &str) -> Result<TokenStream, LexError> {
         if nightly_works() {
             Ok(TokenStream::Nightly(src.parse()?))
@@ -106,28 +158,17 @@ impl From<stable::TokenStream> for Token
 }
 
 impl From<TokenTree> for TokenStream {
     fn from(token: TokenTree) -> TokenStream {
         if !nightly_works() {
             return TokenStream::Stable(token.into());
         }
         let tt: proc_macro::TokenTree = match token {
-            TokenTree::Group(tt) => {
-                let delim = match tt.delimiter() {
-                    Delimiter::Parenthesis => proc_macro::Delimiter::Parenthesis,
-                    Delimiter::Bracket => proc_macro::Delimiter::Bracket,
-                    Delimiter::Brace => proc_macro::Delimiter::Brace,
-                    Delimiter::None => proc_macro::Delimiter::None,
-                };
-                let span = tt.span();
-                let mut group = proc_macro::Group::new(delim, tt.stream.inner.unwrap_nightly());
-                group.set_span(span.inner.unwrap_nightly());
-                group.into()
-            }
+            TokenTree::Group(tt) => tt.inner.unwrap_nightly().into(),
             TokenTree::Punct(tt) => {
                 let spacing = match tt.spacing() {
                     Spacing::Joint => proc_macro::Spacing::Joint,
                     Spacing::Alone => proc_macro::Spacing::Alone,
                 };
                 let mut op = proc_macro::Punct::new(tt.as_char(), spacing);
                 op.set_span(tt.span().inner.unwrap_nightly());
                 op.into()
@@ -151,39 +192,118 @@ impl iter::FromIterator<TokenTree> for T
                 });
             TokenStream::Nightly(trees.collect())
         } else {
             TokenStream::Stable(trees.into_iter().collect())
         }
     }
 }
 
+impl iter::FromIterator<TokenStream> for TokenStream {
+    fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+        let mut streams = streams.into_iter();
+        match streams.next() {
+            #[cfg(slow_extend)]
+            Some(TokenStream::Nightly(first)) => {
+                let stream = iter::once(first).chain(streams.map(|s| {
+                    match s {
+                        TokenStream::Nightly(s) => s,
+                        TokenStream::Stable(_) => mismatch(),
+                    }
+                })).collect();
+                TokenStream::Nightly(stream)
+            }
+            #[cfg(not(slow_extend))]
+            Some(TokenStream::Nightly(mut first)) => {
+                first.extend(streams.map(|s| {
+                    match s {
+                        TokenStream::Nightly(s) => s,
+                        TokenStream::Stable(_) => mismatch(),
+                    }
+                }));
+                TokenStream::Nightly(first)
+            }
+            Some(TokenStream::Stable(mut first)) => {
+                first.extend(streams.map(|s| {
+                    match s {
+                        TokenStream::Stable(s) => s,
+                        TokenStream::Nightly(_) => mismatch(),
+                    }
+                }));
+                TokenStream::Stable(first)
+            }
+            None => TokenStream::new(),
+
+        }
+    }
+}
+
 impl Extend<TokenTree> for TokenStream {
     fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
         match self {
             TokenStream::Nightly(tts) => {
-                *tts = tts
-                    .clone()
-                    .into_iter()
-                    .chain(
+                #[cfg(not(slow_extend))]
+                {
+                    tts.extend(
                         streams
                             .into_iter()
-                            .map(TokenStream::from)
-                            .flat_map(|t| match t {
-                                TokenStream::Nightly(tts) => tts.into_iter(),
-                                _ => panic!(),
-                            }),
-                    )
-                    .collect();
+                            .map(|t| TokenStream::from(t).unwrap_nightly()),
+                    );
+                }
+                #[cfg(slow_extend)]
+                {
+                    *tts = tts
+                        .clone()
+                        .into_iter()
+                        .chain(
+                            streams
+                                .into_iter()
+                                .map(TokenStream::from)
+                                .flat_map(|t| match t {
+                                    TokenStream::Nightly(tts) => tts.into_iter(),
+                                    _ => mismatch(),
+                                }),
+                        ).collect();
+                }
             }
             TokenStream::Stable(tts) => tts.extend(streams),
         }
     }
 }
 
+impl Extend<TokenStream> for TokenStream {
+    fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
+        match self {
+            TokenStream::Nightly(tts) => {
+                #[cfg(not(slow_extend))]
+                {
+                    tts.extend(streams.into_iter().map(|stream| stream.unwrap_nightly()));
+                }
+                #[cfg(slow_extend)]
+                {
+                    *tts = tts
+                        .clone()
+                        .into_iter()
+                        .chain(
+                            streams
+                                .into_iter()
+                                .flat_map(|t| match t {
+                                    TokenStream::Nightly(tts) => tts.into_iter(),
+                                    _ => mismatch(),
+                                }),
+                        ).collect();
+                }
+            }
+            TokenStream::Stable(tts) => {
+                tts.extend(streams.into_iter().map(|stream| stream.unwrap_stable()))
+            }
+        }
+    }
+}
+
 impl fmt::Debug for TokenStream {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         match self {
             TokenStream::Nightly(tts) => tts.fmt(f),
             TokenStream::Stable(tts) => tts.fmt(f),
         }
     }
 }
@@ -230,28 +350,17 @@ impl Iterator for TokenTreeIter {
     type Item = TokenTree;
 
     fn next(&mut self) -> Option<TokenTree> {
         let token = match self {
             TokenTreeIter::Nightly(iter) => iter.next()?,
             TokenTreeIter::Stable(iter) => return iter.next(),
         };
         Some(match token {
-            proc_macro::TokenTree::Group(tt) => {
-                let delim = match tt.delimiter() {
-                    proc_macro::Delimiter::Parenthesis => Delimiter::Parenthesis,
-                    proc_macro::Delimiter::Bracket => Delimiter::Bracket,
-                    proc_macro::Delimiter::Brace => Delimiter::Brace,
-                    proc_macro::Delimiter::None => Delimiter::None,
-                };
-                let stream = ::TokenStream::_new(TokenStream::Nightly(tt.stream()));
-                let mut g = Group::new(delim, stream);
-                g.set_span(::Span::_new(Span::Nightly(tt.span())));
-                g.into()
-            }
+            proc_macro::TokenTree::Group(tt) => ::Group::_new(Group::Nightly(tt)).into(),
             proc_macro::TokenTree::Punct(tt) => {
                 let spacing = match tt.spacing() {
                     proc_macro::Spacing::Joint => Spacing::Joint,
                     proc_macro::Spacing::Alone => Spacing::Alone,
                 };
                 let mut o = Punct::new(tt.as_char(), spacing);
                 o.set_span(::Span::_new(Span::Nightly(tt.span())));
                 o.into()
@@ -270,58 +379,50 @@ impl Iterator for TokenTreeIter {
 }
 
 impl fmt::Debug for TokenTreeIter {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         f.debug_struct("TokenTreeIter").finish()
     }
 }
 
-pub use stable::FileName;
-
-// NOTE: We have to generate our own filename object here because we can't wrap
-// the one provided by proc_macro.
 #[derive(Clone, PartialEq, Eq)]
+#[cfg(super_unstable)]
 pub enum SourceFile {
-    Nightly(proc_macro::SourceFile, FileName),
+    Nightly(proc_macro::SourceFile),
     Stable(stable::SourceFile),
 }
 
+#[cfg(super_unstable)]
 impl SourceFile {
     fn nightly(sf: proc_macro::SourceFile) -> Self {
-        let filename = stable::file_name(sf.path().display().to_string());
-        SourceFile::Nightly(sf, filename)
+        SourceFile::Nightly(sf)
     }
 
     /// Get the path to this source file.
-    pub fn path(&self) -> &FileName {
+    pub fn path(&self) -> PathBuf {
         match self {
-            SourceFile::Nightly(_, f) => f,
+            SourceFile::Nightly(a) => a.path(),
             SourceFile::Stable(a) => a.path(),
         }
     }
 
     pub fn is_real(&self) -> bool {
         match self {
-            SourceFile::Nightly(a, _) => a.is_real(),
+            SourceFile::Nightly(a) => a.is_real(),
             SourceFile::Stable(a) => a.is_real(),
         }
     }
 }
 
-impl AsRef<FileName> for SourceFile {
-    fn as_ref(&self) -> &FileName {
-        self.path()
-    }
-}
-
+#[cfg(super_unstable)]
 impl fmt::Debug for SourceFile {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         match self {
-            SourceFile::Nightly(a, _) => a.fmt(f),
+            SourceFile::Nightly(a) => a.fmt(f),
             SourceFile::Stable(a) => a.fmt(f),
         }
     }
 }
 
 pub struct LineColumn {
     pub line: usize,
     pub column: usize,
@@ -337,93 +438,97 @@ impl Span {
     pub fn call_site() -> Span {
         if nightly_works() {
             Span::Nightly(proc_macro::Span::call_site())
         } else {
             Span::Stable(stable::Span::call_site())
         }
     }
 
+    #[cfg(super_unstable)]
     pub fn def_site() -> Span {
         if nightly_works() {
             Span::Nightly(proc_macro::Span::def_site())
         } else {
             Span::Stable(stable::Span::def_site())
         }
     }
 
+    #[cfg(super_unstable)]
     pub fn resolved_at(&self, other: Span) -> Span {
         match (self, other) {
             (Span::Nightly(a), Span::Nightly(b)) => Span::Nightly(a.resolved_at(b)),
             (Span::Stable(a), Span::Stable(b)) => Span::Stable(a.resolved_at(b)),
             _ => mismatch(),
         }
     }
 
+    #[cfg(super_unstable)]
     pub fn located_at(&self, other: Span) -> Span {
         match (self, other) {
             (Span::Nightly(a), Span::Nightly(b)) => Span::Nightly(a.located_at(b)),
             (Span::Stable(a), Span::Stable(b)) => Span::Stable(a.located_at(b)),
             _ => mismatch(),
         }
     }
 
     pub fn unstable(self) -> proc_macro::Span {
         match self {
             Span::Nightly(s) => s,
             Span::Stable(_) => mismatch(),
         }
     }
 
-    #[cfg(procmacro2_semver_exempt)]
+    #[cfg(super_unstable)]
     pub fn source_file(&self) -> SourceFile {
         match self {
             Span::Nightly(s) => SourceFile::nightly(s.source_file()),
             Span::Stable(s) => SourceFile::Stable(s.source_file()),
         }
     }
 
-    #[cfg(procmacro2_semver_exempt)]
+    #[cfg(super_unstable)]
     pub fn start(&self) -> LineColumn {
         match self {
             Span::Nightly(s) => {
                 let proc_macro::LineColumn { line, column } = s.start();
                 LineColumn { line, column }
             }
             Span::Stable(s) => {
                 let stable::LineColumn { line, column } = s.start();
                 LineColumn { line, column }
             }
         }
     }
 
-    #[cfg(procmacro2_semver_exempt)]
+    #[cfg(super_unstable)]
     pub fn end(&self) -> LineColumn {
         match self {
             Span::Nightly(s) => {
                 let proc_macro::LineColumn { line, column } = s.end();
                 LineColumn { line, column }
             }
             Span::Stable(s) => {
                 let stable::LineColumn { line, column } = s.end();
                 LineColumn { line, column }
             }
         }
     }
 
-    #[cfg(procmacro2_semver_exempt)]
+    #[cfg(super_unstable)]
     pub fn join(&self, other: Span) -> Option<Span> {
         let ret = match (self, other) {
             (Span::Nightly(a), Span::Nightly(b)) => Span::Nightly(a.join(b)?),
             (Span::Stable(a), Span::Stable(b)) => Span::Stable(a.join(b)?),
             _ => return None,
         };
         Some(ret)
     }
 
+    #[cfg(super_unstable)]
     pub fn eq(&self, other: &Span) -> bool {
         match (self, other) {
             (Span::Nightly(a), Span::Nightly(b)) => a.eq(b),
             (Span::Stable(a), Span::Stable(b)) => a.eq(b),
             _ => false,
         }
     }
 
@@ -452,32 +557,148 @@ impl fmt::Debug for Span {
         match self {
             Span::Nightly(s) => s.fmt(f),
             Span::Stable(s) => s.fmt(f),
         }
     }
 }
 
 #[derive(Clone)]
+pub enum Group {
+    Nightly(proc_macro::Group),
+    Stable(stable::Group),
+}
+
+impl Group {
+    pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group {
+        match stream {
+            TokenStream::Nightly(stream) => {
+                let delimiter = match delimiter {
+                    Delimiter::Parenthesis => proc_macro::Delimiter::Parenthesis,
+                    Delimiter::Bracket => proc_macro::Delimiter::Bracket,
+                    Delimiter::Brace => proc_macro::Delimiter::Brace,
+                    Delimiter::None => proc_macro::Delimiter::None,
+                };
+                Group::Nightly(proc_macro::Group::new(delimiter, stream))
+            }
+            TokenStream::Stable(stream) => {
+                Group::Stable(stable::Group::new(delimiter, stream))
+            }
+        }
+    }
+
+    pub fn delimiter(&self) -> Delimiter {
+        match self {
+            Group::Nightly(g) => match g.delimiter() {
+                proc_macro::Delimiter::Parenthesis => Delimiter::Parenthesis,
+                proc_macro::Delimiter::Bracket => Delimiter::Bracket,
+                proc_macro::Delimiter::Brace => Delimiter::Brace,
+                proc_macro::Delimiter::None => Delimiter::None,
+            }
+            Group::Stable(g) => g.delimiter(),
+        }
+    }
+
+    pub fn stream(&self) -> TokenStream {
+        match self {
+            Group::Nightly(g) => TokenStream::Nightly(g.stream()),
+            Group::Stable(g) => TokenStream::Stable(g.stream()),
+        }
+    }
+
+    pub fn span(&self) -> Span {
+        match self {
+            Group::Nightly(g) => Span::Nightly(g.span()),
+            Group::Stable(g) => Span::Stable(g.span()),
+        }
+    }
+
+    #[cfg(super_unstable)]
+    pub fn span_open(&self) -> Span {
+        match self {
+            Group::Nightly(g) => Span::Nightly(g.span_open()),
+            Group::Stable(g) => Span::Stable(g.span_open()),
+        }
+    }
+
+    #[cfg(super_unstable)]
+    pub fn span_close(&self) -> Span {
+        match self {
+            Group::Nightly(g) => Span::Nightly(g.span_close()),
+            Group::Stable(g) => Span::Stable(g.span_close()),
+        }
+    }
+
+    pub fn set_span(&mut self, span: Span) {
+        match (self, span) {
+            (Group::Nightly(g), Span::Nightly(s)) => g.set_span(s),
+            (Group::Stable(g), Span::Stable(s)) => g.set_span(s),
+            _ => mismatch(),
+        }
+    }
+
+    fn unwrap_nightly(self) -> proc_macro::Group {
+        match self {
+            Group::Nightly(g) => g,
+            Group::Stable(_) => mismatch(),
+        }
+    }
+}
+
+impl From<stable::Group> for Group {
+    fn from(g: stable::Group) -> Self {
+        Group::Stable(g)
+    }
+}
+
+impl fmt::Display for Group {
+    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+        match self {
+            Group::Nightly(group) => group.fmt(formatter),
+            Group::Stable(group) => group.fmt(formatter),
+        }
+    }
+}
+
+impl fmt::Debug for Group {
+    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+        match self {
+            Group::Nightly(group) => group.fmt(formatter),
+            Group::Stable(group) => group.fmt(formatter),
+        }
+    }
+}
+
+#[derive(Clone)]
 pub enum Ident {
     Nightly(proc_macro::Ident),
     Stable(stable::Ident),
 }
 
 impl Ident {
     pub fn new(string: &str, span: Span) -> Ident {
         match span {
             Span::Nightly(s) => Ident::Nightly(proc_macro::Ident::new(string, s)),
             Span::Stable(s) => Ident::Stable(stable::Ident::new(string, s)),
         }
     }
 
     pub fn new_raw(string: &str, span: Span) -> Ident {
         match span {
-            Span::Nightly(s) => Ident::Nightly(proc_macro::Ident::new_raw(string, s)),
+            Span::Nightly(s) => {
+                let p: proc_macro::TokenStream = string.parse().unwrap();
+                let ident = match p.into_iter().next() {
+                    Some(proc_macro::TokenTree::Ident(mut i)) => {
+                        i.set_span(s);
+                        i
+                    }
+                    _ => panic!(),
+                };
+                Ident::Nightly(ident)
+            }
             Span::Stable(s) => Ident::Stable(stable::Ident::new_raw(string, s)),
         }
     }
 
     pub fn span(&self) -> Span {
         match self {
             Ident::Nightly(t) => Span::Nightly(t.span()),
             Ident::Stable(t) => Span::Stable(t.span()),
@@ -495,16 +716,39 @@ impl Ident {
     fn unwrap_nightly(self) -> proc_macro::Ident {
         match self {
             Ident::Nightly(s) => s,
             Ident::Stable(_) => mismatch(),
         }
     }
 }
 
+impl PartialEq for Ident {
+    fn eq(&self, other: &Ident) -> bool {
+        match (self, other) {
+            (Ident::Nightly(t), Ident::Nightly(o)) => t.to_string() == o.to_string(),
+            (Ident::Stable(t), Ident::Stable(o)) => t == o,
+            _ => mismatch(),
+        }
+    }
+}
+
+impl<T> PartialEq<T> for Ident
+where
+    T: ?Sized + AsRef<str>,
+{
+    fn eq(&self, other: &T) -> bool {
+        let other = other.as_ref();
+        match self {
+            Ident::Nightly(t) => t.to_string() == other,
+            Ident::Stable(t) => t == other,
+        }
+    }
+}
+
 impl fmt::Display for Ident {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         match self {
             Ident::Nightly(t) => t.fmt(f),
             Ident::Stable(t) => t.fmt(f),
         }
     }
 }
@@ -560,29 +804,41 @@ impl Literal {
         i32_suffixed => i32,
         i64_suffixed => i64,
         isize_suffixed => isize,
 
         f32_suffixed => f32,
         f64_suffixed => f64,
     }
 
+    #[cfg(u128)]
+    suffixed_numbers! {
+        i128_suffixed => i128,
+        u128_suffixed => u128,
+    }
+
     unsuffixed_integers! {
         u8_unsuffixed => u8,
         u16_unsuffixed => u16,
         u32_unsuffixed => u32,
         u64_unsuffixed => u64,
         usize_unsuffixed => usize,
         i8_unsuffixed => i8,
         i16_unsuffixed => i16,
         i32_unsuffixed => i32,
         i64_unsuffixed => i64,
         isize_unsuffixed => isize,
     }
 
+    #[cfg(u128)]
+    unsuffixed_integers! {
+        i128_unsuffixed => i128,
+        u128_unsuffixed => u128,
+    }
+
     pub fn f32_unsuffixed(f: f32) -> Literal {
         if nightly_works() {
             Literal::Nightly(proc_macro::Literal::f32_unsuffixed(f))
         } else {
             Literal::Stable(stable::Literal::f32_unsuffixed(f))
         }
     }
 
new file mode 100644
--- /dev/null
+++ b/third_party/rust/proc-macro2/tests/marker.rs
@@ -0,0 +1,61 @@
+extern crate proc_macro2;
+
+use proc_macro2::*;
+
+macro_rules! assert_impl {
+    ($ty:ident is $($marker:ident) and +) => {
+        #[test]
+        #[allow(non_snake_case)]
+        fn $ty() {
+            fn assert_implemented<T: $($marker +)+>() {}
+            assert_implemented::<$ty>();
+        }
+    };
+
+    ($ty:ident is not $($marker:ident) or +) => {
+        #[test]
+        #[allow(non_snake_case)]
+        fn $ty() {
+            $(
+                {
+                    // Implemented for types that implement $marker.
+                    trait IsNotImplemented {
+                        fn assert_not_implemented() {}
+                    }
+                    impl<T: $marker> IsNotImplemented for T {}
+
+                    // Implemented for the type being tested.
+                    trait IsImplemented {
+                        fn assert_not_implemented() {}
+                    }
+                    impl IsImplemented for $ty {}
+
+                    // If $ty does not implement $marker, there is no ambiguity
+                    // in the following trait method call.
+                    <$ty>::assert_not_implemented();
+                }
+            )+
+        }
+    };
+}
+
+assert_impl!(Delimiter is Send and Sync);
+assert_impl!(Spacing is Send and Sync);
+
+assert_impl!(Group is not Send or Sync);
+assert_impl!(Ident is not Send or Sync);
+assert_impl!(LexError is not Send or Sync);
+assert_impl!(Literal is not Send or Sync);
+assert_impl!(Punct is not Send or Sync);
+assert_impl!(Span is not Send or Sync);
+assert_impl!(TokenStream is not Send or Sync);
+assert_impl!(TokenTree is not Send or Sync);
+
+#[cfg(procmacro2_semver_exempt)]
+mod semver_exempt {
+    use super::*;
+
+    assert_impl!(LineColumn is Send and Sync);
+
+    assert_impl!(SourceFile is not Send or Sync);
+}
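The `is not` arm above relies on a method-resolution trick: a blanket impl supplies `assert_not_implemented` for every type with the marker, and a direct impl supplies it for the type under test, so the call compiles only when the marker is absent. De-sugared for a single made-up type it looks like this (illustrative names only):

```rust
use std::rc::Rc;

// Hypothetical type for this sketch; the Rc<()> field makes it !Send.
#[allow(dead_code)]
struct NotSendToken {
    _marker: Rc<()>,
}

// Implemented for every type that *does* implement Send.
trait IsNotImplemented {
    fn assert_not_implemented() {}
}
impl<T: Send> IsNotImplemented for T {}

// Implemented for the type being tested.
trait IsImplemented {
    fn assert_not_implemented() {}
}
impl IsImplemented for NotSendToken {}

fn main() {
    // Resolves unambiguously only because NotSendToken is !Send; if it were
    // Send, both traits would provide the method and this would not compile.
    <NotSendToken>::assert_not_implemented();
}
```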
--- a/third_party/rust/proc-macro2/tests/test.rs
+++ b/third_party/rust/proc-macro2/tests/test.rs
@@ -198,17 +198,17 @@ testing 123
 fn default_span() {
     let start = Span::call_site().start();
     assert_eq!(start.line, 1);
     assert_eq!(start.column, 0);
     let end = Span::call_site().end();
     assert_eq!(end.line, 1);
     assert_eq!(end.column, 0);
     let source_file = Span::call_site().source_file();
-    assert_eq!(source_file.path().to_string(), "<unspecified>");
+    assert_eq!(source_file.path().to_string_lossy(), "<unspecified>");
     assert!(!source_file.is_real());
 }
 
 #[cfg(procmacro2_semver_exempt)]
 #[test]
 fn span_join() {
     let source1 = "aaa\nbbb"
         .parse::<TokenStream>()
--- a/third_party/rust/quote/.cargo-checksum.json
+++ b/third_party/rust/quote/.cargo-checksum.json
@@ -1,1 +1,1 @@
-{"files":{"Cargo.toml":"33e512b1a2fd40b4d0b5af4ac16ad4f163e0383ba2f4abcd7a7e575e2af3442c","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"c9a75f18b9ab2927829a208fc6aa2cf4e63b8420887ba29cdb265d6619ae82d5","README.md":"61dc7827fb2e29185f0d73594db326bfdbec8393ca7a48429b259711d42e80f9","src/ext.rs":"2e2f71fca8c8580eeed138da42d93dc21fc48d7a8da973ae6d3b616da6a3b0e3","src/lib.rs":"0dedf7935a0203324804cecdf6350245caa24dbdaaf9e168b7ab90b0883ec0c4","src/to_tokens.rs":"10dc32fbe69798408ee1f49ec25770b90eeb6b069552f50cd4e03228b8e85847","tests/test.rs":"90fe0e9a704e628339fe9298f0cb8307e94ebadfe28fffd7b2fc2d94203bc342"},"package":"e44651a0dc4cdd99f71c83b561e221f714912d11af1a4dff0631f923d53af035"}
\ No newline at end of file
+{"files":{"Cargo.toml":"44cf9d3a28be1b21f4247572b6ca4d38dc3fd42fa84c4a4e0e5632aa27bee083","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"c9a75f18b9ab2927829a208fc6aa2cf4e63b8420887ba29cdb265d6619ae82d5","README.md":"b43ef9b9c61628f8de7036271e61322cba23d878d056748e571f4f6cf9fba1b1","src/ext.rs":"a2def0b0f24c822b3f936a781c347e5f6fdc75120f85874c94f5e7eb708168c2","src/lib.rs":"f1ba768690c57252e8135ee474a20bdd513fd0bd0664e9e9b697800163f39d08","src/to_tokens.rs":"86c419a72017846ef33a0acc53caee7312c750c90b3f1d3b58e33f20efcb94f4","tests/conditional/integer128.rs":"d83e21a91efbaa801a82ae499111bdda2d31edaa620e78c0199eba42d69c9ee6","tests/test.rs":"810013d7fd77b738abd0ace90ce2f2f3e219c757652eabab29bc1c0ce4a73b24"},"package":"53fa22a1994bd0f9372d7a816207d8a2677ad0325b073f5c5332760f0fb62b5c"}
\ No newline at end of file
--- a/third_party/rust/quote/Cargo.toml
+++ b/third_party/rust/quote/Cargo.toml
@@ -7,24 +7,27 @@
 #
 # If you believe there's an error in this file please file an
 # issue against the rust-lang/cargo repository. If you're
 # editing this file be aware that the upstream Cargo.toml
 # will likely look very different (and much more reasonable)
 
 [package]
 name = "quote"
-version = "0.6.3"
+version = "0.6.10"
 authors = ["David Tolnay <dtolnay@gmail.com>"]
 include = ["Cargo.toml", "src/**/*.rs", "tests/**/*.rs", "README.md", "LICENSE-APACHE", "LICENSE-MIT"]
 description = "Quasi-quoting macro quote!(...)"
 documentation = "https://docs.rs/quote/"
 readme = "README.md"
 keywords = ["syn"]
+categories = ["development-tools::procedural-macro-helpers"]
 license = "MIT/Apache-2.0"
 repository = "https://github.com/dtolnay/quote"
 [dependencies.proc-macro2]
-version = "0.4.4"
+version = "0.4.21"
 default-features = false
 
 [features]
 default = ["proc-macro"]
 proc-macro = ["proc-macro2/proc-macro"]
+[badges.travis-ci]
+repository = "dtolnay/quote"
--- a/third_party/rust/quote/README.md
+++ b/third_party/rust/quote/README.md
@@ -63,21 +63,21 @@ most Rust primitive types as well as mos
 ```rust
 let tokens = quote! {
     struct SerializeWith #generics #where_clause {
         value: &'a #field_ty,
         phantom: ::std::marker::PhantomData<#item_ty>,
     }
 
     impl #generics serde::Serialize for SerializeWith #generics #where_clause {
-        fn serialize<S>(&self, s: &mut S) -> Result<(), S::Error>
+        fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
         where
             S: serde::Serializer,
         {
-            #path(self.value, s)
+            #path(self.value, serializer)
         }
     }
 
     SerializeWith {
         value: #value,
         phantom: ::std::marker::PhantomData::<#item_ty>,
     }
 };
--- a/third_party/rust/quote/src/ext.rs
+++ b/third_party/rust/quote/src/ext.rs
@@ -1,10 +1,12 @@
 use super::ToTokens;
 
+use std::iter;
+
 use proc_macro2::{TokenStream, TokenTree};
 
 /// TokenStream extension trait with methods for appending tokens.
 ///
 /// This trait is sealed and cannot be implemented outside of the `quote` crate.
 pub trait TokenStreamExt: private::Sealed {
     fn append<U>(&mut self, token: U)
     where
@@ -31,17 +33,17 @@ pub trait TokenStreamExt: private::Seale
 impl TokenStreamExt for TokenStream {
     /// For use by `ToTokens` implementations.
     ///
     /// Appends the token specified to this list of tokens.
     fn append<U>(&mut self, token: U)
     where
         U: Into<TokenTree>,
     {
-        self.extend(Some(token.into()));
+        self.extend(iter::once(token.into()));
     }
 
     /// For use by `ToTokens` implementations.
     ///
     /// ```
     /// # #[macro_use] extern crate quote;
     /// # extern crate proc_macro2;
     /// # use quote::{TokenStreamExt, ToTokens};
--- a/third_party/rust/quote/src/lib.rs
+++ b/third_party/rust/quote/src/lib.rs
@@ -62,21 +62,21 @@
 //! #
 //! let tokens = quote! {
 //!     struct SerializeWith #generics #where_clause {
 //!         value: &'a #field_ty,
 //!         phantom: ::std::marker::PhantomData<#item_ty>,
 //!     }
 //!
 //!     impl #generics serde::Serialize for SerializeWith #generics #where_clause {
-//!         fn serialize<S>(&self, s: &mut S) -> Result<(), S::Error>
+//!         fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
 //!         where
 //!             S: serde::Serializer,
 //!         {
-//!             #path(self.value, s)
+//!             #path(self.value, serializer)
 //!         }
 //!     }
 //!
 //!     SerializeWith {
 //!         value: #value,
 //!         phantom: ::std::marker::PhantomData::<#item_ty>,
 //!     }
 //! };
@@ -87,60 +87,177 @@
 //! ## Recursion limit
 //!
 //! The `quote!` macro relies on deep recursion so some large invocations may
 //! fail with "recursion limit reached" when you compile. If it fails, bump up
 //! the recursion limit by adding `#![recursion_limit = "128"]` to your crate.
 //! An even higher limit may be necessary for especially large invocations.
 
 // Quote types in rustdoc of other crates get linked to here.
-#![doc(html_root_url = "https://docs.rs/quote/0.6.3")]
+#![doc(html_root_url = "https://docs.rs/quote/0.6.10")]
 
-#[cfg(feature = "proc-macro")]
+#[cfg(all(
+    not(all(target_arch = "wasm32", target_os = "unknown")),
+    feature = "proc-macro"
+))]
 extern crate proc_macro;
 extern crate proc_macro2;
 
 mod ext;
 pub use ext::TokenStreamExt;
 
 mod to_tokens;
 pub use to_tokens::ToTokens;
 
 // Not public API.
 #[doc(hidden)]
 pub mod __rt {
-    // Not public API.
+    use ext::TokenStreamExt;
     pub use proc_macro2::*;
 
-    // Not public API.
+    fn is_ident_start(c: u8) -> bool {
+        (b'a' <= c && c <= b'z') || (b'A' <= c && c <= b'Z') || c == b'_'
+    }
+
+    fn is_ident_continue(c: u8) -> bool {
+        (b'a' <= c && c <= b'z')
+            || (b'A' <= c && c <= b'Z')
+            || c == b'_'
+            || (b'0' <= c && c <= b'9')
+    }
+
+    fn is_ident(token: &str) -> bool {
+        if token.bytes().all(|digit| digit >= b'0' && digit <= b'9') {
+            return false;
+        }
+
+        let mut bytes = token.bytes();
+        let first = bytes.next().unwrap();
+        if !is_ident_start(first) {
+            return false;
+        }
+        for ch in bytes {
+            if !is_ident_continue(ch) {
+                return false;
+            }
+        }
+        true
+    }
+
     pub fn parse(tokens: &mut TokenStream, span: Span, s: &str) {
-        let s: TokenStream = s.parse().expect("invalid token stream");
-        tokens.extend(s.into_iter().map(|mut t| {
-            t.set_span(span);
-            t
-        }));
+        if is_ident(s) {
+            // Fast path, since idents are the most common token.
+            tokens.append(Ident::new(s, span));
+        } else {
+            let s: TokenStream = s.parse().expect("invalid token stream");
+            tokens.extend(s.into_iter().map(|mut t| {
+                t.set_span(span);
+                t
+            }));
+        }
+    }
+
+    macro_rules! push_punct {
+        ($name:ident $char1:tt) => {
+            pub fn $name(tokens: &mut TokenStream, span: Span) {
+                let mut punct = Punct::new($char1, Spacing::Alone);
+                punct.set_span(span);
+                tokens.append(punct);
+            }
+        };
+        ($name:ident $char1:tt $char2:tt) => {
+            pub fn $name(tokens: &mut TokenStream, span: Span) {
+                let mut punct = Punct::new($char1, Spacing::Joint);
+                punct.set_span(span);
+                tokens.append(punct);
+                let mut punct = Punct::new($char2, Spacing::Alone);
+                punct.set_span(span);
+                tokens.append(punct);
+            }
+        };
+        ($name:ident $char1:tt $char2:tt $char3:tt) => {
+            pub fn $name(tokens: &mut TokenStream, span: Span) {
+                let mut punct = Punct::new($char1, Spacing::Joint);
+                punct.set_span(span);
+                tokens.append(punct);
+                let mut punct = Punct::new($char2, Spacing::Joint);
+                punct.set_span(span);
+                tokens.append(punct);
+                let mut punct = Punct::new($char3, Spacing::Alone);
+                punct.set_span(span);
+                tokens.append(punct);
+            }
+        };
     }
+
+    push_punct!(push_add '+');
+    push_punct!(push_add_eq '+' '=');
+    push_punct!(push_and '&');
+    push_punct!(push_and_and '&' '&');
+    push_punct!(push_and_eq '&' '=');
+    push_punct!(push_at '@');
+    push_punct!(push_bang '!');
+    push_punct!(push_caret '^');
+    push_punct!(push_caret_eq '^' '=');
+    push_punct!(push_colon ':');
+    push_punct!(push_colon2 ':' ':');
+    push_punct!(push_comma ',');
+    push_punct!(push_div '/');
+    push_punct!(push_div_eq '/' '=');
+    push_punct!(push_dot '.');
+    push_punct!(push_dot2 '.' '.');
+    push_punct!(push_dot3 '.' '.' '.');
+    push_punct!(push_dot_dot_eq '.' '.' '=');
+    push_punct!(push_eq '=');
+    push_punct!(push_eq_eq '=' '=');
+    push_punct!(push_ge '>' '=');
+    push_punct!(push_gt '>');
+    push_punct!(push_le '<' '=');
+    push_punct!(push_lt '<');
+    push_punct!(push_mul_eq '*' '=');
+    push_punct!(push_ne '!' '=');
+    push_punct!(push_or '|');
+    push_punct!(push_or_eq '|' '=');
+    push_punct!(push_or_or '|' '|');
+    push_punct!(push_pound '#');
+    push_punct!(push_question '?');
+    push_punct!(push_rarrow '-' '>');
+    push_punct!(push_larrow '<' '-');
+    push_punct!(push_rem '%');
+    push_punct!(push_rem_eq '%' '=');
+    push_punct!(push_fat_arrow '=' '>');
+    push_punct!(push_semi ';');
+    push_punct!(push_shl '<' '<');
+    push_punct!(push_shl_eq '<' '<' '=');
+    push_punct!(push_shr '>' '>');
+    push_punct!(push_shr_eq '>' '>' '=');
+    push_punct!(push_star '*');
+    push_punct!(push_sub '-');
+    push_punct!(push_sub_eq '-' '=');
 }
 
 /// The whole point.
 ///
 /// Performs variable interpolation against the input and produces it as
 /// [`TokenStream`]. For returning tokens to the compiler in a procedural
 /// macro, use `.into()` to convert it to a `proc_macro::TokenStream`.
 ///
 /// [`TokenStream`]: https://docs.rs/proc-macro2/0.4/proc_macro2/struct.TokenStream.html
 ///
 /// # Interpolation
 ///
 /// Variable interpolation is done with `#var` (similar to `$var` in
 /// `macro_rules!` macros). This grabs the `var` variable that is currently in
-/// scope and inserts it in that location in the output tokens. The variable
-/// must implement the [`ToTokens`] trait.
+/// scope and inserts it in that location in the output tokens. Any type
+/// implementing the [`ToTokens`] trait can be interpolated. This includes most
+/// Rust primitive types as well as most of the syntax tree types from the [Syn]
+/// crate.
 ///
 /// [`ToTokens`]: trait.ToTokens.html
+/// [Syn]: https://github.com/dtolnay/syn
 ///
 /// Repetition is done using `#(...)*` or `#(...),*`, again similar to
 /// `macro_rules!`. This iterates through the elements of any variable
 /// interpolated within the repetition and inserts a copy of the repetition body
 /// for each one. The variables in an interpolation may be anything that
 /// implements `IntoIterator`, including `Vec` or a pre-existing iterator.
 ///
 /// - `#(#var)*` — no separators
@@ -195,17 +312,17 @@ pub mod __rt {
 ///     };
 ///
 ///     // Hand the output tokens back to the compiler.
 ///     expanded.into()
 /// }
 /// #
 /// # fn main() {}
 /// ```
-#[macro_export]
+#[macro_export(local_inner_macros)]
 macro_rules! quote {
     ($($tt:tt)*) => (quote_spanned!($crate::__rt::Span::call_site()=> $($tt)*));
 }
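As the documentation above describes, `#var` interpolates any `ToTokens` value and `#(...)*` / `#(...),*` repeats over anything iterable. A small self-contained sketch of both forms (the `Point` type and field names are made up for illustration):

```rust
#[macro_use]
extern crate quote;
extern crate proc_macro2;

use proc_macro2::{Ident, Span};

fn main() {
    let name = Ident::new("Point", Span::call_site());
    let fields: Vec<Ident> = vec!["x", "y"]
        .into_iter()
        .map(|f| Ident::new(f, Span::call_site()))
        .collect();

    // `#name` interpolates one value; `#(#fields: f64),*` repeats the body
    // once per element of `fields`, separated by commas.
    let tokens = quote! {
        struct #name {
            #(#fields: f64),*
        }
    };
    println!("{}", tokens);
}
```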
 
 /// Same as `quote!`, but applies a given span to all tokens originating within
 /// the macro invocation.
 ///
 /// # Syntax
@@ -304,33 +421,33 @@ macro_rules! quote {
 ///
 /// In this example it is important for the where-clause to be spanned with the
 /// line/column information of the user's input type so that error messages are
 /// placed appropriately by the compiler. But it is also incredibly important
 /// that `Sync` resolves at the macro definition site and not the macro call
 /// site. If we resolve `Sync` at the same span that the user's type is going to
 /// be resolved, then they could bypass our check by defining their own trait
 /// named `Sync` that is implemented for their type.
-#[macro_export]
+#[macro_export(local_inner_macros)]
 macro_rules! quote_spanned {
     ($span:expr=> $($tt:tt)*) => {
         {
             let mut _s = $crate::__rt::TokenStream::new();
             let _span = $span;
             quote_each_token!(_s _span $($tt)*);
             _s
         }
     };
 }
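The hygiene discussion above is easiest to see with a concrete invocation. A minimal sketch, using `Span::call_site()` as a stand-in for a span taken from the user's input so that it runs outside a procedural macro:

```rust
#[macro_use]
extern crate quote;
extern crate proc_macro2;

use proc_macro2::Span;

fn main() {
    // In a real derive, `user_span` would come from the user's type so that
    // compile errors point at their code.
    let user_span = Span::call_site();
    let field_ty = quote!(String);

    // Every token produced by this invocation carries `user_span`.
    let assertion = quote_spanned! {user_span=>
        struct _AssertSync where #field_ty: Sync;
    };
    println!("{}", assertion);
}
```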
 
 // Extract the names of all #metavariables and pass them to the $finish macro.
 //
 // in:   pounded_var_names!(then () a #b c #( #d )* #e)
 // out:  then!(() b d e)
-#[macro_export]
+#[macro_export(local_inner_macros)]
 #[doc(hidden)]
 macro_rules! pounded_var_names {
     ($finish:ident ($($found:ident)*) # ( $($inner:tt)* ) $($rest:tt)*) => {
         pounded_var_names!($finish ($($found)*) $($inner)* $($rest)*)
     };
 
     ($finish:ident ($($found:ident)*) # [ $($inner:tt)* ] $($rest:tt)*) => {
         pounded_var_names!($finish ($($found)*) $($inner)* $($rest)*)
@@ -365,17 +482,17 @@ macro_rules! pounded_var_names {
     };
 }
 
 // in:   nested_tuples_pat!(() a b c d e)
 // out:  ((((a b) c) d) e)
 //
 // in:   nested_tuples_pat!(() a)
 // out:  a
-#[macro_export]
+#[macro_export(local_inner_macros)]
 #[doc(hidden)]
 macro_rules! nested_tuples_pat {
     (()) => {
         &()
     };
 
     (() $first:ident $($rest:ident)*) => {
         nested_tuples_pat!(($first) $($rest)*)
@@ -390,17 +507,17 @@ macro_rules! nested_tuples_pat {
     };
 }
 
 // in:   multi_zip_expr!(() a b c d e)
 // out:  a.into_iter().zip(b).zip(c).zip(d).zip(e)
 //
 // in:   multi_zip_iter!(() a)
 // out:  a
-#[macro_export]
+#[macro_export(local_inner_macros)]
 #[doc(hidden)]
 macro_rules! multi_zip_expr {
     (()) => {
         &[]
     };
 
     (() $single:ident) => {
         $single
@@ -414,17 +531,17 @@ macro_rules! multi_zip_expr {
         multi_zip_expr!(($zips.zip($first)) $($rest)*)
     };
 
     (($done:expr)) => {
         $done
     };
 }
 
-#[macro_export]
+#[macro_export(local_inner_macros)]
 #[doc(hidden)]
 macro_rules! quote_each_token {
     ($tokens:ident $span:ident) => {};
 
     ($tokens:ident $span:ident # ! $($rest:tt)*) => {
         quote_each_token!($tokens $span #);
         quote_each_token!($tokens $span !);
         quote_each_token!($tokens $span $($rest)*);
@@ -449,63 +566,293 @@ macro_rules! quote_each_token {
         quote_each_token!($tokens $span $($rest)*);
     };
 
     ($tokens:ident $span:ident # [ $($inner:tt)* ] $($rest:tt)*) => {
         quote_each_token!($tokens $span #);
         $tokens.extend({
             let mut g = $crate::__rt::Group::new(
                 $crate::__rt::Delimiter::Bracket,
-                quote_spanned!($span=> $($inner)*).into(),
+                quote_spanned!($span=> $($inner)*),
             );
             g.set_span($span);
             Some($crate::__rt::TokenTree::from(g))
         });
         quote_each_token!($tokens $span $($rest)*);
     };
 
     ($tokens:ident $span:ident # $first:ident $($rest:tt)*) => {
         $crate::ToTokens::to_tokens(&$first, &mut $tokens);
         quote_each_token!($tokens $span $($rest)*);
     };
 
     ($tokens:ident $span:ident ( $($first:tt)* ) $($rest:tt)*) => {
         $tokens.extend({
             let mut g = $crate::__rt::Group::new(
                 $crate::__rt::Delimiter::Parenthesis,
-                quote_spanned!($span=> $($first)*).into(),
+                quote_spanned!($span=> $($first)*),
             );
             g.set_span($span);
             Some($crate::__rt::TokenTree::from(g))
         });
         quote_each_token!($tokens $span $($rest)*);
     };
 
     ($tokens:ident $span:ident [ $($first:tt)* ] $($rest:tt)*) => {
         $tokens.extend({
             let mut g = $crate::__rt::Group::new(
                 $crate::__rt::Delimiter::Bracket,
-                quote_spanned!($span=> $($first)*).into(),
+                quote_spanned!($span=> $($first)*),
             );
             g.set_span($span);
             Some($crate::__rt::TokenTree::from(g))
         });
         quote_each_token!($tokens $span $($rest)*);
     };
 
     ($tokens:ident $span:ident { $($first:tt)* } $($rest:tt)*) => {
         $tokens.extend({
             let mut g = $crate::__rt::Group::new(
                 $crate::__rt::Delimiter::Brace,
-                quote_spanned!($span=> $($first)*).into(),
+                quote_spanned!($span=> $($first)*),
             );
             g.set_span($span);
             Some($crate::__rt::TokenTree::from(g))
         });
         quote_each_token!($tokens $span $($rest)*);
     };
 
+    ($tokens:ident $span:ident + $($rest:tt)*) => {
+        $crate::__rt::push_add(&mut $tokens, $span);
+        quote_each_token!($tokens $span $($rest)*);
+    };
+
+    ($tokens:ident $span:ident += $($rest:tt)*) => {
+        $crate::__rt::push_add_eq(&mut $tokens, $span);
+        quote_each_token!($tokens $span $($rest)*);
+    };
+
+    ($tokens:ident $span:ident & $($rest:tt)*) => {
+        $crate::__rt::push_and(&mut $tokens, $span);
+        quote_each_token!($tokens $span $($rest)*);
+    };
+
+    ($tokens:ident $span:ident && $($rest:tt)*) => {
+        $crate::__rt::push_and_and(&mut $tokens, $span);
+        quote_each_token!($tokens $span $($rest)*);
+    };
+
+    ($tokens:ident $span:ident &= $($rest:tt)*) => {
+        $crate::__rt::push_and_eq(&mut $tokens, $span);
+        quote_each_token!($tokens $span $($rest)*);
+    };
+
+    ($tokens:ident $span:ident @ $($rest:tt)*) => {
+        $crate::__rt::push_at(&mut $tokens, $span);
+        quote_each_token!($tokens $span $($rest)*);
+    };
+
+    ($tokens:ident $span:ident ! $($rest:tt)*) => {
+        $crate::__rt::push_bang(&mut $tokens, $span);
+        quote_each_token!($tokens $span $($rest)*);
+    };
+
+    ($tokens:ident $span:ident ^ $($rest:tt)*) => {
+        $crate::__rt::push_caret(&mut $tokens, $span);
+        quote_each_token!($tokens $span $($rest)*);
+    };
+
+    ($tokens:ident $span:ident ^= $($rest:tt)*) => {
+        $crate::__rt::push_caret_eq(&mut $tokens, $span);
+        quote_each_token!($tokens $span $($rest)*);
+    };
+
+    ($tokens:ident $span:ident : $($rest:tt)*) => {
+        $crate::__rt::push_colon(&mut $tokens, $span);
+        quote_each_token!($tokens $span $($rest)*);
+    };
+
+    ($tokens:ident $span:ident :: $($rest:tt)*) => {
+        $crate::__rt::push_colon2(&mut $tokens, $span);
+        quote_each_token!($tokens $span $($rest)*);
+    };
+
+    ($tokens:ident $span:ident , $($rest:tt)*) => {
+        $crate::__rt::push_comma(&mut $tokens, $span);
+        quote_each_token!($tokens $span $($rest)*);
+    };
+
+    ($tokens:ident $span:ident / $($rest:tt)*) => {
+        $crate::__rt::push_div(&mut $tokens, $span);
+        quote_each_token!($tokens $span $($rest)*);
+    };
+
+    ($tokens:ident $span:ident /= $($rest:tt)*) => {
+        $crate::__rt::push_div_eq(&mut $tokens, $span);
+        quote_each_token!($tokens $span $($rest)*);
+    };
+
+    ($tokens:ident $span:ident . $($rest:tt)*) => {
+        $crate::__rt::push_dot(&mut $tokens, $span);
+        quote_each_token!($tokens $span $($rest)*);
+    };
+
+    ($tokens:ident $span:ident .. $($rest:tt)*) => {
+        $crate::__rt::push_dot2(&mut $tokens, $span);
+        quote_each_token!($tokens $span $($rest)*);
+    };
+
+    ($tokens:ident $span:ident ... $($rest:tt)*) => {
+        $crate::__rt::push_dot3(&mut $tokens, $span);
+        quote_each_token!($tokens $span $($rest)*);
+    };
+
+    ($tokens:ident $span:ident ..= $($rest:tt)*) => {
+        $crate::__rt::push_dot_dot_eq(&mut $tokens, $span);
+        quote_each_token!($tokens $span $($rest)*);
+    };
+
+    ($tokens:ident $span:ident = $($rest:tt)*) => {
+        $crate::__rt::push_eq(&mut $tokens, $span);
+        quote_each_token!($tokens $span $($rest)*);
+    };
+
+    ($tokens:ident $span:ident == $($rest:tt)*) => {
+        $crate::__rt::push_eq_eq(&mut $tokens, $span);
+        quote_each_token!($tokens $span $($rest)*);
+    };
+
+    ($tokens:ident $span:ident >= $($rest:tt)*) => {
+        $crate::__rt::push_ge(&mut $tokens, $span);
+        quote_each_token!($tokens $span $($rest)*);
+    };
+
+    ($tokens:ident $span:ident > $($rest:tt)*) => {
+        $crate::__rt::push_gt(&mut $tokens, $span);
+        quote_each_token!($tokens $span $($rest)*);
+    };
+
+    ($tokens:ident $span:ident <= $($rest:tt)*) => {
+        $crate::__rt::push_le(&mut $tokens, $span);
+        quote_each_token!($tokens $span $($rest)*);
+    };
+
+    ($tokens:ident $span:ident < $($rest:tt)*) => {
+        $crate::__rt::push_lt(&mut $tokens, $span);
+        quote_each_token!($tokens $span $($rest)*);
+    };
+
+    ($tokens:ident $span:ident *= $($rest:tt)*) => {
+        $crate::__rt::push_mul_eq(&mut $tokens, $span);
+        quote_each_token!($tokens $span $($rest)*);
+    };
+
+    ($tokens:ident $span:ident != $($rest:tt)*) => {
+        $crate::__rt::push_ne(&mut $tokens, $span);
+        quote_each_token!($tokens $span $($rest)*);
+    };
+
+    ($tokens:ident $span:ident | $($rest:tt)*) => {
+        $crate::__rt::push_or(&mut $tokens, $span);
+        quote_each_token!($tokens $span $($rest)*);
+    };
+
+    ($tokens:ident $span:ident |= $($rest:tt)*) => {
+        $crate::__rt::push_or_eq(&mut $tokens, $span);
+        quote_each_token!($tokens $span $($rest)*);
+    };
+
+    ($tokens:ident $span:ident || $($rest:tt)*) => {
+        $crate::__rt::push_or_or(&mut $tokens, $span);
+        quote_each_token!($tokens $span $($rest)*);
+    };
+
+    ($tokens:ident $span:ident # $($rest:tt)*) => {
+        $crate::__rt::push_pound(&mut $tokens, $span);
+        quote_each_token!($tokens $span $($rest)*);
+    };
+
+    ($tokens:ident $span:ident ? $($rest:tt)*) => {
+        $crate::__rt::push_question(&mut $tokens, $span);
+        quote_each_token!($tokens $span $($rest)*);
+    };
+
+    ($tokens:ident $span:ident -> $($rest:tt)*) => {
+        $crate::__rt::push_rarrow(&mut $tokens, $span);
+        quote_each_token!($tokens $span $($rest)*);
+    };
+
+    ($tokens:ident $span:ident <- $($rest:tt)*) => {
+        $crate::__rt::push_larrow(&mut $tokens, $span);
+        quote_each_token!($tokens $span $($rest)*);
+    };
+
+    ($tokens:ident $span:ident % $($rest:tt)*) => {
+        $crate::__rt::push_rem(&mut $tokens, $span);
+        quote_each_token!($tokens $span $($rest)*);
+    };
+
+    ($tokens:ident $span:ident %= $($rest:tt)*) => {
+        $crate::__rt::push_rem_eq(&mut $tokens, $span);
+        quote_each_token!($tokens $span $($rest)*);
+    };
+
+    ($tokens:ident $span:ident => $($rest:tt)*) => {
+        $crate::__rt::push_fat_arrow(&mut $tokens, $span);
+        quote_each_token!($tokens $span $($rest)*);
+    };
+
+    ($tokens:ident $span:ident ; $($rest:tt)*) => {
+        $crate::__rt::push_semi(&mut $tokens, $span);
+        quote_each_token!($tokens $span $($rest)*);
+    };
+
+    ($tokens:ident $span:ident << $($rest:tt)*) => {
+        $crate::__rt::push_shl(&mut $tokens, $span);
+        quote_each_token!($tokens $span $($rest)*);
+    };
+
+    ($tokens:ident $span:ident <<= $($rest:tt)*) => {
+        $crate::__rt::push_shl_eq(&mut $tokens, $span);
+        quote_each_token!($tokens $span $($rest)*);
+    };
+
+    ($tokens:ident $span:ident >> $($rest:tt)*) => {
+        $crate::__rt::push_shr(&mut $tokens, $span);
+        quote_each_token!($tokens $span $($rest)*);
+    };
+
+    ($tokens:ident $span:ident >>= $($rest:tt)*) => {
+        $crate::__rt::push_shr_eq(&mut $tokens, $span);
+        quote_each_token!($tokens $span $($rest)*);
+    };
+
+    ($tokens:ident $span:ident * $($rest:tt)*) => {
+        $crate::__rt::push_star(&mut $tokens, $span);
+        quote_each_token!($tokens $span $($rest)*);
+    };
+
+    ($tokens:ident $span:ident - $($rest:tt)*) => {
+        $crate::__rt::push_sub(&mut $tokens, $span);
+        quote_each_token!($tokens $span $($rest)*);
+    };
+
+    ($tokens:ident $span:ident -= $($rest:tt)*) => {
+        $crate::__rt::push_sub_eq(&mut $tokens, $span);
+        quote_each_token!($tokens $span $($rest)*);
+    };
+
     ($tokens:ident $span:ident $first:tt $($rest:tt)*) => {
-        // TODO: this seems slow... special case some `:tt` arguments?
-        $crate::__rt::parse(&mut $tokens, $span, stringify!($first));
+        $crate::__rt::parse(&mut $tokens, $span, quote_stringify!($first));
         quote_each_token!($tokens $span $($rest)*);
     };
 }
+
+// Unhygienically invoke whatever `stringify` the caller has in scope i.e. not a
+// local macro. The macros marked `local_inner_macros` above cannot invoke
+// `stringify` directly.
+#[macro_export]
+#[doc(hidden)]
+macro_rules! quote_stringify {
+    ($tt:tt) => {
+        stringify!($tt)
+    };
+}
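// Editorial aside (not part of the patch): `#[macro_export(local_inner_macros)]`
// makes the helper-macro calls above resolve inside the quote crate itself, so
// a 2018-edition caller only needs the top-level macro in scope:
//
//     use quote::quote;          // no `#[macro_use] extern crate quote;` needed
//     let two = quote!(1 + 1);   // internal helper macros still resolve
//
// `stringify!` is a compiler builtin rather than a quote-local macro, which is
// why it has to be reached through the `quote_stringify!` indirection above.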
--- a/third_party/rust/quote/src/to_tokens.rs
+++ b/third_party/rust/quote/src/to_tokens.rs
@@ -1,11 +1,12 @@
 use super::TokenStreamExt;
 
 use std::borrow::Cow;
+use std::iter;
 
 use proc_macro2::{Group, Ident, Literal, Punct, Span, TokenStream, TokenTree};
 
 /// Types that can be interpolated inside a [`quote!`] invocation.
 ///
 /// [`quote!`]: macro.quote.html
 pub trait ToTokens {
     /// Write `self` to the given `TokenStream`.
@@ -72,16 +73,22 @@ pub trait ToTokens {
 }
 
 impl<'a, T: ?Sized + ToTokens> ToTokens for &'a T {
     fn to_tokens(&self, tokens: &mut TokenStream) {
         (**self).to_tokens(tokens);
     }
 }
 
+impl<'a, T: ?Sized + ToTokens> ToTokens for &'a mut T {
+    fn to_tokens(&self, tokens: &mut TokenStream) {
+        (**self).to_tokens(tokens);
+    }
+}
+
 impl<'a, T: ?Sized + ToOwned + ToTokens> ToTokens for Cow<'a, T> {
     fn to_tokens(&self, tokens: &mut TokenStream) {
         (**self).to_tokens(tokens);
     }
 }
 
 impl<T: ?Sized + ToTokens> ToTokens for Box<T> {
     fn to_tokens(&self, tokens: &mut TokenStream) {
@@ -131,16 +138,22 @@ primitive! {
     u32 => u32_suffixed
     u64 => u64_suffixed
     usize => usize_suffixed
 
     f32 => f32_suffixed
     f64 => f64_suffixed
 }
 
+#[cfg(integer128)]
+primitive! {
+    i128 => i128_suffixed
+    u128 => u128_suffixed
+}
+
 impl ToTokens for char {
     fn to_tokens(&self, tokens: &mut TokenStream) {
         tokens.append(Literal::character(*self));
     }
 }
 
 impl ToTokens for bool {
     fn to_tokens(&self, tokens: &mut TokenStream) {
@@ -176,15 +189,15 @@ impl ToTokens for Literal {
 impl ToTokens for TokenTree {
     fn to_tokens(&self, dst: &mut TokenStream) {
         dst.append(self.clone());
     }
 }
 
 impl ToTokens for TokenStream {
     fn to_tokens(&self, dst: &mut TokenStream) {
-        dst.append_all(self.clone().into_iter());
+        dst.extend(iter::once(self.clone()));
     }
 
     fn into_token_stream(self) -> TokenStream {
         self
     }
 }
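// Editorial aside (not part of the patch): the `ToTokens for TokenStream`
// change above presumably leans on `TokenStream: Extend<TokenStream>` so the
// whole stream is spliced in at once rather than re-appended tree by tree:
//
//     let mut dst = proc_macro2::TokenStream::new();
//     dst.extend(std::iter::once(src.clone()));   // splice `src` wholesale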
new file mode 100644
--- /dev/null
+++ b/third_party/rust/quote/tests/conditional/integer128.rs
@@ -0,0 +1,11 @@
+#[test]
+fn test_integer128() {
+    let ii128 = -1i128;
+    let uu128 = 1u128;
+
+    let tokens = quote! {
+        #ii128 #uu128
+    };
+    let expected = "-1i128 1u128";
+    assert_eq!(expected, tokens.to_string());
+}
--- a/third_party/rust/quote/tests/test.rs
+++ b/third_party/rust/quote/tests/test.rs
@@ -4,16 +4,21 @@ use std::borrow::Cow;
 
 extern crate proc_macro2;
 #[macro_use]
 extern crate quote;
 
 use proc_macro2::{Ident, Span, TokenStream};
 use quote::TokenStreamExt;
 
+mod conditional {
+    #[cfg(integer128)]
+    mod integer128;
+}
+
 struct X;
 
 impl quote::ToTokens for X {
     fn to_tokens(&self, tokens: &mut TokenStream) {
         tokens.append(Ident::new("X", Span::call_site()));
     }
 }
 
--- a/third_party/rust/syn/.cargo-checksum.json
+++ b/third_party/rust/syn/.cargo-checksum.json
@@ -1,1 +1,1 @@
-{"files":{"Cargo.toml":"cc823b5150d40948fb45042c1987dd8ede59ed6aa64003f622c4b901e319a218","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"f033c371492a4769d377a8bf1a139adc7bf94ea00595b867a3e234eeab994c8c","README.md":"9ca76bd182b81395755fbf0a4f1af086a46a90848f45b75210517004e751afe2","src/attr.rs":"ac161a3011bcd3381eb143e0954851fcff3e82e47075e6c9f6191ccf8ee67fee","src/buffer.rs":"a82b47bb12ec0de4159a7677712db5f4f54c145eb61aa88a1696d32cf143d50e","src/data.rs":"c34df5d623fed7a52be01fc625a4502f6ad97dc8feb25c51f7d57029dbdd91dd","src/derive.rs":"7a2246e8ee03fcbea040363d0b943daac696667ba799218231307c1b7a96aeed","src/error.rs":"93a265306ee6d265feeccb65b64e429ec6b4bb29d825cb52a319ea86e5cc1c11","src/export.rs":"39cc2468a141fb8229c9189dfe99875c278850714b1996e683a5b4cbc8aa3457","src/expr.rs":"91bab694502cebc56bdcd45219f1cf317ff857320d855e595ec2acc0f9ab781a","src/ext.rs":"4902ffc7dc25a1bb5813d5292a3df7cbf72ebad79def578c7cd231cf67e1785c","src/file.rs":"ebd350b5ff548cdbb21a0fadd5572575a216a1b0caef36dd46477ca324c7af6f","src/gen/fold.rs":"bfca5243b4694cc6a9b0d1f34ca53fa90387325fd4ee6bce024adb3ca42f4472","src/gen/visit.rs":"4d13b239db7c38a38f8ce5dfe827317ec4d35df83dd65ad5a350a3c882632bfd","src/gen/visit_mut.rs":"f5c8aea5907084c2425cdb613a07def41735c764022f7549876c9fa4c7170d5a","src/gen_helper.rs":"d128fbd24fadfc5634976bdb9188c649f9905718c9c987a2839c3e6134b155a2","src/generics.rs":"3b07bcfa970714c38c2b524da765e5b0465832d91aba9ed40b99b4aa7a7ea9c4","src/group.rs":"7faa0b3701b6e597a5766e316733cd4d88ecc2a24b54b233f33e28c23a7cbad8","src/ident.rs":"61534c48949ebfa03e948874ef64174e1c8111c3b953edd58f79748fe9e00507","src/item.rs":"897cfd8ea6f2ff1a664e2a5db84073f5ed1480318d14236c636c94608016b27c","src/keyword.rs":"0a8fd45d065c56532065f97fb097136b6f1a8efc393a0946e6a95217877616a9","src/lib.rs":"79664eb2d3570c2851c0d6e5dde4e9764619e14c5f107ff07d1416d2a15f8c1a","src/lifetime.rs":"3174a81cea3eef0ec1555900b96b1641d6d3ed3609bc17d72b02a1495377ac35","src/lit.rs":"661bf3ad4b49bc74dc808c1f1d584551689145d3c5fbadfcc28d157d5087981b","src/lookahead.rs":"07ce6d6915f24a01f86a486135eb841a3a01424148fe49ea797c5ffacf0c7673","src/mac.rs":"8a7efbdc9498612844c88660312117c401685bf300da7e91bef7effde3026019","src/macros.rs":"03d33128d9c2d2664cc2d3d158518c60cadad8b340945c6eb292fb7bd92e1b41","src/op.rs":"83bbe2199946abbf1d4a0bc6eb0d97285439f77e8e02646b8e3b707485e4301e","src/parse.rs":"248cfe3d826cf192efd5fef1b52db5075d3739e045f42157908596fc039a741b","src/parse_quote.rs":"e6f8101568d8430d6793183dfedfee01c2c479274ff7318dd221060ac140a477","src/path.rs":"e666c702d46e2849cdc37fddc2e1950659cd17611ebf988102f2bf0af72b6bd1","src/print.rs":"7ebb68123898f2ebbae12abf028747c05bea7b08f1e96b17164f1dcebdab7355","src/punctuated.rs":"01539dcb51c75e0fe0a4cdfb7716a909ce1bfd0767370c04043159a0a0dec154","src/span.rs":"748c51c6feb223c26d3b1701f5bb98aee823666c775c98106cfa24fe29d8cec1","src/spanned.rs":"0d9bdef967d339deae5e2229f9593f48b15af67cf1f79358aa464cacd173f32c","src/synom.rs":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855","src/token.rs":"40c406da738c52e52944585acc5ff36b75edb905b78cfb2bd74626663edb2c99","src/tt.rs":"6ff2559d5c5fcaa73e914cd0a4a5984ab21de7ea334f1c1498e73059d2d1f7d1","src/ty.rs":"503e0ae7da33ecd6d733aa3d14a45ced20696b9bdd52d3f9ef23fd31ec5651da"},"package":"455a6ec9b368f8c479b0ae5494d13b22dc00990d2f00d68c9dc6a2dc4f17f210"}
\ No newline at end of file
+{"files":{"Cargo.toml":"fdac1ee690fa3d33b906be9d09a8551741db11ddba7f755cb75c8d74fd2d918b","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"3f3d105c0f1bae3bdb5ed3cc32a8e5a02f3de6f62a9f17f5ba03af3f813d0881","src/attr.rs":"9210a8dc5fea0ee3004b14a9e2923142932c53986b56685d62d9aa115fe093b0","src/buffer.rs":"fac77febe022ab4982078c73ca502f9d698a402b3eca0f8e9c7a2796af01e5f4","src/data.rs":"54ee54c3c650bc5e200d4bea947d6e89093a39f2444cd43e8861f5852975e0bc","src/derive.rs":"eb041b47a73bace73c2872cd9a8e190de7e2b3b57cd57213770f088ec19ab3c6","src/error.rs":"0bcf09b0892c6d5f5f31f7f758866ded1e47463833cad60694329348ac1fb44a","src/export.rs":"1f7e017dac9426e91c796445e733c390c408c808ba9c21cb7df7799b67352fde","src/expr.rs":"3fe98f69af9be770d4d549d869d35136f3dea66452e4cb4b9e387982e3b7aea2","src/ext.rs":"1881179e634681cdd472ecac502192b5a5a7759056d1c49497706749fdab1bdf","src/file.rs":"abb9f5e71a8a6b52649c15da21d3f99e25a727d87c2f87d2744ac3970c1d683f","src/gen/fold.rs":"7f7ab907e3c17b503a72707e2f8d9fc29d9139269d810ea9b6511af9de7d3080","src/gen/visit.rs":"0a4543ac9f82d8ab7ccf02c55e8325ff9b859e36ea08d3e187a4836f470eef1c","src/gen/visit_mut.rs":"66441522f544056cd464740e8ba614688a5f417857c649550eeba2b589ef6096","src/gen_helper.rs":"644b1d31987919c0239891d8f90c09f3bf541a71fb85e744438e4814cc6dff89","src/generics.rs":"6ee5bba532b95f7de7c6bbe8caadabc6a71c45e7f8d7636d823075ff27f28937","src/group.rs":"03487f75d0abd302f06e50eb09d14ab83fb60c67e1f2602be53ca3f28a833b90","src/ident.rs":"61534c48949ebfa03e948874ef64174e1c8111c3b953edd58f79748fe9e00507","src/item.rs":"6799adb332bedaa68c3801055e71a5665cc7b9c4ba53960f6b91487408e7e10c","src/keyword.rs":"7dde0b4a0d70527921be16f50f8d88e4b5ad7e5fd70e9badd2bb351fd796dfb8","src/lib.rs":"254744712dae7c30ce7f97ce4685fc6256cf125e7872397d3dd9e32632273262","src/lifetime.rs":"7912a4c77ee805e912fb28c7f434836ea82540263d549877cd5edfbe32d1bf95","src/lit.rs":"b6aef4f2787201edbeb85529fc0c333bd8083d697a08f28c812b6b2f765939f5","src/lookahead.rs":"5b3c55ae8c1b1d0ed813c296dc6fa586379a99e7792a3cb0d634ae6ca74f54b5","src/mac.rs":"a91623ed9c1de7b18ef752db79a242002e95156497a52a1790a75069915d22ee","src/macros.rs":"2f91e07a1aec4b385986c0a0f66274e8de1c1aa81f95d398a5cd364b3c451bb4","src/op.rs":"01edb1e07b6d60b266797ca4b30788b0a511452228e04073a11f0b61f106a0e7","src/parse.rs":"d907b9822943bafbcb1e005f09a145e46c162e7702fce703b57f9b7ccbdf85a2","src/parse_macro_input.rs":"8df7b4c1b361171f3fefb0490dec570ad29c024c04e35184b296725f97f2002c","src/parse_quote.rs":"d5e613fbba06900d882f2aaa042f10c1bee1b1dffaa1d9ee9a73d1e504a08fad","src/path.rs":"d6a319db75e4b34783912aed0ddfad92cdec05798d8d378f2f23231437cab3e1","src/print.rs":"7ebb68123898f2ebbae12abf028747c05bea7b08f1e96b17164f1dcebdab7355","src/punctuated.rs":"5ad6885e602cb1c79b49f11e1c739bdb7c33ecfa1ca9c709d711b0778ae48085","src/span.rs":"748c51c6feb223c26d3b1701f5bb98aee823666c775c98106cfa24fe29d8cec1","src/spanned.rs":"83b4ab1e2138ac9340eaa8234ad1d9f7468b450ddf3a852e574cac18e4f766b8","src/synom.rs":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855","src/thread.rs":"798dd0a6cca7807f1d55c1f79cec967314a75c4e8e2cfdb5644499c22618b307","src/token.rs":"8fa7ffb89da61d5187ab0ff40de7b3b1135ace6cf770c8d84fce8371854698a9","src/tt.rs":"b3d99cbd68cd50749f26f4afa138e6366d327099ed566b30c315ccb58fa26ded","src/ty.rs":"4ac9d1b84f9bf269516348e1b923b1c8e3f7562b98ec7ef66174c31fffb8dce5"},"package":"734ecc29cd36e8123850d9bf21dfd62ef8300aaa8f879aabaa8997
21808be37c"}
\ No newline at end of file
--- a/third_party/rust/syn/Cargo.toml
+++ b/third_party/rust/syn/Cargo.toml
@@ -7,17 +7,17 @@
 #
 # If you believe there's an error in this file please file an
 # issue against the rust-lang/cargo repository. If you're
 # editing this file be aware that the upstream Cargo.toml
 # will likely look very different (and much more reasonable)
 
 [package]
 name = "syn"
-version = "0.15.7"
+version = "0.15.24"
 authors = ["David Tolnay <dtolnay@gmail.com>"]
 include = ["/Cargo.toml", "/src/**/*.rs", "/README.md", "/LICENSE-APACHE", "/LICENSE-MIT"]
 description = "Parser for Rust source code"
 documentation = "https://docs.rs/syn"
 readme = "README.md"
 categories = ["development-tools::procedural-macro-helpers"]
 license = "MIT/Apache-2.0"
 repository = "https://github.com/dtolnay/syn"
--- a/third_party/rust/syn/LICENSE-MIT
+++ b/third_party/rust/syn/LICENSE-MIT
@@ -1,10 +1,8 @@
-Copyright (c) 2018 Syn Developers
-
 Permission is hereby granted, free of charge, to any
 person obtaining a copy of this software and associated
 documentation files (the "Software"), to deal in the
 Software without restriction, including without
 limitation the rights to use, copy, modify, merge,
 publish, distribute, sublicense, and/or sell copies of
 the Software, and to permit persons to whom the Software
 is furnished to do so, subject to the following
--- a/third_party/rust/syn/README.md
+++ b/third_party/rust/syn/README.md
@@ -76,22 +76,19 @@ syn = "0.15"
 quote = "0.6"
 
 [lib]
 proc-macro = true
 ```
 
 ```rust
 extern crate proc_macro;
-extern crate syn;
-
-#[macro_use]
-extern crate quote;
 
 use proc_macro::TokenStream;
+use quote::quote;
 use syn::{parse_macro_input, DeriveInput};
 
 #[proc_macro_derive(MyMacro)]
 pub fn my_macro(input: TokenStream) -> TokenStream {
     // Parse the input tokens into a syntax tree
     let input = parse_macro_input!(input as DeriveInput);
 
     // Build the output, possibly using quasi-quotation
@@ -100,17 +97,17 @@ pub fn my_macro(input: TokenStream) -> T
     };
 
     // Hand the output tokens back to the compiler
     TokenStream::from(expanded)
 }
 ```
 
 The [`heapsize`] example directory shows a complete working Macros 1.1
-implementation of a custom derive. It works on any Rust compiler \>=1.15.0. The
+implementation of a custom derive. It works on any Rust compiler 1.15+. The
 example derives a `HeapSize` trait which computes an estimate of the amount of
 heap memory owned by a value.
 
 [`heapsize`]: examples/heapsize
 
 ```rust
 pub trait HeapSize {
     /// Total number of bytes of heap memory owned by `self`.
@@ -128,49 +125,30 @@ struct Demo<'a, T: ?Sized> {
     b: u8,
     c: &'a str,
     d: String,
 }
 ```
 
 ## Spans and error reporting
 
-The [`heapsize2`] example directory is an extension of the `heapsize` example
-that demonstrates some of the hygiene and error reporting properties of Macros
-2.0. This example currently requires a nightly Rust compiler \>=1.24.0-nightly
-but we are working to stabilize all of the APIs involved.
-
-[`heapsize2`]: examples/heapsize2
-
 The token-based procedural macro API provides great control over where the
 compiler's error messages are displayed in user code. Consider the error the
 user sees if one of their field types does not implement `HeapSize`.
 
 ```rust
 #[derive(HeapSize)]
 struct Broken {
     ok: String,
     bad: std::thread::Thread,
 }
 ```
 
-In the Macros 1.1 string-based procedural macro world, the resulting error would
-point unhelpfully to the invocation of the derive macro and not to the actual
-problematic field.
-
-```
-error[E0599]: no method named `heap_size_of_children` found for type `std::thread::Thread` in the current scope
- --> src/main.rs:4:10
-  |
-4 | #[derive(HeapSize)]
-  |          ^^^^^^^^
-```
-
 By tracking span information all the way through the expansion of a procedural
-macro as shown in the `heapsize2` example, token-based macros in Syn are able to
+macro as shown in the `heapsize` example, token-based macros in Syn are able to
 trigger errors that directly pinpoint the source of the problem.
 
 ```
 error[E0277]: the trait bound `std::thread::Thread: HeapSize` is not satisfied
  --> src/main.rs:7:5
   |
 7 |     bad: std::thread::Thread,
   |     ^^^^^^^^^^^^^^^^^^^^^^^^ the trait `HeapSize` is not implemented for `std::thread::Thread`
@@ -241,34 +219,31 @@ available.
 - **`fold`** — Trait for transforming an owned syntax tree.
 - **`clone-impls`** *(enabled by default)* — Clone impls for all syntax tree
   types.
 - **`extra-traits`** — Debug, Eq, PartialEq, Hash impls for all syntax tree
   types.
 - **`proc-macro`** *(enabled by default)* — Runtime dependency on the dynamic
   library libproc_macro from rustc toolchain.
 
-## Nightly features
+## Proc macro shim
 
-By default Syn uses the [`proc-macro2`] crate to emulate the nightly compiler's
-procedural macro API in a stable way that works all the way back to Rust 1.15.0.
-This shim makes it possible to write code without regard for whether the current
-compiler version supports the features we use.
-
-[`proc-macro2`]: https://github.com/alexcrichton/proc-macro2
+Syn uses the [proc-macro2] crate to emulate the compiler's procedural macro API
+in a stable way that works all the way back to Rust 1.15.0. This shim makes it
+possible to write code without regard for whether the current compiler version
+supports the features we use.
 
-On a nightly compiler, to eliminate the stable shim and use the compiler's
-`proc-macro` directly, add `proc-macro2` to your Cargo.toml and set its
-`"nightly"` feature which bypasses the stable shim.
+In general all of your code should be written against proc-macro2 rather than
+proc-macro. The one exception is in the signatures of procedural macro entry
+points, which are required by the language to use `proc_macro::TokenStream`.
 
-```toml
-[dependencies]
-syn = "0.15"
-proc-macro2 = { version = "0.4", features = ["nightly"] }
-```
+The proc-macro2 crate will automatically detect and use the compiler's data
+structures on sufficiently new compilers.
+
+[proc-macro2]: https://github.com/alexcrichton/proc-macro2
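As a rough sketch of that boundary (not from the upstream README; `MyMacro2` and
`expand` are placeholders), only the entry point signature touches
`proc_macro::TokenStream`:

```rust
extern crate proc_macro;

use proc_macro::TokenStream;

#[proc_macro_derive(MyMacro2)]
pub fn my_macro2(input: TokenStream) -> TokenStream {
    // Cross into proc-macro2 territory at the boundary...
    let input2 = proc_macro2::TokenStream::from(input);
    let output = expand(input2);
    // ...and hand the compiler's own type back at the end.
    TokenStream::from(output)
}

fn expand(input: proc_macro2::TokenStream) -> proc_macro2::TokenStream {
    // Real logic would parse `input` with syn and build new tokens with quote.
    input
}
```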
 
 ## License
 
 Licensed under either of
 
  * Apache License, Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)
  * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)
 
--- a/third_party/rust/syn/src/attr.rs
+++ b/third_party/rust/syn/src/attr.rs
@@ -1,22 +1,16 @@
-// Copyright 2018 Syn Developers
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
 use super::*;
 use punctuated::Punctuated;
 
 use std::iter;
 
-use proc_macro2::{Delimiter, Spacing, TokenStream, TokenTree};
+use proc_macro2::TokenStream;
+#[cfg(not(feature = "parsing"))]
+use proc_macro2::{Delimiter, Spacing, TokenTree};
 
 #[cfg(feature = "parsing")]
 use parse::{ParseStream, Result};
 #[cfg(feature = "extra-traits")]
 use std::hash::{Hash, Hasher};
 #[cfg(feature = "extra-traits")]
 use tt::TokenStreamHelper;
 
@@ -50,42 +44,39 @@ ast_struct! {
     /// tokens.
     ///
     /// ```text
     /// #[derive(Copy)]      #[crate::precondition x < 5]
     ///   ^^^^^^~~~~~~         ^^^^^^^^^^^^^^^^^^^ ~~~~~
     ///    path  tts                   path         tts
     /// ```
     ///
-    /// Use the [`interpret_meta`] method to try parsing the tokens of an
-    /// attribute into the structured representation that is used by convention
-    /// across most Rust libraries.
+    /// Use the [`parse_meta`] method to try parsing the tokens of an attribute
+    /// into the structured representation that is used by convention across
+    /// most Rust libraries.
     ///
-    /// [`interpret_meta`]: #method.interpret_meta
+    /// [`parse_meta`]: #method.parse_meta
     ///
     /// # Parsing
     ///
     /// This type does not implement the [`Parse`] trait and thus cannot be
     /// parsed directly by [`ParseStream::parse`]. Instead use
     /// [`ParseStream::call`] with one of the two parser functions
     /// [`Attribute::parse_outer`] or [`Attribute::parse_inner`] depending on
     /// which you intend to parse.
     ///
     /// [`Parse`]: parse/trait.Parse.html
     /// [`ParseStream::parse`]: parse/struct.ParseBuffer.html#method.parse
     /// [`ParseStream::call`]: parse/struct.ParseBuffer.html#method.call
     /// [`Attribute::parse_outer`]: #method.parse_outer
     /// [`Attribute::parse_inner`]: #method.parse_inner
     ///
-    /// ```
-    /// #[macro_use]
-    /// extern crate syn;
-    ///
-    /// use syn::{Attribute, Ident};
-    /// use syn::parse::{Parse, ParseStream, Result};
+    /// ```edition2018
+    /// use syn::{Attribute, Ident, Result, Token};
+    /// use syn::parse::{Parse, ParseStream};
     ///
     /// // Parses a unit struct with attributes.
     /// //
     /// //     #[path = "s.tmpl"]
     /// //     struct S;
     /// struct UnitStruct {
     ///     attrs: Vec<Attribute>,
     ///     struct_token: Token![struct],
@@ -98,18 +89,16 @@ ast_struct! {
     ///         Ok(UnitStruct {
     ///             attrs: input.call(Attribute::parse_outer)?,
     ///             struct_token: input.parse()?,
     ///             name: input.parse()?,
     ///             semi_token: input.parse()?,
     ///         })
     ///     }
     /// }
-    /// #
-    /// # fn main() {}
     /// ```
     pub struct Attribute #manual_extra_traits {
         pub pound_token: Token![#],
         pub style: AttrStyle,
         pub bracket_token: token::Bracket,
         pub path: Path,
         pub tts: TokenStream,
     }
@@ -141,42 +130,75 @@ impl Hash for Attribute {
         self.path.hash(state);
         TokenStreamHelper(&self.tts).hash(state);
     }
 }
 
 impl Attribute {
     /// Parses the tokens after the path as a [`Meta`](enum.Meta.html) if
     /// possible.
+    ///
+    /// Deprecated; use `parse_meta` instead.
+    #[doc(hidden)]
     pub fn interpret_meta(&self) -> Option<Meta> {
-        let name = if self.path.segments.len() == 1 {
-            &self.path.segments.first().unwrap().value().ident
-        } else {
-            return None;
-        };
-
-        if self.tts.is_empty() {
-            return Some(Meta::Word(name.clone()));
+        #[cfg(feature = "parsing")]
+        {
+            self.parse_meta().ok()
         }
 
-        let tts = self.tts.clone().into_iter().collect::<Vec<_>>();
+        #[cfg(not(feature = "parsing"))]
+        {
+            let name = if self.path.segments.len() == 1 {
+                &self.path.segments.first().unwrap().value().ident
+            } else {
+                return None;
+            };
+
+            if self.tts.is_empty() {
+                return Some(Meta::Word(name.clone()));
+            }
+
+            let tts = self.tts.clone().into_iter().collect::<Vec<_>>();
 
-        if tts.len() == 1 {
-            if let Some(meta) = Attribute::extract_meta_list(name.clone(), &tts[0]) {
-                return Some(meta);
+            if tts.len() == 1 {
+                if let Some(meta) = Attribute::extract_meta_list(name.clone(), &tts[0]) {
+                    return Some(meta);
+                }
+            }
+
+            if tts.len() == 2 {
+                if let Some(meta) = Attribute::extract_name_value(name.clone(), &tts[0], &tts[1]) {
+                    return Some(meta);
+                }
             }
+
+            None
+        }
+    }
+
+    /// Parses the tokens after the path as a [`Meta`](enum.Meta.html) if
+    /// possible.
+    #[cfg(feature = "parsing")]
+    pub fn parse_meta(&self) -> Result<Meta> {
+        if let Some(ref colon) = self.path.leading_colon {
+            return Err(Error::new(colon.spans[0], "expected meta identifier"));
         }
 
-        if tts.len() == 2 {
-            if let Some(meta) = Attribute::extract_name_value(name.clone(), &tts[0], &tts[1]) {
-                return Some(meta);
-            }
+        let first_segment = self
+            .path
+            .segments
+            .first()
+            .expect("paths have at least one segment");
+        if let Some(colon) = first_segment.punct() {
+            return Err(Error::new(colon.spans[0], "expected meta value"));
         }
+        let ident = first_segment.value().ident.clone();
 
-        None
+        let parser = |input: ParseStream| parsing::parse_meta_after_ident(ident, input);
+        parse::Parser::parse2(parser, self.tts.clone())
     }
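    // Editorial example (not part of the patch); `input` is assumed to be an
    // already-parsed `DeriveInput`:
    //
    //     for attr in &input.attrs {
    //         match attr.parse_meta()? {
    //             Meta::Word(ident) => { /* #[foo] */ }
    //             Meta::List(list) => { /* #[foo(bar, baz)] */ }
    //             Meta::NameValue(nv) => { /* #[foo = "bar"] */ }
    //         }
    //     }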
 
     /// Parses zero or more outer attributes from the stream.
     ///
     /// *This function is available if Syn is built with the `"parsing"`
     /// feature.*
     #[cfg(feature = "parsing")]
     pub fn parse_outer(input: ParseStream) -> Result<Vec<Self>> {
@@ -195,16 +217,17 @@ impl Attribute {
     pub fn parse_inner(input: ParseStream) -> Result<Vec<Self>> {
         let mut attrs = Vec::new();
         while input.peek(Token![#]) && input.peek2(Token![!]) {
             attrs.push(input.call(parsing::single_parse_inner)?);
         }
         Ok(attrs)
     }
 
+    #[cfg(not(feature = "parsing"))]
     fn extract_meta_list(ident: Ident, tt: &TokenTree) -> Option<Meta> {
         let g = match *tt {
             TokenTree::Group(ref g) => g,
             _ => return None,
         };
         if g.delimiter() != Delimiter::Parenthesis {
             return None;
         }
@@ -215,16 +238,17 @@ impl Attribute {
         };
         Some(Meta::List(MetaList {
             paren_token: token::Paren(g.span()),
             ident: ident,
             nested: nested,
         }))
     }
 
+    #[cfg(not(feature = "parsing"))]
     fn extract_name_value(ident: Ident, a: &TokenTree, b: &TokenTree) -> Option<Meta> {
         let a = match *a {
             TokenTree::Punct(ref o) => o,
             _ => return None,
         };
         if a.spacing() != Spacing::Alone {
             return None;
         }
@@ -251,16 +275,17 @@ impl Attribute {
                 })),
                 _ => None,
             },
             _ => None,
         }
     }
 }
 
+#[cfg(not(feature = "parsing"))]
 fn nested_meta_item_from_tokens(tts: &[TokenTree]) -> Option<(NestedMeta, &[TokenTree])> {
     assert!(!tts.is_empty());
 
     match tts[0] {
         TokenTree::Literal(ref lit) => {
             if lit.to_string().starts_with('/') {
                 None
             } else {
@@ -292,16 +317,17 @@ fn nested_meta_item_from_tokens(tts: &[T
             };
             Some((nested_meta, &tts[1..]))
         }
 
         _ => None,
     }
 }
 
+#[cfg(not(feature = "parsing"))]
 fn list_of_nested_meta_items_from_tokens(
     mut tts: &[TokenTree],
 ) -> Option<Punctuated<NestedMeta, Token![,]>> {
     let mut nested_meta_items = Punctuated::new();
     let mut first = true;
 
     while !tts.is_empty() {
         let prev_comma = if first {
@@ -433,55 +459,95 @@ ast_enum_of_structs! {
         /// would be a nested `Meta::Word`.
         pub Meta(Meta),
 
         /// A Rust literal, like the `"new_name"` in `#[rename("new_name")]`.
         pub Literal(Lit),
     }
 }
 
+/// Conventional argument type associated with an invocation of an attribute
+/// macro.
+///
+/// For example if we are developing an attribute macro that is intended to be
+/// invoked on function items as follows:
+///
+/// ```edition2018
+/// # const IGNORE: &str = stringify! {
+/// #[my_attribute(path = "/v1/refresh")]
+/// # };
+/// pub fn refresh() {
+///     /* ... */
+/// }
+/// ```
+///
+/// The implementation of this macro would want to parse its attribute arguments
+/// as type `AttributeArgs`.
+///
+/// ```edition2018
+/// extern crate proc_macro;
+///
+/// use proc_macro::TokenStream;
+/// use syn::{parse_macro_input, AttributeArgs, ItemFn};
+///
+/// # const IGNORE: &str = stringify! {
+/// #[proc_macro_attribute]
+/// # };
+/// pub fn my_attribute(args: TokenStream, input: TokenStream) -> TokenStream {
+///     let args = parse_macro_input!(args as AttributeArgs);
+///     let input = parse_macro_input!(input as ItemFn);
+///
+///     /* ... */
+/// #   "".parse().unwrap()
+/// }
+/// ```
+pub type AttributeArgs = Vec<NestedMeta>;
+
 pub trait FilterAttrs<'a> {
     type Ret: Iterator<Item = &'a Attribute>;
 
     fn outer(self) -> Self::Ret;
     fn inner(self) -> Self::Ret;
 }
 
 impl<'a, T> FilterAttrs<'a> for T
 where
     T: IntoIterator<Item = &'a Attribute>,
 {
     type Ret = iter::Filter<T::IntoIter, fn(&&Attribute) -> bool>;
 
     fn outer(self) -> Self::Ret {
+        #[cfg_attr(feature = "cargo-clippy", allow(trivially_copy_pass_by_ref))]
         fn is_outer(attr: &&Attribute) -> bool {
             match attr.style {
                 AttrStyle::Outer => true,
                 _ => false,
             }
         }
         self.into_iter().filter(is_outer)
     }
 
     fn inner(self) -> Self::Ret {
+        #[cfg_attr(feature = "cargo-clippy", allow(trivially_copy_pass_by_ref))]
         fn is_inner(attr: &&Attribute) -> bool {
             match attr.style {
                 AttrStyle::Inner(_) => true,
                 _ => false,
             }
         }
         self.into_iter().filter(is_inner)
     }
 }
 
 #[cfg(feature = "parsing")]
 pub mod parsing {
     use super::*;
 
-    use parse::{ParseStream, Result};
+    use ext::IdentExt;
+    use parse::{Parse, ParseStream, Result};
     #[cfg(feature = "full")]
     use private;
 
     pub fn single_parse_inner(input: ParseStream) -> Result<Attribute> {
         let content;
         Ok(Attribute {
             pound_token: input.parse()?,
             style: AttrStyle::Inner(input.parse()?),
@@ -505,16 +571,81 @@ pub mod parsing {
     #[cfg(feature = "full")]
     impl private {
         pub fn attrs(outer: Vec<Attribute>, inner: Vec<Attribute>) -> Vec<Attribute> {
             let mut attrs = outer;
             attrs.extend(inner);
             attrs
         }
     }
+
+    impl Parse for Meta {
+        fn parse(input: ParseStream) -> Result<Self> {
+            let ident = input.call(Ident::parse_any)?;
+            parse_meta_after_ident(ident, input)
+        }
+    }
+
+    impl Parse for MetaList {
+        fn parse(input: ParseStream) -> Result<Self> {
+            let ident = input.call(Ident::parse_any)?;
+            parse_meta_list_after_ident(ident, input)
+        }
+    }
+
+    impl Parse for MetaNameValue {
+        fn parse(input: ParseStream) -> Result<Self> {
+            let ident = input.call(Ident::parse_any)?;
+            parse_meta_name_value_after_ident(ident, input)
+        }
+    }
+
+    impl Parse for NestedMeta {
+        fn parse(input: ParseStream) -> Result<Self> {
+            let ahead = input.fork();
+
+            if ahead.peek(Lit) && !(ahead.peek(LitBool) && ahead.peek2(Token![=])) {
+                input.parse().map(NestedMeta::Literal)
+            } else if ahead.call(Ident::parse_any).is_ok() {
+                input.parse().map(NestedMeta::Meta)
+            } else {
+                Err(input.error("expected identifier or literal"))
+            }
+        }
+    }
+
+    pub fn parse_meta_after_ident(ident: Ident, input: ParseStream) -> Result<Meta> {
+        if input.peek(token::Paren) {
+            parse_meta_list_after_ident(ident, input).map(Meta::List)
+        } else if input.peek(Token![=]) {
+            parse_meta_name_value_after_ident(ident, input).map(Meta::NameValue)
+        } else {
+            Ok(Meta::Word(ident))
+        }
+    }
+
+    fn parse_meta_list_after_ident(ident: Ident, input: ParseStream) -> Result<MetaList> {
+        let content;
+        Ok(MetaList {
+            ident: ident,
+            paren_token: parenthesized!(content in input),
+            nested: content.parse_terminated(NestedMeta::parse)?,
+        })
+    }
+
+    fn parse_meta_name_value_after_ident(
+        ident: Ident,
+        input: ParseStream,
+    ) -> Result<MetaNameValue> {
+        Ok(MetaNameValue {
+            ident: ident,
+            eq_token: input.parse()?,
+            lit: input.parse()?,
+        })
+    }
 }
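// Editorial example (not part of the patch): with the `Parse` impls above, a
// `Meta` can now be parsed directly, for instance from a string:
//
//     let meta: Meta = syn::parse_str(r#"rename = "new_name""#)?;
//     // -> Meta::NameValue { ident: `rename`, lit: "new_name", .. }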
 
 #[cfg(feature = "printing")]
 mod printing {
     use super::*;
     use proc_macro2::TokenStream;
     use quote::ToTokens;
 
--- a/third_party/rust/syn/src/buffer.rs
+++ b/third_party/rust/syn/src/buffer.rs
@@ -1,35 +1,28 @@
-// Copyright 2018 Syn Developers
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
 //! A stably addressed token buffer supporting efficient traversal based on a
 //! cheaply copyable cursor.
 //!
 //! *This module is available if Syn is built with the `"parsing"` feature.*
 
-// This module is heavily commented as it contains the only unsafe code in Syn,
-// and caution should be used when editing it. The public-facing interface is
-// 100% safe but the implementation is fragile internally.
+// This module is heavily commented as it contains most of the unsafe code in
+// Syn, and caution should be used when editing it. The public-facing interface
+// is 100% safe but the implementation is fragile internally.
 
 #[cfg(all(
     not(all(target_arch = "wasm32", target_os = "unknown")),
     feature = "proc-macro"
 ))]
 use proc_macro as pm;
 use proc_macro2::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
 
 use std::marker::PhantomData;
 use std::ptr;
 
+use private;
 use Lifetime;
 
 /// Internal type which is used instead of `TokenTree` to represent a token tree
 /// within a `TokenBuffer`.
 enum Entry {
     // Mimicking types from proc-macro.
     Group(Group, TokenBuffer),
     Ident(Ident),
@@ -348,8 +341,26 @@ impl<'a> Cursor<'a> {
             Entry::Group(ref group, _) => group.span(),
             Entry::Literal(ref l) => l.span(),
             Entry::Ident(ref t) => t.span(),
             Entry::Punct(ref o) => o.span(),
             Entry::End(..) => Span::call_site(),
         }
     }
 }
+
+impl private {
+    #[cfg(procmacro2_semver_exempt)]
+    pub fn open_span_of_group(cursor: Cursor) -> Span {
+        match *cursor.entry() {
+            Entry::Group(ref group, _) => group.span_open(),
+            _ => cursor.span(),
+        }
+    }
+
+    #[cfg(procmacro2_semver_exempt)]
+    pub fn close_span_of_group(cursor: Cursor) -> Span {
+        match *cursor.entry() {
+            Entry::Group(ref group, _) => group.span_close(),
+            _ => cursor.span(),
+        }
+    }
+}
--- a/third_party/rust/syn/src/data.rs
+++ b/third_party/rust/syn/src/data.rs
@@ -1,16 +1,8 @@
-// Copyright 2018 Syn Developers
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
 use super::*;
 use punctuated::Punctuated;
 
 ast_struct! {
     /// An enum variant.
     ///
     /// *This type is available if Syn is built with the `"derive"` or `"full"`
     /// feature.*
--- a/third_party/rust/syn/src/derive.rs
+++ b/third_party/rust/syn/src/derive.rs
@@ -1,16 +1,8 @@
-// Copyright 2018 Syn Developers
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
 use super::*;
 use punctuated::Punctuated;
 
 ast_struct! {
     /// Data structure sent to a `proc_macro_derive` macro.
     ///
     /// *This type is available if Syn is built with the `"derive"` feature.*
     pub struct DeriveInput {
--- a/third_party/rust/syn/src/error.rs
+++ b/third_party/rust/syn/src/error.rs
@@ -1,134 +1,213 @@
-// Copyright 2018 Syn Developers
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
 use std;
 use std::fmt::{self, Display};
 use std::iter::FromIterator;
 
 use proc_macro2::{
     Delimiter, Group, Ident, LexError, Literal, Punct, Spacing, Span, TokenStream, TokenTree,
 };
+#[cfg(feature = "printing")]
+use quote::ToTokens;
 
+#[cfg(feature = "parsing")]
 use buffer::Cursor;
+#[cfg(all(procmacro2_semver_exempt, feature = "parsing"))]
+use private;
+use thread::ThreadBound;
 
 /// The result of a Syn parser.
 pub type Result<T> = std::result::Result<T, Error>;
 
 /// Error returned when a Syn parser cannot parse the input tokens.
 ///
 /// Refer to the [module documentation] for details about parsing in Syn.
 ///
 /// [module documentation]: index.html
 ///
 /// *This type is available if Syn is built with the `"parsing"` feature.*
-#[derive(Debug, Clone)]
+#[derive(Debug)]
 pub struct Error {
-    span: Span,
+    // Span is implemented as an index into a thread-local interner to keep the
+    // size small. It is not safe to access from a different thread. We want
+    // errors to be Send and Sync to play nicely with the Failure crate, so pin
+    // the span we're given to its original thread and assume it is
+    // Span::call_site if accessed from any other thread.
+    start_span: ThreadBound<Span>,
+    end_span: ThreadBound<Span>,
     message: String,
 }
 
+#[cfg(test)]
+struct _Test
+where
+    Error: Send + Sync;
+
 impl Error {
     /// Usually the [`ParseStream::error`] method will be used instead, which
     /// automatically uses the correct span from the current position of the
     /// parse stream.
     ///
     /// Use `Error::new` when the error needs to be triggered on some span other
     /// than where the parse stream is currently positioned.
     ///
     /// [`ParseStream::error`]: struct.ParseBuffer.html#method.error
     ///
     /// # Example
     ///
-    /// ```
-    /// #[macro_use]
-    /// extern crate syn;
-    ///
-    /// use syn::{Ident, LitStr};
-    /// use syn::parse::{Error, ParseStream, Result};
+    /// ```edition2018
+    /// use syn::{Error, Ident, LitStr, Result, Token};
+    /// use syn::parse::ParseStream;
     ///
     /// // Parses input that looks like `name = "string"` where the key must be
     /// // the identifier `name` and the value may be any string literal.
     /// // Returns the string literal.
     /// fn parse_name(input: ParseStream) -> Result<LitStr> {
     ///     let name_token: Ident = input.parse()?;
     ///     if name_token != "name" {
     ///         // Trigger an error not on the current position of the stream,
     ///         // but on the position of the unexpected identifier.
     ///         return Err(Error::new(name_token.span(), "expected `name`"));
     ///     }
     ///     input.parse::<Token![=]>()?;
     ///     let s: LitStr = input.parse()?;
     ///     Ok(s)
     /// }
-    /// #
-    /// # fn main() {}
     /// ```
     pub fn new<T: Display>(span: Span, message: T) -> Self {
         Error {
-            span: span,
+            start_span: ThreadBound::new(span),
+            end_span: ThreadBound::new(span),
             message: message.to_string(),
         }
     }
 
+    /// Creates an error with the specified message spanning the given syntax
+    /// tree node.
+    ///
+    /// Unlike the `Error::new` constructor, this constructor takes an argument
+    /// `tokens` which is a syntax tree node. This allows the resulting `Error`
+    /// to attempt to span all tokens inside of `tokens`. While you would
+    /// typically be able to use the `Spanned` trait with the above `Error::new`
+    /// constructor, implementation limitations today mean that
+    /// `Error::new_spanned` may provide a higher-quality error message on
+    /// stable Rust.
+    ///
+    /// When in doubt it's recommended to stick to `Error::new` (or
+    /// `ParseStream::error`)!
+    #[cfg(feature = "printing")]
+    pub fn new_spanned<T: ToTokens, U: Display>(tokens: T, message: U) -> Self {
+        let mut iter = tokens.into_token_stream().into_iter();
+        let start = iter.next().map_or_else(Span::call_site, |t| t.span());
+        let end = iter.last().map_or(start, |t| t.span());
+        Error {
+            start_span: ThreadBound::new(start),
+            end_span: ThreadBound::new(end),
+            message: message.to_string(),
+        }
+    }
+
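    // Editorial example (not part of the patch); `field` is a hypothetical
    // `syn::Field` taken from the macro input:
    //
    //     let err = Error::new_spanned(&field.ty, "this type is not supported");
    //     return err.to_compile_error().into();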
+    /// The source location of the error.
+    ///
+    /// Spans are not thread-safe so this function returns `Span::call_site()`
+    /// if called from a different thread than the one on which the `Error` was
+    /// originally created.
     pub fn span(&self) -> Span {
-        self.span
+        let start = match self.start_span.get() {
+            Some(span) => *span,
+            None => return Span::call_site(),
+        };
+
+        #[cfg(procmacro2_semver_exempt)]
+        {
+            let end = match self.end_span.get() {
+                Some(span) => *span,
+                None => return Span::call_site(),
+            };
+            start.join(end).unwrap_or(start)
+        }
+        #[cfg(not(procmacro2_semver_exempt))]
+        {
+            start
+        }
     }
 
     /// Render the error as an invocation of [`compile_error!`].
     ///
     /// The [`parse_macro_input!`] macro provides a convenient way to invoke
     /// this method correctly in a procedural macro.
     ///
     /// [`compile_error!`]: https://doc.rust-lang.org/std/macro.compile_error.html
     /// [`parse_macro_input!`]: ../macro.parse_macro_input.html
     pub fn to_compile_error(&self) -> TokenStream {
+        let start = self
+            .start_span
+            .get()
+            .cloned()
+            .unwrap_or_else(Span::call_site);
+        let end = self.end_span.get().cloned().unwrap_or_else(Span::call_site);
+
         // compile_error!($message)
         TokenStream::from_iter(vec![
-            TokenTree::Ident(Ident::new("compile_error", self.span)),
+            TokenTree::Ident(Ident::new("compile_error", start)),
             TokenTree::Punct({
                 let mut punct = Punct::new('!', Spacing::Alone);
-                punct.set_span(self.span);
+                punct.set_span(start);
                 punct
             }),
             TokenTree::Group({
                 let mut group = Group::new(Delimiter::Brace, {
                     TokenStream::from_iter(vec![TokenTree::Literal({
                         let mut string = Literal::string(&self.message);
-                        string.set_span(self.span);
+                        string.set_span(end);
                         string
                     })])
                 });
-                group.set_span(self.span);
+                group.set_span(end);
                 group
             }),
         ])
     }
 }
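// Editorial example (not part of the patch): surfacing a parse failure from a
// hypothetical proc-macro entry point, where `input` is the incoming
// `proc_macro::TokenStream` and `expand` is a placeholder:
//
//     let expanded = match syn::parse2::<syn::DeriveInput>(input.into()) {
//         Ok(ast) => expand(ast),
//         Err(err) => err.to_compile_error(),  // becomes `compile_error!("...")`
//     };
//     proc_macro::TokenStream::from(expanded)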
 
+#[cfg(feature = "parsing")]
 pub fn new_at<T: Display>(scope: Span, cursor: Cursor, message: T) -> Error {
     if cursor.eof() {
         Error::new(scope, format!("unexpected end of input, {}", message))
     } else {
-        Error::new(cursor.span(), message)
+        #[cfg(procmacro2_semver_exempt)]
+        let span = private::open_span_of_group(cursor);
+        #[cfg(not(procmacro2_semver_exempt))]
+        let span = cursor.span();
+        Error::new(span, message)
     }
 }
 
 impl Display for Error {
     fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
         formatter.write_str(&self.message)
     }
 }
 
+impl Clone for Error {
+    fn clone(&self) -> Self {
+        let start = self
+            .start_span
+            .get()
+            .cloned()
+            .unwrap_or_else(Span::call_site);
+        let end = self.end_span.get().cloned().unwrap_or_else(Span::call_site);
+        Error {
+            start_span: ThreadBound::new(start),
+            end_span: ThreadBound::new(end),
+            message: self.message.clone(),
+        }
+    }
+}
+
 impl std::error::Error for Error {
     fn description(&self) -> &str {
         "parse error"
     }
 }
 
 impl From<LexError> for Error {
     fn from(err: LexError) -> Self {
--- a/third_party/rust/syn/src/export.rs
+++ b/third_party/rust/syn/src/export.rs
@@ -3,16 +3,19 @@ pub use std::cmp::{Eq, PartialEq};
 pub use std::convert::From;
 pub use std::default::Default;
 pub use std::fmt::{self, Debug, Formatter};
 pub use std::hash::{Hash, Hasher};
 pub use std::marker::Copy;
 pub use std::option::Option::{None, Some};
 pub use std::result::Result::{Err, Ok};
 
+#[cfg(feature = "printing")]
+pub extern crate quote;
+
 pub use proc_macro2::{Span, TokenStream as TokenStream2};
 
 pub use span::IntoSpans;
 
 #[cfg(all(
     not(all(target_arch = "wasm32", target_os = "unknown")),
     feature = "proc-macro"
 ))]
--- a/third_party/rust/syn/src/expr.rs
+++ b/third_party/rust/syn/src/expr.rs
@@ -1,16 +1,8 @@
-// Copyright 2018 Syn Developers
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
 use super::*;
 use proc_macro2::{Span, TokenStream};
 use punctuated::Punctuated;
 #[cfg(feature = "extra-traits")]
 use std::hash::{Hash, Hasher};
 #[cfg(all(feature = "parsing", feature = "full"))]
 use std::mem;
 #[cfg(feature = "extra-traits")]
@@ -22,17 +14,17 @@ ast_enum_of_structs! {
     /// *This type is available if Syn is built with the `"derive"` or `"full"`
     /// feature.*
     ///
     /// # Syntax tree enums
     ///
     /// This type is a syntax tree enum. In Syn this and other syntax tree enums
     /// are designed to be traversed using the following rebinding idiom.
     ///
-    /// ```
+    /// ```edition2018
     /// # use syn::Expr;
     /// #
     /// # fn example(expr: Expr) {
     /// # const IGNORE: &str = stringify! {
     /// let expr: Expr = /* ... */;
     /// # };
     /// match expr {
     ///     Expr::MethodCall(expr) => {
@@ -55,46 +47,46 @@ ast_enum_of_structs! {
     /// with the same name `expr` we effectively imbue our variable with all of
     /// the data fields provided by the variant that it turned out to be. So for
     /// example above if we ended up in the `MethodCall` case then we get to use
     /// `expr.receiver`, `expr.args` etc; if we ended up in the `If` case we get
     /// to use `expr.cond`, `expr.then_branch`, `expr.else_branch`.
     ///
     /// The pattern is similar if the input expression is borrowed:
     ///
-    /// ```
+    /// ```edition2018
     /// # use syn::Expr;
     /// #
     /// # fn example(expr: &Expr) {
     /// match *expr {
     ///     Expr::MethodCall(ref expr) => {
     /// #   }
     /// #   _ => {}
     /// # }
     /// # }
     /// ```
     ///
     /// This approach avoids repeating the variant names twice on every line.
     ///
-    /// ```
+    /// ```edition2018
     /// # use syn::{Expr, ExprMethodCall};
     /// #
     /// # fn example(expr: Expr) {
     /// # match expr {
     /// Expr::MethodCall(ExprMethodCall { method, args, .. }) => { // repetitive
     /// # }
     /// # _ => {}
     /// # }
     /// # }
     /// ```
     ///
     /// In general, the name to which a syntax tree enum variant is bound should
     /// be a suitable name for the complete syntax tree enum type.
     ///
-    /// ```
+    /// ```edition2018
     /// # use syn::{Expr, ExprField};
     /// #
     /// # fn example(discriminant: &ExprField) {
     /// // Binding is called `base` which is the name I would use if I were
     /// // assigning `*discriminant.base` without an `if let`.
     /// if let Expr::Tuple(ref base) = *discriminant.base {
     /// # }
     /// # }
@@ -763,19 +755,16 @@ ast_enum_of_structs! {
     ///
     /// *This type is available if Syn is built with the `"full"` feature.*
     ///
     /// # Syntax tree enum
     ///
     /// This type is a [syntax tree enum].
     ///
     /// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
-    // Clippy false positive
-    // https://github.com/Manishearth/rust-clippy/issues/1241
-    #[cfg_attr(feature = "cargo-clippy", allow(enum_variant_names))]
     pub enum Pat {
         /// A pattern that matches any value: `_`.
         ///
         /// *This type is available if Syn is built with the `"full"` feature.*
         pub Wild(PatWild {
             pub underscore_token: Token![_],
         }),
 
@@ -917,17 +906,17 @@ impl Hash for PatVerbatim {
 }
 
 #[cfg(feature = "full")]
 ast_struct! {
     /// One arm of a `match` expression: `0...10 => { return true; }`.
     ///
     /// As in:
     ///
-    /// ```rust
+    /// ```edition2018
     /// # fn f() -> bool {
     /// #     let n = 0;
     /// match n {
     ///     0...10 => {
     ///         return true;
     ///     }
     ///     // ...
     ///     # _ => {}
@@ -1892,17 +1881,17 @@ pub mod parsing {
         let expr = expr_no_struct(input)?;
 
         let content;
         let brace_token = braced!(content in input);
         let inner_attrs = content.call(Attribute::parse_inner)?;
 
         let mut arms = Vec::new();
         while !content.is_empty() {
-            arms.push(content.call(match_arm)?);
+            arms.push(content.call(Arm::parse)?);
         }
 
         Ok(ExprMatch {
             attrs: inner_attrs,
             match_token: match_token,
             expr: Box::new(expr),
             brace_token: brace_token,
             arms: arms,
@@ -1929,62 +1918,16 @@ pub mod parsing {
                 } else {
                     None
                 }
             },
         })
     }
 
     #[cfg(feature = "full")]
-    fn match_arm(input: ParseStream) -> Result<Arm> {
-        let requires_comma;
-        Ok(Arm {
-            attrs: input.call(Attribute::parse_outer)?,
-            leading_vert: input.parse()?,
-            pats: {
-                let mut pats = Punctuated::new();
-                let value: Pat = input.parse()?;
-                pats.push_value(value);
-                loop {
-                    if !input.peek(Token![|]) {
-                        break;
-                    }
-                    let punct = input.parse()?;
-                    pats.push_punct(punct);
-                    let value: Pat = input.parse()?;
-                    pats.push_value(value);
-                }
-                pats
-            },
-            guard: {
-                if input.peek(Token![if]) {
-                    let if_token: Token![if] = input.parse()?;
-                    let guard: Expr = input.parse()?;
-                    Some((if_token, Box::new(guard)))
-                } else {
-                    None
-                }
-            },
-            fat_arrow_token: input.parse()?,
-            body: {
-                let body = input.call(expr_early)?;
-                requires_comma = requires_terminator(&body);
-                Box::new(body)
-            },
-            comma: {
-                if requires_comma && !input.is_empty() {
-                    Some(input.parse()?)
-                } else {
-                    input.parse()?
-                }
-            },
-        })
-    }
-
-    #[cfg(feature = "full")]
     fn expr_closure(input: ParseStream, allow_struct: AllowStruct) -> Result<ExprClosure> {
         let asyncness: Option<Token![async]> = input.parse()?;
         let movability: Option<Token![static]> = if asyncness.is_none() {
             input.parse()?
         } else {
             None
         };
         let capture: Option<Token![move]> = input.parse()?;
@@ -2343,22 +2286,19 @@ pub mod parsing {
         /// Parse the body of a block as zero or more statements, possibly
         /// including one trailing expression.
         ///
         /// *This function is available if Syn is built with the `"parsing"`
         /// feature.*
         ///
         /// # Example
         ///
-        /// ```
-        /// #[macro_use]
-        /// extern crate syn;
-        ///
-        /// use syn::{token, Attribute, Block, Ident, Stmt};
-        /// use syn::parse::{Parse, ParseStream, Result};
+        /// ```edition2018
+        /// use syn::{braced, token, Attribute, Block, Ident, Result, Stmt, Token};
+        /// use syn::parse::{Parse, ParseStream};
         ///
         /// // Parse a function with no generics or parameter list.
         /// //
         /// //     fn playground {
         /// //         let mut x = 1;
         /// //         x += 1;
         /// //         println!("{}", x);
         /// //     }
@@ -2389,18 +2329,16 @@ pub mod parsing {
         ///             },
         ///             fn_token: fn_token,
         ///             name: name,
         ///             brace_token: brace_token,
         ///             stmts: stmts,
         ///         })
         ///     }
         /// }
-        /// #
-        /// # fn main() {}
         /// ```
         pub fn parse_within(input: ParseStream) -> Result<Vec<Stmt>> {
             let mut stmts = Vec::new();
             loop {
                 while input.peek(Token![;]) {
                     input.parse::<Token![;]>()?;
                 }
                 if input.is_empty() {
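
As a quick illustration of what `parse_within` returns, the function can also be driven through the `Parser` trait outside of a custom `Parse` impl; a sketch, not part of the diff, assuming syn's `"full"` and `"parsing"` features:

```rust
use syn::parse::Parser;
use syn::{Block, Stmt};

fn main() {
    // Statements with no surrounding braces, including a trailing expression.
    let src = "let mut x = 1; x += 1; x";
    let stmts: Vec<Stmt> = Block::parse_within.parse_str(src).unwrap();
    assert_eq!(stmts.len(), 3);
}
```
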
@@ -2565,33 +2503,38 @@ pub mod parsing {
             } else if lookahead.peek(Token![-]) || lookahead.peek(Lit) {
                 pat_lit_or_range(input)
             } else if input.peek(Ident)
                 && ({
                     input.peek2(Token![::])
                         || input.peek2(Token![!])
                         || input.peek2(token::Brace)
                         || input.peek2(token::Paren)
-                        || input.peek2(Token![..]) && !{
-                            let ahead = input.fork();
-                            ahead.parse::<Ident>()?;
-                            ahead.parse::<RangeLimits>()?;
-                            ahead.is_empty() || ahead.peek(Token![,])
-                        }
+                        || input.peek2(Token![..])
+                            && !{
+                                let ahead = input.fork();
+                                ahead.parse::<Ident>()?;
+                                ahead.parse::<RangeLimits>()?;
+                                ahead.is_empty() || ahead.peek(Token![,])
+                            }
                 })
+                || input.peek(Token![self]) && input.peek2(Token![::])
                 || input.peek(Token![::])
                 || input.peek(Token![<])
-                || input.peek(Token![self])
                 || input.peek(Token![Self])
                 || input.peek(Token![super])
                 || input.peek(Token![extern])
                 || input.peek(Token![crate])
             {
                 pat_path_or_macro_or_struct_or_range(input)
-            } else if input.peek(Token![ref]) || input.peek(Token![mut]) || input.peek(Ident) {
+            } else if input.peek(Token![ref])
+                || input.peek(Token![mut])
+                || input.peek(Token![self])
+                || input.peek(Ident)
+            {
                 input.call(pat_ident).map(Pat::Ident)
             } else if lookahead.peek(token::Paren) {
                 input.call(pat_tuple).map(Pat::Tuple)
             } else if lookahead.peek(Token![&]) {
                 input.call(pat_ref).map(Pat::Ref)
             } else if lookahead.peek(token::Bracket) {
                 input.call(pat_slice).map(Pat::Slice)
             } else {
@@ -2777,16 +2720,64 @@ pub mod parsing {
             } else if input.peek(LitInt) {
                 input.parse().map(Member::Unnamed)
             } else {
                 Err(input.error("expected identifier or integer"))
             }
         }
     }
 
+    #[cfg(feature = "full")]
+    impl Parse for Arm {
+        fn parse(input: ParseStream) -> Result<Arm> {
+            let requires_comma;
+            Ok(Arm {
+                attrs: input.call(Attribute::parse_outer)?,
+                leading_vert: input.parse()?,
+                pats: {
+                    let mut pats = Punctuated::new();
+                    let value: Pat = input.parse()?;
+                    pats.push_value(value);
+                    loop {
+                        if !input.peek(Token![|]) {
+                            break;
+                        }
+                        let punct = input.parse()?;
+                        pats.push_punct(punct);
+                        let value: Pat = input.parse()?;
+                        pats.push_value(value);
+                    }
+                    pats
+                },
+                guard: {
+                    if input.peek(Token![if]) {
+                        let if_token: Token![if] = input.parse()?;
+                        let guard: Expr = input.parse()?;
+                        Some((if_token, Box::new(guard)))
+                    } else {
+                        None
+                    }
+                },
+                fat_arrow_token: input.parse()?,
+                body: {
+                    let body = input.call(expr_early)?;
+                    requires_comma = requires_terminator(&body);
+                    Box::new(body)
+                },
+                comma: {
+                    if requires_comma && !input.is_empty() {
+                        Some(input.parse()?)
+                    } else {
+                        input.parse()?
+                    }
+                },
+            })
+        }
+    }
+
     impl Parse for Index {
         fn parse(input: ParseStream) -> Result<Self> {
             let lit: LitInt = input.parse()?;
             if let IntSuffix::None = lit.suffix() {
                 Ok(Index {
                     index: lit.value() as u32,
                     span: lit.span(),
                 })
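
With the arm parser now exposed as `impl Parse for Arm` (added above) rather than the private `match_arm` helper, a single match arm can be parsed on its own; a small sketch assuming the `"full"` and `"parsing"` features:

```rust
use syn::Arm;

fn main() {
    // One range pattern, no guard; the trailing comma is consumed by Arm::parse.
    let arm: Arm = syn::parse_str("0...10 => true,").unwrap();
    assert_eq!(arm.pats.len(), 1);
    assert!(arm.guard.is_none());
}
```
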
--- a/third_party/rust/syn/src/ext.rs
+++ b/third_party/rust/syn/src/ext.rs
@@ -12,23 +12,20 @@ use parse::{ParseStream, Result};
 ///
 /// *This trait is available if Syn is built with the `"parsing"` feature.*
 pub trait IdentExt: Sized + private::Sealed {
     /// Parses any identifier including keywords.
     ///
     /// This is useful when parsing a DSL which allows Rust keywords as
     /// identifiers.
     ///
-    /// ```rust
-    /// #[macro_use]
-    /// extern crate syn;
-    ///
-    /// use syn::Ident;
+    /// ```edition2018
+    /// use syn::{Error, Ident, Result, Token};
     /// use syn::ext::IdentExt;
-    /// use syn::parse::{Error, ParseStream, Result};
+    /// use syn::parse::ParseStream;
     ///
     /// // Parses input that looks like `name = NAME` where `NAME` can be
     /// // any identifier.
     /// //
     /// // Examples:
     /// //
     /// //     name = anything
     /// //     name = impl
@@ -36,18 +33,16 @@ pub trait IdentExt: Sized + private::Sea
     ///     let name_token: Ident = input.parse()?;
     ///     if name_token != "name" {
     ///         return Err(Error::new(name_token.span(), "expected `name`"));
     ///     }
     ///     input.parse::<Token![=]>()?;
     ///     let name = input.call(Ident::parse_any)?;
     ///     Ok(name)
     /// }
-    /// #
-    /// # fn main() {}
     /// ```
     fn parse_any(input: ParseStream) -> Result<Self>;
 }
 
 impl IdentExt for Ident {
     fn parse_any(input: ParseStream) -> Result<Self> {
         input.step(|cursor| match cursor.ident() {
             Some((ident, rest)) => Ok((ident, rest)),
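
A brief usage sketch of `parse_any`, driving it through the `Parser` trait; not part of the diff, and it assumes syn's `"parsing"` feature:

```rust
use syn::ext::IdentExt;
use syn::parse::Parser;
use syn::Ident;

fn main() {
    // Keywords are rejected by Ident's regular Parse impl but accepted here.
    let ident = Ident::parse_any.parse_str("impl").unwrap();
    assert_eq!(ident, "impl");
}
```
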
--- a/third_party/rust/syn/src/file.rs
+++ b/third_party/rust/syn/src/file.rs
@@ -1,31 +1,21 @@
-// Copyright 2018 Syn Developers
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
 use super::*;
 
 ast_struct! {
     /// A complete file of Rust source code.
     ///
     /// *This type is available if Syn is built with the `"full"` feature.*
     ///
     /// # Example
     ///
     /// Parse a Rust source file into a `syn::File` and print out a debug
     /// representation of the syntax tree.
     ///
-    /// ```
-    /// # extern crate syn;
-    /// #
+    /// ```edition2018
     /// use std::env;
     /// use std::fs::File;
     /// use std::io::Read;
     /// use std::process;
     ///
     /// fn main() {
     /// # }
     /// #
--- a/third_party/rust/syn/src/gen/fold.rs
+++ b/third_party/rust/syn/src/gen/fold.rs
@@ -1,12 +1,11 @@
 // THIS FILE IS AUTOMATICALLY GENERATED; DO NOT EDIT
 
 #![allow(unreachable_code)]
-#![cfg_attr(feature = "cargo-clippy", allow(needless_pass_by_value))]
 #[cfg(any(feature = "full", feature = "derive"))]
 use gen::helper::fold::*;
 use proc_macro2::Span;
 #[cfg(any(feature = "full", feature = "derive"))]
 use token::{Brace, Bracket, Group, Paren};
 use *;
 #[cfg(feature = "full")]
 macro_rules! full {
--- a/third_party/rust/syn/src/gen/visit.rs
+++ b/third_party/rust/syn/src/gen/visit.rs
@@ -1,11 +1,11 @@
 // THIS FILE IS AUTOMATICALLY GENERATED; DO NOT EDIT
 
-#![cfg_attr(feature = "cargo-clippy", allow(match_same_arms))]
+#![cfg_attr(feature = "cargo-clippy", allow(trivially_copy_pass_by_ref))]
 #[cfg(any(feature = "full", feature = "derive"))]
 use gen::helper::visit::*;
 use proc_macro2::Span;
 #[cfg(any(feature = "full", feature = "derive"))]
 use punctuated::Punctuated;
 use *;
 #[cfg(feature = "full")]
 macro_rules! full {
--- a/third_party/rust/syn/src/gen/visit_mut.rs
+++ b/third_party/rust/syn/src/gen/visit_mut.rs
@@ -1,11 +1,10 @@
 // THIS FILE IS AUTOMATICALLY GENERATED; DO NOT EDIT
 
-#![cfg_attr(feature = "cargo-clippy", allow(match_same_arms))]
 #[cfg(any(feature = "full", feature = "derive"))]
 use gen::helper::visit_mut::*;
 use proc_macro2::Span;
 #[cfg(any(feature = "full", feature = "derive"))]
 use punctuated::Punctuated;
 use *;
 #[cfg(feature = "full")]
 macro_rules! full {
--- a/third_party/rust/syn/src/gen_helper.rs
+++ b/third_party/rust/syn/src/gen_helper.rs
@@ -1,16 +1,8 @@
-// Copyright 2018 Syn Developers
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
 #[cfg(feature = "fold")]
 pub mod fold {
     use fold::Fold;
     use proc_macro2::Span;
     use punctuated::{Pair, Punctuated};
 
     pub trait FoldHelper {
         type Item;
--- a/third_party/rust/syn/src/generics.rs
+++ b/third_party/rust/syn/src/generics.rs
@@ -1,16 +1,8 @@
-// Copyright 2018 Syn Developers
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
 use super::*;
 use punctuated::{Iter, IterMut, Punctuated};
 
 ast_struct! {
     /// Lifetimes and type parameters attached to a declaration of a function,
     /// enum, trait, etc.
     ///
     /// *This type is available if Syn is built with the `"derive"` or `"full"`
@@ -295,24 +287,19 @@ pub struct TypeGenerics<'a>(&'a Generics
 #[cfg_attr(feature = "clone-impls", derive(Clone))]
 pub struct Turbofish<'a>(&'a Generics);
 
 #[cfg(feature = "printing")]
 impl Generics {
     /// Split a type's generics into the pieces required for impl'ing a trait
     /// for that type.
     ///
-    /// ```
-    /// # #[macro_use]
-    /// # extern crate quote;
-    /// #
-    /// # extern crate proc_macro2;
-    /// # extern crate syn;
-    /// #
+    /// ```edition2018
     /// # use proc_macro2::{Span, Ident};
+    /// # use quote::quote;
     /// #
     /// # fn main() {
     /// #     let generics: syn::Generics = Default::default();
     /// #     let name = Ident::new("MyType", Span::call_site());
     /// #
     /// let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
     /// quote! {
     ///     impl #impl_generics MyTrait for #name #ty_generics #where_clause {
@@ -485,29 +472,23 @@ ast_enum_of_structs! {
 #[cfg(feature = "parsing")]
 pub mod parsing {
     use super::*;
 
     use parse::{Parse, ParseStream, Result};
 
     impl Parse for Generics {
         fn parse(input: ParseStream) -> Result<Self> {
-            let mut params = Punctuated::new();
-
             if !input.peek(Token![<]) {
-                return Ok(Generics {
-                    lt_token: None,
-                    params: params,
-                    gt_token: None,
-                    where_clause: None,
-                });
+                return Ok(Generics::default());
             }
 
             let lt_token: Token![<] = input.parse()?;
 
+            let mut params = Punctuated::new();
             let mut has_type_param = false;
             loop {
                 if input.peek(Token![>]) {
                     break;
                 }
 
                 let attrs = input.call(Attribute::parse_outer)?;
                 let lookahead = input.lookahead1();
@@ -653,17 +634,20 @@ pub mod parsing {
                         has_colon = false;
                         None
                     }
                 },
                 bounds: {
                     let mut bounds = Punctuated::new();
                     if has_colon {
                         loop {
-                            if input.peek(Token![,]) || input.peek(Token![>]) {
+                            if input.peek(Token![,])
+                                || input.peek(Token![>])
+                                || input.peek(Token![=])
+                            {
                                 break;
                             }
                             let value = input.parse()?;
                             bounds.push_value(value);
                             if !input.peek(Token![+]) {
                                 break;
                             }
                             let punct = input.parse()?;
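
The extra `Token![=]` check lets a bound list stop in front of a type-parameter default. A sketch of the kind of generics this accepts (hypothetical input, assuming the `"parsing"` feature plus `"derive"` or `"full"`):

```rust
use syn::Generics;

fn main() {
    // The bounds `Clone + Send` must terminate at `=` so the default can be parsed.
    let generics: Generics = syn::parse_str("<T: Clone + Send = String>").unwrap();
    assert_eq!(generics.params.len(), 1);
}
```
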
--- a/third_party/rust/syn/src/group.rs
+++ b/third_party/rust/syn/src/group.rs
@@ -69,19 +69,23 @@ impl private {
             content: content,
         })
     }
 }
 
 fn parse_delimited(input: ParseStream, delimiter: Delimiter) -> Result<(Span, ParseBuffer)> {
     input.step(|cursor| {
         if let Some((content, span, rest)) = cursor.group(delimiter) {
-            let unexpected = private::get_unexpected(input);
+            #[cfg(procmacro2_semver_exempt)]
+            let scope = private::close_span_of_group(*cursor);
+            #[cfg(not(procmacro2_semver_exempt))]
+            let scope = span;
             let nested = private::advance_step_cursor(cursor, content);
-            let content = private::new_parse_buffer(span, nested, unexpected);
+            let unexpected = private::get_unexpected(input);
+            let content = private::new_parse_buffer(scope, nested, unexpected);
             Ok(((span, content), rest))
         } else {
             let message = match delimiter {
                 Delimiter::Parenthesis => "expected parentheses",
                 Delimiter::Brace => "expected curly braces",
                 Delimiter::Bracket => "expected square brackets",
                 Delimiter::None => "expected invisible group",
             };
@@ -89,25 +93,21 @@ fn parse_delimited(input: ParseStream, d
         }
     })
 }
 
 /// Parse a set of parentheses and expose their content to subsequent parsers.
 ///
 /// # Example
 ///
-/// ```rust
-/// # #[macro_use]
-/// # extern crate quote;
+/// ```edition2018
+/// # use quote::quote;
 /// #
-/// #[macro_use]
-/// extern crate syn;
-///
-/// use syn::{token, Ident, Type};
-/// use syn::parse::{Parse, ParseStream, Result};
+/// use syn::{parenthesized, token, Ident, Result, Token, Type};
+/// use syn::parse::{Parse, ParseStream};
 /// use syn::punctuated::Punctuated;
 ///
 /// // Parse a simplified tuple struct syntax like:
 /// //
 /// //     struct S(A, B);
 /// struct TupleStruct {
 ///     struct_token: Token![struct],
 ///     ident: Ident,
@@ -150,24 +150,21 @@ macro_rules! parenthesized {
         }
     };
 }
 
 /// Parse a set of curly braces and expose their content to subsequent parsers.
 ///
 /// # Example
 ///
-/// ```rust
-/// # #[macro_use]
-/// # extern crate quote;
+/// ```edition2018
+/// # use quote::quote;
 /// #
-/// #[macro_use]
-/// extern crate syn;
-/// use syn::{token, Ident, Type};
-/// use syn::parse::{Parse, ParseStream, Result};
+/// use syn::{braced, token, Ident, Result, Token, Type};
+/// use syn::parse::{Parse, ParseStream};
 /// use syn::punctuated::Punctuated;
 ///
 /// // Parse a simplified struct syntax like:
 /// //
 /// //     struct S {
 /// //         a: A,
 /// //         b: B,
 /// //     }
@@ -231,28 +228,22 @@ macro_rules! braced {
     };
 }
 
 /// Parse a set of square brackets and expose their content to subsequent
 /// parsers.
 ///
 /// # Example
 ///
-/// ```rust
-/// # #[macro_use]
-/// # extern crate quote;
+/// ```edition2018
+/// # use quote::quote;
 /// #
-/// #[macro_use]
-/// extern crate syn;
-///
-/// extern crate proc_macro2;
-///
 /// use proc_macro2::TokenStream;
-/// use syn::token;
-/// use syn::parse::{Parse, ParseStream, Result};
+/// use syn::{bracketed, token, Result, Token};
+/// use syn::parse::{Parse, ParseStream};
 ///
 /// // Parse an outer attribute like:
 /// //
 /// //     #[repr(C, packed)]
 /// struct OuterAttribute {
 ///     pound_token: Token![#],
 ///     bracket_token: token::Bracket,
 ///     content: TokenStream,
--- a/third_party/rust/syn/src/item.rs
+++ b/third_party/rust/syn/src/item.rs
@@ -1,16 +1,8 @@
-// Copyright 2018 Syn Developers
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
 use super::*;
 use derive::{Data, DeriveInput};
 use proc_macro2::TokenStream;
 use punctuated::Punctuated;
 use token::{Brace, Paren};
 
 #[cfg(feature = "extra-traits")]
 use std::hash::{Hash, Hasher};
@@ -765,16 +757,18 @@ ast_enum_of_structs! {
 }
 
 #[cfg(feature = "parsing")]
 pub mod parsing {
     use super::*;
 
     use ext::IdentExt;
     use parse::{Parse, ParseStream, Result};
+    use proc_macro2::{Punct, Spacing, TokenTree};
+    use std::iter::FromIterator;
 
     impl Parse for Item {
         fn parse(input: ParseStream) -> Result<Self> {
             let ahead = input.fork();
             ahead.call(Attribute::parse_outer)?;
             let vis: Visibility = ahead.parse()?;
 
             let lookahead = ahead.lookahead1();
@@ -802,17 +796,17 @@ pub mod parsing {
                 }
             } else if lookahead.peek(Token![use]) {
                 input.parse().map(Item::Use)
             } else if lookahead.peek(Token![static]) {
                 input.parse().map(Item::Static)
             } else if lookahead.peek(Token![const]) {
                 ahead.parse::<Token![const]>()?;
                 let lookahead = ahead.lookahead1();
-                if lookahead.peek(Ident) {
+                if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
                     input.parse().map(Item::Const)
                 } else if lookahead.peek(Token![unsafe])
                     || lookahead.peek(Token![async])
                     || lookahead.peek(Token![extern])
                     || lookahead.peek(Token![fn])
                 {
                     input.parse().map(Item::Fn)
                 } else {
@@ -844,19 +838,19 @@ pub mod parsing {
             } else if lookahead.peek(Token![existential]) {
                 input.parse().map(Item::Existential)
             } else if lookahead.peek(Token![struct]) {
                 input.parse().map(Item::Struct)
             } else if lookahead.peek(Token![enum]) {
                 input.parse().map(Item::Enum)
             } else if lookahead.peek(Token![union]) && ahead.peek2(Ident) {
                 input.parse().map(Item::Union)
-            } else if lookahead.peek(Token![trait])
-                || lookahead.peek(Token![auto]) && ahead.peek2(Token![trait])
-            {
+            } else if lookahead.peek(Token![trait]) {
+                input.call(parse_trait_or_trait_alias)
+            } else if lookahead.peek(Token![auto]) && ahead.peek2(Token![trait]) {
                 input.parse().map(Item::Trait)
             } else if lookahead.peek(Token![impl ])
                 || lookahead.peek(Token![default]) && !ahead.peek2(Token![!])
             {
                 input.parse().map(Item::Impl)
             } else if lookahead.peek(Token![macro]) {
                 input.parse().map(Item::Macro2)
             } else if vis.is_inherited()
@@ -898,39 +892,82 @@ pub mod parsing {
                 semi_token: semi_token,
             })
         }
     }
 
     // TODO: figure out the actual grammar; is body required to be braced?
     impl Parse for ItemMacro2 {
         fn parse(input: ParseStream) -> Result<Self> {
+            let attrs = input.call(Attribute::parse_outer)?;
+            let vis: Visibility = input.parse()?;
+            let macro_token: Token![macro] = input.parse()?;
+            let ident: Ident = input.parse()?;
+
+            let paren_token;
             let args;
+            let brace_token;
             let body;
+            let lookahead = input.lookahead1();
+            if lookahead.peek(token::Paren) {
+                let paren_content;
+                paren_token = parenthesized!(paren_content in input);
+                args = paren_content.parse()?;
+
+                let brace_content;
+                brace_token = braced!(brace_content in input);
+                body = brace_content.parse()?;
+            } else if lookahead.peek(token::Brace) {
+                // Hack: the ItemMacro2 syntax tree will need to change so that
+                // we can store None for the args.
+                //
+                // https://github.com/dtolnay/syn/issues/548
+                //
+                // For now, store some sentinel tokens that are otherwise
+                // illegal.
+                paren_token = token::Paren::default();
+                args = TokenStream::from_iter(vec![
+                    TokenTree::Punct(Punct::new('$', Spacing::Alone)),
+                    TokenTree::Punct(Punct::new('$', Spacing::Alone)),
+                ]);
+
+                let brace_content;
+                brace_token = braced!(brace_content in input);
+                body = brace_content.parse()?;
+            } else {
+                return Err(lookahead.error());
+            }
+
             Ok(ItemMacro2 {
-                attrs: input.call(Attribute::parse_outer)?,
-                vis: input.parse()?,
-                macro_token: input.parse()?,
-                ident: input.parse()?,
-                paren_token: parenthesized!(args in input),
-                args: args.parse()?,
-                brace_token: braced!(body in input),
-                body: body.parse()?,
+                attrs: attrs,
+                vis: vis,
+                macro_token: macro_token,
+                ident: ident,
+                paren_token: paren_token,
+                args: args,
+                brace_token: brace_token,
+                body: body,
             })
         }
     }
 
     impl Parse for ItemExternCrate {
         fn parse(input: ParseStream) -> Result<Self> {
             Ok(ItemExternCrate {
                 attrs: input.call(Attribute::parse_outer)?,
                 vis: input.parse()?,
                 extern_token: input.parse()?,
                 crate_token: input.parse()?,
-                ident: input.parse()?,
+                ident: {
+                    if input.peek(Token![self]) {
+                        input.call(Ident::parse_any)?
+                    } else {
+                        input.parse()?
+                    }
+                },
                 rename: {
                     if input.peek(Token![as]) {
                         let as_token: Token![as] = input.parse()?;
                         let rename: Ident = input.parse()?;
                         Some((as_token, rename))
                     } else {
                         None
                     }
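
A sketch, not part of the diff, of the braces-only `macro` form this change accepts, assuming the `"full"` and `"parsing"` features; per the hack above, `args` holds the `$ $` sentinel in this case:

```rust
use syn::{Item, ItemMacro2};

fn main() {
    let item: Item = syn::parse_str("macro m { ($e:expr) => { $e } }").unwrap();
    match item {
        Item::Macro2(ItemMacro2 { ident, args, .. }) => {
            assert_eq!(ident, "m");
            assert_eq!(args.to_string(), "$ $"); // sentinel for "no parenthesized args"
        }
        _ => panic!("expected a macros 2.0 item"),
    }
}
```
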
@@ -1018,17 +1055,24 @@ pub mod parsing {
     }
 
     impl Parse for ItemConst {
         fn parse(input: ParseStream) -> Result<Self> {
             Ok(ItemConst {
                 attrs: input.call(Attribute::parse_outer)?,
                 vis: input.parse()?,
                 const_token: input.parse()?,
-                ident: input.parse()?,
+                ident: {
+                    let lookahead = input.lookahead1();
+                    if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
+                        input.call(Ident::parse_any)?
+                    } else {
+                        return Err(lookahead.error());
+                    }
+                },
                 colon_token: input.parse()?,
                 ty: input.parse()?,
                 eq_token: input.parse()?,
                 expr: input.parse()?,
                 semi_token: input.parse()?,
             })
         }
     }
@@ -1433,101 +1477,169 @@ pub mod parsing {
                     where_clause: where_clause,
                     ..generics
                 },
                 fields: fields,
             })
         }
     }
 
+    fn parse_trait_or_trait_alias(input: ParseStream) -> Result<Item> {
+        let (attrs, vis, trait_token, ident, generics) = parse_start_of_trait_alias(input)?;
+        let lookahead = input.lookahead1();
+        if lookahead.peek(token::Brace)
+            || lookahead.peek(Token![:])
+            || lookahead.peek(Token![where])
+        {
+            let unsafety = None;
+            let auto_token = None;
+            parse_rest_of_trait(
+                input,
+                attrs,
+                vis,
+                unsafety,
+                auto_token,
+                trait_token,
+                ident,
+                generics,
+            )
+            .map(Item::Trait)
+        } else if lookahead.peek(Token![=]) {
+            parse_rest_of_trait_alias(input, attrs, vis, trait_token, ident, generics)
+                .map(Item::TraitAlias)
+        } else {
+            Err(lookahead.error())
+        }
+    }
+
     impl Parse for ItemTrait {
         fn parse(input: ParseStream) -> Result<Self> {
             let attrs = input.call(Attribute::parse_outer)?;
             let vis: Visibility = input.parse()?;
             let unsafety: Option<Token![unsafe]> = input.parse()?;
             let auto_token: Option<Token![auto]> = input.parse()?;
             let trait_token: Token![trait] = input.parse()?;
             let ident: Ident = input.parse()?;
-            let mut generics: Generics = input.parse()?;
-            let colon_token: Option<Token![:]> = input.parse()?;
+            let generics: Generics = input.parse()?;
+            parse_rest_of_trait(
+                input,
+                attrs,
+                vis,
+                unsafety,
+                auto_token,
+                trait_token,
+                ident,
+                generics,
+            )
+        }
+    }
 
-            let mut supertraits = Punctuated::new();
-            if colon_token.is_some() {
-                loop {
-                    supertraits.push_value(input.parse()?);
-                    if input.peek(Token![where]) || input.peek(token::Brace) {
-                        break;
-                    }
-                    supertraits.push_punct(input.parse()?);
-                    if input.peek(Token![where]) || input.peek(token::Brace) {
-                        break;
-                    }
+    fn parse_rest_of_trait(
+        input: ParseStream,
+        attrs: Vec<Attribute>,
+        vis: Visibility,
+        unsafety: Option<Token![unsafe]>,
+        auto_token: Option<Token![auto]>,
+        trait_token: Token![trait],
+        ident: Ident,
+        mut generics: Generics,
+    ) -> Result<ItemTrait> {
+        let colon_token: Option<Token![:]> = input.parse()?;
+
+        let mut supertraits = Punctuated::new();
+        if colon_token.is_some() {
+            loop {
+                supertraits.push_value(input.parse()?);
+                if input.peek(Token![where]) || input.peek(token::Brace) {
+                    break;
+                }
+                supertraits.push_punct(input.parse()?);
+                if input.peek(Token![where]) || input.peek(token::Brace) {
+                    break;
                 }
             }
+        }
 
-            generics.where_clause = input.parse()?;
+        generics.where_clause = input.parse()?;
 
-            let content;
-            let brace_token = braced!(content in input);
-            let mut items = Vec::new();
-            while !content.is_empty() {
-                items.push(content.parse()?);
-            }
+        let content;
+        let brace_token = braced!(content in input);
+        let mut items = Vec::new();
+        while !content.is_empty() {
+            items.push(content.parse()?);
+        }
 
-            Ok(ItemTrait {
-                attrs: attrs,
-                vis: vis,
-                unsafety: unsafety,
-                auto_token: auto_token,
-                trait_token: trait_token,
-                ident: ident,
-                generics: generics,
-                colon_token: colon_token,
-                supertraits: supertraits,
-                brace_token: brace_token,
-                items: items,
-            })
-        }
+        Ok(ItemTrait {
+            attrs: attrs,
+            vis: vis,
+            unsafety: unsafety,
+            auto_token: auto_token,
+            trait_token: trait_token,
+            ident: ident,
+            generics: generics,
+            colon_token: colon_token,
+            supertraits: supertraits,
+            brace_token: brace_token,
+            items: items,
+        })
     }
 
     impl Parse for ItemTraitAlias {
         fn parse(input: ParseStream) -> Result<Self> {
-            let attrs = input.call(Attribute::parse_outer)?;
-            let vis: Visibility = input.parse()?;
-            let trait_token: Token![trait] = input.parse()?;
-            let ident: Ident = input.parse()?;
-            let mut generics: Generics = input.parse()?;
-            let eq_token: Token![=] = input.parse()?;
+            let (attrs, vis, trait_token, ident, generics) = parse_start_of_trait_alias(input)?;
+            parse_rest_of_trait_alias(input, attrs, vis, trait_token, ident, generics)
+        }
+    }
+
+    fn parse_start_of_trait_alias(
+        input: ParseStream,
+    ) -> Result<(Vec<Attribute>, Visibility, Token![trait], Ident, Generics)> {
+        let attrs = input.call(Attribute::parse_outer)?;
+        let vis: Visibility = input.parse()?;
+        let trait_token: Token![trait] = input.parse()?;
+        let ident: Ident = input.parse()?;
+        let generics: Generics = input.parse()?;
+        Ok((attrs, vis, trait_token, ident, generics))
+    }
+
+    fn parse_rest_of_trait_alias(
+        input: ParseStream,
+        attrs: Vec<Attribute>,
+        vis: Visibility,
+        trait_token: Token![trait],
+        ident: Ident,
+        mut generics: Generics,
+    ) -> Result<ItemTraitAlias> {
+        let eq_token: Token![=] = input.parse()?;
 
-            let mut bounds = Punctuated::new();
-            loop {
-                if input.peek(Token![where]) || input.peek(Token![;]) {
-                    break;
-                }
-                bounds.push_value(input.parse()?);
-                if input.peek(Token![where]) || input.peek(Token![;]) {
-                    break;
-                }
-                bounds.push_punct(input.parse()?);
+        let mut bounds = Punctuated::new();
+        loop {
+            if input.peek(Token![where]) || input.peek(Token![;]) {
+                break;
             }
+            bounds.push_value(input.parse()?);
+            if input.peek(Token![where]) || input.peek(Token![;]) {
+                break;
+            }
+            bounds.push_punct(input.parse()?);
+        }
 
-            generics.where_clause = input.parse()?;
-            let semi_token: Token![;] = input.parse()?;
+        generics.where_clause = input.parse()?;
+        let semi_token: Token![;] = input.parse()?;
 
-            Ok(ItemTraitAlias {
-                attrs: attrs,
-                vis: vis,
-                trait_token: trait_token,
-                ident: ident,
-                generics: generics,
-                eq_token: eq_token,
-                bounds: bounds,
-                semi_token: semi_token,
-            })
-        }
+        Ok(ItemTraitAlias {
+            attrs: attrs,
+            vis: vis,
+            trait_token: trait_token,
+            ident: ident,
+            generics: generics,
+            eq_token: eq_token,
+            bounds: bounds,
+            semi_token: semi_token,
+        })
     }
 
     impl Parse for TraitItem {
         fn parse(input: ParseStream) -> Result<Self> {
             let ahead = input.fork();
             ahead.call(Attribute::parse_outer)?;
 
             let lookahead = ahead.lookahead1();
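
A sketch of the trait-alias form that `parse_trait_or_trait_alias` above now routes to `Item::TraitAlias`; it assumes the `"full"` and `"parsing"` features:

```rust
use syn::Item;

fn main() {
    // A `=` after the trait header selects the alias branch of the dispatch.
    let item: Item = syn::parse_str("trait Alias = Clone + Send;").unwrap();
    match item {
        Item::TraitAlias(alias) => assert_eq!(alias.bounds.len(), 2),
        _ => panic!("expected a trait alias"),
    }
}
```
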
@@ -1707,17 +1819,30 @@ pub mod parsing {
     }
 
     impl Parse for ItemImpl {
         fn parse(input: ParseStream) -> Result<Self> {
             let outer_attrs = input.call(Attribute::parse_outer)?;
             let defaultness: Option<Token![default]> = input.parse()?;
             let unsafety: Option<Token![unsafe]> = input.parse()?;
             let impl_token: Token![impl ] = input.parse()?;
-            let generics: Generics = input.parse()?;
+
+            let has_generics = input.peek(Token![<])
+                && (input.peek2(Token![>])
+                    || input.peek2(Token![#])
+                    || (input.peek2(Ident) || input.peek2(Lifetime))
+                        && (input.peek3(Token![:])
+                            || input.peek3(Token![,])
+                            || input.peek3(Token![>])));
+            let generics: Generics = if has_generics {
+                input.parse()?
+            } else {
+                Generics::default()
+            };
+
             let trait_ = {
                 let ahead = input.fork();
                 if ahead.parse::<Option<Token![!]>>().is_ok()
                     && ahead.parse::<Path>().is_ok()
                     && ahead.parse::<Token![for]>().is_ok()
                 {
                     let polarity: Option<Token![!]> = input.parse()?;
                     let path: Path = input.parse()?;
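
The `has_generics` peek above only treats `<` as the start of a generic parameter list when it plausibly is one, so a qualified self type can follow `impl` directly. A sketch under those assumptions (`MyTrait` is a hypothetical name; requires the `"full"` and `"parsing"` features):

```rust
use syn::ItemImpl;

fn main() {
    // `<T: ...>` after `impl` is a parameter list and is parsed as generics.
    let generic: ItemImpl = syn::parse_str("impl<T: Clone> MyTrait for Vec<T> {}").unwrap();
    assert_eq!(generic.generics.params.len(), 1);

    // Here `<` begins a qualified type, not generics, so no parameters are parsed.
    let qualified: ItemImpl =
        syn::parse_str("impl <Vec<u8> as IntoIterator>::Item {}").unwrap();
    assert!(qualified.generics.params.is_empty());
}
```
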
@@ -2226,19 +2351,24 @@ mod printing {
     }
 
     impl ToTokens for ItemMacro2 {
         fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.vis.to_tokens(tokens);
             self.macro_token.to_tokens(tokens);
             self.ident.to_tokens(tokens);
-            self.paren_token.surround(tokens, |tokens| {
-                self.args.to_tokens(tokens);
-            });
+
+            // Hack: see comment in impl Parse for ItemMacro2.
+            if self.args.to_string() != "$ $" {
+                self.paren_token.surround(tokens, |tokens| {
+                    self.args.to_tokens(tokens);
+                });
+            }
+
             self.brace_token.surround(tokens, |tokens| {
                 self.body.to_tokens(tokens);
             });
         }
     }
 
     impl ToTokens for ItemVerbatim {
         fn to_tokens(&self, tokens: &mut TokenStream) {
--- a/third_party/rust/syn/src/keyword.rs
+++ b/third_party/rust/syn/src/keyword.rs
@@ -2,25 +2,20 @@
 /// were a keyword.
 ///
 /// # Usage
 ///
 /// As a convention, it is recommended that this macro be invoked within a
 /// module called `kw` or `keyword` and that the resulting parser be invoked
 /// with a `kw::` or `keyword::` prefix.
 ///
-/// ```
-/// # #[macro_use]
-/// # extern crate syn;
-/// #
+/// ```edition2018
 /// mod kw {
-///     custom_keyword!(whatever);
+///     syn::custom_keyword!(whatever);
 /// }
-/// #
-/// # fn main() {}
 /// ```
 ///
 /// The generated syntax tree node supports the following operations just like
 /// any built-in keyword token.
 ///
 /// - [Peeking] — `input.peek(kw::whatever)`
 ///
 /// - [Parsing] — `input.parse::<kw::whatever>()?`
@@ -43,26 +38,23 @@
 /// `bool`, the value may be either `true` or `false`. If `str`, the value may
 /// be any string literal.
 ///
 /// The symbols `bool` and `str` are not reserved keywords in Rust so these are
 /// not considered keywords in the `syn::token` module. Like any other
 /// identifier that is not a keyword, these can be declared as custom keywords
 /// by crates that need to use them as such.
 ///
-/// ```
-/// #[macro_use]
-/// extern crate syn;
-///
-/// use syn::{LitBool, LitStr};
-/// use syn::parse::{Parse, ParseStream, Result};
+/// ```edition2018
+/// use syn::{LitBool, LitStr, Result, Token};
+/// use syn::parse::{Parse, ParseStream};
 ///
 /// mod kw {
-///     custom_keyword!(bool);
-///     custom_keyword!(str);
+///     syn::custom_keyword!(bool);
+///     syn::custom_keyword!(str);
 /// }
 ///
 /// enum Argument {
 ///     Bool {
 ///         bool_token: kw::bool,
 ///         eq_token: Token![=],
 ///         value: LitBool,
 ///     },
@@ -88,18 +80,16 @@
 ///                 eq_token: input.parse()?,
 ///                 value: input.parse()?,
 ///             })
 ///         } else {
 ///             Err(lookahead.error())
 ///         }
 ///     }
 /// }
-/// #
-/// # fn main() {}
 /// ```
 #[macro_export(local_inner_macros)]
 macro_rules! custom_keyword {
     ($ident:ident) => {
         pub struct $ident {
             pub span: $crate::export::Span,
         }
 
--- a/third_party/rust/syn/src/lib.rs
+++ b/third_party/rust/syn/src/lib.rs
@@ -1,16 +1,8 @@
-// Copyright 2018 Syn Developers
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
 //! Syn is a parsing library for parsing a stream of Rust tokens into a syntax
 //! tree of Rust source code.
 //!
 //! Currently this library is geared toward use in Rust procedural macros, but
 //! contains some APIs that may be useful more generally.
 //!
 //! - **Data structures** — Syn provides a complete syntax tree that can
 //!   represent any valid Rust source code. The syntax tree is rooted at
@@ -67,112 +59,87 @@
 //! [dependencies]
 //! syn = "0.15"
 //! quote = "0.6"
 //!
 //! [lib]
 //! proc-macro = true
 //! ```
 //!
-//! ```rust
-//! #[macro_use]
-//! extern crate quote;
-//! #[macro_use]
-//! extern crate syn;
-//!
+//! ```edition2018
 //! extern crate proc_macro;
 //!
 //! use proc_macro::TokenStream;
-//! use syn::DeriveInput;
+//! use quote::quote;
+//! use syn::{parse_macro_input, DeriveInput};
 //!
 //! # const IGNORE_TOKENS: &str = stringify! {
 //! #[proc_macro_derive(MyMacro)]
 //! # };
 //! pub fn my_macro(input: TokenStream) -> TokenStream {
 //!     // Parse the input tokens into a syntax tree
 //!     let input = parse_macro_input!(input as DeriveInput);
 //!
 //!     // Build the output, possibly using quasi-quotation
 //!     let expanded = quote! {
 //!         // ...
 //!     };
 //!
 //!     // Hand the output tokens back to the compiler
 //!     TokenStream::from(expanded)
 //! }
-//! #
-//! # fn main() {}
 //! ```
 //!
 //! The [`heapsize`] example directory shows a complete working Macros 1.1
-//! implementation of a custom derive. It works on any Rust compiler \>=1.15.0.
+//! implementation of a custom derive. It works on any Rust compiler 1.15+.
 //! The example derives a `HeapSize` trait which computes an estimate of the
 //! amount of heap memory owned by a value.
 //!
 //! [`heapsize`]: https://github.com/dtolnay/syn/tree/master/examples/heapsize
 //!
-//! ```rust
+//! ```edition2018
 //! pub trait HeapSize {
 //!     /// Total number of bytes of heap memory owned by `self`.
 //!     fn heap_size_of_children(&self) -> usize;
 //! }
 //! ```
 //!
 //! The custom derive allows users to write `#[derive(HeapSize)]` on data
 //! structures in their program.
 //!
-//! ```rust
+//! ```edition2018
 //! # const IGNORE_TOKENS: &str = stringify! {
 //! #[derive(HeapSize)]
 //! # };
 //! struct Demo<'a, T: ?Sized> {
 //!     a: Box<T>,
 //!     b: u8,
 //!     c: &'a str,
 //!     d: String,
 //! }
 //! ```
 //!
 //! ## Spans and error reporting
 //!
-//! The [`heapsize2`] example directory is an extension of the `heapsize`
-//! example that demonstrates some of the hygiene and error reporting properties
-//! of Macros 2.0. This example currently requires a nightly Rust compiler
-//! \>=1.24.0-nightly but we are working to stabilize all of the APIs involved.
-//!
-//! [`heapsize2`]: https://github.com/dtolnay/syn/tree/master/examples/heapsize2
-//!
 //! The token-based procedural macro API provides great control over where the
 //! compiler's error messages are displayed in user code. Consider the error the
 //! user sees if one of their field types does not implement `HeapSize`.
 //!
-//! ```rust
+//! ```edition2018
 //! # const IGNORE_TOKENS: &str = stringify! {
 //! #[derive(HeapSize)]
 //! # };
 //! struct Broken {
 //!     ok: String,
 //!     bad: std::thread::Thread,
 //! }
 //! ```
 //!
-//! In the Macros 1.1 string-based procedural macro world, the resulting error
-//! would point unhelpfully to the invocation of the derive macro and not to the
-//! actual problematic field.
-//!
-//! ```text
-//! error[E0599]: no method named `heap_size_of_children` found for type `std::thread::Thread` in the current scope
-//!  --> src/main.rs:4:10
-//!   |
-//! 4 | #[derive(HeapSize)]
-//!   |          ^^^^^^^^
-//! ```
-//!
 //! By tracking span information all the way through the expansion of a
-//! procedural macro as shown in the `heapsize2` example, token-based macros in
+//! procedural macro as shown in the `heapsize` example, token-based macros in
 //! Syn are able to trigger errors that directly pinpoint the source of the
 //! problem.
 //!
 //! ```text
 //! error[E0277]: the trait bound `std::thread::Thread: HeapSize` is not satisfied
 //!  --> src/main.rs:7:5
 //!   |
 //! 7 |     bad: std::thread::Thread,
@@ -185,17 +152,17 @@
 //! `functionlike!(...)` procedural macro in which the input tokens are parsed
 //! using Syn's parsing API.
 //!
 //! [`lazy-static`]: https://github.com/dtolnay/syn/tree/master/examples/lazy-static
 //!
 //! The example reimplements the popular `lazy_static` crate from crates.io as a
 //! procedural macro.
 //!
-//! ```
+//! ```edition2018
 //! # macro_rules! lazy_static {
 //! #     ($($tt:tt)*) => {}
 //! # }
 //! #
 //! lazy_static! {
 //!     static ref USERNAME: Regex = Regex::new("^[a-z0-9_-]{3,16}$").unwrap();
 //! }
 //! ```
@@ -250,54 +217,52 @@
 //! - **`clone-impls`** *(enabled by default)* — Clone impls for all syntax tree
 //!   types.
 //! - **`extra-traits`** — Debug, Eq, PartialEq, Hash impls for all syntax tree
 //!   types.
 //! - **`proc-macro`** *(enabled by default)* — Runtime dependency on the
 //!   dynamic library libproc_macro from rustc toolchain.
 
 // Syn types in rustdoc of other crates get linked to here.
-#![doc(html_root_url = "https://docs.rs/syn/0.15.7")]
+#![doc(html_root_url = "https://docs.rs/syn/0.15.24")]
 #![cfg_attr(feature = "cargo-clippy", allow(renamed_and_removed_lints))]
 #![cfg_attr(feature = "cargo-clippy", deny(clippy, clippy_pedantic))]
 // Ignored clippy lints.
 #![cfg_attr(
     feature = "cargo-clippy",
     allow(
         block_in_if_condition_stmt,
         const_static_lifetime,
         cyclomatic_complexity,
+        deprecated_cfg_attr,
         doc_markdown,
         eval_order_dependence,
         large_enum_variant,
-        match_bool,
+        needless_pass_by_value,
         never_loop,
-        redundant_closure,
-        needless_pass_by_value,
         redundant_field_names,
-        trivially_copy_pass_by_ref
+        too_many_arguments,
     )
 )]
 // Ignored clippy_pedantic lints.
 #![cfg_attr(
     feature = "cargo-clippy",
     allow(
         cast_possible_truncation,
         cast_possible_wrap,
         empty_enum,
         if_not_else,
-        indexing_slicing,
         items_after_statements,
+        module_name_repetitions,
         shadow_unrelated,
         similar_names,
         single_match_else,
-        stutter,
         unseparated_literal_suffix,
         use_self,
-        used_underscore_binding
+        used_underscore_binding,
     )
 )]
 
 #[cfg(all(
     not(all(target_arch = "wasm32", target_os = "unknown")),
     feature = "proc-macro"
 ))]
 extern crate proc_macro;
@@ -320,17 +285,17 @@ pub mod group;
 pub mod token;
 
 mod ident;
 pub use ident::Ident;
 
 #[cfg(any(feature = "full", feature = "derive"))]
 mod attr;
 #[cfg(any(feature = "full", feature = "derive"))]
-pub use attr::{AttrStyle, Attribute, Meta, MetaList, MetaNameValue, NestedMeta};
+pub use attr::{AttrStyle, Attribute, AttributeArgs, Meta, MetaList, MetaNameValue, NestedMeta};
 
 #[cfg(any(feature = "full", feature = "derive"))]
 mod data;
 #[cfg(any(feature = "full", feature = "derive"))]
 pub use data::{
     Field, Fields, FieldsNamed, FieldsUnnamed, Variant, VisCrate, VisPublic, VisRestricted,
     Visibility,
 };
@@ -357,20 +322,17 @@ pub use expr::{
 #[cfg(any(feature = "full", feature = "derive"))]
 mod generics;
 #[cfg(any(feature = "full", feature = "derive"))]
 pub use generics::{
     BoundLifetimes, ConstParam, GenericParam, Generics, LifetimeDef, PredicateEq,
     PredicateLifetime, PredicateType, TraitBound, TraitBoundModifier, TypeParam, TypeParamBound,
     WhereClause, WherePredicate,
 };
-#[cfg(all(
-    any(feature = "full", feature = "derive"),
-    feature = "printing"
-))]
+#[cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))]
 pub use generics::{ImplGenerics, Turbofish, TypeGenerics};
 
 #[cfg(feature = "full")]
 mod item;
 #[cfg(feature = "full")]
 pub use item::{
     ArgCaptured, ArgSelf, ArgSelfRef, FnArg, FnDecl, ForeignItem, ForeignItemFn, ForeignItemMacro,
     ForeignItemStatic, ForeignItemType, ForeignItemVerbatim, ImplItem, ImplItemConst,
@@ -430,41 +392,47 @@ pub use path::{
     ParenthesizedGenericArguments, Path, PathArguments, PathSegment, QSelf,
 };
 
 #[cfg(feature = "parsing")]
 pub mod buffer;
 #[cfg(feature = "parsing")]
 pub mod ext;
 pub mod punctuated;
-#[cfg(all(
-    any(feature = "full", feature = "derive"),
-    feature = "extra-traits"
-))]
+#[cfg(all(any(feature = "full", feature = "derive"), feature = "extra-traits"))]
 mod tt;
 
 // Not public API except the `parse_quote!` macro.
 #[cfg(feature = "parsing")]
 #[doc(hidden)]
 pub mod parse_quote;
 
+// Not public API except the `parse_macro_input!` macro.
+#[cfg(all(
+    not(all(target_arch = "wasm32", target_os = "unknown")),
+    feature = "parsing",
+    feature = "proc-macro"
+))]
+#[doc(hidden)]
+pub mod parse_macro_input;
+
 #[cfg(all(feature = "parsing", feature = "printing"))]
 pub mod spanned;
 
 mod gen {
     /// Syntax tree traversal to walk a shared borrow of a syntax tree.
     ///
     /// Each method of the [`Visit`] trait is a hook that can be overridden to
     /// customize the behavior when visiting the corresponding type of node. By
     /// default, every method recursively visits the substructure of the input
     /// by invoking the right visitor method of each of its fields.
     ///
     /// [`Visit`]: trait.Visit.html
     ///
-    /// ```rust
+    /// ```edition2018
     /// # use syn::{Attribute, BinOp, Expr, ExprBinary};
     /// #
     /// pub trait Visit<'ast> {
     ///     /* ... */
     ///
     ///     fn visit_expr_binary(&mut self, node: &'ast ExprBinary) {
     ///         for attr in &node.attrs {
     ///             self.visit_attribute(attr);
@@ -490,17 +458,17 @@ mod gen {
     ///
     /// Each method of the [`VisitMut`] trait is a hook that can be overridden
     /// to customize the behavior when mutating the corresponding type of node.
     /// By default, every method recursively visits the substructure of the
     /// input by invoking the right visitor method of each of its fields.
     ///
     /// [`VisitMut`]: trait.VisitMut.html
     ///
-    /// ```rust
+    /// ```edition2018
     /// # use syn::{Attribute, BinOp, Expr, ExprBinary};
     /// #
     /// pub trait VisitMut {
     ///     /* ... */
     ///
     ///     fn visit_expr_binary_mut(&mut self, node: &mut ExprBinary) {
     ///         for attr in &mut node.attrs {
     ///             self.visit_attribute_mut(attr);
@@ -526,17 +494,17 @@ mod gen {
     ///
     /// Each method of the [`Fold`] trait is a hook that can be overridden to
     /// customize the behavior when transforming the corresponding type of node.
     /// By default, every method recursively visits the substructure of the
     /// input by invoking the right visitor method of each of its fields.
     ///
     /// [`Fold`]: trait.Fold.html
     ///
-    /// ```rust
+    /// ```edition2018
     /// # use syn::{Attribute, BinOp, Expr, ExprBinary};
     /// #
     /// pub trait Fold {
     ///     /* ... */
     ///
     ///     fn fold_expr_binary(&mut self, node: ExprBinary) -> ExprBinary {
     ///         ExprBinary {
     ///             attrs: node.attrs
@@ -575,34 +543,31 @@ mod keyword;
 #[cfg(feature = "parsing")]
 mod lookahead;
 
 #[cfg(feature = "parsing")]
 pub mod parse;
 
 mod span;
 
-#[cfg(all(
-    any(feature = "full", feature = "derive"),
-    feature = "printing"
-))]
+#[cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))]
 mod print;
 
+mod thread;
+
 ////////////////////////////////////////////////////////////////////////////////
 
 #[cfg(any(feature = "parsing", feature = "full", feature = "derive"))]
 #[allow(non_camel_case_types)]
 struct private;
 
 ////////////////////////////////////////////////////////////////////////////////
 
-#[cfg(feature = "parsing")]
 mod error;
-#[cfg(feature = "parsing")]
-use error::Error;
+pub use error::{Error, Result};
 
 /// Parse tokens of source code into the chosen syntax tree node.
 ///
 /// This is preferred over parsing a string because tokens are able to preserve
 /// information about where in the user's code they were originally written (the
 /// "span" of the token), possibly allowing the compiler to produce better error
 /// messages.
 ///
@@ -612,24 +577,21 @@ use error::Error;
 ///
 /// [`syn::parse2`]: fn.parse2.html
 ///
 /// *This function is available if Syn is built with both the `"parsing"` and
 /// `"proc-macro"` features.*
 ///
 /// # Examples
 ///
-/// ```rust
-/// #[macro_use]
-/// extern crate quote;
-///
+/// ```edition2018
 /// extern crate proc_macro;
-/// extern crate syn;
 ///
 /// use proc_macro::TokenStream;
+/// use quote::quote;
 /// use syn::DeriveInput;
 ///
 /// # const IGNORE_TOKENS: &str = stringify! {
 /// #[proc_macro_derive(MyMacro)]
 /// # };
 /// pub fn my_macro(input: TokenStream) -> TokenStream {
 ///     // Parse the tokens into a syntax tree
 ///     let ast: DeriveInput = syn::parse(input).unwrap();
@@ -637,73 +599,70 @@ use error::Error;
 ///     // Build the output, possibly using quasi-quotation
 ///     let expanded = quote! {
 ///         /* ... */
 ///     };
 ///
 ///     // Convert into a token stream and return it
 ///     expanded.into()
 /// }
-/// #
-/// # fn main() {}
 /// ```
 #[cfg(all(
     not(all(target_arch = "wasm32", target_os = "unknown")),
     feature = "parsing",
     feature = "proc-macro"
 ))]
-pub fn parse<T: parse::Parse>(tokens: proc_macro::TokenStream) -> Result<T, Error> {
+pub fn parse<T: parse::Parse>(tokens: proc_macro::TokenStream) -> Result<T> {
     parse::Parser::parse(T::parse, tokens)
 }
 
 /// Parse a proc-macro2 token stream into the chosen syntax tree node.
 ///
 /// This function parses a `proc_macro2::TokenStream` which is commonly useful
 /// when the input comes from a node of the Syn syntax tree, for example the tts
 /// of a [`Macro`] node. When in a procedural macro parsing the
 /// `proc_macro::TokenStream` provided by the compiler, use [`syn::parse`]
 /// instead.
 ///
 /// [`Macro`]: struct.Macro.html
 /// [`syn::parse`]: fn.parse.html
 ///
 /// *This function is available if Syn is built with the `"parsing"` feature.*
 #[cfg(feature = "parsing")]
-pub fn parse2<T: parse::Parse>(tokens: proc_macro2::TokenStream) -> Result<T, Error> {
+pub fn parse2<T: parse::Parse>(tokens: proc_macro2::TokenStream) -> Result<T> {
     parse::Parser::parse2(T::parse, tokens)
 }
 
 /// Parse a string of Rust code into the chosen syntax tree node.
 ///
 /// *This function is available if Syn is built with the `"parsing"` feature.*
 ///
 /// # Hygiene
 ///
 /// Every span in the resulting syntax tree will be set to resolve at the macro
 /// call site.
 ///
 /// # Examples
 ///
-/// ```rust
-/// # extern crate syn;
-/// #
-/// use syn::Expr;
-/// use syn::parse::Result;
+/// ```edition2018
+/// use syn::{Expr, Result};
 ///
 /// fn run() -> Result<()> {
 ///     let code = "assert_eq!(u8::max_value(), 255)";
 ///     let expr = syn::parse_str::<Expr>(code)?;
 ///     println!("{:#?}", expr);
 ///     Ok(())
 /// }
 /// #
-/// # fn main() { run().unwrap() }
+/// # fn main() {
+/// #     run().unwrap();
+/// # }
 /// ```
 #[cfg(feature = "parsing")]
-pub fn parse_str<T: parse::Parse>(s: &str) -> Result<T, Error> {
+pub fn parse_str<T: parse::Parse>(s: &str) -> Result<T> {
     parse::Parser::parse_str(T::parse, s)
 }
 
 // FIXME the name parse_file makes it sound like you might pass in a path to a
 // file, rather than the content.
 /// Parse the content of a file of Rust code.
 ///
 /// This is different from `syn::parse_str::<File>(content)` in two ways:
@@ -712,19 +671,17 @@ pub fn parse_str<T: parse::Parse>(s: &st
 /// - It preserves the shebang line of the file, such as `#!/usr/bin/env rustx`.
 ///
 /// If present, either of these would be an error using `from_str`.
 ///
 /// *This function is available if Syn is built with the `"parsing"` and `"full"` features.*
 ///
 /// # Examples
 ///
-/// ```rust,no_run
-/// # extern crate syn;
-/// #
+/// ```edition2018,no_run
 /// use std::error::Error;
 /// use std::fs::File;
 /// use std::io::Read;
 ///
 /// fn run() -> Result<(), Box<Error>> {
 ///     let mut file = File::open("path/to/code.rs")?;
 ///     let mut content = String::new();
 ///     file.read_to_string(&mut content)?;
@@ -733,20 +690,22 @@ pub fn parse_str<T: parse::Parse>(s: &st
 ///     if let Some(shebang) = ast.shebang {
 ///         println!("{}", shebang);
 ///     }
 ///     println!("{} items", ast.items.len());
 ///
 ///     Ok(())
 /// }
 /// #
-/// # fn main() { run().unwrap() }
+/// # fn main() {
+/// #     run().unwrap();
+/// # }
 /// ```
 #[cfg(all(feature = "parsing", feature = "full"))]
-pub fn parse_file(mut content: &str) -> Result<File, Error> {
+pub fn parse_file(mut content: &str) -> Result<File> {
     // Strip the BOM if it is present
     const BOM: &'static str = "\u{feff}";
     if content.starts_with(BOM) {
         content = &content[BOM.len()..];
     }
 
     let mut shebang = None;
     if content.starts_with("#!") && !content.starts_with("#![") {
@@ -758,63 +717,8 @@ pub fn parse_file(mut content: &str) -> 
             content = "";
         }
     }
 
     let mut file: File = parse_str(content)?;
     file.shebang = shebang;
     Ok(file)
 }
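
A minimal sketch of calling `parse_file` through the crate-level `syn::Result` alias re-exported above (assumes Syn is built with the `parsing` and `full` features; the input string is made up for illustration):

```rust
use syn::{File, Result};

fn run() -> Result<()> {
    // parse_file keeps the shebang line and strips a leading BOM, as the
    // function body above shows.
    let source = "#!/usr/bin/env rustx\nfn main() {}\n";
    let ast: File = syn::parse_file(source)?;
    if let Some(shebang) = &ast.shebang {
        println!("shebang: {}", shebang);
    }
    println!("{} items", ast.items.len());
    Ok(())
}

fn main() {
    run().unwrap();
}
```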
-
-/// Parse the input TokenStream of a macro, triggering a compile error if the
-/// tokens fail to parse.
-///
-/// Refer to the [`parse` module] documentation for more details about parsing
-/// in Syn.
-///
-/// [`parse` module]: parse/index.html
-///
-/// # Intended usage
-///
-/// ```rust
-/// #[macro_use]
-/// extern crate syn;
-///
-/// extern crate proc_macro;
-///
-/// use proc_macro::TokenStream;
-/// use syn::parse::{Parse, ParseStream, Result};
-///
-/// struct MyMacroInput {
-///     /* ... */
-/// }
-///
-/// impl Parse for MyMacroInput {
-///     fn parse(input: ParseStream) -> Result<Self> {
-///         /* ... */
-/// #       Ok(MyMacroInput {})
-///     }
-/// }
-///
-/// # const IGNORE: &str = stringify! {
-/// #[proc_macro]
-/// # };
-/// pub fn my_macro(tokens: TokenStream) -> TokenStream {
-///     let input = parse_macro_input!(tokens as MyMacroInput);
-///
-///     /* ... */
-/// #   "".parse().unwrap()
-/// }
-/// #
-/// # fn main() {}
-/// ```
-#[cfg(feature = "proc-macro")]
-#[macro_export]
-macro_rules! parse_macro_input {
-    ($tokenstream:ident as $ty:ty) => {
-        match $crate::parse::<$ty>($tokenstream) {
-            $crate::export::Ok(data) => data,
-            $crate::export::Err(err) => {
-                return $crate::export::TokenStream::from(err.to_compile_error());
-            }
-        };
-    };
-}
--- a/third_party/rust/syn/src/lifetime.rs
+++ b/third_party/rust/syn/src/lifetime.rs
@@ -1,16 +1,8 @@
-// Copyright 2018 Syn Developers
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
 use std::cmp::Ordering;
 use std::fmt::{self, Display};
 use std::hash::{Hash, Hasher};
 
 use proc_macro2::{Ident, Span};
 use unicode_xid::UnicodeXID;
 
 #[cfg(feature = "parsing")]
@@ -38,20 +30,17 @@ pub struct Lifetime {
 
 impl Lifetime {
     /// # Panics
     ///
     /// Panics if the lifetime does not conform to the bulleted rules above.
     ///
     /// # Invocation
     ///
-    /// ```
-    /// # extern crate proc_macro2;
-    /// # extern crate syn;
-    /// #
+    /// ```edition2018
     /// # use proc_macro2::Span;
     /// # use syn::Lifetime;
     /// #
     /// # fn f() -> Lifetime {
     /// Lifetime::new("'a", Span::call_site())
     /// # }
     /// ```
     pub fn new(symbol: &str, span: Span) -> Self {
--- a/third_party/rust/syn/src/lit.rs
+++ b/third_party/rust/syn/src/lit.rs
@@ -1,16 +1,8 @@
-// Copyright 2018 Syn Developers
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
 use proc_macro2::{Literal, Span};
 use std::str;
 
 #[cfg(feature = "printing")]
 use proc_macro2::Ident;
 
 #[cfg(feature = "parsing")]
 use proc_macro2::TokenStream;
@@ -124,45 +116,33 @@ impl LitStr {
     }
 
     /// Parse a syntax tree node from the content of this string literal.
     ///
     /// All spans in the syntax tree will point to the span of this `LitStr`.
     ///
     /// # Example
     ///
-    /// ```
-    /// # extern crate proc_macro2;
-    /// # extern crate syn;
-    /// #
+    /// ```edition2018
     /// use proc_macro2::Span;
-    /// use syn::{Attribute, Ident, Lit, Meta, MetaNameValue, Path};
-    /// use syn::parse::{Error, Result};
+    /// use syn::{Attribute, Error, Ident, Lit, Meta, MetaNameValue, Path, Result};
     ///
     /// // Parses the path from an attribute that looks like:
     /// //
     /// //     #[path = "a::b::c"]
     /// //
-    /// // or returns the path `Self` as a default if the attribute is not of
-    /// // that form.
-    /// fn get_path(attr: &Attribute) -> Result<Path> {
-    ///     let default = || Path::from(Ident::new("Self", Span::call_site()));
-    ///
-    ///     let meta = match attr.interpret_meta() {
-    ///         Some(meta) => meta,
-    ///         None => return Ok(default()),
-    ///     };
-    ///
-    ///     if meta.name() != "path" {
-    ///         return Ok(default());
+    /// // or returns `None` if the input is some other attribute.
+    /// fn get_path(attr: &Attribute) -> Result<Option<Path>> {
+    ///     if !attr.path.is_ident("path") {
+    ///         return Ok(None);
     ///     }
     ///
-    ///     match meta {
+    ///     match attr.parse_meta()? {
     ///         Meta::NameValue(MetaNameValue { lit: Lit::Str(lit_str), .. }) => {
-    ///             lit_str.parse()
+    ///             lit_str.parse().map(Some)
     ///         }
     ///         _ => {
     ///             let error_span = attr.bracket_token.span;
     ///             let message = "expected #[path = \"...\"]";
     ///             Err(Error::new(error_span, message))
     ///         }
     ///     }
     /// }
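
A hedged sketch driving the updated `get_path` example end to end, using `parse_meta` and `LitStr::parse` the same way the doc example above does (the attribute text is hypothetical):

```rust
use syn::{DeriveInput, Lit, Meta, MetaNameValue, Path};

fn main() {
    let input: DeriveInput = syn::parse_str("#[path = \"a::b::c\"] struct S;").unwrap();

    // parse_meta replaces the interpret_meta call used by the previous
    // version of the example.
    let meta = input.attrs[0].parse_meta().unwrap();
    if let Meta::NameValue(MetaNameValue { lit: Lit::Str(lit_str), .. }) = meta {
        // LitStr::parse re-parses the string contents as a syntax tree node.
        let path: Path = lit_str.parse().unwrap();
        assert_eq!(path.segments.len(), 3);
    }
}
```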
@@ -651,30 +631,34 @@ mod value {
             match value::byte(&value, 0) {
                 b'"' | b'r' => return Lit::Str(LitStr { token: token }),
                 b'b' => match value::byte(&value, 1) {
                     b'"' | b'r' => return Lit::ByteStr(LitByteStr { token: token }),
                     b'\'' => return Lit::Byte(LitByte { token: token }),
                     _ => {}
                 },
                 b'\'' => return Lit::Char(LitChar { token: token }),
-                b'0'...b'9' => if number_is_int(&value) {
-                    return Lit::Int(LitInt { token: token });
-                } else if number_is_float(&value) {
-                    return Lit::Float(LitFloat { token: token });
-                } else {
-                    // number overflow
-                    return Lit::Verbatim(LitVerbatim { token: token });
-                },
-                _ => if value == "true" || value == "false" {
-                    return Lit::Bool(LitBool {
-                        value: value == "true",
-                        span: token.span(),
-                    });
-                },
+                b'0'...b'9' => {
+                    if number_is_int(&value) {
+                        return Lit::Int(LitInt { token: token });
+                    } else if number_is_float(&value) {
+                        return Lit::Float(LitFloat { token: token });
+                    } else {
+                        // number overflow
+                        return Lit::Verbatim(LitVerbatim { token: token });
+                    }
+                }
+                _ => {
+                    if value == "true" || value == "false" {
+                        return Lit::Bool(LitBool {
+                            value: value == "true",
+                            span: token.span(),
+                        });
+                    }
+                }
             }
 
             panic!("Unrecognized literal: {}", value);
         }
     }
 
     fn number_is_int(value: &str) -> bool {
         if number_is_float(value) {
@@ -949,22 +933,23 @@ mod value {
 
     fn backslash_x<S>(s: &S) -> (u8, &S)
     where
         S: Index<RangeFrom<usize>, Output = S> + AsRef<[u8]> + ?Sized,
     {
         let mut ch = 0;
         let b0 = byte(s, 0);
         let b1 = byte(s, 1);
-        ch += 0x10 * match b0 {
-            b'0'...b'9' => b0 - b'0',
-            b'a'...b'f' => 10 + (b0 - b'a'),
-            b'A'...b'F' => 10 + (b0 - b'A'),
-            _ => panic!("unexpected non-hex character after \\x"),
-        };
+        ch += 0x10
+            * match b0 {
+                b'0'...b'9' => b0 - b'0',
+                b'a'...b'f' => 10 + (b0 - b'a'),
+                b'A'...b'F' => 10 + (b0 - b'A'),
+                _ => panic!("unexpected non-hex character after \\x"),
+            };
         ch += match b1 {
             b'0'...b'9' => b1 - b'0',
             b'a'...b'f' => 10 + (b1 - b'a'),
             b'A'...b'F' => 10 + (b1 - b'A'),
             _ => panic!("unexpected non-hex character after \\x"),
         };
         (ch, &s[2..])
     }
--- a/third_party/rust/syn/src/lookahead.rs
+++ b/third_party/rust/syn/src/lookahead.rs
@@ -16,22 +16,19 @@ use token::Token;
 ///
 /// Use [`ParseStream::lookahead1`] to construct this object.
 ///
 /// [`ParseStream::peek`]: struct.ParseBuffer.html#method.peek
 /// [`ParseStream::lookahead1`]: struct.ParseBuffer.html#method.lookahead1
 ///
 /// # Example
 ///
-/// ```
-/// #[macro_use]
-/// extern crate syn;
-///
-/// use syn::{ConstParam, Ident, Lifetime, LifetimeDef, TypeParam};
-/// use syn::parse::{Parse, ParseStream, Result};
+/// ```edition2018
+/// use syn::{ConstParam, Ident, Lifetime, LifetimeDef, Result, Token, TypeParam};
+/// use syn::parse::{Parse, ParseStream};
 ///
 /// // A generic parameter, a single one of the comma-separated elements inside
 /// // angle brackets in:
 /// //
 /// //     fn f<T: Clone, 'a, 'b: 'a, const N: usize>() { ... }
 /// //
 /// // On invalid input, lookahead gives us a reasonable error message.
 /// //
@@ -54,18 +51,16 @@ use token::Token;
 ///             input.parse().map(GenericParam::Lifetime)
 ///         } else if lookahead.peek(Token![const]) {
 ///             input.parse().map(GenericParam::Const)
 ///         } else {
 ///             Err(lookahead.error())
 ///         }
 ///     }
 /// }
-/// #
-/// # fn main() {}
 /// ```
 pub struct Lookahead1<'a> {
     scope: Span,
     cursor: Cursor<'a>,
     comparisons: RefCell<Vec<&'static str>>,
 }
 
 pub fn new(scope: Span, cursor: Cursor) -> Lookahead1 {
@@ -109,21 +104,23 @@ impl<'a> Lookahead1<'a> {
 
     /// Triggers an error at the current position of the parse stream.
     ///
     /// The error message will identify all of the expected token types that
     /// have been peeked against this lookahead instance.
     pub fn error(self) -> Error {
         let comparisons = self.comparisons.borrow();
         match comparisons.len() {
-            0 => if self.cursor.eof() {
-                Error::new(self.scope, "unexpected end of input")
-            } else {
-                Error::new(self.cursor.span(), "unexpected token")
-            },
+            0 => {
+                if self.cursor.eof() {
+                    Error::new(self.scope, "unexpected end of input")
+                } else {
+                    Error::new(self.cursor.span(), "unexpected token")
+                }
+            }
             1 => {
                 let message = format!("expected {}", comparisons[0]);
                 error::new_at(self.scope, self.cursor, message)
             }
             2 => {
                 let message = format!("expected {} or {}", comparisons[0], comparisons[1]);
                 error::new_at(self.scope, self.cursor, message)
             }
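
For illustration, a small self-contained parser that exercises the `error()` path shown above; the `IdentOrLifetime` type is made up for this sketch, and the message text comes from the `format!("expected {} or {}", ...)` arm:

```rust
use syn::parse::{Parse, ParseStream};
use syn::{Ident, Lifetime, Result};

// Accepts either an identifier or a lifetime; anything else goes through
// lookahead.error(), which combines the peeked token names into one message.
struct IdentOrLifetime;

impl Parse for IdentOrLifetime {
    fn parse(input: ParseStream) -> Result<Self> {
        let lookahead = input.lookahead1();
        if lookahead.peek(Ident) {
            input.parse::<Ident>()?;
            Ok(IdentOrLifetime)
        } else if lookahead.peek(Lifetime) {
            input.parse::<Lifetime>()?;
            Ok(IdentOrLifetime)
        } else {
            Err(lookahead.error())
        }
    }
}

fn main() {
    assert!(syn::parse_str::<IdentOrLifetime>("foo").is_ok());
    assert!(syn::parse_str::<IdentOrLifetime>("'a").is_ok());

    // Prints something like "expected identifier or lifetime".
    let err = syn::parse_str::<IdentOrLifetime>("123").unwrap_err();
    println!("{}", err);
}
```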
--- a/third_party/rust/syn/src/mac.rs
+++ b/third_party/rust/syn/src/mac.rs
@@ -1,16 +1,8 @@
-// Copyright 2018 Syn Developers
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
 use super::*;
 use proc_macro2::TokenStream;
 #[cfg(feature = "parsing")]
 use proc_macro2::{Delimiter, TokenTree};
 use token::{Brace, Bracket, Paren};
 
 #[cfg(feature = "parsing")]
 use parse::{ParseStream, Result};
--- a/third_party/rust/syn/src/macros.rs
+++ b/third_party/rust/syn/src/macros.rs
@@ -1,16 +1,8 @@
-// Copyright 2018 Syn Developers
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
 #[cfg(any(feature = "full", feature = "derive"))]
 macro_rules! ast_struct {
     (
         $(#[$attr:meta])*
         pub struct $name:ident #full $($rest:tt)*
     ) => {
         #[cfg(feature = "full")]
         $(#[$attr])*
@@ -108,20 +100,17 @@ macro_rules! ast_enum_of_structs {
             $($remaining)*
             ()
             tokens
             $name { $($variant $( [$($rest)*] )*,)* }
         }
     )
 }
 
-#[cfg(all(
-    feature = "printing",
-    any(feature = "full", feature = "derive")
-))]
+#[cfg(all(feature = "printing", any(feature = "full", feature = "derive")))]
 macro_rules! generate_to_tokens {
     (do_not_generate_to_tokens $($foo:tt)*) => ();
 
     (($($arms:tt)*) $tokens:ident $name:ident { $variant:ident, $($next:tt)*}) => {
         generate_to_tokens!(
             ($($arms)* $name::$variant => {})
             $tokens $name { $($next)* }
         );
@@ -147,21 +136,17 @@ macro_rules! generate_to_tokens {
 
 #[cfg(all(feature = "printing", feature = "full"))]
 macro_rules! to_tokens_call {
     ($e:ident, $tokens:ident, $($rest:tt)*) => {
         $e.to_tokens($tokens)
     };
 }
 
-#[cfg(all(
-    feature = "printing",
-    feature = "derive",
-    not(feature = "full")
-))]
+#[cfg(all(feature = "printing", feature = "derive", not(feature = "full")))]
 macro_rules! to_tokens_call {
     // If the variant is marked as #full, don't auto-generate to-tokens for it.
     ($e:ident, $tokens:ident, #full $($rest:tt)*) => {
         unreachable!()
     };
     ($e:ident, $tokens:ident, $($rest:tt)*) => {
         $e.to_tokens($tokens)
     };
--- a/third_party/rust/syn/src/op.rs
+++ b/third_party/rust/syn/src/op.rs
@@ -1,16 +1,8 @@
-// Copyright 2018 Syn Developers
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
 ast_enum! {
     /// A binary operator: `+`, `+=`, `&`.
     ///
     /// *This type is available if Syn is built with the `"derive"` or `"full"`
     /// feature.*
     #[cfg_attr(feature = "clone-impls", derive(Copy))]
     pub enum BinOp {
         /// The `+` operator (addition)
--- a/third_party/rust/syn/src/parse.rs
+++ b/third_party/rust/syn/src/parse.rs
@@ -1,16 +1,8 @@
-// Copyright 2018 Syn Developers
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
 //! Parsing interface for parsing a token stream into a syntax tree node.
 //!
 //! Parsing in Syn is built on parser functions that take in a [`ParseStream`]
 //! and produce a [`Result<T>`] where `T` is some syntax tree node. Underlying
 //! these parser functions is a lower level mechanism built around the
 //! [`Cursor`] type. `Cursor` is a cheaply copyable cursor over a range of
 //! tokens in a token stream.
 //!
@@ -28,25 +20,22 @@
 //! Once `Parse` impls have been defined, they can be called conveniently from a
 //! procedural macro through [`parse_macro_input!`] as shown at the bottom of
 //! the snippet. If the caller provides syntactically invalid input to the
 //! procedural macro, they will receive a helpful compiler error message
 //! pointing out the exact token that triggered the failure to parse.
 //!
 //! [`parse_macro_input!`]: ../macro.parse_macro_input.html
 //!
-//! ```
-//! #[macro_use]
-//! extern crate syn;
-//!
+//! ```edition2018
 //! extern crate proc_macro;
 //!
 //! use proc_macro::TokenStream;
-//! use syn::{token, Field, Ident};
-//! use syn::parse::{Parse, ParseStream, Result};
+//! use syn::{braced, parse_macro_input, token, Field, Ident, Result, Token};
+//! use syn::parse::{Parse, ParseStream};
 //! use syn::punctuated::Punctuated;
 //!
 //! enum Item {
 //!     Struct(ItemStruct),
 //!     Enum(ItemEnum),
 //! }
 //!
 //! struct ItemStruct {
@@ -93,36 +82,34 @@
 //! #[proc_macro]
 //! # };
 //! pub fn my_macro(tokens: TokenStream) -> TokenStream {
 //!     let input = parse_macro_input!(tokens as Item);
 //!
 //!     /* ... */
 //! #   "".parse().unwrap()
 //! }
-//! #
-//! # fn main() {}
 //! ```
 //!
 //! # The `syn::parse*` functions
 //!
 //! The [`syn::parse`], [`syn::parse2`], and [`syn::parse_str`] functions serve
 //! as an entry point for parsing syntax tree nodes that can be parsed in an
 //! obvious default way. These functions can return any syntax tree node that
 //! implements the [`Parse`] trait, which includes most types in Syn.
 //!
 //! [`syn::parse`]: ../fn.parse.html
 //! [`syn::parse2`]: ../fn.parse2.html
 //! [`syn::parse_str`]: ../fn.parse_str.html
 //! [`Parse`]: trait.Parse.html
 //!
-//! ```
+//! ```edition2018
 //! use syn::Type;
 //!
-//! # fn run_parser() -> Result<(), syn::parse::Error> {
+//! # fn run_parser() -> syn::Result<()> {
 //! let t: Type = syn::parse_str("std::collections::HashMap<String, Value>")?;
 //! #     Ok(())
 //! # }
 //! #
 //! # fn main() {
 //! #     run_parser().unwrap();
 //! # }
 //! ```
@@ -140,69 +127,74 @@
 //! would either reject valid input or accept invalid input.
 //!
 //! [`Attribute`]: ../struct.Attribute.html
 //! [`Punctuated`]: ../punctuated/index.html
 //!
 //! The `Parse` trait is not implemented in these cases because there is no good
 //! behavior to consider the default.
 //!
-//! ```ignore
+//! ```edition2018,compile_fail
+//! # extern crate proc_macro;
+//! #
+//! # use syn::punctuated::Punctuated;
+//! # use syn::{PathSegment, Result, Token};
+//! #
+//! # fn f(tokens: proc_macro::TokenStream) -> Result<()> {
+//! #
 //! // Can't parse `Punctuated` without knowing whether trailing punctuation
 //! // should be allowed in this context.
 //! let path: Punctuated<PathSegment, Token![::]> = syn::parse(tokens)?;
+//! #
+//! #     Ok(())
+//! # }
 //! ```
 //!
 //! In these cases the types provide a choice of parser functions rather than a
 //! single `Parse` implementation, and those parser functions can be invoked
 //! through the [`Parser`] trait.
 //!
 //! [`Parser`]: trait.Parser.html
 //!
-//! ```
-//! #[macro_use]
-//! extern crate syn;
+//! ```edition2018
+//! extern crate proc_macro;
 //!
-//! extern crate proc_macro2;
-//!
-//! use proc_macro2::TokenStream;
+//! use proc_macro::TokenStream;
 //! use syn::parse::Parser;
 //! use syn::punctuated::Punctuated;
-//! use syn::{Attribute, Expr, PathSegment};
+//! use syn::{Attribute, Expr, PathSegment, Result, Token};
 //!
-//! # fn run_parsers() -> Result<(), syn::parse::Error> {
-//! #     let tokens = TokenStream::new().into();
-//! // Parse a nonempty sequence of path segments separated by `::` punctuation
-//! // with no trailing punctuation.
-//! let parser = Punctuated::<PathSegment, Token![::]>::parse_separated_nonempty;
-//! let path = parser.parse(tokens)?;
+//! fn call_some_parser_methods(input: TokenStream) -> Result<()> {
+//!     // Parse a nonempty sequence of path segments separated by `::` punctuation
+//!     // with no trailing punctuation.
+//!     let tokens = input.clone();
+//!     let parser = Punctuated::<PathSegment, Token![::]>::parse_separated_nonempty;
+//!     let _path = parser.parse(tokens)?;
 //!
-//! #     let tokens = TokenStream::new().into();
-//! // Parse a possibly empty sequence of expressions terminated by commas with
-//! // an optional trailing punctuation.
-//! let parser = Punctuated::<Expr, Token![,]>::parse_terminated;
-//! let args = parser.parse(tokens)?;
+//!     // Parse a possibly empty sequence of expressions terminated by commas with
+//!     // an optional trailing punctuation.
+//!     let tokens = input.clone();
+//!     let parser = Punctuated::<Expr, Token![,]>::parse_terminated;
+//!     let _args = parser.parse(tokens)?;
 //!
-//! #     let tokens = TokenStream::new().into();
-//! // Parse zero or more outer attributes but not inner attributes.
-//! let parser = Attribute::parse_outer;
-//! let attrs = parser.parse(tokens)?;
-//! #
-//! #     Ok(())
-//! # }
-//! #
-//! # fn main() {}
+//!     // Parse zero or more outer attributes but not inner attributes.
+//!     let tokens = input.clone();
+//!     let parser = Attribute::parse_outer;
+//!     let _attrs = parser.parse(tokens)?;
+//!
+//!     Ok(())
+//! }
 //! ```
 //!
 //! ---
 //!
 //! *This module is available if Syn is built with the `"parsing"` feature.*
 
 use std::cell::Cell;
-use std::fmt::Display;
+use std::fmt::{self, Debug, Display};
 use std::marker::PhantomData;
 use std::mem;
 use std::ops::Deref;
 use std::rc::Rc;
 use std::str::FromStr;
 
 #[cfg(all(
     not(all(target_arch = "wasm32", target_os = "unknown")),
@@ -237,16 +229,29 @@ pub type ParseStream<'a> = &'a ParseBuff
 
 /// Cursor position within a buffered token stream.
 ///
 /// This type is more commonly used through the type alias [`ParseStream`] which
 /// is an alias for `&ParseBuffer`.
 ///
 /// `ParseStream` is the input type for all parser functions in Syn. They have
 /// the signature `fn(ParseStream) -> Result<T>`.
+///
+/// ## Calling a parser function
+///
+/// There is no public way to construct a `ParseBuffer`. Instead, if you are
+/// looking to invoke a parser function that requires `ParseStream` as input,
+/// you will need to go through one of the public parsing entry points.
+///
+/// - The [`parse_macro_input!`] macro if parsing input of a procedural macro;
+/// - One of [the `syn::parse*` functions][syn-parse]; or
+/// - A method of the [`Parser`] trait.
+///
+/// [`parse_macro_input!`]: ../macro.parse_macro_input.html
+/// [syn-parse]: index.html#the-synparse-functions
 pub struct ParseBuffer<'a> {
     scope: Span,
     // Instead of Cell<Cursor<'a>> so that ParseBuffer<'a> is covariant in 'a.
     // The rest of the code in this module needs to be careful that only a
     // cursor derived from this `cell` is ever assigned to this `cell`.
     //
     // Cell<Cursor<'a>> cannot be covariant in 'a because then we could take a
     // ParseBuffer<'a>, upcast to ParseBuffer<'short> for some lifetime shorter
@@ -263,30 +268,40 @@ pub struct ParseBuffer<'a> {
 impl<'a> Drop for ParseBuffer<'a> {
     fn drop(&mut self) {
         if !self.is_empty() && self.unexpected.get().is_none() {
             self.unexpected.set(Some(self.cursor().span()));
         }
     }
 }
 
+impl<'a> Display for ParseBuffer<'a> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        Display::fmt(&self.cursor().token_stream(), f)
+    }
+}
+
+impl<'a> Debug for ParseBuffer<'a> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        Debug::fmt(&self.cursor().token_stream(), f)
+    }
+}
+
 /// Cursor state associated with speculative parsing.
 ///
 /// This type is the input of the closure provided to [`ParseStream::step`].
 ///
 /// [`ParseStream::step`]: struct.ParseBuffer.html#method.step
 ///
 /// # Example
 ///
-/// ```
-/// # extern crate proc_macro2;
-/// # extern crate syn;
-/// #
+/// ```edition2018
 /// use proc_macro2::TokenTree;
-/// use syn::parse::{ParseStream, Result};
+/// use syn::Result;
+/// use syn::parse::ParseStream;
 ///
 /// // This function advances the stream past the next occurrence of `@`. If
 /// // no `@` is present in the stream, the stream position is unchanged and
 /// // an error is returned.
 /// fn skip_past_next_at(input: ParseStream) -> Result<()> {
 ///     input.step(|cursor| {
 ///         let mut rest = *cursor;
 ///         while let Some((tt, next)) = rest.token_tree() {
@@ -296,17 +311,30 @@ impl<'a> Drop for ParseBuffer<'a> {
 ///                 }
 ///                 _ => rest = next,
 ///             }
 ///         }
 ///         Err(cursor.error("no `@` was found after this point"))
 ///     })
 /// }
 /// #
-/// # fn main() {}
+/// # fn remainder_after_skipping_past_next_at(
+/// #     input: ParseStream,
+/// # ) -> Result<proc_macro2::TokenStream> {
+/// #     skip_past_next_at(input)?;
+/// #     input.parse()
+/// # }
+/// #
+/// # fn main() {
+/// #     use syn::parse::Parser;
+/// #     let remainder = remainder_after_skipping_past_next_at
+/// #         .parse_str("a @ b c")
+/// #         .unwrap();
+/// #     assert_eq!(remainder.to_string(), "b c");
+/// # }
 /// ```
 #[derive(Copy, Clone)]
 pub struct StepCursor<'c, 'a> {
     scope: Span,
     // This field is covariant in 'c.
     cursor: Cursor<'c>,
     // This field is contravariant in 'c. Together these make StepCursor
     // invariant in 'c. Also covariant in 'a. The user cannot cast 'c to a
@@ -353,17 +381,18 @@ fn skip(input: ParseStream) -> bool {
         .step(|cursor| {
             if let Some((_lifetime, rest)) = cursor.lifetime() {
                 Ok((true, rest))
             } else if let Some((_token, rest)) = cursor.token_tree() {
                 Ok((true, rest))
             } else {
                 Ok((false, *cursor))
             }
-        }).unwrap()
+        })
+        .unwrap()
 }
 
 impl private {
     pub fn new_parse_buffer(
         scope: Span,
         cursor: Cursor,
         unexpected: Rc<Cell<Option<Span>>>,
     ) -> ParseBuffer {
@@ -393,22 +422,19 @@ impl<'a> ParseBuffer<'a> {
     ///
     /// # Example
     ///
     /// The parser below invokes [`Attribute::parse_outer`] to parse a vector of
     /// zero or more outer attributes.
     ///
     /// [`Attribute::parse_outer`]: ../struct.Attribute.html#method.parse_outer
     ///
-    /// ```
-    /// #[macro_use]
-    /// extern crate syn;
-    ///
-    /// use syn::{Attribute, Ident};
-    /// use syn::parse::{Parse, ParseStream, Result};
+    /// ```edition2018
+    /// use syn::{Attribute, Ident, Result, Token};
+    /// use syn::parse::{Parse, ParseStream};
     ///
     /// // Parses a unit struct with attributes.
     /// //
     /// //     #[path = "s.tmpl"]
     /// //     struct S;
     /// struct UnitStruct {
     ///     attrs: Vec<Attribute>,
     ///     struct_token: Token![struct],
@@ -421,18 +447,16 @@ impl<'a> ParseBuffer<'a> {
     ///         Ok(UnitStruct {
     ///             attrs: input.call(Attribute::parse_outer)?,
     ///             struct_token: input.parse()?,
     ///             name: input.parse()?,
     ///             semi_token: input.parse()?,
     ///         })
     ///     }
     /// }
-    /// #
-    /// # fn main() {}
     /// ```
     pub fn call<T>(&self, function: fn(ParseStream) -> Result<T>) -> Result<T> {
         function(self)
     }
 
     /// Looks at the next token in the parse stream to determine whether it
     /// matches the requested type of token.
     ///
@@ -449,22 +473,19 @@ impl<'a> ParseBuffer<'a> {
     /// - `input.peek(Lifetime)`
     /// - `input.peek(token::Brace)`
     ///
     /// # Example
     ///
     /// In this example we finish parsing the list of supertraits when the next
     /// token in the input is either `where` or an opening curly brace.
     ///
-    /// ```
-    /// #[macro_use]
-    /// extern crate syn;
-    ///
-    /// use syn::{token, Generics, Ident, TypeParamBound};
-    /// use syn::parse::{Parse, ParseStream, Result};
+    /// ```edition2018
+    /// use syn::{braced, token, Generics, Ident, Result, Token, TypeParamBound};
+    /// use syn::parse::{Parse, ParseStream};
     /// use syn::punctuated::Punctuated;
     ///
     /// // Parses a trait definition containing no associated items.
     /// //
     /// //     trait Marker<'de, T>: A + B<'de> where Box<T>: Clone {}
     /// struct MarkerTrait {
     ///     trait_token: Token![trait],
     ///     ident: Ident,
@@ -501,18 +522,16 @@ impl<'a> ParseBuffer<'a> {
     ///             ident: ident,
     ///             generics: generics,
     ///             colon_token: colon_token,
     ///             supertraits: supertraits,
     ///             brace_token: empty_brace_token,
     ///         })
     ///     }
     /// }
-    /// #
-    /// # fn main() {}
     /// ```
     pub fn peek<T: Peek>(&self, token: T) -> bool {
         let _ = token;
         T::Token::peek(self.cursor())
     }
 
     /// Looks at the second-next token in the parse stream.
     ///
@@ -521,22 +540,19 @@ impl<'a> ParseBuffer<'a> {
     /// # Example
     ///
     /// This example needs to use `peek2` because the symbol `union` is not a
     /// keyword in Rust. We can't use just `peek` and decide to parse a union if
     /// the very next token is `union`, because someone is free to write a `mod
     /// union` and a macro invocation that looks like `union::some_macro! { ...
     /// }`. In other words `union` is a contextual keyword.
     ///
-    /// ```
-    /// #[macro_use]
-    /// extern crate syn;
-    ///
-    /// use syn::{Ident, ItemUnion, Macro};
-    /// use syn::parse::{Parse, ParseStream, Result};
+    /// ```edition2018
+    /// use syn::{Ident, ItemUnion, Macro, Result, Token};
+    /// use syn::parse::{Parse, ParseStream};
     ///
     /// // Parses either a union or a macro invocation.
     /// enum UnionOrMacro {
     ///     // union MaybeUninit<T> { uninit: (), value: T }
     ///     Union(ItemUnion),
     ///     // lazy_static! { ... }
     ///     Macro(Macro),
     /// }
@@ -545,18 +561,16 @@ impl<'a> ParseBuffer<'a> {
     ///     fn parse(input: ParseStream) -> Result<Self> {
     ///         if input.peek(Token![union]) && input.peek2(Ident) {
     ///             input.parse().map(UnionOrMacro::Union)
     ///         } else {
     ///             input.parse().map(UnionOrMacro::Macro)
     ///         }
     ///     }
     /// }
-    /// #
-    /// # fn main() {}
     /// ```
     pub fn peek2<T: Peek>(&self, token: T) -> bool {
         let ahead = self.fork();
         skip(&ahead) && ahead.peek(token)
     }
 
     /// Looks at the third-next token in the parse stream.
     pub fn peek3<T: Peek>(&self, token: T) -> bool {
@@ -567,25 +581,21 @@ impl<'a> ParseBuffer<'a> {
     /// Parses zero or more occurrences of `T` separated by punctuation of type
     /// `P`, with optional trailing punctuation.
     ///
     /// Parsing continues until the end of this parse stream. The entire content
     /// of this parse stream must consist of `T` and `P`.
     ///
     /// # Example
     ///
-    /// ```rust
-    /// # #[macro_use]
-    /// # extern crate quote;
+    /// ```edition2018
+    /// # use quote::quote;
     /// #
-    /// #[macro_use]
-    /// extern crate syn;
-    ///
-    /// use syn::{token, Ident, Type};
-    /// use syn::parse::{Parse, ParseStream, Result};
+    /// use syn::{parenthesized, token, Ident, Result, Token, Type};
+    /// use syn::parse::{Parse, ParseStream};
     /// use syn::punctuated::Punctuated;
     ///
     /// // Parse a simplified tuple struct syntax like:
     /// //
     /// //     struct S(A, B);
     /// struct TupleStruct {
     ///     struct_token: Token![struct],
     ///     ident: Ident,
@@ -623,22 +633,19 @@ impl<'a> ParseBuffer<'a> {
 
     /// Returns whether there are tokens remaining in this stream.
     ///
     /// This method returns true at the end of the content of a set of
     /// delimiters, as well as at the very end of the complete macro input.
     ///
     /// # Example
     ///
-    /// ```rust
-    /// #[macro_use]
-    /// extern crate syn;
-    ///
-    /// use syn::{token, Ident, Item};
-    /// use syn::parse::{Parse, ParseStream, Result};
+    /// ```edition2018
+    /// use syn::{braced, token, Ident, Item, Result, Token};
+    /// use syn::parse::{Parse, ParseStream};
     ///
     /// // Parses a Rust `mod m { ... }` containing zero or more items.
     /// struct Mod {
     ///     mod_token: Token![mod],
     ///     name: Ident,
     ///     brace_token: token::Brace,
     ///     items: Vec<Item>,
     /// }
@@ -655,33 +662,29 @@ impl<'a> ParseBuffer<'a> {
     ///                 while !content.is_empty() {
     ///                     items.push(content.parse()?);
     ///                 }
     ///                 items
     ///             },
     ///         })
     ///     }
     /// }
-    /// #
-    /// # fn main() {}
+    /// ```
     pub fn is_empty(&self) -> bool {
         self.cursor().eof()
     }
 
     /// Constructs a helper for peeking at the next token in this stream and
     /// building an error message if it is not one of a set of expected tokens.
     ///
     /// # Example
     ///
-    /// ```
-    /// #[macro_use]
-    /// extern crate syn;
-    ///
-    /// use syn::{ConstParam, Ident, Lifetime, LifetimeDef, TypeParam};
-    /// use syn::parse::{Parse, ParseStream, Result};
+    /// ```edition2018
+    /// use syn::{ConstParam, Ident, Lifetime, LifetimeDef, Result, Token, TypeParam};
+    /// use syn::parse::{Parse, ParseStream};
     ///
     /// // A generic parameter, a single one of the comma-separated elements inside
     /// // angle brackets in:
     /// //
     /// //     fn f<T: Clone, 'a, 'b: 'a, const N: usize>() { ... }
     /// //
     /// // On invalid input, lookahead gives us a reasonable error message.
     /// //
@@ -704,36 +707,34 @@ impl<'a> ParseBuffer<'a> {
     ///             input.parse().map(GenericParam::Lifetime)
     ///         } else if lookahead.peek(Token![const]) {
     ///             input.parse().map(GenericParam::Const)
     ///         } else {
     ///             Err(lookahead.error())
     ///         }
     ///     }
     /// }
-    /// #
-    /// # fn main() {}
     /// ```
     pub fn lookahead1(&self) -> Lookahead1<'a> {
         lookahead::new(self.scope, self.cursor())
     }
 
     /// Forks a parse stream so that parsing tokens out of either the original
     /// or the fork does not advance the position of the other.
     ///
     /// # Performance
     ///
     /// Forking a parse stream is a cheap fixed amount of work and does not
     /// involve copying token buffers. Where you might hit performance problems
     /// is if your macro ends up parsing a large amount of content more than
     /// once.
     ///
-    /// ```
-    /// # use syn::Expr;
-    /// # use syn::parse::{ParseStream, Result};
+    /// ```edition2018
+    /// # use syn::{Expr, Result};
+    /// # use syn::parse::ParseStream;
     /// #
     /// # fn bad(input: ParseStream) -> Result<Expr> {
     /// // Do not do this.
     /// if input.fork().parse::<Expr>().is_ok() {
     ///     return input.parse::<Expr>();
     /// }
     /// # unimplemented!()
     /// # }
@@ -758,40 +759,37 @@ impl<'a> ParseBuffer<'a> {
     /// - `pub(self)`
     /// - `pub(super)`
     /// - `pub(in some::path)`
     ///
     /// To handle the case of visibilities inside of tuple structs, the parser
     /// needs to distinguish parentheses that specify visibility restrictions
     /// from parentheses that form part of a tuple type.
     ///
-    /// ```
+    /// ```edition2018
     /// # struct A;
     /// # struct B;
     /// # struct C;
     /// #
     /// struct S(pub(crate) A, pub (B, C));
     /// ```
     ///
     /// In this example input the first tuple struct element of `S` has
     /// `pub(crate)` visibility while the second tuple struct element has `pub`
     /// visibility; the parentheses around `(B, C)` are part of the type rather
     /// than part of a visibility restriction.
     ///
     /// The parser uses a forked parse stream to check the first token inside of
     /// parentheses after the `pub` keyword. This is a small bounded amount of
     /// work performed against the forked parse stream.
     ///
-    /// ```
-    /// #[macro_use]
-    /// extern crate syn;
-    ///
-    /// use syn::{token, Ident, Path};
+    /// ```edition2018
+    /// use syn::{parenthesized, token, Ident, Path, Result, Token};
     /// use syn::ext::IdentExt;
-    /// use syn::parse::{Parse, ParseStream, Result};
+    /// use syn::parse::{Parse, ParseStream};
     ///
     /// struct PubVisibility {
     ///     pub_token: Token![pub],
     ///     restricted: Option<Restricted>,
     /// }
     ///
     /// struct Restricted {
     ///     paren_token: token::Paren,
@@ -833,40 +831,35 @@ impl<'a> ParseBuffer<'a> {
     ///         }
     ///
     ///         Ok(PubVisibility {
     ///             pub_token: pub_token,
     ///             restricted: None,
     ///         })
     ///     }
     /// }
-    /// #
-    /// # fn main() {}
     /// ```
     pub fn fork(&self) -> Self {
         ParseBuffer {
             scope: self.scope,
             cell: self.cell.clone(),
             marker: PhantomData,
             // Not the parent's unexpected. Nothing cares whether the clone
             // parses all the way.
             unexpected: Rc::new(Cell::new(None)),
         }
     }
 
     /// Triggers an error at the current position of the parse stream.
     ///
     /// # Example
     ///
-    /// ```
-    /// #[macro_use]
-    /// extern crate syn;
-    ///
-    /// use syn::Expr;
-    /// use syn::parse::{Parse, ParseStream, Result};
+    /// ```edition2018
+    /// use syn::{Expr, Result, Token};
+    /// use syn::parse::{Parse, ParseStream};
     ///
     /// // Some kind of loop: `while` or `for` or `loop`.
     /// struct Loop {
     ///     expr: Expr,
     /// }
     ///
     /// impl Parse for Loop {
     ///     fn parse(input: ParseStream) -> Result<Self> {
@@ -877,58 +870,67 @@ impl<'a> ParseBuffer<'a> {
     ///             Ok(Loop {
     ///                 expr: input.parse()?,
     ///             })
     ///         } else {
     ///             Err(input.error("expected some kind of loop"))
     ///         }
     ///     }
     /// }
-    /// #
-    /// # fn main() {}
     /// ```
     pub fn error<T: Display>(&self, message: T) -> Error {
         error::new_at(self.scope, self.cursor(), message)
     }
 
     /// Speculatively parses tokens from this parse stream, advancing the
     /// position of this stream only if parsing succeeds.
     ///
     /// This is a powerful low-level API used for defining the `Parse` impls of
     /// the basic built-in token types. It is not something that will be used
     /// widely outside of the Syn codebase.
     ///
     /// # Example
     ///
-    /// ```
-    /// # extern crate proc_macro2;
-    /// # extern crate syn;
-    /// #
+    /// ```edition2018
     /// use proc_macro2::TokenTree;
-    /// use syn::parse::{ParseStream, Result};
+    /// use syn::Result;
+    /// use syn::parse::ParseStream;
     ///
     /// // This function advances the stream past the next occurrence of `@`. If
     /// // no `@` is present in the stream, the stream position is unchanged and
     /// // an error is returned.
     /// fn skip_past_next_at(input: ParseStream) -> Result<()> {
     ///     input.step(|cursor| {
     ///         let mut rest = *cursor;
-    ///         while let Some((tt, next)) = cursor.token_tree() {
+    ///         while let Some((tt, next)) = rest.token_tree() {
     ///             match tt {
     ///                 TokenTree::Punct(ref punct) if punct.as_char() == '@' => {
     ///                     return Ok(((), next));
     ///                 }
     ///                 _ => rest = next,
     ///             }
     ///         }
     ///         Err(cursor.error("no `@` was found after this point"))
     ///     })
     /// }
     /// #
-    /// # fn main() {}
+    /// # fn remainder_after_skipping_past_next_at(
+    /// #     input: ParseStream,
+    /// # ) -> Result<proc_macro2::TokenStream> {
+    /// #     skip_past_next_at(input)?;
+    /// #     input.parse()
+    /// # }
+    /// #
+    /// # fn main() {
+    /// #     use syn::parse::Parser;
+    /// #     let remainder = remainder_after_skipping_past_next_at
+    /// #         .parse_str("a @ b c")
+    /// #         .unwrap();
+    /// #     assert_eq!(remainder.to_string(), "b c");
+    /// # }
     /// ```
     pub fn step<F, R>(&self, function: F) -> Result<R>
     where
         F: for<'c> FnOnce(StepCursor<'c, 'a>) -> Result<(R, Cursor<'c>)>,
     {
         // Since the user's function is required to work for any 'c, we know
         // that the Cursor<'c> they return is either derived from the input
         // StepCursor<'c, 'a> or from a Cursor<'static>.
@@ -1042,32 +1044,41 @@ impl Parse for Literal {
 ///
 /// [module documentation]: index.html
 ///
 /// *This trait is available if Syn is built with the `"parsing"` feature.*
 pub trait Parser: Sized {
     type Output;
 
     /// Parse a proc-macro2 token stream into the chosen syntax tree node.
+    ///
+    /// This function will check that the input is fully parsed. If there are
+    /// any unparsed tokens at the end of the stream, an error is returned.
     fn parse2(self, tokens: TokenStream) -> Result<Self::Output>;
 
     /// Parse tokens of source code into the chosen syntax tree node.
     ///
+    /// This function will check that the input is fully parsed. If there are
+    /// any unparsed tokens at the end of the stream, an error is returned.
+    ///
     /// *This method is available if Syn is built with both the `"parsing"` and
     /// `"proc-macro"` features.*
     #[cfg(all(
         not(all(target_arch = "wasm32", target_os = "unknown")),
         feature = "proc-macro"
     ))]
     fn parse(self, tokens: proc_macro::TokenStream) -> Result<Self::Output> {
         self.parse2(proc_macro2::TokenStream::from(tokens))
     }
 
     /// Parse a string of Rust code into the chosen syntax tree node.
     ///
+    /// This function will check that the input is fully parsed. If there are
+    /// any unparsed tokens at the end of the string, an error is returned.
+    ///
     /// # Hygiene
     ///
     /// Every span in the resulting syntax tree will be set to resolve at the
     /// macro call site.
     fn parse_str(self, s: &str) -> Result<Self::Output> {
         self.parse2(proc_macro2::TokenStream::from_str(s)?)
     }
 }
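
A brief sketch of the full-parse behavior documented in the new comments above, using `Parser::parse_str` with a `Punctuated` parser (types chosen arbitrarily; assumes the default `parsing`/`derive` features):

```rust
use syn::parse::Parser;
use syn::punctuated::Punctuated;
use syn::{Token, Type};

fn main() {
    let parser = Punctuated::<Type, Token![,]>::parse_terminated;

    // The whole input is consumed, so this parses cleanly.
    let types = parser.parse_str("u8, Vec<String>, [f32; 4]").unwrap();
    assert_eq!(types.len(), 3);

    // Leftover tokens at the end of the string are reported as an error
    // rather than being silently ignored.
    assert!(parser.parse_str("u8, Vec<String> extra").is_err());
}
```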
new file mode 100644
--- /dev/null
+++ b/third_party/rust/syn/src/parse_macro_input.rs
@@ -0,0 +1,103 @@
+/// Parse the input TokenStream of a macro, triggering a compile error if the
+/// tokens fail to parse.
+///
+/// Refer to the [`parse` module] documentation for more details about parsing
+/// in Syn.
+///
+/// [`parse` module]: parse/index.html
+///
+/// # Intended usage
+///
+/// ```edition2018
+/// extern crate proc_macro;
+///
+/// use proc_macro::TokenStream;
+/// use syn::{parse_macro_input, Result};
+/// use syn::parse::{Parse, ParseStream};
+///
+/// struct MyMacroInput {
+///     /* ... */
+/// }
+///
+/// impl Parse for MyMacroInput {
+///     fn parse(input: ParseStream) -> Result<Self> {
+///         /* ... */
+/// #       Ok(MyMacroInput {})
+///     }
+/// }
+///
+/// # const IGNORE: &str = stringify! {
+/// #[proc_macro]
+/// # };
+/// pub fn my_macro(tokens: TokenStream) -> TokenStream {
+///     let input = parse_macro_input!(tokens as MyMacroInput);
+///
+///     /* ... */
+/// #   "".parse().unwrap()
+/// }
+/// ```
+#[macro_export(local_inner_macros)]
+macro_rules! parse_macro_input {
+    ($tokenstream:ident as $ty:ty) => {
+        match $crate::parse_macro_input::parse::<$ty>($tokenstream) {
+            $crate::export::Ok(data) => data,
+            $crate::export::Err(err) => {
+                return $crate::export::TokenStream::from(err.to_compile_error());
+            }
+        }
+    };
+    ($tokenstream:ident) => {
+        parse_macro_input!($tokenstream as _)
+    };
+}
+
+////////////////////////////////////////////////////////////////////////////////
+// Can parse any type that implements Parse.
+
+use parse::{Parse, ParseStream, Parser, Result};
+use proc_macro::TokenStream;
+
+// Not public API.
+#[doc(hidden)]
+pub fn parse<T: ParseMacroInput>(token_stream: TokenStream) -> Result<T> {
+    T::parse.parse(token_stream)
+}
+
+// Not public API.
+#[doc(hidden)]
+pub trait ParseMacroInput: Sized {
+    fn parse(input: ParseStream) -> Result<Self>;
+}
+
+impl<T: Parse> ParseMacroInput for T {
+    fn parse(input: ParseStream) -> Result<Self> {
+        <T as Parse>::parse(input)
+    }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+// Any other types that we want `parse_macro_input!` to be able to parse.
+
+#[cfg(any(feature = "full", feature = "derive"))]
+use AttributeArgs;
+
+#[cfg(any(feature = "full", feature = "derive"))]
+impl ParseMacroInput for AttributeArgs {
+    fn parse(input: ParseStream) -> Result<Self> {
+        let mut metas = Vec::new();
+
+        loop {
+            if input.is_empty() {
+                break;
+            }
+            let value = input.parse()?;
+            metas.push(value);
+            if input.is_empty() {
+                break;
+            }
+            input.parse::<Token![,]>()?;
+        }
+
+        Ok(metas)
+    }
+}
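
A hedged sketch of what the new `AttributeArgs` impl above enables: an attribute macro whose arguments are parsed directly by `parse_macro_input!`. The macro name is hypothetical, `ItemFn` assumes the `full` feature, and this only compiles inside a proc-macro crate:

```rust
extern crate proc_macro;

use proc_macro::TokenStream;
use quote::quote;
use syn::{parse_macro_input, AttributeArgs, ItemFn};

#[proc_macro_attribute]
pub fn my_attribute(args: TokenStream, input: TokenStream) -> TokenStream {
    // The comma-separated meta items of `#[my_attribute(...)]` parse as
    // AttributeArgs via the impl added above.
    let args = parse_macro_input!(args as AttributeArgs);
    let item = parse_macro_input!(input as ItemFn);

    eprintln!("{} attribute arguments", args.len());

    // Emit the annotated function unchanged.
    quote!(#item).into()
}
```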
--- a/third_party/rust/syn/src/parse_quote.rs
+++ b/third_party/rust/syn/src/parse_quote.rs
@@ -3,23 +3,19 @@
 ///
 /// [`quote!`]: https://docs.rs/quote/0.6/quote/index.html
 ///
 /// The return type can be any syntax tree node that implements the [`Parse`]
 /// trait.
 ///
 /// [`Parse`]: parse/trait.Parse.html
 ///
-/// ```
-/// #[macro_use]
-/// extern crate quote;
-/// #[macro_use]
-/// extern crate syn;
-///
-/// use syn::Stmt;
+/// ```edition2018
+/// use quote::quote;
+/// use syn::{parse_quote, Stmt};
 ///
 /// fn main() {
 ///     let name = quote!(v);
 ///     let ty = quote!(u8);
 ///
 ///     let stmt: Stmt = parse_quote! {
 ///         let #name: #ty = Default::default();
 ///     };
@@ -32,35 +28,28 @@
 /// although interpolation of syntax tree nodes into the quoted tokens is only
 /// supported if Syn is built with the `"printing"` feature as well.*
 ///
 /// # Example
 ///
 /// The following helper function adds a bound `T: HeapSize` to every type
 /// parameter `T` in the input generics.
 ///
-/// ```
-/// #[macro_use]
-/// extern crate quote;
-/// #[macro_use]
-/// extern crate syn;
-///
-/// use syn::{Generics, GenericParam};
+/// ```edition2018
+/// use syn::{parse_quote, Generics, GenericParam};
 ///
 /// // Add a bound `T: HeapSize` to every type parameter T.
 /// fn add_trait_bounds(mut generics: Generics) -> Generics {
 ///     for param in &mut generics.params {
 ///         if let GenericParam::Type(ref mut type_param) = *param {
 ///             type_param.bounds.push(parse_quote!(HeapSize));
 ///         }
 ///     }
 ///     generics
 /// }
-/// #
-/// # fn main() {}
 /// ```
 ///
 /// # Special cases
 ///
 /// This macro can parse the following additional types as a special case even
 /// though they do not implement the `Parse` trait.
 ///
 /// - [`Attribute`] — parses one attribute, allowing either outer like `#[...]`
@@ -71,20 +60,39 @@
 /// [`Attribute`]: struct.Attribute.html
 /// [`Punctuated<T, P>`]: punctuated/struct.Punctuated.html
 ///
 /// # Panics
 ///
 /// Panics if the tokens fail to parse as the expected syntax tree type. The
 /// caller is responsible for ensuring that the input tokens are syntactically
 /// valid.
-#[macro_export]
+#[macro_export(local_inner_macros)]
 macro_rules! parse_quote {
     ($($tt:tt)*) => {
-        $crate::parse_quote::parse($crate::export::From::from(quote!($($tt)*)))
+        $crate::parse_quote::parse($crate::export::From::from(quote_impl!($($tt)*)))
+    };
+}
+
+#[cfg(not(syn_can_call_macro_by_path))]
+#[doc(hidden)]
+#[macro_export]
+macro_rules! quote_impl {
+    ($($tt:tt)*) => {
+        // Require caller to have their own `#[macro_use] extern crate quote`.
+        quote!($($tt)*)
+    };
+}
+
+#[cfg(syn_can_call_macro_by_path)]
+#[doc(hidden)]
+#[macro_export]
+macro_rules! quote_impl {
+    ($($tt:tt)*) => {
+        $crate::export::quote::quote!($($tt)*)
     };
 }
 
 ////////////////////////////////////////////////////////////////////////////////
 // Can parse any type that implements Parse.
 
 use parse::{Parse, ParseStream, Parser, Result};
 use proc_macro2::TokenStream;
--- a/third_party/rust/syn/src/path.rs
+++ b/third_party/rust/syn/src/path.rs
@@ -1,16 +1,8 @@
-// Copyright 2018 Syn Developers
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
 use super::*;
 use punctuated::Punctuated;
 
 ast_struct! {
     /// A path at which a named item is exported: `std::collections::HashMap`.
     ///
     /// *This type is available if Syn is built with the `"derive"` or `"full"`
     /// feature.*
@@ -88,16 +80,23 @@ impl Default for PathArguments {
 impl PathArguments {
     pub fn is_empty(&self) -> bool {
         match *self {
             PathArguments::None => true,
             PathArguments::AngleBracketed(ref bracketed) => bracketed.args.is_empty(),
             PathArguments::Parenthesized(_) => false,
         }
     }
+
+    fn is_none(&self) -> bool {
+        match *self {
+            PathArguments::None => true,
+            PathArguments::AngleBracketed(_) | PathArguments::Parenthesized(_) => false,
+        }
+    }
 }
 
 ast_enum! {
     /// An individual generic argument, like `'a`, `T`, or `Item = T`.
     ///
     /// *This type is available if Syn is built with the `"derive"` or `"full"`
     /// feature.*
     pub enum GenericArgument {
@@ -354,22 +353,19 @@ pub mod parsing {
     impl Path {
         /// Parse a `Path` containing no path arguments on any of its segments.
         ///
         /// *This function is available if Syn is built with the `"parsing"`
         /// feature.*
         ///
         /// # Example
         ///
-        /// ```
-        /// #[macro_use]
-        /// extern crate syn;
-        ///
-        /// use syn::Path;
-        /// use syn::parse::{Parse, ParseStream, Result};
+        /// ```edition2018
+        /// use syn::{Path, Result, Token};
+        /// use syn::parse::{Parse, ParseStream};
         ///
         /// // A simplified single `use` statement like:
         /// //
         /// //     use std::collections::HashMap;
         /// //
         /// // Note that generic parameters are not allowed in a `use` statement
         /// // so the following must not be accepted.
         /// //
@@ -382,18 +378,16 @@ pub mod parsing {
         /// impl Parse for SingleUse {
         ///     fn parse(input: ParseStream) -> Result<Self> {
         ///         Ok(SingleUse {
         ///             use_token: input.parse()?,
         ///             path: input.call(Path::parse_mod_style)?,
         ///         })
         ///     }
         /// }
-        /// #
-        /// # fn main() {}
         /// ```
         pub fn parse_mod_style(input: ParseStream) -> Result<Self> {
             Ok(Path {
                 leading_colon: input.parse()?,
                 segments: {
                     let mut segments = Punctuated::new();
                     loop {
                         if !input.peek(Ident)
@@ -418,16 +412,36 @@ pub mod parsing {
                     } else if segments.trailing_punct() {
                         return Err(input.error("expected path segment"));
                     }
                     segments
                 },
             })
         }
 
+        /// Determines whether this is a path of length 1 equal to the given
+        /// ident.
+        ///
+        /// For them to compare equal, it must be the case that:
+        ///
+        /// - the path has no leading colon,
+        /// - the number of path segments is 1,
+        /// - the first path segment has no angle bracketed or parenthesized
+        ///   path arguments
+        /// - and the ident of the first path segment is equal to the given one.
+        pub fn is_ident<I>(&self, ident: I) -> bool
+        where
+            Ident: PartialEq<I>,
+        {
+            self.leading_colon.is_none()
+                && self.segments.len() == 1
+                && self.segments[0].arguments.is_none()
+                && self.segments[0].ident == ident
+        }
+
         fn parse_helper(input: ParseStream, expr_style: bool) -> Result<Self> {
             if input.peek(Token![dyn]) {
                 return Err(input.error("expected path"));
             }
 
             Ok(Path {
                 leading_colon: input.parse()?,
                 segments: {
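
For reference, a minimal standalone sketch of the new `Path::is_ident` helper introduced in this file (the parsed inputs are arbitrary examples):

```rust
use syn::Path;

fn main() {
    let simple: Path = syn::parse_str("serde").unwrap();
    let qualified: Path = syn::parse_str("serde::Deserialize").unwrap();
    let generic: Path = syn::parse_str("Option<u8>").unwrap();

    // Only a leading-colon-free, single-segment, argument-free path matches.
    assert!(simple.is_ident("serde"));
    assert!(!qualified.is_ident("serde"));
    assert!(!generic.is_ident("Option"));
}
```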
--- a/third_party/rust/syn/src/punctuated.rs
+++ b/third_party/rust/syn/src/punctuated.rs
@@ -1,16 +1,8 @@
-// Copyright 2018 Syn Developers
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
 //! A punctuated sequence of syntax tree nodes separated by punctuation.
 //!
 //! Lots of things in Rust are punctuated sequences.
 //!
 //! - The fields of a struct are `Punctuated<Field, Token![,]>`.
 //! - The segments of a path are `Punctuated<PathSegment, Token![::]>`.
 //! - The bounds on a generic parameter are `Punctuated<TypeParamBound, Token![+]>`.
 //! - The arguments to a function call are `Punctuated<Expr, Token![,]>`.
--- a/third_party/rust/syn/src/spanned.rs
+++ b/third_party/rust/syn/src/spanned.rs
@@ -1,16 +1,8 @@
-// Copyright 2018 Syn Developers
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
 //! A trait that can provide the `Span` of the complete contents of a syntax
 //! tree node.
 //!
 //! *This module is available if Syn is built with both the `"parsing"` and
 //! `"printing"` features.*
 //!
 //! # Example
 //!
@@ -23,26 +15,22 @@
 //! [`Sync`]: https://doc.rust-lang.org/std/marker/trait.Sync.html
 //!
 //! If the field type does *not* implement `Sync` as required, we want the
 //! compiler to report an error pointing out exactly which type it was.
 //!
 //! The following macro code takes a variable `ty` of type `Type` and produces a
 //! static assertion that `Sync` is implemented for that type.
 //!
-//! ```
+//! ```edition2018
 //! # extern crate proc_macro;
-//! # extern crate proc_macro2;
-//! # extern crate syn;
 //! #
-//! #[macro_use]
-//! extern crate quote;
-//!
 //! use proc_macro::TokenStream;
 //! use proc_macro2::Span;
+//! use quote::quote_spanned;
 //! use syn::Type;
 //! use syn::spanned::Spanned;
 //!
 //! # const IGNORE_TOKENS: &str = stringify! {
 //! #[proc_macro_derive(MyMacro)]
 //! # };
 //! pub fn my_macro(input: TokenStream) -> TokenStream {
 //!     # let ty = get_a_type();
@@ -54,18 +42,16 @@
 //!
 //!     /* ... */
 //!     # input
 //! }
 //! #
 //! # fn get_a_type() -> Type {
 //! #     unimplemented!()
 //! # }
-//! #
-//! # fn main() {}
 //! ```
 //!
 //! By inserting this `assert_sync` fragment into the output code generated by
 //! our macro, the user's code will fail to compile if `ty` does not implement
 //! `Sync`. The errors they would see look like the following.
 //!
 //! ```text
 //! error[E0277]: the trait bound `*const i32: std::marker::Sync` is not satisfied
@@ -109,41 +95,50 @@ mod private {
     pub trait Sealed {}
     impl<T: ToTokens> Sealed for T {}
 }
 
 impl<T> Spanned for T
 where
     T: ToTokens,
 {
+    fn span(&self) -> Span {
+        join_spans(self.into_token_stream())
+    }
+}
+
+fn join_spans(tokens: TokenStream) -> Span {
+    let mut iter = tokens.into_iter().filter_map(|tt| {
+        // FIXME: This shouldn't be required, since optimally spans should
+        // never be invalid. This filter_map can probably be removed when
+        // https://github.com/rust-lang/rust/issues/43081 is resolved.
+        let span = tt.span();
+        let debug = format!("{:?}", span);
+        if debug.ends_with("bytes(0..0)") {
+            None
+        } else {
+            Some(span)
+        }
+    });
+
+    let mut joined = match iter.next() {
+        Some(span) => span,
+        None => return Span::call_site(),
+    };
+
     #[cfg(procmacro2_semver_exempt)]
-    fn span(&self) -> Span {
-        let mut tokens = TokenStream::new();
-        self.to_tokens(&mut tokens);
-        let mut iter = tokens.into_iter();
-        let mut span = match iter.next() {
-            Some(tt) => tt.span(),
-            None => {
-                return Span::call_site();
-            }
-        };
-        for tt in iter {
-            if let Some(joined) = span.join(tt.span()) {
-                span = joined;
+    {
+        for next in iter {
+            if let Some(span) = joined.join(next) {
+                joined = span;
             }
         }
-        span
     }
 
     #[cfg(not(procmacro2_semver_exempt))]
-    fn span(&self) -> Span {
-        let mut tokens = TokenStream::new();
-        self.to_tokens(&mut tokens);
-        let mut iter = tokens.into_iter();
-
+    {
         // We can't join spans without procmacro2_semver_exempt so just grab the
         // first one.
-        match iter.next() {
-            Some(tt) => tt.span(),
-            None => Span::call_site(),
-        }
+        joined = joined;
     }
+
+    joined
 }
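A small usage sketch for the reworked `Spanned::span` above, assuming syn 0.15 with its default "derive", "parsing" and "printing" features; outside a real procedural macro the resulting span carries call-site positions only:

    use syn::spanned::Spanned;
    use syn::Type;

    fn main() {
        // span() turns the node back into tokens and joins their spans,
        // falling back to the first token's span when joining is unavailable.
        let ty: Type = syn::parse_str("Vec<String>").unwrap();
        println!("{:?}", ty.span());
    }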
new file mode 100644
--- /dev/null
+++ b/third_party/rust/syn/src/thread.rs
@@ -0,0 +1,81 @@
+use std::fmt::{self, Debug};
+
+use self::thread_id::ThreadId;
+
+/// ThreadBound is a Sync-maker and Send-maker that allows accessing a value
+/// of type T only from the original thread on which the ThreadBound was
+/// constructed.
+pub struct ThreadBound<T> {
+    value: T,
+    thread_id: ThreadId,
+}
+
+unsafe impl<T> Sync for ThreadBound<T> {}
+
+// Send bound requires Copy, as otherwise Drop could run in the wrong place.
+unsafe impl<T: Copy> Send for ThreadBound<T> {}
+
+impl<T> ThreadBound<T> {
+    pub fn new(value: T) -> Self {
+        ThreadBound {
+            value: value,
+            thread_id: thread_id::current(),
+        }
+    }
+
+    pub fn get(&self) -> Option<&T> {
+        if thread_id::current() == self.thread_id {
+            Some(&self.value)
+        } else {
+            None
+        }
+    }
+}
+
+impl<T: Debug> Debug for ThreadBound<T> {
+    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+        match self.get() {
+            Some(value) => Debug::fmt(value, formatter),
+            None => formatter.write_str("unknown"),
+        }
+    }
+}
+
+#[cfg(syn_can_use_thread_id)]
+mod thread_id {
+    use std::thread;
+
+    pub use std::thread::ThreadId;
+
+    pub fn current() -> ThreadId {
+        thread::current().id()
+    }
+}
+
+#[cfg(not(syn_can_use_thread_id))]
+mod thread_id {
+    use std::sync::atomic::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT};
+
+    thread_local! {
+        static THREAD_ID: usize = {
+            static NEXT_THREAD_ID: AtomicUsize = ATOMIC_USIZE_INIT;
+
+            // Ordering::Relaxed because our only requirement for the ids is
+            // that they are unique. It is okay for the compiler to rearrange
+            // other memory reads around this fetch. It's still an atomic
+            // fetch_add, so no two threads will be able to read the same value
+            // from it.
+            //
+            // The main thing which these orderings affect is other memory reads
+            // around the atomic read, which for our case are irrelevant as this
+            // atomic guards nothing.
+            NEXT_THREAD_ID.fetch_add(1, Ordering::Relaxed)
+        };
+    }
+
+    pub type ThreadId = usize;
+
+    pub fn current() -> ThreadId {
+        THREAD_ID.with(|id| *id)
+    }
+}
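`ThreadBound` here is private to syn, so as a freestanding sketch of the same idea — pairing a value with the id of the thread that created it and refusing access from anywhere else; hypothetical standalone code, not part of this patch:

    use std::thread::{self, ThreadId};

    pub struct ThreadBound<T> {
        value: T,
        thread_id: ThreadId,
    }

    unsafe impl<T> Sync for ThreadBound<T> {}
    // Only Copy values may cross threads, so Drop never runs on the wrong thread.
    unsafe impl<T: Copy> Send for ThreadBound<T> {}

    impl<T> ThreadBound<T> {
        pub fn new(value: T) -> Self {
            ThreadBound { value, thread_id: thread::current().id() }
        }

        pub fn get(&self) -> Option<&T> {
            if thread::current().id() == self.thread_id {
                Some(&self.value)
            } else {
                None
            }
        }
    }

    fn main() {
        let bound = ThreadBound::new(1u32);
        assert_eq!(bound.get(), Some(&1));
        // From another thread the value is hidden.
        let hidden = thread::spawn(move || bound.get().is_none());
        assert!(hidden.join().unwrap());
    }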
--- a/third_party/rust/syn/src/token.rs
+++ b/third_party/rust/syn/src/token.rs
@@ -1,68 +1,53 @@
-// Copyright 2018 Syn Developers
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
 //! Tokens representing Rust punctuation, keywords, and delimiters.
 //!
 //! The type names in this module can be difficult to keep straight, so we
 //! prefer to use the [`Token!`] macro instead. This is a type-macro that
 //! expands to the token type of the given token.
 //!
 //! [`Token!`]: ../macro.Token.html
 //!
 //! # Example
 //!
 //! The [`ItemStatic`] syntax tree node is defined like this.
 //!
 //! [`ItemStatic`]: ../struct.ItemStatic.html
 //!
-//! ```
-//! # #[macro_use]
-//! # extern crate syn;
-//! #
-//! # use syn::{Attribute, Expr, Ident, Type, Visibility};
+//! ```edition2018
+//! # use syn::{Attribute, Expr, Ident, Token, Type, Visibility};
 //! #
 //! pub struct ItemStatic {
 //!     pub attrs: Vec<Attribute>,
 //!     pub vis: Visibility,
 //!     pub static_token: Token![static],
 //!     pub mutability: Option<Token![mut]>,
 //!     pub ident: Ident,
 //!     pub colon_token: Token![:],
 //!     pub ty: Box<Type>,
 //!     pub eq_token: Token![=],
 //!     pub expr: Box<Expr>,
 //!     pub semi_token: Token![;],
 //! }
-//! #
-//! # fn main() {}
 //! ```
 //!
 //! # Parsing
 //!
 //! Keywords and punctuation can be parsed through the [`ParseStream::parse`]
 //! method. Delimiter tokens are parsed using the [`parenthesized!`],
 //! [`bracketed!`] and [`braced!`] macros.
 //!
 //! [`ParseStream::parse`]: ../parse/struct.ParseBuffer.html#method.parse
 //! [`parenthesized!`]: ../macro.parenthesized.html
 //! [`bracketed!`]: ../macro.bracketed.html
 //! [`braced!`]: ../macro.braced.html
 //!
-//! ```
-//! # extern crate syn;
-//! #
-//! use syn::Attribute;
-//! use syn::parse::{Parse, ParseStream, Result};
+//! ```edition2018
+//! use syn::{Attribute, Result};
+//! use syn::parse::{Parse, ParseStream};
 //! #
 //! # enum ItemStatic {}
 //!
 //! // Parse the ItemStatic struct shown above.
 //! impl Parse for ItemStatic {
 //!     fn parse(input: ParseStream) -> Result<Self> {
 //!         # use syn::ItemStatic;
 //!         # fn parse(input: ParseStream) -> Result<ItemStatic> {
@@ -77,31 +62,25 @@
 //!             eq_token: input.parse()?,
 //!             expr: input.parse()?,
 //!             semi_token: input.parse()?,
 //!         })
 //!         # }
 //!         # unimplemented!()
 //!     }
 //! }
-//! #
-//! # fn main() {}
 //! ```
 
 use std;
-#[cfg(feature = "parsing")]
-use std::cell::Cell;
 #[cfg(feature = "extra-traits")]
 use std::cmp;
 #[cfg(feature = "extra-traits")]
 use std::fmt::{self, Debug};
 #[cfg(feature = "extra-traits")]
 use std::hash::{Hash, Hasher};
-#[cfg(feature = "parsing")]
-use std::rc::Rc;
 
 #[cfg(feature = "parsing")]
 use proc_macro2::Delimiter;
 #[cfg(any(feature = "parsing", feature = "printing"))]
 use proc_macro2::Ident;
 use proc_macro2::Span;
 #[cfg(feature = "printing")]
 use proc_macro2::TokenStream;
@@ -141,18 +120,22 @@ pub trait Token: private::Sealed {
 #[cfg(feature = "parsing")]
 mod private {
     pub trait Sealed {}
 }
 
 #[cfg(feature = "parsing")]
 impl private::Sealed for Ident {}
 
+#[cfg(any(feature = "full", feature = "derive"))]
 #[cfg(feature = "parsing")]
 fn peek_impl(cursor: Cursor, peek: fn(ParseStream) -> bool) -> bool {
+    use std::cell::Cell;
+    use std::rc::Rc;
+
     let scope = Span::call_site();
     let unexpected = Rc::new(Cell::new(None));
     let buffer = ::private::new_parse_buffer(scope, cursor, unexpected);
     peek(&buffer)
 }
 
 #[cfg(any(feature = "full", feature = "derive"))]
 macro_rules! impl_token {
@@ -267,17 +250,17 @@ macro_rules! define_keywords {
             #[cfg(feature = "extra-traits")]
             impl Hash for $name {
                 fn hash<H: Hasher>(&self, _state: &mut H) {}
             }
 
             #[cfg(feature = "printing")]
             impl ToTokens for $name {
                 fn to_tokens(&self, tokens: &mut TokenStream) {
-                    printing::keyword($token, &self.span, tokens);
+                    printing::keyword($token, self.span, tokens);
                 }
             }
 
             #[cfg(feature = "parsing")]
             impl Parse for $name {
                 fn parse(input: ParseStream) -> Result<Self> {
                     Ok($name {
                         span: parsing::keyword(input, $token)?,
@@ -445,17 +428,17 @@ macro_rules! define_delimiters {
             }
 
             impl $name {
                 #[cfg(feature = "printing")]
                 pub fn surround<F>(&self, tokens: &mut TokenStream, f: F)
                 where
                     F: FnOnce(&mut TokenStream),
                 {
-                    printing::delim($token, &self.span, tokens, f);
+                    printing::delim($token, self.span, tokens, f);
                 }
             }
 
             #[cfg(feature = "parsing")]
             impl private::Sealed for $name {}
         )*
     };
 }
@@ -557,17 +540,16 @@ impl Token for Group {
 define_keywords! {
     "abstract"    pub struct Abstract     /// `abstract`
     "as"          pub struct As           /// `as`
     "async"       pub struct Async        /// `async`
     "auto"        pub struct Auto         /// `auto`
     "become"      pub struct Become       /// `become`
     "box"         pub struct Box          /// `box`
     "break"       pub struct Break        /// `break`
-    "Self"        pub struct CapSelf      /// `Self`
     "const"       pub struct Const        /// `const`
     "continue"    pub struct Continue     /// `continue`
     "crate"       pub struct Crate        /// `crate`
     "default"     pub struct Default      /// `default`
     "do"          pub struct Do           /// `do`
     "dyn"         pub struct Dyn          /// `dyn`
     "else"        pub struct Else         /// `else`
     "enum"        pub struct Enum         /// `enum`
@@ -586,17 +568,18 @@ define_keywords! {
     "mod"         pub struct Mod          /// `mod`
     "move"        pub struct Move         /// `move`
     "mut"         pub struct Mut          /// `mut`
     "override"    pub struct Override     /// `override`
     "priv"        pub struct Priv         /// `priv`
     "pub"         pub struct Pub          /// `pub`
     "ref"         pub struct Ref          /// `ref`
     "return"      pub struct Return       /// `return`
-    "self"        pub struct Self_        /// `self`
+    "Self"        pub struct SelfType     /// `Self`
+    "self"        pub struct SelfValue    /// `self`
     "static"      pub struct Static       /// `static`
     "struct"      pub struct Struct       /// `struct`
     "super"       pub struct Super        /// `super`
     "trait"       pub struct Trait        /// `trait`
     "try"         pub struct Try          /// `try`
     "type"        pub struct Type         /// `type`
     "typeof"      pub struct Typeof       /// `typeof`
     "union"       pub struct Union        /// `union`
@@ -678,17 +661,16 @@ define_delimiters! {
 macro_rules! Token {
     (abstract)    => { $crate::token::Abstract };
     (as)          => { $crate::token::As };
     (async)       => { $crate::token::Async };
     (auto)        => { $crate::token::Auto };
     (become)      => { $crate::token::Become };
     (box)         => { $crate::token::Box };
     (break)       => { $crate::token::Break };
-    (Self)        => { $crate::token::CapSelf };
     (const)       => { $crate::token::Const };
     (continue)    => { $crate::token::Continue };
     (crate)       => { $crate::token::Crate };
     (default)     => { $crate::token::Default };
     (do)          => { $crate::token::Do };
     (dyn)         => { $crate::token::Dyn };
     (else)        => { $crate::token::Else };
     (enum)        => { $crate::token::Enum };
@@ -707,17 +689,18 @@ macro_rules! Token {
     (mod)         => { $crate::token::Mod };
     (move)        => { $crate::token::Move };
     (mut)         => { $crate::token::Mut };
     (override)    => { $crate::token::Override };
     (priv)        => { $crate::token::Priv };
     (pub)         => { $crate::token::Pub };
     (ref)         => { $crate::token::Ref };
     (return)      => { $crate::token::Return };
-    (self)        => { $crate::token::Self_ };
+    (Self)        => { $crate::token::SelfType };
+    (self)        => { $crate::token::SelfValue };
     (static)      => { $crate::token::Static };
     (struct)      => { $crate::token::Struct };
     (super)       => { $crate::token::Super };
     (trait)       => { $crate::token::Trait };
     (try)         => { $crate::token::Try };
     (type)        => { $crate::token::Type };
     (typeof)      => { $crate::token::Typeof };
     (union)       => { $crate::token::Union };
@@ -771,16 +754,23 @@ macro_rules! Token {
     (>>=)         => { $crate::token::ShrEq };
     (*)           => { $crate::token::Star };
     (-)           => { $crate::token::Sub };
     (-=)          => { $crate::token::SubEq };
     (~)           => { $crate::token::Tilde };
     (_)           => { $crate::token::Underscore };
 }
 
+// Old names. TODO: remove these re-exports in a breaking change.
+// https://github.com/dtolnay/syn/issues/486
+#[doc(hidden)]
+pub use self::SelfType as CapSelf;
+#[doc(hidden)]
+pub use self::SelfValue as Self_;
+
 #[cfg(feature = "parsing")]
 mod parsing {
     use proc_macro2::{Spacing, Span};
 
     use buffer::Cursor;
     use error::{Error, Result};
     use parse::ParseStream;
     use span::FromSpans;
@@ -874,30 +864,30 @@ mod printing {
             tokens.append(op);
         }
 
         let mut op = Punct::new(ch, Spacing::Alone);
         op.set_span(*span);
         tokens.append(op);
     }
 
-    pub fn keyword(s: &str, span: &Span, tokens: &mut TokenStream) {
-        tokens.append(Ident::new(s, *span));
+    pub fn keyword(s: &str, span: Span, tokens: &mut TokenStream) {
+        tokens.append(Ident::new(s, span));
     }
 
-    pub fn delim<F>(s: &str, span: &Span, tokens: &mut TokenStream, f: F)
+    pub fn delim<F>(s: &str, span: Span, tokens: &mut TokenStream, f: F)
     where
         F: FnOnce(&mut TokenStream),
     {
         let delim = match s {
             "(" => Delimiter::Parenthesis,
             "[" => Delimiter::Bracket,
             "{" => Delimiter::Brace,
             " " => Delimiter::None,
             _ => panic!("unknown delimiter: {}", s),
         };
         let mut inner = TokenStream::new();
         f(&mut inner);
         let mut g = Group::new(delim, inner);
-        g.set_span(*span);
+        g.set_span(span);
         tokens.append(g);
     }
 }
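To show what the keyword-token rename above means in practice — a sketch assuming syn 0.15.24 and its usual Default impls on token types; the old `Self_`/`CapSelf` names remain only as hidden aliases:

    use syn::Token;

    fn main() {
        // Token![self] now expands to syn::token::SelfValue and
        // Token![Self] to syn::token::SelfType.
        let value_kw: Token![self] = Default::default();
        let type_kw: Token![Self] = Default::default();
        let _: syn::token::SelfValue = value_kw;
        let _: syn::token::SelfType = type_kw;
    }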
--- a/third_party/rust/syn/src/tt.rs
+++ b/third_party/rust/syn/src/tt.rs
@@ -1,16 +1,8 @@
-// Copyright 2018 Syn Developers
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
 use std::hash::{Hash, Hasher};
 
 use proc_macro2::{Delimiter, TokenStream, TokenTree};
 
 pub struct TokenTreeHelper<'a>(pub &'a TokenTree);
 
 impl<'a> PartialEq for TokenTreeHelper<'a> {
     fn eq(&self, other: &Self) -> bool {
@@ -36,20 +28,21 @@ impl<'a> PartialEq for TokenTreeHelper<'
                     };
                     if TokenTreeHelper(&item1) != TokenTreeHelper(&item2) {
                         return false;
                     }
                 }
                 s2.next().is_none()
             }
             (&TokenTree::Punct(ref o1), &TokenTree::Punct(ref o2)) => {
-                o1.as_char() == o2.as_char() && match (o1.spacing(), o2.spacing()) {
-                    (Spacing::Alone, Spacing::Alone) | (Spacing::Joint, Spacing::Joint) => true,
-                    _ => false,
-                }
+                o1.as_char() == o2.as_char()
+                    && match (o1.spacing(), o2.spacing()) {
+                        (Spacing::Alone, Spacing::Alone) | (Spacing::Joint, Spacing::Joint) => true,
+                        _ => false,
+                    }
             }
             (&TokenTree::Literal(ref l1), &TokenTree::Literal(ref l2)) => {
                 l1.to_string() == l2.to_string()
             }
             (&TokenTree::Ident(ref s1), &TokenTree::Ident(ref s2)) => s1 == s2,
             _ => false,
         }
     }
--- a/third_party/rust/syn/src/ty.rs
+++ b/third_party/rust/syn/src/ty.rs
@@ -1,16 +1,8 @@
-// Copyright 2018 Syn Developers
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
 use super::*;
 use proc_macro2::TokenStream;
 use punctuated::Punctuated;
 #[cfg(feature = "extra-traits")]
 use std::hash::{Hash, Hasher};
 #[cfg(feature = "extra-traits")]
 use tt::TokenStreamHelper;
 
@@ -303,36 +295,101 @@ pub mod parsing {
                 }));
             }
             if content.peek(Lifetime) {
                 return Ok(Type::Paren(TypeParen {
                     paren_token: paren_token,
                     elem: Box::new(Type::TraitObject(content.parse()?)),
                 }));
             }
+            if content.peek(Token![?]) {
+                return Ok(Type::TraitObject(TypeTraitObject {
+                    dyn_token: None,
+                    bounds: {
+                        let mut bounds = Punctuated::new();
+                        bounds.push_value(TypeParamBound::Trait(TraitBound {
+                            paren_token: Some(paren_token),
+                            ..content.parse()?
+                        }));
+                        while let Some(plus) = input.parse()? {
+                            bounds.push_punct(plus);
+                            bounds.push_value(input.parse()?);
+                        }
+                        bounds
+                    },
+                }));
+            }
             let first: Type = content.parse()?;
             if content.peek(Token![,]) {
-                Ok(Type::Tuple(TypeTuple {
+                return Ok(Type::Tuple(TypeTuple {
                     paren_token: paren_token,
                     elems: {
                         let mut elems = Punctuated::new();
                         elems.push_value(first);
                         elems.push_punct(content.parse()?);
                         let rest: Punctuated<Type, Token![,]> =
                             content.parse_terminated(Parse::parse)?;
                         elems.extend(rest);
                         elems
                     },
-                }))
-            } else {
-                Ok(Type::Paren(TypeParen {
-                    paren_token: paren_token,
-                    elem: Box::new(first),
-                }))
+                }));
             }
+            if allow_plus && input.peek(Token![+]) {
+                loop {
+                    let first = match first {
+                        Type::Path(TypePath { qself: None, path }) => {
+                            TypeParamBound::Trait(TraitBound {
+                                paren_token: Some(paren_token),
+                                modifier: TraitBoundModifier::None,
+                                lifetimes: None,
+                                path: path,
+                            })
+                        }
+                        Type::TraitObject(TypeTraitObject {
+                            dyn_token: None,
+                            ref bounds,
+                        }) => {
+                            if bounds.len() > 1 || bounds.trailing_punct() {
+                                break;
+                            }
+                            match first {
+                                Type::TraitObject(TypeTraitObject { bounds, .. }) => {
+                                    match bounds.into_iter().next().unwrap() {
+                                        TypeParamBound::Trait(trait_bound) => {
+                                            TypeParamBound::Trait(TraitBound {
+                                                paren_token: Some(paren_token),
+                                                ..trait_bound
+                                            })
+                                        }
+                                        other => other,
+                                    }
+                                }
+                                _ => unreachable!(),
+                            }
+                        }
+                        _ => break,
+                    };
+                    return Ok(Type::TraitObject(TypeTraitObject {
+                        dyn_token: None,
+                        bounds: {
+                            let mut bounds = Punctuated::new();
+                            bounds.push_value(first);
+                            while let Some(plus) = input.parse()? {
+                                bounds.push_punct(plus);
+                                bounds.push_value(input.parse()?);
+                            }
+                            bounds
+                        },
+                    }));
+                }
+            }
+            Ok(Type::Paren(TypeParen {
+                paren_token: paren_token,
+                elem: Box::new(first),
+            }))
         } else if lookahead.peek(Token![fn])
             || lookahead.peek(Token![unsafe])
             || lookahead.peek(Token![extern]) && !input.peek2(Token![::])
         {
             let mut bare_fn: TypeBareFn = input.parse()?;
             bare_fn.lifetimes = lifetimes;
             Ok(Type::BareFn(bare_fn))
         } else if lookahead.peek(Ident)
@@ -393,16 +450,19 @@ pub mod parsing {
                     paren_token: None,
                     modifier: TraitBoundModifier::None,
                     lifetimes: lifetimes,
                     path: ty.path,
                 }));
                 if allow_plus {
                     while input.peek(Token![+]) {
                         bounds.push_punct(input.parse()?);
+                        if input.peek(Token![>]) {
+                            break;
+                        }
                         bounds.push_value(input.parse()?);
                     }
                 }
                 return Ok(Type::TraitObject(TypeTraitObject {
                     dyn_token: None,
                     bounds: bounds,
                 }));
             }
@@ -635,16 +695,19 @@ pub mod parsing {
                     let mut bounds = Punctuated::new();
                     if allow_plus {
                         loop {
                             bounds.push_value(input.parse()?);
                             if !input.peek(Token![+]) {
                                 break;
                             }
                             bounds.push_punct(input.parse()?);
+                            if input.peek(Token![>]) {
+                                break;
+                            }
                         }
                     } else {
                         bounds.push_value(input.parse()?);
                     }
                     // Just lifetimes like `'a + 'b` is not a TraitObject.
                     if !at_least_one_type(&bounds) {
                         return Err(input.error("expected at least one type"));
                     }
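The new parenthesized trait-object handling above can be exercised roughly like this — a sketch, assuming syn 0.15 with its default "derive" and "parsing" features:

    use syn::Type;

    fn main() {
        // A `?Sized`-style bound in parentheses now parses as a trait object.
        let parsed: Type = syn::parse_str("(?Sized)").unwrap();
        match parsed {
            Type::TraitObject(_) => {}
            _ => panic!("expected a trait object"),
        }

        // Parenthesized sums of bounds keep parsing as before.
        let _paren_bounds: Type = syn::parse_str("(dyn std::error::Error + Send)").unwrap();
    }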
--- a/toolkit/content/tests/chrome/test_menulist.xul
+++ b/toolkit/content/tests/chrome/test_menulist.xul
@@ -16,17 +16,17 @@
     <menupopup id="menulist-popup"/>
   </menulist>
   <button label="Two"/>
   <button label="Three"/>
 </vbox>
 <richlistbox id="scroller-in-listbox" style="overflow: auto" height="60">
   <richlistitem allowevents="true">
     <menulist id="menulist-in-listbox" onpopupshown="test_menulist_open(this, this.parentNode.parentNode)"
-              onpopuphidden="SimpleTest.executeSoon(checkScrollAndFinish)">
+              onpopuphidden="SimpleTest.executeSoon(() => checkScrollAndFinish().catch(ex => ok(false, ex)));">
       <menupopup id="menulist-in-listbox-popup">
         <menuitem label="One" value="one"/>
         <menuitem label="Two" value="two"/>
       </menupopup>
     </menulist>
   </richlistitem>
   <richlistitem><label value="Two"/></richlistitem>
   <richlistitem><label value="Three"/></richlistitem>
@@ -69,16 +69,28 @@
     <menuitem label="Seven" value="seven"/>
     <menuitem label="Eight" value="eight"/>
   </menupopup>
 </menulist>
 
 <script class="testbody" type="application/javascript">
 <![CDATA[
 
+function waitForEvent(subject, eventName, capture, checkFn) {
+  return new Promise(resolve => {
+    subject.addEventListener(eventName, function listener(event) {
+      if (checkFn && !checkFn(event)) {
+        return;
+      }
+      subject.removeEventListener(eventName, listener, capture);
+      SimpleTest.executeSoon(() => resolve(event));
+    }, capture);
+  });
+}
+
 SimpleTest.waitForExplicitFinish();
 
 function testtag_menulists()
 {
   testtag_menulist_UI_start($("menulist"), false);
 }
 
 function testtag_menulist_UI_start(element)
@@ -220,102 +232,115 @@ function test_menulist_open(element, scr
   synthesizeMouse(element.menupopup.childNodes[1], 6, 6, { type: "mousemove" });
   is(element.activeChild, item, "activeChild after menu highlight " + element.id);
   is(element.selectedIndex, 0, "selectedIndex after menu highlight " + element.id);
   is(scroller.scrollTop, 0, "scroll position after menu highlight " + element.id);
 
   element.open = false;
 }
 
-function checkScrollAndFinish()
+async function checkScrollAndFinish()
 {
   is($("scroller").scrollTop, 0, "mousewheel on menulist does not scroll vbox parent");
   is($("scroller-in-listbox").scrollTop, 0, "mousewheel on menulist does not scroll listbox parent");
 
   let menulist = $("menulist-size");
-  menulist.addEventListener("popupshown", function testAltClose() {
-    menulist.removeEventListener("popupshown", testAltClose);
+  let shownPromise = waitForEvent(menulist, "popupshown");
+  menulist.open = true;
+  await shownPromise;
 
-    sendKey("ALT");
-    is(menulist.menupopup.state, "open", "alt doesn't close menulist");
-    menulist.open = false;
+  sendKey("ALT");
+  is(menulist.menupopup.state, "open", "alt doesn't close menulist");
+  menulist.open = false;
 
-    dragScroll();
-  });
-
-  menulist.open = true;
+  await dragScroll();
 }
 
-function dragScroll()
+async function dragScroll()
 {
   let menulist = $("menulist-clipped");
-  menulist.addEventListener("popupshown", function testDragScroll() {
-    menulist.removeEventListener("popupshown", testDragScroll);
 
-    let popup = menulist.menupopup;
-    let scrollPos = popup.scrollBox.scrollTop;
-    let popupRect = popup.getBoundingClientRect();
+  let shownPromise = waitForEvent(menulist, "popupshown");
+  menulist.open = true;
+  await shownPromise;
 
-    // First, check that scrolling does not occur when the mouse is moved over the
-    // anchor button but not the popup yet.
-    synthesizeMouseAtPoint(popupRect.left + 5, popupRect.top - 10, { type: "mousemove" });
-    is(popup.scrollBox.scrollTop, scrollPos, "scroll position after mousemove over button should not change");
+  let popup = menulist.menupopup;
+  let getScrollPos = () => popup.scrollBox.scrollbox.scrollTop;
+  let scrollPos = getScrollPos();
+  let popupRect = popup.getBoundingClientRect();
 
-    synthesizeMouseAtPoint(popupRect.left + 20, popupRect.top + 10, { type: "mousemove" });
+  // First, check that scrolling does not occur when the mouse is moved over the
+  // anchor button but not the popup yet.
+  synthesizeMouseAtPoint(popupRect.left + 5, popupRect.top - 10, { type: "mousemove" });
+  is(getScrollPos(), scrollPos, "scroll position after mousemove over button should not change");
+
+  synthesizeMouseAtPoint(popupRect.left + 20, popupRect.top + 10, { type: "mousemove" });
 
-    // Dragging above the popup scrolls it up.
-    synthesizeMouseAtPoint(popupRect.left + 20, popupRect.top - 20, { type: "mousemove" });
-    ok(popup.scrollBox.scrollTop < scrollPos - 5, "scroll position at drag up");
+  // Dragging above the popup scrolls it up.
+  let scrolledPromise = waitForEvent(popup, "scroll", false,
+    () => getScrollPos() < scrollPos - 5);
+  synthesizeMouseAtPoint(popupRect.left + 20, popupRect.top - 20, { type: "mousemove" });
+  await scrolledPromise;
+  ok(true, "scroll position at drag up");
 
-    // Dragging below the popup scrolls it down.
-    scrollPos = popup.scrollBox.scrollTop;
-    synthesizeMouseAtPoint(popupRect.left + 20, popupRect.bottom + 20, { type: "mousemove" });
-    ok(popup.scrollBox.scrollTop > scrollPos + 5, "scroll position at drag down");
+  // Dragging below the popup scrolls it down.
+  scrollPos = getScrollPos();
+  scrolledPromise = waitForEvent(popup, "scroll", false,
+    () => getScrollPos() > scrollPos + 5);
+  synthesizeMouseAtPoint(popupRect.left + 20, popupRect.bottom + 20, { type: "mousemove" });
+  await scrolledPromise;
+  ok(true, "scroll position at drag down");
 
-    // Releasing the mouse button and moving the mouse does not change the scroll position.
-    scrollPos = popup.scrollBox.scrollTop;
-    synthesizeMouseAtPoint(popupRect.left + 20, popupRect.bottom + 25, { type: "mouseup" });
-    is(popup.scrollBox.scrollTop, scrollPos, "scroll position at mouseup should not change");
+  // Releasing the mouse button and moving the mouse does not change the scroll position.
+  scrollPos = getScrollPos();
+  synthesizeMouseAtPoint(popupRect.left + 20, popupRect.bottom + 25, { type: "mouseup" });
+  is(getScrollPos(), scrollPos, "scroll position at mouseup should not change");
 
-    synthesizeMouseAtPoint(popupRect.left + 20, popupRect.bottom + 20, { type: "mousemove" });
-    is(popup.scrollBox.scrollTop, scrollPos, "scroll position at mousemove after mouseup should not change");
+  synthesizeMouseAtPoint(popupRect.left + 20, popupRect.bottom + 20, { type: "mousemove" });
+  is(getScrollPos(), scrollPos, "scroll position at mousemove after mouseup should not change");
 
-    // Now check dragging with a mousedown on an item
-    let menuRect = popup.childNodes[4].getBoundingClientRect();
-    synthesizeMouseAtPoint(menuRect.left + 5, menuRect.top + 5, { type: "mousedown" });
-
-    // Dragging below the popup scrolls it down.
-    synthesizeMouseAtPoint(popupRect.left + 20, popupRect.bottom + 20, { type: "mousemove" });
-    ok(popup.scrollBox.scrollTop > scrollPos + 5, "scroll position at drag down from item");
+  // Now check dragging with a mousedown on an item. Make sure the element is
+  // visible, as the asynchronous scrolling may have moved it out of view.
+  popup.childNodes[4].scrollIntoView({ block: "nearest", behavior: "instant" });
+  let menuRect = popup.childNodes[4].getBoundingClientRect();
+  synthesizeMouseAtPoint(menuRect.left + 5, menuRect.top + 5, { type: "mousedown" });
 
-    // Dragging above the popup scrolls it up.
-    synthesizeMouseAtPoint(popupRect.left + 20, popupRect.top - 20, { type: "mousemove" });
-    is(popup.scrollBox.scrollTop, scrollPos, "scroll position at drag up from item");
-
-    scrollPos = popup.scrollBox.scrollTop;
-    synthesizeMouseAtPoint(popupRect.left + 20, popupRect.bottom + 25, { type: "mouseup" });
-    is(popup.scrollBox.scrollTop, scrollPos, "scroll position at mouseup should not change");
+  // Dragging below the popup scrolls it down.
+  scrolledPromise = waitForEvent(popup, "scroll", false,
+    () => getScrollPos() > scrollPos + 5);
+  synthesizeMouseAtPoint(popupRect.left + 20, popupRect.bottom + 20, { type: "mousemove" });
+  await scrolledPromise;
+  ok(true, "scroll position at drag down from item");
 
-    synthesizeMouseAtPoint(popupRect.left + 20, popupRect.bottom + 20, { type: "mousemove" });
-    is(popup.scrollBox.scrollTop, scrollPos, "scroll position at mousemove after mouseup should not change");
+  // Dragging above the popup scrolls it up.
+  scrollPos = getScrollPos();
+  scrolledPromise = waitForEvent(popup, "scroll", false,
+    () => getScrollPos() < scrollPos - 5);
+  synthesizeMouseAtPoint(popupRect.left + 20, popupRect.top - 20, { type: "mousemove" });
+  await scrolledPromise;
+  ok(true, "scroll position at drag up from item");
 
-    menulist.open = false;
+  scrollPos = getScrollPos();
+  synthesizeMouseAtPoint(popupRect.left + 20, popupRect.bottom + 25, { type: "mouseup" });
+  is(getScrollPos(), scrollPos, "scroll position at mouseup should not change");
 
-    let mouseMoveTarget = null;
-    popup.childNodes[4].click();
-    addEventListener("mousemove", function checkMouseMove(event) {
-      mouseMoveTarget = event.target;
-    }, {once: true});
-    synthesizeMouseAtPoint(popupRect.left + 20, popupRect.bottom + 20, { type: "mousemove" });
-    isnot(mouseMoveTarget, popup, "clicking on item when popup closed doesn't start dragging");
+  synthesizeMouseAtPoint(popupRect.left + 20, popupRect.bottom + 20, { type: "mousemove" });
+  is(getScrollPos(), scrollPos, "scroll position at mousemove after mouseup should not change");
+
+  menulist.open = false;
 
-    SimpleTest.finish();
-  });
+  let mouseMoveTarget = null;
+  popup.childNodes[4].click();
+  addEventListener("mousemove", function checkMouseMove(event) {
+    mouseMoveTarget = event.target;
+  }, {once: true});
+  synthesizeMouseAtPoint(popupRect.left + 20, popupRect.bottom + 20, { type: "mousemove" });
+  isnot(mouseMoveTarget, popup, "clicking on item when popup closed doesn't start dragging");
 
-  menulist.open = true;
+  SimpleTest.finish();
 }
 
 ]]>
 </script>
 
 <body xmlns="http://www.w3.org/1999/xhtml">
 <p id="display">
 </p>
--- a/toolkit/content/tests/chrome/test_mousescroll.xul
+++ b/toolkit/content/tests/chrome/test_mousescroll.xul
@@ -144,17 +144,17 @@ function* testRichListbox(id)
     yield* helper(5, -delta,  0, deltaModes[i]);
     yield* helper(5,  delta,  1, deltaModes[i]);
     yield* helper(5,  delta,  0, deltaModes[i]);
   }
 }
 
 function* testArrowScrollbox(id)
 {
-  var scrollbox = document.getElementById(id)._scrollbox;
+  var scrollbox = document.getElementById(id).scrollbox;
   var orient = scrollbox.getAttribute("orient");
 
   function* helper(aStart, aDelta, aDeltaMode, aExpected)
   {
     var lineOrPageDelta = (aDeltaMode == WheelEvent.DOM_DELTA_PIXEL) ? aDelta / 10 : aDelta;
     var orientIsHorizontal = (orient == "horizontal");
 
     scrollbox.scrollTo(aStart, aStart);
--- a/toolkit/content/tests/chrome/window_largemenu.xul
+++ b/toolkit/content/tests/chrome/window_largemenu.xul
@@ -98,17 +98,17 @@ function popupShown()
   if (gTests[gTestIndex] == "menu movement")
     return testPopupMovement();
 
   if (gContextMenuTests)
     return contextMenuPopupShown();
 
   var popup = document.getElementById("popup");
   var rect = popup.getBoundingClientRect();
-  var scrollbox = document.getAnonymousNodes(popup)[0]._scrollbox;
+  var scrollbox = document.getAnonymousNodes(popup)[0].scrollbox;
   var expectedScrollPos = 0;
 
   if (gTestIndex == 0) {
     // the popup should be in the center of the screen
     // note that if the height is odd, the y-offset will have been rounded
     // down when we pass the fractional value to openPopupAtScreen above.
     is(Math.round(rect.top) + gScreenY, Math.floor(screen.height / 2),
                               gTests[gTestIndex] + " top");
--- a/toolkit/content/widgets/popup.xml
+++ b/toolkit/content/widgets/popup.xml
@@ -260,19 +260,20 @@
       <handler event="popuppositioned" phase="target">
         this.adjustArrowPosition();
       </handler>
     </handlers>
   </binding>
 
   <binding id="popup-scrollbars" extends="chrome://global/content/bindings/popup.xml#popup">
     <content>
-      <xul:scrollbox class="popup-internal-box" flex="1" orient="vertical" style="overflow: auto;">
+      <xul:arrowscrollbox class="popup-internal-box" flex="1" orient="vertical"
+                          smoothscroll="false">
         <children/>
-      </xul:scrollbox>
+      </xul:arrowscrollbox>
     </content>
     <implementation>
       <field name="AUTOSCROLL_INTERVAL">25</field>
       <field name="NOT_DRAGGING">0</field>
       <field name="DRAG_OVER_BUTTON">-1</field>
       <field name="DRAG_OVER_POPUP">1</field>
 
       <field name="_draggingState">this.NOT_DRAGGING</field>
@@ -363,21 +364,21 @@
             if (event.screenY > popupRect.top && event.screenY < popupRect.bottom) {
               this._draggingState = this.DRAG_OVER_POPUP;
             }
           }
 
           if (this._draggingState == this.DRAG_OVER_POPUP &&
               (event.screenY <= popupRect.top || event.screenY >= popupRect.bottom)) {
             let scrollAmount = event.screenY <= popupRect.top ? -1 : 1;
-            this.scrollBox.scrollByIndex(scrollAmount);
+            this.scrollBox.scrollByIndex(scrollAmount, true);
 
             let win = this.ownerGlobal;
             this._scrollTimer = win.setInterval(() => {
-              this.scrollBox.scrollByIndex(scrollAmount);
+              this.scrollBox.scrollByIndex(scrollAmount, true);
             }, this.AUTOSCROLL_INTERVAL);
           }
         }
       ]]>
       </handler>
     </handlers>
   </binding>
 
--- a/toolkit/content/widgets/scrollbox.xml
+++ b/toolkit/content/widgets/scrollbox.xml
@@ -61,17 +61,17 @@
       <destructor><![CDATA[
         // Release timer to avoid reference cycles.
         if (this._scrollTimer) {
           this._scrollTimer.cancel();
           this._scrollTimer = null;
         }
       ]]></destructor>
 
-      <field name="_scrollbox">
+      <field name="scrollbox">
         document.getAnonymousElementByAttribute(this, "anonid", "scrollbox");
       </field>
       <field name="_scrollButtonUp">
         document.getAnonymousElementByAttribute(this, "anonid", "scrollbutton-up");
       </field>
       <field name="_scrollButtonDown">
         document.getAnonymousElementByAttribute(this, "anonid", "scrollbutton-down");
       </field>
@@ -148,39 +148,39 @@
         <setter><![CDATA[
           this.setAttribute("smoothscroll", !!val);
           return val;
         ]]></setter>
       </property>
 
       <property name="scrollBoxObject" readonly="true">
         <getter><![CDATA[
-          return this._scrollbox.boxObject;
+          return this.scrollbox.boxObject;
         ]]></getter>
       </property>
 
       <property name="scrollClientRect" readonly="true">
         <getter><![CDATA[
-          return this._scrollbox.getBoundingClientRect();
+          return this.scrollbox.getBoundingClientRect();
         ]]></getter>
       </property>
 
       <property name="scrollClientSize" readonly="true">
         <getter><![CDATA[
           return this.orient == "vertical" ?
-                 this._scrollbox.clientHeight :
-                 this._scrollbox.clientWidth;
+                 this.scrollbox.clientHeight :
+                 this.scrollbox.clientWidth;
         ]]></getter>
       </property>
 
       <property name="scrollSize" readonly="true">
         <getter><![CDATA[
           return this.orient == "vertical" ?
-                 this._scrollbox.scrollHeight :
-                 this._scrollbox.scrollWidth;
+                 this.scrollbox.scrollHeight :
+                 this.scrollbox.scrollWidth;
         ]]></getter>
       </property>
 
       <property name="lineScrollAmount" readonly="true">
         <getter><![CDATA[
          // line scroll amount should be the width (at horizontal scrollbox) or
           // the height (at vertical scrollbox) of the scrolled elements.
           // However, the elements may have different width or height.  So,
@@ -188,28 +188,28 @@
           var elements = this._getScrollableElements();
           return elements.length && (this.scrollSize / elements.length);
         ]]></getter>
       </property>
 
       <property name="scrollPosition" readonly="true">
         <getter><![CDATA[
           return this.orient == "vertical" ?
-                 this._scrollbox.scrollTop :
-                 this._scrollbox.scrollLeft;
+                 this.scrollbox.scrollTop :
+                 this.scrollbox.scrollLeft;
         ]]></getter>
       </property>
 
       <field name="_startEndProps"><![CDATA[
         this.orient == "vertical" ? ["top", "bottom"] : ["left", "right"];
       ]]></field>
 
       <field name="_isRTLScrollbox"><![CDATA[
         this.orient != "vertical" &&
-        document.defaultView.getComputedStyle(this._scrollbox).direction == "rtl";
+        document.defaultView.getComputedStyle(this.scrollbox).direction == "rtl";
       ]]></field>
 
       <method name="_onButtonClick">
         <parameter name="event"/>
         <body><![CDATA[
           if (this._clickToScroll) {
             this._distanceScroll(event);
           }
@@ -293,17 +293,18 @@
         <body><![CDATA[
           if (!this._canScrollToElement(element))
             return;
 
           if (this._ensureElementIsVisibleAnimationFrame) {
             window.cancelAnimationFrame(this._ensureElementIsVisibleAnimationFrame);
           }
           this._ensureElementIsVisibleAnimationFrame = window.requestAnimationFrame(() => {
-            element.scrollIntoView({ behavior: aInstant ? "instant" : "auto" });
+            element.scrollIntoView({ block: "nearest",
+                                     behavior: aInstant ? "instant" : "auto" });
             this._ensureElementIsVisibleAnimationFrame = 0;
           });
         ]]></body>
       </method>
 
       <method name="scrollByIndex">
         <parameter name="index"/>
         <parameter name="aInstant"/>
@@ -533,17 +534,17 @@
       </method>
 
       <method name="scrollByPixels">
         <parameter name="aPixels"/>
         <parameter name="aInstant"/>
         <body><![CDATA[
           let scrollOptions = { behavior: aInstant ? "instant" : "auto" };
           scrollOptions[this._startEndProps[0]] = aPixels;
-          this._scrollbox.scrollBy(scrollOptions);
+          this.scrollbox.scrollBy(scrollOptions);
         ]]></body>
       </method>
 
       <field name="_prevMouseScrolls">[null, null]</field>
 
       <field name="_touchStart">-1</field>
 
       <field name="_scrollButtonUpdatePending">false</field>
@@ -559,16 +560,21 @@
             return;
           }
           this._scrollButtonUpdatePending = true;
 
           // Wait until after the next paint to get current layout data from
           // getBoundsWithoutFlushing.
           window.requestAnimationFrame(() => {
             setTimeout(() => {
+              if (!this._startEndProps) {
+                // We've been destroyed in the meantime.
+                return;
+              }
+
               this._scrollButtonUpdatePending = false;
 
               let scrolledToStart = false;
               let scrolledToEnd = false;
 
               if (this.hasAttribute("notoverflowing")) {
                 scrolledToStart = true;
                 scrolledToEnd = true;
@@ -579,21 +585,21 @@
 
                 let elements = this._getScrollableElements();
                 let [leftOrTopElement, rightOrBottomElement] = [elements[0], elements[elements.length - 1]];
                 if (this._isRTLScrollbox) {
                   [leftOrTopElement, rightOrBottomElement] = [rightOrBottomElement, leftOrTopElement];
                 }
 
                 if (leftOrTopElement &&
-                    leftOrTopEdge(leftOrTopElement) >= leftOrTopEdge(this._scrollbox)) {
+                    leftOrTopEdge(leftOrTopElement) >= leftOrTopEdge(this.scrollbox)) {
                   scrolledToStart = !this._isRTLScrollbox;
                   scrolledToEnd = this._isRTLScrollbox;
                 } else if (rightOrBottomElement &&
-                           rightOrBottomEdge(rightOrBottomElement) <= rightOrBottomEdge(this._scrollbox)) {
+                           rightOrBottomEdge(rightOrBottomElement) <= rightOrBottomEdge(this.scrollbox)) {
                   scrolledToStart = this._isRTLScrollbox;
                   scrolledToEnd = !this._isRTLScrollbox;
                 }
               }
 
               if (scrolledToEnd) {
                 this.setAttribute("scrolledtoend", "true");
               } else {
--- a/toolkit/content/xul.css
+++ b/toolkit/content/xul.css
@@ -631,16 +631,27 @@ menulist[popuponly="true"] {
   min-height: 0 !important;
   border: 0 !important;
 }
 
 menulist > menupopup > menuitem {
   -moz-binding: url("chrome://global/content/bindings/menu.xml#menuitem-iconic-noaccel");
 }
 
+menulist > menupopup > .popup-internal-box > .scrollbutton-up,
+menulist > menupopup > .popup-internal-box > .arrowscrollbox-overflow-start-indicator,
+menulist > menupopup > .popup-internal-box > .arrowscrollbox-overflow-end-indicator,
+menulist > menupopup > .popup-internal-box > .scrollbutton-down {
+  display: none;
+}
+
+menulist > menupopup > .popup-internal-box > .arrowscrollbox-scrollbox {
+  overflow: auto;
+}
+
 dropmarker > .dropmarker-icon {
   pointer-events: none;
 }
 
 /********** splitter **********/
 
 .tree-splitter {
   width: 0px;
--- a/toolkit/modules/SelectParentHelper.jsm
+++ b/toolkit/modules/SelectParentHelper.jsm
@@ -240,17 +240,17 @@ var SelectParentHelper = {
 
     if (msg.name == "Forms:UpdateDropDown") {
       // Sanity check - we'd better know what the currently
       // opened menulist is, and what browser it belongs to...
       if (!currentMenulist) {
         return;
       }
 
-      let scrollBox = currentMenulist.menupopup.scrollBox;
+      let scrollBox = currentMenulist.menupopup.scrollBox.scrollbox;
       let scrollTop = scrollBox.scrollTop;
 
       let options = msg.data.options;
       let selectedIndex = msg.data.selectedIndex;
       let uaSelectBackgroundColor = msg.data.uaSelectBackgroundColor;
       let uaSelectColor = msg.data.uaSelectColor;
       let selectBackgroundColor = msg.data.selectBackgroundColor;
       let selectColor = msg.data.selectColor;