Merge inbound to mozilla-central a=merge
author: arthur.iakab <aiakab@mozilla.com>
date: Thu, 30 Aug 2018 00:56:06 +0300
changeset: 433964:2b50a2ad969a326c3d066426d6e823c44de5b7d4
parent: 433925:b946a1ac70e563013202fb7eb6c096de53249eca (current diff)
parent: 433963:034adef609b91e0c2dc2c353dcb5d5dc9ff18a40 (diff)
child: 433965:c6555188a710a4843da3862995860fe847d76287
push id: 34526
push user: aiakab@mozilla.com
push date: Wed, 29 Aug 2018 21:56:30 +0000
treeherder: mozilla-central@2b50a2ad969a
reviewers: merge
milestone: 63.0a1
Merge inbound to mozilla-central a=merge
browser/components/preferences/in-content/privacy.js
browser/components/preferences/in-content/privacy.xul
browser/locales/en-US/browser/preferences/preferences.ftl
python/mozbuild/mozbuild/mach_commands.py
taskcluster/taskgraph/transforms/task.py
third_party/rust/parking_lot_core/src/stable.rs
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -9,20 +9,20 @@ dependencies = [
 
 [[package]]
 name = "adler32"
 version = "1.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
 name = "aho-corasick"
-version = "0.6.3"
+version = "0.6.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "memchr 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "memchr 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "ansi_term"
 version = "0.11.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "winapi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -70,22 +70,22 @@ source = "registry+https://github.com/ru
 dependencies = [
  "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
  "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "atty"
-version = "0.2.2"
+version = "0.2.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "termion 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "audioipc"
 version = "0.2.4"
 dependencies = [
  "bincode 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "bytes 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -311,17 +311,17 @@ dependencies = [
 ]
 
 [[package]]
 name = "clap"
 version = "2.31.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "ansi_term 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "atty 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "atty 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
  "bitflags 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "strsim 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "term_size 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "textwrap 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
  "vec_map 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
@@ -680,17 +680,17 @@ dependencies = [
  "simd 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "env_logger"
 version = "0.5.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "atty 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "atty 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
  "humantime 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "log 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "termcolor 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "error-chain"
@@ -762,22 +762,21 @@ name = "freetype"
 version = "0.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "fs2"
-version = "0.4.2"
+version = "0.4.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "fuchsia-zircon"
 version = "0.3.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "bitflags 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1288,17 +1287,17 @@ dependencies = [
  "libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "memmap"
 version = "0.5.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "fs2 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "fs2 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
  "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "memoffset"
 version = "0.2.1"
@@ -1438,22 +1437,21 @@ dependencies = [
 
 [[package]]
 name = "mp4parse_fallible"
 version = "0.0.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
 name = "msdos_time"
-version = "0.1.5"
+version = "0.1.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "time 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "net2"
 version = "0.2.32"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1599,29 +1597,28 @@ dependencies = [
 ]
 
 [[package]]
 name = "parking_lot"
 version = "0.6.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "lock_api 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "parking_lot_core 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "parking_lot_core 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "parking_lot_core"
-version = "0.2.7"
+version = "0.2.14"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
- "rand 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rand 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "smallvec 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "peeking_take_while"
 version = "0.1.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
@@ -1797,42 +1794,50 @@ dependencies = [
 name = "rayon-core"
 version = "1.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "crossbeam-deque 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "lazy_static 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
  "num_cpus 1.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "rand 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rand 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "redox_syscall"
 version = "0.1.32"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
+name = "redox_termios"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "redox_syscall 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
 name = "regex"
 version = "0.2.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "aho-corasick 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "aho-corasick 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)",
  "memchr 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "regex-syntax 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "thread_local 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
  "utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "regex"
 version = "1.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "aho-corasick 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "aho-corasick 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)",
  "memchr 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "regex-syntax 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "thread_local 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
  "utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "regex-syntax"
@@ -2271,16 +2276,26 @@ dependencies = [
 name = "termcolor"
 version = "0.3.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "wincolor 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
+name = "termion"
+version = "1.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
+ "redox_syscall 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)",
+ "redox_termios 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
 name = "textwrap"
 version = "0.9.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "term_size 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
@@ -2807,33 +2822,33 @@ dependencies = [
 
 [[package]]
 name = "zip"
 version = "0.3.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "bzip2 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "flate2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "msdos_time 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "msdos_time 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "podio 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
  "time 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [metadata]
 "checksum Inflector 0.11.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1b33cd9b653730fc539c53c7b3c672d2f47108fa20c6df571fa5817178f5a14c"
 "checksum adler32 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6cbd0b9af8587c72beadc9f72d35b9fbb070982c9e6203e46e93f10df25f8f45"
-"checksum aho-corasick 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)" = "500909c4f87a9e52355b26626d890833e9e1d53ac566db76c36faa984b889699"
+"checksum aho-corasick 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)" = "68f56c7353e5a9547cbd76ed90f7bb5ffc3ba09d4ea9bd1d8c06c8b1142eeb5a"
 "checksum ansi_term 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b"
 "checksum app_units 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9dadc668390b373e73e4abbfc1f07238b09a25858f2f39c06cebc6d8e141d774"
 "checksum arrayref 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "0fd1479b7c29641adbd35ff3b5c293922d696a92f25c8c975da3e0acbc87258f"
 "checksum arrayvec 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "2f0ef4a9820019a0c91d918918c93dc71d469f581a49b47ddc1d285d4270bbe2"
 "checksum ascii-canvas 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b385d69402821a1c254533a011a312531cbcc0e3e24f19bbb4747a5a2daf37e2"
 "checksum atomic_refcell 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fb2dcb6e6d35f20276943cc04bb98e538b348d525a04ac79c10021561d202f21"
 "checksum atty 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d0fd4c0631f06448cc45a6bbb3b710ebb7ff8ccb96a0800c994afe23a70d5df2"
-"checksum atty 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d912da0db7fa85514874458ca3651fe2cddace8d0b0505571dbdcd41ab490159"
+"checksum atty 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "9a7d5b8723950951411ee34d271d99dddcc2035a16ab25310ea2c8cfd4369652"
 "checksum base64 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "96434f987501f0ed4eb336a411e0631ecd1afa11574fe148587adc4ff96143c9"
 "checksum binary-space-partition 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "88ceb0d16c4fd0e42876e298d7d3ce3780dd9ebdcbe4199816a32c77e08597ff"
 "checksum bincode 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bda13183df33055cbb84b847becce220d392df502ebe7a4a78d7021771ed94d0"
 "checksum bindgen 0.37.4 (registry+https://github.com/rust-lang/crates.io-index)" = "1b25ab82877ea8fe6ce1ce1f8ac54361f0218bad900af9eb11803994bf67c221"
 "checksum binjs_meta 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)" = "fd7ca5635f1c6f94aaef7de76cb834c5920578355ce41dbcaf731b7ebe348518"
 "checksum bit-set 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d9bf6104718e80d7b26a68fdbacff3481cfc05df670821affc7e9cbc1884400c"
 "checksum bit-vec 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "02b4ff8b16e6076c3e14220b39fbc1fabb6737522281a388998046859400895f"
 "checksum bitflags 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b3c30d3802dfb7281680d6285f2ccdaa8c2d8fee41f93805dba5c4cf50dc23cf"
@@ -2891,17 +2906,17 @@ dependencies = [
 "checksum euclid 0.19.0 (registry+https://github.com/rust-lang/crates.io-index)" = "70a2ebdf55fb9d6329046e026329a55ef8fbaae5ea833f56e170beb3125a8a5f"
 "checksum failure 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7efb22686e4a466b1ec1a15c2898f91fa9cb340452496dca654032de20ff95b9"
 "checksum failure_derive 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "946d0e98a50d9831f5d589038d2ca7f8f455b1c21028c0db0e84116a12696426"
 "checksum fixedbitset 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "85cb8fec437468d86dc7c83ca7cfc933341d561873275f22dd5eedefa63a6478"
 "checksum flate2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9fac2277e84e5e858483756647a9d0aa8d9a2b7cba517fd84325a0aaa69a0909"
 "checksum fnv 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "6cc484842f1e2884faf56f529f960cc12ad8c71ce96cc7abba0a067c98fee344"
 "checksum foreign-types 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5ebc04f19019fff1f2d627b5581574ead502f80c48c88900575a46e0840fe5d0"
 "checksum freetype 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b659e75b7a7338fe75afd7f909fc2b71937845cffb6ebe54ba2e50f13d8e903d"
-"checksum fs2 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9ab76cfd2aaa59b7bf6688ad9ba15bbae64bff97f04ea02144cfd3443e5c2866"
+"checksum fs2 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "9564fc758e15025b46aa6643b1b77d047d1a56a1aea6e01002ac0c7026876213"
 "checksum fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82"
 "checksum fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7"
 "checksum futures 0.1.23 (registry+https://github.com/rust-lang/crates.io-index)" = "884dbe32a6ae4cd7da5c6db9b78114449df9953b8d490c9d7e1b51720b922c62"
 "checksum futures-cpupool 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "ab90cde24b3319636588d0c35fe03b1333857621051837ed769faefb4c2162e4"
 "checksum fxhash 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c"
 "checksum gcc 0.3.54 (registry+https://github.com/rust-lang/crates.io-index)" = "5e33ec290da0d127825013597dbdfc28bee4964690c7ce1166cbc2a7bd08b1bb"
 "checksum gdi32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0912515a8ff24ba900422ecda800b52f4016a56251922d397c576bf92c690518"
 "checksum gl_generator 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7a795170cbd85b5a7baa58d6d7525cae6a03e486859860c220f7ebbbdd379d0a"
@@ -2944,34 +2959,34 @@ dependencies = [
 "checksum memoffset 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0f9dc261e2b62d7a622bf416ea3c5245cdd5d9a7fcc428c0d06804dfce1775b3"
 "checksum miniz_oxide 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "aaa2d3ad070f428fffbd7d3ca2ea20bb0d8cffe9024405c44e1840bc1418b398"
 "checksum miniz_oxide_c_api 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "92d98fdbd6145645828069b37ea92ca3de225e000d80702da25c20d3584b38a5"
 "checksum mio 0.6.15 (registry+https://github.com/rust-lang/crates.io-index)" = "4fcfcb32d63961fb6f367bfd5d21e4600b92cd310f71f9dca25acae196eb1560"
 "checksum mio-uds 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)" = "1731a873077147b626d89cc6c2a0db6288d607496c5d10c0cfcf3adc697ec673"
 "checksum miow 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1f2f3b1cf331de6896aabf6e9d55dca90356cc9960cca7eaaf408a355ae919"
 "checksum moz_cbor 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "20c82a57087fd5990d7122dbff1607c3b20c3d2958e9d9ad9765aab415e2c91c"
 "checksum mp4parse_fallible 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6626c2aef76eb8f984eef02e475883d3fe9112e114720446c5810fc5f045cd30"
-"checksum msdos_time 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "65ba9d75bcea84e07812618fedf284a64776c2f2ea0cad6bca7f69739695a958"
+"checksum msdos_time 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "aad9dfe950c057b1bfe9c1f2aa51583a8468ef2a5baba2ebbe06d775efeb7729"
 "checksum net2 0.2.32 (registry+https://github.com/rust-lang/crates.io-index)" = "9044faf1413a1057267be51b5afba8eb1090bd2231c693664aa1db716fe1eae0"
 "checksum new-ordered-float 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8ccbebba6fb53a6d2bdcfaf79cb339bc136dee3bfff54dc337a334bafe36476a"
 "checksum new_debug_unreachable 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0cdc457076c78ab54d5e0d6fa7c47981757f1e34dc39ff92787f217dede586c4"
 "checksum nodrop 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "9a2228dca57108069a5262f2ed8bd2e82496d2e074a06d1ccc7ce1687b6ae0a2"
 "checksum nom 1.2.4 (registry+https://github.com/rust-lang/crates.io-index)" = "a5b8c256fd9471521bcb84c3cdba98921497f1a331cbc15b8030fc63b82050ce"
 "checksum nom 3.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "05aec50c70fd288702bcd93284a8444607f3292dbdf2a30de5ea5dcdbe72287b"
 "checksum num 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)" = "a311b77ebdc5dd4cf6449d81e4135d9f0e3b153839ac90e648a8ef538f923525"
 "checksum num-derive 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "0d2c31b75c36a993d30c7a13d70513cb93f02acafdd5b7ba250f9b0e18615de7"
 "checksum num-integer 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)" = "d1452e8b06e448a07f0e6ebb0bb1d92b8890eea63288c0b627331d53514d0fba"
 "checksum num-iter 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)" = "7485fcc84f85b4ecd0ea527b14189281cf27d60e583ae65ebc9c088b13dffe01"
 "checksum num-traits 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)" = "92e5113e9fd4cc14ded8e499429f396a20f98c772a47cc8622a736e1ec843c31"
 "checksum num-traits 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e7de20f146db9d920c45ee8ed8f71681fd9ade71909b48c3acbd766aa504cf10"
 "checksum num_cpus 1.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "514f0d73e64be53ff320680ca671b64fe3fb91da01e1ae2ddc99eb51d453b20d"
 "checksum ordermap 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "a86ed3f5f244b372d6b1a00b72ef7f8876d0bc6a78a4c9985c53614041512063"
 "checksum owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "cdf84f41639e037b484f93433aa3897863b561ed65c6e59c7073d7c561710f37"
 "checksum parking_lot 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)" = "69376b761943787ebd5cc85a5bc95958651a22609c5c1c2b65de21786baec72b"
-"checksum parking_lot_core 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "6c677d78851950b3aec390e681a411f78cc250cba277d4f578758a377f727970"
+"checksum parking_lot_core 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)" = "4db1a8ccf734a7bce794cc19b3df06ed87ab2f3907036b693c68f56b4d4537fa"
 "checksum peeking_take_while 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099"
 "checksum percent-encoding 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "de154f638187706bde41d9b4738748933d64e6b37bdbffc0b47a97d16a6ae356"
 "checksum petgraph 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)" = "7a7e5234c228fbfa874c86a77f685886127f82e0aef602ad1d48333fcac6ad61"
 "checksum phf 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "cb325642290f28ee14d8c6201159949a872f220c62af6e110a56ea914fbe42fc"
 "checksum phf_codegen 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "d62594c0bb54c464f633175d502038177e90309daf2e0158be42ed5f023ce88f"
 "checksum phf_generator 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "6b07ffcc532ccc85e3afc45865469bf5d9e4ef5bfcf9622e3cfe80c2d275ec03"
 "checksum phf_shared 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "07e24b0ca9643bdecd0632f2b3da6b1b89bbb0030e0b992afc1113b23a7bc2f2"
 "checksum pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "3a8b4c6b8165cd1a1cd4b9b120978131389f64bdaf456435caa41e630edba903"
@@ -2985,16 +3000,17 @@ dependencies = [
 "checksum quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e920b65c65f10b2ae65c831a81a073a89edd28c7cce89475bff467ab4167a"
 "checksum quote 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9949cfe66888ffe1d53e6ec9d9f3b70714083854be20fd5e271b232a017401e8"
 "checksum quote 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)" = "e44651a0dc4cdd99f71c83b561e221f714912d11af1a4dff0631f923d53af035"
 "checksum rand 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)" = "15a732abf9d20f0ad8eeb6f909bf6868722d9a06e1e50802b6a70351f40b4eb1"
 "checksum rand 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "8356f47b32624fef5b3301c1be97e5944ecdd595409cc5da11d05f211db6cfbd"
 "checksum rayon 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "485541959c8ecc49865526fe6c4de9653dd6e60d829d6edf0be228167b60372d"
 "checksum rayon-core 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9d24ad214285a7729b174ed6d3bcfcb80177807f959d95fafd5bfc5c4f201ac8"
 "checksum redox_syscall 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)" = "ab105df655884ede59d45b7070c8a65002d921461ee813a024558ca16030eea0"
+"checksum redox_termios 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7e891cfe48e9100a70a3b6eb652fef28920c117d366339687bd5576160db0f76"
 "checksum regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1731164734096285ec2a5ec7fea5248ae2f5485b3feeb0115af4fda2183b2d1b"
 "checksum regex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "75ecf88252dce580404a22444fc7d626c01815debba56a7f4f536772a5ff19d3"
 "checksum regex-syntax 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ad890a5eef7953f55427c50575c680c42841653abd2b028b68cd223d157f62db"
 "checksum regex-syntax 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8f1ac0f60d675cc6cf13a20ec076568254472551051ad5dd050364d70671bf6b"
 "checksum rkv 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "21983ae9330b1e1cb1d01868229618a3c7cc5134955f0dc1a86a0a1886f3acb7"
 "checksum ron 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "da06feaa07f69125ab9ddc769b11de29090122170b402547f64b86fe16ebc399"
 "checksum runloop 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5d79b4b604167921892e84afbbaad9d5ad74e091bf6c511d9dbfb0593f09fabd"
 "checksum rust-ini 0.10.3 (registry+https://github.com/rust-lang/crates.io-index)" = "8a654c5bda722c699be6b0fe4c0d90de218928da5b724c3e467fc48865c37263"
@@ -3027,16 +3043,17 @@ dependencies = [
 "checksum syn 0.13.1 (registry+https://github.com/rust-lang/crates.io-index)" = "91b52877572087400e83d24b9178488541e3d535259e04ff17a63df1e5ceff59"
 "checksum syn 0.14.6 (registry+https://github.com/rust-lang/crates.io-index)" = "4e4b5274d4a0a3d2749d5c158dc64d3403e60554dc61194648787ada5212473d"
 "checksum synstructure 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "98cad891cd238c98e1f0aec9f7c0f620aa696e4e5f7daba56ac67b5e86a6b049"
 "checksum synstructure 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "85bb9b7550d063ea184027c9b8c20ac167cd36d3e06b3a40bceb9d746dc1a7b7"
 "checksum tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "87974a6f5c1dfb344d733055601650059a3363de2a6104819293baff662132d6"
 "checksum term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "fa63644f74ce96fbeb9b794f66aff2a52d601cbd5e80f4b97123e3899f4570f1"
 "checksum term_size 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2b6b55df3198cc93372e85dd2ed817f0e38ce8cc0f22eb32391bfad9c4bf209"
 "checksum termcolor 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "adc4587ead41bf016f11af03e55a624c06568b5a19db4e90fde573d805074f83"
+"checksum termion 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "689a3bdfaab439fd92bc87df5c4c78417d3cbe537487274e9b0b2dce76e92096"
 "checksum textwrap 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c0b59b6b4b44d867f1370ef1bd91bfb262bf07bf0ae65c202ea2fbc16153b693"
 "checksum thin-slice 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8eaa81235c7058867fa8c0e7314f33dcce9c215f535d1913822a2b3f5e289f3c"
 "checksum thread_local 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "279ef31c19ededf577bfd12dfae728040a21f635b06a24cd670ff510edd38963"
 "checksum thread_profiler 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cf947d192a9be60ef5131cc7a4648886ba89d712f16700ebbf80c8a69d05d48f"
 "checksum time 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)" = "d825be0eb33fda1a7e68012d51e9c7f451dc1a69391e7fdc197060bb8c56667b"
 "checksum tokio 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "8ee337e5f4e501fc32966fec6fe0ca0cc1c237b0b1b14a335f8bfe3c5f06e286"
 "checksum tokio-codec 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "881e9645b81c2ce95fcb799ded2c29ffb9f25ef5bef909089a420e5961dd8ccb"
 "checksum tokio-core 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)" = "aeeffbbb94209023feaef3c196a41cbcdafa06b4a6f893f68779bb5e53796f71"
--- a/browser/app/profile/firefox.js
+++ b/browser/app/profile/firefox.js
@@ -40,16 +40,17 @@ pref("extensions.webextOptionalPermissio
 // Preferences for AMO integration
 pref("extensions.getAddons.cache.enabled", true);
 pref("extensions.getAddons.get.url", "https://services.addons.mozilla.org/api/v3/addons/search/?guid=%IDS%&lang=%LOCALE%");
 pref("extensions.getAddons.compatOverides.url", "https://services.addons.mozilla.org/api/v3/addons/compat-override/?guid=%IDS%&lang=%LOCALE%");
 pref("extensions.getAddons.search.browseURL", "https://addons.mozilla.org/%LOCALE%/firefox/search?q=%TERMS%&platform=%OS%&appver=%VERSION%");
 pref("extensions.webservice.discoverURL", "https://discovery.addons.mozilla.org/%LOCALE%/firefox/discovery/pane/%VERSION%/%OS%/%COMPATIBILITY_MODE%");
 pref("extensions.getAddons.link.url", "https://addons.mozilla.org/%LOCALE%/firefox/");
 pref("extensions.getAddons.themes.browseURL", "https://addons.mozilla.org/%LOCALE%/firefox/themes/?src=firefox");
+pref("extensions.getAddons.langpacks.url", "https://services.addons.mozilla.org/api/v3/addons/language-tools/?app=firefox&type=language&appversion=%VERSION%");
 
 pref("extensions.update.autoUpdateDefault", true);
 
 // Check AUS for system add-on updates.
 pref("extensions.systemAddon.update.url", "https://aus5.mozilla.org/update/3/SystemAddons/%VERSION%/%BUILD_ID%/%BUILD_TARGET%/%LOCALE%/%CHANNEL%/%OS_VERSION%/%DISTRIBUTION%/%DISTRIBUTION_VERSION%/update.xml");
 pref("extensions.systemAddon.update.enabled", true);
 
 // Disable add-ons that are not installed by the user in all scopes by default.
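The new extensions.getAddons.langpacks.url pref follows the same %VERSION%-style substitution scheme as the other getAddons endpoints above. A minimal sketch of expanding it at runtime, assuming the stock Services.urlFormatter helper performs the substitution; only the pref name and URL come from the hunk above, the surrounding code is illustrative:

    // Illustrative sketch only, under the assumption stated above.
    ChromeUtils.import("resource://gre/modules/Services.jsm");

    const LANGPACKS_PREF = "extensions.getAddons.langpacks.url";
    // formatURLPref reads the pref and fills in %VERSION% (and similar tokens)
    // from the running build before returning the final URL string.
    let langpacksUrl = Services.urlFormatter.formatURLPref(LANGPACKS_PREF);
    // e.g. ".../language-tools/?app=firefox&type=language&appversion=63.0a1"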
new file mode 100644
--- /dev/null
+++ b/browser/components/preferences/browserLanguages.js
@@ -0,0 +1,127 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+/* import-globals-from ../../../toolkit/content/preferencesBindings.js */
+
+ChromeUtils.import("resource://gre/modules/Services.jsm");
+
+class OrderedListBox {
+  constructor({richlistbox, upButton, downButton}) {
+    this.richlistbox = richlistbox;
+    this.upButton = upButton;
+    this.downButton = downButton;
+
+    this.items = [];
+
+    this.richlistbox.addEventListener("select", () => this.setButtonState());
+    this.upButton.addEventListener("command", () => this.moveUp());
+    this.downButton.addEventListener("command", () => this.moveDown());
+  }
+
+  setButtonState() {
+    let { upButton, downButton } = this;
+    switch (this.richlistbox.selectedCount) {
+    case 0:
+      upButton.disabled = downButton.disabled = true;
+      break;
+    case 1:
+      upButton.disabled = this.richlistbox.selectedIndex == 0;
+      downButton.disabled = this.richlistbox.selectedIndex == this.richlistbox.childNodes.length - 1;
+      break;
+    default:
+      upButton.disabled = true;
+      downButton.disabled = true;
+    }
+  }
+
+  moveUp() {
+    let {selectedIndex} = this.richlistbox;
+    if (selectedIndex == 0) {
+      return;
+    }
+    let {items} = this;
+    let selectedItem = items[selectedIndex];
+    let prevItem = items[selectedIndex - 1];
+    items[selectedIndex - 1] = items[selectedIndex];
+    items[selectedIndex] = prevItem;
+    let prevEl = document.getElementById(prevItem.id);
+    let selectedEl = document.getElementById(selectedItem.id);
+    this.richlistbox.insertBefore(selectedEl, prevEl);
+    this.richlistbox.ensureElementIsVisible(selectedEl);
+    this.setButtonState();
+  }
+
+  moveDown() {
+    let {selectedIndex} = this.richlistbox;
+    if (selectedIndex == this.items.length - 1) {
+      return;
+    }
+    let {items} = this;
+    let selectedItem = items[selectedIndex];
+    let nextItem = items[selectedIndex + 1];
+    items[selectedIndex + 1] = items[selectedIndex];
+    items[selectedIndex] = nextItem;
+    let nextEl = document.getElementById(nextItem.id);
+    let selectedEl = document.getElementById(selectedItem.id);
+    this.richlistbox.insertBefore(nextEl, selectedEl);
+    this.richlistbox.ensureElementIsVisible(selectedEl);
+    this.setButtonState();
+  }
+
+  setItems(items) {
+    this.items = items;
+    this.populate();
+    this.setButtonState();
+  }
+
+  populate() {
+    this.richlistbox.textContent = "";
+
+    for (let {id, label, value} of this.items) {
+      let listitem = document.createElement("richlistitem");
+      listitem.setAttribute("value", value);
+      let labelEl = document.createElement("label");
+      listitem.id = id;
+      labelEl.textContent = label;
+      listitem.appendChild(labelEl);
+      this.richlistbox.appendChild(listitem);
+    }
+
+    this.richlistbox.selectedIndex = 0;
+  }
+}
+
+function getLocaleDisplayInfo(localeCodes) {
+  let localeNames = Services.intl.getLocaleDisplayNames(undefined, localeCodes);
+  return localeCodes.map((code, i) => {
+    return {
+      id: "locale-" + code,
+      label: localeNames[i],
+      value: code,
+    };
+  });
+}
+
+var gBrowserLanguagesDialog = {
+  _orderedListBox: null,
+  requestedLocales: null,
+
+  beforeAccept() {
+    this.requestedLocales = this._orderedListBox.items.map(item => item.value);
+    return true;
+  },
+
+  onLoad() {
+    this._orderedListBox = new OrderedListBox({
+      richlistbox: document.getElementById("activeLocales"),
+      upButton: document.getElementById("up"),
+      downButton: document.getElementById("down"),
+    });
+    // Maintain the previously requested locales even if we cancel out.
+    this.requestedLocales = window.arguments[0];
+    let locales = window.arguments[0]
+      || Services.locale.getRequestedLocales();
+    this._orderedListBox.setItems(getLocaleDisplayInfo(locales));
+  },
+};
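OrderedListBox only reorders whatever items it is handed; gBrowserLanguagesDialog wires it to the activeLocales richlistbox and the up/down buttons, then reads the result back in beforeAccept(). A short usage sketch mirroring onLoad() above; the element ids come from browserLanguages.xul, the sample locales are made up:

    // Illustrative sketch only; the real wiring is in gBrowserLanguagesDialog.onLoad().
    let listBox = new OrderedListBox({
      richlistbox: document.getElementById("activeLocales"),
      upButton: document.getElementById("up"),
      downButton: document.getElementById("down"),
    });

    // Each item needs an id (used for DOM lookup), a user-facing label, and a value.
    listBox.setItems([
      {id: "locale-pl", label: "Polski", value: "pl"},
      {id: "locale-en-US", label: "English (US)", value: "en-US"},
    ]);

    // After the user reorders with the up/down buttons, read the values back in list order.
    let requestedLocales = listBox.items.map(item => item.value); // e.g. ["en-US", "pl"]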
new file mode 100644
--- /dev/null
+++ b/browser/components/preferences/browserLanguages.xul
@@ -0,0 +1,49 @@
+<?xml version="1.0"?>
+
+<!-- This Source Code Form is subject to the terms of the Mozilla Public
+   - License, v. 2.0. If a copy of the MPL was not distributed with this
+   - file, You can obtain one at http://mozilla.org/MPL/2.0/. -->
+
+<?xml-stylesheet href="chrome://global/skin/"?>
+<?xml-stylesheet href="chrome://browser/skin/preferences/preferences.css"?>
+
+<dialog id="BrowserLanguagesDialog" type="child" class="prefwindow"
+        xmlns="http://www.mozilla.org/keymaster/gatekeeper/there.is.only.xul"
+        data-l10n-id="browser-languages-window"
+        data-l10n-attrs="title, style"
+        buttons="accept,cancel,help"
+        persist="screenX screenY"
+        role="dialog"
+        onload="gBrowserLanguagesDialog.onLoad();"
+        onbeforeaccept="return gBrowserLanguagesDialog.beforeAccept();"
+        helpTopic="prefs-languages"
+        ondialoghelp="openPrefsHelp()">
+
+  <link rel="localization" href="branding/brand.ftl"/>
+  <link rel="localization" href="browser/preferences/languages.ftl"/>
+  <script type="application/javascript" src="chrome://global/content/l10n.js"></script>
+
+  <script type="application/javascript" src="chrome://browser/content/utilityOverlay.js"/>
+  <script type="application/javascript" src="chrome://global/content/preferencesBindings.js"/>
+  <script type="application/javascript" src="chrome://browser/content/preferences/browserLanguages.js"/>
+
+  <vbox id="BrowserLanguagesDialogPane" class="prefpane largeDialogContainer">
+    <description data-l10n-id="browser-languages-description"/>
+
+    <grid flex="1">
+      <columns>
+        <column flex="1"/>
+        <column/>
+      </columns>
+      <rows>
+        <row flex="1">
+          <richlistbox id="activeLocales" flex="1"/>
+          <vbox>
+            <button id="up" disabled="true" data-l10n-id="languages-customize-moveup"/>
+            <button id="down" disabled="true" data-l10n-id="languages-customize-movedown"/>
+          </vbox>
+        </row>
+      </rows>
+    </grid>
+  </vbox>
+</dialog>
--- a/browser/components/preferences/in-content/main.js
+++ b/browser/components/preferences/in-content/main.js
@@ -229,20 +229,20 @@ if (AppConstants.MOZ_UPDATER) {
     ]);
   }
 }
 
 // A promise that resolves when the list of application handlers is loaded.
 // We store this in a global so tests can await it.
 var promiseLoadHandlersList;
 
-// Load the preferences string bundle for a given locale with fallbacks.
-function getBundleForLocale(locale) {
+// Load the preferences string bundle for other locales with fallbacks.
+function getBundleForLocales(newLocales) {
   let locales = Array.from(new Set([
-    locale,
+    ...newLocales,
     ...Services.locale.getRequestedLocales(),
     Services.locale.lastFallbackLocale,
   ]));
   function generateContexts(resourceIds) {
     return L10nRegistry.generateContexts(locales, resourceIds);
   }
   return new Localization([
     "browser/preferences/preferences.ftl",
@@ -800,49 +800,71 @@ var gMainPane = {
     let menulist = document.getElementById("defaultBrowserLanguage");
     let menupopup = menulist.querySelector("menupopup");
     menupopup.appendChild(fragment);
     menulist.value = Services.locale.getRequestedLocale();
 
     document.getElementById("browserLanguagesBox").hidden = false;
   },
 
-  /* Show the confirmation message bar to allow a restart into the new language. */
-  async onBrowserLanguageChange(event) {
-    let locale = event.target.value;
+  /* Show the confirmation message bar to allow a restart into the new locales. */
+  async showConfirmLanguageChangeMessageBar(locales) {
     let messageBar = document.getElementById("confirmBrowserLanguage");
-    if (locale == Services.locale.getRequestedLocale()) {
-      messageBar.hidden = true;
-      return;
-    }
     // Set the text in the message bar for the new locale.
-    let newBundle = getBundleForLocale(locale);
-    let description = messageBar.querySelector("description");
+    let newBundle = getBundleForLocales(locales);
+    let description = messageBar.querySelector(".message-bar-description");
     description.textContent = await newBundle.formatValue(
       "confirm-browser-language-change-description");
-    let button = messageBar.querySelector("button");
+    let button = messageBar.querySelector(".message-bar-button");
     button.setAttribute(
       "label", await newBundle.formatValue(
         "confirm-browser-language-change-button"));
+    button.setAttribute("locales", locales.join(","));
     messageBar.hidden = false;
+    gMainPane.requestingLocales = locales;
+  },
+
+  hideConfirmLanguageChangeMessageBar() {
+    let messageBar = document.getElementById("confirmBrowserLanguage");
+    messageBar.hidden = true;
+    messageBar.querySelector(".message-bar-button").removeAttribute("locales");
+    gMainPane.requestingLocales = null;
   },
 
   /* Confirm the locale change and restart the browser in the new locale. */
   confirmBrowserLanguageChange() {
-    let locale = document.getElementById("defaultBrowserLanguage").value;
-    Services.locale.setRequestedLocales([locale]);
+    let localesString = (event.target.getAttribute("locales") || "").trim();
+    if (!localesString || localesString.length == 0) {
+      return;
+    }
+    let locales = localesString.split(",");
+    Services.locale.setRequestedLocales(locales);
 
     // Restart with the new locale.
     let cancelQuit = Cc["@mozilla.org/supports-PRBool;1"].createInstance(Ci.nsISupportsPRBool);
     Services.obs.notifyObservers(cancelQuit, "quit-application-requested", "restart");
     if (!cancelQuit.data) {
       Services.startup.quit(Services.startup.eAttemptQuit | Services.startup.eRestart);
     }
   },
 
+  /* Show or hide the confirm change message bar based on the new locale. */
+  onBrowserLanguageChange(event) {
+    let locale = event.target.value;
+    if (locale == Services.locale.getRequestedLocale()) {
+      this.hideConfirmLanguageChangeMessageBar();
+      return;
+    }
+    let locales = Array.from(new Set([
+      locale,
+      ...Services.locale.getRequestedLocales(),
+    ]).values());
+    this.showConfirmLanguageChangeMessageBar(locales);
+  },
+
   onBrowserRestoreSessionChange(event) {
     const value = event.target.checked;
     const startupPref = Preferences.get("browser.startup.page");
     let newValue;
 
     if (value) {
       // We need to restore the blank homepage setting in our other pref
       if (startupPref.value === this.STARTUP_PREF_BLANK) {
@@ -957,16 +979,36 @@ var gMainPane = {
 
   /**
    * Shows a dialog in which the preferred language for web content may be set.
    */
   showLanguages() {
     gSubDialog.open("chrome://browser/content/preferences/languages.xul");
   },
 
+  showBrowserLanguages() {
+    gSubDialog.open(
+      "chrome://browser/content/preferences/browserLanguages.xul",
+      null, gMainPane.requestingLocales, this.browserLanguagesClosed);
+  },
+
+  /* Show or hide the confirm change message bar based on the updated ordering. */
+  browserLanguagesClosed() {
+    let requesting = this.gBrowserLanguagesDialog.requestedLocales;
+    let requested = Services.locale.getRequestedLocales();
+    let defaultBrowserLanguage = document.getElementById("defaultBrowserLanguage");
+    if (requesting && requesting.join(",") != requested.join(",")) {
+      gMainPane.showConfirmLanguageChangeMessageBar(requesting);
+      defaultBrowserLanguage.value = requesting[0];
+      return;
+    }
+    defaultBrowserLanguage.value = Services.locale.getRequestedLocale();
+    gMainPane.hideConfirmLanguageChangeMessageBar();
+  },
+
   /**
    * Displays the translation exceptions dialog where specific site and language
    * translation preferences can be set.
    */
   showTranslationExceptions() {
     gSubDialog.open("chrome://browser/content/preferences/translation.xul");
   },
 
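The refactoring above splits detection (onBrowserLanguageChange), the confirmation UI (showConfirmLanguageChangeMessageBar / hideConfirmLanguageChangeMessageBar), and the actual switch-and-restart (confirmBrowserLanguageChange), with the pending locale list carried on the message bar button's "locales" attribute. A condensed sketch of that hand-off using the same Services.locale calls as the hunk; the locale values are made up:

    // Illustrative sketch only; the real handlers live on gMainPane above.
    let selected = "pl";                                   // value chosen in #defaultBrowserLanguage
    if (selected != Services.locale.getRequestedLocale()) {
      // New order: the selected locale first, then the previously requested ones, de-duplicated.
      let locales = Array.from(new Set([selected, ...Services.locale.getRequestedLocales()]));

      // showConfirmLanguageChangeMessageBar(locales) stashes the list on the button:
      //   button.setAttribute("locales", locales.join(","));
      // and confirmBrowserLanguageChange() later reads it back and applies it:
      //   Services.locale.setRequestedLocales(attrValue.split(","));
      // before requesting a restart (quit-application-requested, then eAttemptQuit | eRestart).
    }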
--- a/browser/components/preferences/in-content/main.xul
+++ b/browser/components/preferences/in-content/main.xul
@@ -280,37 +280,43 @@
 </groupbox>
 
 <!-- Languages -->
 <groupbox id="languagesGroup" data-category="paneGeneral" hidden="true">
   <caption><label data-l10n-id="language-header"/></caption>
 
   <vbox id="browserLanguagesBox" align="start" hidden="true">
     <description flex="1" controls="chooseBrowserLanguage" data-l10n-id="choose-browser-language-description"/>
-    <menulist id="defaultBrowserLanguage" class="accessory-button" oncommand="gMainPane.onBrowserLanguageChange(event)" flex="1">
-      <menupopup/>
-    </menulist>
+    <hbox>
+      <menulist id="defaultBrowserLanguage" oncommand="gMainPane.onBrowserLanguageChange(event)">
+        <menupopup/>
+      </menulist>
+      <button id="manageBrowserLanguagesButton"
+              class="accessory-button"
+              data-l10n-id="manage-browser-languages-button"
+              oncommand="gMainPane.showBrowserLanguages()"/>
+    </hbox>
   </vbox>
   <hbox id="confirmBrowserLanguage" class="message-bar" align="center" hidden="true">
     <image class="message-bar-icon"/>
     <hbox class="message-bar-content" align="center" flex="1">
-      <description flex="1"/>
+      <description class="message-bar-description" flex="1"/>
       <button class="message-bar-button" oncommand="gMainPane.confirmBrowserLanguageChange()"/>
     </hbox>
   </hbox>
 
   <hbox id="languagesBox" align="center">
     <description flex="1" control="chooseLanguage" data-l10n-id="choose-language-description"/>
     <!-- Please don't remove the wrapping hbox/vbox/box for these elements. It's used to properly compute the search tooltip position. -->
     <hbox>
       <button id="chooseLanguage"
               class="accessory-button"
               data-l10n-id="choose-button"
               search-l10n-ids="
-                languages-window.title,
+                webpage-languages-window.title,
                 languages-description,
                 languages-customize-moveup.label,
                 languages-customize-movedown.label,
                 languages-customize-remove.label,
                 languages-customize-select-language.placeholder,
                 languages-customize-add.label,
               " />
     </hbox>
--- a/browser/components/preferences/in-content/tests/browser.ini
+++ b/browser/components/preferences/in-content/tests/browser.ini
@@ -47,16 +47,17 @@ skip-if = (verify && debug && (os == 'li
 [browser_cookies_exceptions.js]
 [browser_defaultbrowser_alwayscheck.js]
 [browser_healthreport.js]
 skip-if = true || !healthreport # Bug 1185403 for the "true"
 [browser_homepages_filter_aboutpreferences.js]
 [browser_homepages_use_bookmark.js]
 [browser_extension_controlled.js]
 [browser_languages_subdialog.js]
+[browser_browser_languages_subdialog.js]
 [browser_layersacceleration.js]
 [browser_masterpassword.js]
 [browser_newtab_menu.js]
 [browser_notifications_do_not_disturb.js]
 [browser_password_management.js]
 [browser_performance.js]
 skip-if = !e10s
 [browser_performance_e10srollout.js]
new file mode 100644
--- /dev/null
+++ b/browser/components/preferences/in-content/tests/browser_browser_languages_subdialog.js
@@ -0,0 +1,78 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+ChromeUtils.import("resource://gre/modules/Services.jsm");
+
+const BROWSER_LANGUAGES_URL = "chrome://browser/content/preferences/browserLanguages.xul";
+
+function assertLocaleOrder(list, locales) {
+  is(list.children.length, 2, "There are two requested locales");
+  is(Array.from(list.children).map(child => child.value).join(","),
+     locales, "The requested locales are in order");
+}
+
+async function openDialog(doc) {
+  let dialogLoaded = promiseLoadSubDialog(BROWSER_LANGUAGES_URL);
+  doc.getElementById("manageBrowserLanguagesButton").doCommand();
+  let dialogWin = await dialogLoaded;
+  let dialogDoc = dialogWin.document;
+  let list = dialogDoc.getElementById("activeLocales");
+  let dialog = dialogDoc.getElementById("BrowserLanguagesDialog");
+  return {dialog, dialogDoc, list};
+}
+
+add_task(async function testReorderingBrowserLanguages() {
+  await SpecialPowers.pushPrefEnv({
+    set: [
+      ["intl.multilingual.enabled", true],
+      ["intl.locale.requested", "pl,en-US"],
+    ],
+  });
+
+  await openPreferencesViaOpenPreferencesAPI("paneGeneral", {leaveOpen: true});
+
+  let doc = gBrowser.contentDocument;
+  let messageBar = doc.getElementById("confirmBrowserLanguage");
+  is(messageBar.hidden, true, "The message bar is hidden at first");
+
+  // Open the dialog.
+  let {dialog, dialogDoc, list} = await openDialog(doc);
+
+  // The initial order is set by the pref.
+  assertLocaleOrder(list, "pl,en-US");
+
+  // Moving pl down changes the order.
+  dialogDoc.getElementById("down").doCommand();
+  assertLocaleOrder(list, "en-US,pl");
+
+  // Accepting the change shows the confirm message bar.
+  let dialogClosed = BrowserTestUtils.waitForEvent(dialogDoc.documentElement, "dialogclosing");
+  dialog.acceptDialog();
+  await dialogClosed;
+  is(messageBar.hidden, false, "The message bar is now visible");
+  is(messageBar.querySelector("button").getAttribute("locales"), "en-US,pl",
+     "The locales are set on the message bar button");
+
+  // Open the dialog again.
+  let newDialog = await openDialog(doc);
+  dialog = newDialog.dialog;
+  dialogDoc = newDialog.dialogDoc;
+  list = newDialog.list;
+
+  // The initial order comes from the previous settings.
+  assertLocaleOrder(list, "en-US,pl");
+
+  // Select pl in the list.
+  list.selectedItem = list.querySelector("[value='pl']");
+  // Move pl back up.
+  dialogDoc.getElementById("up").doCommand();
+  assertLocaleOrder(list, "pl,en-US");
+
+  // Accepting the change hides the confirm message bar.
+  dialogClosed = BrowserTestUtils.waitForEvent(dialogDoc.documentElement, "dialogclosing");
+  dialog.acceptDialog();
+  await dialogClosed;
+  is(messageBar.hidden, true, "The message bar is hidden again");
+
+  BrowserTestUtils.removeTab(gBrowser.selectedTab);
+});
--- a/browser/components/preferences/jar.mn
+++ b/browser/components/preferences/jar.mn
@@ -2,16 +2,18 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 browser.jar:
     content/browser/preferences/applicationManager.xul
     content/browser/preferences/applicationManager.js
     content/browser/preferences/blocklists.xul
     content/browser/preferences/blocklists.js
+    content/browser/preferences/browserLanguages.xul
+    content/browser/preferences/browserLanguages.js
     content/browser/preferences/clearSiteData.css
     content/browser/preferences/clearSiteData.js
     content/browser/preferences/clearSiteData.xul
 *   content/browser/preferences/colors.xul
     content/browser/preferences/colors.js
     content/browser/preferences/connection.xul
     content/browser/preferences/connection.js
     content/browser/preferences/fonts.xul
--- a/browser/components/preferences/languages.xul
+++ b/browser/components/preferences/languages.xul
@@ -1,21 +1,20 @@
 <?xml version="1.0"?>
 
-<!-- -*- Mode: Java; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- -->
 <!-- This Source Code Form is subject to the terms of the Mozilla Public
    - License, v. 2.0. If a copy of the MPL was not distributed with this
    - file, You can obtain one at http://mozilla.org/MPL/2.0/. -->
 
 <?xml-stylesheet href="chrome://global/skin/"?>
 <?xml-stylesheet href="chrome://browser/skin/preferences/preferences.css"?>
 
 <dialog id="LanguagesDialog" type="child" class="prefwindow"
         xmlns="http://www.mozilla.org/keymaster/gatekeeper/there.is.only.xul"
-        data-l10n-id="languages-window"
+        data-l10n-id="webpage-languages-window"
         data-l10n-attrs="title, style"
         buttons="accept,cancel,help"
         persist="lastSelected screenX screenY"
         role="dialog"
         onload="gLanguagesDialog.onLoad();"
         helpTopic="prefs-languages"
         ondialoghelp="openPrefsHelp()">
 
--- a/browser/locales/en-US/browser/preferences/languages.ftl
+++ b/browser/locales/en-US/browser/preferences/languages.ftl
@@ -1,15 +1,15 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
-languages-window =
-    .title = Languages
-    .style = width: 30em
+webpage-languages-window =
+    .title = Webpage Language Settings
+    .style = width: 40em
 
 languages-close-key =
     .key = w
 
 languages-description = Web pages are sometimes offered in more than one language. Choose languages for displaying these web pages, in order of preference
 
 languages-customize-spoof-english =
     .label = Request English versions of web pages for enhanced privacy
@@ -43,8 +43,14 @@ languages-customize-add =
 # Variables:
 #   $locale (String) - A name of the locale (for example: "Icelandic", "Spanish (Chile)")
 #   $code (String) - Locale code of the locale (for example: "is", "es-CL")
 languages-code-format =
     .label = { $locale } [{ $code }]
 
 languages-active-code-format =
     .value = { languages-code-format.label }
+
+browser-languages-window =
+    .title = { -brand-short-name } Language Settings
+    .style = width: 40em
+
+browser-languages-description = { -brand-short-name } will display the first language as your default and will display alternate languages if necessary in the order they appear.
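The new browser-languages-window and browser-languages-description messages reach browserLanguages.xul through its <link rel="localization"> elements plus data-l10n-id. A minimal sketch of resolving one of them from script instead, assuming the same Localization constructor main.js uses and the default app locale order (getBundleForLocales above builds its own contexts when a different order is needed); the import path is the usual one for this era:

    // Illustrative sketch only, under the assumptions stated above.
    ChromeUtils.import("resource://gre/modules/Localization.jsm");

    let l10n = new Localization([
      "branding/brand.ftl",                      // supplies -brand-short-name
      "browser/preferences/languages.ftl",
    ]);
    l10n.formatValue("browser-languages-description").then(text => {
      // text: the en-US string above, with { -brand-short-name } resolved to the product name.
    });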
--- a/browser/locales/en-US/browser/preferences/preferences.ftl
+++ b/browser/locales/en-US/browser/preferences/preferences.ftl
@@ -249,16 +249,19 @@ language-header = Language
 
 choose-language-description = Choose your preferred language for displaying pages
 
 choose-button =
     .label = Choose…
     .accesskey = o
 
 choose-browser-language-description = Choose the languages used to display menus, messages, and notifications from { -brand-short-name }.
+manage-browser-languages-button =
+  .label = Set Alternatives…
+  .accesskey = l
 confirm-browser-language-change-description = Restart { -brand-short-name } to apply these changes
 confirm-browser-language-change-button = Apply and Restart
 
 translate-web-pages =
     .label = Translate web content
     .accesskey = T
 
 # The <img> element is replaced by the logo of the provider
--- a/browser/themes/shared/incontentprefs/preferences.inc.css
+++ b/browser/themes/shared/incontentprefs/preferences.inc.css
@@ -918,10 +918,19 @@ menulist[indicator=true] > menupopup men
 
 /* Proxy port input */
 
 .proxy-port-input {
   width: calc(5ch + 22px); /* 5 chars + 11px padding on both sides */
 }
 
 #defaultBrowserLanguage {
+  margin-inline-start: 0;
   min-width: 20em;
 }
+
+#BrowserLanguagesDialog > .dialog-button-box > .dialog-button[dlgtype="help"] {
+  margin-inline-start: 0;
+}
+
+#activeLocales {
+  min-height: 200px;
+}
--- a/js/rust/build.rs
+++ b/js/rust/build.rs
@@ -165,17 +165,17 @@ const WHITELIST_TYPES: &'static [&'stati
     "JS::Handle",
     "JS::HandleFunction",
     "JS::HandleId",
     "JS::HandleObject",
     "JS::HandleString",
     "JS::HandleValue",
     "JS::HandleValueArray",
     "JS::IsAcceptableThis",
-    "JSAutoRealmAllowCCW",
+    "JSAutoRealm",
     "JSAutoStructuredCloneBuffer",
     "JSClass",
     "JSClassOps",
     "JSContext",
     "JSErrNum",
     "JSErrorCallback",
     "JSErrorFormatString",
     "JSErrorReport",
--- a/js/rust/src/ar.rs
+++ b/js/rust/src/ar.rs
@@ -1,56 +1,56 @@
 use jsapi::root::*;
 #[cfg(feature = "debugmozjs")]
 use std::ptr;
 
 #[derive(Debug)]
-pub struct AutoRealm(JSAutoRealmAllowCCW);
+pub struct AutoRealm(JSAutoRealm);
 
 impl AutoRealm {
     #[cfg(feature = "debugmozjs")]
     pub unsafe fn with_obj(cx: *mut JSContext,
                            target: *mut JSObject)
                            -> AutoRealm
     {
         let mut notifier = mozilla::detail::GuardObjectNotifier {
             mStatementDone: ptr::null_mut(),
         };
 
         AutoRealm(
-            JSAutoRealmAllowCCW::new(
+            JSAutoRealm::new(
                 cx,
                 target,
                 &mut notifier as *mut _))
     }
 
     #[cfg(not(feature = "debugmozjs"))]
     pub unsafe fn with_obj(cx: *mut JSContext,
                            target: *mut JSObject)
                            -> AutoRealm
     {
-        AutoRealm(JSAutoRealmAllowCCW::new(cx, target))
+        AutoRealm(JSAutoRealm::new(cx, target))
     }
 
     #[cfg(feature = "debugmozjs")]
     pub unsafe fn with_script(cx: *mut JSContext,
                               target: *mut JSScript)
                               -> AutoRealm
     {
         let mut notifier = mozilla::detail::GuardObjectNotifier {
             mStatementDone: ptr::null_mut(),
         };
 
         AutoRealm(
-            JSAutoRealmAllowCCW::new1(
+            JSAutoRealm::new1(
                 cx,
                 target,
                 &mut notifier as *mut _))
     }
 
     #[cfg(not(feature = "debugmozjs"))]
     pub unsafe fn with_script(cx: *mut JSContext,
                               target: *mut JSScript)
                               -> AutoRealm
     {
-        AutoRealm(JSAutoRealmAllowCCW::new1(cx, target))
+        AutoRealm(JSAutoRealm::new1(cx, target))
     }
 }
--- a/js/rust/src/rust.rs
+++ b/js/rust/src/rust.rs
@@ -615,17 +615,17 @@ impl GCMethods for JS::Value {
     unsafe fn post_barrier(v: *mut JS::Value, prev: JS::Value, next: JS::Value) {
         JS::HeapValuePostBarrier(v, &prev, &next);
     }
 }
 
 // ___________________________________________________________________________
 // Implementations for various things in jsapi.rs
 
-impl Drop for JSAutoRealmAllowCCW {
+impl Drop for JSAutoRealm {
     fn drop(&mut self) {
         unsafe { JS::LeaveRealm(self.cx_, self.oldRealm_); }
     }
 }
 
 impl JSJitMethodCallArgs {
     #[inline]
     pub fn get(&self, i: u32) -> JS::HandleValue {
--- a/js/src/devtools/rootAnalysis/annotations.js
+++ b/js/src/devtools/rootAnalysis/annotations.js
@@ -193,21 +193,16 @@ var ignoreFunctions = {
     "NS_LogCtor": true,
     "NS_LogDtor": true,
     "NS_LogCOMPtrAddRef": true,
     "NS_LogCOMPtrRelease": true,
 
     // FIXME!
     "NS_DebugBreak": true,
 
-    // These are a little overzealous -- these destructors *can* GC if they end
-    // up wrapping a pending exception. See bug 898815 for the heavyweight fix.
-    "void js::AutoRealm::~AutoRealm(int32)" : true,
-    "void JSAutoRealmAllowCCW::~JSAutoRealmAllowCCW(int32)" : true,
-
     // Similar to heap snapshot mock classes, and GTests below. This posts a
     // synchronous runnable when a GTest fails, and we are pretty sure that the
     // particular runnable it posts can't even GC, but the analysis isn't
     // currently smart enough to determine that. In either case, this is (a)
     // only in GTests, and (b) only when the Gtest has already failed. We have
     // static and dynamic checks for no GC in the non-test code, and in the test
     // code we fall back to only the dynamic checks.
     "void test::RingbufferDumper::OnTestPartResult(testing::TestPartResult*)" : true,
--- a/js/src/jsapi.cpp
+++ b/js/src/jsapi.cpp
@@ -686,81 +686,71 @@ JS_SetExternalStringSizeofCallback(JSCon
 }
 
 JS_PUBLIC_API(Realm*)
 JS::EnterRealm(JSContext* cx, JSObject* target)
 {
     AssertHeapIsIdle();
     CHECK_REQUEST(cx);
 
+    MOZ_DIAGNOSTIC_ASSERT(!js::IsCrossCompartmentWrapper(target));
+
     Realm* oldRealm = cx->realm();
     cx->enterRealmOf(target);
     return oldRealm;
 }
 
 JS_PUBLIC_API(void)
 JS::LeaveRealm(JSContext* cx, JS::Realm* oldRealm)
 {
     AssertHeapIsIdle();
     CHECK_REQUEST(cx);
     cx->leaveRealm(oldRealm);
 }
 
-JSAutoRealmAllowCCW::JSAutoRealmAllowCCW(JSContext* cx, JSObject* target
-                                         MOZ_GUARD_OBJECT_NOTIFIER_PARAM_IN_IMPL)
-  : cx_(cx),
-    oldRealm_(cx->realm())
-{
-    AssertHeapIsIdleOrIterating();
-    MOZ_GUARD_OBJECT_NOTIFIER_INIT;
-    cx_->enterRealmOf(target);
-}
-
-JSAutoRealmAllowCCW::JSAutoRealmAllowCCW(JSContext* cx, JSScript* target
-                                         MOZ_GUARD_OBJECT_NOTIFIER_PARAM_IN_IMPL)
+JSAutoRealm::JSAutoRealm(JSContext* cx, JSObject* target
+                         MOZ_GUARD_OBJECT_NOTIFIER_PARAM_IN_IMPL)
   : cx_(cx),
     oldRealm_(cx->realm())
 {
-    AssertHeapIsIdleOrIterating();
-    MOZ_GUARD_OBJECT_NOTIFIER_INIT;
-    cx_->enterRealmOf(target);
-}
-
-JSAutoRealmAllowCCW::~JSAutoRealmAllowCCW()
-{
-    cx_->leaveRealm(oldRealm_);
-}
-
-JSAutoRealm::JSAutoRealm(JSContext* cx, JSObject* target
-                         MOZ_GUARD_OBJECT_NOTIFIER_PARAM_IN_IMPL)
-  : JSAutoRealmAllowCCW(cx, target)
-{
     MOZ_GUARD_OBJECT_NOTIFIER_INIT;
     MOZ_DIAGNOSTIC_ASSERT(!js::IsCrossCompartmentWrapper(target));
+    AssertHeapIsIdleOrIterating();
+    cx_->enterRealmOf(target);
 }
 
 JSAutoRealm::JSAutoRealm(JSContext* cx, JSScript* target
                          MOZ_GUARD_OBJECT_NOTIFIER_PARAM_IN_IMPL)
-  : JSAutoRealmAllowCCW(cx, target)
+  : cx_(cx),
+    oldRealm_(cx->realm())
 {
     MOZ_GUARD_OBJECT_NOTIFIER_INIT;
+    AssertHeapIsIdleOrIterating();
+    cx_->enterRealmOf(target);
+}
+
+JSAutoRealm::~JSAutoRealm()
+{
+    cx_->leaveRealm(oldRealm_);
 }
 
 JSAutoNullableRealm::JSAutoNullableRealm(JSContext* cx,
                                          JSObject* targetOrNull
                                          MOZ_GUARD_OBJECT_NOTIFIER_PARAM_IN_IMPL)
   : cx_(cx),
     oldRealm_(cx->realm())
 {
+    MOZ_GUARD_OBJECT_NOTIFIER_INIT;
     AssertHeapIsIdleOrIterating();
-    MOZ_GUARD_OBJECT_NOTIFIER_INIT;
-    if (targetOrNull)
+    if (targetOrNull) {
+        MOZ_DIAGNOSTIC_ASSERT(!js::IsCrossCompartmentWrapper(targetOrNull));
         cx_->enterRealmOf(targetOrNull);
-    else
+    } else {
         cx_->enterNullRealm();
+    }
 }
 
 JSAutoNullableRealm::~JSAutoNullableRealm()
 {
     cx_->leaveRealm(oldRealm_);
 }
 
 JS_PUBLIC_API(void)
--- a/js/src/jsapi.h
+++ b/js/src/jsapi.h
@@ -853,20 +853,23 @@ JS_RefreshCrossCompartmentWrappers(JSCon
  * The current realm of a context may be changed. The preferred way to do
  * this is with JSAutoRealm:
  *
  *   void foo(JSContext* cx, JSObject* obj) {
  *     // in some realm 'r'
  *     {
  *       JSAutoRealm ar(cx, obj);  // constructor enters
  *       // in the realm of 'obj'
- *     }                                 // destructor leaves
+ *     }                           // destructor leaves
  *     // back in realm 'r'
  *   }
  *
+ * The object passed to JSAutoRealm must *not* be a cross-compartment wrapper,
+ * because CCWs are not associated with a single realm.
+ *
  * For more complicated uses that don't neatly fit in a C++ stack frame, the
  * realm can be entered and left using separate function calls:
  *
  *   void foo(JSContext* cx, JSObject* obj) {
  *     // in 'oldRealm'
  *     JS::Realm* oldRealm = JS::EnterRealm(cx, obj);
  *     // in the realm of 'obj'
  *     JS::LeaveRealm(cx, oldRealm);
@@ -877,37 +880,24 @@ JS_RefreshCrossCompartmentWrappers(JSCon
  * enter/leave calls on the context. Furthermore, only the return value of a
  * JS::EnterRealm call may be passed as the 'oldRealm' argument of
  * the corresponding JS::LeaveRealm call.
  *
  * Entering a realm roots the realm and its global object for the lifetime of
  * the JSAutoRealm.
  */
 
-// JSAutoRealmAllowCCW is deprecated and will be removed soon, because entering
-// the realm of a CCW doesn't make sense when CCWs are shared by all realms in
-// the compartment. New code should prefer JSAutoRealm below instead (it asserts
-// the object is not a CCW).
-class MOZ_RAII JS_PUBLIC_API(JSAutoRealmAllowCCW)
+class MOZ_RAII JS_PUBLIC_API(JSAutoRealm)
 {
     JSContext* cx_;
     JS::Realm* oldRealm_;
   public:
-    JSAutoRealmAllowCCW(JSContext* cx, JSObject* target MOZ_GUARD_OBJECT_NOTIFIER_PARAM);
-    JSAutoRealmAllowCCW(JSContext* cx, JSScript* target MOZ_GUARD_OBJECT_NOTIFIER_PARAM);
-    ~JSAutoRealmAllowCCW();
-
-    MOZ_DECL_USE_GUARD_OBJECT_NOTIFIER
-};
-
-class MOZ_RAII JS_PUBLIC_API(JSAutoRealm) : public JSAutoRealmAllowCCW
-{
-  public:
     JSAutoRealm(JSContext* cx, JSObject* target MOZ_GUARD_OBJECT_NOTIFIER_PARAM);
     JSAutoRealm(JSContext* cx, JSScript* target MOZ_GUARD_OBJECT_NOTIFIER_PARAM);
+    ~JSAutoRealm();
 
     MOZ_DECL_USE_GUARD_OBJECT_NOTIFIER
 };
 
 class MOZ_RAII JS_PUBLIC_API(JSAutoNullableRealm)
 {
     JSContext* cx_;
     JS::Realm* oldRealm_;
@@ -918,16 +908,19 @@ class MOZ_RAII JS_PUBLIC_API(JSAutoNulla
 
     MOZ_DECL_USE_GUARD_OBJECT_NOTIFIER
 };
 
 namespace JS {
 
 /** NB: This API is infallible; a nullptr return value does not indicate error.
  *
+ * |target| must not be a cross-compartment wrapper because CCWs are not
+ * associated with a single realm.
+ *
  * Entering a realm roots the realm and its global object until the matching
  * JS::LeaveRealm() call.
  */
 extern JS_PUBLIC_API(JS::Realm*)
 EnterRealm(JSContext* cx, JSObject* target);
 
 extern JS_PUBLIC_API(void)
 LeaveRealm(JSContext* cx, JS::Realm* oldRealm);
--- a/js/src/jsfriendapi.h
+++ b/js/src/jsfriendapi.h
@@ -327,30 +327,31 @@ extern JS_FRIEND_API(void)
 SetRealmPrincipals(JS::Realm* realm, JSPrincipals* principals);
 
 extern JS_FRIEND_API(bool)
 GetIsSecureContext(JS::Realm* realm);
 
 } // namespace JS
 
 /**
- * Copies all own properties from |obj| to |target|. |obj| must be a "native"
- * object (that is to say, normal-ish - not an Array or a Proxy).
+ * Copies all own properties from |obj| to |target|. Both |obj| and |target|
+ * must not be cross-compartment wrappers because we have to enter their realms.
  *
- * This function immediately enters a compartment, and does not impose any
- * restrictions on the compartment of |cx|.
+ * This function immediately enters a realm, and does not impose any
+ * restrictions on the realm of |cx|.
  */
 extern JS_FRIEND_API(bool)
 JS_CopyPropertiesFrom(JSContext* cx, JS::HandleObject target, JS::HandleObject obj);
 
 /*
  * Single-property version of the above. This function asserts that an |own|
  * property of the given name exists on |obj|.
  *
- * On entry, |cx| must be same-compartment with |obj|.
+ * On entry, |cx| must be same-compartment with |obj|. |target| must not be a
+ * cross-compartment wrapper because we have to enter its realm.
  *
  * The copyBehavior argument controls what happens with
  * non-configurable properties.
  */
 typedef enum  {
     MakeNonConfigurableIntoConfigurable,
     CopyNonConfigurableAsIs
 } PropertyCopyBehavior;
--- a/js/src/vm/JSObject.cpp
+++ b/js/src/vm/JSObject.cpp
@@ -1133,16 +1133,20 @@ JSObject::nonNativeSetElement(JSContext*
         return false;
     return nonNativeSetProperty(cx, obj, id, v, receiver, result);
 }
 
 JS_FRIEND_API(bool)
 JS_CopyPropertyFrom(JSContext* cx, HandleId id, HandleObject target,
                     HandleObject obj, PropertyCopyBehavior copyBehavior)
 {
+    // |target| must not be a CCW because we need to enter its realm below and
+    // CCWs are not associated with a single realm.
+    MOZ_ASSERT(!IsCrossCompartmentWrapper(target));
+
     // |obj| and |cx| are generally not same-compartment with |target| here.
     cx->check(obj, id);
     Rooted<PropertyDescriptor> desc(cx);
 
     if (!GetOwnPropertyDescriptor(cx, obj, id, &desc))
         return false;
     MOZ_ASSERT(desc.object());
 
@@ -1152,29 +1156,34 @@ JS_CopyPropertyFrom(JSContext* cx, Handl
     if (desc.setter() && !desc.hasSetterObject())
         return true;
 
     if (copyBehavior == MakeNonConfigurableIntoConfigurable) {
         // Mask off the JSPROP_PERMANENT bit.
         desc.attributesRef() &= ~JSPROP_PERMANENT;
     }
 
-    JSAutoRealmAllowCCW ar(cx, target);
+    JSAutoRealm ar(cx, target);
     cx->markId(id);
     RootedId wrappedId(cx, id);
     if (!cx->compartment()->wrap(cx, &desc))
         return false;
 
     return DefineProperty(cx, target, wrappedId, desc);
 }
 
 JS_FRIEND_API(bool)
 JS_CopyPropertiesFrom(JSContext* cx, HandleObject target, HandleObject obj)
 {
-    JSAutoRealmAllowCCW ar(cx, obj);
+    // Both |obj| and |target| must not be CCWs because we need to enter their
+    // realms below and CCWs are not associated with a single realm.
+    MOZ_ASSERT(!IsCrossCompartmentWrapper(obj));
+    MOZ_ASSERT(!IsCrossCompartmentWrapper(target));
+
+    JSAutoRealm ar(cx, obj);
 
     AutoIdVector props(cx);
     if (!GetPropertyKeys(cx, obj, JSITER_OWNONLY | JSITER_HIDDEN | JSITER_SYMBOLS, &props))
         return false;
 
     for (size_t i = 0; i < props.length(); ++i) {
         if (!JS_CopyPropertyFrom(cx, props[i], target, obj))
             return false;
--- a/mfbt/tests/TestPoisonArea.cpp
+++ b/mfbt/tests/TestPoisonArea.cpp
@@ -152,17 +152,17 @@
 #endif
 
 #elif defined __s390__
 #define RETURN_INSTR 0x07fe0000 /* br %r14 */
 
 #elif defined __sh__
 #define RETURN_INSTR 0x0b000b00 /* rts; rts */
 
-#elif defined __aarch64__
+#elif defined __aarch64__ || defined _M_ARM64
 #define RETURN_INSTR 0xd65f03c0 /* ret */
 
 #elif defined __ia64
 struct ia64_instr { uint32_t mI[4]; };
 static const ia64_instr _return_instr =
   {{ 0x00000011, 0x00000001, 0x80000200, 0x00840008 }}; /* br.ret.sptk.many b0 */
 
 #define RETURN_INSTR _return_instr
--- a/mobile/android/app/mobile.js
+++ b/mobile/android/app/mobile.js
@@ -211,16 +211,17 @@ pref("extensions.update.url", "https://v
 pref("extensions.update.background.url", "https://versioncheck-bg.addons.mozilla.org/update/VersionCheck.php?reqVersion=%REQ_VERSION%&id=%ITEM_ID%&version=%ITEM_VERSION%&maxAppVersion=%ITEM_MAXAPPVERSION%&status=%ITEM_STATUS%&appID=%APP_ID%&appVersion=%APP_VERSION%&appOS=%APP_OS%&appABI=%APP_ABI%&locale=%APP_LOCALE%&currentAppVersion=%CURRENT_APP_VERSION%&updateType=%UPDATE_TYPE%&compatMode=%COMPATIBILITY_MODE%");
 
 /* preferences for the Get Add-ons pane */
 pref("extensions.getAddons.cache.enabled", true);
 pref("extensions.getAddons.search.browseURL", "https://addons.mozilla.org/%LOCALE%/android/search?q=%TERMS%&platform=%OS%&appver=%VERSION%");
 pref("extensions.getAddons.browseAddons", "https://addons.mozilla.org/%LOCALE%/android/");
 pref("extensions.getAddons.get.url", "https://services.addons.mozilla.org/api/v3/addons/search/?guid=%IDS%&lang=%LOCALE%");
 pref("extensions.getAddons.compatOverides.url", "https://services.addons.mozilla.org/api/v3/addons/compat-override/?guid=%IDS%&lang=%LOCALE%");
+pref("extensions.getAddons.langpacks.url", "https://services.addons.mozilla.org/api/v3/addons/language-tools/?app=android&type=language&appversion=%VERSION%");
 
 /* preference for the locale picker */
 pref("extensions.getLocales.get.url", "");
 pref("extensions.compatability.locales.buildid", "0");
 
 /* Don't let XPIProvider install distribution add-ons; we do our own thing on mobile. */
 pref("extensions.installDistroAddons", false);
 
--- a/mobile/android/geckoview/src/main/java/org/mozilla/gecko/GeckoSharedPrefs.java
+++ b/mobile/android/geckoview/src/main/java/org/mozilla/gecko/GeckoSharedPrefs.java
@@ -168,16 +168,20 @@ public final class GeckoSharedPrefs {
     }
 
     /**
      * Performs all prefs migrations in the background thread to avoid StrictMode
      * exceptions from reading/writing in the UI thread. This method will block
      * the current thread until the migration is finished.
      */
     private static synchronized void migrateIfNecessary(final Context context) {
+        if (!GeckoAppShell.isFennec()) {
+            return;
+        }
+
         if (migrationDone) {
             return;
         }
 
         // We deliberately perform the migration in the current thread (which
         // is likely the UI thread) as this is actually cheaper than enforcing a
         // context switch to another thread (see bug 940575).
         // Avoid strict mode warnings when doing so.
@@ -250,17 +254,17 @@ public final class GeckoSharedPrefs {
         Log.d(LOGTAG, "All keys have been migrated");
     }
 
     /**
      * Moves all preferences stored in PreferenceManager's default prefs
      * to either app or profile scopes. The profile-scoped keys are defined
      * in given profileKeys list, all other keys are moved to the app scope.
      */
-    public static Editor migrateFromPreferenceManager(Context context, Editor appEditor,
+    private static Editor migrateFromPreferenceManager(Context context, Editor appEditor,
             Editor profileEditor, List<String> profileKeys) {
         Log.d(LOGTAG, "Migrating from PreferenceManager");
 
         final SharedPreferences pmPrefs =
                 PreferenceManager.getDefaultSharedPreferences(context);
 
         for (Map.Entry<String, ?> entry : pmPrefs.getAll().entrySet()) {
             final String key = entry.getKey();
@@ -279,17 +283,17 @@ public final class GeckoSharedPrefs {
         // and return the Editor to be committed.
         return pmPrefs.edit().clear();
     }
 
     /**
      * Moves the crash reporter's preferences from the app-wide prefs
      * into its own shared prefs to avoid cross-process pref accesses.
      */
-    public static void migrateCrashReporterSettings(SharedPreferences appPrefs, Editor appEditor,
+    private static void migrateCrashReporterSettings(SharedPreferences appPrefs, Editor appEditor,
                                                     Editor crashEditor, List<String> profileKeys) {
         Log.d(LOGTAG, "Migrating crash reporter settings");
 
         for (Map.Entry<String, ?> entry : appPrefs.getAll().entrySet()) {
             final String key = entry.getKey();
 
             if (profileKeys.contains(key)) {
                 putEntry(crashEditor, key, entry.getValue());
--- a/netwerk/cookie/CookieServiceChild.cpp
+++ b/netwerk/cookie/CookieServiceChild.cpp
@@ -178,17 +178,20 @@ CookieServiceChild::TrackCookieLoad(nsIC
   nsCOMPtr<nsIURI> uri;
   aChannel->GetURI(getter_AddRefs(uri));
   if (RequireThirdPartyCheck()) {
     mThirdPartyUtil->IsThirdPartyChannel(aChannel, uri, &isForeign);
   }
   nsCOMPtr<nsIHttpChannel> httpChannel = do_QueryInterface(aChannel);
   if (httpChannel) {
     isTrackingResource = httpChannel->GetIsTrackingResource();
-    if (isForeign && isTrackingResource &&
+    // Check first-party storage access even for non-tracking resources, since
+    // we will need the result when computing the access rights for the reject
+    // foreign cookie behavior mode.
+    if (isForeign &&
         AntiTrackingCommon::IsFirstPartyStorageAccessGrantedFor(httpChannel,
                                                                 uri)) {
       firstPartyStorageAccessGranted = true;
     }
   }
   nsCOMPtr<nsILoadInfo> loadInfo = aChannel->GetLoadInfo();
   mozilla::OriginAttributes attrs;
   if (loadInfo) {
@@ -576,17 +579,20 @@ CookieServiceChild::GetCookieStringInter
   if (RequireThirdPartyCheck())
     mThirdPartyUtil->IsThirdPartyChannel(aChannel, aHostURI, &isForeign);
 
   bool isTrackingResource = false;
   bool firstPartyStorageAccessGranted = false;
   nsCOMPtr<nsIHttpChannel> httpChannel = do_QueryInterface(aChannel);
   if (httpChannel) {
     isTrackingResource = httpChannel->GetIsTrackingResource();
-    if (isForeign && isTrackingResource &&
+    // Check first-party storage access even for non-tracking resources, since
+    // we will need the result when computing the access rights for the reject
+    // foreign cookie behavior mode.
+    if (isForeign &&
         AntiTrackingCommon::IsFirstPartyStorageAccessGrantedFor(httpChannel,
                                                                 aHostURI)) {
       firstPartyStorageAccessGranted = true;
     }
   }
 
   bool isSafeTopLevelNav = NS_IsSafeTopLevelNav(aChannel);
   bool isSameSiteForeign = NS_IsSameSiteForeign(aChannel, aHostURI);
@@ -633,17 +639,20 @@ CookieServiceChild::SetCookieStringInter
   if (RequireThirdPartyCheck())
     mThirdPartyUtil->IsThirdPartyChannel(aChannel, aHostURI, &isForeign);
 
   bool isTrackingResource = false;
   bool firstPartyStorageAccessGranted = false;
   nsCOMPtr<nsIHttpChannel> httpChannel = do_QueryInterface(aChannel);
   if (httpChannel) {
     isTrackingResource = httpChannel->GetIsTrackingResource();
-    if (isForeign && isTrackingResource &&
+    // Check first-party storage access even for non-tracking resources, since
+    // we will need the result when computing the access rights for the reject
+    // foreign cookie behavior mode.
+    if (isForeign &&
         AntiTrackingCommon::IsFirstPartyStorageAccessGrantedFor(httpChannel,
                                                                 aHostURI)) {
       firstPartyStorageAccessGranted = true;
     }
   }
 
   nsDependentCString cookieString(aCookieString);
   nsDependentCString stringServerTime;
--- a/netwerk/cookie/CookieServiceParent.cpp
+++ b/netwerk/cookie/CookieServiceParent.cpp
@@ -156,17 +156,20 @@ CookieServiceParent::TrackCookieLoad(nsI
   bool isForeign = true;
   thirdPartyUtil->IsThirdPartyChannel(aChannel, uri, &isForeign);
 
   bool isTrackingResource = false;
   bool storageAccessGranted = false;
   nsCOMPtr<nsIHttpChannel> httpChannel = do_QueryInterface(aChannel);
   if (httpChannel) {
     isTrackingResource = httpChannel->GetIsTrackingResource();
-    if (isForeign && isTrackingResource &&
+    // Check first-party storage access even for non-tracking resources, since
+    // we will need the result when computing the access rights for the reject
+    // foreign cookie behavior mode.
+    if (isForeign &&
         AntiTrackingCommon::IsFirstPartyStorageAccessGrantedFor(httpChannel,
                                                                 uri)) {
       storageAccessGranted = true;
     }
   }
 
   nsTArray<nsCookie*> foundCookieList;
   mCookieService->GetCookiesForURI(uri, isForeign, isTrackingResource,
--- a/netwerk/cookie/nsCookieService.cpp
+++ b/netwerk/cookie/nsCookieService.cpp
@@ -2040,17 +2040,20 @@ nsCookieService::GetCookieStringCommon(n
   mThirdPartyUtil->IsThirdPartyChannel(aChannel, aHostURI, &isForeign);
 
   bool isTrackingResource = false;
   bool firstPartyStorageAccessGranted = false;
   nsCOMPtr<nsIHttpChannel> httpChannel = do_QueryInterface(aChannel);
   if (httpChannel) {
     isTrackingResource = httpChannel->GetIsTrackingResource();
 
-    if (isForeign && isTrackingResource &&
+    // Check first-party storage access even for non-tracking resources, since
+    // we will need the result when computing the access rights for the reject
+    // foreign cookie behavior mode.
+    if (isForeign &&
         AntiTrackingCommon::IsFirstPartyStorageAccessGrantedFor(httpChannel,
                                                                 aHostURI)) {
       firstPartyStorageAccessGranted = true;
     }
   }
 
   OriginAttributes attrs;
   if (aChannel) {
@@ -2147,17 +2150,20 @@ nsCookieService::SetCookieStringCommon(n
   mThirdPartyUtil->IsThirdPartyChannel(aChannel, aHostURI, &isForeign);
 
   bool isTrackingResource = false;
   bool firstPartyStorageAccessGranted = false;
   nsCOMPtr<nsIHttpChannel> httpChannel = do_QueryInterface(aChannel);
   if (httpChannel) {
     isTrackingResource = httpChannel->GetIsTrackingResource();
 
-    if (isForeign && isTrackingResource &&
+    // Check first-party storage access even for non-tracking resources, since
+    // we will need the result when computing the access rights for the reject
+    // foreign cookie behavior mode.
+    if (isForeign &&
         AntiTrackingCommon::IsFirstPartyStorageAccessGrantedFor(httpChannel,
                                                                 aHostURI)) {
       firstPartyStorageAccessGranted = true;
     }
   }
 
   OriginAttributes attrs;
   if (aChannel) {
@@ -4254,31 +4260,35 @@ nsCookieService::CheckPrefs(nsICookiePer
   // check default prefs
   if (aCookieBehavior == nsICookieService::BEHAVIOR_REJECT) {
     COOKIE_LOGFAILURE(aCookieHeader ? SET_COOKIE : GET_COOKIE, aHostURI, aCookieHeader, "cookies are disabled");
     return STATUS_REJECTED;
   }
 
   // check if cookie is foreign
   if (aIsForeign) {
-    if (aCookieBehavior == nsICookieService::BEHAVIOR_REJECT_FOREIGN) {
+    // Check aFirstPartyStorageAccessGranted when rejecting all third-party cookies,
+    // so that we take things such as the content blocking allow list into account.
+    if (aCookieBehavior == nsICookieService::BEHAVIOR_REJECT_FOREIGN &&
+        !aFirstPartyStorageAccessGranted) {
       COOKIE_LOGFAILURE(aCookieHeader ? SET_COOKIE : GET_COOKIE, aHostURI, aCookieHeader, "context is third party");
       return STATUS_REJECTED;
     }
 
     if (aCookieBehavior == nsICookieService::BEHAVIOR_LIMIT_FOREIGN) {
       if (aNumOfCookies == 0) {
         COOKIE_LOGFAILURE(aCookieHeader ? SET_COOKIE : GET_COOKIE, aHostURI, aCookieHeader, "context is third party");
         return STATUS_REJECTED;
       }
     }
 
     MOZ_ASSERT(aCookieBehavior == nsICookieService::BEHAVIOR_ACCEPT ||
                aCookieBehavior == nsICookieService::BEHAVIOR_LIMIT_FOREIGN ||
                // But with permission granted.
+               aCookieBehavior == nsICookieService::BEHAVIOR_REJECT_FOREIGN ||
                aCookieBehavior == nsICookieService::BEHAVIOR_REJECT_TRACKER);
 
     if (aThirdPartySession)
       return STATUS_ACCEPT_SESSION;
 
     if (aThirdPartyNonsecureSession) {
       bool isHTTPS = false;
       aHostURI->SchemeIs("https", &isHTTPS);
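
The same relaxation is applied in each of the cookie-service call sites above: the first-party storage access check now runs for any foreign load, and BEHAVIOR_REJECT_FOREIGN only rejects when that access has not been granted. A minimal, runnable Python sketch of the revised decision (constant values here are placeholders, not the real nsICookieService values):

BEHAVIOR_ACCEPT = 'accept'
BEHAVIOR_REJECT_FOREIGN = 'reject-foreign'

def rejects_foreign_cookie(cookie_behavior, is_foreign, storage_access_granted):
    # Mirrors the CheckPrefs() change: in reject-foreign mode a third-party
    # cookie is allowed through when first-party storage access was granted
    # (for example via the content blocking allow list).
    if not is_foreign:
        return False
    return (cookie_behavior == BEHAVIOR_REJECT_FOREIGN and
            not storage_access_granted)

assert rejects_foreign_cookie(BEHAVIOR_REJECT_FOREIGN, True, False)
assert not rejects_foreign_cookie(BEHAVIOR_REJECT_FOREIGN, True, True)
assert not rejects_foreign_cookie(BEHAVIOR_ACCEPT, True, False)
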
--- a/python/mozbuild/mozbuild/mach_commands.py
+++ b/python/mozbuild/mozbuild/mach_commands.py
@@ -2722,19 +2722,22 @@ class Repackage(MachCommandBase):
     @SubCommand('repackage', 'mar',
                 description='Repackage into complete MAR file')
     @CommandArgument('--input', '-i', type=str, required=True,
         help='Input filename')
     @CommandArgument('--mar', type=str, required=True,
         help='Mar binary path')
     @CommandArgument('--output', '-o', type=str, required=True,
         help='Output filename')
-    def repackage_mar(self, input, mar, output):
+    @CommandArgument('--format', type=str, default='lzma',
+        choices=('lzma', 'bz2'),
+        help='Mar format')
+    def repackage_mar(self, input, mar, output, format):
         from mozbuild.repackaging.mar import repackage_mar
-        repackage_mar(self.topsrcdir, input, mar, output)
+        repackage_mar(self.topsrcdir, input, mar, output, format)
 
 @CommandProvider
 class Analyze(MachCommandBase):
     """ Get information about a file in the build graph """
     @Command('analyze', category='misc',
         description='Analyze the build graph.')
     def analyze(self):
         print("Usage: ./mach analyze [files|report] [args...]")
--- a/python/mozbuild/mozbuild/repackaging/mar.py
+++ b/python/mozbuild/mozbuild/repackaging/mar.py
@@ -9,17 +9,17 @@ import shutil
 import zipfile
 import tarfile
 import subprocess
 import mozpack.path as mozpath
 from application_ini import get_application_ini_value
 from mozbuild.util import ensureParentDir
 
 
-def repackage_mar(topsrcdir, package, mar, output):
+def repackage_mar(topsrcdir, package, mar, output, mar_format='lzma'):
     if not zipfile.is_zipfile(package) and not tarfile.is_tarfile(package):
         raise Exception("Package file %s is not a valid .zip or .tar file." % package)
 
     ensureParentDir(output)
     tmpdir = tempfile.mkdtemp()
     try:
         if zipfile.is_zipfile(package):
             z = zipfile.ZipFile(package)
@@ -42,16 +42,18 @@ def repackage_mar(topsrcdir, package, ma
                             "(eg: 'firefox'), not: %s" % toplevel_dirs)
         ffxdir = mozpath.join(tmpdir, toplevel_dirs.pop())
 
         make_full_update = mozpath.join(topsrcdir, 'tools/update-packaging/make_full_update.sh')
 
         env = os.environ.copy()
         env['MOZ_FULL_PRODUCT_VERSION'] = get_application_ini_value(tmpdir, 'App', 'Version')
         env['MAR'] = mozpath.normpath(mar)
+        if mar_format == 'bz2':
+            env['MAR_OLD_FORMAT'] = '1'
         # The Windows build systems have xz installed but it isn't in the path
         # like it is on Linux and Mac OS X so just use the XZ env var so the mar
         # generation scripts can find it.
         xz_path = mozpath.join(topsrcdir, 'xz/xz.exe')
         if os.path.exists(xz_path):
             env['XZ'] = mozpath.normpath(xz_path)
 
         cmd = [make_full_update, output, ffxdir]
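
A short sketch (Python; the helper below is hypothetical, written only to show the flow) of how the new option travels from `mach repackage mar --format` into this script: the chosen format reaches repackage_mar() as mar_format, and only the legacy bz2 layout needs the extra MAR_OLD_FORMAT environment variable.

import os

def mar_environment(mar_format='lzma', base_env=None):
    # Hypothetical helper mirroring the new branch in repackage_mar():
    # only the legacy 'bz2' format exports MAR_OLD_FORMAT=1; 'lzma' is default.
    env = dict(base_env if base_env is not None else os.environ)
    if mar_format == 'bz2':
        env['MAR_OLD_FORMAT'] = '1'
    return env

# e.g. ./mach repackage mar -i target.zip --mar ./mar -o out.mar --format bz2
assert mar_environment('bz2', {})['MAR_OLD_FORMAT'] == '1'
assert 'MAR_OLD_FORMAT' not in mar_environment('lzma', {})
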
--- a/taskcluster/ci/beetmover-repackage/kind.yml
+++ b/taskcluster/ci/beetmover-repackage/kind.yml
@@ -10,16 +10,18 @@ transforms:
    - taskgraph.transforms.beetmover_repackage:transforms
    - taskgraph.transforms.task:transforms
 
 kind-dependencies:
    - repackage-signing
    - repackage-signing-l10n
    - partials-signing
 
+fake-multi-dep: true
+
 only-for-build-platforms:
    - linux-nightly/opt
    - linux64-nightly/opt
    - macosx64-nightly/opt
    - win32-nightly/opt
    - win64-nightly/opt
    - linux-devedition-nightly/opt
    - linux64-devedition-nightly/opt
--- a/taskcluster/ci/release-balrog-scheduling/kind.yml
+++ b/taskcluster/ci/release-balrog-scheduling/kind.yml
@@ -35,16 +35,33 @@ jobs:
                mozilla-release: [145]
                mozilla-esr60: [806]
                default: []
       treeherder:
          platform: firefox-release/opt
          symbol: Rel(BSFx)
          tier: 1
          kind: build
+   # FIXME: (Bug 1487172) This task should only run on esr60-like branches
+   firefox-bz2:
+      description: Schedule Firefox publishing in balrog (bz2)
+      name: release-firefox_schedule_publishing_in_balrog-bz2
+      shipping-product: firefox
+      worker:
+         product: firefox
+         publish-rules:
+            by-project:
+               mozilla-esr60: [521]
+               default: []
+         blob-suffix: -bz2
+      treeherder:
+         platform: firefox-release/opt
+         symbol: Rel(BSFx-bz2)
+         tier: 1
+         kind: build
    devedition:
       description: Schedule Devedition publishing in balrog
       name: release-devedition_schedule_publishing_in_balrog
       shipping-product: devedition
       worker:
          product: devedition
          publish-rules:
             by-project:
--- a/taskcluster/ci/release-balrog-submit-toplevel/kind.yml
+++ b/taskcluster/ci/release-balrog-submit-toplevel/kind.yml
@@ -65,16 +65,53 @@ jobs:
                default: []
          platforms: ["linux", "linux64", "macosx64", "win32", "win64"]
       treeherder:
          platform: firefox-release/opt
          symbol: Rel(BPFx)
          tier: 1
          kind: build
 
+   # FIXME: (Bug 1487172) This task should only run on esr60-like branches
+   firefox-bz2:
+      name: submit-toplevel-firefox-release-to-balrog-bz2
+      description: Submit toplevel Firefox release to balrog
+      shipping-product: firefox
+      worker:
+         product: firefox
+         archive-domain:
+            by-project:
+               mozilla-esr60: archive.mozilla.org
+               default: ftp.stage.mozaws.net
+         download-domain:
+            by-project:
+               mozilla-esr60: download.mozilla.org
+               default: download.mozilla.org
+         channel-names:
+            by-project:
+               mozilla-esr60: ["esr", "esr-localtest", "esr-cdntest"]
+               default: []
+         publish-rules:
+            by-project:
+               mozilla-esr60: [521]
+               default: []
+         rules-to-update:
+            by-project:
+               mozilla-esr60: ["esr52-cdntest", "esr52-localtest"]
+               default: []
+         platforms: ["linux", "linux64", "macosx64", "win32", "win64"]
+         blob-suffix: -bz2
+         complete-mar-filename-pattern: '%s-%s.bz2.complete.mar'
+         complete-mar-bouncer-product-pattern: '%s-%s-complete-bz2'
+      treeherder:
+         platform: firefox-release/opt
+         symbol: Rel(BPFx-bz2)
+         tier: 1
+         kind: build
+
    devedition:
       name: submit-toplevel-devedition-release-to-balrog
       description: submit toplevel Devedition release to balrog
       shipping-product: devedition
       worker:
          product: devedition
          archive-domain:
             by-project:
--- a/taskcluster/ci/release-bouncer-sub/kind.yml
+++ b/taskcluster/ci/release-bouncer-sub/kind.yml
@@ -57,18 +57,18 @@ jobs:
          platform: fennec-release/opt
 
    firefox:
       bouncer-platforms: ['linux', 'linux64', 'osx', 'win', 'win64']
       bouncer-products:
          by-project:
             default: ['complete-mar', 'installer', 'installer-ssl', 'partial-mar', 'stub-installer']
             # No stub installer in esr60
-            mozilla-esr60: ['complete-mar', 'installer', 'installer-ssl', 'partial-mar']
-            jamun: ['complete-mar', 'installer', 'installer-ssl', 'partial-mar']
+            mozilla-esr60: ['complete-mar', 'complete-mar-bz2', 'installer', 'installer-ssl', 'partial-mar']
+            jamun: ['complete-mar', 'complete-mar-bz2', 'installer', 'installer-ssl', 'partial-mar']
       shipping-product: firefox
       treeherder:
          platform: firefox-release/opt
 
    firefox-rc:
       bouncer-platforms: ['linux', 'linux64', 'osx', 'win', 'win64']
       bouncer-products: ['complete-mar-candidates', 'partial-mar-candidates']
       shipping-product: firefox
--- a/taskcluster/ci/release-eme-free-repack-repackage/kind.yml
+++ b/taskcluster/ci/release-eme-free-repack-repackage/kind.yml
@@ -22,13 +22,21 @@ only-for-build-platforms:
    - win32-nightly/opt
    - win64-nightly/opt
 
 job-template:
    mozharness:
       config:
          by-build-platform:
             macosx64-.*:
+               - repackage/base.py
                - repackage/osx_partner.py
             win32-.*:
+               - repackage/base.py
                - repackage/win32_partner.py
             win64-.*:
+               - repackage/base.py
                - repackage/win64_partner.py
+   package-formats:
+      by-build-platform:
+         macosx64\b.*: [dmg]
+         win32\b.*: [installer]
+         win64\b.*: [installer]
--- a/taskcluster/ci/release-partner-repack-repackage/kind.yml
+++ b/taskcluster/ci/release-partner-repack-repackage/kind.yml
@@ -22,13 +22,21 @@ only-for-build-platforms:
    - win32-nightly/opt
    - win64-nightly/opt
 
 job-template:
    mozharness:
       config:
          by-build-platform:
             macosx64-.*:
+               - repackage/base.py
                - repackage/osx_partner.py
             win32-.*:
+               - repackage/base.py
                - repackage/win32_partner.py
             win64-.*:
+               - repackage/base.py
                - repackage/win64_partner.py
+   package-formats:
+      by-build-platform:
+         macosx64\b.*: [dmg]
+         win32\b.*: [installer]
+         win64\b.*: [installer]
--- a/taskcluster/ci/release-update-verify-config/kind.yml
+++ b/taskcluster/ci/release-update-verify-config/kind.yml
@@ -82,18 +82,17 @@ job-defaults:
                   win64-.*: "56.0"
                   default: null
             mozilla-esr52:
                by-platform:
                   macosx64: "45.9.0esr"
                   win32: "45.3.0esr"
                   win64: "45.3.0esr"
                   default: null
-            # TODO add bz2 -> lzma watershed when we ship the first ESR 52 -> 60 update
-            mozilla-esr60: "60.0esr"
+            mozilla-esr60: "52.0esr"
             default: null
 
 jobs:
    firefox-linux:
       shipping-product: firefox
       treeherder:
          symbol: UVC
          platform: linux32/opt
--- a/taskcluster/ci/repackage-l10n/kind.yml
+++ b/taskcluster/ci/repackage-l10n/kind.yml
@@ -1,44 +1,66 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 loader: taskgraph.loader.single_dep:loader
 
 transforms:
-   - taskgraph.transforms.repackage_l10n:transforms
-   - taskgraph.transforms.name_sanity:transforms
-   - taskgraph.transforms.repackage:transforms
-   - taskgraph.transforms.use_toolchains:transforms
-   - taskgraph.transforms.job:transforms
-   - taskgraph.transforms.task:transforms
+    - taskgraph.transforms.repackage_l10n:transforms
+    - taskgraph.transforms.name_sanity:transforms
+    - taskgraph.transforms.repackage:transforms
+    - taskgraph.transforms.use_toolchains:transforms
+    - taskgraph.transforms.job:transforms
+    - taskgraph.transforms.task:transforms
 
 kind-dependencies:
-   - nightly-l10n-signing
-   - toolchain
+    - nightly-l10n-signing
+    - toolchain
 
 only-for-build-platforms:
-   - linux-nightly/opt
-   - linux64-nightly/opt
-   - macosx64-nightly/opt
-   - win32-nightly/opt
-   - win64-nightly/opt
-   - linux-devedition-nightly/opt
-   - linux64-devedition-nightly/opt
-   - macosx64-devedition-nightly/opt
-   - win32-devedition-nightly/opt
-   - win64-devedition-nightly/opt
+    - linux-nightly/opt
+    - linux64-nightly/opt
+    - macosx64-nightly/opt
+    - win32-nightly/opt
+    - win64-nightly/opt
+    - linux-devedition-nightly/opt
+    - linux64-devedition-nightly/opt
+    - macosx64-devedition-nightly/opt
+    - win32-devedition-nightly/opt
+    - win64-devedition-nightly/opt
 
 job-template:
-   mozharness:
-      config:
-         by-build-platform:
-            linux-.*:
-               - repackage/linux32_signed.py
-            linux64-.*:
-               - repackage/linux64_signed.py
-            macosx64-.*:
-               - repackage/osx_signed.py
-            win32-.*:
-               - repackage/win32_signed.py
-            win64-.*:
-               - repackage/win64_signed.py
+    mozharness:
+        config:
+            by-build-platform:
+                linux-.*:
+                    - repackage/base.py
+                    - repackage/linux32_signed.py
+                linux64-.*:
+                    - repackage/base.py
+                    - repackage/linux64_signed.py
+                macosx64-.*:
+                    - repackage/base.py
+                    - repackage/osx_signed.py
+                win32-.*:
+                    - repackage/base.py
+                    - repackage/win32_signed.py
+                win64-.*:
+                    - repackage/base.py
+                    - repackage/win64_signed.py
+    package-formats:
+        by-project:
+            # Build bz2 MARs on try to exercise the code
+            (mozilla-esr60|try):
+                by-build-platform:
+                    linux.*: [mar, mar-bz2]
+                    linux4\b.*: [mar, mar-bz2]
+                    macosx64\b.*: [mar, mar-bz2, dmg]
+                    win32\b.*: [mar, mar-bz2, installer]
+                    win64\b.*: [mar, mar-bz2, installer]
+            default:
+                by-build-platform:
+                    linux.*: [mar]
+                    linux4\b.*: [mar]
+                    macosx64\b.*: [mar, dmg]
+                    win32\b.*: [mar, installer]
+                    win64\b.*: [mar, installer]
--- a/taskcluster/ci/repackage/kind.yml
+++ b/taskcluster/ci/repackage/kind.yml
@@ -35,17 +35,39 @@ only-for-build-platforms:
     - win64/opt
     - win64-asan-reporter-nightly/opt
 
 job-template:
     mozharness:
         config:
             by-build-platform:
                 linux\b.*:
+                    - repackage/base.py
                     - repackage/linux32_signed.py
                 linux64\b.*:
+                    - repackage/base.py
                     - repackage/linux64_signed.py
                 macosx64\b.*:
+                    - repackage/base.py
                     - repackage/osx_signed.py
                 win32\b.*:
+                    - repackage/base.py
                     - repackage/win32_signed.py
                 win64\b.*:
+                    - repackage/base.py
                     - repackage/win64_signed.py
+    package-formats:
+        by-project:
+            # Build bz2 MARs on try to exercise the code
+            (mozilla-esr60|try):
+                by-build-platform:
+                    linux.*: [mar, mar-bz2]
+                    linux4\b.*: [mar, mar-bz2]
+                    macosx64\b.*: [mar, mar-bz2, dmg]
+                    win32\b.*: [mar, mar-bz2, installer]
+                    win64\b.*: [mar, mar-bz2, installer]
+            default:
+                by-build-platform:
+                    linux.*: [mar]
+                    linux4\b.*: [mar]
+                    macosx64\b.*: [mar, dmg]
+                    win32\b.*: [mar, installer]
+                    win64\b.*: [mar, installer]
--- a/taskcluster/taskgraph/generator.py
+++ b/taskcluster/taskgraph/generator.py
@@ -64,17 +64,19 @@ class Kind(object):
         # perform the transformations on the loaded inputs
         trans_config = TransformConfig(self.name, self.path, config, parameters,
                                        kind_dependencies_tasks, self.graph_config)
         tasks = [Task(self.name,
                       label=task_dict['label'],
                       attributes=task_dict['attributes'],
                       task=task_dict['task'],
                       optimization=task_dict.get('optimization'),
-                      dependencies=task_dict.get('dependencies'))
+                      dependencies=task_dict.get('dependencies'),
+                      release_artifacts=task_dict.get('release-artifacts'),
+                      )
                  for task_dict in transforms(trans_config, inputs)]
         return tasks
 
     @classmethod
     def load(cls, root_dir, graph_config, kind_name):
         path = os.path.join(root_dir, kind_name)
         kind_yml = os.path.join(path, 'kind.yml')
         if not os.path.exists(kind_yml):
--- a/taskcluster/taskgraph/loader/single_dep.py
+++ b/taskcluster/taskgraph/loader/single_dep.py
@@ -24,16 +24,18 @@ def loader(kind, path, config, params, l
     Optional `job-template` kind configuration value, if specified, will be used to
     pass configuration down to the specified transforms used.
     """
     only_platforms = config.get('only-for-build-platforms')
     not_platforms = config.get('not-for-build-platforms')
     only_attributes = config.get('only-for-attributes')
     job_template = config.get('job-template')
 
+    include_parents = config.get('fake-multi-dep')
+
     for task in loaded_tasks:
         if task.kind not in config.get('kind-dependencies', []):
             continue
 
         if only_platforms or not_platforms:
             build_platform = task.attributes.get('build_platform')
             build_type = task.attributes.get('build_type')
             if not build_platform or not build_type:
@@ -46,19 +48,34 @@ def loader(kind, path, config, params, l
 
         if only_attributes:
             config_attrs = set(only_attributes)
             if config_attrs - set(task.attributes):
                 # make sure all attributes exist
                 continue
 
         job = {'dependent-task': task}
+        if include_parents:
+            job['grandparent-tasks'] = _get_grandparent_tasks(task.dependencies, loaded_tasks)
+
         if job_template:
             job.update(copy.deepcopy(job_template))
 
         # copy shipping_product from upstream
         product = task.attributes.get(
             'shipping_product', task.task.get('shipping-product')
         )
         if product:
             job.setdefault('shipping-product', product)
 
         yield job
+
+
+def _get_grandparent_tasks(dependencies, loaded_tasks):
+    parent_tasks = {}
+    for task in loaded_tasks:
+        if task.label in dependencies.values():
+            for name, label in dependencies.items():
+                if label == task.label:
+                    parent_tasks[name] = task
+    if set(parent_tasks.keys()) != set(dependencies.keys()):
+        raise Exception("Missing parent tasks.")
+    return parent_tasks
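
Roughly, the new grandparent lookup (enabled by the `fake-multi-dep` flag in the beetmover-repackage kind) resolves each dependency label of the direct parent back to its already-loaded Task. A standalone sketch, using a hypothetical stand-in for taskgraph.task.Task:

from collections import namedtuple

# Minimal stand-in for taskgraph.task.Task; just enough to exercise the lookup.
FakeTask = namedtuple('FakeTask', ['label', 'dependencies'])

def get_grandparent_tasks(dependencies, loaded_tasks):
    # Same shape as _get_grandparent_tasks above: map each dependency name of
    # the direct parent to the loaded Task that carries the matching label.
    parent_tasks = {}
    for task in loaded_tasks:
        for name, label in dependencies.items():
            if label == task.label:
                parent_tasks[name] = task
    if set(parent_tasks) != set(dependencies):
        raise Exception("Missing parent tasks.")
    return parent_tasks

build = FakeTask('build-linux64/opt', {})
signing = FakeTask('build-signing-linux64/opt', {'build': build.label})
repackage = FakeTask('repackage-linux64/opt',
                     {'build': build.label, 'build-signing': signing.label})

grandparents = get_grandparent_tasks(repackage.dependencies,
                                     [build, signing, repackage])
assert grandparents['build'] is build
assert grandparents['build-signing'] is signing
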
--- a/taskcluster/taskgraph/task.py
+++ b/taskcluster/taskgraph/task.py
@@ -20,65 +20,76 @@ class Task(object):
     And later, as the task-graph processing proceeds:
 
     - task_id -- TaskCluster taskId under which this task will be created
 
     This class is just a convenience wrapper for the data type and managing
     display, comparison, serialization, etc. It has no functionality of its own.
     """
     def __init__(self, kind, label, attributes, task,
-                 optimization=None, dependencies=None):
+                 optimization=None, dependencies=None,
+                 release_artifacts=None):
         self.kind = kind
         self.label = label
         self.attributes = attributes
         self.task = task
 
         self.task_id = None
 
         self.attributes['kind'] = kind
 
         self.optimization = optimization
         self.dependencies = dependencies or {}
+        if release_artifacts:
+            self.release_artifacts = frozenset(release_artifacts)
+        else:
+            self.release_artifacts = None
 
     def __eq__(self, other):
         return self.kind == other.kind and \
             self.label == other.label and \
             self.attributes == other.attributes and \
             self.task == other.task and \
             self.task_id == other.task_id and \
             self.optimization == other.optimization and \
-            self.dependencies == other.dependencies
+            self.dependencies == other.dependencies and \
+            self.release_artifacts == other.release_artifacts
 
     def __repr__(self):
         return ('Task({kind!r}, {label!r}, {attributes!r}, {task!r}, '
                 'optimization={optimization!r}, '
-                'dependencies={dependencies!r})'.format(**self.__dict__))
+                'dependencies={dependencies!r}, '
+                'release_artifacts={release_artifacts!r})'.format(**self.__dict__))
 
     def to_json(self):
         rv = {
             'kind': self.kind,
             'label': self.label,
             'attributes': self.attributes,
             'dependencies': self.dependencies,
             'optimization': self.optimization,
             'task': self.task,
         }
         if self.task_id:
             rv['task_id'] = self.task_id
+        if self.release_artifacts:
+            rv['release_artifacts'] = sorted(self.release_artifacts)
         return rv
 
     @classmethod
     def from_json(cls, task_dict):
         """
         Given a data structure as produced by taskgraph.to_json, re-construct
         the original Task object.  This is used to "resume" the task-graph
         generation process, for example in Action tasks.
         """
         rv = cls(
             kind=task_dict['kind'],
             label=task_dict['label'],
             attributes=task_dict['attributes'],
             task=task_dict['task'],
             optimization=task_dict['optimization'],
-            dependencies=task_dict.get('dependencies'))
+            dependencies=task_dict.get('dependencies'),
+            release_artifacts=task_dict.get('release_artifacts')
+        )
         if 'task_id' in task_dict:
             rv.task_id = task_dict['task_id']
         return rv
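
To see how the new field round-trips, here is a pared-down, hypothetical stand-in for Task (not the real class): release artifacts are kept as a frozenset for cheap, order-insensitive comparison in __eq__, and serialized as a sorted list so the JSON output stays deterministic.

class MiniTask(object):
    # Hypothetical stand-in keeping only the new release_artifacts handling.
    def __init__(self, label, release_artifacts=None):
        self.label = label
        self.release_artifacts = (
            frozenset(release_artifacts) if release_artifacts else None)

    def to_json(self):
        rv = {'label': self.label}
        if self.release_artifacts:
            rv['release_artifacts'] = sorted(self.release_artifacts)
        return rv

    @classmethod
    def from_json(cls, task_dict):
        return cls(task_dict['label'], task_dict.get('release_artifacts'))

task = MiniTask('repackage-linux64-nightly/opt',
                ['public/build/target.complete.mar',
                 'public/build/target.bz2.complete.mar'])
assert MiniTask.from_json(task.to_json()).release_artifacts == task.release_artifacts
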
--- a/taskcluster/taskgraph/transforms/beetmover_repackage.py
+++ b/taskcluster/taskgraph/transforms/beetmover_repackage.py
@@ -113,39 +113,33 @@ UPSTREAM_ARTIFACT_SIGNED_PATHS = {
         ['target.tar.bz2', 'target.tar.bz2.asc'],
     r'^win(32|64)(|-devedition|-asan-reporter)-nightly(|-l10n)$': ['target.zip'],
 }
 
 # Until bug 1331141 is fixed, if you are adding any new artifacts here that
 # need to be transferred to S3, please be aware you also need to follow up
 # with a beetmover patch in https://github.com/mozilla-releng/beetmoverscript/.
 # See example in bug 1348286
-UPSTREAM_ARTIFACT_REPACKAGE_PATHS = {
-    r'^macosx64(|-devedition)-nightly(|-l10n)$': ['target.dmg'],
-}
+UPSTREAM_ARTIFACT_REPACKAGE_PATHS = [
+    'target.dmg',
+]
 # Until bug 1331141 is fixed, if you are adding any new artifacts here that
 # need to be transferred to S3, please be aware you also need to follow up
 # with a beetmover patch in https://github.com/mozilla-releng/beetmoverscript/.
 # See example in bug 1348286
-UPSTREAM_ARTIFACT_SIGNED_REPACKAGE_PATHS = {
-    r'^(linux(|64)|macosx64)(|-devedition|-asan-reporter)-nightly(|-l10n)$':
-        ['target.complete.mar'],
-    r'^win64(|-devedition|-asan-reporter)-nightly(|-l10n)$':
-        ['target.complete.mar', 'target.installer.exe'],
-    r'^win32(|-devedition)-nightly(|-l10n)$': [
-        'target.complete.mar',
-        'target.installer.exe',
-        'target.stub-installer.exe'
-        ],
-}
+UPSTREAM_ARTIFACT_SIGNED_REPACKAGE_PATHS = [
+    'target.complete.mar',
+    'target.bz2.complete.mar',
+    'target.installer.exe',
+    'target.stub-installer.exe',
+]
 
 # Compile every regex once at import time
 for dict_ in (
     UPSTREAM_ARTIFACT_UNSIGNED_PATHS, UPSTREAM_ARTIFACT_SIGNED_PATHS,
-    UPSTREAM_ARTIFACT_REPACKAGE_PATHS, UPSTREAM_ARTIFACT_SIGNED_REPACKAGE_PATHS,
 ):
     for uncompiled_regex, value in dict_.iteritems():
         compiled_regex = re.compile(uncompiled_regex)
         del dict_[uncompiled_regex]
         dict_[compiled_regex] = value
 
 # Voluptuous uses marker objects as dictionary *keys*, but they are not
 # comparable, so we cast all of the keys back to regular strings
@@ -157,16 +151,18 @@ transforms = TransformSequence()
 taskref_or_string = Any(
     basestring,
     {Required('task-reference'): basestring})
 
 beetmover_description_schema = Schema({
     # the dependent task (object) for this beetmover job, used to inform beetmover.
     Required('dependent-task'): object,
 
+    Required('grandparent-tasks'): object,
+
     # depname is used in taskref's to identify the taskID of the unsigned things
     Required('depname', default='build'): basestring,
 
     # unique label to describe this beetmover task, defaults to {dep.label}-beetmover
     Optional('label'): basestring,
 
     # treeherder is allowed here to override any defaults we use for beetmover.  See
     # taskcluster/taskgraph/transforms/task.py for the schema details, and the
@@ -209,52 +205,40 @@ def make_task_description(config, jobs):
             "Beetmover submission for locale '{locale}' for build '"
             "{build_platform}/{build_type}'".format(
                 locale=attributes.get('locale', 'en-US'),
                 build_platform=attributes.get('build_platform'),
                 build_type=attributes.get('build_type')
             )
         )
 
-        dependent_kind = str(dep_job.kind)
-        dependencies = {dependent_kind: dep_job.label}
+        dependent_kind = dep_job.kind
+        if dependent_kind == 'repackage-signing-l10n':
+            dependent_kind = "repackage-signing"
+        dependencies = {dependent_kind: dep_job}
 
         signing_name = "build-signing"
         if job.get('locale'):
             signing_name = "nightly-l10n-signing"
-        signing_dependencies = {signing_name:
-                                dep_job.dependencies[signing_name]
-                                }
-        dependencies.update(signing_dependencies)
+        dependencies['signing'] = job['grandparent-tasks'][signing_name]
 
         build_name = "build"
         if job.get('locale'):
             build_name = "unsigned-repack"
-        build_dependencies = {"build":
-                              dep_job.dependencies[build_name]
-                              }
-        dependencies.update(build_dependencies)
+        dependencies["build"] = job['grandparent-tasks'][build_name]
 
-        repackage_name = "repackage"
         # repackage-l10n actually uses the repackage depname here
-        repackage_dependencies = {"repackage":
-                                  dep_job.dependencies[repackage_name]
-                                  }
-        dependencies.update(repackage_dependencies)
+        dependencies["repackage"] = job['grandparent-tasks']["repackage"]
 
         # If this isn't a direct dependency, it won't be in there.
-        if 'repackage-signing' not in dependencies and \
-                'repackage-signing-l10n' not in dependencies:
+        if 'repackage-signing' not in dependencies:
             repackage_signing_name = "repackage-signing"
             if job.get('locale'):
                 repackage_signing_name = "repackage-signing-l10n"
-            repackage_signing_deps = {"repackage-signing":
-                                      dep_job.dependencies[repackage_signing_name]
-                                      }
-            dependencies.update(repackage_signing_deps)
+            dependencies["repackage-signing"] = job['grandparent-tasks'][repackage_signing_name]
 
         attributes = copy_attributes_from_dependent_job(dep_job)
         if job.get('locale'):
             attributes['locale'] = job['locale']
 
         bucket_scope = get_beetmover_bucket_scope(config)
         action_scope = get_beetmover_action_scope(config)
         phase = get_phase(config)
@@ -270,76 +254,79 @@ def make_task_description(config, jobs):
             'treeherder': treeherder,
             'shipping-phase': job.get('shipping-phase', phase),
             'shipping-product': job.get('shipping-product'),
         }
 
         yield task
 
 
-def generate_upstream_artifacts(job, build_task_ref, build_signing_task_ref,
-                                repackage_task_ref, repackage_signing_task_ref,
-                                platform, locale=None, project=None):
+def generate_upstream_artifacts(job, dependencies, platform, locale=None, project=None):
 
     build_mapping = UPSTREAM_ARTIFACT_UNSIGNED_PATHS
     build_signing_mapping = UPSTREAM_ARTIFACT_SIGNED_PATHS
     repackage_mapping = UPSTREAM_ARTIFACT_REPACKAGE_PATHS
     repackage_signing_mapping = UPSTREAM_ARTIFACT_SIGNED_REPACKAGE_PATHS
 
     artifact_prefix = get_artifact_prefix(job)
     if locale:
         artifact_prefix = '{}/{}'.format(artifact_prefix, locale)
         platform = "{}-l10n".format(platform)
 
     upstream_artifacts = []
 
-    task_refs = [
-        build_task_ref,
-        build_signing_task_ref,
-        repackage_task_ref,
-        repackage_signing_task_ref
-    ]
-    tasktypes = ['build', 'signing', 'repackage', 'repackage']
-    mapping = [
-        build_mapping,
-        build_signing_mapping,
-        repackage_mapping,
-        repackage_signing_mapping
-    ]
-
-    for ref, tasktype, mapping in zip(task_refs, tasktypes, mapping):
+    for task_type, mapping in [
+        ("build", build_mapping),
+        ("signing", build_signing_mapping),
+    ]:
         platform_was_previously_matched_by_regex = None
         for platform_regex, paths in mapping.iteritems():
             if platform_regex.match(platform) is not None:
                 _check_platform_matched_only_one_regex(
-                    tasktype, platform, platform_was_previously_matched_by_regex, platform_regex
+                    task_type, platform, platform_was_previously_matched_by_regex, platform_regex
                 )
+                platform_was_previously_matched_by_regex = platform_regex
                 if paths:
                     usable_paths = paths[:]
 
-                    use_stub = job["attributes"].get('stub-installer')
-                    if not use_stub:
-                        if 'target.stub-installer.exe' in usable_paths:
-                            usable_paths.remove('target.stub-installer.exe')
                     if 'target.langpack.xpi' in usable_paths and \
                             not project == "mozilla-central":
                         # XXX This is only beetmoved for m-c nightlies.
                         # we should determine this better
                         usable_paths.remove('target.langpack.xpi')
+
                         if not len(usable_paths):
                             # We may have removed our only path.
                             continue
+
                     upstream_artifacts.append({
-                        "taskId": {"task-reference": ref},
-                        "taskType": tasktype,
-                        "paths": ["{}/{}".format(artifact_prefix, path)
-                                  for path in usable_paths],
+                        "taskId": {"task-reference": "<{}>".format(task_type)},
+                        "taskType": task_type,
+                        "paths": ["{}/{}".format(artifact_prefix, path) for path in usable_paths],
                         "locale": locale or "en-US",
                     })
-                platform_was_previously_matched_by_regex = platform_regex
+
+    for task_type, cot_type, paths in [
+        ('repackage', 'repackage', repackage_mapping),
+        ('repackage-signing', 'repackage', repackage_signing_mapping),
+    ]:
+        paths = ["{}/{}".format(artifact_prefix, path) for path in paths]
+        paths = [
+            path for path in paths
+            if path in dependencies[task_type].release_artifacts]
+
+        if not paths:
+            continue
+
+        upstream_artifacts.append({
+            "taskId": {"task-reference": "<{}>".format(task_type)},
+            "taskType": cot_type,
+            "paths": paths,
+            "locale": locale or "en-US",
+        })
 
     return upstream_artifacts
 
 
 def generate_partials_upstream_artifacts(job, artifacts, platform, locale=None):
     artifact_prefix = get_artifact_prefix(job)
     if locale and locale != 'en-US':
         artifact_prefix = '{}/{}'.format(artifact_prefix, locale)
@@ -386,45 +373,26 @@ def make_task_worker(config, jobs):
     for job in jobs:
         if not is_valid_beetmover_job(job):
             raise NotImplementedError(
                 "{}: Beetmover_repackage must have five dependencies.".format(job['label'])
             )
 
         locale = job["attributes"].get("locale")
         platform = job["attributes"]["build_platform"]
-        build_task = None
-        build_signing_task = None
-        repackage_task = None
-        repackage_signing_task = None
 
-        for dependency in job["dependencies"].keys():
-            if 'repackage-signing' in dependency:
-                repackage_signing_task = dependency
-            elif 'repackage' in dependency:
-                repackage_task = dependency
-            elif 'signing' in dependency:
-                # catches build-signing and nightly-l10n-signing
-                build_signing_task = dependency
-            else:
-                build_task = "build"
-
-        build_task_ref = "<" + str(build_task) + ">"
-        build_signing_task_ref = "<" + str(build_signing_task) + ">"
-        repackage_task_ref = "<" + str(repackage_task) + ">"
-        repackage_signing_task_ref = "<" + str(repackage_signing_task) + ">"
+        upstream_artifacts = generate_upstream_artifacts(
+            job, job['dependencies'], platform, locale,
+            project=config.params['project']
+        )
 
         worker = {
             'implementation': 'beetmover',
             'release-properties': craft_release_properties(config, job),
-            'upstream-artifacts': generate_upstream_artifacts(
-                job, build_task_ref, build_signing_task_ref, repackage_task_ref,
-                repackage_signing_task_ref, platform, locale,
-                project=config.params['project']
-            ),
+            'upstream-artifacts': upstream_artifacts,
         }
         if locale:
             worker["locale"] = locale
         job["worker"] = worker
 
         yield job
 
 
@@ -479,8 +447,18 @@ def make_partials_artifacts(config, jobs
                 if artifact_map[artifact].get(rel_attr):
                     artifact_extra[rel_attr] = artifact_map[artifact][rel_attr]
             extra.append(artifact_extra)
 
         job.setdefault('extra', {})
         job['extra']['partials'] = extra
 
         yield job
+
+
+@transforms.add
+def convert_deps(config, jobs):
+    for job in jobs:
+        job['dependencies'] = {
+            name: dep_job.label
+            for name, dep_job in job['dependencies'].items()
+        }
+        yield job
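
A minimal sketch of what convert_deps does to job['dependencies']: dependent task objects are replaced by their labels so later transforms only see strings. The FakeTask stand-in and the labels are hypothetical.

import collections

# Stand-in for a taskgraph Task object; only the label attribute matters here.
FakeTask = collections.namedtuple('FakeTask', 'label')

job = {'dependencies': {
    'repackage': FakeTask(label='repackage-win64-nightly/opt'),
    'repackage-signing': FakeTask(label='repackage-signing-win64-nightly/opt'),
}}

# The same mapping convert_deps applies: dependency name -> dependent task label.
job['dependencies'] = {
    name: dep_job.label
    for name, dep_job in job['dependencies'].items()
}
print(job['dependencies'])
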
--- a/taskcluster/taskgraph/transforms/beetmover_repackage_l10n.py
+++ b/taskcluster/taskgraph/transforms/beetmover_repackage_l10n.py
@@ -30,14 +30,15 @@ def make_beetmover_description(config, j
 
         treeherder = {
             'symbol': join_symbol(group, symbol),
         }
 
         beet_description = {
             'label': job['label'],
             'dependent-task': dep_job,
+            'grandparent-tasks': job['grandparent-tasks'],
             'treeherder': treeherder,
             'locale': locale,
             'shipping-phase': job.get('shipping-phase'),
             'shipping-product': job.get('shipping-product'),
         }
         yield beet_description
--- a/taskcluster/taskgraph/transforms/bouncer_submission.py
+++ b/taskcluster/taskgraph/transforms/bouncer_submission.py
@@ -49,16 +49,22 @@ CONFIG_PER_BOUNCER_PRODUCT = {
         },
     },
     'complete-mar-candidates': {
         'path_template': CANDIDATES_PATH_TEMPLATE,
         'file_names': {
             'default': '{product}-{version}.complete.mar',
         },
     },
+    'complete-mar-bz2': {
+        'path_template': RELEASES_PATH_TEMPLATE,
+        'file_names': {
+            'default': '{product}-{version}.bz2.complete.mar',
+        },
+    },
     'installer': {
         'path_template': RELEASES_PATH_TEMPLATE,
         'file_names': {
             'linux': '{product}-{version}.tar.bz2',
             'linux64': '{product}-{version}.tar.bz2',
             'osx': '{pretty_product}%20{version}.dmg',
             'win': '{pretty_product}%20Setup%20{version}.exe',
             'win64': '{pretty_product}%20Setup%20{version}.exe',
--- a/taskcluster/taskgraph/transforms/job/__init__.py
+++ b/taskcluster/taskgraph/transforms/job/__init__.py
@@ -62,16 +62,17 @@ job_description_schema = Schema({
     Optional('index'): task_description_schema['index'],
     Optional('run-on-projects'): task_description_schema['run-on-projects'],
     Optional('shipping-phase'): task_description_schema['shipping-phase'],
     Optional('shipping-product'): task_description_schema['shipping-product'],
     Optional('coalesce'): task_description_schema['coalesce'],
     Optional('always-target'): task_description_schema['always-target'],
     Exclusive('optimization', 'optimization'): task_description_schema['optimization'],
     Optional('needs-sccache'): task_description_schema['needs-sccache'],
+    Optional('release-artifacts'): task_description_schema['release-artifacts'],
 
     # The "when" section contains descriptions of the circumstances under which
     # this task should be included in the task graph.  This will be converted
     # into an optimization, so it cannot be specified in a job description that
     # also gives 'optimization'.
     Exclusive('when', 'optimization'): Any({
         # This task only needs to be run if a file matching one of the given
         # patterns has changed in the push.  The patterns use the mozpack
--- a/taskcluster/taskgraph/transforms/repackage.py
+++ b/taskcluster/taskgraph/transforms/repackage.py
@@ -13,30 +13,28 @@ from taskgraph.transforms.base import Tr
 from taskgraph.util.attributes import copy_attributes_from_dependent_job
 from taskgraph.util.schema import (
     validate_schema,
     optionally_keyed_by,
     resolve_keyed_by,
     Schema,
 )
 from taskgraph.util.taskcluster import get_artifact_prefix
+from taskgraph.util.platforms import archive_format, executable_extension
+from taskgraph.util.workertypes import worker_type_implementation
 from taskgraph.transforms.task import task_description_schema
 from voluptuous import Any, Required, Optional
 
 transforms = TransformSequence()
 
 # Voluptuous uses marker objects as dictionary *keys*, but they are not
 # comparable, so we cast all of the keys back to regular strings
 task_description_schema = {str(k): v for k, v in task_description_schema.schema.iteritems()}
 
 
-def _by_platform(arg):
-    return optionally_keyed_by('build-platform', arg)
-
-
 # shortcut for a string where task references are allowed
 taskref_or_string = Any(
     basestring,
     {Required('task-reference'): basestring})
 
 packaging_description_schema = Schema({
     # the dependent task (object) for this job, used to inform repackaging.
     Required('dependent-task'): object,
@@ -60,31 +58,93 @@ packaging_description_schema = Schema({
 
     # passed through directly to the job description
     Optional('extra'): task_description_schema['extra'],
 
     # Shipping product and phase
     Optional('shipping-product'): task_description_schema['shipping-product'],
     Optional('shipping-phase'): task_description_schema['shipping-phase'],
 
+    Required('package-formats'): optionally_keyed_by('build-platform', 'project', [basestring]),
+
     # All l10n jobs use mozharness
     Required('mozharness'): {
         # Config files passed to the mozharness script
-        Required('config'): _by_platform([basestring]),
+        Required('config'): optionally_keyed_by('build-platform', [basestring]),
 
         # Additional paths to look for mozharness configs in. These should be
         # relative to the base of the source checkout
         Optional('config-paths'): [basestring],
 
         # if true, perform a checkout of a comm-central based branch inside the
         # gecko checkout
         Required('comm-checkout', default=False): bool,
     }
 })
 
+# The configuration passed to the mozharness repackage script. This defines the
+# arguments passed to `mach repackage`:
+# - `args` is interpolated by mozharness (`{installer-tag}`,
+#   `{stub-installer-tag}`, `{sfx-stub}`) with values from the mozharness
+#   config.
+# - `inputs` are passed as long-options, with the filename prefixed by
+#   `MOZ_FETCH_DIR`. The filename is interpolated by taskgraph
+#   (`{archive_format}`, `{executable_extension}`).
+# - `output` is passed to `--output`, with the filename prefixed by the output
+#   directory.
+PACKAGE_FORMATS = {
+    'mar': {
+        'args': ['mar'],
+        'inputs': {
+            'input': 'target{archive_format}',
+            'mar': 'mar{executable_extension}',
+        },
+        'output': "target.complete.mar",
+    },
+    'mar-bz2': {
+        'args': ['mar', "--format", "bz2"],
+        'inputs': {
+            'input': 'target{archive_format}',
+            'mar': 'mar{executable_extension}',
+        },
+        'output': "target.bz2.complete.mar",
+    },
+    'dmg': {
+        'args': ['dmg'],
+        'inputs': {
+            'input': 'target{archive_format}',
+        },
+        'output': "target.dmg",
+    },
+    'installer': {
+        'args': [
+            "installer",
+            "--package-name", "firefox",
+            "--tag", "{installer-tag}",
+            "--sfx-stub", "{sfx-stub}",
+        ],
+        'inputs': {
+            'package': 'target{archive_format}',
+            "setupexe": "setup.exe",
+        },
+        'output': "target.installer.exe",
+    },
+    'installer-stub': {
+        'args': [
+            "installer",
+            "--tag", "{stub-installer-tag}",
+            "--sfx-stub", "{sfx-stub}",
+        ],
+        'inputs': {
+            "setupexe": "setup-stub.exe",
+        },
+        'output': 'target.stub-installer.exe',
+    },
+}
+
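
To make the comment above concrete, here is a rough sketch of how the mozharness script changed later in this patch expands the 'mar' entry into a `mach repackage` command line. The directory values are hypothetical, and taskgraph is assumed to have already substituted {archive_format}/{executable_extension} for a linux64 build.

import os

repack_config = {
    'args': ['mar'],
    'inputs': {'input': 'target.tar.bz2', 'mar': 'mar'},
    'output': 'target.complete.mar',
}
abs_input_dir = '/builds/worker/fetches'                    # hypothetical
abs_output_dir = '/builds/worker/workspace/build/outputs'   # hypothetical

command = ['mach', '--log-no-times', 'repackage'] + list(repack_config['args'])
for name, filename in repack_config['inputs'].items():
    command.extend(['--{}'.format(name), os.path.join(abs_input_dir, filename)])
command.extend(['--output', os.path.join(abs_output_dir, repack_config['output'])])
print(' '.join(command))
# mach --log-no-times repackage mar --input /builds/worker/fetches/target.tar.bz2
#   --mar /builds/worker/fetches/mar
#   --output /builds/worker/workspace/build/outputs/target.complete.mar
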
 
 @transforms.add
 def validate(config, jobs):
     for job in jobs:
         label = job.get('dependent-task', object).__dict__.get('label', '?no-label?')
         validate_schema(
             packaging_description_schema, job,
             "In packaging ({!r} kind) task for {!r}:".format(config.kind, label))
@@ -99,22 +159,27 @@ def copy_in_useful_magic(config, jobs):
         job['build-platform'] = dep.attributes.get("build_platform")
         yield job
 
 
 @transforms.add
 def handle_keyed_by(config, jobs):
     """Resolve fields that can be keyed by platform, etc."""
     fields = [
-        "mozharness.config",
+        'mozharness.config',
+        'package-formats',
     ]
     for job in jobs:
         job = copy.deepcopy(job)  # don't overwrite dict values here
         for field in fields:
-            resolve_keyed_by(item=job, field=field, item_name="?")
+            resolve_keyed_by(
+                item=job, field=field,
+                project=config.params['project'],
+                item_name="?",
+            )
         yield job
 
 
 @transforms.add
 def make_repackage_description(config, jobs):
     for job in jobs:
         dep_job = job['dependent-task']
 
@@ -170,59 +235,79 @@ def make_job_description(config, jobs):
         attributes['repackage_type'] = 'repackage'
 
         locale = None
         if job.get('locale'):
             locale = job['locale']
             attributes['locale'] = locale
 
         level = config.params['level']
+        build_platform = attributes['build_platform']
 
-        build_platform = attributes['build_platform']
+        use_stub = attributes.get('stub-installer')
+
+        repackage_config = []
+        package_formats = job.get('package-formats')
+        if use_stub:
+            package_formats += ['installer-stub']
+        for format in package_formats:
+            command = copy.deepcopy(PACKAGE_FORMATS[format])
+            substs = {
+                'archive_format': archive_format(build_platform),
+                'executable_extension': executable_extension(build_platform),
+            }
+            command['inputs'] = {
+                name: filename.format(**substs)
+                for name, filename in command['inputs'].items()
+            }
+            repackage_config.append(command)
+
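
A small sketch of the {archive_format}/{executable_extension} substitution performed just above, assuming a win64 build platform; the substitution values come from the platforms.py helpers added further down in this patch.

import copy

# Hypothetical input: the 'mar' PACKAGE_FORMATS entry on a win64 build.
command = copy.deepcopy({
    'args': ['mar'],
    'inputs': {'input': 'target{archive_format}', 'mar': 'mar{executable_extension}'},
    'output': 'target.complete.mar',
})
substs = {'archive_format': '.zip', 'executable_extension': '.exe'}
command['inputs'] = {
    name: filename.format(**substs)
    for name, filename in command['inputs'].items()
}
print(command['inputs'])  # {'input': 'target.zip', 'mar': 'mar.exe'}
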
         run = job.get('mozharness', {})
         run.update({
             'using': 'mozharness',
             'script': 'mozharness/scripts/repackage.py',
             'job-script': 'taskcluster/scripts/builder/repackage.sh',
             'actions': ['setup', 'repackage'],
             'extra-workspace-cache-key': 'repackage',
+            'extra-config': {
+                'repackage_config': repackage_config,
+            },
         })
 
         worker = {
-            'artifacts': _generate_task_output_files(dep_job, build_platform,
-                                                     locale=locale,
-                                                     project=config.params["project"]),
             'chain-of-trust': True,
             'max-run-time': 7200 if build_platform.startswith('win') else 3600,
             # Don't add generic artifact directory.
             'skip-artifacts': True,
         }
 
         if locale:
             # Make sure we specify the locale-specific upload dir
             worker.setdefault('env', {}).update(LOCALE=locale)
 
-        use_stub = attributes.get('stub-installer')
-        if not use_stub:
-            worker.setdefault('env', {})['NO_STUB_INSTALLER'] = '1'
-
         if build_platform.startswith('win'):
             worker_type = 'aws-provisioner-v1/gecko-%s-b-win2012' % level
             run['use-magic-mh-args'] = False
         else:
             if build_platform.startswith(('linux', 'macosx')):
                 worker_type = 'aws-provisioner-v1/gecko-%s-b-linux' % level
             else:
                 raise NotImplementedError(
                     'Unsupported build_platform: "{}"'.format(build_platform)
                 )
 
             run['tooltool-downloads'] = 'internal'
             worker['docker-image'] = {"in-tree": "debian7-amd64-build"}
 
+        worker['artifacts'] = _generate_task_output_files(
+            dep_job, worker_type_implementation(worker_type),
+            repackage_config=repackage_config,
+            locale=locale,
+        )
+
         description = (
             "Repackaging for locale '{locale}' for build '"
             "{build_platform}/{build_type}'".format(
                 locale=attributes.get('locale', 'en-US'),
                 build_platform=attributes.get('build_platform'),
                 build_type=attributes.get('build_type')
             )
         )
@@ -237,37 +322,37 @@ def make_job_description(config, jobs):
             'treeherder': treeherder,
             'routes': job.get('routes', []),
             'extra': job.get('extra', {}),
             'worker': worker,
             'run': run,
             'fetches': _generate_download_config(dep_job, build_platform, build_task,
                                                  signing_task, locale=locale,
                                                  project=config.params["project"]),
+            'release-artifacts': [artifact['name'] for artifact in worker['artifacts']]
         }
 
         if build_platform.startswith('macosx'):
             task['toolchains'] = [
                 'linux64-libdmg',
                 'linux64-hfsplus',
                 'linux64-node',
             ]
         yield task
 
 
 def _generate_download_config(task, build_platform, build_task, signing_task, locale=None,
                               project=None):
     locale_path = '{}/'.format(locale) if locale else ''
 
     if build_platform.startswith('linux') or build_platform.startswith('macosx'):
-        tarball_extension = 'bz2' if build_platform.startswith('linux') else 'gz'
         return {
             signing_task: [
                 {
-                    'artifact': '{}target.tar.{}'.format(locale_path, tarball_extension),
+                    'artifact': '{}target{}'.format(locale_path, archive_format(build_platform)),
                     'extract': False,
                 },
             ],
             build_task: [
                 'host/bin/mar',
             ],
         }
     elif build_platform.startswith('win'):
@@ -288,55 +373,29 @@ def _generate_download_config(task, buil
         if use_stub:
             fetch_config[signing_task].append('{}setup-stub.exe'.format(locale_path))
 
         return fetch_config
 
     raise NotImplementedError('Unsupported build_platform: "{}"'.format(build_platform))
 
 
-def _generate_task_output_files(task, build_platform, locale=None, project=None):
+def _generate_task_output_files(task, worker_implementation, repackage_config, locale=None):
     locale_output_path = '{}/'.format(locale) if locale else ''
     artifact_prefix = get_artifact_prefix(task)
 
-    if build_platform.startswith('linux') or build_platform.startswith('macosx'):
-        output_files = [{
-            'type': 'file',
-            'path': '/builds/worker/workspace/build/outputs/{}target.complete.mar'
-                    .format(locale_output_path),
-            'name': '{}/{}target.complete.mar'.format(artifact_prefix, locale_output_path),
-        }]
+    if worker_implementation == ('docker-worker', 'linux'):
+        local_prefix = '/builds/worker/workspace/'
+    elif worker_implementation == ('generic-worker', 'windows'):
+        local_prefix = ''
+    else:
+        raise NotImplementedError(
+            'Unsupported worker implementation: "{}"'.format(worker_implementation))
 
-        if build_platform.startswith('macosx'):
-            output_files.append({
-                'type': 'file',
-                'path': '/builds/worker/workspace/build/outputs/{}target.dmg'
-                        .format(locale_output_path),
-                'name': '{}/{}target.dmg'.format(artifact_prefix, locale_output_path),
-            })
-
-    elif build_platform.startswith('win'):
-        output_files = [{
+    output_files = []
+    for config in repackage_config:
+        output_files.append({
             'type': 'file',
-            'path': 'build/outputs/{}target.installer.exe'.format(locale_output_path),
-            'name': '{}/{}target.installer.exe'.format(artifact_prefix, locale_output_path),
-        }, {
-            'type': 'file',
-            'path': 'build/outputs/{}target.complete.mar'.format(locale_output_path),
-            'name': '{}/{}target.complete.mar'.format(artifact_prefix, locale_output_path),
-        }]
-
-        use_stub = task.attributes.get('stub-installer')
-        if use_stub:
-            output_files.append({
-                'type': 'file',
-                'path': 'build/outputs/{}target.stub-installer.exe'.format(
-                    locale_output_path
-                ),
-                'name': '{}/{}target.stub-installer.exe'.format(
-                    artifact_prefix, locale_output_path
-                ),
-            })
-
-    if output_files:
-        return output_files
-
-    raise NotImplementedError('Unsupported build_platform: "{}"'.format(build_platform))
+            'path': '{}build/outputs/{}{}'
+                    .format(local_prefix, locale_output_path, config['output']),
+            'name': '{}/{}{}'.format(artifact_prefix, locale_output_path, config['output']),
+        })
+    return output_files
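
For reference, a sketch of the artifact definitions the rewritten _generate_task_output_files produces for a docker-worker (linux) repackage with a single 'mar' entry; the locale and artifact prefix are hypothetical.

# Hypothetical values for a linux64 docker-worker l10n repackage job.
artifact_prefix = 'public/build'
locale_output_path = 'de/'
local_prefix = '/builds/worker/workspace/'
repackage_config = [{'output': 'target.complete.mar'}]

output_files = [{
    'type': 'file',
    'path': '{}build/outputs/{}{}'.format(local_prefix, locale_output_path, config['output']),
    'name': '{}/{}{}'.format(artifact_prefix, locale_output_path, config['output']),
} for config in repackage_config]
print(output_files)
# [{'type': 'file',
#   'path': '/builds/worker/workspace/build/outputs/de/target.complete.mar',
#   'name': 'public/build/de/target.complete.mar'}]
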
--- a/taskcluster/taskgraph/transforms/repackage_partner.py
+++ b/taskcluster/taskgraph/transforms/repackage_partner.py
@@ -14,17 +14,20 @@ from taskgraph.util.attributes import co
 from taskgraph.util.schema import (
     validate_schema,
     optionally_keyed_by,
     resolve_keyed_by,
     Schema,
 )
 from taskgraph.util.taskcluster import get_artifact_prefix
 from taskgraph.util.partners import check_if_partners_enabled
+from taskgraph.util.platforms import archive_format, executable_extension
+from taskgraph.util.workertypes import worker_type_implementation
 from taskgraph.transforms.task import task_description_schema
+from taskgraph.transforms.repackage import PACKAGE_FORMATS
 from voluptuous import Any, Required, Optional
 
 transforms = TransformSequence()
 
 # Voluptuous uses marker objects as dictionary *keys*, but they are not
 # comparable, so we cast all of the keys back to regular strings
 task_description_schema = {str(k): v for k, v in task_description_schema.schema.iteritems()}
 
@@ -53,16 +56,18 @@ packaging_description_schema = Schema({
 
     # passed through directly to the job description
     Optional('extra'): task_description_schema['extra'],
 
     # Shipping product and phase
     Optional('shipping-product'): task_description_schema['shipping-product'],
     Optional('shipping-phase'): task_description_schema['shipping-phase'],
 
+    Required('package-formats'): _by_platform([basestring]),
+
     # All l10n jobs use mozharness
     Required('mozharness'): {
         # Config files passed to the mozharness script
         Required('config'): _by_platform([basestring]),
 
         # Additional paths to look for mozharness configs in. These should be
         # relative to the base of the source checkout
         Optional('config-paths'): [basestring],
@@ -95,16 +100,17 @@ def copy_in_useful_magic(config, jobs):
         yield job
 
 
 @transforms.add
 def handle_keyed_by(config, jobs):
     """Resolve fields that can be keyed by platform, etc."""
     fields = [
         "mozharness.config",
+        'package-formats',
     ]
     for job in jobs:
         job = copy.deepcopy(job)  # don't overwrite dict values here
         for field in fields:
             resolve_keyed_by(item=job, field=field, item_name="?")
         yield job
 
 
@@ -143,51 +149,71 @@ def make_job_description(config, jobs):
             elif build_platform.startswith('win') and dependency.endswith('repack'):
                 signing_task = dependency
 
         attributes['repackage_type'] = 'repackage'
 
         level = config.params['level']
         repack_id = job['extra']['repack_id']
 
+        repackage_config = []
+        for format in job.get('package-formats'):
+            command = copy.deepcopy(PACKAGE_FORMATS[format])
+            substs = {
+                'archive_format': archive_format(build_platform),
+                'executable_extension': executable_extension(build_platform),
+            }
+            command['inputs'] = {
+                name: filename.format(**substs)
+                for name, filename in command['inputs'].items()
+            }
+            repackage_config.append(command)
+
         run = job.get('mozharness', {})
         run.update({
             'using': 'mozharness',
             'script': 'mozharness/scripts/repackage.py',
             'job-script': 'taskcluster/scripts/builder/repackage.sh',
             'actions': ['download_input', 'setup', 'repackage'],
             'extra-workspace-cache-key': 'repackage',
+            'extra-config': {
+                'repackage_config': repackage_config,
+            },
         })
 
         worker = {
-            'artifacts': _generate_task_output_files(dep_job, build_platform, partner=repack_id),
             'chain-of-trust': True,
             'max-run-time': 7200 if build_platform.startswith('win') else 3600,
             'taskcluster-proxy': True if get_artifact_prefix(dep_job) else False,
-            'env': {},
+            'env': {
+                'REPACK_ID': repack_id,
+            },
             # Don't add generic artifact directory.
             'skip-artifacts': True,
         }
 
-        worker['env'].update(REPACK_ID=repack_id)
-
         if build_platform.startswith('win'):
             worker_type = 'aws-provisioner-v1/gecko-%s-b-win2012' % level
             run['use-magic-mh-args'] = False
         else:
             if build_platform.startswith('macosx'):
                 worker_type = 'aws-provisioner-v1/gecko-%s-b-linux' % level
             else:
                 raise NotImplementedError(
                     'Unsupported build_platform: "{}"'.format(build_platform)
                 )
 
             run['tooltool-downloads'] = 'internal'
             worker['docker-image'] = {"in-tree": "debian7-amd64-build"}
 
+        worker['artifacts'] = _generate_task_output_files(
+            dep_job, worker_type_implementation(worker_type),
+            repackage_config, partner=repack_id,
+        )
+
         description = (
             "Repackaging for repack_id '{repack_id}' for build '"
             "{build_platform}/{build_type}'".format(
                 repack_id=job['extra']['repack_id'],
                 build_platform=attributes.get('build_platform'),
                 build_type=attributes.get('build_type')
             )
         )
@@ -240,35 +266,33 @@ def _generate_download_config(task, buil
                 },
                 '{}setup.exe'.format(locale_path),
             ],
         }
 
     raise NotImplementedError('Unsupported build_platform: "{}"'.format(build_platform))
 
 
-def _generate_task_output_files(task, build_platform, partner):
+def _generate_task_output_files(task, worker_implementation, repackage_config, partner):
     """We carefully generate an explicit list here, but there's an artifacts directory
     too, courtesy of generic_worker_add_artifacts() (windows) or docker_worker_add_artifacts().
     Any errors here are likely masked by that.
     """
     partner_output_path = '{}/'.format(partner)
     artifact_prefix = get_artifact_prefix(task)
 
-    if build_platform.startswith('macosx'):
-        output_files = [{
-            'type': 'file',
-            'path': '/builds/worker/workspace/build/outputs/{}target.dmg'
-                    .format(partner_output_path),
-            'name': '{}/{}target.dmg'.format(artifact_prefix, partner_output_path),
-        }]
+    if worker_implementation == ('docker-worker', 'linux'):
+        local_prefix = '/builds/worker/workspace/'
+    elif worker_implementation == ('generic-worker', 'windows'):
+        local_prefix = ''
+    else:
+        raise NotImplementedError(
+            'Unsupported worker implementation: "{}"'.format(worker_implementation))
 
-    elif build_platform.startswith('win'):
-        output_files = [{
+    output_files = []
+    for config in repackage_config:
+        output_files.append({
             'type': 'file',
-            'path': 'build/outputs/{}target.installer.exe'.format(partner_output_path),
-            'name': '{}/{}target.installer.exe'.format(artifact_prefix, partner_output_path),
-        }]
-
-    if output_files:
-        return output_files
-
-    raise NotImplementedError('Unsupported build_platform: "{}"'.format(build_platform))
+            'path': '{}build/outputs/{}{}'
+                    .format(local_prefix, partner_output_path, config['output']),
+            'name': '{}/{}{}'.format(artifact_prefix, partner_output_path, config['output']),
+        })
+    return output_files
--- a/taskcluster/taskgraph/transforms/repackage_signing.py
+++ b/taskcluster/taskgraph/transforms/repackage_signing.py
@@ -2,25 +2,26 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 """
 Transform the repackage signing task into an actual task description.
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
+import os
+
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.util.attributes import copy_attributes_from_dependent_job
 from taskgraph.util.schema import validate_schema, Schema
 from taskgraph.util.scriptworker import (
     add_scope_prefix,
     get_signing_cert_scope_per_platform,
     get_worker_type_for_scope,
 )
-from taskgraph.util.taskcluster import get_artifact_path
 from taskgraph.transforms.task import task_description_schema
 from voluptuous import Required, Optional
 
 # Voluptuous uses marker objects as dictionary *keys*, but they are not
 # comparable, so we cast all of the keys back to regular strings
 task_description_schema = {str(k): v for k, v in task_description_schema.schema.iteritems()}
 
 transforms = TransformSequence()
@@ -29,16 +30,23 @@ repackage_signing_description_schema = S
     Required('dependent-task'): object,
     Required('depname', default='repackage'): basestring,
     Optional('label'): basestring,
     Optional('treeherder'): task_description_schema['treeherder'],
     Optional('shipping-product'): task_description_schema['shipping-product'],
     Optional('shipping-phase'): task_description_schema['shipping-phase'],
 })
 
+SIGNING_FORMATS = {
+    'target.complete.mar': ["mar_sha384"],
+    'target.bz2.complete.mar': ["mar"],
+    "target.installer.exe": ["sha2signcode"],
+    "target.stub-installer.exe": ["sha2signcodestub"],
+}
+
 
 @transforms.add
 def validate(config, jobs):
     for job in jobs:
         label = job.get('dependent-task', object).__dict__.get('label', '?no-label?')
         validate_schema(
             repackage_signing_description_schema, job,
             "In repackage-signing ({!r} kind) task for {!r}:".format(config.kind, label))
@@ -72,70 +80,52 @@ def make_repackage_signing_description(c
         dependencies = {"repackage": dep_job.label}
 
         signing_dependencies = dep_job.dependencies
         # This is so we get the build task etc in our dependencies to
         # have better beetmover support.
         dependencies.update({k: v for k, v in signing_dependencies.items()
                              if k != 'docker-image'})
 
-        locale_str = ""
         if dep_job.attributes.get('locale'):
             treeherder['symbol'] = 'rs({})'.format(dep_job.attributes.get('locale'))
             attributes['locale'] = dep_job.attributes.get('locale')
-            locale_str = "{}/".format(dep_job.attributes.get('locale'))
 
         description = (
             "Signing of repackaged artifacts for locale '{locale}' for build '"
             "{build_platform}/{build_type}'".format(
                 locale=attributes.get('locale', 'en-US'),
                 build_platform=attributes.get('build_platform'),
                 build_type=attributes.get('build_type')
             )
         )
 
         build_platform = dep_job.attributes.get('build_platform')
         is_nightly = dep_job.attributes.get('nightly')
         signing_cert_scope = get_signing_cert_scope_per_platform(
             build_platform, is_nightly, config
         )
-        scopes = [signing_cert_scope, add_scope_prefix(config, 'signing:format:mar_sha384')]
+        scopes = [signing_cert_scope]
 
-        upstream_artifacts = [{
-            "taskId": {"task-reference": "<repackage>"},
-            "taskType": "repackage",
-            "paths": [
-                get_artifact_path(dep_job, "{}target.complete.mar".format(locale_str)),
-            ],
-            "formats": ["mar_sha384"]
-        }]
-        if 'win' in build_platform:
-            upstream_artifacts.append({
-                "taskId": {"task-reference": "<repackage>"},
-                "taskType": "repackage",
-                "paths": [
-                    get_artifact_path(dep_job, "{}target.installer.exe".format(locale_str)),
-                ],
-                "formats": ["sha2signcode"]
-            })
-            scopes.append(add_scope_prefix(config, "signing:format:sha2signcode"))
-
-            use_stub = attributes.get('stub-installer')
-            if use_stub:
+        upstream_artifacts = []
+        for artifact in dep_job.release_artifacts:
+            basename = os.path.basename(artifact)
+            if basename in SIGNING_FORMATS:
                 upstream_artifacts.append({
                     "taskId": {"task-reference": "<repackage>"},
                     "taskType": "repackage",
-                    "paths": [
-                        get_artifact_path(
-                            dep_job, "{}target.stub-installer.exe".format(locale_str)
-                        ),
-                    ],
-                    "formats": ["sha2signcodestub"]
+                    "paths": [artifact],
+                    "formats": SIGNING_FORMATS[os.path.basename(artifact)],
                 })
-                scopes.append(add_scope_prefix(config, "signing:format:sha2signcodestub"))
+
+        scopes += list({
+            add_scope_prefix(config, 'signing:format:{}'.format(format))
+            for artifact in upstream_artifacts
+            for format in artifact['formats']
+        })
 
         task = {
             'label': label,
             'description': description,
             'worker-type': get_worker_type_for_scope(config, signing_cert_scope),
             'worker': {'implementation': 'scriptworker-signing',
                        'upstream-artifacts': upstream_artifacts,
                        'max-run-time': 3600},
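
A rough sketch of how the SIGNING_FORMATS lookup above turns a dependent task's release artifacts into upstream-artifacts entries and signing-format scopes. The artifact paths are hypothetical, and the 'project:releng:' scope prefix is an assumption standing in for add_scope_prefix().

import os

SIGNING_FORMATS = {
    'target.complete.mar': ['mar_sha384'],
    'target.bz2.complete.mar': ['mar'],
    'target.installer.exe': ['sha2signcode'],
    'target.stub-installer.exe': ['sha2signcodestub'],
}

# Hypothetical release artifacts advertised by the upstream repackage task.
release_artifacts = [
    'public/build/de/target.complete.mar',
    'public/build/de/target.installer.exe',
]

upstream_artifacts = [{
    'taskId': {'task-reference': '<repackage>'},
    'taskType': 'repackage',
    'paths': [artifact],
    'formats': SIGNING_FORMATS[os.path.basename(artifact)],
} for artifact in release_artifacts if os.path.basename(artifact) in SIGNING_FORMATS]

scopes = sorted({
    'project:releng:signing:format:{}'.format(fmt)  # prefix is an assumption
    for artifact in upstream_artifacts
    for fmt in artifact['formats']
})
print(scopes)
# ['project:releng:signing:format:mar_sha384', 'project:releng:signing:format:sha2signcode']
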
--- a/taskcluster/taskgraph/transforms/task.py
+++ b/taskcluster/taskgraph/transforms/task.py
@@ -28,16 +28,17 @@ from taskgraph.util.schema import (
     resolve_keyed_by,
     OptimizationSchema,
 )
 from taskgraph.util.scriptworker import (
     BALROG_ACTIONS,
     get_release_config,
     add_scope_prefix,
 )
+from taskgraph.util.signed_artifacts import get_signed_artifacts
 from voluptuous import Any, Required, Optional, Extra
 from taskgraph import GECKO, MAX_DEPENDENCIES
 from ..util import docker as dockerutil
 
 RUN_TASK = os.path.join(GECKO, 'taskcluster', 'scripts', 'run-task')
 
 
 @memoize
@@ -208,16 +209,19 @@ task_description_schema = Schema({
     # the provisioner-id/worker-type for the task.  The following parameters will
     # be substituted in this string:
     #  {level} -- the scm level of this push
     'worker-type': basestring,
 
     # Whether the job should use sccache compiler caching.
     Required('needs-sccache'): bool,
 
+    # Set of artifacts relevant to release tasks
+    Optional('release-artifacts'): [basestring],
+
     # information specific to the worker implementation that will run this task
     'worker': Any({
         Required('implementation'): Any('docker-worker', 'docker-engine'),
         Required('os'): 'linux',
 
         # For tasks that will run in docker-worker or docker-engine, this is the
         # name of the docker image or in-tree docker image to run the task in.  If
         # in-tree, then a dependency will be created automatically.  This is
@@ -537,16 +541,19 @@ task_description_schema = Schema({
         Optional('platforms'): [basestring],
         Optional('release-eta'): basestring,
         Optional('channel-names'): optionally_keyed_by('project', [basestring]),
         Optional('require-mirrors'): bool,
         Optional('publish-rules'): optionally_keyed_by('project', [int]),
         Optional('rules-to-update'): optionally_keyed_by('project', [basestring]),
         Optional('archive-domain'): optionally_keyed_by('project', basestring),
         Optional('download-domain'): optionally_keyed_by('project', basestring),
+        Optional('blob-suffix'): basestring,
+        Optional('complete-mar-filename-pattern'): basestring,
+        Optional('complete-mar-bouncer-product-pattern'): basestring,
 
         # list of artifact URLs for the artifacts that should be beetmoved
         Optional('upstream-artifacts'): [{
             # taskId of the task with the artifact
             Required('taskId'): taskref_or_string,
 
             # type of signing task (for CoT)
             Required('taskType'): basestring,
@@ -1032,16 +1039,26 @@ def build_generic_worker_payload(config,
 def build_scriptworker_signing_payload(config, task, task_def):
     worker = task['worker']
 
     task_def['payload'] = {
         'maxRunTime': worker['max-run-time'],
         'upstreamArtifacts':  worker['upstream-artifacts']
     }
 
+    artifacts = set(task.get('release-artifacts', []))
+    for upstream_artifact in worker['upstream-artifacts']:
+        for path in upstream_artifact['paths']:
+            artifacts.update(get_signed_artifacts(
+                input=path,
+                formats=upstream_artifact['formats'],
+            ))
+
+    task['release-artifacts'] = list(artifacts)
+
 
 @payload_builder('binary-transparency')
 def build_binary_transparency_payload(config, task, task_def):
     release_config = get_release_config(config)
 
     task_def['payload'] = {
         'version': release_config['version'],
         'chain': 'TRANSPARENCY.pem',
@@ -1126,16 +1143,20 @@ def build_balrog_payload(config, task, t
                     worker, prop, task['description'],
                     **config.params
                 )
         task_def['payload'] = {
             'build_number': release_config['build_number'],
             'product': worker['product'],
             'version': release_config['version'],
         }
+        for prop in ('blob-suffix', 'complete-mar-filename-pattern',
+                     'complete-mar-bouncer-product-pattern'):
+            if prop in worker:
+                task_def['payload'][prop.replace('-', '_')] = worker[prop]
         if worker['balrog-action'] == 'submit-toplevel':
             task_def['payload'].update({
                 'app_version': release_config['appVersion'],
                 'archive_domain': worker['archive-domain'],
                 'channel_names': worker['channel-names'],
                 'download_domain': worker['download-domain'],
                 'partial_versions': release_config.get('partial_versions', ""),
                 'platforms': worker['platforms'],
@@ -1731,16 +1752,17 @@ def build_task(config, tasks):
                 env['MOZ_AUTOMATION'] = '1'
 
         yield {
             'label': task['label'],
             'task': task_def,
             'dependencies': task.get('dependencies', {}),
             'attributes': attributes,
             'optimization': task.get('optimization', None),
+            'release-artifacts': task.get('release-artifacts', []),
         }
 
 
 @transforms.add
 def chain_of_trust(config, tasks):
     for task in tasks:
         if task['task'].get('payload', {}).get('features', {}).get('chainOfTrust'):
             image = task.get('dependencies', {}).get('docker-image')
--- a/taskcluster/taskgraph/util/platforms.py
+++ b/taskcluster/taskgraph/util/platforms.py
@@ -9,12 +9,35 @@ import re
 # platform family is extracted from build platform by taking the alphabetic prefix
 # and then translating win -> windows
 _platform_re = re.compile(r'^[a-z]*')
 _renames = {
     'win': 'windows'
 }
 
 
+_archive_formats = {
+    'linux': '.tar.bz2',
+    'macosx': '.tar.gz',
+    'windows': '.zip',
+}
+
+_executable_extension = {
+    'linux': '',
+    'macosx': '',
+    'windows': '.exe',
+}
+
+
 def platform_family(build_platform):
     """Given a build platform, return the platform family (linux, macosx, etc.)"""
     family = _platform_re.match(build_platform).group(0)
     return _renames.get(family, family)
+
+
+def archive_format(build_platform):
+    """Given a build platform, return the archive format used on the platform."""
+    return _archive_formats[platform_family(build_platform)]
+
+
+def executable_extension(build_platform):
+    """Given a build platform, return the executable extension used on the platform."""
+    return _executable_extension[platform_family(build_platform)]
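
Expected behaviour of the two new helpers, as a quick sketch; this assumes it runs from inside a gecko checkout so the taskgraph module is importable, and the example platform names are illustrative.

from taskgraph.util.platforms import archive_format, executable_extension

# Values follow directly from the tables above.
assert archive_format('linux64') == '.tar.bz2'
assert archive_format('macosx64-nightly') == '.tar.gz'
assert archive_format('win32') == '.zip'
assert executable_extension('win64') == '.exe'
assert executable_extension('linux32') == ''
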
--- a/taskcluster/taskgraph/util/signed_artifacts.py
+++ b/taskcluster/taskgraph/util/signed_artifacts.py
@@ -92,8 +92,23 @@ def _strip_widevine_for_partners(artifac
     """ Partner repacks should not re-sign anything that was previously signed, for fear of
     breaking partial updates
     """
     for spec in artifacts_specifications:
         if 'widevine' in spec['formats']:
             spec['formats'].remove('widevine')
 
     return artifacts_specifications
+
+
+def get_signed_artifacts(input, formats):
+    """
+    Get the list of signed artifacts for the given input and formats.
+    """
+    artifacts = set()
+    if input.endswith('.dmg'):
+        artifacts.add(input.replace('.dmg', '.tar.gz'))
+    else:
+        artifacts.add(input)
+    if 'gpg' in formats:
+        artifacts.add('{}.asc'.format(input))
+
+    return artifacts
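
Quick illustrative checks for the new helper, assuming a gecko checkout on the import path; the artifact paths and format names here are illustrative only.

from taskgraph.util.signed_artifacts import get_signed_artifacts

# The helper maps a .dmg input to a .tar.gz signed output, and a 'gpg' format
# adds a detached .asc signature next to the input name.
assert get_signed_artifacts('public/build/target.dmg', formats=['macapp']) == \
    {'public/build/target.tar.gz'}
assert get_signed_artifacts('public/build/target.installer.exe',
                            formats=['sha2signcode', 'gpg']) == \
    {'public/build/target.installer.exe', 'public/build/target.installer.exe.asc'}
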
--- a/testing/marionette/harness/marionette_harness/tests/unit/test_screenshot.py
+++ b/testing/marionette/harness/marionette_harness/tests/unit/test_screenshot.py
@@ -289,16 +289,17 @@ class TestScreenCaptureContent(WindowMan
     def test_capture_tab_already_closed(self):
         tab = self.open_tab()
         self.marionette.switch_to_window(tab)
         self.marionette.close()
 
         self.assertRaises(NoSuchWindowException, self.marionette.screenshot)
         self.marionette.switch_to_window(self.start_tab)
 
+    @skip_if_mobile("Bug 1487124 - Android needs its own maximum allowed dimensions")
     def test_capture_vertical_bounds(self):
         self.marionette.navigate(inline("<body style='margin-top: 32768px'>foo"))
         screenshot = self.marionette.screenshot()
         self.assert_png(screenshot)
 
     def test_capture_element(self):
         self.marionette.navigate(box)
         el = self.marionette.find_element(By.TAG_NAME, "div")
--- a/testing/mozbase/mozdevice/setup.py
+++ b/testing/mozbase/mozdevice/setup.py
@@ -3,17 +3,17 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this file,
 # You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import absolute_import
 
 from setuptools import setup
 
 PACKAGE_NAME = 'mozdevice'
-PACKAGE_VERSION = '1.0.1'
+PACKAGE_VERSION = '1.1.0'
 
 deps = ['mozfile >= 1.0',
         'mozlog >= 3.0',
         'moznetwork >= 0.24',
         'mozprocess >= 0.19',
         ]
 
 setup(name=PACKAGE_NAME,
new file mode 100644
--- /dev/null
+++ b/testing/mozharness/configs/repackage/base.py
@@ -0,0 +1,5 @@
+config = {
+    "installer-tag": "browser/installer/windows/app.tag",
+    "stub-installer-tag": "browser/installer/windows/stub.tag",
+    "sfx-stub": "other-licenses/7zstub/firefox/7zSD.sfx",
+}
--- a/testing/mozharness/configs/repackage/linux32_signed.py
+++ b/testing/mozharness/configs/repackage/linux32_signed.py
@@ -1,20 +1,13 @@
 import os
 
 platform = "linux32"
 
 config = {
     "locale": os.environ.get("LOCALE"),
 
-    "repackage_config": [[
-        "mar",
-        "-i", "{abs_input_dir}/target.tar.bz2",
-        "--mar", "{abs_input_dir}/mar",
-        "-o", "{abs_output_dir}/target.complete.mar"
-    ]],
-
     # ToolTool
     "tooltool_url": 'http://relengapi/tooltool/',
     'tooltool_cache': os.environ.get('TOOLTOOL_CACHE'),
 
     'run_configure': False,
 }
--- a/testing/mozharness/configs/repackage/linux64_signed.py
+++ b/testing/mozharness/configs/repackage/linux64_signed.py
@@ -1,20 +1,13 @@
 import os
 
 platform = "linux64"
 
 config = {
     "locale": os.environ.get("LOCALE"),
 
-    "repackage_config": [[
-        "mar",
-        "-i", "{abs_input_dir}/target.tar.bz2",
-        "--mar", "{abs_input_dir}/mar",
-        "-o", "{abs_output_dir}/target.complete.mar"
-    ]],
-
     # ToolTool
     "tooltool_url": 'http://relengapi/tooltool/',
     'tooltool_cache': os.environ.get('TOOLTOOL_CACHE'),
 
     'run_configure': False,
 }
--- a/testing/mozharness/configs/repackage/osx_partner.py
+++ b/testing/mozharness/configs/repackage/osx_partner.py
@@ -1,17 +1,11 @@
 import os
 
 config = {
     "src_mozconfig": "browser/config/mozconfigs/macosx64/repack",
 
     "repack_id": os.environ.get("REPACK_ID"),
 
-    "repackage_config": [[
-        "dmg",
-        "-i", "{abs_input_dir}/target.tar.gz",
-        "-o", "{abs_output_dir}/target.dmg"
-    ]],
-
     # ToolTool
     "tooltool_url": 'http://relengapi/tooltool/',
     'tooltool_cache': os.environ.get('TOOLTOOL_CACHE'),
 }
--- a/testing/mozharness/configs/repackage/osx_signed.py
+++ b/testing/mozharness/configs/repackage/osx_signed.py
@@ -1,22 +1,11 @@
 import os
 
 config = {
     "src_mozconfig": "browser/config/mozconfigs/macosx64/repack",
 
     "locale": os.environ.get("LOCALE"),
 
-    "repackage_config": [[
-        "dmg",
-        "-i", "{abs_input_dir}/target.tar.gz",
-        "-o", "{abs_output_dir}/target.dmg"
-    ], [
-        "mar",
-        "-i", "{abs_input_dir}/target.tar.gz",
-        "--mar", "{abs_input_dir}/mar",
-        "-o", "{abs_output_dir}/target.complete.mar"
-    ]],
-
     # ToolTool
     "tooltool_url": 'http://relengapi/tooltool/',
     'tooltool_cache': os.environ.get('TOOLTOOL_CACHE'),
 }
--- a/testing/mozharness/configs/repackage/win32_partner.py
+++ b/testing/mozharness/configs/repackage/win32_partner.py
@@ -1,26 +1,14 @@
 import os
 
 platform = "win32"
 
-repackage_config = [[
-        "installer",
-        "--package-name", "firefox",
-        "--package", "{abs_input_dir}\\target.zip",
-        "--tag", "{abs_mozilla_dir}\\browser\\installer\\windows\\app.tag",
-        "--setupexe", "{abs_input_dir}\\setup.exe",
-        "-o", "{abs_output_dir}\\target.installer.exe",
-        "--sfx-stub", "other-licenses/7zstub/firefox/7zSD.sfx",
-    ]]
-
 config = {
     "repack_id": os.environ.get("REPACK_ID"),
 
-    "repackage_config": repackage_config,
-
     # ToolTool
     "tooltool_manifest_src": 'browser\\config\\tooltool-manifests\\{}\\releng.manifest'.format(platform),
     'tooltool_url': 'https://tooltool.mozilla-releng.net/',
     'tooltool_cache': os.environ.get('TOOLTOOL_CACHE'),
 
     'run_configure': False,
 }
--- a/testing/mozharness/configs/repackage/win32_signed.py
+++ b/testing/mozharness/configs/repackage/win32_signed.py
@@ -1,41 +1,14 @@
 import os
 
 platform = "win32"
 
-repackage_config = [[
-        "installer",
-        "--package-name", "firefox",
-        "--package", "{abs_input_dir}\\target.zip",
-        "--tag", "{abs_mozilla_dir}\\browser\\installer\\windows\\app.tag",
-        "--setupexe", "{abs_input_dir}\\setup.exe",
-        "-o", "{abs_output_dir}\\target.installer.exe",
-        "--sfx-stub", "other-licenses/7zstub/firefox/7zSD.sfx",
-    ], [
-        "mar",
-        "-i", "{abs_input_dir}\\target.zip",
-        "--mar", "{abs_input_dir}\\mar.exe",
-        "-o", "{abs_output_dir}\\target.complete.mar",
-    ]]
-
-if not os.environ.get("NO_STUB_INSTALLER"):
-    # Some channels, like esr don't build a stub installer
-    repackage_config.append([
-        "installer",
-        "--tag", "{abs_mozilla_dir}\\browser\\installer\\windows\\stub.tag",
-        "--setupexe", "{abs_input_dir}\\setup-stub.exe",
-        "-o", "{abs_output_dir}\\target.stub-installer.exe",
-        "--sfx-stub", "other-licenses/7zstub/firefox/7zSD.sfx",
-    ])
-
 config = {
     "locale": os.environ.get("LOCALE"),
 
-    "repackage_config": repackage_config,
-
     # ToolTool
     "tooltool_manifest_src": 'browser\\config\\tooltool-manifests\\{}\\releng.manifest'.format(platform),
     'tooltool_url': 'https://tooltool.mozilla-releng.net/',
     'tooltool_cache': os.environ.get('TOOLTOOL_CACHE'),
 
     'run_configure': False,
 }
--- a/testing/mozharness/configs/repackage/win64_partner.py
+++ b/testing/mozharness/configs/repackage/win64_partner.py
@@ -1,26 +1,14 @@
 import os
 
 platform = "win64"
 
-repackage_config = [[
-        "installer",
-        "--package-name", "firefox",
-        "--package", "{abs_input_dir}\\target.zip",
-        "--tag", "{abs_mozilla_dir}\\browser\\installer\\windows\\app.tag",
-        "--setupexe", "{abs_input_dir}\\setup.exe",
-        "-o", "{abs_output_dir}\\target.installer.exe",
-        "--sfx-stub", "other-licenses/7zstub/firefox/7zSD.sfx",
-    ]]
-
 config = {
     "repack_id": os.environ.get("REPACK_ID"),
 
-    "repackage_config": repackage_config,
-
     # ToolTool
     "tooltool_manifest_src": 'browser\\config\\tooltool-manifests\\{}\\releng.manifest'.format(platform),
     'tooltool_url': 'https://tooltool.mozilla-releng.net/',
     'tooltool_cache': os.environ.get('TOOLTOOL_CACHE'),
 
     'run_configure': False,
 }
--- a/testing/mozharness/configs/repackage/win64_signed.py
+++ b/testing/mozharness/configs/repackage/win64_signed.py
@@ -1,31 +1,14 @@
 import os
 
 platform = "win64"
 
-repackage_config = [[
-        "installer",
-        "--package-name", "firefox",
-        "--package", "{abs_input_dir}\\target.zip",
-        "--tag", "{abs_mozilla_dir}\\browser\\installer\\windows\\app.tag",
-        "--setupexe", "{abs_input_dir}\\setup.exe",
-        "-o", "{abs_output_dir}\\target.installer.exe",
-        "--sfx-stub", "other-licenses/7zstub/firefox/7zSD.sfx",
-    ], [
-        "mar",
-        "-i", "{abs_input_dir}\\target.zip",
-        "--mar", "{abs_input_dir}\\mar.exe",
-        "-o", "{abs_output_dir}\\target.complete.mar",
-    ]]
-
 config = {
     "locale": os.environ.get("LOCALE"),
 
-    "repackage_config": repackage_config,
-
     # ToolTool
     "tooltool_manifest_src": 'browser\\config\\tooltool-manifests\\{}\\releng.manifest'.format(platform),
     'tooltool_url': 'https://tooltool.mozilla-releng.net/',
     'tooltool_cache': os.environ.get('TOOLTOOL_CACHE'),
 
     'run_configure': False,
 }
--- a/testing/mozharness/scripts/repackage.py
+++ b/testing/mozharness/scripts/repackage.py
@@ -28,34 +28,30 @@ class Repackage(BaseScript):
 
         self._run_tooltool()
 
         mar_path = os.path.join(dirs['abs_input_dir'], 'mar')
         if self._is_windows():
             mar_path += '.exe'
         if mar_path:
             self.chmod(mar_path, 0755)
-
         if self.config.get("run_configure", True):
             self._get_mozconfig()
             self._run_configure()
 
     def query_abs_dirs(self):
         if self.abs_dirs:
             return self.abs_dirs
         abs_dirs = super(Repackage, self).query_abs_dirs()
         config = self.config
 
         dirs = {}
         dirs['abs_tools_dir'] = os.path.join(abs_dirs['abs_work_dir'], 'tools')
         dirs['abs_mozilla_dir'] = os.path.join(abs_dirs['abs_work_dir'], 'src')
-        dirs['abs_input_dir'] = os.path.join(
-            abs_dirs['base_work_dir'],
-            os.environ.get('MOZ_FETCHES_DIR', 'fetches'),
-        )
+        dirs['abs_input_dir'] = os.path.join(abs_dirs['base_work_dir'], 'fetches')
         output_dir_suffix = []
         if config.get('locale'):
             output_dir_suffix.append(config['locale'])
         if config.get('repack_id'):
             output_dir_suffix.append(config['repack_id'])
         dirs['abs_output_dir'] = os.path.join(
             abs_dirs['abs_work_dir'], 'outputs', *output_dir_suffix)
         for key in dirs.keys():
@@ -63,23 +59,37 @@ class Repackage(BaseScript):
                 abs_dirs[key] = dirs[key]
         self.abs_dirs = abs_dirs
         return self.abs_dirs
 
     def repackage(self):
         config = self.config
         dirs = self.query_abs_dirs()
 
+        subst = {
+            'sfx-stub': config['sfx-stub'],
+            'installer-tag': config['installer-tag'],
+            'stub-installer-tag': config['stub-installer-tag'],
+        }
+        subst.update(dirs)
+
         # Make sure the upload dir is around.
         self.mkdir_p(dirs['abs_output_dir'])
 
         for repack_config in config["repackage_config"]:
-            command = [sys.executable, 'mach', '--log-no-times', 'repackage'] + \
-                [arg.format(**dirs)
-                    for arg in list(repack_config)]
+            command = [sys.executable, 'mach', '--log-no-times', 'repackage']
+            command.extend([arg.format(**subst) for arg in repack_config['args']])
+            for arg, filename in repack_config['inputs'].items():
+                command.extend([
+                    '--{}'.format(arg),
+                    os.path.join(dirs['abs_input_dir'], filename),
+                ])
+            command.extend([
+                '--output', os.path.join(dirs['abs_output_dir'], repack_config['output']),
+            ])
             self.run_command(
                 command=command,
                 cwd=dirs['abs_mozilla_dir'],
                 halt_on_failure=True,
             )
 
     def _run_tooltool(self):
         config = self.config
--- a/testing/web-platform/meta/MANIFEST.json
+++ b/testing/web-platform/meta/MANIFEST.json
@@ -311074,16 +311074,26 @@
      {}
     ]
    ],
    "webdriver/tests/switch_to_window/__init__.py": [
     [
      {}
     ]
    ],
+   "webdriver/tests/take_element_screenshot/__init__.py": [
+    [
+     {}
+    ]
+   ],
+   "webdriver/tests/take_screenshot/__init__.py": [
+    [
+     {}
+    ]
+   ],
    "webgl/META.yml": [
     [
      {}
     ]
    ],
    "webgl/common.js": [
     [
      {}
@@ -424226,16 +424236,44 @@
      {}
     ]
    ],
    "webdriver/tests/switch_to_window/switch.py": [
     [
      "/webdriver/tests/switch_to_window/switch.py",
      {}
     ]
+   ],
+   "webdriver/tests/take_element_screenshot/screenshot.py": [
+    [
+     "/webdriver/tests/take_element_screenshot/screenshot.py",
+     {}
+    ]
+   ],
+   "webdriver/tests/take_element_screenshot/user_prompts.py": [
+    [
+     "/webdriver/tests/take_element_screenshot/user_prompts.py",
+     {
+      "timeout": "long"
+     }
+    ]
+   ],
+   "webdriver/tests/take_screenshot/screenshot.py": [
+    [
+     "/webdriver/tests/take_screenshot/screenshot.py",
+     {}
+    ]
+   ],
+   "webdriver/tests/take_screenshot/user_prompts.py": [
+    [
+     "/webdriver/tests/take_screenshot/user_prompts.py",
+     {
+      "timeout": "long"
+     }
+    ]
    ]
   }
  },
  "paths": {
   ".codecov.yml": [
    "904cf85a4fd7576bb470172ade7317de94435dd0",
    "support"
   ],
@@ -650046,16 +650084,40 @@
   "webdriver/tests/switch_to_window/__init__.py": [
    "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391",
    "support"
   ],
   "webdriver/tests/switch_to_window/switch.py": [
    "907be66a149e8196c87760544140636d9625bbb9",
    "wdspec"
   ],
+  "webdriver/tests/take_element_screenshot/__init__.py": [
+   "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391",
+   "support"
+  ],
+  "webdriver/tests/take_element_screenshot/screenshot.py": [
+   "e4d2869af4f3a50aa4da87b499d84b75254967a7",
+   "wdspec"
+  ],
+  "webdriver/tests/take_element_screenshot/user_prompts.py": [
+   "fa239999e4f3479526423498f5718a455ffde53f",
+   "wdspec"
+  ],
+  "webdriver/tests/take_screenshot/__init__.py": [
+   "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391",
+   "support"
+  ],
+  "webdriver/tests/take_screenshot/screenshot.py": [
+   "d3153710f7b108f167a4ff82b9ef288bcfa9440c",
+   "wdspec"
+  ],
+  "webdriver/tests/take_screenshot/user_prompts.py": [
+   "4d4840f08ba6f57f3e22bda72a0071128bea9be0",
+   "wdspec"
+  ],
   "webgl/META.yml": [
    "8e6b680d91c9b81b85e3ce010c5fb9fe338f37bf",
    "support"
   ],
   "webgl/bufferSubData.html": [
    "a97df9062d6ea964e500059e155b29604edc21b6",
    "testharness"
   ],
new file mode 100644
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/tests/webdriver/tests/take_element_screenshot/screenshot.py
@@ -0,0 +1,40 @@
+import base64
+import imghdr
+
+from tests.support.asserts import assert_error, assert_success
+from tests.support.inline import inline
+
+
+def take_element_screenshot(session, element_id):
+    return session.transport.send(
+        "GET",
+        "session/{session_id}/element/{element_id}/screenshot".format(
+            session_id=session.session_id,
+            element_id=element_id,
+        )
+    )
+
+
+def test_no_browsing_context(session, closed_window):
+    response = take_element_screenshot(session, "foo")
+    assert_error(response, "no such window")
+
+
+def test_screenshot(session):
+    session.url = inline("<input>")
+    element = session.find.css("input", all=False)
+
+    response = take_element_screenshot(session, element.id)
+    value = assert_success(response)
+
+    image = base64.decodestring(value)
+    assert imghdr.what("", image) == "png"
+
+
+def test_stale(session):
+    session.url = inline("<input>")
+    element = session.find.css("input", all=False)
+    session.refresh()
+
+    result = take_element_screenshot(session, element.id)
+    assert_error(result, "stale element reference")
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/tests/webdriver/tests/take_element_screenshot/user_prompts.py
@@ -0,0 +1,74 @@
+# META: timeout=long
+
+import base64
+import imghdr
+
+import pytest
+
+from tests.support.asserts import assert_success
+from tests.support.inline import inline
+
+
+def take_element_screenshot(session, element_id):
+    return session.transport.send(
+        "GET",
+        "session/{session_id}/element/{element_id}/screenshot".format(
+            session_id=session.session_id,
+            element_id=element_id,
+        )
+    )
+
+
+@pytest.fixture
+def check_user_prompt_not_closed_without_exception(session, create_dialog):
+    def check_user_prompt_not_closed_without_exception(dialog_type):
+        session.url = inline("<input/>")
+        element = session.find.css("input", all=False)
+
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = take_element_screenshot(session, element.id)
+        value = assert_success(response)
+
+        image = base64.decodestring(value)
+        assert imghdr.what("", image) == "png"
+
+        assert session.alert.text == dialog_type
+        session.alert.dismiss()
+
+    return check_user_prompt_not_closed_without_exception
+
+
+@pytest.mark.capabilities({"unhandledPromptBehavior": "accept"})
+@pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
+def test_accept(check_user_prompt_not_closed_without_exception, dialog_type):
+    check_user_prompt_not_closed_without_exception(dialog_type)
+
+
+@pytest.mark.capabilities({"unhandledPromptBehavior": "accept and notify"})
+@pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
+def test_accept_and_notify(check_user_prompt_not_closed_without_exception, dialog_type):
+    check_user_prompt_not_closed_without_exception(dialog_type)
+
+
+@pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss"})
+@pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
+def test_dismiss(check_user_prompt_not_closed_without_exception, dialog_type):
+    check_user_prompt_not_closed_without_exception(dialog_type)
+
+
+@pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss and notify"})
+@pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
+def test_dismiss_and_notify(check_user_prompt_not_closed_without_exception, dialog_type):
+    check_user_prompt_not_closed_without_exception(dialog_type)
+
+
+@pytest.mark.capabilities({"unhandledPromptBehavior": "ignore"})
+@pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
+def test_ignore(check_user_prompt_not_closed_without_exception, dialog_type):
+    check_user_prompt_not_closed_without_exception(dialog_type)
+
+
+@pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
+def test_default(check_user_prompt_not_closed_without_exception, dialog_type):
+    check_user_prompt_not_closed_without_exception(dialog_type)
new file mode 100644
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/tests/webdriver/tests/take_screenshot/screenshot.py
@@ -0,0 +1,25 @@
+import base64
+import imghdr
+
+from tests.support.asserts import assert_error, assert_success
+from tests.support.inline import inline
+
+
+def take_screenshot(session):
+    return session.transport.send(
+        "GET", "session/{session_id}/screenshot".format(**vars(session)))
+
+
+def test_no_browsing_context(session, closed_window):
+    response = take_screenshot(session)
+    assert_error(response, "no such window")
+
+
+def test_screenshot(session):
+    session.url = inline("<input>")
+
+    response = take_screenshot(session)
+    value = assert_success(response)
+
+    image = base64.decodestring(value)
+    assert imghdr.what("", image) == "png"
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/tests/webdriver/tests/take_screenshot/user_prompts.py
@@ -0,0 +1,68 @@
+# META: timeout=long
+
+import base64
+import imghdr
+
+import pytest
+
+from tests.support.asserts import assert_success
+from tests.support.inline import inline
+
+
+def take_screenshot(session):
+    return session.transport.send(
+        "GET", "session/{session_id}/screenshot".format(**vars(session)))
+
+
+@pytest.fixture
+def check_user_prompt_not_closed_without_exception(session, create_dialog):
+    def check_user_prompt_not_closed_without_exception(dialog_type):
+        session.url = inline("<input/>")
+
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = take_screenshot(session)
+        value = assert_success(response)
+
+        image = base64.decodestring(value)
+        assert imghdr.what("", image) == "png"
+
+        assert session.alert.text == dialog_type
+        session.alert.dismiss()
+
+    return check_user_prompt_not_closed_without_exception
+
+
+@pytest.mark.capabilities({"unhandledPromptBehavior": "accept"})
+@pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
+def test_accept(check_user_prompt_not_closed_without_exception, dialog_type):
+    check_user_prompt_not_closed_without_exception(dialog_type)
+
+
+@pytest.mark.capabilities({"unhandledPromptBehavior": "accept and notify"})
+@pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
+def test_accept_and_notify(check_user_prompt_not_closed_without_exception, dialog_type):
+    check_user_prompt_not_closed_without_exception(dialog_type)
+
+
+@pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss"})
+@pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
+def test_dismiss(check_user_prompt_not_closed_without_exception, dialog_type):
+    check_user_prompt_not_closed_without_exception(dialog_type)
+
+
+@pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss and notify"})
+@pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
+def test_dismiss_and_notify(check_user_prompt_not_closed_without_exception, dialog_type):
+    check_user_prompt_not_closed_without_exception(dialog_type)
+
+
+@pytest.mark.capabilities({"unhandledPromptBehavior": "ignore"})
+@pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
+def test_ignore(check_user_prompt_not_closed_without_exception, dialog_type):
+    check_user_prompt_not_closed_without_exception(dialog_type)
+
+
+@pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
+def test_default(check_user_prompt_not_closed_without_exception, dialog_type):
+    check_user_prompt_not_closed_without_exception(dialog_type)
--- a/testing/webdriver/src/command.rs
+++ b/testing/webdriver/src/command.rs
@@ -59,17 +59,17 @@ pub enum WebDriverCommand<T: WebDriverEx
     ElementSendKeys(WebElement, SendKeysParameters),
     PerformActions(ActionsParameters),
     ReleaseActions,
     DismissAlert,
     AcceptAlert,
     GetAlertText,
     SendAlertText(SendKeysParameters),
     TakeScreenshot,
-    TakeElementScreenshot(TakeScreenshotParameters),
+    TakeElementScreenshot(WebElement),
     Status,
     Extension(T),
 }
 
 pub trait WebDriverExtensionCommand: Clone + Send + PartialEq {
     fn parameters_json(&self) -> Option<Value>;
 }
 
@@ -325,17 +325,23 @@ impl<U: WebDriverExtensionRoute> WebDriv
             Route::DismissAlert => WebDriverCommand::DismissAlert,
             Route::AcceptAlert => WebDriverCommand::AcceptAlert,
             Route::GetAlertText => WebDriverCommand::GetAlertText,
             Route::SendAlertText => {
                 WebDriverCommand::SendAlertText(serde_json::from_str(raw_body)?)
             }
             Route::TakeScreenshot => WebDriverCommand::TakeScreenshot,
             Route::TakeElementScreenshot => {
-                WebDriverCommand::TakeElementScreenshot(serde_json::from_str(raw_body)?)
+                let element_id = try_opt!(
+                    params.name("elementId"),
+                    ErrorStatus::InvalidArgument,
+                    "Missing elementId parameter"
+                );
+                let element = WebElement::new(element_id.as_str().into());
+                WebDriverCommand::TakeElementScreenshot(element)
             }
             Route::Status => WebDriverCommand::Status,
             Route::Extension(ref extension) => try!(extension.command(params, &body_data)),
         };
         Ok(WebDriverMessage::new(session_id, command))
     }
 
     fn get_session_id(params: &Captures) -> Option<String> {
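
The command.rs hunk above now derives Take Element Screenshot's target from the `elementId` URL capture instead of a JSON body. A minimal sketch of that idea in isolation, assuming the `regex` crate and a hypothetical `ElementHandle` stand-in; the real code uses the crate's `Route` table, `WebElement`, and the `try_opt!` macro shown above, and the ids below are made up:

```rust
extern crate regex; // assumed: regex = "1"

use regex::Regex;

#[derive(Debug)]
struct ElementHandle(String); // stand-in for webdriver's WebElement

fn parse_take_element_screenshot(path: &str) -> Option<ElementHandle> {
    // Hypothetical route pattern for GET /session/{id}/element/{id}/screenshot.
    let route = Regex::new(
        r"^/session/(?P<sessionId>[^/]+)/element/(?P<elementId>[^/]+)/screenshot$",
    )
    .ok()?;
    let caps = route.captures(path)?;
    // A missing capture yields None here, mirroring the "Missing elementId
    // parameter" invalid-argument error in the patch.
    caps.name("elementId")
        .map(|m| ElementHandle(m.as_str().to_string()))
}

fn main() {
    let handle =
        parse_take_element_screenshot("/session/7c03f6b7/element/e1a2b3c4/screenshot");
    println!("{:?}", handle); // Some(ElementHandle("e1a2b3c4"))
}
```
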
--- a/third_party/rust/aho-corasick/.cargo-checksum.json
+++ b/third_party/rust/aho-corasick/.cargo-checksum.json
@@ -1,1 +1,1 @@
-{"files":{".travis.yml":"e17babe5ba0bdd19ec59a37b4a099fd4313bff58be63a2ff506075f9a97dc172","COPYING":"01c266bced4a434da0051174d6bee16a4c82cf634e2679b6155d40d75012390f","Cargo.toml":"b3fa06c2147a4749cd984ded69024ddcc8b7d578ab763b60227b3ba474c3ec70","LICENSE-MIT":"0f96a83840e146e43c0ec96a22ec1f392e0680e6c1226e6f3ba87e0740af850f","Makefile":"a45a128685a2ae7d4fa39d310786674417ee113055ef290a11f88002285865fc","README.md":"9bc60d2cec222b50f87c85cf9475349bb228a36f89796c5d6481c52560ddde3a","UNLICENSE":"7e12e5df4bae12cb21581ba157ced20e1986a0508dd10d0e8a4ab9a4cf94e85c","benches/bench.rs":"acf4844efadeafc7bc396c2b16f2a184e140b6c17d1084dbaf454196de2090cd","benches/random.txt":"9386fb3efedc7ffbd09fb49088347f1056bc2d90a861009fa2f804cdb714efcb","ctags.rust":"3d128d3cc59f702e68953ba2fe6c3f46bc6991fc575308db060482d5da0c79f3","examples/dict-search.rs":"30eb44b1a0b599507db4c23a90f74199faabc64a8ae1d603ecdf3bba7428eb1e","session.vim":"95cb1d7caf0ff7fbe76ec911988d908ddd883381c925ba64b537695bc9f021c4","src/autiter.rs":"98c31a7fbe21cfacaa858f90409f0d86edd46dda1b7651f4e800d929a50afb7b","src/full.rs":"b83a9c8ff3ef611c316b68650915df2d7f361a49b59dab103dc2c5476f2d8303","src/lib.rs":"68bf2ed02d58bebee6f7f7579038f1e4b60a2c4acc334263cb837bcbe15ffe94","src/main.rs":"fc867cb5f0b02d0f49ecab06b72c05a247cbcf3bf9228c235de8e787bda7bef5"},"package":"500909c4f87a9e52355b26626d890833e9e1d53ac566db76c36faa984b889699"}
\ No newline at end of file
+{"files":{".travis.yml":"5749b97291a6f4e6cc0b308d2d410e1ae20530f0976575581270d3d4eb60ed19","COPYING":"01c266bced4a434da0051174d6bee16a4c82cf634e2679b6155d40d75012390f","Cargo.toml":"64af7d0a0a299d3a05f52baed2ea87cc0d783c4fdeaa555a595aed92e5d780a6","LICENSE-MIT":"0f96a83840e146e43c0ec96a22ec1f392e0680e6c1226e6f3ba87e0740af850f","Makefile":"a45a128685a2ae7d4fa39d310786674417ee113055ef290a11f88002285865fc","README.md":"9fb3256ce6fc5b25c9a92fe3b8f7a82a26d380fcf6121c934c2bb6f85102fede","UNLICENSE":"7e12e5df4bae12cb21581ba157ced20e1986a0508dd10d0e8a4ab9a4cf94e85c","benches/bench.rs":"c74b297ed2217e4784614573a8b44c61dfe5173985c43616e43a3f608973ff2e","benches/random.txt":"9386fb3efedc7ffbd09fb49088347f1056bc2d90a861009fa2f804cdb714efcb","ci/script.sh":"061b81cf1dba37d34063500117f5aa164919445472086665549ef2aa6a43bf77","ctags.rust":"3d128d3cc59f702e68953ba2fe6c3f46bc6991fc575308db060482d5da0c79f3","examples/dict-search.rs":"0dac88736039262dfb8df9ff85ece18de90dd4da9b8b895cf4d57bf33167b224","session.vim":"95cb1d7caf0ff7fbe76ec911988d908ddd883381c925ba64b537695bc9f021c4","src/autiter.rs":"b3bd067bf6c25708109870afbb5d09e9a04d0f2f596f59c368e42ddbc7c65f93","src/full.rs":"acb6389d210e7bc4125f0ebb6f268b712e60ec50b58b9c4094a8083bb8f4caf2","src/lib.rs":"fdddbb688fbc71461242b94e1cfb3dfbf0a9f06b2c70a6e83de15a0e54804a28","src/main.rs":"fc867cb5f0b02d0f49ecab06b72c05a247cbcf3bf9228c235de8e787bda7bef5"},"package":"68f56c7353e5a9547cbd76ed90f7bb5ffc3ba09d4ea9bd1d8c06c8b1142eeb5a"}
\ No newline at end of file
--- a/third_party/rust/aho-corasick/.travis.yml
+++ b/third_party/rust/aho-corasick/.travis.yml
@@ -1,13 +1,10 @@
 language: rust
 rust:
-  - 1.12.0
+  - 1.13.0
   - stable
   - beta
   - nightly
-script:
-  - cargo build --verbose
-  - cargo test --verbose
-  - cargo doc
-  - if [ "$TRAVIS_RUST_VERSION" = "nightly" ]; then
-      cargo bench --verbose;
-    fi
+script: ci/script.sh
+branches:
+  only:
+    - master
--- a/third_party/rust/aho-corasick/Cargo.toml
+++ b/third_party/rust/aho-corasick/Cargo.toml
@@ -1,47 +1,72 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g. crates.io) dependencies
+#
+# If you believe there's an error in this file please file an
+# issue against the rust-lang/cargo repository. If you're
+# editing this file be aware that the upstream Cargo.toml
+# will likely look very different (and much more reasonable)
+
 [package]
 name = "aho-corasick"
-version = "0.6.3"  #:version
+version = "0.6.8"
 authors = ["Andrew Gallant <jamslam@gmail.com>"]
+exclude = ["benches/sherlock.txt"]
 description = "Fast multiple substring searching with finite state machines."
-documentation = "http://burntsushi.net/rustdoc/aho_corasick/"
 homepage = "https://github.com/BurntSushi/aho-corasick"
-repository = "https://github.com/BurntSushi/aho-corasick"
 readme = "README.md"
 keywords = ["string", "search", "text", "aho", "corasick"]
 license = "Unlicense/MIT"
-exclude = ["benches/sherlock.txt"]
-
-[lib]
-name = "aho_corasick"
-
-[[bin]]
-name = "aho-corasick-dot"
-test = false
-doc = false
-bench = false
-
-[dependencies]
-memchr = "1"
-
-[dev-dependencies]
-csv = "0.15"
-docopt = "0.7"
-memmap = "0.5"
-quickcheck = { version = "0.4", default-features = false }
-rand = "0.3"
-rustc-serialize = "0.3"
-
-[[bench]]
-name = "bench"
-path = "benches/bench.rs"
-test = false
-bench = true
-
+repository = "https://github.com/BurntSushi/aho-corasick"
 [profile.test]
 debug = true
 
 [profile.bench]
 debug = true
 
 [profile.release]
 debug = true
+
+[lib]
+name = "aho_corasick"
+
+[[bin]]
+name = "aho-corasick-dot"
+path = "src/main.rs"
+test = false
+bench = false
+doc = false
+
+[[bench]]
+name = "bench"
+path = "benches/bench.rs"
+test = false
+bench = true
+[dependencies.memchr]
+version = "2"
+[dev-dependencies.csv]
+version = "1"
+
+[dev-dependencies.docopt]
+version = "1"
+
+[dev-dependencies.memmap]
+version = "0.6"
+
+[dev-dependencies.quickcheck]
+version = "0.7"
+default-features = false
+
+[dev-dependencies.rand]
+version = "0.5"
+
+[dev-dependencies.serde]
+version = "1"
+
+[dev-dependencies.serde_derive]
+version = "1"
+[badges.travis-ci]
+repository = "BurntSushi/aho-corasick"
--- a/third_party/rust/aho-corasick/README.md
+++ b/third_party/rust/aho-corasick/README.md
@@ -12,17 +12,17 @@ the length of the search text and `m` is
 [![Build status](https://api.travis-ci.org/BurntSushi/aho-corasick.png)](https://travis-ci.org/BurntSushi/aho-corasick)
 [![](http://meritbadge.herokuapp.com/aho-corasick)](https://crates.io/crates/aho-corasick)
 
 Dual-licensed under MIT or the [UNLICENSE](http://unlicense.org).
 
 
 ### Documentation
 
-[http://burntsushi.net/rustdoc/aho_corasick/](http://burntsushi.net/rustdoc/aho_corasick/).
+[https://docs.rs/aho-corasick/](https://docs.rs/aho-corasick/).
 
 
 ### Example
 
 The documentation contains several examples, and there is a more complete
 example as a full program in `examples/dict-search.rs`.
 
 Here is a quick example showing simple substring matching:
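
The quick example the README points at falls outside this hunk. As a stand-in, a minimal sketch against the vendored 0.6 API, mirroring the doc test visible in the src/lib.rs hunk further down; the patterns and haystack are illustrative:

```rust
extern crate aho_corasick;

use aho_corasick::{AcAutomaton, Automaton};

fn main() {
    let aut = AcAutomaton::new(vec!["apple", "maple"]);
    // find() yields non-overlapping matches; each Match carries the pattern
    // index plus byte offsets.
    for m in aut.find("I like maple apples.") {
        println!("pattern {} at {}..{}", aut.pattern(m.pati), m.start, m.end);
    }
}
```
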
--- a/third_party/rust/aho-corasick/benches/bench.rs
+++ b/third_party/rust/aho-corasick/benches/bench.rs
@@ -52,16 +52,50 @@ fn bench_full_aut_overlapping_no_match<P
 
 fn bench_naive_no_match<S>(b: &mut Bencher, needles: Vec<S>, haystack: &str)
         where S: Into<String> {
     b.bytes = haystack.len() as u64;
     let needles: Vec<String> = needles.into_iter().map(Into::into).collect();
     b.iter(|| assert!(!naive_find(&needles, haystack)));
 }
 
+#[bench]
+fn bench_construction(b: &mut Bencher) {
+    b.iter(|| {
+        AcAutomaton::new(test::black_box(
+            [
+                "ADL", "ADl", "AdL", "Adl", "BAK", "BAk", "BAK", "BaK", "Bak", "BaK", "HOL",
+                "HOl", "HoL", "Hol", "IRE", "IRe", "IrE", "Ire", "JOH", "JOh", "JoH", "Joh", "SHE",
+                "SHe", "ShE", "She", "WAT", "WAt", "WaT", "Wat", "aDL", "aDl", "adL", "adl", "bAK",
+                "bAk", "bAK", "baK", "bak", "baK", "hOL", "hOl", "hoL", "hol", "iRE", "iRe",
+                "irE", "ire", "jOH", "jOh", "joH", "joh", "sHE", "sHe", "shE", "she", "wAT", "wAt",
+                "waT", "wat", "ſHE", "ſHe", "ſhE", "ſhe",
+            ].iter()
+                .map(|x| *x),
+        ))
+    })
+}
+
+#[bench]
+fn bench_full_construction(b: &mut Bencher) {
+    b.iter(|| {
+        AcAutomaton::new(test::black_box(
+            [
+                "ADL", "ADl", "AdL", "Adl", "BAK", "BAk", "BAK", "BaK", "Bak", "BaK", "HOL",
+                "HOl", "HoL", "Hol", "IRE", "IRe", "IrE", "Ire", "JOH", "JOh", "JoH", "Joh", "SHE",
+                "SHe", "ShE", "She", "WAT", "WAt", "WaT", "Wat", "aDL", "aDl", "adL", "adl", "bAK",
+                "bAk", "bAK", "baK", "bak", "baK", "hOL", "hOl", "hoL", "hol", "iRE", "iRe",
+                "irE", "ire", "jOH", "jOh", "joH", "joh", "sHE", "sHe", "shE", "she", "wAT", "wAt",
+                "waT", "wat", "ſHE", "ſHe", "ſhE", "ſhe",
+            ].iter()
+                .map(|x| *x),
+        )).into_full()
+    })
+}
+
 fn haystack_same(letter: char) -> String {
     iter::repeat(letter).take(10000).collect()
 }
 
 macro_rules! aut_benches {
     ($prefix:ident, $aut:expr, $bench:expr) => {
         mod $prefix {
 #![allow(unused_imports)]
new file mode 100755
--- /dev/null
+++ b/third_party/rust/aho-corasick/ci/script.sh
@@ -0,0 +1,20 @@
+#!/bin/sh
+
+set -ex
+
+cargo build --verbose
+cargo doc --verbose
+
+# If we're testing on an older version of Rust, then only check that we
+# can build the crate. This is because the dev dependencies might be updated
+# more frequently, and therefore might require a newer version of Rust.
+#
+# This isn't ideal. It's a compromise.
+if [ "$TRAVIS_RUST_VERSION" = "1.13.0" ]; then
+  exit
+fi
+
+cargo test --verbose
+if [ "$TRAVIS_RUST_VERSION" = "nightly" ]; then
+  cargo bench --verbose --no-run
+fi
--- a/third_party/rust/aho-corasick/examples/dict-search.rs
+++ b/third_party/rust/aho-corasick/examples/dict-search.rs
@@ -1,25 +1,27 @@
 // This example demonstrates how to use the Aho-Corasick algorithm to rapidly
 // scan text for matches in a large dictionary of keywords. This example by
 // default reads your system's dictionary (~120,000 words).
 extern crate aho_corasick;
 extern crate csv;
 extern crate docopt;
 extern crate memmap;
-extern crate rustc_serialize;
+extern crate serde;
+#[macro_use]
+extern crate serde_derive;
 
 use std::error::Error;
 use std::fs::File;
 use std::io::{self, BufRead, Write};
 use std::process;
 
 use aho_corasick::{Automaton, AcAutomaton, Match};
 use docopt::Docopt;
-use memmap::{Mmap, Protection};
+use memmap::Mmap;
 
 static USAGE: &'static str = "
 Usage: dict-search [options] <input>
        dict-search --help
 
 Options:
     -d <path>, --dict <path>   Path to dictionary of keywords to search.
                                [default: /usr/share/dict/words]
@@ -28,30 +30,30 @@ Options:
     --overlapping              Report overlapping matches.
     -c, --count                Show only the number of matches.
     --memory-usage             Show memory usage of automaton.
     --full                     Use fully expanded transition matrix.
                                Warning: may use lots of memory.
     -h, --help                 Show this usage message.
 ";
 
-#[derive(Clone, Debug, RustcDecodable)]
+#[derive(Clone, Debug, Deserialize)]
 struct Args {
     arg_input: String,
     flag_dict: String,
     flag_min_len: usize,
     flag_overlapping: bool,
     flag_memory_usage: bool,
     flag_full: bool,
     flag_count: bool,
 }
 
 fn main() {
     let args: Args = Docopt::new(USAGE)
-                            .and_then(|d| d.decode())
+                            .and_then(|d| d.deserialize())
                             .unwrap_or_else(|e| e.exit());
     match run(&args) {
         Ok(()) => {}
         Err(err) => {
             writeln!(&mut io::stderr(), "{}", err).unwrap();
             process::exit(1);
         }
     }
@@ -65,76 +67,61 @@ fn run(args: &Args) -> Result<(), Box<Er
             (aut.heap_bytes(), aut.num_states())
         } else {
             (aut.heap_bytes(), aut.num_states())
         };
         println!("{} bytes, {} states", bytes, states);
         return Ok(());
     }
 
+    let rdr = try!(File::open(&args.arg_input));
     if args.flag_full {
         let aut = aut.into_full();
         if args.flag_overlapping {
             if args.flag_count {
-                let mmap = Mmap::open_path(
-                    &args.arg_input, Protection::Read).unwrap();
-                let text = unsafe { mmap.as_slice() };
-                println!("{}", aut.find_overlapping(text).count());
+                let mmap = unsafe { try!(Mmap::map(&rdr)) };
+                println!("{}", aut.find_overlapping(&*mmap).count());
             } else {
-                let rdr = try!(File::open(&args.arg_input));
                 try!(write_matches(&aut, aut.stream_find_overlapping(rdr)));
             }
         } else {
             if args.flag_count {
-                let mmap = Mmap::open_path(
-                    &args.arg_input, Protection::Read).unwrap();
-                let text = unsafe { mmap.as_slice() };
-                println!("{}", aut.find(text).count());
+                let mmap = unsafe { try!(Mmap::map(&rdr)) };
+                println!("{}", aut.find(&*mmap).count());
             } else {
-                let rdr = try!(File::open(&args.arg_input));
                 try!(write_matches(&aut, aut.stream_find(rdr)));
             }
         }
     } else {
         if args.flag_overlapping {
             if args.flag_count {
-                let mmap = Mmap::open_path(
-                    &args.arg_input, Protection::Read).unwrap();
-                let text = unsafe { mmap.as_slice() };
-                println!("{}", aut.find_overlapping(text).count());
+                let mmap = unsafe { try!(Mmap::map(&rdr)) };
+                println!("{}", aut.find_overlapping(&*mmap).count());
             } else {
-                let rdr = try!(File::open(&args.arg_input));
                 try!(write_matches(&aut, aut.stream_find_overlapping(rdr)));
             }
         } else {
             if args.flag_count {
-                let mmap = Mmap::open_path(
-                    &args.arg_input, Protection::Read).unwrap();
-                let text = unsafe { mmap.as_slice() };
-                println!("{}", aut.find(text).count());
+                let mmap = unsafe { try!(Mmap::map(&rdr)) };
+                println!("{}", aut.find(&*mmap).count());
             } else {
-                let rdr = try!(File::open(&args.arg_input));
                 try!(write_matches(&aut, aut.stream_find(rdr)));
             }
         }
     }
     Ok(())
 }
 
 fn write_matches<A, I>(aut: &A, it: I) -> Result<(), Box<Error>>
         where A: Automaton<String>, I: Iterator<Item=io::Result<Match>> {
     let mut wtr = csv::Writer::from_writer(io::stdout());
-    try!(wtr.write(["pattern", "start", "end"].iter()));
+    try!(wtr.serialize(("pattern", "start", "end")));
     for m in it {
         let m = try!(m);
-        try!(wtr.write([
-            aut.pattern(m.pati),
-            &m.start.to_string(),
-            &m.end.to_string(),
-        ].iter()));
+        try!(wtr.serialize((aut.pattern(m.pati), m.start, m.end)));
     }
     try!(wtr.flush());
     Ok(())
 }
 
 fn build_automaton(
     dict_path: &str,
     min_len: usize,
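
dict-search.rs feeds the automaton from a reader (a `File`, or an `Mmap` when counting). A minimal self-contained sketch of that streaming path, using an in-memory `Cursor` instead of the system dictionary; the patterns and haystack are illustrative:

```rust
extern crate aho_corasick;

use std::io::Cursor;

use aho_corasick::{AcAutomaton, Automaton};

fn main() {
    let aut = AcAutomaton::new(vec!["fox", "dog"]);
    // stream_find() takes any io::Read and yields io::Result<Match>, which is
    // what write_matches() above iterates over.
    let reader = Cursor::new("the quick brown fox jumps over the lazy dog".as_bytes());
    for m in aut.stream_find(reader) {
        let m = m.expect("read error");
        println!("{} at {}..{}", aut.pattern(m.pati), m.start, m.end);
    }
}
```
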
--- a/third_party/rust/aho-corasick/src/autiter.rs
+++ b/third_party/rust/aho-corasick/src/autiter.rs
@@ -37,17 +37,17 @@ pub trait Automaton<P> {
     #[inline]
     fn len(&self) -> usize {
         self.patterns().len()
     }
 
     /// Returns true if the automaton has no patterns.
     #[inline]
     fn is_empty(&self) -> bool {
-        self.len() == 0
+        self.patterns().is_empty()
     }
 
     /// Returns an iterator of non-overlapping matches in `s`.
     fn find<'a, 's, Q: ?Sized + AsRef<[u8]>>(
         &'a self,
         s: &'s Q,
     ) -> Matches<'a, 's, P, Self>
     where Self: Sized {
@@ -376,17 +376,17 @@ impl<'a, R: io::Read, P, A: Automaton<P>
 
     fn next(&mut self) -> Option<io::Result<Match>> {
         let mut m = None;
         let mut consumed = 0;
 'LOOP:  loop {
             self.buf.consume(consumed);
             let bs = match self.buf.fill_buf() {
                 Err(err) => return Some(Err(err)),
-                Ok(bs) if bs.len() == 0 => break,
+                Ok(bs) if bs.is_empty() => break,
                 Ok(bs) => bs,
             };
             consumed = bs.len(); // is shortened if we find a match
             for (i, &b) in bs.iter().enumerate() {
                 self.si = self.aut.next_state(self.si, b);
                 if self.aut.has_match(self.si, 0) {
                     m = Some(Ok(self.aut.get_match(self.si, 0, self.texti)));
                     consumed = i + 1;
@@ -500,17 +500,17 @@ impl<'a, R: io::Read, P, A: Automaton<P>
         }
         let mut m = None;
         let mut consumed = 0;
         self.outi = 0;
 'LOOP:  loop {
             self.buf.consume(consumed);
             let bs = match self.buf.fill_buf() {
                 Err(err) => return Some(Err(err)),
-                Ok(bs) if bs.len() == 0 => break,
+                Ok(bs) if bs.is_empty() => break,
                 Ok(bs) => bs,
             };
             consumed = bs.len(); // is shortened if we find a match
             for (i, &b) in bs.iter().enumerate() {
                 self.si = self.aut.next_state(self.si, b);
                 if self.aut.has_match(self.si, self.outi) {
                     m = Some(Ok(self.aut.get_match(
                         self.si, self.outi, self.texti)));
--- a/third_party/rust/aho-corasick/src/full.rs
+++ b/third_party/rust/aho-corasick/src/full.rs
@@ -39,33 +39,33 @@ impl<P: AsRef<[u8]>> FullAcAutomaton<P> 
         fac.start_bytes = ac.start_bytes;
         fac
     }
 
     #[doc(hidden)]
     pub fn memory_usage(&self) -> usize {
         self.pats.iter()
             .map(|p| vec_bytes() + p.as_ref().len())
-            .fold(0, |a, b| a + b)
+            .sum::<usize>()
         + (4 * self.trans.len())
         + self.out.iter()
               .map(|v| vec_bytes() + (usize_bytes() * v.len()))
-              .fold(0, |a, b| a + b)
+              .sum::<usize>()
         + self.start_bytes.len()
     }
 
     #[doc(hidden)]
     pub fn heap_bytes(&self) -> usize {
         self.pats.iter()
             .map(|p| mem::size_of::<P>() + p.as_ref().len())
-            .fold(0, |a, b| a + b)
+            .sum::<usize>()
         + (4 * self.trans.len())
         + self.out.iter()
               .map(|v| vec_bytes() + (usize_bytes() * v.len()))
-              .fold(0, |a, b| a + b)
+              .sum::<usize>()
         + self.start_bytes.len()
     }
 
     fn set(&mut self, si: StateIdx, i: u8, goto: StateIdx) {
         let ns = self.num_states();
         self.trans[i as usize * ns + si as usize] = goto;
     }
 
@@ -114,23 +114,32 @@ impl<P: AsRef<[u8]>> Automaton<P> for Fu
     fn pattern(&self, i: usize) -> &P {
         &self.pats[i]
     }
 }
 
 impl<P: AsRef<[u8]>> FullAcAutomaton<P> {
     fn build_matrix<T: Transitions>(&mut self, ac: &AcAutomaton<P, T>) {
         for (si, s) in ac.states.iter().enumerate().skip(1) {
-            for b in (0..256).map(|b| b as u8) {
-                self.set(si as StateIdx, b, ac.next_state(si as StateIdx, b));
-            }
-            for &pati in &s.out {
-                self.out[si].push(pati);
-            }
+            self.set_states(ac, si as StateIdx);
+            self.out[si].extend_from_slice(&s.out);
         }
     }
+
+    fn set_states<T: Transitions>(&mut self, ac: &AcAutomaton<P, T>, si: StateIdx) {
+        let current_state = &ac.states[si as usize];
+        let first_fail_state = current_state.fail;
+        current_state.for_each_transition(move |b, maybe_si| {
+            let goto = if maybe_si == FAIL_STATE {
+                ac.memoized_next_state(self, si, first_fail_state, b)
+            } else {
+                maybe_si
+            };
+            self.set(si, b, goto);
+        });
+    }
 }
 
 impl<P: AsRef<[u8]> + fmt::Debug> fmt::Debug for FullAcAutomaton<P> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         write!(f, "FullAcAutomaton({:?})", self.pats)
     }
 }
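
The full.rs change fills each state's dense 256-entry transition row by reusing rows already computed for earlier states (`memoized_next_state`) rather than walking fail links from scratch; callers still opt into the dense matrix through `into_full()` exactly as before. A minimal caller-side sketch, with an illustrative pattern set:

```rust
extern crate aho_corasick;

use aho_corasick::{AcAutomaton, Automaton};

fn main() {
    // into_full() is what drives build_matrix()/set_states() above: every
    // state gets a dense row, trading memory for faster matching.
    let full = AcAutomaton::new(vec!["he", "she", "his", "hers"]).into_full();
    for m in full.find("ushers") {
        println!("{} at {}..{}", full.pattern(m.pati), m.start, m.end);
    }
}
```
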
--- a/third_party/rust/aho-corasick/src/lib.rs
+++ b/third_party/rust/aho-corasick/src/lib.rs
@@ -116,18 +116,20 @@ let aut = AcAutomaton::<&str, Sparse>::w
 let matches: Vec<_> = aut.find("abc").collect();
 assert_eq!(matches, vec![Match { pati: 1, start: 0, end: 1}]);
 ```
 */
 
 #![deny(missing_docs)]
 
 extern crate memchr;
-#[cfg(test)] extern crate quickcheck;
-#[cfg(test)] extern crate rand;
+#[cfg(test)]
+extern crate quickcheck;
+#[cfg(test)]
+extern crate rand;
 
 use std::collections::VecDeque;
 use std::fmt;
 use std::iter::FromIterator;
 use std::mem;
 
 pub use self::autiter::{
     Automaton, Match,
@@ -236,34 +238,57 @@ impl<P: AsRef<[u8]>, T: Transitions> AcA
     pub fn num_states(&self) -> usize {
         self.states.len()
     }
 
     #[doc(hidden)]
     pub fn heap_bytes(&self) -> usize {
         self.pats.iter()
             .map(|p| mem::size_of::<P>() + p.as_ref().len())
-            .fold(0, |a, b| a + b)
+            .sum::<usize>()
         + self.states.iter()
               .map(|s| mem::size_of::<State<T>>() + s.heap_bytes())
-              .fold(0, |a, b| a + b)
+              .sum::<usize>()
         + self.start_bytes.len()
     }
+
+    // The states of `full_automaton` should be set for all states < si
+    fn memoized_next_state(
+        &self,
+        full_automaton: &FullAcAutomaton<P>,
+        current_si: StateIdx,
+        mut si: StateIdx,
+        b: u8,
+    ) -> StateIdx {
+        loop {
+            if si < current_si {
+                return full_automaton.next_state(si, b);
+            }
+            let state = &self.states[si as usize];
+            let maybe_si = state.goto(b);
+            if maybe_si != FAIL_STATE {
+                return maybe_si;
+            } else {
+                si = state.fail;
+            }
+        }
+    }
 }
 
 impl<P: AsRef<[u8]>, T: Transitions> Automaton<P> for AcAutomaton<P, T> {
     #[inline]
     fn next_state(&self, mut si: StateIdx, b: u8) -> StateIdx {
         loop {
-            let maybe_si = self.states[si as usize].goto(b);
+            let state = &self.states[si as usize];
+            let maybe_si = state.goto(b);
             if maybe_si != FAIL_STATE {
                 si = maybe_si;
                 break;
             } else {
-                si = self.states[si as usize].fail;
+                si = state.fail;
             }
         }
         si
     }
 
     #[inline]
     fn get_match(&self, si: StateIdx, outi: usize, texti: usize) -> Match {
         let pati = self.states[si as usize].out[outi];
@@ -292,16 +317,39 @@ impl<P: AsRef<[u8]>, T: Transitions> Aut
     }
 
     #[inline]
     fn pattern(&self, i: usize) -> &P {
         &self.pats[i]
     }
 }
 
+// `(0..256).map(|b| b as u8)` optimizes poorly in debug builds so
+// we use this small explicit iterator instead
+struct AllBytesIter(i32);
+impl Iterator for AllBytesIter {
+    type Item = u8;
+    #[inline]
+    fn next(&mut self) -> Option<Self::Item> {
+        if self.0 < 256 {
+            let b = self.0 as u8;
+            self.0 += 1;
+            Some(b)
+        } else {
+            None
+        }
+    }
+}
+
+impl AllBytesIter {
+    fn new() -> AllBytesIter {
+        AllBytesIter(0)
+    }
+}
+
 // Below contains code for *building* the automaton. It's a reasonably faithful
 // translation of the description/pseudo-code from:
 // http://www.cs.uku.fi/~kilpelai/BSA05/lectures/slides04.pdf
 
 impl<P: AsRef<[u8]>, T: Transitions> AcAutomaton<P, T> {
     // This is the first phase and builds the initial keyword tree.
     fn build(mut self, pats: Vec<P>) -> AcAutomaton<P, T> {
         for (pati, pat) in pats.iter().enumerate() {
@@ -316,21 +364,24 @@ impl<P: AsRef<[u8]>, T: Transitions> AcA
                     let depth = self.states[previ as usize].depth + 1;
                     let nexti = self.add_state(State::new(depth));
                     self.states[previ as usize].set_goto(b, nexti);
                     previ = nexti;
                 }
             }
             self.states[previ as usize].out.push(pati);
         }
-        for c in (0..256).into_iter().map(|c| c as u8) {
-            if self.states[ROOT_STATE as usize].goto(c) == FAIL_STATE {
-                self.states[ROOT_STATE as usize].set_goto(c, ROOT_STATE);
-            } else {
-                self.start_bytes.push(c);
+        {
+            let root_state = &mut self.states[ROOT_STATE as usize];
+            for c in AllBytesIter::new() {
+                if root_state.goto(c) == FAIL_STATE {
+                    root_state.set_goto(c, ROOT_STATE);
+                } else {
+                    self.start_bytes.push(c);
+                }
             }
         }
         // If any of the start bytes are non-ASCII, then remove them all,
         // because we don't want to be calling memchr on non-ASCII bytes.
         // (Well, we could, but it requires being more clever. Simply using
         // the prefix byte isn't good enough.)
         if self.start_bytes.iter().any(|&b| b > 0x7F) {
             self.start_bytes.clear();
@@ -339,36 +390,55 @@ impl<P: AsRef<[u8]>, T: Transitions> AcA
         self.fill()
     }
 
     // The second phase that fills in the back links.
     fn fill(mut self) -> AcAutomaton<P, T> {
         // Fill up the queue with all non-root transitions out of the root
         // node. Then proceed by breadth first traversal.
         let mut q = VecDeque::new();
-        for c in (0..256).into_iter().map(|c| c as u8) {
-            let si = self.states[ROOT_STATE as usize].goto(c);
+        self.states[ROOT_STATE as usize].for_each_transition(|_, si| {
             if si != ROOT_STATE {
                 q.push_front(si);
             }
-        }
+        });
+
+        let mut transitions = Vec::new();
+
         while let Some(si) = q.pop_back() {
-            for c in (0..256).into_iter().map(|c| c as u8) {
-                let u = self.states[si as usize].goto(c);
-                if u != FAIL_STATE {
-                    q.push_front(u);
-                    let mut v = self.states[si as usize].fail;
-                    while self.states[v as usize].goto(c) == FAIL_STATE {
-                        v = self.states[v as usize].fail;
+            self.states[si as usize].for_each_ok_transition(|c, u| {
+                transitions.push((c, u));
+                q.push_front(u);
+            });
+
+            for (c, u) in transitions.drain(..) {
+                let mut v = self.states[si as usize].fail;
+                loop {
+                    let state = &self.states[v as usize];
+                    if state.goto(c) == FAIL_STATE {
+                        v = state.fail;
+                    } else {
+                        break;
                     }
-                    let ufail = self.states[v as usize].goto(c);
-                    self.states[u as usize].fail = ufail;
-                    let ufail_out = self.states[ufail as usize].out.clone();
-                    self.states[u as usize].out.extend(ufail_out);
                 }
+                let ufail = self.states[v as usize].goto(c);
+                self.states[u as usize].fail = ufail;
+
+                fn get_two<T>(xs: &mut [T], i: usize, j: usize) -> (&mut T, &mut T) {
+                    if i < j {
+                        let (before, after) = xs.split_at_mut(j);
+                        (&mut before[i], &mut after[0])
+                    } else {
+                        let (before, after) = xs.split_at_mut(i);
+                        (&mut after[0], &mut before[j])
+                    }
+                }
+
+                let (ufail_out, out) = get_two(&mut self.states, ufail as usize, u as usize);
+                out.out.extend_from_slice(&ufail_out.out);
             }
         }
         self
     }
 
     fn add_state(&mut self, state: State<T>) -> StateIdx {
         let i = self.states.len();
         self.states.push(state);
@@ -393,16 +463,28 @@ impl<T: Transitions> State<T> {
     fn set_goto(&mut self, b: u8, si: StateIdx) {
         self.goto.set_goto(b, si);
     }
 
     fn heap_bytes(&self) -> usize {
         (self.out.len() * usize_bytes())
         + self.goto.heap_bytes()
     }
+
+    fn for_each_transition<F>(&self, f: F)
+        where F: FnMut(u8, StateIdx)
+    {
+        self.goto.for_each_transition(f)
+    }
+
+    fn for_each_ok_transition<F>(&self, f: F)
+        where F: FnMut(u8, StateIdx)
+    {
+        self.goto.for_each_ok_transition(f)
+    }
 }
 
 /// An abstraction over state transition strategies.
 ///
 /// This is an attempt to let the caller choose the space/time trade offs
 /// used for state transitions.
 ///
 /// (It's possible that this interface is merely good enough for just the two
@@ -411,91 +493,158 @@ pub trait Transitions {
     /// Return a new state at the given depth.
     fn new(depth: u32) -> Self;
     /// Return the next state index given the next character.
     fn goto(&self, alpha: u8) -> StateIdx;
     /// Set the next state index for the character given.
     fn set_goto(&mut self, alpha: u8, si: StateIdx);
     /// The memory use in bytes (on the heap) of this set of transitions.
     fn heap_bytes(&self) -> usize;
+
+    /// Iterates over the transition for every byte value, including fail transitions
+    fn for_each_transition<F>(&self, mut f: F)
+        where F: FnMut(u8, StateIdx)
+    {
+        for b in AllBytesIter::new() {
+            f(b, self.goto(b));
+        }
+    }
+
+    /// Iterates over every transition that does not lead to the fail state
+    fn for_each_ok_transition<F>(&self, mut f: F)
+    where
+        F: FnMut(u8, StateIdx),
+    {
+        self.for_each_transition(|b, si| {
+            if si != FAIL_STATE {
+                f(b, si);
+            }
+        });
+    }
 }
 
 /// State transitions that can be stored either sparsely or densely.
 ///
 /// This uses less space but at the expense of slower matching.
 #[derive(Clone, Debug)]
 pub struct Dense(DenseChoice);
 
 #[derive(Clone, Debug)]
 enum DenseChoice {
-    Sparse(Vec<StateIdx>), // indexed by alphabet
+    Sparse(Box<Sparse>),
     Dense(Vec<(u8, StateIdx)>),
 }
 
 impl Transitions for Dense {
     fn new(depth: u32) -> Dense {
         if depth <= DENSE_DEPTH_THRESHOLD {
-            Dense(DenseChoice::Sparse(vec![0; 256]))
+            Dense(DenseChoice::Sparse(Box::new(Sparse::new(depth))))
         } else {
             Dense(DenseChoice::Dense(vec![]))
         }
     }
 
     fn goto(&self, b1: u8) -> StateIdx {
         match self.0 {
-            DenseChoice::Sparse(ref m) => m[b1 as usize],
+            DenseChoice::Sparse(ref m) => m.goto(b1),
             DenseChoice::Dense(ref m) => {
                 for &(b2, si) in m {
                     if b1 == b2 {
                         return si;
                     }
                 }
                 FAIL_STATE
             }
         }
     }
 
     fn set_goto(&mut self, b: u8, si: StateIdx) {
         match self.0 {
-            DenseChoice::Sparse(ref mut m) => m[b as usize] = si,
+            DenseChoice::Sparse(ref mut m) => m.set_goto(b, si),
             DenseChoice::Dense(ref mut m) => m.push((b, si)),
         }
     }
 
     fn heap_bytes(&self) -> usize {
         match self.0 {
-            DenseChoice::Sparse(ref m) => m.len() * 4,
+            DenseChoice::Sparse(_) => mem::size_of::<Sparse>(),
             DenseChoice::Dense(ref m) => m.len() * (1 + 4),
         }
     }
+
+    fn for_each_transition<F>(&self, mut f: F)
+        where F: FnMut(u8, StateIdx)
+    {
+        match self.0 {
+            DenseChoice::Sparse(ref m) => m.for_each_transition(f),
+            DenseChoice::Dense(ref m) => {
+                let mut iter = m.iter();
+                let mut b = 0i32;
+                while let Some(&(next_b, next_si)) = iter.next() {
+                    while (b as u8) < next_b {
+                        f(b as u8, FAIL_STATE);
+                        b += 1;
+                    }
+                    f(b as u8, next_si);
+                    b += 1;
+                }
+                while b < 256 {
+                    f(b as u8, FAIL_STATE);
+                    b += 1;
+                }
+            }
+        }
+    }
+    fn for_each_ok_transition<F>(&self, mut f: F)
+    where
+        F: FnMut(u8, StateIdx),
+    {
+        match self.0 {
+            DenseChoice::Sparse(ref m) => m.for_each_ok_transition(f),
+            DenseChoice::Dense(ref m) => for &(b, si) in m {
+                f(b, si)
+            }
+        }
+    }
 }
 
 /// State transitions that are always sparse.
 ///
 /// This can use enormous amounts of memory when there are many patterns,
 /// but matching is very fast.
-#[derive(Clone, Debug)]
-pub struct Sparse(Vec<StateIdx>);
+pub struct Sparse([StateIdx; 256]);
+
+impl Clone for Sparse {
+    fn clone(&self) -> Sparse {
+        Sparse(self.0)
+    }
+}
+
+impl fmt::Debug for Sparse {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        f.debug_tuple("Sparse").field(&&self.0[..]).finish()
+    }
+}
 
 impl Transitions for Sparse {
     fn new(_: u32) -> Sparse {
-        Sparse(vec![0; 256])
+        Sparse([0; 256])
     }
 
     #[inline]
     fn goto(&self, b: u8) -> StateIdx {
         self.0[b as usize]
     }
 
     fn set_goto(&mut self, b: u8, si: StateIdx) {
         self.0[b as usize] = si;
     }
 
     fn heap_bytes(&self) -> usize {
-        self.0.len() * 4
+        0
     }
 }
 
 impl<S: AsRef<[u8]>> FromIterator<S> for AcAutomaton<S> {
     /// Create an automaton from an iterator of strings.
     fn from_iter<T>(it: T) -> AcAutomaton<S> where T: IntoIterator<Item=S> {
         AcAutomaton::new(it)
     }
@@ -520,25 +669,23 @@ impl<P: AsRef<[u8]> + fmt::Debug, T: Tra
 
 impl<T: Transitions> State<T> {
     fn debug(&self, root: bool) -> String {
         format!("State {{ depth: {:?}, out: {:?}, fail: {:?}, goto: {{{}}} }}",
                 self.depth, self.out, self.fail, self.goto_string(root))
     }
 
     fn goto_string(&self, root: bool) -> String {
-        use std::char::from_u32;
-
         let mut goto = vec![];
-        for b in (0..256).map(|b| b as u8) {
+        for b in AllBytesIter::new() {
             let si = self.goto(b);
             if (!root && si == FAIL_STATE) || (root && si == ROOT_STATE) {
                 continue;
             }
-            goto.push(format!("{} => {}", from_u32(b as u32).unwrap(), si));
+            goto.push(format!("{} => {}", b as char, si));
         }
         goto.join(", ")
     }
 }
 
 impl<T: Transitions> fmt::Debug for State<T> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         write!(f, "{}", self.debug(false))
@@ -558,23 +705,23 @@ impl<T: Transitions> AcAutomaton<String,
 digraph automaton {{
     label=<<FONT POINT-SIZE="20">{}</FONT>>;
     labelloc="l";
     labeljust="l";
     rankdir="LR";
 "#, self.pats.join(", "));
         for (i, s) in self.states.iter().enumerate().skip(1) {
             let i = i as u32;
-            if s.out.len() == 0 {
+            if s.out.is_empty() {
                 w!(out, "    {};\n", i);
             } else {
                 w!(out, "    {} [peripheries=2];\n", i);
             }
             w!(out, "    {} -> {} [style=dashed];\n", i, s.fail);
-            for b in (0..256).map(|b| b as u8) {
+            for b in AllBytesIter::new() {
                 let si = s.goto(b);
                 if si == FAIL_STATE || (i == ROOT_STATE && si == ROOT_STATE) {
                     continue;
                 }
                 w!(out, "    {} -> {} [label={}];\n", i, si, b as char);
             }
         }
         w!(out, "}}");
@@ -592,18 +739,19 @@ fn usize_bytes() -> usize {
 }
 
 #[cfg(test)]
 mod tests {
     use std::collections::HashSet;
     use std::io;
 
     use quickcheck::{Arbitrary, Gen, quickcheck};
+    use rand::Rng;
 
-    use super::{Automaton, AcAutomaton, Match};
+    use super::{AcAutomaton, Automaton, Match, AllBytesIter};
 
     fn aut_find<S>(xs: &[S], haystack: &str) -> Vec<Match>
             where S: Clone + AsRef<[u8]> {
         AcAutomaton::new(xs.to_vec()).find(&haystack).collect()
     }
 
     fn aut_finds<S>(xs: &[S], haystack: &str) -> Vec<Match>
             where S: Clone + AsRef<[u8]> {
@@ -865,17 +1013,17 @@ mod tests {
     pub struct BiasAscii(String);
 
     impl Arbitrary for BiasAscii {
         fn arbitrary<G: Gen>(g: &mut G) -> BiasAscii {
             use std::char::from_u32;
             let size = { let s = g.size(); g.gen_range(0, s) };
             let mut s = String::with_capacity(size);
             for _ in 0..size {
-                if g.gen_weighted_bool(3) {
+                if g.gen_bool(0.3) {
                     s.push(char::arbitrary(g));
                 } else {
                     for _ in 0..5 {
                         s.push(from_u32(g.gen_range(97, 123)).unwrap());
                     }
                 }
             }
             BiasAscii(s)
@@ -917,9 +1065,18 @@ mod tests {
             // Ordering isn't always the same. I don't think we care, so do
             // an unordered comparison.
             let aset: HashSet<Match> = aut_matches.iter().cloned().collect();
             let nset: HashSet<Match> = naive_matches.iter().cloned().collect();
             aset == nset
         }
         quickcheck(prop as fn(Vec<SmallAscii>, BiasAscii) -> bool);
     }
+
+
+    #[test]
+    fn all_bytes_iter() {
+        let all_bytes = AllBytesIter::new().collect::<Vec<_>>();
+        assert_eq!(all_bytes[0], 0);
+        assert_eq!(all_bytes[255], 255);
+        assert!(AllBytesIter::new().enumerate().all(|(i, b)| b as usize == i));
+    }
 }
--- a/third_party/rust/atty/.cargo-checksum.json
+++ b/third_party/rust/atty/.cargo-checksum.json
@@ -1,1 +1,1 @@
-{"files":{".travis.yml":"4752c993a36dc8b271f25998b2c0b34af65f82fb61f7d71d0e34612a8a7cd5b0","CHANGELOG.md":"a182831141a059342664a8aaf40b9fd7828e8004094fb42e1b17129a090899ec","Cargo.toml":"5fa1586ac82ee945f057b87c81acae6e588de2303536445b4a766028633347e0","LICENSE":"235760c32039b0a6b23207918b71c1aa5d8318ee651c0f245d290ba1f47631cf","README.md":"b23f66e15c8311e11cbc3b10bfc87a7cb10bc4d758c6a352b155127b48b970d7","appveyor.yml":"dfe3d3eddd762a3cc76174e03ea91c93f544ce7fa05fbca4975f1624757d65e4","examples/atty.rs":"1551387a71474d9ac1b5153231f884e9e05213badcfaa3494ad2cb7ea958374a","src/lib.rs":"4530fe39e123b042eb023e4cf98a81d5184d06c938d3604b002f418101beb524"},"package":"d912da0db7fa85514874458ca3651fe2cddace8d0b0505571dbdcd41ab490159"}
\ No newline at end of file
+{"files":{".travis.yml":"9002419ea748e146ea85fd5c2646aaa6649168d4baf2e23de834c7e13e5742d3","CHANGELOG.md":"7bd35273865921a87f96ea8d69bd98bde11a96001b74c79cdcdbc83bfbbee53c","Cargo.toml":"77b51a9746012bf939c0fd0a72daa9db816c26798c557c234213a2d38b8aae81","LICENSE":"f3f8d32084848316048c5a1e125a3c5003eb32145a5f5f2a0d5586377324f9ba","README.md":"a62d294c45c9d8b2e54fcf35d9ee1ba8b8e2ab6960fb3d3f4cc9d59e8aed0835","appveyor.yml":"dea9c8da309cbb02bce31c613b697256f4cfada20b2f7b0c8911b73d569daf58","examples/atty.rs":"1551387a71474d9ac1b5153231f884e9e05213badcfaa3494ad2cb7ea958374a","rustfmt.toml":"bd196700242d17913cf8adead6912f55e9347e52ab5a001729d6c18d169f05c4","src/lib.rs":"ec3428266e83b35a8714ab99d9962c9e29f78becb39e313846f042f5b176c723"},"package":"9a7d5b8723950951411ee34d271d99dddcc2035a16ab25310ea2c8cfd4369652"}
\ No newline at end of file
--- a/third_party/rust/atty/.travis.yml
+++ b/third_party/rust/atty/.travis.yml
@@ -1,44 +1,73 @@
 sudo: false
 language: rust
 matrix:
   fast_finish: true
   include:
     - rust: nightly
+    - rust: nightly
+      os: osx
     - rust: beta
+    - rust: beta
+      os: osx
+    - rust: stable
     - rust: stable
-    - rust: 1.8.0
-os:
- - linux
- - osx
+      os: osx
+  allow_failures:
+    - rust: nightly
+
+before_cache:
+  # Travis can't cache files that are not readable by "others"
+  - chmod -R a+r $HOME/.cargo
+
+before_install:
+  # install kcov
+  - >
+    if [ ! -d "$HOME/.kcov/bin" ]; then
+      wget https://github.com/SimonKagstrom/kcov/archive/master.tar.gz &&
+      tar xzf master.tar.gz && mkdir kcov-master/build && cd kcov-master/build &&
+      cmake -DCMAKE_INSTALL_PREFIX:PATH=$HOME/.kcov .. && make && make install && cd ../..
+    fi
+  - export PATH=$HOME/.kcov/bin:$PATH
+
 script:
   - cargo build
-  - cargo test
+
 cache:
+  cargo: true
   apt: true
   directories:
   - target/debug/deps
   - target/debug/build
+
 addons:
   apt:
     packages:
-    - libcurl4-openssl-dev
-    - libelf-dev
-    - libdw-dev
-    - binutils-dev # required for `kcov --verify`
-    - libbfd-dev # required for `kcov --verify`
-after_success: |
-  [ $TRAVIS_RUST_VERSION = stable ] &&
-  wget https://github.com/SimonKagstrom/kcov/archive/master.tar.gz &&
-  tar xzf master.tar.gz && mkdir kcov-master/build && cd kcov-master/build && cmake .. && make && make install DESTDIR=../tmp && cd ../.. &&
-  ls target/debug &&
-  ./kcov-master/tmp/usr/local/bin/kcov --verify --coveralls-id=$TRAVIS_JOB_ID --exclude-pattern=/.cargo target/kcov target/debug/atty-* &&
-  [ $TRAVIS_BRANCH = master ] &&
-  [ $TRAVIS_PULL_REQUEST = false ] &&
-  cargo doc --no-deps &&
-  echo "<meta http-equiv=refresh content=0;url=`echo $TRAVIS_REPO_SLUG | cut -d '/' -f 2`/index.html>" > target/doc/index.html &&
-  pip install --user ghp-import &&
-  /home/travis/.local/bin/ghp-import -n target/doc &&
-  git push -fq https://${GH_TOKEN}@github.com/${TRAVIS_REPO_SLUG}.git gh-pages
-env:
-  global:
-    secure: acjXoBFG4yFklz/iW4q9PLaMmTgug0c8hOov4uiaXYjDkVGhnEePBozGc8ctKuFv2BVlwBSzvE1neE9dHcCS6il0x+G79sVTekfVN5dERja3UpwrC0/QodJuDmErIUpb6zylupPnUGq5pzZabRPNKyAnsFS5wYhLMSLxGPu4pfYdW0Eu8CEPIgPYsI6o2pfKgNpXbeizdHRLMeZCN4cbEPohO1odc+Z6WJvgKn2xEkpAcfhAuaroqGGxRtmDiJZ/JaBijAKY/O9Q3Xq1GSGOPT5lmwJSp3Fxw5dgmeX6LmN0ZODASdnEoYfoqUDUFzkCON3Sk4a7hugxlkZ7cx1tfqXxMg+0BgYIUdGQNloDJnuusWvXPBFdB2jxMsfcbrCjNsrJ8kjN6uBsW9yy0kqN7a8eOJckwh5fYRWfNta0R+BrveNXWmGp4u4aBq/85jEiHi30XKTzaEUbF0Y3cIONweWeWwBOcAvPBhO63Y07TRRe+SSk1NYm7QHGW9RsHhz89OSbaIXqn+r/o+6DZcw5XaO73DtZ62Kx48NErej9kVqcIJ6HnyvCJ/fJoT7h1ixSRI/WmS30l2S/q33Q2G4C/IZ4ZZRD/1thSltAxeA6OAUnr8ITZyW47CqOmyL1IUptrdAb9OLEedYV/QrOhcg2RJLXyP66xnItOwMp014bEp4=
+      - libcurl4-openssl-dev
+      - libelf-dev
+      - libdw-dev
+      - binutils-dev
+      - libiberty-dev
+
+after_success:
+  - '[ $TRAVIS_RUST_VERSION = stable ] &&
+    [ $TRAVIS_BRANCH = master ] &&
+    [ $TRAVIS_PULL_REQUEST = false ] &&
+    (ls target/debug &&
+    RUSTFLAGS="-C link-dead-code" cargo test --no-run &&
+    for file in target/debug/atty-*; do
+       if [[ "${file: -2}" != ".d" ]]; then
+         mkdir -p "target/cov/$(basename $file)";
+         kcov --exclude-pattern=/.cargo,/usr/lib --verify "target/cov/$(basename $file)" "$file";
+       fi;
+    done &&
+    kcov --coveralls-id=$COVERALLS_REPO_TOKEN --merge target/cov target/cov/* &&
+    echo "covered") || true'
+  - '[ $TRAVIS_RUST_VERSION = stable ] &&
+    [ $TRAVIS_BRANCH = master ] &&
+    [ $TRAVIS_PULL_REQUEST = false ]
+    && cargo doc --no-deps &&
+    echo "<meta http-equiv=refresh content=0;url=`echo $TRAVIS_REPO_SLUG | cut -d / -f 2`/index.html>" > target/doc/index.html &&
+    pip install --user ghp-import &&
+    /home/travis/.local/bin/ghp-import -n target/doc &&
+    git push -fq https://${GH_TOKEN}@github.com/${TRAVIS_REPO_SLUG}.git gh-pages &&
+    echo "documented"'
\ No newline at end of file
--- a/third_party/rust/atty/CHANGELOG.md
+++ b/third_party/rust/atty/CHANGELOG.md
@@ -1,8 +1,44 @@
+# 0.2.11
+
+* fix msys detection with `winapi@0.3.5` [#28](https://github.com/softprops/atty/pull/28)
+
+# 0.2.10
+
+* fix wasm regression [#27](https://github.com/softprops/atty/pull/27)
+
+# 0.2.9
+
+* Fix pty detection [#25](https://github.com/softprops/atty/pull/25)
+
+# 0.2.8
+
+* Fix an inverted condition on MinGW [#22](https://github.com/softprops/atty/pull/22)
+
+# 0.2.7
+
+* Change `||` to `&&` for whether MSYS is a tty [#24](https://github.com/softprops/atty/pull/24/)
+
+# 0.2.6
+
+* updated winapi dependency to [0.3](https://retep998.github.io/blog/winapi-0.3/) [#18](https://github.com/softprops/atty/pull/18)
+
+# 0.2.5
+
+* added support for Wasm compile targets [#17](https://github.com/softprops/atty/pull/17)
+
+# 0.2.4
+
+* added support for Wasm compile targets [#17](https://github.com/softprops/atty/pull/17)
+
+# 0.2.3
+
+* added support for Redox OS [#14](https://github.com/softprops/atty/pull/14)
+
 # 0.2.2
 
 * use target specific dependencies [#11](https://github.com/softprops/atty/pull/11)
 * Add tty detection for MSYS terminals [#12](https://github.com/softprops/atty/pull/12)
 
 # 0.2.1
 
 * fix windows bug
--- a/third_party/rust/atty/Cargo.toml
+++ b/third_party/rust/atty/Cargo.toml
@@ -1,17 +1,33 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g. crates.io) dependencies
+#
+# If you believe there's an error in this file please file an
+# issue against the rust-lang/cargo repository. If you're
+# editing this file be aware that the upstream Cargo.toml
+# will likely look very different (and much more reasonable)
+
 [package]
 name = "atty"
-version = "0.2.2"
+version = "0.2.11"
 authors = ["softprops <d.tangren@gmail.com>"]
 description = "A simple interface for querying atty"
+homepage = "https://github.com/softprops/atty"
 documentation = "http://softprops.github.io/atty"
-homepage = "https://github.com/softprops/atty"
-repository = "https://github.com/softprops/atty"
+readme = "README.md"
 keywords = ["terminal", "tty"]
 license = "MIT"
-
-[target.'cfg(not(windows))'.dependencies]
-libc = "0.2"
-
-[target.'cfg(windows)'.dependencies]
-kernel32-sys = "0.2"
-winapi = "0.2"
+repository = "https://github.com/softprops/atty"
+[target."cfg(target_os = \"redox\")".dependencies.termion]
+version = "1.5"
+[target."cfg(unix)".dependencies.libc]
+version = "0.2"
+default-features = false
+[target."cfg(windows)".dependencies.winapi]
+version = "0.3"
+features = ["consoleapi", "processenv", "minwinbase", "minwindef", "winbase"]
+[badges.travis-ci]
+repository = "softprops/atty"
--- a/third_party/rust/atty/LICENSE
+++ b/third_party/rust/atty/LICENSE
@@ -1,9 +1,9 @@
-Copyright (c) 2015-2016 Doug Tangren
+Copyright (c) 2015-2017 Doug Tangren
 
 Permission is hereby granted, free of charge, to any person obtaining
 a copy of this software and associated documentation files (the
 "Software"), to deal in the Software without restriction, including
 without limitation the rights to use, copy, modify, merge, publish,
 distribute, sublicense, and/or sell copies of the Software, and to
 permit persons to whom the Software is furnished to do so, subject to
 the following conditions:
--- a/third_party/rust/atty/README.md
+++ b/third_party/rust/atty/README.md
@@ -1,16 +1,23 @@
 # atty
 
-[![Build Status](https://travis-ci.org/softprops/atty.svg?branch=master)](https://travis-ci.org/softprops/atty) [![Build status](https://ci.appveyor.com/api/projects/status/geggrsnsjsuse8cv?svg=true)](https://ci.appveyor.com/project/softprops/atty) [![Coverage Status](https://coveralls.io/repos/softprops/atty/badge.svg?branch=master&service=github)](https://coveralls.io/github/softprops/atty?branch=master) [![crates.io](http://meritbadge.herokuapp.com/atty)](https://crates.io/crates/atty)
+[![Build Status](https://travis-ci.org/softprops/atty.svg?branch=master)](https://travis-ci.org/softprops/atty) [![Build status](https://ci.appveyor.com/api/projects/status/geggrsnsjsuse8cv?svg=true)](https://ci.appveyor.com/project/softprops/atty) [![Coverage Status](https://coveralls.io/repos/softprops/atty/badge.svg?branch=master&service=github)](https://coveralls.io/github/softprops/atty?branch=master) [![crates.io](https://img.shields.io/crates/v/atty.svg)](https://crates.io/crates/atty) [![Released API docs](https://docs.rs/atty/badge.svg)](http://docs.rs/atty) [![Master API docs](https://img.shields.io/badge/docs-master-green.svg)](https://softprops.github.io/atty)
 
 > are you or are you not a tty?
 
 
-[Api documentation](http://softprops.github.io/atty)
+## install
+
+Add the following to your `Cargo.toml`
+
+```toml
+[dependencies]
+atty = "0.2"
+```
 
 ## usage
 
 ```rust
 extern crate atty;
 
 use atty::Stream;
 
@@ -18,25 +25,16 @@ fn main() {
   if atty::is(Stream::Stdout) {
     println!("I'm a terminal");
   } else {
     println!("I'm not");
   }
 }
 ```
 
-## install
-
-Add the following to your `Cargo.toml`
-
-```toml
-[dependencies]
-atty = "0.2"
-```
-
 ## testing
 
 This library has been unit tested on both unix and windows platforms (via appveyor).
 
 
 A simple example program is provided in this repo to test various ttys.
 
 By default, it prints
@@ -70,9 +68,9 @@ To test std err, pipe the program to som
 
 ```bash
 $ cargo run --example atty 2>&1 | grep std
 stdout? false
 stderr? false
 stdin? true
 ```
 
-Doug Tangren (softprops) 2015
+Doug Tangren (softprops) 2015-2017
--- a/third_party/rust/atty/appveyor.yml
+++ b/third_party/rust/atty/appveyor.yml
@@ -1,17 +1,16 @@
 environment:
   matrix:
   - TARGET: nightly-x86_64-pc-windows-msvc
   - TARGET: nightly-i686-pc-windows-msvc
   - TARGET: nightly-x86_64-pc-windows-gnu
   - TARGET: nightly-i686-pc-windows-gnu
-  - TARGET: 1.8.0-x86_64-pc-windows-gnu
 install:
   - ps: Start-FileDownload "https://static.rust-lang.org/dist/rust-${env:TARGET}.exe" -FileName "rust-install.exe"
   - ps: .\rust-install.exe /VERYSILENT /NORESTART /DIR="C:\rust" | Out-Null
   - ps: $env:PATH="$env:PATH;C:\rust\bin"
   - call "%VCVARS%" || ver>nul
   - rustc -vV
   - cargo -vV
 build: false
 test_script:
-  - cargo test --verbose
+  - cargo build
new file mode 100644
--- /dev/null
+++ b/third_party/rust/atty/rustfmt.toml
@@ -0,0 +1,10 @@
+# keep imports tidy
+reorder_imported_names = true
+reorder_imports = true
+reorder_imports_in_group = true
+# there is no try!
+use_try_shorthand = true
+# don't create rustfmt artifacts
+write_mode = "Replace"
+# reduce wide load
+max_width = 80
\ No newline at end of file
--- a/third_party/rust/atty/src/lib.rs
+++ b/third_party/rust/atty/src/lib.rs
@@ -10,55 +10,56 @@
 //! ```
 //!
 //! ```
 //! if atty::isnt(atty::Stream::Stdout) {
 //!   println!("i'm not a tty")
 //! }
 //! ```
 
-#[cfg(windows)]
-extern crate kernel32;
-#[cfg(not(windows))]
+#![cfg_attr(unix, no_std)]
+
+#[cfg(unix)]
 extern crate libc;
 #[cfg(windows)]
 extern crate winapi;
+#[cfg(target_os = "redox")]
+extern crate termion;
 
 #[cfg(windows)]
-use winapi::minwindef::DWORD;
+use winapi::shared::minwindef::DWORD;
+#[cfg(windows)]
+use winapi::shared::ntdef::WCHAR;
 
 /// possible stream sources
 #[derive(Clone, Copy, Debug)]
 pub enum Stream {
     Stdout,
     Stderr,
     Stdin,
 }
 
 /// returns true if this is a tty
-#[cfg(unix)]
+#[cfg(all(unix, not(target_arch = "wasm32")))]
 pub fn is(stream: Stream) -> bool {
     extern crate libc;
 
     let fd = match stream {
         Stream::Stdout => libc::STDOUT_FILENO,
         Stream::Stderr => libc::STDERR_FILENO,
         Stream::Stdin => libc::STDIN_FILENO,
     };
     unsafe { libc::isatty(fd) != 0 }
 }
 
 /// returns true if this is a tty
 #[cfg(windows)]
 pub fn is(stream: Stream) -> bool {
-    use winapi::{
-        STD_INPUT_HANDLE as STD_INPUT,
-        STD_ERROR_HANDLE as STD_ERROR,
-        STD_OUTPUT_HANDLE as STD_OUTPUT
-    };
+    use winapi::um::winbase::{STD_ERROR_HANDLE as STD_ERROR, STD_INPUT_HANDLE as STD_INPUT,
+                 STD_OUTPUT_HANDLE as STD_OUTPUT};
 
     let (fd, others) = match stream {
         Stream::Stdin => (STD_INPUT, [STD_ERROR, STD_OUTPUT]),
         Stream::Stderr => (STD_ERROR, [STD_INPUT, STD_OUTPUT]),
         Stream::Stdout => (STD_OUTPUT, [STD_INPUT, STD_ERROR]),
     };
     if unsafe { console_on_any(&[fd]) } {
         // False positives aren't possible. If we got a console then
@@ -82,64 +83,90 @@ pub fn is(stream: Stream) -> bool {
 /// returns true if this is _not_ a tty
 pub fn isnt(stream: Stream) -> bool {
     !is(stream)
 }
 
 /// Returns true if any of the given fds are on a console.
 #[cfg(windows)]
 unsafe fn console_on_any(fds: &[DWORD]) -> bool {
+    use winapi::um::consoleapi::GetConsoleMode;
+    use winapi::um::processenv::GetStdHandle;
+
     for &fd in fds {
         let mut out = 0;
-        let handle = kernel32::GetStdHandle(fd);
-        if kernel32::GetConsoleMode(handle, &mut out) != 0 {
+        let handle = GetStdHandle(fd);
+        if GetConsoleMode(handle, &mut out) != 0 {
             return true;
         }
     }
     false
 }
 
 /// Returns true if there is an MSYS tty on the given handle.
 #[cfg(windows)]
 unsafe fn msys_tty_on(fd: DWORD) -> bool {
-    use std::ffi::OsString;
     use std::mem;
-    use std::os::raw::c_void;
-    use std::os::windows::ffi::OsStringExt;
     use std::slice;
 
-    use kernel32::GetFileInformationByHandleEx;
-    use winapi::fileapi::FILE_NAME_INFO;
-    use winapi::minwinbase::FileNameInfo;
-    use winapi::minwindef::MAX_PATH;
+    use winapi::ctypes::c_void;
+    use winapi::um::winbase::GetFileInformationByHandleEx;
+    use winapi::um::fileapi::FILE_NAME_INFO;
+    use winapi::um::minwinbase::FileNameInfo;
+    use winapi::um::processenv::GetStdHandle;
+    use winapi::shared::minwindef::MAX_PATH;
 
     let size = mem::size_of::<FILE_NAME_INFO>();
-    let mut name_info_bytes = vec![0u8; size + MAX_PATH];
+    let mut name_info_bytes = vec![0u8; size + MAX_PATH * mem::size_of::<WCHAR>()];
     let res = GetFileInformationByHandleEx(
-        kernel32::GetStdHandle(fd),
+        GetStdHandle(fd),
         FileNameInfo,
         &mut *name_info_bytes as *mut _ as *mut c_void,
-        name_info_bytes.len() as u32);
+        name_info_bytes.len() as u32,
+    );
     if res == 0 {
-        return true;
+        return false;
     }
-    let name_info: FILE_NAME_INFO =
-        *(name_info_bytes[0..size].as_ptr() as *const FILE_NAME_INFO);
-    let name_bytes =
-        &name_info_bytes[size..size + name_info.FileNameLength as usize];
-    let name_u16 = slice::from_raw_parts(
-        name_bytes.as_ptr() as *const u16, name_bytes.len() / 2);
-    let name = OsString::from_wide(name_u16)
-        .as_os_str().to_string_lossy().into_owned();
-    name.contains("msys-") || name.contains("-pty")
+    let name_info: &FILE_NAME_INFO = &*(name_info_bytes.as_ptr() as *const FILE_NAME_INFO);
+    let s = slice::from_raw_parts(
+        name_info.FileName.as_ptr(),
+        name_info.FileNameLength as usize / 2,
+    );
+    let name = String::from_utf16_lossy(s);
+    // This checks whether 'pty' exists in the file name, which indicates that
+    // a pseudo-terminal is attached. To mitigate false positives
+    // (e.g., an actual file name that contains 'pty'), we also require that
+    // the string 'msys-' or 'cygwin-' appears in the file name as well.
+    let is_msys = name.contains("msys-") || name.contains("cygwin-");
+    let is_pty = name.contains("-pty");
+    is_msys && is_pty
+}
+
+/// returns true if this is a tty
+#[cfg(target_os = "redox")]
+pub fn is(stream: Stream) -> bool {
+    use std::io;
+    use termion::is_tty;
+
+    match stream {
+        Stream::Stdin => is_tty(&io::stdin()),
+        Stream::Stdout => is_tty(&io::stdout()),
+        Stream::Stderr => is_tty(&io::stderr()),
+    }
+}
+
+/// returns true if this is a tty
+#[cfg(target_arch = "wasm32")]
+pub fn is(_stream: Stream) -> bool {
+    false
 }
 
 #[cfg(test)]
 mod tests {
-    use super::{is, Stream};
+    use super::{Stream, is};
 
     #[test]
     #[cfg(windows)]
     fn is_err() {
         // appveyor pipes its output
         assert!(!is(Stream::Stderr))
     }
 
@@ -167,17 +194,17 @@ mod tests {
     fn is_out() {
         assert!(is(Stream::Stdout))
     }
 
     #[test]
     #[cfg(target_os = "macos")]
     fn is_in() {
         // macos on travis seems to pipe its input
-        assert!(!is(Stream::Stdin))
+        assert!(is(Stream::Stdin))
     }
 
     #[test]
     #[cfg(all(not(target_os = "macos"), unix))]
     fn is_in() {
         assert!(is(Stream::Stdin))
     }
 }
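
As an aside, the MSYS/Cygwin check added above is just a substring heuristic on the handle's file name; isolated from the Windows API calls it amounts to the following sketch (the sample names are hypothetical):

```rust
/// Mirror of the heuristic above: treat a handle as an MSYS/Cygwin pty when
/// its name carries both an 'msys-'/'cygwin-' marker and a '-pty' segment.
fn looks_like_msys_pty(name: &str) -> bool {
    let is_msys = name.contains("msys-") || name.contains("cygwin-");
    let is_pty = name.contains("-pty");
    is_msys && is_pty
}

fn main() {
    // Hypothetical names, for illustration only.
    assert!(looks_like_msys_pty("\\msys-1888ae32e00d56aa-pty0-to-master"));
    assert!(!looks_like_msys_pty("C:\\logs\\empty-output.txt"));
}
```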
--- a/third_party/rust/fs2/.cargo-checksum.json
+++ b/third_party/rust/fs2/.cargo-checksum.json
@@ -1,1 +1,1 @@
-{"files":{".appveyor.yml":"15c5548159ad6ebcc02960bb6a3269e729e772df2733b7d4c7cc1583c413ae45",".travis.yml":"5733d01f7cd27cbdd17a46399103e83eca528727e6cad7f355f6748e772ef916","Cargo.toml":"c257476252f17472f1a78c9fa92b137dc435873797ec1a137aa73043b3ad06a7","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"7b63ecd5f1902af1b63729947373683c32745c16a10e8e6292e2e2dcd7e90ae0","README.md":"7667acd3dfd050dadccf8b7815435b9108c24c5704944085281beed6a181e220","src/lib.rs":"106e402d1c5ae68558f4e8a3971b646c12f19762363d2cf15c13a1c2aeb1d1e7","src/unix.rs":"67f0244c118cff918f01b6c164dfe604039ce9160a099ba6e4ff86dcf8ec0097","src/windows.rs":"5767d923280998e341504f8d2a015b8b0c3f8b2b1188610aa4c1b6a343da5682"},"package":"9ab76cfd2aaa59b7bf6688ad9ba15bbae64bff97f04ea02144cfd3443e5c2866"}
\ No newline at end of file
+{"files":{".appveyor.yml":"15c5548159ad6ebcc02960bb6a3269e729e772df2733b7d4c7cc1583c413ae45",".travis.yml":"5733d01f7cd27cbdd17a46399103e83eca528727e6cad7f355f6748e772ef916","Cargo.toml":"c47bb59c1d58dc1eb439331bb140e3a4174370f83b0f0b702d41eb64b27cda3f","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"7b63ecd5f1902af1b63729947373683c32745c16a10e8e6292e2e2dcd7e90ae0","README.md":"4dda80a0599cc235f8eaf56c33117fcc7cbeb48aece7f7b262f374ee12e3f1f4","src/lib.rs":"38e0f03bb81d76ac8d71b73b00fae648359aa7e1c1b9ba7159cb1b19eb12b987","src/unix.rs":"67f0244c118cff918f01b6c164dfe604039ce9160a099ba6e4ff86dcf8ec0097","src/windows.rs":"4178e02fe48c1148a4d0edcdac6cec8fecf016e636064843b60e7a1d78c817a7"},"package":"9564fc758e15025b46aa6643b1b77d047d1a56a1aea6e01002ac0c7026876213"}
\ No newline at end of file
--- a/third_party/rust/fs2/Cargo.toml
+++ b/third_party/rust/fs2/Cargo.toml
@@ -7,24 +7,27 @@
 #
 # If you believe there's an error in this file please file an
 # issue against the rust-lang/cargo repository. If you're
 # editing this file be aware that the upstream Cargo.toml
 # will likely look very different (and much more reasonable)
 
 [package]
 name = "fs2"
-version = "0.4.2"
+version = "0.4.3"
 authors = ["Dan Burkert <dan@danburkert.com>"]
 description = "Cross-platform file locks and file duplication."
 documentation = "https://docs.rs/fs2"
 keywords = ["file", "file-system", "lock", "duplicate", "flock"]
 license = "MIT/Apache-2.0"
 repository = "https://github.com/danburkert/fs2-rs"
 [dev-dependencies.tempdir]
 version = "0.3"
+[target."cfg(unix)".dependencies.libc]
+version = "0.2.30"
 [target."cfg(windows)".dependencies.winapi]
-version = "0.2"
+version = "0.3"
+features = ["handleapi", "processthreadsapi", "winerror", "fileapi", "winbase", "std"]
+[badges.appveyor]
+repository = "danburkert/fs2-rs"
 
-[target."cfg(windows)".dependencies.kernel32-sys]
-version = "0.2"
-[target."cfg(unix)".dependencies.libc]
-version = "0.2.2"
+[badges.travis-ci]
+repository = "danburkert/fs2-rs"
--- a/third_party/rust/fs2/README.md
+++ b/third_party/rust/fs2/README.md
@@ -1,17 +1,17 @@
 # fs2
 
 Extended utilities for working with files and filesystems in Rust. `fs2`
 requires Rust stable 1.8 or greater.
 
-[Documentation](https://docs.rs/fs2)
-
-[![Linux Status](https://travis-ci.org/danburkert/fs2-rs.svg?branch=master)](https://travis-ci.org/danburkert/fs2-rs)
-[![Windows Status](https://ci.appveyor.com/api/projects/status/iuvjv1aaaml0rntt/branch/master?svg=true)](https://ci.appveyor.com/project/danburkert/fs2-rs)
+[![Build Status](https://travis-ci.org/danburkert/fs2-rs.svg?branch=master)](https://travis-ci.org/danburkert/fs2-rs)
+[![Windows Build status](https://ci.appveyor.com/api/projects/status/iuvjv1aaaml0rntt/branch/master?svg=true)](https://ci.appveyor.com/project/danburkert/fs2-rs/branch/master)
+[![Documentation](https://docs.rs/fs2/badge.svg)](https://docs.rs/fs2)
+[![Crate](https://img.shields.io/crates/v/fs2.svg)](https://crates.io/crates/fs2)
 
 ## Features
 
 - [x] file descriptor duplication.
 - [x] file locks.
 - [x] file (pre)allocation.
 - [x] file allocation information.
 - [x] filesystem space usage information.
--- a/third_party/rust/fs2/src/lib.rs
+++ b/third_party/rust/fs2/src/lib.rs
@@ -1,10 +1,16 @@
+//! Extended utilities for working with files and filesystems in Rust.
+
+#![doc(html_root_url = "https://docs.rs/fs2/0.4.3")]
+
 #![cfg_attr(test, feature(test))]
-#![deny(warnings)]
+
+#[cfg(windows)]
+extern crate winapi;
 
 #[cfg(unix)]
 mod unix;
 #[cfg(unix)]
 use unix as sys;
 
 #[cfg(windows)]
 mod windows;
--- a/third_party/rust/fs2/src/windows.rs
+++ b/third_party/rust/fs2/src/windows.rs
@@ -1,68 +1,76 @@
-extern crate kernel32;
-extern crate winapi;
-
 use std::fs::File;
 use std::io::{Error, Result};
 use std::mem;
 use std::os::windows::ffi::OsStrExt;
 use std::os::windows::io::{AsRawHandle, FromRawHandle};
 use std::path::Path;
 use std::ptr;
 
+use winapi::shared::minwindef::{BOOL, DWORD};
+use winapi::shared::winerror::ERROR_LOCK_VIOLATION;
+use winapi::um::fileapi::{FILE_ALLOCATION_INFO, FILE_STANDARD_INFO, GetDiskFreeSpaceW};
+use winapi::um::fileapi::{GetVolumePathNameW, LockFileEx, UnlockFile, SetFileInformationByHandle};
+use winapi::um::handleapi::DuplicateHandle;
+use winapi::um::minwinbase::{FileAllocationInfo, FileStandardInfo};
+use winapi::um::minwinbase::{LOCKFILE_FAIL_IMMEDIATELY, LOCKFILE_EXCLUSIVE_LOCK};
+use winapi::um::processthreadsapi::GetCurrentProcess;
+use winapi::um::winbase::GetFileInformationByHandleEx;
+use winapi::um::winnt::DUPLICATE_SAME_ACCESS;
+
 use FsStats;
 
 pub fn duplicate(file: &File) -> Result<File> {
     unsafe {
         let mut handle = ptr::null_mut();
-        let current_process = kernel32::GetCurrentProcess();
-        let ret = kernel32::DuplicateHandle(current_process,
-                                            file.as_raw_handle(),
-                                            current_process,
-                                            &mut handle,
-                                            0,
-                                            true as winapi::BOOL,
-                                            winapi::DUPLICATE_SAME_ACCESS);
+        let current_process = GetCurrentProcess();
+        let ret = DuplicateHandle(current_process,
+                                  file.as_raw_handle(),
+                                  current_process,
+                                  &mut handle,
+                                  0,
+                                  true as BOOL,
+                                  DUPLICATE_SAME_ACCESS);
         if ret == 0 {
             Err(Error::last_os_error())
         } else {
             Ok(File::from_raw_handle(handle))
         }
     }
 }
 
 pub fn allocated_size(file: &File) -> Result<u64> {
     unsafe {
-        let mut info: winapi::FILE_STANDARD_INFO = mem::zeroed();
+        let mut info: FILE_STANDARD_INFO = mem::zeroed();
 
-        let ret = kernel32::GetFileInformationByHandleEx(
+        let ret = GetFileInformationByHandleEx(
             file.as_raw_handle(),
-            winapi::FileStandardInfo,
+            FileStandardInfo,
             &mut info as *mut _ as *mut _,
-            mem::size_of::<winapi::FILE_STANDARD_INFO>() as winapi::DWORD);
+            mem::size_of::<FILE_STANDARD_INFO>() as DWORD);
 
         if ret == 0 {
             Err(Error::last_os_error())
         } else {
-            Ok(info.AllocationSize as u64)
+            Ok(*info.AllocationSize.QuadPart() as u64)
         }
     }
 }
 
 pub fn allocate(file: &File, len: u64) -> Result<()> {
     if try!(allocated_size(file)) < len {
         unsafe {
-            let mut info: winapi::FILE_ALLOCATION_INFO = mem::zeroed();
-            info.AllocationSize = len as i64;
-            let ret = kernel32::SetFileInformationByHandle(
+            let mut info: FILE_ALLOCATION_INFO = mem::zeroed();
+            *info.AllocationSize.QuadPart_mut() = len as i64;
+            let ret = SetFileInformationByHandle(
                 file.as_raw_handle(),
-                winapi::FileAllocationInfo,
+                FileAllocationInfo,
                 &mut info as *mut _ as *mut _,
-                mem::size_of::<winapi::FILE_ALLOCATION_INFO>() as winapi::DWORD);
+                mem::size_of::<FILE_ALLOCATION_INFO>() as DWORD);
             if ret == 0 {
                 return Err(Error::last_os_error());
             }
         }
     }
     if try!(file.metadata()).len() < len {
         file.set_len(len)
     } else {
@@ -70,71 +78,71 @@ pub fn allocate(file: &File, len: u64) -
     }
 }
 
 pub fn lock_shared(file: &File) -> Result<()> {
     lock_file(file, 0)
 }
 
 pub fn lock_exclusive(file: &File) -> Result<()> {
-    lock_file(file, winapi::LOCKFILE_EXCLUSIVE_LOCK)
+    lock_file(file, LOCKFILE_EXCLUSIVE_LOCK)
 }
 
 pub fn try_lock_shared(file: &File) -> Result<()> {
-    lock_file(file, winapi::LOCKFILE_FAIL_IMMEDIATELY)
+    lock_file(file, LOCKFILE_FAIL_IMMEDIATELY)
 }
 
 pub fn try_lock_exclusive(file: &File) -> Result<()> {
-    lock_file(file, winapi::LOCKFILE_EXCLUSIVE_LOCK | winapi::LOCKFILE_FAIL_IMMEDIATELY)
+    lock_file(file, LOCKFILE_EXCLUSIVE_LOCK | LOCKFILE_FAIL_IMMEDIATELY)
 }
 
 pub fn unlock(file: &File) -> Result<()> {
     unsafe {
-        let ret = kernel32::UnlockFile(file.as_raw_handle(), 0, 0, !0, !0);
+        let ret = UnlockFile(file.as_raw_handle(), 0, 0, !0, !0);
         if ret == 0 { Err(Error::last_os_error()) } else { Ok(()) }
     }
 }
 
 pub fn lock_error() -> Error {
-    Error::from_raw_os_error(winapi::ERROR_LOCK_VIOLATION as i32)
+    Error::from_raw_os_error(ERROR_LOCK_VIOLATION as i32)
 }
 
-fn lock_file(file: &File, flags: winapi::DWORD) -> Result<()> {
+fn lock_file(file: &File, flags: DWORD) -> Result<()> {
     unsafe {
         let mut overlapped = mem::zeroed();
-        let ret = kernel32::LockFileEx(file.as_raw_handle(), flags, 0, !0, !0, &mut overlapped);
+        let ret = LockFileEx(file.as_raw_handle(), flags, 0, !0, !0, &mut overlapped);
         if ret == 0 { Err(Error::last_os_error()) } else { Ok(()) }
     }
 }
 
 fn volume_path(path: &Path, volume_path: &mut [u16]) -> Result<()> {
     let path_utf8: Vec<u16> = path.as_os_str().encode_wide().chain(Some(0)).collect();
     unsafe {
-        let ret = kernel32::GetVolumePathNameW(path_utf8.as_ptr(),
-                                               volume_path.as_mut_ptr(),
-                                               volume_path.len() as winapi::DWORD);
+        let ret = GetVolumePathNameW(path_utf8.as_ptr(),
+                                     volume_path.as_mut_ptr(),
+                                     volume_path.len() as DWORD);
         if ret == 0 { Err(Error::last_os_error()) } else { Ok(()) }
     }
 }
 
 pub fn statvfs(path: &Path) -> Result<FsStats> {
     let root_path: &mut [u16] = &mut [0; 261];
     try!(volume_path(path, root_path));
     unsafe {
 
         let mut sectors_per_cluster = 0;
         let mut bytes_per_sector = 0;
         let mut number_of_free_clusters = 0;
         let mut total_number_of_clusters = 0;
-        let ret = kernel32::GetDiskFreeSpaceW(root_path.as_ptr(),
-                                              &mut sectors_per_cluster,
-                                              &mut bytes_per_sector,
-                                              &mut number_of_free_clusters,
-                                              &mut total_number_of_clusters);
+        let ret = GetDiskFreeSpaceW(root_path.as_ptr(),
+                                    &mut sectors_per_cluster,
+                                    &mut bytes_per_sector,
+                                    &mut number_of_free_clusters,
+                                    &mut total_number_of_clusters);
         if ret == 0 {
             Err(Error::last_os_error())
         } else {
             let bytes_per_cluster = sectors_per_cluster as u64 * bytes_per_sector as u64;
             let free_space = bytes_per_cluster * number_of_free_clusters as u64;
             let total_space = bytes_per_cluster * total_number_of_clusters as u64;
             Ok(FsStats {
                 free_space: free_space,
--- a/third_party/rust/msdos_time/.cargo-checksum.json
+++ b/third_party/rust/msdos_time/.cargo-checksum.json
@@ -1,1 +1,1 @@
-{"files":{".travis.yml":"f0902052851b9d4fec53cea56ce4075686365f0075d64a788fe7ba9f9b98fb8a","Cargo.toml":"b2dbd542893d6b03621c7c65c4fdd7ddb7e6e7c713c610ea3ef5ca88108d3644","LICENSE-APACHE":"c6596eb7be8581c18be736c846fb9173b69eccf6ef94c5135893ec56bd92ba08","LICENSE-MIT":"0241727cb9f7e4ab1ad206ff89ec1dc31034a69b146f076237e8c1b1534b6fe0","README.md":"11cdfba96992fcee089bdcad6682ba5357d8161304d35618ee76abad298ffba7","appveyor.yml":"266ca65d48f02c6a3ce1ba1c0772ef9afece737da03b3176e000b7c32a9ea748","script/doc-upload.cfg":"8cae598ef8592842fa8319b18d515e7a322296490cbdf909b29f5e042a95419e","src/lib.rs":"996474c9c88d9c79865b4923d9739f4c9bc650b29ea70db1c7af43fa59947ed1"},"package":"65ba9d75bcea84e07812618fedf284a64776c2f2ea0cad6bca7f69739695a958"}
\ No newline at end of file
+{"files":{".travis.yml":"f0902052851b9d4fec53cea56ce4075686365f0075d64a788fe7ba9f9b98fb8a","Cargo.toml":"3faa730a95b1a21d63ca11b375e9e86793565a573db649df01562bb24a0b1b43","LICENSE-APACHE":"c6596eb7be8581c18be736c846fb9173b69eccf6ef94c5135893ec56bd92ba08","LICENSE-MIT":"0241727cb9f7e4ab1ad206ff89ec1dc31034a69b146f076237e8c1b1534b6fe0","README.md":"11cdfba96992fcee089bdcad6682ba5357d8161304d35618ee76abad298ffba7","appveyor.yml":"266ca65d48f02c6a3ce1ba1c0772ef9afece737da03b3176e000b7c32a9ea748","script/doc-upload.cfg":"8cae598ef8592842fa8319b18d515e7a322296490cbdf909b29f5e042a95419e","src/lib.rs":"282e6beaef56932a3737e1b60e71ddfdf620d35a9cafdfaa1cc59b54f7d43009"},"package":"aad9dfe950c057b1bfe9c1f2aa51583a8468ef2a5baba2ebbe06d775efeb7729"}
\ No newline at end of file
--- a/third_party/rust/msdos_time/Cargo.toml
+++ b/third_party/rust/msdos_time/Cargo.toml
@@ -1,20 +1,28 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g. crates.io) dependencies
+#
+# If you believe there's an error in this file please file an
+# issue against the rust-lang/cargo repository. If you're
+# editing this file be aware that the upstream Cargo.toml
+# will likely look very different (and much more reasonable)
+
 [package]
 name = "msdos_time"
-version = "0.1.5"
+version = "0.1.6"
 authors = ["Mathijs van de Nes <git@mathijs.vd-nes.nl>"]
+description = "Converts an MsDosDateTime (FAT time) to a Tm value\n"
+documentation = "https://mvdnes.github.io/rust-docs/msdos_time/msdos_time/"
 license = "MIT OR Apache-2.0"
-description = """
-Converts an MsDosDateTime (FAT time) to a Tm value
-"""
 repository = "https://github.com/mvdnes/msdos_time"
-documentation = "https://mvdnes.github.io/rust-docs/msdos_time/msdos_time/"
 
 [lib]
 name = "msdos_time"
-
-[dependencies]
-time = "0.1"
-
-[target.'cfg(windows)'.dependencies]
-winapi = "0.2"
-kernel32-sys = "0.2"
+[dependencies.time]
+version = "0.1"
+[target."cfg(windows)".dependencies.winapi]
+version = "0.3"
+features = ["winbase", "timezoneapi"]
--- a/third_party/rust/msdos_time/src/lib.rs
+++ b/third_party/rust/msdos_time/src/lib.rs
@@ -1,17 +1,16 @@
 #![warn(missing_docs)]
 
 //! This crate converts a `Tm` struct to an `MsDosDateTime` and vice-versa
 //!
 //! MsDosDateTime is based on a FAT datetime and is a compact representation of a date.
 //! It is currently mostly used in zip files.
 
 extern crate time;
-#[cfg(windows)] extern crate kernel32;
 #[cfg(windows)] extern crate winapi;
 
 use std::io;
 use time::Tm;
 
 /// Struct representing the date and time part of an MsDos datetime
 #[derive(Copy, Clone, Debug)]
 pub struct MsDosDateTime {
@@ -88,18 +87,20 @@ mod sys {
         Ok(MsDosDateTime { datepart: datepart, timepart: timepart })
     }
 }
 
 #[cfg(windows)]
 mod sys {
     use super::MsDosDateTime;
     use time::{self, Tm};
-    use winapi::*;
-    use kernel32::*;
+    use winapi::shared::minwindef::{WORD, FILETIME};
+    use winapi::um::minwinbase::SYSTEMTIME;
+    use winapi::um::timezoneapi::{FileTimeToSystemTime, SystemTimeToFileTime};
+    use winapi::um::winbase::{DosDateTimeToFileTime, FileTimeToDosDateTime};
     use std::io;
 
     pub fn msdos_to_tm(ms: MsDosDateTime) -> Result<Tm, io::Error> {
         let datepart: WORD = ms.datepart;
         let timepart: WORD = ms.timepart;
         let mut filetime: FILETIME = unsafe { ::std::mem::zeroed() };
         let mut systemtime: SYSTEMTIME = unsafe { ::std::mem::zeroed() };
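
For readers unfamiliar with the FAT representation mentioned in the crate docs above, the two 16-bit words break down as follows; this standalone sketch only illustrates the standard bit layout and is not part of the crate's API:

```rust
/// Decode the standard FAT/MS-DOS date and time words.
/// datepart: bits 15-9 = years since 1980, 8-5 = month (1-12), 4-0 = day (1-31)
/// timepart: bits 15-11 = hours, 10-5 = minutes, 4-0 = seconds / 2
fn decode_msdos(datepart: u16, timepart: u16) -> (u16, u16, u16, u16, u16, u16) {
    let year = 1980 + (datepart >> 9);
    let month = (datepart >> 5) & 0x0f;
    let day = datepart & 0x1f;
    let hour = timepart >> 11;
    let minute = (timepart >> 5) & 0x3f;
    let second = (timepart & 0x1f) * 2;
    (year, month, day, hour, minute, second)
}

fn main() {
    // 2018-08-29 21:56:30 encoded by hand for illustration.
    let date = ((2018 - 1980) << 9) | (8 << 5) | 29;
    let time = (21 << 11) | (56 << 5) | (30 / 2);
    assert_eq!(decode_msdos(date, time), (2018, 8, 29, 21, 56, 30));
}
```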
 
--- a/third_party/rust/parking_lot_core/.cargo-checksum.json
+++ b/third_party/rust/parking_lot_core/.cargo-checksum.json
@@ -1,1 +1,1 @@
-{"files":{"Cargo.toml":"c8a7070b6801c4cc9f410d45819dab3c6efcbe77806a617415289d8de0fbb01b","src/lib.rs":"932b67d85b4176bfb1d7a78b70ec5dd356839158dad22d2e932b88be30925572","src/parking_lot.rs":"aabffbdf465648a1066b24e88c74fbff9cef636354e0bfdd125937f61d771449","src/spinwait.rs":"5aee4a6e8d33eec1b6a81b21ff3b223460d8fa2d37e633b31b8ca27fabe659cb","src/stable.rs":"4562ea9a408bd3917df6d30c8354bf51f46bc69b3360815813730743204adfdc","src/thread_parker/generic.rs":"0c30db3d1c96bd5ef284a4761a829aba8d21fc813b3d1d70b2baf5f00744e006","src/thread_parker/linux.rs":"4e0a142ce3ff59d37e5c452bf57b3481ef00274e8e489ac1a1d11b2f31b473ed","src/thread_parker/unix.rs":"ff5a543f21895c8114bd4f89b5764882beab1f3a37ddbd8fc31c783e1db3f1c1","src/thread_parker/windows/keyed_event.rs":"b54b0855b10ed2c188ce42094c6e4069e92e325f870d0c0f8244bfe2d7811b66","src/thread_parker/windows/mod.rs":"dc5359b10275a4aaee04024c202b115d267e4ea15917546b042c4035c0218136","src/thread_parker/windows/waitaddress.rs":"2da78bfe09e4262a6cd6271d6416a9debdb3fd3abb1993be1c68515952576874","src/util.rs":"2d07c0c010a857790ae2ed6a1215eeed8af76859e076797ea1ba8dec82169e84","src/word_lock.rs":"6ab156a775c46423bbb7dae520f181dde1747140d52ba995850969498559c7b2"},"package":"6c677d78851950b3aec390e681a411f78cc250cba277d4f578758a377f727970"}
\ No newline at end of file
+{"files":{"Cargo.toml":"220144666e4c0a4b3b3235e7d3b10f4f34cb3b8ca292ee19437f23c9a15758de","src/lib.rs":"e80f927665ef24660878e5e4a4ea3c26892c2849889d59aacee6beb59d02020d","src/parking_lot.rs":"2da388ff4c13003fc30531bb6110e4feedac30ad3ce905912e657711a6b0fdad","src/spinwait.rs":"cbd2d2464ef6fa5fb05109bdb3ca588467949dcd4ee9194deafef6004d10215e","src/thread_parker/generic.rs":"0c30db3d1c96bd5ef284a4761a829aba8d21fc813b3d1d70b2baf5f00744e006","src/thread_parker/linux.rs":"1c4c023ebb58fcc16451683c6c8b68311e87ab34537dc17a060ddf5aad02a215","src/thread_parker/unix.rs":"dc6f4af965618cc2d87d3bef6455ba78b44ffe5b38dff9d41fb86e1526cbbcd1","src/thread_parker/windows/keyed_event.rs":"efe64f7bcdfe03049a7b901d2573bc7db1bb73b8ab4a040245423d95c8f9514f","src/thread_parker/windows/mod.rs":"f31eed53f3e402477d80a70a7c6d474c01ba4c9ad952bbe562509448cd3cc1ad","src/thread_parker/windows/waitaddress.rs":"09d1e6a5a6c3f23f375ae4beee946290f7c66d183e69d476ce69b21a4a5aa7af","src/util.rs":"2d07c0c010a857790ae2ed6a1215eeed8af76859e076797ea1ba8dec82169e84","src/word_lock.rs":"692f443c52672c6e88c0cad259cf7c89dc2a1b54aa95eeeea582401b2a7d058d"},"package":"4db1a8ccf734a7bce794cc19b3df06ed87ab2f3907036b693c68f56b4d4537fa"}
\ No newline at end of file
--- a/third_party/rust/parking_lot_core/Cargo.toml
+++ b/third_party/rust/parking_lot_core/Cargo.toml
@@ -7,43 +7,41 @@
 #
 # If you believe there's an error in this file please file an
 # issue against the rust-lang/cargo repository. If you're
 # editing this file be aware that the upstream Cargo.toml
 # will likely look very different (and much more reasonable)
 
 [package]
 name = "parking_lot_core"
-version = "0.2.7"
+version = "0.2.14"
 authors = ["Amanieu d'Antras <amanieu@gmail.com>"]
 description = "An advanced API for creating custom synchronization primitives."
 documentation = "https://amanieu.github.io/parking_lot/parking_lot_core/index.html"
 keywords = ["mutex", "condvar", "rwlock", "once", "thread"]
 license = "Apache-2.0/MIT"
 repository = "https://github.com/Amanieu/parking_lot"
 [dependencies.backtrace]
 version = "0.3.2"
 optional = true
 
 [dependencies.petgraph]
 version = "0.4.5"
 optional = true
 
 [dependencies.rand]
-version = "0.3"
+version = "0.4"
 
 [dependencies.smallvec]
 version = "0.6"
 
 [dependencies.thread-id]
 version = "3.2.0"
 optional = true
 
 [features]
 deadlock_detection = ["petgraph", "thread-id", "backtrace"]
 nightly = []
 [target."cfg(unix)".dependencies.libc]
-version = "0.2.15"
-[target."cfg(windows)".dependencies.kernel32-sys]
-version = "0.2"
-
+version = "0.2.27"
 [target."cfg(windows)".dependencies.winapi]
-version = "0.2"
+version = "0.3"
+features = ["winnt", "ntstatus", "minwindef", "winerror", "winbase", "errhandlingapi", "handleapi"]
--- a/third_party/rust/parking_lot_core/src/lib.rs
+++ b/third_party/rust/parking_lot_core/src/lib.rs
@@ -6,25 +6,26 @@
 // copied, modified, or distributed except according to those terms.
 
 //! This library exposes a low-level API for creating your own efficient
 //! synchronization primitives.
 //!
 //! # The parking lot
 //!
 //! To keep synchronization primitives small, all thread queuing and suspending
-//! functionality is offloaded to the *parking lot*. The idea behind this is
-//! based on the Webkit [`WTF::ParkingLot`]
-//! (https://webkit.org/blog/6161/locking-in-webkit/) class, which essentially
-//! consists of a hash table mapping of lock addresses to queues of parked
-//! (sleeping) threads. The Webkit parking lot was itself inspired by Linux
-//! [futexes](http://man7.org/linux/man-pages/man2/futex.2.html), but it is more
-//! powerful since it allows invoking callbacks while holding a queue lock.
+//! functionality is offloaded to the *parking lot*. The idea behind this is based
+//! on the Webkit [`WTF::ParkingLot`](https://webkit.org/blog/6161/locking-in-webkit/)
+//! class, which essentially consists of a hash table mapping of lock addresses
+//! to queues of parked (sleeping) threads. The Webkit parking lot was itself
+//! inspired by Linux [futexes](http://man7.org/linux/man-pages/man2/futex.2.html),
+//! but it is more powerful since it allows invoking callbacks while holding a
+//! queue lock.
 //!
 //! There are two main operations that can be performed on the parking lot:
+//!
 //! - *Parking* refers to suspending the thread while simultaneously enqueuing it
 //!   on a queue keyed by some address.
 //! - *Unparking* refers to dequeuing a thread from a queue keyed by some address
 //!   and resuming it.
 //!
 //! See the documentation of the individual functions for more details.
 //!
 //! # Building custom synchronization primitives
@@ -32,54 +33,47 @@
 //! Building custom synchronization primitives is very simple since the parking
 //! lot takes care of all the hard parts for you. A simple example for a
 //! custom primitive would be to integrate a `Mutex` inside another data type.
 //! Since a mutex only requires 2 bits, it can share space with other data.
 //! For example, one could create an `ArcMutex` type that combines the atomic
 //! reference count and the two mutex bits in the same atomic word.
 
 #![warn(missing_docs)]
-#![cfg_attr(feature = "nightly", feature(const_fn, thread_local_state))]
 #![cfg_attr(all(feature = "nightly", target_os = "linux"), feature(integer_atomics))]
-#![cfg_attr(feature = "nightly", feature(asm))]
 
 extern crate rand;
 extern crate smallvec;
 
 #[cfg(feature = "deadlock_detection")]
 extern crate backtrace;
 #[cfg(feature = "deadlock_detection")]
 extern crate petgraph;
 #[cfg(feature = "deadlock_detection")]
 extern crate thread_id;
 
 #[cfg(unix)]
 extern crate libc;
 
 #[cfg(windows)]
-extern crate kernel32;
-#[cfg(windows)]
 extern crate winapi;
 
 #[cfg(all(feature = "nightly", target_os = "linux"))]
 #[path = "thread_parker/linux.rs"]
 mod thread_parker;
 #[cfg(all(unix, not(all(feature = "nightly", target_os = "linux"))))]
 #[path = "thread_parker/unix.rs"]
 mod thread_parker;
 #[cfg(windows)]
 #[path = "thread_parker/windows/mod.rs"]
 mod thread_parker;
 #[cfg(not(any(windows, unix)))]
 #[path = "thread_parker/generic.rs"]
 mod thread_parker;
 
-#[cfg(not(feature = "nightly"))]
-mod stable;
-
 mod util;
 mod spinwait;
 mod word_lock;
 mod parking_lot;
 
 pub use parking_lot::{FilterOp, ParkResult, ParkToken, RequeueOp, UnparkResult, UnparkToken};
 pub use parking_lot::{DEFAULT_PARK_TOKEN, DEFAULT_UNPARK_TOKEN};
 pub use parking_lot::{park, unpark_all, unpark_filter, unpark_one, unpark_requeue};
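
The "share an atomic word" idea described in the module docs above can be sketched with plain standard-library atomics. The snippet below is purely illustrative: it yields on contention where a real primitive built on this crate would call `park`/`unpark_one` instead.

```rust
use std::sync::atomic::{AtomicUsize, Ordering};

const LOCKED: usize = 1;

// Conceptual sketch only: bit 0 is a lock flag, the remaining bits carry
// unrelated payload (e.g. a reference count).
struct PackedLock {
    state: AtomicUsize,
}

impl PackedLock {
    fn new(payload: usize) -> Self {
        PackedLock { state: AtomicUsize::new(payload << 1) }
    }

    fn lock(&self) {
        loop {
            let cur = self.state.load(Ordering::Relaxed);
            if cur & LOCKED == 0
                && self
                    .state
                    .compare_exchange_weak(cur, cur | LOCKED, Ordering::Acquire, Ordering::Relaxed)
                    .is_ok()
            {
                return;
            }
            std::thread::yield_now(); // stand-in for parking the thread
        }
    }

    fn unlock(&self) {
        self.state.fetch_and(!LOCKED, Ordering::Release);
    }

    fn payload(&self) -> usize {
        // The payload lives above the lock bit, so it survives lock/unlock.
        self.state.load(Ordering::Relaxed) >> 1
    }
}

fn main() {
    let lock = PackedLock::new(42);
    lock.lock();
    assert_eq!(lock.payload(), 42);
    lock.unlock();
}
```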
--- a/third_party/rust/parking_lot_core/src/parking_lot.rs
+++ b/third_party/rust/parking_lot_core/src/parking_lot.rs
@@ -1,19 +1,16 @@
 // Copyright 2016 Amanieu d'Antras
 //
 // Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
 // http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
 // http://opensource.org/licenses/MIT>, at your option. This file may not be
 // copied, modified, or distributed except according to those terms.
 
-#[cfg(feature = "nightly")]
 use std::sync::atomic::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT};
-#[cfg(not(feature = "nightly"))]
-use stable::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT};
 use std::time::{Duration, Instant};
 use std::cell::{Cell, UnsafeCell};
 use std::ptr;
 use std::mem;
 use std::thread::LocalKey;
 #[cfg(not(feature = "nightly"))]
 use std::panic;
 use smallvec::SmallVec;
@@ -885,17 +882,17 @@ unsafe fn unpark_requeue_internal(
         unlock_bucket_pair(bucket_from, bucket_to);
         return result;
     }
 
     // Remove all threads with the given key in the source bucket
     let mut link = &bucket_from.queue_head;
     let mut current = bucket_from.queue_head.get();
     let mut previous = ptr::null();
-    let mut requeue_threads = ptr::null();
+    let mut requeue_threads: *const ThreadData = ptr::null();
     let mut requeue_threads_tail: *const ThreadData = ptr::null();
     let mut wakeup_thread = None;
     while !current.is_null() {
         if (*current).key.load(Ordering::Relaxed) == key_from {
             // Remove the thread from the queue
             let next = (*current).next_in_queue.get();
             link.set(next);
             if bucket_from.queue_tail.get() == current {
--- a/third_party/rust/parking_lot_core/src/spinwait.rs
+++ b/third_party/rust/parking_lot_core/src/spinwait.rs
@@ -1,103 +1,79 @@
 // Copyright 2016 Amanieu d'Antras
 //
 // Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
 // http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
 // http://opensource.org/licenses/MIT>, at your option. This file may not be
 // copied, modified, or distributed except according to those terms.
 
-#[cfg(windows)]
-use kernel32;
 #[cfg(unix)]
 use libc;
+#[cfg(windows)]
+use winapi;
 #[cfg(not(any(windows, unix)))]
 use std::thread;
-#[cfg(not(feature = "nightly"))]
-use std::sync::atomic::{fence, Ordering};
+use std::sync::atomic::spin_loop_hint;
 
 // Yields the rest of the current timeslice to the OS
 #[cfg(windows)]
 #[inline]
 fn thread_yield() {
+    // Note that this is manually defined here rather than using the definition
+    // through `winapi`. The `winapi` definition comes from the `synchapi`
+    // header which enables the "synchronization.lib" library. It turns out,
+    // however that `Sleep` comes from `kernel32.dll` so this activation isn't
+    // necessary.
+    //
+    // This was originally identified in rust-lang/rust where on MinGW the
+    // libsynchronization.a library pulls in a dependency on a newer DLL not
+    // present in older versions of Windows. (see rust-lang/rust#49438)
+    //
+    // This is a bit of a hack for now and ideally we'd fix MinGW's own import
+    // libraries, but that'll probably take a lot longer than patching this here
+    // and avoiding the `synchapi` feature entirely.
+    extern "system" {
+        fn Sleep(a: winapi::shared::minwindef::DWORD);
+    }
     unsafe {
         // We don't use SwitchToThread here because it doesn't consider all
         // threads in the system and the thread we are waiting for may not get
         // selected.
-        kernel32::Sleep(0);
+        Sleep(0);
     }
 }
 #[cfg(unix)]
 #[inline]
 fn thread_yield() {
     unsafe {
         libc::sched_yield();
     }
 }
 #[cfg(not(any(windows, unix)))]
 #[inline]
 fn thread_yield() {
     thread::yield_now();
 }
 
-// Wastes some CPU time for the given number of iterations, preferably also
+// Wastes some CPU time for the given number of iterations,
 // using a hint to indicate to the CPU that we are spinning.
-#[cfg(all(feature = "nightly", any(target_arch = "x86", target_arch = "x86_64")))]
-#[inline]
-fn cpu_relax(iterations: u32) {
-    for _ in 0..iterations {
-        unsafe {
-            asm!("pause" ::: "memory" : "volatile");
-        }
-    }
-}
-#[cfg(all(feature = "nightly", target_arch = "aarch64"))]
 #[inline]
 fn cpu_relax(iterations: u32) {
     for _ in 0..iterations {
-        unsafe {
-            asm!("yield" ::: "memory" : "volatile");
-        }
-    }
-}
-#[cfg(all(feature = "nightly",
-          not(any(target_arch = "x86", target_arch = "x86_64", target_arch = "aarch64"))))]
-#[inline]
-fn cpu_relax(iterations: u32) {
-    for _ in 0..iterations {
-        unsafe {
-            asm!("" ::: "memory" : "volatile");
-        }
-    }
-}
-#[cfg(not(feature = "nightly"))]
-#[inline]
-fn cpu_relax(iterations: u32) {
-    // This is a bit tricky: we rely on the fact that LLVM doesn't optimize
-    // atomic operations and effectively treats them as volatile.
-    for _ in 0..iterations {
-        fence(Ordering::SeqCst);
+        spin_loop_hint()
     }
 }
 
 /// A counter used to perform exponential backoff in spin loops.
 pub struct SpinWait {
     counter: u32,
 }
 
 impl SpinWait {
     /// Creates a new `SpinWait`.
-    #[cfg(feature = "nightly")]
-    #[inline]
-    pub const fn new() -> SpinWait {
-        SpinWait { counter: 0 }
-    }
-
-    /// Creates a new `SpinWait`.
-    #[cfg(not(feature = "nightly"))]
     #[inline]
     pub fn new() -> SpinWait {
         SpinWait { counter: 0 }
     }
 
     /// Resets a `SpinWait` to its initial state.
     #[inline]
     pub fn reset(&mut self) {
deleted file mode 100644
--- a/third_party/rust/parking_lot_core/src/stable.rs
+++ /dev/null
@@ -1,87 +0,0 @@
-// Copyright 2016 Amanieu d'Antras
-//
-// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
-// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
-// http://opensource.org/licenses/MIT>, at your option. This file may not be
-// copied, modified, or distributed except according to those terms.
-
-#![allow(dead_code)]
-
-use std::sync::atomic;
-
-// Re-export this for convenience
-pub use std::sync::atomic::{fence, Ordering};
-
-// Wrapper around AtomicUsize for non-nightly which has usable compare_exchange
-// and compare_exchange_weak methods.
-pub struct AtomicUsize(atomic::AtomicUsize);
-pub use self::AtomicUsize as AtomicU8;
-
-// Constants for static initialization
-pub const ATOMIC_USIZE_INIT: AtomicUsize = AtomicUsize(atomic::ATOMIC_USIZE_INIT);
-pub use self::ATOMIC_USIZE_INIT as ATOMIC_U8_INIT;
-
-impl AtomicUsize {
-    #[inline]
-    pub fn new(val: usize) -> AtomicUsize {
-        AtomicUsize(atomic::AtomicUsize::new(val))
-    }
-    #[inline]
-    pub fn load(&self, order: Ordering) -> usize {
-        self.0.load(order)
-    }
-    #[inline]
-    pub fn store(&self, val: usize, order: Ordering) {
-        self.0.store(val, order);
-    }
-    #[inline]
-    pub fn swap(&self, val: usize, order: Ordering) -> usize {
-        self.0.swap(val, order)
-    }
-    #[inline]
-    pub fn fetch_add(&self, val: usize, order: Ordering) -> usize {
-        self.0.fetch_add(val, order)
-    }
-    #[inline]
-    pub fn fetch_sub(&self, val: usize, order: Ordering) -> usize {
-        self.0.fetch_sub(val, order)
-    }
-    #[inline]
-    pub fn fetch_and(&self, val: usize, order: Ordering) -> usize {
-        self.0.fetch_and(val, order)
-    }
-    #[inline]
-    pub fn fetch_or(&self, val: usize, order: Ordering) -> usize {
-        self.0.fetch_or(val, order)
-    }
-    #[inline]
-    pub fn compare_exchange(
-        &self,
-        old: usize,
-        new: usize,
-        order: Ordering,
-        _: Ordering,
-    ) -> Result<usize, usize> {
-        let res = self.0.compare_and_swap(old, new, order);
-        if res == old {
-            Ok(res)
-        } else {
-            Err(res)
-        }
-    }
-    #[inline]
-    pub fn compare_exchange_weak(
-        &self,
-        old: usize,
-        new: usize,
-        order: Ordering,
-        _: Ordering,
-    ) -> Result<usize, usize> {
-        let res = self.0.compare_and_swap(old, new, order);
-        if res == old {
-            Ok(res)
-        } else {
-            Err(res)
-        }
-    }
-}
--- a/third_party/rust/parking_lot_core/src/thread_parker/linux.rs
+++ b/third_party/rust/parking_lot_core/src/thread_parker/linux.rs
@@ -4,29 +4,29 @@
 // http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
 // http://opensource.org/licenses/MIT>, at your option. This file may not be
 // copied, modified, or distributed except according to those terms.
 
 use std::sync::atomic::{AtomicI32, Ordering};
 use std::time::Instant;
 use libc;
 
-#[cfg(target_arch = "x86")]
-const SYS_FUTEX: libc::c_long = 240;
-#[cfg(target_arch = "x86_64")]
-const SYS_FUTEX: libc::c_long = 202;
-#[cfg(target_arch = "arm")]
-const SYS_FUTEX: libc::c_long = 240;
-#[cfg(target_arch = "aarch64")]
-const SYS_FUTEX: libc::c_long = 98;
-
 const FUTEX_WAIT: i32 = 0;
 const FUTEX_WAKE: i32 = 1;
 const FUTEX_PRIVATE: i32 = 128;
 
+// x32 Linux uses a non-standard type for tv_nsec in timespec.
+// See https://sourceware.org/bugzilla/show_bug.cgi?id=16437
+#[cfg(all(target_arch = "x86_64", target_pointer_width = "32"))]
+#[allow(non_camel_case_types)]
+type tv_nsec_t = i64;
+#[cfg(not(all(target_arch = "x86_64", target_pointer_width = "32")))]
+#[allow(non_camel_case_types)]
+type tv_nsec_t = libc::c_long;
+
 // Helper type for putting a thread to sleep until some other thread wakes it up
 pub struct ThreadParker {
     futex: AtomicI32,
 }
 
 impl ThreadParker {
     pub fn new() -> ThreadParker {
         ThreadParker {
@@ -44,17 +44,23 @@ impl ThreadParker {
     pub unsafe fn timed_out(&self) -> bool {
         self.futex.load(Ordering::Relaxed) != 0
     }
 
     // Parks the thread until it is unparked. This should be called after it has
     // been added to the queue, after unlocking the queue.
     pub unsafe fn park(&self) {
         while self.futex.load(Ordering::Acquire) != 0 {
-            let r = libc::syscall(SYS_FUTEX, &self.futex, FUTEX_WAIT | FUTEX_PRIVATE, 1, 0);
+            let r = libc::syscall(
+                libc::SYS_futex,
+                &self.futex,
+                FUTEX_WAIT | FUTEX_PRIVATE,
+                1,
+                0,
+            );
             debug_assert!(r == 0 || r == -1);
             if r == -1 {
                 debug_assert!(
                     *libc::__errno_location() == libc::EINTR
                         || *libc::__errno_location() == libc::EAGAIN
                 );
             }
         }
@@ -72,19 +78,25 @@ impl ThreadParker {
             let diff = timeout - now;
             if diff.as_secs() as libc::time_t as u64 != diff.as_secs() {
                 // Timeout overflowed, just sleep indefinitely
                 self.park();
                 return true;
             }
             let ts = libc::timespec {
                 tv_sec: diff.as_secs() as libc::time_t,
-                tv_nsec: diff.subsec_nanos() as libc::c_long,
+                tv_nsec: diff.subsec_nanos() as tv_nsec_t,
             };
-            let r = libc::syscall(SYS_FUTEX, &self.futex, FUTEX_WAIT | FUTEX_PRIVATE, 1, &ts);
+            let r = libc::syscall(
+                libc::SYS_futex,
+                &self.futex,
+                FUTEX_WAIT | FUTEX_PRIVATE,
+                1,
+                &ts,
+            );
             debug_assert!(r == 0 || r == -1);
             if r == -1 {
                 debug_assert!(
                     *libc::__errno_location() == libc::EINTR
                         || *libc::__errno_location() == libc::EAGAIN
                         || *libc::__errno_location() == libc::ETIMEDOUT
                 );
             }
@@ -111,15 +123,15 @@ pub struct UnparkHandle {
 }
 
 impl UnparkHandle {
     // Wakes up the parked thread. This should be called after the queue lock is
     // released to avoid blocking the queue for too long.
     pub unsafe fn unpark(self) {
         // The thread data may have been freed at this point, but it doesn't
         // matter since the syscall will just return EFAULT in that case.
-        let r = libc::syscall(SYS_FUTEX, self.futex, FUTEX_WAKE | FUTEX_PRIVATE, 1);
+        let r = libc::syscall(libc::SYS_futex, self.futex, FUTEX_WAKE | FUTEX_PRIVATE, 1);
         debug_assert!(r == 0 || r == 1 || r == -1);
         if r == -1 {
             debug_assert_eq!(*libc::__errno_location(), libc::EFAULT);
         }
     }
 }
--- a/third_party/rust/parking_lot_core/src/thread_parker/unix.rs
+++ b/third_party/rust/parking_lot_core/src/thread_parker/unix.rs
@@ -213,15 +213,13 @@ unsafe fn timeout_to_timespec(timeout: D
     let now = timespec_now();
     let mut nsec = now.tv_nsec + timeout.subsec_nanos() as libc::c_long;
     let mut sec = now.tv_sec.checked_add(timeout.as_secs() as libc::time_t);
     if nsec >= 1_000_000_000 {
         nsec -= 1_000_000_000;
         sec = sec.and_then(|sec| sec.checked_add(1));
     }
 
-    sec.map(|sec| {
-        libc::timespec {
-            tv_nsec: nsec,
-            tv_sec: sec,
-        }
+    sec.map(|sec| libc::timespec {
+        tv_nsec: nsec,
+        tv_sec: sec,
     })
 }
--- a/third_party/rust/parking_lot_core/src/thread_parker/windows/keyed_event.rs
+++ b/third_party/rust/parking_lot_core/src/thread_parker/windows/keyed_event.rs
@@ -1,117 +1,115 @@
 // Copyright 2016 Amanieu d'Antras
 //
 // Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
 // http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
 // http://opensource.org/licenses/MIT>, at your option. This file may not be
 // copied, modified, or distributed except according to those terms.
 
-#[cfg(feature = "nightly")]
 use std::sync::atomic::{AtomicUsize, Ordering};
-#[cfg(not(feature = "nightly"))]
-use stable::{AtomicUsize, Ordering};
 use std::time::Instant;
 use std::ptr;
 use std::mem;
-use winapi;
-use kernel32;
+
+use winapi::shared::minwindef::{TRUE, ULONG};
+use winapi::shared::ntdef::NTSTATUS;
+use winapi::shared::ntstatus::{STATUS_SUCCESS, STATUS_TIMEOUT};
+use winapi::um::handleapi::CloseHandle;
+use winapi::um::libloaderapi::{GetModuleHandleA, GetProcAddress};
+use winapi::um::winnt::{ACCESS_MASK, GENERIC_READ, GENERIC_WRITE, LPCSTR};
+use winapi::um::winnt::{BOOLEAN, HANDLE, LARGE_INTEGER, PHANDLE, PLARGE_INTEGER, PVOID};
 
 const STATE_UNPARKED: usize = 0;
 const STATE_PARKED: usize = 1;
 const STATE_TIMED_OUT: usize = 2;
 
 #[allow(non_snake_case)]
 pub struct KeyedEvent {
-    handle: winapi::HANDLE,
+    handle: HANDLE,
     NtReleaseKeyedEvent: extern "system" fn(
-        EventHandle: winapi::HANDLE,
-        Key: winapi::PVOID,
-        Alertable: winapi::BOOLEAN,
-        Timeout: winapi::PLARGE_INTEGER,
-    ) -> winapi::NTSTATUS,
+        EventHandle: HANDLE,
+        Key: PVOID,
+        Alertable: BOOLEAN,
+        Timeout: PLARGE_INTEGER,
+    ) -> NTSTATUS,
     NtWaitForKeyedEvent: extern "system" fn(
-        EventHandle: winapi::HANDLE,
-        Key: winapi::PVOID,
-        Alertable: winapi::BOOLEAN,
-        Timeout: winapi::PLARGE_INTEGER,
-    ) -> winapi::NTSTATUS,
+        EventHandle: HANDLE,
+        Key: PVOID,
+        Alertable: BOOLEAN,
+        Timeout: PLARGE_INTEGER,
+    ) -> NTSTATUS,
 }
 
 impl KeyedEvent {
-    unsafe fn wait_for(
-        &self,
-        key: winapi::PVOID,
-        timeout: winapi::PLARGE_INTEGER,
-    ) -> winapi::NTSTATUS {
+    unsafe fn wait_for(&self, key: PVOID, timeout: PLARGE_INTEGER) -> NTSTATUS {
         (self.NtWaitForKeyedEvent)(self.handle, key, 0, timeout)
     }
 
-    unsafe fn release(&self, key: winapi::PVOID) -> winapi::NTSTATUS {
+    unsafe fn release(&self, key: PVOID) -> NTSTATUS {
         (self.NtReleaseKeyedEvent)(self.handle, key, 0, ptr::null_mut())
     }
 
     #[allow(non_snake_case)]
     pub unsafe fn create() -> Option<KeyedEvent> {
-        let ntdll = kernel32::GetModuleHandleA(b"ntdll.dll\0".as_ptr() as winapi::LPCSTR);
+        let ntdll = GetModuleHandleA(b"ntdll.dll\0".as_ptr() as LPCSTR);
         if ntdll.is_null() {
             return None;
         }
 
-        let NtCreateKeyedEvent =
-            kernel32::GetProcAddress(ntdll, b"NtCreateKeyedEvent\0".as_ptr() as winapi::LPCSTR);
+        let NtCreateKeyedEvent = GetProcAddress(ntdll, b"NtCreateKeyedEvent\0".as_ptr() as LPCSTR);
         if NtCreateKeyedEvent.is_null() {
             return None;
         }
         let NtReleaseKeyedEvent =
-            kernel32::GetProcAddress(ntdll, b"NtReleaseKeyedEvent\0".as_ptr() as winapi::LPCSTR);
+            GetProcAddress(ntdll, b"NtReleaseKeyedEvent\0".as_ptr() as LPCSTR);
         if NtReleaseKeyedEvent.is_null() {
             return None;
         }
         let NtWaitForKeyedEvent =
-            kernel32::GetProcAddress(ntdll, b"NtWaitForKeyedEvent\0".as_ptr() as winapi::LPCSTR);
+            GetProcAddress(ntdll, b"NtWaitForKeyedEvent\0".as_ptr() as LPCSTR);
         if NtWaitForKeyedEvent.is_null() {
             return None;
         }
 
         let NtCreateKeyedEvent: extern "system" fn(
-            KeyedEventHandle: winapi::PHANDLE,
-            DesiredAccess: winapi::ACCESS_MASK,
-            ObjectAttributes: winapi::PVOID,
-            Flags: winapi::ULONG,
-        ) -> winapi::NTSTATUS = mem::transmute(NtCreateKeyedEvent);
+            KeyedEventHandle: PHANDLE,
+            DesiredAccess: ACCESS_MASK,
+            ObjectAttributes: PVOID,
+            Flags: ULONG,
+        ) -> NTSTATUS = mem::transmute(NtCreateKeyedEvent);
         let mut handle = mem::uninitialized();
         let status = NtCreateKeyedEvent(
             &mut handle,
-            winapi::GENERIC_READ | winapi::GENERIC_WRITE,
+            GENERIC_READ | GENERIC_WRITE,
             ptr::null_mut(),
             0,
         );
-        if status != winapi::STATUS_SUCCESS {
+        if status != STATUS_SUCCESS {
             return None;
         }
 
         Some(KeyedEvent {
-            handle: handle,
+            handle,
             NtReleaseKeyedEvent: mem::transmute(NtReleaseKeyedEvent),
             NtWaitForKeyedEvent: mem::transmute(NtWaitForKeyedEvent),
         })
     }
 
     pub unsafe fn prepare_park(&'static self, key: &AtomicUsize) {
         key.store(STATE_PARKED, Ordering::Relaxed);
     }
 
     pub unsafe fn timed_out(&'static self, key: &AtomicUsize) -> bool {
         key.load(Ordering::Relaxed) == STATE_TIMED_OUT
     }
 
     pub unsafe fn park(&'static self, key: &AtomicUsize) {
-        let status = self.wait_for(key as *const _ as winapi::PVOID, ptr::null_mut());
-        debug_assert_eq!(status, winapi::STATUS_SUCCESS);
+        let status = self.wait_for(key as *const _ as PVOID, ptr::null_mut());
+        debug_assert_eq!(status, STATUS_SUCCESS);
     }
 
     pub unsafe fn park_until(&'static self, key: &AtomicUsize, timeout: Instant) -> bool {
         let now = Instant::now();
         if timeout <= now {
             // If another thread unparked us, we need to call
             // NtWaitForKeyedEvent otherwise that thread will stay stuck at
             // NtReleaseKeyedEvent.
@@ -119,36 +117,36 @@ impl KeyedEvent {
                 self.park(key);
                 return true;
             }
             return false;
         }
 
         // NT uses a timeout in units of 100ns. We use a negative value to
         // indicate a relative timeout based on a monotonic clock.
+        let mut nt_timeout: LARGE_INTEGER = mem::zeroed();
         let diff = timeout - now;
-        let nt_timeout = (diff.as_secs() as winapi::LARGE_INTEGER)
+        let value = (diff.as_secs() as i64)
             .checked_mul(-10000000)
-            .and_then(|x| {
-                x.checked_sub((diff.subsec_nanos() as winapi::LARGE_INTEGER + 99) / 100)
-            });
-        let mut nt_timeout = match nt_timeout {
-            Some(x) => x,
+            .and_then(|x| x.checked_sub((diff.subsec_nanos() as i64 + 99) / 100));
+
+        match value {
+            Some(x) => *nt_timeout.QuadPart_mut() = x,
             None => {
                 // Timeout overflowed, just sleep indefinitely
                 self.park(key);
                 return true;
             }
         };
 
-        let status = self.wait_for(key as *const _ as winapi::PVOID, &mut nt_timeout);
-        if status == winapi::STATUS_SUCCESS {
+        let status = self.wait_for(key as *const _ as PVOID, &mut nt_timeout);
+        if status == STATUS_SUCCESS {
             return true;
         }
-        debug_assert_eq!(status, winapi::STATUS_TIMEOUT);
+        debug_assert_eq!(status, STATUS_TIMEOUT);
 
         // If another thread unparked us, we need to call NtWaitForKeyedEvent
         // otherwise that thread will stay stuck at NtReleaseKeyedEvent.
         if key.swap(STATE_TIMED_OUT, Ordering::Relaxed) == STATE_UNPARKED {
             self.park(key);
             return true;
         }
         false
@@ -168,18 +166,18 @@ impl KeyedEvent {
             }
         }
     }
 }
 
 impl Drop for KeyedEvent {
     fn drop(&mut self) {
         unsafe {
-            let ok = kernel32::CloseHandle(self.handle);
-            debug_assert_eq!(ok, winapi::TRUE);
+            let ok = CloseHandle(self.handle);
+            debug_assert_eq!(ok, TRUE);
         }
     }
 }
 
 // Handle for a thread that is about to be unparked. We need to mark the thread
 // as unparked while holding the queue lock, but we delay the actual unparking
 // until after the queue lock is released.
 pub struct UnparkHandle {
@@ -187,13 +185,13 @@ pub struct UnparkHandle {
     keyed_event: &'static KeyedEvent,
 }
 
 impl UnparkHandle {
     // Wakes up the parked thread. This should be called after the queue lock is
     // released to avoid blocking the queue for too long.
     pub unsafe fn unpark(self) {
         if !self.key.is_null() {
-            let status = self.keyed_event.release(self.key as winapi::PVOID);
-            debug_assert_eq!(status, winapi::STATUS_SUCCESS);
+            let status = self.keyed_event.release(self.key as PVOID);
+            debug_assert_eq!(status, STATUS_SUCCESS);
         }
     }
 }
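
The relative-timeout computation in `park_until` above is easier to check in isolation; this standalone sketch mirrors that arithmetic (it is not an API of the crate):

```rust
use std::time::Duration;

/// Convert a relative timeout into the negative 100 ns tick count that the
/// NT keyed-event wait expects, or None if the value would overflow an i64.
fn to_nt_relative_timeout(diff: Duration) -> Option<i64> {
    (diff.as_secs() as i64)
        .checked_mul(-10_000_000)
        .and_then(|x| x.checked_sub((diff.subsec_nanos() as i64 + 99) / 100))
}

fn main() {
    // 1.5 s == 15_000_000 hundred-nanosecond ticks, negated to mean "relative".
    assert_eq!(
        to_nt_relative_timeout(Duration::from_millis(1500)),
        Some(-15_000_000)
    );
}
```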
--- a/third_party/rust/parking_lot_core/src/thread_parker/windows/mod.rs
+++ b/third_party/rust/parking_lot_core/src/thread_parker/windows/mod.rs
@@ -1,19 +1,16 @@
 // Copyright 2016 Amanieu d'Antras
 //
 // Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
 // http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
 // http://opensource.org/licenses/MIT>, at your option. This file may not be
 // copied, modified, or distributed except according to those terms.
 
-#[cfg(feature = "nightly")]
 use std::sync::atomic::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT};
-#[cfg(not(feature = "nightly"))]
-use stable::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT};
 use std::time::Instant;
 
 mod keyed_event;
 mod waitaddress;
 
 enum Backend {
     KeyedEvent(keyed_event::KeyedEvent),
     WaitAddress(waitaddress::WaitAddress),
--- a/third_party/rust/parking_lot_core/src/thread_parker/windows/waitaddress.rs
+++ b/third_party/rust/parking_lot_core/src/thread_parker/windows/waitaddress.rs
@@ -1,55 +1,54 @@
 // Copyright 2016 Amanieu d'Antras
 //
 // Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
 // http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
 // http://opensource.org/licenses/MIT>, at your option. This file may not be
 // copied, modified, or distributed except according to those terms.
 
-#[cfg(feature = "nightly")]
 use std::sync::atomic::{AtomicUsize, Ordering};
-#[cfg(not(feature = "nightly"))]
-use stable::{AtomicUsize, Ordering};
 use std::time::Instant;
 use std::mem;
-use winapi;
-use kernel32;
+
+use winapi::shared::basetsd::SIZE_T;
+use winapi::shared::minwindef::{BOOL, DWORD, FALSE, TRUE};
+use winapi::shared::winerror::ERROR_TIMEOUT;
+use winapi::um::errhandlingapi::GetLastError;
+use winapi::um::libloaderapi::{GetModuleHandleA, GetProcAddress};
+use winapi::um::winbase::INFINITE;
+use winapi::um::winnt::{LPCSTR, PVOID};
 
 #[allow(non_snake_case)]
 pub struct WaitAddress {
     WaitOnAddress: extern "system" fn(
-        Address: winapi::PVOID,
-        CompareAddress: winapi::PVOID,
-        AddressSize: winapi::SIZE_T,
-        dwMilliseconds: winapi::DWORD,
-    ) -> winapi::BOOL,
-    WakeByAddressSingle: extern "system" fn(Address: winapi::PVOID),
+        Address: PVOID,
+        CompareAddress: PVOID,
+        AddressSize: SIZE_T,
+        dwMilliseconds: DWORD,
+    ) -> BOOL,
+    WakeByAddressSingle: extern "system" fn(Address: PVOID),
 }
 
 impl WaitAddress {
     #[allow(non_snake_case)]
     pub unsafe fn create() -> Option<WaitAddress> {
         // MSDN claims that WaitOnAddress and WakeByAddressSingle are
         // located in kernel32.dll, but they are lying...
-        let synch_dll = kernel32::GetModuleHandleA(b"api-ms-win-core-synch-l1-2-0.dll\0".as_ptr()
-            as winapi::LPCSTR);
+        let synch_dll = GetModuleHandleA(b"api-ms-win-core-synch-l1-2-0.dll\0".as_ptr() as LPCSTR);
         if synch_dll.is_null() {
             return None;
         }
 
-        let WaitOnAddress =
-            kernel32::GetProcAddress(synch_dll, b"WaitOnAddress\0".as_ptr() as winapi::LPCSTR);
+        let WaitOnAddress = GetProcAddress(synch_dll, b"WaitOnAddress\0".as_ptr() as LPCSTR);
         if WaitOnAddress.is_null() {
             return None;
         }
-        let WakeByAddressSingle = kernel32::GetProcAddress(
-            synch_dll,
-            b"WakeByAddressSingle\0".as_ptr() as winapi::LPCSTR,
-        );
+        let WakeByAddressSingle =
+            GetProcAddress(synch_dll, b"WakeByAddressSingle\0".as_ptr() as LPCSTR);
         if WakeByAddressSingle.is_null() {
             return None;
         }
         Some(WaitAddress {
             WaitOnAddress: mem::transmute(WaitOnAddress),
             WakeByAddressSingle: mem::transmute(WakeByAddressSingle),
         })
     }
@@ -61,78 +60,75 @@ impl WaitAddress {
     pub unsafe fn timed_out(&'static self, key: &AtomicUsize) -> bool {
         key.load(Ordering::Relaxed) != 0
     }
 
     pub unsafe fn park(&'static self, key: &AtomicUsize) {
         while key.load(Ordering::Acquire) != 0 {
             let cmp = 1usize;
             let r = (self.WaitOnAddress)(
-                key as *const _ as winapi::PVOID,
-                &cmp as *const _ as winapi::PVOID,
-                mem::size_of::<usize>() as winapi::SIZE_T,
-                winapi::INFINITE,
+                key as *const _ as PVOID,
+                &cmp as *const _ as PVOID,
+                mem::size_of::<usize>() as SIZE_T,
+                INFINITE,
             );
-            debug_assert!(r == winapi::TRUE);
+            debug_assert!(r == TRUE);
         }
     }
 
     pub unsafe fn park_until(&'static self, key: &AtomicUsize, timeout: Instant) -> bool {
         while key.load(Ordering::Acquire) != 0 {
             let now = Instant::now();
             if timeout <= now {
                 return false;
             }
             let diff = timeout - now;
             let timeout = diff.as_secs()
                 .checked_mul(1000)
-                .and_then(|x| {
-                    x.checked_add((diff.subsec_nanos() as u64 + 999999) / 1000000)
-                })
+                .and_then(|x| x.checked_add((diff.subsec_nanos() as u64 + 999999) / 1000000))
                 .map(|ms| {
-                    if ms > <winapi::DWORD>::max_value() as u64 {
-                        winapi::INFINITE
+                    if ms > <DWORD>::max_value() as u64 {
+                        INFINITE
                     } else {
-                        ms as winapi::DWORD
+                        ms as DWORD
                     }
                 })
-                .unwrap_or(winapi::INFINITE);
+                .unwrap_or(INFINITE);
             let cmp = 1usize;
             let r = (self.WaitOnAddress)(
-                key as *const _ as winapi::PVOID,
-                &cmp as *const _ as winapi::PVOID,
-                mem::size_of::<usize>() as winapi::SIZE_T,
+                key as *const _ as PVOID,
+                &cmp as *const _ as PVOID,
+                mem::size_of::<usize>() as SIZE_T,
                 timeout,
             );
-            if r == winapi::FALSE {
-                debug_assert_eq!(kernel32::GetLastError(), winapi::ERROR_TIMEOUT);
+            if r == FALSE {
+                debug_assert_eq!(GetLastError(), ERROR_TIMEOUT);
             }
         }
         true
     }
 
     pub unsafe fn unpark_lock(&'static self, key: &AtomicUsize) -> UnparkHandle {
         // We don't need to lock anything, just clear the state
         key.store(0, Ordering::Release);
 
         UnparkHandle {
             key: key,
             waitaddress: self,
         }
     }
 }
 
-
 // Handle for a thread that is about to be unparked. We need to mark the thread
 // as unparked while holding the queue lock, but we delay the actual unparking
 // until after the queue lock is released.
 pub struct UnparkHandle {
     key: *const AtomicUsize,
     waitaddress: &'static WaitAddress,
 }
 
 impl UnparkHandle {
     // Wakes up the parked thread. This should be called after the queue lock is
     // released to avoid blocking the queue for too long.
     pub unsafe fn unpark(self) {
-        (self.waitaddress.WakeByAddressSingle)(self.key as winapi::PVOID);
+        (self.waitaddress.WakeByAddressSingle)(self.key as PVOID);
     }
 }
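
A minimal sketch of the dynamic lookup this file now performs through the winapi 0.3 module paths (GetModuleHandleA, then GetProcAddress, then a transmute to the known signature). This is illustrative only, not part of the patch, and assumes a Windows target with the `winapi` crate and its `libloaderapi`, `winnt`, `minwindef` and `basetsd` features enabled:

```rust
// Illustrative sketch (not from the patch): resolve WaitOnAddress at runtime,
// the same way WaitAddress::create does above.
extern crate winapi;

use std::mem;

use winapi::shared::basetsd::SIZE_T;
use winapi::shared::minwindef::{BOOL, DWORD};
use winapi::um::libloaderapi::{GetModuleHandleA, GetProcAddress};
use winapi::um::winnt::{LPCSTR, PVOID};

// Signature of WaitOnAddress as declared by the Windows SDK.
type WaitOnAddressFn = extern "system" fn(PVOID, PVOID, SIZE_T, DWORD) -> BOOL;

unsafe fn load_wait_on_address() -> Option<WaitOnAddressFn> {
    // The function lives in the api-ms-win-core-synch-l1-2-0.dll API set,
    // not in kernel32.dll, which is why it is looked up at runtime.
    let dll = GetModuleHandleA(b"api-ms-win-core-synch-l1-2-0.dll\0".as_ptr() as LPCSTR);
    if dll.is_null() {
        return None;
    }
    let f = GetProcAddress(dll, b"WaitOnAddress\0".as_ptr() as LPCSTR);
    if f.is_null() {
        return None;
    }
    // GetProcAddress returns an untyped FARPROC; transmute it to the
    // concrete function pointer type, as the code above does.
    Some(mem::transmute(f))
}

fn main() {
    // Only meaningful on Windows 8 / Server 2012 and later, where the API set exists.
    unsafe {
        match load_wait_on_address() {
            Some(_) => println!("WaitOnAddress is available"),
            None => println!("WaitOnAddress is not available"),
        }
    }
}
```

The `park_until` hunk keeps the same logic but now rounds the remaining `Duration` up to whole milliseconds (`(nanos + 999999) / 1000000`) and falls back to `INFINITE` whenever the value does not fit in a `DWORD`.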
--- a/third_party/rust/parking_lot_core/src/word_lock.rs
+++ b/third_party/rust/parking_lot_core/src/word_lock.rs
@@ -1,19 +1,16 @@
 // Copyright 2016 Amanieu d'Antras
 //
 // Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
 // http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
 // http://opensource.org/licenses/MIT>, at your option. This file may not be
 // copied, modified, or distributed except according to those terms.
 
-#[cfg(feature = "nightly")]
 use std::sync::atomic::{fence, AtomicUsize, Ordering};
-#[cfg(not(feature = "nightly"))]
-use stable::{fence, AtomicUsize, Ordering};
 use std::ptr;
 use std::mem;
 use std::cell::Cell;
 use std::thread::LocalKey;
 #[cfg(not(feature = "nightly"))]
 use std::panic;
 use spinwait::SpinWait;
 use thread_parker::ThreadParker;
new file mode 100644
--- /dev/null
+++ b/third_party/rust/redox_termios/.cargo-checksum.json
@@ -0,0 +1,1 @@
+{"files":{"Cargo.toml":"e7d20b4132bf22e2c98a5f846f8c16e7628a9181d485fd131b97127ea8719130","LICENSE":"cb46b697c3fd9d27d7bfe1b1ad48f8a58a284984504c6eb215ae2164538df7cb","README.md":"c62ac3d33174db7977cfc7fc77605885f5445c0c9c7d1310dcbe3564c2854edb","src/lib.rs":"af5bc7c59c695880fef0352f9e0f9e9f8682c262a9b1c0fce725b10553647b4e"},"package":"7e891cfe48e9100a70a3b6eb652fef28920c117d366339687bd5576160db0f76"}
\ No newline at end of file
new file mode 100644
--- /dev/null
+++ b/third_party/rust/redox_termios/Cargo.toml
@@ -0,0 +1,26 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g. crates.io) dependencies
+#
+# If you believe there's an error in this file please file an
+# issue against the rust-lang/cargo repository. If you're
+# editing this file be aware that the upstream Cargo.toml
+# will likely look very different (and much more reasonable)
+
+[package]
+name = "redox_termios"
+version = "0.1.1"
+authors = ["Jeremy Soller <jackpot51@gmail.com>"]
+description = "A Rust library to access Redox termios functions"
+documentation = "https://docs.rs/redox_termios"
+license = "MIT"
+repository = "https://github.com/redox-os/termios"
+
+[lib]
+name = "redox_termios"
+path = "src/lib.rs"
+[dependencies.redox_syscall]
+version = "0.1"
new file mode 100644
--- /dev/null
+++ b/third_party/rust/redox_termios/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2017 Redox OS
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
new file mode 100644
--- /dev/null
+++ b/third_party/rust/redox_termios/README.md
@@ -0,0 +1,2 @@
+# termios
+Redox Rust termios library
new file mode 100644
--- /dev/null
+++ b/third_party/rust/redox_termios/src/lib.rs
@@ -0,0 +1,218 @@
+#![allow(non_camel_case_types)]
+#![no_std]
+
+extern crate syscall;
+
+use core::{mem, slice};
+use core::ops::{Deref, DerefMut};
+
+pub type tcflag_t = u32;
+pub type cc_t = u8;
+
+/* c_cc { */
+pub const VEOF: usize = 0;
+pub const VEOL: usize = 1;
+pub const VEOL2: usize = 2;
+pub const VERASE: usize = 3;
+pub const VWERASE: usize = 4;
+pub const VKILL: usize = 5;
+pub const VREPRINT: usize = 6;
+pub const VSWTC: usize = 7;
+pub const VINTR: usize = 8;
+pub const VQUIT: usize = 9;
+pub const VSUSP: usize = 10;
+pub const VSTART: usize = 12;
+pub const VSTOP: usize = 13;
+pub const VLNEXT: usize = 14;
+pub const VDISCARD: usize = 15;
+pub const VMIN: usize = 16;
+pub const VTIME: usize = 17;
+pub const NCCS: usize = 32;
+/* } c_cc */
+
+/* c_iflag { */
+pub const IGNBRK: tcflag_t = 0o000001;
+pub const BRKINT: tcflag_t = 0o000002;
+pub const IGNPAR: tcflag_t = 0o000004;
+pub const PARMRK: tcflag_t = 0o000010;
+pub const INPCK: tcflag_t = 0o000020;
+pub const ISTRIP: tcflag_t = 0o000040;
+pub const INLCR: tcflag_t = 0o000100;
+pub const IGNCR: tcflag_t = 0o000200;
+pub const ICRNL: tcflag_t = 0o000400;
+pub const IXON: tcflag_t = 0o001000;
+pub const IXOFF: tcflag_t = 0o002000;
+/* } c_iflag */
+
+/* c_oflag { */
+pub const OPOST: tcflag_t = 0o000001;
+pub const ONLCR: tcflag_t = 0o000002;
+pub const OLCUC: tcflag_t = 0o000004;
+
+pub const OCRNL: tcflag_t = 0o000010;
+pub const ONOCR: tcflag_t = 0o000020;
+pub const ONLRET: tcflag_t = 0o000040;
+
+pub const OFILL: tcflag_t = 0o0000100;
+pub const OFDEL: tcflag_t = 0o0000200;
+/* } c_oflag */
+
+/* c_cflag { */
+pub const  B0: tcflag_t = 0o000000;
+pub const  B50: tcflag_t = 0o000001;
+pub const  B75: tcflag_t = 0o000002;
+pub const  B110: tcflag_t = 0o000003;
+pub const  B134: tcflag_t = 0o000004;
+pub const  B150: tcflag_t = 0o000005;
+pub const  B200: tcflag_t = 0o000006;
+pub const  B300: tcflag_t = 0o000007;
+pub const  B600: tcflag_t = 0o000010;
+pub const  B1200: tcflag_t = 0o000011;
+pub const  B1800: tcflag_t = 0o000012;
+pub const  B2400: tcflag_t = 0o000013;
+pub const  B4800: tcflag_t = 0o000014;
+pub const  B9600: tcflag_t = 0o000015;
+pub const  B19200: tcflag_t = 0o000016;
+pub const  B38400: tcflag_t = 0o000017;
+pub const  B57600: tcflag_t = 0o0020;
+pub const  B115200: tcflag_t = 0o0021;
+pub const  B230400: tcflag_t = 0o0022;
+pub const  B460800: tcflag_t = 0o0023;
+pub const  B500000: tcflag_t = 0o0024;
+pub const  B576000: tcflag_t = 0o0025;
+pub const  B921600: tcflag_t = 0o0026;
+pub const  B1000000: tcflag_t = 0o0027;
+pub const  B1152000: tcflag_t = 0o0030;
+pub const  B1500000: tcflag_t = 0o0031;
+pub const  B2000000: tcflag_t = 0o0032;
+pub const  B2500000: tcflag_t = 0o0033;
+pub const  B3000000: tcflag_t = 0o0034;
+pub const  B3500000: tcflag_t = 0o0035;
+pub const  B4000000: tcflag_t = 0o0036;
+
+pub const __MAX_BAUD: tcflag_t = B4000000;
+
+pub const CSIZE: tcflag_t = 0o0001400;
+pub const   CS5: tcflag_t = 0o0000000;
+pub const   CS6: tcflag_t = 0o0000400;
+pub const   CS7: tcflag_t = 0o0001000;
+pub const   CS8: tcflag_t = 0o0001400;
+
+pub const CSTOPB: tcflag_t = 0o0002000;
+pub const CREAD: tcflag_t = 0o0004000;
+pub const PARENB: tcflag_t = 0o0010000;
+pub const PARODD: tcflag_t = 0o0020000;
+pub const HUPCL: tcflag_t = 0o0040000;
+
+pub const CLOCAL: tcflag_t = 0o0100000;
+/* } c_cflag */
+
+/* c_lflag { */
+pub const ISIG: tcflag_t = 0x00000080;
+pub const ICANON: tcflag_t = 0x00000100;
+pub const ECHO: tcflag_t = 0x00000008;
+pub const ECHOE: tcflag_t = 0x00000002;
+pub const ECHOK: tcflag_t = 0x00000004;
+pub const ECHONL: tcflag_t = 0x00000010;
+pub const NOFLSH: tcflag_t = 0x80000000;
+pub const TOSTOP: tcflag_t = 0x00400000;
+pub const IEXTEN: tcflag_t = 0x00000400;
+/* } c_lflag */
+
+#[derive(Clone, Copy, Debug)]
+#[repr(C)]
+pub struct Termios {
+    pub c_iflag: tcflag_t,
+    pub c_oflag: tcflag_t,
+    pub c_cflag: tcflag_t,
+    pub c_lflag: tcflag_t,
+    pub c_cc: [cc_t; 32]
+}
+
+impl Default for Termios {
+    fn default() -> Termios {
+        let mut termios = Termios {
+            c_iflag: ICRNL | IXON,
+            c_oflag: OPOST | ONLCR,
+            c_cflag: B38400 | CS8 | CREAD | HUPCL,
+            c_lflag: ISIG | ICANON | ECHO | ECHOE | ECHOK | IEXTEN,
+            c_cc: [0; 32]
+        };
+
+        {
+            let mut cc = |i: usize, b: cc_t| {
+                termios.c_cc[i] = b;
+            };
+
+            cc(VEOF, 0o004);    // CTRL-D
+            cc(VEOL, 0o000);    // NUL
+            cc(VEOL2, 0o000);   // NUL
+            cc(VERASE, 0o177);  // DEL
+            cc(VWERASE, 0o027); // CTRL-W
+            cc(VKILL, 0o025);   // CTRL-U
+            cc(VREPRINT, 0o022);// CTRL-R
+            cc(VINTR, 0o003);   // CTRL-C
+            cc(VQUIT, 0o034);   // CTRL-\
+            cc(VSUSP, 0o032);   // CTRL-Z
+            cc(VSTART, 0o021);  // CTRL-Q
+            cc(VSTOP, 0o023);   // CTRL-S
+            cc(VLNEXT, 0o026);  // CTRL-V
+            cc(VDISCARD, 0o017);// CTRL-O
+            cc(VMIN, 1);
+            cc(VTIME, 0);
+        }
+
+        termios
+    }
+}
+
+impl Termios {
+    pub fn make_raw(&mut self) {
+        self.c_iflag &= !(IGNBRK | BRKINT | PARMRK | ISTRIP | INLCR | IGNCR | ICRNL | IXON);
+        self.c_oflag &= !OPOST;
+        self.c_cflag &= !(CSIZE | PARENB);
+        self.c_cflag |= CS8;
+        self.c_lflag &= !(ECHO | ECHONL | ICANON | ISIG | IEXTEN);
+    }
+}
+
+impl Deref for Termios {
+    type Target = [u8];
+    fn deref(&self) -> &[u8] {
+        unsafe {
+            slice::from_raw_parts(self as *const Termios as *const u8, mem::size_of::<Termios>()) as &[u8]
+        }
+    }
+}
+
+impl DerefMut for Termios {
+    fn deref_mut(&mut self) -> &mut [u8] {
+        unsafe {
+            slice::from_raw_parts_mut(self as *mut Termios as *mut u8, mem::size_of::<Termios>()) as &mut [u8]
+        }
+    }
+}
+
+#[derive(Clone, Copy, Debug, Default)]
+#[repr(C)]
+pub struct Winsize {
+    pub ws_row: u16,
+    pub ws_col: u16
+}
+
+impl Deref for Winsize {
+    type Target = [u8];
+    fn deref(&self) -> &[u8] {
+        unsafe {
+            slice::from_raw_parts(self as *const Winsize as *const u8, mem::size_of::<Winsize>()) as &[u8]
+        }
+    }
+}
+
+impl DerefMut for Winsize {
+    fn deref_mut(&mut self) -> &mut [u8] {
+        unsafe {
+            slice::from_raw_parts_mut(self as *mut Winsize as *mut u8, mem::size_of::<Winsize>()) as &mut [u8]
+        }
+    }
+}
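
A short usage sketch of the types above, illustrative only and assuming a crate that depends on `redox_termios`: `Termios::default()` yields a cooked-mode configuration, `make_raw` strips canonical processing and echo, and the `Deref` impls expose the struct as raw bytes.

```rust
// Illustrative sketch (not part of the vendored crate).
extern crate redox_termios;

use std::mem;

fn main() {
    let mut t = redox_termios::Termios::default();
    t.make_raw();

    // make_raw clears ICANON and ECHO (among other flags) in c_lflag.
    assert_eq!(t.c_lflag & (redox_termios::ICANON | redox_termios::ECHO), 0);

    // Deref<Target = [u8]> lets the struct be written out as plain bytes.
    let bytes: &[u8] = &t;
    assert_eq!(bytes.len(), mem::size_of::<redox_termios::Termios>());
}
```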
new file mode 100644
--- /dev/null
+++ b/third_party/rust/termion/.cargo-checksum.json
@@ -0,0 +1,1 @@
+{"files":{".travis.yml":"5ac96d0b7956730882c41fa620f04388ff3100205fac606ae3bd8353661794a9","Cargo.toml":"b061b09fc4bbf280c932f4b7d3c4641a5a2f0eb8c57029de7d0e4441888a1765","LICENSE":"6252f0c8d4a0df9b2dc0c6464cb2489dbe8859b0eb727e19c14e6af1ee432394","README.md":"71cac837cd6f1326865add7dd565ea2738756648de87fc2d35d0cf22a2512630","examples/alternate_screen.rs":"37978473e77331ad613843049b4f355e32a51a7b8ece9ee52efe02997391e4ec","examples/alternate_screen_raw.rs":"bfd68f86de929952aaed7e1e7175694d771be2a0b0943092fec4f58bf7473dc3","examples/async.rs":"2fdf5fe69edd3b407de3c1c8caf23f9e19a7f25b55578c84838e4305a9857c42","examples/click.rs":"bb3a76f4817292a82b00d92281a86a400038a8282b6c9d41a34dbaca84fb0caf","examples/color.rs":"808219c739677b9f2645e0ae7975a4bd8981255ed8bfc9df2413f8f8059bbda5","examples/commie.rs":"7bb00a7f669c74ccb3a9e9c8ff39fd01aa984f7c38983e5c1cfe6940f90b0c73","examples/detect_color.rs":"764d6465c6879efc38aca8d980a433bac058c1f03d578ac764437af289931824","examples/is_tty.rs":"9a76bdfb11ea84e7b25f6efc06c8d3a8d13a03448bd826bd283b935852f409c7","examples/keys.rs":"cd89f31a21062486e7c014843b360cee35dcee07d8022fc0282eddc48e46bdc7","examples/mouse.rs":"eeccab8043cec987e40e175f03b63c9c6cdc7bcd5808f51a57d0e970cae1c04e","examples/rainbow.rs":"c015176eba7c7a81c6d302d6fc41abea8817935ee804af59d64e3098b127c333","examples/read.rs":"b95fb9b02d2cbf978096825b18ffa25772467df3417c3f3cf5edfe811181a67a","examples/rustc_fun.rs":"f39bd8dbe224cb592d58a0084d15d1eb80d86eade72423fe51acc455e0e1ea68","examples/simple.rs":"eac1aab0d251f884a7d7d7d0d017b6ac4854b38ffcfb244d6a80102ed7a981ab","examples/size.rs":"ae89e7b98a29040f1b641d75226f5738770e2e043cee3f49649accaaeaabbeac","examples/truecolor.rs":"67128ef4870e9e742b1090fa129097484633d82fb015009b1760295f7a24e6af","logo.svg":"09b7a6bca3185acddc217d3d3baab23627c5e03a192f815116e71cf00cb31ddc","src/async.rs":"f0126ff1ee18c8c355bb86deb76e852c5f6bb47fbbaa286cce3d38cb656dfa28","src/clear.rs":"a9cf9a9f92cef2430239dc33d8ecb4e291c46b650f1f71b63248d0a0215768ba","src/color.rs":"2f2c8f6f572b22fc40a62d0502fc192c843dbeed21bb7ecc1954e881d41c9a1c","src/cursor.rs":"9132638902c4e42728efaac247ca363c1bda3e80d6c3cc20bc6928b9fe9a1d10","src/event.rs":"321c0a73cd6208a8ef17083d1bd907e308d5197cfd198a3f1ed1498b7e2b8055","src/input.rs":"a3e226a53b6536aef89d66a1e4974453389c86116c175c3efc4fc4bb9b41d60c","src/lib.rs":"549beee5f5a6fb7954af37df7fa598ba6c716f2287123e09d300e048af2a70ac","src/macros.rs":"8611f257ffc046ed25870611f1353a96e17a5671efbe2bbf33aae525b53a4df3","src/raw.rs":"6949b164cf1bd0a4146c169b948100ed626b3e0747df15a54b7f41853d348ea1","src/screen.rs":"19a9a2c42abe2afdb75d4850dc6ee54f33808f6da7f59644668e683418cbce4a","src/scroll.rs":"d0ba08663127e09e111fafc9f84c1fe42662b74f9acbca6b5d26d0b335ee4136","src/style.rs":"b2c805d710cf553835a01482f014aff7395f79d5f71f1e5029ae679e002ac010","src/sys/redox/attr.rs":"834d21cc17090fb7989906ce885f4efed252d65086c05e32ecc05792eb69151a","src/sys/redox/mod.rs":"4066ba97f10c8b87a6ebf32257c89d36338366e1230781aeb5fca98bd5f5bae0","src/sys/redox/size.rs":"b892d0053f40c343eeb40ea06a10fb77abd2758eecda6a7cccb0b07c07fff6eb","src/sys/redox/tty.rs":"3369a6ee3a21400053a023c01912bdd92b709915b833e25e557214dfad425224","src/sys/unix/attr.rs":"7b17e4841eab69533d2561764506211e2967cd4d7464a309e6d2832473b75dd7","src/sys/unix/mod.rs":"8ddebce9f5b2dbbd419519beb7d41f6d6a7eaed40a70cd0d0676ee893549f7a7","src/sys/unix/size.rs":"19f3de9ced2b329a50a9752ba1be406b25648b298c1a2b3e83582a34e21d2998","src/sys/unix/tty.rs":"eeea0279f76838aa4badd807325403d32ab359334cf59f9574191afcbe86c811"},"package"
:"689a3bdfaab439fd92bc87df5c4c78417d3cbe537487274e9b0b2dce76e92096"}
\ No newline at end of file
new file mode 100644
--- /dev/null
+++ b/third_party/rust/termion/.travis.yml
@@ -0,0 +1,14 @@
+language: rust
+cache: cargo
+rust:
+    - stable
+    - beta
+    - nightly
+os:
+    - linux
+    - osx
+script:
+    - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then FAKETTY="script -q /dev/null"; fi
+    - $FAKETTY cargo build --verbose
+    - $FAKETTY cargo test --verbose
+    - $FAKETTY cargo test --release --verbose
new file mode 100644
--- /dev/null
+++ b/third_party/rust/termion/Cargo.toml
@@ -0,0 +1,17 @@
+[package]
+name = "termion"
+version = "1.5.1"
+authors = ["ticki <Ticki@users.noreply.github.com>", "gycos <alexandre.bury@gmail.com>", "IGI-111 <igi-111@protonmail.com>"]
+description = "A bindless library for manipulating terminals."
+repository = "https://github.com/ticki/termion"
+documentation = "https://docs.rs/termion"
+license = "MIT"
+keywords = ["tty", "color", "terminal", "password", "tui"]
+exclude = ["target", "CHANGELOG.md", "image.png", "Cargo.lock"]
+
+[target.'cfg(not(target_os = "redox"))'.dependencies]
+libc = "0.2.8"
+
+[target.'cfg(target_os = "redox")'.dependencies]
+redox_syscall = "0.1"
+redox_termios = "0.1"
new file mode 100644
--- /dev/null
+++ b/third_party/rust/termion/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2016 Ticki
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
new file mode 100644
--- /dev/null
+++ b/third_party/rust/termion/README.md
@@ -0,0 +1,181 @@
+<p align="center">
+<img alt="Termion logo" src="https://rawgit.com/ticki/termion/master/logo.svg" />
+</p>
+
+[![Build Status](https://travis-ci.org/ticki/termion.svg?branch=master)](https://travis-ci.org/ticki/termion) [![Latest Version](https://img.shields.io/crates/v/termion.svg)](https://crates.io/crates/termion) | [Documentation](https://docs.rs/termion) | [Examples](https://github.com/Ticki/termion/tree/master/examples) | [Changelog](https://github.com/Ticki/termion/tree/master/CHANGELOG.md) | [Tutorial](http://ticki.github.io/blog/making-terminal-applications-in-rust-with-termion/)
+|----|----|----|----|----
+
+
+**Termion** is a pure Rust, bindless library for low-level handling, manipulating
+and reading information about terminals. This provides a full-featured
+alternative to Termbox.
+
+Termion aims to be simple and yet expressive. It is bindless, meaning that it
+is not a front-end to some other library (e.g., ncurses or termbox), but a
+standalone library directly talking to the TTY.
+
+Termion is quite convenient, due to its complete coverage of essential TTY
+features, providing one consistent API. Termion is rather low-level, containing
+only abstractions aligned with what actually happens behind the scenes. For
+something more high-level, refer to inquirer-rs, which uses Termion as a backend.
+
+Termion generates escapes and API calls for the user. This makes it a whole lot
+cleaner to use escapes.
+
+Supports Redox, Mac OS X, BSD, and Linux (or, in general, ANSI terminals).
+
+## A note on stability
+
+This crate is stable.
+
+## Cargo.toml
+
+```toml
+[dependencies]
+termion = "*"
+```
+
+## 0.1.0 to 1.0.0 guide
+
+This sample table gives an idea of how to go about converting to the new major
+version of Termion.
+
+| 0.1.0                          | 1.0.0
+|--------------------------------|---------------------------
+| `use termion::IntoRawMode`     | `use termion::raw::IntoRawMode`
+| `use termion::TermRead`        | `use termion::input::TermRead`
+| `stdout.color(color::Red);`    | `write!(stdout, "{}", color::Fg(color::Red));`
+| `stdout.color_bg(color::Red);` | `write!(stdout, "{}", color::Bg(color::Red));`
+| `stdout.goto(x, y);`           | `write!(stdout, "{}", cursor::Goto(x, y));`
+| `color::rgb(r, g, b);`         | `color::Rgb(r, g, b)` (truecolor)
+| `x.with_mouse()`               | `MouseTerminal::from(x)`
+
+## Features
+
+- Raw mode.
+- TrueColor.
+- 256-color mode.
+- Cursor movement.
+- Text formatting.
+- Console size.
+- TTY-only stream.
+- Control sequences.
+- Termios control.
+- Password input.
+- Redox support.
+- Safe `isatty` wrapper.
+- Panic-free error handling.
+- Special keys events (modifiers, special keys, etc.).
+- Allocation-free.
+- Asynchronous key events.
+- Mouse input.
+- Carefully tested.
+- Detailed documentation on every item.
+
+and much more.
+
+## Examples
+
+### Style and colors.
+
+```rust
+extern crate termion;
+
+use termion::{color, style};
+
+use std::io;
+
+fn main() {
+    println!("{}Red", color::Fg(color::Red));
+    println!("{}Blue", color::Fg(color::Blue));
+    println!("{}Blue'n'Bold{}", style::Bold, style::Reset);
+    println!("{}Just plain italic", style::Italic);
+}
+```
+
+### Moving the cursor
+
+```rust
+extern crate termion;
+
+fn main() {
+    print!("{}{}Stuff", termion::clear::All, termion::cursor::Goto(1, 1));
+}
+
+```
+
+### Mouse
+
+```rust
+extern crate termion;
+
+use termion::event::{Key, Event, MouseEvent};
+use termion::input::{TermRead, MouseTerminal};
+use termion::raw::IntoRawMode;
+use std::io::{Write, stdout, stdin};
+
+fn main() {
+    let stdin = stdin();
+    let mut stdout = MouseTerminal::from(stdout().into_raw_mode().unwrap());
+
+    write!(stdout, "{}{}q to exit. Click, click, click!", termion::clear::All, termion::cursor::Goto(1, 1)).unwrap();
+    stdout.flush().unwrap();
+
+    for c in stdin.events() {
+        let evt = c.unwrap();
+        match evt {
+            Event::Key(Key::Char('q')) => break,
+            Event::Mouse(me) => {
+                match me {
+                    MouseEvent::Press(_, x, y) => {
+                        write!(stdout, "{}x", termion::cursor::Goto(x, y)).unwrap();
+                    },
+                    _ => (),
+                }
+            }
+            _ => {}
+        }
+        stdout.flush().unwrap();
+    }
+}
+```
+
+### Read a password
+
+```rust
+extern crate termion;
+
+use termion::input::TermRead;
+use std::io::{Write, stdout, stdin};
+
+fn main() {
+    let stdout = stdout();
+    let mut stdout = stdout.lock();
+    let stdin = stdin();
+    let mut stdin = stdin.lock();
+
+    stdout.write_all(b"password: ").unwrap();
+    stdout.flush().unwrap();
+
+    let pass = stdin.read_passwd(&mut stdout);
+
+    if let Ok(Some(pass)) = pass {
+        stdout.write_all(pass.as_bytes()).unwrap();
+        stdout.write_all(b"\n").unwrap();
+    } else {
+        stdout.write_all(b"Error\n").unwrap();
+    }
+}
+```
+
+## Usage
+
+See `examples/`, and the documentation, which can be rendered using `cargo doc`.
+
+For a more complete example, see [a minesweeper implementation](https://github.com/redox-os/games-for-redox/blob/master/src/minesweeper/main.rs), that I made for Redox using termion.
+
+<img src="image.png" width="200">
+
+## License
+
+MIT/X11.
new file mode 100644
--- /dev/null
+++ b/third_party/rust/termion/examples/alternate_screen.rs
@@ -0,0 +1,17 @@
+extern crate termion;
+
+use termion::screen::*;
+use std::io::{Write, stdout};
+use std::{time, thread};
+
+fn main() {
+    {
+        let mut screen = AlternateScreen::from(stdout());
+        write!(screen, "Welcome to the alternate screen.\n\nPlease wait patiently until we arrive back at the main screen in about three seconds.").unwrap();
+        screen.flush().unwrap();
+
+        thread::sleep(time::Duration::from_secs(3));
+    }
+
+    println!("Phew! We are back.");
+}
new file mode 100644
--- /dev/null
+++ b/third_party/rust/termion/examples/alternate_screen_raw.rs
@@ -0,0 +1,40 @@
+extern crate termion;
+
+use termion::event::Key;
+use termion::input::TermRead;
+use termion::raw::IntoRawMode;
+use termion::screen::*;
+use std::io::{Write, stdout, stdin};
+
+fn write_alt_screen_msg<W: Write>(screen: &mut W) {
+    write!(screen, "{}{}Welcome to the alternate screen.{}Press '1' to switch to the main screen or '2' to switch to the alternate screen.{}Press 'q' to exit (and switch back to the main screen).",
+           termion::clear::All,
+           termion::cursor::Goto(1, 1),
+           termion::cursor::Goto(1, 3),
+           termion::cursor::Goto(1, 4)).unwrap();
+}
+
+fn main() {
+    let stdin = stdin();
+    let mut screen = AlternateScreen::from(stdout().into_raw_mode().unwrap());
+    write!(screen, "{}", termion::cursor::Hide).unwrap();
+    write_alt_screen_msg(&mut screen);
+
+    screen.flush().unwrap();
+
+    for c in stdin.keys() {
+        match c.unwrap() {
+            Key::Char('q') => break,
+            Key::Char('1') => {
+                write!(screen, "{}", ToMainScreen).unwrap();
+            }
+            Key::Char('2') => {
+                write!(screen, "{}", ToAlternateScreen).unwrap();
+                write_alt_screen_msg(&mut screen);
+            }
+            _ => {}
+        }
+        screen.flush().unwrap();
+    }
+    write!(screen, "{}", termion::cursor::Show).unwrap();
+}
new file mode 100644
--- /dev/null
+++ b/third_party/rust/termion/examples/async.rs
@@ -0,0 +1,39 @@
+extern crate termion;
+
+use termion::raw::IntoRawMode;
+use termion::async_stdin;
+use std::io::{Read, Write, stdout};
+use std::thread;
+use std::time::Duration;
+
+fn main() {
+    let stdout = stdout();
+    let mut stdout = stdout.lock().into_raw_mode().unwrap();
+    let mut stdin = async_stdin().bytes();
+
+    write!(stdout,
+           "{}{}",
+           termion::clear::All,
+           termion::cursor::Goto(1, 1))
+            .unwrap();
+
+    loop {
+        write!(stdout, "{}", termion::clear::CurrentLine).unwrap();
+
+        let b = stdin.next();
+        write!(stdout, "\r{:?}    <- This demonstrates the async read of an input char. Between each update, 100 ms is waited, simply to demonstrate the async fashion. \n\r", b).unwrap();
+        if let Some(Ok(b'q')) = b {
+            break;
+        }
+
+        stdout.flush().unwrap();
+
+        thread::sleep(Duration::from_millis(50));
+        stdout.write_all(b"# ").unwrap();
+        stdout.flush().unwrap();
+        thread::sleep(Duration::from_millis(50));
+        stdout.write_all(b"\r #").unwrap();
+        write!(stdout, "{}", termion::cursor::Goto(1, 1)).unwrap();
+        stdout.flush().unwrap();
+    }
+}
new file mode 100644
--- /dev/null
+++ b/third_party/rust/termion/examples/click.rs
@@ -0,0 +1,35 @@
+extern crate termion;
+
+use termion::event::{Key, Event, MouseEvent};
+use termion::input::{TermRead, MouseTerminal};
+use termion::raw::IntoRawMode;
+use std::io::{Write, stdout, stdin};
+
+fn main() {
+    let stdin = stdin();
+    let mut stdout = MouseTerminal::from(stdout().into_raw_mode().unwrap());
+
+    write!(stdout,
+           "{}{}q to exit. Click, click, click!",
+           termion::clear::All,
+           termion::cursor::Goto(1, 1))
+            .unwrap();
+    stdout.flush().unwrap();
+
+    for c in stdin.events() {
+        let evt = c.unwrap();
+        match evt {
+            Event::Key(Key::Char('q')) => break,
+            Event::Mouse(me) => {
+                match me {
+                    MouseEvent::Press(_, x, y) => {
+                        write!(stdout, "{}x", termion::cursor::Goto(x, y)).unwrap();
+                    }
+                    _ => (),
+                }
+            }
+            _ => {}
+        }
+        stdout.flush().unwrap();
+    }
+}
new file mode 100644
--- /dev/null
+++ b/third_party/rust/termion/examples/color.rs
@@ -0,0 +1,10 @@
+extern crate termion;
+
+use termion::{color, style};
+
+fn main() {
+    println!("{}Red", color::Fg(color::Red));
+    println!("{}Blue", color::Fg(color::Blue));
+    println!("{}Blue'n'Bold{}", style::Bold, style::Reset);
+    println!("{}Just plain italic", style::Italic);
+}
new file mode 100644
--- /dev/null
+++ b/third_party/rust/termion/examples/commie.rs
@@ -0,0 +1,51 @@
+extern crate termion;
+
+use termion::{clear, color, cursor};
+
+use std::{time, thread};
+
+const COMMUNISM: &'static str = r#"
+              !#########       #                 
+            !########!          ##!              
+         !########!               ###            
+      !##########                  ####          
+    ######### #####                ######        
+     !###!      !####!              ######       
+       !           #####            ######!      
+                     !####!         #######      
+                        #####       #######      
+                          !####!   #######!      
+                             ####!########       
+          ##                   ##########        
+        ,######!          !#############         
+      ,#### ########################!####!       
+    ,####'     ##################!'    #####     
+  ,####'            #######              !####!  
+ ####'                                      #####
+ ~##                                          ##~
+"#;
+
+fn main() {
+    let mut state = 0;
+
+    println!("\n{}{}{}{}{}{}",
+             cursor::Hide,
+             clear::All,
+             cursor::Goto(1, 1),
+             color::Fg(color::Black),
+             color::Bg(color::Red),
+             COMMUNISM);
+    loop {
+        println!("{}{}           ☭ GAY ☭ SPACE ☭ COMMUNISM ☭           ",
+                 cursor::Goto(1, 1),
+                 color::Bg(color::AnsiValue(state)));
+        println!("{}{}             WILL PREVAIL, COMRADES!             ",
+                 cursor::Goto(1, 20),
+                 color::Bg(color::AnsiValue(state)));
+
+        state += 1;
+        state %= 8;
+
+        thread::sleep(time::Duration::from_millis(90));
+    }
+}
new file mode 100644
--- /dev/null
+++ b/third_party/rust/termion/examples/detect_color.rs
@@ -0,0 +1,19 @@
+extern crate termion;
+
+use termion::color::{DetectColors, AnsiValue, Bg};
+use termion::raw::IntoRawMode;
+use std::io::stdout;
+
+fn main() {
+    let count;
+    {
+        let mut term = stdout().into_raw_mode().unwrap();
+        count = term.available_colors().unwrap();
+    }
+
+    println!("This terminal supports {} colors.", count);
+    for i in 0..count {
+        print!("{} {}", Bg(AnsiValue(i as u8)), Bg(AnsiValue(0)));
+    }
+    println!();
+}
new file mode 100644
--- /dev/null
+++ b/third_party/rust/termion/examples/is_tty.rs
@@ -0,0 +1,11 @@
+extern crate termion;
+
+use std::fs;
+
+fn main() {
+    if termion::is_tty(&fs::File::create("/dev/stdout").unwrap()) {
+        println!("This is a TTY!");
+    } else {
+        println!("This is not a TTY :(");
+    }
+}
new file mode 100644
--- /dev/null
+++ b/third_party/rust/termion/examples/keys.rs
@@ -0,0 +1,44 @@
+extern crate termion;
+
+use termion::event::Key;
+use termion::input::TermRead;
+use termion::raw::IntoRawMode;
+use std::io::{Write, stdout, stdin};
+
+fn main() {
+    let stdin = stdin();
+    let mut stdout = stdout().into_raw_mode().unwrap();
+
+    write!(stdout,
+           "{}{}q to exit. Type stuff, use alt, and so on.{}",
+           termion::clear::All,
+           termion::cursor::Goto(1, 1),
+           termion::cursor::Hide)
+            .unwrap();
+    stdout.flush().unwrap();
+
+    for c in stdin.keys() {
+        write!(stdout,
+               "{}{}",
+               termion::cursor::Goto(1, 1),
+               termion::clear::CurrentLine)
+                .unwrap();
+
+        match c.unwrap() {
+            Key::Char('q') => break,
+            Key::Char(c) => println!("{}", c),
+            Key::Alt(c) => println!("^{}", c),
+            Key::Ctrl(c) => println!("*{}", c),
+            Key::Esc => println!("ESC"),
+            Key::Left => println!("←"),
+            Key::Right => println!("→"),
+            Key::Up => println!("↑"),
+            Key::Down => println!("↓"),
+            Key::Backspace => println!("×"),
+            _ => {}
+        }
+        stdout.flush().unwrap();
+    }
+
+    write!(stdout, "{}", termion::cursor::Show).unwrap();
+}
new file mode 100644
--- /dev/null
+++ b/third_party/rust/termion/examples/mouse.rs
@@ -0,0 +1,46 @@
+extern crate termion;
+
+use termion::event::*;
+use termion::cursor::{self, DetectCursorPos};
+use termion::input::{TermRead, MouseTerminal};
+use termion::raw::IntoRawMode;
+use std::io::{self, Write};
+
+fn main() {
+    let stdin = io::stdin();
+    let mut stdout = MouseTerminal::from(io::stdout().into_raw_mode().unwrap());
+
+    writeln!(stdout,
+             "{}{}q to exit. Type stuff, use alt, click around...",
+             termion::clear::All,
+             termion::cursor::Goto(1, 1))
+            .unwrap();
+
+    for c in stdin.events() {
+        let evt = c.unwrap();
+        match evt {
+            Event::Key(Key::Char('q')) => break,
+            Event::Mouse(me) => {
+                match me {
+                    MouseEvent::Press(_, a, b) |
+                    MouseEvent::Release(a, b) |
+                    MouseEvent::Hold(a, b) => {
+                        write!(stdout, "{}", cursor::Goto(a, b)).unwrap();
+                        let (x, y) = stdout.cursor_pos().unwrap();
+                        write!(stdout,
+                               "{}{}Cursor is at: ({},{}){}",
+                               cursor::Goto(5, 5),
+                               termion::clear::UntilNewline,
+                               x,
+                               y,
+                               cursor::Goto(a, b))
+                                .unwrap();
+                    }
+                }
+            }
+            _ => {}
+        }
+
+        stdout.flush().unwrap();
+    }
+}
new file mode 100644
--- /dev/null
+++ b/third_party/rust/termion/examples/rainbow.rs
@@ -0,0 +1,60 @@
+extern crate termion;
+
+use termion::event::Key;
+use termion::input::TermRead;
+use termion::raw::IntoRawMode;
+use std::io::{Write, stdout, stdin};
+
+fn rainbow<W: Write>(stdout: &mut W, blue: u8) {
+    write!(stdout,
+           "{}{}",
+           termion::cursor::Goto(1, 1),
+           termion::clear::All)
+            .unwrap();
+
+    for red in 0..32 {
+        let red = red * 8;
+        for green in 0..64 {
+            let green = green * 4;
+            write!(stdout,
+                   "{} ",
+                   termion::color::Bg(termion::color::Rgb(red, green, blue)))
+                    .unwrap();
+        }
+        write!(stdout, "\n\r").unwrap();
+    }
+
+    writeln!(stdout, "{}b = {}", termion::style::Reset, blue).unwrap();
+}
+
+fn main() {
+    let stdin = stdin();
+    let mut stdout = stdout().into_raw_mode().unwrap();
+
+    writeln!(stdout,
+             "{}{}{}Use the up/down arrow keys to change the blue in the rainbow.",
+             termion::clear::All,
+             termion::cursor::Goto(1, 1),
+             termion::cursor::Hide)
+            .unwrap();
+
+    let mut blue = 172u8;
+
+    for c in stdin.keys() {
+        match c.unwrap() {
+            Key::Up => {
+                blue = blue.saturating_add(4);
+                rainbow(&mut stdout, blue);
+            }
+            Key::Down => {
+                blue = blue.saturating_sub(4);
+                rainbow(&mut stdout, blue);
+            }
+            Key::Char('q') => break,
+            _ => {}
+        }
+        stdout.flush().unwrap();
+    }
+
+    write!(stdout, "{}", termion::cursor::Show).unwrap();
+}
new file mode 100644
--- /dev/null
+++ b/third_party/rust/termion/examples/read.rs
@@ -0,0 +1,23 @@
+extern crate termion;
+
+use termion::input::TermRead;
+use std::io::{Write, stdout, stdin};
+
+fn main() {
+    let stdout = stdout();
+    let mut stdout = stdout.lock();
+    let stdin = stdin();
+    let mut stdin = stdin.lock();
+
+    stdout.write_all(b"password: ").unwrap();
+    stdout.flush().unwrap();
+
+    let pass = stdin.read_passwd(&mut stdout);
+
+    if let Ok(Some(pass)) = pass {
+        stdout.write_all(pass.as_bytes()).unwrap();
+        stdout.write_all(b"\n").unwrap();
+    } else {
+        stdout.write_all(b"Error\n").unwrap();
+    }
+}
new file mode 100644
--- /dev/null
+++ b/third_party/rust/termion/examples/rustc_fun.rs
@@ -0,0 +1,24 @@
+extern crate termion;
+
+use termion::{color, style};
+
+fn main() {
+    println!("{lighgreen}-- src/test/ui/borrow-errors.rs at 82:18 --\n\
+              {red}error: {reset}{bold}two closures require unique access to `vec` at the same time {reset}{bold}{magenta}[E0524]{reset}\n\
+              {line_num_fg}{line_num_bg}79 {reset}     let append = |e| {{\n\
+              {line_num_fg}{line_num_bg}{info_line}{reset}                  {red}^^^{reset} {error_fg}first closure is constructed here\n\
+              {line_num_fg}{line_num_bg}80 {reset}         vec.push(e)\n\
+              {line_num_fg}{line_num_bg}{info_line}{reset}                 {red}^^^{reset} {error_fg}previous borrow occurs due to use of `vec` in closure\n\
+              {line_num_fg}{line_num_bg}84 {reset}     }};\n\
+              {line_num_fg}{line_num_bg}85 {reset} }}\n\
+              {line_num_fg}{line_num_bg}{info_line}{reset} {red}^{reset} {error_fg}borrow from first closure ends here",
+             lighgreen = color::Fg(color::LightGreen),
+             red = color::Fg(color::Red),
+             bold = style::Bold,
+             reset = style::Reset,
+             magenta = color::Fg(color::Magenta),
+             line_num_bg = color::Bg(color::AnsiValue::grayscale(3)),
+             line_num_fg = color::Fg(color::AnsiValue::grayscale(18)),
+             info_line = "|  ",
+             error_fg = color::Fg(color::AnsiValue::grayscale(17)))
+}
new file mode 100644
--- /dev/null
+++ b/third_party/rust/termion/examples/simple.rs
@@ -0,0 +1,42 @@
+extern crate termion;
+
+use termion::color;
+use termion::raw::IntoRawMode;
+use std::io::{Read, Write, stdout, stdin};
+
+fn main() {
+    // Initialize 'em all.
+    let stdout = stdout();
+    let mut stdout = stdout.lock().into_raw_mode().unwrap();
+    let stdin = stdin();
+    let stdin = stdin.lock();
+
+    write!(stdout,
+           "{}{}{}yo, 'q' will exit.{}{}",
+           termion::clear::All,
+           termion::cursor::Goto(5, 5),
+           termion::style::Bold,
+           termion::style::Reset,
+           termion::cursor::Goto(20, 10))
+            .unwrap();
+    stdout.flush().unwrap();
+
+    let mut bytes = stdin.bytes();
+    loop {
+        let b = bytes.next().unwrap().unwrap();
+
+        match b {
+                // Quit
+                b'q' => return,
+                // Clear the screen
+                b'c' => write!(stdout, "{}", termion::clear::All),
+                // Set red color
+                b'r' => write!(stdout, "{}", color::Fg(color::Rgb(5, 0, 0))),
+                // Write it to stdout.
+                a => write!(stdout, "{}", a),
+            }
+            .unwrap();
+
+        stdout.flush().unwrap();
+    }
+}
new file mode 100644
--- /dev/null
+++ b/third_party/rust/termion/examples/size.rs
@@ -0,0 +1,7 @@
+extern crate termion;
+
+use termion::terminal_size;
+
+fn main() {
+    println!("Size is {:?}", terminal_size().unwrap())
+}
new file mode 100644
--- /dev/null
+++ b/third_party/rust/termion/examples/truecolor.rs
@@ -0,0 +1,12 @@
+extern crate termion;
+
+use termion::{color, cursor, clear};
+use std::{thread, time};
+
+fn main() {
+    for r in 0..255 {
+        let c = color::Rgb(r, !r, 2 * ((r % 128) as i8 - 64).abs() as u8);
+        println!("{}{}{}wow", cursor::Goto(1, 1), color::Bg(c), clear::All);
+        thread::sleep(time::Duration::from_millis(100));
+    }
+}
new file mode 100644
--- /dev/null
+++ b/third_party/rust/termion/logo.svg
@@ -0,0 +1,9 @@
+<svg xmlns="http://www.w3.org/2000/svg" width="60.099598mm" height="18.291185mm" viewBox="0 0 212.95 64.81">
+  <style>
+    .blink { animation: blinker 3s linear infinite; } @keyframes blinker { 50% { opacity: 0; } }
+  </style>
+
+  <path d="M0 0h212.95v64.82H0z" opacity=".71"/>
+  <path fill="#f9f9f9" d="M12.24 17.8H34.5v3.33h-9.13v25.84H21.4V21.13h-9.16V17.8zm27 0h17.3v3.33H43.2v8.63h12.77v3.32h-12.8v10.57H56.9v3.32H39.24V17.8zM74.3 33.2q1.5.4 2.6 1.48 1.06 1.08 2.66 4.32l3.97 7.97H79.3l-3.5-7.37q-1.5-3.14-2.7-4.04-1.2-.92-3.13-.92H66.2v12.33h-3.96V17.8h8.13q4.8 0 7.36 2.17 2.56 2.17 2.56 6.27 0 2.9-1.6 4.73-1.6 1.82-4.4 2.23zm-8.1-12.15V31.4h4.32q2.83 0 4.22-1.27 1.4-1.27 1.4-3.9 0-2.5-1.5-3.83-1.46-1.35-4.27-1.35H66.2zm19-3.25h5.26l5.04 14.85 5.08-14.84h5.3V47h-3.66V21.2l-5.2 15.38h-2.98L88.82 21.2v25.77H85.2V17.8zm26.3 0h16.2v3.33h-6.12v22.52h6.1v3.32H111.5v-3.32h6.1V21.13h-6.1V17.8zm37.8 14.62q0-6.43-1.32-9.18-1.3-2.76-4.3-2.76t-4.33 2.76q-1.3 2.75-1.3 9.18 0 6.4 1.3 9.16 1.33 2.75 4.32 2.75 3 0 4.3-2.73 1.34-2.76 1.34-9.18zm4.13 0q0 7.6-2.42 11.36-2.4 3.75-7.3 3.75t-7.3-3.73q-2.4-3.73-2.4-11.38 0-7.64 2.4-11.4 2.4-3.74 7.35-3.74t7.34 3.75q2.42 3.75 2.42 11.4zm4.97-14.62h5l9.86 24v-24h3.8v29.17h-5l-9.84-24v24h-3.8V17.8z"/>
+  <path fill="#f9f9f9" d="M192.7 8.66v47.5h-3.93V8.66h3.94z" class="blink"/>
+</svg>
new file mode 100644
--- /dev/null
+++ b/third_party/rust/termion/src/async.rs
@@ -0,0 +1,78 @@
+use std::io::{self, Read};
+use std::sync::mpsc;
+use std::thread;
+
+use sys::tty::get_tty;
+
+/// Construct an asynchronous handle to the TTY standard input.
+///
+/// This allows you to read from standard input _without blocking_ the current thread.
+/// Specifically, it works by firing up another thread to handle the event stream, which is then
+/// buffered in an mpsc queue that the current thread eventually reads.
+///
+/// This will not read the piped standard input, but rather read from the TTY device, since reading
+/// asynchronously from piped input would rarely make sense. In other words, if you pipe standard
+/// output from another process, it won't be reflected in the stream returned by this function, as
+/// this represents the TTY device, and not the piped standard input.
+pub fn async_stdin() -> AsyncReader {
+    let (send, recv) = mpsc::channel();
+
+    thread::spawn(move || for i in get_tty().unwrap().bytes() {
+                      if send.send(i).is_err() {
+                          return;
+                      }
+                  });
+
+    AsyncReader { recv: recv }
+}
+
+/// An asynchronous reader.
+///
+/// This acts as any other stream, with the exception that reading from it won't block. Instead,
+/// the buffer will only be partially updated based on how much the internal buffer holds.
+pub struct AsyncReader {
+    /// The underlying mpsc receiver.
+    recv: mpsc::Receiver<io::Result<u8>>,
+}
+
+// FIXME: Allow constructing an async reader from an arbitrary stream.
+
+impl Read for AsyncReader {
+    /// Read from the byte stream.
+    ///
+    /// This will never block, but try to drain the event queue until empty. If the total number of
+    /// bytes written is lower than the buffer's length, the event queue is empty or that the event
+    /// stream halted.
+    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
+        let mut total = 0;
+
+        loop {
+            if total >= buf.len() {
+                break;
+            }
+
+            match self.recv.try_recv() {
+                Ok(Ok(b)) => {
+                    buf[total] = b;
+                    total += 1;
+                }
+                Ok(Err(e)) => return Err(e),
+                Err(_) => break,
+            }
+        }
+
+        Ok(total)
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use super::*;
+    use std::io::Read;
+
+    #[test]
+    fn test_async_stdin() {
+        let stdin = async_stdin();
+        stdin.bytes().next();
+    }
+}
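
A brief sketch of the polling pattern `async_stdin` is meant for; it is illustrative only, assumes a binary that depends on `termion`, and simply drains whatever bytes the background thread has queued so far between doing other work:

```rust
// Illustrative sketch (not part of the vendored crate).
extern crate termion;

use std::io::Read;
use std::{thread, time};

fn main() {
    let mut stdin = termion::async_stdin();
    let mut buf = [0u8; 16];

    loop {
        // Never blocks: returns however many bytes are queued right now.
        let n = stdin.read(&mut buf).unwrap();
        if buf[..n].contains(&b'q') {
            break;
        }
        // Free to do other work between polls.
        thread::sleep(time::Duration::from_millis(50));
    }
}
```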
new file mode 100644
--- /dev/null
+++ b/third_party/rust/termion/src/clear.rs
@@ -0,0 +1,9 @@
+//! Clearing the screen.
+
+use std::fmt;
+
+derive_csi_sequence!("Clear the entire screen.", All, "2J");
+derive_csi_sequence!("Clear everything after the cursor.", AfterCursor, "J");
+derive_csi_sequence!("Clear everything before the cursor.", BeforeCursor, "1J");
+derive_csi_sequence!("Clear the current line.", CurrentLine, "2K");
+derive_csi_sequence!("Clear from cursor to newline.", UntilNewline, "K");
new file mode 100644
--- /dev/null
+++ b/third_party/rust/termion/src/color.rs
@@ -0,0 +1,242 @@
+//! Color management.
+//!
+//! # Example
+//!
+//! ```rust
+//! use termion::color;
+//!
+//! fn main() {
+//!     println!("{}Red", color::Fg(color::Red));
+//!     println!("{}Blue", color::Fg(color::Blue));
+//!     println!("{}Back again", color::Fg(color::Reset));
+//! }
+//! ```
+
+use std::fmt;
+use raw::CONTROL_SEQUENCE_TIMEOUT;
+use std::io::{self, Write, Read};
+use std::time::{SystemTime, Duration};
+use async::async_stdin;
+use std::env;
+
+/// A terminal color.
+pub trait Color {
+    /// Write the foreground version of this color.
+    fn write_fg(&self, f: &mut fmt::Formatter) -> fmt::Result;
+    /// Write the background version of this color.
+    fn write_bg(&self, f: &mut fmt::Formatter) -> fmt::Result;
+}
+
+macro_rules! derive_color {
+    ($doc:expr, $name:ident, $value:expr) => {
+        #[doc = $doc]
+        #[derive(Copy, Clone, Debug)]
+        pub struct $name;
+
+        impl Color for $name {
+            #[inline]
+            fn write_fg(&self, f: &mut fmt::Formatter) -> fmt::Result {
+                write!(f, csi!("38;5;", $value, "m"))
+            }
+
+            #[inline]
+            fn write_bg(&self, f: &mut fmt::Formatter) -> fmt::Result {
+                write!(f, csi!("48;5;", $value, "m"))
+            }
+        }
+    };
+}
+
+derive_color!("Black.", Black, "0");
+derive_color!("Red.", Red, "1");
+derive_color!("Green.", Green, "2");
+derive_color!("Yellow.", Yellow, "3");
+derive_color!("Blue.", Blue, "4");
+derive_color!("Magenta.", Magenta, "5");
+derive_color!("Cyan.", Cyan, "6");
+derive_color!("White.", White, "7");
+derive_color!("High-intensity light black.", LightBlack, "8");
+derive_color!("High-intensity light red.", LightRed, "9");
+derive_color!("High-intensity light green.", LightGreen, "10");
+derive_color!("High-intensity light yellow.", LightYellow, "11");
+derive_color!("High-intensity light blue.", LightBlue, "12");
+derive_color!("High-intensity light magenta.", LightMagenta, "13");
+derive_color!("High-intensity light cyan.", LightCyan, "14");
+derive_color!("High-intensity light white.", LightWhite, "15");
+
+impl<'a> Color for &'a Color {
+    #[inline]
+    fn write_fg(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        (*self).write_fg(f)
+    }
+
+    #[inline]
+    fn write_bg(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        (*self).write_bg(f)
+    }
+}
+
+/// An arbitrary ANSI color value.
+#[derive(Clone, Copy, Debug)]
+pub struct AnsiValue(pub u8);
+
+impl AnsiValue {
+    /// 216-color (r, g, b ≤ 5) RGB.
+    pub fn rgb(r: u8, g: u8, b: u8) -> AnsiValue {
+        debug_assert!(r <= 5,
+                      "Red color fragment (r = {}) is out of bound. Make sure r ≤ 5.",
+                      r);
+        debug_assert!(g <= 5,
+                      "Green color fragment (g = {}) is out of bound. Make sure g ≤ 5.",
+                      g);
+        debug_assert!(b <= 5,
+                      "Blue color fragment (b = {}) is out of bound. Make sure b ≤ 5.",
+                      b);
+
+        AnsiValue(16 + 36 * r + 6 * g + b)
+    }
+
+    /// Grayscale color.
+    ///
+    /// There are 24 shades of gray.
+    pub fn grayscale(shade: u8) -> AnsiValue {
+        // Unfortunately, there are a little less than fifty shades.
+        debug_assert!(shade < 24,
+                      "Grayscale out of bound (shade = {}). There are only 24 shades of \
+                      gray.",
+                      shade);
+
+        AnsiValue(0xE8 + shade)
+    }
+}
+
+impl Color for AnsiValue {
+    #[inline]
+    fn write_fg(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, csi!("38;5;{}m"), self.0)
+    }
+
+    #[inline]
+    fn write_bg(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, csi!("48;5;{}m"), self.0)
+    }
+}
+
+/// A truecolor RGB.
+#[derive(Debug, Clone, Copy, PartialEq)]
+pub struct Rgb(pub u8, pub u8, pub u8);
+
+impl Color for Rgb {
+    #[inline]
+    fn write_fg(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, csi!("38;2;{};{};{}m"), self.0, self.1, self.2)
+    }
+
+    #[inline]
+    fn write_bg(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, csi!("48;2;{};{};{}m"), self.0, self.1, self.2)
+    }
+}
+
+/// Reset colors to defaults.
+#[derive(Debug, Clone, Copy)]
+pub struct Reset;
+
+impl Color for Reset {
+    #[inline]
+    fn write_fg(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, csi!("39m"))
+    }
+
+    #[inline]
+    fn write_bg(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, csi!("49m"))
+    }
+}
+
+/// A foreground color.
+#[derive(Debug, Clone, Copy)]
+pub struct Fg<C: Color>(pub C);
+
+impl<C: Color> fmt::Display for Fg<C> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        self.0.write_fg(f)
+    }
+}
+
+/// A background color.
+#[derive(Debug, Clone, Copy)]
+pub struct Bg<C: Color>(pub C);
+
+impl<C: Color> fmt::Display for Bg<C> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        self.0.write_bg(f)
+    }
+}
+
+/// Types that allow detection of the colors they support.
+pub trait DetectColors {
+    /// How many ANSI colors are supported (from 8 to 256)?
+    ///
+    /// Beware: the information given isn't authoritative; it's inferred through escape codes or the
+    /// value of `TERM`, so more colors may be available.
+    fn available_colors(&mut self) -> io::Result<u16>;
+}
+
+impl<W: Write> DetectColors for W {
+    fn available_colors(&mut self) -> io::Result<u16> {
+        let mut stdin = async_stdin();
+
+        if detect_color(self, &mut stdin, 0)? {
+            // OSC 4 is supported, detect how many colors there are.
+            // Do a binary search of the last supported color.
+            let mut min = 8;
+            let mut max = 256;
+            let mut i;
+            while min + 1 < max {
+                i = (min + max) / 2;
+                if detect_color(self, &mut stdin, i)? {
+                    min = i
+                } else {
+                    max = i
+                }
+            }
+            Ok(max)
+        } else {
+            // OSC 4 is not supported, trust TERM contents.
+            Ok(match env::var_os("TERM") {
+                   Some(val) => {
+                       if val.to_str().unwrap_or("").contains("256color") {
+                           256
+                       } else {
+                           8
+                       }
+                   }
+                   None => 8,
+               })
+        }
+    }
+}
+
+/// Detect a color using OSC 4.
+fn detect_color(stdout: &mut Write, stdin: &mut Read, color: u16) -> io::Result<bool> {
+    // Is the color available?
+    // Use `ESC ] 4 ; color ; ? BEL`.
+    write!(stdout, "\x1B]4;{};?\x07", color)?;
+    stdout.flush()?;
+
+    let mut buf: [u8; 1] = [0];
+    let mut total_read = 0;
+
+    let timeout = Duration::from_millis(CONTROL_SEQUENCE_TIMEOUT);
+    let now = SystemTime::now();
+    let bell = 7u8;
+
+    // Either consume all data up to bell or wait for a timeout.
+    while buf[0] != bell && now.elapsed().unwrap() < timeout {
+        total_read += stdin.read(&mut buf)?;
+    }
+
+    // If there was a response, the color is supported.
+    Ok(total_read > 0)
+}
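
The `AnsiValue` constructors above are pure arithmetic: the 6×6×6 colour cube starts at index 16 and is laid out as `16 + 36*r + 6*g + b`, and the 24 grays start at 0xE8 (232). A worked sketch, illustrative only and assuming a dependency on `termion`:

```rust
// Illustrative sketch (not part of the vendored crate).
extern crate termion;

use termion::color::{AnsiValue, Bg, Fg, Reset};

fn main() {
    // Brightest cube entry: 16 + 36*5 + 6*5 + 5 = 231.
    assert_eq!(AnsiValue::rgb(5, 5, 5).0, 231);
    // Pure red corner of the cube: 16 + 36*5 = 196.
    assert_eq!(AnsiValue::rgb(5, 0, 0).0, 196);
    // Gray shade 18 of 24: 232 + 18 = 250.
    assert_eq!(AnsiValue::grayscale(18).0, 250);

    // The computed indices plug straight into the Fg/Bg wrappers.
    println!("{}{}indexed colours{}{}",
             Fg(AnsiValue::grayscale(18)),
             Bg(AnsiValue::rgb(5, 0, 0)),
             Fg(Reset),
             Bg(Reset));
}
```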
new file mode 100644
--- /dev/null
+++ b/third_party/rust/termion/src/cursor.rs
@@ -0,0 +1,140 @@
+//! Cursor movement.
+
+use std::fmt;
+use std::io::{self, Write, Error, ErrorKind, Read};
+use async::async_stdin;
+use std::time::{SystemTime, Duration};
+use raw::CONTROL_SEQUENCE_TIMEOUT;
+
+derive_csi_sequence!("Hide the cursor.", Hide, "?25l");
+derive_csi_sequence!("Show the cursor.", Show, "?25h");
+
+derive_csi_sequence!("Restore the cursor.", Restore, "u");
+derive_csi_sequence!("Save the cursor.", Save, "s");
+
+/// Goto some position ((1,1)-based).
+///
+/// # Why one-based?
+///
+/// ANSI escapes are very poorly designed, and one of the many odd aspects is being one-based. This
+/// can be quite strange at first, but it is not that big of an obstruction once you get used to
+/// it.
+///
+/// # Example
+///
+/// ```rust
+/// extern crate termion;
+///
+/// fn main() {
+///     print!("{}{}Stuff", termion::clear::All, termion::cursor::Goto(5, 3));
+/// }
+/// ```
+#[derive(Copy, Clone, PartialEq, Eq)]
+pub struct Goto(pub u16, pub u16);
+
+impl Default for Goto {
+    fn default() -> Goto {
+        Goto(1, 1)
+    }
+}
+
+impl fmt::Display for Goto {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        debug_assert!(self != &Goto(0, 0), "Goto is one-based.");
+
+        write!(f, csi!("{};{}H"), self.1, self.0)
+    }
+}
+
+/// Move cursor left.
+#[derive(Copy, Clone, PartialEq, Eq)]
+pub struct Left(pub u16);
+
+impl fmt::Display for Left {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, csi!("{}D"), self.0)
+    }
+}
+
+/// Move cursor right.
+#[derive(Copy, Clone, PartialEq, Eq)]
+pub struct Right(pub u16);
+
+impl fmt::Display for Right {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, csi!("{}C"), self.0)
+    }
+}
+
+/// Move cursor up.
+#[derive(Copy, Clone, PartialEq, Eq)]
+pub struct Up(pub u16);
+
+impl fmt::Display for Up {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, csi!("{}A"), self.0)
+    }
+}
+
+/// Move cursor down.
+#[derive(Copy, Clone, PartialEq, Eq)]
+pub struct Down(pub u16);
+
+impl fmt::Display for Down {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, csi!("{}B"), self.0)
+    }
+}
+
+/// Types that allow detection of the cursor position.
+pub trait DetectCursorPos {
+    /// Get the (1,1)-based cursor position from the terminal.
+    fn cursor_pos(&mut self) -> io::Result<(u16, u16)>;
+}
+
+impl<W: Write> DetectCursorPos for W {
+    fn cursor_pos(&mut self) -> io::Result<(u16, u16)> {
+        let mut stdin = async_stdin();
+
+        // Where is the cursor?
+        // Use `ESC [ 6 n`.
+        write!(self, "\x1B[6n")?;
+        self.flush()?;
+
+        let mut buf: [u8; 1] = [0];
+        let mut read_chars = Vec::new();
+
+        let timeout = Duration::from_millis(CONTROL_SEQUENCE_TIMEOUT);
+        let now = SystemTime::now();
+
+        // Either consume all data up to R or wait for a timeout.
+        while buf[0] != b'R' && now.elapsed().unwrap() < timeout {
+            if stdin.read(&mut buf)? > 0 {
+                read_chars.push(buf[0]);
+            }
+        }
+
+        if read_chars.len() == 0 {
+            return Err(Error::new(ErrorKind::Other, "Cursor position detection timed out."));
+        }
+
+        // The answer will look like `ESC [ Cy ; Cx R`.
+
+        read_chars.pop(); // remove trailing R.
+        let read_str = String::from_utf8(read_chars).unwrap();
+        let beg = read_str.rfind('[').unwrap();
+        let coords: String = read_str.chars().skip(beg + 1).collect();
+        let mut nums = coords.split(';');
+
+        let cy = nums.next()
+            .unwrap()
+            .parse::<u16>()
+            .unwrap();
+        let cx = nums.next()
+            .unwrap()
+            .parse::<u16>()
+            .unwrap();
+
+        Ok((cx, cy))
+    }
+}
new file mode 100644
--- /dev/null
+++ b/third_party/rust/termion/src/event.rs
@@ -0,0 +1,351 @@
+//! Mouse and key events.
+
+use std::io::{Error, ErrorKind};
+use std::ascii::AsciiExt;
+use std::str;
+
+/// An event reported by the terminal.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum Event {
+    /// A key press.
+    Key(Key),
+    /// A mouse button press, release or wheel use at specific coordinates.
+    Mouse(MouseEvent),
+    /// An event that cannot currently be evaluated.
+    Unsupported(Vec<u8>),
+}
+
+/// A mouse related event.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub enum MouseEvent {
+    /// A mouse button was pressed.
+    ///
+    /// The coordinates are one-based.
+    Press(MouseButton, u16, u16),
+    /// A mouse button was released.
+    ///
+    /// The coordinates are one-based.
+    Release(u16, u16),
+    /// A mouse button is held over the given coordinates.
+    ///
+    /// The coordinates are one-based.
+    Hold(u16, u16),
+}
+
+/// A mouse button.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub enum MouseButton {
+    /// The left mouse button.
+    Left,
+    /// The right mouse button.
+    Right,
+    /// The middle mouse button.
+    Middle,
+    /// Mouse wheel is going up.
+    ///
+    /// This event is typically only used with `MouseEvent::Press`.
+    WheelUp,
+    /// Mouse wheel is going down.
+    ///
+    /// This event is typically only used with `MouseEvent::Press`.
+    WheelDown,
+}
+
+/// A key.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub enum Key {
+    /// Backspace.
+    Backspace,
+    /// Left arrow.
+    Left,
+    /// Right arrow.
+    Right,
+    /// Up arrow.
+    Up,
+    /// Down arrow.
+    Down,
+    /// Home key.
+    Home,
+    /// End key.
+    End,
+    /// Page Up key.
+    PageUp,
+    /// Page Down key.
+    PageDown,
+    /// Delete key.
+    Delete,
+    /// Insert key.
+    Insert,
+    /// Function keys.
+    ///
+    /// Only function keys 1 through 12 are supported.
+    F(u8),
+    /// Normal character.
+    Char(char),
+    /// Alt modified character.
+    Alt(char),
+    /// Ctrl modified character.
+    ///
+    /// Note that certain keys may not be modifiable with `ctrl`, due to limitations of terminals.
+    Ctrl(char),
+    /// Null byte.
+    Null,
+    /// Esc key.
+    Esc,
+
+    #[doc(hidden)]
+    __IsNotComplete,
+}
+
+/// Parse an Event from `item` and possibly subsequent bytes through `iter`.
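+///
+/// # Example
+///
+/// A small sketch feeding the remaining bytes of a left-arrow sequence (`ESC [ D`) after the
+/// leading `ESC` byte has already been read:
+///
+/// ```rust
+/// use termion::event::{parse_event, Event, Key};
+///
+/// let mut rest = [b'[', b'D'].iter().map(|b| Ok(*b));
+/// let event = parse_event(b'\x1B', &mut rest).unwrap();
+/// assert_eq!(event, Event::Key(Key::Left));
+/// ```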
+pub fn parse_event<I>(item: u8, iter: &mut I) -> Result<Event, Error>
+    where I: Iterator<Item = Result<u8, Error>>
+{
+    let error = Error::new(ErrorKind::Other, "Could not parse an event");
+    match item {
+        b'\x1B' => {
+            // This is an escape character, leading a control sequence.
+            Ok(match iter.next() {
+                   Some(Ok(b'O')) => {
+                match iter.next() {
+                    // F1-F4
+                    Some(Ok(val @ b'P'...b'S')) => Event::Key(Key::F(1 + val - b'P')),
+                    _ => return Err(error),
+                }
+            }
+                   Some(Ok(b'[')) => {
+                // This is a CSI sequence.
+                parse_csi(iter).ok_or(error)?
+            }
+                   Some(Ok(c)) => {
+                let ch = parse_utf8_char(c, iter);
+                Event::Key(Key::Alt(try!(ch)))
+            }
+                   Some(Err(_)) | None => return Err(error),
+               })
+        }
+        b'\n' | b'\r' => Ok(Event::Key(Key::Char('\n'))),
+        b'\t' => Ok(Event::Key(Key::Char('\t'))),
+        b'\x7F' => Ok(Event::Key(Key::Backspace)),
+        c @ b'\x01'...b'\x1A' => Ok(Event::Key(Key::Ctrl((c as u8 - 0x1 + b'a') as char))),
+        c @ b'\x1C'...b'\x1F' => Ok(Event::Key(Key::Ctrl((c as u8 - 0x1C + b'4') as char))),
+        b'\0' => Ok(Event::Key(Key::Null)),
+        c => {
+            Ok({
+                   let ch = parse_utf8_char(c, iter);
+                   Event::Key(Key::Char(try!(ch)))
+               })
+        }
+    }
+}
+
+/// Parses a CSI sequence, just after the leading `ESC [` has been read.
+///
+/// Returns None if an unrecognized sequence is found.
+fn parse_csi<I>(iter: &mut I) -> Option<Event>
+    where I: Iterator<Item = Result<u8, Error>>
+{
+    Some(match iter.next() {
+             Some(Ok(b'[')) => match iter.next() {
+                 Some(Ok(val @ b'A'...b'E')) => Event::Key(Key::F(1 + val - b'A')),
+                 _ => return None,
+             },
+             Some(Ok(b'D')) => Event::Key(Key::Left),
+             Some(Ok(b'C')) => Event::Key(Key::Right),
+             Some(Ok(b'A')) => Event::Key(Key::Up),
+             Some(Ok(b'B')) => Event::Key(Key::Down),
+             Some(Ok(b'H')) => Event::Key(Key::Home),
+             Some(Ok(b'F')) => Event::Key(Key::End),
+             Some(Ok(b'M')) => {
+        // X10 emulation mouse encoding: ESC [ M Cb Cx Cy (6 characters only).
+        let mut next = || iter.next().unwrap().unwrap();
+
+        let cb = next() as i8 - 32;
+        // (1, 1) are the coords for upper left.
+        let cx = next().saturating_sub(32) as u16;
+        let cy = next().saturating_sub(32) as u16;
+        Event::Mouse(match cb & 0b11 {
+                         0 => {
+                             if cb & 0x40 != 0 {
+                                 MouseEvent::Press(MouseButton::WheelUp, cx, cy)
+                             } else {
+                                 MouseEvent::Press(MouseButton::Left, cx, cy)
+                             }
+                         }
+                         1 => {
+                             if cb & 0x40 != 0 {
+                                 MouseEvent::Press(MouseButton::WheelDown, cx, cy)
+                             } else {
+                                 MouseEvent::Press(MouseButton::Middle, cx, cy)
+                             }
+                         }
+                         2 => MouseEvent::Press(MouseButton::Right, cx, cy),
+                         3 => MouseEvent::Release(cx, cy),
+                         _ => return None,
+                     })
+    }
+             Some(Ok(b'<')) => {
+        // xterm mouse encoding:
+        // ESC [ < Cb ; Cx ; Cy (;) (M or m)
+        let mut buf = Vec::new();
+        let mut c = iter.next().unwrap().unwrap();
+        while match c {
+                  b'm' | b'M' => false,
+                  _ => true,
+              } {
+            buf.push(c);
+            c = iter.next().unwrap().unwrap();
+        }
+        let str_buf = String::from_utf8(buf).unwrap();
+        let nums = &mut str_buf.split(';');
+
+        let cb = nums.next()
+            .unwrap()
+            .parse::<u16>()
+            .unwrap();
+        let cx = nums.next()
+            .unwrap()
+            .parse::<u16>()
+            .unwrap();
+        let cy = nums.next()
+            .unwrap()
+            .parse::<u16>()
+            .unwrap();
+
+        let event = match cb {
+            0...2 | 64...65 => {
+                let button = match cb {
+                    0 => MouseButton::Left,
+                    1 => MouseButton::Middle,
+                    2 => MouseButton::Right,
+                    64 => MouseButton::WheelUp,
+                    65 => MouseButton::WheelDown,
+                    _ => unreachable!(),
+                };
+                match c {
+                    b'M' => MouseEvent::Press(button, cx, cy),
+                    b'm' => MouseEvent::Release(cx, cy),
+                    _ => return None,
+                }
+            }
+            32 => MouseEvent::Hold(cx, cy),
+            3 => MouseEvent::Release(cx, cy),
+            _ => return None,
+        };
+
+        Event::Mouse(event)
+    }
+             Some(Ok(c @ b'0'...b'9')) => {
+        // Numbered escape code.
+        let mut buf = Vec::new();
+        buf.push(c);
+        let mut c = iter.next().unwrap().unwrap();
+        // The final byte of a CSI sequence is in the range 64-126, so keep
+        // reading until we hit a byte in that range.
+        while c < 64 || c > 126 {
+            buf.push(c);
+            c = iter.next().unwrap().unwrap();
+        }
+
+        match c {
+            // rxvt mouse encoding:
+            // ESC [ Cb ; Cx ; Cy ; M
+            b'M' => {
+                let str_buf = String::from_utf8(buf).unwrap();
+
+                let nums: Vec<u16> = str_buf.split(';').map(|n| n.parse().unwrap()).collect();
+
+                let cb = nums[0];
+                let cx = nums[1];
+                let cy = nums[2];
+
+                let event = match cb {
+                    32 => MouseEvent::Press(MouseButton::Left, cx, cy),
+                    33 => MouseEvent::Press(MouseButton::Middle, cx, cy),
+                    34 => MouseEvent::Press(MouseButton::Right, cx, cy),
+                    35 => MouseEvent::Release(cx, cy),
+                    64 => MouseEvent::Hold(cx, cy),
+                    96 | 97 => MouseEvent::Press(MouseButton::WheelUp, cx, cy),
+                    _ => return None,
+                };
+
+                Event::Mouse(event)
+            }
+            // Special key code.
+            b'~' => {
+                let str_buf = String::from_utf8(buf).unwrap();
+
+                // This CSI sequence can be a list of semicolon-separated
+                // numbers.
+                let nums: Vec<u8> = str_buf.split(';').map(|n| n.parse().unwrap()).collect();
+
+                if nums.is_empty() {
+                    return None;
+                }
+
+                // TODO: handle multiple values for key modifiers (e.g. the values
+                // [3, 2] mean Shift+Delete).
+                if nums.len() > 1 {
+                    return None;
+                }
+
+                match nums[0] {
+                    1 | 7 => Event::Key(Key::Home),
+                    2 => Event::Key(Key::Insert),
+                    3 => Event::Key(Key::Delete),
+                    4 | 8 => Event::Key(Key::End),
+                    5 => Event::Key(Key::PageUp),
+                    6 => Event::Key(Key::PageDown),
+                    v @ 11...15 => Event::Key(Key::F(v - 10)),
+                    v @ 17...21 => Event::Key(Key::F(v - 11)),
+                    v @ 23...24 => Event::Key(Key::F(v - 12)),
+                    _ => return None,
+                }
+            }
+            _ => return None,
+        }
+    }
+             _ => return None,
+         })
+
+}
+
+/// Parse `c` as either a single byte ASCII char or a variable size UTF-8 char.
+fn parse_utf8_char<I>(c: u8, iter: &mut I) -> Result<char, Error>
+    where I: Iterator<Item = Result<u8, Error>>
+{
+    let error = Err(Error::new(ErrorKind::Other, "Input character is not valid UTF-8"));
+    if c.is_ascii() {
+        Ok(c as char)
+    } else {
+        let bytes = &mut Vec::new();
+        bytes.push(c);
+
+        loop {
+            match iter.next() {
+                Some(Ok(next)) => {
+                    bytes.push(next);
+                    if let Ok(st) = str::from_utf8(bytes) {
+                        return Ok(st.chars().next().unwrap());
+                    }
+                    if bytes.len() >= 4 {
+                        return error;
+                    }
+                }
+                _ => return error,
+            }
+        }
+    }
+}
+
+#[cfg(test)]
+#[test]
+fn test_parse_utf8() {
+    let st = "abcéŷ¤£€ù%323";
+    let ref mut bytes = st.bytes().map(|x| Ok(x));
+    let chars = st.chars();
+    for c in chars {
+        let b = bytes.next().unwrap().unwrap();
+        assert!(c == parse_utf8_char(b, bytes).unwrap());
+    }
+}
new file mode 100644
--- /dev/null
+++ b/third_party/rust/termion/src/input.rs
@@ -0,0 +1,388 @@
+//! User input.
+
+use std::io::{self, Read, Write};
+use std::ops;
+
+use event::{self, Event, Key};
+use raw::IntoRawMode;
+
+/// An iterator over input keys.
+pub struct Keys<R> {
+    iter: Events<R>,
+}
+
+impl<R: Read> Iterator for Keys<R> {
+    type Item = Result<Key, io::Error>;
+
+    fn next(&mut self) -> Option<Result<Key, io::Error>> {
+        loop {
+            match self.iter.next() {
+                Some(Ok(Event::Key(k))) => return Some(Ok(k)),
+                Some(Ok(_)) => continue,
+                // Propagate read errors instead of silently dropping them.
+                Some(Err(e)) => return Some(Err(e)),
+                None => return None,
+            };
+        }
+    }
+}
+
+/// An iterator over input events.
+pub struct Events<R> {
+    inner: EventsAndRaw<R>
+}
+
+impl<R: Read> Iterator for Events<R> {
+    type Item = Result<Event, io::Error>;
+
+    fn next(&mut self) -> Option<Result<Event, io::Error>> {
+        self.inner.next().map(|tuple| tuple.map(|(event, _raw)| event))
+    }
+}
+
+/// An iterator over input events and the bytes that define them.
+pub struct EventsAndRaw<R> {
+    source: R,
+    leftover: Option<u8>,
+}
+
+impl<R: Read> Iterator for EventsAndRaw<R> {
+    type Item = Result<(Event, Vec<u8>), io::Error>;
+
+    fn next(&mut self) -> Option<Result<(Event, Vec<u8>), io::Error>> {
+        let mut source = &mut self.source;
+
+        if let Some(c) = self.leftover {
+            // we have a leftover byte, use it
+            self.leftover = None;
+            return Some(parse_event(c, &mut source.bytes()));
+        }
+
+        // Here we read two bytes at a time. We need to distinguish between a single ESC key press
+        // and an escape sequence (which also starts with the ESC byte, 0x1B). The idea is that an
+        // escape sequence yields multiple bytes on the first read (the first byte being ESC),
+        // whereas a lone ESC key press yields only a single byte.
+        let mut buf = [0u8; 2];
+        let res = match source.read(&mut buf) {
+            Ok(0) => return None,
+            Ok(1) => {
+                match buf[0] {
+                    b'\x1B' => Ok((Event::Key(Key::Esc), vec![b'\x1B'])),
+                    c => parse_event(c, &mut source.bytes()),
+                }
+            }
+            Ok(2) => {
+                let mut option_iter = &mut Some(buf[1]).into_iter();
+                let result = {
+                    let mut iter = option_iter.map(|c| Ok(c)).chain(source.bytes());
+                    parse_event(buf[0], &mut iter)
+                };
+                // If the option_iter wasn't consumed, keep the byte for later.
+                self.leftover = option_iter.next();
+                result
+            }
+            Ok(_) => unreachable!(),
+            Err(e) => Err(e),
+        };
+
+        Some(res)
+    }
+}
+
+fn parse_event<I>(item: u8, iter: &mut I) -> Result<(Event, Vec<u8>), io::Error>
+    where I: Iterator<Item = Result<u8, io::Error>>
+{
+    let mut buf = vec![item];
+    let result = {
+        let mut iter = iter.inspect(|byte| if let &Ok(byte) = byte {
+                                        buf.push(byte);
+                                    });
+        event::parse_event(item, &mut iter)
+    };
+    result.or(Ok(Event::Unsupported(buf.clone()))).map(|e| (e, buf))
+}
+
+
+/// Extension to `Read` trait.
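+///
+/// # Example
+///
+/// A minimal sketch reading key presses from stdin; raw mode is assumed so that keys arrive
+/// unbuffered:
+///
+/// ```rust,no_run
+/// use termion::input::TermRead;
+/// use termion::raw::IntoRawMode;
+/// use std::io::{stdin, stdout};
+///
+/// let _raw = stdout().into_raw_mode().unwrap();
+/// for key in stdin().keys() {
+///     println!("{:?}\r", key.unwrap());
+/// }
+/// ```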
+pub trait TermRead {
+    /// An iterator over input events.
+    fn events(self) -> Events<Self> where Self: Sized;
+
+    /// An iterator over key inputs.
+    fn keys(self) -> Keys<Self> where Self: Sized;
+
+    /// Read a line.
+    ///
+    /// EOT and ETX will abort the prompt, returning `None`. Newline or carriage return will
+    /// complete the input.
+    fn read_line(&mut self) -> io::Result<Option<String>>;
+
+    /// Read a password.
+    ///
+    /// EOT and ETX will abort the prompt, returning `None`. Newline or carriage return will
+    /// complete the input.
+    fn read_passwd<W: Write>(&mut self, writer: &mut W) -> io::Result<Option<String>> {
+        let _raw = try!(writer.into_raw_mode());
+        self.read_line()
+    }
+}
+
+
+impl<R: Read + TermReadEventsAndRaw> TermRead for R {
+    fn events(self) -> Events<Self> {
+        Events {
+            inner: self.events_and_raw()
+        }
+    }
+    fn keys(self) -> Keys<Self> {
+        Keys { iter: self.events() }
+    }
+
+    fn read_line(&mut self) -> io::Result<Option<String>> {
+        let mut buf = Vec::with_capacity(30);
+
+        for c in self.bytes() {
+            match c {
+                Err(e) => return Err(e),
+                Ok(0) | Ok(3) | Ok(4) => return Ok(None),
+                Ok(0x7f) => {
+                    buf.pop();
+                }
+                Ok(b'\n') | Ok(b'\r') => break,
+                Ok(c) => buf.push(c),
+            }
+        }
+
+        let string = try!(String::from_utf8(buf)
+            .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e)));
+        Ok(Some(string))
+    }
+}
+
+/// Extension to the `TermRead` trait. It is kept as a separate trait to maintain backwards
+/// compatibility.
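+///
+/// # Example
+///
+/// A sketch printing each decoded event together with the raw bytes it was parsed from:
+///
+/// ```rust,no_run
+/// use termion::input::TermReadEventsAndRaw;
+/// use std::io::stdin;
+///
+/// for event_and_raw in stdin().events_and_raw() {
+///     let (event, raw) = event_and_raw.unwrap();
+///     println!("{:?} came from the bytes {:?}\r", event, raw);
+/// }
+/// ```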
+pub trait TermReadEventsAndRaw {
+    /// An iterator over input events and the bytes that define them.
+    fn events_and_raw(self) -> EventsAndRaw<Self> where Self: Sized;
+}
+
+impl<R: Read> TermReadEventsAndRaw for R {
+    fn events_and_raw(self) -> EventsAndRaw<Self> {
+        EventsAndRaw {
+            source: self,
+            leftover: None,
+        }
+    }
+}
+
+/// A sequence of escape codes to enable terminal mouse support.
+const ENTER_MOUSE_SEQUENCE: &'static str = csi!("?1000h\x1b[?1002h\x1b[?1015h\x1b[?1006h");
+
+/// A sequence of escape codes to disable terminal mouse support.
+const EXIT_MOUSE_SEQUENCE: &'static str = csi!("?1006l\x1b[?1015l\x1b[?1002l\x1b[?1000l");
+
+/// A terminal with added mouse support.
+///
+/// This can be obtained through the `From` implementations.
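+///
+/// # Example
+///
+/// A sketch enabling mouse reporting on raw-mode stdout and reading the resulting events:
+///
+/// ```rust,no_run
+/// use termion::input::{MouseTerminal, TermRead};
+/// use termion::raw::IntoRawMode;
+/// use std::io::{stdin, stdout, Write};
+///
+/// let mut term = MouseTerminal::from(stdout().into_raw_mode().unwrap());
+/// write!(term, "Click somewhere.").unwrap();
+/// if let Some(event) = stdin().events().next() {
+///     println!("{:?}\r", event.unwrap());
+/// }
+/// ```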
+pub struct MouseTerminal<W: Write> {
+    term: W,
+}
+
+impl<W: Write> From<W> for MouseTerminal<W> {
+    fn from(mut from: W) -> MouseTerminal<W> {
+        from.write_all(ENTER_MOUSE_SEQUENCE.as_bytes()).unwrap();
+
+        MouseTerminal { term: from }
+    }
+}
+
+impl<W: Write> Drop for MouseTerminal<W> {
+    fn drop(&mut self) {
+        self.term.write_all(EXIT_MOUSE_SEQUENCE.as_bytes()).unwrap();
+    }
+}
+
+impl<W: Write> ops::Deref for MouseTerminal<W> {
+    type Target = W;
+
+    fn deref(&self) -> &W {
+        &self.term
+    }
+}
+
+impl<W: Write> ops::DerefMut for MouseTerminal<W> {
+    fn deref_mut(&mut self) -> &mut W {
+        &mut self.term
+    }
+}
+
+impl<W: Write> Write for MouseTerminal<W> {
+    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
+        self.term.write(buf)
+    }
+
+    fn flush(&mut self) -> io::Result<()> {
+        self.term.flush()
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use super::*;
+    use std::io;
+    use event::{Key, Event, MouseEvent, MouseButton};
+
+    #[test]
+    fn test_keys() {
+        let mut i = b"\x1Bayo\x7F\x1B[D".keys();
+
+        assert_eq!(i.next().unwrap().unwrap(), Key::Alt('a'));
+        assert_eq!(i.next().unwrap().unwrap(), Key::Char('y'));
+        assert_eq!(i.next().unwrap().unwrap(), Key::Char('o'));
+        assert_eq!(i.next().unwrap().unwrap(), Key::Backspace);
+        assert_eq!(i.next().unwrap().unwrap(), Key::Left);
+        assert!(i.next().is_none());
+    }
+
+    #[test]
+    fn test_events() {
+        let mut i =
+            b"\x1B[\x00bc\x7F\x1B[D\
+                    \x1B[M\x00\x22\x24\x1B[<0;2;4;M\x1B[32;2;4M\x1B[<0;2;4;m\x1B[35;2;4Mb"
+                    .events();
+
+        assert_eq!(i.next().unwrap().unwrap(),
+                   Event::Unsupported(vec![0x1B, b'[', 0x00]));
+        assert_eq!(i.next().unwrap().unwrap(), Event::Key(Key::Char('b')));
+        assert_eq!(i.next().unwrap().unwrap(), Event::Key(Key::Char('c')));
+        assert_eq!(i.next().unwrap().unwrap(), Event::Key(Key::Backspace));
+        assert_eq!(i.next().unwrap().unwrap(), Event::Key(Key::Left));
+        assert_eq!(i.next().unwrap().unwrap(),
+                   Event::Mouse(MouseEvent::Press(MouseButton::WheelUp, 2, 4)));
+        assert_eq!(i.next().unwrap().unwrap(),
+                   Event::Mouse(MouseEvent::Press(MouseButton::Left, 2, 4)));
+        assert_eq!(i.next().unwrap().unwrap(),
+                   Event::Mouse(MouseEvent::Press(MouseButton::Left, 2, 4)));
+        assert_eq!(i.next().unwrap().unwrap(),
+                   Event::Mouse(MouseEvent::Release(2, 4)));
+        assert_eq!(i.next().unwrap().unwrap(),
+                   Event::Mouse(MouseEvent::Release(2, 4)));
+        assert_eq!(i.next().unwrap().unwrap(), Event::Key(Key::Char('b')));
+        assert!(i.next().is_none());
+    }
+
+    #[test]
+    fn test_events_and_raw() {
+        let input = b"\x1B[\x00bc\x7F\x1B[D\
+                    \x1B[M\x00\x22\x24\x1B[<0;2;4;M\x1B[32;2;4M\x1B[<0;2;4;m\x1B[35;2;4Mb";
+        let mut output = Vec::<u8>::new();
+        {
+            let mut i = input.events_and_raw().map(|res| res.unwrap())
+                .inspect(|&(_, ref raw)| { output.extend(raw); }).map(|(event, _)| event);
+
+            assert_eq!(i.next().unwrap(),
+            Event::Unsupported(vec![0x1B, b'[', 0x00]));
+            assert_eq!(i.next().unwrap(), Event::Key(Key::Char('b')));
+            assert_eq!(i.next().unwrap(), Event::Key(Key::Char('c')));
+            assert_eq!(i.next().unwrap(), Event::Key(Key::Backspace));
+            assert_eq!(i.next().unwrap(), Event::Key(Key::Left));
+            assert_eq!(i.next().unwrap(),
+            Event::Mouse(MouseEvent::Press(MouseButton::WheelUp, 2, 4)));
+            assert_eq!(i.next().unwrap(),
+            Event::Mouse(MouseEvent::Press(MouseButton::Left, 2, 4)));
+            assert_eq!(i.next().unwrap(),
+            Event::Mouse(MouseEvent::Press(MouseButton::Left, 2, 4)));
+            assert_eq!(i.next().unwrap(),
+            Event::Mouse(MouseEvent::Release(2, 4)));
+            assert_eq!(i.next().unwrap(),
+            Event::Mouse(MouseEvent::Release(2, 4)));
+            assert_eq!(i.next().unwrap(), Event::Key(Key::Char('b')));
+            assert!(i.next().is_none());
+        }
+
+        assert_eq!(input.iter().map(|b| *b).collect::<Vec<u8>>(), output)
+    }
+
+    #[test]
+    fn test_function_keys() {
+        let mut st = b"\x1BOP\x1BOQ\x1BOR\x1BOS".keys();
+        for i in 1..5 {
+            assert_eq!(st.next().unwrap().unwrap(), Key::F(i));
+        }
+
+        let mut st = b"\x1B[11~\x1B[12~\x1B[13~\x1B[14~\x1B[15~\
+        \x1B[17~\x1B[18~\x1B[19~\x1B[20~\x1B[21~\x1B[23~\x1B[24~"
+                .keys();
+        for i in 1..13 {
+            assert_eq!(st.next().unwrap().unwrap(), Key::F(i));
+        }
+    }
+
+    #[test]
+    fn test_special_keys() {
+        let mut st = b"\x1B[2~\x1B[H\x1B[7~\x1B[5~\x1B[3~\x1B[F\x1B[8~\x1B[6~".keys();
+        assert_eq!(st.next().unwrap().unwrap(), Key::Insert);
+        assert_eq!(st.next().unwrap().unwrap(), Key::Home);
+        assert_eq!(st.next().unwrap().unwrap(), Key::Home);
+        assert_eq!(st.next().unwrap().unwrap(), Key::PageUp);
+        assert_eq!(st.next().unwrap().unwrap(), Key::Delete);
+        assert_eq!(st.next().unwrap().unwrap(), Key::End);
+        assert_eq!(st.next().unwrap().unwrap(), Key::End);
+        assert_eq!(st.next().unwrap().unwrap(), Key::PageDown);
+        assert!(st.next().is_none());
+    }
+
+    #[test]
+    fn test_esc_key() {
+        let mut st = b"\x1B".keys();
+        assert_eq!(st.next().unwrap().unwrap(), Key::Esc);
+        assert!(st.next().is_none());
+    }
+
+    fn line_match(a: &str, b: Option<&str>) {
+        let mut sink = io::sink();
+
+        let line = a.as_bytes().read_line().unwrap();
+        let pass = a.as_bytes().read_passwd(&mut sink).unwrap();
+
+        // `read_line` and `read_passwd` should behave identically for the same input.
+        assert_eq!(line, pass);
+
+        if let Some(l) = line {
+            assert_eq!(Some(l.as_str()), b);
+        } else {
+            assert!(b.is_none());
+        }
+    }
+
+    #[test]
+    fn test_read() {
+        let test1 = "this is the first test";
+        let test2 = "this is the second test";
+
+        line_match(test1, Some(test1));
+        line_match(test2, Some(test2));
+    }
+
+    #[test]
+    fn test_backspace() {
+        line_match("this is the\x7f first\x7f\x7f test",
+                   Some("this is th fir test"));
+        line_match("this is the seco\x7fnd test\x7f",
+                   Some("this is the secnd tes"));
+    }
+
+    #[test]
+    fn test_end() {
+        line_match("abc\nhttps://www.youtube.com/watch?v=dQw4w9WgXcQ",
+                   Some("abc"));
+        line_match("hello\rhttps://www.youtube.com/watch?v=yPYZpwSpKmA",
+                   Some("hello"));
+    }
+
+    #[test]
+    fn test_abort() {
+        line_match("abc\x03https://www.youtube.com/watch?v=dQw4w9WgXcQ", None);
+        line_match("hello\x04https://www.youtube.com/watch?v=yPYZpwSpKmA", None);
+    }
+
+}
new file mode 100644
--- /dev/null
+++ b/third_party/rust/termion/src/lib.rs
@@ -0,0 +1,61 @@
+//! Termion is a pure Rust, bindless library for low-level handling and manipulation of
+//! terminals, and for reading information about them. It provides a full-featured
+//! alternative to Termbox.
+//!
+//! Termion aims to be simple and yet expressive. It is bindless, meaning that it
+//! is not a front-end to some other library (e.g., ncurses or termbox), but a
+//! standalone library directly talking to the TTY.
+//!
+//! Supports Redox, Mac OS X, and Linux (or, in general, ANSI terminals).
+//!
+//! For more information refer to the [README](https://github.com/ticki/termion).
+#![warn(missing_docs)]
+
+#[cfg(target_os = "redox")]
+#[path="sys/redox/mod.rs"]
+mod sys;
+
+#[cfg(unix)]
+#[path="sys/unix/mod.rs"]
+mod sys;
+
+pub use sys::size::terminal_size;
+pub use sys::tty::{is_tty, get_tty};
+
+mod async;
+pub use async::{AsyncReader, async_stdin};
+
+#[macro_use]
+mod macros;
+pub mod clear;
+pub mod color;
+pub mod cursor;
+pub mod event;
+pub mod input;
+pub mod raw;
+pub mod screen;
+pub mod scroll;
+pub mod style;
+
+#[cfg(test)]
+mod test {
+    use super::sys;
+
+    #[test]
+    fn test_get_terminal_attr() {
+        sys::attr::get_terminal_attr().unwrap();
+        sys::attr::get_terminal_attr().unwrap();
+        sys::attr::get_terminal_attr().unwrap();
+    }
+
+    #[test]
+    fn test_set_terminal_attr() {
+        let ios = sys::attr::get_terminal_attr().unwrap();
+        sys::attr::set_terminal_attr(&ios).unwrap();
+    }
+
+    #[test]
+    fn test_size() {
+        sys::size::terminal_size().unwrap();
+    }
+}
new file mode 100644
--- /dev/null
+++ b/third_party/rust/termion/src/macros.rs
@@ -0,0 +1,19 @@
+/// Create a CSI-introduced sequence.
+macro_rules! csi {
+    ($( $l:expr ),*) => { concat!("\x1B[", $( $l ),*) };
+}
+
+/// Derive a CSI sequence struct.
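+///
+/// # Example
+///
+/// An illustrative sketch (marked `ignore` because the macro is internal to the crate); it
+/// assumes `std::fmt` and the `csi!` macro are in scope in the defining module:
+///
+/// ```rust,ignore
+/// derive_csi_sequence!("Hide the cursor.", Hide, "?25l");
+///
+/// // `Hide` now implements `Display` and writes the bytes "\x1B[?25l".
+/// print!("{}", Hide);
+/// ```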
+macro_rules! derive_csi_sequence {
+    ($doc:expr, $name:ident, $value:expr) => {
+        #[doc = $doc]
+        #[derive(Copy, Clone)]
+        pub struct $name;
+
+        impl fmt::Display for $name {
+            fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+                write!(f, csi!($value))
+            }
+        }
+    };
+}
new file mode 100644
--- /dev/null
+++ b/third_party/rust/termion/src/raw.rs
@@ -0,0 +1,117 @@
+//! Managing raw mode.
+//!
+//! Raw mode is a particular state a TTY can have. It signifies that:
+//!
+//! 1. No line buffering (the input is given byte-by-byte).
+//! 2. The input is not written out, instead it has to be done manually by the programmer.
+//! 3. The output is not canonicalized (for example, `\n` means "go one line down", not "line
+//!    break").
+//!
+//! It is essential for designing terminal programs.
+//!
+//! # Example
+//!
+//! ```rust,no_run
+//! use termion::raw::IntoRawMode;
+//! use std::io::{Write, stdout};
+//!
+//! fn main() {
+//!     let mut stdout = stdout().into_raw_mode().unwrap();
+//!
+//!     write!(stdout, "Hey there.").unwrap();
+//! }
+//! ```
+
+use std::io::{self, Write};
+use std::ops;
+
+use sys::Termios;
+use sys::attr::{get_terminal_attr, raw_terminal_attr, set_terminal_attr};
+
+/// The timeout of an escape code control sequence, in milliseconds.
+pub const CONTROL_SEQUENCE_TIMEOUT: u64 = 100;
+
+/// A terminal restorer, which keeps the previous state of the terminal and restores it when
+/// dropped.
+///
+/// Restoring will entirely bring back the old TTY state.
+pub struct RawTerminal<W: Write> {
+    prev_ios: Termios,
+    output: W,
+}
+
+impl<W: Write> Drop for RawTerminal<W> {
+    fn drop(&mut self) {
+        set_terminal_attr(&self.prev_ios).unwrap();
+    }
+}
+
+impl<W: Write> ops::Deref for RawTerminal<W> {
+    type Target = W;
+
+    fn deref(&self) -> &W {
+        &self.output
+    }
+}
+
+impl<W: Write> ops::DerefMut for RawTerminal<W> {
+    fn deref_mut(&mut self) -> &mut W {
+        &mut self.output
+    }
+}
+
+impl<W: Write> Write for RawTerminal<W> {
+    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
+        self.output.write(buf)
+    }
+
+    fn flush(&mut self) -> io::Result<()> {
+        self.output.flush()
+    }
+}
+
+/// Types which can be converted into "raw mode".
+///
+/// # Why is this type defined on writers and not readers?
+///
+/// TTYs have their state controlled by the writer, not the reader. You use the writer to clear the
+/// screen, move the cursor and so on, so naturally you use the writer to change the mode as well.
+pub trait IntoRawMode: Write + Sized {
+    /// Switch to raw mode.
+    ///
+    /// Raw mode means that stdin won't be echoed (it will instead have to be written manually by
+    /// the program). Furthermore, the input isn't canonicalized or buffered (that is, you can
+    /// read from stdin one byte at a time). The output is not modified in any way.
+    fn into_raw_mode(self) -> io::Result<RawTerminal<Self>>;
+}
+
+impl<W: Write> IntoRawMode for W {
+    fn into_raw_mode(self) -> io::Result<RawTerminal<W>> {
+        let mut ios = get_terminal_attr()?;
+        let prev_ios = ios;
+
+        raw_terminal_attr(&mut ios);
+
+        set_terminal_attr(&ios)?;
+
+        Ok(RawTerminal {
+            prev_ios: prev_ios,
+            output: self,
+        })
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use super::*;
+    use std::io::{Write, stdout};
+
+    #[test]
+    fn test_into_raw_mode() {
+        let mut out = stdout().into_raw_mode().unwrap();
+
+        out.write_all(b"this is a test, muahhahahah\r\n").unwrap();
+
+        drop(out);
+    }
+}
new file mode 100644
--- /dev/null
+++ b/third_party/rust/termion/src/screen.rs
@@ -0,0 +1,91 @@
+//! Managing switching between main and alternate screen buffers.
+//!
+//! Note that this implementation uses xterm's new escape sequences for screen switching and thus
+//! only works for xterm-compatible terminals (which should be most terminals nowadays).
+//!
+//! # Example
+//!
+//! ```rust
+//! use termion::screen::AlternateScreen;
+//! use std::io::{Write, stdout};
+//!
+//! fn main() {
+//!     {
+//!         let mut screen = AlternateScreen::from(stdout());
+//!         write!(screen, "Writing to alternate screen!").unwrap();
+//!         screen.flush().unwrap();
+//!     }
+//!     println!("Writing to main screen.");
+//! }
+//! ```
+
+use std::io::{self, Write};
+use std::ops;
+use std::fmt;
+
+/// Switch to the main screen buffer of the terminal.
+pub struct ToMainScreen;
+
+impl fmt::Display for ToMainScreen {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, csi!("?1049l"))
+    }
+}
+
+/// Switch to the alternate screen buffer of the terminal.
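+///
+/// # Example
+///
+/// A short sketch switching screens by hand rather than through `AlternateScreen`:
+///
+/// ```rust,no_run
+/// use termion::screen::{ToAlternateScreen, ToMainScreen};
+/// use std::io::{stdout, Write};
+///
+/// let mut stdout = stdout();
+/// write!(stdout, "{}Drawn on the alternate screen.", ToAlternateScreen).unwrap();
+/// stdout.flush().unwrap();
+/// write!(stdout, "{}", ToMainScreen).unwrap();
+/// ```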
+pub struct ToAlternateScreen;
+
+impl fmt::Display for ToAlternateScreen {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, csi!("?1049h"))
+    }
+}
+
+/// A terminal restorer, which wraps a type implementing Write, and causes all writes to be written
+/// to an alternate screen.
+///
+/// This is achieved by switching the terminal to the alternate screen on creation and
+/// automatically switching it back to the original screen on drop.
+pub struct AlternateScreen<W: Write> {
+    /// The output target.
+    output: W,
+}
+
+impl<W: Write> AlternateScreen<W> {
+    /// Create an alternate screen wrapper struct for the provided output and switch the terminal
+    /// to the alternate screen.
+    pub fn from(mut output: W) -> Self {
+        write!(output, "{}", ToAlternateScreen).expect("switch to alternate screen");
+        AlternateScreen { output: output }
+    }
+}
+
+impl<W: Write> Drop for AlternateScreen<W> {
+    fn drop(&mut self) {
+        write!(self, "{}", ToMainScreen).expect("switch to main screen");
+    }
+}
+
+impl<W: Write> ops::Deref for AlternateScreen<W> {
+    type Target = W;
+
+    fn deref(&self) -> &W {
+        &self.output
+    }
+}
+
+impl<W: Write> ops::DerefMut for AlternateScreen<W> {
+    fn deref_mut(&mut self) -> &mut W {
+        &mut self.output
+    }
+}
+
+impl<W: Write> Write for AlternateScreen<W> {
+    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
+        self.output.write(buf)
+    }
+
+    fn flush(&mut self) -> io::Result<()> {
+        self.output.flush()
+    }
+}
new file mode 100644
--- /dev/null
+++ b/third_party/rust/termion/src/scroll.rs
@@ -0,0 +1,23 @@
+//! Scrolling.
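+//!
+//! # Example
+//!
+//! A short sketch; the terminal is assumed to honor the scroll escape codes:
+//!
+//! ```rust
+//! // Scroll the visible region up by five lines, then back down by two.
+//! print!("{}{}", termion::scroll::Up(5), termion::scroll::Down(2));
+//! ```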
+
+use std::fmt;
+
+/// Scroll up.
+#[derive(Copy, Clone, PartialEq, Eq)]
+pub struct Up(pub u16);
+
+impl fmt::Display for Up {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, csi!("{}S"), self.0)
+    }
+}
+
+/// Scroll down.
+#[derive(Copy, Clone, PartialEq, Eq)]
+pub struct Down(pub u16);
+
+impl fmt::Display for Down {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, csi!("{}T"), self.0)
+    }
+}
new file mode 100644
--- /dev/null
+++ b/third_party/rust/termion/src/style.rs
@@ -0,0 +1,22 @@
+//! Text styling management.
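+//!
+//! # Example
+//!
+//! A short sketch; support for the less common attributes varies between terminals:
+//!
+//! ```rust
+//! println!("{}{}Bold italic text{}",
+//!          termion::style::Bold,
+//!          termion::style::Italic,
+//!          termion::style::Reset);
+//! ```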
+
+use std::fmt;
+
+derive_csi_sequence!("Reset SGR parameters.", Reset, "m");
+derive_csi_sequence!("Bold text.", Bold, "1m");
+derive_csi_sequence!("Fainted text (not widely supported).", Faint, "2m");
+derive_csi_sequence!("Italic text.", Italic, "3m");
+derive_csi_sequence!("Underlined text.", Underline, "4m");
+derive_csi_sequence!("Blinking text (not widely supported).", Blink, "5m");
+derive_csi_sequence!("Inverted colors (negative mode).", Invert, "7m");
+derive_csi_sequence!("Crossed out text (not widely supported).", CrossedOut, "9m");
+derive_csi_sequence!("Undo bold text.", NoBold, "21m");
+derive_csi_sequence!("Undo fainted text (not widely supported).", NoFaint, "22m");
+derive_csi_sequence!("Undo italic text.", NoItalic, "23m");
+derive_csi_sequence!("Undo underlined text.", NoUnderline, "24m");
+derive_csi_sequence!("Undo blinking text (not widely supported).", NoBlink, "25m");
+derive_csi_sequence!("Undo inverted colors (negative mode).", NoInvert, "27m");
+derive_csi_sequence!("Undo crossed out text (not widely supported).",
+                     NoCrossedOut,
+                     "29m");
+derive_csi_sequence!("Framed text (not widely supported).", Framed, "51m");
new file mode 100644
--- /dev/null
+++ b/third_party/rust/termion/src/sys/redox/attr.rs
@@ -0,0 +1,33 @@
+use std::io;
+
+use super::{cvt, syscall, Termios};
+
+pub fn get_terminal_attr() -> io::Result<Termios> {
+    let mut termios = Termios::default();
+
+    let fd = cvt(syscall::dup(0, b"termios"))?;
+    let res = cvt(syscall::read(fd, &mut termios));
+    let _ = syscall::close(fd);
+
+    if res? == termios.len() {
+        Ok(termios)
+    } else {
+        Err(io::Error::new(io::ErrorKind::Other, "Unable to get the terminal attributes."))
+    }
+}
+
+pub fn set_terminal_attr(termios: &Termios) -> io::Result<()> {
+    let fd = cvt(syscall::dup(0, b"termios"))?;
+    let res = cvt(syscall::write(fd, termios));
+    let _ = syscall::close(fd);
+
+    if res? == termios.len() {
+        Ok(())
+    } else {
+        Err(io::Error::new(io::ErrorKind::Other, "Unable to set the terminal attributes."))
+    }
+}
+
+pub fn raw_terminal_attr(ios: &mut Termios) {
+    ios.make_raw()
+}
new file mode 100644
--- /dev/null
+++ b/third_party/rust/termion/src/sys/redox/mod.rs
@@ -0,0 +1,15 @@
+extern crate redox_termios;
+extern crate syscall;
+
+use std::io;
+
+pub use self::redox_termios::Termios;
+
+pub mod attr;
+pub mod size;
+pub mod tty;
+
+// Helper for converting a syscall error into an io::Error.
+fn cvt(result: Result<usize, syscall::Error>) -> io::Result<usize> {
+    result.map_err(|err| io::Error::from_raw_os_error(err.errno))
+}
new file mode 100644
--- /dev/null
+++ b/third_party/rust/termion/src/sys/redox/size.rs
@@ -0,0 +1,18 @@
+use std::io;
+
+use super::{cvt, redox_termios, syscall};
+
+/// Get the size of the terminal.
+pub fn terminal_size() -> io::Result<(u16, u16)> {
+    let mut winsize = redox_termios::Winsize::default();
+
+    let fd = cvt(syscall::dup(1, b"winsize"))?;
+    let res = cvt(syscall::read(fd, &mut winsize));
+    let _ = syscall::close(fd);
+
+    if res? == winsize.len() {
+        Ok((winsize.ws_col, winsize.ws_row))
+    } else {
+        Err(io::Error::new(io::ErrorKind::Other, "Unable to get the terminal size."))
+    }
+}
new file mode 100644
--- /dev/null
+++ b/third_party/rust/termion/src/sys/redox/tty.rs
@@ -0,0 +1,22 @@
+use std::{env, fs, io};
+use std::os::unix::io::AsRawFd;
+
+use super::syscall;
+
+/// Is this stream a TTY?
+pub fn is_tty<T: AsRawFd>(stream: &T) -> bool {
+    if let Ok(fd) = syscall::dup(stream.as_raw_fd(), b"termios") {
+        let _ = syscall::close(fd);
+        true
+    } else {
+        false
+    }
+}
+
+/// Get the TTY device.
+///
+/// This allows for getting stdio representing _only_ the TTY, and not other streams.
+pub fn get_tty() -> io::Result<fs::File> {
+    let tty = try!(env::var("TTY").map_err(|x| io::Error::new(io::ErrorKind::NotFound, x)));
+    fs::OpenOptions::new().read(true).write(true).open(tty)
+}
new file mode 100644
--- /dev/null
+++ b/third_party/rust/termion/src/sys/unix/attr.rs
@@ -0,0 +1,29 @@
+use std::{io, mem};
+
+use super::{cvt, Termios};
+use super::libc::c_int;
+
+pub fn get_terminal_attr() -> io::Result<Termios> {
+    extern "C" {
+        pub fn tcgetattr(fd: c_int, termptr: *mut Termios) -> c_int;
+    }
+    unsafe {
+        let mut termios = mem::zeroed();
+        cvt(tcgetattr(0, &mut termios))?;
+        Ok(termios)
+    }
+}
+
+pub fn set_terminal_attr(termios: &Termios) -> io::Result<()> {
+    extern "C" {