Merge m-c to fx-team a=merge
author: Wes Kocher <wkocher@mozilla.com>
date: Wed, 27 Aug 2014 17:07:39 -0700
changeset: 223644 3be45b58fc4787d8267e1b5879a3b25d9e391ac4
parent: 223643 07d046eea6349989e92f1c99ad3fb24b1fb59c39 (current diff)
parent: 223633 7bd309e55a3dfe86debfa3fe06eeb2ca50e95694 (diff)
child: 223692 70c1c94cfaecca52a63cca04a07f96f62b3128da
child: 223749 88847ed61a3544fdf05713e34f8a9b45b1cce91a
push id: 3979
push user: raliiev@mozilla.com
push date: Mon, 13 Oct 2014 16:35:44 +0000
treeherder: mozilla-beta@30f2cc610691
reviewers: merge
milestone: 34.0a1
first release with: nightly linux32 / linux64 / mac / win32 / win64: 3be45b58fc47 / 34.0a1 / 20140828030205
Merge m-c to fx-team a=merge
content/media/eme/MediaKeyNeededEvent.cpp
content/media/eme/MediaKeyNeededEvent.h
dom/webidl/MediaKeyNeededEvent.webidl
js/src/tests/ecma_5/eval/strict-eval-json-object-repeated-property-name.js
js/src/tests/test262/ch11/11.1/11.1.5/11.1.5-4-4-a-1-s.js
js/src/tests/test262/ch11/11.1/11.1.5/11.1.5_4-4-b-1.js
js/src/tests/test262/ch11/11.1/11.1.5/11.1.5_4-4-b-2.js
js/src/tests/test262/ch11/11.1/11.1.5/11.1.5_4-4-c-1.js
js/src/tests/test262/ch11/11.1/11.1.5/11.1.5_4-4-c-2.js
js/src/tests/test262/ch11/11.1/11.1.5/11.1.5_4-4-d-1.js
js/src/tests/test262/ch11/11.1/11.1.5/11.1.5_4-4-d-2.js
js/src/tests/test262/ch11/11.1/11.1.5/11.1.5_4-4-d-3.js
js/src/tests/test262/ch11/11.1/11.1.5/11.1.5_4-4-d-4.js
media/libpng/mozpngconf.h
--- a/b2g/config/dolphin/sources.xml
+++ b/b2g/config/dolphin/sources.xml
@@ -10,37 +10,37 @@
   <!--original fetch url was git://codeaurora.org/-->
   <remote fetch="https://git.mozilla.org/external/caf" name="caf"/>
   <!--original fetch url was https://git.mozilla.org/releases-->
   <remote fetch="https://git.mozilla.org/releases" name="mozillaorg"/>
   <!-- B2G specific things. -->
   <project name="platform_build" path="build" remote="b2g" revision="53a59364ce4f14068034c8d6fe01f4f6b9f78f23">
     <copyfile dest="Makefile" src="core/root.mk"/>
   </project>
-  <project name="gaia" path="gaia" remote="mozillaorg" revision="6e804a42ab90f4251c7fe8c68731dc1c6abd8006"/>
+  <project name="gaia" path="gaia" remote="mozillaorg" revision="3a838afca295c9db32e1a3ec76d49fb7fe7fd2d2"/>
   <project name="fake-libdvm" path="dalvik" remote="b2g" revision="d50ae982b19f42f0b66d08b9eb306be81687869f"/>
   <project name="gonk-misc" path="gonk-misc" remote="b2g" revision="3bb61a27cd2941b2ba9b616a11aaa44269210396"/>
   <project name="librecovery" path="librecovery" remote="b2g" revision="891e5069c0ad330d8191bf8c7b879c814258c89f"/>
   <project name="moztt" path="external/moztt" remote="b2g" revision="562d357b72279a9e35d4af5aeecc8e1ffa2f44f1"/>
   <project name="rilproxy" path="rilproxy" remote="b2g" revision="827214fcf38d6569aeb5c6d6f31cb296d1f09272"/>
   <project name="valgrind" path="external/valgrind" remote="b2g" revision="daa61633c32b9606f58799a3186395fd2bbb8d8c"/>
   <project name="vex" path="external/VEX" remote="b2g" revision="47f031c320888fe9f3e656602588565b52d43010"/>
-  <project name="apitrace" path="external/apitrace" remote="apitrace" revision="1085a4b05f19f1c43def7e87362d4498ce20b70f"/>
+  <project name="apitrace" path="external/apitrace" remote="apitrace" revision="320650844ec7cba40a70317b761b88b47a8dca0e"/>
   <!-- Stock Android things -->
   <project groups="linux" name="platform/prebuilts/gcc/linux-x86/host/i686-linux-glibc2.7-4.6" path="prebuilts/gcc/linux-x86/host/i686-linux-glibc2.7-4.6" revision="95bb5b66b3ec5769c3de8d3f25d681787418e7d2"/>
   <project groups="linux" name="platform/prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.7-4.6" path="prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.7-4.6" revision="ebdad82e61c16772f6cd47e9f11936bf6ebe9aa0"/>
   <project groups="linux,arm" name="platform/prebuilts/gcc/linux-x86/arm/arm-eabi-4.7" path="prebuilts/gcc/linux-x86/arm/arm-eabi-4.7" revision="8b880805d454664b3eed11d0f053cdeafa1ff06e"/>
   <project groups="linux,arm" name="platform/prebuilts/gcc/linux-x86/arm/arm-linux-androideabi-4.7" path="prebuilts/gcc/linux-x86/arm/arm-linux-androideabi-4.7" revision="a1e239a0bb5cd1d69680bf1075883aa9a7bf2429"/>
   <project groups="linux,x86" name="platform/prebuilts/gcc/linux-x86/x86/i686-linux-android-4.7" path="prebuilts/gcc/linux-x86/x86/i686-linux-android-4.7" revision="c7931763d41be602407ed9d71e2c0292c6597e00"/>
   <project groups="linux,x86" name="platform/prebuilts/python/linux-x86/2.7.5" path="prebuilts/python/linux-x86/2.7.5" revision="83760d213fb3bec7b4117d266fcfbf6fe2ba14ab"/>
   <project name="device/common" path="device/common" revision="6a2995683de147791e516aae2ccb31fdfbe2ad30"/>
   <project name="device/sample" path="device/sample" revision="1a3d8efa0ad32ec8f145367a3cf0f54b97385c3c"/>
   <project name="platform/abi/cpp" path="abi/cpp" revision="18f1b5e28734183ff8073fe86dc46bc4ebba8a59"/>
   <project name="platform/bionic" path="bionic" revision="86b1f589c313422a7da1812512b9ec8d1cf9ba3c"/>
-  <project name="platform/bootable/recovery" path="bootable/recovery" revision="1f68d4c6a5d2e72bc02fa837af94c0a51afa94de"/>
+  <project name="platform/bootable/recovery" path="bootable/recovery" revision="034ede23ad7ad8cddce9420a98c66064cbc1ecfd"/>
   <project name="platform/external/aac" path="external/aac" revision="fa3eba16446cc8f2f5e2dfc20d86a49dbd37299e"/>
   <project name="platform/external/bison" path="external/bison" revision="c2418b886165add7f5a31fc5609f0ce2d004a90e"/>
   <project name="platform/external/bluetooth/bluedroid" path="external/bluetooth/bluedroid" revision="c50830cae1b748024eec7e73ad98a4e427f663c7"/>
   <project name="platform/external/bsdiff" path="external/bsdiff" revision="23e322ab19fb7d74c2c37e40ce364d9f709bdcee"/>
   <project name="platform/external/bzip2" path="external/bzip2" revision="1cb636bd8e9e5cdfd5d5b2909a122f6e80db62de"/>
   <project name="platform/external/checkpolicy" path="external/checkpolicy" revision="0d73ef7049feee794f14cf1af88d05dae8139914"/>
   <project name="platform/external/dhcpcd" path="external/dhcpcd" revision="84b7252b0a9d0edc9a1db1e0c518771d26b23058"/>
   <project name="platform/external/dnsmasq" path="external/dnsmasq" revision="41d356427a632f5336384bfa45c8420ffc274f66"/>
@@ -122,17 +122,17 @@
   <project name="platform/system/security" path="system/security" revision="ee8068b9e7bfb2770635062fc9c2035be2142bd8"/>
   <project name="platform/system/vold" path="system/vold" revision="118dec582647895a863dbbce8ec26bc7af457bbe"/>
   <!--original fetch url was http://sprdsource.spreadtrum.com:8085/b2g/android-->
   <remote fetch="https://git.mozilla.org/external/sprd-aosp" name="sprd-aosp"/>
   <default remote="sprd-aosp" revision="sprdb2g_gonk4.4" sync-j="4"/>
   <!-- Stock Android things -->
   <project name="platform/external/icu4c" path="external/icu4c" revision="2bb01561780583cc37bc667f0ea79f48a122d8a2"/>
   <!-- dolphin specific things -->
-  <project name="device/sprd" path="device/sprd" revision="9a1f8e59b0cbf91d99b02f836b5197a822eadf1a"/>
+  <project name="device/sprd" path="device/sprd" revision="a5507c0bea958453f5e640343e6c60530558ac9d"/>
   <project name="platform/external/wpa_supplicant_8" path="external/wpa_supplicant_8" revision="4e58336019b5cbcfd134caf55b142236cf986618"/>
   <project name="platform/frameworks/av" path="frameworks/av" revision="facca8d3e35431b66f85a4eb42bc6c5b24bd04da"/>
   <project name="platform/hardware/akm" path="hardware/akm" revision="6d3be412647b0eab0adff8a2768736cf4eb68039"/>
   <project groups="invensense" name="platform/hardware/invensense" path="hardware/invensense" revision="e6d9ab28b4f4e7684f6c07874ee819c9ea0002a2"/>
   <project name="platform/hardware/ril" path="hardware/ril" revision="865ce3b4a2ba0b3a31421ca671f4d6c5595f8690"/>
   <project name="kernel/common" path="kernel" revision="f3a717dd8dbb08e558c807c5ede071d83d454207"/>
   <project name="platform/system/core" path="system/core" revision="b5de04ae22343b6bdaa3455aee291bdf9a872738"/>
   <project name="u-boot" path="u-boot" revision="d18f67c122a69f5ff7596a604cf484de5e824f9b"/>
--- a/b2g/config/emulator-ics/sources.xml
+++ b/b2g/config/emulator-ics/sources.xml
@@ -14,23 +14,23 @@
   <!--original fetch url was git://github.com/apitrace/-->
   <remote fetch="https://git.mozilla.org/external/apitrace" name="apitrace"/>
   <default remote="caf" revision="refs/tags/android-4.0.4_r2.1" sync-j="4"/>
   <!-- Gonk specific things and forks -->
   <project name="platform_build" path="build" remote="b2g" revision="0d616942c300d9fb142483210f1dda9096c9a9fc">
     <copyfile dest="Makefile" src="core/root.mk"/>
   </project>
   <project name="fake-dalvik" path="dalvik" remote="b2g" revision="ca1f327d5acc198bb4be62fa51db2c039032c9ce"/>
-  <project name="gaia.git" path="gaia" remote="mozillaorg" revision="6e804a42ab90f4251c7fe8c68731dc1c6abd8006"/>
+  <project name="gaia.git" path="gaia" remote="mozillaorg" revision="3a838afca295c9db32e1a3ec76d49fb7fe7fd2d2"/>
   <project name="gonk-misc" path="gonk-misc" remote="b2g" revision="3bb61a27cd2941b2ba9b616a11aaa44269210396"/>
   <project name="rilproxy" path="rilproxy" remote="b2g" revision="827214fcf38d6569aeb5c6d6f31cb296d1f09272"/>
   <project name="platform_hardware_ril" path="hardware/ril" remote="b2g" revision="cd88d860656c31c7da7bb310d6a160d0011b0961"/>
   <project name="platform_external_qemu" path="external/qemu" remote="b2g" revision="c058843242068d0df7c107e09da31b53d2e08fa6"/>
   <project name="moztt" path="external/moztt" remote="b2g" revision="562d357b72279a9e35d4af5aeecc8e1ffa2f44f1"/>
-  <project name="apitrace" path="external/apitrace" remote="apitrace" revision="1085a4b05f19f1c43def7e87362d4498ce20b70f"/>
+  <project name="apitrace" path="external/apitrace" remote="apitrace" revision="320650844ec7cba40a70317b761b88b47a8dca0e"/>
   <!-- Stock Android things -->
   <project name="platform/abi/cpp" path="abi/cpp" revision="dd924f92906085b831bf1cbbc7484d3c043d613c"/>
   <project name="platform/bionic" path="bionic" revision="c72b8f6359de7ed17c11ddc9dfdde3f615d188a9"/>
   <project name="platform/bootable/recovery" path="bootable/recovery" revision="425f8b5fadf5889834c5acd27d23c9e0b2129c28"/>
   <project name="device/common" path="device/common" revision="42b808b7e93d0619286ae8e59110b176b7732389"/>
   <project name="device/sample" path="device/sample" revision="237bd668d0f114d801a8d6455ef5e02cc3577587"/>
   <project name="platform_external_apriori" path="external/apriori" remote="b2g" revision="11816ad0406744f963537b23d68ed9c2afb412bd"/>
   <project name="platform/external/bluetooth/bluez" path="external/bluetooth/bluez" revision="52a1a862a8bac319652b8f82d9541ba40bfa45ce"/>
--- a/b2g/config/emulator-jb/sources.xml
+++ b/b2g/config/emulator-jb/sources.xml
@@ -12,20 +12,20 @@
   <!--original fetch url was https://git.mozilla.org/releases-->
   <remote fetch="https://git.mozilla.org/releases" name="mozillaorg"/>
   <!-- B2G specific things. -->
   <project name="platform_build" path="build" remote="b2g" revision="7eef86294cd794ab9e6a53d218c238bfc63c3a6d">
     <copyfile dest="Makefile" src="core/root.mk"/>
   </project>
   <project name="rilproxy" path="rilproxy" remote="b2g" revision="827214fcf38d6569aeb5c6d6f31cb296d1f09272"/>
   <project name="fake-libdvm" path="dalvik" remote="b2g" revision="d50ae982b19f42f0b66d08b9eb306be81687869f"/>
-  <project name="gaia" path="gaia" remote="mozillaorg" revision="6e804a42ab90f4251c7fe8c68731dc1c6abd8006"/>
+  <project name="gaia" path="gaia" remote="mozillaorg" revision="3a838afca295c9db32e1a3ec76d49fb7fe7fd2d2"/>
   <project name="gonk-misc" path="gonk-misc" remote="b2g" revision="3bb61a27cd2941b2ba9b616a11aaa44269210396"/>
   <project name="moztt" path="external/moztt" remote="b2g" revision="562d357b72279a9e35d4af5aeecc8e1ffa2f44f1"/>
-  <project name="apitrace" path="external/apitrace" remote="apitrace" revision="1085a4b05f19f1c43def7e87362d4498ce20b70f"/>
+  <project name="apitrace" path="external/apitrace" remote="apitrace" revision="320650844ec7cba40a70317b761b88b47a8dca0e"/>
   <project name="valgrind" path="external/valgrind" remote="b2g" revision="daa61633c32b9606f58799a3186395fd2bbb8d8c"/>
   <project name="vex" path="external/VEX" remote="b2g" revision="47f031c320888fe9f3e656602588565b52d43010"/>
   <!-- Stock Android things -->
   <project groups="linux" name="platform/prebuilts/clang/linux-x86/3.1" path="prebuilts/clang/linux-x86/3.1" revision="5c45f43419d5582949284eee9cef0c43d866e03b"/>
   <project groups="linux" name="platform/prebuilts/clang/linux-x86/3.2" path="prebuilts/clang/linux-x86/3.2" revision="3748b4168e7bd8d46457d4b6786003bc6a5223ce"/>
   <project groups="linux" name="platform/prebuilts/gcc/linux-x86/host/i686-linux-glibc2.7-4.6" path="prebuilts/gcc/linux-x86/host/i686-linux-glibc2.7-4.6" revision="9025e50b9d29b3cabbbb21e1dd94d0d13121a17e"/>
   <project groups="linux" name="platform/prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.7-4.6" path="prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.7-4.6" revision="b89fda71fcd0fa0cf969310e75be3ea33e048b44"/>
   <project groups="linux,arm" name="platform/prebuilts/gcc/linux-x86/arm/arm-eabi-4.7" path="prebuilts/gcc/linux-x86/arm/arm-eabi-4.7" revision="2e7d5348f35575870b3c7e567a9a9f6d66f8d6c5"/>
@@ -97,17 +97,17 @@
   <project name="platform/external/tinyalsa" path="external/tinyalsa" revision="a6e1da53a63cc2b676ee75a97a5953531fafa27a"/>
   <project name="platform/external/tinyxml" path="external/tinyxml" revision="ed2c5fc8937f8225ec6fd94ebcabac65621a2dc8"/>
   <project name="platform/external/tinyxml2" path="external/tinyxml2" revision="f5549cc02ee15888ecc31cf85b346578260975f9"/>
   <project name="platform/external/tremolo" path="external/tremolo" revision="10b2146109b54c868dec73f1e67abe03fa3acfed"/>
   <project name="platform/external/webp" path="external/webp" revision="020c9b164f4a9d8bce6b48d5d2b783136a8de669"/>
   <project name="platform/external/webrtc" path="external/webrtc" revision="1a1433203ddf6395516e065ada1dcdfc8bd5c654"/>
   <project name="platform/external/yaffs2" path="external/yaffs2" revision="d94a17182a88c2c2d865f50b728de8561d251efa"/>
   <project name="platform/external/zlib" path="external/zlib" revision="06608b270da9ec8a3e618f201d7356aad83f9ffe"/>
-  <project name="platform_frameworks_av" path="frameworks/av" remote="b2g" revision="f9000e9ec6fb84a24ffe049e6a6372c0305ee0f1"/>
+  <project name="platform_frameworks_av" path="frameworks/av" remote="b2g" revision="7a1003b370cde4406afd9186de0b1a5aa95543db"/>
   <project name="platform/frameworks/base" path="frameworks/base" revision="8fafbc6692a52d1f1417693f24f6349b4de5afbd"/>
   <project name="platform/frameworks/native" path="frameworks/native" revision="c135c11c422c1570fdae2e19336f06f39e723c5a"/>
   <project name="platform/frameworks/opt/emoji" path="frameworks/opt/emoji" revision="bc06a1779be6919a581a938e1c3118b3a4ab4c18"/>
   <project name="platform/frameworks/wilhelm" path="frameworks/wilhelm" revision="4f330e2d671e230c106a3c41ecddbced8ff5d826"/>
   <project name="platform/hardware/libhardware" path="hardware/libhardware" revision="bb653159145842bd86a3522073fcbf5d6450c598"/>
   <project name="platform/hardware/libhardware_legacy" path="hardware/libhardware_legacy" revision="7044afe8fa0a992a2926370e7abe9d06cc9df67b"/>
   <project name="platform/libcore" path="libcore" revision="3552ed1686d04a65b85e56ccc24ff3fcf77725e6"/>
   <project name="platform/libnativehelper" path="libnativehelper" revision="4792069e90385889b0638e97ae62c67cdf274e22"/>
--- a/b2g/config/emulator-kk/sources.xml
+++ b/b2g/config/emulator-kk/sources.xml
@@ -10,25 +10,25 @@
   <!--original fetch url was git://codeaurora.org/-->
   <remote fetch="https://git.mozilla.org/external/caf" name="caf"/>
   <!--original fetch url was https://git.mozilla.org/releases-->
   <remote fetch="https://git.mozilla.org/releases" name="mozillaorg"/>
   <!-- B2G specific things. -->
   <project name="platform_build" path="build" remote="b2g" revision="53a59364ce4f14068034c8d6fe01f4f6b9f78f23">
     <copyfile dest="Makefile" src="core/root.mk"/>
   </project>
-  <project name="gaia" path="gaia" remote="mozillaorg" revision="6e804a42ab90f4251c7fe8c68731dc1c6abd8006"/>
+  <project name="gaia" path="gaia" remote="mozillaorg" revision="3a838afca295c9db32e1a3ec76d49fb7fe7fd2d2"/>
   <project name="fake-libdvm" path="dalvik" remote="b2g" revision="d50ae982b19f42f0b66d08b9eb306be81687869f"/>
   <project name="gonk-misc" path="gonk-misc" remote="b2g" revision="3bb61a27cd2941b2ba9b616a11aaa44269210396"/>
   <project name="librecovery" path="librecovery" remote="b2g" revision="891e5069c0ad330d8191bf8c7b879c814258c89f"/>
   <project name="moztt" path="external/moztt" remote="b2g" revision="562d357b72279a9e35d4af5aeecc8e1ffa2f44f1"/>
   <project name="rilproxy" path="rilproxy" remote="b2g" revision="827214fcf38d6569aeb5c6d6f31cb296d1f09272"/>
   <project name="valgrind" path="external/valgrind" remote="b2g" revision="daa61633c32b9606f58799a3186395fd2bbb8d8c"/>
   <project name="vex" path="external/VEX" remote="b2g" revision="47f031c320888fe9f3e656602588565b52d43010"/>
-  <project name="apitrace" path="external/apitrace" remote="apitrace" revision="1085a4b05f19f1c43def7e87362d4498ce20b70f"/>
+  <project name="apitrace" path="external/apitrace" remote="apitrace" revision="320650844ec7cba40a70317b761b88b47a8dca0e"/>
   <!-- Stock Android things -->
   <project groups="linux" name="platform/prebuilts/gcc/linux-x86/host/i686-linux-glibc2.7-4.6" path="prebuilts/gcc/linux-x86/host/i686-linux-glibc2.7-4.6" revision="f92a936f2aa97526d4593386754bdbf02db07a12"/>
   <project groups="linux" name="platform/prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.7-4.6" path="prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.7-4.6" revision="6e47ff2790f5656b5b074407829ceecf3e6188c4"/>
   <project groups="linux,arm" name="platform/prebuilts/gcc/linux-x86/arm/arm-eabi-4.7" path="prebuilts/gcc/linux-x86/arm/arm-eabi-4.7" revision="1950e4760fa14688b83cdbb5acaa1af9f82ef434"/>
   <project groups="linux,arm" name="platform/prebuilts/gcc/linux-x86/arm/arm-linux-androideabi-4.7" path="prebuilts/gcc/linux-x86/arm/arm-linux-androideabi-4.7" revision="ac6eb97a37035c09fb5ede0852f0881e9aadf9ad"/>
   <project groups="linux,x86" name="platform/prebuilts/gcc/linux-x86/x86/i686-linux-android-4.7" path="prebuilts/gcc/linux-x86/x86/i686-linux-android-4.7" revision="737f591c5f95477148d26602c7be56cbea0cdeb9"/>
   <project groups="linux,x86" name="platform/prebuilts/python/linux-x86/2.7.5" path="prebuilts/python/linux-x86/2.7.5" revision="51da9b1981be481b92a59a826d4d78dc73d0989a"/>
   <project name="device/common" path="device/common" revision="798a3664597e6041985feab9aef42e98d458bc3d"/>
@@ -117,17 +117,17 @@
   <project name="platform/prebuilts/sdk" path="prebuilts/sdk" revision="b562b01c93de9578d5db537b6a602a38e1aaa0ce"/>
   <project name="platform/prebuilts/tools" path="prebuilts/tools" revision="387f03e815f57d536dd922706db1622bddba8d81"/>
   <project name="platform/system/extras" path="system/extras" revision="5356165f67f4a81c2ef28671c13697f1657590df"/>
   <project name="platform/system/media" path="system/media" revision="be0e2fe59a8043fa5200f75697df9220a99abe9d"/>
   <project name="platform/system/netd" path="system/netd" revision="36704b0da24debcab8090156568ac236315036bb"/>
   <project name="platform/system/security" path="system/security" revision="583374f69f531ba68fc3dcbff1f74893d2a96406"/>
   <project name="platform/system/vold" path="system/vold" revision="d4455b8cf361f8353e8aebac15ffd64b4aedd2b9"/>
   <project name="platform/external/icu4c" path="external/icu4c" remote="aosp" revision="b4c6379528887dc25ca9991a535a8d92a61ad6b6"/>
-  <project name="platform_frameworks_av" path="frameworks/av" remote="b2g" revision="9c6cb3231dd096df10a11b4d76be3727bdeec08d"/>
+  <project name="platform_frameworks_av" path="frameworks/av" remote="b2g" revision="614747e5e6755ffcdb36156ea82d8b5c1609a3af"/>
   <project name="platform_system_core" path="system/core" remote="b2g" revision="9395eb5aa885cf6d305a202de6e9694a58a89717"/>
   <default remote="caf" revision="refs/tags/android-4.4.2_r1" sync-j="4"/>
   <!-- Emulator specific things -->
   <project name="device/generic/armv7-a-neon" path="device/generic/armv7-a-neon" revision="72ffdf71c68a96309212eb13d63560d66db14c9e"/>
   <project name="device_generic_goldfish" path="device/generic/goldfish" remote="b2g" revision="c0e0019a6ec1a6199a9c7bc4ace041259f3b8512"/>
   <project name="platform_external_qemu" path="external/qemu" remote="b2g" revision="5f184e4aa6ad784e20b4c5e6be24db4b9a848087"/>
   <project name="platform/external/wpa_supplicant_8" path="external/wpa_supplicant_8" revision="694cecf256122d0cb3b6a1a4efb4b5c7401db223"/>
   <project name="platform_hardware_ril" path="hardware/ril" remote="b2g" revision="97d63c256a047b491565d624aea1dd5f1f8593ea"/>
--- a/b2g/config/emulator/sources.xml
+++ b/b2g/config/emulator/sources.xml
@@ -14,23 +14,23 @@
   <!--original fetch url was git://github.com/apitrace/-->
   <remote fetch="https://git.mozilla.org/external/apitrace" name="apitrace"/>
   <default remote="caf" revision="refs/tags/android-4.0.4_r2.1" sync-j="4"/>
   <!-- Gonk specific things and forks -->
   <project name="platform_build" path="build" remote="b2g" revision="0d616942c300d9fb142483210f1dda9096c9a9fc">
     <copyfile dest="Makefile" src="core/root.mk"/>
   </project>
   <project name="fake-dalvik" path="dalvik" remote="b2g" revision="ca1f327d5acc198bb4be62fa51db2c039032c9ce"/>
-  <project name="gaia.git" path="gaia" remote="mozillaorg" revision="6e804a42ab90f4251c7fe8c68731dc1c6abd8006"/>
+  <project name="gaia.git" path="gaia" remote="mozillaorg" revision="3a838afca295c9db32e1a3ec76d49fb7fe7fd2d2"/>
   <project name="gonk-misc" path="gonk-misc" remote="b2g" revision="3bb61a27cd2941b2ba9b616a11aaa44269210396"/>
   <project name="rilproxy" path="rilproxy" remote="b2g" revision="827214fcf38d6569aeb5c6d6f31cb296d1f09272"/>
   <project name="platform_hardware_ril" path="hardware/ril" remote="b2g" revision="cd88d860656c31c7da7bb310d6a160d0011b0961"/>
   <project name="platform_external_qemu" path="external/qemu" remote="b2g" revision="c058843242068d0df7c107e09da31b53d2e08fa6"/>
   <project name="moztt" path="external/moztt" remote="b2g" revision="562d357b72279a9e35d4af5aeecc8e1ffa2f44f1"/>
-  <project name="apitrace" path="external/apitrace" remote="apitrace" revision="1085a4b05f19f1c43def7e87362d4498ce20b70f"/>
+  <project name="apitrace" path="external/apitrace" remote="apitrace" revision="320650844ec7cba40a70317b761b88b47a8dca0e"/>
   <!-- Stock Android things -->
   <project name="platform/abi/cpp" path="abi/cpp" revision="dd924f92906085b831bf1cbbc7484d3c043d613c"/>
   <project name="platform/bionic" path="bionic" revision="c72b8f6359de7ed17c11ddc9dfdde3f615d188a9"/>
   <project name="platform/bootable/recovery" path="bootable/recovery" revision="425f8b5fadf5889834c5acd27d23c9e0b2129c28"/>
   <project name="device/common" path="device/common" revision="42b808b7e93d0619286ae8e59110b176b7732389"/>
   <project name="device/sample" path="device/sample" revision="237bd668d0f114d801a8d6455ef5e02cc3577587"/>
   <project name="platform_external_apriori" path="external/apriori" remote="b2g" revision="11816ad0406744f963537b23d68ed9c2afb412bd"/>
   <project name="platform/external/bluetooth/bluez" path="external/bluetooth/bluez" revision="52a1a862a8bac319652b8f82d9541ba40bfa45ce"/>
--- a/b2g/config/flame/sources.xml
+++ b/b2g/config/flame/sources.xml
@@ -12,20 +12,20 @@
   <!--original fetch url was https://git.mozilla.org/releases-->
   <remote fetch="https://git.mozilla.org/releases" name="mozillaorg"/>
   <!-- B2G specific things. -->
   <project name="platform_build" path="build" remote="b2g" revision="7eef86294cd794ab9e6a53d218c238bfc63c3a6d">
     <copyfile dest="Makefile" src="core/root.mk"/>
   </project>
   <project name="librecovery" path="librecovery" remote="b2g" revision="891e5069c0ad330d8191bf8c7b879c814258c89f"/>
   <project name="fake-libdvm" path="dalvik" remote="b2g" revision="d50ae982b19f42f0b66d08b9eb306be81687869f"/>
-  <project name="gaia" path="gaia" remote="mozillaorg" revision="6e804a42ab90f4251c7fe8c68731dc1c6abd8006"/>
+  <project name="gaia" path="gaia" remote="mozillaorg" revision="3a838afca295c9db32e1a3ec76d49fb7fe7fd2d2"/>
   <project name="gonk-misc" path="gonk-misc" remote="b2g" revision="3bb61a27cd2941b2ba9b616a11aaa44269210396"/>
   <project name="moztt" path="external/moztt" remote="b2g" revision="562d357b72279a9e35d4af5aeecc8e1ffa2f44f1"/>
-  <project name="apitrace" path="external/apitrace" remote="apitrace" revision="1085a4b05f19f1c43def7e87362d4498ce20b70f"/>
+  <project name="apitrace" path="external/apitrace" remote="apitrace" revision="320650844ec7cba40a70317b761b88b47a8dca0e"/>
   <project name="valgrind" path="external/valgrind" remote="b2g" revision="daa61633c32b9606f58799a3186395fd2bbb8d8c"/>
   <project name="vex" path="external/VEX" remote="b2g" revision="47f031c320888fe9f3e656602588565b52d43010"/>
   <!-- Stock Android things -->
   <project groups="linux" name="platform/prebuilts/clang/linux-x86/3.1" path="prebuilts/clang/linux-x86/3.1" revision="e95b4ce22c825da44d14299e1190ea39a5260bde"/>
   <project groups="linux" name="platform/prebuilts/clang/linux-x86/3.2" path="prebuilts/clang/linux-x86/3.2" revision="471afab478649078ad7c75ec6b252481a59e19b8"/>
   <project groups="linux" name="platform/prebuilts/gcc/linux-x86/host/i686-linux-glibc2.7-4.6" path="prebuilts/gcc/linux-x86/host/i686-linux-glibc2.7-4.6" revision="95bb5b66b3ec5769c3de8d3f25d681787418e7d2"/>
   <project groups="linux" name="platform/prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.7-4.6" path="prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.7-4.6" revision="ebdad82e61c16772f6cd47e9f11936bf6ebe9aa0"/>
   <project groups="linux,arm" name="platform/prebuilts/gcc/linux-x86/arm/arm-eabi-4.7" path="prebuilts/gcc/linux-x86/arm/arm-eabi-4.7" revision="8b880805d454664b3eed11d0f053cdeafa1ff06e"/>
--- a/b2g/config/gaia.json
+++ b/b2g/config/gaia.json
@@ -1,9 +1,9 @@
 {
     "git": {
         "git_revision": "", 
         "remote": "", 
         "branch": ""
     }, 
-    "revision": "f2dfcf31b96b8ffdf3dda0bd7b7272a07643e916", 
+    "revision": "d4afc0a7f72fd7793359b9575ea7c90cd54e2348", 
     "repo_path": "/integration/gaia-central"
 }
--- a/b2g/config/hamachi/sources.xml
+++ b/b2g/config/hamachi/sources.xml
@@ -12,22 +12,22 @@
   <!--original fetch url was git://github.com/apitrace/-->
   <remote fetch="https://git.mozilla.org/external/apitrace" name="apitrace"/>
   <default remote="caf" revision="b2g/ics_strawberry" sync-j="4"/>
   <!-- Gonk specific things and forks -->
   <project name="platform_build" path="build" remote="b2g" revision="0d616942c300d9fb142483210f1dda9096c9a9fc">
     <copyfile dest="Makefile" src="core/root.mk"/>
   </project>
   <project name="fake-dalvik" path="dalvik" remote="b2g" revision="ca1f327d5acc198bb4be62fa51db2c039032c9ce"/>
-  <project name="gaia.git" path="gaia" remote="mozillaorg" revision="6e804a42ab90f4251c7fe8c68731dc1c6abd8006"/>
+  <project name="gaia.git" path="gaia" remote="mozillaorg" revision="3a838afca295c9db32e1a3ec76d49fb7fe7fd2d2"/>
   <project name="gonk-misc" path="gonk-misc" remote="b2g" revision="3bb61a27cd2941b2ba9b616a11aaa44269210396"/>
   <project name="rilproxy" path="rilproxy" remote="b2g" revision="827214fcf38d6569aeb5c6d6f31cb296d1f09272"/>
   <project name="librecovery" path="librecovery" remote="b2g" revision="891e5069c0ad330d8191bf8c7b879c814258c89f"/>
   <project name="moztt" path="external/moztt" remote="b2g" revision="562d357b72279a9e35d4af5aeecc8e1ffa2f44f1"/>
-  <project name="apitrace" path="external/apitrace" remote="apitrace" revision="1085a4b05f19f1c43def7e87362d4498ce20b70f"/>
+  <project name="apitrace" path="external/apitrace" remote="apitrace" revision="320650844ec7cba40a70317b761b88b47a8dca0e"/>
   <!-- Stock Android things -->
   <project name="platform/abi/cpp" path="abi/cpp" revision="6426040f1be4a844082c9769171ce7f5341a5528"/>
   <project name="platform/bionic" path="bionic" revision="d2eb6c7b6e1bc7643c17df2d9d9bcb1704d0b9ab"/>
   <project name="platform/bootable/recovery" path="bootable/recovery" revision="746bc48f34f5060f90801925dcdd964030c1ab6d"/>
   <project name="platform/development" path="development" revision="2460485184bc8535440bb63876d4e63ec1b4770c"/>
   <project name="device/common" path="device/common" revision="0dcc1e03659db33b77392529466f9eb685cdd3c7"/>
   <project name="device/sample" path="device/sample" revision="68b1cb978a20806176123b959cb05d4fa8adaea4"/>
   <project name="platform_external_apriori" path="external/apriori" remote="b2g" revision="11816ad0406744f963537b23d68ed9c2afb412bd"/>
--- a/b2g/config/helix/sources.xml
+++ b/b2g/config/helix/sources.xml
@@ -10,17 +10,17 @@
   <!--original fetch url was https://git.mozilla.org/releases-->
   <remote fetch="https://git.mozilla.org/releases" name="mozillaorg"/>
   <default remote="caf" revision="b2g/ics_strawberry" sync-j="4"/>
   <!-- Gonk specific things and forks -->
   <project name="platform_build" path="build" remote="b2g" revision="0d616942c300d9fb142483210f1dda9096c9a9fc">
     <copyfile dest="Makefile" src="core/root.mk"/>
   </project>
   <project name="fake-dalvik" path="dalvik" remote="b2g" revision="ca1f327d5acc198bb4be62fa51db2c039032c9ce"/>
-  <project name="gaia.git" path="gaia" remote="mozillaorg" revision="6e804a42ab90f4251c7fe8c68731dc1c6abd8006"/>
+  <project name="gaia.git" path="gaia" remote="mozillaorg" revision="3a838afca295c9db32e1a3ec76d49fb7fe7fd2d2"/>
   <project name="gonk-misc" path="gonk-misc" remote="b2g" revision="3bb61a27cd2941b2ba9b616a11aaa44269210396"/>
   <project name="rilproxy" path="rilproxy" remote="b2g" revision="827214fcf38d6569aeb5c6d6f31cb296d1f09272"/>
   <project name="librecovery" path="librecovery" remote="b2g" revision="891e5069c0ad330d8191bf8c7b879c814258c89f"/>
   <project name="moztt" path="external/moztt" remote="b2g" revision="562d357b72279a9e35d4af5aeecc8e1ffa2f44f1"/>
   <project name="gonk-patches" path="patches" remote="b2g" revision="223a2421006e8f5da33f516f6891c87cae86b0f6"/>
   <!-- Stock Android things -->
   <project name="platform/abi/cpp" path="abi/cpp" revision="6426040f1be4a844082c9769171ce7f5341a5528"/>
   <project name="platform/bionic" path="bionic" revision="d2eb6c7b6e1bc7643c17df2d9d9bcb1704d0b9ab"/>
--- a/b2g/config/nexus-4/sources.xml
+++ b/b2g/config/nexus-4/sources.xml
@@ -12,20 +12,20 @@
   <!--original fetch url was https://git.mozilla.org/releases-->
   <remote fetch="https://git.mozilla.org/releases" name="mozillaorg"/>
   <!-- B2G specific things. -->
   <project name="platform_build" path="build" remote="b2g" revision="7eef86294cd794ab9e6a53d218c238bfc63c3a6d">
     <copyfile dest="Makefile" src="core/root.mk"/>
   </project>
   <project name="rilproxy" path="rilproxy" remote="b2g" revision="827214fcf38d6569aeb5c6d6f31cb296d1f09272"/>
   <project name="fake-libdvm" path="dalvik" remote="b2g" revision="d50ae982b19f42f0b66d08b9eb306be81687869f"/>
-  <project name="gaia" path="gaia" remote="mozillaorg" revision="6e804a42ab90f4251c7fe8c68731dc1c6abd8006"/>
+  <project name="gaia" path="gaia" remote="mozillaorg" revision="3a838afca295c9db32e1a3ec76d49fb7fe7fd2d2"/>
   <project name="gonk-misc" path="gonk-misc" remote="b2g" revision="3bb61a27cd2941b2ba9b616a11aaa44269210396"/>
   <project name="moztt" path="external/moztt" remote="b2g" revision="562d357b72279a9e35d4af5aeecc8e1ffa2f44f1"/>
-  <project name="apitrace" path="external/apitrace" remote="apitrace" revision="1085a4b05f19f1c43def7e87362d4498ce20b70f"/>
+  <project name="apitrace" path="external/apitrace" remote="apitrace" revision="320650844ec7cba40a70317b761b88b47a8dca0e"/>
   <project name="valgrind" path="external/valgrind" remote="b2g" revision="daa61633c32b9606f58799a3186395fd2bbb8d8c"/>
   <project name="vex" path="external/VEX" remote="b2g" revision="47f031c320888fe9f3e656602588565b52d43010"/>
   <!-- Stock Android things -->
   <project groups="linux" name="platform/prebuilts/clang/linux-x86/3.1" path="prebuilts/clang/linux-x86/3.1" revision="5c45f43419d5582949284eee9cef0c43d866e03b"/>
   <project groups="linux" name="platform/prebuilts/clang/linux-x86/3.2" path="prebuilts/clang/linux-x86/3.2" revision="3748b4168e7bd8d46457d4b6786003bc6a5223ce"/>
   <project groups="linux" name="platform/prebuilts/gcc/linux-x86/host/i686-linux-glibc2.7-4.6" path="prebuilts/gcc/linux-x86/host/i686-linux-glibc2.7-4.6" revision="9025e50b9d29b3cabbbb21e1dd94d0d13121a17e"/>
   <project groups="linux" name="platform/prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.7-4.6" path="prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.7-4.6" revision="b89fda71fcd0fa0cf969310e75be3ea33e048b44"/>
   <project groups="linux,arm" name="platform/prebuilts/gcc/linux-x86/arm/arm-eabi-4.7" path="prebuilts/gcc/linux-x86/arm/arm-eabi-4.7" revision="2e7d5348f35575870b3c7e567a9a9f6d66f8d6c5"/>
@@ -97,17 +97,17 @@
   <project name="platform/external/tinyalsa" path="external/tinyalsa" revision="a6e1da53a63cc2b676ee75a97a5953531fafa27a"/>
   <project name="platform/external/tinyxml" path="external/tinyxml" revision="ed2c5fc8937f8225ec6fd94ebcabac65621a2dc8"/>
   <project name="platform/external/tinyxml2" path="external/tinyxml2" revision="f5549cc02ee15888ecc31cf85b346578260975f9"/>
   <project name="platform/external/tremolo" path="external/tremolo" revision="10b2146109b54c868dec73f1e67abe03fa3acfed"/>
   <project name="platform/external/webp" path="external/webp" revision="020c9b164f4a9d8bce6b48d5d2b783136a8de669"/>
   <project name="platform/external/webrtc" path="external/webrtc" revision="1a1433203ddf6395516e065ada1dcdfc8bd5c654"/>
   <project name="platform/external/yaffs2" path="external/yaffs2" revision="d94a17182a88c2c2d865f50b728de8561d251efa"/>
   <project name="platform/external/zlib" path="external/zlib" revision="06608b270da9ec8a3e618f201d7356aad83f9ffe"/>
-  <project name="platform_frameworks_av" path="frameworks/av" remote="b2g" revision="f9000e9ec6fb84a24ffe049e6a6372c0305ee0f1"/>
+  <project name="platform_frameworks_av" path="frameworks/av" remote="b2g" revision="7a1003b370cde4406afd9186de0b1a5aa95543db"/>
   <project name="platform/frameworks/base" path="frameworks/base" revision="8fafbc6692a52d1f1417693f24f6349b4de5afbd"/>
   <project name="platform/frameworks/native" path="frameworks/native" revision="c135c11c422c1570fdae2e19336f06f39e723c5a"/>
   <project name="platform/frameworks/opt/emoji" path="frameworks/opt/emoji" revision="bc06a1779be6919a581a938e1c3118b3a4ab4c18"/>
   <project name="platform/frameworks/wilhelm" path="frameworks/wilhelm" revision="4f330e2d671e230c106a3c41ecddbced8ff5d826"/>
   <project name="platform/hardware/libhardware" path="hardware/libhardware" revision="bb653159145842bd86a3522073fcbf5d6450c598"/>
   <project name="platform/hardware/libhardware_legacy" path="hardware/libhardware_legacy" revision="7044afe8fa0a992a2926370e7abe9d06cc9df67b"/>
   <project name="platform/libcore" path="libcore" revision="3552ed1686d04a65b85e56ccc24ff3fcf77725e6"/>
   <project name="platform/libnativehelper" path="libnativehelper" revision="4792069e90385889b0638e97ae62c67cdf274e22"/>
--- a/b2g/config/wasabi/sources.xml
+++ b/b2g/config/wasabi/sources.xml
@@ -12,22 +12,22 @@
   <!--original fetch url was git://github.com/apitrace/-->
   <remote fetch="https://git.mozilla.org/external/apitrace" name="apitrace"/>
   <default remote="caf" revision="ics_chocolate_rb4.2" sync-j="4"/>
   <!-- Gonk specific things and forks -->
   <project name="platform_build" path="build" remote="b2g" revision="0d616942c300d9fb142483210f1dda9096c9a9fc">
     <copyfile dest="Makefile" src="core/root.mk"/>
   </project>
   <project name="fake-dalvik" path="dalvik" remote="b2g" revision="ca1f327d5acc198bb4be62fa51db2c039032c9ce"/>
-  <project name="gaia.git" path="gaia" remote="mozillaorg" revision="6e804a42ab90f4251c7fe8c68731dc1c6abd8006"/>
+  <project name="gaia.git" path="gaia" remote="mozillaorg" revision="3a838afca295c9db32e1a3ec76d49fb7fe7fd2d2"/>
   <project name="gonk-misc" path="gonk-misc" remote="b2g" revision="3bb61a27cd2941b2ba9b616a11aaa44269210396"/>
   <project name="rilproxy" path="rilproxy" remote="b2g" revision="827214fcf38d6569aeb5c6d6f31cb296d1f09272"/>
   <project name="librecovery" path="librecovery" remote="b2g" revision="891e5069c0ad330d8191bf8c7b879c814258c89f"/>
   <project name="moztt" path="external/moztt" remote="b2g" revision="562d357b72279a9e35d4af5aeecc8e1ffa2f44f1"/>
-  <project name="apitrace" path="external/apitrace" remote="apitrace" revision="1085a4b05f19f1c43def7e87362d4498ce20b70f"/>
+  <project name="apitrace" path="external/apitrace" remote="apitrace" revision="320650844ec7cba40a70317b761b88b47a8dca0e"/>
   <project name="gonk-patches" path="patches" remote="b2g" revision="223a2421006e8f5da33f516f6891c87cae86b0f6"/>
   <!-- Stock Android things -->
   <project name="platform/abi/cpp" path="abi/cpp" revision="6426040f1be4a844082c9769171ce7f5341a5528"/>
   <project name="platform/bionic" path="bionic" revision="cd5dfce80bc3f0139a56b58aca633202ccaee7f8"/>
   <project name="platform/bootable/recovery" path="bootable/recovery" revision="e0a9ac010df3afaa47ba107192c05ac8b5516435"/>
   <project name="platform/development" path="development" revision="a384622f5fcb1d2bebb9102591ff7ae91fe8ed2d"/>
   <project name="device/common" path="device/common" revision="7c65ea240157763b8ded6154a17d3c033167afb7"/>
   <project name="device/sample" path="device/sample" revision="c328f3d4409db801628861baa8d279fb8855892f"/>
--- a/browser/base/content/popup-notifications.inc
+++ b/browser/base/content/popup-notifications.inc
@@ -16,18 +16,17 @@
                control="webRTC-selectCamera-menulist"/>
         <menulist id="webRTC-selectCamera-menulist">
           <menupopup id="webRTC-selectCamera-menupopup"/>
         </menulist>
       </popupnotificationcontent>
 
       <popupnotificationcontent id="webRTC-selectWindowOrScreen" orient="vertical">
         <separator class="thin"/>
-        <label value="&getUserMedia.selectWindowOrScreen.label;"
-               accesskey="&getUserMedia.selectWindowOrScreen.accesskey;"
+        <label id="webRTC-selectWindow-label"
                control="webRTC-selectWindow-menulist"/>
         <menulist id="webRTC-selectWindow-menulist"
                   oncommand="gWebRTCUI.updateMainActionLabel(this);">
           <menupopup id="webRTC-selectWindow-menupopup"/>
         </menulist>
         <description id="webRTC-all-windows-shared" hidden="true">&getUserMedia.allWindowsShared.message;</description>
       </popupnotificationcontent>
 
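Note: the label here loses its hardcoded entity strings and becomes a bare <label id="webRTC-selectWindow-label">; the webrtcUI.jsm hunk further down now fills in the value and accesskey at runtime according to the type of device being shared. A minimal sketch of that pattern, using the names from this patch (stringBundle is assumed to be the browser.properties bundle):

  // type comes from devices[0].mediaSource: "window", "screen" or "application".
  let typeName = type.charAt(0).toUpperCase() + type.substr(1);
  let label = chromeDoc.getElementById("webRTC-selectWindow-label");
  let stringId = "getUserMedia.select" + typeName;   // e.g. getUserMedia.selectScreen
  label.setAttribute("value", stringBundle.getString(stringId + ".label"));
  label.setAttribute("accesskey", stringBundle.getString(stringId + ".accesskey"));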
--- a/browser/components/places/content/treeView.js
+++ b/browser/components/places/content/treeView.js
@@ -314,17 +314,17 @@ PlacesTreeView.prototype = {
       // Recursively do containers.
       if (!this._flatList &&
           curChild instanceof Ci.nsINavHistoryContainerResultNode &&
           !this._controller.hasCachedLivemarkInfo(curChild)) {
         let uri = curChild.uri;
         let isopen = false;
 
         if (uri) {
-          let val = this._xulStore.getValue(document.documentURIObject, uri, "open");
+          let val = this._xulStore.getValue(document.documentURI, uri, "open");
           isopen = (val == "true");
         }
 
         if (isopen != curChild.containerOpen)
           aToOpen.push(curChild);
         else if (curChild.containerOpen && curChild.childCount > 0)
           rowsInserted += this._buildVisibleSection(curChild, row + 1, aToOpen);
       }
@@ -1499,17 +1499,17 @@ PlacesTreeView.prototype = {
       return;
     }
 
     // Persist containers open status, but never persist livemarks.
     if (!this._controller.hasCachedLivemarkInfo(node)) {
       let uri = node.uri;
 
       if (uri) {
-        let docURI = document.documentURIObject;
+        let docURI = document.documentURI;
 
         if (node.containerOpen) {
           this._xulStore.removeValue(docURI, uri, "open");
         } else {
           this._xulStore.setValue(docURI, uri, "open", "true");
         }
       }
     }
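Note: both hunks in this file swap document.documentURIObject (an nsIURI) for document.documentURI (a plain string), which is the form nsIXULStore's getValue/setValue/removeValue expect for the document argument. A minimal sketch of the calls involved, assuming the standard XULStore service contract id of the time:

  let xulStore = Cc["@mozilla.org/xul/xulstore;1"].getService(Ci.nsIXULStore);
  let docURI = document.documentURI;            // string form, as nsIXULStore expects
  xulStore.setValue(docURI, uri, "open", "true");
  let isopen = xulStore.getValue(docURI, uri, "open") == "true";
  xulStore.removeValue(docURI, uri, "open");    // drop the persisted state again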
--- a/browser/locales/en-US/chrome/browser/browser.dtd
+++ b/browser/locales/en-US/chrome/browser/browser.dtd
@@ -713,18 +713,16 @@ just addresses the organization to follo
 
 <!ENTITY social.markpageMenu.accesskey "P">
 <!ENTITY social.markpageMenu.label "Save Page To…">
 <!ENTITY social.marklinkMenu.accesskey "L">
 <!ENTITY social.marklinkMenu.label "Save Link To…">
 
 <!ENTITY getUserMedia.selectCamera.label "Camera to share:">
 <!ENTITY getUserMedia.selectCamera.accesskey "C">
-<!ENTITY getUserMedia.selectWindowOrScreen.label "Window or screen to share:">
-<!ENTITY getUserMedia.selectWindowOrScreen.accesskey "W">
 <!ENTITY getUserMedia.selectMicrophone.label "Microphone to share:">
 <!ENTITY getUserMedia.selectMicrophone.accesskey "M">
 <!ENTITY getUserMedia.allWindowsShared.message "All visible windows on your screen will be shared.">
 
 <!-- Bad Content Blocker Doorhanger Notification -->
 <!ENTITY badContentBlocked.moreinfo "Most websites will work properly even if content is blocked.">
 
 <!ENTITY mixedContentBlocked2.message "Insecure content">
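Note: the two selectWindowOrScreen entities are removed rather than renamed because DTD entities are substituted when the XUL is parsed and cannot vary afterwards; their replacements land in browser.properties (next file), where script can pick one of selectWindow/selectScreen/selectApplication per request. The contrast, roughly:

  <!-- DTD entity: fixed once the document is parsed -->
  <label value="&getUserMedia.selectCamera.label;"/>

  // .properties string: chosen at runtime
  label.setAttribute("value", stringBundle.getString("getUserMedia.selectWindow.label"));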
--- a/browser/locales/en-US/chrome/browser/browser.properties
+++ b/browser/locales/en-US/chrome/browser/browser.properties
@@ -510,62 +510,84 @@ identity.loggedIn.signOut.accessKey = O
 #                    getUserMedia.shareScreen.message, getUserMedia.shareCameraAndMicrophone.message,
 #                    getUserMedia.shareScreenAndMicrophone.message):
 #  %S is the website origin (e.g. www.mozilla.org)
 getUserMedia.shareCamera.message = Would you like to share your camera with %S?
 getUserMedia.shareMicrophone.message = Would you like to share your microphone with %S?
 getUserMedia.shareScreen.message = Would you like to share your screen with %S?
 getUserMedia.shareCameraAndMicrophone.message = Would you like to share your camera and microphone with %S?
 getUserMedia.shareScreenAndMicrophone.message = Would you like to share your microphone and screen with %S?
+getUserMedia.selectWindow.label=Window to share:
+getUserMedia.selectWindow.accesskey=W
+getUserMedia.selectScreen.label=Screen to share:
+getUserMedia.selectScreen.accesskey=S
+getUserMedia.selectApplication.label=Application to share:
+getUserMedia.selectApplication.accesskey=A
 getUserMedia.noVideo.label = No Video
-getUserMedia.noWindowOrScreen.label = No Window or Screen
+getUserMedia.noApplication.label = No Application
+getUserMedia.noScreen.label = No Screen
+getUserMedia.noWindow.label = No Window
 getUserMedia.noAudio.label = No Audio
 getUserMedia.shareEntireScreen.label = Entire screen
+# LOCALIZATION NOTE (getUserMedia.shareApplicationWindowCount.label):
+# Semicolon-separated list of plural forms.
+# See: http://developer.mozilla.org/en/docs/Localization_and_Plurals
+# Replacement for #1 is the name of the application.
+# Replacement for #2 is the number of windows currently displayed by the application.
+getUserMedia.shareApplicationWindowCount.label=#1 (#2 window);#1 (#2 windows)
 # LOCALIZATION NOTE (getUserMedia.shareSelectedDevices.label):
 # Semicolon-separated list of plural forms. See:
 # http://developer.mozilla.org/en/docs/Localization_and_Plurals
 # The number of devices can be either one or two.
 getUserMedia.shareSelectedDevices.label = Share Selected Device;Share Selected Devices
 getUserMedia.shareSelectedDevices.accesskey = S
 getUserMedia.shareScreen.label = Share Screen
+getUserMedia.shareApplication.label = Share Selected Application
 getUserMedia.shareWindow.label = Share Selected Window
 getUserMedia.shareSelectedItems.label = Share Selected Items
 getUserMedia.always.label = Always Share
 getUserMedia.always.accesskey = A
 getUserMedia.denyRequest.label = Don't Share
 getUserMedia.denyRequest.accesskey = D
 getUserMedia.never.label = Never Share
 getUserMedia.never.accesskey = N
 getUserMedia.sharingCamera.message2 = You are currently sharing your camera with this page.
 getUserMedia.sharingMicrophone.message2 = You are currently sharing your microphone with this page.
 getUserMedia.sharingCameraAndMicrophone.message2 = You are currently sharing your camera and microphone with this page.
+getUserMedia.sharingApplication.message = You are currently sharing an application with this page.
 getUserMedia.sharingScreen.message = You are currently sharing your screen with this page.
 getUserMedia.sharingWindow.message = You are currently sharing a window with this page.
 getUserMedia.continueSharing.label = Continue Sharing
 getUserMedia.continueSharing.accesskey = C
 getUserMedia.stopSharing.label = Stop Sharing
 getUserMedia.stopSharing.accesskey = S
 
 getUserMedia.sharingMenu.label = Tabs sharing devices
 getUserMedia.sharingMenu.accesskey = d
 # LOCALIZATION NOTE (getUserMedia.sharingMenuCamera, getUserMedia.sharingMenuCamera,
-#                    getUserMedia.sharingMenuMicrophone, getUserMedia.sharingMenuScreen,
-#                    getUserMedia.sharingMenuWindow, getUserMedia.sharingMenuCameraMicrophone,
+#                    getUserMedia.sharingMenuMicrophone, getUserMedia.sharingMenuApplication,
+#                    getUserMedia.sharingMenuScreen, getUserMedia.sharingMenuWindow,
+#                    getUserMedia.sharingMenuCameraMicrophone,
+#                    getUserMedia.sharingMenuCameraMicrophoneApplication,
 #                    getUserMedia.sharingMenuCameraMicrophoneScreen,
 #                    getUserMedia.sharingMenuCameraMicrophoneWindow,
+#                    getUserMedia.sharingMenuMicrophoneApplication,
 #                    getUserMedia.sharingMenuMicrophoneScreen,
 #                    getUserMedia.sharingMenuMicrophoneWindow):
 # %S is the website origin (e.g. www.mozilla.org)
 getUserMedia.sharingMenuCamera = %S (camera)
 getUserMedia.sharingMenuMicrophone = %S (microphone)
+getUserMedia.sharingMenuApplication = %S (application)
 getUserMedia.sharingMenuScreen = %S (screen)
 getUserMedia.sharingMenuWindow = %S (window)
 getUserMedia.sharingMenuCameraMicrophone = %S (camera and microphone)
+getUserMedia.sharingMenuCameraMicrophoneApplication = %S (camera, microphone and application)
 getUserMedia.sharingMenuCameraMicrophoneScreen = %S (camera, microphone and screen)
 getUserMedia.sharingMenuCameraMicrophoneWindow = %S (camera, microphone and window)
+getUserMedia.sharingMenuMicrophoneApplication = %S (microphone and application)
 getUserMedia.sharingMenuMicrophoneScreen = %S (microphone and screen)
 getUserMedia.sharingMenuMicrophoneWindow = %S (microphone and window)
 # LOCALIZATION NOTE(getUserMedia.sharingMenuUnknownHost): this is used for the website
 # origin for the sharing menu if no readable origin could be deduced from the URL.
 getUserMedia.sharingMenuUnknownHost = Unknown origin
 
 # LOCALIZATION NOTE - %S is brandShortName
 slowStartup.message = %S seems slow… to… start.
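Note: shareApplicationWindowCount.label above is a semicolon-separated plural string consumed through PluralForm.jsm, exactly as the webrtcUI.jsm hunk below does. A minimal sketch of the expansion (appName and windowCount are illustrative values):

  Components.utils.import("resource://gre/modules/PluralForm.jsm");
  let appName = "Terminal", windowCount = 3;
  let plural = stringBundle.getString("getUserMedia.shareApplicationWindowCount.label");
  // plural == "#1 (#2 window);#1 (#2 windows)"
  let name = PluralForm.get(windowCount, plural)
                       .replace("#1", appName)
                       .replace("#2", windowCount);   // -> "Terminal (3 windows)"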
--- a/browser/locales/en-US/chrome/browser/webrtcIndicator.properties
+++ b/browser/locales/en-US/chrome/browser/webrtcIndicator.properties
@@ -7,31 +7,34 @@
 # LOCALIZATION NOTE (webrtcIndicator.windowtitle): %S is the brand name (e.g. Firefox).
 # This string is used so that the window has a title in tools that enumerate/look for window
 # titles. It is not normally visible anywhere.
 webrtcIndicator.windowtitle = %S - Sharing Indicator
 
 webrtcIndicator.sharingCameraAndMicrophone.tooltip = Your camera and microphone are being shared. Click to control sharing.
 webrtcIndicator.sharingCamera.tooltip              = Your camera is being shared. Click to control sharing.
 webrtcIndicator.sharingMicrophone.tooltip          = Your microphone is being shared. Click to control sharing.
+webrtcIndicator.sharingApplication.tooltip = An application is being shared. Click to control sharing.
 webrtcIndicator.sharingScreen.tooltip = Your screen is being shared. Click to control sharing.
 webrtcIndicator.sharingWindow.tooltip = A window is being shared. Click to control sharing.
 
 
 # LOCALIZATION NOTE : The following strings are only used on Mac for
 # menus attached to icons near the clock on the mac menubar.
 
 # LOCALIZATION NOTE (webrtcIndicator.sharing*With.menuitem):
 # %S is the title of the tab using the share.
 webrtcIndicator.sharingCameraWith.menuitem = Sharing Camera with "%S"
 webrtcIndicator.sharingMicrophoneWith.menuitem = Sharing Microphone with "%S"
+webrtcIndicator.sharingApplicationWith.menuitem = Sharing an Application with "%S"
 webrtcIndicator.sharingScreenWith.menuitem = Sharing Screen with "%S"
 webrtcIndicator.sharingWindowWith.menuitem = Sharing a Window with "%S"
 webrtcIndicator.controlSharing.menuitem = Control Sharing
 # LOCALIZATION NOTE (webrtcIndicator.sharing*WithNTabs.menuitem):
 # Semicolon-separated list of plural forms.
 webrtcIndicator.sharingCameraWithNTabs.menuitem = Sharing Camera with #1 tab;Sharing Camera with #1 tabs
 webrtcIndicator.sharingMicrophoneWithNTabs.menuitem = Sharing Microphone with #1 tab;Sharing Microphone with #1 tabs
+webrtcIndicator.sharingApplicationWithNTabs.menuitem = Sharing an Application with #1 tab;Sharing Applications with #1 tabs
 webrtcIndicator.sharingScreenWithNTabs.menuitem = Sharing Screen with #1 tab;Sharing Screen with #1 tabs
 webrtcIndicator.sharingWindowWithNTabs.menuitem = Sharing a Window with #1 tab;Sharing Windows with #1 tabs
 # LOCALIZATION NOTE (webrtcIndicator.controlSharingOn.menuitem):
 # %S is the title of the tab using the share.
 webrtcIndicator.controlSharingOn.menuitem = Control Sharing on "%S"
--- a/browser/modules/webrtcUI.jsm
+++ b/browser/modules/webrtcUI.jsm
@@ -33,38 +33,40 @@ this.webrtcUI = {
     Services.obs.removeObserver(updateIndicators, "recording-device-events");
     Services.obs.removeObserver(removeBrowserSpecificIndicator, "recording-window-ended");
     Services.obs.removeObserver(maybeAddMenuIndicator, "browser-delayed-startup-finished");
   },
 
   showGlobalIndicator: false,
   showCameraIndicator: false,
   showMicrophoneIndicator: false,
-  showScreenSharingIndicator: "", // either "Screen" or "Window"
+  showScreenSharingIndicator: "", // either "Application", "Screen" or "Window"
 
   // The boolean parameters indicate which streams should be included in the result.
   getActiveStreams: function(aCamera, aMicrophone, aScreen) {
     let contentWindowSupportsArray = MediaManagerService.activeMediaCaptureWindows;
     let count = contentWindowSupportsArray.Count();
     let activeStreams = [];
     for (let i = 0; i < count; i++) {
       let contentWindow = contentWindowSupportsArray.GetElementAt(i);
 
       let info = {
         Camera: {},
         Microphone: {},
         Window: {},
         Screen: {},
+        Application: {}
       };
       MediaManagerService.mediaCaptureWindowState(contentWindow, info.Camera,
                                                   info.Microphone, info.Screen,
-                                                  info.Window);
+                                                  info.Window, info.Application);
       if (!(aCamera && info.Camera.value ||
             aMicrophone && info.Microphone.value ||
-            aScreen && (info.Screen.value || info.Window.value)))
+            aScreen && (info.Screen.value || info.Window.value ||
+                        info.Application.value)))
         continue;
 
       let browser = getBrowserForWindow(contentWindow);
       let browserWindow = browser.ownerDocument.defaultView;
       let tab = browserWindow.gBrowser &&
                 browserWindow.gBrowser._getTabForContentWindow(contentWindow.top);
       activeStreams.push({
         uri: contentWindow.location.href,
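Note: mediaCaptureWindowState gains a sixth out-parameter for application sharing; like the others it is an XPCOM out-param, passed as an empty object the service fills in. A sketch of the updated call shape as used in this patch:

  let camera = {}, microphone = {}, screen = {}, window = {}, app = {};
  MediaManagerService.mediaCaptureWindowState(contentWindow, camera,
                                              microphone, screen, window, app);
  let sharingDisplay = screen.value || window.value || app.value;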
@@ -324,46 +326,58 @@ function prompt(aContentWindow, aCallID,
           deviceIndex++;
         }
       }
 
       function listScreenShareDevices(menupopup, devices) {
         while (menupopup.lastChild)
           menupopup.removeChild(menupopup.lastChild);
 
-        // "No Window or Screen" is the default because we can't pick a
+        let type = devices[0].mediaSource;
+        let typeName = type.charAt(0).toUpperCase() + type.substr(1);
+
+        let label = chromeDoc.getElementById("webRTC-selectWindow-label");
+        let stringId = "getUserMedia.select" + typeName;
+        label.setAttribute("value",
+                           stringBundle.getString(stringId + ".label"));
+        label.setAttribute("accesskey",
+                           stringBundle.getString(stringId + ".accesskey"));
+
+        // "No <type>" is the default because we can't pick a
         // 'default' window to share.
         addDeviceToList(menupopup,
-                        stringBundle.getString("getUserMedia.noWindowOrScreen.label"),
+                        stringBundle.getString("getUserMedia.no" + typeName + ".label"),
                         "-1");
+        menupopup.appendChild(chromeDoc.createElement("menuseparator"));
 
-        // Then add the 'Entire screen' item if mozGetUserMediaDevices returned it.
+        // Build the list of 'devices'.
         for (let i = 0; i < devices.length; ++i) {
-          if (devices[i].mediaSource == "screen") {
-            menupopup.appendChild(chromeDoc.createElement("menuseparator"));
-            addDeviceToList(menupopup,
-                            stringBundle.getString("getUserMedia.shareEntireScreen.label"),
-                            i, "Screen");
-            break;
+          let name;
+          // Screen has a special treatment because we currently only support
+          // sharing the primary screen and want to display a localized string.
+          if (type == "screen") {
+            name = stringBundle.getString("getUserMedia.shareEntireScreen.label");
           }
+          else {
+            name = devices[i].name;
+            if (type == "application") {
+              // The application names returned by the platform are of the form:
+              // <window count>\x1e<application name>
+              let sepIndex = name.indexOf("\x1e");
+              let count = name.slice(0, sepIndex);
+              let stringId = "getUserMedia.shareApplicationWindowCount.label";
+              name = PluralForm.get(parseInt(count), stringBundle.getString(stringId))
+                               .replace("#1", name.slice(sepIndex + 1))
+                               .replace("#2", count);
+            }
+          }
+          addDeviceToList(menupopup, name, i, typeName);
         }
 
-        // Finally add all the window names.
-        let separatorNeeded = true;
-        for (let i = 0; i < devices.length; ++i) {
-          if (devices[i].mediaSource == "window") {
-            if (separatorNeeded) {
-              menupopup.appendChild(chromeDoc.createElement("menuseparator"));
-              separatorNeeded = false;
-            }
-            addDeviceToList(menupopup, devices[i].name, i, "Window");
-          }
-        }
-
-        // Always re-select the "No Window or Screen" item.
+        // Always re-select the "No <type>" item.
         chromeDoc.getElementById("webRTC-selectWindow-menulist").removeAttribute("value");
         chromeDoc.getElementById("webRTC-all-windows-shared").hidden = true;
       }
 
       function addDeviceToList(menupopup, deviceName, deviceIndex, type) {
         let menuitem = chromeDoc.createElement("menuitem");
         menuitem.setAttribute("value", deviceIndex);
         menuitem.setAttribute("label", deviceName);
@@ -455,17 +469,17 @@ function getGlobalIndicator() {
                   .hiddenDOMWindow.document,
     _statusBar: Cc["@mozilla.org/widget/macsystemstatusbar;1"]
                   .getService(Ci.nsISystemStatusBar),
 
     _command: function(aEvent) {
       let type = this.getAttribute("type");
       if (type == "Camera" || type == "Microphone")
         type = "Devices";
-      else if (type == "Window")
+      else if (type == "Window" || type == "Application")
         type = "Screen";
       webrtcUI.showSharingDoorhanger(aEvent.target.stream, type);
     },
 
     _popupShowing: function(aEvent) {
       let type = this.getAttribute("type");
       let activeStreams;
       if (type == "Camera") {
@@ -573,25 +587,27 @@ function getGlobalIndicator() {
   return indicator;
 #endif
 }
 
 function onTabSharingMenuPopupShowing(e) {
   let streams = webrtcUI.getActiveStreams(true, true, true);
   for (let streamInfo of streams) {
     let stringName = "getUserMedia.sharingMenu";
-    // Guarantee sorting order here or bad things will happen if
-    // the not-specced-but-implemented-everywhere object key sorting changes:
-    let types = Object.keys(streamInfo.types).sort();
-    // Then construct a string ID out of these types:
-    for (let type of types) {
-      if (streamInfo.types[type].value) {
-        stringName += type;
-      }
-    }
+    let types = streamInfo.types;
+    if (types.Camera.value)
+      stringName += "Camera";
+    if (types.Microphone.value)
+      stringName += "Microphone";
+    if (types.Screen.value)
+      stringName += "Screen";
+    else if (types.Application.value)
+      stringName += "Application";
+    else if (types.Window.value)
+      stringName += "Window";
 
     let doc = e.target.ownerDocument;
     let bundle = doc.defaultView.gNavigatorBundle;
 
     let origin;
     let uri;
     let href = streamInfo.uri;
     try {
@@ -608,20 +624,20 @@ function onTabSharingMenuPopupShowing(e)
       }
     }
 
     let menuitem = doc.createElement("menuitem");
     menuitem.setAttribute("label", bundle.getFormattedString(stringName, [origin]));
     menuitem.stream = streamInfo;
 
     // We can only open 1 doorhanger at a time. Guessing that users would be
-    // most eager to control screen/window sharing, and only then
+    // most eager to control screen/window/app sharing, and only then
     // camera/microphone sharing, in that (decreasing) order of priority.
     let doorhangerType;
-    if ((/Screen|Window/).test(stringName)) {
+    if ((/Screen|Window|Application/).test(stringName)) {
       doorhangerType = "Screen";
     } else {
       doorhangerType = "Devices";
     }
     menuitem.setAttribute("doorhangertype", doorhangerType);
     menuitem.addEventListener("command", onTabSharingMenuPopupCommand);
     e.target.appendChild(menuitem);
   }
@@ -686,27 +702,29 @@ function updateIndicators() {
 
   webrtcUI.showGlobalIndicator = count > 0;
   webrtcUI.showCameraIndicator = false;
   webrtcUI.showMicrophoneIndicator = false;
   webrtcUI.showScreenSharingIndicator = "";
 
   for (let i = 0; i < count; ++i) {
     let contentWindow = contentWindowSupportsArray.GetElementAt(i);
-    let camera = {}, microphone = {}, screen = {}, window = {};
+    let camera = {}, microphone = {}, screen = {}, window = {}, app = {};
     MediaManagerService.mediaCaptureWindowState(contentWindow, camera,
-                                                microphone, screen, window);
+                                                microphone, screen, window, app);
     if (camera.value)
       webrtcUI.showCameraIndicator = true;
     if (microphone.value)
       webrtcUI.showMicrophoneIndicator = true;
     if (screen.value)
       webrtcUI.showScreenSharingIndicator = "Screen";
-    else if (window.value && !webrtcUI.showScreenSharingIndicator)
+    else if (window.value && webrtcUI.showScreenSharingIndicator != "Screen")
       webrtcUI.showScreenSharingIndicator = "Window";
+    else if (app.value && !webrtcUI.showScreenSharingIndicator)
+      webrtcUI.showScreenSharingIndicator = "Application";
 
     updateBrowserSpecificIndicator(getBrowserForWindow(contentWindow));
   }
 
   let browserWindowEnum = Services.wm.getEnumerator("navigator:browser");
   while (browserWindowEnum.hasMoreElements()) {
     let chromeWin = browserWindowEnum.getNext();
     if (webrtcUI.showGlobalIndicator) {
@@ -733,19 +751,20 @@ function updateIndicators() {
       gIndicatorWindow.updateIndicatorState();
   } else if (gIndicatorWindow) {
     gIndicatorWindow.close();
     gIndicatorWindow = null;
   }
 }
 
 function updateBrowserSpecificIndicator(aBrowser) {
-  let camera = {}, microphone = {}, screen = {}, window = {};
+  let camera = {}, microphone = {}, screen = {}, window = {}, app = {};
   MediaManagerService.mediaCaptureWindowState(aBrowser.contentWindow,
-                                              camera, microphone, screen, window);
+                                              camera, microphone, screen,
+                                              window, app);
   let captureState;
   if (camera.value && microphone.value) {
     captureState = "CameraAndMicrophone";
   } else if (camera.value) {
     captureState = "Camera";
   } else if (microphone.value) {
     captureState = "Microphone";
   }
@@ -798,17 +817,17 @@ function updateBrowserSpecificIndicator(
     chromeWin.PopupNotifications.show(aBrowser, "webRTC-sharingDevices", message,
                                       anchorId, mainAction, secondaryActions, options);
   }
   else {
     removeBrowserNotification(aBrowser,"webRTC-sharingDevices");
   }
 
   // Now handle the screen sharing indicator.
-  if (!screen.value && !window.value) {
+  if (!screen.value && !window.value && !app.value) {
     removeBrowserNotification(aBrowser,"webRTC-sharingScreen");
     return;
   }
 
   options = {
     hideNotNow: true,
     dismissed: true,
     eventCallback: function(aTopic) {
@@ -822,26 +841,35 @@ function updateBrowserSpecificIndicator(
   secondaryActions = [{
     label: stringBundle.getString("getUserMedia.stopSharing.label"),
     accessKey: stringBundle.getString("getUserMedia.stopSharing.accesskey"),
     callback: function () {
       Services.obs.notifyObservers(null, "getUserMedia:revoke", "screen:" + windowId);
     }
   }];
   // If we are sharing both a window and the screen, show 'Screen'.
-  let stringId = "getUserMedia.sharing" + (screen.value ? "Screen" : "Window") + ".message";
+  let stringId = "getUserMedia.sharing";
+  if (screen.value)
+    stringId += "Screen";
+  else if (app.value)
+    stringId += "Application";
+  else
+    stringId += "Window";
   chromeWin.PopupNotifications.show(aBrowser, "webRTC-sharingScreen",
-                                    stringBundle.getString(stringId),
+                                    stringBundle.getString(stringId + ".message"),
                                     "webRTC-sharingScreen-notification-icon",
                                     mainAction, secondaryActions, options);
 }
 
 function removeBrowserNotification(aBrowser, aNotificationId) {
   let win = aBrowser.ownerDocument.defaultView;
   let notification =
     win.PopupNotifications.getNotification(aNotificationId, aBrowser);
   if (notification)
     win.PopupNotifications.remove(notification);
 }
 
 function removeBrowserSpecificIndicator(aSubject, aTopic, aData) {
-  updateBrowserSpecificIndicator(getBrowserForWindowId(aData));
+  let browser = getBrowserForWindowId(aData);
+  // If the tab has already been closed, ignore the notification.
+  if (browser.contentWindow)
+    updateBrowserSpecificIndicator(browser);
 }
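
A note on the application labels consumed above: the platform returns each application device's name as "<window count>\x1e<application name>", the two fields joined by the ASCII record separator (0x1E). A minimal standalone sketch of that split, in C++ for illustration (SplitApplicationLabel is hypothetical, not part of this patch):

    #include <cstdio>
    #include <cstdlib>
    #include <string>

    // Hypothetical helper mirroring the indexOf("\x1e")/slice() logic in the
    // JS above: split "<window count>\x1e<application name>" into two fields.
    static void SplitApplicationLabel(const std::string& aLabel,
                                      int* aCount, std::string* aName)
    {
      std::string::size_type sep = aLabel.find('\x1e');
      if (sep == std::string::npos) {
        *aCount = 0;
        *aName = aLabel;
        return;
      }
      *aCount = atoi(aLabel.substr(0, sep).c_str());
      *aName = aLabel.substr(sep + 1);
    }

    int main()
    {
      int count;
      std::string name;
      // Adjacent literals keep the hex escape from swallowing the 'F'.
      SplitApplicationLabel("3\x1e" "Firefox", &count, &name);
      printf("%s: %d window(s)\n", name.c_str(), count); // Firefox: 3 window(s)
      return 0;
    }
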
--- a/config/config.mk
+++ b/config/config.mk
@@ -518,35 +518,28 @@ endif # MACOSX_DEPLOYMENT_TARGET
 
 ifdef MOZ_USING_CCACHE
 ifdef CLANG_CXX
 export CCACHE_CPP2=1
 endif
 endif
 
 # Set link flags according to whether we want a console.
+ifeq ($(OS_ARCH),WINNT)
 ifdef MOZ_WINCONSOLE
 ifeq ($(MOZ_WINCONSOLE),1)
-ifeq ($(OS_ARCH),WINNT)
-ifdef GNU_CC
-WIN32_EXE_LDFLAGS	+= -mconsole
-else
-WIN32_EXE_LDFLAGS	+= -SUBSYSTEM:CONSOLE
-endif
-endif
+WIN32_EXE_LDFLAGS	+= $(WIN32_CONSOLE_EXE_LDFLAGS)
 else # MOZ_WINCONSOLE
-ifeq ($(OS_ARCH),WINNT)
-ifdef GNU_CC
-WIN32_EXE_LDFLAGS	+= -mwindows
+WIN32_EXE_LDFLAGS	+= $(WIN32_GUI_EXE_LDFLAGS)
+endif
 else
-WIN32_EXE_LDFLAGS	+= -SUBSYSTEM:WINDOWS
-endif
+# For setting subsystem version
+WIN32_EXE_LDFLAGS	+= $(WIN32_CONSOLE_EXE_LDFLAGS)
 endif
-endif
-endif
+endif # WINNT
 
 ifdef _MSC_VER
 ifeq ($(CPU_ARCH),x86_64)
 # set stack to 2MB on x64 build.  See bug 582910
 WIN32_EXE_LDFLAGS	+= -STACK:2097152
 endif
 endif
 
--- a/configure.in
+++ b/configure.in
@@ -46,17 +46,17 @@ dnl ====================================
 _SUBDIR_HOST_CFLAGS="$HOST_CFLAGS"
 _SUBDIR_HOST_CXXFLAGS="$HOST_CXXFLAGS"
 _SUBDIR_HOST_LDFLAGS="$HOST_LDFLAGS"
 _SUBDIR_CONFIG_ARGS="$ac_configure_args"
 
 dnl Set the version number of the libs included with mozilla
 dnl ========================================================
 MOZJPEG=62
-MOZPNG=10610
+MOZPNG=10613
 NSPR_VERSION=4
 NSPR_MINVER=4.10.3
 NSS_VERSION=3
 
 dnl Set the minimum version of toolkit libs used by mozilla
 dnl ========================================================
 GLIB_VERSION=1.2.0
 PERL_VERSION=5.006
@@ -2055,16 +2055,19 @@ ia64*-hpux*)
         # Use temp file for windres (bug 213281)
         RCFLAGS='-O coff --use-temp-file'
         # mingw doesn't require kernel32, user32, and advapi32 explicitly
         LIBS="$LIBS -luuid -lgdi32 -lwinmm -lwsock32 -luserenv -lsecur32 -lnetapi32"
         MOZ_FIX_LINK_PATHS=
         DLL_PREFIX=
         IMPORT_LIB_SUFFIX=dll.a
 
+        WIN32_CONSOLE_EXE_LDFLAGS=-mconsole
+        WIN32_GUI_EXE_LDFLAGS=-mwindows
+
+        # We use a mix of both POSIX and Win32 printf formats across the tree, so format
         # warnings are useless on mingw.
         MOZ_C_SUPPORTS_WARNING(-Wno-, format, ac_c_has_wno_format)
         MOZ_CXX_SUPPORTS_WARNING(-Wno-, format, ac_cxx_has_wno_format)
     else
         TARGET_COMPILER_ABI=msvc
         HOST_CC='$(CC)'
         HOST_CXX='$(CXX)'
@@ -2087,17 +2090,25 @@ ia64*-hpux*)
         LIB_SUFFIX=lib
         DLL_PREFIX=
         LIB_PREFIX=
         IMPORT_LIB_SUFFIX=lib
         MKSHLIB='$(LD) -NOLOGO -DLL -OUT:$@ -PDB:$(LINK_PDBFILE) $(DSO_LDOPTS)'
         MKCSHLIB='$(LD) -NOLOGO -DLL -OUT:$@ -PDB:$(LINK_PDBFILE) $(DSO_LDOPTS)'
         MKSHLIB_FORCE_ALL=
         MKSHLIB_UNFORCE_ALL=
-        DSO_LDOPTS=-SUBSYSTEM:WINDOWS
+        dnl Set subsystem version 5 for Windows XP.
+        if test "$CPU_ARCH" = "x86"; then
+            WIN32_SUBSYSTEM_VERSION=5.01
+        else
+            WIN32_SUBSYSTEM_VERSION=5.02
+        fi
+        WIN32_CONSOLE_EXE_LDFLAGS=-SUBSYSTEM:CONSOLE,$WIN32_SUBSYSTEM_VERSION
+        WIN32_GUI_EXE_LDFLAGS=-SUBSYSTEM:WINDOWS,$WIN32_SUBSYSTEM_VERSION
+        DSO_LDOPTS=-SUBSYSTEM:WINDOWS,$WIN32_SUBSYSTEM_VERSION
         _USE_CPP_INCLUDE_FLAG=1
         _DEFINES_CFLAGS='-FI $(DEPTH)/dist/include/mozilla-config.h -DMOZILLA_CLIENT'
         _DEFINES_CXXFLAGS='-FI $(DEPTH)/dist/include/mozilla-config.h -DMOZILLA_CLIENT'
         CFLAGS="$CFLAGS -W3 -Gy"
         CXXFLAGS="$CXXFLAGS -W3 -Gy"
         if test "$_CC_SUITE" -ge "11" -a "$CPU_ARCH" = "x86"; then
             dnl VS2012+ defaults to -arch:SSE2.
             CFLAGS="$CFLAGS -arch:IA32"
@@ -3824,16 +3835,17 @@ MOZ_ANDROID_SEARCH_ACTIVITY=
 MOZ_ANDROID_MLS_STUMBLER=
 ACCESSIBILITY=1
 MOZ_TIME_MANAGER=
 MOZ_PAY=
 MOZ_AUDIO_CHANNEL_MANAGER=
 NSS_NO_LIBPKIX=
 MOZ_CONTENT_SANDBOX=
 MOZ_GMP_SANDBOX=
+MOZ_SANDBOX=1
 JSGC_USE_EXACT_ROOTING=1
 JSGC_GENERATIONAL=
 
 case "$target_os" in
     mingw*)
         NS_ENABLE_TSF=1
         AC_DEFINE(NS_ENABLE_TSF)
         ;;
@@ -6338,20 +6350,28 @@ dnl ====================================
 dnl = Disable libpkix
 dnl ========================================================
 if test -n "$NSS_NO_LIBPKIX"; then
     AC_DEFINE(NSS_NO_LIBPKIX)
 fi
 AC_SUBST(NSS_NO_LIBPKIX)
 
 dnl ========================================================
+dnl = Sandboxing support
+dnl ========================================================
+MOZ_ARG_DISABLE_BOOL(sandbox,
+[  --disable-sandbox        Disable sandboxing support],
+    MOZ_SANDBOX=,
+    MOZ_SANDBOX=1)
+
+dnl ========================================================
 dnl = Content process sandboxing
 dnl ========================================================
 if test -n "$gonkdir"; then
-    MOZ_CONTENT_SANDBOX=1
+    MOZ_CONTENT_SANDBOX=$MOZ_SANDBOX
 fi
 
 MOZ_ARG_ENABLE_BOOL(content-sandbox,
 [  --enable-content-sandbox        Enable sandboxing support for content-processes],
     MOZ_CONTENT_SANDBOX=1,
     MOZ_CONTENT_SANDBOX=)
 
 if test -n "$MOZ_CONTENT_SANDBOX"; then
@@ -6360,36 +6380,47 @@ fi
 
 AC_SUBST(MOZ_CONTENT_SANDBOX)
 
 dnl ========================================================
 dnl = Gecko Media Plugin sandboxing
 dnl ========================================================
 case $OS_TARGET in
 WINNT)
-    MOZ_GMP_SANDBOX=1
+    MOZ_GMP_SANDBOX=$MOZ_SANDBOX
     ;;
 Linux)
     case $CPU_ARCH in
     x86_64|x86)
-        MOZ_GMP_SANDBOX=1
+        MOZ_GMP_SANDBOX=$MOZ_SANDBOX
         ;;
     esac
     ;;
 Darwin)
-    MOZ_GMP_SANDBOX=1
+    MOZ_GMP_SANDBOX=$MOZ_SANDBOX
     ;;
 esac
 
 if test -n "$MOZ_GMP_SANDBOX"; then
     AC_DEFINE(MOZ_GMP_SANDBOX)
 fi
 
 AC_SUBST(MOZ_GMP_SANDBOX)
 
+if test -z "$MOZ_CONTENT_SANDBOX" -a -z "$MOZ_GMP_SANDBOX"; then
+    MOZ_SANDBOX=
+fi
+
+if test -n "$MOZ_SANDBOX"; then
+    AC_DEFINE(MOZ_SANDBOX)
+fi
+
+AC_SUBST(MOZ_SANDBOX)
+
+
 dnl ========================================================
 dnl =
 dnl = Module specific options
 dnl =
 dnl ========================================================
 MOZ_ARG_HEADER(Individual module options)
 
 dnl ========================================================
@@ -8693,16 +8724,18 @@ AC_SUBST(OBJ_SUFFIX)
 AC_SUBST(BIN_SUFFIX)
 AC_SUBST(ASM_SUFFIX)
 AC_SUBST(IMPORT_LIB_SUFFIX)
 AC_SUBST(USE_N32)
 AC_SUBST(CC_VERSION)
 AC_SUBST(CXX_VERSION)
 AC_SUBST(MSMANIFEST_TOOL)
 AC_SUBST(NS_ENABLE_TSF)
+AC_SUBST(WIN32_CONSOLE_EXE_LDFLAGS)
+AC_SUBST(WIN32_GUI_EXE_LDFLAGS)
 
 AC_SUBST(MOZ_WAVE)
 AC_SUBST(MOZ_VORBIS)
 AC_SUBST(MOZ_TREMOR)
 AC_SUBST(MOZ_OPUS)
 AC_SUBST(MOZ_WEBM)
 AC_SUBST(MOZ_WMF)
 AC_SUBST(MOZ_FFMPEG)
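
The net effect of the new MOZ_SANDBOX option: the define is emitted whenever at least one of MOZ_CONTENT_SANDBOX or MOZ_GMP_SANDBOX survives configure, and cleared when --disable-sandbox turns both off, so shared sandbox code can be guarded once. A sketch of the intended guard pattern (InitSandboxCommon is hypothetical):

    // Hypothetical shared bootstrap, guarded by the new unified define.
    void InitSandboxCommon();

    void InitProcessSecurity()
    {
    #ifdef MOZ_SANDBOX
      // Compiled out entirely when configure ran with --disable-sandbox,
      // i.e. when neither the content nor the GMP sandbox is enabled.
      InitSandboxCommon();
    #endif
    }
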
--- a/content/base/public/nsDOMFile.h
+++ b/content/base/public/nsDOMFile.h
@@ -196,19 +196,16 @@ public:
   CreateSlice(uint64_t aStart, uint64_t aLength,
               const nsAString& aContentType) = 0;
 
   virtual const nsTArray<nsRefPtr<DOMFileImpl>>*
   GetSubBlobImpls() const = 0;
 
   virtual nsresult GetInternalStream(nsIInputStream** aStream) = 0;
 
-  virtual nsresult
-  GetInternalUrl(nsIPrincipal* aPrincipal, nsAString& aURL) = 0;
-
   virtual int64_t GetFileId() = 0;
 
   virtual void AddFileInfo(indexedDB::FileInfo* aFileInfo) = 0;
 
   virtual indexedDB::FileInfo*
   GetFileInfo(indexedDB::FileManager* aFileManager) = 0;
 
   virtual nsresult GetSendInfo(nsIInputStream** aBody,
@@ -331,18 +328,16 @@ public:
   virtual const nsTArray<nsRefPtr<DOMFileImpl>>*
   GetSubBlobImpls() const MOZ_OVERRIDE
   {
     return nullptr;
   }
 
   virtual nsresult GetInternalStream(nsIInputStream** aStream) MOZ_OVERRIDE;
 
-  virtual nsresult GetInternalUrl(nsIPrincipal* aPrincipal, nsAString& aURL) MOZ_OVERRIDE;
-
   virtual int64_t GetFileId() MOZ_OVERRIDE;
 
   virtual void AddFileInfo(indexedDB::FileInfo* aFileInfo) MOZ_OVERRIDE;
 
   virtual indexedDB::FileInfo*
   GetFileInfo(indexedDB::FileManager* aFileManager) MOZ_OVERRIDE;
 
   virtual nsresult GetSendInfo(nsIInputStream** aBody,
--- a/content/base/public/nsIDOMFile.idl
+++ b/content/base/public/nsIDOMFile.idl
@@ -21,26 +21,23 @@ class FileManager;
 [ptr] native FileManager(mozilla::dom::indexedDB::FileManager);
 
 interface nsIDOMFileError;
 interface nsIInputStream;
 interface nsIURI;
 interface nsIPrincipal;
 interface nsIDOMBlob;
 
-[scriptable, builtinclass, uuid(b1723fac-4814-4429-82cb-dc54ba0d46d6)]
+[scriptable, builtinclass, uuid(6ef56697-7c9c-41ac-83df-e01c079fb1d1)]
 interface nsIDOMBlob : nsISupports
 {
   readonly attribute unsigned long long size;
   readonly attribute DOMString type;
 
   [noscript] readonly attribute nsIInputStream internalStream;
-  // The caller is responsible for releasing the internalUrl from the
-  // blob: protocol handler
-  [noscript] DOMString getInternalUrl(in nsIPrincipal principal);
 
   [optional_argc] nsIDOMBlob slice([optional] in long long start,
                                    [optional] in long long end,
                                    [optional] in DOMString contentType);
 
   // Get internal id of stored file. Returns -1 if it is not a stored file.
   // Intended only for testing. It can be called on any thread.
   [notxpcom] long long getFileId();
--- a/content/base/src/nsDOMFile.cpp
+++ b/content/base/src/nsDOMFile.cpp
@@ -404,22 +404,16 @@ DOMFile::Slice(int64_t aStart, int64_t a
 }
 
 NS_IMETHODIMP
 DOMFile::GetInternalStream(nsIInputStream** aStream)
 {
  return mImpl->GetInternalStream(aStream);
 }
 
-NS_IMETHODIMP
-DOMFile::GetInternalUrl(nsIPrincipal* aPrincipal, nsAString& aURL)
-{
-  return mImpl->GetInternalUrl(aPrincipal, aURL);
-}
-
 NS_IMETHODIMP_(int64_t)
 DOMFile::GetFileId()
 {
   return mImpl->GetFileId();
 }
 
 NS_IMETHODIMP_(void)
 DOMFile::AddFileInfo(indexedDB::FileInfo* aFileInfo)
@@ -589,32 +583,16 @@ DOMFileImplBase::CreateSlice(uint64_t aS
 }
 
 nsresult
 DOMFileImplBase::GetInternalStream(nsIInputStream** aStream)
 {
   return NS_ERROR_NOT_IMPLEMENTED;
 }
 
-nsresult
-DOMFileImplBase::GetInternalUrl(nsIPrincipal* aPrincipal, nsAString& aURL)
-{
-  NS_ENSURE_STATE(aPrincipal);
-
-  nsCString url;
-  nsresult rv = nsBlobProtocolHandler::AddDataEntry(
-    NS_LITERAL_CSTRING(BLOBURI_SCHEME), this, aPrincipal, url);
-  if (NS_FAILED(rv)) {
-    return rv;
-  }
-
-  CopyASCIItoUTF16(url, aURL);
-  return NS_OK;
-}
-
 int64_t
 DOMFileImplBase::GetFileId()
 {
   int64_t id = -1;
 
   if (IsStoredFile() && IsWholeFile() && !IsSnapshot()) {
     if (!indexedDB::IndexedDatabaseManager::IsClosed()) {
       indexedDB::IndexedDatabaseManager::FileMutex().Lock();
--- a/content/base/src/nsGkAtomList.h
+++ b/content/base/src/nsGkAtomList.h
@@ -1961,18 +1961,18 @@ GK_ATOM(onended, "onended")
 GK_ATOM(onratechange, "onratechange")
 GK_ATOM(ondurationchange, "ondurationchange")
 GK_ATOM(onvolumechange, "onvolumechange")
 GK_ATOM(onaddtrack, "onaddtrack")
 GK_ATOM(oncontrollerchange, "oncontrollerchange")
 GK_ATOM(oncuechange, "oncuechange")
 GK_ATOM(onenter, "onenter")
 GK_ATOM(onexit, "onexit")
-GK_ATOM(onneedkey, "onneedkey")
-GK_ATOM(needkey, "needkey")
+GK_ATOM(onencrypted, "onencrypted")
+GK_ATOM(encrypted, "encrypted")
 GK_ATOM(onremovetrack, "onremovetrack")
 GK_ATOM(loadstart, "loadstart")
 GK_ATOM(suspend, "suspend")
 GK_ATOM(emptied, "emptied")
 GK_ATOM(stalled, "stalled")
 GK_ATOM(play, "play")
 GK_ATOM(pause, "pause")
 GK_ATOM(loadedmetadata, "loadedmetadata")
--- a/content/html/content/public/HTMLMediaElement.h
+++ b/content/html/content/public/HTMLMediaElement.h
@@ -27,16 +27,21 @@
 
 // Something on Linux #defines None, which is an entry in the
 // MediaWaitingFor enum, so undef it here before including the binding,
 // so that the build doesn't fail...
 #ifdef None
 #undef None
 #endif
 
+// X.h on Linux #defines CurrentTime as 0L, so we have to #undef it here.
+#ifdef CurrentTime
+#undef CurrentTime
+#endif
+
 #include "mozilla/dom/HTMLMediaElementBinding.h"
 
 // Define to output information on decoding and painting framerate
 /* #define DEBUG_FRAME_RATE 1 */
 
 class nsIChannel;
 class nsIHttpChannel;
 class nsILoadGroup;
@@ -527,21 +532,21 @@ public:
 #ifdef MOZ_EME
   MediaKeys* GetMediaKeys() const;
 
   already_AddRefed<Promise> SetMediaKeys(MediaKeys* mediaKeys,
                                          ErrorResult& aRv);
   
   MediaWaitingFor WaitingFor() const;
 
-  mozilla::dom::EventHandlerNonNull* GetOnneedkey();
-  void SetOnneedkey(mozilla::dom::EventHandlerNonNull* listener);
+  mozilla::dom::EventHandlerNonNull* GetOnencrypted();
+  void SetOnencrypted(mozilla::dom::EventHandlerNonNull* listener);
 
-  void DispatchNeedKey(const nsTArray<uint8_t>& aInitData,
-                       const nsAString& aInitDataType);
+  void DispatchEncrypted(const nsTArray<uint8_t>& aInitData,
+                         const nsAString& aInitDataType);
 
 
   bool IsEventAttributeName(nsIAtom* aName) MOZ_OVERRIDE;
 #endif // MOZ_EME
 
   bool MozAutoplayEnabled() const
   {
     return mAutoplayEnabled;
--- a/content/html/content/src/HTMLMediaElement.cpp
+++ b/content/html/content/src/HTMLMediaElement.cpp
@@ -7,17 +7,17 @@
 #include "mozilla/dom/HTMLMediaElement.h"
 #include "mozilla/dom/HTMLMediaElementBinding.h"
 #include "mozilla/dom/HTMLSourceElement.h"
 #include "mozilla/dom/ElementInlines.h"
 #include "mozilla/ArrayUtils.h"
 #include "mozilla/MathAlgorithms.h"
 #include "mozilla/AsyncEventDispatcher.h"
 #ifdef MOZ_EME
-#include "mozilla/dom/MediaKeyNeededEvent.h"
+#include "mozilla/dom/MediaEncryptedEvent.h"
 #endif
 
 #include "base/basictypes.h"
 #include "nsIDOMHTMLMediaElement.h"
 #include "nsIDOMHTMLSourceElement.h"
 #include "TimeRanges.h"
 #include "nsGenericHTMLElement.h"
 #include "nsAttrValueInlines.h"
@@ -4013,47 +4013,47 @@ HTMLMediaElement::SetMediaKeys(mozilla::
 
 MediaWaitingFor
 HTMLMediaElement::WaitingFor() const
 {
   return mWaitingFor;
 }
 
 EventHandlerNonNull*
-HTMLMediaElement::GetOnneedkey()
+HTMLMediaElement::GetOnencrypted()
 {
   EventListenerManager *elm = GetExistingListenerManager();
-  return elm ? elm->GetEventHandler(nsGkAtoms::onneedkey, EmptyString())
+  return elm ? elm->GetEventHandler(nsGkAtoms::onencrypted, EmptyString())
               : nullptr;
 }
 
 void
-HTMLMediaElement::SetOnneedkey(EventHandlerNonNull* handler)
+HTMLMediaElement::SetOnencrypted(EventHandlerNonNull* handler)
 {
   EventListenerManager *elm = GetOrCreateListenerManager();
   if (elm) {
-    elm->SetEventHandler(nsGkAtoms::onneedkey, EmptyString(), handler);
+    elm->SetEventHandler(nsGkAtoms::onencrypted, EmptyString(), handler);
   }
 }
 
 void
-HTMLMediaElement::DispatchNeedKey(const nsTArray<uint8_t>& aInitData,
-                                  const nsAString& aInitDataType)
+HTMLMediaElement::DispatchEncrypted(const nsTArray<uint8_t>& aInitData,
+                                    const nsAString& aInitDataType)
 {
-  nsRefPtr<MediaKeyNeededEvent> event(
-    MediaKeyNeededEvent::Constructor(this, aInitDataType, aInitData));
+  nsRefPtr<MediaEncryptedEvent> event(
+    MediaEncryptedEvent::Constructor(this, aInitDataType, aInitData));
   nsRefPtr<AsyncEventDispatcher> asyncDispatcher =
     new AsyncEventDispatcher(this, event);
   asyncDispatcher->PostDOMEvent();
 }
 
 bool
 HTMLMediaElement::IsEventAttributeName(nsIAtom* aName)
 {
-  return aName == nsGkAtoms::onneedkey ||
+  return aName == nsGkAtoms::onencrypted ||
          nsGenericHTMLElement::IsEventAttributeName(aName);
 }
 #endif // MOZ_EME
 
 NS_IMETHODIMP HTMLMediaElement::WindowVolumeChanged()
 {
   SetVolumeInternal();
   return NS_OK;
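
The rename from needkey/MediaKeyNeededEvent to encrypted/MediaEncryptedEvent tracks the current EME draft. A sketch of a call site for the renamed dispatcher, assuming MOZ_EME and a demuxer that has just found "cenc" init data (NotifyEncrypted itself is hypothetical):

    #ifdef MOZ_EME
    #include "mozilla/dom/HTMLMediaElement.h"

    // Hypothetical call site: hand freshly demuxed init data (e.g. a PSSH
    // box) to the element, which fires an asynchronous "encrypted" event.
    void NotifyEncrypted(mozilla::dom::HTMLMediaElement* aElement,
                         const nsTArray<uint8_t>& aInitData)
    {
      aElement->DispatchEncrypted(aInitData, NS_LITERAL_STRING("cenc"));
    }
    #endif // MOZ_EME
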
new file mode 100644
--- /dev/null
+++ b/content/media/AudioBufferUtils.h
@@ -0,0 +1,151 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef MOZILLA_SCRATCHBUFFER_H_
+#define MOZILLA_SCRATCHBUFFER_H_
+#include <mozilla/PodOperations.h>
+#include <algorithm>
+
+namespace mozilla {
+
+/**
+ * The classes in this file provide an interface that uses frames as a unit.
+ * However, they store their offsets in samples (because it's handy for pointer
+ * operations). The functions below convert between the two units.
+ */
+static inline uint32_t FramesToSamples(uint32_t aChannels, uint32_t aFrames) {
+  return aFrames * aChannels;
+}
+
+static inline uint32_t SamplesToFrames(uint32_t aChannels, uint32_t aSamples) {
+  MOZ_ASSERT(!(aSamples % aChannels), "Frame alignment is wrong.");
+  return aSamples / aChannels;
+}
+
+/**
+ * Class that gets a buffer pointer from an audio callback and provides a safe
+ * interface to manipulate this buffer, and to ensure we are not missing frames
+ * by the end of the callback.
+ */
+template<typename T, uint32_t CHANNELS>
+class AudioCallbackBufferWrapper
+{
+public:
+  AudioCallbackBufferWrapper()
+    : mBuffer(nullptr),
+      mSamples(0),
+      mSampleWriteOffset(1)
+  {}
+  /**
+   * Set the buffer in this wrapper. This is to be called at the beginning of
+   * the callback.
+   */
+  void SetBuffer(T* aBuffer, uint32_t aFrames) {
+    MOZ_ASSERT(!mBuffer && !mSamples,
+        "SetBuffer called twice.");
+    mBuffer = aBuffer;
+    mSamples = FramesToSamples(CHANNELS, aFrames);
+    mSampleWriteOffset = 0;
+  }
+
+  /**
+   * Write some frames to the internal buffer. Free space in the buffer should
+   * be checked prior to calling this.
+   */
+  void WriteFrames(T* aBuffer, uint32_t aFrames) {
+    MOZ_ASSERT(aFrames <= Available(),
+        "Writing more that we can in the audio buffer.");
+
+    PodCopy(mBuffer + mSampleWriteOffset, aBuffer, FramesToSamples(CHANNELS,
+                                                                   aFrames));
+    mSampleWriteOffset += FramesToSamples(CHANNELS, aFrames);
+  }
+
+  /**
+   * Number of frames that can be written to the buffer.
+   */
+  uint32_t Available() {
+    return SamplesToFrames(CHANNELS, mSamples - mSampleWriteOffset);
+  }
+
+  /**
+   * Check that the buffer is completely filled, and reset internal state so this
+   * instance can be reused.
+   */
+  void BufferFilled() {
+    // It's okay to have exactly zero samples here: this can happen when we
+    // have an audio callback driver because of a hint at MSG creation, but
+    // the AudioOutputStream has not been created yet.
+    NS_WARN_IF_FALSE(Available() == 0 || mSampleWriteOffset == 0,
+            "Audio Buffer is not full by the end of the callback.");
+    MOZ_ASSERT(mSamples, "Buffer not set.");
+    mSamples = 0;
+    mSampleWriteOffset = 0;
+    mBuffer = nullptr;
+  }
+
+private:
+  /* This is not an owned pointer, but the pointer passed to us via the audio
+   * callback. */
+  T* mBuffer;
+  /* The number of samples of this audio buffer. */
+  uint32_t mSamples;
+  /* The position at which new samples should be written. We want to return to
+   * the audio callback iff this is equal to mSamples. */
+  uint32_t mSampleWriteOffset;
+};
+
+/**
+ * This is a class that interfaces with the AudioCallbackBufferWrapper, and is
+ * responsible for storing the excess data produced by the MediaStreamGraph
+ * because of different rounding constraints, to be used the next time the audio
+ * backend calls back.
+ */
+template<typename T, uint32_t BLOCK_SIZE, uint32_t CHANNELS>
+class SpillBuffer
+{
+public:
+  SpillBuffer()
+  : mPosition(0)
+  {
+    PodArrayZero(mBuffer);
+  }
+  /* Empty the spill buffer into the buffer of the audio callback. This returns
+   * the number of frames written. */
+  uint32_t Empty(AudioCallbackBufferWrapper<T, CHANNELS>& aBuffer) {
+    uint32_t framesToWrite = std::min(aBuffer.Available(),
+                                      SamplesToFrames(CHANNELS, mPosition));
+
+    aBuffer.WriteFrames(mBuffer, framesToWrite);
+
+    mPosition -= FramesToSamples(CHANNELS, framesToWrite);
+
+    return framesToWrite;
+  }
+  /* Fill the spill buffer from aInput, which contains aFrames frames; return
+   * the number of frames written to the spill buffer. */
+  uint32_t Fill(T* aInput, uint32_t aFrames) {
+    uint32_t framesToWrite = std::min(aFrames,
+                                      BLOCK_SIZE - SamplesToFrames(CHANNELS,
+                                                                   mPosition));
+
+    PodCopy(mBuffer + mPosition, aInput, FramesToSamples(CHANNELS,
+                                                         framesToWrite));
+
+    mPosition += FramesToSamples(CHANNELS, framesToWrite);
+
+    return framesToWrite;
+  }
+private:
+  /* The spilled data. */
+  T mBuffer[BLOCK_SIZE * CHANNELS];
+  /* The current write position, in samples, in the buffer when filling, or the
+   * amount of buffer filled when emptying. */
+  uint32_t mPosition;
+};
+
+}
+
+#endif // MOZILLA_SCRATCHBUFFER_H_
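
The two classes above are designed to cooperate inside an audio callback: wrap the backend's buffer, drain what spilled out of the previous iteration, write fixed-size blocks, and stash the overshoot. A minimal sketch of that flow, assuming float samples, stereo, 128-frame blocks, and a hypothetical Produce() standing in for the graph iteration:

    #include "AudioBufferUtils.h"

    using namespace mozilla;

    void Produce(float* aBlock); // hypothetical: fills one 128-frame stereo block

    static AudioCallbackBufferWrapper<float, 2> sBuffer;
    static SpillBuffer<float, 128, 2> sSpill;

    long DataCallback(float* aOutput, long aFrames)
    {
      sBuffer.SetBuffer(aOutput, static_cast<uint32_t>(aFrames));
      // Drain the frames spilled at the end of the previous callback first.
      sSpill.Empty(sBuffer);
      while (sBuffer.Available()) {
        float block[128 * 2];
        Produce(block);
        if (sBuffer.Available() >= 128) {
          sBuffer.WriteFrames(block, 128);
        } else {
          // Write what fits; keep the overshoot for the next callback.
          uint32_t avail = sBuffer.Available();
          sBuffer.WriteFrames(block, avail);
          sSpill.Fill(block + FramesToSamples(2, avail), 128 - avail);
        }
      }
      sBuffer.BufferFilled(); // warns if underfilled, then resets the wrapper
      return aFrames;
    }
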
--- a/content/media/AudioMixer.h
+++ b/content/media/AudioMixer.h
@@ -4,54 +4,74 @@
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef MOZILLA_AUDIOMIXER_H_
 #define MOZILLA_AUDIOMIXER_H_
 
 #include "AudioSampleFormat.h"
 #include "nsTArray.h"
 #include "mozilla/PodOperations.h"
+#include "mozilla/LinkedList.h"
+#include "AudioStream.h"
 
 namespace mozilla {
-typedef void(*MixerFunc)(AudioDataValue* aMixedBuffer,
-                         AudioSampleFormat aFormat,
-                         uint32_t aChannels,
-                         uint32_t aFrames,
-                         uint32_t aSampleRate);
 
+struct MixerCallbackReceiver {
+  virtual void MixerCallback(AudioDataValue* aMixedBuffer,
+                             AudioSampleFormat aFormat,
+                             uint32_t aChannels,
+                             uint32_t aFrames,
+                             uint32_t aSampleRate) = 0;
+};
 /**
  * This class mixes multiple streams of audio together to output a single audio
  * stream.
  *
  * AudioMixer::Mix is to be called repeatedly with buffers that have the same
  * length, sample rate, sample format and channel count.
  *
  * When all the tracks have been mixed, calling FinishMixing will call back with
  * a buffer containing the mixed audio data.
  *
  * This class is not thread safe.
  */
 class AudioMixer
 {
 public:
-  AudioMixer(MixerFunc aCallback)
-    : mCallback(aCallback),
-      mFrames(0),
+  AudioMixer()
+    : mFrames(0),
       mChannels(0),
       mSampleRate(0)
   { }
 
+  ~AudioMixer()
+  {
+    MixerCallback* cb;
+    while ((cb = mCallbacks.popFirst())) {
+      delete cb;
+    }
+  }
+
+  void StartMixing()
+  {
+    mSampleRate = mChannels = mFrames = 0;
+  }
+
   /* Get the data from the mixer. This is supposed to be called when all the
    * tracks have been mixed in. The caller should not hold onto the data. */
   void FinishMixing() {
-    mCallback(mMixedAudio.Elements(),
-              AudioSampleTypeToFormat<AudioDataValue>::Format,
-              mChannels,
-              mFrames,
-              mSampleRate);
+    MOZ_ASSERT(mChannels && mFrames && mSampleRate, "Mix not called for this cycle?");
+    for (MixerCallback* cb = mCallbacks.getFirst();
+         cb != nullptr; cb = cb->getNext()) {
+      cb->mReceiver->MixerCallback(mMixedAudio.Elements(),
+                                   AudioSampleTypeToFormat<AudioDataValue>::Format,
+                                   mChannels,
+                                   mFrames,
+                                   mSampleRate);
+    }
     PodZero(mMixedAudio.Elements(), mMixedAudio.Length());
     mSampleRate = mChannels = mFrames = 0;
   }
 
   /* Add a buffer to the mix. aSamples is interleaved. */
   void Mix(AudioDataValue* aSamples,
            uint32_t aChannels,
            uint32_t aFrames,
@@ -66,26 +86,61 @@ public:
     MOZ_ASSERT(aFrames == mFrames);
     MOZ_ASSERT(aChannels == mChannels);
     MOZ_ASSERT(aSampleRate == mSampleRate);
 
     for (uint32_t i = 0; i < aFrames * aChannels; i++) {
       mMixedAudio[i] += aSamples[i];
     }
   }
+
+  void AddCallback(MixerCallbackReceiver* aReceiver) {
+    mCallbacks.insertBack(new MixerCallback(aReceiver));
+  }
+
+  bool FindCallback(MixerCallbackReceiver* aReceiver) {
+    for (MixerCallback* cb = mCallbacks.getFirst();
+         cb != nullptr; cb = cb->getNext()) {
+      if (cb->mReceiver == aReceiver) {
+        return true;
+      }
+    }
+    return false;
+  }
+
+  bool RemoveCallback(MixerCallbackReceiver* aReceiver) {
+    for (MixerCallback* cb = mCallbacks.getFirst();
+         cb != nullptr; cb = cb->getNext()) {
+      if (cb->mReceiver == aReceiver) {
+        cb->remove();
+        delete cb;
+        return true;
+      }
+    }
+    return false;
+  }
 private:
   void EnsureCapacityAndSilence() {
     if (mFrames * mChannels > mMixedAudio.Length()) {
       mMixedAudio.SetLength(mFrames* mChannels);
     }
     PodZero(mMixedAudio.Elements(), mMixedAudio.Length());
   }
 
+  class MixerCallback : public LinkedListElement<MixerCallback>
+  {
+  public:
+    MixerCallback(MixerCallbackReceiver* aReceiver)
+      : mReceiver(aReceiver)
+    { }
+    MixerCallbackReceiver* mReceiver;
+  };
+
   /* Function that is called when the mixing is done. */
-  MixerFunc mCallback;
+  LinkedList<MixerCallback> mCallbacks;
   /* Number of frames for this mixing block. */
   uint32_t mFrames;
   /* Number of channels for this mixing block. */
   uint32_t mChannels;
   /* Sample rate of the mixed data. */
   uint32_t mSampleRate;
   /* Buffer containing the mixed audio data. */
   nsTArray<AudioDataValue> mMixedAudio;
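
AudioMixer now fans the mixed block out to a linked list of MixerCallbackReceiver implementations instead of a single function pointer, so several consumers can observe one mix cycle. A minimal sketch of a receiver and one cycle (LoggingReceiver and MixOnce are illustrations only):

    #include <cstdio>
    #include "AudioMixer.h"

    using namespace mozilla;

    // A receiver that logs the shape of each mixed block it is handed.
    class LoggingReceiver : public MixerCallbackReceiver
    {
    public:
      virtual void MixerCallback(AudioDataValue* aMixedBuffer,
                                 AudioSampleFormat aFormat,
                                 uint32_t aChannels,
                                 uint32_t aFrames,
                                 uint32_t aSampleRate)
      {
        printf("mixed %u frames, %u channels at %u Hz\n",
               aFrames, aChannels, aSampleRate);
      }
    };

    // One mix cycle: register the receiver (once), mix a block, flush.
    void MixOnce(AudioMixer& aMixer, LoggingReceiver& aReceiver,
                 AudioDataValue* aSamples)
    {
      if (!aMixer.FindCallback(&aReceiver)) {
        aMixer.AddCallback(&aReceiver);
      }
      aMixer.StartMixing();
      aMixer.Mix(aSamples, 2, 128, 44100); // one interleaved stereo 128-frame block
      aMixer.FinishMixing();               // fans out to every registered receiver
    }
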
--- a/content/media/AudioSegment.cpp
+++ b/content/media/AudioSegment.cpp
@@ -142,83 +142,73 @@ void AudioSegment::ResampleChunks(SpeexR
     break;
     default:
       MOZ_ASSERT(false);
     break;
   }
 }
 
 void
-AudioSegment::WriteTo(uint64_t aID, AudioStream* aOutput, AudioMixer* aMixer)
+AudioSegment::WriteTo(uint64_t aID, AudioMixer& aMixer, uint32_t aOutputChannels, uint32_t aSampleRate)
 {
-  uint32_t outputChannels = aOutput->GetChannels();
   nsAutoTArray<AudioDataValue,AUDIO_PROCESSING_FRAMES*GUESS_AUDIO_CHANNELS> buf;
   nsAutoTArray<const void*,GUESS_AUDIO_CHANNELS> channelData;
   // Offset in the buffer that will end up sent to the mixer, in samples.
   uint32_t offset = 0;
 
   if (!GetDuration()) {
     return;
   }
 
-  uint32_t outBufferLength = GetDuration() * outputChannels;
+  uint32_t outBufferLength = GetDuration() * aOutputChannels;
   buf.SetLength(outBufferLength);
 
 
   for (ChunkIterator ci(*this); !ci.IsEnded(); ci.Next()) {
     AudioChunk& c = *ci;
     uint32_t frames = c.mDuration;
 
     // If we have written data in the past, or we have real (non-silent) data
     // to write, we can proceed. Otherwise, it means we just started the
     // AudioStream, and we don't have real data to write to it (just silence).
     // To avoid overbuffering in the AudioStream, we simply drop the silence,
     // here. The stream will underrun and output silence anyways.
-    if (c.mBuffer || aOutput->GetWritten()) {
-      if (c.mBuffer && c.mBufferFormat != AUDIO_FORMAT_SILENCE) {
-        channelData.SetLength(c.mChannelData.Length());
-        for (uint32_t i = 0; i < channelData.Length(); ++i) {
-          channelData[i] = c.mChannelData[i];
-        }
-
-        if (channelData.Length() < outputChannels) {
-          // Up-mix. Note that this might actually make channelData have more
-          // than outputChannels temporarily.
-          AudioChannelsUpMix(&channelData, outputChannels, gZeroChannel);
-        }
+    if (c.mBuffer && c.mBufferFormat != AUDIO_FORMAT_SILENCE) {
+      channelData.SetLength(c.mChannelData.Length());
+      for (uint32_t i = 0; i < channelData.Length(); ++i) {
+        channelData[i] = c.mChannelData[i];
+      }
+      if (channelData.Length() < aOutputChannels) {
+        // Up-mix. Note that this might actually make channelData have more
+        // than aOutputChannels temporarily.
+        AudioChannelsUpMix(&channelData, aOutputChannels, gZeroChannel);
+      }
+      if (channelData.Length() > aOutputChannels) {
+        // Down-mix.
+        DownmixAndInterleave(channelData, c.mBufferFormat, frames,
+                             c.mVolume, aOutputChannels, buf.Elements() + offset);
+      } else {
+        InterleaveAndConvertBuffer(channelData.Elements(), c.mBufferFormat,
+                                   frames, c.mVolume,
+                                   aOutputChannels,
+                                   buf.Elements() + offset);
+      }
+    } else {
+      // Assumes that a bit pattern of zeroes == 0.0f
+      memset(buf.Elements() + offset, 0, aOutputChannels * frames * sizeof(AudioDataValue));
+    }
 
-        if (channelData.Length() > outputChannels) {
-          // Down-mix.
-          DownmixAndInterleave(channelData, c.mBufferFormat, frames,
-                               c.mVolume, outputChannels, buf.Elements() + offset);
-        } else {
-          InterleaveAndConvertBuffer(channelData.Elements(), c.mBufferFormat,
-                                     frames, c.mVolume,
-                                     outputChannels,
-                                     buf.Elements() + offset);
-        }
-      } else {
-        // Assumes that a bit pattern of zeroes == 0.0f
-        memset(buf.Elements() + offset, 0, outputChannels * frames * sizeof(AudioDataValue));
-      }
-      offset += frames * outputChannels;
-    }
+    offset += frames * aOutputChannels;
 
     if (!c.mTimeStamp.IsNull()) {
       TimeStamp now = TimeStamp::Now();
       // would be more efficient to convert c.mTimeStamp to ms at creation time and pass it here
       LogTime(AsyncLatencyLogger::AudioMediaStreamTrack, aID,
               (now - c.mTimeStamp).ToMilliseconds(), c.mTimeStamp);
     }
   }
 
-  aOutput->Write(buf.Elements(), offset / outputChannels, &(mChunks[mChunks.Length() - 1].mTimeStamp));
-
-  // `offset` is zero when all the chunks above are null (silence). We can
-  // safely skip the mixing here because filling `buf` with zero and then mixing
-  // it would have absolutly no effect in the mix.
-  if (aMixer && offset) {
-    aMixer->Mix(buf.Elements(), outputChannels, GetDuration(), aOutput->GetRate());
+  if (offset) {
+    aMixer.Mix(buf.Elements(), aOutputChannels, offset / aOutputChannels, aSampleRate);
   }
-  aOutput->Start();
 }
 
 }
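
With this change WriteTo no longer touches an AudioStream; it only up/down-mixes, interleaves, and pushes the result into the mixer. A sketch of the new call shape, assuming a stereo 44.1 kHz output and a mixer that already has a callback registered:

    #include "AudioSegment.h"

    using namespace mozilla;

    // Interleave one segment into the mixer; its registered callbacks get
    // the result when FinishMixing() runs.
    void WriteSegment(AudioSegment& aSegment, AudioMixer& aMixer, uint64_t aTrackId)
    {
      aMixer.StartMixing();
      aSegment.WriteTo(aTrackId, aMixer, 2 /* channels */, 44100 /* Hz */);
      aMixer.FinishMixing();
    }
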
--- a/content/media/AudioSegment.h
+++ b/content/media/AudioSegment.h
@@ -269,17 +269,17 @@ public:
     chunk->mVolume = aChunk->mVolume;
     chunk->mBufferFormat = aChunk->mBufferFormat;
 #ifdef MOZILLA_INTERNAL_API
     chunk->mTimeStamp = TimeStamp::Now();
 #endif
     return chunk;
   }
   void ApplyVolume(float aVolume);
-  void WriteTo(uint64_t aID, AudioStream* aOutput, AudioMixer* aMixer = nullptr);
+  void WriteTo(uint64_t aID, AudioMixer& aMixer, uint32_t aChannelCount, uint32_t aSampleRate);
 
   int ChannelCount() {
     NS_WARN_IF_FALSE(!mChunks.IsEmpty(),
         "Cannot query channel count on a AudioSegment with no chunks.");
     // Find the first chunk that has non-zero channels. A chunk that has zero
     // channels is just silence and we can simply discard it.
     for (ChunkIterator ci(*this); !ci.IsEnded(); ci.Next()) {
       if (ci->ChannelCount()) {
--- a/content/media/AudioStream.cpp
+++ b/content/media/AudioStream.cpp
@@ -8,20 +8,20 @@
 #include <string.h>
 #include "prlog.h"
 #include "prdtoa.h"
 #include "AudioStream.h"
 #include "VideoUtils.h"
 #include "mozilla/Monitor.h"
 #include "mozilla/Mutex.h"
 #include <algorithm>
-#include "mozilla/Preferences.h"
 #include "mozilla/Telemetry.h"
 #include "soundtouch/SoundTouch.h"
 #include "Latency.h"
+#include "CubebUtils.h"
 #include "nsPrintfCString.h"
 #ifdef XP_MACOSX
 #include <sys/sysctl.h>
 #endif
 
 namespace mozilla {
 
 #ifdef LOG
@@ -39,29 +39,16 @@ PRLogModuleInfo* gAudioStreamLog = nullp
 /**
  * When MOZ_DUMP_AUDIO is set in the environment (to anything),
  * we'll drop a series of files in the current working directory named
  * dumped-audio-<nnn>.wav, one per AudioStream created, containing
  * the audio for the stream including any skips due to underruns.
  */
 static int gDumpedAudioCount = 0;
 
-#define PREF_VOLUME_SCALE "media.volume_scale"
-#define PREF_CUBEB_LATENCY "media.cubeb_latency_ms"
-
-static const uint32_t CUBEB_NORMAL_LATENCY_MS = 100;
-
-StaticMutex AudioStream::sMutex;
-cubeb* AudioStream::sCubebContext;
-uint32_t AudioStream::sPreferredSampleRate;
-double AudioStream::sVolumeScale;
-uint32_t AudioStream::sCubebLatency;
-bool AudioStream::sCubebLatencyPrefSet;
-
-
 /**
  * Keep a list of frames sent to the audio engine in each DataCallback along
  * with the playback rate at the moment. Since the playback rate and number of
  * underrun frames can vary in each callback. We need to keep the whole history
  * in order to calculate the playback position of the audio engine correctly.
  */
 class FrameHistory {
   struct Chunk {
@@ -133,118 +120,16 @@ public:
     }
   }
 private:
   nsAutoTArray<Chunk, 7> mChunks;
   int64_t mBaseOffset;
   double mBasePosition;
 };
 
-/*static*/ void AudioStream::PrefChanged(const char* aPref, void* aClosure)
-{
-  if (strcmp(aPref, PREF_VOLUME_SCALE) == 0) {
-    nsAdoptingString value = Preferences::GetString(aPref);
-    StaticMutexAutoLock lock(sMutex);
-    if (value.IsEmpty()) {
-      sVolumeScale = 1.0;
-    } else {
-      NS_ConvertUTF16toUTF8 utf8(value);
-      sVolumeScale = std::max<double>(0, PR_strtod(utf8.get(), nullptr));
-    }
-  } else if (strcmp(aPref, PREF_CUBEB_LATENCY) == 0) {
-    // Arbitrary default stream latency of 100ms.  The higher this
-    // value, the longer stream volume changes will take to become
-    // audible.
-    sCubebLatencyPrefSet = Preferences::HasUserValue(aPref);
-    uint32_t value = Preferences::GetUint(aPref, CUBEB_NORMAL_LATENCY_MS);
-    StaticMutexAutoLock lock(sMutex);
-    sCubebLatency = std::min<uint32_t>(std::max<uint32_t>(value, 1), 1000);
-  }
-}
-
-/*static*/ bool AudioStream::GetFirstStream()
-{
-  static bool sFirstStream = true;
-
-  StaticMutexAutoLock lock(sMutex);
-  bool result = sFirstStream;
-  sFirstStream = false;
-  return result;
-}
-
-/*static*/ double AudioStream::GetVolumeScale()
-{
-  StaticMutexAutoLock lock(sMutex);
-  return sVolumeScale;
-}
-
-/*static*/ cubeb* AudioStream::GetCubebContext()
-{
-  StaticMutexAutoLock lock(sMutex);
-  return GetCubebContextUnlocked();
-}
-
-/*static*/ void AudioStream::InitPreferredSampleRate()
-{
-  StaticMutexAutoLock lock(sMutex);
-  if (sPreferredSampleRate == 0 &&
-      cubeb_get_preferred_sample_rate(GetCubebContextUnlocked(),
-                                      &sPreferredSampleRate) != CUBEB_OK) {
-    sPreferredSampleRate = 44100;
-  }
-}
-
-/*static*/ cubeb* AudioStream::GetCubebContextUnlocked()
-{
-  sMutex.AssertCurrentThreadOwns();
-  if (sCubebContext ||
-      cubeb_init(&sCubebContext, "AudioStream") == CUBEB_OK) {
-    return sCubebContext;
-  }
-  NS_WARNING("cubeb_init failed");
-  return nullptr;
-}
-
-/*static*/ uint32_t AudioStream::GetCubebLatency()
-{
-  StaticMutexAutoLock lock(sMutex);
-  return sCubebLatency;
-}
-
-/*static*/ bool AudioStream::CubebLatencyPrefSet()
-{
-  StaticMutexAutoLock lock(sMutex);
-  return sCubebLatencyPrefSet;
-}
-
-#if defined(__ANDROID__) && defined(MOZ_B2G)
-static cubeb_stream_type ConvertChannelToCubebType(dom::AudioChannel aChannel)
-{
-  switch(aChannel) {
-    case dom::AudioChannel::Normal:
-      return CUBEB_STREAM_TYPE_SYSTEM;
-    case dom::AudioChannel::Content:
-      return CUBEB_STREAM_TYPE_MUSIC;
-    case dom::AudioChannel::Notification:
-      return CUBEB_STREAM_TYPE_NOTIFICATION;
-    case dom::AudioChannel::Alarm:
-      return CUBEB_STREAM_TYPE_ALARM;
-    case dom::AudioChannel::Telephony:
-      return CUBEB_STREAM_TYPE_VOICE_CALL;
-    case dom::AudioChannel::Ringer:
-      return CUBEB_STREAM_TYPE_RING;
-    case dom::AudioChannel::Publicnotification:
-      return CUBEB_STREAM_TYPE_SYSTEM_ENFORCED;
-    default:
-      NS_ERROR("The value of AudioChannel is invalid");
-      return CUBEB_STREAM_TYPE_MAX;
-  }
-}
-#endif
-
 AudioStream::AudioStream()
   : mMonitor("AudioStream")
   , mInRate(0)
   , mOutRate(0)
   , mChannels(0)
   , mOutChannels(0)
   , mWritten(0)
   , mAudioClock(MOZ_THIS_IN_INITIALIZER_LIST())
@@ -282,39 +167,16 @@ AudioStream::SizeOfIncludingThis(MallocS
   // - mCubebStream
 
   amount += mInserts.SizeOfExcludingThis(aMallocSizeOf);
   amount += mBuffer.SizeOfExcludingThis(aMallocSizeOf);
 
   return amount;
 }
 
-/*static*/ void AudioStream::InitLibrary()
-{
-#ifdef PR_LOGGING
-  gAudioStreamLog = PR_NewLogModule("AudioStream");
-#endif
-  PrefChanged(PREF_VOLUME_SCALE, nullptr);
-  Preferences::RegisterCallback(PrefChanged, PREF_VOLUME_SCALE);
-  PrefChanged(PREF_CUBEB_LATENCY, nullptr);
-  Preferences::RegisterCallback(PrefChanged, PREF_CUBEB_LATENCY);
-}
-
-/*static*/ void AudioStream::ShutdownLibrary()
-{
-  Preferences::UnregisterCallback(PrefChanged, PREF_VOLUME_SCALE);
-  Preferences::UnregisterCallback(PrefChanged, PREF_CUBEB_LATENCY);
-
-  StaticMutexAutoLock lock(sMutex);
-  if (sCubebContext) {
-    cubeb_destroy(sCubebContext);
-    sCubebContext = nullptr;
-  }
-}
-
 nsresult AudioStream::EnsureTimeStretcherInitializedUnlocked()
 {
   mMonitor.AssertCurrentThreadOwns();
   if (!mTimeStretcher) {
     mTimeStretcher = new soundtouch::SoundTouch();
     mTimeStretcher->setSampleRate(mInRate);
     mTimeStretcher->setChannels(mOutChannels);
     mTimeStretcher->setPitch(1.0);
@@ -382,36 +244,16 @@ nsresult AudioStream::SetPreservesPitch(
 }
 
 int64_t AudioStream::GetWritten()
 {
   MonitorAutoLock mon(mMonitor);
   return mWritten;
 }
 
-/*static*/ int AudioStream::MaxNumberOfChannels()
-{
-  cubeb* cubebContext = GetCubebContext();
-  uint32_t maxNumberOfChannels;
-  if (cubebContext &&
-      cubeb_get_max_channel_count(cubebContext,
-                                  &maxNumberOfChannels) == CUBEB_OK) {
-    return static_cast<int>(maxNumberOfChannels);
-  }
-
-  return 0;
-}
-
-/*static*/ int AudioStream::PreferredSampleRate()
-{
-  MOZ_ASSERT(sPreferredSampleRate,
-             "sPreferredSampleRate has not been initialized!");
-  return sPreferredSampleRate;
-}
-
 static void SetUint16LE(uint8_t* aDest, uint16_t aValue)
 {
   aDest[0] = aValue & 0xFF;
   aDest[1] = aValue >> 8;
 }
 
 static void SetUint32LE(uint8_t* aDest, uint32_t aValue)
 {
@@ -479,19 +321,19 @@ WriteDumpFile(FILE* aDumpFile, AudioStre
 // NOTE: this must not block a LowLatency stream for any significant amount
 // of time, or it will block the entirety of MSG
 nsresult
 AudioStream::Init(int32_t aNumChannels, int32_t aRate,
                   const dom::AudioChannel aAudioChannel,
                   LatencyRequest aLatencyRequest)
 {
   mStartTime = TimeStamp::Now();
-  mIsFirst = GetFirstStream();
+  mIsFirst = CubebUtils::GetFirstStream();
 
-  if (!GetCubebContext() || aNumChannels < 0 || aRate < 0) {
+  if (!CubebUtils::GetCubebContext() || aNumChannels < 0 || aRate < 0) {
     return NS_ERROR_FAILURE;
   }
 
   PR_LOG(gAudioStreamLog, PR_LOG_DEBUG,
     ("%s  channels: %d, rate: %d for %p", __FUNCTION__, aNumChannels, aRate, this));
   mInRate = mOutRate = aRate;
   mChannels = aNumChannels;
   mOutChannels = (aNumChannels > 2) ? 2 : aNumChannels;
@@ -500,17 +342,17 @@ AudioStream::Init(int32_t aNumChannels, 
   mDumpFile = OpenDumpFile(this);
 
   cubeb_stream_params params;
   params.rate = aRate;
   params.channels = mOutChannels;
 #if defined(__ANDROID__)
 #if defined(MOZ_B2G)
   mAudioChannel = aAudioChannel;
-  params.stream_type = ConvertChannelToCubebType(aAudioChannel);
+  params.stream_type = CubebUtils::ConvertChannelToCubebType(aAudioChannel);
 #else
   mAudioChannel = dom::AudioChannel::Content;
   params.stream_type = CUBEB_STREAM_TYPE_MUSIC;
 #endif
 
   if (params.stream_type == CUBEB_STREAM_TYPE_MAX) {
     return NS_ERROR_INVALID_ARG;
   }
@@ -629,34 +471,34 @@ void AudioStream::DeviceChangedCallback(
 }
 
 // This code used to live inside AudioStream::Init(), but on Mac (others?)
 // it has been known to take 300-800 (or even 8500) ms to execute(!)
 nsresult
 AudioStream::OpenCubeb(cubeb_stream_params &aParams,
                        LatencyRequest aLatencyRequest)
 {
-  cubeb* cubebContext = GetCubebContext();
+  cubeb* cubebContext = CubebUtils::GetCubebContext();
   if (!cubebContext) {
     NS_WARNING("Can't get cubeb context!");
     MonitorAutoLock mon(mMonitor);
     mState = AudioStream::ERRORED;
     return NS_ERROR_FAILURE;
   }
 
   // If the latency pref is set, use it. Otherwise, if this stream is intended
   // for low latency playback, try to get the lowest latency possible.
   // Otherwise, for normal streams, use 100ms.
   uint32_t latency;
-  if (aLatencyRequest == LowLatency && !CubebLatencyPrefSet()) {
+  if (aLatencyRequest == LowLatency && !CubebUtils::CubebLatencyPrefSet()) {
     if (cubeb_get_min_latency(cubebContext, aParams, &latency) != CUBEB_OK) {
-      latency = GetCubebLatency();
+      latency = CubebUtils::GetCubebLatency();
     }
   } else {
-    latency = GetCubebLatency();
+    latency = CubebUtils::GetCubebLatency();
   }
 
   {
     cubeb_stream* stream;
     if (cubeb_stream_init(cubebContext, &stream, "AudioStream", aParams,
                           latency, DataCallback_S, StateCallback_S, this) == CUBEB_OK) {
       MonitorAutoLock mon(mMonitor);
       MOZ_ASSERT(mState != SHUTDOWN);
@@ -839,17 +681,17 @@ AudioStream::Available()
   return BytesToFrames(mBuffer.Available());
 }
 
 void
 AudioStream::SetVolume(double aVolume)
 {
   NS_ABORT_IF_FALSE(aVolume >= 0.0 && aVolume <= 1.0, "Invalid volume");
 
-  if (cubeb_stream_set_volume(mCubebStream.get(), aVolume * GetVolumeScale()) != CUBEB_OK) {
+  if (cubeb_stream_set_volume(mCubebStream.get(), aVolume * CubebUtils::GetVolumeScale()) != CUBEB_OK) {
     NS_WARNING("Could not change volume on cubeb stream.");
   }
 }
 
 void
 AudioStream::SetMicrophoneActive(bool aActive)
 {
   MonitorAutoLock mon(mMonitor);
@@ -1161,17 +1003,17 @@ AudioStream::Reset()
   mShouldDropFrames = true;
   mNeedsStart = true;
 
   cubeb_stream_params params;
   params.rate = mInRate;
   params.channels = mOutChannels;
 #if defined(__ANDROID__)
 #if defined(MOZ_B2G)
-  params.stream_type = ConvertChannelToCubebType(mAudioChannel);
+  params.stream_type = CubebUtils::ConvertChannelToCubebType(mAudioChannel);
 #else
   params.stream_type = CUBEB_STREAM_TYPE_MUSIC;
 #endif
 
   if (params.stream_type == CUBEB_STREAM_TYPE_MAX) {
     return;
   }
 #endif
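
After this refactoring, AudioStream consumers reach the cubeb glue through CubebUtils. A sketch of creating a stream against the hardware's preferred rate, assuming CubebUtils::InitLibrary() and InitPreferredSampleRate() have already run:

    #include "AudioStream.h"

    using namespace mozilla;

    // Open a stereo stream at the preferred hardware rate. Error handling
    // trimmed for brevity.
    already_AddRefed<AudioStream> OpenStereoStream()
    {
      nsRefPtr<AudioStream> stream = new AudioStream();
      nsresult rv = stream->Init(2, CubebUtils::PreferredSampleRate(),
                                 dom::AudioChannel::Content,
                                 AudioStream::HighLatency);
      if (NS_FAILED(rv)) {
        return nullptr;
      }
      return stream.forget();
    }
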
--- a/content/media/AudioStream.h
+++ b/content/media/AudioStream.h
@@ -8,20 +8,18 @@
 
 #include "AudioSampleFormat.h"
 #include "nsAutoPtr.h"
 #include "nsCOMPtr.h"
 #include "nsThreadUtils.h"
 #include "Latency.h"
 #include "mozilla/dom/AudioChannelBinding.h"
 #include "mozilla/RefPtr.h"
-#include "mozilla/StaticMutex.h"
 #include "mozilla/UniquePtr.h"
-
-#include "cubeb/cubeb.h"
+#include "CubebUtils.h"
 
 namespace soundtouch {
 class SoundTouch;
 }
 
 namespace mozilla {
 
 template<>
@@ -181,34 +179,16 @@ class AudioInitTask;
 // callers, or made from a single thread.  One exception is that access to
 // GetPosition, GetPositionInFrames, SetVolume, and Get{Rate,Channels},
 // SetMicrophoneActive is thread-safe without external synchronization.
 class AudioStream MOZ_FINAL
 {
   virtual ~AudioStream();
 
 public:
-  // Initialize Audio Library. Some Audio backends require initializing the
-  // library before using it.
-  static void InitLibrary();
-
-  // Shutdown Audio Library. Some Audio backends require shutting down the
-  // library after using it.
-  static void ShutdownLibrary();
-
-  // Returns the maximum number of channels supported by the audio hardware.
-  static int MaxNumberOfChannels();
-
-  // Queries the samplerate the hardware/mixer runs at, and stores it.
-  // Can be called on any thread. When this returns, it is safe to call
-  // PreferredSampleRate without locking.
-  static void InitPreferredSampleRate();
-  // Get the aformentionned sample rate. Does not lock.
-  static int PreferredSampleRate();
-
   NS_INLINE_DECL_THREADSAFE_REFCOUNTING(AudioStream)
   AudioStream();
 
   enum LatencyRequest {
     HighLatency,
     LowLatency
   };
 
@@ -300,24 +280,16 @@ private:
 
   // So we can call it asynchronously from AudioInitTask
   nsresult OpenCubeb(cubeb_stream_params &aParams,
                      LatencyRequest aLatencyRequest);
   void AudioInitTaskFinished();
 
   void CheckForStart();
 
-  static void PrefChanged(const char* aPref, void* aClosure);
-  static double GetVolumeScale();
-  static bool GetFirstStream();
-  static cubeb* GetCubebContext();
-  static cubeb* GetCubebContextUnlocked();
-  static uint32_t GetCubebLatency();
-  static bool CubebLatencyPrefSet();
-
   static long DataCallback_S(cubeb_stream*, void* aThis, void* aBuffer, long aFrames)
   {
     return static_cast<AudioStream*>(aThis)->DataCallback(aBuffer, aFrames);
   }
 
   static void StateCallback_S(cubeb_stream*, void* aThis, cubeb_state aState)
   {
     static_cast<AudioStream*>(aThis)->StateCallback(aState);
@@ -425,28 +397,16 @@ private:
   bool mMicrophoneActive;
   // When we are in the process of changing the output device, and the callback
   // is not going to be called for a little while, simply drop incoming frames.
   // This is only on OSX for now, because other systems handle this gracefully.
   bool mShouldDropFrames;
   // True if there is a pending AudioInitTask. Shutdown() will wait until the
   // pending AudioInitTask is finished.
   bool mPendingAudioInitTask;
-
-  // This mutex protects the static members below.
-  static StaticMutex sMutex;
-  static cubeb* sCubebContext;
-
-  // Prefered samplerate, in Hz (characteristic of the
-  // hardware/mixer/platform/API used).
-  static uint32_t sPreferredSampleRate;
-
-  static double sVolumeScale;
-  static uint32_t sCubebLatency;
-  static bool sCubebLatencyPrefSet;
 };
 
 class AudioInitTask : public nsRunnable
 {
 public:
   AudioInitTask(AudioStream *aStream,
                 AudioStream::LatencyRequest aLatencyRequest,
                 const cubeb_stream_params &aParams)
new file mode 100644
--- /dev/null
+++ b/content/media/CubebUtils.cpp
@@ -0,0 +1,174 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include <stdint.h>
+#include <algorithm>
+#include "mozilla/Preferences.h"
+#include "CubebUtils.h"
+#include "prdtoa.h"
+
+#define PREF_VOLUME_SCALE "media.volume_scale"
+#define PREF_CUBEB_LATENCY "media.cubeb_latency_ms"
+
+namespace mozilla {
+
+extern PRLogModuleInfo* gAudioStreamLog;
+
+static const uint32_t CUBEB_NORMAL_LATENCY_MS = 100;
+
+StaticMutex CubebUtils::sMutex;
+cubeb* CubebUtils::sCubebContext;
+uint32_t CubebUtils::sPreferredSampleRate;
+double CubebUtils::sVolumeScale;
+uint32_t CubebUtils::sCubebLatency;
+bool CubebUtils::sCubebLatencyPrefSet;
+
+/*static*/ void CubebUtils::PrefChanged(const char* aPref, void* aClosure)
+{
+  if (strcmp(aPref, PREF_VOLUME_SCALE) == 0) {
+    nsAdoptingString value = Preferences::GetString(aPref);
+    StaticMutexAutoLock lock(sMutex);
+    if (value.IsEmpty()) {
+      sVolumeScale = 1.0;
+    } else {
+      NS_ConvertUTF16toUTF8 utf8(value);
+      sVolumeScale = std::max<double>(0, PR_strtod(utf8.get(), nullptr));
+    }
+  } else if (strcmp(aPref, PREF_CUBEB_LATENCY) == 0) {
+    // Arbitrary default stream latency of 100ms.  The higher this
+    // value, the longer stream volume changes will take to become
+    // audible.
+    sCubebLatencyPrefSet = Preferences::HasUserValue(aPref);
+    uint32_t value = Preferences::GetUint(aPref, CUBEB_NORMAL_LATENCY_MS);
+    StaticMutexAutoLock lock(sMutex);
+    sCubebLatency = std::min<uint32_t>(std::max<uint32_t>(value, 1), 1000);
+  }
+}
+
+/*static*/ bool CubebUtils::GetFirstStream()
+{
+  static bool sFirstStream = true;
+
+  StaticMutexAutoLock lock(sMutex);
+  bool result = sFirstStream;
+  sFirstStream = false;
+  return result;
+}
+
+/*static*/ double CubebUtils::GetVolumeScale()
+{
+  StaticMutexAutoLock lock(sMutex);
+  return sVolumeScale;
+}
+
+/*static*/ cubeb* CubebUtils::GetCubebContext()
+{
+  StaticMutexAutoLock lock(sMutex);
+  return GetCubebContextUnlocked();
+}
+
+/*static*/ void CubebUtils::InitPreferredSampleRate()
+{
+  StaticMutexAutoLock lock(sMutex);
+  if (sPreferredSampleRate == 0 &&
+      cubeb_get_preferred_sample_rate(GetCubebContextUnlocked(),
+                                      &sPreferredSampleRate) != CUBEB_OK) {
+    sPreferredSampleRate = 44100;
+  }
+}
+
+/*static*/ cubeb* CubebUtils::GetCubebContextUnlocked()
+{
+  sMutex.AssertCurrentThreadOwns();
+  if (sCubebContext ||
+      cubeb_init(&sCubebContext, "CubebUtils") == CUBEB_OK) {
+    return sCubebContext;
+  }
+  NS_WARNING("cubeb_init failed");
+  return nullptr;
+}
+
+/*static*/ uint32_t CubebUtils::GetCubebLatency()
+{
+  StaticMutexAutoLock lock(sMutex);
+  return sCubebLatency;
+}
+
+/*static*/ bool CubebUtils::CubebLatencyPrefSet()
+{
+  StaticMutexAutoLock lock(sMutex);
+  return sCubebLatencyPrefSet;
+}
+
+/*static*/ void CubebUtils::InitLibrary()
+{
+#ifdef PR_LOGGING
+  gAudioStreamLog = PR_NewLogModule("AudioStream");
+#endif
+  PrefChanged(PREF_VOLUME_SCALE, nullptr);
+  Preferences::RegisterCallback(PrefChanged, PREF_VOLUME_SCALE);
+  PrefChanged(PREF_CUBEB_LATENCY, nullptr);
+  Preferences::RegisterCallback(PrefChanged, PREF_CUBEB_LATENCY);
+}
+
+/*static*/ void CubebUtils::ShutdownLibrary()
+{
+  Preferences::UnregisterCallback(PrefChanged, PREF_VOLUME_SCALE);
+  Preferences::UnregisterCallback(PrefChanged, PREF_CUBEB_LATENCY);
+
+  StaticMutexAutoLock lock(sMutex);
+  if (sCubebContext) {
+    cubeb_destroy(sCubebContext);
+    sCubebContext = nullptr;
+  }
+}
+
+/*static*/ int CubebUtils::MaxNumberOfChannels()
+{
+  cubeb* cubebContext = CubebUtils::GetCubebContext();
+  uint32_t maxNumberOfChannels;
+  if (cubebContext &&
+      cubeb_get_max_channel_count(cubebContext,
+                                  &maxNumberOfChannels) == CUBEB_OK) {
+    return static_cast<int>(maxNumberOfChannels);
+  }
+
+  return 0;
+}
+
+/*static*/ int CubebUtils::PreferredSampleRate()
+{
+  MOZ_ASSERT(sPreferredSampleRate,
+             "sPreferredSampleRate has not been initialized!");
+  return sPreferredSampleRate;
+}
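+
+// Illustrative call order (a sketch, not part of this patch): callers are
+// expected to do something like
+//   CubebUtils::InitLibrary();                 // once, at startup
+//   CubebUtils::InitPreferredSampleRate();     // any thread; takes the lock
+//   int rate = CubebUtils::PreferredSampleRate(); // afterwards, lock-free
+// and CubebUtils::ShutdownLibrary() at teardown.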
+
+#if defined(__ANDROID__) && defined(MOZ_B2G)
+/*static*/ cubeb_stream_type CubebUtils::ConvertChannelToCubebType(dom::AudioChannel aChannel)
+{
+  switch(aChannel) {
+    case dom::AudioChannel::Normal:
+      return CUBEB_STREAM_TYPE_SYSTEM;
+    case dom::AudioChannel::Content:
+      return CUBEB_STREAM_TYPE_MUSIC;
+    case dom::AudioChannel::Notification:
+      return CUBEB_STREAM_TYPE_NOTIFICATION;
+    case dom::AudioChannel::Alarm:
+      return CUBEB_STREAM_TYPE_ALARM;
+    case dom::AudioChannel::Telephony:
+      return CUBEB_STREAM_TYPE_VOICE_CALL;
+    case dom::AudioChannel::Ringer:
+      return CUBEB_STREAM_TYPE_RING;
+    case dom::AudioChannel::Publicnotification:
+      return CUBEB_STREAM_TYPE_SYSTEM_ENFORCED;
+    default:
+      NS_ERROR("The value of AudioChannel is invalid");
+      return CUBEB_STREAM_TYPE_MAX;
+  }
+}
+#endif
+
+}
new file mode 100644
--- /dev/null
+++ b/content/media/CubebUtils.h
@@ -0,0 +1,72 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#if !defined(CubebUtils_h_)
+#define CubebUtils_h_
+
+#include "cubeb/cubeb.h"
+#include "nsAutoRef.h"
+#include "mozilla/StaticMutex.h"
+#include "mozilla/dom/AudioChannelBinding.h"
+
+template <>
+class nsAutoRefTraits<cubeb_stream> : public nsPointerRefTraits<cubeb_stream>
+{
+public:
+  static void Release(cubeb_stream* aStream) { cubeb_stream_destroy(aStream); }
+};
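+
+// With the trait above, a cubeb_stream can be held in an nsAutoRef that
+// destroys it automatically. A usage sketch with illustrative variable names
+// (GraphDriver.cpp does this with its mAudioStream member):
+//   nsAutoRef<cubeb_stream> stream;
+//   stream.own(rawStream); // takes ownership; cubeb_stream_destroy on reset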
+
+namespace mozilla {
+
+class CubebUtils {
+public:
+  // Initialize the audio library. Some audio backends require initializing
+  // the library before using it.
+  static void InitLibrary();
+
+  // Shut down the audio library. Some audio backends require shutting down
+  // the library after use.
+  static void ShutdownLibrary();
+
+  // Returns the maximum number of channels supported by the audio hardware.
+  static int MaxNumberOfChannels();
+
+  // Queries the sample rate the hardware/mixer runs at, and stores it.
+  // Can be called on any thread. When this returns, it is safe to call
+  // PreferredSampleRate without locking.
+  static void InitPreferredSampleRate();
+  // Get the aforementioned sample rate. Does not lock.
+  static int PreferredSampleRate();
+
+  static void PrefChanged(const char* aPref, void* aClosure);
+  static double GetVolumeScale();
+  static bool GetFirstStream();
+  static cubeb* GetCubebContext();
+  static cubeb* GetCubebContextUnlocked();
+  static uint32_t GetCubebLatency();
+  static bool CubebLatencyPrefSet();
+#if defined(__ANDROID__) && defined(MOZ_B2G)
+  static cubeb_stream_type ConvertChannelToCubebType(dom::AudioChannel aChannel);
+#endif
+
+private:
+  // This mutex protects the static members below.
+  static StaticMutex sMutex;
+  static cubeb* sCubebContext;
+
+  // Preferred sample rate, in Hz (characteristic of the
+  // hardware/mixer/platform/API used).
+  static uint32_t sPreferredSampleRate;
+
+  static double sVolumeScale;
+  static uint32_t sCubebLatency;
+  static bool sCubebLatencyPrefSet;
+};
+}
+
+#endif // CubebUtils_h_
--- a/content/media/DOMMediaStream.cpp
+++ b/content/media/DOMMediaStream.cpp
@@ -214,26 +214,26 @@ DOMMediaStream::IsFinished()
   return !mStream || mStream->IsFinished();
 }
 
 void
 DOMMediaStream::InitSourceStream(nsIDOMWindow* aWindow, TrackTypeHints aHintContents)
 {
   mWindow = aWindow;
   SetHintContents(aHintContents);
-  MediaStreamGraph* gm = MediaStreamGraph::GetInstance();
+  MediaStreamGraph* gm = MediaStreamGraph::GetInstance(aHintContents);
   InitStreamCommon(gm->CreateSourceStream(this));
 }
 
 void
 DOMMediaStream::InitTrackUnionStream(nsIDOMWindow* aWindow, TrackTypeHints aHintContents)
 {
   mWindow = aWindow;
   SetHintContents(aHintContents);
-  MediaStreamGraph* gm = MediaStreamGraph::GetInstance();
+  MediaStreamGraph* gm = MediaStreamGraph::GetInstance(aHintContents);
   InitStreamCommon(gm->CreateTrackUnionStream(this));
 }
 
 void
 DOMMediaStream::InitStreamCommon(MediaStream* aStream)
 {
   mStream = aStream;
 
@@ -261,16 +261,24 @@ DOMMediaStream::CreateTrackUnionStream(n
 void
 DOMMediaStream::SetTrackEnabled(TrackID aTrackID, bool aEnabled)
 {
   if (mStream) {
     mStream->SetTrackEnabled(aTrackID, aEnabled);
   }
 }
 
+void
+DOMMediaStream::StopTrack(TrackID aTrackID)
+{
+  if (mStream && mStream->AsSourceStream()) {
+    mStream->AsSourceStream()->EndTrack(aTrackID);
+  }
+}
+
 bool
 DOMMediaStream::CombineWithPrincipal(nsIPrincipal* aPrincipal)
 {
   bool changed =
     nsContentUtils::CombineResourcePrincipals(&mPrincipal, aPrincipal);
   if (changed) {
     NotifyPrincipalChanged();
   }
--- a/content/media/DOMMediaStream.h
+++ b/content/media/DOMMediaStream.h
@@ -95,16 +95,18 @@ public:
   virtual void RemoveDirectListener(MediaStreamDirectListener *aListener) {}
 
   /**
    * Overridden in DOMLocalMediaStreams to allow getUserMedia to disable
    * media at the SourceMediaStream.
    */
   virtual void SetTrackEnabled(TrackID aTrackID, bool aEnabled);
 
+  virtual void StopTrack(TrackID aTrackID);
+
   bool IsFinished();
   /**
    * Returns a principal indicating who may access this stream. The stream contents
    * can only be accessed by principals subsuming this principal.
    */
   nsIPrincipal* GetPrincipal() { return mPrincipal; }
 
   /**
@@ -154,17 +156,18 @@ public:
   /**
    * Called when the main-thread state of the MediaStream changed.
    */
   void NotifyStreamStateChanged();
 
   // Indicate what track types we eventually expect to add to this stream
   enum {
     HINT_CONTENTS_AUDIO = 1 << 0,
-    HINT_CONTENTS_VIDEO = 1 << 1
+    HINT_CONTENTS_VIDEO = 1 << 1,
+    HINT_CONTENTS_UNKNOWN = 1 << 2
   };
   TrackTypeHints GetHintContents() const { return mHintContents; }
   void SetHintContents(TrackTypeHints aHintContents) { mHintContents = aHintContents; }
 
   /**
    * Create an nsDOMMediaStream whose underlying stream is a SourceMediaStream.
    */
   static already_AddRefed<DOMMediaStream>
@@ -222,19 +225,19 @@ public:
   /**
    * If loading and playing a MediaStream in a media element, for each
    * MediaStreamTrack in the MediaStream, create a corresponding AudioTrack or
    * VideoTrack during the phase of resource fetching.
    */
   void ConstructMediaTracks(AudioTrackList* aAudioTrackList,
                             VideoTrackList* aVideoTrackList);
 
-  void NotifyMediaStreamTrackCreated(MediaStreamTrack* aTrack);
+  virtual void NotifyMediaStreamTrackCreated(MediaStreamTrack* aTrack);
 
-  void NotifyMediaStreamTrackEnded(MediaStreamTrack* aTrack);
+  virtual void NotifyMediaStreamTrackEnded(MediaStreamTrack* aTrack);
 
 protected:
   virtual ~DOMMediaStream();
 
   void Destroy();
   void InitSourceStream(nsIDOMWindow* aWindow, TrackTypeHints aHintContents);
   void InitTrackUnionStream(nsIDOMWindow* aWindow, TrackTypeHints aHintContents);
   void InitStreamCommon(MediaStream* aStream);
new file mode 100644
--- /dev/null
+++ b/content/media/GraphDriver.cpp
@@ -0,0 +1,901 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include <MediaStreamGraphImpl.h>
+#include "CubebUtils.h"
+
+#ifdef XP_MACOSX
+#include <sys/sysctl.h>
+#endif
+
+#ifdef PR_LOGGING
+extern PRLogModuleInfo* gMediaStreamGraphLog;
+#define STREAM_LOG(type, msg) PR_LOG(gMediaStreamGraphLog, type, msg)
+#else
+#define STREAM_LOG(type, msg)
+#endif
+
+namespace mozilla {
+
+struct AutoProfilerUnregisterThread
+{
+  // The empty ctor is used to silence a pre-4.8.0 GCC unused variable warning.
+  AutoProfilerUnregisterThread()
+  {
+  }
+
+  ~AutoProfilerUnregisterThread()
+  {
+    profiler_unregister_thread();
+  }
+};
+
+GraphDriver::GraphDriver(MediaStreamGraphImpl* aGraphImpl)
+  : mIterationStart(0),
+    mIterationEnd(0),
+    mStateComputedTime(0),
+    mNextStateComputedTime(0),
+    mGraphImpl(aGraphImpl),
+    mWaitState(WAITSTATE_RUNNING),
+    mNeedAnotherIteration(false),
+    mCurrentTimeStamp(TimeStamp::Now()),
+    mPreviousDriver(nullptr),
+    mNextDriver(nullptr)
+{ }
+
+void GraphDriver::SetGraphTime(GraphDriver* aPreviousDriver,
+                               GraphTime aLastSwitchNextIterationStart,
+                               GraphTime aLastSwitchNextIterationEnd,
+                               GraphTime aLastSwitchStateComputedTime,
+                               GraphTime aLastSwitchNextStateComputedTime)
+{
+  // We set mIterationEnd here because the first thing a driver does when it
+  // runs an iteration is to update the graph times, so we are in fact setting
+  // mIterationStart of the next iteration by setting the end of the previous
+  // iteration.
+  mIterationStart = aLastSwitchNextIterationStart;
+  mIterationEnd = aLastSwitchNextIterationEnd;
+  mStateComputedTime = aLastSwitchStateComputedTime;
+  mNextStateComputedTime = aLastSwitchNextStateComputedTime;
+
+  STREAM_LOG(PR_LOG_DEBUG, ("Setting previous driver: %p (%s)", aPreviousDriver, aPreviousDriver->AsAudioCallbackDriver() ? "AudioCallbackDriver" : "SystemClockDriver"));
+  MOZ_ASSERT(!mPreviousDriver);
+  mPreviousDriver = aPreviousDriver;
+}
+
+void GraphDriver::SwitchAtNextIteration(GraphDriver* aNextDriver)
+{
+
+  STREAM_LOG(PR_LOG_DEBUG, ("Switching to new driver: %p (%s)", aNextDriver, aNextDriver->AsAudioCallbackDriver() ? "AudioCallbackDriver" : "SystemClockDriver"));
+  // Sometimes we switch to a new driver twice in one iteration; this is
+  // probably a bug.
+  MOZ_ASSERT(!mNextDriver || !mNextDriver->AsAudioCallbackDriver());
+  mNextDriver = aNextDriver;
+}
+
+void GraphDriver::EnsureImmediateWakeUpLocked()
+{
+  mGraphImpl->GetMonitor().AssertCurrentThreadOwns();
+  mWaitState = WAITSTATE_WAKING_UP;
+  mGraphImpl->GetMonitor().Notify();
+}
+
+void GraphDriver::UpdateStateComputedTime(GraphTime aStateComputedTime)
+{
+  MOZ_ASSERT(aStateComputedTime > mIterationEnd);
+  // The next state computed time can be the same as the previous one here: it
+  // means the driver would have been blocking indefinitely, but the graph was
+  // woken up right after going to sleep.
+  MOZ_ASSERT(aStateComputedTime >= mStateComputedTime, "State time can't go backward.");
+
+  mStateComputedTime = aStateComputedTime;
+}
+
+void GraphDriver::EnsureNextIteration()
+{
+  MonitorAutoLock lock(mGraphImpl->GetMonitor());
+  EnsureNextIterationLocked();
+}
+
+void GraphDriver::EnsureNextIterationLocked()
+{
+  mGraphImpl->GetMonitor().AssertCurrentThreadOwns();
+
+  if (IsWaitingIndefinitly()) {
+    WakeUp();
+  }
+
+  if (mNeedAnotherIteration) {
+    return;
+  }
+  mNeedAnotherIteration = true;
+}
+
+ThreadedDriver::ThreadedDriver(MediaStreamGraphImpl* aGraphImpl)
+  : GraphDriver(aGraphImpl)
+{ }
+
+ThreadedDriver::~ThreadedDriver()
+{
+  if (mThread) {
+    mThread->Shutdown();
+  }
+}
+
+class MediaStreamGraphShutdownThreadRunnable : public nsRunnable {
+public:
+  explicit MediaStreamGraphShutdownThreadRunnable(GraphDriver* aDriver)
+    : mDriver(aDriver)
+  {
+  }
+  NS_IMETHOD Run()
+  {
+    MOZ_ASSERT(NS_IsMainThread());
+    // We can't release an audio driver on the main thread, because it can be
+    // blocking.
+    if (mDriver->AsAudioCallbackDriver()) {
+      STREAM_LOG(PR_LOG_DEBUG, ("Releasing audio driver off main thread.\n"));
+      nsRefPtr<AsyncCubebTask> releaseEvent =
+        new AsyncCubebTask(mDriver->AsAudioCallbackDriver(), AsyncCubebTask::SHUTDOWN);
+      mDriver = nullptr;
+      releaseEvent->Dispatch();
+    } else {
+      mDriver = nullptr;
+    }
+    return NS_OK;
+  }
+private:
+  nsRefPtr<GraphDriver> mDriver;
+};
+
+class MediaStreamGraphInitThreadRunnable : public nsRunnable {
+public:
+  explicit MediaStreamGraphInitThreadRunnable(ThreadedDriver* aDriver)
+    : mDriver(aDriver)
+  {
+  }
+  NS_IMETHOD Run()
+  {
+    char aLocal;
+    STREAM_LOG(PR_LOG_DEBUG, ("Starting system thread"));
+    profiler_register_thread("MediaStreamGraph", &aLocal);
+    if (mDriver->mPreviousDriver) {
+      MOZ_ASSERT(!mDriver->AsAudioCallbackDriver());
+      // Stop and release the previous driver off-main-thread.
+      nsRefPtr<AsyncCubebTask> releaseEvent =
+        new AsyncCubebTask(mDriver->mPreviousDriver->AsAudioCallbackDriver(), AsyncCubebTask::SHUTDOWN);
+      mDriver->mPreviousDriver = nullptr;
+      releaseEvent->Dispatch();
+    } else {
+      MonitorAutoLock mon(mDriver->mGraphImpl->GetMonitor());
+      MOZ_ASSERT(mDriver->mGraphImpl->MessagesQueued(), "Don't start a graph without messages queued.");
+      mDriver->mGraphImpl->SwapMessageQueues();
+    }
+    mDriver->RunThread();
+    return NS_OK;
+  }
+private:
+  ThreadedDriver* mDriver;
+};
+
+void
+ThreadedDriver::Start()
+{
+  nsCOMPtr<nsIRunnable> event = new MediaStreamGraphInitThreadRunnable(this);
+  NS_NewNamedThread("MediaStreamGrph", getter_AddRefs(mThread), event);
+}
+
+void
+ThreadedDriver::Resume()
+{
+  Start();
+}
+
+void
+ThreadedDriver::Revive()
+{
+  STREAM_LOG(PR_LOG_DEBUG, ("AudioCallbackDriver reviving."));
+  // If we were switching, switch now. Otherwise, tell thread to run the main
+  // loop again.
+  if (mNextDriver) {
+    mNextDriver->SetGraphTime(this, mIterationStart, mIterationEnd,
+                               mStateComputedTime, mNextStateComputedTime);
+    mGraphImpl->SetCurrentDriver(mNextDriver);
+    mNextDriver->Start();
+  } else {
+    nsCOMPtr<nsIRunnable> event = new MediaStreamGraphInitThreadRunnable(this);
+    mThread->Dispatch(event, NS_DISPATCH_NORMAL);
+  }
+}
+
+void
+ThreadedDriver::Stop()
+{
+  NS_ASSERTION(NS_IsMainThread(), "Must be called on main thread");
+  // mGraph's thread is not running so it's OK to do whatever here
+  STREAM_LOG(PR_LOG_DEBUG, ("Stopping threads for MediaStreamGraph %p", this));
+
+  if (mThread) {
+    mThread->Shutdown();
+  }
+}
+
+SystemClockDriver::SystemClockDriver(MediaStreamGraphImpl* aGraphImpl)
+  : ThreadedDriver(aGraphImpl),
+    mInitialTimeStamp(TimeStamp::Now()),
+    mLastTimeStamp(TimeStamp::Now())
+{}
+
+SystemClockDriver::~SystemClockDriver()
+{ }
+
+void
+ThreadedDriver::RunThread()
+{
+  AutoProfilerUnregisterThread autoUnregister;
+
+  bool stillProcessing = true;
+  while (stillProcessing) {
+    GraphTime prevCurrentTime, nextCurrentTime;
+    GetIntervalForIteration(prevCurrentTime, nextCurrentTime);
+
+    mStateComputedTime = mNextStateComputedTime;
+    mNextStateComputedTime =
+      mGraphImpl->RoundUpToNextAudioBlock(
+        nextCurrentTime + mGraphImpl->MillisecondsToMediaTime(AUDIO_TARGET_MS));
+    STREAM_LOG(PR_LOG_DEBUG,
+               ("interval[%ld; %ld] state[%ld; %ld]",
+               (long)mIterationStart, (long)mIterationEnd,
+               (long)mStateComputedTime, (long)mNextStateComputedTime));
+
+    stillProcessing = mGraphImpl->OneIteration(prevCurrentTime,
+                                               nextCurrentTime,
+                                               StateComputedTime(),
+                                               mNextStateComputedTime);
+
+    if (mNextDriver && stillProcessing) {
+      STREAM_LOG(PR_LOG_DEBUG, ("Switching to AudioCallbackDriver"));
+      mNextDriver->SetGraphTime(this, mIterationStart, mIterationEnd,
+                                 mStateComputedTime, mNextStateComputedTime);
+      mGraphImpl->SetCurrentDriver(mNextDriver);
+      mNextDriver->Start();
+      return;
+    }
+  }
+}
+
+void
+SystemClockDriver::GetIntervalForIteration(GraphTime& aFrom, GraphTime& aTo)
+{
+  TimeStamp now = TimeStamp::Now();
+  aFrom = mIterationStart = IterationEnd();
+  aTo = mIterationEnd = mGraphImpl->SecondsToMediaTime((now - mCurrentTimeStamp).ToSeconds()) + IterationEnd();
+
+  mCurrentTimeStamp = now;
+
+  PR_LOG(gMediaStreamGraphLog, PR_LOG_DEBUG+1, ("Updating current time to %f (real %f, mStateComputedTime %f)",
+         mGraphImpl->MediaTimeToSeconds(aTo),
+         (now - mInitialTimeStamp).ToSeconds(),
+         mGraphImpl->MediaTimeToSeconds(StateComputedTime())));
+
+  if (mStateComputedTime < aTo) {
+    STREAM_LOG(PR_LOG_WARNING, ("Media graph global underrun detected"));
+    aTo = mIterationEnd = mStateComputedTime;
+  }
+
+  if (aFrom >= aTo) {
+    NS_ASSERTION(aFrom == aTo, "Time can't go backwards!");
+    // This could happen due to low clock resolution, maybe?
+    STREAM_LOG(PR_LOG_DEBUG, ("Time did not advance"));
+  }
+}
+
+GraphTime
+SystemClockDriver::GetCurrentTime()
+{
+  return IterationEnd();
+}
+
+TimeStamp
+OfflineClockDriver::GetCurrentTimeStamp()
+{
+  MOZ_CRASH("This driver does not support getting the current timestamp.");
+  return TimeStamp();
+}
+
+void
+SystemClockDriver::WaitForNextIteration()
+{
+  mGraphImpl->GetMonitor().AssertCurrentThreadOwns();
+
+  PRIntervalTime timeout = PR_INTERVAL_NO_TIMEOUT;
+  TimeStamp now = TimeStamp::Now();
+  if (mNeedAnotherIteration) {
+    int64_t timeoutMS = MEDIA_GRAPH_TARGET_PERIOD_MS -
+      int64_t((now - mCurrentTimeStamp).ToMilliseconds());
+    // Make sure timeoutMS doesn't overflow 32 bits by waking up at
+    // least once a minute, if we need to wake up at all
+    timeoutMS = std::max<int64_t>(0, std::min<int64_t>(timeoutMS, 60*1000));
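+    // For example, with the 10 ms MEDIA_GRAPH_TARGET_PERIOD_MS target: if
+    // 3 ms have elapsed since mCurrentTimeStamp we wait ~7 ms; a negative
+    // result wakes us immediately; and an oversized value is capped at one
+    // minute.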
+    timeout = PR_MillisecondsToInterval(uint32_t(timeoutMS));
+    STREAM_LOG(PR_LOG_DEBUG+1, ("Waiting for next iteration; at %f, timeout=%f", (now - mInitialTimeStamp).ToSeconds(), timeoutMS/1000.0));
+    mWaitState = WAITSTATE_WAITING_FOR_NEXT_ITERATION;
+  } else {
+    mWaitState = WAITSTATE_WAITING_INDEFINITELY;
+  }
+  if (timeout > 0) {
+    mGraphImpl->GetMonitor().Wait(timeout);
+    STREAM_LOG(PR_LOG_DEBUG+1, ("Resuming after timeout; at %f, elapsed=%f",
+          (TimeStamp::Now() - mInitialTimeStamp).ToSeconds(),
+          (TimeStamp::Now() - now).ToSeconds()));
+  }
+
+  mWaitState = WAITSTATE_RUNNING;
+  mNeedAnotherIteration = false;
+}
+
+void
+SystemClockDriver::WakeUp()
+{
+  mGraphImpl->GetMonitor().AssertCurrentThreadOwns();
+  mWaitState = WAITSTATE_WAKING_UP;
+  mGraphImpl->GetMonitor().Notify();
+}
+
+OfflineClockDriver::OfflineClockDriver(MediaStreamGraphImpl* aGraphImpl, GraphTime aSlice)
+  : ThreadedDriver(aGraphImpl),
+    mSlice(aSlice)
+{
+
+}
+
+class MediaStreamGraphShutdownThreadRunnable2 : public nsRunnable {
+public:
+  explicit MediaStreamGraphShutdownThreadRunnable2(nsIThread* aThread)
+    : mThread(aThread)
+  {
+  }
+  NS_IMETHOD Run()
+  {
+    MOZ_ASSERT(NS_IsMainThread());
+    mThread->Shutdown();
+    return NS_OK;
+  }
+private:
+  nsRefPtr<nsIThread> mThread;
+};
+
+OfflineClockDriver::~OfflineClockDriver()
+{
+  // transfer the ownership of mThread to the event
+  nsCOMPtr<nsIRunnable> event = new MediaStreamGraphShutdownThreadRunnable2(mThread);
+  mThread = nullptr;
+  NS_DispatchToMainThread(event);
+}
+
+void
+OfflineClockDriver::GetIntervalForIteration(GraphTime& aFrom, GraphTime& aTo)
+{
+  aFrom = mIterationStart = IterationEnd();
+  aTo = mIterationEnd = IterationEnd() + mGraphImpl->MillisecondsToMediaTime(mSlice);
+
+  if (mStateComputedTime < aTo) {
+    STREAM_LOG(PR_LOG_WARNING, ("Media graph global underrun detected"));
+    aTo = mIterationEnd = mStateComputedTime;
+  }
+
+  if (aFrom >= aTo) {
+    NS_ASSERTION(aFrom == aTo, "Time can't go backwards!");
+    // This could happen due to low clock resolution, maybe?
+    STREAM_LOG(PR_LOG_DEBUG, ("Time did not advance"));
+  }
+}
+
+GraphTime
+OfflineClockDriver::GetCurrentTime()
+{
+  return mIterationEnd;
+}
+
+
+void
+OfflineClockDriver::WaitForNextIteration()
+{
+  // No op: we want to go as fast as possible when we are offline
+}
+
+void
+OfflineClockDriver::WakeUp()
+{
+  MOZ_ASSERT(false, "An offline graph should not have to wake up.");
+}
+
+
+NS_IMETHODIMP
+AsyncCubebTask::Run()
+{
+  MOZ_ASSERT(mThread);
+  if (NS_IsMainThread()) {
+    mThread->Shutdown(); // can't shutdown from the thread itself, darn
+    // Don't null out mThread!
+    // See bug 999104: we must hold a ref to the thread across Dispatch(),
+    // since the internal mThread ref could be released while processing
+    // the Dispatch(), and Dispatch/PutEvent itself doesn't hold a ref; it
+    // assumes the caller does.
+    return NS_OK;
+  }
+
+  MOZ_ASSERT(mDriver);
+
+  switch(mOperation) {
+    case AsyncCubebOperation::INIT:
+      mDriver->Init();
+      break;
+    case AsyncCubebOperation::SHUTDOWN:
+      mDriver->Stop();
+      mDriver = nullptr;
+      break;
+    case AsyncCubebOperation::SLEEP: {
+      {
+        MonitorAutoLock mon(mDriver->mGraphImpl->GetMonitor());
+        // We might just have been awoken
+        if (mDriver->mNeedAnotherIteration) {
+          mDriver->mPauseRequested = false;
+          mDriver->mWaitState = AudioCallbackDriver::WAITSTATE_RUNNING;
+          break;
+        }
+        mDriver->Stop();
+        mDriver->mWaitState = AudioCallbackDriver::WAITSTATE_WAITING_INDEFINITELY;
+        mDriver->mPauseRequested = false;
+        mDriver->mGraphImpl->GetMonitor().Wait(PR_INTERVAL_NO_TIMEOUT);
+      }
+      STREAM_LOG(PR_LOG_DEBUG, ("Restarting audio stream from sleep."));
+      mDriver->StartStream();
+      break;
+    }
+    default:
+      MOZ_CRASH("Operation not implemented.");
+  }
+
+  // and now kill this thread
+  NS_DispatchToMainThread(this);
+
+  return NS_OK;
+}
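+
+// Lifecycle of an AsyncCubebTask (summarizing the code above): Run() first
+// executes on the "CubebOperation" thread (INIT, SHUTDOWN or SLEEP), then
+// re-dispatches itself to the main thread, where the NS_IsMainThread() branch
+// shuts the helper thread down.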
+
+AudioCallbackDriver::AudioCallbackDriver(MediaStreamGraphImpl* aGraphImpl, dom::AudioChannel aChannel)
+  : GraphDriver(aGraphImpl)
+  , mStarted(false)
+  , mAudioChannel(aChannel)
+  , mInCallback(false)
+  , mPauseRequested(false)
+{
+  STREAM_LOG(PR_LOG_DEBUG, ("AudioCallbackDriver ctor for graph %p", aGraphImpl));
+}
+
+AudioCallbackDriver::~AudioCallbackDriver()
+{}
+
+void
+AudioCallbackDriver::Init()
+{
+  cubeb_stream_params params;
+  uint32_t latency;
+
+  MOZ_ASSERT(!NS_IsMainThread(),
+      "This is blocking and should never run on the main thread.");
+
+  mSampleRate = params.rate = CubebUtils::PreferredSampleRate();
+
+#if defined(__ANDROID__)
+#if defined(MOZ_B2G)
+  params.stream_type = CubebUtils::ConvertChannelToCubebType(mAudioChannel);
+#else
+  params.stream_type = CUBEB_STREAM_TYPE_MUSIC;
+#endif
+  if (params.stream_type == CUBEB_STREAM_TYPE_MAX) {
+    NS_WARNING("Bad stream type");
+    return;
+  }
+#else
+  (void)mAudioChannel;
+#endif
+
+  params.channels = mGraphImpl->AudioChannelCount();
+  if (AUDIO_OUTPUT_FORMAT == AUDIO_FORMAT_S16) {
+    params.format = CUBEB_SAMPLE_S16NE;
+  } else {
+    params.format = CUBEB_SAMPLE_FLOAT32NE;
+  }
+
+  if (cubeb_get_min_latency(CubebUtils::GetCubebContext(), params, &latency) != CUBEB_OK) {
+    NS_WARNING("Could not get minimal latency from cubeb.");
+    return;
+  }
+
+  cubeb_stream* stream;
+  if (cubeb_stream_init(CubebUtils::GetCubebContext(), &stream,
+                        "AudioCallbackDriver", params, latency,
+                        DataCallback_s, StateCallback_s, this) == CUBEB_OK) {
+    mAudioStream.own(stream);
+  } else {
+    NS_WARNING("Could not create a cubeb stream for MediaStreamGraph.");
+    return;
+  }
+
+  cubeb_stream_register_device_changed_callback(mAudioStream,
+                                                AudioCallbackDriver::DeviceChangedCallback_s);
+
+  StartStream();
+
+  STREAM_LOG(PR_LOG_DEBUG, ("AudioCallbackDriver started."));
+}
+
+
+void
+AudioCallbackDriver::Destroy()
+{
+  STREAM_LOG(PR_LOG_DEBUG, ("AudioCallbackDriver destroyed."));
+  mAudioStream.reset();
+}
+
+void
+AudioCallbackDriver::Resume()
+{
+  STREAM_LOG(PR_LOG_DEBUG, ("Resuming audio threads for MediaStreamGraph %p", mGraphImpl));
+  if (cubeb_stream_start(mAudioStream) != CUBEB_OK) {
+    NS_WARNING("Could not start cubeb stream for MSG.");
+  }
+}
+
+void
+AudioCallbackDriver::Start()
+{
+  // If this is running on the main thread, we can't open the stream directly,
+  // because it is a blocking operation.
+  if (NS_IsMainThread()) {
+    STREAM_LOG(PR_LOG_DEBUG, ("Starting audio threads for MediaStreamGraph %p from a new thread.", mGraphImpl));
+    nsRefPtr<AsyncCubebTask> initEvent =
+      new AsyncCubebTask(this, AsyncCubebTask::INIT);
+    initEvent->Dispatch();
+  } else {
+    STREAM_LOG(PR_LOG_DEBUG, ("Starting audio threads for MediaStreamGraph %p from the previous driver's thread", mGraphImpl));
+    Init();
+
+    if (mPreviousDriver) {
+      nsCOMPtr<nsIRunnable> event =
+        new MediaStreamGraphShutdownThreadRunnable(mPreviousDriver);
+      mPreviousDriver = nullptr;
+      NS_DispatchToMainThread(event);
+    }
+  }
+}
+
+void
+AudioCallbackDriver::StartStream()
+{
+  if (cubeb_stream_start(mAudioStream) != CUBEB_OK) {
+    MOZ_CRASH("Could not start cubeb stream for MSG.");
+  }
+
+  {
+    MonitorAutoLock mon(mGraphImpl->GetMonitor());
+    mStarted = true;
+    mWaitState = WAITSTATE_RUNNING;
+  }
+}
+
+void
+AudioCallbackDriver::Stop()
+{
+  if (cubeb_stream_stop(mAudioStream) != CUBEB_OK) {
+    NS_WARNING("Could not stop cubeb stream for MSG.");
+  }
+}
+
+void
+AudioCallbackDriver::Revive()
+{
+  STREAM_LOG(PR_LOG_DEBUG, ("AudioCallbackDriver reviving."));
+  // If we were switching, switch now. Otherwise, start the audio thread again.
+  if (mNextDriver) {
+    mNextDriver->SetGraphTime(this, mIterationStart, mIterationEnd,
+                               mStateComputedTime, mNextStateComputedTime);
+    mGraphImpl->SetCurrentDriver(mNextDriver);
+    mNextDriver->Start();
+  } else {
+    Init();
+    Start();
+  }
+}
+
+void
+AudioCallbackDriver::GetIntervalForIteration(GraphTime& aFrom,
+                                             GraphTime& aTo)
+{
+}
+
+GraphTime
+AudioCallbackDriver::GetCurrentTime()
+{
+  uint64_t position = 0;
+
+  if (cubeb_stream_get_position(mAudioStream, &position) != CUBEB_OK) {
+    NS_WARNING("Could not get current time from cubeb.");
+  }
+
+  return mSampleRate * position;
+}
+
+void AudioCallbackDriver::WaitForNextIteration()
+{
+#if 0
+  mGraphImpl->GetMonitor().AssertCurrentThreadOwns();
+
+  // We can't block on the monitor in the audio callback, so we kick off a new
+  // thread that will pause the audio stream, and restart it when unblocked.
+  // We don't want to sleep when we haven't started the driver yet.
+  if (!mNeedAnotherIteration && mAudioStream && mGraphImpl->Running()) {
+    STREAM_LOG(PR_LOG_DEBUG+1, ("AudioCallbackDriver going to sleep"));
+    mPauseRequested = true;
+    nsRefPtr<AsyncCubebTask> sleepEvent =
+      new AsyncCubebTask(this, AsyncCubebTask::SLEEP);
+    sleepEvent->Dispatch();
+  }
+#endif
+}
+
+void
+AudioCallbackDriver::WakeUp()
+{
+  mGraphImpl->GetMonitor().AssertCurrentThreadOwns();
+  mGraphImpl->GetMonitor().Notify();
+}
+
+/* static */ long
+AudioCallbackDriver::DataCallback_s(cubeb_stream* aStream,
+                                    void* aUser, void* aBuffer,
+                                    long aFrames)
+{
+  AudioCallbackDriver* driver = reinterpret_cast<AudioCallbackDriver*>(aUser);
+  return driver->DataCallback(static_cast<AudioDataValue*>(aBuffer), aFrames);
+}
+
+/* static */ void
+AudioCallbackDriver::StateCallback_s(cubeb_stream* aStream, void * aUser,
+                                     cubeb_state aState)
+{
+  AudioCallbackDriver* driver = reinterpret_cast<AudioCallbackDriver*>(aUser);
+  driver->StateCallback(aState);
+}
+
+/* static */ void
+AudioCallbackDriver::DeviceChangedCallback_s(void* aUser)
+{
+  AudioCallbackDriver* driver = reinterpret_cast<AudioCallbackDriver*>(aUser);
+  driver->DeviceChangedCallback();
+}
+
+bool AudioCallbackDriver::InCallback() {
+  MonitorAutoLock mon(mGraphImpl->GetMonitor());
+  return mInCallback;
+}
+
+AudioCallbackDriver::AutoInCallback::AutoInCallback(AudioCallbackDriver* aDriver)
+  : mDriver(aDriver)
+{
+  MonitorAutoLock mon(mDriver->mGraphImpl->GetMonitor());
+  mDriver->mInCallback = true;
+}
+
+AudioCallbackDriver::AutoInCallback::~AutoInCallback() {
+  MonitorAutoLock mon(mDriver->mGraphImpl->GetMonitor());
+  mDriver->mInCallback = false;
+}
+
+long
+AudioCallbackDriver::DataCallback(AudioDataValue* aBuffer, long aFrames)
+{
+  bool stillProcessing;
+
+  if (mPauseRequested) {
+    PodZero(aBuffer, aFrames * mGraphImpl->AudioChannelCount());
+    return aFrames;
+  }
+
+  DebugOnly<AutoInCallback> aic(this);
+
+  if (mStateComputedTime == 0) {
+    MonitorAutoLock mon(mGraphImpl->GetMonitor());
+    // Because this function is called during cubeb_stream_init (to prefill the
+    // audio buffers), it can be that we don't have a message here (because this
+    // driver is the first one for this graph), and the graph would exit. Simply
+    // return here until we have messages.
+    if (!mGraphImpl->MessagesQueued()) {
+      PodZero(aBuffer, aFrames * mGraphImpl->AudioChannelCount());
+      return aFrames;
+    }
+    mGraphImpl->SwapMessageQueues();
+  }
+
+  uint32_t durationMS = aFrames * 1000 / mSampleRate;
+
+  // For now, simply average the duration with the previous
+  // duration so there is some damping against sudden changes.
+  if (!mIterationDurationMS) {
+    mIterationDurationMS = durationMS;
+  } else {
+    mIterationDurationMS += durationMS;
+    mIterationDurationMS /= 2;
+  }
+
+  mBuffer.SetBuffer(aBuffer, aFrames);
+
+  mScratchBuffer.Empty(mBuffer);
+
+  mStateComputedTime = mNextStateComputedTime;
+
+  // State computed time is decided by the audio callback's buffer length. We
+  // compute the iteration start and end from there, trying to keep the amount
+  // of buffering in the graph constant.
+  mNextStateComputedTime =
+    mGraphImpl->RoundUpToNextAudioBlock(mStateComputedTime + mBuffer.Available());
+
+  mIterationStart = mIterationEnd;
+  // inGraph is the number of audio frames there is between the state time and
+  // the current time, i.e. the maximum theoretical length of the interval we
+  // could use as [mIterationStart; mIterationEnd].
+  GraphTime inGraph = mStateComputedTime - mIterationStart;
+  // We want the interval [mIterationStart; mIterationEnd] to be before the
+  // interval [mStateComputedTime; mNextStateComputedTime]. We also want
+  // the distance between these intervals to be roughly equivalent each time, to
+  // ensure there is no clock drift between current time and state time. Since
+  // we can't act on the state time because we have to fill the audio buffer, we
+  // reclock the current time against the state time, here.
+  mIterationEnd = mIterationStart + 0.8 * inGraph;
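+  // Worked example with illustrative numbers: if mStateComputedTime is 512
+  // ticks ahead of mIterationStart, inGraph is 512 and mIterationEnd advances
+  // by ~410 ticks, keeping ~20% of the interval in reserve so the current
+  // time never catches up with the state time.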
+
+  STREAM_LOG(PR_LOG_DEBUG, ("interval[%ld; %ld] state[%ld; %ld] (frames: %ld) (durationMS: %u) (duration ticks: %ld)\n",
+             (long)mIterationStart, (long)mIterationEnd,
+             (long)mStateComputedTime, (long)mNextStateComputedTime,
+             (long)aFrames, (uint32_t)durationMS,
+             (long)(mNextStateComputedTime - mStateComputedTime)));
+
+  mCurrentTimeStamp = TimeStamp::Now();
+
+  if (mStateComputedTime < mIterationEnd) {
+    STREAM_LOG(PR_LOG_WARNING, ("Media graph global underrun detected"));
+    mIterationEnd = mStateComputedTime;
+  }
+
+  stillProcessing = mGraphImpl->OneIteration(mIterationStart,
+                                             mIterationEnd,
+                                             mStateComputedTime,
+                                             mNextStateComputedTime);
+
+  mBuffer.BufferFilled();
+
+  if (mNextDriver && stillProcessing) {
+    {
+      // If the audio stream has not been started by the previous driver or
+      // the graph itself, keep it alive.
+      MonitorAutoLock mon(mGraphImpl->GetMonitor());
+      if (!IsStarted()) {
+        return aFrames;
+      }
+    }
+    STREAM_LOG(PR_LOG_DEBUG, ("Switching to system driver."));
+    mNextDriver->SetGraphTime(this, mIterationStart, mIterationEnd,
+                               mStateComputedTime, mNextStateComputedTime);
+    mGraphImpl->SetCurrentDriver(mNextDriver);
+    mNextDriver->Start();
+    // Returning less than aFrames starts the draining and eventually stops the
+    // audio thread. This function will never get called again.
+    return aFrames - 1;
+  }
+
+  if (!stillProcessing) {
+    STREAM_LOG(PR_LOG_DEBUG, ("Stopping audio thread for MediaStreamGraph %p", this));
+    return aFrames - 1;
+  }
+  return aFrames;
+}
+
+void
+AudioCallbackDriver::StateCallback(cubeb_state aState)
+{
+  STREAM_LOG(PR_LOG_DEBUG, ("AudioCallbackDriver State: %d", aState));
+}
+
+void
+AudioCallbackDriver::MixerCallback(AudioDataValue* aMixedBuffer,
+                                   AudioSampleFormat aFormat,
+                                   uint32_t aChannels,
+                                   uint32_t aFrames,
+                                   uint32_t aSampleRate)
+{
+  uint32_t toWrite = mBuffer.Available();
+
+  if (!mBuffer.Available()) {
+    NS_WARNING("MediaStreamGraph SpillBuffer full, expect frame drop.");
+  }
+
+  MOZ_ASSERT(mBuffer.Available() <= aFrames);
+
+  mBuffer.WriteFrames(aMixedBuffer, mBuffer.Available());
+  MOZ_ASSERT(mBuffer.Available() == 0, "Missing frames to fill audio callback's buffer.");
+
+  DebugOnly<uint32_t> written = mScratchBuffer.Fill(aMixedBuffer + toWrite * aChannels, aFrames - toWrite);
+  NS_WARN_IF_FALSE(written == aFrames - toWrite, "Dropping frames.");
+}
+
+void AudioCallbackDriver::PanOutputIfNeeded(bool aMicrophoneActive)
+{
+#ifdef XP_MACOSX
+  cubeb_device* out;
+  int rv;
+  char name[128];
+  size_t length = sizeof(name);
+
+  rv = sysctlbyname("hw.model", name, &length, NULL, 0);
+  if (rv) {
+    return;
+  }
+
+  if (!strncmp(name, "MacBookPro", 10)) {
+    if (cubeb_stream_get_current_device(mAudioStream, &out) == CUBEB_OK) {
+      // Check whether we are currently playing through the internal
+      // speaker ("ispk").
+      if (!strcmp(out->output_name, "ispk")) {
+        // Pan everything to the right speaker.
+        if (aMicrophoneActive) {
+          if (cubeb_stream_set_panning(mAudioStream, 1.0) != CUBEB_OK) {
+            NS_WARNING("Could not pan audio output to the right.");
+          }
+        } else {
+          if (cubeb_stream_set_panning(mAudioStream, 0.0) != CUBEB_OK) {
+            NS_WARNING("Could not pan audio output to the center.");
+          }
+        }
+      } else {
+        if (cubeb_stream_set_panning(mAudioStream, 0.0) != CUBEB_OK) {
+          NS_WARNING("Could not pan audio output to the center.");
+        }
+      }
+      cubeb_stream_device_destroy(mAudioStream, out);
+    }
+  }
+#endif
+}
+
+void
+AudioCallbackDriver::DeviceChangedCallback() {
+  MonitorAutoLock mon(mGraphImpl->GetMonitor());
+  PanOutputIfNeeded(mMicrophoneActive);
+}
+
+void
+AudioCallbackDriver::SetMicrophoneActive(bool aActive)
+{
+  MonitorAutoLock mon(mGraphImpl->GetMonitor());
+
+  mMicrophoneActive = aActive;
+
+  PanOutputIfNeeded(mMicrophoneActive);
+}
+
+uint32_t
+AudioCallbackDriver::IterationDuration()
+{
+  // The real fix would be to have an API in cubeb to give us the number. Short
+  // of that, we approximate it here. bug 1019507
+  return mIterationDurationMS;
+}
+
+bool
+AudioCallbackDriver::IsStarted() {
+  mGraphImpl->GetMonitor().AssertCurrentThreadOwns();
+  return mStarted;
+}
+
+
+} // namespace mozilla
new file mode 100644
--- /dev/null
+++ b/content/media/GraphDriver.h
@@ -0,0 +1,493 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef GRAPHDRIVER_H_
+#define GRAPHDRIVER_H_
+
+#include "nsAutoPtr.h"
+#include "nsAutoRef.h"
+#include "AudioBufferUtils.h"
+#include "AudioMixer.h"
+#include "AudioSegment.h"
+
+struct cubeb_stream;
+
+namespace mozilla {
+
+
+/**
+ * Assume we can run an iteration of the MediaStreamGraph loop in this much time
+ * or less.
+ * We try to run the control loop at this rate.
+ */
+static const int MEDIA_GRAPH_TARGET_PERIOD_MS = 10;
+
+/**
+ * Assume that we might miss our scheduled wakeup of the MediaStreamGraph by
+ * this much.
+ */
+static const int SCHEDULE_SAFETY_MARGIN_MS = 10;
+
+/**
+ * Try to have this much audio buffered in streams and queued to the hardware.
+ * The maximum delay to the end of the next control loop
+ * is 2*MEDIA_GRAPH_TARGET_PERIOD_MS + SCHEDULE_SAFETY_MARGIN_MS.
+ * There is no point in buffering more audio than this in a stream at any
+ * given time (until we add processing).
+ * This is not optimal yet.
+ */
+static const int AUDIO_TARGET_MS = 2*MEDIA_GRAPH_TARGET_PERIOD_MS +
+    SCHEDULE_SAFETY_MARGIN_MS;
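+// With the defaults above, AUDIO_TARGET_MS is 2*10 + 10 = 30 ms.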
+
+/**
+ * Try to have this much video buffered. Video frames are set
+ * near the end of the iteration of the control loop. The maximum delay
+ * to the setting of the next video frame is 2*MEDIA_GRAPH_TARGET_PERIOD_MS +
+ * SCHEDULE_SAFETY_MARGIN_MS. This is not optimal yet.
+ */
+static const int VIDEO_TARGET_MS = 2*MEDIA_GRAPH_TARGET_PERIOD_MS +
+    SCHEDULE_SAFETY_MARGIN_MS;
+
+class MediaStreamGraphImpl;
+class MessageBlock;
+
+/**
+ * Microseconds relative to the start of the graph timeline.
+ */
+typedef int64_t GraphTime;
+const GraphTime GRAPH_TIME_MAX = MEDIA_TIME_MAX;
+
+class AudioCallbackDriver;
+
+/**
+ * A driver is responsible for scheduling the processing, managing the thread,
+ * and providing a clock to a MediaStreamGraph. This is an
+ * abstract base class. A MediaStreamGraph can be driven by an
+ * OfflineClockDriver, if the graph is offline, or a SystemClockDriver, if the
+ * graph is real time.
+ * A MediaStreamGraph holds an owning reference to its driver.
+ */
+class GraphDriver
+{
+public:
+  GraphDriver(MediaStreamGraphImpl* aGraphImpl);
+
+  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(GraphDriver);
+  /* When the graph wakes up to do an iteration, this returns the range of time
+   * that will be processed. */
+  virtual void GetIntervalForIteration(GraphTime& aFrom,
+                                       GraphTime& aTo) = 0;
+  /* Returns the current time for this graph. This is the end of the current
+   * iteration. */
+  virtual GraphTime GetCurrentTime() = 0;
+  /* For real-time graphs, this waits until it's time to process more data. For
+   * offline graphs, this is a no-op. */
+  virtual void WaitForNextIteration() = 0;
+  /* Wakes up the graph if it is waiting. */
+  virtual void WakeUp() = 0;
+  virtual void Destroy() {}
+  /* Start the graph, init the driver, start the thread. */
+  virtual void Start() = 0;
+  /* Stop the graph, shutting down the thread. */
+  virtual void Stop() = 0;
+  /* Resume after a stop */
+  virtual void Resume() = 0;
+  /* Revive this driver, as more messages just arrived. */
+  virtual void Revive() = 0;
+  /* Rate at which the GraphDriver runs, in ms. This can either be user
+   * controlled (because we are using a {System,Offline}ClockDriver, and decide
+   * how often we want to wake up/how much we want to process per iteration),
+   * or it can be set indirectly by the latency of the audio backend and the
+   * number of buffers of that backend: say we have four buffers and 40 ms of
+   * latency; we will then get a callback approximately every 10 ms. */
+  virtual uint32_t IterationDuration() = 0;
+
+  /* Return whether we are switching or not. */
+  bool Switching() {
+    return mNextDriver || mPreviousDriver;
+  }
+
+  /**
+   * If we are running a real time graph, get the current time stamp to schedule
+   * video frames. This has to be reimplemented by real time drivers.
+   */
+  virtual TimeStamp GetCurrentTimeStamp() {
+    return mCurrentTimeStamp;
+  }
+
+  bool IsWaiting() {
+    return mWaitState == WAITSTATE_WAITING_INDEFINITELY ||
+           mWaitState == WAITSTATE_WAITING_FOR_NEXT_ITERATION;
+  }
+
+  bool IsWaitingIndefinitly() {
+    return mWaitState == WAITSTATE_WAITING_INDEFINITELY;
+  }
+
+  GraphTime IterationStart() {
+    return mIterationStart;
+  }
+
+  GraphTime IterationEnd() {
+    return mIterationEnd;
+  }
+
+  GraphTime StateComputedTime() {
+    return mStateComputedTime;
+  }
+
+  virtual void GetAudioBuffer(float** aBuffer, long& aFrames) {
+    MOZ_CRASH("This is not an Audio GraphDriver!");
+  }
+
+  virtual AudioCallbackDriver* AsAudioCallbackDriver() {
+    return nullptr;
+  }
+
+  /**
+   * Tell the driver it has to stop and return the current time of the graph, so
+   * another driver can start from the right point in time.
+   */
+  virtual void SwitchAtNextIteration(GraphDriver* aDriver);
+
+  /**
+   * Set the time for a graph, on a driver. This is used so a new driver just
+   * created can start at the right point in time.
+   */
+  void SetGraphTime(GraphDriver* aPreviousDriver,
+                    GraphTime aLastSwitchNextIterationStart,
+                    GraphTime aLastSwitchNextIterationEnd,
+                    GraphTime aLastSwitchStateComputedTime,
+                    GraphTime aLastSwitchNextStateComputedTime);
+
+  /**
+   * Whenever the graph has computed the time until it has all state
+   * (mStateComputedTime), it calls this to indicate the new time until which
+   * we have computed state.
+   */
+  void UpdateStateComputedTime(GraphTime aStateComputedTime);
+
+  /**
+   * Call this to indicate that another iteration of the control loop is
+   * required immediately. The monitor must already be held.
+   */
+  void EnsureImmediateWakeUpLocked();
+
+  /**
+   * Call this to indicate that another iteration of the control loop is
+   * required on its regular schedule. The monitor must not be held.
+   * This function has to be idempotent.
+   */
+  void EnsureNextIteration();
+
+  /**
+   * Same thing, but the caller must already hold the graph monitor.
+   */
+  void EnsureNextIterationLocked();
+
+protected:
+  // Time of the start of this graph iteration.
+  GraphTime mIterationStart;
+  // Time of the end of this graph iteration.
+  GraphTime mIterationEnd;
+  // Time, in the future, for which blocking has been computed.
+  GraphTime mStateComputedTime;
+  GraphTime mNextStateComputedTime;
+  // The MediaStreamGraphImpl that owns this driver. This has a lifetime longer
+  // than the driver, and will never be null.
+  MediaStreamGraphImpl* mGraphImpl;
+
+  // This enum specifies the wait state of the driver.
+  enum WaitState {
+    // RunThread() is running normally
+    WAITSTATE_RUNNING,
+    // RunThread() is paused waiting for its next iteration, which will
+    // happen soon
+    WAITSTATE_WAITING_FOR_NEXT_ITERATION,
+    // RunThread() is paused indefinitely waiting for something to change
+    WAITSTATE_WAITING_INDEFINITELY,
+    // Something has signaled RunThread() to wake up immediately,
+    // but it hasn't done so yet
+    WAITSTATE_WAKING_UP
+  };
+  WaitState mWaitState;
+
+  // True if the graph needs another iteration after the current iteration.
+  bool mNeedAnotherIteration;
+  TimeStamp mCurrentTimeStamp;
+  // This is non-null only when this driver has recently switched from another
+  // driver, and has not cleaned it up yet (for example because the audio stream
+  // is currently calling the callback during initialization).
+  nsRefPtr<GraphDriver> mPreviousDriver;
+  // This is non-null only when this driver is going to switch to another
+  // driver at the end of this iteration.
+  nsRefPtr<GraphDriver> mNextDriver;
+  virtual ~GraphDriver()
+  { }
+};
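+
+// Driver hand-over sketch (this is how GraphDriver.cpp uses the API above,
+// shown here for reference, with illustrative names):
+//   next->SetGraphTime(current, iterationStart, iterationEnd,
+//                      stateComputedTime, nextStateComputedTime);
+//   graphImpl->SetCurrentDriver(next);
+//   next->Start();
+// The new driver is then responsible for releasing the old one
+// (mPreviousDriver) off the main thread once it is up and running.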
+
+class MediaStreamGraphInitThreadRunnable;
+
+/**
+ * This class is a driver that manages its own thread.
+ */
+class ThreadedDriver : public GraphDriver
+{
+public:
+  ThreadedDriver(MediaStreamGraphImpl* aGraphImpl);
+  virtual ~ThreadedDriver();
+  virtual void Start() MOZ_OVERRIDE;
+  virtual void Stop() MOZ_OVERRIDE;
+  virtual void Resume() MOZ_OVERRIDE;
+  virtual void Revive() MOZ_OVERRIDE;
+  /**
+   * Runs main control loop on the graph thread. Normally a single invocation
+   * of this runs for the entire lifetime of the graph thread.
+   */
+  void RunThread();
+  friend class MediaStreamGraphInitThreadRunnable;
+  uint32_t IterationDuration() {
+    return MEDIA_GRAPH_TARGET_PERIOD_MS;
+  }
+protected:
+  nsCOMPtr<nsIThread> mThread;
+};
+
+/**
+ * A SystemClockDriver drives a MediaStreamGraph using a system clock, and waits
+ * using a monitor, between each iteration.
+ */
+class SystemClockDriver : public ThreadedDriver
+{
+public:
+  SystemClockDriver(MediaStreamGraphImpl* aGraphImpl);
+  virtual ~SystemClockDriver();
+  virtual void GetIntervalForIteration(GraphTime& aFrom,
+                                       GraphTime& aTo) MOZ_OVERRIDE;
+  virtual GraphTime GetCurrentTime() MOZ_OVERRIDE;
+  virtual void WaitForNextIteration() MOZ_OVERRIDE;
+  virtual void WakeUp() MOZ_OVERRIDE;
+
+
+private:
+  TimeStamp mInitialTimeStamp;
+  TimeStamp mLastTimeStamp;
+};
+
+/**
+ * An OfflineClockDriver runs the graph as fast as possible, without waiting
+ * between iteration.
+ */
+class OfflineClockDriver : public ThreadedDriver
+{
+public:
+  OfflineClockDriver(MediaStreamGraphImpl* aGraphImpl, GraphTime aSlice);
+  virtual ~OfflineClockDriver();
+  virtual void GetIntervalForIteration(GraphTime& aFrom,
+                                       GraphTime& aTo) MOZ_OVERRIDE;
+  virtual GraphTime GetCurrentTime() MOZ_OVERRIDE;
+  virtual void WaitForNextIteration() MOZ_OVERRIDE;
+  virtual void WakeUp() MOZ_OVERRIDE;
+  virtual TimeStamp GetCurrentTimeStamp() MOZ_OVERRIDE;
+
+private:
+  // Time, in GraphTime, for each iteration
+  GraphTime mSlice;
+};
+
+/**
+ * This is a graph driver that is based on callback functions called by the
+ * audio api. This ensures minimal audio latency, because it means there is no
+ * buffering happening: the audio is generated inside the callback.
+ *
+ * This design is less flexible than running our own thread:
+ * - We have no control over the thread:
+ * - It cannot block, and it has to run for a shorter amount of time than the
+ *   buffer it is going to fill, or an under-run is going to occur (short burst
+ *   of silence in the final audio output).
+ * - We can't know for sure when the callback function is going to be called
+ *   (although we compute an estimation so we can schedule video frames)
+ * - Creating and shutting the thread down is a blocking operation that can
+ *   take _seconds_ in some cases (because IPC has to be set up, and
+ *   sometimes hardware components are involved and need to be warmed up)
+ * - We have no control on how much audio we generate, we have to return exactly
+ *   the number of frames asked for by the callback. Since for the Web Audio
+ *   API, we have to do block processing at 128 frames per block, we need to
+ *   keep a little spill buffer to store the extra frames.
+ */
+class AudioCallbackDriver : public GraphDriver,
+                            public MixerCallbackReceiver
+{
+public:
+  AudioCallbackDriver(MediaStreamGraphImpl* aGraphImpl,
+                      dom::AudioChannel aChannel = dom::AudioChannel::Normal);
+  virtual ~AudioCallbackDriver();
+
+  virtual void Destroy() MOZ_OVERRIDE;
+  virtual void Start() MOZ_OVERRIDE;
+  virtual void Stop() MOZ_OVERRIDE;
+  virtual void Resume() MOZ_OVERRIDE;
+  virtual void Revive() MOZ_OVERRIDE;
+  virtual void GetIntervalForIteration(GraphTime& aFrom,
+                                       GraphTime& aTo) MOZ_OVERRIDE;
+  virtual GraphTime GetCurrentTime() MOZ_OVERRIDE;
+  virtual void WaitForNextIteration() MOZ_OVERRIDE;
+  virtual void WakeUp() MOZ_OVERRIDE;
+
+  /* Static wrapper function cubeb calls back. */
+  static long DataCallback_s(cubeb_stream * aStream,
+                             void * aUser, void * aBuffer,
+                             long aFrames);
+  static void StateCallback_s(cubeb_stream* aStream, void * aUser,
+                              cubeb_state aState);
+  static void DeviceChangedCallback_s(void * aUser);
+  /* This function is called by the underlying audio backend when a refill is
+   * needed. This is what drives the whole graph when it is used to output
+   * audio. If the return value is exactly aFrames, this function will get
+   * called again. If it is less than aFrames, the stream will go in draining
+   * mode, and this function will not be called again. */
+  long DataCallback(AudioDataValue* aBuffer, long aFrames);
+  /* This function is called by the underlying audio backend, but is only used
+   * for informational purposes at the moment. */
+  void StateCallback(cubeb_state aState);
+  /* This is an approximation of the number of milliseconds there are between two
+   * iterations of the graph. */
+  uint32_t IterationDuration();
+
+  /* This function gets called when the graph has produced the audio frames for
+   * this iteration. */
+  virtual void MixerCallback(AudioDataValue* aMixedBuffer,
+                             AudioSampleFormat aFormat,
+                             uint32_t aChannels,
+                             uint32_t aFrames,
+                             uint32_t aSampleRate) MOZ_OVERRIDE;
+
+  virtual AudioCallbackDriver* AsAudioCallbackDriver() {
+    return this;
+  }
+
+  /**
+   * Whether the audio callback is processing. This is for asserting only.
+   */
+  bool InCallback();
+
+  /* Whether the underlying cubeb stream has been started. See comment for
+   * mStarted for details. */
+  bool IsStarted();
+
+  /* Tell the driver whether this process is using a microphone or not. This is
+   * thread safe. */
+  void SetMicrophoneActive(bool aActive);
+private:
+  /**
+   * On certain MacBookPro, the microphone is located near the left speaker.
+   * We need to pan the sound output to the right speaker if we are using the
+   * mic and the built-in speaker, or we will have terrible echo.  */
+  void PanOutputIfNeeded(bool aMicrophoneActive);
+  /**
+   * This is called when the output device used by the cubeb stream changes. */
+  void DeviceChangedCallback();
+  /* Start the cubeb stream */
+  void StartStream();
+  friend class AsyncCubebTask;
+  void Init();
+  /* MediaStreamGraphs are always down/up mixed to stereo for now. */
+  static const uint32_t ChannelCount = 2;
+  /* The size of this buffer comes from the fact that some audio backends can
+   * call back with a number of frames lower than one block (128 frames), so we
+   * need to keep at most two blocks in the SpillBuffer, because we always round
+   * up to block boundaries during an iteration. */
+  SpillBuffer<AudioDataValue, WEBAUDIO_BLOCK_SIZE * 2, ChannelCount> mScratchBuffer;
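+  // For example (illustrative numbers): a callback asking for 96 frames makes
+  // the graph round up to one 128-frame block, leaving 32 frames to spill;
+  // in the worst case just under two blocks accumulate, hence the
+  // WEBAUDIO_BLOCK_SIZE * 2 capacity.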
+  /* Wrapper to ensure we write exactly the number of frames we need in the
+   * audio buffer cubeb passes us. */
+  AudioCallbackBufferWrapper<AudioDataValue, ChannelCount> mBuffer;
+  /* cubeb stream for this graph. This is guaranteed to be non-null after Init()
+   * has been called. */
+  nsAutoRef<cubeb_stream> mAudioStream;
+  /* The sample rate for the aforementioned cubeb stream. */
+  uint32_t mSampleRate;
+  /* Approximation of the time between two callbacks. This is used to schedule
+   * video frames. This is in milliseconds. */
+  uint32_t mIterationDurationMS;
+  /* cubeb_stream_init calls the audio callback to prefill the buffers. The
+   * previous driver has to be kept alive until the audio stream has been
+   * started, because it is responsible for calling cubeb_stream_start, so we delay
+   * the cleanup of the previous driver until it has started the audio stream.
+   * Otherwise, there is a race where we kill the previous driver thread
+   * between cubeb_stream_init and cubeb_stream_start,
+   * and callbacks after the prefill never get called.
+   * This is written on the previous driver's thread (if switching) or main
+   * thread (if this driver is the first one).
+   * This is read on previous driver's thread (during callbacks from
+   * cubeb_stream_init) and the audio thread (when switching away from this
+   * driver back to a SystemClockDriver).
+   * This is synchronized by the graph's monitor.
+   */
+  bool mStarted;
+
+  struct AutoInCallback
+  {
+    AutoInCallback(AudioCallbackDriver* aDriver);
+    ~AutoInCallback();
+    AudioCallbackDriver* mDriver;
+  };
+
+  /* Thread for off-main-thread initialization and
+   * shutdown of the audio stream. */
+  nsCOMPtr<nsIThread> mInitShutdownThread;
+  dom::AudioChannel mAudioChannel;
+  /* This can only be accessed with the graph's monitor held. */
+  bool mInCallback;
+  /* A thread has been created to pause and restart the audio stream, but it
+   * has not done so yet. This indicates that the callback should return
+   * early. */
+  bool mPauseRequested;
+  /**
+   * True if microphone is being used by this process. This is synchronized by
+   * the graph's monitor. */
+  bool mMicrophoneActive;
+};
+
+class AsyncCubebTask : public nsRunnable
+{
+public:
+  enum AsyncCubebOperation {
+    INIT,
+    SHUTDOWN,
+    SLEEP
+  };
+
+
+  AsyncCubebTask(AudioCallbackDriver* aDriver, AsyncCubebOperation aOperation)
+    : mDriver(aDriver),
+      mOperation(aOperation)
+  {
+    MOZ_ASSERT(mDriver->mAudioStream || aOperation == INIT, "No audio stream!");
+  }
+
+  nsresult Dispatch()
+  {
+    // Can't add 'this' as the event to run, since mThread may not be set yet
+    nsresult rv = NS_NewNamedThread("CubebOperation", getter_AddRefs(mThread));
+    if (NS_SUCCEEDED(rv)) {
+      // Note: event must not null out mThread!
+      rv = mThread->Dispatch(this, NS_DISPATCH_NORMAL);
+    }
+    return rv;
+  }
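+
+  // Typical use, as seen in AudioCallbackDriver::Start() in this patch:
+  //   nsRefPtr<AsyncCubebTask> initEvent =
+  //     new AsyncCubebTask(this, AsyncCubebTask::INIT);
+  //   initEvent->Dispatch();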
+
+protected:
+  virtual ~AsyncCubebTask() {};
+
+private:
+  NS_IMETHOD Run() MOZ_OVERRIDE MOZ_FINAL;
+  nsCOMPtr<nsIThread> mThread;
+  nsRefPtr<AudioCallbackDriver> mDriver;
+  AsyncCubebOperation mOperation;
+};
+
+}
+
+#endif // GRAPHDRIVER_H_
--- a/content/media/MediaDecoderOwner.h
+++ b/content/media/MediaDecoderOwner.h
@@ -134,20 +134,20 @@ public:
   // ImageContainer containing the video data.
   virtual VideoFrameContainer* GetVideoFrameContainer() = 0;
 
   // Called by the media decoder object, on the main thread,
   // when the connection between Rtsp server and client gets lost.
   virtual void ResetConnectionState() = 0;
 
 #ifdef MOZ_EME
-  // Dispatches a "needkey" event to the HTMLMediaElement, with the
+  // Dispatches a "encrypted" event to the HTMLMediaElement, with the
   // provided init data.
   // Main thread only.
-  virtual void DispatchNeedKey(const nsTArray<uint8_t>& aInitData,
-                               const nsAString& aInitDataType) = 0;
+  virtual void DispatchEncrypted(const nsTArray<uint8_t>& aInitData,
+                                 const nsAString& aInitDataType) = 0;
 #endif
 };
 
 }
 
 #endif
 
--- a/content/media/MediaDecoderStateMachine.cpp
+++ b/content/media/MediaDecoderStateMachine.cpp
@@ -1467,16 +1467,20 @@ void MediaDecoderStateMachine::NotifyDat
   }
 }
 
 void MediaDecoderStateMachine::Seek(const SeekTarget& aTarget)
 {
   NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
   ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
 
+  if (mState == DECODER_STATE_SHUTDOWN) {
+    return;
+  }
+
   // We need to be able to seek both at a transport level and at a media level
   // to seek.
   if (!mDecoder->IsMediaSeekable()) {
     DECODER_WARN("Seek() function should not be called on a non-seekable state machine");
     return;
   }
   // MediaDecoder::mPlayState should be SEEKING while we seek, and
   // in that case MediaDecoder shouldn't be calling us.
--- a/content/media/MediaStreamGraph.cpp
+++ b/content/media/MediaStreamGraph.cpp
@@ -56,43 +56,43 @@ MediaStreamGraphImpl::~MediaStreamGraphI
                "All streams should have been destroyed by messages from the main thread");
   STREAM_LOG(PR_LOG_DEBUG, ("MediaStreamGraph %p destroyed", this));
 }
 
 
 StreamTime
 MediaStreamGraphImpl::GetDesiredBufferEnd(MediaStream* aStream)
 {
-  StreamTime current = mCurrentTime - aStream->mBufferStartTime;
+  StreamTime current = IterationEnd() - aStream->mBufferStartTime;
   // When waking up media decoders, we need a longer safety margin, as it can
   // take more time to get new samples. A factor of two seems to work.
   return current +
       2 * MillisecondsToMediaTime(std::max(AUDIO_TARGET_MS, VIDEO_TARGET_MS));
 }
 
 void
 MediaStreamGraphImpl::FinishStream(MediaStream* aStream)
 {
   if (aStream->mFinished)
     return;
   STREAM_LOG(PR_LOG_DEBUG, ("MediaStream %p will finish", aStream));
   aStream->mFinished = true;
   aStream->mBuffer.AdvanceKnownTracksTime(STREAM_TIME_MAX);
   // Force at least one more iteration of the control loop, since we rely
-  // on UpdateCurrentTime to notify our listeners once the stream end
+  // on UpdateCurrentTimeForStreams to notify our listeners once the stream end
   // has been reached.
-  EnsureNextIteration();
+  CurrentDriver()->EnsureNextIteration();
 
   SetStreamOrderDirty();
 }
 
 void
 MediaStreamGraphImpl::AddStream(MediaStream* aStream)
 {
-  aStream->mBufferStartTime = mCurrentTime;
+  aStream->mBufferStartTime = IterationEnd();
   mStreams.AppendElement(aStream);
   STREAM_LOG(PR_LOG_DEBUG, ("Adding media stream %p to the graph", aStream));
 
   SetStreamOrderDirty();
 }
 
 void
 MediaStreamGraphImpl::RemoveStream(MediaStream* aStream)
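
To make the margin in GetDesiredBufferEnd() concrete, here is a self-contained sketch; the target constants and the media-time fraction bits are made-up stand-ins, not the tree's values.

#include <algorithm>
#include <cstdint>
#include <iostream>

using StreamTime = int64_t;

static const int64_t AUDIO_TARGET_MS = 20;  // Illustrative value.
static const int64_t VIDEO_TARGET_MS = 10;  // Illustrative value.

StreamTime MillisecondsToMediaTime(int64_t aMs) {
  return aMs << 14;  // Fraction bits are 14 in this sketch only.
}

StreamTime GetDesiredBufferEnd(StreamTime aIterationEnd,
                               StreamTime aBufferStartTime) {
  StreamTime current = aIterationEnd - aBufferStartTime;
  // Double the larger target so freshly woken decoders have time to refill.
  return current +
         2 * MillisecondsToMediaTime(std::max(AUDIO_TARGET_MS, VIDEO_TARGET_MS));
}

int main() {
  // 60 ms of elapsed stream time plus a 2 * 20 ms safety margin = 100 ms.
  std::cout << GetDesiredBufferEnd(MillisecondsToMediaTime(100),
                                   MillisecondsToMediaTime(40))
            << "\n";
}
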
@@ -142,18 +142,18 @@ MediaStreamGraphImpl::ExtractPendingInpu
   {
     MutexAutoLock lock(aStream->mMutex);
     if (aStream->mPullEnabled && !aStream->mFinished &&
         !aStream->mListeners.IsEmpty()) {
       // Compute how much stream time we'll need assuming we don't block
       // the stream at all between mBlockingDecisionsMadeUntilTime and
       // aDesiredUpToTime.
       StreamTime t =
-        GraphTimeToStreamTime(aStream, mStateComputedTime) +
-        (aDesiredUpToTime - mStateComputedTime);
+        GraphTimeToStreamTime(aStream, CurrentDriver()->StateComputedTime()) +
+        (aDesiredUpToTime - CurrentDriver()->StateComputedTime());
       STREAM_LOG(PR_LOG_DEBUG+1, ("Calling NotifyPull aStream=%p t=%f current end=%f", aStream,
                                   MediaTimeToSeconds(t),
                                   MediaTimeToSeconds(aStream->mBuffer.GetEnd())));
       if (t > aStream->mBuffer.GetEnd()) {
         *aEnsureNextIteration = true;
 #ifdef DEBUG
         if (aStream->mListeners.Length() == 0) {
           STREAM_LOG(PR_LOG_ERROR, ("No listeners in NotifyPull aStream=%p desired=%f current end=%f",
@@ -255,68 +255,68 @@ MediaStreamGraphImpl::UpdateBufferSuffic
     runnables[i].mTarget->Dispatch(runnables[i].mRunnable, 0);
   }
 }
 
 StreamTime
 MediaStreamGraphImpl::GraphTimeToStreamTime(MediaStream* aStream,
                                             GraphTime aTime)
 {
-  NS_ASSERTION(aTime <= mStateComputedTime,
+  MOZ_ASSERT(aTime <= CurrentDriver()->StateComputedTime(),
                "Don't ask about times where we haven't made blocking decisions yet");
-  if (aTime <= mCurrentTime) {
+  if (aTime <= IterationEnd()) {
     return std::max<StreamTime>(0, aTime - aStream->mBufferStartTime);
   }
-  GraphTime t = mCurrentTime;
+  GraphTime t = IterationEnd();
   StreamTime s = t - aStream->mBufferStartTime;
   while (t < aTime) {
     GraphTime end;
     if (!aStream->mBlocked.GetAt(t, &end)) {
       s += std::min(aTime, end) - t;
     }
     t = end;
   }
   return std::max<StreamTime>(0, s);
 }
 
 StreamTime
 MediaStreamGraphImpl::GraphTimeToStreamTimeOptimistic(MediaStream* aStream,
                                                       GraphTime aTime)
 {
-  GraphTime computedUpToTime = std::min(mStateComputedTime, aTime);
+  GraphTime computedUpToTime = std::min(CurrentDriver()->StateComputedTime(), aTime);
   StreamTime s = GraphTimeToStreamTime(aStream, computedUpToTime);
   return s + (aTime - computedUpToTime);
 }
 
 GraphTime
 MediaStreamGraphImpl::StreamTimeToGraphTime(MediaStream* aStream,
                                             StreamTime aTime, uint32_t aFlags)
 {
   if (aTime >= STREAM_TIME_MAX) {
     return GRAPH_TIME_MAX;
   }
-  MediaTime bufferElapsedToCurrentTime = mCurrentTime - aStream->mBufferStartTime;
+  MediaTime bufferElapsedToCurrentTime = IterationEnd() - aStream->mBufferStartTime;
   if (aTime < bufferElapsedToCurrentTime ||
       (aTime == bufferElapsedToCurrentTime && !(aFlags & INCLUDE_TRAILING_BLOCKED_INTERVAL))) {
     return aTime + aStream->mBufferStartTime;
   }
 
   MediaTime streamAmount = aTime - bufferElapsedToCurrentTime;
   NS_ASSERTION(streamAmount >= 0, "Can't answer queries before current time");
 
-  GraphTime t = mCurrentTime;
+  GraphTime t = IterationEnd();
   while (t < GRAPH_TIME_MAX) {
     if (!(aFlags & INCLUDE_TRAILING_BLOCKED_INTERVAL) && streamAmount == 0) {
       return t;
     }
     bool blocked;
     GraphTime end;
-    if (t < mStateComputedTime) {
+    if (t < CurrentDriver()->StateComputedTime()) {
       blocked = aStream->mBlocked.GetAt(t, &end);
-      end = std::min(end, mStateComputedTime);
+      end = std::min(end, CurrentDriver()->StateComputedTime());
     } else {
       blocked = false;
       end = GRAPH_TIME_MAX;
     }
     if (blocked) {
       t = end;
     } else {
       if (streamAmount == 0) {
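
The conversion loops above share one idea: stream time only advances over graph-time intervals where the stream is not blocked. A standalone sketch, with a plain interval list standing in for the tree's TimeVarying structures:

#include <algorithm>
#include <cstdint>
#include <vector>

using GraphTime = int64_t;
using StreamTime = int64_t;

struct Interval { GraphTime start, end; bool blocked; };

// Returns whether the stream is blocked at aTime, and the end of that run.
bool GetAt(const std::vector<Interval>& aIntervals, GraphTime aTime,
           GraphTime* aEnd) {
  for (const Interval& i : aIntervals) {
    if (aTime >= i.start && aTime < i.end) {
      *aEnd = i.end;
      return i.blocked;
    }
  }
  *aEnd = INT64_MAX;
  return false;
}

StreamTime GraphTimeToStreamTime(const std::vector<Interval>& aBlocked,
                                 GraphTime aIterationEnd,
                                 StreamTime aBufferStartTime,
                                 GraphTime aTime) {
  if (aTime <= aIterationEnd) {
    return std::max<StreamTime>(0, aTime - aBufferStartTime);
  }
  GraphTime t = aIterationEnd;
  StreamTime s = t - aBufferStartTime;
  while (t < aTime) {
    GraphTime end;
    if (!GetAt(aBlocked, t, &end)) {
      s += std::min(aTime, end) - t;  // Only unblocked time counts.
    }
    t = end;
  }
  return std::max<StreamTime>(0, s);
}

int main() {
  // Blocked from 100 to 150: those 50 ticks do not advance stream time.
  std::vector<Interval> blocked = {{0, 100, false}, {100, 150, true},
                                   {150, 300, false}};
  return GraphTimeToStreamTime(blocked, 50, 0, 200) == 150 ? 0 : 1;
}
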
@@ -329,134 +329,95 @@ MediaStreamGraphImpl::StreamTimeToGraphT
     }
   }
   return t;
 }
 
 GraphTime
 MediaStreamGraphImpl::GetAudioPosition(MediaStream* aStream)
 {
-  if (aStream->mAudioOutputStreams.IsEmpty()) {
-    return mCurrentTime;
-  }
-  int64_t positionInFrames = aStream->mAudioOutputStreams[0].mStream->GetPositionInFrames();
-  if (positionInFrames < 0) {
-    return mCurrentTime;
-  }
-  return aStream->mAudioOutputStreams[0].mAudioPlaybackStartTime +
-    RateConvertTicksRoundDown(GraphRate(), mSampleRate, positionInFrames);
+  /* This is correlated to the audio clock when using an AudioCallbackDriver,
+   * and uses a system timer otherwise. */
+  return IterationEnd();
+}
+
+GraphTime
+MediaStreamGraphImpl::IterationEnd()
+{
+  return CurrentDriver()->IterationEnd();
 }
 
 void
-MediaStreamGraphImpl::UpdateCurrentTime()
+MediaStreamGraphImpl::UpdateCurrentTimeForStreams(GraphTime aPrevCurrentTime, GraphTime aNextCurrentTime)
 {
-  GraphTime prevCurrentTime, nextCurrentTime;
-  if (mRealtime) {
-    TimeStamp now = TimeStamp::Now();
-    prevCurrentTime = mCurrentTime;
-    nextCurrentTime =
-      SecondsToMediaTime((now - mInitialTimeStamp).ToSeconds());
-
-    mCurrentTimeStamp = now;
-    STREAM_LOG(PR_LOG_DEBUG+1, ("Updating current time to %f (real %f, mStateComputedTime %f)",
-               MediaTimeToSeconds(nextCurrentTime),
-               (now - mInitialTimeStamp).ToSeconds(),
-               MediaTimeToSeconds(mStateComputedTime)));
-  } else {
-    prevCurrentTime = mCurrentTime;
-    nextCurrentTime = mCurrentTime + MillisecondsToMediaTime(MEDIA_GRAPH_TARGET_PERIOD_MS);
-    STREAM_LOG(PR_LOG_DEBUG+1, ("Updating offline current time to %f (mStateComputedTime %f)",
-               MediaTimeToSeconds(nextCurrentTime),
-               MediaTimeToSeconds(mStateComputedTime)));
-  }
-
-  if (mStateComputedTime < nextCurrentTime) {
-    STREAM_LOG(PR_LOG_WARNING, ("Media graph global underrun detected"));
-    if (mRealtime) {
-      // Adjust mInitialTimeStamp to remove the missed time.
-      mInitialTimeStamp += TimeDuration::
-        FromSeconds(MediaTimeToSeconds(nextCurrentTime - mStateComputedTime));
-    }
-    nextCurrentTime = mStateComputedTime;
-  }
-
-  if (prevCurrentTime >= nextCurrentTime) {
-    NS_ASSERTION(prevCurrentTime == nextCurrentTime, "Time can't go backwards!");
-    // This could happen due to low clock resolution, maybe?
-    STREAM_LOG(PR_LOG_DEBUG, ("Time did not advance"));
-    // There's not much left to do here, but the code below that notifies
-    // listeners that streams have ended still needs to run.
-  }
-
   nsTArray<MediaStream*> streamsReadyToFinish;
   nsAutoTArray<bool,800> streamHasOutput;
   streamHasOutput.SetLength(mStreams.Length());
   for (uint32_t i = 0; i < mStreams.Length(); ++i) {
     MediaStream* stream = mStreams[i];
 
     // Calculate blocked time and fire Blocked/Unblocked events
     GraphTime blockedTime = 0;
-    GraphTime t = prevCurrentTime;
+    GraphTime t = aPrevCurrentTime;
     // include |nextCurrentTime| to ensure NotifyBlockingChanged() is called
     // before NotifyEvent(this, EVENT_FINISHED) when |nextCurrentTime == stream end time|
-    while (t <= nextCurrentTime) {
+    while (t <= aNextCurrentTime) {
       GraphTime end;
       bool blocked = stream->mBlocked.GetAt(t, &end);
       if (blocked) {
-        blockedTime += std::min(end, nextCurrentTime) - t;
+        blockedTime += std::min(end, aNextCurrentTime) - t;
       }
       if (blocked != stream->mNotifiedBlocked) {
         for (uint32_t j = 0; j < stream->mListeners.Length(); ++j) {
           MediaStreamListener* l = stream->mListeners[j];
           l->NotifyBlockingChanged(this,
               blocked ? MediaStreamListener::BLOCKED : MediaStreamListener::UNBLOCKED);
         }
         stream->mNotifiedBlocked = blocked;
       }
       t = end;
     }
 
-    stream->AdvanceTimeVaryingValuesToCurrentTime(nextCurrentTime, blockedTime);
+
+    stream->AdvanceTimeVaryingValuesToCurrentTime(aNextCurrentTime, blockedTime);
     // Advance mBlocked last so that implementations of
     // AdvanceTimeVaryingValuesToCurrentTime can rely on the value of mBlocked.
-    stream->mBlocked.AdvanceCurrentTime(nextCurrentTime);
+    stream->mBlocked.AdvanceCurrentTime(aNextCurrentTime);
 
-    streamHasOutput[i] = blockedTime < nextCurrentTime - prevCurrentTime;
+    streamHasOutput[i] = blockedTime < aNextCurrentTime - aPrevCurrentTime;
     // Make this an assertion when bug 957832 is fixed.
     NS_WARN_IF_FALSE(!streamHasOutput[i] || !stream->mNotifiedFinished,
       "Shouldn't have already notified of finish *and* have output!");
 
     if (stream->mFinished && !stream->mNotifiedFinished) {
       streamsReadyToFinish.AppendElement(stream);
     }
     STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p bufferStartTime=%f blockedTime=%f",
                                 stream, MediaTimeToSeconds(stream->mBufferStartTime),
                                 MediaTimeToSeconds(blockedTime)));
   }
 
-  mCurrentTime = nextCurrentTime;
 
-  // Do these after setting mCurrentTime so that StreamTimeToGraphTime works properly.
   for (uint32_t i = 0; i < streamHasOutput.Length(); ++i) {
     if (!streamHasOutput[i]) {
       continue;
     }
     MediaStream* stream = mStreams[i];
     for (uint32_t j = 0; j < stream->mListeners.Length(); ++j) {
       MediaStreamListener* l = stream->mListeners[j];
-      l->NotifyOutput(this, mCurrentTime);
+      l->NotifyOutput(this, IterationEnd());
     }
   }
 
   for (uint32_t i = 0; i < streamsReadyToFinish.Length(); ++i) {
     MediaStream* stream = streamsReadyToFinish[i];
     // The stream is fully finished when all of its track data has been played
     // out.
-    if (mCurrentTime >=
-          stream->StreamTimeToGraphTime(stream->GetStreamBuffer().GetAllTracksEnd()))  {
+    if (IterationEnd() >=
+        stream->StreamTimeToGraphTime(stream->GetStreamBuffer().GetAllTracksEnd()))  {
       NS_WARN_IF_FALSE(stream->mNotifiedBlocked,
         "Should've notified blocked=true for a fully finished stream");
       stream->mNotifiedFinished = true;
       stream->mLastPlayedVideoFrame.SetNull();
       SetStreamOrderDirty();
       for (uint32_t j = 0; j < stream->mListeners.Length(); ++j) {
         MediaStreamListener* l = stream->mListeners[j];
         l->NotifyEvent(this, MediaStreamListener::EVENT_FINISHED);
@@ -474,41 +435,41 @@ MediaStreamGraphImpl::WillUnderrun(Media
   // unless they block on some other stream.
   if (aStream->mFinished || aStream->AsProcessedStream()) {
     return false;
   }
   GraphTime bufferEnd =
     StreamTimeToGraphTime(aStream, aStream->GetBufferEnd(),
                           INCLUDE_TRAILING_BLOCKED_INTERVAL);
 #ifdef DEBUG
-  if (bufferEnd < mCurrentTime) {
+  if (bufferEnd < IterationEnd()) {
     STREAM_LOG(PR_LOG_ERROR, ("MediaStream %p underrun, "
-                              "bufferEnd %f < mCurrentTime %f (%lld < %lld), Streamtime %lld",
-                              aStream, MediaTimeToSeconds(bufferEnd), MediaTimeToSeconds(mCurrentTime),
-                              bufferEnd, mCurrentTime, aStream->GetBufferEnd()));
+                              "bufferEnd %f < IterationEnd() %f (%lld < %lld), Streamtime %lld",
+                              aStream, MediaTimeToSeconds(bufferEnd), MediaTimeToSeconds(IterationEnd()),
+                              bufferEnd, IterationEnd(), aStream->GetBufferEnd()));
     aStream->DumpTrackInfo();
-    NS_ASSERTION(bufferEnd >= mCurrentTime, "Buffer underran");
+    NS_ASSERTION(bufferEnd >= IterationEnd(), "Buffer underran");
   }
 #endif
   // We should block after bufferEnd.
   if (bufferEnd <= aTime) {
-    STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p will block due to data underrun, "
-                                "bufferEnd %f",
-                                aStream, MediaTimeToSeconds(bufferEnd)));
+    STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p will block due to data underrun at %ld, "
+                                "bufferEnd %ld",
+                                aStream, aTime, bufferEnd));
     return true;
   }
   // We should keep blocking if we're currently blocked and we don't have
   // data all the way through to aEndBlockingDecisions. If we don't have
   // data all the way through to aEndBlockingDecisions, we'll block soon,
   // but we might as well remain unblocked and play the data we've got while
   // we can.
-  if (bufferEnd <= aEndBlockingDecisions && aStream->mBlocked.GetBefore(aTime)) {
+  if (bufferEnd < aEndBlockingDecisions && aStream->mBlocked.GetBefore(aTime)) {
     STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p will block due to speculative data underrun, "
-                                "bufferEnd %f",
-                                aStream, MediaTimeToSeconds(bufferEnd)));
+                                "bufferEnd %f (end at %ld)",
+                                aStream, MediaTimeToSeconds(bufferEnd), bufferEnd));
     return true;
   }
   // Reconsider decisions at bufferEnd
   *aEnd = std::min(*aEnd, bufferEnd);
   return false;
 }
 
 void
@@ -524,66 +485,60 @@ MediaStreamGraphImpl::MarkConsumed(Media
     return;
   }
   // Mark all the inputs to this stream as consumed
   for (uint32_t i = 0; i < ps->mInputs.Length(); ++i) {
     MarkConsumed(ps->mInputs[i]->mSource);
   }
 }
 
-static void AudioMixerCallback(AudioDataValue* aMixedBuffer,
-                               AudioSampleFormat aFormat,
-                               uint32_t aChannels,
-                               uint32_t aFrames,
-                               uint32_t aSampleRate)
-{
-  // Need an api to register mixer callbacks, bug 989921
-#ifdef MOZ_WEBRTC
-  if (aFrames > 0 && aChannels > 0) {
-    // XXX need Observer base class and registration API
-    if (gFarendObserver) {
-      gFarendObserver->InsertFarEnd(aMixedBuffer, aFrames, false,
-                                    aSampleRate, aChannels, aFormat);
-    }
-  }
-#endif
-}
-
 void
 MediaStreamGraphImpl::UpdateStreamOrder()
 {
-  bool shouldMix = false;
+  bool shouldAEC = false;
+  bool audioTrackPresent = false;
   // Value of mCycleMarker for unvisited streams in cycle detection.
   const uint32_t NOT_VISITED = UINT32_MAX;
   // Value of mCycleMarker for ordered streams in muted cycles.
   const uint32_t IN_MUTED_CYCLE = 1;
 
   for (uint32_t i = 0; i < mStreams.Length(); ++i) {
     MediaStream* stream = mStreams[i];
     stream->mIsConsumed = false;
     stream->mInBlockingSet = false;
     if (stream->AsSourceStream() &&
         stream->AsSourceStream()->NeedsMixing()) {
-      shouldMix = true;
+      shouldAEC = true;
+    }
+    for (StreamBuffer::TrackIter tracks(stream->GetStreamBuffer(), MediaSegment::AUDIO);
+         !tracks.IsEnded(); tracks.Next()) {
+      audioTrackPresent = true;
     }
   }
 
-  if (!mMixer && shouldMix) {
-    mMixer = new AudioMixer(AudioMixerCallback);
-    for (uint32_t i = 0; i < mStreams.Length(); ++i) {
-      for (uint32_t j = 0; j < mStreams[i]->mAudioOutputStreams.Length(); ++j) {
-        mStreams[i]->mAudioOutputStreams[j].mStream->SetMicrophoneActive(true);
-      }
+  if (!audioTrackPresent &&
+      CurrentDriver()->AsAudioCallbackDriver()) {
+    bool started;
+    {
+      MonitorAutoLock mon(mMonitor);
+      started = CurrentDriver()->AsAudioCallbackDriver()->IsStarted();
     }
-  } else if (mMixer && !shouldMix) {
-    mMixer = nullptr;
-    for (uint32_t i = 0; i < mStreams.Length(); ++i) {
-      for (uint32_t j = 0; j < mStreams[i]->mAudioOutputStreams.Length(); ++j) {
-        mStreams[i]->mAudioOutputStreams[j].mStream->SetMicrophoneActive(false);
-      }
+    if (started) {
+      SystemClockDriver* driver = new SystemClockDriver(this);
+      CurrentDriver()->SwitchAtNextIteration(driver);
+    }
+  }
+
+  if (shouldAEC && !mFarendObserverRef && gFarendObserver) {
+    mFarendObserverRef = gFarendObserver;
+    mMixer.AddCallback(mFarendObserverRef);
+  } else if (!shouldAEC && mFarendObserverRef) {
+    if (mMixer.FindCallback(mFarendObserverRef)) {
+      mMixer.RemoveCallback(mFarendObserverRef);
+      mFarendObserverRef = nullptr;
     }
   }
 
   // The algorithm for finding cycles is based on Tim Leslie's iterative
   // implementation [1][2] of Pearce's variant [3] of Tarjan's strongly
   // connected components (SCC) algorithm.  There are variations (a) to
   // distinguish whether streams in SCCs of size 1 are in a cycle and (b) to
   // re-run the algorithm over SCCs with breaks at DelayNodes.
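
The driver hand-off above reduces to this sketch; DriverSketch and its subclasses are simplified stand-ins for the GraphDriver classes, and the switch is deferred rather than performed synchronously.

#include <memory>

struct DriverSketch {
  virtual ~DriverSketch() {}
  virtual bool IsAudioCallbackDriver() const = 0;
  bool mStarted = false;
  std::unique_ptr<DriverSketch> mNextDriver;
  void SwitchAtNextIteration(std::unique_ptr<DriverSketch> aNext) {
    mNextDriver = std::move(aNext);
  }
};

struct SystemClockDriverSketch : DriverSketch {
  bool IsAudioCallbackDriver() const override { return false; }
};
struct AudioCallbackDriverSketch : DriverSketch {
  bool IsAudioCallbackDriver() const override { return true; }
};

// Mirrors the fallback above: a started audio driver with no audio track
// left schedules its own replacement for the next iteration.
void MaybeFallBackToSystemClock(DriverSketch& aCurrent,
                                bool aAudioTrackPresent) {
  if (!aAudioTrackPresent && aCurrent.IsAudioCallbackDriver() &&
      aCurrent.mStarted) {
    aCurrent.SwitchAtNextIteration(
        std::unique_ptr<DriverSketch>(new SystemClockDriverSketch()));
  }
}

int main() {
  AudioCallbackDriverSketch d;
  d.mStarted = true;
  MaybeFallBackToSystemClock(d, /*aAudioTrackPresent=*/false);
  return d.mNextDriver ? 0 : 1;
}
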
@@ -755,50 +710,51 @@ MediaStreamGraphImpl::UpdateStreamOrder(
 }
 
 void
 MediaStreamGraphImpl::RecomputeBlocking(GraphTime aEndBlockingDecisions)
 {
   bool blockingDecisionsWillChange = false;
 
   STREAM_LOG(PR_LOG_DEBUG+1, ("Media graph %p computing blocking for time %f",
-                              this, MediaTimeToSeconds(mStateComputedTime)));
+                              this, MediaTimeToSeconds(CurrentDriver()->StateComputedTime())));
   for (uint32_t i = 0; i < mStreams.Length(); ++i) {
     MediaStream* stream = mStreams[i];
     if (!stream->mInBlockingSet) {
       // Compute a partition of the streams containing 'stream' such that we can
       // compute the blocking status of each subset independently.
       nsAutoTArray<MediaStream*,10> streamSet;
       AddBlockingRelatedStreamsToSet(&streamSet, stream);
 
       GraphTime end;
-      for (GraphTime t = mStateComputedTime;
+      for (GraphTime t = CurrentDriver()->StateComputedTime();
            t < aEndBlockingDecisions; t = end) {
         end = GRAPH_TIME_MAX;
         RecomputeBlockingAt(streamSet, t, aEndBlockingDecisions, &end);
         if (end < GRAPH_TIME_MAX) {
           blockingDecisionsWillChange = true;
         }
       }
     }
 
     GraphTime end;
-    stream->mBlocked.GetAt(mCurrentTime, &end);
+    stream->mBlocked.GetAt(IterationEnd(), &end);
     if (end < GRAPH_TIME_MAX) {
       blockingDecisionsWillChange = true;
     }
   }
   STREAM_LOG(PR_LOG_DEBUG+1, ("Media graph %p computed blocking for interval %f to %f",
-                              this, MediaTimeToSeconds(mStateComputedTime),
+                              this, MediaTimeToSeconds(CurrentDriver()->StateComputedTime()),
                               MediaTimeToSeconds(aEndBlockingDecisions)));
-  mStateComputedTime = aEndBlockingDecisions;
+
+  CurrentDriver()->UpdateStateComputedTime(aEndBlockingDecisions);
 
   if (blockingDecisionsWillChange) {
     // Make sure we wake up to notify listeners about these changes.
-    EnsureNextIteration();
+    CurrentDriver()->EnsureNextIteration();
   }
 }
 
 void
 MediaStreamGraphImpl::AddBlockingRelatedStreamsToSet(nsTArray<MediaStream*>* aStreams,
                                                      MediaStream* aStream)
 {
   if (aStream->mInBlockingSet)
@@ -931,55 +887,35 @@ MediaStreamGraphImpl::CreateOrDestroyAud
       for (i = 0; i < audioOutputStreamsFound.Length(); ++i) {
         if (aStream->mAudioOutputStreams[i].mTrackID == tracks->GetID()) {
           break;
         }
       }
       if (i < audioOutputStreamsFound.Length()) {
         audioOutputStreamsFound[i] = true;
       } else {
-        // No output stream created for this track yet. Check if it's time to
-        // create one.
-        GraphTime startTime =
-          StreamTimeToGraphTime(aStream, tracks->GetStartTimeRoundDown(),
-                                INCLUDE_TRAILING_BLOCKED_INTERVAL);
-        if (startTime >= mStateComputedTime) {
-          // The stream wants to play audio, but nothing will play for the forseeable
-          // future, so don't create the stream.
-          continue;
-        }
-
-        // Allocating a AudioStream would be slow, so we finish the Init async
         MediaStream::AudioOutputStream* audioOutputStream =
           aStream->mAudioOutputStreams.AppendElement();
         audioOutputStream->mAudioPlaybackStartTime = aAudioOutputStartTime;
         audioOutputStream->mBlockedAudioTime = 0;
         audioOutputStream->mLastTickWritten = 0;
-        audioOutputStream->mStream = new AudioStream();
-        // XXX for now, allocate stereo output. But we need to fix this to
-        // match the system's ideal channel configuration.
-        // NOTE: we presume this is either fast or async-under-the-covers
-        audioOutputStream->mStream->Init(2, mSampleRate,
-                                         aStream->mAudioChannelType,
-                                         AudioStream::LowLatency);
         audioOutputStream->mTrackID = tracks->GetID();
 
-        // If there is a mixer, there is a micrphone active.
-        audioOutputStream->mStream->SetMicrophoneActive(mMixer);
-
-        LogLatency(AsyncLatencyLogger::AudioStreamCreate,
-                   reinterpret_cast<uint64_t>(aStream),
-                   reinterpret_cast<int64_t>(audioOutputStream->mStream.get()));
+        if (!CurrentDriver()->AsAudioCallbackDriver() &&
+            !CurrentDriver()->Switching()) {
+          AudioCallbackDriver* driver = new AudioCallbackDriver(this);
+          mMixer.AddCallback(driver);
+          CurrentDriver()->SwitchAtNextIteration(driver);
+        }
       }
     }
   }
 
   for (int32_t i = audioOutputStreamsFound.Length() - 1; i >= 0; --i) {
     if (!audioOutputStreamsFound[i]) {
-      aStream->mAudioOutputStreams[i].mStream->Shutdown();
       aStream->mAudioOutputStreams.RemoveElementAt(i);
     }
   }
 }
 
 TrackTicks
 MediaStreamGraphImpl::PlayAudio(MediaStream* aStream,
                                 GraphTime aFrom, GraphTime aTo)
@@ -993,18 +929,16 @@ MediaStreamGraphImpl::PlayAudio(MediaStr
   // on the absolute value of the {Graph,Stream}Time, and so that number of
   // ticks to play is the same for each cycle.
   TrackTicks ticksNeeded = TimeToTicksRoundDown(mSampleRate, aTo) - TimeToTicksRoundDown(mSampleRate, aFrom);
 
   if (aStream->mAudioOutputStreams.IsEmpty()) {
     return 0;
   }
 
-  // When we're playing multiple copies of this stream at the same time, they're
-  // perfectly correlated so adding volumes is the right thing to do.
   float volume = 0.0f;
   for (uint32_t i = 0; i < aStream->mAudioOutputs.Length(); ++i) {
     volume += aStream->mAudioOutputs[i].mVolume;
   }
 
   for (uint32_t i = 0; i < aStream->mAudioOutputStreams.Length(); ++i) {
     MediaStream::AudioOutputStream& audioOutput = aStream->mAudioOutputStreams[i];
     StreamBuffer::Track* track = aStream->mBuffer.FindTrack(audioOutput.mTrackID);
@@ -1021,21 +955,21 @@ MediaStreamGraphImpl::PlayAudio(MediaStr
         audioOutput.mLastTickWritten != offset) {
       // If there is a global underrun of the MSG, this property won't hold, and
       // we reset the sample count tracking.
       if (offset - audioOutput.mLastTickWritten == 1) {
         offset = audioOutput.mLastTickWritten;
       }
     }
 
-    // We don't update aStream->mBufferStartTime here to account for
-    // time spent blocked. Instead, we'll update it in UpdateCurrentTime after the
-    // blocked period has completed. But we do need to make sure we play from the
-    // right offsets in the stream buffer, even if we've already written silence for
-    // some amount of blocked time after the current time.
+    // We don't update aStream->mBufferStartTime here to account for time spent
+    // blocked. Instead, we'll update it in UpdateCurrentTimeForStreams after
+    // the blocked period has completed. But we do need to make sure we play
+    // from the right offsets in the stream buffer, even if we've already
+    // written silence for some amount of blocked time after the current time.
     GraphTime t = aFrom;
     while (ticksNeeded) {
       GraphTime end;
       bool blocked = aStream->mBlocked.GetAt(t, &end);
       end = std::min(end, aTo);
 
       // Check how many ticks of sound we can provide if we are blocked some
       // time in the middle of this cycle.
@@ -1044,50 +978,54 @@ MediaStreamGraphImpl::PlayAudio(MediaStr
         toWrite = ticksNeeded;
       } else {
         toWrite = TimeToTicksRoundDown(mSampleRate, end - t);
       }
       ticksNeeded -= toWrite;
 
       if (blocked) {
         output.InsertNullDataAtStart(toWrite);
+        ticksWritten += toWrite;
         STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p writing %ld blocking-silence samples for %f to %f (%ld to %ld)\n",
                                     aStream, toWrite, MediaTimeToSeconds(t), MediaTimeToSeconds(end),
                                     offset, offset + toWrite));
       } else {
         TrackTicks endTicksNeeded = offset + toWrite;
         TrackTicks endTicksAvailable = audio->GetDuration();
         STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p writing %ld samples for %f to %f (samples %ld to %ld)\n",
                                      aStream, toWrite, MediaTimeToSeconds(t), MediaTimeToSeconds(end),
                                      offset, endTicksNeeded));
 
         if (endTicksNeeded <= endTicksAvailable) {
           output.AppendSlice(*audio, offset, endTicksNeeded);
+          ticksWritten += toWrite;
           offset = endTicksNeeded;
         } else {
-          MOZ_ASSERT(track->IsEnded(), "Not enough data, and track not ended.");
+          // MOZ_ASSERT(track->IsEnded(), "Not enough data, and track not ended.");
           // If we are at the end of the track, maybe write the remaining
           // samples, and pad with/output silence.
           if (endTicksNeeded > endTicksAvailable &&
               offset < endTicksAvailable) {
             output.AppendSlice(*audio, offset, endTicksAvailable);
+            ticksWritten += toWrite;
             toWrite -= endTicksAvailable - offset;
             offset = endTicksAvailable;
           }
           output.AppendNullData(toWrite);
         }
         output.ApplyVolume(volume);
       }
       t = end;
     }
     audioOutput.mLastTickWritten = offset;
 
     // Need unique id for stream & track - and we want it to match the inserter
     output.WriteTo(LATENCY_STREAM_ID(aStream, track->GetID()),
-                   audioOutput.mStream, mMixer);
+                   mMixer, AudioChannelCount(),
+                   mSampleRate);
   }
   return ticksWritten;
 }
 
 static void
 SetImageToBlackPixel(PlanarYCbCrImage* aImage)
 {
   uint8_t blackPixel[] = { 0x10, 0x80, 0x80 };
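
The invariant the new ticksWritten accounting in PlayAudio() maintains: every tick in the interval is written exactly once, whether as silence (blocked) or as track data, so all streams report the same frame count to the mixer. A condensed sketch:

#include <cstdint>
#include <vector>

using TrackTicks = int64_t;

TrackTicks PlaySpan(const std::vector<bool>& aBlockedPerTick) {
  TrackTicks ticksWritten = 0;
  std::vector<float> output;
  for (bool blocked : aBlockedPerTick) {
    // Silence and data both advance the output by one tick; that is what
    // lets the mixer assert equal frame counts across streams.
    output.push_back(blocked ? 0.0f : 1.0f);
    ++ticksWritten;
  }
  return ticksWritten;
}

int main() {
  std::vector<bool> blocked = {false, false, true, true, false};
  return PlaySpan(blocked) == 5 ? 0 : 1;
}
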
@@ -1105,19 +1043,26 @@ void
 MediaStreamGraphImpl::PlayVideo(MediaStream* aStream)
 {
   MOZ_ASSERT(mRealtime, "Should only attempt to play video in realtime mode");
 
   if (aStream->mVideoOutputs.IsEmpty())
     return;
 
   // Display the next frame a bit early. This is better than letting the current
-  // frame be displayed for too long.
-  GraphTime framePosition = mCurrentTime + MEDIA_GRAPH_TARGET_PERIOD_MS;
-  NS_ASSERTION(framePosition >= aStream->mBufferStartTime, "frame position before buffer?");
+  // frame be displayed for too long. Because, depending on the GraphDriver
+  // in use, we can't reliably estimate the graph interval duration, we clamp
+  // it to the current state computed time.
+  GraphTime framePosition = IterationEnd() + MillisecondsToMediaTime(CurrentDriver()->IterationDuration());
+  if (framePosition > CurrentDriver()->StateComputedTime()) {
+    NS_WARN_IF_FALSE(std::abs(framePosition - CurrentDriver()->StateComputedTime()) <
+                     MillisecondsToMediaTime(5), "Graph thread slowdown?");
+    framePosition = CurrentDriver()->StateComputedTime();
+  }
+  MOZ_ASSERT(framePosition >= aStream->mBufferStartTime, "frame position before buffer?");
   StreamTime frameBufferTime = GraphTimeToStreamTime(aStream, framePosition);
 
   TrackTicks start;
   const VideoFrame* frame = nullptr;
   StreamBuffer::Track* track;
   for (StreamBuffer::TrackIter tracks(aStream->GetStreamBuffer(), MediaSegment::VIDEO);
        !tracks.IsEnded(); tracks.Next()) {
     VideoSegment* segment = tracks->Get<VideoSegment>();
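
The clamp in PlayVideo() above, as a worked standalone example (times here are plain milliseconds rather than MediaTime): look one iteration ahead, but never past the time blocking decisions have been computed for.

#include <cstdint>

using GraphTime = int64_t;

GraphTime ClampFramePosition(GraphTime aIterationEnd,
                             GraphTime aIterationDurationMs,
                             GraphTime aStateComputedTime) {
  GraphTime framePosition = aIterationEnd + aIterationDurationMs;
  if (framePosition > aStateComputedTime) {
    framePosition = aStateComputedTime;  // Never read undecided time.
  }
  return framePosition;
}

int main() {
  // 100 ms iteration end + 10 ms lookahead, state computed only to 105 ms.
  return ClampFramePosition(100, 10, 105) == 105 ? 0 : 1;
}
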
@@ -1133,18 +1078,18 @@ MediaStreamGraphImpl::PlayVideo(MediaStr
   if (!frame || *frame == aStream->mLastPlayedVideoFrame)
     return;
 
   STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p writing video frame %p (%dx%d)",
                               aStream, frame->GetImage(), frame->GetIntrinsicSize().width,
                               frame->GetIntrinsicSize().height));
   GraphTime startTime = StreamTimeToGraphTime(aStream,
       track->TicksToTimeRoundDown(start), INCLUDE_TRAILING_BLOCKED_INTERVAL);
-  TimeStamp targetTime = mCurrentTimeStamp +
-      TimeDuration::FromMilliseconds(double(startTime - mCurrentTime));
+  TimeStamp targetTime = CurrentDriver()->GetCurrentTimeStamp() +
+      TimeDuration::FromMilliseconds(double(startTime - IterationEnd()));
   for (uint32_t i = 0; i < aStream->mVideoOutputs.Length(); ++i) {
     VideoFrameContainer* output = aStream->mVideoOutputs[i];
 
     if (frame->GetForceBlack()) {
       nsRefPtr<Image> image =
         output->GetImageContainer()->CreateImage(ImageFormat::PLANAR_YCBCR);
       if (image) {
         // Sets the image to a single black pixel, which will be scaled to fill
@@ -1170,17 +1115,17 @@ MediaStreamGraphImpl::PlayVideo(MediaStr
 bool
 MediaStreamGraphImpl::ShouldUpdateMainThread()
 {
   if (mRealtime) {
     return true;
   }
 
   TimeStamp now = TimeStamp::Now();
-  if ((now - mLastMainThreadUpdate).ToMilliseconds() > MEDIA_GRAPH_TARGET_PERIOD_MS) {
+  if ((now - mLastMainThreadUpdate).ToMilliseconds() > CurrentDriver()->IterationDuration()) {
     mLastMainThreadUpdate = now;
     return true;
   }
   return false;
 }
 
 void
 MediaStreamGraphImpl::PrepareUpdatesToMainThreadState(bool aFinalUpdate)
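
ShouldUpdateMainThread() above is a simple rate limiter; a sketch using std::chrono in place of TimeStamp, with the same shape: realtime graphs always update, offline graphs at most once per iteration duration.

#include <chrono>

struct UpdateThrottle {
  std::chrono::steady_clock::time_point mLastUpdate;

  bool ShouldUpdate(bool aRealtime, double aIterationDurationMs) {
    if (aRealtime) {
      return true;  // Realtime graphs always update the main thread.
    }
    auto now = std::chrono::steady_clock::now();
    std::chrono::duration<double, std::milli> elapsed = now - mLastUpdate;
    if (elapsed.count() > aIterationDurationMs) {
      mLastUpdate = now;
      return true;
    }
    return false;
  }
};

int main() {
  UpdateThrottle t;
  return t.ShouldUpdate(false, 10.0) ? 0 : 1;  // First call: long elapsed.
}
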
@@ -1192,73 +1137,44 @@ MediaStreamGraphImpl::PrepareUpdatesToMa
   if (aFinalUpdate || ShouldUpdateMainThread()) {
     mStreamUpdates.SetCapacity(mStreamUpdates.Length() + mStreams.Length());
     for (uint32_t i = 0; i < mStreams.Length(); ++i) {
       MediaStream* stream = mStreams[i];
       if (!stream->MainThreadNeedsUpdates()) {
         continue;
       }
       StreamUpdate* update = mStreamUpdates.AppendElement();
-      update->mGraphUpdateIndex = stream->mGraphUpdateIndices.GetAt(mCurrentTime);
+      update->mGraphUpdateIndex = stream->mGraphUpdateIndices.GetAt(IterationEnd());
       update->mStream = stream;
       update->mNextMainThreadCurrentTime =
-        GraphTimeToStreamTime(stream, mCurrentTime);
+        GraphTimeToStreamTime(stream, IterationEnd());
       update->mNextMainThreadFinished = stream->mNotifiedFinished;
     }
     if (!mPendingUpdateRunnables.IsEmpty()) {
       mUpdateRunnables.MoveElementsFrom(mPendingUpdateRunnables);
     }
   }
 
   // Don't send the message to the main thread if it's not going to have
   // any work to do.
   if (aFinalUpdate ||
       !mUpdateRunnables.IsEmpty() ||
       !mStreamUpdates.IsEmpty()) {
     EnsureStableStateEventPosted();
   }
 }
 
-void
-MediaStreamGraphImpl::EnsureImmediateWakeUpLocked(MonitorAutoLock& aLock)
-{
-  if (mWaitState == WAITSTATE_WAITING_FOR_NEXT_ITERATION ||
-      mWaitState == WAITSTATE_WAITING_INDEFINITELY) {
-    mWaitState = WAITSTATE_WAKING_UP;
-    aLock.Notify();
-  }
-}
-
-void
-MediaStreamGraphImpl::EnsureNextIteration()
-{
-  MonitorAutoLock lock(mMonitor);
-  EnsureNextIterationLocked(lock);
-}
-
-void
-MediaStreamGraphImpl::EnsureNextIterationLocked(MonitorAutoLock& aLock)
-{
-  if (mNeedAnotherIteration)
-    return;
-  mNeedAnotherIteration = true;
-  if (mWaitState == WAITSTATE_WAITING_INDEFINITELY) {
-    mWaitState = WAITSTATE_WAKING_UP;
-    aLock.Notify();
-  }
-}
-
 /**
  * Returns smallest value of t such that
  * TimeToTicksRoundUp(aSampleRate, t) is a multiple of WEBAUDIO_BLOCK_SIZE
  * and floor(TimeToTicksRoundUp(aSampleRate, t)/WEBAUDIO_BLOCK_SIZE) >
  * floor(TimeToTicksRoundUp(aSampleRate, aTime)/WEBAUDIO_BLOCK_SIZE).
  */
-static GraphTime
-RoundUpToNextAudioBlock(TrackRate aSampleRate, GraphTime aTime)
+GraphTime
+MediaStreamGraphImpl::RoundUpToNextAudioBlock(GraphTime aTime)
 {
   TrackTicks ticks = aTime;
   uint64_t block = ticks >> WEBAUDIO_BLOCK_SIZE_BITS;
   uint64_t nextBlock = block + 1;
   TrackTicks nextTicks = nextBlock << WEBAUDIO_BLOCK_SIZE_BITS;
   return nextTicks;
 }
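
A worked example of the rounding above, assuming WEBAUDIO_BLOCK_SIZE_BITS is 7 (128-frame Web Audio blocks): the result is always strictly greater than the input and block-aligned, even for inputs that are already aligned.

#include <cstdint>

const uint32_t WEBAUDIO_BLOCK_SIZE_BITS = 7;  // 1 << 7 == 128 frames.

int64_t RoundUpToNextAudioBlock(int64_t aTicks) {
  uint64_t block = uint64_t(aTicks) >> WEBAUDIO_BLOCK_SIZE_BITS;
  return int64_t((block + 1) << WEBAUDIO_BLOCK_SIZE_BITS);
}

int main() {
  // 0 -> 128, 127 -> 128, 128 -> 256: strictly greater, block-aligned.
  return (RoundUpToNextAudioBlock(0) == 128 &&
          RoundUpToNextAudioBlock(127) == 128 &&
          RoundUpToNextAudioBlock(128) == 256) ? 0 : 1;
}
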
 
@@ -1267,17 +1183,17 @@ MediaStreamGraphImpl::ProduceDataForStre
                                                         TrackRate aSampleRate,
                                                         GraphTime aFrom,
                                                         GraphTime aTo)
 {
   MOZ_ASSERT(aStreamIndex <= mFirstCycleBreaker,
              "Cycle breaker is not AudioNodeStream?");
   GraphTime t = aFrom;
   while (t < aTo) {
-    GraphTime next = RoundUpToNextAudioBlock(aSampleRate, t);
+    GraphTime next = RoundUpToNextAudioBlock(t);
     for (uint32_t i = mFirstCycleBreaker; i < mStreams.Length(); ++i) {
       auto ns = static_cast<AudioNodeStream*>(mStreams[i]);
       MOZ_ASSERT(ns->AsAudioNodeStream());
       ns->ProduceOutputBeforeInput(t);
     }
     for (uint32_t i = aStreamIndex; i < mStreams.Length(); ++i) {
       ProcessedMediaStream* ps = mStreams[i]->AsProcessedStream();
       if (ps) {
@@ -1297,257 +1213,203 @@ MediaStreamGraphImpl::AllFinishedStreams
     if (s->mFinished && !s->mNotifiedFinished) {
       return false;
     }
   }
   return true;
 }
 
 void
-MediaStreamGraphImpl::PauseAllAudioOutputs()
+MediaStreamGraphImpl::UpdateGraph(GraphTime aEndBlockingDecision)
 {
-  for (uint32_t i = 0; i < mStreams.Length(); ++i) {
-    MediaStream* s = mStreams[i];
-    for (uint32_t j = 0; j < s->mAudioOutputStreams.Length(); ++j) {
-      s->mAudioOutputStreams[j].mStream->Pause();
+  // Calculate independent action times for each batch of messages (each
+  // batch corresponding to an event loop task). This isolates the performance
+  // of different scripts to some extent.
+  for (uint32_t i = 0; i < mFrontMessageQueue.Length(); ++i) {
+    mProcessingGraphUpdateIndex = mFrontMessageQueue[i].mGraphUpdateIndex;
+    nsTArray<nsAutoPtr<ControlMessage> >& messages = mFrontMessageQueue[i].mMessages;
+
+    for (uint32_t j = 0; j < messages.Length(); ++j) {
+      messages[j]->Run();
     }
   }
+  mFrontMessageQueue.Clear();
+
+  if (mStreamOrderDirty) {
+    UpdateStreamOrder();
+  }
+
+  bool ensureNextIteration = false;
+
+  // Grab pending stream input.
+  for (uint32_t i = 0; i < mStreams.Length(); ++i) {
+    SourceMediaStream* is = mStreams[i]->AsSourceStream();
+    if (is) {
+      UpdateConsumptionState(is);
+      ExtractPendingInput(is, aEndBlockingDecision, &ensureNextIteration);
+    }
+  }
+
+  // The loop is woken up so soon that IterationEnd() barely advances and we
+  // end up having aEndBlockingDecision == CurrentDriver()->StateComputedTime().
+  // Since stream blocking is computed in the interval of
+  // [CurrentDriver()->StateComputedTime(), aEndBlockingDecision), it won't be computed at all.
+  // We should ensure next iteration so that pending blocking changes will be
+  // computed in next loop.
+  if (ensureNextIteration ||
+      aEndBlockingDecision == CurrentDriver()->StateComputedTime()) {
+    CurrentDriver()->EnsureNextIteration();
+  }
+
+  // Figure out which streams are blocked and when.
+  RecomputeBlocking(aEndBlockingDecision);
 }
 
 void
-MediaStreamGraphImpl::ResumeAllAudioOutputs()
+MediaStreamGraphImpl::Process(GraphTime aFrom, GraphTime aTo)
 {
+  // Play stream contents.
+  bool allBlockedForever = true;
+  // True when we've done ProcessInput for all processed streams.
+  bool doneAllProducing = false;
+  // This is the number of frames written to the AudioStreams during this
+  // cycle.
+  TrackTicks ticksPlayed = 0;
+
+  mMixer.StartMixing();
+
+  // Figure out what each stream wants to do
   for (uint32_t i = 0; i < mStreams.Length(); ++i) {
-    MediaStream* s = mStreams[i];
-    for (uint32_t j = 0; j < s->mAudioOutputStreams.Length(); ++j) {
-      s->mAudioOutputStreams[j].mStream->Resume();
+    MediaStream* stream = mStreams[i];
+    if (!doneAllProducing) {
+      ProcessedMediaStream* ps = stream->AsProcessedStream();
+      if (ps) {
+        AudioNodeStream* n = stream->AsAudioNodeStream();
+        if (n) {
+#ifdef DEBUG
+          // Verify that the sampling rate for all of the following streams is the same
+          for (uint32_t j = i + 1; j < mStreams.Length(); ++j) {
+            AudioNodeStream* nextStream = mStreams[j]->AsAudioNodeStream();
+            if (nextStream) {
+              MOZ_ASSERT(n->SampleRate() == nextStream->SampleRate(),
+                         "All AudioNodeStreams in the graph must have the same sampling rate");
+            }
+          }
+#endif
+          // Since an AudioNodeStream is present, go ahead and
+          // produce audio block by block for all the rest of the streams.
+          ProduceDataForStreamsBlockByBlock(i, n->SampleRate(), aFrom, aTo);
+          doneAllProducing = true;
+        } else {
+          ps->ProcessInput(aFrom, aTo, ProcessedMediaStream::ALLOW_FINISH);
+          NS_WARN_IF_FALSE(stream->mBuffer.GetEnd() >=
+                           GraphTimeToStreamTime(stream, aTo),
+                           "Stream did not produce enough data");
+        }
+      }
     }
+    NotifyHasCurrentData(stream);
+    // Only playback audio and video in real-time mode
+    if (mRealtime) {
+      CreateOrDestroyAudioStreams(aFrom, stream);
+      TrackTicks ticksPlayedForThisStream = PlayAudio(stream, aFrom, aTo);
+      if (!ticksPlayed) {
+        ticksPlayed = ticksPlayedForThisStream;
+      } else {
+        MOZ_ASSERT(!ticksPlayedForThisStream || ticksPlayedForThisStream == ticksPlayed,
+            "Each stream should have the same number of frame.");
+      }
+      PlayVideo(stream);
+    }
+    SourceMediaStream* is = stream->AsSourceStream();
+    if (is) {
+      UpdateBufferSufficiencyState(is);
+    }
+    GraphTime end;
+    if (!stream->mBlocked.GetAt(aTo, &end) || end < GRAPH_TIME_MAX) {
+      allBlockedForever = false;
+    }
+  }
+
+  if (CurrentDriver()->AsAudioCallbackDriver() && ticksPlayed) {
+    mMixer.FinishMixing();
+  }
+
+  // If we are switching away from an AudioCallbackDriver, we don't need the
+  // mixer anymore.
+  if (CurrentDriver()->AsAudioCallbackDriver() &&
+      CurrentDriver()->Switching()) {
+    bool isStarted;
+    {
+      MonitorAutoLock mon(mMonitor);
+      isStarted = CurrentDriver()->AsAudioCallbackDriver()->IsStarted();
+    }
+    if (isStarted) {
+      mMixer.RemoveCallback(CurrentDriver()->AsAudioCallbackDriver());
+    }
+  }
+
+  if (!allBlockedForever) {
+    CurrentDriver()->EnsureNextIteration();
   }
 }
 
-struct AutoProfilerUnregisterThread
+bool
+MediaStreamGraphImpl::OneIteration(GraphTime aFrom, GraphTime aTo,
+                                   GraphTime aStateFrom, GraphTime aStateEnd)
 {
-  // The empty ctor is used to silence a pre-4.8.0 GCC unused variable warning.
-  AutoProfilerUnregisterThread()
   {
+    MonitorAutoLock lock(mMemoryReportMonitor);
+    if (mNeedsMemoryReport) {
+      mNeedsMemoryReport = false;
+
+      for (uint32_t i = 0; i < mStreams.Length(); ++i) {
+        AudioNodeStream* stream = mStreams[i]->AsAudioNodeStream();
+        if (stream) {
+          AudioNodeSizes usage;
+          stream->SizeOfAudioNodesIncludingThis(MallocSizeOf, usage);
+          mAudioStreamSizes.AppendElement(usage);
+        }
+      }
+
+      lock.Notify();
+    }
   }
 
-  ~AutoProfilerUnregisterThread()
-  {
-    profiler_unregister_thread();
-  }
-};
+  UpdateCurrentTimeForStreams(aFrom, aTo);
+
+  UpdateGraph(aStateEnd);
 
-void
-MediaStreamGraphImpl::RunThread()
-{
-  nsTArray<MessageBlock> messageQueue;
+  Process(aStateFrom, aStateEnd);
+
+  // Send updates to the main thread and wait for the next control loop
+  // iteration.
   {
     MonitorAutoLock lock(mMonitor);
-    messageQueue.SwapElements(mMessageQueue);
-  }
-  NS_ASSERTION(!messageQueue.IsEmpty(),
-               "Shouldn't have started a graph with empty message queue!");
-
-  uint32_t ticksProcessed = 0;
-  AutoProfilerUnregisterThread autoUnregister;
-
-  for (;;) {
-    // Check if a memory report has been requested.
-    {
-      MonitorAutoLock lock(mMemoryReportMonitor);
-      if (mNeedsMemoryReport) {
-        mNeedsMemoryReport = false;
-
-        for (uint32_t i = 0; i < mStreams.Length(); ++i) {
-          AudioNodeStream* stream = mStreams[i]->AsAudioNodeStream();
-          if (stream) {
-            AudioNodeSizes usage;
-            stream->SizeOfAudioNodesIncludingThis(MallocSizeOf, usage);
-            mAudioStreamSizes.AppendElement(usage);
-          }
-        }
-
-        lock.Notify();
-      }
-    }
-
-    // Update mCurrentTime to the min of the playing audio times, or using the
-    // wall-clock time change if no audio is playing.
-    UpdateCurrentTime();
-
-    // Calculate independent action times for each batch of messages (each
-    // batch corresponding to an event loop task). This isolates the performance
-    // of different scripts to some extent.
-    for (uint32_t i = 0; i < messageQueue.Length(); ++i) {
-      mProcessingGraphUpdateIndex = messageQueue[i].mGraphUpdateIndex;
-      nsTArray<nsAutoPtr<ControlMessage> >& messages = messageQueue[i].mMessages;
-
-      for (uint32_t j = 0; j < messages.Length(); ++j) {
-        messages[j]->Run();
-      }
-    }
-    messageQueue.Clear();
-
-    if (mStreamOrderDirty) {
-      UpdateStreamOrder();
-    }
-
-    GraphTime endBlockingDecisions =
-      RoundUpToNextAudioBlock(mSampleRate, mCurrentTime + MillisecondsToMediaTime(AUDIO_TARGET_MS));
-    bool ensureNextIteration = false;
-
-    // Grab pending stream input.
-    for (uint32_t i = 0; i < mStreams.Length(); ++i) {
-      SourceMediaStream* is = mStreams[i]->AsSourceStream();
-      if (is) {
-        UpdateConsumptionState(is);
-        ExtractPendingInput(is, endBlockingDecisions, &ensureNextIteration);
-      }
-    }
-
-    // The loop is woken up so soon that mCurrentTime barely advances and we
-    // end up having endBlockingDecisions == mStateComputedTime.
-    // Since stream blocking is computed in the interval of
-    // [mStateComputedTime, endBlockingDecisions), it won't be computed at all.
-    // We should ensure next iteration so that pending blocking changes will be
-    // computed in next loop.
-    if (endBlockingDecisions == mStateComputedTime) {
-      ensureNextIteration = true;
+    bool finalUpdate = mForceShutDown ||
+      (IterationEnd() >= mEndTime && AllFinishedStreamsNotified()) ||
+      (IsEmpty() && mBackMessageQueue.IsEmpty());
+    PrepareUpdatesToMainThreadState(finalUpdate);
+    if (finalUpdate) {
+      // Enter shutdown mode. The stable-state handler will detect this
+      // and complete shutdown. Destroy any streams immediately.
+      STREAM_LOG(PR_LOG_DEBUG, ("MediaStreamGraph %p waiting for main thread cleanup", this));
+      // We'll shut down this graph object if it does not get restarted.
+      mLifecycleState = LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP;
+      // No need to Destroy streams here. The main-thread owner of each
+      // stream is responsible for calling Destroy on them.
+      return false;
     }
 
-    // Figure out which streams are blocked and when.
-    GraphTime prevComputedTime = mStateComputedTime;
-    RecomputeBlocking(endBlockingDecisions);
+    CurrentDriver()->WaitForNextIteration();
 
-    // Play stream contents.
-    bool allBlockedForever = true;
-    // True when we've done ProcessInput for all processed streams.
-    bool doneAllProducing = false;
-    // This is the number of frame that are written to the AudioStreams, for
-    // this cycle.
-    TrackTicks ticksPlayed = 0;
-    // Figure out what each stream wants to do
-    for (uint32_t i = 0; i < mStreams.Length(); ++i) {
-      MediaStream* stream = mStreams[i];
-      if (!doneAllProducing) {
-        ProcessedMediaStream* ps = stream->AsProcessedStream();
-        if (ps) {
-          AudioNodeStream* n = stream->AsAudioNodeStream();
-          if (n) {
-#ifdef DEBUG
-            // Verify that the sampling rate for all of the following streams is the same
-            for (uint32_t j = i + 1; j < mStreams.Length(); ++j) {
-              AudioNodeStream* nextStream = mStreams[j]->AsAudioNodeStream();
-              if (nextStream) {
-                MOZ_ASSERT(n->SampleRate() == nextStream->SampleRate(),
-                           "All AudioNodeStreams in the graph must have the same sampling rate");
-              }
-            }
-#endif
-            // Since an AudioNodeStream is present, go ahead and
-            // produce audio block by block for all the rest of the streams.
-            ProduceDataForStreamsBlockByBlock(i, n->SampleRate(), prevComputedTime, mStateComputedTime);
-            ticksProcessed += TimeToTicksRoundDown(n->SampleRate(), mStateComputedTime - prevComputedTime);
-            doneAllProducing = true;
-          } else {
-            ps->ProcessInput(prevComputedTime, mStateComputedTime,
-                             ProcessedMediaStream::ALLOW_FINISH);
-            NS_WARN_IF_FALSE(stream->mBuffer.GetEnd() >=
-                             GraphTimeToStreamTime(stream, mStateComputedTime),
-                             "Stream did not produce enough data");
-          }
-        }
-      }
-      NotifyHasCurrentData(stream);
-      if (mRealtime) {
-        // Only playback audio and video in real-time mode
-        CreateOrDestroyAudioStreams(prevComputedTime, stream);
-        TrackTicks ticksPlayedForThisStream = PlayAudio(stream, prevComputedTime, mStateComputedTime);
-        if (!ticksPlayed) {
-          ticksPlayed = ticksPlayedForThisStream;
-        } else {
-          MOZ_ASSERT(!ticksPlayedForThisStream || ticksPlayedForThisStream == ticksPlayed,
-              "Each stream should have the same number of frame.");
-        }
-        PlayVideo(stream);
-      }
-      SourceMediaStream* is = stream->AsSourceStream();
-      if (is) {
-        UpdateBufferSufficiencyState(is);
-      }
-      GraphTime end;
-      if (!stream->mBlocked.GetAt(mCurrentTime, &end) || end < GRAPH_TIME_MAX) {
-        allBlockedForever = false;
-      }
-    }
+    SwapMessageQueues();
+  }
 
-    if (mMixer) {
-      mMixer->FinishMixing();
-    }
-
-    if (ensureNextIteration || !allBlockedForever) {
-      EnsureNextIteration();
-    }
-
-    // Send updates to the main thread and wait for the next control loop
-    // iteration.
-    {
-      MonitorAutoLock lock(mMonitor);
-      bool finalUpdate = mForceShutDown ||
-        (mCurrentTime >= mEndTime && AllFinishedStreamsNotified()) ||
-        (IsEmpty() && mMessageQueue.IsEmpty());
-      PrepareUpdatesToMainThreadState(finalUpdate);
-      if (finalUpdate) {
-        // Enter shutdown mode. The stable-state handler will detect this
-        // and complete shutdown. Destroy any streams immediately.
-        STREAM_LOG(PR_LOG_DEBUG, ("MediaStreamGraph %p waiting for main thread cleanup", this));
-        // We'll shut down this graph object if it does not get restarted.
-        mLifecycleState = LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP;
-        // No need to Destroy streams here. The main-thread owner of each
-        // stream is responsible for calling Destroy on them.
-        return;
-      }
-
-      // No need to wait in non-realtime mode, just churn through the input as soon
-      // as possible.
-      if (mRealtime) {
-        PRIntervalTime timeout = PR_INTERVAL_NO_TIMEOUT;
-        TimeStamp now = TimeStamp::Now();
-        bool pausedOutputs = false;
-        if (mNeedAnotherIteration) {
-          int64_t timeoutMS = MEDIA_GRAPH_TARGET_PERIOD_MS -
-            int64_t((now - mCurrentTimeStamp).ToMilliseconds());
-          // Make sure timeoutMS doesn't overflow 32 bits by waking up at
-          // least once a minute, if we need to wake up at all
-          timeoutMS = std::max<int64_t>(0, std::min<int64_t>(timeoutMS, 60*1000));
-          timeout = PR_MillisecondsToInterval(uint32_t(timeoutMS));
-          STREAM_LOG(PR_LOG_DEBUG+1, ("Waiting for next iteration; at %f, timeout=%f",
-                                     (now - mInitialTimeStamp).ToSeconds(), timeoutMS/1000.0));
-          mWaitState = WAITSTATE_WAITING_FOR_NEXT_ITERATION;
-        } else {
-          mWaitState = WAITSTATE_WAITING_INDEFINITELY;
-          PauseAllAudioOutputs();
-          pausedOutputs = true;
-        }
-        if (timeout > 0) {
-          mMonitor.Wait(timeout);
-          STREAM_LOG(PR_LOG_DEBUG+1, ("Resuming after timeout; at %f, elapsed=%f",
-                                     (TimeStamp::Now() - mInitialTimeStamp).ToSeconds(),
-                                     (TimeStamp::Now() - now).ToSeconds()));
-        }
-        if (pausedOutputs) {
-          ResumeAllAudioOutputs();
-        }
-      }
-      mWaitState = WAITSTATE_RUNNING;
-      mNeedAnotherIteration = false;
-      messageQueue.SwapElements(mMessageQueue);
-    }
-  }
+  return true;
 }
 
 void
 MediaStreamGraphImpl::ApplyStreamUpdate(StreamUpdate* aUpdate)
 {
   mMonitor.AssertCurrentThreadOwns();
 
   MediaStream* stream = aUpdate->mStream;
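
The iteration contract introduced above, reduced to a skeleton: the driver owns the loop and keeps calling OneIteration() until it returns false. Everything below is elided pseudo-structure, not the tree's code.

#include <cstdint>

using GraphTime = int64_t;

struct GraphSketch {
  bool mFinalUpdate = false;

  void UpdateCurrentTimeForStreams(GraphTime, GraphTime) {}
  void UpdateGraph(GraphTime) {}
  void Process(GraphTime, GraphTime) {}

  // Mirrors MediaStreamGraphImpl::OneIteration(): returns true to keep the
  // driver iterating, false to begin main-thread-driven cleanup.
  bool OneIteration(GraphTime aFrom, GraphTime aTo,
                    GraphTime aStateFrom, GraphTime aStateEnd) {
    UpdateCurrentTimeForStreams(aFrom, aTo);
    UpdateGraph(aStateEnd);
    Process(aStateFrom, aStateEnd);
    return !mFinalUpdate;
  }
};

struct LoopSketch {
  GraphSketch* mGraph;
  void RunLoop() {
    GraphTime t = 0;
    const GraphTime step = 128;
    while (mGraph->OneIteration(t, t + step, t + step, t + 2 * step)) {
      t += step;
      if (t > 1024) mGraph->mFinalUpdate = true;  // Simulate shutdown.
    }
  }
};

int main() {
  GraphSketch g;
  LoopSketch d{&g};
  d.RunLoop();
}
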
@@ -1560,83 +1422,46 @@ MediaStreamGraphImpl::ApplyStreamUpdate(
     stream->mWrapper->NotifyStreamStateChanged();
   }
   for (int32_t i = stream->mMainThreadListeners.Length() - 1; i >= 0; --i) {
     stream->mMainThreadListeners[i]->NotifyMainThreadStateChanged();
   }
 }
 
 void
-MediaStreamGraphImpl::ShutdownThreads()
-{
-  NS_ASSERTION(NS_IsMainThread(), "Must be called on main thread");
-  // mGraph's thread is not running so it's OK to do whatever here
-  STREAM_LOG(PR_LOG_DEBUG, ("Stopping threads for MediaStreamGraph %p", this));
-
-  if (mThread) {
-    mThread->Shutdown();
-    mThread = nullptr;
-  }
-}
-
-void
 MediaStreamGraphImpl::ForceShutDown()
 {
   NS_ASSERTION(NS_IsMainThread(), "Must be called on main thread");
   STREAM_LOG(PR_LOG_DEBUG, ("MediaStreamGraph %p ForceShutdown", this));
   {
     MonitorAutoLock lock(mMonitor);
     mForceShutDown = true;
-    EnsureImmediateWakeUpLocked(lock);
+    CurrentDriver()->EnsureNextIterationLocked();
   }
 }
 
 namespace {
 
-class MediaStreamGraphInitThreadRunnable : public nsRunnable {
-public:
-  explicit MediaStreamGraphInitThreadRunnable(MediaStreamGraphImpl* aGraph)
-    : mGraph(aGraph)
-  {
-  }
-  NS_IMETHOD Run()
-  {
-    char aLocal;
-    profiler_register_thread("MediaStreamGraph", &aLocal);
-    mGraph->RunThread();
-    return NS_OK;
-  }
-private:
-  MediaStreamGraphImpl* mGraph;
-};
-
-class MediaStreamGraphThreadRunnable : public nsRunnable {
-public:
-  explicit MediaStreamGraphThreadRunnable(MediaStreamGraphImpl* aGraph)
-    : mGraph(aGraph)
-  {
-  }
-  NS_IMETHOD Run()
-  {
-    mGraph->RunThread();
-    return NS_OK;
-  }
-private:
-  MediaStreamGraphImpl* mGraph;
-};
-
 class MediaStreamGraphShutDownRunnable : public nsRunnable {
 public:
-  MediaStreamGraphShutDownRunnable(MediaStreamGraphImpl* aGraph) : mGraph(aGraph) {}
+  explicit MediaStreamGraphShutDownRunnable(MediaStreamGraphImpl* aGraph)
+    : mGraph(aGraph)
+  {}
   NS_IMETHOD Run()
   {
     NS_ASSERTION(mGraph->mDetectedNotRunning,
                  "We should know the graph thread control loop isn't running!");
 
-    mGraph->ShutdownThreads();
+    STREAM_LOG(PR_LOG_DEBUG, ("Shutting down graph %p", mGraph.get()));
+
+    if (mGraph->CurrentDriver()->AsAudioCallbackDriver()) {
+      MOZ_ASSERT(!mGraph->CurrentDriver()->AsAudioCallbackDriver()->InCallback());
+    }
+
+    mGraph->CurrentDriver()->Stop();
 
     // mGraph's thread is not running so it's OK to do whatever here
     if (mGraph->IsEmpty()) {
       // mGraph is no longer needed, so delete it.
       mGraph->Destroy();
     } else {
       // The graph is not empty.  We must be in a forced shutdown, or a
       // non-realtime graph that has finished processing.  Some later
@@ -1652,34 +1477,37 @@ public:
       }
 
       mGraph->mLifecycleState =
         MediaStreamGraphImpl::LIFECYCLE_WAITING_FOR_STREAM_DESTRUCTION;
     }
     return NS_OK;
   }
 private:
-  MediaStreamGraphImpl* mGraph;
+  nsRefPtr<MediaStreamGraphImpl> mGraph;
 };
 
 class MediaStreamGraphStableStateRunnable : public nsRunnable {
 public:
-  explicit MediaStreamGraphStableStateRunnable(MediaStreamGraphImpl* aGraph)
+  explicit MediaStreamGraphStableStateRunnable(MediaStreamGraphImpl* aGraph,
+                                               bool aSourceIsMSG)
     : mGraph(aGraph)
+    , mSourceIsMSG(aSourceIsMSG)
   {
   }
   NS_IMETHOD Run()
   {
     if (mGraph) {
-      mGraph->RunInStableState();
+      mGraph->RunInStableState(mSourceIsMSG);
     }
     return NS_OK;
   }
 private:
-  MediaStreamGraphImpl* mGraph;
+  nsRefPtr<MediaStreamGraphImpl> mGraph;
+  bool mSourceIsMSG;
 };
 
 /*
  * Control messages forwarded from main thread to graph manager thread
  */
 class CreateMessage : public ControlMessage {
 public:
   CreateMessage(MediaStream* aStream) : ControlMessage(aStream) {}
@@ -1703,112 +1531,125 @@ class MediaStreamGraphShutdownObserver M
 public:
   NS_DECL_ISUPPORTS
   NS_DECL_NSIOBSERVER
 };
 
 }
 
 void
-MediaStreamGraphImpl::RunInStableState()
+MediaStreamGraphImpl::RunInStableState(bool aSourceIsMSG)
 {
   NS_ASSERTION(NS_IsMainThread(), "Must be called on main thread");
 
   nsTArray<nsCOMPtr<nsIRunnable> > runnables;
   // When we're doing a forced shutdown, pending control messages may be
   // run on the main thread via RunDuringShutdown. Those messages must
   // run without the graph monitor being held. So, we collect them here.
   nsTArray<nsAutoPtr<ControlMessage> > controlMessagesToRunDuringShutdown;
 
   {
     MonitorAutoLock lock(mMonitor);
-    mPostedRunInStableStateEvent = false;
+    if (aSourceIsMSG) {
+      MOZ_ASSERT(mPostedRunInStableStateEvent);
+      mPostedRunInStableStateEvent = false;
+    }
 
     runnables.SwapElements(mUpdateRunnables);
     for (uint32_t i = 0; i < mStreamUpdates.Length(); ++i) {
       StreamUpdate* update = &mStreamUpdates[i];
       if (update->mStream) {
         ApplyStreamUpdate(update);
       }
     }
     mStreamUpdates.Clear();
 
-    // Don't start the thread for a non-realtime graph until it has been
-    // explicitly started by StartNonRealtimeProcessing.
-    if (mLifecycleState == LIFECYCLE_THREAD_NOT_STARTED &&
-        (mRealtime || mNonRealtimeProcessing)) {
-      mLifecycleState = LIFECYCLE_RUNNING;
-      // Start the thread now. We couldn't start it earlier because
-      // the graph might exit immediately on finding it has no streams. The
-      // first message for a new graph must create a stream.
-      nsCOMPtr<nsIRunnable> event = new MediaStreamGraphInitThreadRunnable(this);
-      NS_NewNamedThread("MediaStreamGrph", getter_AddRefs(mThread), event);
-    }
-
     if (mCurrentTaskMessageQueue.IsEmpty()) {
       if (mLifecycleState == LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP && IsEmpty()) {
         // Complete shutdown. First, ensure that this graph is no longer used.
         // A new graph will be created if one is needed.
         STREAM_LOG(PR_LOG_DEBUG, ("Disconnecting MediaStreamGraph %p", this));
         if (this == gGraph) {
           // null out gGraph if that's the graph being shut down
           gGraph = nullptr;
         }
         // Asynchronously clean up old graph. We don't want to do this
         // synchronously because it spins the event loop waiting for threads
         // to shut down, and we don't want to do that in a stable state handler.
         mLifecycleState = LIFECYCLE_WAITING_FOR_THREAD_SHUTDOWN;
-        nsCOMPtr<nsIRunnable> event = new MediaStreamGraphShutDownRunnable(this);
+        nsCOMPtr<nsIRunnable> event = new MediaStreamGraphShutDownRunnable(this);
         NS_DispatchToMainThread(event);
       }
     } else {
       if (mLifecycleState <= LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP) {
-        MessageBlock* block = mMessageQueue.AppendElement();
+        MessageBlock* block = mBackMessageQueue.AppendElement();
         block->mMessages.SwapElements(mCurrentTaskMessageQueue);
         block->mGraphUpdateIndex = mNextGraphUpdateIndex;
         ++mNextGraphUpdateIndex;
-        EnsureNextIterationLocked(lock);
+        CurrentDriver()->EnsureNextIterationLocked();
       }
 
       // If the MediaStreamGraph has more messages going to it, try to revive
       // it to process those messages. Don't do this if we're in a forced
       // shutdown or it's a non-realtime graph that has already terminated
       // processing.
       if (mLifecycleState == LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP &&
           mRealtime && !mForceShutDown) {
         mLifecycleState = LIFECYCLE_RUNNING;
         // Revive the MediaStreamGraph since we have more messages going to it.
         // Note that we need to put messages into its queue before reviving it,
         // or it might exit immediately.
-        nsCOMPtr<nsIRunnable> event = new MediaStreamGraphThreadRunnable(this);
-        mThread->Dispatch(event, 0);
+        {
+          MonitorAutoUnlock unlock(mMonitor);
+          CurrentDriver()->Revive();
+        }
+      }
+    }
+
+    // Don't start the thread for a non-realtime graph until it has been
+    // explicitly started by StartNonRealtimeProcessing.
+    if (mLifecycleState == LIFECYCLE_THREAD_NOT_STARTED &&
+        (mRealtime || mNonRealtimeProcessing)) {
+      mLifecycleState = LIFECYCLE_RUNNING;
+      // Start the thread now. We couldn't start it earlier because
+      // the graph might exit immediately on finding it has no streams. The
+      // first message for a new graph must create a stream.
+      {
+        // We should exit the monitor for now, because starting a stream might
+        // take locks, and we don't want to deadlock.
+        MonitorAutoUnlock unlock(mMonitor);
+        STREAM_LOG(PR_LOG_DEBUG, ("Starting a graph! %s\n", CurrentDriver()->AsAudioCallbackDriver() ? "AudioDriver" : "SystemDriver"));
+        CurrentDriver()->Start();
       }
     }
 
     if ((mForceShutDown || !mRealtime) &&
         mLifecycleState == LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP) {
       // Defer calls to RunDuringShutdown() to happen while mMonitor is not held.
-      for (uint32_t i = 0; i < mMessageQueue.Length(); ++i) {
-        MessageBlock& mb = mMessageQueue[i];
+      for (uint32_t i = 0; i < mBackMessageQueue.Length(); ++i) {
+        MessageBlock& mb = mBackMessageQueue[i];
         controlMessagesToRunDuringShutdown.MoveElementsFrom(mb.mMessages);
       }
-      mMessageQueue.Clear();
+      mBackMessageQueue.Clear();
       MOZ_ASSERT(mCurrentTaskMessageQueue.IsEmpty());
       // Stop MediaStreamGraph threads. Do not clear gGraph since
       // we have outstanding DOM objects that may need it.
       mLifecycleState = LIFECYCLE_WAITING_FOR_THREAD_SHUTDOWN;
       nsCOMPtr<nsIRunnable> event = new MediaStreamGraphShutDownRunnable(this);
       NS_DispatchToMainThread(event);
     }
 
     mDetectedNotRunning = mLifecycleState > LIFECYCLE_RUNNING;
   }
 
   // Make sure we get a new current time in the next event loop task
-  mPostedRunInStableState = false;
+  if (!aSourceIsMSG) {
+    MOZ_ASSERT(mPostedRunInStableState);
+    mPostedRunInStableState = false;
+  }
 
   for (uint32_t i = 0; i < runnables.Length(); ++i) {
     runnables[i]->Run();
   }
   for (uint32_t i = 0; i < controlMessagesToRunDuringShutdown.Length(); ++i) {
     controlMessagesToRunDuringShutdown[i]->RunDuringShutdown();
   }
 
@@ -1823,34 +1664,34 @@ static NS_DEFINE_CID(kAppShellCID, NS_AP
 void
 MediaStreamGraphImpl::EnsureRunInStableState()
 {
   NS_ASSERTION(NS_IsMainThread(), "main thread only");
 
   if (mPostedRunInStableState)
     return;
   mPostedRunInStableState = true;
-  nsCOMPtr<nsIRunnable> event = new MediaStreamGraphStableStateRunnable(this);
+  nsCOMPtr<nsIRunnable> event = new MediaStreamGraphStableStateRunnable(this, false);
   nsCOMPtr<nsIAppShell> appShell = do_GetService(kAppShellCID);
   if (appShell) {
     appShell->RunInStableState(event);
   } else {
     NS_ERROR("Appshell already destroyed?");
   }
 }
 
 void
 MediaStreamGraphImpl::EnsureStableStateEventPosted()
 {
   mMonitor.AssertCurrentThreadOwns();
 
   if (mPostedRunInStableStateEvent)
     return;
   mPostedRunInStableStateEvent = true;
-  nsCOMPtr<nsIRunnable> event = new MediaStreamGraphStableStateRunnable(this);
+  nsCOMPtr<nsIRunnable> event = new MediaStreamGraphStableStateRunnable(this, true);
   NS_DispatchToMainThread(event);
 }
 
 void
 MediaStreamGraphImpl::AppendMessage(ControlMessage* aMessage)
 {
   NS_ASSERTION(NS_IsMainThread(), "main thread only");
   NS_ASSERTION(!aMessage->GetStream() ||
@@ -1921,49 +1762,45 @@ MediaStream::SizeOfExcludingThis(MallocS
   // Not owned:
   // - mGraph - Not reported here
   // - mConsumers - elements
   // Future:
   // - mWrapper
   // - mVideoOutputs - elements
   // - mLastPlayedVideoFrame
   // - mListeners - elements
-  // - mAudioOutputStreams - elements
+  // - mAudioOutputStreams - elements
 
   amount += mBuffer.SizeOfExcludingThis(aMallocSizeOf);
   amount += mAudioOutputs.SizeOfExcludingThis(aMallocSizeOf);
   amount += mVideoOutputs.SizeOfExcludingThis(aMallocSizeOf);
   amount += mExplicitBlockerCount.SizeOfExcludingThis(aMallocSizeOf);
   amount += mListeners.SizeOfExcludingThis(aMallocSizeOf);
   amount += mMainThreadListeners.SizeOfExcludingThis(aMallocSizeOf);
   amount += mDisabledTrackIDs.SizeOfExcludingThis(aMallocSizeOf);
   amount += mBlocked.SizeOfExcludingThis(aMallocSizeOf);
   amount += mGraphUpdateIndices.SizeOfExcludingThis(aMallocSizeOf);
   amount += mConsumers.SizeOfExcludingThis(aMallocSizeOf);
-  amount += mAudioOutputStreams.SizeOfExcludingThis(aMallocSizeOf);
-  for (size_t i = 0; i < mAudioOutputStreams.Length(); i++) {
-    amount += mAudioOutputStreams[i].SizeOfExcludingThis(aMallocSizeOf);
-  }
 
   return amount;
 }
 
 size_t
 MediaStream::SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const
 {
   return aMallocSizeOf(this) + SizeOfExcludingThis(aMallocSizeOf);
 }
 
 void
 MediaStream::Init()
 {
   MediaStreamGraphImpl* graph = GraphImpl();
-  mBlocked.SetAtAndAfter(graph->mCurrentTime, true);
-  mExplicitBlockerCount.SetAtAndAfter(graph->mCurrentTime, true);
-  mExplicitBlockerCount.SetAtAndAfter(graph->mStateComputedTime, false);
+  mBlocked.SetAtAndAfter(graph->IterationEnd(), true);
+  mExplicitBlockerCount.SetAtAndAfter(graph->IterationEnd(), true);
+  mExplicitBlockerCount.SetAtAndAfter(graph->CurrentDriver()->StateComputedTime(), false);
 }
 
 MediaStreamGraphImpl*
 MediaStream::GraphImpl()
 {
   return mGraph;
 }
 
@@ -2047,20 +1884,16 @@ MediaStream::RemoveAllListenersImpl()
 }
 
 void
 MediaStream::DestroyImpl()
 {
   for (int32_t i = mConsumers.Length() - 1; i >= 0; --i) {
     mConsumers[i]->Disconnect();
   }
-  for (uint32_t i = 0; i < mAudioOutputStreams.Length(); ++i) {
-    mAudioOutputStreams[i].mStream->Shutdown();
-  }
-  mAudioOutputStreams.Clear();
   mGraph = nullptr;
 }
 
 void
 MediaStream::Destroy()
 {
   // Keep this stream alive until we leave this method
   nsRefPtr<MediaStream> kungFuDeathGrip = this;
@@ -2195,17 +2028,17 @@ MediaStream::ChangeExplicitBlockerCount(
 {
   class Message : public ControlMessage {
   public:
     Message(MediaStream* aStream, int32_t aDelta) :
       ControlMessage(aStream), mDelta(aDelta) {}
     virtual void Run()
     {
       mStream->ChangeExplicitBlockerCountImpl(
-          mStream->GraphImpl()->mStateComputedTime, mDelta);
+          mStream->GraphImpl()->CurrentDriver()->StateComputedTime(), mDelta);
     }
     int32_t mDelta;
   };
 
   // This can happen if this method has been called asynchronously, and the
   // stream has been destroyed since then.
   if (mMainThreadDestroyed) {
     return;
@@ -2361,17 +2194,17 @@ SourceMediaStream::DestroyImpl()
 }
 
 void
 SourceMediaStream::SetPullEnabled(bool aEnabled)
 {
   MutexAutoLock lock(mMutex);
   mPullEnabled = aEnabled;
   if (mPullEnabled && GraphImpl()) {
-    GraphImpl()->EnsureNextIteration();
+    GraphImpl()->CurrentDriver()->EnsureNextIteration();
   }
 }
 
 void
 SourceMediaStream::AddTrack(TrackID aID, TrackRate aRate, TrackTicks aStart,
                             MediaSegment* aSegment)
 {
   MutexAutoLock lock(mMutex);
@@ -2381,17 +2214,17 @@ SourceMediaStream::AddTrack(TrackID aID,
   // We resample all audio input tracks to the sample rate of the audio mixer.
   data->mOutputRate = aSegment->GetType() == MediaSegment::AUDIO ?
                       GraphImpl()->AudioSampleRate() : aRate;
   data->mStart = aStart;
   data->mCommands = TRACK_CREATE;
   data->mData = aSegment;
   data->mHaveEnough = false;
   if (auto graph = GraphImpl()) {
-    graph->EnsureNextIteration();
+    graph->CurrentDriver()->EnsureNextIteration();
   }
 }
 
 void
 SourceMediaStream::ResampleAudioToGraphSampleRate(TrackData* aTrackData, MediaSegment* aSegment)
 {
   if (aSegment->GetType() != MediaSegment::AUDIO ||
       aTrackData->mInputRate == GraphImpl()->AudioSampleRate()) {
@@ -2443,17 +2276,17 @@ SourceMediaStream::AppendToTrack(TrackID
       ApplyTrackDisabling(aID, aSegment, aRawSegment);
 
       ResampleAudioToGraphSampleRate(track, aSegment);
 
       // Must notify first, since AppendFrom() will empty out aSegment
       NotifyDirectConsumers(track, aRawSegment ? aRawSegment : aSegment);
       track->mData->AppendFrom(aSegment); // note: aSegment is now dead
       appended = true;
-      graph->EnsureNextIteration();
+      graph->CurrentDriver()->EnsureNextIteration();
     } else {
       aSegment->Clear();
     }
   }
   return appended;
 }
 
 void
@@ -2567,38 +2400,38 @@ SourceMediaStream::EndTrack(TrackID aID)
   // ::EndAllTrackAndFinished() can end these before the sources call this
   if (!mFinished) {
     TrackData *track = FindDataForTrack(aID);
     if (track) {
       track->mCommands |= TRACK_END;
     }
   }
   if (auto graph = GraphImpl()) {
-    graph->EnsureNextIteration();
+    graph->CurrentDriver()->EnsureNextIteration();
   }
 }
 
 void
 SourceMediaStream::AdvanceKnownTracksTime(StreamTime aKnownTime)
 {
   MutexAutoLock lock(mMutex);
   MOZ_ASSERT(aKnownTime >= mUpdateKnownTracksTime);
   mUpdateKnownTracksTime = aKnownTime;
   if (auto graph = GraphImpl()) {
-    graph->EnsureNextIteration();
+    graph->CurrentDriver()->EnsureNextIteration();
   }
 }
 
 void
 SourceMediaStream::FinishWithLockHeld()
 {
   mMutex.AssertCurrentThreadOwns();
   mUpdateFinished = true;
   if (auto graph = GraphImpl()) {
-    graph->EnsureNextIteration();
+    graph->CurrentDriver()->EnsureNextIteration();
   }
 }
 
 void
 SourceMediaStream::EndAllTrackAndFinish()
 {
   MutexAutoLock lock(mMutex);
   for (uint32_t i = 0; i < mUpdateTracks.Length(); ++i) {
@@ -2613,17 +2446,17 @@ TrackTicks
 SourceMediaStream::GetBufferedTicks(TrackID aID)
 {
   StreamBuffer::Track* track  = mBuffer.FindTrack(aID);
   if (track) {
     MediaSegment* segment = track->GetSegment();
     if (segment) {
       return segment->GetDuration() -
         track->TimeToTicksRoundDown(
-          GraphTimeToStreamTime(GraphImpl()->mStateComputedTime));
+          GraphTimeToStreamTime(GraphImpl()->CurrentDriver()->StateComputedTime()));
     }
   }
   return 0;
 }
 
 void
 SourceMediaStream::RegisterForAudioMixing()
 {
@@ -2667,20 +2500,22 @@ MediaInputPort::Disconnect()
 
 MediaInputPort::InputInterval
 MediaInputPort::GetNextInputInterval(GraphTime aTime)
 {
   InputInterval result = { GRAPH_TIME_MAX, GRAPH_TIME_MAX, false };
   GraphTime t = aTime;
   GraphTime end;
   for (;;) {
-    if (!mDest->mBlocked.GetAt(t, &end))
+    if (!mDest->mBlocked.GetAt(t, &end)) {
       break;
-    if (end >= GRAPH_TIME_MAX)
+    }
+    if (end >= GRAPH_TIME_MAX) {
       return result;
+    }
     t = end;
   }
   result.mStart = t;
   GraphTime sourceEnd;
   result.mInputIsBlocked = mSource->mBlocked.GetAt(t, &sourceEnd);
   result.mEnd = std::min(end, sourceEnd);
   return result;
 }
@@ -2797,51 +2632,62 @@ ProcessedMediaStream::DestroyImpl()
   }
   MediaStream::DestroyImpl();
   // The stream order is only important if there are connections, in which
   // case MediaInputPort::Disconnect() called SetStreamOrderDirty().
   // MediaStreamGraphImpl::RemoveStream() will also call
   // SetStreamOrderDirty(), for other reasons.
 }
 
-MediaStreamGraphImpl::MediaStreamGraphImpl(bool aRealtime, TrackRate aSampleRate)
-  : mCurrentTime(0)
-  , mStateComputedTime(0)
-  , mProcessingGraphUpdateIndex(0)
+MediaStreamGraphImpl::MediaStreamGraphImpl(bool aRealtime,
+                                           TrackRate aSampleRate,
+                                           DOMMediaStream::TrackTypeHints aHint,
+                                           dom::AudioChannel aChannel)
+  : mProcessingGraphUpdateIndex(0)
   , mPortCount(0)
   , mMonitor("MediaStreamGraphImpl")
   , mLifecycleState(LIFECYCLE_THREAD_NOT_STARTED)
-  , mWaitState(WAITSTATE_RUNNING)
   , mEndTime(GRAPH_TIME_MAX)
   , mSampleRate(aSampleRate)
-  , mNeedAnotherIteration(false)
   , mForceShutDown(false)
   , mPostedRunInStableStateEvent(false)
   , mDetectedNotRunning(false)
   , mPostedRunInStableState(false)
   , mRealtime(aRealtime)
   , mNonRealtimeProcessing(false)
   , mStreamOrderDirty(false)
   , mLatencyLog(AsyncLatencyLogger::Get())
-  , mMixer(nullptr)
+  , mFarendObserverRef(nullptr)
   , mMemoryReportMonitor("MSGIMemory")
   , mSelfRef(MOZ_THIS_IN_INITIALIZER_LIST())
   , mAudioStreamSizes()
   , mNeedsMemoryReport(false)
 #ifdef DEBUG
   , mCanRunMessagesSynchronously(false)
 #endif
 {
 #ifdef PR_LOGGING
   if (!gMediaStreamGraphLog) {
     gMediaStreamGraphLog = PR_NewLogModule("MediaStreamGraph");
   }
 #endif
 
-  mCurrentTimeStamp = mInitialTimeStamp = mLastMainThreadUpdate = TimeStamp::Now();
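+  // Pick the initial graph driver: realtime graphs hinted to carry audio get
+  // an AudioCallbackDriver (registered as the mixer's callback), other
+  // realtime graphs are driven by the system clock, and offline graphs get an
+  // OfflineClockDriver.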
+  if (mRealtime) {
+    if (aHint & DOMMediaStream::HINT_CONTENTS_AUDIO) {
+      AudioCallbackDriver* driver = new AudioCallbackDriver(this, aChannel);
+      mDriver = driver;
+      mMixer.AddCallback(driver);
+    } else {
+      mDriver = new SystemClockDriver(this);
+    }
+  } else {
+    mDriver = new OfflineClockDriver(this, MEDIA_GRAPH_TARGET_PERIOD_MS);
+  }
+
+  mLastMainThreadUpdate = TimeStamp::Now();
 
   RegisterWeakMemoryReporter(this);
 }
 
 void
 MediaStreamGraphImpl::Destroy()
 {
   // First unregister from memory reporting.
@@ -2866,43 +2712,45 @@ MediaStreamGraphShutdownObserver::Observ
     }
     nsContentUtils::UnregisterShutdownObserver(this);
     gShutdownObserverRegistered = false;
   }
   return NS_OK;
 }
 
 MediaStreamGraph*
-MediaStreamGraph::GetInstance()
+MediaStreamGraph::GetInstance(DOMMediaStream::TrackTypeHints aHint, dom::AudioChannel aChannel)
 {
   NS_ASSERTION(NS_IsMainThread(), "Main thread only");
 
   if (!gGraph) {
     if (!gShutdownObserverRegistered) {
       gShutdownObserverRegistered = true;
       nsContentUtils::RegisterShutdownObserver(new MediaStreamGraphShutdownObserver());
     }
 
-    AudioStream::InitPreferredSampleRate();
+    CubebUtils::InitPreferredSampleRate();
 
-    gGraph = new MediaStreamGraphImpl(true, AudioStream::PreferredSampleRate());
+    gGraph = new MediaStreamGraphImpl(true, CubebUtils::PreferredSampleRate(), aHint, aChannel);
 
     STREAM_LOG(PR_LOG_DEBUG, ("Starting up MediaStreamGraph %p", gGraph));
   }
 
   return gGraph;
 }
 
 MediaStreamGraph*
 MediaStreamGraph::CreateNonRealtimeInstance(TrackRate aSampleRate)
 {
   NS_ASSERTION(NS_IsMainThread(), "Main thread only");
 
   MediaStreamGraphImpl* graph = new MediaStreamGraphImpl(false, aSampleRate);
 
+  STREAM_LOG(PR_LOG_DEBUG, ("Starting up Offline MediaStreamGraph %p", graph));
+
   return graph;
 }
 
 void
 MediaStreamGraph::DestroyNonRealtimeInstance(MediaStreamGraph* aGraph)
 {
   NS_ASSERTION(NS_IsMainThread(), "Main thread only");
   MOZ_ASSERT(aGraph->IsNonRealtime(), "Should not destroy the global graph here");
@@ -2933,21 +2781,18 @@ MediaStreamGraphImpl::CollectReports(nsI
 {
   // Clears out the report array after we're done with it.
   ArrayClearer reportCleanup(mAudioStreamSizes);
 
   {
     MonitorAutoLock memoryReportLock(mMemoryReportMonitor);
     mNeedsMemoryReport = true;
 
-    {
-      // Wake up the MSG thread.
-      MonitorAutoLock monitorLock(mMonitor);
-      EnsureImmediateWakeUpLocked(monitorLock);
-    }
+    // Wake up the MSG thread.
+    CurrentDriver()->WakeUp();
 
     if (mLifecycleState >= LIFECYCLE_WAITING_FOR_THREAD_SHUTDOWN) {
       // Shutting down, nothing to report.
       return NS_OK;
     }
 
     // Wait for up to one second for the report to complete.
     nsresult rv;
@@ -3065,17 +2910,18 @@ MediaStreamGraph::StartNonRealtimeProces
 {
   NS_ASSERTION(NS_IsMainThread(), "main thread only");
 
   MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(this);
   NS_ASSERTION(!graph->mRealtime, "non-realtime only");
 
   if (graph->mNonRealtimeProcessing)
     return;
-  graph->mEndTime = graph->mCurrentTime +
+
+  graph->mEndTime = graph->IterationEnd() +
     RateConvertTicksRoundUp(graph->GraphRate(), aRate, aTicksToProcess);
   graph->mNonRealtimeProcessing = true;
   graph->EnsureRunInStableState();
 }
 
 void
 ProcessedMediaStream::AddInput(MediaInputPort* aPort)
 {
--- a/content/media/MediaStreamGraph.h
+++ b/content/media/MediaStreamGraph.h
@@ -12,43 +12,36 @@
 #include "nsTArray.h"
 #include "nsIRunnable.h"
 #include "StreamBuffer.h"
 #include "TimeVarying.h"
 #include "VideoFrameContainer.h"
 #include "VideoSegment.h"
 #include "MainThreadUtils.h"
 #include "nsAutoRef.h"
+#include "GraphDriver.h"
 #include <speex/speex_resampler.h>
-#include "AudioMixer.h"
 #include "mozilla/dom/AudioChannelBinding.h"
+#include "DOMMediaStream.h"
 
 class nsIRunnable;
 
 template <>
 class nsAutoRefTraits<SpeexResamplerState> : public nsPointerRefTraits<SpeexResamplerState>
 {
   public:
   static void Release(SpeexResamplerState* aState) { speex_resampler_destroy(aState); }
 };
 
 namespace mozilla {
 
-class DOMMediaStream;
-
 #ifdef PR_LOGGING
 extern PRLogModuleInfo* gMediaStreamGraphLog;
 #endif
 
-/**
- * Microseconds relative to the start of the graph timeline.
- */
-typedef int64_t GraphTime;
-const GraphTime GRAPH_TIME_MAX = MEDIA_TIME_MAX;
-
 /*
  * MediaStreamGraph is a framework for synchronized audio/video processing
  * and playback. It is designed to be used by other browser components such as
  * HTML media elements, media capture APIs, real-time media streaming APIs,
  * multitrack media APIs, and advanced audio APIs.
  *
  * The MediaStreamGraph uses a dedicated thread to process media --- the media
  * graph thread. This ensures that we can process media through the graph
@@ -436,16 +429,21 @@ public:
 #ifdef DEBUG
   void DumpTrackInfo() { return mBuffer.DumpTrackInfo(); }
 #endif
   void SetAudioOutputVolumeImpl(void* aKey, float aVolume);
   void AddAudioOutputImpl(void* aKey)
   {
     mAudioOutputs.AppendElement(AudioOutput(aKey));
   }
+  // Returns true if this stream has an audio output.
+  bool HasAudioOutput()
+  {
+    return !mAudioOutputs.IsEmpty();
+  }
   void RemoveAudioOutputImpl(void* aKey);
   void AddVideoOutputImpl(already_AddRefed<VideoFrameContainer> aContainer)
   {
     *mVideoOutputs.AppendElement() = aContainer;
   }
   void RemoveVideoOutputImpl(VideoFrameContainer* aContainer)
   {
     mVideoOutputs.RemoveElement(aContainer);
@@ -617,25 +615,17 @@ protected:
     // When we started audio playback for this track.
     // Add mStream->GetPosition() to find the current audio playback position.
     GraphTime mAudioPlaybackStartTime;
     // Amount of time that we've wanted to play silence because of the stream
     // blocking.
     MediaTime mBlockedAudioTime;
     // Last tick written to the audio output.
     TrackTicks mLastTickWritten;
-    RefPtr<AudioStream> mStream;
     TrackID mTrackID;
-
-    size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const
-    {
-      size_t amount = 0;
-      amount += mStream->SizeOfIncludingThis(aMallocSizeOf);
-      return amount;
-    }
   };
   nsTArray<AudioOutputStream> mAudioOutputStreams;
 
   /**
    * When true, this means the stream will be finished once all
    * buffered data has been consumed.
    */
   bool mFinished;
@@ -1131,19 +1121,21 @@ protected:
  * object too.
  */
 class MediaStreamGraph {
 public:
   // We ensure that the graph current time advances in multiples of
   // IdealAudioBlockSize()/AudioStream::PreferredSampleRate(). A stream that
   // never blocks and has a track with the ideal audio rate will produce audio
   // in multiples of the block size.
 
   // Main thread only
-  static MediaStreamGraph* GetInstance();
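+  // aHint selects the initial graph driver (an AudioCallbackDriver when audio
+  // contents are hinted), and aChannel the audio channel used for output; see
+  // MediaStreamGraphImpl's constructor.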
+  static MediaStreamGraph* GetInstance(DOMMediaStream::TrackTypeHints aHint = DOMMediaStream::HINT_CONTENTS_UNKNOWN,
+                                       dom::AudioChannel aChannel = dom::AudioChannel::Normal);
   static MediaStreamGraph* CreateNonRealtimeInstance(TrackRate aSampleRate);
   // Idempotent
   static void DestroyNonRealtimeInstance(MediaStreamGraph* aGraph);
 
   // Control API.
   /**
    * Create a stream that a media decoder (or some other source of
    * media data, such as a camera) can write to.
--- a/content/media/MediaStreamGraphImpl.h
+++ b/content/media/MediaStreamGraphImpl.h
@@ -10,56 +10,24 @@
 
 #include "mozilla/Monitor.h"
 #include "mozilla/TimeStamp.h"
 #include "nsIMemoryReporter.h"
 #include "nsIThread.h"
 #include "nsIRunnable.h"
 #include "Latency.h"
 #include "mozilla/WeakPtr.h"
+#include "GraphDriver.h"
+#include "AudioMixer.h"
 
 namespace mozilla {
 
 template <typename T>
 class LinkedList;
-
-class AudioMixer;
-
-/**
- * Assume we can run an iteration of the MediaStreamGraph loop in this much time
- * or less.
- * We try to run the control loop at this rate.
- */
-static const int MEDIA_GRAPH_TARGET_PERIOD_MS = 10;
-
-/**
- * Assume that we might miss our scheduled wakeup of the MediaStreamGraph by
- * this much.
- */
-static const int SCHEDULE_SAFETY_MARGIN_MS = 10;
-
-/**
- * Try have this much audio buffered in streams and queued to the hardware.
- * The maximum delay to the end of the next control loop
- * is 2*MEDIA_GRAPH_TARGET_PERIOD_MS + SCHEDULE_SAFETY_MARGIN_MS.
- * There is no point in buffering more audio than this in a stream at any
- * given time (until we add processing).
- * This is not optimal yet.
- */
-static const int AUDIO_TARGET_MS = 2*MEDIA_GRAPH_TARGET_PERIOD_MS +
-    SCHEDULE_SAFETY_MARGIN_MS;
-
-/**
- * Try have this much video buffered. Video frames are set
- * near the end of the iteration of the control loop. The maximum delay
- * to the setting of the next video frame is 2*MEDIA_GRAPH_TARGET_PERIOD_MS +
- * SCHEDULE_SAFETY_MARGIN_MS. This is not optimal yet.
- */
-static const int VIDEO_TARGET_MS = 2*MEDIA_GRAPH_TARGET_PERIOD_MS +
-    SCHEDULE_SAFETY_MARGIN_MS;
+class AudioOutputObserver;
 
 /**
  * A per-stream update message passed from the media graph thread to the
  * main thread.
  */
 struct StreamUpdate {
   int64_t mGraphUpdateIndex;
   nsRefPtr<MediaStream> mStream;
@@ -95,53 +63,63 @@ public:
 
 protected:
   // We do not hold a reference to mStream. The graph will be holding
   // a reference to the stream until the Destroy message is processed. The
   // last message referencing a stream is the Destroy message for that stream.
   MediaStream* mStream;
 };
 
+class MessageBlock {
+public:
+  int64_t mGraphUpdateIndex;
+  nsTArray<nsAutoPtr<ControlMessage> > mMessages;
+};
+
 /**
  * The implementation of a media stream graph. This class is private to this
  * file. It's not in the anonymous namespace because MediaStream needs to
  * be able to friend it.
  *
  * Currently we have one global instance per process, and one per each
  * OfflineAudioContext object.
  */
 class MediaStreamGraphImpl : public MediaStreamGraph,
                              public nsIMemoryReporter {
 public:
-  NS_DECL_ISUPPORTS
+  NS_DECL_THREADSAFE_ISUPPORTS
   NS_DECL_NSIMEMORYREPORTER
 
   /**
    * Set aRealtime to true in order to create a MediaStreamGraph which provides
    * support for real-time audio and video.  Set it to false in order to create
    * a non-realtime instance which just churns through its inputs and produces
    * output.  Those objects currently only support audio, and are used to
    * implement OfflineAudioContext.  They do not support MediaStream inputs.
    */
-  explicit MediaStreamGraphImpl(bool aRealtime, TrackRate aSampleRate);
+  explicit MediaStreamGraphImpl(bool aRealtime,
+                                TrackRate aSampleRate,
+                                DOMMediaStream::TrackTypeHints aHint = DOMMediaStream::HINT_CONTENTS_UNKNOWN,
+                                dom::AudioChannel aChannel = dom::AudioChannel::Normal);
 
   /**
    * Unregisters memory reporting and deletes this instance. This should be
    * called instead of calling the destructor directly.
    */
   void Destroy();
 
   // Main thread only.
   /**
    * This runs every time we need to sync state from the media graph thread
    * to the main thread while the main thread is not in the middle
    * of a script. It runs during a "stable state" (per HTML5) or during
    * an event posted to the main thread.
+   * aSourceIsMSG selects which runnable-dispatch flag is cleared:
+   * mPostedRunInStableStateEvent when true, mPostedRunInStableState when false.
    */
-  void RunInStableState();
+  void RunInStableState(bool aSourceIsMSG);
   /**
    * Ensure a runnable to run RunInStableState is posted to the appshell to
    * run at the next stable state (per HTML5).
    * See EnsureStableStateEventPosted.
    */
   void EnsureRunInStableState();
   /**
    * Called to apply a StreamUpdate to its stream.
@@ -165,35 +143,39 @@ public:
   void ShutdownThreads();
 
   /**
    * Called before the thread runs.
    */
   void Init();
   // The following methods run on the graph thread (or possibly the main thread if
   // mLifecycleState > LIFECYCLE_RUNNING)
-  /**
-   * Runs main control loop on the graph thread. Normally a single invocation
-   * of this runs for the entire lifetime of the graph thread.
-   */
-  void RunThread();
-  /**
-   * Call this to indicate that another iteration of the control loop is
-   * required on its regular schedule. The monitor must not be held.
+  /**
+   * This does the actual iteration: message processing, MediaStream ordering,
+   * blocking computation and processing.
    */
-  void EnsureNextIteration();
-  /**
-   * As above, but with the monitor already held.
-   */
-  void EnsureNextIterationLocked(MonitorAutoLock& aLock);
-  /**
-   * Call this to indicate that another iteration of the control loop is
-   * required immediately. The monitor must already be held.
-   */
-  void EnsureImmediateWakeUpLocked(MonitorAutoLock& aLock);
+  void DoIteration();
+
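+  /**
+   * Run one iteration of the graph, processing media from aFrom to aTo and
+   * computing state up to aStateEnd. Returns true if the graph should keep
+   * running.
+   */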
+  bool OneIteration(GraphTime aFrom, GraphTime aTo,
+                    GraphTime aStateFrom, GraphTime aStateEnd);
+
+  bool Running() {
+    mMonitor.AssertCurrentThreadOwns();
+    return mLifecycleState == LIFECYCLE_RUNNING;
+  }
+
+  // Get the message queue for this iteration. Called from the current
+  // GraphDriver's thread, with the monitor held.
+  nsTArray<MessageBlock>& MessageQueue() {
+    mMonitor.AssertCurrentThreadOwns();
+    return mFrontMessageQueue;
+  }
+
+  /* This is the end of the current iteration, that is, the current time of the
+   * graph. */
+  GraphTime IterationEnd();
   /**
    * Ensure there is an event posted to the main thread to run RunInStableState.
    * mMonitor must be held.
    * See EnsureRunInStableState
    */
   void EnsureStableStateEventPosted();
   /**
    * Generate messages to the main thread to update it for all state changes.
@@ -208,20 +190,34 @@ public:
   /**
    * If we are rendering in non-realtime mode, we don't want to send messages to
    * the main thread at each iteration for performance reasons. We instead
    * notify the main thread at the same rate
    */
   bool ShouldUpdateMainThread();
   // The following methods are the various stages of RunThread processing.
   /**
-   * Compute a new current time for the graph and advance all on-graph-thread
-   * state to the new current time.
+   * Advance all stream state to the new current time.
+   */
+  void UpdateCurrentTimeForStreams(GraphTime aPrevCurrentTime,
+                                   GraphTime aNextCurrentTime);
+  /**
+   * Process graph messages for this iteration, update stream processing order,
+   * and recompute stream blocking until aEndBlockingDecisions.
    */
-  void UpdateCurrentTime();
+  void UpdateGraph(GraphTime aEndBlockingDecisions);
+
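+  /**
+   * Swap the front message queue (processed by the graph during an iteration)
+   * with the back queue (where the main thread appends messages). The monitor
+   * must be held.
+   */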
+  void SwapMessageQueues() {
+    mMonitor.AssertCurrentThreadOwns();
+    mFrontMessageQueue.SwapElements(mBackMessageQueue);
+  }
+  /**
+   * Do all the processing and play the audio and video, from aFrom to aTo.
+   */
+  void Process(GraphTime aFrom, GraphTime aTo);
   /**
    * Update the consumption state of aStream to reflect whether its data
    * is needed or not.
    */
   void UpdateConsumptionState(SourceMediaStream* aStream);
   /**
    * Extract any state updates pending in aStream, and apply them.
    */
@@ -268,16 +264,23 @@ public:
    * If this results in decisions that need to be revisited at some point
    * in the future, *aEnd will be reduced to the first time in the future to
    * recompute those decisions.
    */
   void RecomputeBlockingAt(const nsTArray<MediaStream*>& aStreams,
                            GraphTime aTime, GraphTime aEndBlockingDecisions,
                            GraphTime* aEnd);
   /**
+   * Returns smallest value of t such that
+   * TimeToTicksRoundUp(aSampleRate, t) is a multiple of WEBAUDIO_BLOCK_SIZE
+   * and floor(TimeToTicksRoundUp(aSampleRate, t)/WEBAUDIO_BLOCK_SIZE) >
+   * floor(TimeToTicksRoundUp(aSampleRate, aTime)/WEBAUDIO_BLOCK_SIZE).
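+   * For example, with WEBAUDIO_BLOCK_SIZE == 128, a time whose tick count is
+   * 130 rounds up to the time corresponding to tick 256.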
+   */
+  GraphTime RoundUpToNextAudioBlock(GraphTime aTime);
+  /**
    * Produce data for all streams >= aStreamIndex for the given time interval.
    * Advances block by block, each iteration producing data for all streams
    * for a single block.
    * This is called whenever we have an AudioNodeStream in the graph.
    */
   void ProduceDataForStreamsBlockByBlock(uint32_t aStreamIndex,
                                          TrackRate aSampleRate,
                                          GraphTime aFrom,
@@ -285,16 +288,17 @@ public:
   /**
    * Returns true if aStream will underrun at aTime for its own playback.
    * aEndBlockingDecisions is when we plan to stop making blocking decisions.
    * *aEnd will be reduced to the first time in the future to recompute these
    * decisions.
    */
   bool WillUnderrun(MediaStream* aStream, GraphTime aTime,
                     GraphTime aEndBlockingDecisions, GraphTime* aEnd);
+
   /**
    * Given a graph time aTime, convert it to a stream time taking into
    * account the time during which aStream is scheduled to be blocked.
    */
   StreamTime GraphTimeToStreamTime(MediaStream* aStream, GraphTime aTime);
   /**
    * Given a graph time aTime, convert it to a stream time taking into
    * account the time during which aStream is scheduled to be blocked, and
@@ -322,18 +326,17 @@ public:
   /**
    * Call NotifyHaveCurrentData on aStream's listeners.
    */
   void NotifyHasCurrentData(MediaStream* aStream);
   /**
    * If aStream needs an audio stream but doesn't have one, create it.
    * If aStream doesn't need an audio stream but has one, destroy it.
    */
-  void CreateOrDestroyAudioStreams(GraphTime aAudioOutputStartTime,
-                                   MediaStream* aStream);
+  void CreateOrDestroyAudioStreams(GraphTime aAudioOutputStartTime, MediaStream* aStream);
   /**
    * Queue audio (mix of stream audio and silence for blocked intervals)
    * to the audio output stream. Returns the number of frames played.
    */
   TrackTicks PlayAudio(MediaStream* aStream, GraphTime aFrom, GraphTime aTo);
   /**
    * Set the correct current video frame for stream aStream.
    */
@@ -373,27 +376,21 @@ public:
   void DestroyPort(MediaInputPort* aPort);
   /**
    * Mark the media stream order as dirty.
    */
   void SetStreamOrderDirty()
   {
     mStreamOrderDirty = true;
   }
-  /**
-   * Pause all AudioStreams being written to by MediaStreams
-   */
-  void PauseAllAudioOutputs();
-  /**
-   * Resume all AudioStreams being written to by MediaStreams
-   */
-  void ResumeAllAudioOutputs();
 
   TrackRate AudioSampleRate() const { return mSampleRate; }
   TrackRate GraphRate() const { return mSampleRate; }
+  // Always stereo for now.
+  uint32_t AudioChannelCount() { return 2; }
 
   double MediaTimeToSeconds(GraphTime aTime)
   {
     return TrackTicksToSeconds(GraphRate(), aTime);
   }
   GraphTime SecondsToMediaTime(double aS)
   {
     return SecondsToTicksRoundDown(GraphRate(), aS);
@@ -403,23 +400,50 @@ public:
     return RateConvertTicksRoundDown(GraphRate(), 1000, aMS);
   }
 
   TrackTicks TimeToTicksRoundDown(TrackRate aRate, StreamTime aTime)
   {
     return RateConvertTicksRoundDown(aRate, GraphRate(), aTime);
   }
 
-  // Data members
+  /**
+   * Signal to the graph that the thread has paused indefinitely, or has
+   * resumed.
+   */
+  void PausedIndefinitly();
+  void ResumedFromPaused();
+
+  GraphDriver* CurrentDriver() {
+    return mDriver;
+  }
 
   /**
-   * Media graph thread.
-   * Readonly after initialization on the main thread.
+   * Effectively set the new driver, while we are switching.
+   * It is only safe to call this at the very end of an iteration, when there
+   * has been a SwitchAtNextIteration call during the iteration. The driver
+   * should return and pass control to the new driver shortly after.
    */
-  nsCOMPtr<nsIThread> mThread;
+  void SetCurrentDriver(GraphDriver* aDriver) {
+    mDriver = aDriver;
+  }
+
+  Monitor& GetMonitor() {
+    return mMonitor;
+  }
+
+  // Data members
+  //
+  /**
+   * Graphs own a reference to their driver until shutdown. When a driver
+   * switch occurs, the previous driver is either deleted, or its ownership is
+   * passed to an event that takes care of the asynchronous cleanup, as an
+   * audio stream can take some time to shut down.
+   */
+  nsRefPtr<GraphDriver> mDriver;
 
   // The following state is managed on the graph thread only, unless
   // mLifecycleState > LIFECYCLE_RUNNING in which case the graph thread
   // is not running and this state can be used from the main thread.
 
   /**
    * The graph keeps a reference to each stream.
    * References are maintained manually to simplify reordering without
@@ -428,35 +452,16 @@ public:
   nsTArray<MediaStream*> mStreams;
   /**
    * Streams from mFirstCycleBreaker to the end of mStreams produce output
    * before they receive input.  They correspond to DelayNodes that are in
    * cycles.
    */
   uint32_t mFirstCycleBreaker;
   /**
-   * The current graph time for the current iteration of the RunThread control
-   * loop.
-   */
-  GraphTime mCurrentTime;
-  /**
-   * Blocking decisions and all stream contents have been computed up to this
-   * time. The next batch of updates from the main thread will be processed
-   * at this time. Always >= mCurrentTime.
-   */
-  GraphTime mStateComputedTime;
-  /**
-   * A timestamp corresponding to INITIAL_CURRENT_TIME.
-   */
-  TimeStamp mInitialTimeStamp;
-  /**
-   * The real timestamp of the latest run of UpdateCurrentTime.
-   */
-  TimeStamp mCurrentTimeStamp;
-  /**
    * Date of the last time we updated the main thread with the graph state.
    */
   TimeStamp mLastMainThreadUpdate;
   /**
    * Which update batch we are currently processing.
    */
   int64_t mProcessingGraphUpdateIndex;
   /**
@@ -477,25 +482,30 @@ public:
   /**
    * State to copy to main thread
    */
   nsTArray<StreamUpdate> mStreamUpdates;
   /**
    * Runnables to run after the next update to main thread state.
    */
   nsTArray<nsCOMPtr<nsIRunnable> > mUpdateRunnables;
-  struct MessageBlock {
-    int64_t mGraphUpdateIndex;
-    nsTArray<nsAutoPtr<ControlMessage> > mMessages;
-  };
   /**
    * A list of batches of messages to process. Each batch is processed
    * as an atomic unit.
    */
-  nsTArray<MessageBlock> mMessageQueue;
+  /* Message queue processed by the MSG thread during an iteration. */
+  nsTArray<MessageBlock> mFrontMessageQueue;
+  /* Message queue in which the main thread appends messages. */
+  nsTArray<MessageBlock> mBackMessageQueue;
+
+  /* True if there will be messages to process if we swap the message queues. */
+  bool MessagesQueued() {
+    mMonitor.AssertCurrentThreadOwns();
+    return !mBackMessageQueue.IsEmpty();
+  }
   /**
    * This enum specifies where this graph is in its lifecycle. This is used
    * to control shutdown.
    * Shutdown is tricky because it can happen in two different ways:
    * 1) Shutdown due to inactivity. RunThread() detects that it has no
    * pending messages and no streams, and exits. The next RunInStableState()
    * checks if there are new pending messages from the main thread (true only
    * if new stream creation raced with shutdown); if there are, it revives
@@ -529,32 +539,16 @@ public:
     // Graph threads have shut down but we're waiting for remaining streams
     // to be destroyed. Only happens during application shutdown and on
     // completed non-realtime graphs, since normally we'd only shut down a
     // realtime graph when it has no streams.
     LIFECYCLE_WAITING_FOR_STREAM_DESTRUCTION
   };
   LifecycleState mLifecycleState;
   /**
-   * This enum specifies the wait state of the graph thread.
-   */
-  enum WaitState {
-    // RunThread() is running normally
-    WAITSTATE_RUNNING,
-    // RunThread() is paused waiting for its next iteration, which will
-    // happen soon
-    WAITSTATE_WAITING_FOR_NEXT_ITERATION,
-    // RunThread() is paused indefinitely waiting for something to change
-    WAITSTATE_WAITING_INDEFINITELY,
-    // Something has signaled RunThread() to wake up immediately,
-    // but it hasn't done so yet
-    WAITSTATE_WAKING_UP
-  };
-  WaitState mWaitState;
-  /**
    * The graph should stop processing at or after this time.
    */
   GraphTime mEndTime;
 
   /**
    * Sample rate at which this graph runs. For real time graphs, this is
    * the rate of the audio mixer. For offline graphs, this is the rate specified
    * at construction.
@@ -608,20 +602,18 @@ public:
    * True when a change has happened which requires us to recompute the stream
    * blocking order.
    */
   bool mStreamOrderDirty;
   /**
    * Hold a ref to the Latency logger
    */
   nsRefPtr<AsyncLatencyLogger> mLatencyLog;
-  /**
-   * If this is not null, all the audio output for the MSG will be mixed down.
-   */
-  nsAutoPtr<AudioMixer> mMixer;
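+  /**
+   * All the audio output for the MSG is mixed down through this mixer; when
+   * an AudioCallbackDriver is in use, it is registered as the mixer's
+   * callback (see the constructor).
+   */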
+  AudioMixer mMixer;
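+  /**
+   * Keeps the far-end AudioOutputObserver alive for the lifetime of the
+   * graph (the far-end data is used by echo cancellation).
+   */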
+  nsRefPtr<AudioOutputObserver> mFarendObserverRef;
 
 private:
   virtual ~MediaStreamGraphImpl();
 
   MOZ_DEFINE_MALLOC_SIZE_OF(MallocSizeOf)
 
   /**
    * Used to signal that a memory report has been requested.
--- a/content/media/MediaStreamTrack.cpp
+++ b/content/media/MediaStreamTrack.cpp
@@ -49,10 +49,16 @@ MediaStreamTrack::GetId(nsAString& aID)
 
 void
 MediaStreamTrack::SetEnabled(bool aEnabled)
 {
   mEnabled = aEnabled;
   mStream->SetTrackEnabled(mTrackID, aEnabled);
 }
 
+void
+MediaStreamTrack::Stop()
+{
+  mStream->StopTrack(mTrackID);
+}
+
 }
 }
--- a/content/media/MediaStreamTrack.h
+++ b/content/media/MediaStreamTrack.h
@@ -43,16 +43,17 @@ public:
   virtual VideoStreamTrack* AsVideoStreamTrack() { return nullptr; }
 
   // WebIDL
   virtual void GetKind(nsAString& aKind) = 0;
   void GetId(nsAString& aID);
   void GetLabel(nsAString& aLabel) { aLabel.Truncate(); }
   bool Enabled() { return mEnabled; }
   void SetEnabled(bool aEnabled);
+  void Stop();
 
   // Notifications from the MediaStreamGraph
   void NotifyEnded() { mEnded = true; }
 
 protected:
   virtual ~MediaStreamTrack();
 
   nsRefPtr<DOMMediaStream> mStream;
--- a/content/media/TrackUnionStream.h
+++ b/content/media/TrackUnionStream.h
@@ -21,18 +21,18 @@ namespace mozilla {
  * See MediaStreamGraph::CreateTrackUnionStream.
  * This file is only included by MediaStreamGraph.cpp so it's OK to put the
  * entire implementation in this header file.
  */
 class TrackUnionStream : public ProcessedMediaStream {
 public:
   TrackUnionStream(DOMMediaStream* aWrapper) :
     ProcessedMediaStream(aWrapper),
-    mFilterCallback(nullptr),
-    mMaxTrackID(0) {}
+    mFilterCallback(nullptr)
+  {}
 
   virtual void RemoveInput(MediaInputPort* aPort) MOZ_OVERRIDE
   {
     for (int32_t i = mTrackMap.Length() - 1; i >= 0; --i) {
       if (mTrackMap[i].mInputPort == aPort) {
         EndTrack(i);
         mTrackMap.RemoveElementAt(i);
       }
@@ -158,20 +158,34 @@ protected:
     TrackID mInputTrackID;
     TrackID mOutputTrackID;
     nsAutoPtr<MediaSegment> mSegment;
   };
 
   uint32_t AddTrack(MediaInputPort* aPort, StreamBuffer::Track* aTrack,
                     GraphTime aFrom)
   {
-    // Use the ID of the source track if we can, otherwise allocate a new
-    // unique ID
-    TrackID id = std::max(mMaxTrackID + 1, aTrack->GetID());
-    mMaxTrackID = id;
+    // Use the ID of the source track if it's not already assigned to a track,
+    // otherwise allocate a new unique ID.
+    TrackID id = aTrack->GetID();
+    TrackID maxTrackID = 0;
+    for (uint32_t i = 0; i < mTrackMap.Length(); ++i) {
+      TrackID outID = mTrackMap[i].mOutputTrackID;
+      maxTrackID = std::max(maxTrackID, outID);
+    }
+    // Note: we might have removed it here, but it might still be in the
+    // StreamBuffer if the TrackUnionStream sees its input stream flip from
+    // A to B, where both A and B have a track with the same ID
+    while (1) {
+      // search until we find one not in use here, and not in mBuffer
+      if (!mBuffer.FindTrack(id)) {
+        break;
+      }
+      id = ++maxTrackID;
+    }
 
     TrackRate rate = aTrack->GetRate();
     // Round up the track start time so the track, if anything, starts a
     // little later than the true time. This means we'll have enough
     // samples in our input stream to go just beyond the destination time.
     TrackTicks outputStart = TimeToTicksRoundUp(rate, GraphTimeToStreamTime(aFrom));
 
     nsAutoPtr<MediaSegment> segment;
@@ -245,18 +259,17 @@ protected:
       if (interval.mStart >= interval.mEnd)
         break;
       next = interval.mEnd;
 
       // Ticks >= startTicks and < endTicks are in the interval
       StreamTime outputEnd = GraphTimeToStreamTime(interval.mEnd);
       TrackTicks startTicks = outputTrack->GetEnd();
       StreamTime outputStart = GraphTimeToStreamTime(interval.mStart);
-      NS_WARN_IF_FALSE(startTicks == TimeToTicksRoundUp(rate, outputStart),
-                       "Samples missing");
+      MOZ_ASSERT(startTicks == TimeToTicksRoundUp(rate, outputStart), "Samples missing");
       TrackTicks endTicks = TimeToTicksRoundUp(rate, outputEnd);
       TrackTicks ticks = endTicks - startTicks;
       StreamTime inputStart = source->GraphTimeToStreamTime(interval.mStart);
 
       if (interval.mInputIsBlocked) {
         // Maybe the input track ended?
         segment->AppendNullData(ticks);
         STREAM_LOG(PR_LOG_DEBUG+1, ("TrackUnionStream %p appending %lld ticks of null data to track %d",
@@ -339,14 +352,13 @@ protected:
                                     outputTrack->GetRate(), startTicks, 0,
                                     *segment);
       }
       outputTrack->GetSegment()->AppendFrom(segment);
     }
   }
 
   nsTArray<TrackMapEntry> mTrackMap;
-  TrackID mMaxTrackID;
 };
 
 }
 
 #endif /* MOZILLA_MEDIASTREAMGRAPH_H_ */
new file mode 100644
--- /dev/null
+++ b/content/media/compiledtest/TestAudioBuffers.cpp
@@ -0,0 +1,60 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include <stdint.h>
+#include <stdio.h>
+#include <assert.h>
+#include "mozilla/NullPtr.h"
+#include "AudioBufferUtils.h"
+
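+// Sanity test for AudioCallbackBufferWrapper and SpillBuffer: fill a spill
+// buffer, drain it into a callback buffer, and check that the expected
+// samples reach the callback's output.
+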
+const uint32_t FRAMES = 256;
+const uint32_t CHANNELS = 2;
+const uint32_t SAMPLES = CHANNELS * FRAMES;
+
+int main() {
+  mozilla::AudioCallbackBufferWrapper<float, CHANNELS> mBuffer;
+  mozilla::SpillBuffer<float, 128, CHANNELS> b;
+  float fromCallback[SAMPLES];
+  float other[SAMPLES];
+
+  for (uint32_t i = 0; i < SAMPLES; i++) {
+    other[i] = 1.0;
+    fromCallback[i] = 0.0;
+  }
+
+  // Set the buffer in the wrapper from the callback
+  mBuffer.SetBuffer(fromCallback, FRAMES);
+
+  // Fill the SpillBuffer with data.
+  assert(b.Fill(other, 15) == 15);
+  assert(b.Fill(other, 17) == 17);
+  for (uint32_t i = 0; i < 32 * CHANNELS; i++) {
+    other[i] = 0.0;
+  }
+
+  // Empty it in the AudioCallbackBufferWrapper
+  assert(b.Empty(mBuffer) == 32);
+
+  // Check that Available() returns something reasonable.
+  assert(mBuffer.Available() == FRAMES - 32);
+
+  // Fill the buffer with the rest of the data
+  mBuffer.WriteFrames(other + 32 * CHANNELS, FRAMES - 32);
+
+  // Check the buffer is now full
+  assert(mBuffer.Available() == 0);
+
+  for (uint32_t i = 0; i < SAMPLES; i++) {
+    if (fromCallback[i] != 1.0) {
+      fprintf(stderr, "Difference at %u (%f != %f)\n", i, fromCallback[i], 1.0);
+      assert(false);
+    }
+  }
+
+  assert(b.Fill(other, FRAMES) == 128);
+  assert(b.Fill(other, FRAMES) == 0);
+  assert(b.Empty(mBuffer) == 0);
+
+  return 0;
+}
--- a/content/media/compiledtest/TestAudioMixer.cpp
+++ b/content/media/compiledtest/TestAudioMixer.cpp
@@ -4,35 +4,38 @@
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "AudioMixer.h"
 #include <assert.h>
 
 using mozilla::AudioDataValue;
 using mozilla::AudioSampleFormat;
 
+struct MixerConsumer : public mozilla::MixerCallbackReceiver
+{
 /* In this test, the different audio streams and channels are always created to
  * cancel each other. */
-void MixingDone(AudioDataValue* aData, AudioSampleFormat aFormat, uint32_t aChannels, uint32_t aFrames, uint32_t aSampleRate)
-{
-  bool silent = true;
-  for (uint32_t i = 0; i < aChannels * aFrames; i++) {
-    if (aData[i] != 0.0) {
-      if (aFormat == mozilla::AUDIO_FORMAT_S16) {
-        fprintf(stderr, "Sample at %d is not silent: %d\n", i, (short)aData[i]);
-      } else {
-        fprintf(stderr, "Sample at %d is not silent: %f\n", i, (float)aData[i]);
+  void MixerCallback(AudioDataValue* aData, AudioSampleFormat aFormat, uint32_t aChannels, uint32_t aFrames, uint32_t aSampleRate)
+  {
+    bool silent = true;
+    for (uint32_t i = 0; i < aChannels * aFrames; i++) {
+      if (aData[i] != 0.0) {
+        if (aFormat == mozilla::AUDIO_FORMAT_S16) {
+          fprintf(stderr, "Sample at %d is not silent: %d\n", i, (short)aData[i]);
+        } else {
+          fprintf(stderr, "Sample at %d is not silent: %f\n", i, (float)aData[i]);
+        }
+        silent = false;
       }
-      silent = false;
+    }
+    if (!silent) {
+      MOZ_CRASH();
     }
   }
-  if (!silent) {
-    MOZ_CRASH();
-  }
-}
+};
 
 /* Helper function to give us the maximum and minimum value that don't clip,
  * for a given sample format (integer or floating-point). */
 template<typename T>
 T GetLowValue();
 
 template<typename T>
 T GetHighValue();
@@ -63,38 +66,41 @@ void FillBuffer(AudioDataValue* aBuffer,
   while (aBuffer != end) {
     *aBuffer++ = aValue;
   }
 }
 
 int main(int argc, char* argv[]) {
   const uint32_t CHANNEL_LENGTH = 256;
   const uint32_t AUDIO_RATE = 44100;
+  MixerConsumer consumer;
   AudioDataValue a[CHANNEL_LENGTH * 2];
   AudioDataValue b[CHANNEL_LENGTH * 2];
   FillBuffer(a, CHANNEL_LENGTH, GetLowValue<AudioDataValue>());
   FillBuffer(a + CHANNEL_LENGTH, CHANNEL_LENGTH, GetHighValue<AudioDataValue>());
   FillBuffer(b, CHANNEL_LENGTH, GetHighValue<AudioDataValue>());
   FillBuffer(b + CHANNEL_LENGTH, CHANNEL_LENGTH, GetLowValue<AudioDataValue>());
 
   {
     int iterations = 2;
-    mozilla::AudioMixer mixer(MixingDone);
+    mozilla::AudioMixer mixer;
+    mixer.AddCallback(&consumer);
 
     fprintf(stderr, "Test AudioMixer constant buffer length.\n");
 
     while (iterations--) {
       mixer.Mix(a, 2, CHANNEL_LENGTH, AUDIO_RATE);
       mixer.Mix(b, 2, CHANNEL_LENGTH, AUDIO_RATE);
       mixer.FinishMixing();
     }
   }
 
   {
-    mozilla::AudioMixer mixer(MixingDone);
+    mozilla::AudioMixer mixer;
+    mixer.AddCallback(&consumer);
 
     fprintf(stderr, "Test AudioMixer variable buffer length.\n");
 
     FillBuffer(a, CHANNEL_LENGTH / 2, GetLowValue<AudioDataValue>());
     FillBuffer(a + CHANNEL_LENGTH / 2, CHANNEL_LENGTH / 2, GetLowValue<AudioDataValue>());
     FillBuffer(b, CHANNEL_LENGTH / 2, GetHighValue<AudioDataValue>());
     FillBuffer(b + CHANNEL_LENGTH / 2, CHANNEL_LENGTH / 2, GetHighValue<AudioDataValue>());
     mixer.Mix(a, 2, CHANNEL_LENGTH / 2, AUDIO_RATE);
@@ -115,32 +121,35 @@ int main(int argc, char* argv[]) {
     mixer.Mix(b, 2, CHANNEL_LENGTH / 2, AUDIO_RATE);
     mixer.FinishMixing();
   }
 
   FillBuffer(a, CHANNEL_LENGTH, GetLowValue<AudioDataValue>());
   FillBuffer(b, CHANNEL_LENGTH, GetHighValue<AudioDataValue>());
 
   {
-    mozilla::AudioMixer mixer(MixingDone);
+    mozilla::AudioMixer mixer;
+    mixer.AddCallback(&consumer);
+
     fprintf(stderr, "Test AudioMixer variable channel count.\n");
 
     mixer.Mix(a, 1, CHANNEL_LENGTH, AUDIO_RATE);
     mixer.Mix(b, 1, CHANNEL_LENGTH, AUDIO_RATE);
     mixer.FinishMixing();
     mixer.Mix(a, 1, CHANNEL_LENGTH, AUDIO_RATE);
     mixer.Mix(b, 1, CHANNEL_LENGTH, AUDIO_RATE);
     mixer.FinishMixing();
     mixer.Mix(a, 1, CHANNEL_LENGTH, AUDIO_RATE);
     mixer.Mix(b, 1, CHANNEL_LENGTH, AUDIO_RATE);
     mixer.FinishMixing();
   }
 
   {
-    mozilla::AudioMixer mixer(MixingDone);
+    mozilla::AudioMixer mixer;
+    mixer.AddCallback(&consumer);
     fprintf(stderr, "Test AudioMixer variable stream count.\n");
 
     mixer.Mix(a, 2, CHANNEL_LENGTH, AUDIO_RATE);
     mixer.Mix(b, 2, CHANNEL_LENGTH, AUDIO_RATE);
     mixer.FinishMixing();
     mixer.Mix(a, 2, CHANNEL_LENGTH, AUDIO_RATE);
     mixer.Mix(b, 2, CHANNEL_LENGTH, AUDIO_RATE);
     mixer.Mix(a, 2, CHANNEL_LENGTH, AUDIO_RATE);
--- a/content/media/compiledtest/moz.build
+++ b/content/media/compiledtest/moz.build
@@ -1,16 +1,17 @@
 # -*- Mode: python; c-basic-offset: 4; indent-tabs-mode: nil; tab-width: 40 -*-
 # vim: set filetype=python:
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 CPP_UNIT_TESTS += [
-    'TestAudioMixer',
+    'TestAudioBuffers',
+    'TestAudioMixer'
 ]
 
 SOURCES += sorted('%s.cpp' % t for t in CPP_UNIT_TESTS)
 
 FAIL_ON_WARNINGS = True
 
 LOCAL_INCLUDES += [
     '..',
--- a/content/media/eme/CDMCaps.cpp
+++ b/content/media/eme/CDMCaps.cpp
@@ -179,9 +179,21 @@ CDMCaps::AutoLock::CanDecryptAudio()
 }
 
 bool
 CDMCaps::AutoLock::CanDecryptVideo()
 {
   return mData.HasCap(GMP_EME_CAP_DECRYPT_VIDEO);
 }
 
+void
+CDMCaps::AutoLock::GetUsableKeysForSession(const nsAString& aSessionId,
+                                           nsTArray<CencKeyId>& aOutKeyIds)
+{
+  for (size_t i = 0; i < mData.mUsableKeyIds.Length(); i++) {
+    const auto& key = mData.mUsableKeyIds[i];
+    if (key.mSessionId.Equals(aSessionId)) {
+      aOutKeyIds.AppendElement(key.mId);
+    }
+  }
+}
+
 } // namespace mozilla
\ No newline at end of file
--- a/content/media/eme/CDMCaps.h
+++ b/content/media/eme/CDMCaps.h
@@ -37,16 +37,18 @@ public:
     bool AreCapsKnown();
 
     bool IsKeyUsable(const CencKeyId& aKeyId);
 
     void SetKeyUsable(const CencKeyId& aKeyId, const nsString& aSessionId);
     void SetKeyUnusable(const CencKeyId& aKeyId, const nsString& aSessionId);
 
     void DropKeysForSession(const nsAString& aSessionId);
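+    // Appends to aOutKeyIds the IDs of the keys currently usable for the
+    // session with id aSessionId.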
+    void GetUsableKeysForSession(const nsAString& aSessionId,
+                                 nsTArray<CencKeyId>& aOutKeyIds);
 
     // Sets the capabilities of the CDM. aCaps is the logical OR of the
     // GMP_EME_CAP_* flags from gmp-decryption.h.
     void SetCaps(uint64_t aCaps);
 
     bool CanDecryptAndDecodeAudio();
     bool CanDecryptAndDecodeVideo();
 
--- a/content/media/eme/CDMProxy.cpp
+++ b/content/media/eme/CDMProxy.cpp
@@ -109,26 +109,26 @@ CDMProxy::OnCDMCreated(uint32_t aPromise
   }
   mKeys->OnCDMCreated(aPromiseId);
 }
 
 void
 CDMProxy::CreateSession(dom::SessionType aSessionType,
                         PromiseId aPromiseId,
                         const nsAString& aInitDataType,
-                        const Uint8Array& aInitData)
+                        nsTArray<uint8_t>& aInitData)
 {
   MOZ_ASSERT(NS_IsMainThread());
   MOZ_ASSERT(mGMPThread);
 
   nsAutoPtr<CreateSessionData> data(new CreateSessionData());
   data->mSessionType = aSessionType;
   data->mPromiseId = aPromiseId;
   data->mInitDataType = NS_ConvertUTF16toUTF8(aInitDataType);
-  data->mInitData.AppendElements(aInitData.Data(), aInitData.Length());
+  data->mInitData = Move(aInitData);
 
   nsRefPtr<nsIRunnable> task(
     NS_NewRunnableMethodWithArg<nsAutoPtr<CreateSessionData>>(this, &CDMProxy::gmp_CreateSession, data));
   mGMPThread->Dispatch(task, NS_DISPATCH_NORMAL);
 }
 
 GMPSessionType
 ToGMPSessionType(dom::SessionType aSessionType) {
@@ -177,24 +177,24 @@ CDMProxy::gmp_LoadSession(nsAutoPtr<Sess
     RejectPromise(aData->mPromiseId, NS_ERROR_DOM_INVALID_STATE_ERR);
     return;
   }
   mCDM->LoadSession(aData->mPromiseId, aData->mSessionId);
 }
 
 void
 CDMProxy::SetServerCertificate(PromiseId aPromiseId,
-                               const Uint8Array& aCert)
+                               nsTArray<uint8_t>& aCert)
 {
   MOZ_ASSERT(NS_IsMainThread());
   MOZ_ASSERT(mGMPThread);
 
   nsAutoPtr<SetServerCertificateData> data(new SetServerCertificateData());
   data->mPromiseId = aPromiseId;
-  data->mCert.AppendElements(aCert.Data(), aCert.Length());
+  data->mCert = Move(aCert);
   nsRefPtr<nsIRunnable> task(
     NS_NewRunnableMethodWithArg<nsAutoPtr<SetServerCertificateData>>(this, &CDMProxy::gmp_SetServerCertificate, data));
   mGMPThread->Dispatch(task, NS_DISPATCH_NORMAL);
 }
 
 void
 CDMProxy::gmp_SetServerCertificate(nsAutoPtr<SetServerCertificateData> aData)
 {
@@ -204,26 +204,26 @@ CDMProxy::gmp_SetServerCertificate(nsAut
     return;
   }
   mCDM->SetServerCertificate(aData->mPromiseId, aData->mCert);
 }
 
 void
 CDMProxy::UpdateSession(const nsAString& aSessionId,
                         PromiseId aPromiseId,
-                        const Uint8Array& aResponse)
+                        nsTArray<uint8_t>& aResponse)
 {
   MOZ_ASSERT(NS_IsMainThread());
   MOZ_ASSERT(mGMPThread);
   NS_ENSURE_TRUE_VOID(!mKeys.IsNull());
 
   nsAutoPtr<UpdateSessionData> data(new UpdateSessionData());
   data->mPromiseId = aPromiseId;
   data->mSessionId = NS_ConvertUTF16toUTF8(aSessionId);
-  data->mResponse.AppendElements(aResponse.Data(), aResponse.Length());
+  data->mResponse = Move(aResponse);
   nsRefPtr<nsIRunnable> task(
     NS_NewRunnableMethodWithArg<nsAutoPtr<UpdateSessionData>>(this, &CDMProxy::gmp_UpdateSession, data));
   mGMPThread->Dispatch(task, NS_DISPATCH_NORMAL);
 }
 
 void
 CDMProxy::gmp_UpdateSession(nsAutoPtr<UpdateSessionData> aData)
 {
--- a/content/media/eme/CDMProxy.h
+++ b/content/media/eme/CDMProxy.h
@@ -5,17 +5,16 @@
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef CDMProxy_h_
 #define CDMProxy_h_
 
 #include "nsString.h"
 #include "nsAutoPtr.h"
 #include "mozilla/dom/MediaKeys.h"
-#include "mozilla/dom/TypedArray.h"
 #include "mozilla/Monitor.h"
 #include "nsIThread.h"
 #include "GMPDecryptorProxy.h"
 #include "mozilla/CDMCaps.h"
 #include "mp4_demuxer/DecoderData.h"
 
 namespace mozilla {
 
@@ -34,60 +33,59 @@ public:
 
 // Proxies calls GMP/CDM, and proxies calls back.
 // Note: Promises are passed in via a PromiseId, so that the ID can be
 // passed via IPC to the CDM, which can then signal when to reject or
 // resolve the promise using its PromiseId.
 class CDMProxy {
   typedef dom::PromiseId PromiseId;
   typedef dom::SessionType SessionType;
-  typedef dom::Uint8Array Uint8Array;
 public:
 
   NS_INLINE_DECL_THREADSAFE_REFCOUNTING(CDMProxy)
 
   // Main thread only.
   CDMProxy(dom::MediaKeys* aKeys, const nsAString& aKeySystem);
 
   // Main thread only.
   // Loads the CDM corresponding to mKeySystem.
   // Calls MediaKeys::OnCDMCreated() when the CDM is created.
   void Init(PromiseId aPromiseId);
 
   // Main thread only.
   // Uses the CDM to create a key session.
-  // Caller is responsible for calling aInitData.ComputeLengthAndData().
   // Calls MediaKeys::OnSessionActivated() when session is created.
+  // Assumes ownership of (Move()s) aInitData's contents.
   void CreateSession(dom::SessionType aSessionType,
                      PromiseId aPromiseId,
                      const nsAString& aInitDataType,
-                     const Uint8Array& aInitData);
+                     nsTArray<uint8_t>& aInitData);
 
   // Main thread only.
   // Uses the CDM to load a presistent session stored on disk.
   // Calls MediaKeys::OnSessionActivated() when session is loaded.
   void LoadSession(PromiseId aPromiseId,
                    const nsAString& aSessionId);
 
   // Main thread only.
   // Sends a new certificate to the CDM.
-  // Caller is responsible for calling aCert.ComputeLengthAndData().
   // Calls MediaKeys->ResolvePromise(aPromiseId) after the CDM has
   // processed the request.
+  // Assumes ownership of (Move()s) aCert's contents.
   void SetServerCertificate(PromiseId aPromiseId,
-                            const Uint8Array& aCert);
+                            nsTArray<uint8_t>& aCert);
 
   // Main thread only.
   // Sends an update to the CDM.
-  // Caller is responsible for calling aResponse.ComputeLengthAndData().
   // Calls MediaKeys->ResolvePromise(aPromiseId) after the CDM has
   // processed the request.
+  // Assumes ownership of (Move()s) aResponse's contents.
   void UpdateSession(const nsAString& aSessionId,
                      PromiseId aPromiseId,
-                     const Uint8Array& aResponse);
+                     nsTArray<uint8_t>& aResponse);
 
   // Main thread only.
   // Calls MediaKeys->ResolvePromise(aPromiseId) after the CDM has
   // processed the request.
   // If processing this operation results in the session actually closing,
   // we also call MediaKeySession::OnClosed(), which in turn calls
   // MediaKeys::OnSessionClosed().
   void CloseSession(const nsAString& aSessionId,
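
The PromiseId indirection the class comment describes boils down to a main-thread table of live promises keyed by a small integer; only that integer is serialized over IPC. A sketch of the bookkeeping, mirroring the StorePromise()/ResolvePromise() pairing used on MediaKeys elsewhere in this patch (the exact implementation may differ):

  // Sketch only; MediaKeys.h below declares the PromiseHashMap typedef.
  nsRefPtrHashtable<nsUint32HashKey, dom::Promise> mPromises;
  PromiseId mNextId = 1;

  PromiseId StorePromise(dom::Promise* aPromise)
  {
    PromiseId id = mNextId++;        // Only this integer crosses IPC to the CDM.
    mPromises.Put(id, aPromise);
    return id;
  }

  void ResolvePromise(PromiseId aId)
  {
    nsRefPtr<dom::Promise> promise;
    if (mPromises.Get(aId, getter_AddRefs(promise))) {
      mPromises.Remove(aId);
      promise->MaybeResolve(JS::UndefinedHandleValue);
    }
  }
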
rename from content/media/eme/MediaKeyNeededEvent.cpp
rename to content/media/eme/MediaEncryptedEvent.cpp
--- a/content/media/eme/MediaKeyNeededEvent.cpp
+++ b/content/media/eme/MediaEncryptedEvent.cpp
@@ -1,114 +1,115 @@
 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
 /* vim: set ts=8 sts=2 et sw=2 tw=80: */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
-#include "MediaKeyNeededEvent.h"
-#include "mozilla/dom/MediaKeyNeededEventBinding.h"
+#include "MediaEncryptedEvent.h"
+#include "mozilla/dom/MediaEncryptedEventBinding.h"
 #include "nsContentUtils.h"
 #include "jsfriendapi.h"
 #include "nsINode.h"
+#include "mozilla/dom/MediaKeys.h"
 
 namespace mozilla {
 namespace dom {
 
-NS_IMPL_CYCLE_COLLECTION_CLASS(MediaKeyNeededEvent)
+NS_IMPL_CYCLE_COLLECTION_CLASS(MediaEncryptedEvent)
 
-NS_IMPL_ADDREF_INHERITED(MediaKeyNeededEvent, Event)
-NS_IMPL_RELEASE_INHERITED(MediaKeyNeededEvent, Event)
+NS_IMPL_ADDREF_INHERITED(MediaEncryptedEvent, Event)
+NS_IMPL_RELEASE_INHERITED(MediaEncryptedEvent, Event)
 
-NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN_INHERITED(MediaKeyNeededEvent, Event)
+NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN_INHERITED(MediaEncryptedEvent, Event)
 NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
 
-NS_IMPL_CYCLE_COLLECTION_TRACE_BEGIN_INHERITED(MediaKeyNeededEvent, Event)
+NS_IMPL_CYCLE_COLLECTION_TRACE_BEGIN_INHERITED(MediaEncryptedEvent, Event)
   NS_IMPL_CYCLE_COLLECTION_TRACE_JS_MEMBER_CALLBACK(mInitData)
 NS_IMPL_CYCLE_COLLECTION_TRACE_END
 
-NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN_INHERITED(MediaKeyNeededEvent, Event)
+NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN_INHERITED(MediaEncryptedEvent, Event)
   tmp->mInitData = nullptr;
 NS_IMPL_CYCLE_COLLECTION_UNLINK_END
 
-NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION_INHERITED(MediaKeyNeededEvent)
+NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION_INHERITED(MediaEncryptedEvent)
 NS_INTERFACE_MAP_END_INHERITING(Event)
 
-MediaKeyNeededEvent::MediaKeyNeededEvent(EventTarget* aOwner)
+MediaEncryptedEvent::MediaEncryptedEvent(EventTarget* aOwner)
   : Event(aOwner, nullptr, nullptr)
 {
   mozilla::HoldJSObjects(this);
 }
 
-MediaKeyNeededEvent::~MediaKeyNeededEvent()
+MediaEncryptedEvent::~MediaEncryptedEvent()
 {
   mInitData = nullptr;
   mozilla::DropJSObjects(this);
 }
 
 JSObject*
-MediaKeyNeededEvent::WrapObject(JSContext* aCx)
+MediaEncryptedEvent::WrapObject(JSContext* aCx)
 {
-  return MediaKeyNeededEventBinding::Wrap(aCx, this);
+  return MediaEncryptedEventBinding::Wrap(aCx, this);
 }
 
-already_AddRefed<MediaKeyNeededEvent>
-MediaKeyNeededEvent::Constructor(EventTarget* aOwner,
+already_AddRefed<MediaEncryptedEvent>
+MediaEncryptedEvent::Constructor(EventTarget* aOwner,
                                  const nsAString& aInitDataType,
                                  const nsTArray<uint8_t>& aInitData)
 {
-  nsRefPtr<MediaKeyNeededEvent> e = new MediaKeyNeededEvent(aOwner);
-  e->InitEvent(NS_LITERAL_STRING("needkey"), false, false);
+  nsRefPtr<MediaEncryptedEvent> e = new MediaEncryptedEvent(aOwner);
+  e->InitEvent(NS_LITERAL_STRING("encrypted"), false, false);
   e->mInitDataType = aInitDataType;
   e->mRawInitData = aInitData;
   e->SetTrusted(true);
   return e.forget();
 }
 
-already_AddRefed<MediaKeyNeededEvent>
-MediaKeyNeededEvent::Constructor(const GlobalObject& aGlobal,
+already_AddRefed<MediaEncryptedEvent>
+MediaEncryptedEvent::Constructor(const GlobalObject& aGlobal,
                                  const nsAString& aType,
                                  const MediaKeyNeededEventInit& aEventInitDict,
                                  ErrorResult& aRv)
 {
   nsCOMPtr<EventTarget> owner = do_QueryInterface(aGlobal.GetAsSupports());
-  nsRefPtr<MediaKeyNeededEvent> e = new MediaKeyNeededEvent(owner);
+  nsRefPtr<MediaEncryptedEvent> e = new MediaEncryptedEvent(owner);
   bool trusted = e->Init(owner);
   e->InitEvent(aType, aEventInitDict.mBubbles, aEventInitDict.mCancelable);
   e->mInitDataType = aEventInitDict.mInitDataType;
-  if (aEventInitDict.mInitData.WasPassed() &&
-      !aEventInitDict.mInitData.Value().IsNull()) {
-    const auto& a = aEventInitDict.mInitData.Value().Value();
+  if (!aEventInitDict.mInitData.IsNull()) {
+    const auto& a = aEventInitDict.mInitData.Value();
     a.ComputeLengthAndData();
-    e->mInitData = Uint8Array::Create(aGlobal.Context(), owner, a.Length(), a.Data());
+    e->mInitData = ArrayBuffer::Create(aGlobal.Context(),
+                                       a.Length(),
+                                       a.Data());
     if (!e->mInitData) {
       aRv.Throw(NS_ERROR_OUT_OF_MEMORY);
       return nullptr;
     }
   }
   e->SetTrusted(trusted);
   return e.forget();
 }
 
 void
-MediaKeyNeededEvent::GetInitDataType(nsString& aRetVal) const
+MediaEncryptedEvent::GetInitDataType(nsString& aRetVal) const
 {
   aRetVal = mInitDataType;
 }
 
 void
-MediaKeyNeededEvent::GetInitData(JSContext* cx,
+MediaEncryptedEvent::GetInitData(JSContext* cx,
                                  JS::MutableHandle<JSObject*> aData,
                                  ErrorResult& aRv)
 {
   if (mRawInitData.Length()) {
-    mInitData = Uint8Array::Create(cx,
-                                   this,
-                                   mRawInitData.Length(),
-                                   mRawInitData.Elements());
+    mInitData = ArrayBuffer::Create(cx,
+                                    mRawInitData.Length(),
+                                    mRawInitData.Elements());
     if (!mInitData) {
       aRv.Throw(NS_ERROR_OUT_OF_MEMORY);
       return;
     }
     mRawInitData.Clear();
   }
   if (mInitData) {
     JS::ExposeObjectToActiveJS(mInitData);
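
GetInitData() above is the lazy-materialization idiom used throughout these events: keep a raw nsTArray<uint8_t> copy until script first asks, build the ArrayBuffer once, drop the raw copy, then re-expose the cached JS object on every later call. Condensed, with simplified member names:

  // Condensed sketch of the lazy-buffer idiom (member names simplified).
  if (!mObject && mRawBytes.Length()) {
    mObject = ArrayBuffer::Create(cx, mRawBytes.Length(), mRawBytes.Elements());
    if (!mObject) {
      aRv.Throw(NS_ERROR_OUT_OF_MEMORY);
      return;
    }
    mRawBytes.Clear();               // The JS object now holds the only copy.
  }
  if (mObject) {
    JS::ExposeObjectToActiveJS(mObject); // Needed before handing out a Heap<JSObject*>.
    aRetval.set(mObject);
  }
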
rename from content/media/eme/MediaKeyNeededEvent.h
rename to content/media/eme/MediaEncryptedEvent.h
--- a/content/media/eme/MediaKeyNeededEvent.h
+++ b/content/media/eme/MediaEncryptedEvent.h
@@ -2,53 +2,53 @@
 /* vim: set ts=8 sts=2 et sw=2 tw=80: */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef mozilla_dom_MediaKeyNeededEvent_h__
 #define mozilla_dom_MediaKeyNeededEvent_h__
 
-#include "mozilla/dom/MediaKeyNeededEventBinding.h"
+#include "mozilla/dom/MediaEncryptedEventBinding.h"
 #include "mozilla/Attributes.h"
 #include "mozilla/ErrorResult.h"
 #include "nsCycleCollectionParticipant.h"
 #include "nsWrapperCache.h"
 #include "nsCOMPtr.h"
 #include "mozilla/dom/Event.h"
 #include "mozilla/dom/TypedArray.h"
 #include "mozilla/Attributes.h"
 #include "mozilla/dom/BindingUtils.h"
 #include "js/TypeDecls.h"
 
 namespace mozilla {
 namespace dom {
 
-class MediaKeyNeededEvent MOZ_FINAL : public Event
+class MediaEncryptedEvent MOZ_FINAL : public Event
 {
 public:
   NS_DECL_ISUPPORTS_INHERITED
-  NS_DECL_CYCLE_COLLECTION_SCRIPT_HOLDER_CLASS_INHERITED(MediaKeyNeededEvent, Event)
+  NS_DECL_CYCLE_COLLECTION_SCRIPT_HOLDER_CLASS_INHERITED(MediaEncryptedEvent, Event)
 protected:
-  virtual ~MediaKeyNeededEvent();
-  MediaKeyNeededEvent(EventTarget* aOwner);
+  virtual ~MediaEncryptedEvent();
+  MediaEncryptedEvent(EventTarget* aOwner);
 
   nsString mInitDataType;
   JS::Heap<JSObject*> mInitData;
 
 public:
 
   virtual JSObject* WrapObject(JSContext* aCx) MOZ_OVERRIDE;
 
-  static already_AddRefed<MediaKeyNeededEvent>
+  static already_AddRefed<MediaEncryptedEvent>
   Constructor(EventTarget* aOwner,
               const nsAString& aInitDataType,
               const nsTArray<uint8_t>& aInitData);
 
-  static already_AddRefed<MediaKeyNeededEvent>
+  static already_AddRefed<MediaEncryptedEvent>
   Constructor(const GlobalObject& aGlobal,
               const nsAString& aType,
               const MediaKeyNeededEventInit& aEventInitDict,
               ErrorResult& aRv);
 
   void GetInitDataType(nsString& aRetVal) const;
 
   void GetInitData(JSContext* cx,
--- a/content/media/eme/MediaKeyMessageEvent.cpp
+++ b/content/media/eme/MediaKeyMessageEvent.cpp
@@ -8,16 +8,17 @@
 #include "mozilla/dom/MediaKeyMessageEventBinding.h"
 #include "js/GCAPI.h"
 #include "jsfriendapi.h"
 #include "mozilla/dom/Nullable.h"
 #include "mozilla/dom/PrimitiveConversions.h"
 #include "mozilla/HoldDropJSObjects.h"
 #include "mozilla/dom/TypedArray.h"
 #include "nsContentUtils.h"
+#include "mozilla/dom/MediaKeys.h"
 
 namespace mozilla {
 namespace dom {
 
 NS_IMPL_CYCLE_COLLECTION_CLASS(MediaKeyMessageEvent)
 
 NS_IMPL_ADDREF_INHERITED(MediaKeyMessageEvent, Event)
 NS_IMPL_RELEASE_INHERITED(MediaKeyMessageEvent, Event)
@@ -78,42 +79,43 @@ MediaKeyMessageEvent::Constructor(const 
                                   const nsAString& aType,
                                   const MediaKeyMessageEventInit& aEventInitDict,
                                   ErrorResult& aRv)
 {
   nsCOMPtr<EventTarget> owner = do_QueryInterface(aGlobal.GetAsSupports());
   nsRefPtr<MediaKeyMessageEvent> e = new MediaKeyMessageEvent(owner);
   bool trusted = e->Init(owner);
   e->InitEvent(aType, aEventInitDict.mBubbles, aEventInitDict.mCancelable);
+  const uint8_t* data = nullptr;
+  size_t length = 0;
   if (aEventInitDict.mMessage.WasPassed()) {
     const auto& a = aEventInitDict.mMessage.Value();
     a.ComputeLengthAndData();
-    e->mMessage = Uint8Array::Create(aGlobal.Context(), owner, a.Length(), a.Data());
-  } else {
-    e->mMessage = Uint8Array::Create(aGlobal.Context(), owner, 0, nullptr);
+    data = a.Data();
+    length = a.Length();
   }
+  e->mMessage = ArrayBuffer::Create(aGlobal.Context(), length, data);
   if (!e->mMessage) {
     aRv.Throw(NS_ERROR_OUT_OF_MEMORY);
     return nullptr;
   }
   e->mDestinationURL = aEventInitDict.mDestinationURL;
   e->SetTrusted(trusted);
   return e.forget();
 }
 
 void
 MediaKeyMessageEvent::GetMessage(JSContext* cx,
                                  JS::MutableHandle<JSObject*> aMessage,
                                  ErrorResult& aRv)
 {
   if (!mMessage) {
-    mMessage = Uint8Array::Create(cx,
-                                  this,
-                                  mRawMessage.Length(),
-                                  mRawMessage.Elements());
+    mMessage = ArrayBuffer::Create(cx,
+                                   mRawMessage.Length(),
+                                   mRawMessage.Elements());
     if (!mMessage) {
       aRv.Throw(NS_ERROR_OUT_OF_MEMORY);
       return;
     }
     mRawMessage.Clear();
   }
   JS::ExposeObjectToActiveJS(mMessage);
   aMessage.set(mMessage);
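
The Constructor rewrite above folds the passed/empty branches into a single ArrayBuffer::Create() call by starting from (data = nullptr, length = 0), which is assumed to yield a valid zero-length buffer; GetMessage() then follows the same lazy-create shape as MediaEncryptedEvent::GetInitData(). The collapsed branch in isolation (simplified names):

  // Assumes ArrayBuffer::Create(cx, 0, nullptr) builds an empty buffer.
  const uint8_t* data = nullptr;
  size_t length = 0;
  if (aInit.mMessage.WasPassed()) {
    const auto& view = aInit.mMessage.Value();
    view.ComputeLengthAndData();     // Must run before Data()/Length() are valid.
    data = view.Data();
    length = view.Length();
  }
  JSObject* message = ArrayBuffer::Create(cx, length, data);
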
--- a/content/media/eme/MediaKeySession.cpp
+++ b/content/media/eme/MediaKeySession.cpp
@@ -3,20 +3,21 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "mozilla/dom/HTMLMediaElement.h"
 #include "mozilla/dom/MediaKeySession.h"
 #include "mozilla/dom/MediaKeyError.h"
 #include "mozilla/dom/MediaKeyMessageEvent.h"
-#include "mozilla/dom/MediaKeyNeededEvent.h"
+#include "mozilla/dom/MediaEncryptedEvent.h"
 #include "nsCycleCollectionParticipant.h"
 #include "mozilla/CDMProxy.h"
 #include "mozilla/AsyncEventDispatcher.h"
+#include "mozilla/Move.h"
 
 namespace mozilla {
 namespace dom {
 
 NS_IMPL_CYCLE_COLLECTION_INHERITED(MediaKeySession,
                                    DOMEventTargetHelper,
                                    mMediaKeyError,
                                    mKeys,
@@ -90,32 +91,32 @@ MediaKeySession::Expiration() const
 
 Promise*
 MediaKeySession::Closed() const
 {
   return mClosed;
 }
 
 already_AddRefed<Promise>
-MediaKeySession::Update(const Uint8Array& aResponse, ErrorResult& aRv)
+MediaKeySession::Update(const ArrayBufferViewOrArrayBuffer& aResponse, ErrorResult& aRv)
 {
   nsRefPtr<Promise> promise(mKeys->MakePromise(aRv));
   if (aRv.Failed()) {
     return nullptr;
   }
-  aResponse.ComputeLengthAndData();
+  nsTArray<uint8_t> data;
   if (IsClosed() ||
       !mKeys->GetCDMProxy() ||
-      !aResponse.Length()) {
+      !CopyArrayBufferViewOrArrayBufferData(aResponse, data)) {
     promise->MaybeReject(NS_ERROR_DOM_INVALID_ACCESS_ERR);
     return promise.forget();
   }
   mKeys->GetCDMProxy()->UpdateSession(mSessionId,
                                       mKeys->StorePromise(promise),
-                                      aResponse);
+                                      data);
   return promise.forget();
 }
 
 already_AddRefed<Promise>
 MediaKeySession::Close(ErrorResult& aRv)
 {
   nsRefPtr<Promise> promise(mKeys->MakePromise(aRv));
   if (aRv.Failed()) {
@@ -165,16 +166,44 @@ MediaKeySession::Remove(ErrorResult& aRv
     promise->MaybeReject(NS_ERROR_DOM_INVALID_STATE_ERR);
     // "The session is closed."
     return promise.forget();
   }
   mKeys->GetCDMProxy()->RemoveSession(mSessionId, mKeys->StorePromise(promise));
   return promise.forget();
 }
 
+already_AddRefed<Promise>
+MediaKeySession::GetUsableKeyIds(ErrorResult& aRv)
+{
+  nsRefPtr<Promise> promise(mKeys->MakePromise(aRv));
+  if (aRv.Failed()) {
+    return nullptr;
+  }
+
+  if (IsClosed() || !mKeys->GetCDMProxy()) {
+    promise->MaybeReject(NS_ERROR_DOM_INVALID_STATE_ERR);
+    return promise.forget();
+  }
+
+  nsTArray<CencKeyId> keyIds;
+  {
+    CDMCaps::AutoLock caps(mKeys->GetCDMProxy()->Capabilites());
+    caps.GetUsableKeysForSession(mSessionId, keyIds);
+  }
+
+  nsTArray<TypedArrayCreator<ArrayBuffer>> array;
+  for (size_t i = 0; i < keyIds.Length(); i++) {
+    array.AppendElement(keyIds[i]);
+  }
+  promise->MaybeResolve(array);
+
+  return promise.forget();
+}
+
 void
 MediaKeySession::DispatchKeyMessage(const nsTArray<uint8_t>& aMessage,
                                     const nsAString& aURL)
 {
   nsRefPtr<MediaKeyMessageEvent> event(
     MediaKeyMessageEvent::Constructor(this, aURL, aMessage));
   nsRefPtr<AsyncEventDispatcher> asyncDispatcher =
     new AsyncEventDispatcher(this, event);
--- a/content/media/eme/MediaKeySession.h
+++ b/content/media/eme/MediaKeySession.h
@@ -13,16 +13,17 @@
 #include "mozilla/DOMEventTargetHelper.h"
 #include "nsCOMPtr.h"
 #include "mozilla/dom/TypedArray.h"
 #include "mozilla/Mutex.h"
 #include "mozilla/dom/Date.h"
 #include "mozilla/dom/Promise.h"
 #include "mozilla/dom/MediaKeySessionBinding.h"
 #include "mozilla/dom/MediaKeysBinding.h"
+#include "mozilla/dom/UnionTypes.h"
 
 struct JSContext;
 
 namespace mozilla {
 
 class CDMProxy;
 
 namespace dom {
@@ -57,23 +58,25 @@ public:
 
   // Number of ms since epoch at which expiration occurs, or NaN if unknown.
   // TODO: The type of this attribute is still under contention.
   // https://www.w3.org/Bugs/Public/show_bug.cgi?id=25902
   double Expiration() const;
 
   Promise* Closed() const;
 
-  already_AddRefed<Promise> Update(const Uint8Array& response,
+  already_AddRefed<Promise> Update(const ArrayBufferViewOrArrayBuffer& response,
                                    ErrorResult& aRv);
 
   already_AddRefed<Promise> Close(ErrorResult& aRv);
 
   already_AddRefed<Promise> Remove(ErrorResult& aRv);
 
+  already_AddRefed<Promise> GetUsableKeyIds(ErrorResult& aRv);
+
   void DispatchKeyMessage(const nsTArray<uint8_t>& aMessage,
                           const nsAString& aURL);
 
   void DispatchKeyError(uint32_t system_code);
 
   void OnClosed();
 
   bool IsClosed() const;
--- a/content/media/eme/MediaKeys.cpp
+++ b/content/media/eme/MediaKeys.cpp
@@ -84,24 +84,30 @@ MediaKeys::WrapObject(JSContext* aCx)
 
 void
 MediaKeys::GetKeySystem(nsString& retval) const
 {
   retval = mKeySystem;
 }
 
 already_AddRefed<Promise>
-MediaKeys::SetServerCertificate(const Uint8Array& aCert, ErrorResult& aRv)
+MediaKeys::SetServerCertificate(const ArrayBufferViewOrArrayBuffer& aCert, ErrorResult& aRv)
 {
-  aCert.ComputeLengthAndData();
   nsRefPtr<Promise> promise(MakePromise(aRv));
   if (aRv.Failed()) {
     return nullptr;
   }
-  mProxy->SetServerCertificate(StorePromise(promise), aCert);
+
+  nsTArray<uint8_t> data;
+  if (!CopyArrayBufferViewOrArrayBufferData(aCert, data)) {
+    promise->MaybeReject(NS_ERROR_DOM_INVALID_ACCESS_ERR);
+    return promise.forget();
+  }
+
+  mProxy->SetServerCertificate(StorePromise(promise), data);
   return promise.forget();
 }
 
 static bool
 IsSupportedKeySystem(const nsAString& aKeySystem)
 {
   return aKeySystem.EqualsASCII("org.w3.clearkey") ||
 #ifdef XP_WIN
@@ -300,40 +306,44 @@ MediaKeys::LoadSession(const nsAString& 
   mPendingSessions.Put(pid, session);
   mProxy->LoadSession(pid, aSessionId);
 
   return promise.forget();
 }
 
 already_AddRefed<Promise>
 MediaKeys::CreateSession(const nsAString& initDataType,
-                         const Uint8Array& aInitData,
+                         const ArrayBufferViewOrArrayBuffer& aInitData,
                          SessionType aSessionType,
                          ErrorResult& aRv)
 {
-  aInitData.ComputeLengthAndData();
   nsRefPtr<Promise> promise(MakePromise(aRv));
   if (aRv.Failed()) {
     return nullptr;
   }
+
+  nsTArray<uint8_t> data;
+  if (initDataType.IsEmpty() ||
+      !CopyArrayBufferViewOrArrayBufferData(aInitData, data)) {
+    promise->MaybeReject(NS_ERROR_DOM_INVALID_ACCESS_ERR);
+    return promise.forget();
+  }
+
   nsRefPtr<MediaKeySession> session = new MediaKeySession(GetParentObject(),
                                                           this,
                                                           mKeySystem,
-                                                          aSessionType, aRv);
-  if (aRv.Failed()) {
-    return nullptr;
-  }
-
+                                                          aSessionType,
+                                                          aRv);
   auto pid = StorePromise(promise);
   // Hang onto session until the CDM has finished setting it up.
   mPendingSessions.Put(pid, session);
   mProxy->CreateSession(aSessionType,
                         pid,
                         initDataType,
-                        aInitData);
+                        data);
 
   return promise.forget();
 }
 
 void
 MediaKeys::OnSessionCreated(PromiseId aId, const nsAString& aSessionId)
 {
   nsRefPtr<Promise> promise(RetrievePromise(aId));
@@ -393,10 +403,28 @@ MediaKeys::GetOrigin(nsString& aOutOrigi
 
   nsresult res = nsContentUtils::GetUTFOrigin(principal, aOutOrigin);
 
   EME_LOG("EME Origin = '%s'", NS_ConvertUTF16toUTF8(aOutOrigin).get());
 
   return res;
 }
 
+bool
+CopyArrayBufferViewOrArrayBufferData(const ArrayBufferViewOrArrayBuffer& aBufferOrView,
+                                     nsTArray<uint8_t>& aOutData)
+{
+  if (aBufferOrView.IsArrayBuffer()) {
+    const ArrayBuffer& buffer = aBufferOrView.GetAsArrayBuffer();
+    buffer.ComputeLengthAndData();
+    aOutData.AppendElements(buffer.Data(), buffer.Length());
+  } else if (aBufferOrView.IsArrayBufferView()) {
+    const ArrayBufferView& bufferview = aBufferOrView.GetAsArrayBufferView();
+    bufferview.ComputeLengthAndData();
+    aOutData.AppendElements(bufferview.Data(), bufferview.Length());
+  } else {
+    return false;
+  }
+  return true;
+}
+
 } // namespace dom
 } // namespace mozilla
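
With CopyArrayBufferViewOrArrayBufferData() in place, every (ArrayBufferView or ArrayBuffer) entry point in this patch follows the same calling convention: copy the union's bytes out up front, reject with InvalidAccessError when neither flavor was passed, and from then on touch only the owned nsTArray. A hedged sketch with a hypothetical method and a DoAsyncWork() placeholder:

  already_AddRefed<Promise>
  MediaKeys::ExampleMethod(const ArrayBufferViewOrArrayBuffer& aArg, ErrorResult& aRv)
  {
    nsRefPtr<Promise> promise(MakePromise(aRv));
    if (aRv.Failed()) {
      return nullptr;
    }
    nsTArray<uint8_t> data;
    if (!CopyArrayBufferViewOrArrayBufferData(aArg, data)) {
      promise->MaybeReject(NS_ERROR_DOM_INVALID_ACCESS_ERR);
      return promise.forget();
    }
    // `data` is a stable owned copy, safe to Move() to another thread.
    DoAsyncWork(StorePromise(promise), data); // DoAsyncWork() is a placeholder.
    return promise.forget();
  }
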
--- a/content/media/eme/MediaKeys.h
+++ b/content/media/eme/MediaKeys.h
@@ -12,30 +12,37 @@
 #include "nsISupports.h"
 #include "mozilla/Attributes.h"
 #include "mozilla/RefPtr.h"
 #include "nsCOMPtr.h"
 #include "nsCycleCollectionParticipant.h"
 #include "nsRefPtrHashtable.h"
 #include "mozilla/dom/Promise.h"
 #include "mozilla/dom/MediaKeysBinding.h"
+#include "mozilla/dom/UnionTypes.h"
 
 namespace mozilla {
 
 class CDMProxy;
 
 namespace dom {
 
 class MediaKeySession;
 
 typedef nsRefPtrHashtable<nsStringHashKey, MediaKeySession> KeySessionHashMap;
 typedef nsRefPtrHashtable<nsUint32HashKey, dom::Promise> PromiseHashMap;
 typedef nsRefPtrHashtable<nsUint32HashKey, MediaKeySession> PendingKeySessionsHashMap;
 typedef uint32_t PromiseId;
 
+// Helper function to extract data coming in from JS in an
+// (ArrayBuffer or ArrayBufferView) IDL typed function argument.
+bool
+CopyArrayBufferViewOrArrayBufferData(const ArrayBufferViewOrArrayBuffer& aBufferOrView,
+                                     nsTArray<uint8_t>& aOutData);
+
 // This class is used on the main thread only.
 // Note: its addref/release is not (and can't be) thread safe!
 class MediaKeys MOZ_FINAL : public nsISupports,
                             public nsWrapperCache
 {
   ~MediaKeys();
 
 public:
@@ -48,26 +55,26 @@ public:
 
   virtual JSObject* WrapObject(JSContext* aCx) MOZ_OVERRIDE;
 
   // JavaScript: readonly attribute DOMString keySystem;
   void GetKeySystem(nsString& retval) const;
 
   // JavaScript: MediaKeys.createSession()
   already_AddRefed<Promise> CreateSession(const nsAString& aInitDataType,
-                                          const Uint8Array& aInitData,
+                                          const ArrayBufferViewOrArrayBuffer& aInitData,
                                           SessionType aSessionType,
                                           ErrorResult& aRv);
 
   // JavaScript: MediaKeys.loadSession()
   already_AddRefed<Promise> LoadSession(const nsAString& aSessionId,
                                         ErrorResult& aRv);
 
   // JavaScript: MediaKeys.SetServerCertificate()
-  already_AddRefed<Promise> SetServerCertificate(const Uint8Array& aServerCertificate,
+  already_AddRefed<Promise> SetServerCertificate(const ArrayBufferViewOrArrayBuffer& aServerCertificate,
                                                  ErrorResult& aRv);
 
   // JavaScript: MediaKeys.create()
   static
   already_AddRefed<Promise> Create(const GlobalObject& aGlobal,
                                    const nsAString& aKeySystem,
                                    ErrorResult& aRv);
 
--- a/content/media/eme/moz.build
+++ b/content/media/eme/moz.build
@@ -1,36 +1,36 @@
 # -*- Mode: python; c-basic-offset: 4; indent-tabs-mode: nil; tab-width: 40 -*-
 # vim: set filetype=python:
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 EXPORTS.mozilla.dom += [
+    'MediaEncryptedEvent.h',
     'MediaKeyError.h',
     'MediaKeyMessageEvent.h',
-    'MediaKeyNeededEvent.h',
     'MediaKeys.h',
     'MediaKeySession.h',
 ]
 
 EXPORTS.mozilla += [
     'CDMCallbackProxy.h',
     'CDMCaps.h',
     'CDMProxy.h',
     'EMELog.h'
 ]
 
 UNIFIED_SOURCES += [
     'CDMCallbackProxy.cpp',
     'CDMCaps.cpp',
     'CDMProxy.cpp',
     'EMELog.cpp',
+    'MediaEncryptedEvent.cpp',
     'MediaKeyError.cpp',
     'MediaKeyMessageEvent.cpp',
-    'MediaKeyNeededEvent.cpp',
     'MediaKeys.cpp',
     'MediaKeySession.cpp',
 ]
 
 FINAL_LIBRARY = 'xul'
 
 FAIL_ON_WARNINGS = True
--- a/content/media/fmp4/MP4Reader.cpp
+++ b/content/media/fmp4/MP4Reader.cpp
@@ -220,17 +220,17 @@ public:
     , mInitDataType(aInitDataType)
   {
   }
   NS_IMETHOD Run() {
     // Note: Null check the owner, as the decoder could have been shut down
     // since this event was dispatched.
     MediaDecoderOwner* owner = mDecoder->GetOwner();
     if (owner) {
-      owner->DispatchNeedKey(mInitData, mInitDataType);
+      owner->DispatchEncrypted(mInitData, mInitDataType);
     }
     mDecoder = nullptr;
     return NS_OK;
   }
 private:
   nsRefPtr<AbstractMediaDecoder> mDecoder;
   nsTArray<uint8_t> mInitData;
   nsString mInitDataType;
--- a/content/media/gmp/GMPChild.cpp
+++ b/content/media/gmp/GMPChild.cpp
@@ -19,17 +19,17 @@
 using mozilla::dom::CrashReporterChild;
 
 #ifdef XP_WIN
 #include <stdlib.h> // for _exit()
 #else
 #include <unistd.h> // for _exit()
 #endif
 
-#if defined(XP_WIN)
+#if defined(MOZ_SANDBOX) && defined(XP_WIN)
 #define TARGET_SANDBOX_EXPORTS
 #include "mozilla/sandboxTarget.h"
 #elif defined (MOZ_GMP_SANDBOX)
 #if defined(XP_LINUX) || defined(XP_MACOSX)
 #include "mozilla/Sandbox.h"
 #endif
 #endif
 
@@ -230,17 +230,17 @@ GMPChild::Init(const std::string& aPlugi
   SendPCrashReporterConstructor(CrashReporter::CurrentThreadId());
 #endif
 
 #if defined(XP_MACOSX) && defined(MOZ_GMP_SANDBOX)
   mPluginPath = aPluginPath;
   return true;
 #endif
 
-#if defined(XP_WIN)
+#if defined(MOZ_SANDBOX) && defined(XP_WIN)
   mozilla::SandboxTarget::Instance()->StartSandbox();
 #endif
 
   return LoadPluginLibrary(aPluginPath);
 }
 
 bool
 GMPChild::LoadPluginLibrary(const std::string& aPluginPath)
--- a/content/media/moz.build
+++ b/content/media/moz.build
@@ -58,27 +58,30 @@ TEST_DIRS += [
     'gtest',
 ]
 
 MOCHITEST_MANIFESTS += ['test/mochitest.ini']
 MOCHITEST_CHROME_MANIFESTS += ['test/chrome.ini']
 
 EXPORTS += [
     'AbstractMediaDecoder.h',
+    'AudioBufferUtils.h',
     'AudioChannelFormat.h',
     'AudioCompactor.h',
     'AudioMixer.h',
     'AudioSampleFormat.h',
     'AudioSegment.h',
     'AudioStream.h',
     'BufferMediaResource.h',
+    'CubebUtils.h',
     'DecoderTraits.h',
     'DOMMediaStream.h',
     'EncodedBufferCache.h',
     'FileBlockCache.h',
+    'GraphDriver.h',
     'Latency.h',
     'MediaCache.h',
     'MediaData.h',
     'MediaDataDecodedListener.h',
     'MediaDecoder.h',
     'MediaDecoderOwner.h',
     'MediaDecoderReader.h',
     'MediaDecoderStateMachine.h',
@@ -127,19 +130,21 @@ UNIFIED_SOURCES += [
     'AudioChannelFormat.cpp',
     'AudioCompactor.cpp',
     'AudioSegment.cpp',
     'AudioSink.cpp',
     'AudioStream.cpp',
     'AudioStreamTrack.cpp',
     'AudioTrack.cpp',
     'AudioTrackList.cpp',
+    'CubebUtils.cpp',
     'DOMMediaStream.cpp',
     'EncodedBufferCache.cpp',
     'FileBlockCache.cpp',
+    'GraphDriver.cpp',
     'MediaCache.cpp',
     'MediaData.cpp',
     'MediaDecoder.cpp',
     'MediaDecoderReader.cpp',
     'MediaDecoderStateMachine.cpp',
     'MediaDecoderStateMachineScheduler.cpp',
     'MediaRecorder.cpp',
     'MediaResource.cpp',
--- a/content/media/omx/OmxDecoder.cpp
+++ b/content/media/omx/OmxDecoder.cpp
@@ -770,16 +770,18 @@ bool OmxDecoder::ReadVideo(VideoFrame *a
 
   aFrame->mSize = 0;
 
   if (err == OK) {
     int64_t timeUs;
     int32_t unreadable;
     int32_t keyFrame;
 
+    size_t length = mVideoBuffer->range_length();
+
     if (!mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs) ) {
       NS_WARNING("OMX decoder did not return frame time");
       return false;
     }
 
     if (!mVideoBuffer->meta_data()->findInt32(kKeyIsSyncFrame, &keyFrame)) {
       keyFrame = 0;
     }
@@ -812,30 +814,29 @@ bool OmxDecoder::ReadVideo(VideoFrame *a
       aFrame->mRotation = mVideoRotation;
       aFrame->mTimeUs = timeUs;
       aFrame->mKeyFrame = keyFrame;
       aFrame->Y.mWidth = mVideoWidth;
       aFrame->Y.mHeight = mVideoHeight;
       // Release the video buffer here so OmxDecoder can keep holding it;
       // the MediaBuffer's ref count drops from 2 to 1.
       ReleaseVideoBuffer();
-    } else if (mVideoBuffer->range_length() > 0) {
+    } else if (length > 0) {
       char *data = static_cast<char *>(mVideoBuffer->data()) + mVideoBuffer->range_offset();
-      size_t length = mVideoBuffer->range_length();
 
       if (unreadable) {
         LOG(PR_LOG_DEBUG, "video frame is unreadable");
       }
 
       if (!ToVideoFrame(aFrame, timeUs, data, length, keyFrame)) {
         return false;
       }
     }
-
-    if (aKeyframeSkip && timeUs < aTimeUs) {
+    // Check if this frame is valid or not. If not, skip it.
+    if ((aKeyframeSkip && timeUs < aTimeUs) || length == 0) {
       aFrame->mShouldSkip = true;
     }
   }
   else if (err == INFO_FORMAT_CHANGED) {
     // If the format changed, update our cached info.
     if (!SetVideoFormat()) {
       return false;
     } else {
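
The hoist of `length` matters for ordering: mVideoBuffer->range_length() must be sampled while the buffer is still held, because the keyframe branch calls ReleaseVideoBuffer() before the new zero-length skip check runs. A sketch of the constraint (simplified locals, assumed semantics):

  // Sample the length before any path can release the buffer.
  size_t length = buffer->range_length();
  // ... keyframe branch may run here ...
  ReleaseVideoBuffer();
  // Safe: the skip check reads the cached value, not the released buffer.
  if ((keyframeSkip && timeUs < targetUs) || length == 0) {
    frame->mShouldSkip = true;
  }
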
--- a/content/media/webaudio/AudioContext.cpp
+++ b/content/media/webaudio/AudioContext.cpp
@@ -66,18 +66,18 @@ NS_IMPL_RELEASE_INHERITED(AudioContext, 
 NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION_INHERITED(AudioContext)
 NS_INTERFACE_MAP_END_INHERITING(DOMEventTargetHelper)
 
 static float GetSampleRateForAudioContext(bool aIsOffline, float aSampleRate)
 {
   if (aIsOffline) {
     return aSampleRate;
   } else {
-    AudioStream::InitPreferredSampleRate();
-    return static_cast<float>(AudioStream::PreferredSampleRate());
+    CubebUtils::InitPreferredSampleRate();
+    return static_cast<float>(CubebUtils::PreferredSampleRate());
   }
 }
 
 AudioContext::AudioContext(nsPIDOMWindow* aWindow,
                            bool aIsOffline,
                            AudioChannel aChannel,
                            uint32_t aNumberOfChannels,
                            uint32_t aLength,
@@ -513,17 +513,17 @@ void
 AudioContext::UpdatePannerSource()
 {
   mPannerNodes.EnumerateEntries(FindConnectedSourcesOn, nullptr);
 }
 
 uint32_t
 AudioContext::MaxChannelCount() const
 {
-  return mIsOffline ? mNumberOfChannels : AudioStream::MaxNumberOfChannels();
+  return mIsOffline ? mNumberOfChannels : CubebUtils::MaxNumberOfChannels();
 }
 
 MediaStreamGraph*
 AudioContext::Graph() const
 {
   return Destination()->Stream()->Graph();
 }
 
--- a/content/media/webaudio/AudioDestinationNode.cpp
+++ b/content/media/webaudio/AudioDestinationNode.cpp
@@ -317,17 +317,17 @@ AudioDestinationNode::AudioDestinationNo
   , mHasFinished(false)
   , mAudioChannelAgentPlaying(false)
   , mExtraCurrentTime(0)
   , mExtraCurrentTimeSinceLastStartedBlocking(0)
   , mExtraCurrentTimeUpdatedSinceLastStableState(false)
 {
   MediaStreamGraph* graph = aIsOffline ?
                             MediaStreamGraph::CreateNonRealtimeInstance(aSampleRate) :
-                            MediaStreamGraph::GetInstance();
+                            MediaStreamGraph::GetInstance(DOMMediaStream::HINT_CONTENTS_AUDIO, aChannel);
   AudioNodeEngine* engine = aIsOffline ?
                             new OfflineDestinationNodeEngine(this, aNumberOfChannels,
                                                              aLength, aSampleRate) :
                             static_cast<AudioNodeEngine*>(new DestinationNodeEngine(this));
 
   mStream = graph->CreateAudioNodeStream(engine, MediaStreamGraph::EXTERNAL_STREAM);
   mStream->AddMainThreadListener(this);
   mStream->AddAudioOutput(&gWebAudioOutputKey);
--- a/content/media/webaudio/test/mochitest.ini
+++ b/content/media/webaudio/test/mochitest.ini
@@ -1,10 +1,10 @@
 [DEFAULT]
-skip-if = (buildapp == 'b2g' && (toolkit != 'gonk' || debug)) #b2g-debug,b2g-desktop(bug 916135)
+skip-if = ((buildapp == 'mulet' || buildapp == 'b2g') && (toolkit != 'gonk' || debug)) #b2g-debug,b2g-desktop(bug 916135)
 support-files =
   audio-expected.wav
   audio-mono-expected-2.wav
   audio-mono-expected.wav
   audio-quad.wav
   audio.ogv
   audioBufferSourceNodeNeutered_worker.js
   invalid.txt
@@ -83,16 +83,17 @@ skip-if = toolkit == 'android' # bug 105
 [test_convolverNodeChannelCount.html]
 [test_convolverNodePassThrough.html]
 [test_convolverNodeWithGain.html]
 [test_currentTime.html]
 [test_decodeMultichannel.html]
 [test_delayNode.html]
 [test_delayNodeAtMax.html]
 [test_delayNodeChannelChanges.html]
+skip-if = toolkit == 'android' # bug 1056706
 [test_delayNodeCycles.html]
 [test_delayNodePassThrough.html]
 [test_delayNodeSmallMaxDelay.html]
 [test_delayNodeTailIncrease.html]
 [test_delayNodeTailWithDisconnect.html]
 [test_delayNodeTailWithGain.html]
 [test_delayNodeTailWithReconnect.html]
 [test_delayNodeWithGain.html]
--- a/content/media/webaudio/test/test_delayNodeChannelChanges.html
+++ b/content/media/webaudio/test/test_delayNodeChannelChanges.html
@@ -6,16 +6,17 @@
   <script type="text/javascript" src="webaudio.js"></script>
   <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
 </head>
 <body>
 <pre id="test">
 <script class="testbody" type="text/javascript">
 
 SimpleTest.waitForExplicitFinish();
+SimpleTest.requestCompleteLog();
 
 const bufferSize = 4096;
 
 var ctx;
 var testDelay;
 var stereoDelay;
 var invertor;
 
--- a/content/media/webrtc/AudioOutputObserver.h
+++ b/content/media/webrtc/AudioOutputObserver.h
@@ -1,55 +1,63 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef AUDIOOUTPUTOBSERVER_H_
 #define AUDIOOUTPUTOBSERVER_H_
 
 #include "mozilla/StaticPtr.h"
+#include "AudioMixer.h"
 
 namespace webrtc {
 class SingleRwFifo;
 }
 
 namespace mozilla {
 
 typedef struct FarEndAudioChunk_ {
   uint16_t mSamples;
   bool mOverrun;
   int16_t mData[1]; // variable-length
 } FarEndAudioChunk;
 
 // XXX Really a singleton currently
-class AudioOutputObserver // : public MSGOutputObserver
+class AudioOutputObserver : public MixerCallbackReceiver
 {
 public:
   AudioOutputObserver();
-  virtual ~AudioOutputObserver();
+
+  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(AudioOutputObserver);
+
+  void MixerCallback(AudioDataValue* aMixedBuffer,
+                     AudioSampleFormat aFormat,
+                     uint32_t aChannels,
+                     uint32_t aFrames,
+                     uint32_t aSampleRate) MOZ_OVERRIDE;
 
   void Clear();
   void InsertFarEnd(const AudioDataValue *aBuffer, uint32_t aFrames, bool aOverran,
                     int aFreq, int aChannels, AudioSampleFormat aFormat);
   uint32_t PlayoutFrequency() { return mPlayoutFreq; }
   uint32_t PlayoutChannels() { return mPlayoutChannels; }
 
   FarEndAudioChunk *Pop();
   uint32_t Size();
 
 private:
+  virtual ~AudioOutputObserver();
   uint32_t mPlayoutFreq;
   uint32_t mPlayoutChannels;
 
   nsAutoPtr<webrtc::SingleRwFifo> mPlayoutFifo;
   uint32_t mChunkSize;
 
   // chunking to 10ms support
   FarEndAudioChunk *mSaved; // can't be nsAutoPtr since we need to use moz_free()
   uint32_t mSamplesSaved;
 };
 
-// XXX until there's a registration API in MSG
-extern StaticAutoPtr<AudioOutputObserver> gFarendObserver;
+extern StaticRefPtr<AudioOutputObserver> gFarendObserver;
 
 }
 
 #endif
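
The header change converts AudioOutputObserver from a manually owned singleton (StaticAutoPtr plus public destructor) into a thread-safe refcounted MixerCallbackReceiver with a private destructor, so the mixer callback machinery can hold strong references across threads. The ownership idiom in miniature, with a hypothetical class name:

  // Hypothetical class illustrating the idiom adopted above.
  class ExampleObserver : public MixerCallbackReceiver
  {
  public:
    NS_INLINE_DECL_THREADSAFE_REFCOUNTING(ExampleObserver);
    void MixerCallback(AudioDataValue* aMixedBuffer, AudioSampleFormat aFormat,
                       uint32_t aChannels, uint32_t aFrames,
                       uint32_t aSampleRate) MOZ_OVERRIDE;
  private:
    ~ExampleObserver() {}            // Private: destruction only via Release().
  };

  extern StaticRefPtr<ExampleObserver> gObserver; // Strong ref, unlike StaticAutoPtr.
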
--- a/content/media/webrtc/MediaEngineWebRTC.h
+++ b/content/media/webrtc/MediaEngineWebRTC.h
@@ -399,17 +399,16 @@ public:
   virtual void EnumerateAudioDevices(MediaSourceType,
                                     nsTArray<nsRefPtr<MediaEngineAudioSource> >*);
 private:
   ~MediaEngineWebRTC() {
     Shutdown();
 #ifdef MOZ_B2G_CAMERA
     AsyncLatencyLogger::Get()->Release();
 #endif
-    // XXX
     gFarendObserver = nullptr;
   }
 
   nsCOMPtr<nsIThread> mThread;
 
   Mutex mMutex;
 
   // protected with mMutex:
--- a/content/media/webrtc/MediaEngineWebRTCAudio.cpp
+++ b/content/media/webrtc/MediaEngineWebRTCAudio.cpp
@@ -44,17 +44,17 @@ extern PRLogModuleInfo* GetMediaManagerL
 #endif
 
 /**
  * Webrtc audio source.
  */
 NS_IMPL_ISUPPORTS0(MediaEngineWebRTCAudioSource)
 
 // XXX temp until MSG supports registration
-StaticAutoPtr<AudioOutputObserver> gFarendObserver;
+StaticRefPtr<AudioOutputObserver> gFarendObserver;
 
 AudioOutputObserver::AudioOutputObserver()
   : mPlayoutFreq(0)
   , mPlayoutChannels(0)
   , mChunkSize(0)
   , mSaved(nullptr)
   , mSamplesSaved(0)
 {
@@ -85,16 +85,29 @@ AudioOutputObserver::Pop()
 }
 
 uint32_t
 AudioOutputObserver::Size()
 {
   return mPlayoutFifo->size();
 }
 
+void
+AudioOutputObserver::MixerCallback(AudioDataValue* aMixedBuffer,
+                                   AudioSampleFormat aFormat,
+                                   uint32_t aChannels,
+                                   uint32_t aFrames,
+                                   uint32_t aSampleRate)
+{
+  if (gFarendObserver) {
+    gFarendObserver->InsertFarEnd(aMixedBuffer, aFrames, false,
+                                  aSampleRate, aChannels, aFormat);
+  }
+}
+
 // static
 void
 AudioOutputObserver::InsertFarEnd(const AudioDataValue *aBuffer, uint32_t aFrames, bool aOverran,
                                   int aFreq, int aChannels, AudioSampleFormat aFormat)
 {
   if (mPlayoutChannels != 0) {
     if (mPlayoutChannels != static_cast<uint32_t>(aChannels)) {
       MOZ_CRASH();
--- a/content/media/webspeech/synth/ipc/test/file_ipc.html
+++ b/content/media/webspeech/synth/ipc/test/file_ipc.html
@@ -45,29 +45,28 @@
     let VALID_ACTIONS = ['suite_start', 'suite_end', 'test_start', 'test_end', 'test_status', 'process_output', 'log'];
     function validStructuredMessage(message) {
       return message.action !== undefined && VALID_ACTIONS.indexOf(message.action) >= 0;
     }
     function onTestMessage(data) {
       let message = SpecialPowers.wrap(data).data.msg;
 
       if (validStructuredMessage(message)) {
-        if (message.test === undefined || message.message === undefined) {
-            return;
-        }
-
-        let details = message.test + " | " + message.message;
-
-        switch(message.action) {
+        switch (message.action) {
           case "test_status":
+            ok(message.expected === undefined, message.subtest, message.message);
+            break;
           case "test_end":
             ok(message.expected === undefined, message.test, message.message);
             break;
+          case "log":
+            info(message.message);
+            break;
           default:
-            info(details);
+            // nothing
         }
       }
     }
 
     function onTestComplete() {
       let comp = SpecialPowers.wrap(SpecialPowers.Components);
       let mm = SpecialPowers.getBrowserFrameMessageManager(iframe);
       let spObserver = comp.classes["@mozilla.org/special-powers-observer;1"]
--- a/dom/canvas/ImageEncoder.cpp
+++ b/dom/canvas/ImageEncoder.cpp
@@ -292,21 +292,20 @@ ImageEncoder::ExtractDataInternal(const 
     // no context, so we have to encode an empty image
     // note that if we didn't have a current context, the spec says we're
     // supposed to just return transparent black pixels of the canvas
     // dimensions.
     RefPtr<DataSourceSurface> emptyCanvas =
       Factory::CreateDataSourceSurfaceWithStride(IntSize(aSize.width, aSize.height),
                                                  SurfaceFormat::B8G8R8A8,
                                                  4 * aSize.width);
-
-    if (!emptyCanvas) {
-      NS_ERROR("Failded to create DataSourceSurface");
+    if (NS_WARN_IF(!emptyCanvas)) {
       return NS_ERROR_INVALID_ARG;
     }
+
     ClearDataSourceSurface(emptyCanvas);
     DataSourceSurface::MappedSurface map;
     if (!emptyCanvas->Map(DataSourceSurface::MapType::WRITE, &map)) {
       return NS_ERROR_INVALID_ARG;
     }
     rv = aEncoder->InitFromData(map.mData,
                                 aSize.width * aSize.height * 4,
                                 aSize.width,
--- a/dom/canvas/WebGLContext.cpp
+++ b/dom/canvas/WebGLContext.cpp
@@ -1486,17 +1486,17 @@ WebGLContext::GetSurfaceSnapshot(bool* a
 
     bool hasAlpha = mOptions.alpha;
     SurfaceFormat surfFormat = hasAlpha ? SurfaceFormat::B8G8R8A8
                                         : SurfaceFormat::B8G8R8X8;
     RefPtr<DataSourceSurface> surf;
     surf = Factory::CreateDataSourceSurfaceWithStride(IntSize(mWidth, mHeight),
                                                       surfFormat,
                                                       mWidth * 4);
-    if (!surf) {
+    if (NS_WARN_IF(!surf)) {
         return nullptr;
     }
 
     gl->MakeCurrent();
     {
         ScopedBindFramebuffer autoFB(gl, 0);
         ClearBackbufferIfNeeded();
         ReadPixelsIntoDataSurface(gl, surf);
new file mode 100644
--- /dev/null
+++ b/dom/devicestorage/ipc/ipc.json
@@ -0,0 +1,7 @@
+{
+"runtests":{
+},
+"excludetests":{
+    "dom/devicestorage/test/test_dirs.html":"excluded"
+   }
+}
--- a/dom/devicestorage/ipc/mochitest.ini
+++ b/dom/devicestorage/ipc/mochitest.ini
@@ -1,6 +1,8 @@
 [DEFAULT]
 skip-if = toolkit == 'android' || e10s #bug 781789 & bug 782275
-support-files = ../test/devicestorage_common.js
+support-files =
+  ../test/devicestorage_common.js
+  ipc.json
 
 [test_ipc.html]
 skip-if = buildapp == 'mulet' || buildapp == 'b2g' # b2g(nested ipc not working) b2g-debug(nested ipc not working) b2g-desktop(nested ipc not working)
--- a/dom/devicestorage/ipc/test_ipc.html
+++ b/dom/devicestorage/ipc/test_ipc.html
@@ -44,28 +44,28 @@
     let VALID_ACTIONS = ['suite_start', 'suite_end', 'test_start', 'test_end', 'test_status', 'process_output', 'log'];
     function validStructuredMessage(message) {
       return message.action !== undefined && VALID_ACTIONS.indexOf(message.action) >= 0;
     }
     function onTestMessage(data) {
       let message = SpecialPowers.wrap(data).data.msg;
 
       if (validStructuredMessage(message)) {
-        if (message.test === undefined || message.message === undefined) {
-          return;
-        }
-
         switch (message.action) {
           case "test_status":
+            ok(message.expected === undefined, message.subtest, message.message);
+            break;
           case "test_end":
             ok(message.expected === undefined, message.test, message.message);
             break;
+          case "log":
+            info(message.message);
+            break;
           default:
-            let details = message.test + " | " + message.message;
-            info(details);
+            // nothing
         }
       }
     }
 
     function onTestComplete() {
       let comp = SpecialPowers.wrap(SpecialPowers.Components);
       let mm = SpecialPowers.getBrowserFrameMessageManager(iframe);
       let spObserver = comp.classes["@mozilla.org/special-powers-observer;1"]
@@ -129,18 +129,18 @@
       }
 
       iframe.addEventListener("mozbrowserloadend", iframeLoadFirst);
 
       // Strip this filename and one directory level and then add "/test".
       let href =  window.location.href;
       href = href.substring(0, href.lastIndexOf('/'));
       href = href.substring(0, href.lastIndexOf('/'));
-      iframe.src = href + "/test?consoleLevel=INFO";
-
+      let manifest = "tests/dom/devicestorage/ipc/ipc.json";
+      iframe.src = href + "/test?consoleLevel=INFO&testManifest=" + manifest;
       document.body.appendChild(iframe);
     }
 
     addEventListener("load", function() {
 
       SpecialPowers.addPermission("browser", true, document);
       SpecialPowers.pushPrefEnv({
         "set": [
--- a/dom/indexedDB/ipc/test_ipc.html
+++ b/dom/indexedDB/ipc/test_ipc.html
@@ -60,28 +60,28 @@
     function validStructuredMessage(message) {
       return message.action !== undefined && VALID_ACTIONS.indexOf(message.action) >= 0;
     }
     function onTestMessage(data) {
       seenTestMessage = true;
       let message = SpecialPowers.wrap(data).data.msg;
 
       if (validStructuredMessage(message)) {
-        if (message.test === undefined || message.message === undefined) {
-          return;
-        }
-
         switch (message.action) {
           case "test_status":
+            ok(message.expected === undefined, message.subtest, message.message);
+            break;
           case "test_end":
             ok(message.expected === undefined, message.test, message.message);
             break;
+          case "log":
+            info(message.message);
+            break;
           default:
-            let details = message.test + " | " + message.message;
-            info(details);
+            // nothing
         }
       }
     }
 
     let usingChildProcess = false;
 
     function onProcessType(data) {
       let isMainProcess = SpecialPowers.wrap(data).data.isMainProcess;
--- a/dom/inputmethod/MozKeyboard.js
+++ b/dom/inputmethod/MozKeyboard.js
@@ -227,17 +227,17 @@ MozInputMethod.prototype = {
           this.setInputContext(json);
         }
         else {
           this.setInputContext(null);
         }
         break;
       case 'Keyboard:SelectionChange':
         if (this.inputcontext) {
-          this._inputcontext.updateSelectionContext(json);
+          this._inputcontext.updateSelectionContext(json, false);
         }
         break;
       case 'Keyboard:GetContext:Result:OK':
         this.setInputContext(json);
         break;
       case 'Keyboard:LayoutsChange':
         this._layouts = json;
         break;
@@ -449,17 +449,17 @@ MozInputContext.prototype = {
     let resolver = this.takePromiseResolver(json.requestId);
 
     if (!resolver) {
       return;
     }
 
     // Update context first before resolving promise to avoid race condition
     if (json.selectioninfo) {
-      this.updateSelectionContext(json.selectioninfo);
+      this.updateSelectionContext(json.selectioninfo, true);
     }
 
     switch (msg.name) {
       case "Keyboard:SendKey:Result:OK":
         resolver.resolve();
         break;
       case "Keyboard:SendKey:Result:Error":
         resolver.reject(json.error);
@@ -486,17 +486,17 @@ MozInputContext.prototype = {
         break;
       default:
         dump("Could not find a handler for " + msg.name);
         resolver.reject();
         break;
     }
   },
 
-  updateSelectionContext: function ic_updateSelectionContext(ctx) {
+  updateSelectionContext: function ic_updateSelectionContext(ctx, ownAction) {
     if (!this._context) {
       return;
     }
 
     let selectionDirty = this._context.selectionStart !== ctx.selectionStart ||
           this._context.selectionEnd !== ctx.selectionEnd;
     let surroundDirty = this._context.textBeforeCursor !== ctx.textBeforeCursor ||
           this._context.textAfterCursor !== ctx.textAfterCursor;
@@ -504,35 +504,37 @@ MozInputContext.prototype = {
     this._context.selectionStart = ctx.selectionStart;
     this._context.selectionEnd = ctx.selectionEnd;
     this._context.textBeforeCursor = ctx.textBeforeCursor;
     this._context.textAfterCursor = ctx.textAfterCursor;
 
     if (selectionDirty) {
       this._fireEvent("selectionchange", {
         selectionStart: ctx.selectionStart,
-        selectionEnd: ctx.selectionEnd
+        selectionEnd: ctx.selectionEnd,
+        ownAction: ownAction
       });
     }
 
     if (surroundDirty) {
       this._fireEvent("surroundingtextchange", {
         beforeString: ctx.textBeforeCursor,
-        afterString: ctx.textAfterCursor
+        afterString: ctx.textAfterCursor,
+        ownAction: ownAction
       });
     }
   },
 
   _fireEvent: function ic_fireEvent(eventName, aDetail) {
     let detail = {
       detail: aDetail
     };
 
-    let event = new this._window.Event(eventName,
-                                       Cu.cloneInto(aDetail, this._window));
+    let event = new this._window.CustomEvent(eventName,
+                                             Cu.cloneInto(detail, this._window));
     this.__DOM_IMPL__.dispatchEvent(event);
   },
 
   // tag name of the input field
   get type() {
     return this._context.type;
   },
 
--- a/dom/inputmethod/mochitest/test_basic.html
+++ b/dom/inputmethod/mochitest/test_basic.html
@@ -139,23 +139,25 @@ function test_onSelectionChange() {
     if (failed) {
       inputmethod_cleanup();
     }
     else {
       test_onSurroundingTextChange();
     }
   }
 
-  gContext.onselectionchange = function() {
+  gContext.onselectionchange = function(evt) {
     ok(true, 'onselectionchange fired');
-    cleanup();
+    is(evt.detail.selectionStart, 10);
+    is(evt.detail.selectionEnd, 10);
+    ok(evt.detail.ownAction);
   };
 
   gContext.sendKey(0, 'j'.charCodeAt(0), 0).then(function() {
-    // do nothing and wait for onselectionchange event
+    cleanup();
   }, function(e) {
     ok(false, 'sendKey failed: ' + e.name);
     cleanup(true);
   });
 }
 
 function test_onSurroundingTextChange() {
   var sccTimeout = setTimeout(function() {
@@ -170,23 +172,25 @@ function test_onSurroundingTextChange() 
       inputmethod_cleanup();
     }
     else {
       // in case we want more tests leave this
       inputmethod_cleanup();
     }
   }
 
-  gContext.onsurroundingtextchange = function() {
+  gContext.onsurroundingtextchange = function(evt) {
     ok(true, 'onsurroundingtextchange fired');
-    cleanup();
+    is(evt.detail.beforeString, 'Xulei2013jj');
+    is(evt.detail.afterString, '');
+    ok(evt.detail.ownAction);
   };
 
   gContext.sendKey(0, 'j'.charCodeAt(0), 0).then(function() {
-    // do nothing and wait for onselectionchange event
+    cleanup();
   }, function(e) {
     ok(false, 'sendKey failed: ' + e.name);
     cleanup(true);
   });
 }
 
 </script>
 </pre>
--- a/dom/ipc/TabChild.cpp
+++ b/dom/ipc/TabChild.cpp
@@ -1527,16 +1527,18 @@ TabChild::DestroyWindow()
         if (info->mFileDescriptor.IsValid()) {
             MOZ_ASSERT(!info->mCanceled);
 
             nsRefPtr<CloseFileRunnable> runnable =
                 new CloseFileRunnable(info->mFileDescriptor);
             runnable->Dispatch();
         }
     }
+
+    mCachedFileDescriptorInfos.Clear();
 }
 
 void
 TabChild::ActorDestroy(ActorDestroyReason why)
 {
   if (mTabChildGlobal) {
     // The messageManager relays messages via the TabChild which
     // no longer exists.
--- a/dom/ipc/TabParent.cpp
+++ b/dom/ipc/TabParent.cpp
@@ -144,17 +144,22 @@ private:
         nsRefPtr<TabParent> tabParent;
         mTabParent.swap(tabParent);
 
         using mozilla::ipc::FileDescriptor;
 
         FileDescriptor::PlatformHandleType handle =
             FileDescriptor::PlatformHandleType(PR_FileDesc2NativeHandle(mFD));
 
-        mozilla::unused << tabParent->SendCacheFileDescriptor(mPath, FileDescriptor(handle));
+        // Our TabParent may have been destroyed already.  If so, don't send any
+        // fds over, just go back to the IO thread and close them.
+        if (!tabParent->IsDestroyed()) {
+          mozilla::unused << tabParent->SendCacheFileDescriptor(mPath,
+                                                                FileDescriptor(handle));
+        }
 
         nsCOMPtr<nsIEventTarget> eventTarget;
         mEventTarget.swap(eventTarget);
 
         if (NS_FAILED(eventTarget->Dispatch(this, NS_DISPATCH_NORMAL))) {
             NS_WARNING("Failed to dispatch to stream transport service!");
 
             // It's probably safer to take the main thread IO hit here rather
--- a/dom/media/MediaManager.cpp
+++ b/dom/media/MediaManager.cpp
@@ -2,16 +2,17 @@
 /* vim: set ts=2 et sw=2 tw=80: */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "MediaManager.h"
 
 #include "MediaStreamGraph.h"
+#include "mozilla/dom/MediaStreamTrack.h"
 #include "GetUserMediaRequest.h"
 #include "nsHashPropertyBag.h"
 #ifdef MOZ_WIDGET_GONK
 #include "nsIAudioManager.h"
 #endif
 #include "nsIDOMFile.h"
 #include "nsIEventTarget.h"
 #include "nsIUUIDGenerator.h"
@@ -476,29 +477,33 @@ AudioDevice::GetSource()
  * A subclass that we only use to stash internal pointers to MediaStreamGraph objects
  * that need to be cleaned up.
  */
 class nsDOMUserMediaStream : public DOMLocalMediaStream
 {
 public:
   static already_AddRefed<nsDOMUserMediaStream>
   CreateTrackUnionStream(nsIDOMWindow* aWindow,
-                         MediaEngineSource *aAudioSource,
-                         MediaEngineSource *aVideoSource)
+                         GetUserMediaCallbackMediaStreamListener* aListener,
+                         MediaEngineSource* aAudioSource,
+                         MediaEngineSource* aVideoSource)
   {
     DOMMediaStream::TrackTypeHints hints =
       (aAudioSource ? DOMMediaStream::HINT_CONTENTS_AUDIO : 0) |
       (aVideoSource ? DOMMediaStream::HINT_CONTENTS_VIDEO : 0);
 
-    nsRefPtr<nsDOMUserMediaStream> stream = new nsDOMUserMediaStream(aAudioSource);
+    nsRefPtr<nsDOMUserMediaStream> stream = new nsDOMUserMediaStream(aListener,
+                                                                     aAudioSource);
     stream->InitTrackUnionStream(aWindow, hints);
     return stream.forget();
   }
 
-  nsDOMUserMediaStream(MediaEngineSource *aAudioSource) :
+  nsDOMUserMediaStream(GetUserMediaCallbackMediaStreamListener* aListener,
+                       MediaEngineSource *aAudioSource) :
+    mListener(aListener),
     mAudioSource(aAudioSource),
     mEchoOn(true),
     mAgcOn(false),
     mNoiseOn(true),
 #ifdef MOZ_WEBRTC
     mEcho(webrtc::kEcDefault),
     mAgc(webrtc::kAgcDefault),
     mNoise(webrtc::kNsDefault),
@@ -524,16 +529,50 @@ public:
 
   virtual void Stop()
   {
     if (mSourceStream) {
       mSourceStream->EndAllTrackAndFinish();
     }
   }
 
+  // For gUM streams, we have a trackunion which assigns TrackIDs.  However, for a
+  // single-source trackunion like we have here, the TrackUnion will assign trackids
+  // that match the source's trackids, so we can avoid needing a mapping function.
+  // XXX This will not handle more complex cases well.
+  virtual void StopTrack(TrackID aTrackID)
+  {
+    if (mSourceStream) {
+      mSourceStream->EndTrack(aTrackID);
+      // We could override NotifyMediaStreamTrackEnded(), and maybe should, but it's
+      // risky to do late in a release since that will affect all track ends, and not
+      // just StopTrack()s.
+      if (GetDOMTrackFor(aTrackID)) {
+        mListener->StopTrack(aTrackID, !!GetDOMTrackFor(aTrackID)->AsAudioStreamTrack());
+      } else {
+        LOG(("StopTrack(%d) on non-existent track", aTrackID));
+      }
+    }
+  }
+
+#if 0
+  virtual void NotifyMediaStreamTrackEnded(dom::MediaStreamTrack* aTrack)
+  {
+    TrackID trackID = aTrack->GetTrackID();
+    // We override this so we can also tell the backend to stop capturing if the track ends
+    LOG(("track %d ending, type = %s",
+         trackID, aTrack->AsAudioStreamTrack() ? "audio" : "video"));
+    MOZ_ASSERT(aTrack->AsVideoStreamTrack() || aTrack->AsAudioStreamTrack());
+    mListener->StopTrack(trackID, !!aTrack->AsAudioStreamTrack());
+
+    // forward to superclass
+    DOMLocalMediaStream::NotifyMediaStreamTrackEnded(aTrack);
+  }
+#endif
+
   // Allow getUserMedia to pass input data directly to PeerConnection/MediaPipeline
   virtual bool AddDirectListener(MediaStreamDirectListener *aListener) MOZ_OVERRIDE
   {
     if (mSourceStream) {
       mSourceStream->AddDirectListener(aListener);
       return true; // application should ignore NotifyQueuedTrackData
     }
     return false;
@@ -571,16 +610,17 @@ public:
     // forward the request to the source and translate the ID
     GetStream()->AsProcessedStream()->ForwardTrackEnabled(aID, aEnabled);
   }
 
   // The actual MediaStream is a TrackUnionStream. But these resources need to be
   // explicitly destroyed too.
   nsRefPtr<SourceMediaStream> mSourceStream;
   nsRefPtr<MediaInputPort> mPort;
+  nsRefPtr<GetUserMediaCallbackMediaStreamListener> mListener;
   nsRefPtr<MediaEngineSource> mAudioSource; // so we can turn on AEC
   bool mEchoOn;
   bool mAgcOn;
   bool mNoiseOn;
   uint32_t mEcho;
   uint32_t mAgc;
   uint32_t mNoise;
   uint32_t mPlayoutDelay;
@@ -703,18 +743,18 @@ public:
         branch->GetBoolPref("media.getusermedia.noise_enabled", &noise_on);
         branch->GetIntPref("media.getusermedia.noise", &noise);
         branch->GetIntPref("media.getusermedia.playout_delay", &playout_delay);
       }
     }
 #endif
     // Create a media stream.
     nsRefPtr<nsDOMUserMediaStream> trackunion =
-      nsDOMUserMediaStream::CreateTrackUnionStream(window, mAudioSource,
-                                                   mVideoSource);
+      nsDOMUserMediaStream::CreateTrackUnionStream(window, mListener,
+                                                   mAudioSource, mVideoSource);
     if (!trackunion) {
       nsCOMPtr<nsIDOMGetUserMediaErrorCallback> error = mError.forget();
       LOG(("Returning error for getUserMedia() - no stream"));
       error->OnError(NS_LITERAL_STRING("NO_STREAM"));
       return NS_OK;
     }
     trackunion->AudioConfig(aec_on, (uint32_t) aec,
                             agc_on, (uint32_t) agc,
@@ -1600,21 +1640,26 @@ MediaManager::GetUserMedia(bool aPrivile
         }
       } else if (!Preferences::GetBool("media.getusermedia.screensharing.enabled", false)) {
         return runnable->Denied(NS_LITERAL_STRING("PERMISSION_DENIED"));
       }
       /* Deny screensharing if the requesting document is not from a host
        on the whitelist. */
       // Block screen/window sharing on Mac OSX 10.6 and WinXP until proved that they work
       if (
+#if defined(XP_MACOSX) || defined(XP_WIN)
+          (
+            !Preferences::GetBool("media.getusermedia.screensharing.allow_on_old_platforms", false) &&
 #if defined(XP_MACOSX)
-          !nsCocoaFeatures::OnLionOrLater() ||
+            !nsCocoaFeatures::OnLionOrLater()
 #endif
 #if defined (XP_WIN)
-          !IsVistaOrLater() ||
+            !IsVistaOrLater()
+#endif
+           ) ||
 #endif
           (!aPrivileged && !HostHasPermission(*docURI))) {
         return runnable->Denied(NS_LITERAL_STRING("PERMISSION_DENIED"));
       }
     }
   }
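
The preprocessor blocks above compose a single deny condition for screensharing. A minimal sketch of the resulting boolean (plain parameters standing in for the pref reads and OS checks; helper name hypothetical) may make it easier to audit:

    // Sketch of the effective deny condition, assuming boolean stand-ins for
    // the pref and OS version checks (hypothetical helper, not Gecko code):
    #include <cstdio>

    static bool ShouldDenyScreenShare(bool onOldPlatform, bool allowOldPlatformsPref,
                                      bool privileged, bool hostWhitelisted) {
      // Deny on pre-10.7 Mac / pre-Vista Windows unless the override pref is
      // set, and always deny unprivileged pages whose host is not whitelisted.
      return (onOldPlatform && !allowOldPlatformsPref) ||
             (!privileged && !hostWhitelisted);
    }

    int main() {
      std::printf("%d\n", ShouldDenyScreenShare(true, false, true, true));    // 1: old OS, no override
      std::printf("%d\n", ShouldDenyScreenShare(true, true, true, true));     // 0: pref override
      std::printf("%d\n", ShouldDenyScreenShare(false, false, false, false)); // 1: host not whitelisted
      return 0;
    }
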
 
 #ifdef MOZ_B2G_CAMERA
@@ -2052,17 +2097,18 @@ WindowsHashToArrayFunc (const uint64_t& 
     // to return only the latter. See bug 975177.
     bool capturing = false;
     if (aData) {
       uint32_t length = aData->Length();
       for (uint32_t i = 0; i < length; ++i) {
         nsRefPtr<GetUserMediaCallbackMediaStreamListener> listener =
           aData->ElementAt(i);
         if (listener->CapturingVideo() || listener->CapturingAudio() ||
-            listener->CapturingScreen() || listener->CapturingWindow()) {
+            listener->CapturingScreen() || listener->CapturingWindow() ||
+            listener->CapturingApplication()) {
           capturing = true;
           break;
         }
       }
     }
 
     if (capturing)
       array->AppendElement(window);
@@ -2084,38 +2130,40 @@ MediaManager::GetActiveMediaCaptureWindo
 
   *aArray = array;
   return NS_OK;
 }
 
 NS_IMETHODIMP
 MediaManager::MediaCaptureWindowState(nsIDOMWindow* aWindow, bool* aVideo,
                                       bool* aAudio, bool *aScreenShare,
-                                      bool* aWindowShare)
+                                      bool* aWindowShare, bool *aAppShare)
 {
   NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
   *aVideo = false;
   *aAudio = false;
   *aScreenShare = false;
   *aWindowShare = false;
+  *aAppShare = false;
 
-  nsresult rv = MediaCaptureWindowStateInternal(aWindow, aVideo, aAudio, aScreenShare, aWindowShare);
+  nsresult rv = MediaCaptureWindowStateInternal(aWindow, aVideo, aAudio, aScreenShare, aWindowShare, aAppShare);
 #ifdef DEBUG
   nsCOMPtr<nsPIDOMWindow> piWin = do_QueryInterface(aWindow);
-  LOG(("%s: window %lld capturing %s %s %s %s", __FUNCTION__, piWin ? piWin->WindowID() : -1,
+  LOG(("%s: window %lld capturing %s %s %s %s %s", __FUNCTION__, piWin ? piWin->WindowID() : -1,
        *aVideo ? "video" : "", *aAudio ? "audio" : "",
-       *aScreenShare ? "screenshare" : "",  *aWindowShare ? "windowshare" : ""));
+       *aScreenShare ? "screenshare" : "",  *aWindowShare ? "windowshare" : "",
+       *aAppShare ? "appshare" : ""));
 #endif
   return rv;
 }
 
 nsresult
 MediaManager::MediaCaptureWindowStateInternal(nsIDOMWindow* aWindow, bool* aVideo,
                                               bool* aAudio, bool *aScreenShare,
-                                              bool* aWindowShare)
+                                              bool* aWindowShare, bool *aAppShare)
 {
   // We need to return the union of all streams in all innerwindows that
   // correspond to that outerwindow.
 
   // Iterate the docshell tree to find all the child windows, find
   // all the listeners for each one, get the booleans, and merge the
   // results.
   nsCOMPtr<nsPIDOMWindow> piWin = do_QueryInterface(aWindow);
@@ -2140,31 +2188,34 @@ MediaManager::MediaCaptureWindowStateInt
             *aAudio = true;
           }
           if (listener->CapturingScreen()) {
             *aScreenShare = true;
           }
           if (listener->CapturingWindow()) {
             *aWindowShare = true;
           }
+          if (listener->CapturingApplication()) {
+            *aAppShare = true;
+          }
         }
       }
     }
 
     // iterate any children of *this* window (iframes, etc)
     nsCOMPtr<nsIDocShell> docShell = piWin->GetDocShell();
     if (docShell) {
       int32_t i, count;
       docShell->GetChildCount(&count);
       for (i = 0; i < count; ++i) {
         nsCOMPtr<nsIDocShellTreeItem> item;
         docShell->GetChildAt(i, getter_AddRefs(item));
         nsCOMPtr<nsPIDOMWindow> win = item ? item->GetWindow() : nullptr;
 
-        MediaCaptureWindowStateInternal(win, aVideo, aAudio, aScreenShare, aWindowShare);
+        MediaCaptureWindowStateInternal(win, aVideo, aAudio, aScreenShare, aWindowShare, aAppShare);
       }
     }
   }
   return NS_OK;
 }
 
 // XXX abstract out the iteration over all children and provide a function pointer and data ptr
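
For reference, MediaCaptureWindowStateInternal() ORs each listener's capture flags into the out-params and then descends into child docshells, so the reported state is the union over all inner windows. A minimal sketch of that accumulation (simplified stand-in types, not the Gecko classes):

    // Sketch of the union-across-the-window-tree accumulation, assuming
    // hypothetical stand-in types:
    #include <cstdio>
    #include <vector>

    struct Win {
      bool video = false;
      bool audio = false;
      std::vector<Win*> children;  // iframes, etc.
    };

    static void CaptureState(const Win& aWin, bool* aVideo, bool* aAudio) {
      *aVideo |= aWin.video;  // merge this window's state...
      *aAudio |= aWin.audio;
      for (const Win* child : aWin.children) {
        CaptureState(*child, aVideo, aAudio);  // ...then recurse into children
      }
    }

    int main() {
      Win iframe; iframe.audio = true;
      Win top; top.video = true; top.children.push_back(&iframe);
      bool video = false, audio = false;
      CaptureState(top, &video, &audio);
      std::printf("video=%d audio=%d\n", video, audio);  // video=1 audio=1
      return 0;
    }
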
 
@@ -2275,16 +2326,38 @@ GetUserMediaCallbackMediaStreamListener:
       new MediaOperationRunnable(mAudioSource ? MEDIA_STOP_TRACK : MEDIA_STOP,
                                  this, nullptr, nullptr,
                                  nullptr, mVideoSource,
                                  mFinished, mWindowID, nullptr));
     mMediaThread->Dispatch(runnable, NS_DISPATCH_NORMAL);
   }
 }
 
+// Stop the backend for a single track
+
+void
+GetUserMediaCallbackMediaStreamListener::StopTrack(TrackID aID, bool aIsAudio)
+{
+  if (((aIsAudio && mAudioSource) ||
+       (!aIsAudio && mVideoSource)) && !mStopped)
+  {
+    // XXX to support multiple tracks of a type in a stream, this should key off
+    // the TrackID and not just the type
+    nsRefPtr<MediaOperationRunnable> runnable(
+      new MediaOperationRunnable(MEDIA_STOP_TRACK,
+                                 this, nullptr, nullptr,
+                                 aIsAudio  ? mAudioSource : nullptr,
+                                 !aIsAudio ? mVideoSource : nullptr,
+                                 mFinished, mWindowID, nullptr));
+    mMediaThread->Dispatch(runnable, NS_DISPATCH_NORMAL);
+  } else {
+    LOG(("gUM track %d ended, but we don't have type %s",
+         aID, aIsAudio ? "audio" : "video"));
+  }
+}
 
 // Called from the MediaStreamGraph thread
 void
 GetUserMediaCallbackMediaStreamListener::NotifyFinished(MediaStreamGraph* aGraph)
 {
   mFinished = true;
   Invalidate(); // we know it's been activated
   NS_DispatchToMainThread(new GetUserMediaListenerRemove(mWindowID, this));
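
The new StopTrack() above forwards exactly one of the two capture sources to the media thread, selected by the track's type. A standalone sketch of that selection (hypothetical types; the real code packages it into a MediaOperationRunnable dispatched with NS_DISPATCH_NORMAL):

    // Sketch of the type-keyed source selection, assuming stand-in types:
    #include <cstdio>

    struct Source { const char* name; };

    static void StopOnMediaThread(Source* aAudio, Source* aVideo) {
      if (aAudio) std::printf("stopping audio source: %s\n", aAudio->name);
      if (aVideo) std::printf("stopping video source: %s\n", aVideo->name);
    }

    static void StopTrack(bool aIsAudio, Source* aAudioSrc, Source* aVideoSrc) {
      // Exactly one source is forwarded; the other slot stays null.
      StopOnMediaThread(aIsAudio ? aAudioSrc : nullptr,
                        !aIsAudio ? aVideoSrc : nullptr);
    }

    int main() {
      Source mic{"microphone"}, cam{"camera"};
      StopTrack(true, &mic, &cam);   // stops only the microphone
      StopTrack(false, &mic, &cam);  // stops only the camera
      return 0;
    }
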
--- a/dom/media/MediaManager.h
+++ b/dom/media/MediaManager.h
@@ -96,16 +96,18 @@ public:
     if (!mStream) {
       return nullptr;
     }
     return mStream->AsSourceStream();
   }
 
   void StopScreenWindowSharing();
 
+  void StopTrack(TrackID aID, bool aIsAudio);
+
   // mVideo/AudioSource are set by Activate(), so we assume they're capturing
   // if set and represent a real capture device.
   bool CapturingVideo()
   {
     NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
     return mVideoSource && !mStopped &&
            mVideoSource->GetMediaSource() == MediaSourceType::Camera &&
            (!mVideoSource->IsFake() ||
@@ -125,16 +127,22 @@ public:
            mVideoSource->GetMediaSource() == MediaSourceType::Screen;
   }
   bool CapturingWindow()
   {
     NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
     return mVideoSource && !mStopped && !mVideoSource->IsAvailable() &&
            mVideoSource->GetMediaSource() == MediaSourceType::Window;
   }
+  bool CapturingApplication()
+  {
+    NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
+    return mVideoSource && !mStopped && !mVideoSource->IsAvailable() &&
+           mVideoSource->GetMediaSource() == MediaSourceType::Application;
+  }
 
   void SetStopped()
   {
     mStopped = true;
   }
 
   // implement in .cpp to avoid circular dependency with MediaOperationRunnable
   // Can be invoked from EITHER MainThread or MSG thread
@@ -594,17 +602,17 @@ private:
 
   // Make private because we want only one instance of this class
   MediaManager();
 
   ~MediaManager() {}
 
   nsresult MediaCaptureWindowStateInternal(nsIDOMWindow* aWindow, bool* aVideo,
                                            bool* aAudio, bool *aScreenShare,
-                                           bool* aWindowShare);
+                                           bool* aWindowShare, bool *aAppShare);
 
   void StopScreensharing(uint64_t aWindowID);
   void StopScreensharing(nsPIDOMWindow *aWindow);
 
   void StopMediaStreams();
 
   // ONLY access from MainThread so we don't need to lock
   WindowTable mActiveWindows;
--- a/dom/media/PeerConnection.js
+++ b/dom/media/PeerConnection.js
@@ -290,16 +290,17 @@ function RTCPeerConnection() {
 
   this._onCreateOfferSuccess = null;
   this._onCreateOfferFailure = null;
   this._onCreateAnswerSuccess = null;
   this._onCreateAnswerFailure = null;
   this._onGetStatsSuccess = null;
   this._onGetStatsFailure = null;
   this._onReplaceTrackSender = null;
+  this._onReplaceTrackWithTrack = null;
   this._onReplaceTrackSuccess = null;
   this._onReplaceTrackFailure = null;
 
   this._pendingType = null;
   this._localType = null;
   this._remoteType = null;
   this._trickleIce = false;
   this._peerIdentity = null;
@@ -825,16 +826,17 @@ RTCPeerConnection.prototype = {
     // stream. However, since our MediaStreams currently are limited to one
     // track per type, we allow replacement with an outside track not already
     // in the same stream.
     //
     // Since a track may be replaced more than once, the track being replaced
     // may not be in the stream either, so we check neither arg right now.
 
     this._onReplaceTrackSender = sender;
+    this._onReplaceTrackWithTrack = withTrack;
     this._onReplaceTrackSuccess = onSuccess;
     this._onReplaceTrackFailure = onError;
     this._impl.replaceTrack(sender.track, withTrack, sender._stream);
   },
 
   close: function() {
     if (this._closed) {
       return;
@@ -1315,22 +1317,28 @@ PeerConnectionObserver.prototype = {
   },
 
   onRemoveTrack: function(track, type) {
     this.dispatchEvent(new this._dompc._win.MediaStreamTrackEvent("removetrack",
                                                                   { track: track }));
   },
 
   onReplaceTrackSuccess: function() {
-    this._dompc.callCB(this._dompc._onReplaceTrackSuccess);
+    var pc = this._dompc;
+    pc._onReplaceTrackSender.track = pc._onReplaceTrackWithTrack;
+    pc._onReplaceTrackWithTrack = null;
+    pc._onReplaceTrackSender = null;
+    pc.callCB(pc._onReplaceTrackSuccess);
   },
 
   onReplaceTrackError: function(code, message) {
-    this._dompc.callCB(this._dompc._onReplaceTrackError,
-                       new RTCError(code, message));
+    var pc = this._dompc;
+    pc._onReplaceTrackWithTrack = null;
+    pc._onReplaceTrackSender = null;
+    pc.callCB(pc._onReplaceTrackError, new RTCError(code, message));
   },
 
   foundIceCandidate: function(cand) {
     this.dispatchEvent(new this._dompc._win.RTCPeerConnectionIceEvent("icecandidate",
                                                                       { candidate: cand } ));
   },
 
   notifyDataChannel: function(channel) {
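
The observer callbacks above follow a stash-then-resolve pattern: the pending sender and replacement track are recorded before the async call, the sender's track is swapped on success, and the stashed references are cleared before the callback fires. A rough C++ analogue of that ordering (hypothetical types; the real flow lives in PeerConnection.js):

    // Rough analogue of the stash-then-resolve ordering, assuming
    // hypothetical stand-in types:
    #include <cstdio>
    #include <functional>
    #include <string>

    struct Sender { std::string track; };

    struct PendingReplaceTrack {
      Sender* sender = nullptr;
      std::string withTrack;

      void OnSuccess(const std::function<void()>& aCallback) {
        sender->track = withTrack;  // swap the track first...
        sender = nullptr;           // ...then clear the stashed state...
        withTrack.clear();
        aCallback();                // ...and only then fire the callback
      }
    };

    int main() {
      Sender s{"oldTrack"};
      PendingReplaceTrack pending{&s, "newTrack"};
      pending.OnSuccess([&] { std::printf("sender.track=%s\n", s.track.c_str()); });
      return 0;
    }
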
--- a/dom/media/nsIMediaManager.idl
+++ b/dom/media/nsIMediaManager.idl
@@ -7,18 +7,19 @@
 interface nsISupportsArray;
 interface nsIDOMWindow;
 
 %{C++
 #define NS_MEDIAMANAGERSERVICE_CID {0xabc622ea, 0x9655, 0x4123, {0x80, 0xd9, 0x22, 0x62, 0x1b, 0xdd, 0x54, 0x65}}
 #define MEDIAMANAGERSERVICE_CONTRACTID "@mozilla.org/mediaManagerService;1"
 %}
 
-[scriptable, builtinclass, uuid(f431b523-4536-4ba7-a2c1-7e1bf670d32a)]
+[scriptable, builtinclass, uuid(2ab0e6f7-9a5b-4b9a-901d-145531f47a6b)]
 interface nsIMediaManagerService : nsISupports
 {
   /* return an array of inner windows that have active captures */
   readonly attribute nsISupportsArray activeMediaCaptureWindows;
 
   /* Get the capture state for the given window and all descendant windows (iframes, etc) */
   void mediaCaptureWindowState(in nsIDOMWindow aWindow, out boolean aVideo, out boolean aAudio,
-                               [optional] out boolean aScreenShare, [optional] out boolean aWindowShare);
+                               [optional] out boolean aScreenShare, [optional] out boolean aWindowShare,
+                               [optional] out boolean aAppShare);
 };
--- a/dom/media/tests/ipc/test_ipc.html
+++ b/dom/media/tests/ipc/test_ipc.html
@@ -53,28 +53,28 @@
     let VALID_ACTIONS = ['suite_start', 'suite_end', 'test_start', 'test_end', 'test_status', 'process_output', 'log'];
     function validStructuredMessage(message) {
       return message.action !== undefined && VALID_ACTIONS.indexOf(message.action) >= 0;
     }
     function onTestMessage(data) {
       let message = SpecialPowers.wrap(data).data.msg;
 
       if (validStructuredMessage(message)) {
-        if (message.test === undefined || message.message === undefined) {
-          return;
-        }
-
         switch (message.action) {
           case "test_status":
+            ok(message.expected === undefined, message.subtest, message.message);
+            break;
           case "test_end":
             ok(message.expected === undefined, message.test, message.message);
             break;
+          case "log":
+            info(message.message);
+            break;
           default:
-            let details = message.test + " | " + message.message;
-            info(details);
+            // nothing
         }
       }
     }
 
     function onTestComplete() {
       let comp = SpecialPowers.wrap(SpecialPowers.Components);
       let mm = SpecialPowers.getBrowserFrameMessageManager(iframe);
       let spObserver = comp.classes["@mozilla.org/special-powers-observer;1"]
--- a/dom/media/tests/mochitest/test_peerConnection_replaceTrack.html
+++ b/dom/media/tests/mochitest/test_peerConnection_replaceTrack.html
@@ -33,19 +33,21 @@
     test.chain.append(flowtest);
     test.chain.append([["PC_LOCAL_REPLACE_VIDEOTRACK",
       function (test) {
         var stream = test.pcLocal._pc.getLocalStreams()[0];
         var track = stream.getVideoTracks()[0];
         var sender = test.pcLocal._pc.getSenders().find(isSenderOfTrack, track);
         ok(sender, "track has a sender");
         navigator.mozGetUserMedia({video:true, fake: true}, function(newStream) {
-          sender.replaceTrack(newStream.getVideoTracks()[0],
+          var newtrack = newStream.getVideoTracks()[0];
+          sender.replaceTrack(newtrack,
             function() {
               ok(true, "replaceTrack success callback is called");
+              is(sender.track, newtrack, "sender.track has been replaced");
               test.next();
             },
             function(err) {
               ok(false, "replaceTrack failed with error = " + err);
               test.next();
             });
         },
         function(err) {
--- a/dom/mobileconnection/tests/marionette/test_mobile_operator_names_plmnlist.js
+++ b/dom/mobileconnection/tests/marionette/test_mobile_operator_names_plmnlist.js
@@ -35,29 +35,36 @@ function test(aLongName, aShortName, aMc
     // aExpectedLongName, aExpectedShortName could be empty string.
     .then(() => check(aExpectedLongName == null ? aLongName : aExpectedLongName,
                       aExpectedShortName == null ? aShortName : aExpectedShortName,
                       aMcc, aMnc, aLac, aCid));
 }
 
 startTestCommon(function() {
   /**
-   * In emulator we have pre-defined 4 PNN sets:
+   * In the emulator we have 7 pre-defined EF_PNN (see 3GPP TS 31.102 clause
+   * 4.2.58) sets:
    *
    *   PNN 1: Full name: "Test1", Short name: "Test1"
    *   PNN 2: Full name: "Test2", Short name: (none)
-   *   PNN 2: Full name: "Test3", Short name: (none)
-   *   PNN 2: Full name: "Test4", Short name: (none)
+   *   PNN 3: Full name: "Test3", Short name: (none)
+   *   PNN 4: Full name: "Test4", Short name: (none)
+   *   PNN 5: Full name: "Test5", Short name: (none)
+   *   PNN 6: Full name: "Test6", Short name: (none)
+   *   PNN 7: Full name: "Test7", Short name: (none)
    *
-   * Also 4 OPL sets:
+   * Also 7 EF_OPL (see 3GPP TS 31.102 clause 4.2.59) sets:
    *
    *   MCC = 001, MNC =  01, START=0000, END=FFFE, PNN = 01,
    *   MCC = 001, MNC =  02, START=0001, END=0010, PNN = 02,
    *   MCC = 001, MNC =  03, START=0011, END=0011, PNN = 03,
    *   MCC = 001, MNC = 001, START=0012, END=0012, PNN = 04,
+   *   MCC = 001, MNC =  1D, START=0000, END=FFFE, PNN = 05,
+   *   MCC = 001, MNC = 2DD, START=0000, END=FFFE, PNN = 06,
+   *   MCC = 001, MNC = DDD, START=0000, END=FFFE, PNN = 07,
    *
    * See https://github.com/mozilla-b2g/platform_external_qemu/blob/master/telephony/sim_card.c#L725
    */
   return getEmulatorOperatorNames()
     .then(function(aOperators) {
       let {longName: longName, shortName: shortName} = aOperators[0];
       let {mcc: mcc, mnc: mnc} = mobileConnection.voice.network;
       let {gsmLocationAreaCode: lac, gsmCellId: cid} = mobileConnection.voice.cell;
@@ -102,12 +109,47 @@ startTestCommon(function() {
         .then(() => test("Foo2", "Bar2", "001", "03", 0x0011, TEST_CELL_ID,
                          "Test3", ""))
         .then(() => test("Foo3", "Bar3", "001", "03", 0xFFFE, TEST_CELL_ID))
 
         // Test if we match MNC "01" and "001" correctly.
         .then(() => test("Foo1", "Bar1", "001", "001", 0x0012, TEST_CELL_ID,
                          "Test4", ""))
 
+        // Wild char test for MCC = 001, MNC = 1D cases.
+        .then(() => test("Foo10", "Bar10", "001", "10", 0x0000, TEST_CELL_ID,
+                         "Test5", ""))
+        .then(() => test("Foo11", "Bar11", "001", "11", 0x0001, TEST_CELL_ID,
+                         "Test5", ""))
+        .then(() => test("Foo12", "Bar12", "001", "12", 0x0002, TEST_CELL_ID,
+                         "Test5", ""))
+        .then(() => test("Foo13", "Bar13", "001", "13", 0x0003, TEST_CELL_ID,
+                         "Test5", ""))
+        .then(() => test("Foo14", "Bar14", "001", "14", 0x0004, TEST_CELL_ID,
+                         "Test5", ""))
+        .then(() => test("Foo15", "Bar15", "001", "15", 0x0005, TEST_CELL_ID,
+                         "Test5", ""))
+        .then(() => test("Foo16", "Bar16", "001", "16", 0x0006, TEST_CELL_ID,
+                         "Test5", ""))
+        .then(() => test("Foo17", "Bar17", "001", "17", 0x0007, TEST_CELL_ID,
+                         "Test5", ""))
+        .then(() => test("Foo18", "Bar18", "001", "18", 0x0008, TEST_CELL_ID,
+                         "Test5", ""))
+        .then(() => test("Foo19", "Bar19", "001", "19", 0x0009, TEST_CELL_ID,
+                         "Test5", ""))
+        .then(() => test("Foo20", "Bar20", "001", "20", 0x000A, TEST_CELL_ID))
+
+        // Wild chars test for MCC = 001, MNC = 2DD cases.
+        .then(() => test("Foo0", "Bar0", "001", "200", 0x00C8, TEST_CELL_ID,
+                         "Test6", ""))
+        .then(() => test("Foo1", "Bar1", "001", "299", 0x012B, TEST_CELL_ID,
+                         "Test6", ""))
+
+        // Wild chars test for MCC = 001, MNC = DDD cases.
+        .then(() => test("Foo300", "Bar300", "001", "300", 0x012C, TEST_CELL_ID,
+                         "Test7", ""))
+        .then(() => test("Foo999", "Bar999", "001", "999", 0x03E7, TEST_CELL_ID,
+                         "Test7", ""))
+
         // Reset back to initial values.
         .then(() => test(longName, shortName, mcc, mnc, lac, cid));
     });
 });
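
The new cases exercise the 'D' wildcard digit in the EF_OPL MNC entries: "1D" matches MNC 10-19, "2DD" matches 200-299, and "DDD" matches any three-digit MNC, with entries searched in order. A minimal sketch of that matching rule (hypothetical helper; the real matcher lives in the emulator's sim_card.c):

    // Sketch of the 'D' wildcard rule, assuming a hypothetical helper:
    #include <cstdio>
    #include <cstring>

    static bool MncMatches(const char* aPattern, const char* aMnc) {
      if (std::strlen(aPattern) != std::strlen(aMnc)) {
        return false;  // a 2-digit pattern never matches a 3-digit MNC
      }
      for (; *aPattern; ++aPattern, ++aMnc) {
        if (*aPattern != 'D' && *aPattern != *aMnc) {
          return false;  // 'D' matches any digit; others must match exactly
        }
      }
      return true;
    }

    int main() {
      std::printf("%d\n", MncMatches("1D", "15"));   // 1 -> PNN 5 ("Test5")
      std::printf("%d\n", MncMatches("2DD", "299")); // 1 -> PNN 6 ("Test6")
      std::printf("%d\n", MncMatches("DDD", "300")); // 1 -> PNN 7 ("Test7")
      std::printf("%d\n", MncMatches("1D", "20"));   // 0 -> no PNN match
      return 0;
    }
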
--- a/dom/nfc/nsNfc.js
+++ b/dom/nfc/nsNfc.js
@@ -55,35 +55,25 @@ MozNFCTag.prototype = {
   initialize: function(aWindow, aSessionToken) {
     this._window = aWindow;
     this.session = aSessionToken;
   },
 
   _techTypesMap: null,
 
   // NFCTag interface:
-  getDetailsNDEF: function getDetailsNDEF() {
-    return this._nfcContentHelper.getDetailsNDEF(this._window, this.session);
-  },
   readNDEF: function readNDEF() {
     return this._nfcContentHelper.readNDEF(this._window, this.session);
   },
   writeNDEF: function writeNDEF(records) {
     return this._nfcContentHelper.writeNDEF(this._window, records, this.session);
   },
   makeReadOnlyNDEF: function makeReadOnlyNDEF() {
     return this._nfcContentHelper.makeReadOnlyNDEF(this._window, this.session);
   },
-  connect: function connect(enum_tech_type) {
-    let int_tech_type = this._techTypesMap[enum_tech_type];
-    return this._nfcContentHelper.connect(this._window, int_tech_type, this.session);
-  },
-  close: function close() {
-    return this._nfcContentHelper.close(this._window, this.session);
-  },
 
   classID: Components.ID("{4e1e2e90-3137-11e3-aa6e-0800200c9a66}"),
   contractID: "@mozilla.org/nfc/NFCTag;1",
   QueryInterface: XPCOMUtils.generateQI([Ci.nsISupports,
                                          Ci.nsIDOMGlobalPropertyInitializer]),
 };
 
 /**
--- a/dom/system/gonk/ril_consts.js
+++ b/dom/system/gonk/ril_consts.js
@@ -126,21 +126,20 @@ this.REQUEST_REPORT_SMS_MEMORY_STATUS = 
 this.REQUEST_REPORT_STK_SERVICE_IS_RUNNING = 103;
 this.REQUEST_CDMA_GET_SUBSCRIPTION_SOURCE = 104;
 this.REQUEST_ISIM_AUTHENTICATION = 105;
 this.REQUEST_ACKNOWLEDGE_INCOMING_GSM_SMS_WITH_PDU = 106;
 this.REQUEST_STK_SEND_ENVELOPE_WITH_STATUS = 107;
 this.REQUEST_VOICE_RADIO_TECH = 108;
 this.REQUEST_GET_CELL_INFO_LIST = 109;
 
-// Flame specific parcel types.
-this.REQUEST_SET_UICC_SUBSCRIPTION = 114;
-this.REQUEST_SET_DATA_SUBSCRIPTION = 115;
-this.REQUEST_GET_UICC_SUBSCRIPTION = 116;
-this.REQUEST_GET_DATA_SUBSCRIPTION = 117;
+// CAF-specific parcel types, synced with the latest version.
+// Please see https://www.codeaurora.org/cgit/quic/la/platform/hardware/ril/tree/include/telephony/ril.h?h=b2g_kk_3.5
+this.REQUEST_SET_UICC_SUBSCRIPTION = 115;
+this.REQUEST_SET_DATA_SUBSCRIPTION = 116;
 
 // UICC Secure Access.
 this.REQUEST_SIM_OPEN_CHANNEL = 121;
 this.REQUEST_SIM_CLOSE_CHANNEL = 122;
 this.REQUEST_SIM_ACCESS_CHANNEL = 123;
 
 // Mozilla specific parcel type.
 this.REQUEST_GET_UNLOCK_RETRY_COUNT = 150;
--- a/dom/system/gonk/ril_worker.js
+++ b/dom/system/gonk/ril_worker.js
@@ -165,17 +165,17 @@ BufObject.prototype = {
    *        Object containing information about the request, e.g. the
    *        original main thread message object that led to the RIL request.
    */
   newParcel: function(type, options) {
     if (DEBUG) this.context.debug("New outgoing parcel of type " + type);
 
     // We're going to leave room for the parcel size at the beginning.
     this.outgoingIndex = this.PARCEL_SIZE_SIZE;
-    this.writeInt32(type);
+    this.writeInt32(this._reMapRequestType(type));
     this.writeInt32(this.mToken);
 
     if (!options) {
       options = {};
     }
     options.rilRequestType = type;
     options.rilRequestError = null;
     this.mTokenRequestMap.set(this.mToken, options);
@@ -185,16 +185,38 @@ BufObject.prototype = {
 
   simpleRequest: function(type, options) {
     this.newParcel(type, options);
     this.sendParcel();
   },
 
   onSendParcel: function(parcel) {
     postRILMessage(this.context.clientId, parcel);
+  },
+
+  /**
+   * Remap the request type to a different value based on the RIL version.
+   * We only have to do this for the SUBSCRIPTION requests right now, so we
+   * keep it simple. A more generic mechanism can be discussed if more use
+   * cases arise, especially ones from different partners.
+   */
+  _reMapRequestType: function(type) {
+    let newType = type;
+    switch (type) {
+      case REQUEST_SET_UICC_SUBSCRIPTION:
+      case REQUEST_SET_DATA_SUBSCRIPTION:
+        if (this.context.RIL.version < 9) {
+          // Shift the CAF's proprietary parcels. Please see
+          // https://www.codeaurora.org/cgit/quic/la/platform/hardware/ril/tree/include/telephony/ril.h?h=b2g_jb_3.2
+          newType = type - 1;
+        }
+        break;
+    }
+
+    return newType;
   }
 };
 
 (function() {
   let base = require("resource://gre/modules/workers/worker_buf.js").Buf;
   for (let p in base) {
     BufObject.prototype[p] = base[p];
   }
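
The remapping keys off the RIL version reported in UNSOLICITED_RIL_CONNECTED (stored as this.version further below): on versions older than 9, the CAF subscription parcels sit one value lower. A compact sketch of the same shift (parcel values copied from ril_consts.js; the C++ helper itself is a hypothetical stand-in):

    // Sketch of the version-gated shift, using the values from ril_consts.js:
    #include <cstdio>

    enum CafParcel {
      REQUEST_SET_UICC_SUBSCRIPTION = 115,
      REQUEST_SET_DATA_SUBSCRIPTION = 116,
    };

    static int ReMapRequestType(int aType, int aRilVersion) {
      switch (aType) {
        case REQUEST_SET_UICC_SUBSCRIPTION:
        case REQUEST_SET_DATA_SUBSCRIPTION:
          if (aRilVersion < 9) {
            return aType - 1;  // older CAF builds number these one lower
          }
          break;
      }
      return aType;
    }

    int main() {
      std::printf("%d\n", ReMapRequestType(REQUEST_SET_UICC_SUBSCRIPTION, 7));  // 114
      std::printf("%d\n", ReMapRequestType(REQUEST_SET_UICC_SUBSCRIPTION, 10)); // 115
      return 0;
    }
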
@@ -350,16 +372,20 @@ function RilObject(aContext) {
   // Init properties that are only initialized once.
   this.v5Legacy = RILQUIRKS_V5_LEGACY;
 
   this.pendingMO = null;
 }
 RilObject.prototype = {
   context: null,
 
+  /**
+   * RIL version.
+   */
+  version: null,
   v5Legacy: null,
 
   /**
    * Valid calls.
    */
   currentCalls: null,
 
   /**
@@ -6741,18 +6767,16 @@ RilObject.prototype[REQUEST_SET_DATA_SUB
     return;
   }
   options.success = (options.rilRequestError === 0);
   if (!options.success) {
     options.errorMsg = RIL_ERROR_TO_GECKO_ERROR[options.rilRequestError];
   }
   this.sendChromeMessage(options);
 };
-RilObject.prototype[REQUEST_GET_UICC_SUBSCRIPTION] = null;
-RilObject.prototype[REQUEST_GET_DATA_SUBSCRIPTION] = null;
 RilObject.prototype[REQUEST_GET_UNLOCK_RETRY_COUNT] = function REQUEST_GET_UNLOCK_RETRY_COUNT(length, options) {
   options.success = (options.rilRequestError === 0);
   if (!options.success) {
     options.errorMsg = RIL_ERROR_TO_GECKO_ERROR[options.rilRequestError];
   }
   options.retryCount = length ? this.context.Buf.readInt32List()[0] : -1;
   this.sendChromeMessage(options);
 };
@@ -7089,20 +7113,20 @@ RilObject.prototype[UNSOLICITED_EXIT_EME
 };
 RilObject.prototype[UNSOLICITED_RIL_CONNECTED] = function UNSOLICITED_RIL_CONNECTED(length) {
   // Prevent response id collision between UNSOLICITED_RIL_CONNECTED and
   // UNSOLICITED_VOICE_RADIO_TECH_CHANGED for Akami on gingerbread branch.
   if (!length) {
     return;
   }
 
-  let version = this.context.Buf.readInt32List()[0];
-  this.v5Legacy = (version < 5);
+  this.version = this.context.Buf.readInt32List()[0];
+  this.v5Legacy = (this.version < 5);
   if (DEBUG) {
-    this.context.debug("Detected RIL version " + version);
+    this.context.debug("Detected RIL version " + this.version);
     this.context.debug("this.v5Legacy is " + this.v5Legacy);
   }
 
   this.initRILState();
   // Always ensure that we are not in emergency callback mode when init.
   this.exitEmergencyCbMode();
   // Reset radio in the case that b2g restart (or crash).
   this.setRadioEnabled({enabled: false});
--- a/dom/webidl/HTMLMediaElement.webidl
+++ b/dom/webidl/HTMLMediaElement.webidl
@@ -150,14 +150,14 @@ partial interface HTMLMediaElement {
   [Pref="media.eme.enabled"]
   readonly attribute MediaKeys? mediaKeys;
 
   // void, not any: https://www.w3.org/Bugs/Public/show_bug.cgi?id=26457
   [Pref="media.eme.enabled", Throws, NewObject]
   Promise<void> setMediaKeys(MediaKeys? mediaKeys);
 
   [Pref="media.eme.enabled"]
-  attribute EventHandler onneedkey;
+  attribute EventHandler onencrypted;
 
   [Pref="media.eme.enabled"]
   readonly attribute MediaWaitingFor waitingFor;
 };
 #endif
--- a/dom/webidl/InputMethod.webidl
+++ b/dom/webidl/InputMethod.webidl
@@ -133,16 +133,20 @@ interface MozInputContext: EventTarget {
      * To move the cursor, set the start and end position to the same value.
      *
      * @return boolean
      */
     Promise<boolean> setSelectionRange(long start, long length);
 
     /* User moves the cursor, or changes the selection with other means. If the text around
      * cursor has changed, but the cursor has not been moved, the IME won't get notification.
+     *
+     * A dict containing the new values is provided in the detail property of the event,
+     * along with an "ownAction" property denoting that the event is the result of our
+     * own mutation to the input field.
      */
     attribute EventHandler onselectionchange;
 
     /*
      * Commit text to current input field and replace text around
      * cursor position. It will clear the current composition.
      *
      * @param text The string to be replaced with.
@@ -158,27 +162,24 @@ interface MozInputContext: EventTarget {
      * @param offset The offset from the cursor position where deletion starts.
      * @param length The length of text to delete.
      * TODO: maybe updateSurroundingText(DOMString beforeText, DOMString afterText); ?
      * @return boolean
      */
     Promise<boolean> deleteSurroundingText(long offset, long length);
 
     /*
-    * Notifies when the text around the cursor is changed, due to either text
-    * editing or cursor movement. If the cursor has been moved, but the text around has not
-    * changed, the IME won't get notification.
-    *
-    * The event handler function is specified as:
-    * @param beforeString Text before and including cursor position.
-    * @param afterString Text after and excluing cursor position.
-    * function(DOMString beforeText, DOMString afterText) {
-    * ...
-    *  }
-    */
+     * Notifies when the text around the cursor changes, due to either text
+     * editing or cursor movement. If the cursor has moved but the surrounding
+     * text has not changed, the IME won't get a notification.
+     *
+     * A dict containing the new values is provided in the detail property of the event,
+     * along with an "ownAction" property denoting that the event is the result of our
+     * own mutation to the input field.
+     */
     attribute EventHandler onsurroundingtextchange;
 
     /*
       * send a character with its key events.
       * @param modifiers see http://mxr.mozilla.org/mozilla-central/source/dom/interfaces/base/nsIDOMWindowUtils.idl#206
       * @param repeat indicates whether a key would be sent repeatedly.
       * @return true if succeeds. Otherwise false if the input context becomes void.
       * Alternative: sendKey(KeyboardEvent event), but we will likely
rename from dom/webidl/MediaKeyNeededEvent.webidl
rename to dom/webidl/MediaEncryptedEvent.webidl
--- a/dom/webidl/MediaKeyNeededEvent.webidl
+++ b/dom/webidl/MediaEncryptedEvent.webidl
@@ -6,18 +6,18 @@
  * The origin of this IDL file is
  * https://dvcs.w3.org/hg/html-media/raw-file/default/encrypted-media/encrypted-media.html
  *
  * Copyright © 2014 W3C® (MIT, ERCIM, Keio, Beihang), All Rights Reserved.
  * W3C liability, trademark and document use rules apply.
  */
 
 [Pref="media.eme.enabled", Constructor(DOMString type, optional MediaKeyNeededEventInit eventInitDict)]
-interface MediaKeyNeededEvent : Event {
+interface MediaEncryptedEvent : Event {
   readonly attribute DOMString initDataType;
   [Throws]
-  readonly attribute Uint8Array? initData;
+  readonly attribute ArrayBuffer? initData;
 };
 
 dictionary MediaKeyNeededEventInit : EventInit {
   DOMString initDataType = "";
-  Uint8Array? initData;
+  ArrayBuffer? initData = null;
 };
--- a/dom/webidl/MediaKeyMessageEvent.webidl
+++ b/dom/webidl/MediaKeyMessageEvent.webidl
@@ -8,16 +8,16 @@
  *
  * Copyright © 2014 W3C® (MIT, ERCIM, Keio, Beihang), All Rights Reserved.
  * W3C liability, trademark and document use rules apply.
  */
 
 [Pref="media.eme.enabled", Constructor(DOMString type, optional MediaKeyMessageEventInit eventInitDict)]
 interface MediaKeyMessageEvent : Event {
   [Throws]
-  readonly attribute Uint8Array message;
+  readonly attribute ArrayBuffer message;
   readonly attribute DOMString? destinationURL;
 };
 
 dictionary MediaKeyMessageEventInit : EventInit {
-  Uint8Array message;
-  DOMString? destinationURL = "";
+  ArrayBuffer message;
+  DOMString? destinationURL = null;
 };
--- a/dom/webidl/MediaKeySession.webidl
+++ b/dom/webidl/MediaKeySession.webidl
@@ -14,30 +14,29 @@
 interface MediaKeySession : EventTarget {
   // error state
   readonly attribute MediaKeyError? error;
 
   // session properties
   readonly attribute DOMString keySystem;
   readonly attribute DOMString sessionId;
 
-  // Invalid WebIDL, doesn't work.
-  // https://www.w3.org/Bugs/Public/show_bug.cgi?id=25594
-  // readonly attribute Array<Uint8Array> usableKeyIds;
-
   readonly attribute unrestricted double expiration;
 
   // void, not any: https://www.w3.org/Bugs/Public/show_bug.cgi?id=26457
   readonly attribute Promise<void> closed;
 
   // session operations
   // void, not any: https://www.w3.org/Bugs/Public/show_bug.cgi?id=26457
   [NewObject, Throws]
-  Promise<void> update(Uint8Array response);
+  Promise<void> update((ArrayBufferView or ArrayBuffer) response);
 
   // void, not any: https://www.w3.org/Bugs/Public/show_bug.cgi?id=26457
   [NewObject, Throws]
   Promise<void> close();
 
   // void, not any: https://www.w3.org/Bugs/Public/show_bug.cgi?id=26457
   [NewObject, Throws]
   Promise<void> remove();
+
+  [NewObject, Throws]
+  Promise<sequence<ArrayBuffer>> getUsableKeyIds();
 };
--- a/dom/webidl/MediaKeys.webidl
+++ b/dom/webidl/MediaKeys.webidl
@@ -13,22 +13,22 @@
 enum IsTypeSupportedResult { "" /* empty string */, "maybe", "probably" };
 enum SessionType { "temporary", "persistent" };
 
 [Pref="media.eme.enabled"]
 interface MediaKeys {
   readonly attribute DOMString keySystem;
 
   [NewObject, Throws]
-  Promise<MediaKeySession> createSession(DOMString initDataType, Uint8Array initData, optional SessionType sessionType = "temporary");
+  Promise<MediaKeySession> createSession(DOMString initDataType, (ArrayBufferView or ArrayBuffer) initData, optional SessionType sessionType = "temporary");
 
   [NewObject, Throws]
   Promise<MediaKeySession> loadSession(DOMString sessionId);
 
   // void, not any: https://www.w3.org/Bugs/Public/show_bug.cgi?id=26457
   [NewObject, Throws]
-  Promise<void> setServerCertificate(Uint8Array serverCertificate);
+  Promise<void> setServerCertificate((ArrayBufferView or ArrayBuffer) serverCertificate);
 
   [Throws,NewObject]
   static Promise<MediaKeys> create(DOMString keySystem);
   static IsTypeSupportedResult isTypeSupported(DOMString keySystem, optional DOMString initDataType, optional DOMString contentType, optional DOMString capability);
 
 };
--- a/dom/webidl/MediaStreamTrack.webidl
+++ b/dom/webidl/MediaStreamTrack.webidl
@@ -38,10 +38,10 @@ interface MediaStreamTrack {
 //                attribute EventHandler          onended;
 //    any                    getConstraint (DOMString constraintName, optional boolean mandatory = false);
 //    void                   setConstraint (DOMString constraintName, any constraintValue, optional boolean mandatory = false);
 //    MediaTrackConstraints? constraints ();
 //    void                   applyConstraints (MediaTrackConstraints constraints);
 //    void                   prependConstraint (DOMString constraintName, any constraintValue);
 //    void                   appendConstraint (DOMString constraintName, any constraintValue);
 //                attribute EventHandler          onoverconstrained;
-//    void                   stop ();
+    void                   stop ();
 };
--- a/dom/webidl/MozNFCTag.webidl
+++ b/dom/webidl/MozNFCTag.webidl
@@ -20,22 +20,18 @@ enum NFCTechType {
   "MIFARE_ULTRALIGHT",
   "NFC_BARCODE",
   "P2P",
   "UNKNOWN_TECH"
 };
 
 [JSImplementation="@mozilla.org/nfc/NFCTag;1", AvailableIn="CertifiedApps"]
 interface MozNFCTag {
-  DOMRequest getDetailsNDEF();
   DOMRequest readNDEF();
   DOMRequest writeNDEF(sequence<MozNDEFRecord> records);
   DOMRequest makeReadOnlyNDEF();
-
-  DOMRequest connect(NFCTechType techType);
-  DOMRequest close();
 };
 
 // Mozilla Only
 partial interface MozNFCTag {
   [ChromeOnly]
   attribute DOMString session;
 };
--- a/dom/webidl/moz.build
+++ b/dom/webidl/moz.build
@@ -740,14 +740,14 @@ if CONFIG['MOZ_BUILD_APP'] in ['browser'
 
 if CONFIG['MOZ_BUILD_APP'] in ['browser', 'mobile/android', 'xulrunner']:
     WEBIDL_FILES += [
         'External.webidl',
     ]
 
 if CONFIG['MOZ_EME']:
     WEBIDL_FILES += [
+        'MediaEncryptedEvent.webidl',
         'MediaKeyError.webidl',
         'MediaKeyMessageEvent.webidl',
-        'MediaKeyNeededEvent.webidl',
         'MediaKeys.webidl',
         'MediaKeySession.webidl',
     ]
--- a/gfx/2d/2D.h
+++ b/gfx/2d/2D.h
@@ -1092,26 +1092,28 @@ public:
    * cairo_scaled_font_t* parameters must correspond to the same font.
    */
   static TemporaryRef<ScaledFont>
     CreateScaledFontWithCairo(const NativeFont &aNativeFont, Float aSize, cairo_scaled_font_t* aScaledFont);
 
   /**
    * This creates a simple data source surface for a certain size. It allocates
    * new memory for the surface. This memory is freed when the surface is
-   * destroyed.
+   * destroyed.  The caller is responsible for handling the case where nullptr
+   * is returned.
    */
   static TemporaryRef<DataSourceSurface>
     CreateDataSourceSurface(const IntSize &aSize, SurfaceFormat aFormat);
 
   /**
    * This creates a simple data source surface for a certain size with a
    * specific stride, which must be large enough to fit all pixels.
    * It allocates new memory for the surface. This memory is freed when
-   * the surface is destroyed.
+   * the surface is destroyed.  The caller is responsible for handling the case
+   * where nullptr is returned.
    */
   static TemporaryRef<DataSourceSurface>
     CreateDataSourceSurfaceWithStride(const IntSize &aSize, SurfaceFormat aFormat, int32_t aStride);
 
   /**
    * This creates a simple data source surface for some existing data. It will
    * wrap this data and the data for this source surface. The caller is
    * responsible for deallocating the memory only after destruction of the
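
The new sentences make the allocation-failure contract explicit; the matching warn-and-bail checks are added throughout the .cpp files below. A self-contained sketch of the caller-side discipline (generic stand-in types, not the Moz2D API; the real checks use MOZ2D_WARN_IF):

    // Minimal sketch of the "caller must handle nullptr" contract, assuming
    // generic stand-in types:
    #include <cstdio>
    #include <memory>

    struct Surface { int width, height; };

    static std::unique_ptr<Surface> CreateSurface(int aWidth, int aHeight) {
      if (aWidth <= 0 || aHeight <= 0) {
        std::fprintf(stderr, "CreateSurface failed with bad size\n");
        return nullptr;  // allocation refused; caller must cope
      }
      return std::make_unique<Surface>(Surface{aWidth, aHeight});
    }

    int main() {
      std::unique_ptr<Surface> surf = CreateSurface(0, 10);
      if (!surf) {
        return 1;  // bail out instead of dereferencing null
      }
      return 0;
    }
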
--- a/gfx/2d/DrawTargetTiled.cpp
+++ b/gfx/2d/DrawTargetTiled.cpp
@@ -1,14 +1,15 @@
 /* -*- Mode: C++; tab-width: 20; indent-tabs-mode: nil; c-basic-offset: 2 -*-
  * This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "DrawTargetTiled.h"
+#include "Logging.h"
 
 using namespace std;
 
 namespace mozilla {
 namespace gfx {
 
 DrawTargetTiled::DrawTargetTiled()
 {
@@ -57,16 +58,19 @@ public:
 
   virtual SurfaceType GetType() const { return SurfaceType::TILED; }
   virtual IntSize GetSize() const { return IntSize(mRect.XMost(), mRect.YMost()); }
   virtual SurfaceFormat GetFormat() const { return mSnapshots[0]->GetFormat(); }
 
   virtual TemporaryRef<DataSourceSurface> GetDataSurface()
   {
     RefPtr<DataSourceSurface> surf = Factory::CreateDataSourceSurface(GetSize(), GetFormat());
+    if (MOZ2D_WARN_IF(!surf)) {
+      return nullptr;
+    }
 
     DataSourceSurface::MappedSurface mappedSurf;
     surf->Map(DataSourceSurface::MapType::WRITE, &mappedSurf);
 
     {
       RefPtr<DrawTarget> dt =
         Factory::CreateDrawTargetForData(BackendType::CAIRO, mappedSurf.mData,
         GetSize(), mappedSurf.mStride, GetFormat());
--- a/gfx/2d/Factory.cpp
+++ b/gfx/2d/Factory.cpp
@@ -689,41 +689,45 @@ Factory::CreateWrappingDataSourceSurface
   return nullptr;
 }
 
 TemporaryRef<DataSourceSurface>
 Factory::CreateDataSourceSurface(const IntSize &aSize,
                                  SurfaceFormat aFormat)
 {
   if (!CheckSurfaceSize(aSize)) {
+    gfxWarning() << "CreateDataSourceSurface failed with bad size";
     return nullptr;
   }
 
   RefPtr<SourceSurfaceAlignedRawData> newSurf = new SourceSurfaceAlignedRawData();
   if (newSurf->Init(aSize, aFormat)) {
     return newSurf.forget();
   }
 
+  gfxWarning() << "CreateDataSourceSurface failed in init";
   return nullptr;
 }
 
 TemporaryRef<DataSourceSurface>
 Factory::CreateDataSourceSurfaceWithStride(const IntSize &aSize,
                                            SurfaceFormat aFormat,
                                            int32_t aStride)
 {
   if (aStride < aSize.width * BytesPerPixel(aFormat)) {
+    gfxWarning() << "CreateDataSourceSurfaceWithStride failed with bad stride";
     return nullptr;
   }
 
   RefPtr<SourceSurfaceAlignedRawData> newSurf = new SourceSurfaceAlignedRawData();
   if (newSurf->InitWithStride(aSize, aFormat, aStride)) {
     return newSurf.forget();
   }
 
+  gfxWarning() << "CreateDataSourceSurfaceWithStride failed to initialize";
   return nullptr;
 }
 
 TemporaryRef<DrawEventRecorder>
 Factory::CreateEventRecorderForFile(const char *aFilename)
 {
   return new DrawEventRecorderFile(aFilename);
 }
--- a/gfx/2d/FilterNodeSoftware.cpp
+++ b/gfx/2d/FilterNodeSoftware.cpp
@@ -8,16 +8,17 @@
 #include <cmath>
 #include "DataSurfaceHelpers.h"
 #include "FilterNodeSoftware.h"
 #include "2D.h"
 #include "Tools.h"
 #include "Blur.h"
 #include <map>
 #include "FilterProcessing.h"
+#include "Logging.h"
 #include "mozilla/PodOperations.h"
 #include "mozilla/DebugOnly.h"
 
 // #define DEBUG_DUMP_SURFACES
 
 #ifdef DEBUG_DUMP_SURFACES
 #include "gfxUtils.h" // not part of Moz2D
 #endif
@@ -488,18 +489,17 @@ GetDataSurfaceInRect(SourceSurface *aSur
 
   IntRect intersect = sourceRect.Intersect(aDestRect);
   IntRect intersectInSourceSpace = intersect - sourceRect.TopLeft();
   IntRect intersectInDestSpace = intersect - aDestRect.TopLeft();
   SurfaceFormat format = aSurface ? aSurface->GetFormat() : SurfaceFormat(SurfaceFormat::B8G8R8A8);
 
   RefPtr<DataSourceSurface> target =
     Factory::CreateDataSourceSurface(aDestRect.Size(), format);
-
-  if (!target) {
+  if (MOZ2D_WARN_IF(!target)) {
     return nullptr;
   }
 
   if (aEdgeMode == EDGE_MODE_NONE && !aSurfaceRect.Contains(aDestRect)) {
     ClearDataSourceSurface(target);
   }
 
   if (!aSurface) {
@@ -1189,17 +1189,17 @@ ApplyMorphology(const IntRect& aSourceRe
              margin.bottom >= ry && margin.left >= rx, "insufficient margin");
 #endif
 
   RefPtr<DataSourceSurface> tmp;
   if (rx == 0) {
     tmp = aInput;
   } else {
     tmp = Factory::CreateDataSourceSurface(tmpRect.Size(), SurfaceFormat::B8G8R8A8);
-    if (!tmp) {
+    if (MOZ2D_WARN_IF(!tmp)) {
       return nullptr;
     }
 
     int32_t sourceStride = aInput->Stride();
     uint8_t* sourceData = DataAtOffset(aInput, destRect.TopLeft() - srcRect.TopLeft());
 
     int32_t tmpStride = tmp->Stride();
     uint8_t* tmpData = DataAtOffset(tmp, destRect.TopLeft() - tmpRect.TopLeft());
@@ -1208,17 +1208,17 @@ ApplyMorphology(const IntRect& aSourceRe
       sourceData, sourceStride, tmpData, tmpStride, tmpRect, rx, aOperator);
   }
 
   RefPtr<DataSourceSurface> dest;
   if (ry == 0) {
     dest = tmp;
   } else {
     dest = Factory::CreateDataSourceSurface(destRect.Size(), SurfaceFormat::B8G8R8A8);
-    if (!dest) {
+    if (MOZ2D_WARN_IF(!dest)) {
       return nullptr;
     }
 
     int32_t tmpStride = tmp->Stride();
     uint8_t* tmpData = DataAtOffset(tmp, destRect.TopLeft() - tmpRect.TopLeft());
 
     int32_t destStride = dest->Stride();
     uint8_t* destData = dest->GetData();
@@ -1306,17 +1306,17 @@ Premultiply(DataSourceSurface* aSurface)
 {
   if (aSurface->GetFormat() == SurfaceFormat::A8) {
     return aSurface;
   }
 
   IntSize size = aSurface->GetSize();
   RefPtr<DataSourceSurface> target =
     Factory::CreateDataSourceSurface(size, SurfaceFormat::B8G8R8A8);
-  if (!target) {
+  if (MOZ2D_WARN_IF(!target)) {
     return nullptr;
   }
 
   uint8_t* inputData = aSurface->GetData();
   int32_t inputStride = aSurface->Stride();
   uint8_t* targetData = target->GetData();
   int32_t targetStride = target->Stride();
 
@@ -1331,17 +1331,17 @@ Unpremultiply(DataSourceSurface* aSurfac
 {
   if (aSurface->GetFormat() == SurfaceFormat::A8) {
     return aSurface;
   }
 
   IntSize size = aSurface->GetSize();
   RefPtr<DataSourceSurface> target =
     Factory::CreateDataSourceSurface(size, SurfaceFormat::B8G8R8A8);
-  if (!target) {
+  if (MOZ2D_WARN_IF(!target)) {
     return nullptr;
   }
 
   uint8_t* inputData = aSurface->GetData();
   int32_t inputStride = aSurface->Stride();
   uint8_t* targetData = target->GetData();
   int32_t targetStride = target->Stride();
 
@@ -1418,17 +1418,17 @@ FormatForColor(Color aColor)
 }
 
 TemporaryRef<DataSourceSurface>
 FilterNodeFloodSoftware::Render(const IntRect& aRect)
 {
   SurfaceFormat format = FormatForColor(mColor);
   RefPtr<DataSourceSurface> target =
     Factory::CreateDataSourceSurface(aRect.Size(), format);
-  if (!target) {
+  if (MOZ2D_WARN_IF(!target)) {
     return nullptr;
   }
 
   uint8_t* targetData = target->GetData();
   uint32_t stride = target->Stride();
 
   if (format == SurfaceFormat::B8G8R8A8) {
     uint32_t color = ColorToBGRA(mColor);
@@ -1547,17 +1547,17 @@ FilterNodeTileSoftware::Render(const Int
       if (!input) {
         return nullptr;
       }
       if (!target) {
         // We delay creating the target until now because we want to use the
         // same format as our input filter, and we do not actually know the
         // input format before we call GetInputDataSourceSurface.
         target = Factory::CreateDataSourceSurface(aRect.Size(), input->GetFormat());
-        if (!target) {
+        if (MOZ2D_WARN_IF(!target)) {
           return nullptr;
         }
       }
       MOZ_ASSERT(input->GetFormat() == target->GetFormat(), "different surface formats from the same input?");
 
       CopyRect(input, target, srcRect - srcRect.TopLeft(), destRect.TopLeft() - aRect.TopLeft());
     }
   }
@@ -1704,17 +1704,17 @@ FilterNodeComponentTransferSoftware::Ren
 
   SurfaceFormat format = input->GetFormat();
   if (format == SurfaceFormat::A8 && mDisableA) {
     return input.forget();
   }
 
   RefPtr<DataSourceSurface> target =
     Factory::CreateDataSourceSurface(aRect.Size(), format);
-  if (!target) {
+  if (MOZ2D_WARN_IF(!target)) {
     return nullptr;
   }
 
   if (format == SurfaceFormat::A8) {
     TransferComponents<1>(input, target, &lookupTables[B8G8R8A8_COMPONENT_BYTEOFFSET_A]);
   } else {
     TransferComponents<4>(input, target, lookupTables);
   }
@@ -2367,17 +2367,17 @@ FilterNodeConvolveMatrixSoftware::DoRend
   if (!input) {
     return nullptr;
   }
 
   DebugOnlyAutoColorSamplingAccessControl accessControl(input);
 
   RefPtr<DataSourceSurface> target =
     Factory::CreateDataSourceSurface(aRect.Size(), SurfaceFormat::B8G8R8A8);
-  if (!target) {
+  if (MOZ2D_WARN_IF(!target)) {
     return nullptr;
   }
   ClearDataSourceSurface(target);
 
   IntPoint offset = aRect.TopLeft() - srcRect.TopLeft();
 
   uint8_t* sourceData = DataAtOffset(input, offset);
   int32_t sourceStride = input->Stride();
@@ -2514,17 +2514,17 @@ FilterNodeDisplacementMapSoftware::Rende
 {
   IntRect srcRect = InflatedSourceOrDestRect(aRect);
   RefPtr<DataSourceSurface> input =
     GetInputDataSourceSurface(IN_DISPLACEMENT_MAP_IN, srcRect, NEED_COLOR_CHANNELS);
   RefPtr<DataSourceSurface> map =
     GetInputDataSourceSurface(IN_DISPLACEMENT_MAP_IN2, aRect, NEED_COLOR_CHANNELS);
   RefPtr<DataSourceSurface> target =
     Factory::CreateDataSourceSurface(aRect.Size(), SurfaceFormat::B8G8R8A8);
-  if (!input || !map || !target) {
+  if (MOZ2D_WARN_IF(!(input && map && target))) {
     return nullptr;
   }
 
   IntPoint offset = aRect.TopLeft() - srcRect.TopLeft();
 
   uint8_t* sourceData = DataAtOffset(input, offset);
   int32_t sourceStride = input->Stride();
   uint8_t* mapData = map->GetData();
@@ -2772,17 +2772,17 @@ FilterNodeCompositeSoftware::SetAttribut
 
 TemporaryRef<DataSourceSurface>
 FilterNodeCompositeSoftware::Render(const IntRect& aRect)
 {
   RefPtr<DataSourceSurface> start =
     GetInputDataSourceSurface(IN_COMPOSITE_IN_START, aRect, NEED_COLOR_CHANNELS);
   RefPtr<DataSourceSurface> dest =
     Factory::CreateDataSourceSurface(aRect.Size(), SurfaceFormat::B8G8R8A8);
-  if (!dest) {
+  if (MOZ2D_WARN_IF(!dest)) {
     return nullptr;
   }
 
   if (start) {
     CopyRect(start, dest, aRect - aRect.TopLeft(), IntPoint());
   } else {
     ClearDataSourceSurface(dest);
   }
@@ -2865,22 +2865,28 @@ FilterNodeBlurXYSoftware::Render(const I
     return nullptr;
   }
 
   RefPtr<DataSourceSurface> target;
   Rect r(0, 0, srcRect.width, srcRect.height);
 
   if (input->GetFormat() == SurfaceFormat::A8) {
     target = Factory::CreateDataSourceSurface(srcRect.Size(), SurfaceFormat::A8);
+    if (MOZ2D_WARN_IF(!target)) {
+      return nullptr;
+    }
     CopyRect(input, target, IntRect(IntPoint(), input->GetSize()), IntPoint());
     AlphaBoxBlur blur(r, target->Stride(), sigmaXY.width, sigmaXY.height);
     blur.Blur(target->GetData());
   } else {
     RefPtr<DataSourceSurface> channel0, channel1, channel2, channel3;
     FilterProcessing::SeparateColorChannels(input, channel0, channel1, channel2, channel3);
+    if (MOZ2D_WARN_IF(!(channel0 && channel1 && channel2 && channel3))) {
+      return nullptr;
+    }
     AlphaBoxBlur blur(r, channel0->Stride(), sigmaXY.width, sigmaXY.height);
     blur.Blur(channel0->GetData());
     blur.Blur(channel1->GetData());
     blur.Blur(channel2->GetData());
     blur.Blur(channel3->GetData());
     target = FilterProcessing::CombineColorChannels(channel0, channel1, channel2, channel3);
   }
 
@@ -3379,17 +3385,17 @@ FilterNodeLightingSoftware<LightType, Li
   if (input->GetFormat() != SurfaceFormat::A8) {
     input = FilterProcessing::ExtractAlpha(input);
   }
 
   DebugOnlyAutoColorSamplingAccessControl accessControl(input);
 
   RefPtr<DataSourceSurface> target =
     Factory::CreateDataSourceSurface(size, SurfaceFormat::B8G8R8A8);
-  if (!target) {
+  if (MOZ2D_WARN_IF(!target)) {
     return nullptr;
   }
 
   IntPoint offset = aRect.TopLeft() - srcRect.TopLeft();
 
   uint8_t* sourceData = DataAtOffset(input, offset);
   int32_t sourceStride = input->Stride();
   uint8_t* targetData = target->GetData();
--- a/gfx/2d/FilterProcessing.cpp
+++ b/gfx/2d/FilterProcessing.cpp
@@ -1,23 +1,27 @@
 /* -*- Mode: C++; tab-width: 20; indent-tabs-mode: nil; c-basic-offset: 2 -*-
  * This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "FilterProcessing.h"
+#include "Logging.h"
 
 namespace mozilla {
 namespace gfx {
 
 TemporaryRef<DataSourceSurface>
 FilterProcessing::ExtractAlpha(DataSourceSurface* aSource)
 {
   IntSize size = aSource->GetSize();
   RefPtr<DataSourceSurface> alpha = Factory::CreateDataSourceSurface(s