Merge m-c into s-c
author Gregory Szorc <gps@mozilla.com>
Mon, 30 Apr 2012 19:02:39 -0700
changeset 95737 5eb0112f34095f79924c770fb6492c372504b305
parent 95736 005a5bfe547406d6cb7d76b90dabd56ee65fc036 (current diff)
parent 94994 6e34995a746ec1b62a6ec6cfb13db16afc660076 (diff)
child 95738 2eb1d5c6d89a4de1c316c099b585a1cd0f2bb4b5
push id 1439
push user lsblakk@mozilla.com
push date Mon, 04 Jun 2012 20:19:22 +0000
treeherder mozilla-aurora@ea74834dccd3
milestone 15.0a1
Merge m-c into s-c
accessible/src/atk/nsRoleMap.h
accessible/src/mac/nsRoleMap.h
accessible/src/msaa/nsRoleMap.h
build/stdc++compat.cpp
media/libvpx/vp8/common/arm/armv6/recon_v6.asm
media/libvpx/vp8/common/arm/neon/recon16x16mb_neon.asm
media/libvpx/vp8/common/arm/neon/recon2b_neon.asm
media/libvpx/vp8/common/arm/neon/recon4b_neon.asm
media/libvpx/vp8/common/arm/neon/recon_neon.c
media/libvpx/vp8/common/arm/neon/reconb_neon.asm
media/libvpx/vp8/common/arm/neon/shortidct4x4llm_1_neon.asm
media/libvpx/vp8/common/common_types.h
media/libvpx/vp8/common/defaultcoefcounts.c
media/libvpx/vp8/common/defaultcoefcounts.h
media/libvpx/vp8/common/g_common.h
media/libvpx/vp8/common/invtrans.c
media/libvpx/vp8/common/recon.c
media/libvpx/vp8/common/textblit.c
media/libvpx/vp8/common/type_aliases.h
media/libvpx/vp8/decoder/arm/armv6/dequant_dc_idct_v6.asm
media/libvpx/vp8/decoder/arm/armv6/dequant_idct_v6.asm
media/libvpx/vp8/decoder/arm/armv6/dequantize_v6.asm
media/libvpx/vp8/decoder/arm/armv6/idct_blk_v6.c
media/libvpx/vp8/decoder/arm/dequantize_arm.c
media/libvpx/vp8/decoder/arm/dequantize_arm.h
media/libvpx/vp8/decoder/arm/neon/dequant_idct_neon.asm
media/libvpx/vp8/decoder/arm/neon/dequantizeb_neon.asm
media/libvpx/vp8/decoder/arm/neon/idct_blk_neon.c
media/libvpx/vp8/decoder/arm/neon/idct_dequant_0_2x_neon.asm
media/libvpx/vp8/decoder/arm/neon/idct_dequant_dc_0_2x_neon.asm
media/libvpx/vp8/decoder/arm/neon/idct_dequant_dc_full_2x_neon.asm
media/libvpx/vp8/decoder/arm/neon/idct_dequant_full_2x_neon.asm
media/libvpx/vp8/decoder/dequantize.c
media/libvpx/vp8/decoder/dequantize.h
media/libvpx/vp8/decoder/idct_blk.c
media/libvpx/vp8/decoder/x86/dequantize_mmx.asm
media/libvpx/vp8/decoder/x86/dequantize_x86.h
media/libvpx/vp8/decoder/x86/idct_blk_mmx.c
media/libvpx/vp8/decoder/x86/idct_blk_sse2.c
media/libvpx/vp8/encoder/arm/armv6/vp8_fast_fdct4x4_armv6.asm
media/libvpx/vp8/encoder/arm/armv6/vp8_mse16x16_armv6.asm
media/libvpx/vp8/encoder/arm/armv6/vp8_sad16x16_armv6.asm
media/libvpx/vp8/encoder/arm/armv6/vp8_variance16x16_armv6.asm
media/libvpx/vp8/encoder/arm/armv6/vp8_variance8x8_armv6.asm
media/libvpx/vp8/encoder/arm/armv6/vp8_variance_halfpixvar16x16_h_armv6.asm
media/libvpx/vp8/encoder/arm/armv6/vp8_variance_halfpixvar16x16_hv_armv6.asm
media/libvpx/vp8/encoder/arm/armv6/vp8_variance_halfpixvar16x16_v_armv6.asm
media/libvpx/vp8/encoder/arm/neon/fastfdct4x4_neon.asm
media/libvpx/vp8/encoder/arm/neon/fastfdct8x4_neon.asm
media/libvpx/vp8/encoder/arm/neon/sad16_neon.asm
media/libvpx/vp8/encoder/arm/neon/sad8_neon.asm
media/libvpx/vp8/encoder/arm/neon/variance_neon.asm
media/libvpx/vp8/encoder/arm/neon/vp8_mse16x16_neon.asm
media/libvpx/vp8/encoder/arm/neon/vp8_subpixelvariance16x16_neon.asm
media/libvpx/vp8/encoder/arm/neon/vp8_subpixelvariance16x16s_neon.asm
media/libvpx/vp8/encoder/arm/neon/vp8_subpixelvariance8x8_neon.asm
media/libvpx/vp8/encoder/arm/picklpf_arm.c
media/libvpx/vp8/encoder/arm/variance_arm.c
media/libvpx/vp8/encoder/arm/variance_arm.h
media/libvpx/vp8/encoder/sad_c.c
media/libvpx/vp8/encoder/variance.h
media/libvpx/vp8/encoder/variance_c.c
media/libvpx/vp8/encoder/x86/sad_mmx.asm
media/libvpx/vp8/encoder/x86/sad_sse2.asm
media/libvpx/vp8/encoder/x86/sad_sse3.asm
media/libvpx/vp8/encoder/x86/sad_sse4.asm
media/libvpx/vp8/encoder/x86/sad_ssse3.asm
media/libvpx/vp8/encoder/x86/variance_impl_mmx.asm
media/libvpx/vp8/encoder/x86/variance_impl_sse2.asm
media/libvpx/vp8/encoder/x86/variance_impl_ssse3.asm
media/libvpx/vp8/encoder/x86/variance_mmx.c
media/libvpx/vp8/encoder/x86/variance_sse2.c
media/libvpx/vp8/encoder/x86/variance_ssse3.c
media/libvpx/vp8/encoder/x86/variance_x86.h
media/libvpx/vpx_ports/config.h
media/libvpx/vpx_scale/arm/neon/vp8_vpxyv12_copyframeyonly_neon.asm
--- a/accessible/public/nsIAccessibleRole.idl
+++ b/accessible/public/nsIAccessibleRole.idl
@@ -35,21 +35,18 @@
  * the terms of any one of the MPL, the GPL or the LGPL.
  *
  * ***** END LICENSE BLOCK ***** */
 
 #include "nsISupports.idl"
 
 /**
  * Defines cross platform (Gecko) roles.
- *
- * @note - When adding a new role, be sure to also add it to nsRoleMap.h for
- *         each platform.
  */
-[scriptable, uuid(f134da65-39a8-4330-843c-5bd42780b34c)]
+[scriptable, uuid(50db5e86-9a45-4637-a5c3-4ff148c33270)]
 interface nsIAccessibleRole : nsISupports
 {
   /**
    * Used when accessible hans't strong defined role.
    */
   const unsigned long ROLE_NOTHING = 0;
 
   /**
@@ -806,16 +803,10 @@ interface nsIAccessibleRole : nsISupport
    * An HTML definition term <dt>
    */
   const unsigned long ROLE_TERM = 127;
 
   /**
    * An HTML definition <dd>
    */
   const unsigned long ROLE_DEFINITION = 128;
-
-  /**
-   * It's not role actually. This constant is important to help ensure
-   * nsRoleMap's are synchronized.
-   */
-  const unsigned long ROLE_LAST_ENTRY = 129;
 };
 
--- a/accessible/src/atk/nsAccessibleWrap.cpp
+++ b/accessible/src/atk/nsAccessibleWrap.cpp
@@ -48,17 +48,16 @@
 #include "nsRootAccessible.h"
 #include "nsDocAccessibleWrap.h"
 #include "nsIAccessibleValue.h"
 #include "nsMai.h"
 #include "nsMaiHyperlink.h"
 #include "nsString.h"
 #include "nsAutoPtr.h"
 #include "prprf.h"
-#include "nsRoleMap.h"
 #include "nsStateMap.h"
 #include "Relation.h"
 #include "States.h"
 
 #include "mozilla/Util.h"
 #include "nsXPCOMStrings.h"
 #include "nsComponentManagerUtils.h"
 
@@ -717,34 +716,30 @@ getDescriptionCB(AtkObject *aAtkObj)
                                    NS_ConvertUTF16toUTF8(uniDesc).get());
 
     return aAtkObj->description;
 }
 
 AtkRole
 getRoleCB(AtkObject *aAtkObj)
 {
-    nsAccessibleWrap *accWrap = GetAccessibleWrap(aAtkObj);
-    if (!accWrap) {
-        return ATK_ROLE_INVALID;
-    }
+  nsAccessibleWrap* accWrap = GetAccessibleWrap(aAtkObj);
+  if (!accWrap)
+    return ATK_ROLE_INVALID;
 
 #ifdef DEBUG_A11Y
-    NS_ASSERTION(nsAccUtils::IsTextInterfaceSupportCorrect(accWrap),
-                 "Does not support nsIAccessibleText when it should");
+  NS_ASSERTION(nsAccUtils::IsTextInterfaceSupportCorrect(accWrap),
+      "Does not support nsIAccessibleText when it should");
 #endif
 
-    if (aAtkObj->role == ATK_ROLE_INVALID) {
-        // map to the actual value
-        PRUint32 atkRole = atkRoleMap[accWrap->Role()];
-        NS_ASSERTION(atkRoleMap[nsIAccessibleRole::ROLE_LAST_ENTRY] ==
-                     kROLE_ATK_LAST_ENTRY, "ATK role map skewed");
-        aAtkObj->role = static_cast<AtkRole>(atkRole);
-    }
+  if (aAtkObj->role != ATK_ROLE_INVALID)
     return aAtkObj->role;
+
+  return aAtkObj->role =
+    static_cast<AtkRole>(nsAccessibleWrap::AtkRoleFor(accWrap->Role()));
 }
 
 AtkAttributeSet*
 ConvertToAtkAttributeSet(nsIPersistentProperties* aAttributes)
 {
     if (!aAttributes)
         return nsnull;
 
@@ -1395,8 +1390,24 @@ nsAccessibleWrap::FireAtkShowHideEvent(A
     char *signal_name = g_strconcat(aIsAdded ? "children_changed::add" :  "children_changed::remove",
                                     isFromUserInput ? "" : kNonUserInputEvent, NULL);
     g_signal_emit_by_name(parentObject, signal_name, indexInParent, aObject, NULL);
     g_free(signal_name);
 
     return NS_OK;
 }
 
+PRUint32
+nsAccessibleWrap::AtkRoleFor(role aRole)
+{
+#define ROLE(geckoRole, stringRole, atkRole, macRole, msaaRole, ia2Role) \
+  case roles::geckoRole: \
+    return atkRole;
+
+  switch (aRole) {
+#include "RoleMap.h"
+    default:
+      MOZ_NOT_REACHED("Unknown role.");
+      return ATK_ROLE_UNKNOWN;
+  }
+
+#undef ROLE
+}
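
An aside on the technique introduced above: the new RoleMap.h added later in
this changeset is an "X-macro" table. It contains nothing but ROLE(...)
invocations, so each consumer defines ROLE() to emit whatever it needs (here,
a case label returning the ATK role) and then includes the header inside a
switch, exactly as nsAccessibleWrap::AtkRoleFor does. The following is a
minimal, self-contained sketch of that expansion; the table macro, role names
and numeric values are invented for the illustration and are not part of this
patch.

    #include <cstdio>

    // Stand-in for RoleMap.h: one ROLE(geckoRole, atkRole) entry per role.
    #define HYPOTHETICAL_ROLE_TABLE \
      ROLE(NOTHING,     0)          \
      ROLE(PUSHBUTTON,  42)         \
      ROLE(CHECKBUTTON, 43)

    // First expansion: build the enum of cross-platform roles.
    enum class GeckoRole {
    #define ROLE(geckoRole, atkRole) geckoRole,
      HYPOTHETICAL_ROLE_TABLE
    #undef ROLE
    };

    // Second expansion, same shape as AtkRoleFor(): ROLE() becomes a case
    // label, the table is expanded inside the switch, then ROLE() is undefined.
    static int AtkRoleFor(GeckoRole aRole)
    {
    #define ROLE(geckoRole, atkRole) \
      case GeckoRole::geckoRole:     \
        return atkRole;

      switch (aRole) {
        HYPOTHETICAL_ROLE_TABLE
        default:
          return -1; // unknown role
      }
    #undef ROLE
    }

    int main()
    {
      std::printf("%d\n", AtkRoleFor(GeckoRole::PUSHBUTTON)); // prints 42
      return 0;
    }

Adding a role then means adding a single ROLE() line to the table; every
switch that expands it picks the new entry up automatically.
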
--- a/accessible/src/atk/nsAccessibleWrap.h
+++ b/accessible/src/atk/nsAccessibleWrap.h
@@ -111,16 +111,21 @@ public:
     void SetMaiHyperlink(MaiHyperlink* aMaiHyperlink);
 
     static const char * ReturnString(nsAString &aString) {
       static nsCString returnedString;
       returnedString = NS_ConvertUTF16toUTF8(aString);
       return returnedString.get();
     }
 
+  /**
+   * Function mapping from cross platform roles to ATK roles.
+   */
+  static PRUint32 AtkRoleFor(mozilla::a11y::role aRole);
+
 protected:
     virtual nsresult FirePlatformEvent(AccEvent* aEvent);
 
     nsresult FireAtkStateChangeEvent(AccEvent* aEvent, AtkObject *aObject);
     nsresult FireAtkTextChangedEvent(AccEvent* aEvent, AtkObject *aObject);
     nsresult FireAtkShowHideEvent(AccEvent* aEvent, AtkObject *aObject,
                                   bool aIsAdded);
 
--- a/accessible/src/atk/nsMaiInterfaceText.cpp
+++ b/accessible/src/atk/nsMaiInterfaceText.cpp
@@ -37,27 +37,26 @@
  * the terms of any one of the MPL, the GPL or the LGPL.
  *
  * ***** END LICENSE BLOCK ***** */
 
 #include "InterfaceInitFuncs.h"
 
 #include "nsHyperTextAccessible.h"
 #include "nsMai.h"
-#include "nsRoleMap.h"
 
 #include "nsIPersistentProperties2.h"
 
 AtkAttributeSet* ConvertToAtkAttributeSet(nsIPersistentProperties* aAttributes);
 
 static void
 ConvertTexttoAsterisks(nsAccessibleWrap* accWrap, nsAString& aString)
 {
     // convert each char to "*" when it's "password text" 
-    PRUint32 atkRole = atkRoleMap[accWrap->NativeRole()];
+    PRUint32 atkRole = nsAccessibleWrap::AtkRoleFor(accWrap->NativeRole());
     if (atkRole == ATK_ROLE_PASSWORD_TEXT) {
         for (PRUint32 i = 0; i < aString.Length(); i++)
             aString.Replace(i, 1, NS_LITERAL_STRING("*"));
     }
 }
 
 extern "C" {
 
@@ -156,17 +155,17 @@ getCharacterAtOffsetCB(AtkText *aText, g
 
     /* PRUnichar is unsigned short in Mozilla */
     /* gnuichar is guint32 in glib */
     PRUnichar uniChar;
     nsresult rv =
         accText->GetCharacterAtOffset(aOffset, &uniChar);
 
     // convert char to "*" when it's "password text" 
-    PRUint32 atkRole = atkRoleMap[accWrap->NativeRole()];
+    PRUint32 atkRole = nsAccessibleWrap::AtkRoleFor(accWrap->NativeRole());
     if (atkRole == ATK_ROLE_PASSWORD_TEXT)
         uniChar = '*';
 
     return (NS_FAILED(rv)) ? 0 : static_cast<gunichar>(uniChar);
 }
 
 static gchar*
 getTextBeforeOffsetCB(AtkText *aText, gint aOffset,
deleted file mode 100644
--- a/accessible/src/atk/nsRoleMap.h
+++ /dev/null
@@ -1,179 +0,0 @@
-/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* vim: set ts=2 et sw=2 tw=80: */
-/* ***** BEGIN LICENSE BLOCK *****
- * Version: MPL 1.1/GPL 2.0/LGPL 2.1
- *
- * The contents of this file are subject to the Mozilla Public License Version
- * 1.1 (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- * http://www.mozilla.org/MPL/
- *
- * Software distributed under the License is distributed on an "AS IS" basis,
- * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
- * for the specific language governing rights and limitations under the
- * License.
- *
- * The Original Code is mozilla.org code.
- *
- * The Initial Developer of the Original Code is IBM Corporation
- * Portions created by the Initial Developer are Copyright (C) 2006
- * the Initial Developer. All Rights Reserved.
- *
- * Contributor(s):
- *   Gao, Ming (gaoming@cn.ibm.com)
- *   Aaron Leventhal (aleventh@us.ibm.com)
- *
- * Alternatively, the contents of this file may be used under the terms of
- * either the GNU General Public License Version 2 or later (the "GPL"), or
- * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
- * in which case the provisions of the GPL or the LGPL are applicable instead
- * of those above. If you wish to allow use of your version of this file only
- * under the terms of either the GPL or the LGPL, and not to allow others to
- * use your version of this file under the terms of the MPL, indicate your
- * decision by deleting the provisions above and replace them with the notice
- * and other provisions required by the GPL or the LGPL. If you do not delete
- * the provisions above, a recipient may use your version of this file under
- * the terms of any one of the MPL, the GPL or the LGPL.
- *
- * ***** END LICENSE BLOCK ***** */
-
-#include <atk/atk.h>
-#include "nsAccessibleWrap.h"
-
-const PRUint32 kROLE_ATK_LAST_ENTRY = 0xffffffff;
-
-// Map array from cross platform roles to  ATK roles
-static const PRUint32 atkRoleMap[] = {
-                                  // Cross Platform Roles       #
-    ATK_ROLE_UNKNOWN,             // roles::NOTHING              0
-    ATK_ROLE_UNKNOWN,             // roles::TITLEBAR             1
-    ATK_ROLE_MENU_BAR,            // roles::MENUBAR              2
-    ATK_ROLE_SCROLL_BAR,          // roles::SCROLLBAR            3
-    ATK_ROLE_UNKNOWN,             // roles::GRIP                 4
-    ATK_ROLE_UNKNOWN,             // roles::SOUND                5
-    ATK_ROLE_UNKNOWN,             // roles::CURSOR               6
-    ATK_ROLE_UNKNOWN,             // roles::CARET                7
-    ATK_ROLE_ALERT,               // roles::ALERT                8
-    ATK_ROLE_WINDOW,              // roles::WINDOW               9
-    ATK_ROLE_INTERNAL_FRAME,      // roles::INTERNAL_FRAME       10
-    ATK_ROLE_MENU,                // roles::MENUPOPUP            11
-    ATK_ROLE_MENU_ITEM,           // roles::MENUITEM             12
-    ATK_ROLE_TOOL_TIP,            // roles::TOOLTIP              13
-    ATK_ROLE_EMBEDDED,            // roles::APPLICATION          14
-    ATK_ROLE_DOCUMENT_FRAME,      // roles::DOCUMENT             15
-    ATK_ROLE_PANEL,               // roles::PANE                 16
-    ATK_ROLE_CHART,               // roles::CHART                17
-    ATK_ROLE_DIALOG,              // roles::DIALOG               18
-    ATK_ROLE_UNKNOWN,             // roles::BORDER               19
-    ATK_ROLE_PANEL,               // roles::GROUPING             20
-    ATK_ROLE_SEPARATOR,           // roles::SEPARATOR            21
-    ATK_ROLE_TOOL_BAR,            // roles::TOOLBAR              22
-    ATK_ROLE_STATUSBAR,           // roles::STATUSBAR            23
-    ATK_ROLE_TABLE,               // roles::TABLE                24
-    ATK_ROLE_COLUMN_HEADER,       // roles::COLUMNHEADER         25
-    ATK_ROLE_ROW_HEADER,          // roles::ROWHEADER            26
-    ATK_ROLE_UNKNOWN,             // roles::COLUMN               27
-    ATK_ROLE_LIST_ITEM,           // roles::ROW                  28
-    ATK_ROLE_TABLE_CELL,          // roles::CELL                 29
-    ATK_ROLE_LINK,                // roles::LINK                 30
-    ATK_ROLE_UNKNOWN,             // roles::HELPBALLOON          31
-    ATK_ROLE_IMAGE,               // roles::CHARACTER            32
-    ATK_ROLE_LIST,                // roles::LIST                 33
-    ATK_ROLE_LIST_ITEM,           // roles::LISTITEM             34
-    ATK_ROLE_TREE,                // roles::OUTLINE              35
-    ATK_ROLE_LIST_ITEM,           // roles::OUTLINEITEM          36
-    ATK_ROLE_PAGE_TAB,            // roles::PAGETAB              37
-    ATK_ROLE_SCROLL_PANE,         // roles::PROPERTYPAGE         38
-    ATK_ROLE_UNKNOWN,             // roles::INDICATOR            39
-    ATK_ROLE_IMAGE,               // roles::GRAPHIC              40
-    ATK_ROLE_UNKNOWN,             // roles::STATICTEXT           41
-    ATK_ROLE_UNKNOWN,             // roles::TEXT_LEAF            42
-    ATK_ROLE_PUSH_BUTTON,         // roles::PUSHBUTTON           43
-    ATK_ROLE_CHECK_BOX,           // roles::CHECKBUTTON          44
-    ATK_ROLE_RADIO_BUTTON,        // roles::RADIOBUTTON          45
-    ATK_ROLE_COMBO_BOX,           // roles::COMBOBOX             46
-    ATK_ROLE_COMBO_BOX,           // roles::DROPLIST             47
-    ATK_ROLE_PROGRESS_BAR,        // roles::PROGRESSBAR          48
-    ATK_ROLE_DIAL,                // roles::DIAL                 49
-    ATK_ROLE_UNKNOWN,             // roles::HOTKEYFIELD          50
-    ATK_ROLE_SLIDER,              // roles::SLIDER               51
-    ATK_ROLE_SPIN_BUTTON,         // roles::SPINBUTTON           52
-    ATK_ROLE_IMAGE,               // roles::DIAGRAM              53
-    ATK_ROLE_ANIMATION,           // roles::ANIMATION            54
-    ATK_ROLE_UNKNOWN,             // roles::EQUATION             55
-    ATK_ROLE_PUSH_BUTTON,         // roles::BUTTONDROPDOWN       56
-    ATK_ROLE_PUSH_BUTTON,         // roles::BUTTONMENU           57
-    ATK_ROLE_UNKNOWN,             // roles::BUTTONDROPDOWNGRID   58
-    ATK_ROLE_UNKNOWN,             // roles::WHITESPACE           59
-    ATK_ROLE_PAGE_TAB_LIST,       // roles::PAGETABLIST          60
-    ATK_ROLE_UNKNOWN,             // roles::CLOCK                61
-    ATK_ROLE_PUSH_BUTTON,         // roles::SPLITBUTTON          62
-    ATK_ROLE_UNKNOWN,             // roles::IPADDRESS            63
-    ATK_ROLE_ACCEL_LABEL,         // roles::ACCEL_LABEL          64
-    ATK_ROLE_ARROW,               // roles::ARROW                65
-    ATK_ROLE_CANVAS,              // roles::CANVAS               66
-    ATK_ROLE_CHECK_MENU_ITEM,     // roles::CHECK_MENU_ITEM      67
-    ATK_ROLE_COLOR_CHOOSER,       // roles::COLOR_CHOOSER        68
-    ATK_ROLE_DATE_EDITOR,         // roles::DATE_EDITOR          69
-    ATK_ROLE_DESKTOP_ICON,        // roles::DESKTOP_ICON         70
-    ATK_ROLE_DESKTOP_FRAME,       // roles::DESKTOP_FRAME        71
-    ATK_ROLE_DIRECTORY_PANE,      // roles::DIRECTORY_PANE       72
-    ATK_ROLE_FILE_CHOOSER,        // roles::FILE_CHOOSER         73
-    ATK_ROLE_FONT_CHOOSER,        // roles::FONT_CHOOSER         74
-    ATK_ROLE_FRAME,               // roles::CHROME_WINDOW        75
-    ATK_ROLE_GLASS_PANE,          // roles::GLASS_PANE           76
-    ATK_ROLE_HTML_CONTAINER,      // roles::HTML_CONTAINER       77
-    ATK_ROLE_ICON,                // roles::ICON                 78
-    ATK_ROLE_LABEL,               // roles::LABEL                79
-    ATK_ROLE_LAYERED_PANE,        // roles::LAYERED_PANE         80
-    ATK_ROLE_OPTION_PANE,         // roles::OPTION_PANE          81
-    ATK_ROLE_PASSWORD_TEXT,       // roles::PASSWORD_TEXT        82
-    ATK_ROLE_POPUP_MENU,          // roles::POPUP_MENU           83
-    ATK_ROLE_RADIO_MENU_ITEM,     // roles::RADIO_MENU_ITEM      84
-    ATK_ROLE_ROOT_PANE,           // roles::ROOT_PANE            85
-    ATK_ROLE_SCROLL_PANE,         // roles::SCROLL_PANE          86
-    ATK_ROLE_SPLIT_PANE,          // roles::SPLIT_PANE           87
-    ATK_ROLE_TABLE_COLUMN_HEADER, // roles::TABLE_COLUMN_HEADER  88
-    ATK_ROLE_TABLE_ROW_HEADER,    // roles::TABLE_ROW_HEADER     89
-    ATK_ROLE_TEAR_OFF_MENU_ITEM,  // roles::TEAR_OFF_MENU_ITEM   90
-    ATK_ROLE_TERMINAL,            // roles::TERMINAL             91
-    ATK_ROLE_TEXT,                // roles::TEXT_CONTAINER       92
-    ATK_ROLE_TOGGLE_BUTTON,       // roles::TOGGLE_BUTTON        93
-    ATK_ROLE_TREE_TABLE,          // roles::TREE_TABLE           94
-    ATK_ROLE_VIEWPORT,            // roles::VIEWPORT             95
-    ATK_ROLE_HEADER,              // roles::HEADER               96
-    ATK_ROLE_FOOTER,              // roles::FOOTER               97
-    ATK_ROLE_PARAGRAPH,           // roles::PARAGRAPH            98
-    ATK_ROLE_RULER,               // roles::RULER                99
-    ATK_ROLE_AUTOCOMPLETE,        // roles::AUTOCOMPLETE         100
-    ATK_ROLE_EDITBAR,             // roles::EDITBAR              101
-    ATK_ROLE_ENTRY,               // roles::ENTRY                102
-    ATK_ROLE_CAPTION,             // roles::CAPTION              103
-    ATK_ROLE_DOCUMENT_FRAME,      // roles::DOCUMENT_FRAME       104
-    ATK_ROLE_HEADING,             // roles::HEADING              105
-    ATK_ROLE_PAGE,                // roles::PAGE                 106
-    ATK_ROLE_SECTION,             // roles::SECTION              107
-    ATK_ROLE_REDUNDANT_OBJECT,    // roles::REDUNDANT_OBJECT     108
-    ATK_ROLE_FORM,                // roles::FORM                 109
-    ATK_ROLE_INPUT_METHOD_WINDOW, // roles::IME                  110
-    ATK_ROLE_APPLICATION,         // roles::APP_ROOT             111
-    ATK_ROLE_MENU,                // roles::PARENT_MENUITEM      112
-    ATK_ROLE_CALENDAR,            // roles::CALENDAR             113
-    ATK_ROLE_MENU,                // roles::COMBOBOX_LIST        114
-    ATK_ROLE_MENU_ITEM,           // roles::COMBOBOX_OPTION      115
-    ATK_ROLE_IMAGE,               // roles::IMAGE_MAP            116
-    ATK_ROLE_LIST_ITEM,           // roles::OPTION               117
-    ATK_ROLE_LIST_ITEM,           // roles::RICH_OPTION          118
-    ATK_ROLE_LIST,                // roles::LISTBOX              119
-    ATK_ROLE_UNKNOWN,             // roles::FLAT_EQUATION        120
-    ATK_ROLE_TABLE_CELL,          // roles::GRID_CELL            121
-    ATK_ROLE_PANEL,               // roles::EMBEDDED_OBJECT      122
-    ATK_ROLE_SECTION,             // roles::NOTE                 123
-    ATK_ROLE_PANEL,               // roles::FIGURE               124
-    ATK_ROLE_CHECK_BOX,           // roles::CHECK_RICH_OPTION    125
-    ATK_ROLE_LIST,                // roles::DEFINITION_LIST      126
-    ATK_ROLE_LIST_ITEM,           // roles::TERM                 127
-    ATK_ROLE_PARAGRAPH,           // roles::DEFINITION           128
-    kROLE_ATK_LAST_ENTRY          // roles::LAST_ENTRY
-};
-
--- a/accessible/src/base/Role.h
+++ b/accessible/src/base/Role.h
@@ -36,23 +36,26 @@
  * the terms of any one of the MPL, the GPL or the LGPL.
  *
  * ***** END LICENSE BLOCK ***** */
 
 #ifndef _role_h_
 #define _role_h_
 
 /**
- * Note: Make sure to update the localized role names when changing the list.
+ * @note Make sure to update the localized role names when changing the list.
+ * @note When adding a new role, be sure to also add it to base/RoleMap.h and
+ *       update nsIAccessibleRole.
  */
 
 namespace mozilla {
 namespace a11y {
 namespace roles {
-  enum Role {
+
+enum Role {
   /**
    * Used when accessible hans't strong defined role.
    */
   NOTHING = 0,
 
   /**
    * Represents a title or caption bar for a window. It is used by MSAA only,
    * supported automatically by MS Windows.
@@ -807,21 +810,18 @@ namespace roles {
    * Represent a term in a definition list (dt in HTML).
    */
   TERM = 127,
 
   /**
    * Represent a definition in a definition list (dd in HTML)
    */
   DEFINITION = 128,
+};
 
-  /**
-   * It's not role actually. This constant is important to help ensure
-   * nsRoleMap's are synchronized.
-   */
-  LAST_ENTRY = 129
-  };
 } // namespace role
+
 typedef enum mozilla::a11y::roles::Role role;
+
 } // namespace a11y
 } // namespace mozilla
 
 #endif
new file mode 100644
--- /dev/null
+++ b/accessible/src/base/RoleMap.h
@@ -0,0 +1,920 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+/**
+ * Usage: declare the macro ROLE()with the following arguments:
+ * ROLE(geckoRole, stringRole, atkRole, macRole, msaaRole, ia2Role)
+ */
+
+ROLE(NOTHING,
+     "nothing",
+     ATK_ROLE_UNKNOWN,
+     NSAccessibilityUnknownRole,
+     USE_ROLE_STRING,
+     IA2_ROLE_UNKNOWN)
+
+ROLE(TITLEBAR,
+     "titlebar",
+     ATK_ROLE_UNKNOWN,
+     NSAccessibilityUnknownRole,  //Irrelevant on OS X; windows are always native.
+     ROLE_SYSTEM_TITLEBAR,
+     ROLE_SYSTEM_TITLEBAR)
+
+ROLE(MENUBAR,
+     "menubar",
+     ATK_ROLE_MENU_BAR,
+     NSAccessibilityMenuBarRole,  //Irrelevant on OS X; the menubar will always be native and on the top of the screen.
+     ROLE_SYSTEM_MENUBAR,
+     ROLE_SYSTEM_MENUBAR)
+
+ROLE(SCROLLBAR,
+     "scrollbar",
+     ATK_ROLE_SCROLL_BAR,
+     NSAccessibilityScrollBarRole,  //We might need to make this its own mozAccessible, to support the children objects (valueindicator, down/up buttons).
+     ROLE_SYSTEM_SCROLLBAR,
+     ROLE_SYSTEM_SCROLLBAR)
+
+ROLE(GRIP,
+     "grip",
+     ATK_ROLE_UNKNOWN,
+     NSAccessibilitySplitterRole,
+     ROLE_SYSTEM_GRIP,
+     ROLE_SYSTEM_GRIP)
+
+ROLE(SOUND,
+     "sound",
+     ATK_ROLE_UNKNOWN,
+     NSAccessibilityUnknownRole,  //Unused on OS X.
+     ROLE_SYSTEM_SOUND,
+     ROLE_SYSTEM_SOUND)
+
+ROLE(CURSOR,
+     "cursor",
+     ATK_ROLE_UNKNOWN,
+     NSAccessibilityUnknownRole,  //Unused on OS X.
+     ROLE_SYSTEM_CURSOR,
+     ROLE_SYSTEM_CURSOR)
+
+ROLE(CARET,
+     "caret",
+     ATK_ROLE_UNKNOWN,
+     NSAccessibilityUnknownRole,  //Unused on OS X.
+     ROLE_SYSTEM_CARET,
+     ROLE_SYSTEM_CARET)
+
+ROLE(ALERT,
+     "alert",
+     ATK_ROLE_ALERT,
+     NSAccessibilityWindowRole,
+     ROLE_SYSTEM_ALERT,
+     ROLE_SYSTEM_ALERT)
+
+ROLE(WINDOW,
+     "window",
+     ATK_ROLE_WINDOW,
+     NSAccessibilityWindowRole,  //Irrelevant on OS X; all window a11y is handled by the system.
+     ROLE_SYSTEM_WINDOW,
+     ROLE_SYSTEM_WINDOW)
+
+ROLE(INTERNAL_FRAME,
+     "internal frame",
+     ATK_ROLE_INTERNAL_FRAME,
+     NSAccessibilityScrollAreaRole,
+     USE_ROLE_STRING,
+     IA2_ROLE_INTERNAL_FRAME)
+
+ROLE(MENUPOPUP,
+     "menupopup",
+     ATK_ROLE_MENU,
+     NSAccessibilityMenuRole,  //The parent of menuitems.
+     ROLE_SYSTEM_MENUPOPUP,
+     ROLE_SYSTEM_MENUPOPUP)
+
+ROLE(MENUITEM,
+     "menuitem",
+     ATK_ROLE_MENU_ITEM,
+     NSAccessibilityMenuItemRole,
+     ROLE_SYSTEM_MENUITEM,
+     ROLE_SYSTEM_MENUITEM)
+
+ROLE(TOOLTIP,
+     "tooltip",
+     ATK_ROLE_TOOL_TIP,
+     @"AXHelpTag",  //10.4+ only, so we re-define the constant.
+     ROLE_SYSTEM_TOOLTIP,
+     ROLE_SYSTEM_TOOLTIP)
+
+ROLE(APPLICATION,
+     "application",
+     ATK_ROLE_EMBEDDED,
+     NSAccessibilityGroupRole,  //Unused on OS X. the system will take care of this.
+     ROLE_SYSTEM_APPLICATION,
+     ROLE_SYSTEM_APPLICATION)
+
+ROLE(DOCUMENT,
+     "document",
+     ATK_ROLE_DOCUMENT_FRAME,
+     @"AXWebArea",
+     ROLE_SYSTEM_DOCUMENT,
+     ROLE_SYSTEM_DOCUMENT)
+
+/**
+ *  msaa comment:
+ *   We used to map to ROLE_SYSTEM_PANE, but JAWS would
+ *   not read the accessible name for the contaning pane.
+ *   However, JAWS will read the accessible name for a groupbox.
+ *   By mapping a PANE to a GROUPING, we get no undesirable effects,
+ *   but fortunately JAWS will then read the group's label,
+ *   when an inner control gets focused.
+ */
+ROLE(PANE,
+     "pane",
+     ATK_ROLE_PANEL,
+     NSAccessibilityGroupRole,
+     ROLE_SYSTEM_GROUPING,
+     ROLE_SYSTEM_GROUPING)
+
+ROLE(CHART,
+     "chart",
+     ATK_ROLE_CHART,
+     NSAccessibilityUnknownRole,
+     ROLE_SYSTEM_CHART,
+     ROLE_SYSTEM_CHART)
+
+ROLE(DIALOG,
+     "dialog",
+     ATK_ROLE_DIALOG,
+     NSAccessibilityWindowRole,  //There's a dialog subrole.
+     ROLE_SYSTEM_DIALOG,
+     ROLE_SYSTEM_DIALOG)
+
+ROLE(BORDER,
+     "border",
+     ATK_ROLE_UNKNOWN,
+     NSAccessibilityUnknownRole,  //Unused on OS X.
+     ROLE_SYSTEM_BORDER,
+     ROLE_SYSTEM_BORDER)
+
+ROLE(GROUPING,
+     "grouping",
+     ATK_ROLE_PANEL,
+     NSAccessibilityGroupRole,
+     ROLE_SYSTEM_GROUPING,
+     ROLE_SYSTEM_GROUPING)
+
+ROLE(SEPARATOR,
+     "separator",
+     ATK_ROLE_SEPARATOR,
+     NSAccessibilityUnknownRole,
+     ROLE_SYSTEM_SEPARATOR,
+     ROLE_SYSTEM_SEPARATOR)
+
+ROLE(TOOLBAR,
+     "toolbar",
+     ATK_ROLE_TOOL_BAR,
+     NSAccessibilityToolbarRole,
+     ROLE_SYSTEM_TOOLBAR,
+     ROLE_SYSTEM_TOOLBAR)
+
+ROLE(STATUSBAR,
+     "statusbar",
+     ATK_ROLE_STATUSBAR,
+     NSAccessibilityUnknownRole,  //Doesn't exist on OS X (a status bar is its parts; a progressbar, a label, etc.)
+     ROLE_SYSTEM_STATUSBAR,
+     ROLE_SYSTEM_STATUSBAR)
+
+ROLE(TABLE,
+     "table",
+     ATK_ROLE_TABLE,
+     NSAccessibilityGroupRole,
+     ROLE_SYSTEM_TABLE,
+     ROLE_SYSTEM_TABLE)
+
+ROLE(COLUMNHEADER,
+     "columnheader",
+     ATK_ROLE_COLUMN_HEADER,
+     NSAccessibilityGroupRole,
+     ROLE_SYSTEM_COLUMNHEADER,
+     ROLE_SYSTEM_COLUMNHEADER)
+
+ROLE(ROWHEADER,
+     "rowheader",
+     ATK_ROLE_ROW_HEADER,
+     NSAccessibilityGroupRole,
+     ROLE_SYSTEM_ROWHEADER,
+     ROLE_SYSTEM_ROWHEADER)
+
+ROLE(COLUMN,
+     "column",
+     ATK_ROLE_UNKNOWN,
+     NSAccessibilityColumnRole,
+     ROLE_SYSTEM_COLUMN,
+     ROLE_SYSTEM_COLUMN)
+
+ROLE(ROW,
+     "row",
+     ATK_ROLE_LIST_ITEM,
+     NSAccessibilityRowRole,
+     ROLE_SYSTEM_ROW,
+     ROLE_SYSTEM_ROW)
+
+ROLE(CELL,
+     "cell",
+     ATK_ROLE_TABLE_CELL,
+     NSAccessibilityGroupRole,
+     ROLE_SYSTEM_CELL,
+     ROLE_SYSTEM_CELL)
+
+ROLE(LINK,
+     "link",
+     ATK_ROLE_LINK,
+     @"AXLink",  //10.4+ the attr first define in SDK 10.4, so we define it here too. ROLE_LINK
+     ROLE_SYSTEM_LINK,
+     ROLE_SYSTEM_LINK)
+
+ROLE(HELPBALLOON,
+     "helpballoon",
+     ATK_ROLE_UNKNOWN,
+     @"AXHelpTag",
+     ROLE_SYSTEM_HELPBALLOON,
+     ROLE_SYSTEM_HELPBALLOON)
+
+ROLE(CHARACTER,
+     "character",
+     ATK_ROLE_IMAGE,
+     NSAccessibilityUnknownRole,  //Unused on OS X.
+     ROLE_SYSTEM_CHARACTER,
+     ROLE_SYSTEM_CHARACTER)
+
+ROLE(LIST,
+     "list",
+     ATK_ROLE_LIST,
+     NSAccessibilityListRole,
+     ROLE_SYSTEM_LIST,
+     ROLE_SYSTEM_LIST)
+
+ROLE(LISTITEM,
+     "listitem",
+     ATK_ROLE_LIST_ITEM,
+     NSAccessibilityGroupRole,
+     ROLE_SYSTEM_LISTITEM,
+     ROLE_SYSTEM_LISTITEM)
+
+ROLE(OUTLINE,
+     "outline",
+     ATK_ROLE_TREE,
+     NSAccessibilityOutlineRole,
+     ROLE_SYSTEM_OUTLINE,
+     ROLE_SYSTEM_OUTLINE)
+
+ROLE(OUTLINEITEM,
+     "outlineitem",
+     ATK_ROLE_LIST_ITEM,
+     NSAccessibilityRowRole,  //XXX: use OutlineRow as subrole.
+     ROLE_SYSTEM_OUTLINEITEM,
+     ROLE_SYSTEM_OUTLINEITEM)
+
+ROLE(PAGETAB,
+     "pagetab",
+     ATK_ROLE_PAGE_TAB,
+     NSAccessibilityRadioButtonRole,
+     ROLE_SYSTEM_PAGETAB,
+     ROLE_SYSTEM_PAGETAB)
+
+ROLE(PROPERTYPAGE,
+     "propertypage",
+     ATK_ROLE_SCROLL_PANE,
+     NSAccessibilityGroupRole,
+     ROLE_SYSTEM_PROPERTYPAGE,
+     ROLE_SYSTEM_PROPERTYPAGE)
+
+ROLE(INDICATOR,
+     "indicator",
+     ATK_ROLE_UNKNOWN,
+     NSAccessibilityUnknownRole,
+     ROLE_SYSTEM_INDICATOR,
+     ROLE_SYSTEM_INDICATOR)
+
+ROLE(GRAPHIC,
+     "graphic",
+     ATK_ROLE_IMAGE,
+     NSAccessibilityImageRole,
+     ROLE_SYSTEM_GRAPHIC,
+     ROLE_SYSTEM_GRAPHIC)
+
+ROLE(STATICTEXT,
+     "statictext",
+     ATK_ROLE_UNKNOWN,
+     NSAccessibilityStaticTextRole,
+     ROLE_SYSTEM_STATICTEXT,
+     ROLE_SYSTEM_STATICTEXT)
+
+ROLE(TEXT_LEAF,
+     "text leaf",
+     ATK_ROLE_UNKNOWN,
+     NSAccessibilityStaticTextRole,
+     ROLE_SYSTEM_TEXT,
+     ROLE_SYSTEM_TEXT)
+
+ROLE(PUSHBUTTON,
+     "pushbutton",
+     ATK_ROLE_PUSH_BUTTON,
+     NSAccessibilityButtonRole,
+     ROLE_SYSTEM_PUSHBUTTON,
+     ROLE_SYSTEM_PUSHBUTTON)
+
+ROLE(CHECKBUTTON,
+     "checkbutton",
+     ATK_ROLE_CHECK_BOX,
+     NSAccessibilityCheckBoxRole,
+     ROLE_SYSTEM_CHECKBUTTON,
+     ROLE_SYSTEM_CHECKBUTTON)
+
+ROLE(RADIOBUTTON,
+     "radiobutton",
+     ATK_ROLE_RADIO_BUTTON,
+     NSAccessibilityRadioButtonRole,
+     ROLE_SYSTEM_RADIOBUTTON,
+     ROLE_SYSTEM_RADIOBUTTON)
+
+ROLE(COMBOBOX,
+     "combobox",
+     ATK_ROLE_COMBO_BOX,
+     NSAccessibilityPopUpButtonRole,
+     ROLE_SYSTEM_COMBOBOX,
+     ROLE_SYSTEM_COMBOBOX)
+
+ROLE(DROPLIST,
+     "droplist",
+     ATK_ROLE_COMBO_BOX,
+     NSAccessibilityPopUpButtonRole,
+     ROLE_SYSTEM_DROPLIST,
+     ROLE_SYSTEM_DROPLIST)
+
+ROLE(PROGRESSBAR,
+     "progressbar",
+     ATK_ROLE_PROGRESS_BAR,
+     NSAccessibilityProgressIndicatorRole,
+     ROLE_SYSTEM_PROGRESSBAR,
+     ROLE_SYSTEM_PROGRESSBAR)
+
+ROLE(DIAL,
+     "dial",
+     ATK_ROLE_DIAL,
+     NSAccessibilityUnknownRole,
+     ROLE_SYSTEM_DIAL,
+     ROLE_SYSTEM_DIAL)
+
+ROLE(HOTKEYFIELD,
+     "hotkeyfield",
+     ATK_ROLE_UNKNOWN,
+     NSAccessibilityUnknownRole,
+     ROLE_SYSTEM_HOTKEYFIELD,
+     ROLE_SYSTEM_HOTKEYFIELD)
+
+ROLE(SLIDER,
+     "slider",
+     ATK_ROLE_SLIDER,
+     NSAccessibilitySliderRole,
+     ROLE_SYSTEM_SLIDER,
+     ROLE_SYSTEM_SLIDER)
+
+ROLE(SPINBUTTON,
+     "spinbutton",
+     ATK_ROLE_SPIN_BUTTON,
+     NSAccessibilityIncrementorRole,  //Subroles: Increment/Decrement.
+     ROLE_SYSTEM_SPINBUTTON,
+     ROLE_SYSTEM_SPINBUTTON)
+
+ROLE(DIAGRAM,
+     "diagram",
+     ATK_ROLE_IMAGE,
+     NSAccessibilityUnknownRole,
+     ROLE_SYSTEM_DIAGRAM,
+     ROLE_SYSTEM_DIAGRAM)
+
+ROLE(ANIMATION,
+     "animation",
+     ATK_ROLE_ANIMATION,
+     NSAccessibilityUnknownRole,
+     ROLE_SYSTEM_ANIMATION,
+     ROLE_SYSTEM_ANIMATION)
+
+ROLE(EQUATION,
+     "equation",
+     ATK_ROLE_UNKNOWN,
+     NSAccessibilityUnknownRole,
+     ROLE_SYSTEM_EQUATION,
+     ROLE_SYSTEM_EQUATION)
+
+ROLE(BUTTONDROPDOWN,
+     "buttondropdown",
+     ATK_ROLE_PUSH_BUTTON,
+     NSAccessibilityPopUpButtonRole,
+     ROLE_SYSTEM_BUTTONDROPDOWN,
+     ROLE_SYSTEM_BUTTONDROPDOWN)
+
+ROLE(BUTTONMENU,
+     "buttonmenu",
+     ATK_ROLE_PUSH_BUTTON,
+     NSAccessibilityMenuButtonRole,
+     ROLE_SYSTEM_BUTTONMENU,
+     ROLE_SYSTEM_BUTTONMENU)
+
+ROLE(BUTTONDROPDOWNGRID,
+     "buttondropdowngrid",
+     ATK_ROLE_UNKNOWN,
+     NSAccessibilityGroupRole,
+     ROLE_SYSTEM_BUTTONDROPDOWNGRID,
+     ROLE_SYSTEM_BUTTONDROPDOWNGRID)
+
+ROLE(WHITESPACE,
+     "whitespace",
+     ATK_ROLE_UNKNOWN,
+     NSAccessibilityUnknownRole,
+     ROLE_SYSTEM_WHITESPACE,
+     ROLE_SYSTEM_WHITESPACE)
+
+ROLE(PAGETABLIST,
+     "pagetablist",
+     ATK_ROLE_PAGE_TAB_LIST,
+     NSAccessibilityTabGroupRole,
+     ROLE_SYSTEM_PAGETABLIST,
+     ROLE_SYSTEM_PAGETABLIST)
+
+ROLE(CLOCK,
+     "clock",
+     ATK_ROLE_UNKNOWN,
+     NSAccessibilityUnknownRole,  //Unused on OS X
+     ROLE_SYSTEM_CLOCK,
+     ROLE_SYSTEM_CLOCK)
+
+ROLE(SPLITBUTTON,
+     "splitbutton",
+     ATK_ROLE_PUSH_BUTTON,
+     NSAccessibilityButtonRole,
+     ROLE_SYSTEM_SPLITBUTTON,
+     ROLE_SYSTEM_SPLITBUTTON)
+
+ROLE(IPADDRESS,
+     "ipaddress",
+     ATK_ROLE_UNKNOWN,
+     NSAccessibilityUnknownRole,
+     ROLE_SYSTEM_IPADDRESS,
+     ROLE_SYSTEM_IPADDRESS)
+
+ROLE(ACCEL_LABEL,
+     "accel label",
+     ATK_ROLE_ACCEL_LABEL,
+     NSAccessibilityStaticTextRole,
+     ROLE_SYSTEM_STATICTEXT,
+     ROLE_SYSTEM_STATICTEXT)
+
+ROLE(ARROW,
+     "arrow",
+     ATK_ROLE_ARROW,
+     NSAccessibilityUnknownRole,
+     ROLE_SYSTEM_INDICATOR,
+     ROLE_SYSTEM_INDICATOR)
+
+ROLE(CANVAS,
+     "canvas",
+     ATK_ROLE_CANVAS,
+     NSAccessibilityImageRole,
+     USE_ROLE_STRING,
+     IA2_ROLE_CANVAS)
+
+ROLE(CHECK_MENU_ITEM,
+     "check menu item",
+     ATK_ROLE_CHECK_MENU_ITEM,
+     NSAccessibilityMenuItemRole,
+     ROLE_SYSTEM_MENUITEM,
+     IA2_ROLE_CHECK_MENU_ITEM)
+
+ROLE(COLOR_CHOOSER,
+     "color chooser",
+     ATK_ROLE_COLOR_CHOOSER,
+     NSAccessibilityColorWellRole,
+     ROLE_SYSTEM_DIALOG,
+     IA2_ROLE_COLOR_CHOOSER)
+
+ROLE(DATE_EDITOR,
+     "date editor",
+     ATK_ROLE_DATE_EDITOR,
+     NSAccessibilityUnknownRole,
+     USE_ROLE_STRING,
+     IA2_ROLE_DATE_EDITOR)
+
+ROLE(DESKTOP_ICON,
+     "desktop icon",
+     ATK_ROLE_DESKTOP_ICON,
+     NSAccessibilityImageRole,
+     USE_ROLE_STRING,
+     IA2_ROLE_DESKTOP_ICON)
+
+ROLE(DESKTOP_FRAME,
+     "desktop frame",
+     ATK_ROLE_DESKTOP_FRAME,
+     NSAccessibilityUnknownRole,
+     USE_ROLE_STRING,
+     IA2_ROLE_DESKTOP_PANE)
+
+ROLE(DIRECTORY_PANE,
+     "directory pane",
+     ATK_ROLE_DIRECTORY_PANE,
+     NSAccessibilityBrowserRole,
+     USE_ROLE_STRING,
+     IA2_ROLE_DIRECTORY_PANE)
+
+ROLE(FILE_CHOOSER,
+     "file chooser",
+     ATK_ROLE_FILE_CHOOSER,
+     NSAccessibilityUnknownRole,  //Unused on OS X
+     USE_ROLE_STRING,
+     IA2_ROLE_FILE_CHOOSER)
+
+ROLE(FONT_CHOOSER,
+     "font chooser",
+     ATK_ROLE_FONT_CHOOSER,
+     NSAccessibilityUnknownRole,
+     USE_ROLE_STRING,
+     IA2_ROLE_FONT_CHOOSER)
+
+ROLE(CHROME_WINDOW,
+     "chrome window",
+     ATK_ROLE_FRAME,
+     NSAccessibilityUnknownRole,  //Unused on OS X
+     ROLE_SYSTEM_APPLICATION,
+     IA2_ROLE_FRAME)
+
+ROLE(GLASS_PANE,
+     "glass pane",
+     ATK_ROLE_GLASS_PANE,
+     NSAccessibilityGroupRole,
+     USE_ROLE_STRING,
+     IA2_ROLE_GLASS_PANE)
+
+ROLE(HTML_CONTAINER,
+     "html container",
+     ATK_ROLE_HTML_CONTAINER,
+     NSAccessibilityUnknownRole,
+     USE_ROLE_STRING,
+     IA2_ROLE_UNKNOWN)
+
+ROLE(ICON,
+     "icon",
+     ATK_ROLE_ICON,
+     NSAccessibilityImageRole,
+     ROLE_SYSTEM_PUSHBUTTON,
+     IA2_ROLE_ICON)
+
+ROLE(LABEL,
+     "label",
+     ATK_ROLE_LABEL,
+     NSAccessibilityGroupRole,
+     ROLE_SYSTEM_STATICTEXT,
+     IA2_ROLE_LABEL)
+
+ROLE(LAYERED_PANE,
+     "layered pane",
+     ATK_ROLE_LAYERED_PANE,
+     NSAccessibilityGroupRole,
+     USE_ROLE_STRING,
+     IA2_ROLE_LAYERED_PANE)
+
+ROLE(OPTION_PANE,
+     "option pane",
+     ATK_ROLE_OPTION_PANE,
+     NSAccessibilityGroupRole,
+     USE_ROLE_STRING,
+     IA2_ROLE_OPTION_PANE)
+
+ROLE(PASSWORD_TEXT,
+     "password text",
+     ATK_ROLE_PASSWORD_TEXT,
+     NSAccessibilityTextFieldRole,
+     ROLE_SYSTEM_TEXT,
+     ROLE_SYSTEM_TEXT)
+
+ROLE(POPUP_MENU,
+     "popup menu",
+     ATK_ROLE_POPUP_MENU,
+     NSAccessibilityUnknownRole,  //Unused
+     ROLE_SYSTEM_MENUPOPUP,
+     ROLE_SYSTEM_MENUPOPUP)
+
+ROLE(RADIO_MENU_ITEM,
+     "radio menu item",
+     ATK_ROLE_RADIO_MENU_ITEM,
+     NSAccessibilityMenuItemRole,
+     ROLE_SYSTEM_MENUITEM,
+     IA2_ROLE_RADIO_MENU_ITEM)
+
+ROLE(ROOT_PANE,
+     "root pane",
+     ATK_ROLE_ROOT_PANE,
+     NSAccessibilityGroupRole,
+     USE_ROLE_STRING,
+     IA2_ROLE_ROOT_PANE)
+
+ROLE(SCROLL_PANE,
+     "scroll pane",
+     ATK_ROLE_SCROLL_PANE,
+     NSAccessibilityScrollAreaRole,
+     USE_ROLE_STRING,
+     IA2_ROLE_SCROLL_PANE)
+
+ROLE(SPLIT_PANE,
+     "split pane",
+     ATK_ROLE_SPLIT_PANE,
+     NSAccessibilitySplitGroupRole,
+     USE_ROLE_STRING,
+     IA2_ROLE_SPLIT_PANE)
+
+ROLE(TABLE_COLUMN_HEADER,
+     "table column header",
+     ATK_ROLE_TABLE_COLUMN_HEADER,
+     NSAccessibilityUnknownRole,
+     ROLE_SYSTEM_COLUMNHEADER,
+     ROLE_SYSTEM_COLUMNHEADER)
+
+ROLE(TABLE_ROW_HEADER,
+     "table row header",
+     ATK_ROLE_TABLE_ROW_HEADER,
+     NSAccessibilityUnknownRole,
+     ROLE_SYSTEM_ROWHEADER,
+     ROLE_SYSTEM_ROWHEADER)
+
+ROLE(TEAR_OFF_MENU_ITEM,
+     "tear off menu item",
+     ATK_ROLE_TEAR_OFF_MENU_ITEM,
+     NSAccessibilityMenuItemRole,
+     ROLE_SYSTEM_MENUITEM,
+     IA2_ROLE_TEAR_OFF_MENU)
+
+ROLE(TERMINAL,
+     "terminal",
+     ATK_ROLE_TERMINAL,
+     NSAccessibilityUnknownRole,
+     USE_ROLE_STRING,
+     IA2_ROLE_TERMINAL)
+
+ROLE(TEXT_CONTAINER,
+     "text container",
+     ATK_ROLE_TEXT,
+     NSAccessibilityGroupRole,
+     USE_ROLE_STRING,
+     IA2_ROLE_TEXT_FRAME)
+
+ROLE(TOGGLE_BUTTON,
+     "toggle button",
+     ATK_ROLE_TOGGLE_BUTTON,
+     NSAccessibilityButtonRole,
+     ROLE_SYSTEM_PUSHBUTTON,
+     IA2_ROLE_TOGGLE_BUTTON)
+
+ROLE(TREE_TABLE,
+     "tree table",
+     ATK_ROLE_TREE_TABLE,
+     NSAccessibilityTableRole,
+     ROLE_SYSTEM_OUTLINE,
+     ROLE_SYSTEM_OUTLINE)
+
+ROLE(VIEWPORT,
+     "viewport",
+     ATK_ROLE_VIEWPORT,
+     NSAccessibilityUnknownRole,
+     ROLE_SYSTEM_PANE,
+     IA2_ROLE_VIEW_PORT)
+
+ROLE(HEADER,
+     "header",
+     ATK_ROLE_HEADER,
+     NSAccessibilityGroupRole,
+     USE_ROLE_STRING,
+     IA2_ROLE_HEADER)
+
+ROLE(FOOTER,
+     "footer",
+     ATK_ROLE_FOOTER,
+     NSAccessibilityGroupRole,
+     USE_ROLE_STRING,
+     IA2_ROLE_FOOTER)
+
+ROLE(PARAGRAPH,
+     "paragraph",
+     ATK_ROLE_PARAGRAPH,
+     NSAccessibilityGroupRole,
+     USE_ROLE_STRING,
+     IA2_ROLE_PARAGRAPH)
+
+ROLE(RULER,
+     "ruler",
+     ATK_ROLE_RULER,
+     @"AXRuler",  //10.4+ only, so we re-define the constant.
+     USE_ROLE_STRING,
+     IA2_ROLE_RULER)
+
+ROLE(AUTOCOMPLETE,
+     "autocomplete",
+     ATK_ROLE_AUTOCOMPLETE,
+     NSAccessibilityUnknownRole,
+     ROLE_SYSTEM_COMBOBOX,
+     ROLE_SYSTEM_COMBOBOX)
+
+ROLE(EDITBAR,
+     "editbar",
+     ATK_ROLE_EDITBAR,
+     NSAccessibilityTextFieldRole,
+     ROLE_SYSTEM_TEXT,
+     IA2_ROLE_EDITBAR)
+
+ROLE(ENTRY,
+     "entry",
+     ATK_ROLE_ENTRY,
+     NSAccessibilityTextFieldRole,
+     ROLE_SYSTEM_TEXT,
+     ROLE_SYSTEM_TEXT)
+
+ROLE(CAPTION,
+     "caption",
+     ATK_ROLE_CAPTION,
+     NSAccessibilityStaticTextRole,
+     USE_ROLE_STRING,
+     IA2_ROLE_CAPTION)
+
+ROLE(DOCUMENT_FRAME,
+     "document frame",
+     ATK_ROLE_DOCUMENT_FRAME,
+     NSAccessibilityScrollAreaRole,
+     USE_ROLE_STRING,
+     IA2_ROLE_UNKNOWN)
+
+ROLE(HEADING,
+     "heading",
+     ATK_ROLE_HEADING,
+     @"AXHeading",
+     USE_ROLE_STRING,
+     IA2_ROLE_HEADING)
+
+ROLE(PAGE,
+     "page",
+     ATK_ROLE_PAGE,
+     NSAccessibilityGroupRole,
+     USE_ROLE_STRING,
+     IA2_ROLE_PAGE)
+
+ROLE(SECTION,
+     "section",
+     ATK_ROLE_SECTION,
+     NSAccessibilityGroupRole,
+     USE_ROLE_STRING,
+     IA2_ROLE_SECTION)
+
+ROLE(REDUNDANT_OBJECT,
+     "redundant object",
+     ATK_ROLE_REDUNDANT_OBJECT,
+     NSAccessibilityUnknownRole,
+     USE_ROLE_STRING,
+     IA2_ROLE_REDUNDANT_OBJECT)
+
+ROLE(FORM,
+     "form",
+     ATK_ROLE_FORM,
+     NSAccessibilityGroupRole,
+     USE_ROLE_STRING,
+     IA2_ROLE_FORM)
+
+ROLE(IME,
+     "ime",
+     ATK_ROLE_INPUT_METHOD_WINDOW,
+     NSAccessibilityUnknownRole,
+     USE_ROLE_STRING,
+     IA2_ROLE_INPUT_METHOD_WINDOW)
+
+ROLE(APP_ROOT,
+     "app root",
+     ATK_ROLE_APPLICATION,
+     NSAccessibilityUnknownRole,  //Unused on OS X
+     ROLE_SYSTEM_APPLICATION,
+     ROLE_SYSTEM_APPLICATION)
+
+ROLE(PARENT_MENUITEM,
+     "parent menuitem",
+     ATK_ROLE_MENU,
+     NSAccessibilityMenuItemRole,
+     ROLE_SYSTEM_MENUITEM,
+     ROLE_SYSTEM_MENUITEM)
+
+ROLE(CALENDAR,
+     "calendar",
+     ATK_ROLE_CALENDAR,
+     NSAccessibilityGroupRole,
+     ROLE_SYSTEM_CLIENT,
+     ROLE_SYSTEM_CLIENT)
+
+ROLE(COMBOBOX_LIST,
+     "combobox list",
+     ATK_ROLE_MENU,
+     NSAccessibilityMenuRole,
+     ROLE_SYSTEM_LIST,
+     ROLE_SYSTEM_LIST)
+
+ROLE(COMBOBOX_OPTION,
+     "combobox option",
+     ATK_ROLE_MENU_ITEM,
+     NSAccessibilityMenuItemRole,
+     ROLE_SYSTEM_LISTITEM,
+     ROLE_SYSTEM_LISTITEM)
+
+ROLE(IMAGE_MAP,
+     "image map",
+     ATK_ROLE_IMAGE,
+     NSAccessibilityImageRole,
+     ROLE_SYSTEM_GRAPHIC,
+     ROLE_SYSTEM_GRAPHIC)
+
+ROLE(OPTION,
+     "listbox option",
+     ATK_ROLE_LIST_ITEM,
+     NSAccessibilityRowRole,
+     ROLE_SYSTEM_LISTITEM,
+     ROLE_SYSTEM_LISTITEM)
+
+ROLE(RICH_OPTION,
+     "listbox rich option",
+     ATK_ROLE_LIST_ITEM,
+     NSAccessibilityRowRole,
+     ROLE_SYSTEM_LISTITEM,
+     ROLE_SYSTEM_LISTITEM)
+
+ROLE(LISTBOX,
+     "listbox",
+     ATK_ROLE_LIST,
+     NSAccessibilityListRole,
+     ROLE_SYSTEM_LIST,
+     ROLE_SYSTEM_LIST)
+
+ROLE(FLAT_EQUATION,
+     "flat equation",
+     ATK_ROLE_UNKNOWN,
+     NSAccessibilityUnknownRole,
+     ROLE_SYSTEM_EQUATION,
+     ROLE_SYSTEM_EQUATION)
+
+ROLE(GRID_CELL,
+     "gridcell",
+     ATK_ROLE_TABLE_CELL,
+     NSAccessibilityGroupRole,
+     ROLE_SYSTEM_CELL,
+     ROLE_SYSTEM_CELL)
+
+ROLE(EMBEDDED_OBJECT,
+     "embedded object",
+     ATK_ROLE_PANEL,
+     NSAccessibilityGroupRole,
+     USE_ROLE_STRING,
+     IA2_ROLE_EMBEDDED_OBJECT)
+
+ROLE(NOTE,
+     "note",
+     ATK_ROLE_SECTION,
+     NSAccessibilityGroupRole,
+     USE_ROLE_STRING,
+     IA2_ROLE_NOTE)
+
+ROLE(FIGURE,
+     "figure",
+     ATK_ROLE_PANEL,
+     NSAccessibilityGroupRole,
+     ROLE_SYSTEM_GROUPING,
+     ROLE_SYSTEM_GROUPING)
+
+ROLE(CHECK_RICH_OPTION,
+     "check rich option",
+     ATK_ROLE_CHECK_BOX,
+     NSAccessibilityCheckBoxRole,
+     ROLE_SYSTEM_CHECKBUTTON,
+     ROLE_SYSTEM_CHECKBUTTON)
+
+ROLE(DEFINITION_LIST,
+     "definitionlist",
+     ATK_ROLE_LIST,
+     NSAccessibilityListRole,
+     ROLE_SYSTEM_LIST,
+     ROLE_SYSTEM_LIST)
+
+ROLE(TERM,
+     "term",
+     ATK_ROLE_LIST_ITEM,
+     NSAccessibilityGroupRole,
+     ROLE_SYSTEM_LISTITEM,
+     ROLE_SYSTEM_LISTITEM)
+
+ROLE(DEFINITION,
+     "definition",
+     ATK_ROLE_PARAGRAPH,
+     NSAccessibilityGroupRole,
+     USE_ROLE_STRING,
+     IA2_ROLE_PARAGRAPH)
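
The usage comment at the top of the new RoleMap.h ("declare the macro ROLE()
with the following arguments") is the whole contract: every platform now
expands one shared table instead of keeping hand-numbered arrays in sync,
which is why the ROLE_LAST_ENTRY sentinels and the "role map skewed"
assertions are removed elsewhere in this changeset. If a sanity check were
still wanted, it could be derived from the table itself rather than maintained
by hand. The sketch below is a hypothetical, self-contained illustration of
counting the entries at compile time; the stand-in table and its two-argument
ROLE() signature are simplifications and not part of this patch.

    // Stand-in for RoleMap.h with a shortened argument list.
    #define HYPOTHETICAL_ROLE_TABLE \
      ROLE(NOTHING, "nothing")      \
      ROLE(TERM, "term")            \
      ROLE(DEFINITION, "definition")

    // Expand every ROLE() entry to "+1" so the table counts itself.
    #define ROLE(geckoRole, stringRole) +1
    constexpr unsigned kRoleCount = 0 HYPOTHETICAL_ROLE_TABLE;
    #undef ROLE

    // A consumer could then assert agreement with the Role.h enum, e.g.
    // static_assert(kRoleCount == roles::DEFINITION + 1, "role tables skewed");
    static_assert(kRoleCount == 3, "stand-in table has three entries");

    int main() { return 0; }
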
--- a/accessible/src/base/nsAccessibilityService.cpp
+++ b/accessible/src/base/nsAccessibilityService.cpp
@@ -701,23 +701,29 @@ nsAccessibilityService::GetAccessibleFor
 
   NS_IF_ADDREF(*aAccessible = GetAccessible(node, nsnull));
   return NS_OK;
 }
 
 NS_IMETHODIMP
 nsAccessibilityService::GetStringRole(PRUint32 aRole, nsAString& aString)
 {
-  if ( aRole >= ArrayLength(kRoleNames)) {
-    aString.AssignLiteral("unknown");
+#define ROLE(geckoRole, stringRole, atkRole, macRole, msaaRole, ia2Role) \
+  case roles::geckoRole: \
+    CopyUTF8toUTF16(stringRole, aString); \
     return NS_OK;
+
+  switch (aRole) {
+#include "RoleMap.h"
+    default:
+      aString.AssignLiteral("unknown");
+      return NS_OK;
   }
 
-  CopyUTF8toUTF16(kRoleNames[aRole], aString);
-  return NS_OK;
+#undef ROLE
 }
 
 NS_IMETHODIMP
 nsAccessibilityService::GetStringStates(PRUint32 aState, PRUint32 aExtraState,
                                         nsIDOMDOMStringList **aStringStates)
 {
   nsAccessibleDOMStringList *stringStates = new nsAccessibleDOMStringList();
   NS_ENSURE_TRUE(stringStates, NS_ERROR_OUT_OF_MEMORY);
--- a/accessible/src/base/nsAccessibilityService.h
+++ b/accessible/src/base/nsAccessibilityService.h
@@ -291,152 +291,16 @@ private:
  */
 inline nsAccessibilityService*
 GetAccService()
 {
   return nsAccessibilityService::gAccessibilityService;
 }
 
 /**
- * Map nsIAccessibleRole constants to strings. Used by
- * nsIAccessibleRetrieval::getStringRole() method.
- */
-static const char kRoleNames[][20] = {
-  "nothing",             //ROLE_NOTHING
-  "titlebar",            //ROLE_TITLEBAR
-  "menubar",             //ROLE_MENUBAR
-  "scrollbar",           //ROLE_SCROLLBAR
-  "grip",                //ROLE_GRIP
-  "sound",               //ROLE_SOUND
-  "cursor",              //ROLE_CURSOR
-  "caret",               //ROLE_CARET
-  "alert",               //ROLE_ALERT
-  "window",              //ROLE_WINDOW
-  "internal frame",      //ROLE_INTERNAL_FRAME
-  "menupopup",           //ROLE_MENUPOPUP
-  "menuitem",            //ROLE_MENUITEM
-  "tooltip",             //ROLE_TOOLTIP
-  "application",         //ROLE_APPLICATION
-  "document",            //ROLE_DOCUMENT
-  "pane",                //ROLE_PANE
-  "chart",               //ROLE_CHART
-  "dialog",              //ROLE_DIALOG
-  "border",              //ROLE_BORDER
-  "grouping",            //ROLE_GROUPING
-  "separator",           //ROLE_SEPARATOR
-  "toolbar",             //ROLE_TOOLBAR
-  "statusbar",           //ROLE_STATUSBAR
-  "table",               //ROLE_TABLE
-  "columnheader",        //ROLE_COLUMNHEADER
-  "rowheader",           //ROLE_ROWHEADER
-  "column",              //ROLE_COLUMN
-  "row",                 //ROLE_ROW
-  "cell",                //ROLE_CELL
-  "link",                //ROLE_LINK
-  "helpballoon",         //ROLE_HELPBALLOON
-  "character",           //ROLE_CHARACTER
-  "list",                //ROLE_LIST
-  "listitem",            //ROLE_LISTITEM
-  "outline",             //ROLE_OUTLINE
-  "outlineitem",         //ROLE_OUTLINEITEM
-  "pagetab",             //ROLE_PAGETAB
-  "propertypage",        //ROLE_PROPERTYPAGE
-  "indicator",           //ROLE_INDICATOR
-  "graphic",             //ROLE_GRAPHIC
-  "statictext",          //ROLE_STATICTEXT
-  "text leaf",           //ROLE_TEXT_LEAF
-  "pushbutton",          //ROLE_PUSHBUTTON
-  "checkbutton",         //ROLE_CHECKBUTTON
-  "radiobutton",         //ROLE_RADIOBUTTON
-  "combobox",            //ROLE_COMBOBOX
-  "droplist",            //ROLE_DROPLIST
-  "progressbar",         //ROLE_PROGRESSBAR
-  "dial",                //ROLE_DIAL
-  "hotkeyfield",         //ROLE_HOTKEYFIELD
-  "slider",              //ROLE_SLIDER
-  "spinbutton",          //ROLE_SPINBUTTON
-  "diagram",             //ROLE_DIAGRAM
-  "animation",           //ROLE_ANIMATION
-  "equation",            //ROLE_EQUATION
-  "buttondropdown",      //ROLE_BUTTONDROPDOWN
-  "buttonmenu",          //ROLE_BUTTONMENU
-  "buttondropdowngrid",  //ROLE_BUTTONDROPDOWNGRID
-  "whitespace",          //ROLE_WHITESPACE
-  "pagetablist",         //ROLE_PAGETABLIST
-  "clock",               //ROLE_CLOCK
-  "splitbutton",         //ROLE_SPLITBUTTON
-  "ipaddress",           //ROLE_IPADDRESS
-  "accel label",         //ROLE_ACCEL_LABEL
-  "arrow",               //ROLE_ARROW
-  "canvas",              //ROLE_CANVAS
-  "check menu item",     //ROLE_CHECK_MENU_ITEM
-  "color chooser",       //ROLE_COLOR_CHOOSER
-  "date editor",         //ROLE_DATE_EDITOR
-  "desktop icon",        //ROLE_DESKTOP_ICON
-  "desktop frame",       //ROLE_DESKTOP_FRAME
-  "directory pane",      //ROLE_DIRECTORY_PANE
-  "file chooser",        //ROLE_FILE_CHOOSER
-  "font chooser",        //ROLE_FONT_CHOOSER
-  "chrome window",       //ROLE_CHROME_WINDOW
-  "glass pane",          //ROLE_GLASS_PANE
-  "html container",      //ROLE_HTML_CONTAINER
-  "icon",                //ROLE_ICON
-  "label",               //ROLE_LABEL
-  "layered pane",        //ROLE_LAYERED_PANE
-  "option pane",         //ROLE_OPTION_PANE
-  "password text",       //ROLE_PASSWORD_TEXT
-  "popup menu",          //ROLE_POPUP_MENU
-  "radio menu item",     //ROLE_RADIO_MENU_ITEM
-  "root pane",           //ROLE_ROOT_PANE
-  "scroll pane",         //ROLE_SCROLL_PANE
-  "split pane",          //ROLE_SPLIT_PANE
-  "table column header", //ROLE_TABLE_COLUMN_HEADER
-  "table row header",    //ROLE_TABLE_ROW_HEADER
-  "tear off menu item",  //ROLE_TEAR_OFF_MENU_ITEM
-  "terminal",            //ROLE_TERMINAL
-  "text container",      //ROLE_TEXT_CONTAINER
-  "toggle button",       //ROLE_TOGGLE_BUTTON
-  "tree table",          //ROLE_TREE_TABLE
-  "viewport",            //ROLE_VIEWPORT
-  "header",              //ROLE_HEADER
-  "footer",              //ROLE_FOOTER
-  "paragraph",           //ROLE_PARAGRAPH
-  "ruler",               //ROLE_RULER
-  "autocomplete",        //ROLE_AUTOCOMPLETE
-  "editbar",             //ROLE_EDITBAR
-  "entry",               //ROLE_ENTRY
-  "caption",             //ROLE_CAPTION
-  "document frame",      //ROLE_DOCUMENT_FRAME
-  "heading",             //ROLE_HEADING
-  "page",                //ROLE_PAGE
-  "section",             //ROLE_SECTION
-  "redundant object",    //ROLE_REDUNDANT_OBJECT
-  "form",                //ROLE_FORM
-  "ime",                 //ROLE_IME
-  "app root",            //ROLE_APP_ROOT
-  "parent menuitem",     //ROLE_PARENT_MENUITEM
-  "calendar",            //ROLE_CALENDAR
-  "combobox list",       //ROLE_COMBOBOX_LIST
-  "combobox option",     //ROLE_COMBOBOX_OPTION
-  "image map",           //ROLE_IMAGE_MAP
-  "listbox option",      //ROLE_OPTION
-  "listbox rich option", //ROLE_RICH_OPTION
-  "listbox",             //ROLE_LISTBOX
-  "flat equation",       //ROLE_FLAT_EQUATION
-  "gridcell",            //ROLE_GRID_CELL
-  "embedded object",     //ROLE_EMBEDDED_OBJECT
-  "note",                //ROLE_NOTE
-  "figure",              //ROLE_FIGURE
-  "check rich option",   //ROLE_CHECK_RICH_OPTION
-  "definitionlist",      //ROLE_DEFINITION_LIST
-  "term",                //ROLE_TERM
-  "definition"           //ROLE_DEFINITION
-};
-
-/**
  * Map nsIAccessibleEvents constants to strings. Used by
  * nsIAccessibleRetrieval::getStringEventType() method.
  */
 static const char kEventTypeNames[][40] = {
   "unknown",                                 //
   "show",                                    // EVENT_SHOW
   "hide",                                    // EVENT_HIDE
   "reorder",                                 // EVENT_REORDER
--- a/accessible/src/mac/Makefile.in
+++ b/accessible/src/mac/Makefile.in
@@ -75,17 +75,16 @@ EXPORTS = \
   nsHyperTextAccessibleWrap.h \
   nsHTMLImageAccessibleWrap.h \
   nsHTMLTableAccessibleWrap.h \
   mozDocAccessible.h \
   mozAccessible.h \
   mozAccessibleProtocol.h \
   mozActionElements.h \
   mozTextAccessible.h \
-  nsRoleMap.h \
   $(NULL)
 
 # we don't want the shared lib, but we want to force the creation of a static lib.
 FORCE_STATIC_LIB = 1
 
 include $(topsrcdir)/config/rules.mk
 
 LOCAL_INCLUDES += \
--- a/accessible/src/mac/mozAccessible.mm
+++ b/accessible/src/mac/mozAccessible.mm
@@ -35,17 +35,16 @@
  * the terms of any one of the MPL, the GPL or the LGPL.
  *
  * ***** END LICENSE BLOCK ***** */
  
 #import "mozAccessible.h"
 
 #import "MacUtils.h"
 #import "mozView.h"
-#import "nsRoleMap.h"
 
 #include "Accessible-inl.h"
 #include "nsIAccessibleRelation.h"
 #include "nsIAccessibleText.h"
 #include "nsIAccessibleEditableText.h"
 #include "nsRootAccessible.h"
 #include "Relation.h"
 #include "Role.h"
@@ -116,20 +115,16 @@ GetNativeFromGeckoAccessible(nsIAccessib
 - (id)initWithAccessible:(nsAccessibleWrap*)geckoAccessible
 {
   NS_OBJC_BEGIN_TRY_ABORT_BLOCK_NIL;
 
   if ((self = [super init])) {
     mGeckoAccessible = geckoAccessible;
     mIsExpired = NO;
     mRole = geckoAccessible->Role();
-    
-    // Check for OS X "role skew"; the role constants in nsIAccessible.idl need to match the ones
-    // in nsRoleMap.h.
-    NS_ASSERTION([AXRoles[roles::LAST_ENTRY] isEqualToString:@"ROLE_LAST_ENTRY"], "Role skew in the role map!");
   }
    
   return self;
 
   NS_OBJC_END_TRY_ABORT_BLOCK_NIL;
 }
 
 - (void)dealloc
@@ -457,17 +452,29 @@ GetNativeFromGeckoAccessible(nsIAccessib
 }
 
 - (NSString*)role
 {
 #ifdef DEBUG_A11Y
   NS_ASSERTION(nsAccUtils::IsTextInterfaceSupportCorrect(mGeckoAccessible),
                "Does not support nsIAccessibleText when it should");
 #endif
-  return (NSString*) AXRoles[mRole];
+
+#define ROLE(geckoRole, stringRole, atkRole, macRole, msaaRole, ia2Role) \
+  case roles::geckoRole: \
+    return macRole;
+
+  switch (mRole) {
+#include "RoleMap.h"
+    default:
+      NS_NOTREACHED("Unknown role.");
+      return NSAccessibilityUnknownRole;
+  }
+
+#undef ROLE
 }
 
 - (NSString*)subrole
 {
   switch (mRole) {
     case roles::LIST:
       return NSAccessibilityContentListSubrole;
 
--- a/accessible/src/mac/nsAccessibleWrap.h
+++ b/accessible/src/mac/nsAccessibleWrap.h
@@ -96,17 +96,17 @@ public: // construction, destruction
     { return (NativeState() & mozilla::a11y::states::HASPOPUP); }
   
   /**
    * Returns this accessible's all children, adhering to "flat" accessibles by 
    * not returning their children.
    */
   void GetUnignoredChildren(nsTArray<nsAccessible*>* aChildrenArray);
   nsAccessible* GetUnignoredParent() const;
-    
+
 protected:
 
   virtual nsresult FirePlatformEvent(AccEvent* aEvent);
 
   /**
    * Return true if the parent doesn't have children to expose to AT.
    */
   bool AncestorIsFlat();
--- a/accessible/src/mac/nsAccessibleWrap.mm
+++ b/accessible/src/mac/nsAccessibleWrap.mm
@@ -34,18 +34,16 @@
  * the provisions above, a recipient may use your version of this file under
  * the terms of any one of the MPL, the GPL or the LGPL.
  *
  * ***** END LICENSE BLOCK ***** */
 
 #include "nsDocAccessible.h"
 #include "nsObjCExceptions.h"
 
-#import "nsRoleMap.h"
-
 #include "Accessible-inl.h"
 #include "Role.h"
 
 #import "mozAccessible.h"
 #import "mozActionElements.h"
 #import "mozHTMLAccessible.h"
 #import "mozTextAccessible.h"
 
deleted file mode 100644
--- a/accessible/src/mac/nsRoleMap.h
+++ /dev/null
@@ -1,175 +0,0 @@
-/* -*- Mode: Objective-C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* vim:expandtab:shiftwidth=2:tabstop=2:
- */
-/* ***** BEGIN LICENSE BLOCK *****
- * Version: MPL 1.1/GPL 2.0/LGPL 2.1
- *
- * The contents of this file are subject to the Mozilla Public License Version
- * 1.1 (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- * http://www.mozilla.org/MPL/
- *
- * Software distributed under the License is distributed on an "AS IS" basis,
- * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
- * for the specific language governing rights and limitations under the
- * License.
- *
- * The Original Code is mozilla.org code.
- *
- * The Initial Developer of the Original Code is Mozilla Foundation
- * Portions created by the Initial Developer are Copyright (C) 2006
- * the Initial Developer. All Rights Reserved.
- *
- * Contributor(s):
- *   Håkan Waara <hwaara@gmail.com>
- *
- * Alternatively, the contents of this file may be used under the terms of
- * either the GNU General Public License Version 2 or later (the "GPL"), or
- * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
- * in which case the provisions of the GPL or the LGPL are applicable instead
- * of those above. If you wish to allow use of your version of this file only
- * under the terms of either the GPL or the LGPL, and not to allow others to
- * use your version of this file under the terms of the MPL, indicate your
- * decision by deleting the provisions above and replace them with the notice
- * and other provisions required by the GPL or the LGPL. If you do not delete
- * the provisions above, a recipient may use your version of this file under
- * the terms of any one of the MPL, the GPL or the LGPL.
- *
- * ***** END LICENSE BLOCK ***** */
-
-#import <Cocoa/Cocoa.h>
-
-#include "nsIAccessible.h"
-
-static const NSString* AXRoles [] = {
-  NSAccessibilityUnknownRole,                   // roles::NOTHING              0
-  NSAccessibilityUnknownRole,                   // roles::TITLEBAR             1      Irrelevant on OS X; windows are always native.
-  NSAccessibilityScrollBarRole,                 // roles::SCROLLBAR            3      We might need to make this its own mozAccessible, to support the children objects (valueindicator, down/up buttons).
-  NSAccessibilityMenuBarRole,                   // roles::MENUBAR              2      Irrelevant on OS X; the menubar will always be native and on the top of the screen.
-  NSAccessibilitySplitterRole,                  // roles::GRIP                 4
-  NSAccessibilityUnknownRole,                   // roles::SOUND                5      Unused on OS X.
-  NSAccessibilityUnknownRole,                   // roles::CURSOR               6      Unused on OS X.
-  NSAccessibilityUnknownRole,                   // roles::CARET                7      Unused on OS X.
-  NSAccessibilityWindowRole,                    // roles::ALERT                8
-  NSAccessibilityWindowRole,                    // roles::WINDOW               9      Irrelevant on OS X; all window a11y is handled by the system.
-  NSAccessibilityScrollAreaRole,                // roles::INTERNAL_FRAME       10
-  NSAccessibilityMenuRole,                      // roles::MENUPOPUP            11     The parent of menuitems.
-  NSAccessibilityMenuItemRole,                  // roles::MENUITEM             12
-  @"AXHelpTag",                                 // roles::TOOLTIP              13     10.4+ only, so we re-define the constant.
-  NSAccessibilityGroupRole,                     // roles::APPLICATION          14     Unused on OS X. the system will take care of this.
-  @"AXWebArea",                                 // roles::DOCUMENT             15
-  NSAccessibilityGroupRole,                     // roles::PANE                 16
-  NSAccessibilityUnknownRole,                   // roles::CHART                17
-  NSAccessibilityWindowRole,                    // roles::DIALOG               18     There's a dialog subrole.
-  NSAccessibilityUnknownRole,                   // roles::BORDER               19     Unused on OS X.
-  NSAccessibilityGroupRole,                     // roles::GROUPING             20
-  NSAccessibilityUnknownRole,                   // roles::SEPARATOR            21
-  NSAccessibilityToolbarRole,                   // roles::TOOLBAR              22
-  NSAccessibilityUnknownRole,                   // roles::STATUSBAR            23     Doesn't exist on OS X (a status bar is its parts; a progressbar, a label, etc.)
-  NSAccessibilityGroupRole,                     // roles::TABLE                24
-  NSAccessibilityGroupRole,                     // roles::COLUMNHEADER         25
-  NSAccessibilityGroupRole,                     // roles::ROWHEADER            26
-  NSAccessibilityColumnRole,                    // roles::COLUMN               27
-  NSAccessibilityRowRole,                       // roles::ROW                  28
-  NSAccessibilityGroupRole,                     // roles::CELL                 29
-  @"AXLink",                                    // roles::LINK                 30     10.4+ the attr first define in SDK 10.4, so we define it here too. ROLE_LINK
-  @"AXHelpTag",                                 // roles::HELPBALLOON          31
-  NSAccessibilityUnknownRole,                   // roles::CHARACTER            32     Unused on OS X.
-  NSAccessibilityListRole,                      // roles::LIST                 33
-  NSAccessibilityGroupRole,                     // roles::LISTITEM             34
-  NSAccessibilityOutlineRole,                   // roles::OUTLINE              35
-  NSAccessibilityRowRole,                       // roles::OUTLINEITEM          36     XXX: use OutlineRow as subrole.
-  NSAccessibilityRadioButtonRole,               // roles::PAGETAB              37
-  NSAccessibilityGroupRole,                     // roles::PROPERTYPAGE         38
-  NSAccessibilityUnknownRole,                   // roles::INDICATOR            39
-  NSAccessibilityImageRole,                     // roles::GRAPHIC              40
-  NSAccessibilityStaticTextRole,                // roles::STATICTEXT           41
-  NSAccessibilityStaticTextRole,                // roles::TEXT_LEAF            42
-  NSAccessibilityButtonRole,                    // roles::PUSHBUTTON           43
-  NSAccessibilityCheckBoxRole,                  // roles::CHECKBUTTON          44
-  NSAccessibilityRadioButtonRole,               // roles::RADIOBUTTON          45
-  NSAccessibilityPopUpButtonRole,               // roles::COMBOBOX             46
-  NSAccessibilityPopUpButtonRole,               // roles::DROPLIST             47
-  NSAccessibilityProgressIndicatorRole,         // roles::PROGRESSBAR          48
-  NSAccessibilityUnknownRole,                   // roles::DIAL                 49
-  NSAccessibilityUnknownRole,                   // roles::HOTKEYFIELD          50
-  NSAccessibilitySliderRole,                    // roles::SLIDER               51
-  NSAccessibilityIncrementorRole,               // roles::SPINBUTTON           52     Subroles: Increment/Decrement.
-  NSAccessibilityUnknownRole,                   // roles::DIAGRAM              53
-  NSAccessibilityUnknownRole,                   // roles::ANIMATION            54
-  NSAccessibilityUnknownRole,                   // roles::EQUATION             55
-  NSAccessibilityPopUpButtonRole,               // roles::BUTTONDROPDOWN       56
-  NSAccessibilityMenuButtonRole,                // roles::BUTTONMENU           57
-  NSAccessibilityGroupRole,                     // roles::BUTTONDROPDOWNGRID   58
-  NSAccessibilityUnknownRole,                   // roles::WHITESPACE           59
-  NSAccessibilityTabGroupRole,                  // roles::PAGETABLIST          60
-  NSAccessibilityUnknownRole,                   // roles::CLOCK                61     Unused on OS X
-  NSAccessibilityButtonRole,                    // roles::SPLITBUTTON          62
-  NSAccessibilityUnknownRole,                   // roles::IPADDRESS            63
-  NSAccessibilityStaticTextRole,                // roles::ACCEL_LABEL          64
-  NSAccessibilityUnknownRole,                   // roles::ARROW                65
-  NSAccessibilityImageRole,                     // roles::CANVAS               66
-  NSAccessibilityMenuItemRole,                  // roles::CHECK_MENU_ITEM      67
-  NSAccessibilityColorWellRole,                 // roles::COLOR_CHOOSER        68
-  NSAccessibilityUnknownRole,                   // roles::DATE_EDITOR          69 
-  NSAccessibilityImageRole,                     // roles::DESKTOP_ICON         70
-  NSAccessibilityUnknownRole,                   // roles::DESKTOP_FRAME        71
-  NSAccessibilityBrowserRole,                   // roles::DIRECTORY_PANE       72
-  NSAccessibilityUnknownRole,                   // roles::FILE_CHOOSER         73     Unused on OS X
-  NSAccessibilityUnknownRole,                   // roles::FONT_CHOOSER         74
-  NSAccessibilityUnknownRole,                   // roles::CHROME_WINDOW        75     Unused on OS X
-  NSAccessibilityGroupRole,                     // roles::GLASS_PANE           76
-  NSAccessibilityUnknownRole,                   // roles::HTML_CONTAINER       77
-  NSAccessibilityImageRole,                     // roles::ICON                 78
-  NSAccessibilityGroupRole,                     // roles::LABEL                79
-  NSAccessibilityGroupRole,                     // roles::LAYERED_PANE         80
-  NSAccessibilityGroupRole,                     // roles::OPTION_PANE          81
-  NSAccessibilityTextFieldRole,                 // roles::PASSWORD_TEXT        82
-  NSAccessibilityUnknownRole,                   // roles::POPUP_MENU           83     Unused
-  NSAccessibilityMenuItemRole,                  // roles::RADIO_MENU_ITEM      84
-  NSAccessibilityGroupRole,                     // roles::ROOT_PANE            85
-  NSAccessibilityScrollAreaRole,                // roles::SCROLL_PANE          86
-  NSAccessibilitySplitGroupRole,                // roles::SPLIT_PANE           87
-  NSAccessibilityUnknownRole,                   // roles::TABLE_COLUMN_HEADER  88
-  NSAccessibilityUnknownRole,                   // roles::TABLE_ROW_HEADER     89
-  NSAccessibilityMenuItemRole,                  // roles::TEAR_OFF_MENU_ITEM   90
-  NSAccessibilityUnknownRole,                   // roles::TERMINAL             91
-  NSAccessibilityGroupRole,                     // roles::TEXT_CONTAINER       92
-  NSAccessibilityButtonRole,                    // roles::TOGGLE_BUTTON        93
-  NSAccessibilityTableRole,                     // roles::TREE_TABLE           94
-  NSAccessibilityUnknownRole,                   // roles::VIEWPORT             95
-  NSAccessibilityGroupRole,                     // roles::HEADER               96
-  NSAccessibilityGroupRole,                     // roles::FOOTER               97
-  NSAccessibilityGroupRole,                     // roles::PARAGRAPH            98
-  @"AXRuler",                                   // roles::RULER                99     10.4+ only, so we re-define the constant.
-  NSAccessibilityUnknownRole,                   // roles::AUTOCOMPLETE         100
-  NSAccessibilityTextFieldRole,                 // roles::EDITBAR              101
-  NSAccessibilityTextFieldRole,                 // roles::ENTRY                102
-  NSAccessibilityStaticTextRole,                // roles::CAPTION              103
-  NSAccessibilityScrollAreaRole,                // roles::DOCUMENT_FRAME       104
-  @"AXHeading",                                 // roles::HEADING              105
-  NSAccessibilityGroupRole,                     // roles::PAGE                 106
-  NSAccessibilityGroupRole,                     // roles::SECTION              107
-  NSAccessibilityUnknownRole,                   // roles::REDUNDANT_OBJECT     108
-  NSAccessibilityGroupRole,                     // roles::FORM                 109
-  NSAccessibilityUnknownRole,                   // roles::IME                  110
-  NSAccessibilityUnknownRole,                   // roles::APP_ROOT             111    Unused on OS X
-  NSAccessibilityMenuItemRole,                  // roles::PARENT_MENUITEM      112
-  NSAccessibilityGroupRole,                     // roles::CALENDAR             113
-  NSAccessibilityMenuRole,                      // roles::COMBOBOX_LIST        114
-  NSAccessibilityMenuItemRole,                  // roles::COMBOBOX_OPTION      115
-  NSAccessibilityImageRole,                     // roles::IMAGE_MAP            116
-  NSAccessibilityRowRole,                       // roles::OPTION               117
-  NSAccessibilityRowRole,                       // roles::RICH_OPTION          118
-  NSAccessibilityListRole,                      // roles::LISTBOX              119
-  NSAccessibilityUnknownRole,                   // roles::FLAT_EQUATION        120
-  NSAccessibilityGroupRole,                     // roles::GRID_CELL            121
-  NSAccessibilityGroupRole,                     // roles::EMBEDDED_OBJECT      122
-  NSAccessibilityGroupRole,                     // roles::NOTE                 123
-  NSAccessibilityGroupRole,                     // roles::FIGURE               124
-  NSAccessibilityCheckBoxRole,                  // roles::CHECK_RICH_OPTION    125
-  NSAccessibilityListRole,                      // roles::DEFINITION_LIST      126
-  NSAccessibilityGroupRole,                     // roles::TERM                 127
-  NSAccessibilityGroupRole,                     // roles::DEFINITION           128
-  @"ROLE_LAST_ENTRY"                            // roles::LAST_ENTRY                  Bogus role that will never be shown (just marks the end of this array)!
-};
--- a/accessible/src/msaa/nsAccessibleWrap.cpp
+++ b/accessible/src/msaa/nsAccessibleWrap.cpp
@@ -47,35 +47,53 @@
 #include "States.h"
 
 #include "ia2AccessibleRelation.h"
 
 #include "nsIAccessibleEvent.h"
 #include "nsIAccessibleRelation.h"
 
 #include "Accessible2_i.c"
+#include "AccessibleRole.h"
 #include "AccessibleStates.h"
 
 #include "nsIMutableArray.h"
 #include "nsIDOMDocument.h"
 #include "nsIFrame.h"
 #include "nsIScrollableFrame.h"
 #include "nsINameSpaceManager.h"
 #include "nsINodeInfo.h"
 #include "nsRootAccessible.h"
 #include "nsIServiceManager.h"
 #include "nsTextFormatter.h"
 #include "nsIView.h"
 #include "nsIViewManager.h"
-#include "nsRoleMap.h"
 #include "nsEventMap.h"
 #include "nsArrayUtils.h"
 
+#include "OLEACC.H"
+
+using namespace mozilla;
 using namespace mozilla::a11y;
 
+const PRUint32 USE_ROLE_STRING = 0;
+
+#ifndef ROLE_SYSTEM_SPLITBUTTON
+const PRUint32 ROLE_SYSTEM_SPLITBUTTON  = 0x3e; // Not defined in all oleacc.h versions
+#endif
+
+#ifndef ROLE_SYSTEM_IPADDRESS
+const PRUint32 ROLE_SYSTEM_IPADDRESS = 0x3f; // Not defined in all oleacc.h versions
+#endif
+
+#ifndef ROLE_SYSTEM_OUTLINEBUTTON
+const PRUint32 ROLE_SYSTEM_OUTLINEBUTTON = 0x40; // Not defined in all oleacc.h versions
+#endif
+
+
 /* For documentation of the accessibility architecture,
  * see http://lxr.mozilla.org/seamonkey/source/accessible/accessible-docs.html
  */
 
 //#define DEBUG_LEAKS
 
 #ifdef DEBUG_LEAKS
 static gAccessibles = 0;
@@ -369,25 +387,36 @@ STDMETHODIMP nsAccessibleWrap::get_accRo
   if (xpAccessible->IsDefunct())
     return CO_E_OBJNOTCONNECTED;
 
 #ifdef DEBUG_A11Y
   NS_ASSERTION(nsAccUtils::IsTextInterfaceSupportCorrect(xpAccessible),
                "Does not support nsIAccessibleText when it should");
 #endif
 
-  roles::Role role = xpAccessible->Role();
-  PRUint32 msaaRole = gWindowsRoleMap[role].msaaRole;
-  NS_ASSERTION(gWindowsRoleMap[roles::LAST_ENTRY].msaaRole == ROLE_WINDOWS_LAST_ENTRY,
-               "MSAA role map skewed");
+  a11y::role geckoRole = xpAccessible->Role();
+  PRUint32 msaaRole = 0;
+
+#define ROLE(_geckoRole, stringRole, atkRole, macRole, _msaaRole, ia2Role) \
+  case roles::_geckoRole: \
+    msaaRole = _msaaRole; \
+    break;
+
+  switch (geckoRole) {
+#include "RoleMap.h"
+    default:
+      MOZ_NOT_REACHED("Unknown role.");
+  };
+
+#undef ROLE
 
   // Special case, if there is a ROLE_ROW inside of a ROLE_TREE_TABLE, then call the MSAA role
   // a ROLE_OUTLINEITEM for consistency and compatibility.
   // We need this because ARIA has a role of "row" for both grid and treegrid
-  if (role == roles::ROW) {
+  if (geckoRole == roles::ROW) {
     nsAccessible* xpParent = Parent();
     if (xpParent && xpParent->Role() == roles::TREE_TABLE)
       msaaRole = ROLE_SYSTEM_OUTLINEITEM;
   }
   
   // -- Try enumerated role
   if (msaaRole != USE_ROLE_STRING) {
     pvarRole->vt = VT_I4;
@@ -1197,25 +1226,33 @@ STDMETHODIMP
 nsAccessibleWrap::role(long *aRole)
 {
 __try {
   *aRole = 0;
 
   if (IsDefunct())
     return CO_E_OBJNOTCONNECTED;
 
-  NS_ASSERTION(gWindowsRoleMap[roles::LAST_ENTRY].ia2Role == ROLE_WINDOWS_LAST_ENTRY,
-               "MSAA role map skewed");
+#define ROLE(_geckoRole, stringRole, atkRole, macRole, msaaRole, ia2Role) \
+  case roles::_geckoRole: \
+    *aRole = ia2Role; \
+    break;
 
-  roles::Role role = Role();
-  *aRole = gWindowsRoleMap[role].ia2Role;
+  a11y::role geckoRole = Role();
+  switch (geckoRole) {
+#include "RoleMap.h"
+    default:
+      MOZ_NOT_REACHED("Unknown role.");
+  };
+
+#undef ROLE
 
   // Special case, if there is a ROLE_ROW inside of a ROLE_TREE_TABLE, then call
   // the IA2 role a ROLE_OUTLINEITEM.
-  if (role == roles::ROW) {
+  if (geckoRole == roles::ROW) {
     nsAccessible* xpParent = Parent();
     if (xpParent && xpParent->Role() == roles::TREE_TABLE)
       *aRole = ROLE_SYSTEM_OUTLINEITEM;
   }
 
   return S_OK;
 
 } __except(nsAccessNodeWrap::FilterA11yExceptions(::GetExceptionCode(), GetExceptionInformation())) { }
deleted file mode 100644
--- a/accessible/src/msaa/nsRoleMap.h
+++ /dev/null
@@ -1,467 +0,0 @@
-/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* vim:expandtab:shiftwidth=2:tabstop=2:
- */
-/* ***** BEGIN LICENSE BLOCK *****
- * Version: MPL 1.1/GPL 2.0/LGPL 2.1
- *
- * The contents of this file are subject to the Mozilla Public License Version
- * 1.1 (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- * http://www.mozilla.org/MPL/
- *
- * Software distributed under the License is distributed on an "AS IS" basis,
- * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
- * for the specific language governing rights and limitations under the
- * License.
- *
- * The Original Code is mozilla.org code.
- *
- * The Initial Developer of the Original Code is IBM Corporation
- * Portions created by the Initial Developer are Copyright (C) 2006
- * the Initial Developer. All Rights Reserved.
- *
- * Contributor(s):
- *   Gao, Ming <gaoming@cn.ibm.com>
- *   Aaron Leventhal <aleventh@us.ibm.com>
- *   Alexander Surkov <surkov.alexander@gmail.com>
- *
- * Alternatively, the contents of this file may be used under the terms of
- * either the GNU General Public License Version 2 or later (the "GPL"), or
- * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
- * in which case the provisions of the GPL or the LGPL are applicable instead
- * of those above. If you wish to allow use of your version of this file only
- * under the terms of either the GPL or the LGPL, and not to allow others to
- * use your version of this file under the terms of the MPL, indicate your
- * decision by deleting the provisions above and replace them with the notice
- * and other provisions required by the GPL or the LGPL. If you do not delete
- * the provisions above, a recipient may use your version of this file under
- * the terms of any one of the MPL, the GPL or the LGPL.
- *
- * ***** END LICENSE BLOCK ***** */
-
-#include "OLEACC.H"
-#include "AccessibleRole.h"
-
-const PRUint32 USE_ROLE_STRING = 0;
-const PRUint32 ROLE_WINDOWS_LAST_ENTRY = 0xffffffff;
-
-#ifndef ROLE_SYSTEM_SPLITBUTTON
-const PRUint32 ROLE_SYSTEM_SPLITBUTTON  = 0x3e; // Not defined in all oleacc.h versions
-#endif
-
-#ifndef ROLE_SYSTEM_IPADDRESS
-const PRUint32 ROLE_SYSTEM_IPADDRESS = 0x3f; // Not defined in all oleacc.h versions
-#endif
-
-#ifndef ROLE_SYSTEM_OUTLINEBUTTON
-const PRUint32 ROLE_SYSTEM_OUTLINEBUTTON = 0x40; // Not defined in all oleacc.h versions
-#endif
-
-struct WindowsRoleMapItem
-{
-  PRUint32 msaaRole;
-  long ia2Role;
-};
-
-// Map array from cross platform roles to MSAA/IA2 roles
-static const WindowsRoleMapItem gWindowsRoleMap[] = {
-  // roles::NOTHING
-  { USE_ROLE_STRING, IA2_ROLE_UNKNOWN },
-
-  // roles::TITLEBAR
-  { ROLE_SYSTEM_TITLEBAR, ROLE_SYSTEM_TITLEBAR },
-
-  // roles::MENUBAR
-  { ROLE_SYSTEM_MENUBAR, ROLE_SYSTEM_MENUBAR },
-
-  // roles::SCROLLBAR
-  { ROLE_SYSTEM_SCROLLBAR, ROLE_SYSTEM_SCROLLBAR },
-
-  // roles::GRIP
-  { ROLE_SYSTEM_GRIP, ROLE_SYSTEM_GRIP },
-
-  // roles::SOUND
-  { ROLE_SYSTEM_SOUND, ROLE_SYSTEM_SOUND },
-
-  // roles::CURSOR
-  { ROLE_SYSTEM_CURSOR, ROLE_SYSTEM_CURSOR },
-
-  // roles::CARET
-  { ROLE_SYSTEM_CARET, ROLE_SYSTEM_CARET },
-
-  // roles::ALERT
-  { ROLE_SYSTEM_ALERT, ROLE_SYSTEM_ALERT },
-
-  // roles::WINDOW
-  { ROLE_SYSTEM_WINDOW, ROLE_SYSTEM_WINDOW },
-
-  // roles::INTERNAL_FRAME
-  { USE_ROLE_STRING, IA2_ROLE_INTERNAL_FRAME},
-
-  // roles::MENUPOPUP
-  { ROLE_SYSTEM_MENUPOPUP, ROLE_SYSTEM_MENUPOPUP },
-
-  // roles::MENUITEM
-  { ROLE_SYSTEM_MENUITEM, ROLE_SYSTEM_MENUITEM },
-
-  // roles::TOOLTIP
-  { ROLE_SYSTEM_TOOLTIP, ROLE_SYSTEM_TOOLTIP },
-
-  // roles::APPLICATION
-  { ROLE_SYSTEM_APPLICATION, ROLE_SYSTEM_APPLICATION },
-
-  // roles::DOCUMENT
-  { ROLE_SYSTEM_DOCUMENT, ROLE_SYSTEM_DOCUMENT },
-
-  // roles::PANE
-  // We used to map to ROLE_SYSTEM_PANE, but JAWS would
-  // not read the accessible name for the contaning pane.
-  // However, JAWS will read the accessible name for a groupbox.
-  // By mapping a PANE to a GROUPING, we get no undesirable effects,
-  // but fortunately JAWS will then read the group's label,
-  // when an inner control gets focused.
-  { ROLE_SYSTEM_GROUPING , ROLE_SYSTEM_GROUPING }, 
-
-  // roles::CHART
-  { ROLE_SYSTEM_CHART, ROLE_SYSTEM_CHART },
-
-  // roles::DIALOG
-  { ROLE_SYSTEM_DIALOG, ROLE_SYSTEM_DIALOG },
-
-  // roles::BORDER
-  { ROLE_SYSTEM_BORDER, ROLE_SYSTEM_BORDER },
-
-  // roles::GROUPING
-  { ROLE_SYSTEM_GROUPING, ROLE_SYSTEM_GROUPING },
-
-  // roles::SEPARATOR
-  { ROLE_SYSTEM_SEPARATOR, ROLE_SYSTEM_SEPARATOR },
-
-  // roles::TOOLBAR
-  { ROLE_SYSTEM_TOOLBAR, ROLE_SYSTEM_TOOLBAR },
-
-  // roles::STATUSBAR
-  { ROLE_SYSTEM_STATUSBAR, ROLE_SYSTEM_STATUSBAR },
-
-  // roles::TABLE
-  { ROLE_SYSTEM_TABLE, ROLE_SYSTEM_TABLE },
-
-  // roles::COLUMNHEADER,
-  { ROLE_SYSTEM_COLUMNHEADER, ROLE_SYSTEM_COLUMNHEADER },
-
-  // roles::ROWHEADER
-  { ROLE_SYSTEM_ROWHEADER, ROLE_SYSTEM_ROWHEADER },
-
-  // roles::COLUMN
-  { ROLE_SYSTEM_COLUMN, ROLE_SYSTEM_COLUMN },
-
-  // roles::ROW
-  { ROLE_SYSTEM_ROW, ROLE_SYSTEM_ROW },
-
-  // roles::CELL
-  { ROLE_SYSTEM_CELL, ROLE_SYSTEM_CELL },
-
-  // roles::LINK
-  { ROLE_SYSTEM_LINK, ROLE_SYSTEM_LINK },
-
-  // roles::HELPBALLOON
-  { ROLE_SYSTEM_HELPBALLOON, ROLE_SYSTEM_HELPBALLOON },
-
-  // roles::CHARACTER
-  { ROLE_SYSTEM_CHARACTER, ROLE_SYSTEM_CHARACTER },
-
-  // roles::LIST
-  { ROLE_SYSTEM_LIST, ROLE_SYSTEM_LIST },
-
-  // roles::LISTITEM
-  { ROLE_SYSTEM_LISTITEM, ROLE_SYSTEM_LISTITEM },
-
-  // roles::OUTLINE
-  { ROLE_SYSTEM_OUTLINE, ROLE_SYSTEM_OUTLINE },
-
-  // roles::OUTLINEITEM
-  { ROLE_SYSTEM_OUTLINEITEM, ROLE_SYSTEM_OUTLINEITEM },
-
-  // roles::PAGETAB
-  { ROLE_SYSTEM_PAGETAB, ROLE_SYSTEM_PAGETAB },
-
-  // roles::PROPERTYPAGE
-  { ROLE_SYSTEM_PROPERTYPAGE, ROLE_SYSTEM_PROPERTYPAGE },
-
-  // roles::INDICATOR
-  { ROLE_SYSTEM_INDICATOR, ROLE_SYSTEM_INDICATOR },
-
-  // roles::GRAPHIC
-  { ROLE_SYSTEM_GRAPHIC, ROLE_SYSTEM_GRAPHIC },
-
-  // roles::STATICTEXT
-  { ROLE_SYSTEM_STATICTEXT, ROLE_SYSTEM_STATICTEXT },
-
-  // roles::TEXT_LEAF
-  { ROLE_SYSTEM_TEXT, ROLE_SYSTEM_TEXT },
-
-  // roles::PUSHBUTTON
-  { ROLE_SYSTEM_PUSHBUTTON, ROLE_SYSTEM_PUSHBUTTON },
-
-  // roles::CHECKBUTTON
-  { ROLE_SYSTEM_CHECKBUTTON, ROLE_SYSTEM_CHECKBUTTON },
-
-  // roles::RADIOBUTTON
-  { ROLE_SYSTEM_RADIOBUTTON, ROLE_SYSTEM_RADIOBUTTON },
-
-  // roles::COMBOBOX
-  { ROLE_SYSTEM_COMBOBOX, ROLE_SYSTEM_COMBOBOX },
-
-  // roles::DROPLIST
-  { ROLE_SYSTEM_DROPLIST, ROLE_SYSTEM_DROPLIST },
-
-  // roles::PROGRESSBAR
-  { ROLE_SYSTEM_PROGRESSBAR, ROLE_SYSTEM_PROGRESSBAR },
-
-  // roles::DIAL
-  { ROLE_SYSTEM_DIAL, ROLE_SYSTEM_DIAL },
-
-  // roles::HOTKEYFIELD
-  { ROLE_SYSTEM_HOTKEYFIELD, ROLE_SYSTEM_HOTKEYFIELD },
-
-  // roles::SLIDER
-  { ROLE_SYSTEM_SLIDER, ROLE_SYSTEM_SLIDER },
-
-  // roles::SPINBUTTON
-  { ROLE_SYSTEM_SPINBUTTON, ROLE_SYSTEM_SPINBUTTON },
-
-  // roles::DIAGRAM
-  { ROLE_SYSTEM_DIAGRAM, ROLE_SYSTEM_DIAGRAM },
-
-  // roles::ANIMATION
-  { ROLE_SYSTEM_ANIMATION, ROLE_SYSTEM_ANIMATION },
-
-  // roles::EQUATION
-  { ROLE_SYSTEM_EQUATION, ROLE_SYSTEM_EQUATION },
-
-  // roles::BUTTONDROPDOWN
-  { ROLE_SYSTEM_BUTTONDROPDOWN, ROLE_SYSTEM_BUTTONDROPDOWN },
-
-  // roles::BUTTONMENU
-  { ROLE_SYSTEM_BUTTONMENU, ROLE_SYSTEM_BUTTONMENU },
-
-  // roles::BUTTONDROPDOWNGRID
-  { ROLE_SYSTEM_BUTTONDROPDOWNGRID, ROLE_SYSTEM_BUTTONDROPDOWNGRID },
-
-  // roles::WHITESPACE
-  { ROLE_SYSTEM_WHITESPACE, ROLE_SYSTEM_WHITESPACE },
-
-  // roles::PAGETABLIST
-  { ROLE_SYSTEM_PAGETABLIST, ROLE_SYSTEM_PAGETABLIST },
-
-  // roles::CLOCK
-  { ROLE_SYSTEM_CLOCK, ROLE_SYSTEM_CLOCK },
-
-  // roles::SPLITBUTTON
-  { ROLE_SYSTEM_SPLITBUTTON, ROLE_SYSTEM_SPLITBUTTON },
-
-  // roles::IPADDRESS
-  { ROLE_SYSTEM_IPADDRESS, ROLE_SYSTEM_IPADDRESS },
-
-  // Make up for Gecko roles that we don't have in MSAA or IA2. When in doubt
-  // map them to USE_ROLE_STRING (IA2_ROLE_UNKNOWN).
-
-  // roles::ACCEL_LABEL
-  { ROLE_SYSTEM_STATICTEXT, ROLE_SYSTEM_STATICTEXT },
-
-  // roles::ARROW
-  { ROLE_SYSTEM_INDICATOR, ROLE_SYSTEM_INDICATOR },
-
-  // roles::CANVAS
-  { USE_ROLE_STRING, IA2_ROLE_CANVAS },
-
-  // roles::CHECK_MENU_ITEM
-  { ROLE_SYSTEM_MENUITEM, IA2_ROLE_CHECK_MENU_ITEM },
-
-  // roles::COLOR_CHOOSER
-  { ROLE_SYSTEM_DIALOG, IA2_ROLE_COLOR_CHOOSER },
-
-  // roles::DATE_EDITOR
-  { USE_ROLE_STRING, IA2_ROLE_DATE_EDITOR },
-
-  // roles::DESKTOP_ICON
-  { USE_ROLE_STRING, IA2_ROLE_DESKTOP_ICON },
-
-  // roles::DESKTOP_FRAME
-  { USE_ROLE_STRING, IA2_ROLE_DESKTOP_PANE },
-
-  // roles::DIRECTORY_PANE
-  { USE_ROLE_STRING, IA2_ROLE_DIRECTORY_PANE },
-
-  // roles::FILE_CHOOSER
-  { USE_ROLE_STRING, IA2_ROLE_FILE_CHOOSER },
-
-  // roles::FONT_CHOOSER
-  { USE_ROLE_STRING, IA2_ROLE_FONT_CHOOSER },
-
-  // roles::CHROME_WINDOW
-  { ROLE_SYSTEM_APPLICATION, IA2_ROLE_FRAME },
-
-  // roles::GLASS_PANE
-  { USE_ROLE_STRING, IA2_ROLE_GLASS_PANE },
-
-  // roles::HTML_CONTAINER
-  { USE_ROLE_STRING, IA2_ROLE_UNKNOWN },
-
-  // roles::ICON
-  { ROLE_SYSTEM_PUSHBUTTON, IA2_ROLE_ICON },
-
-  // roles::LABEL
-  { ROLE_SYSTEM_STATICTEXT, IA2_ROLE_LABEL },
-
-  // roles::LAYERED_PANE
-  { USE_ROLE_STRING, IA2_ROLE_LAYERED_PANE },
-
-  // roles::OPTION_PANE
-  { USE_ROLE_STRING, IA2_ROLE_OPTION_PANE },
-
-  // roles::PASSWORD_TEXT
-  { ROLE_SYSTEM_TEXT, ROLE_SYSTEM_TEXT },
-
-  // roles::POPUP_MENU
-  { ROLE_SYSTEM_MENUPOPUP, ROLE_SYSTEM_MENUPOPUP },
-
-  // roles::RADIO_MENU_ITEM
-  { ROLE_SYSTEM_MENUITEM, IA2_ROLE_RADIO_MENU_ITEM },
-
-  // roles::ROOT_PANE
-  { USE_ROLE_STRING, IA2_ROLE_ROOT_PANE },
-
-  // roles::SCROLL_PANE
-  { USE_ROLE_STRING, IA2_ROLE_SCROLL_PANE },
-
-  // roles::SPLIT_PANE
-  { USE_ROLE_STRING, IA2_ROLE_SPLIT_PANE },
-
-  // roles::TABLE_COLUMN_HEADER
-  { ROLE_SYSTEM_COLUMNHEADER, ROLE_SYSTEM_COLUMNHEADER },
-
-  // roles::TABLE_ROW_HEADER
-  { ROLE_SYSTEM_ROWHEADER, ROLE_SYSTEM_ROWHEADER },
-
-  // roles::TEAR_OFF_MENU_ITEM
-  { ROLE_SYSTEM_MENUITEM, IA2_ROLE_TEAR_OFF_MENU },
-
-  // roles::TERMINAL
-  { USE_ROLE_STRING, IA2_ROLE_TERMINAL },
-
-  // roles::TEXT_CONTAINER
-  { USE_ROLE_STRING, IA2_ROLE_TEXT_FRAME },
-
-  // roles::TOGGLE_BUTTON
-  { ROLE_SYSTEM_PUSHBUTTON, IA2_ROLE_TOGGLE_BUTTON },
-
-  // roles::TREE_TABLE
-  { ROLE_SYSTEM_OUTLINE, ROLE_SYSTEM_OUTLINE },
-
-  // roles::VIEWPORT
-  { ROLE_SYSTEM_PANE, IA2_ROLE_VIEW_PORT },
-
-  // roles::HEADER
-  { USE_ROLE_STRING, IA2_ROLE_HEADER },
-
-  // roles::FOOTER
-  { USE_ROLE_STRING, IA2_ROLE_FOOTER },
-
-  // roles::PARAGRAPH
-  { USE_ROLE_STRING, IA2_ROLE_PARAGRAPH },
-
-  // roles::RULER
-  { USE_ROLE_STRING, IA2_ROLE_RULER },
-
-  // roles::AUTOCOMPLETE
-  { ROLE_SYSTEM_COMBOBOX, ROLE_SYSTEM_COMBOBOX },
-
-  // roles::EDITBAR
-  { ROLE_SYSTEM_TEXT, IA2_ROLE_EDITBAR },
-
-  // roles::ENTRY
-  { ROLE_SYSTEM_TEXT, ROLE_SYSTEM_TEXT },
-
-  // roles::CAPTION
-  { USE_ROLE_STRING, IA2_ROLE_CAPTION },
-
-  // roles::DOCUMENT_FRAME
-  { USE_ROLE_STRING, IA2_ROLE_UNKNOWN },
-
-  // roles::HEADING
-  { USE_ROLE_STRING, IA2_ROLE_HEADING },
-
-  // roles::PAGE
-  { USE_ROLE_STRING, IA2_ROLE_PAGE },
-
-  // roles::SECTION
-  { USE_ROLE_STRING, IA2_ROLE_SECTION },
-
-  // roles::REDUNDANT_OBJECT
-  { USE_ROLE_STRING, IA2_ROLE_REDUNDANT_OBJECT },
-
-  // roles::FORM
-  { USE_ROLE_STRING, IA2_ROLE_FORM },
-
-  // roles::IME
-  { USE_ROLE_STRING, IA2_ROLE_INPUT_METHOD_WINDOW },
-
-  // roles::APP_ROOT
-  { ROLE_SYSTEM_APPLICATION, ROLE_SYSTEM_APPLICATION },
-
-  // roles::PARENT_MENUITEM
-  { ROLE_SYSTEM_MENUITEM, ROLE_SYSTEM_MENUITEM },
-
-  // roles::CALENDAR
-  { ROLE_SYSTEM_CLIENT, ROLE_SYSTEM_CLIENT },
-
-  // roles::COMBOBOX_LIST
-  { ROLE_SYSTEM_LIST, ROLE_SYSTEM_LIST },
-
-  // roles::COMBOBOX_OPTION
-  { ROLE_SYSTEM_LISTITEM, ROLE_SYSTEM_LISTITEM },
-
-  // roles::IMAGE_MAP
-  { ROLE_SYSTEM_GRAPHIC, ROLE_SYSTEM_GRAPHIC },
-
-  // roles::OPTION 
-  { ROLE_SYSTEM_LISTITEM, ROLE_SYSTEM_LISTITEM },
-  
-  // roles::RICH_OPTION
-  { ROLE_SYSTEM_LISTITEM, ROLE_SYSTEM_LISTITEM },
-  
-  // roles::LISTBOX
-  { ROLE_SYSTEM_LIST, ROLE_SYSTEM_LIST },
-  
-  // roles::FLAT_EQUATION
-  { ROLE_SYSTEM_EQUATION, ROLE_SYSTEM_EQUATION },
-  
-  // roles::GRID_CELL
-  { ROLE_SYSTEM_CELL, ROLE_SYSTEM_CELL },
-
-  // roles::EMBEDDED_OBJECT
-  { USE_ROLE_STRING, IA2_ROLE_EMBEDDED_OBJECT },
-
-  // roles::NOTE
-  { USE_ROLE_STRING, IA2_ROLE_NOTE },
-
-  // roles::FIGURE
-  { ROLE_SYSTEM_GROUPING, ROLE_SYSTEM_GROUPING },
-
-  // roles::CHECK_RICH_OPTION
-  { ROLE_SYSTEM_CHECKBUTTON, ROLE_SYSTEM_CHECKBUTTON },
-
-  // roles::DEFINITION_LIST
-  { ROLE_SYSTEM_LIST, ROLE_SYSTEM_LIST },
-
-  // roles::TERM
-  { ROLE_SYSTEM_LISTITEM, ROLE_SYSTEM_LISTITEM },
-
-  // roles::DEFINITION
-  { USE_ROLE_STRING, IA2_ROLE_PARAGRAPH },
-
-  // roles::LAST_ENTRY
-  { ROLE_WINDOWS_LAST_ENTRY, ROLE_WINDOWS_LAST_ENTRY }
-};
-
--- a/accessible/src/msaa/nsRootAccessibleWrap.cpp
+++ b/accessible/src/msaa/nsRootAccessibleWrap.cpp
@@ -15,17 +15,17 @@
  * The Original Code is mozilla.org code.
  *
  * The Initial Developer of the Original Code is
  * Mozilla Foundation.
  * Portions created by the Initial Developer are Copyright (C) 2011
  * the Initial Developer. All Rights Reserved.
  *
  * Contributor(s):
- *   Alexander Surkov <surkov.alexander@gmail.com> (origianl author)
+ *   Alexander Surkov <surkov.alexander@gmail.com> (original author)
  *
  * Alternatively, the contents of this file may be used under the terms of
  * either of the GNU General Public License Version 2 or later (the "GPL"),
  * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
  * in which case the provisions of the GPL or the LGPL are applicable instead
  * of those above. If you wish to allow use of your version of this file only
  * under the terms of either the GPL or the LGPL, and not to allow others to
  * use your version of this file under the terms of the MPL, indicate your
--- a/allmakefiles.sh
+++ b/allmakefiles.sh
@@ -84,16 +84,21 @@ if [ "$OS_ARCH" = "WINNT" ]; then
     build/win32/crashinjectdll/Makefile
   "
 fi
 
 if [ "$OS_ARCH" != "WINNT" -a "$OS_ARCH" != "OS2" ]; then
   add_makefiles "
     build/unix/Makefile
   "
+  if [ "$STDCXX_COMPAT" ]; then
+    add_makefiles "
+      build/unix/stdc++compat/Makefile
+    "
+  fi
   if [ "$USE_ELF_HACK" ]; then
     add_makefiles "
       build/unix/elfhack/Makefile
     "
   fi
 fi
 
 if [ "$COMPILER_DEPEND" = "" -a "$MOZ_NATIVE_MAKEDEPEND" = "" ]; then
--- a/b2g/installer/package-manifest.in
+++ b/b2g/installer/package-manifest.in
@@ -133,16 +133,17 @@
 @BINPATH@/components/commandhandler.xpt
 @BINPATH@/components/commandlines.xpt
 @BINPATH@/components/composer.xpt
 @BINPATH@/components/content_base.xpt
 @BINPATH@/components/content_events.xpt
 @BINPATH@/components/content_canvas.xpt
 @BINPATH@/components/content_htmldoc.xpt
 @BINPATH@/components/content_html.xpt
+@BINPATH@/components/content_media.xpt
 @BINPATH@/components/content_xslt.xpt
 @BINPATH@/components/content_xtf.xpt
 @BINPATH@/components/cookie.xpt
 @BINPATH@/components/directory.xpt
 @BINPATH@/components/docshell.xpt
 @BINPATH@/components/dom.xpt
 @BINPATH@/components/dom_apps.xpt
 @BINPATH@/components/dom_base.xpt
--- a/browser/branding/official/configure.sh
+++ b/browser/branding/official/configure.sh
@@ -1,2 +1,1 @@
 MOZ_APP_DISPLAYNAME=Firefox
-MOZ_UA_BUILDID=20100101
--- a/browser/components/migration/src/BrowserProfileMigrators.manifest
+++ b/browser/components/migration/src/BrowserProfileMigrators.manifest
@@ -1,10 +1,14 @@
 component {6F8BB968-C14F-4D6F-9733-6C6737B35DCE} ProfileMigrator.js
 contract @mozilla.org/toolkit/profile-migrator;1 {6F8BB968-C14F-4D6F-9733-6C6737B35DCE}
 component {4cec1de4-1671-4fc3-a53e-6c539dc77a26} ChromeProfileMigrator.js
 contract @mozilla.org/profile/migrator;1?app=browser&type=chrome {4cec1de4-1671-4fc3-a53e-6c539dc77a26}
 component {91185366-ba97-4438-acba-48deaca63386} FirefoxProfileMigrator.js
 contract @mozilla.org/profile/migrator;1?app=browser&type=firefox {91185366-ba97-4438-acba-48deaca63386}
+#ifdef HAS_IE_MIGRATOR
 component {3d2532e3-4932-4774-b7ba-968f5899d3a4} IEProfileMigrator.js
 contract @mozilla.org/profile/migrator;1?app=browser&type=ie {3d2532e3-4932-4774-b7ba-968f5899d3a4}
+#endif
+#ifdef HAS_SAFARI_MIGRATOR
 component {4b609ecf-60b2-4655-9df4-dc149e474da1} SafariProfileMigrator.js
 contract @mozilla.org/profile/migrator;1?app=browser&type=safari {4b609ecf-60b2-4655-9df4-dc149e474da1}
+#endif
--- a/browser/components/migration/src/Makefile.in
+++ b/browser/components/migration/src/Makefile.in
@@ -55,24 +55,26 @@ EXTRA_PP_COMPONENTS = \
   $(NULL)
 
 ifeq ($(OS_ARCH)_$(GNU_CXX),WINNT_)
 CPPSRCS += nsIEHistoryEnumerator.cpp
 
 EXTRA_PP_COMPONENTS += IEProfileMigrator.js \
                        SafariProfileMigrator.js \
                        $(NULL)
+DEFINES += -DHAS_IE_MIGRATOR -DHAS_SAFARI_MIGRATOR
 endif
 
 ifeq (cocoa,$(MOZ_WIDGET_TOOLKIT))
 EXTRA_PP_COMPONENTS += SafariProfileMigrator.js \
                        $(NULL)
+DEFINES += -DHAS_SAFARI_MIGRATOR
 endif
 
-EXTRA_COMPONENTS = \
+EXTRA_PP_COMPONENTS += \
 	BrowserProfileMigrators.manifest \
 	$(NULL)
 
 EXTRA_PP_JS_MODULES = \
 	MigrationUtils.jsm \
 	$(NULL)
 
 include $(topsrcdir)/config/rules.mk
--- a/browser/installer/package-manifest.in
+++ b/browser/installer/package-manifest.in
@@ -137,16 +137,17 @@
 @BINPATH@/components/commandhandler.xpt
 @BINPATH@/components/commandlines.xpt
 @BINPATH@/components/composer.xpt
 @BINPATH@/components/content_base.xpt
 @BINPATH@/components/content_events.xpt
 @BINPATH@/components/content_canvas.xpt
 @BINPATH@/components/content_htmldoc.xpt
 @BINPATH@/components/content_html.xpt
+@BINPATH@/components/content_media.xpt
 @BINPATH@/components/content_xslt.xpt
 @BINPATH@/components/content_xtf.xpt
 @BINPATH@/components/cookie.xpt
 @BINPATH@/components/directory.xpt
 @BINPATH@/components/docshell.xpt
 @BINPATH@/components/dom.xpt
 @BINPATH@/components/dom_apps.xpt
 @BINPATH@/components/dom_base.xpt
--- a/build/unix/Makefile.in
+++ b/build/unix/Makefile.in
@@ -42,20 +42,26 @@ DEPTH		= ../..
 topsrcdir	= @top_srcdir@
 srcdir		= @srcdir@
 VPATH		= @srcdir@
 
 include $(DEPTH)/config/autoconf.mk
 
 MODULE       = build
 
+ifdef STDCXX_COMPAT
+DIRS += stdc++compat
+endif
+
 ifdef USE_ELF_HACK
-DIRS = elfhack
+DIRS += elfhack
 endif
 
 TEST_DIRS = test
 
+SDK_BINARY = run-mozilla.sh
+
 include $(topsrcdir)/config/rules.mk
 
 libs:: $(srcdir)/run-mozilla.sh
 	$(INSTALL) $< $(DIST)/bin
 
 # EOF
--- a/build/unix/elfhack/Makefile.in
+++ b/build/unix/elfhack/Makefile.in
@@ -49,17 +49,16 @@ HOST_PROGRAM = elfhack
 NO_DIST_INSTALL = 1
 NO_PROFILE_GUIDED_OPTIMIZE = 1
 
 VPATH += $(topsrcdir)/build
 
 HOST_CPPSRCS = \
   elf.cpp \
   elfhack.cpp \
-  $(STDCXX_COMPAT) \
   $(NULL)
 
 OS_CXXFLAGS := $(filter-out -fno-exceptions,$(OS_CXXFLAGS)) -fexceptions
 
 ifneq (,$(filter %86,$(TARGET_CPU)))
 CPU := x86
 else
 ifneq (,$(filter arm%,$(TARGET_CPU)))
new file mode 100644
--- /dev/null
+++ b/build/unix/stdc++compat/Makefile.in
@@ -0,0 +1,28 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+DEPTH		= ../../..
+topsrcdir	= @top_srcdir@
+srcdir		= @srcdir@
+VPATH		= @srcdir@
+
+include $(DEPTH)/config/autoconf.mk
+
+MODULE		= build
+LIBRARY_NAME	= stdc++compat
+HOST_LIBRARY_NAME = host_stdc++compat
+FORCE_STATIC_LIB= 1
+STL_FLAGS =
+NO_EXPAND_LIBS = 1
+NO_PROFILE_GUIDED_OPTIMIZE = 1
+
+CPPSRCS = \
+  stdc++compat.cpp \
+  $(NULL)
+
+HOST_CPPSRCS = $(CPPSRCS)
+
+include $(topsrcdir)/config/rules.mk
+
+CXXFLAGS += -DMOZ_LIBSTDCXX_VERSION=$(MOZ_LIBSTDCXX_TARGET_VERSION)
rename from build/stdc++compat.cpp
rename to build/unix/stdc++compat/stdc++compat.cpp
--- a/config/Makefile.in
+++ b/config/Makefile.in
@@ -44,16 +44,20 @@ srcdir		= @srcdir@
 VPATH		= @srcdir@
 
 include $(DEPTH)/config/autoconf.mk
 
 # For sanity's sake, we compile nsinstall without the wrapped system
 # headers, so that we can use it to set up the wrapped system headers.
 VISIBILITY_FLAGS =
 
+# STDCXX_COMPAT is not needed here, and will actually fail because
+# libstdc++-compat is not built yet.
+STDCXX_COMPAT =
+
 HOST_PROGRAM	= nsinstall$(HOST_BIN_SUFFIX)
 
 ifeq (WINNT,$(HOST_OS_ARCH))
 HOST_CSRCS	= nsinstall_win.c
 else
 HOST_CSRCS	= nsinstall.c pathsub.c
 endif
 
--- a/config/autoconf.mk.in
+++ b/config/autoconf.mk.in
@@ -55,17 +55,16 @@ MOZ_APP_PROFILE = @MOZ_APP_PROFILE@
 MOZ_APP_ID = @MOZ_APP_ID@
 MAR_CHANNEL_ID = @MAR_CHANNEL_ID@
 ACCEPTED_MAR_CHANNEL_IDS = @ACCEPTED_MAR_CHANNEL_IDS@
 MOZ_PROFILE_MIGRATOR = @MOZ_PROFILE_MIGRATOR@
 MOZ_EXTENSION_MANAGER = @MOZ_EXTENSION_MANAGER@
 MOZ_APP_UA_NAME = @MOZ_APP_UA_NAME@
 MOZ_APP_VERSION = @MOZ_APP_VERSION@
 MOZ_APP_MAXVERSION = @MOZ_APP_MAXVERSION@
-MOZ_UA_BUILDID = @MOZ_UA_BUILDID@
 MOZ_MACBUNDLE_NAME = @MOZ_MACBUNDLE_NAME@
 MOZ_APP_STATIC_INI = @MOZ_APP_STATIC_INI@
 
 MOZ_PKG_SPECIAL = @MOZ_PKG_SPECIAL@
 
 prefix		= @prefix@
 exec_prefix	= @exec_prefix@
 bindir		= @bindir@
--- a/config/config.mk
+++ b/config/config.mk
@@ -784,16 +784,19 @@ ifdef SYMBOL_ORDER
 EXPAND_MKSHLIB_ARGS += --symbol-order $(SYMBOL_ORDER)
 endif
 EXPAND_MKSHLIB = $(EXPAND_LIBS_EXEC) $(EXPAND_MKSHLIB_ARGS) -- $(MKSHLIB)
 
 ifdef STDCXX_COMPAT
 ifneq ($(OS_ARCH),Darwin)
 CHECK_STDCXX = objdump -p $(1) | grep -e 'GLIBCXX_3\.4\.\(9\|[1-9][0-9]\)' > /dev/null && echo "TEST-UNEXPECTED-FAIL | | We don't want these libstdc++ symbols to be used:" && objdump -T $(1) | grep -e 'GLIBCXX_3\.4\.\(9\|[1-9][0-9]\)' && exit 1 || exit 0
 endif
+
+EXTRA_LIBS += $(call EXPAND_LIBNAME_PATH,stdc++compat,$(DEPTH)/build/unix/stdc++compat)
+HOST_EXTRA_LIBS += $(call EXPAND_LIBNAME_PATH,host_stdc++compat,$(DEPTH)/build/unix/stdc++compat)
 endif
 
 # autoconf.mk sets OBJ_SUFFIX to an error to avoid use before including
 # this file
 OBJ_SUFFIX := $(_OBJ_SUFFIX)
 
 # PGO builds with GCC build objects with instrumentation in a first pass,
 # then objects optimized, without instrumentation, in a second pass. If
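
Taken together with the allmakefiles.sh, build/unix and configure.in hunks, --enable-stdcxx-compat now sets a plain STDCXX_COMPAT=1 instead of injecting stdc++compat.cpp into every CPPSRCS list: the source builds once as the static libraries stdc++compat and host_stdc++compat, and the two EXTRA_LIBS lines above link them into every target and host binary, with CHECK_STDCXX still using objdump to verify that no GLIBCXX_3.4.9-or-newer versioned symbol is imported. A compat library of this kind works by giving the static linker a local definition for helpers that newer libstdc++ headers emit references to, so those references never become versioned imports from the system libstdc++.so. A purely hypothetical sketch of that shape (the symbol below is invented; the real definitions live in build/unix/stdc++compat/stdc++compat.cpp):

  // Hypothetical compat shim: newer libstdc++ headers are assumed to emit a
  // call to this helper, which an older system libstdc++.so does not export.
  // Defining it in a static library linked into every binary resolves the
  // reference at link time, so no GLIBCXX_3.4.x import is generated.
  namespace std {
  void __hypothetical_runtime_helper()
  {
    // Implemented in terms of functionality the old runtime already provides,
    // or as a safe no-op, depending on the symbol being shimmed.
  }
  }
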
--- a/config/rules.mk
+++ b/config/rules.mk
@@ -821,19 +821,16 @@ ifdef GNU_CC
 $(PROGRAM) $(SHARED_LIBRARY) $(LIBRARY): FORCE
 endif
 endif
 
 endif # NO_PROFILE_GUIDED_OPTIMIZE
 
 ##############################################
 
-stdc++compat.$(OBJ_SUFFIX): CXXFLAGS+=-DMOZ_LIBSTDCXX_VERSION=$(MOZ_LIBSTDCXX_TARGET_VERSION)
-host_stdc++compat.$(OBJ_SUFFIX): CXXFLAGS+=-DMOZ_LIBSTDCXX_VERSION=$(MOZ_LIBSTDCXX_HOST_VERSION)
-
 checkout:
 	$(MAKE) -C $(topsrcdir) -f client.mk checkout
 
 clean clobber realclean clobber_all:: $(SUBMAKEFILES)
 	-$(RM) $(ALL_TRASH)
 	-$(RM) -r $(ALL_TRASH_DIRS)
 	$(foreach dir,$(PARALLEL_DIRS) $(DIRS) $(STATIC_DIRS) $(TOOL_DIRS),-$(call SUBMAKE,$@,$(dir)))
 
--- a/configure.in
+++ b/configure.in
@@ -305,16 +305,17 @@ MOZ_ARG_WITH_STRING(gonk-toolchain-prefi
 [  --with-gonk-toolchain-prefix=DIR
                           prefix to gonk toolchain commands],
     gonk_toolchain_prefix=$withval)
 
 if test -n "$gonkdir" ; then
     kernel_name=`uname -s | tr "[[:upper:]]" "[[:lower:]]"`
     android_source="$gonkdir"
     ANDROID_SOURCE="$android_source"
+    ANDROID_NDK="${ANDROID_SOURCE}/ndk"
 
     dnl set up compilers
     AS="$gonk_toolchain_prefix"as
     CC="$gonk_toolchain_prefix"gcc
     CXX="$gonk_toolchain_prefix"g++
     CPP="$gonk_toolchain_prefix"cpp
     LD="$gonk_toolchain_prefix"ld
     AR="$gonk_toolchain_prefix"ar
@@ -7513,17 +7514,17 @@ fi
 
 dnl ========================================================
 dnl = libstdc++ compatibility hacks
 dnl ========================================================
 
 STDCXX_COMPAT=
 MOZ_ARG_ENABLE_BOOL(stdcxx-compat,
 [  --enable-stdcxx-compat  Enable compatibility with older libstdc++],
-    STDCXX_COMPAT=stdc++compat.cpp)
+    STDCXX_COMPAT=1)
 
 AC_SUBST(STDCXX_COMPAT)
 
 if test -n "$STDCXX_COMPAT"; then
    eval $(CXX="$CXX" MACOS_SDK_DIR="$MACOS_SDK_DIR" $PYTHON $_topsrcdir/build/autoconf/libstdcxx.py)
    AC_SUBST(MOZ_LIBSTDCXX_TARGET_VERSION)
    AC_SUBST(MOZ_LIBSTDCXX_HOST_VERSION)
 fi
@@ -8561,18 +8562,16 @@ AC_SUBST(MOZ_EXTENSION_MANAGER)
 AC_DEFINE_UNQUOTED(MOZ_APP_UA_NAME, "$MOZ_APP_UA_NAME")
 AC_SUBST(MOZ_APP_UA_NAME)
 AC_DEFINE_UNQUOTED(MOZ_APP_UA_VERSION, "$MOZ_APP_VERSION")
 AC_SUBST(MOZ_APP_VERSION)
 AC_SUBST(MOZ_APP_MAXVERSION)
 AC_DEFINE_UNQUOTED(MOZ_UA_FIREFOX_VERSION, "$FIREFOX_VERSION")
 AC_DEFINE_UNQUOTED(FIREFOX_VERSION,$FIREFOX_VERSION)
 AC_SUBST(FIREFOX_VERSION)
-AC_DEFINE_UNQUOTED(MOZ_UA_BUILDID, "$MOZ_UA_BUILDID")
-AC_SUBST(MOZ_UA_BUILDID)
 
 # We can't use the static application.ini data when building against
 # a libxul SDK.
 if test -n "$LIBXUL_SDK"; then
     MOZ_APP_STATIC_INI=
 fi
 AC_SUBST(MOZ_APP_STATIC_INI)
 
--- a/content/base/public/nsContentUtils.h
+++ b/content/base/public/nsContentUtils.h
@@ -1368,16 +1368,33 @@ public:
                                           nsISupports* aExtra = nsnull);
 
   /**
    * Returns true if aPrincipal is the system principal.
    */
   static bool IsSystemPrincipal(nsIPrincipal* aPrincipal);
 
   /**
+   * *aResourcePrincipal is a principal describing who may access the contents
+   * of a resource. The resource can only be consumed by a principal that
+   * subsumes *aResourcePrincipal. MAKE SURE THAT NOTHING EVER ACTS WITH THE
+   * AUTHORITY OF *aResourcePrincipal.
+   * It may be null to indicate that the resource has no data from any origin
+   * in it yet and anything may access the resource.
+   * Additional data is being mixed into the resource from aExtraPrincipal
+   * (which may be null; if null, no data is being mixed in and this function
+   * will do nothing). Update *aResourcePrincipal to reflect the new data.
+   * If *aResourcePrincipal subsumes aExtraPrincipal, nothing needs to change,
+   * otherwise *aResourcePrincipal is replaced with the system principal.
+   * Returns true if *aResourcePrincipal changed.
+   */
+  static bool CombineResourcePrincipals(nsCOMPtr<nsIPrincipal>* aResourcePrincipal,
+                                        nsIPrincipal* aExtraPrincipal);
+
+  /**
    * Trigger a link with uri aLinkURI. If aClick is false, this triggers a
    * mouseover on the link, otherwise it triggers a load after doing a
    * security check using aContent's principal.
    *
    * @param aContent the node on which a link was triggered.
    * @param aPresContext the pres context, must be non-null.
    * @param aLinkURI the URI of the link, must be non-null.
    * @param aTargetSpec the target (like target=, may be empty).
--- a/content/base/src/nsContentUtils.cpp
+++ b/content/base/src/nsContentUtils.cpp
@@ -142,16 +142,17 @@ static NS_DEFINE_CID(kXTFServiceCID, NS_
 #include "nsContentErrors.h"
 #include "nsUnicharUtilCIID.h"
 #include "nsINativeKeyBindings.h"
 #include "nsIDOMNSEvent.h"
 #include "nsXULPopupManager.h"
 #include "nsIPermissionManager.h"
 #include "nsIContentPrefService.h"
 #include "nsIScriptObjectPrincipal.h"
+#include "nsNullPrincipal.h"
 #include "nsIRunnable.h"
 #include "nsDOMJSUtils.h"
 #include "nsGenericHTMLElement.h"
 #include "nsAttrValue.h"
 #include "nsReferencedElement.h"
 #include "nsIDragService.h"
 #include "nsIChannelEventSink.h"
 #include "nsIAsyncVerifyRedirectCallback.h"
@@ -4115,17 +4116,17 @@ nsresult
 nsContentUtils::ConvertToPlainText(const nsAString& aSourceBuffer,
                                    nsAString& aResultBuffer,
                                    PRUint32 aFlags,
                                    PRUint32 aWrapCol)
 {
   nsCOMPtr<nsIURI> uri;
   NS_NewURI(getter_AddRefs(uri), "about:blank");
   nsCOMPtr<nsIPrincipal> principal =
-    do_CreateInstance("@mozilla.org/nullprincipal;1");
+    do_CreateInstance(NS_NULLPRINCIPAL_CONTRACTID);
   nsCOMPtr<nsIDOMDocument> domDocument;
   nsresult rv = nsContentUtils::CreateDocument(EmptyString(),
                                                EmptyString(),
                                                nsnull,
                                                uri,
                                                uri,
                                                principal,
                                                nsnull,
@@ -4461,16 +4462,39 @@ nsContentUtils::CheckSecurityBeforeLoad(
 bool
 nsContentUtils::IsSystemPrincipal(nsIPrincipal* aPrincipal)
 {
   bool isSystem;
   nsresult rv = sSecurityManager->IsSystemPrincipal(aPrincipal, &isSystem);
   return NS_SUCCEEDED(rv) && isSystem;
 }
 
+bool
+nsContentUtils::CombineResourcePrincipals(nsCOMPtr<nsIPrincipal>* aResourcePrincipal,
+                                          nsIPrincipal* aExtraPrincipal)
+{
+  if (!aExtraPrincipal) {
+    return false;
+  }
+  if (!*aResourcePrincipal) {
+    *aResourcePrincipal = aExtraPrincipal;
+    return true;
+  }
+  if (*aResourcePrincipal == aExtraPrincipal) {
+    return false;
+  }
+  bool subsumes;
+  if (NS_SUCCEEDED((*aResourcePrincipal)->Subsumes(aExtraPrincipal, &subsumes)) &&
+      subsumes) {
+    return false;
+  }
+  sSecurityManager->GetSystemPrincipal(getter_AddRefs(*aResourcePrincipal));
+  return true;
+}
+
 /* static */
 void
 nsContentUtils::TriggerLink(nsIContent *aContent, nsPresContext *aPresContext,
                             nsIURI *aLinkURI, const nsString &aTargetSpec,
                             bool aClick, bool aIsUserTriggered,
                             bool aIsTrusted)
 {
   NS_ASSERTION(aPresContext, "Need a nsPresContext");
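
CombineResourcePrincipals only ever widens the principal guarding a resource: the first origin to contribute data becomes the resource principal, data from an origin it already subsumes changes nothing, and mixing in data from any other origin replaces it with the system principal, which no content principal subsumes, so only chrome can read the result afterwards. A toy model of that rule (this is not Gecko's nsIPrincipal API, just a self-contained illustration):

  #include <cassert>
  #include <memory>
  #include <string>

  struct Principal {
    std::string origin;                     // "" stands in for the system principal
    bool IsSystem() const { return origin.empty(); }
    bool Subsumes(const Principal& aOther) const {
      return IsSystem() || origin == aOther.origin;
    }
  };

  // Returns true if *aResourcePrincipal changed, mirroring the logic added above.
  static bool CombineResourcePrincipals(std::shared_ptr<Principal>* aResourcePrincipal,
                                        const std::shared_ptr<Principal>& aExtraPrincipal)
  {
    if (!aExtraPrincipal)
      return false;                          // no new data being mixed in
    if (!*aResourcePrincipal) {
      *aResourcePrincipal = aExtraPrincipal; // first contributor wins
      return true;
    }
    if ((*aResourcePrincipal)->Subsumes(*aExtraPrincipal))
      return false;                          // already covered
    *aResourcePrincipal = std::make_shared<Principal>(); // widen to system
    return true;
  }

  int main()
  {
    std::shared_ptr<Principal> resource;
    auto a = std::make_shared<Principal>(Principal{"https://a.example"});
    auto b = std::make_shared<Principal>(Principal{"https://b.example"});
    assert(CombineResourcePrincipals(&resource, a));  // resource -> a.example
    assert(!CombineResourcePrincipals(&resource, a)); // same origin, unchanged
    assert(CombineResourcePrincipals(&resource, b));  // cross-origin mix -> system
    assert(resource->IsSystem());
  }

The media-element changes below rely on the same subsumption rule: anything reading the element's contents must hold a principal that subsumes the one returned by GetCurrentPrincipal().
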
--- a/content/html/content/public/nsHTMLMediaElement.h
+++ b/content/html/content/public/nsHTMLMediaElement.h
@@ -46,30 +46,38 @@
 #include "nsThreadUtils.h"
 #include "nsIDOMRange.h"
 #include "nsCycleCollectionParticipant.h"
 #include "nsILoadGroup.h"
 #include "nsIObserver.h"
 #include "nsAudioStream.h"
 #include "VideoFrameContainer.h"
 #include "mozilla/CORSMode.h"
+#include "nsDOMMediaStream.h"
+#include "mozilla/Mutex.h"
 
 // Define to output information on decoding and painting framerate
 /* #define DEBUG_FRAME_RATE 1 */
 
 typedef PRUint16 nsMediaNetworkState;
 typedef PRUint16 nsMediaReadyState;
 
+namespace mozilla {
+class MediaResource;
+}
+
 class nsHTMLMediaElement : public nsGenericHTMLElement,
                            public nsIObserver
 {
 public:
   typedef mozilla::TimeStamp TimeStamp;
   typedef mozilla::layers::ImageContainer ImageContainer;
   typedef mozilla::VideoFrameContainer VideoFrameContainer;
+  typedef mozilla::MediaStream MediaStream;
+  typedef mozilla::MediaResource MediaResource;
 
   enum CanPlayStatus {
     CANPLAY_NO,
     CANPLAY_MAYBE,
     CANPLAY_YES
   };
 
   mozilla::CORSMode GetCORSMode() {
@@ -247,19 +255,24 @@ public:
   // Is the media element potentially playing as defined by the HTML 5 specification.
   // http://www.whatwg.org/specs/web-apps/current-work/#potentially-playing
   bool IsPotentiallyPlaying() const;
 
   // Has playback ended as defined by the HTML 5 specification.
   // http://www.whatwg.org/specs/web-apps/current-work/#ended
   bool IsPlaybackEnded() const;
 
-  // principal of the currently playing stream
+  // principal of the currently playing resource. Anything accessing the contents
+  // of this element must have a principal that subsumes this principal.
+  // Returns null if nothing is playing.
   already_AddRefed<nsIPrincipal> GetCurrentPrincipal();
 
+  // called to notify that the principal of the decoder's media resource has changed.
+  void NotifyDecoderPrincipalChanged();
+
   // Update the visual size of the media. Called from the decoder on the
   // main thread when/if the size changes.
   void UpdateMediaSize(nsIntSize size);
 
   // Returns the CanPlayStatus indicating if we can handle this
   // MIME type. The MIME type should not include the codecs parameter.
   // If it returns anything other than CANPLAY_NO then it also
   // returns a null-terminated list of supported codecs
@@ -365,18 +378,25 @@ public:
   /**
    * Fires a timeupdate event. If aPeriodic is true, the event will only
    * be fired if we've not fired a timeupdate event (for any reason) in the
    * last 250ms, as required by the spec when the current time is periodically
    * increasing during playback.
    */
   void FireTimeUpdate(bool aPeriodic);
 
+  MediaStream* GetMediaStream()
+  {
+    NS_ASSERTION(mStream, "Don't call this when not playing a stream");
+    return mStream->GetStream();
+  }
+
 protected:
   class MediaLoadListener;
+  class StreamListener;
 
   /**
    * Logs a warning message to the web console to report various failures.
    * aMsg is the localized message identifier, aParams is the parameters to
    * be substituted into the localized message, and aParamCount is the number
    * of parameters in aParams.
    */
   void ReportLoadError(const char* aMsg,
@@ -386,16 +406,34 @@ protected:
   /**
    * Changes mHasPlayedOrSeeked to aValue. If mHasPlayedOrSeeked changes
    * we'll force a reflow so that the video frame gets reflowed to reflect
    * the poster hiding or showing immediately.
    */
   void SetPlayedOrSeeked(bool aValue);
 
   /**
+   * Initialize the media element for playback of mSrcAttrStream
+   */
+  void SetupMediaStreamPlayback();
+  /**
+   * Stop playback on mStream.
+   */
+  void EndMediaStreamPlayback();
+
+  /**
+   * Returns an nsDOMMediaStream containing the played contents of this
+   * element. When aFinishWhenEnded is true, when this element ends playback
+   * we will finish the stream and not play any more into it.
+   * When aFinishWhenEnded is false, ending playback does not finish the stream.
+   * The stream will never finish.
+   */
+  already_AddRefed<nsDOMMediaStream> CaptureStreamInternal(bool aFinishWhenEnded);
+
+  /**
    * Create a decoder for the given aMIMEType. Returns null if we
    * were unable to create the decoder.
    */
   already_AddRefed<nsMediaDecoder> CreateDecoder(const nsACString& aMIMEType);
 
   /**
    * Initialize a decoder as a clone of an existing decoder in another
    * element.
@@ -410,33 +448,40 @@ protected:
    */
   nsresult InitializeDecoderForChannel(nsIChannel *aChannel,
                                        nsIStreamListener **aListener);
 
   /**
    * Finish setting up the decoder after Load() has been called on it.
    * Called by InitializeDecoderForChannel/InitializeDecoderAsClone.
    */
-  nsresult FinishDecoderSetup(nsMediaDecoder* aDecoder);
+  nsresult FinishDecoderSetup(nsMediaDecoder* aDecoder,
+                              MediaResource* aStream,
+                              nsIStreamListener **aListener,
+                              nsMediaDecoder* aCloneDonor);
 
   /**
    * Call this after setting up mLoadingSrc and mDecoder.
    */
   void AddMediaElementToURITable();
   /**
    * Call this before clearing mLoadingSrc.
    */
   void RemoveMediaElementFromURITable();
   /**
    * Call this to find a media element with the same NodePrincipal and mLoadingSrc
    * set to aURI, and with a decoder on which Load() has been called.
    */
   nsHTMLMediaElement* LookupMediaElementURITable(nsIURI* aURI);
 
   /**
+   * Shutdown and clear mDecoder and maintain associated invariants.
+   */
+  void ShutdownDecoder();
+  /**
    * Execute the initial steps of the load algorithm that ensure existing
    * loads are aborted, the element is emptied, and a new load ID is
    * created.
    */
   void AbortExistingLoads();
 
   /**
    * Create a URI for the given aURISpec string.
@@ -583,22 +628,43 @@ protected:
   void GetCurrentSpec(nsCString& aString);
 
   /**
    * Process any media fragment entries in the URI
    */
   void ProcessMediaFragmentURI();
 
   // The current decoder. Load() has been called on this decoder.
+  // At most one of mDecoder and mStream can be non-null.
   nsRefPtr<nsMediaDecoder> mDecoder;
 
   // A reference to the VideoFrameContainer which contains the current frame
   // of video to display.
   nsRefPtr<VideoFrameContainer> mVideoFrameContainer;
 
+  // Holds a reference to the DOM wrapper for the MediaStream that has been
+  // set in the src attribute.
+  nsRefPtr<nsDOMMediaStream> mSrcAttrStream;
+
+  // Holds a reference to the DOM wrapper for the MediaStream that we're
+  // actually playing.
+  // At most one of mDecoder and mStream can be non-null.
+  nsRefPtr<nsDOMMediaStream> mStream;
+
+  // Holds references to the DOM wrappers for the MediaStreams that we're
+  // writing to.
+  struct OutputMediaStream {
+    nsRefPtr<nsDOMMediaStream> mStream;
+    bool mFinishWhenEnded;
+  };
+  nsTArray<OutputMediaStream> mOutputStreams;
+
+  // Holds a reference to the MediaStreamListener attached to mStream. STRONG!
+  StreamListener* mStreamListener;
+
   // Holds a reference to the first channel we open to the media resource.
   // Once the decoder is created, control over the channel passes to the
   // decoder, and we null out this reference. We must store this in case
   // we need to cancel the channel before control of it passes to the decoder.
   nsCOMPtr<nsIChannel> mChannel;
 
   // Error attribute
   nsCOMPtr<nsIDOMMediaError> mError;
@@ -722,19 +788,22 @@ protected:
   // Indicates whether |autoplay| will actually autoplay based on the pref
   // media.autoplay.enabled
   bool mAutoplayEnabled;
 
   // Playback of the video is paused either due to calling the
   // 'Pause' method, or playback not yet having started.
   bool mPaused;
 
-  // True if the sound is muted
+  // True if the sound is muted.
   bool mMuted;
 
+  // True if the sound is being captured.
+  bool mAudioCaptured;
+
   // If TRUE then the media element was actively playing before the currently
   // in progress seeking. If FALSE then the media element is either not seeking
   // or was not actively playing before the current seek. Used to decide whether
   // to raise the 'waiting' event as per 4.7.1.8 in HTML 5 specification.
   bool mPlayingBeforeSeek;
 
   // True iff this element is paused because the document is inactive
   bool mPausedForInactiveDocument;
--- a/content/html/content/src/Makefile.in
+++ b/content/html/content/src/Makefile.in
@@ -144,12 +144,13 @@ INCLUDES	+= \
 		-I$(srcdir)/../../../../layout/style \
 		-I$(srcdir)/../../../../layout/tables \
 		-I$(srcdir)/../../../../layout/xul/base/src \
 		-I$(srcdir)/../../../../layout/generic \
 		-I$(srcdir)/../../../../dom/base \
 		-I$(srcdir)/../../../../editor/libeditor/base \
 		-I$(srcdir)/../../../../editor/libeditor/text \
 		-I$(srcdir) \
+		-I$(topsrcdir)/js/xpconnect/src \
 		-I$(topsrcdir)/xpcom/ds \
 		$(NULL)
 
 DEFINES += -D_IMPL_NS_LAYOUT
--- a/content/html/content/src/nsHTMLAudioElement.cpp
+++ b/content/html/content/src/nsHTMLAudioElement.cpp
@@ -127,33 +127,19 @@ nsHTMLAudioElement::Initialize(nsISuppor
   if (NS_FAILED(rv))
     return rv;
 
   if (argc <= 0) {
     // Nothing more to do here if we don't get any arguments.
     return NS_OK;
   }
 
-  // The only (optional) argument is the url of the audio
-  JSString* jsstr = JS_ValueToString(aContext, argv[0]);
-  if (!jsstr)
-    return NS_ERROR_FAILURE;
-
-  nsDependentJSString str;
-  if (!str.init(aContext, jsstr))
-    return NS_ERROR_FAILURE;
-
-  rv = SetAttr(kNameSpaceID_None, nsGkAtoms::src, str, true);
-  if (NS_FAILED(rv))
-    return rv;
-
-  // We have been specified with a src URL. Begin a load.
-  QueueSelectResourceTask();
-
-  return NS_OK;
+  // The only (optional) argument is the src of the audio (which can
+  // be a URL string or a MediaStream object)
+  return SetSrc(aContext, argv[0]);
 }
 
 NS_IMETHODIMP
 nsHTMLAudioElement::MozSetup(PRUint32 aChannels, PRUint32 aRate)
 {
   // If there is already a src provided, don't setup another stream
   if (mDecoder) {
     return NS_ERROR_FAILURE;
--- a/content/html/content/src/nsHTMLMediaElement.cpp
+++ b/content/html/content/src/nsHTMLMediaElement.cpp
@@ -49,16 +49,17 @@
 #include "nsSize.h"
 #include "nsIFrame.h"
 #include "nsIDocument.h"
 #include "nsIDOMDocument.h"
 #include "nsDOMError.h"
 #include "nsNodeInfoManager.h"
 #include "nsNetUtil.h"
 #include "nsXPCOMStrings.h"
+#include "xpcpublic.h"
 #include "nsThreadUtils.h"
 #include "nsIThreadInternal.h"
 #include "nsContentUtils.h"
 #include "nsIRequest.h"
 
 #include "nsFrameManager.h"
 #include "nsIScriptSecurityManager.h"
 #include "nsIXPConnect.h"
@@ -87,16 +88,19 @@
 #include "nsIAsyncVerifyRedirectCallback.h"
 #include "nsIAppShell.h"
 #include "nsWidgetsCID.h"
 
 #include "nsIPrivateDOMEvent.h"
 #include "nsIDOMNotifyAudioAvailableEvent.h"
 #include "nsMediaFragmentURIParser.h"
 #include "nsURIHashKey.h"
+#include "nsJSUtils.h"
+#include "MediaStreamGraph.h"
+#include "nsDOMMediaStream.h"
 #include "nsIScriptError.h"
 
 #ifdef MOZ_OGG
 #include "nsOggDecoder.h"
 #endif
 #ifdef MOZ_WAVE
 #include "nsWaveDecoder.h"
 #endif
@@ -413,39 +417,96 @@ NS_IMETHODIMP nsHTMLMediaElement::MediaL
 }
 
 NS_IMPL_ADDREF_INHERITED(nsHTMLMediaElement, nsGenericHTMLElement)
 NS_IMPL_RELEASE_INHERITED(nsHTMLMediaElement, nsGenericHTMLElement)
 
 NS_IMPL_CYCLE_COLLECTION_CLASS(nsHTMLMediaElement)
 
 NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN_INHERITED(nsHTMLMediaElement, nsGenericHTMLElement)
+  NS_IMPL_CYCLE_COLLECTION_TRAVERSE_NSCOMPTR(mStream)
+  NS_IMPL_CYCLE_COLLECTION_TRAVERSE_NSCOMPTR(mSrcAttrStream)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE_NSCOMPTR(mSourcePointer)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE_NSCOMPTR(mLoadBlockedDoc)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE_NSCOMPTR(mSourceLoadCandidate)
+  for (PRUint32 i = 0; i < tmp->mOutputStreams.Length(); ++i) {
+    NS_IMPL_CYCLE_COLLECTION_TRAVERSE_NSCOMPTR(mOutputStreams[i].mStream);
+  }
 NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
 
 NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN_INHERITED(nsHTMLMediaElement, nsGenericHTMLElement)
+  if (tmp->mStream) {
+    // Need to EndMediaStreamPlayback to clear mStream and make sure everything
+    // gets unhooked correctly.
+    tmp->EndMediaStreamPlayback();
+  }
+  NS_IMPL_CYCLE_COLLECTION_UNLINK_NSCOMPTR(mSrcAttrStream)
   NS_IMPL_CYCLE_COLLECTION_UNLINK_NSCOMPTR(mSourcePointer)
   NS_IMPL_CYCLE_COLLECTION_UNLINK_NSCOMPTR(mLoadBlockedDoc)
   NS_IMPL_CYCLE_COLLECTION_UNLINK_NSCOMPTR(mSourceLoadCandidate)
+  for (PRUint32 i = 0; i < tmp->mOutputStreams.Length(); ++i) {
+    NS_IMPL_CYCLE_COLLECTION_UNLINK_NSCOMPTR(mOutputStreams[i].mStream);
+  }
 NS_IMPL_CYCLE_COLLECTION_UNLINK_END
 
 NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION_INHERITED(nsHTMLMediaElement)
   NS_INTERFACE_MAP_ENTRY(nsIObserver)
 NS_INTERFACE_MAP_END_INHERITING(nsGenericHTMLElement)
 
 // nsIDOMHTMLMediaElement
-NS_IMPL_URI_ATTR(nsHTMLMediaElement, Src, src)
 NS_IMPL_BOOL_ATTR(nsHTMLMediaElement, Controls, controls)
 NS_IMPL_BOOL_ATTR(nsHTMLMediaElement, Autoplay, autoplay)
 NS_IMPL_BOOL_ATTR(nsHTMLMediaElement, Loop, loop)
 NS_IMPL_BOOL_ATTR(nsHTMLMediaElement, DefaultMuted, muted)
 NS_IMPL_ENUM_ATTR_DEFAULT_VALUE(nsHTMLMediaElement, Preload, preload, NULL)
 
+NS_IMETHODIMP
+nsHTMLMediaElement::GetSrc(JSContext* aCtx, jsval *aParams)
+{
+  if (mSrcAttrStream) {
+    NS_ASSERTION(mSrcAttrStream->GetStream(), "MediaStream should have been set up properly");
+    return nsContentUtils::WrapNative(aCtx, JS_GetGlobalForScopeChain(aCtx),
+                                      mSrcAttrStream, aParams);
+  }
+
+  nsAutoString str;
+  nsresult rv = GetURIAttr(nsGkAtoms::src, nsnull, str);
+  NS_ENSURE_SUCCESS(rv, rv);
+  if (!xpc::StringToJsval(aCtx, str, aParams)) {
+    return NS_ERROR_FAILURE;
+  }
+  return NS_OK;
+}
+
+NS_IMETHODIMP
+nsHTMLMediaElement::SetSrc(JSContext* aCtx, const jsval & aParams)
+{
+  if (JSVAL_IS_OBJECT(aParams)) {
+    nsCOMPtr<nsIDOMMediaStream> stream;
+    stream = do_QueryInterface(nsContentUtils::XPConnect()->
+        GetNativeOfWrapper(aCtx, JSVAL_TO_OBJECT(aParams)));
+    if (stream) {
+      mSrcAttrStream = static_cast<nsDOMMediaStream*>(stream.get());
+      UnsetAttr(kNameSpaceID_None, nsGkAtoms::src, true);
+      Load();
+      return NS_OK;
+    }
+  }
+
+  mSrcAttrStream = nsnull;
+  JSString* jsStr = JS_ValueToString(aCtx, aParams);
+  if (!jsStr)
+    return NS_ERROR_DOM_TYPE_MISMATCH_ERR;
+  nsDependentJSString str;
+  if (!str.init(aCtx, jsStr))
+    return NS_ERROR_DOM_TYPE_MISMATCH_ERR;
+  // Will trigger Load()
+  return SetAttrHelper(nsGkAtoms::src, str);
+}
+
 /* readonly attribute nsIDOMHTMLMediaElement mozAutoplayEnabled; */
 NS_IMETHODIMP nsHTMLMediaElement::GetMozAutoplayEnabled(bool *aAutoplayEnabled)
 {
   *aAutoplayEnabled = mAutoplayEnabled;
 
   return NS_OK;
 }
 
@@ -455,18 +516,21 @@ NS_IMETHODIMP nsHTMLMediaElement::GetErr
   NS_IF_ADDREF(*aError = mError);
 
   return NS_OK;
 }
 
 /* readonly attribute boolean ended; */
 NS_IMETHODIMP nsHTMLMediaElement::GetEnded(bool *aEnded)
 {
-  *aEnded = mDecoder ? mDecoder->IsEnded() : false;
-
+  if (mStream) {
+    *aEnded = GetMediaStream()->IsFinished();
+  } else if (mDecoder) {
+    *aEnded = mDecoder->IsEnded();
+  } else {
+    // No stream and no decoder: report not ended, and make sure the out
+    // param is always initialized.
+    *aEnded = false;
+  }
   return NS_OK;
 }
 
 /* readonly attribute DOMString currentSrc; */
 NS_IMETHODIMP nsHTMLMediaElement::GetCurrentSrc(nsAString & aCurrentSrc)
 {
   nsCAutoString src;
   GetCurrentSpec(src);
@@ -506,32 +570,47 @@ nsHTMLMediaElement::OnChannelRedirect(ns
  
     nsresult rv = http->SetRequestHeader(rangeHdr, rangeVal, false);
     NS_ENSURE_SUCCESS(rv, rv);
   }
  
   return NS_OK;
 }
 
+void nsHTMLMediaElement::ShutdownDecoder()
+{
+  RemoveMediaElementFromURITable();
+  NS_ASSERTION(mDecoder, "Must have decoder to shut down");
+  mDecoder->Shutdown();
+  mDecoder = nsnull;
+  // Discard all output streams. mDecoder->Shutdown() will have finished all
+  // its output streams.
+  // XXX For now we ignore mFinishWhenEnded. We'll fix this later. The
+  // immediate goal is to not crash when reloading a media element with
+  // output streams.
+  mOutputStreams.Clear();
+}
+
 void nsHTMLMediaElement::AbortExistingLoads()
 {
   // Abort any already-running instance of the resource selection algorithm.
   mLoadWaitStatus = NOT_WAITING;
 
   // Set a new load ID. This will cause events which were enqueued
   // with a different load ID to silently be cancelled.
   mCurrentLoadID++;
 
   bool fireTimeUpdate = false;
 
   if (mDecoder) {
-    RemoveMediaElementFromURITable();
     fireTimeUpdate = mDecoder->GetCurrentTime() != 0.0;
-    mDecoder->Shutdown();
-    mDecoder = nsnull;
+    ShutdownDecoder();
+  }
+  if (mStream) {
+    EndMediaStreamPlayback();
   }
   mLoadingSrc = nsnull;
 
   if (mNetworkState == nsIDOMHTMLMediaElement::NETWORK_LOADING ||
       mNetworkState == nsIDOMHTMLMediaElement::NETWORK_IDLE)
   {
     DispatchEvent(NS_LITERAL_STRING("abort"));
   }
@@ -545,16 +624,17 @@ void nsHTMLMediaElement::AbortExistingLo
   mHaveQueuedSelectResource = false;
   mLoadIsSuspended = false;
   mSourcePointer = nsnull;
 
   // TODO: The playback rate must be set to the default playback rate.
 
   if (mNetworkState != nsIDOMHTMLMediaElement::NETWORK_EMPTY) {
     mNetworkState = nsIDOMHTMLMediaElement::NETWORK_EMPTY;
+    NS_ASSERTION(!mDecoder && !mStream, "How did someone setup a new stream/decoder already?");
     ChangeReadyState(nsIDOMHTMLMediaElement::HAVE_NOTHING);
     mPaused = true;
 
     if (fireTimeUpdate) {
       // Since we destroyed the decoder above, the current playback position
       // will now be reported as 0. The playback position was non-zero when
       // we destroyed the decoder, so fire a timeupdate event so that the
       // change will be reflected in the controls.
@@ -667,17 +747,18 @@ void nsHTMLMediaElement::SelectResourceW
 {
   SelectResource();
   mIsRunningSelectResource = false;
   mHaveQueuedSelectResource = false;
 }
 
 void nsHTMLMediaElement::SelectResource()
 {
-  if (!HasAttr(kNameSpaceID_None, nsGkAtoms::src) && !HasSourceChildren(this)) {
+  if (!mSrcAttrStream && !HasAttr(kNameSpaceID_None, nsGkAtoms::src) &&
+      !HasSourceChildren(this)) {
     // The media element has neither a src attribute nor any source
     // element children, abort the load.
     mNetworkState = nsIDOMHTMLMediaElement::NETWORK_EMPTY;
     // This clears mDelayingLoadEvent, so AddRemoveSelfReference will be called
     ChangeDelayLoadStatus(false);
     return;
   }
 
@@ -691,17 +772,19 @@ void nsHTMLMediaElement::SelectResource(
  // Delay setting mIsRunningSelectResource until after UpdatePreloadAction
   // so that we don't lose our state change by bailing out of the preload
   // state update
   UpdatePreloadAction();
   mIsRunningSelectResource = true;
 
   // If we have a 'src' attribute, use that exclusively.
   nsAutoString src;
-  if (GetAttr(kNameSpaceID_None, nsGkAtoms::src, src)) {
+  if (mSrcAttrStream) {
+    SetupMediaStreamPlayback();
+  } else if (GetAttr(kNameSpaceID_None, nsGkAtoms::src, src)) {
     nsCOMPtr<nsIURI> uri;
     nsresult rv = NewURIFromString(src, getter_AddRefs(uri));
     if (NS_SUCCEEDED(rv)) {
       LOG(PR_LOG_DEBUG, ("%p Trying load from src=%s", this, NS_ConvertUTF16toUTF8(src).get()));
       NS_ASSERTION(!mIsLoadingFromSourceChildren,
         "Should think we're not loading from source children by default");
       mLoadingSrc = uri;
       if (mPreloadAction == nsHTMLMediaElement::PRELOAD_NONE) {
@@ -1059,17 +1142,22 @@ nsresult nsHTMLMediaElement::LoadResourc
 nsresult nsHTMLMediaElement::LoadWithChannel(nsIChannel *aChannel,
                                              nsIStreamListener **aListener)
 {
   NS_ENSURE_ARG_POINTER(aChannel);
   NS_ENSURE_ARG_POINTER(aListener);
 
   *aListener = nsnull;
 
+  // Make sure we don't reenter during synchronous abort events.
+  if (mIsRunningLoadMethod)
+    return NS_OK;
+  mIsRunningLoadMethod = true;
   AbortExistingLoads();
+  mIsRunningLoadMethod = false;
 
   nsresult rv = aChannel->GetOriginalURI(getter_AddRefs(mLoadingSrc));
   NS_ENSURE_SUCCESS(rv, rv);
 
   ChangeDelayLoadStatus(true);
   rv = InitializeDecoderForChannel(aChannel, aListener);
   if (NS_FAILED(rv)) {
     ChangeDelayLoadStatus(false);
@@ -1080,17 +1168,22 @@ nsresult nsHTMLMediaElement::LoadWithCha
 
   return NS_OK;
 }
 
 NS_IMETHODIMP nsHTMLMediaElement::MozLoadFrom(nsIDOMHTMLMediaElement* aOther)
 {
   NS_ENSURE_ARG_POINTER(aOther);
 
+  // Make sure we don't reenter during synchronous abort events.
+  if (mIsRunningLoadMethod)
+    return NS_OK;
+  mIsRunningLoadMethod = true;
   AbortExistingLoads();
+  mIsRunningLoadMethod = false;
 
   nsCOMPtr<nsIContent> content = do_QueryInterface(aOther);
   nsHTMLMediaElement* other = static_cast<nsHTMLMediaElement*>(content.get());
   if (!other || !other->mDecoder)
     return NS_OK;
 
   ChangeDelayLoadStatus(true);
 
@@ -1120,24 +1213,36 @@ NS_IMETHODIMP nsHTMLMediaElement::GetSee
   *aSeeking = mDecoder && mDecoder->IsSeeking();
 
   return NS_OK;
 }
 
 /* attribute double currentTime; */
 NS_IMETHODIMP nsHTMLMediaElement::GetCurrentTime(double *aCurrentTime)
 {
-  *aCurrentTime = mDecoder ? mDecoder->GetCurrentTime() : 0.0;
+  if (mStream) {
+    *aCurrentTime = MediaTimeToSeconds(GetMediaStream()->GetCurrentTime());
+  } else if (mDecoder) {
+    *aCurrentTime = mDecoder->GetCurrentTime();
+  } else {
+    *aCurrentTime = 0.0;
+  }
   return NS_OK;
 }
 
 NS_IMETHODIMP nsHTMLMediaElement::SetCurrentTime(double aCurrentTime)
 {
   StopSuspendingAfterFirstFrame();
 
+  if (mStream) {
+    // Streams aren't seekable, so setting the current time isn't supported;
+    // report an invalid-state error.
+    return NS_ERROR_DOM_INVALID_STATE_ERR;
+  }
+
   if (!mDecoder) {
     LOG(PR_LOG_DEBUG, ("%p SetCurrentTime(%f) failed: no decoder", this, aCurrentTime));
     return NS_ERROR_DOM_INVALID_STATE_ERR;
   }
 
   if (mReadyState == nsIDOMHTMLMediaElement::HAVE_NOTHING) {
     LOG(PR_LOG_DEBUG, ("%p SetCurrentTime(%f) failed: no source", this, aCurrentTime));
     return NS_ERROR_DOM_INVALID_STATE_ERR;
@@ -1166,17 +1271,23 @@ NS_IMETHODIMP nsHTMLMediaElement::SetCur
   AddRemoveSelfReference();
 
   return rv;
 }
 
 /* readonly attribute double duration; */
 NS_IMETHODIMP nsHTMLMediaElement::GetDuration(double *aDuration)
 {
-  *aDuration = mDecoder ? mDecoder->GetDuration() : std::numeric_limits<double>::quiet_NaN();
+  if (mStream) {
+    *aDuration = std::numeric_limits<double>::infinity();
+  } else if (mDecoder) {
+    *aDuration = mDecoder->GetDuration();
+  } else {
+    *aDuration = std::numeric_limits<double>::quiet_NaN();
+  }
   return NS_OK;
 }
 
 /* readonly attribute nsIDOMHTMLTimeRanges seekable; */
 NS_IMETHODIMP nsHTMLMediaElement::GetSeekable(nsIDOMTimeRanges** aSeekable)
 {
   nsRefPtr<nsTimeRanges> ranges = new nsTimeRanges();
   if (mDecoder && mReadyState > nsIDOMHTMLMediaElement::HAVE_NOTHING) {
@@ -1208,16 +1319,19 @@ NS_IMETHODIMP nsHTMLMediaElement::Pause(
 
   bool oldPaused = mPaused;
   mPaused = true;
   mAutoplaying = false;
   // We changed mPaused and mAutoplaying which can affect AddRemoveSelfReference
   AddRemoveSelfReference();
 
   if (!oldPaused) {
+    if (mStream) {
+      GetMediaStream()->ChangeExplicitBlockerCount(1);
+    }
     FireTimeUpdate(false);
     DispatchAsyncEvent(NS_LITERAL_STRING("pause"));
   }
 
   return NS_OK;
 }
 
 /* attribute double volume; */
@@ -1233,20 +1347,24 @@ NS_IMETHODIMP nsHTMLMediaElement::SetVol
   if (aVolume < 0.0 || aVolume > 1.0)
     return NS_ERROR_DOM_INDEX_SIZE_ERR;
 
   if (aVolume == mVolume)
     return NS_OK;
 
   mVolume = aVolume;
 
-  if (mDecoder && !mMuted) {
-    mDecoder->SetVolume(mVolume);
-  } else if (mAudioStream && !mMuted) {
-    mAudioStream->SetVolume(mVolume);
+  if (!mMuted) {
+    if (mDecoder) {
+      mDecoder->SetVolume(mVolume);
+    } else if (mAudioStream) {
+      mAudioStream->SetVolume(mVolume);
+    } else if (mStream) {
+      GetMediaStream()->SetAudioOutputVolume(this, float(mVolume));
+    }
   }
 
   DispatchAsyncEvent(NS_LITERAL_STRING("volumechange"));
 
   return NS_OK;
 }
 
 NS_IMETHODIMP
@@ -1303,27 +1421,67 @@ NS_IMETHODIMP nsHTMLMediaElement::GetMut
 
 NS_IMETHODIMP nsHTMLMediaElement::SetMuted(bool aMuted)
 {
   if (aMuted == mMuted)
     return NS_OK;
 
   mMuted = aMuted;
 
+  float effectiveVolume = mMuted ? 0.0f : float(mVolume);
   if (mDecoder) {
-    mDecoder->SetVolume(mMuted ? 0.0 : mVolume);
+    mDecoder->SetVolume(effectiveVolume);
   } else if (mAudioStream) {
-    mAudioStream->SetVolume(mMuted ? 0.0 : mVolume);
+    mAudioStream->SetVolume(effectiveVolume);
+  } else if (mStream) {
+    GetMediaStream()->SetAudioOutputVolume(this, effectiveVolume);
   }
 
   DispatchAsyncEvent(NS_LITERAL_STRING("volumechange"));
 
   return NS_OK;
 }
 
+already_AddRefed<nsDOMMediaStream>
+nsHTMLMediaElement::CaptureStreamInternal(bool aFinishWhenEnded)
+{
+  OutputMediaStream* out = mOutputStreams.AppendElement();
+  out->mStream = nsDOMMediaStream::CreateInputStream();
+  nsRefPtr<nsIPrincipal> principal = GetCurrentPrincipal();
+  out->mStream->CombineWithPrincipal(principal);
+  out->mFinishWhenEnded = aFinishWhenEnded;
+
+  mAudioCaptured = true;
+  if (mDecoder) {
+    mDecoder->SetAudioCaptured(true);
+    mDecoder->AddOutputStream(
+        out->mStream->GetStream()->AsSourceStream(), aFinishWhenEnded);
+  }
+  nsRefPtr<nsDOMMediaStream> result = out->mStream;
+  return result.forget();
+}
+
+NS_IMETHODIMP nsHTMLMediaElement::MozCaptureStream(nsIDOMMediaStream** aStream)
+{
+  *aStream = CaptureStreamInternal(false).get();
+  return NS_OK;
+}
+
+NS_IMETHODIMP nsHTMLMediaElement::MozCaptureStreamUntilEnded(nsIDOMMediaStream** aStream)
+{
+  *aStream = CaptureStreamInternal(true).get();
+  return NS_OK;
+}
+
+NS_IMETHODIMP nsHTMLMediaElement::GetMozAudioCaptured(bool *aCaptured)
+{
+  *aCaptured = mAudioCaptured;
+  return NS_OK;
+}
+
 class MediaElementSetForURI : public nsURIHashKey {
 public:
   MediaElementSetForURI(const nsIURI* aKey) : nsURIHashKey(aKey) {}
   MediaElementSetForURI(const MediaElementSetForURI& toCopy)
     : nsURIHashKey(toCopy), mElements(toCopy.mElements) {}
   nsTArray<nsHTMLMediaElement*> mElements;
 };
 
@@ -1420,16 +1578,17 @@ nsHTMLMediaElement::LookupMediaElementUR
       }
     }
   }
   return nsnull;
 }
 
 nsHTMLMediaElement::nsHTMLMediaElement(already_AddRefed<nsINodeInfo> aNodeInfo)
   : nsGenericHTMLElement(aNodeInfo),
+    mStreamListener(nsnull),
     mCurrentLoadID(0),
     mNetworkState(nsIDOMHTMLMediaElement::NETWORK_EMPTY),
     mReadyState(nsIDOMHTMLMediaElement::HAVE_NOTHING),
     mLoadWaitStatus(NOT_WAITING),
     mVolume(1.0),
     mChannels(0),
     mRate(0),
     mPreloadAction(PRELOAD_UNDEFINED),
@@ -1439,16 +1598,17 @@ nsHTMLMediaElement::nsHTMLMediaElement(a
     mFragmentEnd(-1.0),
     mAllowAudioData(false),
     mBegun(false),
     mLoadedFirstFrame(false),
     mAutoplaying(true),
     mAutoplayEnabled(true),
     mPaused(true),
     mMuted(false),
+    mAudioCaptured(false),
     mPlayingBeforeSeek(false),
     mPausedForInactiveDocument(false),
     mWaitingFired(false),
     mIsRunningLoadMethod(false),
     mIsLoadingFromSourceChildren(false),
     mDelayingLoadEvent(false),
     mIsRunningSelectResource(false),
     mHaveQueuedSelectResource(false),
@@ -1480,18 +1640,20 @@ nsHTMLMediaElement::~nsHTMLMediaElement(
   NS_ASSERTION(!mHasSelfReference,
                "How can we be destroyed if we're still holding a self reference?");
 
   if (mVideoFrameContainer) {
     mVideoFrameContainer->ForgetElement();
   }
   UnregisterFreezableElement();
   if (mDecoder) {
-    RemoveMediaElementFromURITable();
-    mDecoder->Shutdown();
+    ShutdownDecoder();
+  }
+  if (mStream) {
+    EndMediaStreamPlayback();
   }
 
   NS_ASSERTION(MediaElementTableCount(this, mLoadingSrc) == 0,
     "Destroyed media element should no longer be in element table");
 
   if (mChannel) {
     mChannel->Cancel(NS_BINDING_ABORTED);
   }
@@ -1552,16 +1714,19 @@ NS_IMETHODIMP nsHTMLMediaElement::Play()
       NS_ENSURE_SUCCESS(rv, rv);
     }
   }
 
   // TODO: If the playback has ended, then the user agent must set
   // seek to the effective start.
   // TODO: The playback rate must be set to the default playback rate.
   if (mPaused) {
+    if (mStream) {
+      GetMediaStream()->ChangeExplicitBlockerCount(-1);
+    }
     DispatchAsyncEvent(NS_LITERAL_STRING("play"));
     switch (mReadyState) {
     case nsIDOMHTMLMediaElement::HAVE_NOTHING:
       DispatchAsyncEvent(NS_LITERAL_STRING("waiting"));
       break;
     case nsIDOMHTMLMediaElement::HAVE_METADATA:
     case nsIDOMHTMLMediaElement::HAVE_CURRENT_DATA:
       FireTimeUpdate(false);
@@ -2105,25 +2270,17 @@ nsresult nsHTMLMediaElement::InitializeD
   }
 
   MediaResource* resource = originalResource->CloneData(decoder);
   if (!resource) {
     LOG(PR_LOG_DEBUG, ("%p Failed to cloned stream for decoder %p", this, decoder.get()));
     return NS_ERROR_FAILURE;
   }
 
-  mNetworkState = nsIDOMHTMLMediaElement::NETWORK_LOADING;
-
-  nsresult rv = decoder->Load(resource, nsnull, aOriginal);
-  if (NS_FAILED(rv)) {
-    LOG(PR_LOG_DEBUG, ("%p Failed to load decoder/stream for decoder %p", this, decoder.get()));
-    return rv;
-  }
-
-  return FinishDecoderSetup(decoder);
+  return FinishDecoderSetup(decoder, resource, nsnull, aOriginal);
 }
 
 nsresult nsHTMLMediaElement::InitializeDecoderForChannel(nsIChannel *aChannel,
                                                          nsIStreamListener **aListener)
 {
   NS_ASSERTION(mLoadingSrc, "mLoadingSrc must already be set");
   NS_ASSERTION(mDecoder == nsnull, "Shouldn't have a decoder");
 
@@ -2137,78 +2294,211 @@ nsresult nsHTMLMediaElement::InitializeD
     NS_ConvertUTF8toUTF16 mimeUTF16(mimeType);
     const PRUnichar* params[] = { mimeUTF16.get(), src.get() };
     ReportLoadError("MediaLoadUnsupportedMimeType", params, ArrayLength(params));
     return NS_ERROR_FAILURE;
   }
 
   LOG(PR_LOG_DEBUG, ("%p Created decoder %p for type %s", this, decoder.get(), mimeType.get()));
 
-  mNetworkState = nsIDOMHTMLMediaElement::NETWORK_LOADING;
-
   MediaResource* resource = MediaResource::Create(decoder, aChannel);
   if (!resource)
     return NS_ERROR_OUT_OF_MEMORY;
 
-  nsresult rv = decoder->Load(resource, aListener, nsnull);
+  // stream successfully created, the stream now owns the channel.
+  mChannel = nsnull;
+
+  return FinishDecoderSetup(decoder, resource, aListener, nsnull);
+}
+
+nsresult nsHTMLMediaElement::FinishDecoderSetup(nsMediaDecoder* aDecoder,
+                                                MediaResource* aStream,
+                                                nsIStreamListener **aListener,
+                                                nsMediaDecoder* aCloneDonor)
+{
+  mNetworkState = nsIDOMHTMLMediaElement::NETWORK_LOADING;
+
+  // Force a same-origin check before allowing events for this media resource.
+  mMediaSecurityVerified = false;
+
+  // The new stream has not been suspended by us.
+  mPausedForInactiveDocument = false;
+
+  aDecoder->SetAudioCaptured(mAudioCaptured);
+  aDecoder->SetVolume(mMuted ? 0.0 : mVolume);
+  for (PRUint32 i = 0; i < mOutputStreams.Length(); ++i) {
+    OutputMediaStream* ms = &mOutputStreams[i];
+    aDecoder->AddOutputStream(ms->mStream->GetStream()->AsSourceStream(),
+        ms->mFinishWhenEnded);
+  }
+
+  nsresult rv = aDecoder->Load(aStream, aListener, aCloneDonor);
   if (NS_FAILED(rv)) {
+    LOG(PR_LOG_DEBUG, ("%p Failed to load for decoder %p", this, aDecoder));
     return rv;
   }
 
   // Decoder successfully created, the decoder now owns the MediaResource
   // which owns the channel.
   mChannel = nsnull;
 
-  return FinishDecoderSetup(decoder);
-}
-
-nsresult nsHTMLMediaElement::FinishDecoderSetup(nsMediaDecoder* aDecoder)
-{
-  NS_ASSERTION(mLoadingSrc, "mLoadingSrc set up");
-
   mDecoder = aDecoder;
   AddMediaElementToURITable();
-
-  // Force a same-origin check before allowing events for this media resource.
-  mMediaSecurityVerified = false;
-
-  // The new resource has not been suspended by us.
-  mPausedForInactiveDocument = false;
-  // But we may want to suspend it now.
+  NotifyDecoderPrincipalChanged();
+
+  // We may want to suspend the new stream now.
   // This will also do an AddRemoveSelfReference.
   NotifyOwnerDocumentActivityChanged();
 
-  nsresult rv = NS_OK;
-
-  mDecoder->SetVolume(mMuted ? 0.0 : mVolume);
-
   if (!mPaused) {
     SetPlayedOrSeeked(true);
     if (!mPausedForInactiveDocument) {
       rv = mDecoder->Play();
     }
   }
 
   if (OwnerDoc()->HasAudioAvailableListeners()) {
     NotifyAudioAvailableListener();
   }
 
   if (NS_FAILED(rv)) {
-    RemoveMediaElementFromURITable();
-    mDecoder->Shutdown();
-    mDecoder = nsnull;
+    ShutdownDecoder();
   }
 
   NS_ASSERTION(NS_SUCCEEDED(rv) == (MediaElementTableCount(this, mLoadingSrc) == 1),
     "Media element should have single table entry if decode initialized");
 
   mBegun = true;
   return rv;
 }
 
+class nsHTMLMediaElement::StreamListener : public MediaStreamListener {
+public:
+  StreamListener(nsHTMLMediaElement* aElement) :
+    mElement(aElement),
+    mMutex("nsHTMLMediaElement::StreamListener"),
+    mPendingNotifyOutput(false)
+  {}
+  void Forget() { mElement = nsnull; }
+
+  // Main thread
+  void DoNotifyFinished()
+  {
+    if (mElement) {
+      mElement->PlaybackEnded();
+    }
+  }
+  void DoNotifyBlocked()
+  {
+    if (mElement) {
+      mElement->UpdateReadyStateForData(NEXT_FRAME_UNAVAILABLE_BUFFERING);
+    }
+  }
+  void DoNotifyUnblocked()
+  {
+    if (mElement) {
+      mElement->UpdateReadyStateForData(NEXT_FRAME_AVAILABLE);
+    }
+  }
+  void DoNotifyOutput()
+  {
+    {
+      MutexAutoLock lock(mMutex);
+      mPendingNotifyOutput = false;
+    }
+    if (mElement) {
+      mElement->FireTimeUpdate(true);
+    }
+  }
+
+  // These notifications run on the media graph thread so we need to
+  // dispatch events to the main thread.
+  virtual void NotifyBlockingChanged(MediaStreamGraph* aGraph, Blocking aBlocked)
+  {
+    nsCOMPtr<nsIRunnable> event;
+    if (aBlocked == BLOCKED) {
+      event = NS_NewRunnableMethod(this, &StreamListener::DoNotifyBlocked);
+    } else {
+      event = NS_NewRunnableMethod(this, &StreamListener::DoNotifyUnblocked);
+    }
+    aGraph->DispatchToMainThreadAfterStreamStateUpdate(event);
+  }
+  virtual void NotifyFinished(MediaStreamGraph* aGraph)
+  {
+    nsCOMPtr<nsIRunnable> event =
+      NS_NewRunnableMethod(this, &StreamListener::DoNotifyFinished);
+    aGraph->DispatchToMainThreadAfterStreamStateUpdate(event);
+  }
+  virtual void NotifyOutput(MediaStreamGraph* aGraph)
+  {
+    MutexAutoLock lock(mMutex);
+    if (mPendingNotifyOutput)
+      return;
+    mPendingNotifyOutput = true;
+    nsCOMPtr<nsIRunnable> event =
+      NS_NewRunnableMethod(this, &StreamListener::DoNotifyOutput);
+    aGraph->DispatchToMainThreadAfterStreamStateUpdate(event);
+  }
+
+private:
+  nsHTMLMediaElement* mElement;
+
+  Mutex mMutex;
+  bool mPendingNotifyOutput;
+};
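NotifyOutput above queues a new main-thread runnable only when none is already pending, so a burst of graph-thread callbacks collapses into a single timeupdate. Below is a minimal, single-threaded sketch of that flag-based coalescing, not part of the patch; std::mutex and a plain std::queue stand in for mozilla::Mutex and the graph's main-thread dispatch, and all names are illustrative.

#include <cstdio>
#include <functional>
#include <mutex>
#include <queue>

class OutputNotifier {
public:
  // "Graph thread" side: queue a runnable unless one is already pending.
  void NotifyOutput(std::queue<std::function<void()> >& mainThread)
  {
    std::lock_guard<std::mutex> lock(mMutex);
    if (mPendingNotifyOutput) {
      return;                              // coalesce: a runnable is already queued
    }
    mPendingNotifyOutput = true;
    mainThread.push([this] { DoNotifyOutput(); });
  }

private:
  // "Main thread" side: clear the flag first, then do the real work.
  void DoNotifyOutput()
  {
    {
      std::lock_guard<std::mutex> lock(mMutex);
      mPendingNotifyOutput = false;
    }
    std::printf("timeupdate\n");
  }

  std::mutex mMutex;
  bool mPendingNotifyOutput = false;
};

int main()
{
  std::queue<std::function<void()> > mainThread;
  OutputNotifier notifier;
  notifier.NotifyOutput(mainThread);   // queues one runnable
  notifier.NotifyOutput(mainThread);   // coalesced: nothing new is queued
  while (!mainThread.empty()) {        // prints "timeupdate" exactly once
    mainThread.front()();
    mainThread.pop();
  }
  return 0;
}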
+
+void nsHTMLMediaElement::SetupMediaStreamPlayback()
+{
+  NS_ASSERTION(!mStream && !mStreamListener, "Should have been ended already");
+
+  mStream = mSrcAttrStream;
+  // XXX if we ever support capturing the output of a media element which is
+  // playing a stream, we'll need to add a CombineWithPrincipal call here.
+  mStreamListener = new StreamListener(this);
+  NS_ADDREF(mStreamListener);
+  GetMediaStream()->AddListener(mStreamListener);
+  if (mPaused) {
+    GetMediaStream()->ChangeExplicitBlockerCount(1);
+  }
+  if (mPausedForInactiveDocument) {
+    GetMediaStream()->ChangeExplicitBlockerCount(1);
+  }
+  ChangeDelayLoadStatus(false);
+  GetMediaStream()->AddAudioOutput(this);
+  GetMediaStream()->SetAudioOutputVolume(this, float(mMuted ? 0.0 : mVolume));
+  VideoFrameContainer* container = GetVideoFrameContainer();
+  if (container) {
+    GetMediaStream()->AddVideoOutput(container);
+  }
+  ChangeReadyState(nsIDOMHTMLMediaElement::HAVE_METADATA);
+  DispatchAsyncEvent(NS_LITERAL_STRING("durationchange"));
+  DispatchAsyncEvent(NS_LITERAL_STRING("loadedmetadata"));
+  ResourceLoaded();
+}
+
+void nsHTMLMediaElement::EndMediaStreamPlayback()
+{
+  GetMediaStream()->RemoveListener(mStreamListener);
+  // Kill its reference to this element
+  mStreamListener->Forget();
+  NS_RELEASE(mStreamListener); // sets to null
+  GetMediaStream()->RemoveAudioOutput(this);
+  VideoFrameContainer* container = GetVideoFrameContainer();
+  if (container) {
+    GetMediaStream()->RemoveVideoOutput(container);
+  }
+  if (mPaused) {
+    GetMediaStream()->ChangeExplicitBlockerCount(-1);
+  }
+  if (mPausedForInactiveDocument) {
+    GetMediaStream()->ChangeExplicitBlockerCount(-1);
+  }
+  mStream = nsnull;
+}
+
 nsresult nsHTMLMediaElement::NewURIFromString(const nsAutoString& aURISpec, nsIURI** aURI)
 {
   NS_ENSURE_ARG_POINTER(aURI);
 
   *aURI = nsnull;
 
   nsCOMPtr<nsIDocument> doc = OwnerDoc();
 
@@ -2313,19 +2603,17 @@ void nsHTMLMediaElement::NetworkError()
 void nsHTMLMediaElement::DecodeError()
 {
   nsAutoString src;
   GetCurrentSrc(src);
   const PRUnichar* params[] = { src.get() };
   ReportLoadError("MediaLoadDecodeError", params, ArrayLength(params));
 
   if (mDecoder) {
-    RemoveMediaElementFromURITable();
-    mDecoder->Shutdown();
-    mDecoder = nsnull;
+    ShutdownDecoder();
   }
   mLoadingSrc = nsnull;
   if (mIsLoadingFromSourceChildren) {
     mError = nsnull;
     if (mSourceLoadCandidate) {
       DispatchAsyncSourceError(mSourceLoadCandidate);
       QueueLoadFromSourceTask();
     } else {
@@ -2357,21 +2645,22 @@ void nsHTMLMediaElement::Error(PRUint16 
     mNetworkState = nsIDOMHTMLMediaElement::NETWORK_IDLE;
   }
   AddRemoveSelfReference();
   ChangeDelayLoadStatus(false);
 }
 
 void nsHTMLMediaElement::PlaybackEnded()
 {
-  NS_ASSERTION(mDecoder->IsEnded(), "Decoder fired ended, but not in ended state");
-  // We changed the state of IsPlaybackEnded which can affect AddRemoveSelfReference
+  // We changed state which can affect AddRemoveSelfReference
   AddRemoveSelfReference();
 
-  if (mDecoder && mDecoder->IsInfinite()) {
+  NS_ASSERTION(!mDecoder || mDecoder->IsEnded(),
+               "Decoder fired ended, but not in ended state");
+  if (mStream || (mDecoder && mDecoder->IsInfinite())) {
     LOG(PR_LOG_DEBUG, ("%p, got duration by reaching the end of the resource", this));
     DispatchAsyncEvent(NS_LITERAL_STRING("durationchange"));
   }
 
   if (HasAttr(kNameSpaceID_None, nsGkAtoms::loop)) {
     SetCurrentTime(0);
     return;
   }
@@ -2440,16 +2729,21 @@ void nsHTMLMediaElement::UpdateReadyStat
     if (!mWaitingFired && aNextFrame == NEXT_FRAME_UNAVAILABLE_BUFFERING) {
       FireTimeUpdate(false);
       DispatchAsyncEvent(NS_LITERAL_STRING("waiting"));
       mWaitingFired = true;
     }
     return;
   }
 
+  if (mStream) {
+    ChangeReadyState(nsIDOMHTMLMediaElement::HAVE_ENOUGH_DATA);
+    return;
+  }
+
   // Now see if we should set HAVE_ENOUGH_DATA.
   // If it's something we don't know the size of, then we can't
   // make a real estimate, so we go straight to HAVE_ENOUGH_DATA once
   // we've downloaded enough data that our download rate is considered
   // reliable. We have to move to HAVE_ENOUGH_DATA at some point or
   // autoplay elements for live streams will never play. Otherwise we
   // move to HAVE_ENOUGH_DATA if we can play through the entire media
   // without stopping to buffer.
@@ -2539,16 +2833,19 @@ void nsHTMLMediaElement::NotifyAutoplayD
   if (CanActivateAutoplay()) {
     mPaused = false;
     // We changed mPaused which can affect AddRemoveSelfReference
     AddRemoveSelfReference();
 
     if (mDecoder) {
       SetPlayedOrSeeked(true);
       mDecoder->Play();
+    } else if (mStream) {
+      SetPlayedOrSeeked(true);
+      GetMediaStream()->ChangeExplicitBlockerCount(-1);
     }
     DispatchAsyncEvent(NS_LITERAL_STRING("play"));
   }
 }
 
 VideoFrameContainer* nsHTMLMediaElement::GetVideoFrameContainer()
 {
   if (mVideoFrameContainer)
@@ -2659,46 +2956,65 @@ bool nsHTMLMediaElement::IsPlaybackEnded
   //   the current playback position is equal to the effective end of the media resource.
   //   See bug 449157.
   return mNetworkState >= nsIDOMHTMLMediaElement::HAVE_METADATA &&
     mDecoder ? mDecoder->IsEnded() : false;
 }
 
 already_AddRefed<nsIPrincipal> nsHTMLMediaElement::GetCurrentPrincipal()
 {
-  if (!mDecoder)
-    return nsnull;
-
-  return mDecoder->GetCurrentPrincipal();
+  if (mDecoder) {
+    return mDecoder->GetCurrentPrincipal();
+  }
+  if (mStream) {
+    nsRefPtr<nsIPrincipal> principal = mStream->GetPrincipal();
+    return principal.forget();
+  }
+  return nsnull;
+}
+
+void nsHTMLMediaElement::NotifyDecoderPrincipalChanged()
+{
+  for (PRUint32 i = 0; i < mOutputStreams.Length(); ++i) {
+    OutputMediaStream* ms = &mOutputStreams[i];
+    nsRefPtr<nsIPrincipal> principal = GetCurrentPrincipal();
+    ms->mStream->CombineWithPrincipal(principal);
+  }
 }
 
 void nsHTMLMediaElement::UpdateMediaSize(nsIntSize size)
 {
   mMediaSize = size;
 }
 
 void nsHTMLMediaElement::NotifyOwnerDocumentActivityChanged()
 {
   nsIDocument* ownerDoc = OwnerDoc();
   bool pauseForInactiveDocument =
     !ownerDoc->IsActive() || !ownerDoc->IsVisible();
 
   if (pauseForInactiveDocument != mPausedForInactiveDocument) {
     mPausedForInactiveDocument = pauseForInactiveDocument;
-    if (mDecoder) {
-      if (pauseForInactiveDocument) {
+    if (pauseForInactiveDocument) {
+      if (mDecoder) {
         mDecoder->Pause();
         mDecoder->Suspend();
-      } else {
+      } else if (mStream) {
+        GetMediaStream()->ChangeExplicitBlockerCount(1);
+      }
+    } else {
+      if (mDecoder) {
         mDecoder->Resume(false);
-        DispatchPendingMediaEvents();
         if (!mPaused && !mDecoder->IsEnded()) {
           mDecoder->Play();
         }
+      } else if (mStream) {
+        GetMediaStream()->ChangeExplicitBlockerCount(-1);
       }
+      DispatchPendingMediaEvents();
     }
   }
 
   AddRemoveSelfReference();
 }
 
 void nsHTMLMediaElement::AddRemoveSelfReference()
 {
@@ -2710,16 +3026,17 @@ void nsHTMLMediaElement::AddRemoveSelfRe
   nsIDocument* ownerDoc = OwnerDoc();
 
   // See the comment at the top of this file for the explanation of this
   // boolean expression.
   bool needSelfReference = !mShuttingDown &&
     ownerDoc->IsActive() &&
     (mDelayingLoadEvent ||
      (!mPaused && mDecoder && !mDecoder->IsEnded()) ||
+     (!mPaused && mStream && !mStream->IsFinished()) ||
      (mDecoder && mDecoder->IsSeeking()) ||
      CanActivateAutoplay() ||
      mNetworkState == nsIDOMHTMLMediaElement::NETWORK_LOADING);
 
   if (needSelfReference != mHasSelfReference) {
     mHasSelfReference = needSelfReference;
     if (needSelfReference) {
       // The observer service will hold a strong reference to us. This
@@ -2943,17 +3260,17 @@ void nsHTMLMediaElement::SetRequestHeade
 void nsHTMLMediaElement::FireTimeUpdate(bool aPeriodic)
 {
   NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
 
   TimeStamp now = TimeStamp::Now();
   double time = 0;
   GetCurrentTime(&time);
 
-  // Fire a timupdate event if this is not a periodic update (i.e. it's a
+  // Fire a timeupdate event if this is not a periodic update (i.e. it's a
   // timeupdate event mandated by the spec), or if it's a periodic update
   // and TIMEUPDATE_MS has passed since the last timeupdate event fired and
   // the time has changed.
   if (!aPeriodic ||
       (mLastCurrentTime != time &&
        (mTimeUpdateTime.IsNull() ||
         now - mTimeUpdateTime >= TimeDuration::FromMilliseconds(TIMEUPDATE_MS)))) {
     DispatchAsyncEvent(NS_LITERAL_STRING("timeupdate"));
new file mode 100644
--- /dev/null
+++ b/content/media/AudioSegment.cpp
@@ -0,0 +1,193 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "AudioSegment.h"
+
+namespace mozilla {
+
+static PRUint16
+FlipByteOrderIfBigEndian(PRUint16 aValue)
+{
+  PRUint16 s = aValue;
+#if defined(IS_BIG_ENDIAN)
+  s = (s << 8) | (s >> 8);
+#endif
+  return s;
+}
+
+/*
+ * Use "2^N" conversion since it's simple, fast, "bit transparent", used by
+ * many other libraries and apparently behaves reasonably.
+ * http://blog.bjornroche.com/2009/12/int-float-int-its-jungle-out-there.html
+ * http://blog.bjornroche.com/2009/12/linearity-and-dynamic-range-in-int.html
+ */
+static float
+SampleToFloat(float aValue)
+{
+  return aValue;
+}
+static float
+SampleToFloat(PRUint8 aValue)
+{
+  return (aValue - 128)/128.0f;
+}
+static float
+SampleToFloat(PRInt16 aValue)
+{
+  return PRInt16(FlipByteOrderIfBigEndian(aValue))/32768.0f;
+}
+
+static void
+FloatToSample(float aValue, float* aOut)
+{
+  *aOut = aValue;
+}
+static void
+FloatToSample(float aValue, PRUint8* aOut)
+{
+  float v = aValue*128 + 128;
+  float clamped = NS_MAX(0.0f, NS_MIN(255.0f, v));
+  *aOut = PRUint8(clamped);
+}
+static void
+FloatToSample(float aValue, PRInt16* aOut)
+{
+  float v = aValue*32768.0f;
+  float clamped = NS_MAX(-32768.0f, NS_MIN(32767.0f, v));
+  *aOut = PRInt16(FlipByteOrderIfBigEndian(PRInt16(clamped)));
+}
+
+template <class SrcT, class DestT>
+static void
+InterleaveAndConvertBuffer(const SrcT* aSource, PRInt32 aSourceLength,
+                           PRInt32 aLength,
+                           float aVolume,
+                           PRInt32 aChannels,
+                           DestT* aOutput)
+{
+  DestT* output = aOutput;
+  for (PRInt32 i = 0; i < aLength; ++i) {
+    for (PRInt32 channel = 0; channel < aChannels; ++channel) {
+      float v = SampleToFloat(aSource[channel*aSourceLength + i])*aVolume;
+      FloatToSample(v, output);
+      ++output;
+    }
+  }
+}
+
+static void
+InterleaveAndConvertBuffer(const PRInt16* aSource, PRInt32 aSourceLength,
+                           PRInt32 aLength,
+                           float aVolume,
+                           PRInt32 aChannels,
+                           PRInt16* aOutput)
+{
+  PRInt16* output = aOutput;
+  float v = NS_MAX(NS_MIN(aVolume, 1.0f), -1.0f);
+  PRInt32 volume = PRInt32((1 << 16) * v);
+  for (PRInt32 i = 0; i < aLength; ++i) {
+    for (PRInt32 channel = 0; channel < aChannels; ++channel) {
+      PRInt16 s = FlipByteOrderIfBigEndian(aSource[channel*aSourceLength + i]);
+      *output = FlipByteOrderIfBigEndian(PRInt16((PRInt32(s) * volume) >> 16));
+      ++output;
+    }
+  }
+}
+
+template <class SrcT>
+static void
+InterleaveAndConvertBuffer(const SrcT* aSource, PRInt32 aSourceLength,
+                           PRInt32 aLength,
+                           float aVolume,
+                           PRInt32 aChannels,
+                           void* aOutput, nsAudioStream::SampleFormat aOutputFormat)
+{
+  switch (aOutputFormat) {
+  case nsAudioStream::FORMAT_FLOAT32:
+    InterleaveAndConvertBuffer(aSource, aSourceLength, aLength, aVolume,
+                               aChannels, static_cast<float*>(aOutput));
+    break;
+  case nsAudioStream::FORMAT_S16_LE:
+    InterleaveAndConvertBuffer(aSource, aSourceLength, aLength, aVolume,
+                               aChannels, static_cast<PRInt16*>(aOutput));
+    break;
+  case nsAudioStream::FORMAT_U8:
+    InterleaveAndConvertBuffer(aSource, aSourceLength, aLength, aVolume,
+                               aChannels, static_cast<PRUint8*>(aOutput));
+    break;
+  }
+}
+
+static void
+InterleaveAndConvertBuffer(const void* aSource, nsAudioStream::SampleFormat aSourceFormat,
+                           PRInt32 aSourceLength,
+                           PRInt32 aOffset, PRInt32 aLength,
+                           float aVolume,
+                           PRInt32 aChannels,
+                           void* aOutput, nsAudioStream::SampleFormat aOutputFormat)
+{
+  switch (aSourceFormat) {
+  case nsAudioStream::FORMAT_FLOAT32:
+    InterleaveAndConvertBuffer(static_cast<const float*>(aSource) + aOffset, aSourceLength,
+                               aLength,
+                               aVolume,
+                               aChannels,
+                               aOutput, aOutputFormat);
+    break;
+  case nsAudioStream::FORMAT_S16_LE:
+    InterleaveAndConvertBuffer(static_cast<const PRInt16*>(aSource) + aOffset, aSourceLength,
+                               aLength,
+                               aVolume,
+                               aChannels,
+                               aOutput, aOutputFormat);
+    break;
+  case nsAudioStream::FORMAT_U8:
+    InterleaveAndConvertBuffer(static_cast<const PRUint8*>(aSource) + aOffset, aSourceLength,
+                               aLength,
+                               aVolume,
+                               aChannels,
+                               aOutput, aOutputFormat);
+    break;
+  }
+}
+
+void
+AudioSegment::ApplyVolume(float aVolume)
+{
+  for (ChunkIterator ci(*this); !ci.IsEnded(); ci.Next()) {
+    ci->mVolume *= aVolume;
+  }
+}
+
+static const int STATIC_AUDIO_BUFFER_BYTES = 50000;
+
+void
+AudioSegment::WriteTo(nsAudioStream* aOutput)
+{
+  NS_ASSERTION(mChannels == aOutput->GetChannels(), "Wrong number of channels");
+  nsAutoTArray<PRUint8,STATIC_AUDIO_BUFFER_BYTES> buf;
+  PRUint32 frameSize = GetSampleSize(aOutput->GetFormat())*mChannels;
+  for (ChunkIterator ci(*this); !ci.IsEnded(); ci.Next()) {
+    AudioChunk& c = *ci;
+    if (frameSize*c.mDuration > PR_UINT32_MAX) {
+      NS_ERROR("Buffer overflow");
+      return;
+    }
+    buf.SetLength(PRInt32(frameSize*c.mDuration));
+    if (c.mBuffer) {
+      InterleaveAndConvertBuffer(c.mBuffer->Data(), c.mBufferFormat, c.mBufferLength,
+                                 c.mOffset, PRInt32(c.mDuration),
+                                 c.mVolume,
+                                 aOutput->GetChannels(),
+                                 buf.Elements(), aOutput->GetFormat());
+    } else {
+      // Assumes that a bit pattern of zeroes == 0.0f
+      memset(buf.Elements(), 0, buf.Length());
+    }
+    aOutput->Write(buf.Elements(), PRInt32(c.mDuration));
+  }
+}
+
+}
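The "2^N" mapping described above is easiest to see with concrete numbers. Below is a minimal standalone sketch, not part of the patch, that mirrors the SampleToFloat/FloatToSample math using <cstdint> types in place of the NSPR typedefs; it ignores the big-endian byte swap, and the helper names and main() driver are illustrative only.

#include <algorithm>
#include <cstdint>
#include <cstdio>

// Mirrors SampleToFloat for 8-bit unsigned and 16-bit signed samples.
static float U8ToFloat(uint8_t v)  { return (v - 128) / 128.0f; }
static float S16ToFloat(int16_t v) { return v / 32768.0f; }

// Mirrors FloatToSample: scale back up and clamp to the integer range.
static uint8_t FloatToU8(float v)
{
  float scaled = v * 128.0f + 128.0f;
  return uint8_t(std::max(0.0f, std::min(255.0f, scaled)));
}
static int16_t FloatToS16(float v)
{
  float scaled = v * 32768.0f;
  return int16_t(std::max(-32768.0f, std::min(32767.0f, scaled)));
}

int main()
{
  // Silence is 128 in U8 and 0 in S16; both map to 0.0f.
  std::printf("%f %f\n", U8ToFloat(128), S16ToFloat(0));        // 0.000000 0.000000
  // 0.0f maps back to the U8 midpoint.
  std::printf("%d\n", int(FloatToU8(0.0f)));                    // 128
  // Full-scale negative is -1.0f; a 0.5 volume multiplier halves it.
  std::printf("%d\n", FloatToS16(S16ToFloat(-32768) * 0.5f));   // -16384
  return 0;
}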
new file mode 100644
--- /dev/null
+++ b/content/media/AudioSegment.h
@@ -0,0 +1,151 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef MOZILLA_AUDIOSEGMENT_H_
+#define MOZILLA_AUDIOSEGMENT_H_
+
+#include "MediaSegment.h"
+#include "nsISupportsImpl.h"
+#include "nsAudioStream.h"
+#include "SharedBuffer.h"
+
+namespace mozilla {
+
+struct AudioChunk {
+  typedef nsAudioStream::SampleFormat SampleFormat;
+
+  // Generic methods
+  void SliceTo(TrackTicks aStart, TrackTicks aEnd)
+  {
+    NS_ASSERTION(aStart >= 0 && aStart < aEnd && aEnd <= mDuration,
+                 "Slice out of bounds");
+    if (mBuffer) {
+      mOffset += PRInt32(aStart);
+    }
+    mDuration = aEnd - aStart;
+  }
+  TrackTicks GetDuration() const { return mDuration; }
+  bool CanCombineWithFollowing(const AudioChunk& aOther) const
+  {
+    if (aOther.mBuffer != mBuffer) {
+      return false;
+    }
+    if (mBuffer) {
+      NS_ASSERTION(aOther.mBufferFormat == mBufferFormat && aOther.mBufferLength == mBufferLength,
+                   "Wrong metadata about buffer");
+      return aOther.mOffset == mOffset + mDuration && aOther.mVolume == mVolume;
+    }
+    return true;
+  }
+  bool IsNull() const { return mBuffer == nsnull; }
+  void SetNull(TrackTicks aDuration)
+  {
+    mBuffer = nsnull;
+    mDuration = aDuration;
+    mOffset = 0;
+    mVolume = 1.0f;
+  }
+
+  TrackTicks mDuration;           // in frames within the buffer
+  nsRefPtr<SharedBuffer> mBuffer; // null means data is all zeroes
+  PRInt32 mBufferLength;          // number of frames in mBuffer (only meaningful if mBuffer is nonnull)
+  SampleFormat mBufferFormat;     // format of frames in mBuffer (only meaningful if mBuffer is nonnull)
+  PRInt32 mOffset;                // in frames within the buffer (zero if mBuffer is null)
+  float mVolume;                  // volume multiplier to apply (1.0f if mBuffer is null)
+};
+
+/**
+ * A list of audio samples consisting of a sequence of slices of SharedBuffers.
+ * The audio rate is determined by the track, not stored in this class.
+ */
+class AudioSegment : public MediaSegmentBase<AudioSegment, AudioChunk> {
+public:
+  typedef nsAudioStream::SampleFormat SampleFormat;
+
+  static int GetSampleSize(SampleFormat aFormat)
+  {
+    switch (aFormat) {
+    case nsAudioStream::FORMAT_U8: return 1;
+    case nsAudioStream::FORMAT_S16_LE: return 2;
+    case nsAudioStream::FORMAT_FLOAT32: return 4;
+    }
+    NS_ERROR("Bad format");
+    return 0;
+  }
+
+  AudioSegment() : MediaSegmentBase<AudioSegment, AudioChunk>(AUDIO), mChannels(0) {}
+
+  bool IsInitialized()
+  {
+    return mChannels > 0;
+  }
+  void Init(PRInt32 aChannels)
+  {
+    NS_ASSERTION(aChannels > 0, "Bad number of channels");
+    NS_ASSERTION(!IsInitialized(), "Already initialized");
+    mChannels = aChannels;
+  }
+  PRInt32 GetChannels()
+  {
+    NS_ASSERTION(IsInitialized(), "Not initialized");
+    return mChannels;
+  }
+  /**
+   * Returns the format of the first audio frame that has data, or
+   * FORMAT_FLOAT32 if there is none.
+   */
+  SampleFormat GetFirstFrameFormat()
+  {
+    for (ChunkIterator ci(*this); !ci.IsEnded(); ci.Next()) {
+      if (ci->mBuffer) {
+        return ci->mBufferFormat;
+      }
+    }
+    return nsAudioStream::FORMAT_FLOAT32;
+  }
+  void AppendFrames(already_AddRefed<SharedBuffer> aBuffer, PRInt32 aBufferLength,
+                    PRInt32 aStart, PRInt32 aEnd, SampleFormat aFormat)
+  {
+    NS_ASSERTION(mChannels > 0, "Not initialized");
+    AudioChunk* chunk = AppendChunk(aEnd - aStart);
+    chunk->mBuffer = aBuffer;
+    chunk->mBufferFormat = aFormat;
+    chunk->mBufferLength = aBufferLength;
+    chunk->mOffset = aStart;
+    chunk->mVolume = 1.0f;
+  }
+  void ApplyVolume(float aVolume);
+  /**
+   * aOutput must have a matching number of channels, but we will automatically
+   * convert sample formats.
+   */
+  void WriteTo(nsAudioStream* aOutput);
+
+  void AppendFrom(AudioSegment* aSource)
+  {
+    NS_ASSERTION(aSource->mChannels == mChannels, "Non-matching channels");
+    MediaSegmentBase<AudioSegment, AudioChunk>::AppendFrom(aSource);
+  }
+
+  // Segment-generic methods not in MediaSegmentBase
+  void InitFrom(const AudioSegment& aOther)
+  {
+    NS_ASSERTION(mChannels == 0, "Channels already set");
+    mChannels = aOther.mChannels;
+  }
+  void SliceFrom(const AudioSegment& aOther, TrackTicks aStart, TrackTicks aEnd)
+  {
+    InitFrom(aOther);
+    BaseSliceFrom(aOther, aStart, aEnd);
+  }
+  static Type StaticType() { return AUDIO; }
+
+protected:
+  PRInt32 mChannels;
+};
+
+}
+
+#endif /* MOZILLA_AUDIOSEGMENT_H_ */
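WriteTo in AudioSegment.cpp hands interleaved samples to nsAudioStream, while AudioChunk buffers keep each channel contiguous (planar). Below is a minimal standalone sketch, not part of the patch, of that planar-to-interleaved copy using the same channel*aSourceLength + i indexing as InterleaveAndConvertBuffer; the channel count, frame counts and sample values are made up for illustration.

#include <cstdio>
#include <vector>

int main()
{
  const int channels = 2;
  const int sourceLength = 4;   // frames stored per channel in the planar buffer
  const int length = 3;         // frames actually copied out
  const float volume = 1.0f;

  // Planar layout: all of channel 0, then all of channel 1.
  std::vector<float> source = { 0.f, 1.f, 2.f, 3.f,  10.f, 11.f, 12.f, 13.f };
  std::vector<float> output;

  for (int i = 0; i < length; ++i) {
    for (int channel = 0; channel < channels; ++channel) {
      // Same indexing as InterleaveAndConvertBuffer's inner loop.
      output.push_back(source[channel * sourceLength + i] * volume);
    }
  }

  for (float v : output) {
    std::printf("%g ", v);      // prints: 0 10 1 11 2 12
  }
  std::printf("\n");
  return 0;
}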
--- a/content/media/Makefile.in
+++ b/content/media/Makefile.in
@@ -39,39 +39,56 @@ topsrcdir = @top_srcdir@
 srcdir    = @srcdir@
 VPATH     = @srcdir@
 
 include $(DEPTH)/config/autoconf.mk
 
 MODULE = content
 LIBRARY_NAME = gkconmedia_s
 LIBXUL_LIBRARY = 1
+XPIDL_MODULE = content_media
+
+XPIDLSRCS = \
+  nsIDOMMediaStream.idl \
+  $(NULL)
 
 EXPORTS = \
+  AudioSegment.h \
   FileBlockCache.h \
+  MediaResource.h \
+  MediaSegment.h \
+  MediaStreamGraph.h \
   nsAudioAvailableEventManager.h \
-  nsMediaDecoder.h \
-  nsMediaCache.h \
   nsBuiltinDecoder.h \
   nsBuiltinDecoderStateMachine.h \
   nsBuiltinDecoderReader.h \
-  MediaResource.h \
+  nsDOMMediaStream.h \
+  nsMediaCache.h \
+  nsMediaDecoder.h \
+  SharedBuffer.h \
+  StreamBuffer.h \
+  TimeVarying.h \
   VideoFrameContainer.h \
   VideoUtils.h \
+  VideoSegment.h \
   $(NULL)
 
 CPPSRCS = \
+  AudioSegment.cpp \
   FileBlockCache.cpp \
+  MediaResource.cpp \
+  MediaStreamGraph.cpp \
   nsAudioAvailableEventManager.cpp \
-  nsMediaDecoder.cpp \
-  nsMediaCache.cpp \
   nsBuiltinDecoder.cpp \
   nsBuiltinDecoderStateMachine.cpp \
   nsBuiltinDecoderReader.cpp \
-  MediaResource.cpp \
+  nsDOMMediaStream.cpp \
+  nsMediaCache.cpp \
+  nsMediaDecoder.cpp \
+  StreamBuffer.cpp \
   VideoFrameContainer.cpp \
   VideoUtils.cpp \
   $(NULL)
 
 ifdef MOZ_SYDNEYAUDIO
 EXPORTS += \
   nsAudioStream.h \
   $(NULL)
@@ -109,8 +126,10 @@ FORCE_STATIC_LIB = 1
 include $(topsrcdir)/config/config.mk
 include $(topsrcdir)/ipc/chromium/chromium-config.mk
 include $(topsrcdir)/config/rules.mk
 
 INCLUDES += \
   -I$(srcdir)/../base/src \
   -I$(srcdir)/../html/content/src \
   $(NULL)
+
+DEFINES += -D_IMPL_NS_LAYOUT
--- a/content/media/MediaResource.cpp
+++ b/content/media/MediaResource.cpp
@@ -757,16 +757,24 @@ ChannelMediaResource::CacheClientNotifyD
   NS_ASSERTION(NS_IsMainThread(), "Don't call on non-main thread");
   // NOTE: this can be called with the media cache lock held, so don't
   // block or do anything which might try to acquire a lock!
 
   nsCOMPtr<nsIRunnable> event = new DataEnded(mDecoder, aStatus);
   NS_DispatchToMainThread(event, NS_DISPATCH_NORMAL);
 }
 
+void
+ChannelMediaResource::CacheClientNotifyPrincipalChanged()
+{
+  NS_ASSERTION(NS_IsMainThread(), "Don't call on non-main thread");
+
+  mDecoder->NotifyPrincipalChanged();
+}
+
 nsresult
 ChannelMediaResource::CacheClientSeek(PRInt64 aOffset, bool aResume)
 {
   NS_ASSERTION(NS_IsMainThread(), "Don't call on non-main thread");
 
   CloseChannel();
 
   if (aResume) {
--- a/content/media/MediaResource.h
+++ b/content/media/MediaResource.h
@@ -370,16 +370,18 @@ public:
   // Notify that data is available from the cache. This can happen even
   // if this stream didn't read any data, since another stream might have
   // received data for the same resource.
   void CacheClientNotifyDataReceived();
   // Notify that we reached the end of the stream. This can happen even
   // if this stream didn't read any data, since another stream might have
   // received data for the same resource.
   void CacheClientNotifyDataEnded(nsresult aStatus);
+  // Notify that the principal for the cached resource changed.
+  void CacheClientNotifyPrincipalChanged();
 
   // These are called on the main thread by nsMediaCache. These shouldn't block,
   // but they may grab locks --- the media cache is not holding its lock
   // when these are called.
   // Start a new load at the given aOffset. The old load is cancelled
   // and no more data from the old load will be notified via
   // nsMediaCacheStream::NotifyDataReceived/Ended.
   // This can fail.
new file mode 100644
--- /dev/null
+++ b/content/media/MediaSegment.h
@@ -0,0 +1,270 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef MOZILLA_MEDIASEGMENT_H_
+#define MOZILLA_MEDIASEGMENT_H_
+
+#include "nsTArray.h"
+
+namespace mozilla {
+
+/**
+ * We represent media times in 64-bit fixed point. So 1 MediaTime is
+ * 1/(2^MEDIA_TIME_FRAC_BITS) seconds.
+ */
+typedef PRInt64 MediaTime;
+const PRInt64 MEDIA_TIME_FRAC_BITS = 20;
+const PRInt64 MEDIA_TIME_MAX = PR_INT64_MAX;
+
+inline MediaTime MillisecondsToMediaTime(PRInt32 aMS)
+{
+  return (MediaTime(aMS) << MEDIA_TIME_FRAC_BITS)/1000;
+}
+
+inline MediaTime SecondsToMediaTime(double aS)
+{
+  NS_ASSERTION(aS <= (MEDIA_TIME_MAX >> MEDIA_TIME_FRAC_BITS),
+               "Out of range");
+  return MediaTime(aS * (1 << MEDIA_TIME_FRAC_BITS));
+}
+
+inline double MediaTimeToSeconds(MediaTime aTime)
+{
+  return aTime*(1.0/(1 << MEDIA_TIME_FRAC_BITS));
+}
+
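The fixed-point conversions above are easy to sanity-check with concrete values. Below is a minimal standalone sketch, not part of the patch, mirroring MillisecondsToMediaTime and MediaTimeToSeconds with int64_t in place of PRInt64; the main() driver is illustrative only.

#include <cstdint>
#include <cstdio>

typedef int64_t MediaTime;
const int64_t MEDIA_TIME_FRAC_BITS = 20;

MediaTime MillisecondsToMediaTime(int32_t aMS)
{
  return (MediaTime(aMS) << MEDIA_TIME_FRAC_BITS) / 1000;
}

double MediaTimeToSeconds(MediaTime aTime)
{
  return aTime * (1.0 / (1 << MEDIA_TIME_FRAC_BITS));
}

int main()
{
  MediaTime oneSecond = MillisecondsToMediaTime(1000);
  std::printf("%lld\n", (long long)oneSecond);             // 1048576, i.e. 1 << 20
  std::printf("%f\n", MediaTimeToSeconds(oneSecond / 2));  // 0.500000
  return 0;
}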
+/**
+ * A number of ticks at a rate determined by some underlying track (e.g.
+ * audio sample rate). We want to make sure that multiplying TrackTicks by
+ * 2^MEDIA_TIME_FRAC_BITS doesn't overflow, so we set its max accordingly.
+ */
+typedef PRInt64 TrackTicks;
+const PRInt64 TRACK_TICKS_MAX = PR_INT64_MAX >> MEDIA_TIME_FRAC_BITS;
+
+/**
+ * A MediaSegment is a chunk of media data sequential in time. Different
+ * types of data have different subclasses of MediaSegment, all inheriting
+ * from MediaSegmentBase.
+ * All MediaSegment data is timed using TrackTicks. The actual tick rate
+ * is defined on a per-track basis. For some track types, this can be
+ * a fixed constant for all tracks of that type (e.g. 1MHz for video).
+ *
+ * Each media segment defines a concept of "null media data" (e.g. silence
+ * for audio or "no video frame" for video), which can be efficiently
+ * represented. This is used for padding.
+ */
+class MediaSegment {
+public:
+  virtual ~MediaSegment()
+  {
+    MOZ_COUNT_DTOR(MediaSegment);
+  }
+
+  enum Type {
+    AUDIO,
+    VIDEO,
+    TYPE_COUNT
+  };
+
+  /**
+   * Gets the total duration of the segment.
+   */
+  TrackTicks GetDuration() { return mDuration; }
+  Type GetType() { return mType; }
+
+  /**
+   * Create a MediaSegment of the same type.
+   */
+  virtual MediaSegment* CreateEmptyClone() = 0;
+  /**
+   * Moves contents of aSource to the end of this segment.
+   */
+  virtual void AppendFrom(MediaSegment* aSource) = 0;
+  /**
+   * Replace all contents up to aDuration with null data.
+   */
+  virtual void ForgetUpTo(TrackTicks aDuration) = 0;
+  /**
+   * Insert aDuration of null data at the start of the segment.
+   */
+  virtual void InsertNullDataAtStart(TrackTicks aDuration) = 0;
+
+protected:
+  MediaSegment(Type aType) : mDuration(0), mType(aType)
+  {
+    MOZ_COUNT_CTOR(MediaSegment);
+  }
+
+  TrackTicks mDuration; // total of mDurations of all chunks
+  Type mType;
+};
+
+/**
+ * C is the implementation class subclassed from MediaSegmentBase.
+ * C must contain a Chunk class.
+ */
+template <class C, class Chunk> class MediaSegmentBase : public MediaSegment {
+public:
+  virtual MediaSegment* CreateEmptyClone()
+  {
+    C* s = new C();
+    s->InitFrom(*static_cast<C*>(this));
+    return s;
+  }
+
+  /**
+   * Appends the contents of aSource to this segment, clearing aSource.
+   */
+  virtual void AppendFrom(MediaSegmentBase<C, Chunk>* aSource)
+  {
+    mDuration += aSource->mDuration;
+    aSource->mDuration = 0;
+    if (!mChunks.IsEmpty() && !aSource->mChunks.IsEmpty() &&
+        mChunks[mChunks.Length() - 1].CanCombineWithFollowing(aSource->mChunks[0])) {
+      mChunks[mChunks.Length() - 1].mDuration += aSource->mChunks[0].mDuration;
+      aSource->mChunks.RemoveElementAt(0);
+    }
+    mChunks.MoveElementsFrom(aSource->mChunks);
+  }
+  void RemoveLeading(TrackTicks aDuration)
+  {
+    RemoveLeadingInternal(aDuration, 0);
+  }
+  virtual void AppendFrom(MediaSegment* aSource)
+  {
+    NS_ASSERTION(aSource->GetType() == C::StaticType(), "Wrong type");
+    AppendFrom(static_cast<C*>(aSource));
+  }
+  /**
+   * Replace the first aDuration ticks with null media data, because the data
+   * will not be required again.
+   */
+  virtual void ForgetUpTo(TrackTicks aDuration)
+  {
+    if (mChunks.IsEmpty() || aDuration <= 0) {
+      return;
+    }
+    if (mChunks[0].IsNull()) {
+      TrackTicks extraToForget = NS_MIN(aDuration, mDuration) - mChunks[0].GetDuration();
+      if (extraToForget > 0) {
+        RemoveLeadingInternal(extraToForget, 1);
+        mChunks[0].mDuration += extraToForget;
+        mDuration += extraToForget;
+      }
+      return;
+    }
+    RemoveLeading(aDuration);
+    mChunks.InsertElementAt(0)->SetNull(aDuration);
+    mDuration += aDuration;
+  }
+  virtual void InsertNullDataAtStart(TrackTicks aDuration)
+  {
+    if (aDuration <= 0) {
+      return;
+    }
+    if (!mChunks.IsEmpty() && mChunks[0].IsNull()) {
+      mChunks[0].mDuration += aDuration;
+    } else {
+      mChunks.InsertElementAt(0)->SetNull(aDuration);
+    }
+    mDuration += aDuration;
+  }
+
+protected:
+  MediaSegmentBase(Type aType) : MediaSegment(aType) {}
+
+  void BaseSliceFrom(const MediaSegmentBase<C, Chunk>& aOther,
+                     TrackTicks aStart, TrackTicks aEnd)
+  {
+    NS_ASSERTION(aStart >= 0 && aEnd <= aOther.mDuration,
+                 "Slice out of range");
+    TrackTicks offset = 0;
+    for (PRUint32 i = 0; i < aOther.mChunks.Length() && offset < aEnd; ++i) {
+      const Chunk& c = aOther.mChunks[i];
+      TrackTicks start = NS_MAX(aStart, offset);
+      TrackTicks nextOffset = offset + c.GetDuration();
+      TrackTicks end = NS_MIN(aEnd, nextOffset);
+      if (start < end) {
+        mChunks.AppendElement(c)->SliceTo(start - offset, end - offset);
+      }
+      offset = nextOffset;
+    }
+  }
+
+  Chunk* AppendChunk(TrackTicks aDuration)
+  {
+    Chunk* c = mChunks.AppendElement();
+    c->mDuration = aDuration;
+    mDuration += aDuration;
+    return c;
+  }
+
+  Chunk* FindChunkContaining(TrackTicks aOffset, TrackTicks* aStart = nsnull)
+  {
+    if (aOffset < 0) {
+      return nsnull;
+    }
+    TrackTicks offset = 0;
+    for (PRUint32 i = 0; i < mChunks.Length(); ++i) {
+      Chunk& c = mChunks[i];
+      TrackTicks nextOffset = offset + c.GetDuration();
+      if (aOffset < nextOffset) {
+        if (aStart) {
+          *aStart = offset;
+        }
+        return &c;
+      }
+      offset = nextOffset;
+    }
+    return nsnull;
+  }
+
+  Chunk* GetLastChunk()
+  {
+    if (mChunks.IsEmpty()) {
+      return nsnull;
+    }
+    return &mChunks[mChunks.Length() - 1];
+  }
+
+  class ChunkIterator {
+  public:
+    ChunkIterator(MediaSegmentBase<C, Chunk>& aSegment)
+      : mSegment(aSegment), mIndex(0) {}
+    bool IsEnded() { return mIndex >= mSegment.mChunks.Length(); }
+    void Next() { ++mIndex; }
+    Chunk& operator*() { return mSegment.mChunks[mIndex]; }
+    Chunk* operator->() { return &mSegment.mChunks[mIndex]; }
+  private:
+    MediaSegmentBase<C, Chunk>& mSegment;
+    PRUint32 mIndex;
+  };
+
+protected:
+  void RemoveLeadingInternal(TrackTicks aDuration, PRUint32 aStartIndex)
+  {
+    NS_ASSERTION(aDuration >= 0, "Can't remove negative duration");
+    TrackTicks t = aDuration;
+    PRUint32 chunksToRemove = 0;
+    for (PRUint32 i = aStartIndex; i < mChunks.Length() && t > 0; ++i) {
+      Chunk* c = &mChunks[i];
+      if (c->GetDuration() > t) {
+        c->SliceTo(t, c->GetDuration());
+        t = 0;
+        break;
+      }
+      t -= c->GetDuration();
+      chunksToRemove = i + 1 - aStartIndex;
+    }
+    mChunks.RemoveElementsAt(aStartIndex, chunksToRemove);
+    mDuration -= aDuration - t;
+  }
+
+  nsTArray<Chunk> mChunks;
+};
+
+}
+
+#endif /* MOZILLA_MEDIASEGMENT_H_ */
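RemoveLeadingInternal above drops whole chunks covered by the removed span and slices the chunk that straddles its end, which is also what ForgetUpTo relies on. Below is a minimal standalone sketch of that trimming, not part of the patch, over a plain vector of chunk durations; it ignores null-chunk handling and the aStartIndex parameter, and the values are made up for illustration.

#include <cstdint>
#include <cstdio>
#include <vector>

// Trim `ticks` of media from the front of a list of chunk durations.
static void RemoveLeading(std::vector<int64_t>& durations, int64_t ticks)
{
  int64_t t = ticks;
  size_t chunksToRemove = 0;
  for (size_t i = 0; i < durations.size() && t > 0; ++i) {
    if (durations[i] > t) {
      durations[i] -= t;     // the standalone equivalent of SliceTo(t, duration)
      t = 0;
      break;
    }
    t -= durations[i];
    chunksToRemove = i + 1;  // this chunk is entirely inside the removed span
  }
  durations.erase(durations.begin(), durations.begin() + chunksToRemove);
}

int main()
{
  std::vector<int64_t> durations = { 100, 50, 200 };
  RemoveLeading(durations, 120);           // drops the 100-tick chunk, trims 20 off the next
  for (int64_t d : durations) {
    std::printf("%lld ", (long long)d);    // prints: 30 200
  }
  std::printf("\n");
  return 0;
}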
new file mode 100644
--- /dev/null
+++ b/content/media/MediaStreamGraph.cpp
@@ -0,0 +1,1963 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "MediaStreamGraph.h"
+
+#include "mozilla/Monitor.h"
+#include "mozilla/TimeStamp.h"
+#include "AudioSegment.h"
+#include "VideoSegment.h"
+#include "nsContentUtils.h"
+#include "nsIAppShell.h"
+#include "nsIObserver.h"
+#include "nsServiceManagerUtils.h"
+#include "nsWidgetsCID.h"
+#include "nsXPCOMCIDInternal.h"
+#include "prlog.h"
+#include "VideoUtils.h"
+
+using namespace mozilla::layers;
+
+namespace mozilla {
+
+namespace {
+
+#ifdef PR_LOGGING
+PRLogModuleInfo* gMediaStreamGraphLog;
+#define LOG(type, msg) PR_LOG(gMediaStreamGraphLog, type, msg)
+#else
+#define LOG(type, msg)
+#endif
+
+/**
+ * Assume we can run an iteration of the MediaStreamGraph loop in this much time
+ * or less.
+ * We try to run the control loop at this rate.
+ */
+const int MEDIA_GRAPH_TARGET_PERIOD_MS = 10;
+
+/**
+ * Assume that we might miss our scheduled wakeup of the MediaStreamGraph by
+ * this much.
+ */
+const int SCHEDULE_SAFETY_MARGIN_MS = 10;
+
+/**
+ * Try to have this much audio buffered in streams and queued to the hardware.
+ * The maximum delay to the end of the next control loop
+ * is 2*MEDIA_GRAPH_TARGET_PERIOD_MS + SCHEDULE_SAFETY_MARGIN_MS.
+ * There is no point in buffering more audio than this in a stream at any
+ * given time (until we add processing).
+ * This is not optimal yet.
+ */
+const int AUDIO_TARGET_MS = 2*MEDIA_GRAPH_TARGET_PERIOD_MS +
+    SCHEDULE_SAFETY_MARGIN_MS;
+
+/**
+ * Try to have this much video buffered. Video frames are set
+ * near the end of the iteration of the control loop. The maximum delay
+ * to the setting of the next video frame is 2*MEDIA_GRAPH_TARGET_PERIOD_MS +
+ * SCHEDULE_SAFETY_MARGIN_MS. This is not optimal yet.
+ */
+const int VIDEO_TARGET_MS = 2*MEDIA_GRAPH_TARGET_PERIOD_MS +
+    SCHEDULE_SAFETY_MARGIN_MS;
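+
+// With the values above, AUDIO_TARGET_MS and VIDEO_TARGET_MS both work out
+// to 2*10 + 10 = 30 ms of media buffered ahead of the current time.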
+
+/**
+ * A per-stream update message passed from the media graph thread to the
+ * main thread.
+ */
+struct StreamUpdate {
+  PRInt64 mGraphUpdateIndex;
+  nsRefPtr<MediaStream> mStream;
+  StreamTime mNextMainThreadCurrentTime;
+  bool mNextMainThreadFinished;
+};
+
+/**
+ * This represents a message passed from the main thread to the graph thread.
+ * A ControlMessage always references a particular affected stream.
+ */
+class ControlMessage {
+public:
+  ControlMessage(MediaStream* aStream) : mStream(aStream)
+  {
+    MOZ_COUNT_CTOR(ControlMessage);
+  }
+  // All these run on the graph thread
+  virtual ~ControlMessage()
+  {
+    MOZ_COUNT_DTOR(ControlMessage);
+  }
+  // Executed before we know what the action time for this message will be.
+  // Call NoteStreamAffected on the stream whose output will be
+  // modified by this message. Default implementation calls
+  // NoteStreamAffected(mStream).
+  virtual void UpdateAffectedStream();
+  // Executed after we know what the action time for this message will be.
+  virtual void Process() {}
+  // When we're shutting down the application, most messages are ignored but
+  // some cleanup messages should still be processed (on the main thread).
+  virtual void ProcessDuringShutdown() {}
+
+protected:
+  // We do not hold a reference to mStream. The main thread will be holding
+  // a reference to the stream while this message is in flight. The last message
+  // referencing a stream is the Destroy message for that stream.
+  MediaStream* mStream;
+};
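+
+// A minimal sketch of a ControlMessage subclass (illustrative only; the real
+// stream-specific messages, such as CreateMessage further below, follow the
+// same shape):
+//
+//   class ExampleMessage : public ControlMessage {   // hypothetical name
+//   public:
+//     ExampleMessage(MediaStream* aStream) : ControlMessage(aStream) {}
+//     virtual void UpdateAffectedStream()
+//     {
+//       // Runs on the graph thread before the action time is chosen.
+//       mStream->GraphImpl()->NoteStreamAffected(mStream);
+//     }
+//     virtual void Process()
+//     {
+//       // Runs on the graph thread once the action time is known.
+//     }
+//   };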
+
+}
+
+/**
+ * The implementation of a media stream graph. This class is private to this
+ * file. It's not in the anonymous namespace because MediaStream needs to
+ * be able to friend it.
+ *
+ * Currently we only have one per process.
+ */
+class MediaStreamGraphImpl : public MediaStreamGraph {
+public:
+  MediaStreamGraphImpl();
+  ~MediaStreamGraphImpl()
+  {
+    NS_ASSERTION(IsEmpty(),
+                 "All streams should have been destroyed by messages from the main thread");
+    LOG(PR_LOG_DEBUG, ("MediaStreamGraph %p destroyed", this));
+  }
+
+  // Main thread only.
+  /**
+   * This runs every time we need to sync state from the media graph thread
+   * to the main thread while the main thread is not in the middle
+   * of a script. It runs during a "stable state" (per HTML5) or during
+   * an event posted to the main thread.
+   */
+  void RunInStableState();
+  /**
+   * Ensure a runnable to run RunInStableState is posted to the appshell to
+   * run at the next stable state (per HTML5).
+   * See EnsureStableStateEventPosted.
+   */
+  void EnsureRunInStableState();
+  /**
+   * Called to apply a StreamUpdate to its stream.
+   */
+  void ApplyStreamUpdate(StreamUpdate* aUpdate);
+  /**
+   * Append a ControlMessage to the message queue. This queue is drained
+   * during RunInStableState; the messages will run on the graph thread.
+   */
+  void AppendMessage(ControlMessage* aMessage);
+  /**
+   * Make this MediaStreamGraph enter forced-shutdown state. This state
+   * will be noticed by the media graph thread, which will shut down all streams
+   * and other state controlled by the media graph thread.
+   * This is called during application shutdown.
+   */
+  void ForceShutDown();
+  /**
+   * Shutdown() this MediaStreamGraph's threads and return when they've shut down.
+   */
+  void ShutdownThreads();
+
+  // The following methods run on the graph thread (or possibly the main thread if
+  // mLifecycleState > LIFECYCLE_RUNNING)
+  /**
+   * Runs main control loop on the graph thread. Normally a single invocation
+   * of this runs for the entire lifetime of the graph thread.
+   */
+  void RunThread();
+  /**
+   * Call this to indicate that another iteration of the control loop is
+   * required on its regular schedule. The monitor must not be held.
+   */
+  void EnsureNextIteration();
+  /**
+   * As above, but with the monitor already held.
+   */
+  void EnsureNextIterationLocked(MonitorAutoLock& aLock);
+  /**
+   * Call this to indicate that another iteration of the control loop is
+   * required immediately. The monitor must already be held.
+   */
+  void EnsureImmediateWakeUpLocked(MonitorAutoLock& aLock);
+  /**
+   * Ensure there is an event posted to the main thread to run RunInStableState.
+   * mMonitor must be held.
+   * See EnsureRunInStableState
+   */
+  void EnsureStableStateEventPosted();
+  /**
+   * Generate messages to the main thread to update it for all state changes.
+   * mMonitor must be held.
+   */
+  void PrepareUpdatesToMainThreadState();
+  // The following methods are the various stages of RunThread processing.
+  /**
+   * Compute a new current time for the graph and advance all on-graph-thread
+   * state to the new current time.
+   */
+  void UpdateCurrentTime();
+  /**
+   * Update mLastActionTime to the time at which the current set of messages
+   * will take effect.
+   */
+  void ChooseActionTime();
+  /**
+   * Extract any state updates pending in aStream, and apply them.
+   */
+  void ExtractPendingInput(SourceMediaStream* aStream);
+  /**
+   * Update "have enough data" flags in aStream.
+   */
+  void UpdateBufferSufficiencyState(SourceMediaStream* aStream);
+  /**
+   * Compute the blocking states of streams from mBlockingDecisionsMadeUntilTime
+   * until the desired future time (determined by heuristic).
+   * Updates mBlockingDecisionsMadeUntilTime and sets MediaStream::mBlocked
+   * for all streams.
+   */
+  void RecomputeBlocking();
+  // The following methods are used to help RecomputeBlocking.
+  /**
+   * Mark a stream blocked at time aTime. If this results in decisions that need
+   * to be revisited at some point in the future, *aEnd will be reduced to the
+   * first time in the future to recompute those decisions.
+   */
+  void MarkStreamBlocked(MediaStream* aStream, GraphTime aTime, GraphTime* aEnd);
+  /**
+   * Recompute blocking for all streams for the interval starting at aTime.
+   * If this results in decisions that need to be revisited at some point
+   * in the future, *aEnd will be reduced to the first time in the future to
+   * recompute those decisions.
+   */
+  void RecomputeBlockingAt(GraphTime aTime, GraphTime aEndBlockingDecisions,
+                           GraphTime* aEnd);
+  /**
+   * Returns true if aStream will underrun at aTime for its own playback.
+   * aEndBlockingDecisions is when we plan to stop making blocking decisions.
+   * *aEnd will be reduced to the first time in the future to recompute these
+   * decisions.
+   */
+  bool WillUnderrun(MediaStream* aStream, GraphTime aTime,
+                    GraphTime aEndBlockingDecisions, GraphTime* aEnd);
+  /**
+   * Return true if there is an explicit blocker set from the current time
+   * indefinitely far into the future.
+   */
+  bool IsAlwaysExplicitlyBlocked(MediaStream* aStream);
+  /**
+   * Given a graph time aTime, convert it to a stream time taking into
+   * account the time during which aStream is scheduled to be blocked.
+   */
+  StreamTime GraphTimeToStreamTime(MediaStream* aStream, GraphTime aTime);
+  enum {
+    INCLUDE_TRAILING_BLOCKED_INTERVAL = 0x01
+  };
+  /**
+   * Given a stream time aTime, convert it to a graph time taking into
+   * account the time during which aStream is scheduled to be blocked.
+   * aTime must be <= mBlockingDecisionsMadeUntilTime since blocking decisions
+   * are only known up to that point.
+   * If aTime is exactly at the start of a blocked interval, then the blocked
+   * interval is included in the time returned if and only if
+   * aFlags includes INCLUDE_TRAILING_BLOCKED_INTERVAL.
+   */
+  GraphTime StreamTimeToGraphTime(MediaStream* aStream, StreamTime aTime,
+                                  PRUint32 aFlags = 0);
+  /**
+   * Get the current audio position of the stream's audio output.
+   */
+  GraphTime GetAudioPosition(MediaStream* aStream);
+  /**
+   * If aStream needs an audio stream but doesn't have one, create it.
+   * If aStream doesn't need an audio stream but has one, destroy it.
+   */
+  void CreateOrDestroyAudioStream(GraphTime aAudioOutputStartTime,
+                                  MediaStream* aStream);
+  /**
+   * Update aStream->mFirstActiveTracks.
+   */
+  void UpdateFirstActiveTracks(MediaStream* aStream);
+  /**
+   * Queue audio (mix of stream audio and silence for blocked intervals)
+   * to the audio output stream.
+   */
+  void PlayAudio(MediaStream* aStream, GraphTime aFrom, GraphTime aTo);
+  /**
+   * Set the correct current video frame for stream aStream.
+   */
+  void PlayVideo(MediaStream* aStream);
+  /**
+   * No more data will be forthcoming for aStream. The stream will end
+   * at the current buffer end point. The StreamBuffer's tracks must be
+   * explicitly set to finished by the caller.
+   */
+  void FinishStream(MediaStream* aStream);
+  /**
+   * Compute how much stream data we would like to buffer for aStream.
+   */
+  StreamTime GetDesiredBufferEnd(MediaStream* aStream);
+  /**
+   * Returns true when there are no active streams.
+   */
+  bool IsEmpty() { return mStreams.IsEmpty(); }
+
+  // For use by control messages
+  /**
+   * Identify which graph update index we are currently processing.
+   */
+  PRInt64 GetProcessingGraphUpdateIndex() { return mProcessingGraphUpdateIndex; }
+  /**
+   * Marks aStream as affected by a change in its output at desired time aTime
+   * (in the timeline of aStream). The change may not actually happen at this time,
+   * it may be delayed until later if there is buffered data we can't change.
+   */
+  void NoteStreamAffected(MediaStream* aStream, double aTime);
+  /**
+   * Marks aStream as affected by a change in its output at the earliest
+   * possible time.
+   */
+  void NoteStreamAffected(MediaStream* aStream);
+  /**
+   * Add aStream to the graph and initialize its graph-specific state.
+   */
+  void AddStream(MediaStream* aStream);
+  /**
+   * Remove aStream from the graph. Ensures that pending messages about the
+   * stream back to the main thread are flushed.
+   */
+  void RemoveStream(MediaStream* aStream);
+
+  /**
+   * Compute the earliest time at which an action is allowed to occur on any
+   * stream. Actions cannot be earlier than the previous action time, and
+   * cannot affect already-committed blocking decisions (and associated
+   * buffered audio).
+   */
+  GraphTime GetEarliestActionTime()
+  {
+    return NS_MAX(mCurrentTime, NS_MAX(mLastActionTime, mBlockingDecisionsMadeUntilTime));
+  }
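+  // Worked example (illustrative numbers only): with mCurrentTime = 2.0s,
+  // mLastActionTime = 1.5s and mBlockingDecisionsMadeUntilTime = 2.03s, the
+  // earliest permitted action time is 2.03s, since already-committed blocking
+  // decisions (and the audio buffered for them) cannot be changed.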
+
+  // Data members
+
+  /**
+   * Media graph thread.
+   * Readonly after initialization on the main thread.
+   */
+  nsCOMPtr<nsIThread> mThread;
+
+  // The following state is managed on the graph thread only, unless
+  // mLifecycleState > LIFECYCLE_RUNNING in which case the graph thread
+  // is not running and this state can be used from the main thread.
+
+  nsTArray<nsRefPtr<MediaStream> > mStreams;
+  /**
+   * The time the last action was deemed to have occurred. This could be
+   * later than mCurrentTime if actions have to be delayed during data
+   * buffering, or before mCurrentTime if mCurrentTime has advanced since
+   * the last action happened. In ControlMessage::Process calls,
+   * mLastActionTime has always been updated to be >= mCurrentTime.
+   */
+  GraphTime mLastActionTime;
+  /**
+   * The current graph time for the current iteration of the RunThread control
+   * loop.
+   */
+  GraphTime mCurrentTime;
+  /**
+   * Blocking decisions have been made up to this time. We also buffer audio
+   * up to this time.
+   */
+  GraphTime mBlockingDecisionsMadeUntilTime;
+  /**
+   * This is only used for logging.
+   */
+  TimeStamp mInitialTimeStamp;
+  /**
+   * The real timestamp of the latest run of UpdateCurrentTime.
+   */
+  TimeStamp mCurrentTimeStamp;
+  /**
+   * Which update batch we are currently processing.
+   */
+  PRInt64 mProcessingGraphUpdateIndex;
+
+  // mMonitor guards the data below.
+  // MediaStreamGraph normally does its work without holding mMonitor, so it is
+  // not safe to just grab mMonitor from some thread and start monkeying with
+  // the graph. Instead, communicate with the graph thread using provided
+  // mechanisms such as the ControlMessage queue.
+  Monitor mMonitor;
+
+  // Data guarded by mMonitor (must always be accessed with mMonitor held,
+  // regardless of the value of mLifecycleState).
+
+  /**
+   * State to copy to main thread
+   */
+  nsTArray<StreamUpdate> mStreamUpdates;
+  /**
+   * Runnables to run after the next update to main thread state.
+   */
+  nsTArray<nsCOMPtr<nsIRunnable> > mUpdateRunnables;
+  struct MessageBlock {
+    PRInt64 mGraphUpdateIndex;
+    nsTArray<nsAutoPtr<ControlMessage> > mMessages;
+  };
+  /**
+   * A list of batches of messages to process. Each batch is processed
+   * as an atomic unit.
+   */
+  nsTArray<MessageBlock> mMessageQueue;
+  /**
+   * This enum specifies where this graph is in its lifecycle. This is used
+   * to control shutdown.
+   * Shutdown is tricky because it can happen in two different ways:
+   * 1) Shutdown due to inactivity. RunThread() detects that it has no
+   * pending messages and no streams, and exits. The next RunInStableState()
+   * checks if there are new pending messages from the main thread (true only
+   * if new stream creation raced with shutdown); if there are, it revives
+   * RunThread(), otherwise it commits to shutting down the graph. New stream
+   * creation after this point will create a new graph. An async event is
+   * dispatched to Shutdown() the graph's threads and then delete the graph
+   * object.
+   * 2) Forced shutdown at application shutdown. A flag is set, RunThread()
+   * detects the flag and exits, the next RunInStableState() detects the flag,
+   * and dispatches the async event to Shutdown() the graph's threads. However,
+   * the graph object is not deleted. New messages for the graph are processed
+   * synchronously on the main thread if necessary. When the last stream is
+   * destroyed, the graph object is deleted.
+   */
+  enum LifecycleState {
+    // The graph thread hasn't started yet.
+    LIFECYCLE_THREAD_NOT_STARTED,
+    // RunThread() is running normally.
+    LIFECYCLE_RUNNING,
+    // In the following states, the graph thread is not running so
+    // all "graph thread only" state in this class can be used safely
+    // on the main thread.
+    // RunThread() has exited and we're waiting for the next
+    // RunInStableState(), at which point we can clean up the main-thread
+    // side of the graph.
+    LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP,
+    // RunInStableState() posted a ShutdownRunnable, and we're waiting for it
+    // to shut down the graph thread(s).
+    LIFECYCLE_WAITING_FOR_THREAD_SHUTDOWN,
+    // Graph threads have shut down but we're waiting for remaining streams
+    // to be destroyed. Only happens during application shutdown since normally
+    // we'd only shut down a graph when it has no streams.
+    LIFECYCLE_WAITING_FOR_STREAM_DESTRUCTION
+  };
+  LifecycleState mLifecycleState;
+  /**
+   * This enum specifies the wait state of the graph thread.
+   */
+  enum WaitState {
+    // RunThread() is running normally
+    WAITSTATE_RUNNING,
+    // RunThread() is paused waiting for its next iteration, which will
+    // happen soon
+    WAITSTATE_WAITING_FOR_NEXT_ITERATION,
+    // RunThread() is paused indefinitely waiting for something to change
+    WAITSTATE_WAITING_INDEFINITELY,
+    // Something has signaled RunThread() to wake up immediately,
+    // but it hasn't done so yet
+    WAITSTATE_WAKING_UP
+  };
+  WaitState mWaitState;
+  /**
+   * True when another iteration of the control loop is required.
+   */
+  bool mNeedAnotherIteration;
+  /**
+   * True when we need to do a forced shutdown during application shutdown.
+   */
+  bool mForceShutDown;
+  /**
+   * True when we have posted an event to the main thread to run
+   * RunInStableState() and the event hasn't run yet.
+   */
+  bool mPostedRunInStableStateEvent;
+
+  // Main thread only
+
+  /**
+   * Messages posted by the current event loop task. These are forwarded to
+   * the media graph thread during RunInStableState. We can't forward them
+   * immediately because we want all messages between stable states to be
+   * processed as an atomic batch.
+   */
+  nsTArray<nsAutoPtr<ControlMessage> > mCurrentTaskMessageQueue;
+  /**
+   * True when RunInStableState has determined that mLifecycleState is >
+   * LIFECYCLE_RUNNING. Since only the main thread can reset mLifecycleState to
+   * LIFECYCLE_RUNNING, this can be relied on to not change unexpectedly.
+   */
+  bool mDetectedNotRunning;
+  /**
+   * True when a stable state runner has been posted to the appshell to run
+   * RunInStableState at the next stable state.
+   */
+  bool mPostedRunInStableState;
+};
+
+/**
+ * The singleton graph instance.
+ */
+static MediaStreamGraphImpl* gGraph;
+
+StreamTime
+MediaStreamGraphImpl::GetDesiredBufferEnd(MediaStream* aStream)
+{
+  StreamTime current = mCurrentTime - aStream->mBufferStartTime;
+  StreamTime desiredEnd = current;
+  if (!aStream->mAudioOutputs.IsEmpty()) {
+    desiredEnd = NS_MAX(desiredEnd, current + MillisecondsToMediaTime(AUDIO_TARGET_MS));
+  }
+  if (!aStream->mVideoOutputs.IsEmpty()) {
+    desiredEnd = NS_MAX(desiredEnd, current + MillisecondsToMediaTime(VIDEO_TARGET_MS));
+  }
+  return desiredEnd;
+}
+
+bool
+MediaStreamGraphImpl::IsAlwaysExplicitlyBlocked(MediaStream* aStream)
+{
+  GraphTime t = mCurrentTime;
+  while (true) {
+    GraphTime end;
+    if (aStream->mExplicitBlockerCount.GetAt(t, &end) == 0)
+      return false;
+    if (end >= GRAPH_TIME_MAX)
+      return true;
+    t = end;
+  }
+}
+
+void
+MediaStreamGraphImpl::FinishStream(MediaStream* aStream)
+{
+  if (aStream->mFinished)
+    return;
+  LOG(PR_LOG_DEBUG, ("MediaStream %p will finish", aStream));
+  aStream->mFinished = true;
+  // Force at least one more iteration of the control loop, since we rely
+  // on UpdateCurrentTime to notify our listeners once the stream end
+  // has been reached.
+  EnsureNextIteration();
+}
+
+void
+MediaStreamGraphImpl::NoteStreamAffected(MediaStream* aStream, double aTime)
+{
+  NS_ASSERTION(aTime >= 0, "Bad time");
+  GraphTime t =
+      NS_MAX(GetEarliestActionTime(),
+          StreamTimeToGraphTime(aStream, SecondsToMediaTime(aTime),
+                                INCLUDE_TRAILING_BLOCKED_INTERVAL));
+  aStream->mMessageAffectedTime = NS_MIN(aStream->mMessageAffectedTime, t);
+}
+
+void
+MediaStreamGraphImpl::NoteStreamAffected(MediaStream* aStream)
+{
+  GraphTime t = GetEarliestActionTime();
+  aStream->mMessageAffectedTime = NS_MIN(aStream->mMessageAffectedTime, t);
+}
+
+void
+ControlMessage::UpdateAffectedStream()
+{
+  NS_ASSERTION(mStream, "Must have stream for default UpdateAffectedStream");
+  mStream->GraphImpl()->NoteStreamAffected(mStream);
+}
+
+void
+MediaStreamGraphImpl::AddStream(MediaStream* aStream)
+{
+  aStream->mBufferStartTime = mCurrentTime;
+  aStream->mMessageAffectedTime = GetEarliestActionTime();
+  *mStreams.AppendElement() = already_AddRefed<MediaStream>(aStream);
+  LOG(PR_LOG_DEBUG, ("Adding media stream %p to the graph", aStream));
+}
+
+void
+MediaStreamGraphImpl::RemoveStream(MediaStream* aStream)
+{
+  // Remove references in mStreamUpdates before we allow aStream to die.
+  // Pending updates are not needed (since the main thread has already given
+  // up the stream) so we will just drop them.
+  {
+    MonitorAutoLock lock(mMonitor);
+    for (PRUint32 i = 0; i < mStreamUpdates.Length(); ++i) {
+      if (mStreamUpdates[i].mStream == aStream) {
+        mStreamUpdates[i].mStream = nsnull;
+      }
+    }
+  }
+
+  // This unrefs the stream, probably destroying it
+  mStreams.RemoveElement(aStream);
+
+  LOG(PR_LOG_DEBUG, ("Removing media stream %p from the graph", aStream));
+}
+
+void
+MediaStreamGraphImpl::ChooseActionTime()
+{
+  mLastActionTime = GetEarliestActionTime();
+}
+
+void
+MediaStreamGraphImpl::ExtractPendingInput(SourceMediaStream* aStream)
+{
+  bool finished;
+  {
+    MutexAutoLock lock(aStream->mMutex);
+    finished = aStream->mUpdateFinished;
+    for (PRInt32 i = aStream->mUpdateTracks.Length() - 1; i >= 0; --i) {
+      SourceMediaStream::TrackData* data = &aStream->mUpdateTracks[i];
+      for (PRUint32 j = 0; j < aStream->mListeners.Length(); ++j) {
+        MediaStreamListener* l = aStream->mListeners[j];
+        TrackTicks offset = (data->mCommands & SourceMediaStream::TRACK_CREATE)
+            ? data->mStart : aStream->mBuffer.FindTrack(data->mID)->GetSegment()->GetDuration();
+        l->NotifyQueuedTrackChanges(this, data->mID, data->mRate,
+                                    offset, data->mCommands, *data->mData);
+      }
+      if (data->mCommands & SourceMediaStream::TRACK_CREATE) {
+        MediaSegment* segment = data->mData.forget();
+        LOG(PR_LOG_DEBUG, ("SourceMediaStream %p creating track %d, rate %d, start %lld, initial end %lld",
+                           aStream, data->mID, data->mRate, PRInt64(data->mStart),
+                           PRInt64(segment->GetDuration())));
+        aStream->mBuffer.AddTrack(data->mID, data->mRate, data->mStart, segment);
+        // The track has taken ownership of data->mData, so let's replace
+        // data->mData with an empty clone.
+        data->mData = segment->CreateEmptyClone();
+        data->mCommands &= ~SourceMediaStream::TRACK_CREATE;
+      } else if (data->mData->GetDuration() > 0) {
+        MediaSegment* dest = aStream->mBuffer.FindTrack(data->mID)->GetSegment();
+        LOG(PR_LOG_DEBUG, ("SourceMediaStream %p track %d, advancing end from %lld to %lld",
+                           aStream, data->mID,
+                           PRInt64(dest->GetDuration()),
+                           PRInt64(dest->GetDuration() + data->mData->GetDuration())));
+        dest->AppendFrom(data->mData);
+      }
+      if (data->mCommands & SourceMediaStream::TRACK_END) {
+        aStream->mBuffer.FindTrack(data->mID)->SetEnded();
+        aStream->mUpdateTracks.RemoveElementAt(i);
+      }
+    }
+    aStream->mBuffer.AdvanceKnownTracksTime(aStream->mUpdateKnownTracksTime);
+  }
+  if (finished) {
+    FinishStream(aStream);
+  }
+}
+
+void
+MediaStreamGraphImpl::UpdateBufferSufficiencyState(SourceMediaStream* aStream)
+{
+  StreamTime desiredEnd = GetDesiredBufferEnd(aStream);
+  nsTArray<SourceMediaStream::ThreadAndRunnable> runnables;
+
+  {
+    MutexAutoLock lock(aStream->mMutex);
+    for (PRUint32 i = 0; i < aStream->mUpdateTracks.Length(); ++i) {
+      SourceMediaStream::TrackData* data = &aStream->mUpdateTracks[i];
+      if (data->mCommands & SourceMediaStream::TRACK_CREATE) {
+        // This track hasn't been created yet, so we have no sufficiency
+        // data. The track will be created in the next iteration of the
+        // control loop and then we'll fire insufficiency notifications
+        // if necessary.
+        continue;
+      }
+      if (data->mCommands & SourceMediaStream::TRACK_END) {
+        // This track will end, so no point in firing not-enough-data
+        // callbacks.
+        continue;
+      }
+      StreamBuffer::Track* track = aStream->mBuffer.FindTrack(data->mID);
+      // Note that track->IsEnded() must be false, otherwise we would have
+      // removed the track from mUpdateTracks already.
+      NS_ASSERTION(!track->IsEnded(), "What is this track doing here?");
+      data->mHaveEnough = track->GetEndTimeRoundDown() >= desiredEnd;
+      if (!data->mHaveEnough) {
+        runnables.MoveElementsFrom(data->mDispatchWhenNotEnough);
+      }
+    }
+  }
+
+  for (PRUint32 i = 0; i < runnables.Length(); ++i) {
+    runnables[i].mThread->Dispatch(runnables[i].mRunnable, 0);
+  }
+}
+
+StreamTime
+MediaStreamGraphImpl::GraphTimeToStreamTime(MediaStream* aStream,
+                                            GraphTime aTime)
+{
+  NS_ASSERTION(aTime <= mBlockingDecisionsMadeUntilTime,
+               "Don't ask about times where we haven't made blocking decisions yet");
+  if (aTime <= mCurrentTime) {
+    return NS_MAX<StreamTime>(0, aTime - aStream->mBufferStartTime);
+  }
+  GraphTime t = mCurrentTime;
+  StreamTime s = t - aStream->mBufferStartTime;
+  while (t < aTime) {
+    GraphTime end;
+    if (!aStream->mBlocked.GetAt(t, &end)) {
+      s += NS_MIN(aTime, end) - t;
+    }
+    t = end;
+  }
+  return NS_MAX<StreamTime>(0, s);
+}
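+// A worked example of the conversion above (illustrative numbers only):
+// with aStream->mBufferStartTime = 1s, mCurrentTime = 3s, and the stream
+// blocked over the graph-time interval [3s, 3.5s), a graph time of 4s maps
+// to (3s - 1s) + (4s - 3.5s) = 2.5s of stream time, because no stream data
+// is consumed while the stream is blocked.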
+
+GraphTime
+MediaStreamGraphImpl::StreamTimeToGraphTime(MediaStream* aStream,
+                                            StreamTime aTime, PRUint32 aFlags)
+{
+  if (aTime >= STREAM_TIME_MAX) {
+    return GRAPH_TIME_MAX;
+  }
+  MediaTime bufferElapsedToCurrentTime = mCurrentTime - aStream->mBufferStartTime;
+  if (aTime < bufferElapsedToCurrentTime ||
+      (aTime == bufferElapsedToCurrentTime && !(aFlags & INCLUDE_TRAILING_BLOCKED_INTERVAL))) {
+    return aTime + aStream->mBufferStartTime;
+  }
+
+  MediaTime streamAmount = aTime - bufferElapsedToCurrentTime;
+  NS_ASSERTION(streamAmount >= 0, "Can't answer queries before current time");
+
+  GraphTime t = mCurrentTime;
+  while (t < GRAPH_TIME_MAX) {
+    bool blocked;
+    GraphTime end;
+    if (t < mBlockingDecisionsMadeUntilTime) {
+      blocked = aStream->mBlocked.GetAt(t, &end);
+      end = NS_MIN(end, mBlockingDecisionsMadeUntilTime);
+    } else {
+      blocked = false;
+      end = GRAPH_TIME_MAX;
+    }
+    if (blocked) {
+      t = end;
+    } else {
+      if (streamAmount == 0) {
+        // No more stream time to consume at time t, so we're done.
+        break;
+      }
+      MediaTime consume = NS_MIN(end - t, streamAmount);
+      streamAmount -= consume;
+      t += consume;
+    }
+  }
+  return t;
+}
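+// Continuing the example above in the other direction: a stream time of 2.5s
+// maps back to graph time 4s (assuming blocking decisions have been made
+// beyond that point), since 2s of stream time elapsed before mCurrentTime and
+// the remaining 0.5s is consumed only after the blocked interval [3s, 3.5s).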
+
+GraphTime
+MediaStreamGraphImpl::GetAudioPosition(MediaStream* aStream)
+{
+  if (!aStream->mAudioOutput) {
+    return mCurrentTime;
+  }
+  return aStream->mAudioPlaybackStartTime +
+      TicksToTimeRoundDown(aStream->mAudioOutput->GetRate(),
+                           aStream->mAudioOutput->GetPositionInFrames());
+}
+
+void
+MediaStreamGraphImpl::UpdateCurrentTime()
+{
+  GraphTime prevCurrentTime = mCurrentTime;
+
+  TimeStamp now = TimeStamp::Now();
+  // The earliest buffer end time for streams that haven't finished. We can't
+  // advance the current time past this point.
+  GraphTime minBufferEndTime = GRAPH_TIME_MAX;
+  for (PRUint32 i = 0; i < mStreams.Length(); ++i) {
+    MediaStream* stream = mStreams[i];
+    GraphTime blockedBufferEndTime =
+      StreamTimeToGraphTime(stream, stream->GetBufferEnd(), INCLUDE_TRAILING_BLOCKED_INTERVAL);
+    if (stream->mAudioOutput &&
+        (!stream->mFinished || mBlockingDecisionsMadeUntilTime <= blockedBufferEndTime)) {
+      // XXX We should take audio positions into account when determining how
+      // far to advance the current time. Basically the current time should
+      // track the average or minimum of the audio positions. We don't do this
+      // currently since the audio positions aren't accurate enough. This
+      // logging code is helpful to track the accuracy of audio positions.
+      GraphTime audioPosition = GetAudioPosition(stream);
+      LOG(PR_LOG_DEBUG, ("Audio position for stream %p is %f", stream,
+                         MediaTimeToSeconds(audioPosition)));
+    }
+    if (!stream->mFinished) {
+      minBufferEndTime = NS_MIN(minBufferEndTime, blockedBufferEndTime);
+    }
+  }
+
+  NS_ASSERTION(mCurrentTime <= minBufferEndTime,
+               "We shouldn't have already advanced beyond buffer end!");
+  GraphTime nextCurrentTime =
+    SecondsToMediaTime((now - mCurrentTimeStamp).ToSeconds()) + mCurrentTime;
+  if (minBufferEndTime < nextCurrentTime) {
+    LOG(PR_LOG_WARNING, ("Reducing current time to minimum buffer end"));
+    nextCurrentTime = minBufferEndTime;
+  }
+  mCurrentTimeStamp = now;
+
+  mBlockingDecisionsMadeUntilTime =
+    NS_MAX(nextCurrentTime, mBlockingDecisionsMadeUntilTime);
+  LOG(PR_LOG_DEBUG, ("Updating current time to %f (minBufferEndTime %f, real %f, mBlockingDecisionsMadeUntilTime %f)",
+                     MediaTimeToSeconds(nextCurrentTime),
+                     MediaTimeToSeconds(minBufferEndTime),
+                     (now - mInitialTimeStamp).ToSeconds(),
+                     MediaTimeToSeconds(mBlockingDecisionsMadeUntilTime)));
+
+  if (prevCurrentTime >= nextCurrentTime) {
+    NS_ASSERTION(prevCurrentTime == nextCurrentTime, "Time can't go backwards!");
+    // This could happen due to low clock resolution, maybe?
+    LOG(PR_LOG_DEBUG, ("Time did not advance"));
+    return;
+  }
+
+  for (PRUint32 i = 0; i < mStreams.Length(); ++i) {
+    MediaStream* stream = mStreams[i];
+
+    // Calculate blocked time and fire Blocked/Unblocked events
+    GraphTime blockedTime = 0;
+    GraphTime t = prevCurrentTime;
+    // Save current blocked status
+    bool wasBlocked = stream->mBlocked.GetAt(prevCurrentTime);
+    while (t < nextCurrentTime) {
+      GraphTime end;
+      bool blocked = stream->mBlocked.GetAt(t, &end);
+      if (blocked) {
+        blockedTime += NS_MIN(end, nextCurrentTime) - t;
+      }
+      if (blocked != wasBlocked) {
+        for (PRUint32 j = 0; j < stream->mListeners.Length(); ++j) {
+          MediaStreamListener* l = stream->mListeners[j];
+          l->NotifyBlockingChanged(this,
+              blocked ? MediaStreamListener::BLOCKED : MediaStreamListener::UNBLOCKED);
+        }
+        wasBlocked = blocked;
+      }
+      t = end;
+    }
+
+    stream->AdvanceTimeVaryingValuesToCurrentTime(nextCurrentTime, blockedTime);
+    // Advance mBlocked last so that implementations of
+    // AdvanceTimeVaryingValuesToCurrentTime can rely on the value of mBlocked.
+    stream->mBlocked.AdvanceCurrentTime(nextCurrentTime);
+
+    if (blockedTime < nextCurrentTime - mCurrentTime) {
+      for (PRUint32 j = 0; j < stream->mListeners.Length(); ++j) {
+        MediaStreamListener* l = stream->mListeners[j];
+        l->NotifyOutput(this);
+      }
+    }
+
+    if (stream->mFinished && !stream->mNotifiedFinished &&
+        stream->mBufferStartTime + stream->GetBufferEnd() <= nextCurrentTime) {
+      stream->mNotifiedFinished = true;
+      for (PRUint32 j = 0; j < stream->mListeners.Length(); ++j) {
+        MediaStreamListener* l = stream->mListeners[j];
+        l->NotifyFinished(this);
+      }
+    }
+
+    LOG(PR_LOG_DEBUG, ("MediaStream %p bufferStartTime=%f blockedTime=%f",
+                       stream, MediaTimeToSeconds(stream->mBufferStartTime),
+                       MediaTimeToSeconds(blockedTime)));
+  }
+
+  mCurrentTime = nextCurrentTime;
+}
+
+void
+MediaStreamGraphImpl::MarkStreamBlocked(MediaStream* aStream,
+                                        GraphTime aTime, GraphTime* aEnd)
+{
+  NS_ASSERTION(!aStream->mBlocked.GetAt(aTime), "MediaStream already blocked");
+
+  aStream->mBlocked.SetAtAndAfter(aTime, true);
+}
+
+bool
+MediaStreamGraphImpl::WillUnderrun(MediaStream* aStream, GraphTime aTime,
+                                   GraphTime aEndBlockingDecisions, GraphTime* aEnd)
+{
+  // Finished streams, or streams that aren't being played back, can't underrun.
+  if (aStream->mFinished ||
+      (aStream->mAudioOutputs.IsEmpty() && aStream->mVideoOutputs.IsEmpty())) {
+    return false;
+  }
+  GraphTime bufferEnd =
+    StreamTimeToGraphTime(aStream, aStream->GetBufferEnd(),
+                          INCLUDE_TRAILING_BLOCKED_INTERVAL);
+  NS_ASSERTION(bufferEnd >= mCurrentTime, "Buffer underran");
+  // We should block after bufferEnd.
+  if (bufferEnd <= aTime) {
+    LOG(PR_LOG_DEBUG, ("MediaStream %p will block due to data underrun, "
+                       "bufferEnd %f",
+                       aStream, MediaTimeToSeconds(bufferEnd)));
+    return true;
+  }
+  // We should keep blocking if we're currently blocked and we don't have
+  // data all the way through to aEndBlockingDecisions. If we don't have
+  // data all the way through to aEndBlockingDecisions, we'll block soon,
+  // but we might as well remain unblocked and play the data we've got while
+  // we can.
+  if (bufferEnd <= aEndBlockingDecisions && aStream->mBlocked.GetBefore(aTime)) {
+    LOG(PR_LOG_DEBUG, ("MediaStream %p will block due to speculative data underrun, "
+                       "bufferEnd %f",
+                       aStream, MediaTimeToSeconds(bufferEnd)));
+    return true;
+  }
+  // Reconsider decisions at bufferEnd
+  *aEnd = NS_MIN(*aEnd, bufferEnd);
+  return false;
+}
+
+void
+MediaStreamGraphImpl::RecomputeBlocking()
+{
+  PRInt32 writeAudioUpTo = AUDIO_TARGET_MS;
+  GraphTime endBlockingDecisions =
+    mCurrentTime + MillisecondsToMediaTime(writeAudioUpTo);
+
+  bool blockingDecisionsWillChange = false;
+  // mBlockingDecisionsMadeUntilTime has been set in UpdateCurrentTime
+  while (mBlockingDecisionsMadeUntilTime < endBlockingDecisions) {
+    LOG(PR_LOG_DEBUG, ("Media graph %p computing blocking for time %f",
+                       this, MediaTimeToSeconds(mBlockingDecisionsMadeUntilTime)));
+    GraphTime end = GRAPH_TIME_MAX;
+    RecomputeBlockingAt(mBlockingDecisionsMadeUntilTime, endBlockingDecisions, &end);
+    LOG(PR_LOG_DEBUG, ("Media graph %p computed blocking for interval %f to %f",
+                       this, MediaTimeToSeconds(mBlockingDecisionsMadeUntilTime),
+                       MediaTimeToSeconds(end)));
+    mBlockingDecisionsMadeUntilTime = end;
+    if (end < GRAPH_TIME_MAX) {
+      blockingDecisionsWillChange = true;
+    }
+  }
+  mBlockingDecisionsMadeUntilTime = endBlockingDecisions;
+
+  for (PRUint32 i = 0; i < mStreams.Length(); ++i) {
+    MediaStream* stream = mStreams[i];
+    GraphTime end;
+    stream->mBlocked.GetAt(mCurrentTime, &end);
+    if (end < GRAPH_TIME_MAX) {
+      blockingDecisionsWillChange = true;
+    }
+  }
+  if (blockingDecisionsWillChange) {
+    // Make sure we wake up to notify listeners about these changes.
+    EnsureNextIteration();
+  }
+}
+
+void
+MediaStreamGraphImpl::RecomputeBlockingAt(GraphTime aTime,
+                                          GraphTime aEndBlockingDecisions,
+                                          GraphTime* aEnd)
+{
+  for (PRUint32 i = 0; i < mStreams.Length(); ++i) {
+    MediaStream* stream = mStreams[i];
+    stream->mBlocked.SetAtAndAfter(aTime, false);
+  }
+
+  for (PRUint32 i = 0; i < mStreams.Length(); ++i) {
+    MediaStream* stream = mStreams[i];
+    // Stream might be blocked by some other stream (due to processing
+    // constraints)
+    if (stream->mBlocked.GetAt(aTime)) {
+      continue;
+    }
+
+    if (stream->mFinished) {
+      GraphTime endTime = StreamTimeToGraphTime(stream, stream->GetBufferEnd());
+      if (endTime <= aTime) {
+        LOG(PR_LOG_DEBUG, ("MediaStream %p is blocked due to being finished", stream));
+        MarkStreamBlocked(stream, aTime, aEnd);
+        continue;
+      } else {
+        LOG(PR_LOG_DEBUG, ("MediaStream %p is finished, but not blocked yet (end at %f, with blocking at %f)",
+                           stream, MediaTimeToSeconds(stream->GetBufferEnd()),
+                           MediaTimeToSeconds(endTime)));
+        *aEnd = NS_MIN(*aEnd, endTime);
+      }
+    }
+
+    // We don't need to explicitly check for cycles; streams in a cycle will
+    // just never be able to produce data, and WillUnderrun will trigger.
+    GraphTime end;
+    bool explicitBlock = stream->mExplicitBlockerCount.GetAt(aTime, &end) > 0;
+    *aEnd = NS_MIN(*aEnd, end);
+    if (explicitBlock) {
+      LOG(PR_LOG_DEBUG, ("MediaStream %p is blocked due to explicit blocker", stream));
+      MarkStreamBlocked(stream, aTime, aEnd);
+      continue;
+    }
+
+    bool underrun = WillUnderrun(stream, aTime, aEndBlockingDecisions, aEnd);
+    if (underrun) {
+      MarkStreamBlocked(stream, aTime, aEnd);
+      continue;
+    }
+
+    if (stream->mAudioOutputs.IsEmpty() && stream->mVideoOutputs.IsEmpty()) {
+      // See if the stream is being consumed anywhere. If not, it should block.
+      LOG(PR_LOG_DEBUG, ("MediaStream %p is blocked due to having no consumers", stream));
+      MarkStreamBlocked(stream, aTime, aEnd);
+      continue;
+    }
+  }
+
+  NS_ASSERTION(*aEnd > aTime, "Failed to advance!");
+}
+
+void
+MediaStreamGraphImpl::UpdateFirstActiveTracks(MediaStream* aStream)
+{
+  StreamBuffer::Track* newTracksByType[MediaSegment::TYPE_COUNT];
+  for (PRUint32 i = 0; i < ArrayLength(newTracksByType); ++i) {
+    newTracksByType[i] = nsnull;
+  }
+
+  for (StreamBuffer::TrackIter iter(aStream->mBuffer);
+       !iter.IsEnded(); iter.Next()) {
+    MediaSegment::Type type = iter->GetType();
+    if ((newTracksByType[type] &&
+         iter->GetStartTimeRoundDown() < newTracksByType[type]->GetStartTimeRoundDown()) ||
+         aStream->mFirstActiveTracks[type] == TRACK_NONE) {
+      newTracksByType[type] = &(*iter);
+      aStream->mFirstActiveTracks[type] = iter->GetID();
+    }
+  }
+}
+
+void
+MediaStreamGraphImpl::CreateOrDestroyAudioStream(GraphTime aAudioOutputStartTime,
+                                                 MediaStream* aStream)
+{
+  StreamBuffer::Track* track;
+
+  if (aStream->mAudioOutputs.IsEmpty() ||
+      !(track = aStream->mBuffer.FindTrack(aStream->mFirstActiveTracks[MediaSegment::AUDIO]))) {
+    if (aStream->mAudioOutput) {
+      aStream->mAudioOutput->Shutdown();
+      aStream->mAudioOutput = nsnull;
+    }
+    return;
+  }
+
+  if (aStream->mAudioOutput)
+    return;
+
+  // No output stream created yet. Check if it's time to create one.
+  GraphTime startTime =
+    StreamTimeToGraphTime(aStream, track->GetStartTimeRoundDown(),
+                          INCLUDE_TRAILING_BLOCKED_INTERVAL);
+  if (startTime >= mBlockingDecisionsMadeUntilTime) {
+    // The stream wants to play audio, but nothing will play for the foreseeable
+    // future, so don't create the stream.
+    return;
+  }
+
+  // Don't bother destroying the nsAudioStream for ended tracks yet.
+
+  // XXX allocating a nsAudioStream could be slow so we're going to have to do
+  // something here ... preallocation, async allocation, multiplexing onto a single
+  // stream ...
+
+  AudioSegment* audio = track->Get<AudioSegment>();
+  aStream->mAudioPlaybackStartTime = aAudioOutputStartTime;
+  aStream->mAudioOutput = nsAudioStream::AllocateStream();
+  aStream->mAudioOutput->Init(audio->GetChannels(),
+                              track->GetRate(),
+                              audio->GetFirstFrameFormat());
+}
+
+void
+MediaStreamGraphImpl::PlayAudio(MediaStream* aStream,
+                                GraphTime aFrom, GraphTime aTo)
+{
+  if (!aStream->mAudioOutput)
+    return;
+
+  StreamBuffer::Track* track =
+    aStream->mBuffer.FindTrack(aStream->mFirstActiveTracks[MediaSegment::AUDIO]);
+  AudioSegment* audio = track->Get<AudioSegment>();
+
+  // When we're playing multiple copies of this stream at the same time, they're
+  // perfectly correlated so adding volumes is the right thing to do.
+  float volume = 0.0f;
+  for (PRUint32 i = 0; i < aStream->mAudioOutputs.Length(); ++i) {
+    volume += aStream->mAudioOutputs[i].mVolume;
+  }
+
+  // We don't update aStream->mBufferStartTime here to account for
+  // time spent blocked. Instead, we'll update it in UpdateCurrentTime after the
+  // blocked period has completed. But we do need to make sure we play from the
+  // right offsets in the stream buffer, even if we've already written silence for
+  // some amount of blocked time after the current time.
+  GraphTime t = aFrom;
+  while (t < aTo) {
+    GraphTime end;
+    bool blocked = aStream->mBlocked.GetAt(t, &end);
+    end = NS_MIN(end, aTo);
+
+    AudioSegment output;
+    if (blocked) {
+      // Track total blocked time in aStream->mBlockedAudioTime so that
+      // the amount of silent samples we've inserted for blocking never gets
+      // more than one sample away from the ideal amount.
+      TrackTicks startTicks =
+          TimeToTicksRoundDown(track->GetRate(), aStream->mBlockedAudioTime);
+      aStream->mBlockedAudioTime += end - t;
+      TrackTicks endTicks =
+          TimeToTicksRoundDown(track->GetRate(), aStream->mBlockedAudioTime);
+
+      output.InitFrom(*audio);
+      output.InsertNullDataAtStart(endTicks - startTicks);
+      LOG(PR_LOG_DEBUG, ("MediaStream %p writing blocking-silence samples for %f to %f",
+                         aStream, MediaTimeToSeconds(t), MediaTimeToSeconds(end)));
+    } else {
+      TrackTicks startTicks =
+          track->TimeToTicksRoundDown(GraphTimeToStreamTime(aStream, t));
+      TrackTicks endTicks =
+          track->TimeToTicksRoundDown(GraphTimeToStreamTime(aStream, end));
+
+      output.SliceFrom(*audio, startTicks, endTicks);
+      output.ApplyVolume(volume);
+      LOG(PR_LOG_DEBUG, ("MediaStream %p writing samples for %f to %f (samples %lld to %lld)",
+                         aStream, MediaTimeToSeconds(t), MediaTimeToSeconds(end),
+                         startTicks, endTicks));
+    }
+    output.WriteTo(aStream->mAudioOutput);
+    t = end;
+  }
+}
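+// A note on the silence bookkeeping above (summary, not new behavior): the
+// silent sample count for a blocked interval is the difference between
+// TimeToTicksRoundDown(rate, mBlockedAudioTime) evaluated after and before
+// the interval is added, so rounding error from fractional-sample interval
+// lengths does not accumulate across successive blocked intervals.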
+
+void
+MediaStreamGraphImpl::PlayVideo(MediaStream* aStream)
+{
+  if (aStream->mVideoOutputs.IsEmpty())
+    return;
+
+  StreamBuffer::Track* track =
+    aStream->mBuffer.FindTrack(aStream->mFirstActiveTracks[MediaSegment::VIDEO]);
+  if (!track)
+    return;
+  VideoSegment* video = track->Get<VideoSegment>();
+
+  // Display the next frame a bit early. This is better than letting the current
+  // frame be displayed for too long.
+  GraphTime framePosition = mCurrentTime + MillisecondsToMediaTime(MEDIA_GRAPH_TARGET_PERIOD_MS);
+  NS_ASSERTION(framePosition >= aStream->mBufferStartTime, "frame position before buffer?");
+  StreamTime frameBufferTime = GraphTimeToStreamTime(aStream, framePosition);
+  TrackTicks start;
+  const VideoFrame* frame =
+    video->GetFrameAt(track->TimeToTicksRoundDown(frameBufferTime), &start);
+  if (!frame) {
+    frame = video->GetLastFrame(&start);
+    if (!frame)
+      return;
+  }
+
+  if (*frame != aStream->mLastPlayedVideoFrame) {
+    LOG(PR_LOG_DEBUG, ("MediaStream %p writing video frame %p (%dx%d)",
+                       aStream, frame->GetImage(), frame->GetIntrinsicSize().width,
+                       frame->GetIntrinsicSize().height));
+    GraphTime startTime = StreamTimeToGraphTime(aStream,
+        track->TicksToTimeRoundDown(start), INCLUDE_TRAILING_BLOCKED_INTERVAL);
+    TimeStamp targetTime = mCurrentTimeStamp +
+        TimeDuration::FromMilliseconds(double(startTime - mCurrentTime));
+    for (PRUint32 i = 0; i < aStream->mVideoOutputs.Length(); ++i) {
+      VideoFrameContainer* output = aStream->mVideoOutputs[i];
+      output->SetCurrentFrame(frame->GetIntrinsicSize(), frame->GetImage(),
+                              targetTime);
+      nsCOMPtr<nsIRunnable> event =
+        NS_NewRunnableMethod(output, &VideoFrameContainer::Invalidate);
+      NS_DispatchToMainThread(event, NS_DISPATCH_NORMAL);
+    }
+    aStream->mLastPlayedVideoFrame = *frame;
+  }
+}
+
+void
+MediaStreamGraphImpl::PrepareUpdatesToMainThreadState()
+{
+  mMonitor.AssertCurrentThreadOwns();
+
+  for (PRUint32 i = 0; i < mStreams.Length(); ++i) {
+    MediaStream* stream = mStreams[i];
+    StreamUpdate* update = mStreamUpdates.AppendElement();
+    update->mGraphUpdateIndex = stream->mGraphUpdateIndices.GetAt(mCurrentTime);
+    update->mStream = stream;
+    update->mNextMainThreadCurrentTime =
+      GraphTimeToStreamTime(stream, mCurrentTime);
+    update->mNextMainThreadFinished =
+      stream->mFinished &&
+      StreamTimeToGraphTime(stream, stream->GetBufferEnd()) <= mCurrentTime;
+  }
+  mUpdateRunnables.MoveElementsFrom(mPendingUpdateRunnables);
+
+  EnsureStableStateEventPosted();
+}
+
+void
+MediaStreamGraphImpl::EnsureImmediateWakeUpLocked(MonitorAutoLock& aLock)
+{
+  if (mWaitState == WAITSTATE_WAITING_FOR_NEXT_ITERATION ||
+      mWaitState == WAITSTATE_WAITING_INDEFINITELY) {
+    mWaitState = WAITSTATE_WAKING_UP;
+    aLock.Notify();
+  }
+}
+
+void
+MediaStreamGraphImpl::EnsureNextIteration()
+{
+  MonitorAutoLock lock(mMonitor);
+  EnsureNextIterationLocked(lock);
+}
+
+void
+MediaStreamGraphImpl::EnsureNextIterationLocked(MonitorAutoLock& aLock)
+{
+  if (mNeedAnotherIteration)
+    return;
+  mNeedAnotherIteration = true;
+  if (mWaitState == WAITSTATE_WAITING_INDEFINITELY) {
+    mWaitState = WAITSTATE_WAKING_UP;
+    aLock.Notify();
+  }
+}
+
+void
+MediaStreamGraphImpl::RunThread()
+{
+  nsTArray<MessageBlock> messageQueue;
+  {
+    MonitorAutoLock lock(mMonitor);
+    messageQueue.SwapElements(mMessageQueue);
+  }
+  NS_ASSERTION(!messageQueue.IsEmpty(),
+               "Shouldn't have started a graph with empty message queue!");
+
+  for (;;) {
+    // Update mCurrentTime to the min of the playing audio times, or using the
+    // wall-clock time change if no audio is playing.
+    UpdateCurrentTime();
+
+    // Calculate independent action times for each batch of messages (each
+    // batch corresponding to an event loop task). This isolates the performance
+    // of different scripts to some extent.
+    for (PRUint32 i = 0; i < messageQueue.Length(); ++i) {
+      mProcessingGraphUpdateIndex = messageQueue[i].mGraphUpdateIndex;
+      nsTArray<nsAutoPtr<ControlMessage> >& messages = messageQueue[i].mMessages;
+
+      for (PRUint32 j = 0; j < mStreams.Length(); ++j) {
+        mStreams[j]->mMessageAffectedTime = GRAPH_TIME_MAX;
+      }
+      for (PRUint32 j = 0; j < messages.Length(); ++j) {
+        messages[j]->UpdateAffectedStream();
+      }
+
+      ChooseActionTime();
+
+      for (PRUint32 j = 0; j < messages.Length(); ++j) {
+        messages[j]->Process();
+      }
+    }
+    messageQueue.Clear();
+
+    // Grab pending input from source streams.
+    for (PRUint32 i = 0; i < mStreams.Length(); ++i) {
+      SourceMediaStream* is = mStreams[i]->AsSourceStream();
+      if (is) {
+        ExtractPendingInput(is);
+      }
+    }
+
+    GraphTime prevBlockingDecisionsMadeUntilTime = mBlockingDecisionsMadeUntilTime;
+    RecomputeBlocking();
+
+    PRUint32 audioStreamsActive = 0;
+    bool allBlockedForever = true;
+    // Figure out what each stream wants to do
+    for (PRUint32 i = 0; i < mStreams.Length(); ++i) {
+      MediaStream* stream = mStreams[i];
+      UpdateFirstActiveTracks(stream);
+      CreateOrDestroyAudioStream(prevBlockingDecisionsMadeUntilTime, stream);
+      PlayAudio(stream, prevBlockingDecisionsMadeUntilTime,
+                mBlockingDecisionsMadeUntilTime);
+      if (stream->mAudioOutput) {
+        ++audioStreamsActive;
+      }
+      PlayVideo(stream);
+      SourceMediaStream* is = stream->AsSourceStream();
+      if (is) {
+        UpdateBufferSufficiencyState(is);
+      }
+      GraphTime end;
+      if (!stream->mBlocked.GetAt(mCurrentTime, &end) || end < GRAPH_TIME_MAX) {
+        allBlockedForever = false;
+      }
+    }
+    if (!allBlockedForever || audioStreamsActive > 0) {
+      EnsureNextIteration();
+    }
+
+    {
+      MonitorAutoLock lock(mMonitor);
+      PrepareUpdatesToMainThreadState();
+      if (mForceShutDown || (IsEmpty() && mMessageQueue.IsEmpty())) {
+        // Enter shutdown mode. The stable-state handler will detect this
+        // and complete shutdown. Destroy any streams immediately.
+        for (PRUint32 i = 0; i < mStreams.Length(); ++i) {
+          mStreams[i]->DestroyImpl();
+        }
+        LOG(PR_LOG_DEBUG, ("MediaStreamGraph %p waiting for main thread cleanup", this));
+        mLifecycleState = LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP;
+        return;
+      }
+
+      PRIntervalTime timeout = PR_INTERVAL_NO_TIMEOUT;
+      TimeStamp now = TimeStamp::Now();
+      if (mNeedAnotherIteration) {
+        PRInt64 timeoutMS = MEDIA_GRAPH_TARGET_PERIOD_MS -
+          PRInt64((now - mCurrentTimeStamp).ToMilliseconds());
+        // Make sure timeoutMS doesn't overflow 32 bits by waking up at
+        // least once a minute, if we need to wake up at all
+        timeoutMS = NS_MAX<PRInt64>(0, NS_MIN<PRInt64>(timeoutMS, 60*1000));
+        timeout = PR_MillisecondsToInterval(PRUint32(timeoutMS));
+        LOG(PR_LOG_DEBUG, ("Waiting for next iteration; at %f, timeout=%f",
+                           (now - mInitialTimeStamp).ToSeconds(), timeoutMS/1000.0));
+        mWaitState = WAITSTATE_WAITING_FOR_NEXT_ITERATION;
+      } else {
+        mWaitState = WAITSTATE_WAITING_INDEFINITELY;
+      }
+      if (timeout > 0) {
+        lock.Wait(timeout);
+        LOG(PR_LOG_DEBUG, ("Resuming after timeout; at %f, elapsed=%f",
+                           (TimeStamp::Now() - mInitialTimeStamp).ToSeconds(),
+                           (TimeStamp::Now() - now).ToSeconds()));
+      }
+      mWaitState = WAITSTATE_RUNNING;
+      mNeedAnotherIteration = false;
+      messageQueue.SwapElements(mMessageQueue);
+    }
+  }
+}
+
+void
+MediaStreamGraphImpl::ApplyStreamUpdate(StreamUpdate* aUpdate)
+{
+  mMonitor.AssertCurrentThreadOwns();
+
+  MediaStream* stream = aUpdate->mStream;
+  if (!stream)
+    return;
+  stream->mMainThreadCurrentTime = aUpdate->mNextMainThreadCurrentTime;
+  stream->mMainThreadFinished = aUpdate->mNextMainThreadFinished;
+}
+
+void
+MediaStreamGraphImpl::ShutdownThreads()
+{
+  NS_ASSERTION(NS_IsMainThread(), "Must be called on main thread");
+  // mGraph's thread is not running so it's OK to do whatever here
+  LOG(PR_LOG_DEBUG, ("Stopping threads for MediaStreamGraph %p", this));
+
+  if (mThread) {
+    mThread->Shutdown();
+    mThread = nsnull;
+  }
+}
+
+void
+MediaStreamGraphImpl::ForceShutDown()
+{
+  NS_ASSERTION(NS_IsMainThread(), "Must be called on main thread");
+  LOG(PR_LOG_DEBUG, ("MediaStreamGraph %p ForceShutdown", this));
+  {
+    MonitorAutoLock lock(mMonitor);
+    mForceShutDown = true;
+    EnsureImmediateWakeUpLocked(lock);
+  }
+}
+
+namespace {
+
+class MediaStreamGraphThreadRunnable : public nsRunnable {
+public:
+  NS_IMETHOD Run()
+  {
+    gGraph->RunThread();
+    return NS_OK;
+  }
+};
+
+class MediaStreamGraphShutDownRunnable : public nsRunnable {
+public:
+  MediaStreamGraphShutDownRunnable(MediaStreamGraphImpl* aGraph) : mGraph(aGraph) {}
+  NS_IMETHOD Run()
+  {
+    NS_ASSERTION(mGraph->mDetectedNotRunning,
+                 "We should know the graph thread control loop isn't running!");
+    // mGraph's thread is not running so it's OK to do whatever here
+    if (mGraph->IsEmpty()) {
+      // mGraph is no longer needed, so delete it. If the graph is not empty
+      // then we must be in a forced shutdown and some later AppendMessage will
+      // detect that the manager has been emptied, and delete it.
+      delete mGraph;
+    } else {
+      NS_ASSERTION(mGraph->mForceShutDown, "Not in forced shutdown?");
+      mGraph->mLifecycleState =
+        MediaStreamGraphImpl::LIFECYCLE_WAITING_FOR_STREAM_DESTRUCTION;
+    }
+    return NS_OK;
+  }
+private:
+  MediaStreamGraphImpl* mGraph;
+};
+
+class MediaStreamGraphStableStateRunnable : public nsRunnable {
+public:
+  NS_IMETHOD Run()
+  {
+    if (gGraph) {
+      gGraph->RunInStableState();
+    }
+    return NS_OK;
+  }
+};
+
+/*
+ * Control messages forwarded from main thread to graph manager thread
+ */
+class CreateMessage : public ControlMessage {
+public:
+  CreateMessage(MediaStream* aStream) : ControlMessage(aStream) {}
+  virtual void UpdateAffectedStream()
+  {
+    mStream->GraphImpl()->AddStream(mStream);
+  }
+  virtual void Process()
+  {
+    mStream->Init();
+  }
+};
+
+class MediaStreamGraphShutdownObserver : public nsIObserver
+{
+public:
+  NS_DECL_ISUPPORTS
+  NS_DECL_NSIOBSERVER
+};
+
+}
+
+void
+MediaStreamGraphImpl::RunInStableState()
+{
+  NS_ASSERTION(NS_IsMainThread(), "Must be called on main thread");
+
+  nsTArray<nsCOMPtr<nsIRunnable> > runnables;
+
+  {
+    MonitorAutoLock lock(mMonitor);
+    mPostedRunInStableStateEvent = false;
+
+    runnables.SwapElements(mUpdateRunnables);
+    for (PRUint32 i = 0; i < mStreamUpdates.Length(); ++i) {
+      StreamUpdate* update = &mStreamUpdates[i];
+      if (update->mStream) {
+        ApplyStreamUpdate(update);
+      }
+    }
+    mStreamUpdates.Clear();
+
+    if (mLifecycleState == LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP && mForceShutDown) {
+      for (PRUint32 i = 0; i < mMessageQueue.Length(); ++i) {
+        MessageBlock& mb = mMessageQueue[i];
+        for (PRUint32 j = 0; j < mb.mMessages.Length(); ++j) {
+          mb.mMessages[j]->ProcessDuringShutdown();
+        }
+      }
+      mMessageQueue.Clear();
+      for (PRUint32 i = 0; i < mCurrentTaskMessageQueue.Length(); ++i) {
+        mCurrentTaskMessageQueue[i]->ProcessDuringShutdown();
+      }
+      mCurrentTaskMessageQueue.Clear();
+      // Stop MediaStreamGraph threads. Do not clear gGraph since
+      // we have outstanding DOM objects that may need it.
+      mLifecycleState = LIFECYCLE_WAITING_FOR_THREAD_SHUTDOWN;
+      nsCOMPtr<nsIRunnable> event = new MediaStreamGraphShutDownRunnable(this);
+      NS_DispatchToMainThread(event);
+    }
+
+    if (mLifecycleState == LIFECYCLE_THREAD_NOT_STARTED) {
+      mLifecycleState = LIFECYCLE_RUNNING;
+      // Start the thread now. We couldn't start it earlier because
+      // the graph might exit immediately on finding it has no streams. The
+      // first message for a new graph must create a stream.
+      nsCOMPtr<nsIRunnable> event = new MediaStreamGraphThreadRunnable();
+      NS_NewThread(getter_AddRefs(mThread), event);
+    }
+
+    if (mCurrentTaskMessageQueue.IsEmpty()) {
+      if (mLifecycleState == LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP && IsEmpty()) {
+        NS_ASSERTION(gGraph == this, "Not current graph??");
+        // Complete shutdown. First, ensure that this graph is no longer used.
+        // A new graph will be created if one is needed.
+        LOG(PR_LOG_DEBUG, ("Disconnecting MediaStreamGraph %p", gGraph));
+        gGraph = nsnull;
+        // Asynchronously clean up old graph. We don't want to do this
+        // synchronously because it spins the event loop waiting for threads
+        // to shut down, and we don't want to do that in a stable state handler.
+        mLifecycleState = LIFECYCLE_WAITING_FOR_THREAD_SHUTDOWN;
+        nsCOMPtr<nsIRunnable> event = new MediaStreamGraphShutDownRunnable(this);
+        NS_DispatchToMainThread(event);
+      }
+    } else {
+      if (mLifecycleState <= LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP) {
+        MessageBlock* block = mMessageQueue.AppendElement();
+        block->mMessages.SwapElements(mCurrentTaskMessageQueue);
+        block->mGraphUpdateIndex = mGraphUpdatesSent;
+        ++mGraphUpdatesSent;
+        EnsureNextIterationLocked(lock);
+      }
+
+      if (mLifecycleState == LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP) {
+        mLifecycleState = LIFECYCLE_RUNNING;
+        // Revive the MediaStreamGraph since we have more messages going to it.
+        // Note that we need to put messages into its queue before reviving it,
+        // or it might exit immediately.
+        nsCOMPtr<nsIRunnable> event = new MediaStreamGraphThreadRunnable();
+        mThread->Dispatch(event, 0);
+      }
+    }
+
+    mDetectedNotRunning = mLifecycleState > LIFECYCLE_RUNNING;
+  }
+
+  // Make sure we get a new current time in the next event loop task
+  mPostedRunInStableState = false;
+
+  for (PRUint32 i = 0; i < runnables.Length(); ++i) {
+    runnables[i]->Run();
+  }
+}
+
+static NS_DEFINE_CID(kAppShellCID, NS_APPSHELL_CID);
+
+void
+MediaStreamGraphImpl::EnsureRunInStableState()
+{
+  NS_ASSERTION(NS_IsMainThread(), "main thread only");
+
+  if (mPostedRunInStableState)
+    return;
+  mPostedRunInStableState = true;
+  nsCOMPtr<nsIRunnable> event = new MediaStreamGraphStableStateRunnable();
+  nsCOMPtr<nsIAppShell> appShell = do_GetService(kAppShellCID);
+  if (appShell) {
+    appShell->RunInStableState(event);
+  } else {
+    NS_ERROR("Appshell already destroyed?");
+  }
+}
+
+void
+MediaStreamGraphImpl::EnsureStableStateEventPosted()
+{
+  mMonitor.AssertCurrentThreadOwns();
+
+  if (mPostedRunInStableStateEvent)
+    return;
+  mPostedRunInStableStateEvent = true;
+  nsCOMPtr<nsIRunnable> event = new MediaStreamGraphStableStateRunnable();
+  NS_DispatchToMainThread(event);
+}
+
+void
+MediaStreamGraphImpl::AppendMessage(ControlMessage* aMessage)
+{
+  NS_ASSERTION(NS_IsMainThread(), "main thread only");
+
+  if (mDetectedNotRunning &&
+      mLifecycleState > LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP) {
+    // The graph control loop is not running and main thread cleanup has
+    // happened. From now on we can't append messages to mCurrentTaskMessageQueue,
+    // because it will never be processed again; just run ProcessDuringShutdown()
+    // on this message instead.
+    // This should only happen during forced shutdown.
+    aMessage->ProcessDuringShutdown();
+    delete aMessage;
+    if (IsEmpty()) {
+      NS_ASSERTION(gGraph == this, "Switched managers during forced shutdown?");
+      gGraph = nsnull;
+      delete this;
+    }
+    return;
+  }
+
+  mCurrentTaskMessageQueue.AppendElement(aMessage);
+  EnsureRunInStableState();
+}
+
+void
+MediaStream::Init()
+{
+  MediaStreamGraphImpl* graph = GraphImpl();
+  mBlocked.SetAtAndAfter(graph->mCurrentTime, true);
+  mExplicitBlockerCount.SetAtAndAfter(graph->mCurrentTime, true);
+  mExplicitBlockerCount.SetAtAndAfter(graph->mLastActionTime, false);
+}
+
+MediaStreamGraphImpl*
+MediaStream::GraphImpl()
+{
+  return gGraph;
+}
+
+void
+MediaStream::DestroyImpl()
+{
+  if (mAudioOutput) {
+    mAudioOutput->Shutdown();
+    mAudioOutput = nsnull;
+  }
+}
+
+void
+MediaStream::Destroy()
+{
+  class Message : public ControlMessage {
+  public:
+    Message(MediaStream* aStream) : ControlMessage(aStream) {}
+    virtual void UpdateAffectedStream()
+    {
+      mStream->DestroyImpl();
+      mStream->GraphImpl()->RemoveStream(mStream);
+    }
+    virtual void ProcessDuringShutdown()
+    { UpdateAffectedStream(); }
+  };
+  mWrapper = nsnull;
+  GraphImpl()->AppendMessage(new Message(this));
+}
+
+void
+MediaStream::AddAudioOutput(void* aKey)
+{
+  class Message : public ControlMessage {
+  public:
+    Message(MediaStream* aStream, void* aKey) : ControlMessage(aStream), mKey(aKey) {}
+    virtual void UpdateAffectedStream()
+    {
+      mStream->AddAudioOutputImpl(mKey);
+    }
+    void* mKey;
+  };
+  GraphImpl()->AppendMessage(new Message(this, aKey));
+}
+
+void
+MediaStream::SetAudioOutputVolumeImpl(void* aKey, float aVolume)
+{
+  for (PRUint32 i = 0; i < mAudioOutputs.Length(); ++i) {
+    if (mAudioOutputs[i].mKey == aKey) {
+      mAudioOutputs[i].mVolume = aVolume;
+      return;
+    }
+  }
+  NS_ERROR("Audio output key not found");
+}
+
+void
+MediaStream::SetAudioOutputVolume(void* aKey, float aVolume)
+{
+  class Message : public ControlMessage {
+  public:
+    Message(MediaStream* aStream, void* aKey, float aVolume) :
+      ControlMessage(aStream), mKey(aKey), mVolume(aVolume) {}
+    virtual void UpdateAffectedStream()
+    {
+      mStream->SetAudioOutputVolumeImpl(mKey, mVolume);
+    }
+    void* mKey;
+    float mVolume;
+  };
+  GraphImpl()->AppendMessage(new Message(this, aKey, aVolume));
+}
+
+void
+MediaStream::RemoveAudioOutputImpl(void* aKey)
+{
+  for (PRUint32 i = 0; i < mAudioOutputs.Length(); ++i) {
+    if (mAudioOutputs[i].mKey == aKey) {
+      mAudioOutputs.RemoveElementAt(i);
+      return;
+    }
+  }
+  NS_ERROR("Audio output key not found");
+}
+
+void
+MediaStream::RemoveAudioOutput(void* aKey)
+{
+  class Message : public ControlMessage {
+  public:
+    Message(MediaStream* aStream, void* aKey) :
+      ControlMessage(aStream), mKey(aKey) {}
+    virtual void UpdateAffectedStream()
+    {
+      mStream->RemoveAudioOutputImpl(mKey);
+    }
+    void* mKey;
+  };
+  GraphImpl()->AppendMessage(new Message(this, aKey));
+}
+
+void
+MediaStream::AddVideoOutput(VideoFrameContainer* aContainer)
+{
+  class Message : public ControlMessage {
+  public:
+    Message(MediaStream* aStream, VideoFrameContainer* aContainer) :
+      ControlMessage(aStream), mContainer(aContainer) {}
+    virtual void UpdateAffectedStream()
+    {
+      mStream->AddVideoOutputImpl(mContainer.forget());
+    }
+    nsRefPtr<VideoFrameContainer> mContainer;
+  };
+  GraphImpl()->AppendMessage(new Message(this, aContainer));
+}
+
+void
+MediaStream::RemoveVideoOutput(VideoFrameContainer* aContainer)
+{
+  class Message : public ControlMessage {
+  public:
+    Message(MediaStream* aStream, VideoFrameContainer* aContainer) :
+      ControlMessage(aStream), mContainer(aContainer) {}
+    virtual void UpdateAffectedStream()
+    {
+      mStream->RemoveVideoOutputImpl(mContainer);
+    }
+    nsRefPtr<VideoFrameContainer> mContainer;
+  };
+  GraphImpl()->AppendMessage(new Message(this, aContainer));
+}
+
+void
+MediaStream::ChangeExplicitBlockerCount(PRInt32 aDelta)
+{
+  class Message : public ControlMessage {
+  public:
+    Message(MediaStream* aStream, PRInt32 aDelta) :
+      ControlMessage(aStream), mDelta(aDelta) {}
+    virtual void UpdateAffectedStream()
+    {
+      mStream->ChangeExplicitBlockerCountImpl(
+          mStream->GraphImpl()->mLastActionTime, mDelta);
+    }
+    PRInt32 mDelta;
+  };
+  GraphImpl()->AppendMessage(new Message(this, aDelta));
+}
+
+void
+MediaStream::AddListener(MediaStreamListener* aListener)
+{
+  class Message : public ControlMessage {
+  public:
+    Message(MediaStream* aStream, MediaStreamListener* aListener) :
+      ControlMessage(aStream), mListener(aListener) {}
+    virtual void UpdateAffectedStream()
+    {
+      mStream->AddListenerImpl(mListener.forget());
+    }
+    nsRefPtr<MediaStreamListener> mListener;
+  };
+  GraphImpl()->AppendMessage(new Message(this, aListener));
+}
+
+void
+MediaStream::RemoveListener(MediaStreamListener* aListener)
+{
+  class Message : public ControlMessage {
+  public:
+    Message(MediaStream* aStream, MediaStreamListener* aListener) :
+      ControlMessage(aStream), mListener(aListener) {}
+    virtual void UpdateAffectedStream()
+    {
+      mStream->RemoveListenerImpl(mListener);
+    }
+    nsRefPtr<MediaStreamListener> mListener;
+  };
+  GraphImpl()->AppendMessage(new Message(this, aListener));
+}
+
+void
+SourceMediaStream::AddTrack(TrackID aID, TrackRate aRate, TrackTicks aStart,
+                            MediaSegment* aSegment)
+{
+  {
+    MutexAutoLock lock(mMutex);
+    TrackData* data = mUpdateTracks.AppendElement();
+    data->mID = aID;
+    data->mRate = aRate;
+    data->mStart = aStart;
+    data->mCommands = TRACK_CREATE;
+    data->mData = aSegment;
+    data->mHaveEnough = false;
+  }
+  GraphImpl()->EnsureNextIteration();
+}
+
+void
+SourceMediaStream::AppendToTrack(TrackID aID, MediaSegment* aSegment)
+{
+  {
+    MutexAutoLock lock(mMutex);
+    FindDataForTrack(aID)->mData->AppendFrom(aSegment);
+  }
+  GraphImpl()->EnsureNextIteration();
+}
+
+bool
+SourceMediaStream::HaveEnoughBuffered(TrackID aID)
+{
+  MutexAutoLock lock(mMutex);
+  return FindDataForTrack(aID)->mHaveEnough;
+}
+
+void
+SourceMediaStream::DispatchWhenNotEnoughBuffered(TrackID aID,
+    nsIThread* aSignalThread, nsIRunnable* aSignalRunnable)
+{
+  MutexAutoLock lock(mMutex);
+  TrackData* data = FindDataForTrack(aID);
+  if (data->mHaveEnough) {
+    data->mDispatchWhenNotEnough.AppendElement()->Init(aSignalThread, aSignalRunnable);
+  } else {
+    aSignalThread->Dispatch(aSignalRunnable, 0);
+  }
+}
+
+void
+SourceMediaStream::EndTrack(TrackID aID)
+{
+  {
+    MutexAutoLock lock(mMutex);
+    FindDataForTrack(aID)->mCommands |= TRACK_END;
+  }
+  GraphImpl()->EnsureNextIteration();
+}
+
+void
+SourceMediaStream::AdvanceKnownTracksTime(StreamTime aKnownTime)
+{
+  {
+    MutexAutoLock lock(mMutex);
+    mUpdateKnownTracksTime = aKnownTime;
+  }
+  GraphImpl()->EnsureNextIteration();
+}
+
+void
+SourceMediaStream::Finish()
+{
+  {
+    MutexAutoLock lock(mMutex);
+    mUpdateFinished = true;
+  }
+  GraphImpl()->EnsureNextIteration();
+}
+
+static const PRUint32 kThreadLimit = 4;
+static const PRUint32 kIdleThreadLimit = 4;
+static const PRUint32 kIdleThreadTimeoutMs = 2000;
+
+MediaStreamGraphImpl::MediaStreamGraphImpl()
+  : mLastActionTime(1)
+  , mCurrentTime(1)
+  , mBlockingDecisionsMadeUntilTime(1)
+  , mProcessingGraphUpdateIndex(0)
+  , mMonitor("MediaStreamGraphImpl")
+  , mLifecycleState(LIFECYCLE_THREAD_NOT_STARTED)
+  , mWaitState(WAITSTATE_RUNNING)
+  , mNeedAnotherIteration(false)
+  , mForceShutDown(false)
+  , mPostedRunInStableStateEvent(false)
+  , mDetectedNotRunning(false)
+  , mPostedRunInStableState(false)
+{
+#ifdef PR_LOGGING
+  if (!gMediaStreamGraphLog) {
+    gMediaStreamGraphLog = PR_NewLogModule("MediaStreamGraph");
+  }
+#endif
+
+  mCurrentTimeStamp = mInitialTimeStamp = TimeStamp::Now();
+}
+
+NS_IMPL_ISUPPORTS1(MediaStreamGraphShutdownObserver, nsIObserver)
+
+static bool gShutdownObserverRegistered = false;
+
+NS_IMETHODIMP
+MediaStreamGraphShutdownObserver::Observe(nsISupports *aSubject,
+                                          const char *aTopic,
+                                          const PRUnichar *aData)
+{
+  if (strcmp(aTopic, NS_XPCOM_SHUTDOWN_OBSERVER_ID) == 0) {
+    if (gGraph) {
+      gGraph->ForceShutDown();
+    }
+    nsContentUtils::UnregisterShutdownObserver(this);
+    gShutdownObserverRegistered = false;
+  }
+  return NS_OK;
+}
+
+MediaStreamGraph*
+MediaStreamGraph::GetInstance()
+{
+  NS_ASSERTION(NS_IsMainThread(), "Main thread only");
+
+  if (!gGraph) {
+    if (!gShutdownObserverRegistered) {
+      gShutdownObserverRegistered = true;
+      nsContentUtils::RegisterShutdownObserver(new MediaStreamGraphShutdownObserver());
+    }
+
+    gGraph = new MediaStreamGraphImpl();
+    LOG(PR_LOG_DEBUG, ("Starting up MediaStreamGraph %p", gGraph));
+  }
+
+  return gGraph;
+}
+
+SourceMediaStream*
+MediaStreamGraph::CreateInputStream(nsDOMMediaStream* aWrapper)
+{
+  SourceMediaStream* stream = new SourceMediaStream(aWrapper);
+  NS_ADDREF(stream);
+  static_cast<MediaStreamGraphImpl*>(this)->AppendMessage(new CreateMessage(stream));
+  return stream;
+}
+
+}
new file mode 100644
--- /dev/null
+++ b/content/media/MediaStreamGraph.h
@@ -0,0 +1,531 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef MOZILLA_MEDIASTREAMGRAPH_H_
+#define MOZILLA_MEDIASTREAMGRAPH_H_
+
+#include "mozilla/Mutex.h"
+#include "nsAudioStream.h"
+#include "nsTArray.h"
+#include "nsIRunnable.h"
+#include "nsISupportsImpl.h"
+#include "StreamBuffer.h"
+#include "TimeVarying.h"
+#include "VideoFrameContainer.h"
+#include "VideoSegment.h"
+
+class nsDOMMediaStream;
+
+namespace mozilla {
+
+/**
+ * Microseconds relative to the start of the graph timeline.
+ */
+typedef PRInt64 GraphTime;
+const GraphTime GRAPH_TIME_MAX = MEDIA_TIME_MAX;
+
+/*
+ * MediaStreamGraph is a framework for synchronized audio/video processing
+ * and playback. It is designed to be used by other browser components such as
+ * HTML media elements, media capture APIs, real-time media streaming APIs,
+ * multitrack media APIs, and advanced audio APIs.
+ *
+ * The MediaStreamGraph uses a dedicated thread to process media --- the media
+ * graph thread. This ensures that we can process media through the graph
+ * without blocking on main-thread activity. The media graph is only modified
+ * on the media graph thread, to ensure graph changes can be processed without
+ * interfering with media processing. All interaction with the media graph
+ * thread is done with message passing.
+ *
+ * APIs that modify the graph or its properties are described as "control APIs".
+ * These APIs are asynchronous; they queue graph changes internally and
+ * those changes are processed all-at-once by the MediaStreamGraph. The
+ * MediaStreamGraph monitors the main thread event loop via nsIAppShell::RunInStableState
+ * to ensure that graph changes from a single event loop task are always
+ * processed all together. Control APIs should only be used on the main thread,
+ * currently; we may be able to relax that later.
+ *
+ * To allow precise synchronization of times in the control API, the
+ * MediaStreamGraph maintains a "media timeline". Control APIs that take or
+ * return times use that timeline. Those times never advance during
+ * an event loop task. This time is returned by MediaStreamGraph::GetCurrentTime().
+ *
+ * Media decoding, audio processing and media playback use thread-safe APIs to
+ * the media graph to ensure they can continue while the main thread is blocked.
+ *
+ * When the graph is changed, we may need to throw out buffered data and
+ * reprocess it. This is triggered automatically by the MediaStreamGraph.
+ *
+ * Streams that use different sampling rates complicate things a lot. We
+ * considered forcing all streams to have the same audio sample rate, resampling
+ * at inputs and outputs only, but that would create situations where a stream
+ * is resampled from X to Y and then back to X unnecessarily. It seems easier
+ * to just live with streams having different sample rates. We do require that
+ * the sample rate for a stream be constant for the life of a stream.
+ *
+ * XXX does not yet support blockInput/blockOutput functionality.
+ */
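+
+/*
+ * A rough sketch of the message-passing pattern the implementation uses
+ * (ControlMessage, AppendMessage and RunInStableState are real names from
+ * MediaStreamGraph.cpp; the surrounding code here is illustrative only):
+ *
+ *   // Main thread: a control API queues a ControlMessage subclass.
+ *   class SomeMessage : public ControlMessage {
+ *     virtual void UpdateAffectedStream() { ... }  // runs on the graph thread
+ *   };
+ *   graph->AppendMessage(new SomeMessage(stream));
+ *
+ *   // RunInStableState() later flushes all messages queued during the current
+ *   // event loop task to the graph thread, where they are applied together.
+ */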
+
+class MediaStreamGraph;
+
+/**
+ * This is a base class for listener callbacks. Override methods to be
+ * notified of audio or video data or changes in stream state.
+ *
+ * This can be used by stream recorders or network connections that receive
+ * stream input. It could also be used for debugging.
+ *
+ * All notification methods are called from the media graph thread. Overriders
+ * of these methods are responsible for all synchronization. Beware!
+ * These methods are called without the media graph monitor held, so
+ * reentry into media graph methods is possible, although very much discouraged!
+ * You should do something non-blocking and non-reentrant (e.g. dispatch an
+ * event to some thread) and return.
+ */
+class MediaStreamListener {
+public:
+  virtual ~MediaStreamListener() {}
+
+  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(MediaStreamListener)
+
+  enum Blocking {
+    BLOCKED,
+    UNBLOCKED
+  };
+  /**
+   * Notify that the blocking status of the stream changed.
+   */
+  virtual void NotifyBlockingChanged(MediaStreamGraph* aGraph, Blocking aBlocked) {}
+
+  /**
+   * Notify that the stream output is advancing.
+   */
+  virtual void NotifyOutput(MediaStreamGraph* aGraph) {}
+
+  /**
+   * Notify that the stream finished.
+   */
+  virtual void NotifyFinished(MediaStreamGraph* aGraph) {}
+
+  enum {
+    TRACK_EVENT_CREATED = 0x01,
+    TRACK_EVENT_ENDED = 0x02
+  };
+  /**
+   * Notify that changes to one of the stream tracks have been queued.
+   * aTrackEvents can be any combination of TRACK_EVENT_CREATED and
+   * TRACK_EVENT_ENDED. aQueuedMedia is the data being added to the track
+   * at aTrackOffset (relative to the start of the stream).
+   */
+  virtual void NotifyQueuedTrackChanges(MediaStreamGraph* aGraph, TrackID aID,
+                                        TrackRate aTrackRate,
+                                        TrackTicks aTrackOffset,
+                                        PRUint32 aTrackEvents,
+                                        const MediaSegment& aQueuedMedia) {}
+};
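+
+/*
+ * A minimal listener sketch (illustrative; FinishNotifier is not a class
+ * defined anywhere in this patch): rather than doing work on the media graph
+ * thread, it just forwards NotifyFinished to the main thread, as recommended
+ * above. The runnable it holds is whatever the caller wants run when the
+ * stream finishes.
+ *
+ *   class FinishNotifier : public MediaStreamListener {
+ *   public:
+ *     FinishNotifier(nsIRunnable* aRunnable) : mRunnable(aRunnable) {}
+ *     virtual void NotifyFinished(MediaStreamGraph* aGraph)
+ *     {
+ *       // Non-blocking, non-reentrant: hand the work to the main thread.
+ *       NS_DispatchToMainThread(mRunnable);
+ *     }
+ *   private:
+ *     nsCOMPtr<nsIRunnable> mRunnable;
+ *   };
+ */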
+
+class MediaStreamGraphImpl;
+class SourceMediaStream;
+
+/**
+ * A stream of synchronized audio and video data. All (not blocked) streams
+ * progress at the same rate --- "real time". Streams cannot seek. The only
+ * operation readers can perform on a stream is to read the next data.
+ *
+ * Consumers of a stream can be reading from it at different offsets, but that
+ * should only happen due to the order in which consumers are being run.
+ * Those offsets must not diverge in the long term, otherwise we would require
+ * unbounded buffering.
+ *
+ * Streams can be in a "blocked" state. While blocked, a stream does not
+ * produce data. A stream can be explicitly blocked via the control API,
+ * or implicitly blocked by whatever's generating it (e.g. an underrun in the
+ * source resource), or implicitly blocked because something consuming it
+ * blocks, or implicitly because it has finished.
+ *
+ * A stream can be in a "finished" state. "Finished" streams are permanently
+ * blocked.
+ *
+ * Transitions into and out of the "blocked" and "finished" states are managed
+ * by the MediaStreamGraph on the media graph thread.
+ *
+ * We buffer media data ahead of the consumers' reading offsets. It is possible
+ * to have buffered data but still be blocked.
+ *
+ * Any stream can have its audio and video playing when requested. The media
+ * stream graph plays audio by constructing audio output streams as necessary.
+ * Video is played by setting video frames into a VideoFrameContainer at the right
+ * time. To ensure video plays in sync with audio, make sure that the same
+ * stream is playing both the audio and video.
+ *
+ * The data in a stream is managed by StreamBuffer. It consists of a set of
+ * tracks of various types that can start and end over time.
+ *
+ * Streams are explicitly managed. The client creates them via
+ * MediaStreamGraph::CreateInput/ProcessedMediaStream, and releases them by calling
+ * Destroy() when no longer needed (actual destruction will be deferred).
+ * The actual object is owned by the MediaStreamGraph. The basic idea is that
+ * main thread objects will keep Streams alive as long as necessary (using the
+ * cycle collector to clean up whenever needed).
+ *
+ * We make them refcounted only so that stream-related messages with MediaStream*
+ * pointers can be sent to the main thread safely.
+ */
+class MediaStream {
+public:
+  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(MediaStream)
+
+  MediaStream(nsDOMMediaStream* aWrapper)
+    : mBufferStartTime(0)
+    , mExplicitBlockerCount(0)
+    , mBlocked(false)
+    , mGraphUpdateIndices(0)
+    , mFinished(false)
+    , mNotifiedFinished(false)
+    , mAudioPlaybackStartTime(0)
+    , mBlockedAudioTime(0)
+    , mMessageAffectedTime(0)
+    , mWrapper(aWrapper)
+    , mMainThreadCurrentTime(0)
+    , mMainThreadFinished(false)
+  {
+    for (PRUint32 i = 0; i < ArrayLength(mFirstActiveTracks); ++i) {
+      mFirstActiveTracks[i] = TRACK_NONE;
+    }
+  }
+  virtual ~MediaStream() {}
+
+  /**
+   * Returns the graph that owns this stream.
+   */
+  MediaStreamGraphImpl* GraphImpl();
+
+  // Control API.
+  // Since a stream can be played multiple ways, we need to combine independent
+  // volume settings. The aKey parameter is used to keep volume settings
+  // separate. Since the stream is always playing the same contents, only
+  // a single audio output stream is used; the volumes are combined.
+  // Currently only the first enabled audio track is played.
+  // XXX change this so all enabled audio tracks are mixed and played.
+  void AddAudioOutput(void* aKey);
+  void SetAudioOutputVolume(void* aKey, float aVolume);
+  void RemoveAudioOutput(void* aKey);
+  // Since a stream can be played multiple ways, we need to be able to
+  // play to multiple VideoFrameContainers.
+  // Only the first enabled video track is played.
+  void AddVideoOutput(VideoFrameContainer* aContainer);
+  void RemoveVideoOutput(VideoFrameContainer* aContainer);
+  // Explicitly block. Useful for example if a media element is pausing
+  // and we need to stop its stream emitting its buffered data.
+  void ChangeExplicitBlockerCount(PRInt32 aDelta);
+  // Events will be dispatched by calling methods of aListener.
+  void AddListener(MediaStreamListener* aListener);
+  void RemoveListener(MediaStreamListener* aListener);
+  // Signal that the client is done with this MediaStream. It will be deleted later.
+  void Destroy();
+  // Returns the main-thread's view of how much data has been processed by
+  // this stream.
+  StreamTime GetCurrentTime() { return mMainThreadCurrentTime; }
+  // Return the main thread's view of whether this stream has finished.
+  bool IsFinished() { return mMainThreadFinished; }
+
+  friend class MediaStreamGraphImpl;
+
+  virtual SourceMediaStream* AsSourceStream() { return nsnull; }
+
+  // media graph thread only
+  void Init();
+  // These Impl methods perform the core functionality of the control methods
+  // above, on the media graph thread.
+  /**
+   * Stop all stream activity and disconnect it from all inputs and outputs.
+   * This must be idempotent.
+   */
+  virtual void DestroyImpl();
+  StreamTime GetBufferEnd() { return mBuffer.GetEnd(); }
+  void SetAudioOutputVolumeImpl(void* aKey, float aVolume);
+  void AddAudioOutputImpl(void* aKey)
+  {
+    mAudioOutputs.AppendElement(AudioOutput(aKey));
+  }
+  void RemoveAudioOutputImpl(void* aKey);
+  void AddVideoOutputImpl(already_AddRefed<VideoFrameContainer> aContainer)
+  {
+    *mVideoOutputs.AppendElement() = aContainer;
+  }
+  void RemoveVideoOutputImpl(VideoFrameContainer* aContainer)
+  {
+    mVideoOutputs.RemoveElement(aContainer);
+  }
+  void ChangeExplicitBlockerCountImpl(StreamTime aTime, PRInt32 aDelta)
+  {
+    mExplicitBlockerCount.SetAtAndAfter(aTime, mExplicitBlockerCount.GetAt(aTime) + aDelta);
+  }
+  void AddListenerImpl(already_AddRefed<MediaStreamListener> aListener)
+  {
+    *mListeners.AppendElement() = aListener;
+  }
+  void RemoveListenerImpl(MediaStreamListener* aListener)
+  {
+    mListeners.RemoveElement(aListener);
+  }
+
+#ifdef DEBUG
+  const StreamBuffer& GetStreamBuffer() { return mBuffer; }
+#endif
+
+protected:
+  virtual void AdvanceTimeVaryingValuesToCurrentTime(GraphTime aCurrentTime, GraphTime aBlockedTime)
+  {
+    mBufferStartTime += aBlockedTime;
+    mGraphUpdateIndices.InsertTimeAtStart(aBlockedTime);
+    mGraphUpdateIndices.AdvanceCurrentTime(aCurrentTime);
+    mExplicitBlockerCount.AdvanceCurrentTime(aCurrentTime);
+
+    mBuffer.ForgetUpTo(aCurrentTime - mBufferStartTime);
+  }
+
+  // This state is all initialized on the main thread but
+  // otherwise modified only on the media graph thread.
+
+  // Buffered data. The start of the buffer corresponds to mBufferStartTime.
+  // Conceptually the buffer contains everything this stream has ever played,
+  // but we forget some prefix of the buffered data to bound the space usage.
+  StreamBuffer mBuffer;
+  // The time when the buffered data could be considered to have started playing.
+  // This increases over time to account for time the stream was blocked before
+  // mCurrentTime.
+  GraphTime mBufferStartTime;
+
+  // Client-set volume of this stream
+  struct AudioOutput {
+    AudioOutput(void* aKey) : mKey(aKey), mVolume(1.0f) {}
+    void* mKey;
+    float mVolume;
+  };
+  nsTArray<AudioOutput> mAudioOutputs;
+  nsTArray<nsRefPtr<VideoFrameContainer> > mVideoOutputs;
+  // We record the last played video frame to avoid redundant setting
+  // of the current video frame.
+  VideoFrame mLastPlayedVideoFrame;
+  // The number of times this stream has been explicitly blocked by the control
+  // API, minus the number of times it has been explicitly unblocked.
+  TimeVarying<GraphTime,PRUint32> mExplicitBlockerCount;
+  nsTArray<nsRefPtr<MediaStreamListener> > mListeners;
+
+  // Precomputed blocking status (over GraphTime).
+  // This is only valid between the graph's mCurrentTime and
+  // mBlockingDecisionsMadeUntilTime. The stream is considered to have
+  // not been blocked before mCurrentTime (its mBufferStartTime is increased
+  // as necessary to account for that time instead) --- this avoids us having to
+  // record the entire history of the stream's blocking-ness in mBlocked.
+  TimeVarying<GraphTime,bool> mBlocked;
+  // Maps graph time to the graph update that affected this stream at that time
+  TimeVarying<GraphTime,PRInt64> mGraphUpdateIndices;
+
+  /**
+   * When true, this means the stream will be finished once all
+   * buffered data has been consumed.
+   */
+  bool mFinished;
+  /**
+   * When true, mFinished is true and we've played all the data in this stream
+   * and fired NotifyFinished notifications.
+   */
+  bool mNotifiedFinished;
+
+  // Where audio output is going
+  nsRefPtr<nsAudioStream> mAudioOutput;
+  // When we started audio playback for this stream.
+  // Add mAudioOutput->GetPosition() to find the current audio playback position.
+  GraphTime mAudioPlaybackStartTime;
+  // Amount of time that we've wanted to play silence because of the stream
+  // blocking.
+  MediaTime mBlockedAudioTime;
+
+  // For each track type, this is the first active track found for that type.
+  // The first active track is the track that started earliest; if multiple
+  // tracks start at the same time, the one with the lowest ID.
+  TrackID mFirstActiveTracks[MediaSegment::TYPE_COUNT];
+
+  // Temporary data used by MediaStreamGraph on the graph thread
+  // The earliest time for which we would like to change this stream's output.
+  GraphTime mMessageAffectedTime;
+
+  // This state is only used on the main thread.
+  nsDOMMediaStream* mWrapper;
+  // Main-thread views of state
+  StreamTime mMainThreadCurrentTime;
+  bool mMainThreadFinished;
+};
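+
+/*
+ * A sketch of typical main-thread use of the control API above (illustrative;
+ * any object pointer works as the volume key): play audio, adjust the volume,
+ * pause by blocking, and eventually release the stream.
+ *
+ *   stream->AddAudioOutput(this);              // 'this' is just a volume key
+ *   stream->SetAudioOutputVolume(this, 0.5f);
+ *   stream->ChangeExplicitBlockerCount(1);     // pause
+ *   stream->ChangeExplicitBlockerCount(-1);    // resume
+ *   stream->RemoveAudioOutput(this);
+ *   stream->Destroy();                         // actual deletion is deferred
+ */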
+
+/**
+ * This is a stream into which a decoder can write audio and video.
+ *
+ * Audio and video can be written on any thread, but you probably want to
+ * always write from the same thread to avoid unexpected interleavings.
+ */
+class SourceMediaStream : public MediaStream {
+public:
+  SourceMediaStream(nsDOMMediaStream* aWrapper) :
+    MediaStream(aWrapper), mMutex("mozilla::media::SourceMediaStream"),
+    mUpdateKnownTracksTime(0), mUpdateFinished(false)
+  {}
+
+  virtual SourceMediaStream* AsSourceStream() { return this; }
+
+  // Call these on any thread.
+  /**
+   * Add a new track to the stream starting at the given base time (which
+   * must be greater than or equal to the last time passed to
+   * AdvanceKnownTracksTime). Takes ownership of aSegment. aSegment should
+   * contain data starting after aStart.
+   */
+  void AddTrack(TrackID aID, TrackRate aRate, TrackTicks aStart,
+                MediaSegment* aSegment);
+  /**
+   * Append media data to a track. Ownership of aSegment remains with the caller,
+   * but aSegment is emptied.
+   */
+  void AppendToTrack(TrackID aID, MediaSegment* aSegment);
+  /**
+   * Returns true if the buffer currently has enough data.
+   */
+  bool HaveEnoughBuffered(TrackID aID);
+  /**
+   * Ensures that aSignalRunnable will be dispatched to aSignalThread
+   * when we don't have enough buffered data in the track (which could be
+   * immediately).
+   */
+  void DispatchWhenNotEnoughBuffered(TrackID aID,
+      nsIThread* aSignalThread, nsIRunnable* aSignalRunnable);
+  /**
+   * Indicate that a track has ended. Do not do any more API calls
+   * affecting this track.
+   */
+  void EndTrack(TrackID aID);
+  /**
+   * Indicate that no tracks will be added starting before time aKnownTime.
+   * aKnownTime must be >= its value at the last call to AdvanceKnownTracksTime.
+   */
+  void AdvanceKnownTracksTime(StreamTime aKnownTime);
+  /**
+   * Indicate that this stream should enter the "finished" state. All tracks
+   * must have been ended via EndTrack. The finish time of the stream is
+   * when all tracks have ended and the latest time sent to
+   * AdvanceKnownTracksTime() has been reached.
+   */
+  void Finish();
+
+  // XXX need a Reset API
+
+  friend class MediaStreamGraph;
+  friend class MediaStreamGraphImpl;
+
+  struct ThreadAndRunnable {
+    void Init(nsIThread* aThread, nsIRunnable* aRunnable)
+    {
+      mThread = aThread;
+      mRunnable = aRunnable;
+    }
+
+    nsCOMPtr<nsIThread> mThread;
+    nsCOMPtr<nsIRunnable> mRunnable;
+  };
+  enum TrackCommands {
+    TRACK_CREATE = MediaStreamListener::TRACK_EVENT_CREATED,
+    TRACK_END = MediaStreamListener::TRACK_EVENT_ENDED
+  };
+  /**
+   * Data for each track that hasn't ended.
+   */
+  struct TrackData {
+    TrackID mID;
+    TrackRate mRate;
+    TrackTicks mStart;
+    // Each time the track updates are flushed to the media graph thread,
+    // this is cleared.
+    PRUint32 mCommands;
+    // Each time the track updates are flushed to the media graph thread,
+    // the segment buffer is emptied.
+    nsAutoPtr<MediaSegment> mData;
+    nsTArray<ThreadAndRunnable> mDispatchWhenNotEnough;
+    bool mHaveEnough;
+  };
+
+protected:
+  TrackData* FindDataForTrack(TrackID aID)
+  {
+    for (PRUint32 i = 0; i < mUpdateTracks.Length(); ++i) {
+      if (mUpdateTracks[i].mID == aID) {
+        return &mUpdateTracks[i];
+      }
+    }
+    NS_ERROR("Bad track ID!");
+    return nsnull;
+  }
+
+  Mutex mMutex;
+  // protected by mMutex
+  StreamTime mUpdateKnownTracksTime;
+  nsTArray<TrackData> mUpdateTracks;
+  bool mUpdateFinished;
+};
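+
+/*
+ * A sketch of how a decoder might feed a SourceMediaStream (illustrative;
+ * kAudioTrack, rate, segment and chunk are placeholders). These calls may be
+ * made from any thread; the graph picks the data up on its next iteration.
+ *
+ *   stream->AddTrack(kAudioTrack, rate, 0, segment);   // takes ownership
+ *   while (HaveMoreData()) {
+ *     stream->AppendToTrack(kAudioTrack, &chunk);      // chunk is emptied
+ *     if (stream->HaveEnoughBuffered(kAudioTrack)) {
+ *       // Pause decoding; resume when notified via
+ *       // DispatchWhenNotEnoughBuffered().
+ *     }
+ *   }
+ *   stream->EndTrack(kAudioTrack);
+ *   stream->Finish();
+ */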
+
+/**
+ * Initially, at least, we will have a singleton MediaStreamGraph per
+ * process.
+ */
+class MediaStreamGraph {
+public:
+  // Main thread only
+  static MediaStreamGraph* GetInstance();
+  // Control API.
+  /**
+   * Create a stream that a media decoder (or some other source of
+   * media data, such as a camera) can write to.
+   */
+  SourceMediaStream* CreateInputStream(nsDOMMediaStream* aWrapper);
+  /**
+   * Returns the number of graph updates sent. This can be used to track
+   * whether a given update has been processed by the graph thread and reflected
+   * in main-thread stream state.
+   */
+  PRInt64 GetCurrentGraphUpdateIndex() { return mGraphUpdatesSent; }
+
+  /**
+   * Media graph thread only.
+   * Dispatches a runnable that will run on the main thread after the next
+   * update of main-thread stream state.
+   * Should only be called during MediaStreamListener callbacks.
+   */
+  void DispatchToMainThreadAfterStreamStateUpdate(nsIRunnable* aRunnable)
+  {
+    mPendingUpdateRunnables.AppendElement(aRunnable);
+  }
+
+protected:
+  MediaStreamGraph()
+    : mGraphUpdatesSent(1)
+  {
+    MOZ_COUNT_CTOR(MediaStreamGraph);
+  }
+  ~MediaStreamGraph()
+  {
+    MOZ_COUNT_DTOR(MediaStreamGraph);
+  }
+
+  // Media graph thread only
+  nsTArray<nsCOMPtr<nsIRunnable> > mPendingUpdateRunnables;
+
+  // Main thread only
+  // The number of updates we have sent to the media graph thread. We start
+  // this at 1 just to ensure that 0 is usable as a special value.
+  PRInt64 mGraphUpdatesSent;
+};
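+
+/*
+ * Putting it together (a sketch; aWrapper is whatever DOM object wraps the
+ * stream): the graph is a lazily-created, process-wide singleton.
+ *
+ *   MediaStreamGraph* graph = MediaStreamGraph::GetInstance();
+ *   SourceMediaStream* stream = graph->CreateInputStream(aWrapper);
+ *   PRInt64 index = graph->GetCurrentGraphUpdateIndex();
+ *   // Once the graph thread has processed update 'index', the main-thread
+ *   // view of 'stream' (GetCurrentTime, IsFinished) reflects it.
+ */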
+
+}
+
+#endif /* MOZILLA_MEDIASTREAMGRAPH_H_ */
new file mode 100644
--- /dev/null
+++ b/content/media/SharedBuffer.h
@@ -0,0 +1,45 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef MOZILLA_SHAREDBUFFER_H_
+#define MOZILLA_SHAREDBUFFER_H_
+
+#include "mozilla/mozalloc.h"
+
+namespace mozilla {
+
+/**
+ * Heap-allocated chunk of arbitrary data with threadsafe refcounting.
+ * Typically you would allocate one of these, fill it in, and then treat it as
+ * immutable while it's shared.
+ * This only guarantees 4-byte alignment of the data. For alignment we
+ * simply assume that the refcount is at least 4-byte aligned and its size
+ * is divisible by 4.
+ */
+class SharedBuffer {
+public:
+  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(SharedBuffer)
+  ~SharedBuffer() {}
+
+  void* Data() { return this + 1; }
+
+  // Allocates a SharedBuffer with room for aSize bytes of data, which callers
+  // access via Data(). The memory is allocated with moz_xmalloc and released
+  // when the last reference is dropped.
+  static already_AddRefed<SharedBuffer> Create(size_t aSize)
+  {
+    void* m = moz_xmalloc(sizeof(SharedBuffer) + aSize);
+    nsRefPtr<SharedBuffer> p = new (m) SharedBuffer();
+    NS_ASSERTION((reinterpret_cast<char*>(p.get() + 1) - reinterpret_cast<char*>(p.get())) % 4 == 0,
+                 "SharedBuffers should be at least 4-byte aligned");
+    return p.forget();
+  }
+
+private:
+  SharedBuffer() {}
+};
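+
+/*
+ * A usage sketch (the sample count and fill loop are illustrative): allocate,
+ * fill while there is a single owner, then share the buffer read-only across
+ * threads.
+ *
+ *   nsRefPtr<SharedBuffer> buf = SharedBuffer::Create(aFrames*sizeof(float));
+ *   float* samples = static_cast<float*>(buf->Data());
+ *   for (PRUint32 i = 0; i < aFrames; ++i) {
+ *     samples[i] = 0.0f;
+ *   }
+ *   // buf can now be handed to other threads; treat its contents as immutable.
+ */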
+
+}
+
+#endif /* MOZILLA_SHAREDBUFFER_H_ */
new file mode 100644
--- /dev/null
+++ b/content/media/StreamBuffer.cpp
@@ -0,0 +1,60 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "StreamBuffer.h"
+
+namespace mozilla {
+
+StreamTime
+StreamBuffer::GetEnd() const
+{
+  StreamTime t = mTracksKnownTime;
+  for (PRUint32 i = 0; i < mTracks.Length(); ++i) {
+    Track* track = mTracks[i];
+    if (!track->IsEnded()) {
+      t = NS_MIN(t, track->GetEndTimeRoundDown());
+    }
+  }
+  return t;
+}
+
+StreamBuffer::Track*
+StreamBuffer::FindTrack(TrackID aID)
+{
+  if (aID == TRACK_NONE)
+    return nsnull;
+  for (PRUint32 i = 0; i < mTracks.Length(); ++i) {
+    Track* track = mTracks[i];
+    if (track->GetID() == aID) {
+      return track;
+    }
+  }
+  return nsnull;
+}
+
+void
+StreamBuffer::ForgetUpTo(StreamTime aTime)
+{
+  // Round down to a multiple of 50ms so we don't spend too much time pruning segments.
+  const int roundTo = MillisecondsToMediaTime(50);
+  StreamTime forget = (aTime/roundTo)*roundTo;
+  if (forget <= mForgottenTime) {
+    return;
+  }
+  mForgottenTime = forget;
+
+  for (PRUint32 i = 0; i < mTracks.Length(); ++i) {
+    Track* track = mTracks[i];
+    if (track->IsEnded() && track->GetEndTimeRoundDown() <= forget) {
+      mTracks.RemoveElementAt(i);
+      --i;
+      continue;
+    }
+    TrackTicks forgetTo = NS_MIN(track->GetEnd() - 1, track->TimeToTicksRoundDown(forget));
+    track->ForgetUpTo(forgetTo);
+  }
+}
+
+}
new file mode 100644
--- /dev/null
+++ b/content/media/StreamBuffer.h
@@ -0,0 +1,286 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef MOZILLA_STREAMBUFFER_H_
+#define MOZILLA_STREAMBUFFER_H_
+
+#include "mozilla/Util.h"
+#include "MediaSegment.h"
+#include "nsAutoPtr.h"
+
+namespace mozilla {
+
+/**
+ * Media time relative to the start of a StreamBuffer.
+ */
+typedef MediaTime StreamTime;
+const StreamTime STREAM_TIME_MAX = MEDIA_TIME_MAX;
+
+/**
+ * Track rate in Hz. Maximum 1 << MEDIA_TIME_FRAC_BITS Hz. This ensures
+ * calculations below don't overflow.
+ */
+typedef PRInt32 TrackRate;
+const TrackRate TRACK_RATE_MAX = 1 << MEDIA_TIME_FRAC_BITS;
+
+/**
+ * Unique ID for track within a StreamBuffer. Tracks from different
+ * StreamBuffers may have the same ID; this matters when appending StreamBuffers,
+ * since tracks with the same ID are matched. Only IDs greater than 0 are allowed.
+ */
+typedef PRInt32 TrackID;
+const TrackID TRACK_NONE = 0;
+
+inline TrackTicks TimeToTicksRoundUp(TrackRate aRate, StreamTime aMicroseconds)
+{
+  NS_ASSERTION(0 < aRate && aRate <= TRACK_RATE_MAX, "Bad rate");
+  NS_ASSERTION(0 <= aMicroseconds && aMicroseconds <= STREAM_TIME_MAX, "Bad microseconds");
+  return (aMicroseconds*aRate + (1 << MEDIA_TIME_FRAC_BITS) - 1) >> MEDIA_TIME_FRAC_BITS;
+}
+
+inline TrackTicks TimeToTicksRoundDown(TrackRate aRate, StreamTime aMicroseconds)
+{
+  NS_ASSERTION(0 < aRate && aRate <= TRACK_RATE_MAX, "Bad rate");
+  NS_ASSERTION(0 <= aMicroseconds && aMicroseconds <= STREAM_TIME_MAX, "Bad microseconds");
+  return (aMicroseconds*aRate) >> MEDIA_TIME_FRAC_BITS;
+}
+
+inline StreamTime TicksToTimeRoundUp(TrackRate aRate, TrackTicks aTicks)
+{
+  NS_ASSERTION(0 < aRate && aRate <= TRACK_RATE_MAX, "Bad rate");
+  NS_ASSERTION(0 <= aTicks && aTicks <= TRACK_TICKS_MAX, "Bad samples");
+  return ((aTicks << MEDIA_TIME_FRAC_BITS) + aRate - 1)/aRate;
+}
+
+inline StreamTime TicksToTimeRound(TrackRate aRate, TrackTicks aTicks)
+{
+  NS_ASSERTION(0 < aRate && aRate <= TRACK_RATE_MAX, "Bad rate");
+  NS_ASSERTION(0 <= aTicks && aTicks <= TRACK_TICKS_MAX, "Bad samples");
+  return ((aTicks << MEDIA_TIME_FRAC_BITS) + aRate/2)/aRate;
+}
+
+inline StreamTime TicksToTimeRoundDown(TrackRate aRate, TrackTicks aTicks)
+{
+  NS_ASSERTION(0 < aRate && aRate <= TRACK_RATE_MAX, "Bad rate");
+  NS_ASSERTION(0 <= aTicks && aTicks <= TRACK_TICKS_MAX, "Bad samples");
+  return (aTicks << MEDIA_TIME_FRAC_BITS)/aRate;
+}
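+
+// For a fixed rate these conversions are monotonic, and the round-down/round-up
+// pairs bracket the exact value. A sketch of the invariant (not asserted
+// anywhere, purely illustrative):
+//
+//   TrackTicks t = TimeToTicksRoundDown(rate, time);
+//   // then TicksToTimeRoundDown(rate, t) <= time < TicksToTimeRoundUp(rate, t + 1)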
+
+/**
+ * This object contains the decoded data for a stream's tracks.
+ * A StreamBuffer can be appended to. Logically a StreamBuffer only gets longer,
+ * but we also have the ability to "forget" data before a certain time that
+ * we know won't be used again. (Internally we only prune at a coarse
+ * granularity; see ForgetUpTo.)
+ *
+ * StreamBuffers should only be used from one thread at a time.
+ *
+ * A StreamBuffer has a set of tracks that can be of arbitrary types ---
+ * the data for each track is a MediaSegment. The set of tracks can vary
+ * over the timeline of the StreamBuffer.
+ */
+class StreamBuffer {
+public:
+  /**
+   * Every track has a start time --- when it started in the StreamBuffer.
+   * It has an end flag; when false, no end point is known; when true,
+   * the track ends when the data we have for the track runs out.
+   * Tracks have a unique ID assigned at creation. This allows us to identify
+   * the same track across StreamBuffers. A StreamBuffer should never have
+   * two tracks with the same ID (even if they don't overlap in time).
+   * TODO Tracks can also be enabled and disabled over time.
+   * TODO Add TimeVarying<TrackTicks,bool> mEnabled.
+   */
+  class Track {
+  public:
+    Track(TrackID aID, TrackRate aRate, TrackTicks aStart, MediaSegment* aSegment)
+      : mStart(aStart),
+        mSegment(aSegment),
+        mRate(aRate),
+        mID(aID),
+        mEnded(false)
+    {
+      MOZ_COUNT_CTOR(Track);
+
+      NS_ASSERTION(aID > TRACK_NONE, "Bad track ID");
+      NS_ASSERTION(0 < aRate && aRate <= TRACK_RATE_MAX, "Invalid rate");
+      NS_ASSERTION(0 <= aStart && aStart <= aSegment->GetDuration(), "Bad start position");
+    }
+    ~Track()
+    {
+      MOZ_COUNT_DTOR(Track);
+    }
+    template <class T> T* Get() const
+    {
+      if (mSegment->GetType() == T::StaticType()) {
+        return static_cast<T*>(mSegment.get());
+      }
+      return nsnull;
+    }
+    MediaSegment* GetSegment() const { return mSegment; }
+    TrackRate GetRate() const { return mRate; }
+    TrackID GetID() const { return mID; }
+    bool IsEnded() const { return mEnded; }
+    TrackTicks GetStart() const { return mStart; }
+    TrackTicks GetEnd() const { return mSegment->GetDuration(); }
+    StreamTime GetEndTimeRoundDown() const
+    {
+      return mozilla::TicksToTimeRoundDown(mRate, mSegment->GetDuration());
+    }
+    StreamTime GetStartTimeRoundDown() const
+    {
+      return mozilla::TicksToTimeRoundDown(mRate, mStart);
+    }
+    TrackTicks TimeToTicksRoundDown(StreamTime aTime) const
+    {
+      return mozilla::TimeToTicksRoundDown(mRate, aTime);
+    }
+    StreamTime TicksToTimeRoundDown(TrackTicks aTicks) const
+    {
+      return mozilla::TicksToTimeRoundDown(mRate, aTicks);
+    }
+    MediaSegment::Type GetType() const { return mSegment->GetType(); }
+
+    void SetEnded() { mEnded = true; }
+    void AppendFrom(Track* aTrack)
+    {
+      NS_ASSERTION(!mEnded, "Can't append to ended track");
+      NS_ASSERTION(aTrack->mID == mID, "IDs must match");
+      NS_ASSERTION(aTrack->mStart == 0, "Source track must start at zero");
+      NS_ASSERTION(aTrack->mSegment->GetType() == GetType(), "Track types must match");
+      NS_ASSERTION(aTrack->mRate == mRate, "Track rates must match");
+
+      mSegment->AppendFrom(aTrack->mSegment);
+      mEnded = aTrack->mEnded;
+    }
+    MediaSegment* RemoveSegment()
+    {
+      return mSegment.forget();
+    }
+    void ForgetUpTo(TrackTicks aTime)
+    {
+      mSegment->ForgetUpTo(aTime);
+    }
+
+  protected:
+    friend class StreamBuffer;
+
+    // Start offset is in ticks at rate mRate
+    TrackTicks mStart;
+    // The segment data starts at the start of the owning StreamBuffer, i.e.,
+    // there's mStart silence/no video at the beginning.
+    nsAutoPtr<MediaSegment> mSegment;
+    TrackRate mRate; // rate in ticks per second
+    // Unique ID
+    TrackID mID;
+    // True when the track ends with the data in mSegment
+    bool mEnded;
+  };
+
+  class CompareTracksByID {
+  public:
+    bool Equals(Track* aA, Track* aB) const {
+      return aA->GetID() == aB->GetID();
+    }
+    bool LessThan(Track* aA, Track* aB) const {
+      return aA->GetID() < aB->GetID();
+    }
+  };
+
+  StreamBuffer()
+    : mTracksKnownTime(0), mForgottenTime(0)
+  {
+    MOZ_COUNT_CTOR(StreamBuffer);
+  }
+  ~StreamBuffer()
+  {
+    MOZ_COUNT_DTOR(StreamBuffer);
+  }
+
+  /**
+   * Takes ownership of aSegment. Don't do this while iterating, or while
+   * holding a Track reference.
+   * aSegment must have aStart worth of null data.
+   */
+  Track& AddTrack(TrackID aID, TrackRate aRate, TrackTicks aStart, MediaSegment* aSegment)
+  {
+    NS_ASSERTION(TimeToTicksRoundDown(aRate, mTracksKnownTime) <= aStart,
+                 "Start time too early");
+    NS_ASSERTION(!FindTrack(aID), "Track with this ID already exists");
+
+    return **mTracks.InsertElementSorted(new Track(aID, aRate, aStart, aSegment),
+                                         CompareTracksByID());
+  }
+  void AdvanceKnownTracksTime(StreamTime aKnownTime)
+  {
+    NS_ASSERTION(aKnownTime >= mTracksKnownTime, "Can't move tracks-known time earlier");
+    mTracksKnownTime = aKnownTime;
+  }
+
+  /**
+   * The end time for the StreamBuffer is the latest time for which we have
+   * data for all tracks that haven't ended by that time.
+   */
+  StreamTime GetEnd() const;
+
+  Track* FindTrack(TrackID aID);
+
+  class TrackIter {
+  public:
+    /**
+     * Iterate through the tracks of aBuffer in order of ID.
+     */
+    TrackIter(const StreamBuffer& aBuffer) :
+      mBuffer(&aBuffer.mTracks), mIndex(0), mMatchType(false) {}
+    /**
+     * Iterate through the tracks of aBuffer with type aType, in order of ID.
+     */
+    TrackIter(const StreamBuffer& aBuffer, MediaSegment::Type aType) :
+      mBuffer(&aBuffer.mTracks), mIndex(0), mType(aType), mMatchType(true) { FindMatch(); }
+    bool IsEnded() { return mIndex >= mBuffer->Length(); }
+    void Next()
+    {
+      ++mIndex;
+      FindMatch();
+    }
+    Track& operator*() { return *mBuffer->ElementAt(mIndex); }
+    Track* operator->() { return mBuffer->ElementAt(mIndex); }
+  private:
+    void FindMatch()
+    {
+      if (!mMatchType)
+        return;
+      while (mIndex < mBuffer->Length() &&
+             mBuffer->ElementAt(mIndex)->GetType() != mType) {
+        ++mIndex;
+      }
+    }
+
+    const nsTArray<nsAutoPtr<Track> >* mBuffer;
+    PRUint32 mIndex;
+    MediaSegment::Type mType;
+    bool mMatchType;
+  };
+  friend class TrackIter;
+
+  /**
+   * Forget stream data before aTime; they will no longer be needed.
+   * Also can forget entire tracks that have ended at or before aTime.
+   * Can't be used to forget beyond GetEnd().
+   */
+  void ForgetUpTo(StreamTime aTime);
+
+protected:
+  // Any new tracks added will start at or after this time. In other words, the track
+  // list is complete and correct for all times less than this time.
+  StreamTime mTracksKnownTime;
+  StreamTime mForgottenTime;
+  // All known tracks for this StreamBuffer
+  nsTArray<nsAutoPtr<Track> > mTracks;
+};
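+
+/*
+ * A sketch of iterating the video tracks of a buffer with TrackIter
+ * (illustrative; 'buffer' is any StreamBuffer):
+ *
+ *   for (StreamBuffer::TrackIter i(buffer, MediaSegment::VIDEO);
+ *        !i.IsEnded(); i.Next()) {
+ *     VideoSegment* segment = i->Get<VideoSegment>();
+ *     // segment is non-null here because we only visit VIDEO tracks
+ *   }
+ */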
+
+}
+
+#endif /* MOZILLA_STREAMBUFFER_H_ */
+
new file mode 100644
--- /dev/null
+++ b/content/media/TimeVarying.h
@@ -0,0 +1,223 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef MOZILLA_TIMEVARYING_H_
+#define MOZILLA_TIMEVARYING_H_
+
+#include "nsTArray.h"
+
+namespace mozilla {
+
+/**
+ * This is just for CTOR/DTOR tracking. We can't put this in
+ * TimeVarying directly because the different template instances have
+ * different sizes and that confuses things.
+ */
+class TimeVaryingBase {
+protected:
+  TimeVaryingBase()
+  {
+    MOZ_COUNT_CTOR(TimeVaryingBase);
+  }
+  ~TimeVaryingBase()
+  {
+    MOZ_COUNT_DTOR(TimeVaryingBase);
+  }
+};
+
+/**
+ * Objects of this class represent values that can change over time ---
+ * a mathematical function of time.
+ * Time is the type of time values, T is the value that changes over time.
+ * There are a finite set of "change times"; at each change time, the function
+ * instantly changes to a new value.
+ * There is also a "current time" which must always advance (not go backward).
+ * The function is constant for all times less than the current time.
+ * When the current time is advanced, the value of the function at the new
+ * current time replaces the values for all previous times.
+ *
+ * The implementation records a mCurrent (the value at the current time)
+ * and an array of "change times" (greater than the current time) and the
+ * new value for each change time. This is a simple but dumb implementation.
+ */
+template <typename Time, typename T>
+class TimeVarying : public TimeVaryingBase {
+public:
+  TimeVarying(const T& aInitial) : mCurrent(aInitial) {}
+  /**
+   * This constructor can only be called if mCurrent has a no-argument
+   * constructor.
+   */
+  TimeVarying() : mCurrent() {}
+
+  /**
+   * Sets the value for all times >= aTime to aValue.
+   */
+  void SetAtAndAfter(Time aTime, const T& aValue)
+  {
+    for (PRInt32 i = mChanges.Length() - 1; i >= 0; --i) {
+      NS_ASSERTION(i == PRInt32(mChanges.Length() - 1),
+                   "Always considering last element of array");
+      if (aTime > mChanges[i].mTime) {
+        if (mChanges[i].mValue != aValue) {
+          mChanges.AppendElement(Entry(aTime, aValue));
+        }
+        return;
+      }
+      if (aTime == mChanges[i].mTime) {
+        if ((i > 0 ? mChanges[i - 1].mValue : mCurrent) == aValue) {
+          mChanges.RemoveElementAt(i);
+          return;
+        }
+        mChanges[i].mValue = aValue;
+        return;
+      }
+      mChanges.RemoveElementAt(i);
+    }
+    mChanges.InsertElementAt(0, Entry(aTime, aValue));
+  }
+  /**
+   * Returns the final value of the function. If aTime is non-null,
+   * sets *aTime to the time at which the function changes to that final value,
+   * or to PR_INT64_MIN if there are no changes after the current time.
+   */
+  const T& GetLast(Time* aTime = nsnull) const
+  {
+    if (mChanges.IsEmpty()) {
+      if (aTime) {
+        *aTime = PR_INT64_MIN;
+      }
+      return mCurrent;
+    }
+    if (aTime) {
+      *aTime = mChanges[mChanges.Length() - 1].mTime;
+    }
+    return mChanges[mChanges.Length() - 1].mValue;
+  }
+  /**
+   * Returns the value of the function just before time aTime.
+   */
+  const T& GetBefore(Time aTime) const
+  {
+    if (mChanges.IsEmpty() || aTime <= mChanges[0].mTime) {
+      return mCurrent;
+    }
+    PRInt32 changesLength = mChanges.Length();
+    if (mChanges[changesLength - 1].mTime < aTime) {
+      return mChanges[changesLength - 1].mValue;
+    }
+    for (PRUint32 i = 1; ; ++i) {
+      if (aTime <= mChanges[i].mTime) {
+        NS_ASSERTION(mChanges[i].mValue != mChanges[i - 1].mValue,
+                     "Only changed values appear in array");
+        return mChanges[i - 1].mValue;
+      }
+    }
+  }
+  /**
+   * Returns the value of the function at time aTime.
+   * If aEnd is non-null, sets *aEnd to the time at which the function will
+   * change from the returned value to a new value, or PR_INT64_MAX if that
+   * never happens.
+   * If aStart is non-null, sets *aStart to the time at which the function
+   * changed to the returned value, or PR_INT64_MIN if that happened at or
+   * before the current time.
+   *
+   * Currently uses a linear search, but could use a binary search.
+   */
+  const T& GetAt(Time aTime, Time* aEnd = nsnull, Time* aStart = nsnull) const
+  {
+    if (mChanges.IsEmpty() || aTime < mChanges[0].mTime) {
+      if (aStart) {
+        *aStart = PR_INT64_MIN;
+      }
+      if (aEnd) {
+        *aEnd = mChanges.IsEmpty() ? PR_INT64_MAX : mChanges[0].mTime;
+      }
+      return mCurrent;
+    }
+    PRInt32 changesLength = mChanges.Length();
+    if (mChanges[changesLength - 1].mTime <= aTime) {
+      if (aEnd) {
+        *aEnd = PR_INT64_MAX;
+      }
+      if (aStart) {
+        *aStart = mChanges[changesLength - 1].mTime;
+      }
+      return mChanges[changesLength - 1].mValue;
+    }
+
+    for (PRUint32 i = 1; ; ++i) {
+      if (aTime < mChanges[i].mTime) {
+        if (aEnd) {
+          *aEnd = mChanges[i].mTime;
+        }
+        if (aStart) {
+          *aStart = mChanges[i - 1].mTime;
+        }
+        NS_ASSERTION(mChanges[i].mValue != mChanges[i - 1].mValue,
+                     "Only changed values appear in array");
+        return mChanges[i - 1].mValue;
+      }
+    }
+  }
+  /**
+   * Advance the current time to aTime.
+   */
+  void AdvanceCurrentTime(Time aTime)
+  {
+    for (PRUint32 i = 0; i < mChanges.Length(); ++i) {
+      if (aTime < mChanges[i].mTime) {
+        mChanges.RemoveElementsAt(0, i);
+        return;
+      }
+      mCurrent = mChanges[i].mValue;
+    }
+    mChanges.Clear();
+  }
+  /**
+   * Make all currently pending changes happen aDelta later than their
+   * current change times.
+   */
+  void InsertTimeAtStart(Time aDelta)
+  {
+    for (PRUint32 i = 0; i < mChanges.Length(); ++i) {
+      mChanges[i].mTime += aDelta;
+    }
+  }
+
+  /**
+   * Replace the values of this function at aTimeOffset and later with the
+   * values of aOther taken from zero, so if aOther is V at time T >= 0
+   * then this function will be V at time T + aTimeOffset. aOther's current
+   * time must be >= 0.
+   */
+  void Append(const TimeVarying& aOther, Time aTimeOffset)
+  {
+    NS_ASSERTION(aOther.mChanges.IsEmpty() || aOther.mChanges[0].mTime >= 0,
+                 "Negative time not allowed here");
+    NS_ASSERTION(&aOther != this, "Can't self-append");
+    SetAtAndAfter(aTimeOffset, aOther.mCurrent);
+    for (PRUint32 i = 0; i < aOther.mChanges.Length(); ++i) {
+      const Entry& e = aOther.mChanges[i];
+      SetAtAndAfter(aTimeOffset + e.mTime, e.mValue);
+    }
+  }
+
+private:
+  struct Entry {
+    Entry(Time aTime, const T& aValue) : mTime(aTime), mValue(aValue) {}
+
+    // The time at which the value changes to mValue
+    Time mTime;
+    T mValue;
+  };
+  nsTArray<Entry> mChanges;
+  T mCurrent;
+};
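+
+/*
+ * A small worked example (illustrative): a boolean that flips to true at t=10
+ * and back to false at t=20.
+ *
+ *   TimeVarying<PRInt64,bool> v(false);
+ *   v.SetAtAndAfter(10, true);
+ *   v.SetAtAndAfter(20, false);
+ *   v.GetAt(5);              // false
+ *   v.GetAt(15);             // true
+ *   v.AdvanceCurrentTime(15);
+ *   v.GetBefore(12);         // true: times before the new current time now
+ *                            // report the value at the current time
+ */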
+
+}
+
+#endif /* MOZILLA_TIMEVARYING_H_ */
new file mode 100644
--- /dev/null
+++ b/content/media/VideoSegment.h
@@ -0,0 +1,117 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef MOZILLA_VIDEOSEGMENT_H_
+#define MOZILLA_VIDEOSEGMENT_H_
+
+#include "MediaSegment.h"
+#include "ImageLayers.h"
+
+namespace mozilla {
+
+class VideoFrame {
+public:
+  typedef mozilla::layers::Image Image;
+
+  VideoFrame(already_AddRefed<Image> aImage, const gfxIntSize& aIntrinsicSize)
+    : mImage(aImage), mIntrinsicSize(aIntrinsicSize) {}
+  VideoFrame() : mIntrinsicSize(0, 0) {}
+
+  bool operator==(const VideoFrame& aFrame) const
+  {
+    return mImage == aFrame.mImage && mIntrinsicSize == aFrame.mIntrinsicSize;
+  }
+  bool operator!=(const VideoFrame& aFrame) const
+  {
+    return !operator==(aFrame);
+  }
+
+  Image* GetImage() const { return mImage; }
+  const gfxIntSize& GetIntrinsicSize() const { return mIntrinsicSize; }
+  void SetNull() { mImage = nsnull; mIntrinsicSize = gfxIntSize(0, 0); }
+  void TakeFrom(VideoFrame* aFrame)
+  {
+    mImage = aFrame->mImage.forget();
+    mIntrinsicSize = aFrame->mIntrinsicSize;
+  }
+
+protected:
+  // mImage can be null to indicate "no video" (aka "empty frame"). It can
+  // still have an intrinsic size in this case.
+  nsRefPtr<Image> mImage;
+  // The desired size to render the video frame at.
+  gfxIntSize mIntrinsicSize;
+};
+
+
+struct VideoChunk {
+  void SliceTo(TrackTicks aStart, TrackTicks aEnd)
+  {
+    NS_ASSERTION(aStart >= 0 && aStart < aEnd && aEnd <= mDuration,
+                 "Slice out of bounds");
+    mDuration = aEnd - aStart;
+  }
+  TrackTicks GetDuration() const { return mDuration; }
+  bool CanCombineWithFollowing(const VideoChunk& aOther) const
+  {
+    return aOther.mFrame == mFrame;
+  }
+  bool IsNull() const { return !mFrame.GetImage(); }
+  void SetNull(TrackTicks aDuration)
+  {
+    mDuration = aDuration;
+    mFrame.SetNull();
+  }
+
+  TrackTicks mDuration;
+  VideoFrame mFrame;
+};
+
+class VideoSegment : public MediaSegmentBase<VideoSegment, VideoChunk> {
+public:
+  typedef mozilla::layers::Image Image;
+
+  VideoSegment() : MediaSegmentBase<VideoSegment, VideoChunk>(VIDEO) {}
+
+  void AppendFrame(already_AddRefed<Image> aImage, TrackTicks aDuration,
+                   const gfxIntSize& aIntrinsicSize)
+  {
+    VideoChunk* chunk = AppendChunk(aDuration);
+    VideoFrame frame(aImage, aIntrinsicSize);
+    chunk->mFrame.TakeFrom(&frame);
+  }
+  const VideoFrame* GetFrameAt(TrackTicks aOffset, TrackTicks* aStart = nsnull)
+  {
+    VideoChunk* c = FindChunkContaining(aOffset, aStart);
+    if (!c) {
+      return nsnull;
+    }
+    return &c->mFrame;
+  }
+  const VideoFrame* GetLastFrame(TrackTicks* aStart = nsnull)
+  {
+    VideoChunk* c = GetLastChunk();
+    if (!c) {
+      return nsnull;
+    }
+    if (aStart) {
+      *aStart = mDuration - c->mDuration;
+    }
+    return &c->mFrame;
+  }
+
+  // Segment-generic methods not in MediaSegmentBase
+  void InitFrom(const VideoSegment& aOther)
+  {
+  }
+  void SliceFrom(const VideoSegment& aOther, TrackTicks aStart, TrackTicks aEnd) {
+    BaseSliceFrom(aOther, aStart, aEnd);
+  }
+  static Type StaticType() { return VIDEO; }
+};
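+
+/*
+ * A sketch of producing and inspecting video data (illustrative; 'image', the
+ * duration and the intrinsic size are placeholders):
+ *
+ *   VideoSegment segment;
+ *   segment.AppendFrame(image.forget(), duration, gfxIntSize(640, 480));
+ *   TrackTicks start;
+ *   const VideoFrame* frame = segment.GetLastFrame(&start);
+ *   // frame->GetImage() is the appended image; 'start' is its offset in ticks
+ */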
+
+}
+
+#endif /* MOZILLA_VIDEOSEGMENT_H_ */
--- a/content/media/nsAudioAvailableEventManager.cpp
+++ b/content/media/nsAudioAvailableEventManager.cpp
@@ -156,20 +156,26 @@ void nsAudioAvailableEventManager::Queue
     if (aEndTimeSampleOffset > mSignalBufferPosition + audioDataLength) {
       time = (aEndTimeSampleOffset - mSignalBufferPosition - audioDataLength) / 
              mSamplesPerSecond;
     }
 
     // Fill the signalBuffer.
     PRUint32 i;
     float *signalBuffer = mSignalBuffer.get() + mSignalBufferPosition;
-    for (i = 0; i < signalBufferTail; ++i) {
-      signalBuffer[i] = MOZ_CONVERT_AUDIO_SAMPLE(audioData[i]);
+    if (audioData) {
+      for (i = 0; i < signalBufferTail; ++i) {
+        signalBuffer[i] = MOZ_CONVERT_AUDIO_SAMPLE(audioData[i]);
+      }
+    } else {
+      memset(signalBuffer, 0, signalBufferTail*sizeof(signalBuffer[0]));
     }
-    audioData += signalBufferTail;
+    if (audioData) {
+      audioData += signalBufferTail;
+    }
 
     NS_ASSERTION(audioDataLength >= signalBufferTail,
                  "audioDataLength about to wrap past zero to +infinity!");
     audioDataLength -= signalBufferTail;
 
     if (mPendingEvents.Length() > 0) {
       // Check last event timecode to make sure that all queued events
       // are in non-descending sequence.
@@ -199,18 +205,22 @@ void nsAudioAvailableEventManager::Queue
 
   NS_ASSERTION(mSignalBufferPosition + audioDataLength < mSignalBufferLength,
                "Intermediate signal buffer must fit at least one more item.");
 
   if (audioDataLength > 0) {
     // Add data to the signalBuffer.
     PRUint32 i;
     float *signalBuffer = mSignalBuffer.get() + mSignalBufferPosition;
-    for (i = 0; i < audioDataLength; ++i) {
-      signalBuffer[i] = MOZ_CONVERT_AUDIO_SAMPLE(audioData[i]);
+    if (audioData) {
+      for (i = 0; i < audioDataLength; ++i) {
+        signalBuffer[i] = MOZ_CONVERT_AUDIO_SAMPLE(audioData[i]);
+      }
+    } else {
+      memset(signalBuffer, 0, audioDataLength*sizeof(signalBuffer[0]));
     }
     mSignalBufferPosition += audioDataLength;
   }
 }
 
 void nsAudioAvailableEventManager::Clear()
 {
   ReentrantMonitorAutoEnter mon(mReentrantMonitor);
--- a/content/media/nsBuiltinDecoder.cpp
+++ b/content/media/nsBuiltinDecoder.cpp
@@ -77,16 +77,45 @@ void nsBuiltinDecoder::SetVolume(double 
 {
   NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
   mInitialVolume = aVolume;
   if (mDecoderStateMachine) {
     mDecoderStateMachine->SetVolume(aVolume);
   }
 }
 
+void nsBuiltinDecoder::SetAudioCaptured(bool aCaptured)
+{
+  NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
+  mInitialAudioCaptured = aCaptured;
+  if (mDecoderStateMachine) {
+    mDecoderStateMachine->SetAudioCaptured(aCaptured);
+  }
+}
+
+void nsBuiltinDecoder::AddOutputStream(SourceMediaStream* aStream, bool aFinishWhenEnded)
+{
+  NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
+
+  {
+    ReentrantMonitorAutoEnter mon(mReentrantMonitor);
+    OutputMediaStream* ms = mOutputStreams.AppendElement();
+    ms->Init(PRInt64(mCurrentTime*USECS_PER_S), aStream, aFinishWhenEnded);
+  }
+
+  // This can be called before Load(), in which case our mDecoderStateMachine
+  // won't have been created yet and we can rely on Load() to schedule it
+  // once it is created.
+  if (mDecoderStateMachine) {
+    // Make sure the state machine thread runs so that any buffered data
+    // is fed into our stream.
+    ScheduleStateMachineThread();
+  }
+}
+
 double nsBuiltinDecoder::GetDuration()
 {
   NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
   if (mInfiniteStream) {
     return std::numeric_limits<double>::infinity();
   }
   if (mDuration >= 0) {
      return static_cast<double>(mDuration) / static_cast<double>(USECS_PER_S);
@@ -687,16 +716,23 @@ void nsBuiltinDecoder::NotifyDownloadEnd
     ResourceLoaded();
   }
   else if (aStatus != NS_BASE_STREAM_CLOSED) {
     NetworkError();
   }
   UpdateReadyStateForData();
 }
 
+void nsBuiltinDecoder::NotifyPrincipalChanged()
+{
+  if (mElement) {
+    mElement->NotifyDecoderPrincipalChanged();
+  }
+}
+
 void nsBuiltinDecoder::NotifyBytesConsumed(PRInt64 aBytes)
 {
   ReentrantMonitorAutoEnter mon(mReentrantMonitor);
   NS_ASSERTION(OnStateMachineThread() || mDecoderStateMachine->OnDecodeThread(),
                "Should be on play state machine or decode thread.");
   if (!mIgnoreProgressData) {
     mDecoderPosition += aBytes;
     mPlaybackStatistics.AddBytes(aBytes);
--- a/content/media/nsBuiltinDecoder.h
+++ b/content/media/nsBuiltinDecoder.h
@@ -257,16 +257,17 @@ public:
 
   // Return the current decode state. The decoder monitor must be
   // obtained before calling this.
   virtual State GetState() = 0;
 
   // Set the audio volume. The decoder monitor must be obtained before
   // calling this.
   virtual void SetVolume(double aVolume) = 0;
+  virtual void SetAudioCaptured(bool aCapture) = 0;
 
   virtual void Shutdown() = 0;
 
   // Called from the main thread to get the duration. The decoder monitor
   // must be obtained before calling this. It is in units of microseconds.
   virtual PRInt64 GetDuration() = 0;
 
   // Called from the main thread to set the duration of the media resource
@@ -392,27 +393,76 @@ public:
 
   // Seek to the time position in (seconds) from the start of the video.
   virtual nsresult Seek(double aTime);
 
   virtual nsresult PlaybackRateChanged();
 
   virtual void Pause();
   virtual void SetVolume(double aVolume);
+  virtual void SetAudioCaptured(bool aCaptured);
+
+  virtual void AddOutputStream(SourceMediaStream* aStream, bool aFinishWhenEnded);
+  // Protected by mReentrantMonitor. All decoder output is copied to these streams.
+  struct OutputMediaStream {
+    void Init(PRInt64 aInitialTime, SourceMediaStream* aStream, bool aFinishWhenEnded)
+    {
+      mLastAudioPacketTime = -1;
+      mLastAudioPacketEndTime = -1;
+      mAudioFramesWrittenBaseTime = aInitialTime;
+      mAudioFramesWritten = 0;
+      mNextVideoTime = aInitialTime;
+      mStream = aStream;
+      mStreamInitialized = false;
+      mFinishWhenEnded = aFinishWhenEnded;
+      mHaveSentFinish = false;
+      mHaveSentFinishAudio = false;
+      mHaveSentFinishVideo = false;
+    }
+    PRInt64 mLastAudioPacketTime; // microseconds
+    PRInt64 mLastAudioPacketEndTime; // microseconds
+    // Count of audio frames written to the stream
+    PRInt64 mAudioFramesWritten;
+    // Timestamp of the first audio packet whose frames we wrote.
+    PRInt64 mAudioFramesWrittenBaseTime; // microseconds
+    // mNextVideoTime is the end timestamp for the last packet sent to the stream.
+    // Therefore video packets starting at or after this time need to be copied
+    // to the output stream.
+    PRInt64 mNextVideoTime; // microseconds
+    // The last video image sent to the stream. Useful if we need to replicate
+    // the image.
+    nsRefPtr<Image> mLastVideoImage;
+    nsRefPtr<SourceMediaStream> mStream;
+    gfxIntSize mLastVideoImageDisplaySize;
+    // This is set to true when the stream is initialized (audio and
+    // video tracks added).
+    bool mStreamInitialized;
+    bool mFinishWhenEnded;
+    bool mHaveSentFinish;
+    bool mHaveSentFinishAudio;
+    bool mHaveSentFinishVideo;
+  };
+  nsTArray<OutputMediaStream>& OutputStreams()
+  {
+    GetReentrantMonitor().AssertCurrentThreadIn();
+    return mOutputStreams;
+  }
+
   virtual double GetDuration();
 
   virtual void SetInfinite(bool aInfinite);
   virtual bool IsInfinite();
 
   virtual MediaResource* GetResource() { return mResource; }
   virtual already_AddRefed<nsIPrincipal> GetCurrentPrincipal();
 
   virtual void NotifySuspendedStatusChanged();
   virtual void NotifyBytesDownloaded();
   virtual void NotifyDownloadEnded(nsresult aStatus);
+  virtual void NotifyPrincipalChanged();
   // Called by the decode thread to keep track of the number of bytes read
   // from the resource.
   void NotifyBytesConsumed(PRInt64 aBytes);
 
   // Called when the video file has completed downloading.
   // Call on the main thread only.
   void ResourceLoaded();
 
@@ -658,16 +708,19 @@ public:
   // started this is reset to negative.
   double mRequestedSeekTime;
 
   // Duration of the media resource. Set to -1 if unknown.
   // Set when the metadata is loaded. Accessed on the main thread
   // only.
   PRInt64 mDuration;
 
+  // True when playback should start with audio captured (not playing).
+  bool mInitialAudioCaptured;
+
   // True if the media resource is seekable (server supports byte range
   // requests).
   bool mSeekable;
 
   /******
    * The following member variables can be accessed from any thread.
    ******/
 
@@ -681,16 +734,19 @@ public:
   // Media data resource.
   nsAutoPtr<MediaResource> mResource;
 
   // ReentrantMonitor for detecting when the video play state changes. A call
   // to Wait on this monitor will block the thread until the next
   // state change.
   ReentrantMonitor mReentrantMonitor;
 
+  // Data about MediaStreams that are being fed by this decoder.
+  nsTArray<OutputMediaStream> mOutputStreams;
+
   // Set to one of the valid play states. It is protected by the
   // monitor mReentrantMonitor. This monitor must be acquired when reading or
   // writing the state. Any change to the state on the main thread
   // must call NotifyAll on the monitor so the decode thread can wake up.
   PlayState mPlayState;
 
   // The state to change to after a seek or load operation. It must only
   // be changed from the main thread. The decoder monitor must be acquired
--- a/content/media/nsBuiltinDecoderReader.cpp
+++ b/content/media/nsBuiltinDecoderReader.cpp
@@ -66,16 +66,31 @@ extern PRLogModuleInfo* gBuiltinDecoderL
 #else
 #define SEEK_LOG(type, msg)
 #endif
 #else
 #define LOG(type, msg)
 #define SEEK_LOG(type, msg)
 #endif
 
+void
+AudioData::EnsureAudioBuffer()
+{
+  if (mAudioBuffer)
+    return;
+  mAudioBuffer = SharedBuffer::Create(mFrames*mChannels*sizeof(AudioDataValue));
+
+  AudioDataValue* data = static_cast<AudioDataValue*>(mAudioBuffer->Data());
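+  // mAudioData stores samples interleaved by frame (frame i, channel j lives
+  // at i*mChannels + j); copy them so that each channel's frames end up
+  // contiguous in the shared buffer.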
+  for (PRUint32 i = 0; i < mFrames; ++i) {
+    for (PRUint32 j = 0; j < mChannels; ++j) {
+      data[j*mFrames + i] = mAudioData[i*mChannels + j];
+    }
+  }
+}
+
 static bool
 ValidatePlane(const VideoData::YCbCrBuffer::Plane& aPlane)
 {
   return aPlane.mWidth <= PlanarYCbCrImage::MAX_DIMENSION &&
          aPlane.mHeight <= PlanarYCbCrImage::MAX_DIMENSION &&
          aPlane.mWidth * aPlane.mHeight < MAX_VIDEO_WIDTH * MAX_VIDEO_HEIGHT &&
          aPlane.mStride > 0;
 }
@@ -110,17 +125,25 @@ VideoData* VideoData::Create(nsVideoInfo
                              PRInt64 aTime,
                              PRInt64 aEndTime,
                              const YCbCrBuffer& aBuffer,
                              bool aKeyframe,
                              PRInt64 aTimecode,
                              nsIntRect aPicture)
 {
   if (!aContainer) {
-    return nsnull;
+    // Create a dummy VideoData with no image. This gives us something to
+    // send to media streams if necessary.
+    nsAutoPtr<VideoData> v(new VideoData(aOffset,
+                                         aTime,
+                                         aEndTime,
+                                         aKeyframe,
+                                         aTimecode,
+                                         aInfo.mDisplay));
+    return v.forget();
   }
 
   // The following situation should never happen unless there is a bug
   // in the decoder
   if (aBuffer.mPlanes[1].mWidth != aBuffer.mPlanes[2].mWidth ||
       aBuffer.mPlanes[1].mHeight != aBuffer.mPlanes[2].mHeight) {
     NS_ERROR("C planes with different sizes");
     return nsnull;
--- a/content/media/nsBuiltinDecoderReader.h
+++ b/content/media/nsBuiltinDecoderReader.h
@@ -38,23 +38,25 @@
  * ***** END LICENSE BLOCK ***** */
 #if !defined(nsBuiltinDecoderReader_h_)
 #define nsBuiltinDecoderReader_h_
 
 #include <nsDeque.h>
 #include "ImageLayers.h"
 #include "nsSize.h"
 #include "mozilla/ReentrantMonitor.h"
+#include "MediaStreamGraph.h"
+#include "SharedBuffer.h"
 
 // Stores info relevant to presenting media frames.
 class nsVideoInfo {
 public:
   nsVideoInfo()
-    : mAudioRate(0),
-      mAudioChannels(0),
+    : mAudioRate(44100),
+      mAudioChannels(2),
       mDisplay(0,0),
       mStereoMode(mozilla::layers::STEREO_MODE_MONO),
       mHasAudio(false),
       mHasVideo(false)
   {}
 
   // Returns true if it's safe to use aPicture as the picture to be
   // extracted inside a frame of size aFrame, and scaled up to and displayed
@@ -108,16 +110,18 @@ typedef float AudioDataValue;
 #define MOZ_CONVERT_AUDIO_SAMPLE(x) (x)
 #define MOZ_SAMPLE_TYPE_FLOAT32 1
 
 #endif
 
 // Holds a chunk of decoded audio frames.
 class AudioData {
 public:
+  typedef mozilla::SharedBuffer SharedBuffer;
+
   AudioData(PRInt64 aOffset,
             PRInt64 aTime,
             PRInt64 aDuration,
             PRUint32 aFrames,
             AudioDataValue* aData,
             PRUint32 aChannels)
   : mOffset(aOffset),
     mTime(aTime),
@@ -129,24 +133,33 @@ public:
     MOZ_COUNT_CTOR(AudioData);
   }
 
   ~AudioData()
   {
     MOZ_COUNT_DTOR(AudioData);
   }
 
+  // If mAudioBuffer is null, creates it from mAudioData.
+  void EnsureAudioBuffer();
+
+  PRInt64 GetEnd() { return mTime + mDuration; }
+
   // Approximate byte offset of the end of the page on which this chunk
   // ends.
   const PRInt64 mOffset;
 
   PRInt64 mTime; // Start time of data in usecs.
   const PRInt64 mDuration; // In usecs.
   const PRUint32 mFrames;
   const PRUint32 mChannels;
+  // At least one of mAudioBuffer/mAudioData must be non-null.
+  // mChannels channels, each with mFrames frames
+  nsRefPtr<SharedBuffer> mAudioBuffer;
+  // mFrames frames, each with mChannels values
   nsAutoArrayPtr<AudioDataValue> mAudioData;
 };
 
 // Holds a decoded video frame, in YCbCr format. These are queued in the reader.
 class VideoData {
 public:
   typedef mozilla::layers::ImageContainer ImageContainer;
   typedef mozilla::layers::Image Image;
@@ -193,16 +206,18 @@ public:
     return new VideoData(aOffset, aTime, aEndTime, aTimecode);
   }
 
   ~VideoData()
   {
     MOZ_COUNT_DTOR(VideoData);
   }
 
+  PRInt64 GetEnd() { return mEndTime; }
+
   // Dimensions at which to display the video frame. The picture region
   // will be scaled to this size. This should be the picture region's
   // dimensions scaled with respect to its aspect ratio.
   nsIntSize mDisplay;
 
   // Approximate byte offset of the end of the frame in the media.
   PRInt64 mOffset;
 
@@ -365,16 +380,35 @@ template <class T> class MediaQueue : pr
     return last->mTime - first->mTime;
   }
 
   void LockedForEach(nsDequeFunctor& aFunctor) const {
     ReentrantMonitorAutoEnter mon(mReentrantMonitor);
     ForEach(aFunctor);
   }
 
+  // Extracts elements from the queue into aResult, in order.
+  // Elements whose start time is before aTime are ignored.
+  void GetElementsAfter(PRInt64 aTime, nsTArray<T*>* aResult) {
+    ReentrantMonitorAutoEnter mon(mReentrantMonitor);
+    if (!GetSize())
+      return;
+    PRInt32 i;
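+    // Scan backwards for the last element whose end time precedes aTime;
+    // everything from that index onwards is appended to aResult below.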
+    for (i = GetSize() - 1; i > 0; --i) {
+      T* v = static_cast<T*>(ObjectAt(i));
+      if (v->GetEnd() < aTime)
+        break;
+    }
+    // Elements before index i have an end time before aTime. It's also possible
+    // that the element at i has an end time before aTime, but that's OK.
+    for (; i < GetSize(); ++i) {
+      aResult->AppendElement(static_cast<T*>(ObjectAt(i)));
+    }
+  }
+
 private:
   mutable ReentrantMonitor mReentrantMonitor;
 
   // True when we've decoded the last frame of data in the
   // bitstream for which we're queueing frame data.
   bool mEndOfStream;
 };
 
@@ -403,17 +437,17 @@ public:
   // false if the audio is finished, end of file has been reached,
   // or an unrecoverable read error has occurred.
   virtual bool DecodeAudioData() = 0;
 
   // Reads and decodes one video frame. Packets with a timestamp less
   // than aTimeThreshold will be decoded (unless they're not keyframes
   // and aKeyframeSkip is true), but will not be added to the queue.
   virtual bool DecodeVideoFrame(bool &aKeyframeSkip,
-                                  PRInt64 aTimeThreshold) = 0;
+                                PRInt64 aTimeThreshold) = 0;
 
   virtual bool HasAudio() = 0;
   virtual bool HasVideo() = 0;
 
   // Read header data for all bitstreams in the file. Fills mInfo with
   // the data required to present the media. Returns NS_OK on success,
   // or NS_ERROR_FAILURE on failure.
   virtual nsresult ReadMetadata(nsVideoInfo* aInfo) = 0;
--- a/content/media/nsBuiltinDecoderStateMachine.cpp
+++ b/content/media/nsBuiltinDecoderStateMachine.cpp
@@ -41,16 +41,18 @@
 #include "nsTArray.h"
 #include "nsBuiltinDecoder.h"
 #include "nsBuiltinDecoderReader.h"
 #include "nsBuiltinDecoderStateMachine.h"
 #include "mozilla/mozalloc.h"
 #include "VideoUtils.h"
 #include "nsTimeRanges.h"
 #include "nsDeque.h"
+#include "AudioSegment.h"
+#include "VideoSegment.h"
 
 #include "mozilla/Preferences.h"
 #include "mozilla/StandardInteger.h"
 #include "mozilla/Util.h"
 
 using namespace mozilla;
 using namespace mozilla::layers;
 
@@ -415,30 +417,33 @@ nsBuiltinDecoderStateMachine::nsBuiltinD
   mSeekTime(0),
   mFragmentEndTime(-1),
   mReader(aReader),
   mCurrentFrameTime(0),
   mAudioStartTime(-1),
   mAudioEndTime(-1),
   mVideoFrameEndTime(-1),
   mVolume(1.0),
+  mAudioCaptured(false),
   mSeekable(true),
   mPositionChangeQueued(false),
   mAudioCompleted(false),
   mGotDurationFromMetaData(false),
   mStopDecodeThread(true),
   mDecodeThreadIdle(false),
   mStopAudioThread(true),
   mQuickBuffering(false),
   mIsRunning(false),
   mRunAgain(false),
   mDispatchedRunEvent(false),
   mDecodeThreadWaiting(false),
   mRealTime(aRealTime),
   mRequestedNewDecodeThread(false),
+  mDidThrottleAudioDecoding(false),
+  mDidThrottleVideoDecoding(false),
   mEventManager(aDecoder)
 {
   MOZ_COUNT_CTOR(nsBuiltinDecoderStateMachine);
   NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
 
   StateMachineTracker::Instance().EnsureGlobalStateMachine();
 
   // only enable realtime mode when "media.realtime_decoder.enabled" is true.
@@ -516,16 +521,313 @@ void nsBuiltinDecoderStateMachine::Decod
       DecodeSeek();
     }
   }
 
   mDecodeThreadIdle = true;
   LOG(PR_LOG_DEBUG, ("%p Decode thread finished", mDecoder.get()));
 }
 
+void nsBuiltinDecoderStateMachine::SendOutputStreamAudio(AudioData* aAudio,
+                                                         OutputMediaStream* aStream,
+                                                         AudioSegment* aOutput)
+{
+  mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
+
+  if (aAudio->mTime <= aStream->mLastAudioPacketTime) {
+    // ignore packet that we've already processed
+    return;
+  }
+  aStream->mLastAudioPacketTime = aAudio->mTime;
+  aStream->mLastAudioPacketEndTime = aAudio->GetEnd();
+
+  NS_ASSERTION(aOutput->GetChannels() == aAudio->mChannels,
+               "Wrong number of channels");
+
+  // This logic has to mimic AudioLoop closely to make sure we write
+  // the exact same silences
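+  // audioWrittenOffset is the absolute frame position we have written up to
+  // so far for this stream; frameOffset is the frame position at which this
+  // packet starts.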
+  CheckedInt64 audioWrittenOffset = UsecsToFrames(mInfo.mAudioRate,
+      aStream->mAudioFramesWrittenBaseTime + mStartTime) + aStream->mAudioFramesWritten;
+  CheckedInt64 frameOffset = UsecsToFrames(mInfo.mAudioRate, aAudio->mTime);
+  if (!audioWrittenOffset.valid() || !frameOffset.valid())
+    return;
+  if (audioWrittenOffset.value() < frameOffset.value()) {
+    // Write silence to catch up
+    LOG(PR_LOG_DEBUG, ("%p Decoder writing %d frames of silence to MediaStream",
+                       mDecoder.get(), PRInt32(frameOffset.value() - audioWrittenOffset.value())));
+    AudioSegment silence;
+    silence.InitFrom(*aOutput);
+    silence.InsertNullDataAtStart(frameOffset.value() - audioWrittenOffset.value());
+    aStream->mAudioFramesWritten += silence.GetDuration();
+    aOutput->AppendFrom(&silence);
+  }
+
+  PRInt64 offset;
+  if (aStream->mAudioFramesWritten == 0) {
+    NS_ASSERTION(frameOffset.value() <= audioWrittenOffset.value(),
+                 "Otherwise we'd have taken the write-silence path");
+    // We're starting in the middle of a packet. Split the packet.
+    offset = audioWrittenOffset.value() - frameOffset.value();
+  } else {
+    // Write the entire packet.
+    offset = 0;
+  }
+
+  if (offset >= aAudio->mFrames)
+    return;
+
+  aAudio->EnsureAudioBuffer();
+  nsRefPtr<SharedBuffer> buffer = aAudio->mAudioBuffer;
+  aOutput->AppendFrames(buffer.forget(), aAudio->mFrames, PRInt32(offset), aAudio->mFrames,
+                        MOZ_AUDIO_DATA_FORMAT);
+  LOG(PR_LOG_DEBUG, ("%p Decoder writing %d frames of data to MediaStream for AudioData at %lld",
+                     mDecoder.get(), aAudio->mFrames - PRInt32(offset), aAudio->mTime));
+  aStream->mAudioFramesWritten += aAudio->mFrames - PRInt32(offset);
+}
+
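+// Appends a single frame (aImage may be null, meaning no video) of the given
+// duration and intrinsic size to aOutput.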
+static void WriteVideoToMediaStream(Image* aImage,
+                                    PRInt64 aDuration, const gfxIntSize& aIntrinsicSize,
+                                    VideoSegment* aOutput)
+{
+  nsRefPtr<Image> image = aImage;
+  aOutput->AppendFrame(image.forget(), aDuration, aIntrinsicSize);
+}
+
+static const TrackID TRACK_AUDIO = 1;
+static const TrackID TRACK_VIDEO = 2;
+static const TrackRate RATE_VIDEO = USECS_PER_S;
+
+void nsBuiltinDecoderStateMachine::SendOutputStreamData()
+{
+  mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
+
+  if (mState == DECODER_STATE_DECODING_METADATA)
+    return;
+
+  nsTArray<OutputMediaStream>& streams = mDecoder->OutputStreams();
+  PRInt64 minLastAudioPacketTime = PR_INT64_MAX;
+
+  bool finished =
+      (!mInfo.mHasAudio || mReader->mAudioQueue.IsFinished()) &&
+      (!mInfo.mHasVideo || mReader->mVideoQueue.IsFinished());
+
+  for (PRUint32 i = 0; i < streams.Length(); ++i) {
+    OutputMediaStream* stream = &streams[i];
+    SourceMediaStream* mediaStream = stream->mStream;
+    StreamTime endPosition = 0;
+
+    if (!stream->mStreamInitialized) {
+      if (mInfo.mHasAudio) {
+        AudioSegment* audio = new AudioSegment();
+        audio->Init(mInfo.mAudioChannels);
+        mediaStream->AddTrack(TRACK_AUDIO, mInfo.mAudioRate, 0, audio);
+      }
+      if (mInfo.mHasVideo) {
+        VideoSegment* video = new VideoSegment();
+        mediaStream->AddTrack(TRACK_VIDEO, RATE_VIDEO, 0, video);
+      }
+      stream->mStreamInitialized = true;
+    }
+
+    if (mInfo.mHasAudio) {
+      nsAutoTArray<AudioData*,10> audio;
+      // It's OK to hold references to the AudioData because while audio
+      // is captured, only the decoder thread pops from the queue (see below).
+      mReader->mAudioQueue.GetElementsAfter(stream->mLastAudioPacketTime, &audio);
+      AudioSegment output;
+      output.Init(mInfo.mAudioChannels);
+      for (PRUint32 i = 0; i < audio.Length(); ++i) {
+        SendOutputStreamAudio(audio[i], stream, &output);
+      }
+      if (output.GetDuration() > 0) {
+        mediaStream->AppendToTrack(TRACK_AUDIO, &output);
+      }
+      if (mReader->mAudioQueue.IsFinished() && !stream->mHaveSentFinishAudio) {
+        mediaStream->EndTrack(TRACK_AUDIO);
+        stream->mHaveSentFinishAudio = true;
+      }
+      minLastAudioPacketTime = NS_MIN(minLastAudioPacketTime, stream->mLastAudioPacketTime);
+      endPosition = NS_MAX(endPosition,
+          TicksToTimeRoundDown(mInfo.mAudioRate, stream->mAudioFramesWritten));
+    }
+
+    if (mInfo.mHasVideo) {
+      nsAutoTArray<VideoData*,10> video;
+      // It's OK to hold references to the VideoData because only the decoder
+      // thread pops from the queue.
+      mReader->mVideoQueue.GetElementsAfter(stream->mNextVideoTime + mStartTime, &video);
+      VideoSegment output;
+      for (PRUint32 i = 0; i < video.Length(); ++i) {
+        VideoData* v = video[i];
+        if (stream->mNextVideoTime + mStartTime < v->mTime) {
+          LOG(PR_LOG_DEBUG, ("%p Decoder writing last video to MediaStream for %lld ms",
+                             mDecoder.get(), v->mTime - (stream->mNextVideoTime + mStartTime)));
+          // Write last video frame to catch up. mLastVideoImage can be null here
+          // which is fine, it just means there's no video.
+          WriteVideoToMediaStream(stream->mLastVideoImage,
+              v->mTime - (stream->mNextVideoTime + mStartTime), stream->mLastVideoImageDisplaySize,
+              &output);
+          stream->mNextVideoTime = v->mTime - mStartTime;
+        }
+        if (stream->mNextVideoTime + mStartTime < v->mEndTime) {
+          LOG(PR_LOG_DEBUG, ("%p Decoder writing video frame %lld to MediaStream",
+                             mDecoder.get(), v->mTime));
+          WriteVideoToMediaStream(v->mImage,
+              v->mEndTime - (stream->mNextVideoTime + mStartTime), v->mDisplay,
+              &output);
+          stream->mNextVideoTime = v->mEndTime - mStartTime;
+          stream->mLastVideoImage = v->mImage;
+          stream->mLastVideoImageDisplaySize = v->mDisplay;
+        } else {
+          LOG(PR_LOG_DEBUG, ("%p Decoder skipping writing video frame %lld to MediaStream",
+                             mDecoder.get(), v->mTime));
+        }
+      }
+      if (output.GetDuration() > 0) {
+        mediaStream->AppendToTrack(TRACK_VIDEO, &output);
+      }
+      if (mReader->mVideoQueue.IsFinished() && !stream->mHaveSentFinishVideo) {
+        mediaStream->EndTrack(TRACK_VIDEO);
+        stream->mHaveSentFinishVideo = true;
+      }
+      endPosition = NS_MAX(endPosition,
+          TicksToTimeRoundDown(RATE_VIDEO, stream->mNextVideoTime));
+    }
+
+    if (!stream->mHaveSentFinish) {
+      stream->mStream->AdvanceKnownTracksTime(endPosition);
+    }
+
+    if (finished && !stream->mHaveSentFinish) {
+      stream->mHaveSentFinish = true;
+      stream->mStream->Finish();
+    }
+  }
+
+  if (mAudioCaptured) {
+    // Discard audio packets that are no longer needed.
+    PRInt64 audioPacketTimeToDiscard =
+        NS_MIN(minLastAudioPacketTime, mStartTime + mCurrentFrameTime);
+    while (true) {
+      nsAutoPtr<AudioData> a(mReader->mAudioQueue.PopFront());
+      if (!a)
+        break;
+      // Packet times are not 100% reliable, so this may discard packets that
+      // actually contain data for mCurrentFrameTime. This means that if someone
+      // creates a new output stream, we might not have the audio for its very
+      // start; that's OK, we'll play silence instead for a brief moment.
+      // Seeking to this time would have a similar issue for such badly muxed
+      // resources.
+      if (a->GetEnd() >= audioPacketTimeToDiscard) {
+        mReader->mAudioQueue.PushFront(a.forget());
+        break;
+      }
+    }
+
+    if (finished) {
+      mAudioCompleted = true;
+      UpdateReadyState();
+    }
+  }
+}
+
+void nsBuiltinDecoderStateMachine::FinishOutputStreams()
+{
+  // Tell all our output streams that all tracks have ended and we've
+  // finished.
+  nsTArray<OutputMediaStream>& streams = mDecoder->OutputStreams();
+  for (PRUint32 i = 0; i < streams.Length(); ++i) {
+    OutputMediaStream* stream = &streams[i];
+    if (!stream->mStreamInitialized) {
+      continue;
+    }
+    SourceMediaStream* mediaStream = stream->mStream;
+    if (mInfo.mHasAudio && !stream->mHaveSentFinishAudio) {
+      mediaStream->EndTrack(TRACK_AUDIO);
+      stream->mHaveSentFinishAudio = true;
+    }
+    if (mInfo.mHasVideo && !stream->mHaveSentFinishVideo) {
+      mediaStream->EndTrack(TRACK_VIDEO);
+      stream->mHaveSentFinishVideo = true;
+    }
+    // XXX ignoring mFinishWhenEnded for now. Immediate goal is to not crash.
+    if (!stream->mHaveSentFinish) {
+      mediaStream->Finish();
+      stream->mHaveSentFinish = true;
+    }
+  }
+}
+
+bool nsBuiltinDecoderStateMachine::HaveEnoughDecodedAudio(PRInt64 aAmpleAudioUSecs)
+{
+  mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
+
+  if (mReader->mAudioQueue.GetSize() == 0 ||
+      GetDecodedAudioDuration() < aAmpleAudioUSecs) {
+    return false;
+  }
+  if (!mAudioCaptured) {
+    return true;
+  }
+
+  nsTArray<OutputMediaStream>& streams = mDecoder->OutputStreams();
+  for (PRUint32 i = 0; i < streams.Length(); ++i) {
+    OutputMediaStream* stream = &streams[i];
+    if (!stream->mHaveSentFinishAudio &&
+        !stream->mStream->HaveEnoughBuffered(TRACK_AUDIO)) {
+      return false;
+    }
+  }
+
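+  // We have enough decoded audio for now. Ask each stream to notify us (by
+  // scheduling the state machine and waking the decode thread) once it no
+  // longer has enough buffered, so decoding can resume.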
+  nsIThread* thread = GetStateMachineThread();
+  nsCOMPtr<nsIRunnable> callback = NS_NewRunnableMethod(this,
+      &nsBuiltinDecoderStateMachine::ScheduleStateMachineWithLockAndWakeDecoder);
+  for (PRUint32 i = 0; i < streams.Length(); ++i) {
+    OutputMediaStream* stream = &streams[i];
+    if (!stream->mHaveSentFinishAudio) {
+      stream->mStream->DispatchWhenNotEnoughBuffered(TRACK_AUDIO, thread, callback);
+    }
+  }
+  return true;
+}
+
+bool nsBuiltinDecoderStateMachine::HaveEnoughDecodedVideo()
+{
+  mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
+
+  if (static_cast<PRUint32>(mReader->mVideoQueue.GetSize()) < AMPLE_VIDEO_FRAMES) {
+    return false;
+  }
+
+  nsTArray<OutputMediaStream>& streams = mDecoder->OutputStreams();
+  if (streams.IsEmpty()) {
+    return true;
+  }
+
+  for (PRUint32 i = 0; i < streams.Length(); ++i) {
+    OutputMediaStream* stream = &streams[i];
+    if (!stream->mHaveSentFinishVideo &&
+        !stream->mStream->HaveEnoughBuffered(TRACK_VIDEO)) {
+      return false;
+    }
+  }
+
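+  // As for audio above: arrange to be notified when any stream runs low on
+  // buffered video so the decode thread can be woken up again.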
+  nsIThread* thread = GetStateMachineThread();
+  nsCOMPtr<nsIRunnable> callback = NS_NewRunnableMethod(this,
+      &nsBuiltinDecoderStateMachine::ScheduleStateMachineWithLockAndWakeDecoder);
+  for (PRUint32 i = 0; i < streams.Length(); ++i) {
+    OutputMediaStream* stream = &streams[i];
+    if (!stream->mHaveSentFinishVideo) {
+      stream->mStream->DispatchWhenNotEnoughBuffered(TRACK_VIDEO, thread, callback);
+    }
+  }
+  return true;
+}
+
 void nsBuiltinDecoderStateMachine::DecodeLoop()
 {
   LOG(PR_LOG_DEBUG, ("%p Start DecodeLoop()", mDecoder.get()));
 
   mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
   NS_ASSERTION(OnDecodeThread(), "Should be on decode thread.");
 
   // We want to "pump" the decode until we've got a few frames decoded
@@ -553,17 +855,16 @@ void nsBuiltinDecoderStateMachine::Decod
   PRInt64 lowAudioThreshold = LOW_AUDIO_USECS;
 
   // Our local ample audio threshold. If we increase lowAudioThreshold, we'll
   // also increase this too appropriately (we don't want lowAudioThreshold to
   // be greater than ampleAudioThreshold, else we'd stop decoding!).
   PRInt64 ampleAudioThreshold = AMPLE_AUDIO_USECS;
 
   MediaQueue<VideoData>& videoQueue = mReader->mVideoQueue;
-  MediaQueue<AudioData>& audioQueue = mReader->mAudioQueue;
 
   // Main decode loop.
   bool videoPlaying = HasVideo();
   bool audioPlaying = HasAudio();
   while ((mState == DECODER_STATE_DECODING || mState == DECODER_STATE_BUFFERING) &&
          !mStopDecodeThread &&
          (videoPlaying || audioPlaying))
   {
@@ -587,30 +888,33 @@ void nsBuiltinDecoderStateMachine::Decod
     // audio, or if we're low on video, provided we're not running low on
     // data to decode. If we're running low on downloaded data to decode,
     // we won't start keyframe skipping, as we'll be pausing playback to buffer
     // soon anyway and we'll want to be able to display frames immediately
     // after buffering finishes.
     if (mState == DECODER_STATE_DECODING &&
         !skipToNextKeyframe &&
         videoPlaying &&
-        ((!audioPump && audioPlaying && GetDecodedAudioDuration() < lowAudioThreshold) ||
-         (!videoPump &&
-           videoPlaying &&
-           static_cast<PRUint32>(videoQueue.GetSize()) < LOW_VIDEO_FRAMES)) &&
+        ((!audioPump && audioPlaying && !mDidThrottleAudioDecoding && GetDecodedAudioDuration() < lowAudioThreshold) ||
+         (!videoPump && videoPlaying && !mDidThrottleVideoDecoding &&
+          static_cast<PRUint32>(videoQueue.GetSize()) < LOW_VIDEO_FRAMES)) &&
         !HasLowUndecodedData())
 
     {
       skipToNextKeyframe = true;
       LOG(PR_LOG_DEBUG, ("%p Skipping video decode to the next keyframe", mDecoder.get()));
     }
 
     // Video decode.
-    if (videoPlaying &&
-        static_cast<PRUint32>(videoQueue.GetSize()) < AMPLE_VIDEO_FRAMES)
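+    // Decide whether to throttle video decoding; when we transition from
+    // throttled back to not-throttled, pump the decode so the video queue
+    // refills quickly (see mDidThrottleVideoDecoding).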
+    bool throttleVideoDecoding = !videoPlaying || HaveEnoughDecodedVideo();
+    if (mDidThrottleVideoDecoding && !throttleVideoDecoding) {
+      videoPump = true;
+    }
+    mDidThrottleVideoDecoding = throttleVideoDecoding;
+    if (!throttleVideoDecoding)
     {
       // Time the video decode, so that if it's slow, we can increase our low
       // audio threshold to reduce the chance of an audio underrun while we're
       // waiting for a video decode to complete.
       TimeDuration decodeTime;
       {
         PRInt64 currentTime = GetMediaTime();
         ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
@@ -627,39 +931,40 @@ void nsBuiltinDecoderStateMachine::Decod
                                      ampleAudioThreshold);
         LOG(PR_LOG_DEBUG,
             ("Slow video decode, set lowAudioThreshold=%lld ampleAudioThreshold=%lld",
              lowAudioThreshold, ampleAudioThreshold));
       }
     }
 
     // Audio decode.
-    if (audioPlaying &&
-        (GetDecodedAudioDuration() < ampleAudioThreshold || audioQueue.GetSize() == 0))
-    {
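+    // Same transition handling as for video: when audio decoding stops being
+    // throttled, pump it so the audio queue refills quickly.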
+    bool throttleAudioDecoding = !audioPlaying || HaveEnoughDecodedAudio(ampleAudioThreshold);
+    if (mDidThrottleAudioDecoding && !throttleAudioDecoding) {
+      audioPump = true;
+    }
+    mDidThrottleAudioDecoding = throttleAudioDecoding;
+    if (!mDidThrottleAudioDecoding) {
       ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
       audioPlaying = mReader->DecodeAudioData();
     }
 
+    SendOutputStreamData();
+
     // Notify to ensure that the AudioLoop() is not waiting, in case it was
     // waiting for more audio to be decoded.
     mDecoder->GetReentrantMonitor().NotifyAll();
 
     // The ready state can change when we've decoded data, so update the
     // ready state, so that DOM events can fire.
     UpdateReadyState();
 
     if ((mState == DECODER_STATE_DECODING || mState == DECODER_STATE_BUFFERING) &&
         !mStopDecodeThread &&
         (videoPlaying || audioPlaying) &&
-        (!audioPlaying || (GetDecodedAudioDuration() >= ampleAudioThreshold &&
-                           audioQueue.GetSize() > 0))
-        &&
-        (!videoPlaying ||
-          static_cast<PRUint32>(videoQueue.GetSize()) >= AMPLE_VIDEO_FRAMES))
+        throttleAudioDecoding && throttleVideoDecoding)
     {
       // All active bitstreams' decode is well ahead of the playback
       // position, we may as well wait for the playback to catch up. Note the
       // audio push thread acquires and notifies the decoder monitor every time
       // it pops AudioData off the audio queue. So if the audio push thread pops
       // the last AudioData off the audio queue right after that queue reported
       // it was non-empty here, we'll receive a notification on the decoder
       // monitor which will wake us up shortly after we sleep, thus preventing
@@ -692,16 +997,25 @@ void nsBuiltinDecoderStateMachine::Decod
 
 bool nsBuiltinDecoderStateMachine::IsPlaying()
 {
   mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
 
   return !mPlayStartTime.IsNull();
 }
 
+static void WriteSilence(nsAudioStream* aStream, PRUint32 aFrames)
+{
+  PRUint32 numSamples = aFrames * aStream->GetChannels();
+  nsAutoTArray<AudioDataValue, 1000> buf;
+  buf.SetLength(numSamples);
+  memset(buf.Elements(), 0, numSamples * sizeof(AudioDataValue));
+  aStream->Write(buf.Elements(), aFrames);
+}
+
 void nsBuiltinDecoderStateMachine::AudioLoop()
 {
   NS_ASSERTION(OnAudioThread(), "Should be on audio thread.");
   LOG(PR_LOG_DEBUG, ("%p Begun audio thread/loop", mDecoder.get()));
   PRInt64 audioDuration = 0;
   PRInt64 audioStartTime = -1;
   PRUint32 channels, rate;
   double volume = -1;
@@ -755,16 +1069,17 @@ void nsBuiltinDecoderStateMachine::Audio
       {
         if (!IsPlaying() && !mAudioStream->IsPaused()) {
           mAudioStream->Pause();
         }
         mon.Wait();
       }
 
       // If we're shutting down, break out and exit the audio thread.
+      // Also break out if audio is being captured.
       if (mState == DECODER_STATE_SHUTDOWN ||
           mStopAudioThread ||
           mReader->mAudioQueue.AtEndOfStream())
       {
         break;
       }
 
       // We only want to go to the expense of changing the volume if
@@ -808,16 +1123,18 @@ void nsBuiltinDecoderStateMachine::Audio
     PRInt64 framesWritten = 0;
     if (missingFrames.value() > 0) {
       // The next audio chunk begins some time after the end of the last chunk
       // we pushed to the audio hardware. We must push silence into the audio
       // hardware so that the next audio chunk begins playback at the correct
       // time.
       missingFrames = NS_MIN(static_cast<PRInt64>(PR_UINT32_MAX),
                              missingFrames.value());
+      LOG(PR_LOG_DEBUG, ("%p Decoder playing %d frames of silence",
+                         mDecoder.get(), PRInt32(missingFrames.value())));
       framesWritten = PlaySilence(static_cast<PRUint32>(missingFrames.value()),
                                   channels, playedFrames.value());
     } else {
       framesWritten = PlayFromAudioQueue(sampleTime.value(), channels);
     }
     audioDuration += framesWritten;
     {
       ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
@@ -845,20 +1162,17 @@ void nsBuiltinDecoderStateMachine::Audio
         // written isn't an exact multiple of minWriteFrames, we'll have
         // left over audio data which hasn't yet been written to the hardware,
         // and so that audio will not start playing. Write silence to ensure
         // the last block gets pushed to hardware, so that playback starts.
         PRInt64 framesToWrite = minWriteFrames - unplayedFrames;
         if (framesToWrite < PR_UINT32_MAX / channels) {
           // Write silence manually rather than using PlaySilence(), so that
           // the AudioAPI doesn't get a copy of the audio frames.
-          PRUint32 numSamples = framesToWrite * channels;
-          nsAutoArrayPtr<AudioDataValue> buf(new AudioDataValue[numSamples]);
-          memset(buf.get(), 0, numSamples * sizeof(AudioDataValue));
-          mAudioStream->Write(buf, framesToWrite);
+          WriteSilence(mAudioStream, framesToWrite);
         }
       }
 
       PRInt64 oldPosition = -1;
       PRInt64 position = GetMediaTime();
       while (oldPosition != position &&
              mAudioEndTime - position > 0 &&
              mState != DECODER_STATE_SEEKING &&
@@ -880,20 +1194,22 @@ void nsBuiltinDecoderStateMachine::Audio
   }
   LOG(PR_LOG_DEBUG, ("%p Reached audio stream end.", mDecoder.get()));
   {
     // Must hold lock while nulling out the audio stream to prevent
     // state machine thread trying to use it while we're destroying it.
     ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
     mAudioStream = nsnull;
     mEventManager.Clear();
-    mAudioCompleted = true;
-    UpdateReadyState();
-    // Kick the decode thread; it may be sleeping waiting for this to finish.
-    mDecoder->GetReentrantMonitor().NotifyAll();
+    if (!mAudioCaptured) {
+      mAudioCompleted = true;
+      UpdateReadyState();
+      // Kick the decode thread; it may be sleeping waiting for this to finish.
+      mDecoder->GetReentrantMonitor().NotifyAll();
+    }
   }
 
   // Must not hold the decoder monitor while we shutdown the audio stream, as
   // it makes a synchronous dispatch on Android.
   audioStream->Shutdown();
   audioStream = nsnull;
 
   LOG(PR_LOG_DEBUG, ("%p Audio stream finished playing, audio thread exit", mDecoder.get()));
@@ -903,49 +1219,49 @@ PRUint32 nsBuiltinDecoderStateMachine::P
                                                    PRUint32 aChannels,
                                                    PRUint64 aFrameOffset)
 
 {
   NS_ASSERTION(OnAudioThread(), "Only call on audio thread.");
   NS_ASSERTION(!mAudioStream->IsPaused(), "Don't play when paused");
   PRUint32 maxFrames = SILENCE_BYTES_CHUNK / aChannels / sizeof(AudioDataValue);
   PRUint32 frames = NS_MIN(aFrames, maxFrames);
-  PRUint32 numSamples = frames * aChannels;
-  nsAutoArrayPtr<AudioDataValue> buf(new AudioDataValue[numSamples]);
-  memset(buf.get(), 0, numSamples * sizeof(AudioDataValue));
-  mAudioStream->Write(buf, frames);
+  WriteSilence(mAudioStream, frames);
   // Dispatch events to the DOM for the audio just written.
-  mEventManager.QueueWrittenAudioData(buf.get(), frames * aChannels,
+  mEventManager.QueueWrittenAudioData(nsnull, frames * aChannels,
                                       (aFrameOffset + frames) * aChannels);
   return frames;
 }
 
 PRUint32 nsBuiltinDecoderStateMachine::PlayFromAudioQueue(PRUint64 aFrameOffset,
                                                           PRUint32 aChannels)
 {
   NS_ASSERTION(OnAudioThread(), "Only call on audio thread.");
   NS_ASSERTION(!mAudioStream->IsPaused(), "Don't play when paused");
   nsAutoPtr<AudioData> audio(mReader->mAudioQueue.PopFront());
   {
     ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
     NS_WARN_IF_FALSE(IsPlaying(), "Should be playing");
+    NS_ASSERTION(!mAudioCaptured, "Audio cannot be captured here!");
     // Awaken the decode loop if it's waiting for space to free up in the
     // audio queue.
     mDecoder->GetReentrantMonitor().NotifyAll();
   }
   PRInt64 offset = -1;
   PRUint32 frames = 0;
   // The state machine could have paused since we've released the decoder
   // monitor and acquired the audio monitor. Rather than acquire both
   // monitors, the audio stream also maintains whether it's paused or not.
   // This prevents us from doing a blocking write while holding the audio
   // monitor while paused; we would block, and the state machine won't be
   // able to acquire the audio monitor in order to resume or destroy the
   // audio stream.
   if (!mAudioStream->IsPaused()) {
+    LOG(PR_LOG_DEBUG, ("%p Decoder playing %d frames of data to stream for AudioData at %lld",
+                       mDecoder.get(), audio->mFrames, audio->mTime));
     mAudioStream->Write(audio->mAudioData,
                         audio->mFrames);
 
     offset = audio->mOffset;
     frames = audio->mFrames;
 
     // Dispatch events to the DOM for the audio just written.
     mEventManager.QueueWrittenAudioData(audio->mAudioData.get(),
@@ -1072,16 +1388,26 @@ nsHTMLMediaElement::NextFrameStatus nsBu
 
 void nsBuiltinDecoderStateMachine::SetVolume(double volume)
 {
   NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
   ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
   mVolume = volume;
 }
 
+void nsBuiltinDecoderStateMachine::SetAudioCaptured(bool aCaptured)
+{
+  NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
+  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
+  if (!mAudioCaptured && aCaptured) {
+    StopAudioThread();
+  }
+  mAudioCaptured = aCaptured;
+}
+
 double nsBuiltinDecoderStateMachine::GetCurrentTime() const
 {
   NS_ASSERTION(NS_IsMainThread() ||
                OnStateMachineThread() ||
                OnDecodeThread(),
                "Should be on main, decode, or state machine thread.");
 
   return static_cast<double>(mCurrentFrameTime) / static_cast<double>(USECS_PER_S);
@@ -1358,17 +1684,17 @@ nsBuiltinDecoderStateMachine::StartDecod
 
 nsresult
 nsBuiltinDecoderStateMachine::StartAudioThread()
 {
   NS_ASSERTION(OnStateMachineThread() || OnDecodeThread(),
                "Should be on state machine or decode thread.");
   mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
   mStopAudioThread = false;
-  if (HasAudio() && !mAudioThread) {
+  if (HasAudio() && !mAudioThread && !mAudioCaptured) {
     nsresult rv = NS_NewThread(getter_AddRefs(mAudioThread),
                                nsnull,
                                MEDIA_THREAD_STACK_SIZE);
     if (NS_FAILED(rv)) {
       LOG(PR_LOG_DEBUG, ("%p Changed state to SHUTDOWN because failed to create audio thread", mDecoder.get()));
       mState = DECODER_STATE_SHUTDOWN;
       return rv;
     }
@@ -1537,16 +1863,19 @@ nsresult nsBuiltinDecoderStateMachine::D
 
 void nsBuiltinDecoderStateMachine::DecodeSeek()
 {
   NS_ASSERTION(OnDecodeThread(), "Should be on decode thread.");
   mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
   NS_ASSERTION(mState == DECODER_STATE_SEEKING,
                "Only call when in seeking state");
 
+  mDidThrottleAudioDecoding = false;
+  mDidThrottleVideoDecoding = false;
+
   // During the seek, don't have a lock on the decoder state,
   // otherwise long seek operations can block the main thread.
   // The events dispatched to the main thread are SYNC calls.
   // These calls are made outside of the decode monitor lock so
   // it is safe for the main thread to makes calls that acquire
   // the lock since it won't deadlock. We check the state when
   // acquiring the lock again in case shutdown has occurred
   // during the time when we didn't have the lock.
@@ -1591,25 +1920,24 @@ void nsBuiltinDecoderStateMachine::Decod
       AudioData* audio = HasAudio() ? mReader->mAudioQueue.PeekFront() : nsnull;
       NS_ASSERTION(!audio || (audio->mTime <= seekTime &&
                               seekTime <= audio->mTime + audio->mDuration),
                     "Seek target should lie inside the first audio block after seek");
       PRInt64 startTime = (audio && audio->mTime < seekTime) ? audio->mTime : seekTime;
       mAudioStartTime = startTime;
       mPlayDuration = startTime - mStartTime;
       if (HasVideo()) {
-        nsAutoPtr<VideoData> video(mReader->mVideoQueue.PeekFront());
+        VideoData* video = mReader->mVideoQueue.PeekFront();
         if (video) {
           NS_ASSERTION(video->mTime <= seekTime && seekTime <= video->mEndTime,
                         "Seek target should lie inside the first frame after seek");
           {
             ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
             RenderVideoFrame(video, TimeStamp::Now());
           }
-          mReader->mVideoQueue.PopFront();
           nsCOMPtr<nsIRunnable> event =
             NS_NewRunnableMethod(mDecoder, &nsBuiltinDecoder::Invalidate);
           NS_DispatchToMainThread(event, NS_DISPATCH_NORMAL);
         }
       }
     }
   }
   mDecoder->StartProgressUpdates();
@@ -1702,16 +2030,20 @@ nsresult nsBuiltinDecoderStateMachine::R
     case DECODER_STATE_SHUTDOWN: {
       if (IsPlaying()) {
         StopPlayback();
       }
       StopAudioThread();
       StopDecodeThread();
       NS_ASSERTION(mState == DECODER_STATE_SHUTDOWN,
                    "How did we escape from the shutdown state?");
+      // Need to call this before dispatching nsDispatchDisposeEvent below, to
+      // ensure that any notifications dispatched by the stream graph
+      // will run before nsDispatchDisposeEvent below.
+      FinishOutputStreams();
       // We must daisy-chain these events to destroy the decoder. We must
       // destroy the decoder on the main thread, but we can't destroy the
       // decoder while this thread holds the decoder monitor. We can't
       // dispatch an event to the main thread to destroy the decoder from
       // here, as the event may run before the dispatch returns, and we
       // hold the decoder monitor here. We also want to guarantee that the
       // state machine is destroyed on the main thread, and so the
       // event runner running this function (which holds a reference to the
@@ -1877,28 +2209,31 @@ void nsBuiltinDecoderStateMachine::Rende
   NS_ASSERTION(OnStateMachineThread() || OnDecodeThread(),
                "Should be on state machine or decode thread.");
   mDecoder->GetReentrantMonitor().AssertNotCurrentThreadIn();
 
   if (aData->mDuplicate) {
     return;
   }
 
+  LOG(PR_LOG_DEBUG, ("%p Decoder playing video frame %lld",
+                     mDecoder.get(), aData->mTime));
+
   VideoFrameContainer* container = mDecoder->GetVideoFrameContainer();
   if (container) {
     container->SetCurrentFrame(aData->mDisplay, aData->mImage, aTarget);
   }
 }
 
 PRInt64
 nsBuiltinDecoderStateMachine::GetAudioClock()
 {
   NS_ASSERTION(OnStateMachineThread(), "Should be on state machine thread.");
   mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
-  if (!HasAudio())
+  if (!HasAudio() || mAudioCaptured)
     return -1;
   // We must hold the decoder monitor while using the audio stream off the
   // audio thread to ensure that it doesn't get destroyed on the audio thread
   // while we're using it.
   if (!mAudioStream) {
     // Audio thread hasn't played any data yet.
     return mAudioStartTime;
   }
@@ -1948,16 +2283,17 @@ void nsBuiltinDecoderStateMachine::Advan
   PRInt64 remainingTime = AUDIO_DURATION_USECS;
   NS_ASSERTION(clock_time >= mStartTime, "Should have positive clock time.");
   nsAutoPtr<VideoData> currentFrame;
   if (mReader->mVideoQueue.GetSize() > 0) {
     VideoData* frame = mReader->mVideoQueue.PeekFront();
     while (mRealTime || clock_time >= frame->mTime) {
       mVideoFrameEndTime = frame->mEndTime;
       currentFrame = frame;
+      LOG(PR_LOG_DEBUG, ("%p Decoder discarding video frame %lld", mDecoder.get(), frame->mTime));
       mReader->mVideoQueue.PopFront();
       // Notify the decode thread that the video queue's buffers may have
       // free'd up space for more frames.
       mDecoder->GetReentrantMonitor().NotifyAll();
       mDecoder->UpdatePlaybackOffset(frame->mOffset);
       if (mReader->mVideoQueue.GetSize() == 0)
         break;
       frame = mReader->mVideoQueue.PeekFront();
@@ -2235,16 +2571,22 @@ void nsBuiltinDecoderStateMachine::Timeo
   // as soon as possible. Nothing else needed to do, the state machine is
   // going to run anyway.
 }
 
 nsresult nsBuiltinDecoderStateMachine::ScheduleStateMachine() {
   return ScheduleStateMachine(0);
 }
 
+void nsBuiltinDecoderStateMachine::ScheduleStateMachineWithLockAndWakeDecoder() {
+  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
+  mon.NotifyAll();
+  ScheduleStateMachine(0);
+}
+
 nsresult nsBuiltinDecoderStateMachine::ScheduleStateMachine(PRInt64 aUsecs) {
   mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
   NS_ABORT_IF_FALSE(GetStateMachineThread(),
     "Must have a state machine thread to schedule");
 
   if (mState == DECODER_STATE_SHUTDOWN) {
     return NS_ERROR_FAILURE;
   }
--- a/content/media/nsBuiltinDecoderStateMachine.h
+++ b/content/media/nsBuiltinDecoderStateMachine.h
@@ -112,16 +112,18 @@ hardware (via nsAudioStream and libsydne
 #include "prmem.h"
 #include "nsThreadUtils.h"
 #include "nsBuiltinDecoder.h"
 #include "nsBuiltinDecoderReader.h"
 #include "nsAudioAvailableEventManager.h"
 #include "nsHTMLMediaElement.h"
 #include "mozilla/ReentrantMonitor.h"
 #include "nsITimer.h"
+#include "AudioSegment.h"
+#include "VideoSegment.h"
 
 /*
   The state machine class. This manages the decoding and seeking in the
   nsBuiltinDecoderReader on the decode thread, and A/V sync on the shared
   state machine thread, and controls the audio "push" thread.
 
   All internal state is synchronised via the decoder monitor. State changes
   are either propagated by NotifyAll on the monitor (typically when state
@@ -132,28 +134,33 @@ hardware (via nsAudioStream and libsydne
 */
 class nsBuiltinDecoderStateMachine : public nsDecoderStateMachine
 {
 public:
   typedef mozilla::ReentrantMonitor ReentrantMonitor;
   typedef mozilla::TimeStamp TimeStamp;
   typedef mozilla::TimeDuration TimeDuration;
   typedef mozilla::VideoFrameContainer VideoFrameContainer;
+  typedef nsBuiltinDecoder::OutputMediaStream OutputMediaStream;
+  typedef mozilla::SourceMediaStream SourceMediaStream;
+  typedef mozilla::AudioSegment AudioSegment;
+  typedef mozilla::VideoSegment VideoSegment;
 
   nsBuiltinDecoderStateMachine(nsBuiltinDecoder* aDecoder, nsBuiltinDecoderReader* aReader, bool aRealTime = false);
   ~nsBuiltinDecoderStateMachine();
 
   // nsDecoderStateMachine interface
   virtual nsresult Init(nsDecoderStateMachine* aCloneDonor);
   State GetState()
   { 
     mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
     return mState; 
   }
   virtual void SetVolume(double aVolume);
+  virtual void SetAudioCaptured(bool aCapture);
   virtual void Shutdown();
   virtual PRInt64 GetDuration();
   virtual void SetDuration(PRInt64 aDuration);
   void SetEndTime(PRInt64 aEndTime);
   virtual bool OnDecodeThread() const {
     return IsCurrentThread(mDecodeThread);
   }
 
@@ -244,16 +251,20 @@ public:
   static nsIThread* GetStateMachineThread();
 
   // Schedules the shared state machine thread to run the state machine.
   // If the state machine thread is the currently running the state machine,
   // we wait until that has completely finished before running the state
   // machine again.
   nsresult ScheduleStateMachine();
 
+  // Calls ScheduleStateMachine() after taking the decoder lock. Also
+  // notifies the decoder thread in case it's waiting on the decoder lock.
+  void ScheduleStateMachineWithLockAndWakeDecoder();
+
   // Schedules the shared state machine thread to run the state machine
   // in aUsecs microseconds from now, if it's not already scheduled to run
   // earlier, in which case the request is discarded.
   nsresult ScheduleStateMachine(PRInt64 aUsecs);
 
   // Creates and starts a new decode thread. Don't call this directly,
   // request a new decode thread by calling
   // StateMachineTracker::RequestCreateDecodeThread().
@@ -268,16 +279,23 @@ public:
 
   // Drop reference to decoder.  Only called during shutdown dance.
   void ReleaseDecoder() { mDecoder = nsnull; }
 
    // Called when a "MozAudioAvailable" event listener is added to the media
    // element. Called on the main thread.
    void NotifyAudioAvailableListener();
 
+  // Copy queued audio/video data in the reader to any output MediaStreams that
+  // need it.
+  void SendOutputStreamData();
+  void FinishOutputStreams();
+  bool HaveEnoughDecodedAudio(PRInt64 aAmpleAudioUSecs);
+  bool HaveEnoughDecodedVideo();
+
 protected:
 
   // Returns true if we've got less than aAudioUsecs microseconds of decoded
   // and playable data. The decoder monitor must be held.
   bool HasLowDecodedData(PRInt64 aAudioUsecs) const;
 
   // Returns true if we're running low on data which is not yet decoded.
   // The decoder monitor must be held.
@@ -431,16 +449,21 @@ protected:
   // Decode loop, decodes data until EOF or shutdown.
   // Called on the decode thread.
   void DecodeLoop();
 
   // Decode thread run function. Determines which of the Decode*() functions
   // to call.
   void DecodeThreadRun();
 
+  // Copy audio from an AudioData packet to aOutput. This may require
+  // inserting silence depending on the timing of the audio packet.
+  void SendOutputStreamAudio(AudioData* aAudio, OutputMediaStream* aStream,
+                             AudioSegment* aOutput);
+
   // State machine thread run function. Defers to RunStateMachine().
   nsresult CallRunStateMachine();
 
   // Performs one "cycle" of the state machine. Polls the state, and may send
   // a video frame to be displayed, and generally manages the decode. Called
   // periodically via timer to ensure the video stays in sync.
   nsresult RunStateMachine();
 
@@ -564,16 +587,20 @@ protected:
   // Volume of playback. 0.0 = muted. 1.0 = full volume. Read/Written
   // from the state machine and main threads. Synchronised via decoder
   // monitor.
   double mVolume;
 
   // Time at which we started decoding. Synchronised via decoder monitor.
   TimeStamp mDecodeStartTime;
 
+  // True if we shouldn't play our audio (but still write it to any capturing
+  // streams).
+  bool mAudioCaptured;
+
   // True if the media resource can be seeked. Accessed from the state
   // machine and main threads. Synchronised via decoder monitor.
   bool mSeekable;
 
   // True if an event to notify about a change in the playback
   // position has been queued, but not yet run. It is set to false when
   // the event is run. This allows coalescing of these events as they can be
   // produced many times per second. Synchronised via decoder monitor.
@@ -631,16 +658,22 @@ protected:
   // True if the decode thread has filled its buffers and is now
   // waiting to be awakened before it continues decoding. Synchronized
   // by the decoder monitor.
   bool mDecodeThreadWaiting;
 
   // True if we are decoding a realtime stream, like a camera stream
   bool mRealTime;
 
+  // Record whether audio and video decoding were throttled during the
+  // previous iteration of DecodeLoop. When we transition from
+  // throttled to not-throttled we need to pump decoding.
+  bool mDidThrottleAudioDecoding;
+  bool mDidThrottleVideoDecoding;
+
   // True if we've requested a new decode thread, but it has not yet been
   // created. Synchronized by the decoder monitor.
   bool mRequestedNewDecodeThread;
   
   PRUint32 mBufferingWait;
   PRInt64  mLowDataThresholdUsecs;
 
 private:
new file mode 100644
--- /dev/null
+++ b/content/media/nsDOMMediaStream.cpp
@@ -0,0 +1,58 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "nsDOMMediaStream.h"
+#include "nsDOMClassInfoID.h"
+#include "nsContentUtils.h"
+
+using namespace mozilla;
+
+DOMCI_DATA(MediaStream, nsDOMMediaStream)
+
+NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(nsDOMMediaStream)
+  NS_INTERFACE_MAP_ENTRY(nsISupports)
+  NS_INTERFACE_MAP_ENTRY(nsIDOMMediaStream)
+  NS_DOM_INTERFACE_MAP_ENTRY_CLASSINFO(MediaStream)
+NS_INTERFACE_MAP_END
+
+NS_IMPL_CYCLE_COLLECTING_ADDREF(nsDOMMediaStream)
+NS_IMPL_CYCLE_COLLECTING_RELEASE(nsDOMMediaStream)
+
+NS_IMPL_CYCLE_COLLECTION_CLASS(nsDOMMediaStream)
+
+NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN(nsDOMMediaStream)
+NS_IMPL_CYCLE_COLLECTION_UNLINK_END
+
+NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN(nsDOMMediaStream)
+NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
+
+nsDOMMediaStream::~nsDOMMediaStream()
+{
+  if (mStream) {
+    mStream->Destroy();
+  }
+}
+
+NS_IMETHODIMP
+nsDOMMediaStream::GetCurrentTime(double *aCurrentTime)
+{
+  *aCurrentTime = mStream ? MediaTimeToSeconds(mStream->GetCurrentTime()) : 0.0;
+  return NS_OK;
+}
+
+already_AddRefed<nsDOMMediaStream>
+nsDOMMediaStream::CreateInputStream()
+{
+  nsRefPtr<nsDOMMediaStream> stream = new nsDOMMediaStream();
+  MediaStreamGraph* gm = MediaStreamGraph::GetInstance();
+  stream->mStream = gm->CreateInputStream(stream);
+  return stream.forget();
+}
+
+bool
+nsDOMMediaStream::CombineWithPrincipal(nsIPrincipal* aPrincipal)
+{
+  return nsContentUtils::CombineResourcePrincipals(&mPrincipal, aPrincipal);
+}
new file mode 100644
--- /dev/null
+++ b/content/media/nsDOMMediaStream.h
@@ -0,0 +1,60 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
+/