Merge from mozilla-central to mozilla-inbound - DONTBUILD (no actual diffs from 7134aa74087d)
author Matt Brubeck <mbrubeck@mozilla.com>
Thu, 29 Sep 2011 22:08:14 -0700
changeset 77903 78dd488fa082702bdd3dc1bb508edfee8545ab5d
parent 77886 b5b082d183d0a3913b93ff3d8dd7f759c56894d4 (current diff)
parent 77902 7134aa74087d50f855dfea90b14d2b7424a8bf3c (diff)
child 77904 9c0aad93edc441090a14d47bf657b175187035dd
push id 3
push user felipc@gmail.com
push date Fri, 30 Sep 2011 20:09:13 +0000
milestone 10.0a1
--- a/accessible/src/base/AccIterator.cpp
+++ b/accessible/src/base/AccIterator.cpp
@@ -175,24 +175,27 @@ HTMLLabelIterator::Next()
     return nsnull;
 
   // Go up tree to get a name of ancestor label if there is one (an ancestor
   // <label> implicitly points to us). Don't go up farther than form or
   // document.
   nsAccessible* walkUp = mAcc->Parent();
   while (walkUp && !walkUp->IsDoc()) {
     nsIContent* walkUpElm = walkUp->GetContent();
-    if (walkUpElm->Tag() == nsGkAtoms::label) {
-      mLabelFilter = eSkipAncestorLabel; // prevent infinite loop
-      return walkUp;
+    if (walkUpElm->IsHTML()) {
+      if (walkUpElm->Tag() == nsGkAtoms::label &&
+          !walkUpElm->HasAttr(kNameSpaceID_None, nsGkAtoms::_for)) {
+        mLabelFilter = eSkipAncestorLabel; // prevent infinite loop
+        return walkUp;
+      }
+
+      if (walkUpElm->Tag() == nsGkAtoms::form)
+        break;
     }
 
-    if (walkUpElm->Tag() == nsGkAtoms::form)
-      break;
-
     walkUp = walkUp->Parent();
   }
 
   return nsnull;
 }
 
 
 ////////////////////////////////////////////////////////////////////////////////
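
The hunk above narrows when HTMLLabelIterator treats an ancestor <label> as an implicit label: the ancestor must be an HTML <label> without an explicit for attribute, so an explicit for/id association is not counted a second time. A minimal sketch of that rule, using only the calls that appear in the hunk (the helper name is illustrative, not part of the patch):

    // Sketch only: an ancestor HTML <label> names the control implicitly
    // only when it carries no @for attribute; an explicit for/id association
    // is reported through the normal label relation instead.
    static bool CountsAsImplicitAncestorLabel(nsIContent* aWalkUpElm)
    {
      return aWalkUpElm->IsHTML() &&
             aWalkUpElm->Tag() == nsGkAtoms::label &&
             !aWalkUpElm->HasAttr(kNameSpaceID_None, nsGkAtoms::_for);
    }

The relations and name mochitest changes below exercise both sides of this rule (label1_13/control1_13 with for="" versus label1_14/control1_14 with a matching for/id).
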
--- a/accessible/src/base/Statistics.h
+++ b/accessible/src/base/Statistics.h
@@ -44,14 +44,26 @@
 
 namespace mozilla {
 namespace a11y {
 namespace statistics {
 
   inline void A11yInitialized()
     { Telemetry::Accumulate(Telemetry::A11Y_INSTANTIATED, true); }
 
+  /**
+   * Report that ISimpleDOM* has been used.
+   */
+  inline void ISimpleDOMUsed()
+    { Telemetry::Accumulate(Telemetry::ISIMPLE_DOM_USAGE, 1); }
+
+  /**
+   * Report that IAccessibleTable has been used.
+   */
+  inline void IAccessibleTableUsed()
+    { Telemetry::Accumulate(Telemetry::IACCESSIBLE_TABLE_USAGE, 1); }
+
 } // namespace statistics
 } // namespace a11y
 } // namespace mozilla
 
 #endif
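
The two helpers added above are wired into the MSAA QueryInterface implementations in the hunks that follow. A minimal sketch of the call pattern, assuming a stand-in wrapper class (the class name is hypothetical; the interface, helper, and AddRef idiom are taken from this patch):

    // Sketch only: mirrors the nsAccessNodeWrap/nsDocAccessibleWrap changes below.
    STDMETHODIMP
    SomeAccessNodeWrap::QueryInterface(REFIID iid, void** ppv)
    {
      *ppv = NULL;
      if (IID_ISimpleDOMNode == iid) {
        // Record that an external client asked for an ISimpleDOM* interface.
        mozilla::a11y::statistics::ISimpleDOMUsed();
        *ppv = static_cast<ISimpleDOMNode*>(this);
        reinterpret_cast<IUnknown*>(*ppv)->AddRef();
        return S_OK;
      }
      return E_NOINTERFACE;
    }
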
 
--- a/accessible/src/msaa/CAccessibleTable.cpp
+++ b/accessible/src/msaa/CAccessibleTable.cpp
@@ -44,31 +44,35 @@
 #include "AccessibleTable_i.c"
 #include "AccessibleTable2_i.c"
 
 #include "nsIAccessible.h"
 #include "nsIAccessibleTable.h"
 #include "nsIWinAccessNode.h"
 #include "nsAccessNodeWrap.h"
 #include "nsWinUtils.h"
+#include "Statistics.h"
 
 #include "nsCOMPtr.h"
 #include "nsString.h"
 
+using namespace mozilla::a11y;
+
 #define CANT_QUERY_ASSERTION_MSG \
 "Subclass of CAccessibleTable doesn't implement nsIAccessibleTable"\
 
 // IUnknown
 
 STDMETHODIMP
 CAccessibleTable::QueryInterface(REFIID iid, void** ppv)
 {
   *ppv = NULL;
 
   if (IID_IAccessibleTable == iid) {
+    statistics::IAccessibleTableUsed();
     *ppv = static_cast<IAccessibleTable*>(this);
     (reinterpret_cast<IUnknown*>(*ppv))->AddRef();
     return S_OK;
   }
 
   if (IID_IAccessibleTable2 == iid) {
     *ppv = static_cast<IAccessibleTable2*>(this);
     (reinterpret_cast<IUnknown*>(*ppv))->AddRef();
--- a/accessible/src/msaa/nsAccessNodeWrap.cpp
+++ b/accessible/src/msaa/nsAccessNodeWrap.cpp
@@ -41,29 +41,31 @@
 #include "AccessibleApplication.h"
 #include "ISimpleDOMNode_i.c"
 
 #include "nsAccessibilityService.h"
 #include "nsApplicationAccessibleWrap.h"
 #include "nsCoreUtils.h"
 #include "nsRootAccessible.h"
 #include "nsWinUtils.h"
+#include "Statistics.h"
 
 #include "nsAttrName.h"
 #include "nsIDocument.h"
 #include "nsIDOMNodeList.h"
 #include "nsIDOMNSHTMLElement.h"
 #include "nsIFrame.h"
 #include "nsINameSpaceManager.h"
 #include "nsPIDOMWindow.h"
 #include "nsIServiceManager.h"
 
 #include "mozilla/Preferences.h"
 
 using namespace mozilla;
+using namespace mozilla::a11y;
 
 /// the accessible library and cached methods
 HINSTANCE nsAccessNodeWrap::gmAccLib = nsnull;
 HINSTANCE nsAccessNodeWrap::gmUserLib = nsnull;
 LPFNACCESSIBLEOBJECTFROMWINDOW nsAccessNodeWrap::gmAccessibleObjectFromWindow = nsnull;
 LPFNLRESULTFROMOBJECT nsAccessNodeWrap::gmLresultFromObject = NULL;
 LPFNNOTIFYWINEVENT nsAccessNodeWrap::gmNotifyWinEvent = nsnull;
 LPFNGETGUITHREADINFO nsAccessNodeWrap::gmGetGUIThreadInfo = nsnull;
@@ -115,21 +117,24 @@ nsAccessNodeWrap::QueryNativeInterface(R
 //-----------------------------------------------------
 // IUnknown interface methods - see iunknown.h for documentation
 //-----------------------------------------------------
 
 STDMETHODIMP nsAccessNodeWrap::QueryInterface(REFIID iid, void** ppv)
 {
   *ppv = nsnull;
 
-  if (IID_IUnknown == iid || IID_ISimpleDOMNode == iid)
+  if (IID_IUnknown == iid) {
     *ppv = static_cast<ISimpleDOMNode*>(this);
-
-  if (nsnull == *ppv)
+  } else if (IID_ISimpleDOMNode == iid) {
+    statistics::ISimpleDOMUsed();
+    *ppv = static_cast<ISimpleDOMNode*>(this);
+  } else {
     return E_NOINTERFACE;      //iid not supported.
+  }
    
   (reinterpret_cast<IUnknown*>(*ppv))->AddRef(); 
   return S_OK;
 }
 
 STDMETHODIMP
 nsAccessNodeWrap::QueryService(REFGUID guidService, REFIID iid, void** ppv)
 {
--- a/accessible/src/msaa/nsDocAccessibleWrap.cpp
+++ b/accessible/src/msaa/nsDocAccessibleWrap.cpp
@@ -38,27 +38,30 @@
 
 #include "mozilla/dom/TabChild.h"
 
 #include "nsDocAccessibleWrap.h"
 #include "ISimpleDOMDocument_i.c"
 #include "nsIAccessibilityService.h"
 #include "nsRootAccessible.h"
 #include "nsWinUtils.h"
+#include "Statistics.h"
 
 #include "nsIDocShell.h"
 #include "nsIDocShellTreeNode.h"
 #include "nsIFrame.h"
 #include "nsIInterfaceRequestorUtils.h"
 #include "nsISelectionController.h"
 #include "nsIServiceManager.h"
 #include "nsIURI.h"
 #include "nsIViewManager.h"
 #include "nsIWebNavigation.h"
 
+using namespace mozilla::a11y;
+
 /* For documentation of the accessibility architecture, 
  * see http://lxr.mozilla.org/seamonkey/source/accessible/accessible-docs.html
  */
 
 ////////////////////////////////////////////////////////////////////////////////
 // nsDocAccessibleWrap
 ////////////////////////////////////////////////////////////////////////////////
 
@@ -86,22 +89,21 @@ STDMETHODIMP_(ULONG) nsDocAccessibleWrap
   return nsAccessNode::Release();
 }
 
 // Microsoft COM QueryInterface
 STDMETHODIMP nsDocAccessibleWrap::QueryInterface(REFIID iid, void** ppv)
 {
   *ppv = NULL;
 
-  if (IID_ISimpleDOMDocument == iid)
-    *ppv = static_cast<ISimpleDOMDocument*>(this);
+  if (IID_ISimpleDOMDocument != iid)
+    return nsHyperTextAccessibleWrap::QueryInterface(iid, ppv);
 
-  if (NULL == *ppv)
-    return nsHyperTextAccessibleWrap::QueryInterface(iid, ppv);
-    
+  statistics::ISimpleDOMUsed();
+  *ppv = static_cast<ISimpleDOMDocument*>(this);
   (reinterpret_cast<IUnknown*>(*ppv))->AddRef();
   return S_OK;
 }
 
 STDMETHODIMP nsDocAccessibleWrap::get_URL(/* [out] */ BSTR __RPC_FAR *aURL)
 {
 __try {
   *aURL = NULL;
--- a/accessible/src/msaa/nsTextAccessibleWrap.cpp
+++ b/accessible/src/msaa/nsTextAccessibleWrap.cpp
@@ -36,23 +36,26 @@
  *
  * ***** END LICENSE BLOCK ***** */
 
 #include "nsTextAccessibleWrap.h"
 #include "ISimpleDOMText_i.c"
 
 #include "nsCoreUtils.h"
 #include "nsDocAccessible.h"
+#include "Statistics.h"
 #include "nsIFrame.h"
 #include "nsFontMetrics.h"
 #include "nsPresContext.h"
 #include "nsLayoutUtils.h"
 
 #include "gfxFont.h"
 
+using namespace mozilla::a11y;
+
 ////////////////////////////////////////////////////////////////////////////////
 // nsTextAccessibleWrap Accessible
 ////////////////////////////////////////////////////////////////////////////////
 
 nsTextAccessibleWrap::
   nsTextAccessibleWrap(nsIContent *aContent, nsIWeakReference *aShell) :
   nsTextAccessible(aContent, aShell)
 {
@@ -67,21 +70,24 @@ STDMETHODIMP_(ULONG) nsTextAccessibleWra
 {
   return nsAccessNode::Release();
 }
 
 STDMETHODIMP nsTextAccessibleWrap::QueryInterface(REFIID iid, void** ppv)
 {
   *ppv = nsnull;
 
-  if (IID_IUnknown == iid || IID_ISimpleDOMText == iid)
+  if (IID_IUnknown == iid) {
     *ppv = static_cast<ISimpleDOMText*>(this);
-
-  if (nsnull == *ppv)
+  } else if (IID_ISimpleDOMText == iid) {
+    statistics::ISimpleDOMUsed();
+    *ppv = static_cast<ISimpleDOMText*>(this);
+  } else {
     return nsAccessibleWrap::QueryInterface(iid, ppv);
+  }
    
   (reinterpret_cast<IUnknown*>(*ppv))->AddRef(); 
   return S_OK;
 }
 
 STDMETHODIMP nsTextAccessibleWrap::get_domText( 
     /* [retval][out] */ BSTR __RPC_FAR *aDomText)
 {
--- a/accessible/tests/mochitest/name/test_general.html
+++ b/accessible/tests/mochitest/name/test_general.html
@@ -165,21 +165,23 @@
 
       testName("textboxinstart", "Two days.");
       testName("textbox1", "days.");
 
       testName("comboinmiddle", "Subscribe to ATOM feed.");
       testName("combo4", "Subscribe to ATOM feed.");
 
       testName("comboinmiddle2", "Play the Haliluya sound when new mail arrives");
-      testName("combo5", "Play the Haliluya sound when new mail arrives");
+      testName("combo5", null); // label isn't used as a name for control
       testName("checkbox", "Play the Haliluya sound when new mail arrives");
+      testName("comboinmiddle3", "Play the Haliluya sound when new mail arrives");
+      testName("combo6", "Play the Haliluya sound when new mail arrives");
 
       testName("comboinend", "This day was sunny");
-      testName("combo6", "This day was");
+      testName("combo7", "This day was");
 
       testName("textboxinend", "This day was sunny");
       testName("textbox2", "This day was");
 
       // placeholder
       testName("ph_password", "a placeholder");
       testName("ph_text", "a placeholder");
       testName("ph_textarea", "a placeholder");
@@ -208,16 +210,21 @@
      title="Clean up our tree walker ">
     Mozilla Bug 530081
   </a><br>
   <a target="_blank"
      href="https://bugzilla.mozilla.org/show_bug.cgi?id=604391"
      title="Use placeholder as name if name is otherwise empty">
     Mozilla Bug 604391
   </a>
+  <a target="_blank"
+     href="https://bugzilla.mozilla.org/show_bug.cgi?id=669312"
+     title="Accessible name is duplicated when input has a label associated uisng for/id and is wrapped around the input">
+    Mozilla Bug 669312
+  </a>
   <p id="display"></p>
   <div id="content" style="display: none"></div>
   <pre id="test">
   </pre>
 
   <!-- aria-label, simple label -->
   <span id="btn_simple_aria_label" role="button" aria-label="I am a button"/>
   <br/>
@@ -432,20 +439,28 @@
       <select id="combo5">
         <option>Haliluya</option>
         <option>Hurra</option>
       </select>
       sound when new mail arrives
     </label>
     <input id="checkbox" type="checkbox" />
 
+    <label id="comboinmiddle3" for="combo6">Play the
+      <select id="combo6">
+        <option>Haliluya</option>
+        <option>Hurra</option>
+      </select>
+      sound when new mail arrives
+    </label>
+
     <!-- at the end (without and with whitespaces) -->
     <label id="comboinend">
       This day was
-      <select id="combo6" name="occupation">
+      <select id="combo7" name="occupation">
         <option>sunny</option>
         <option>rainy</option>
       </select></label>
 
     <label id="textboxinend">
       This day was
       <input id="textbox2" value="sunny">
     </label>
--- a/accessible/tests/mochitest/relations/test_general.html
+++ b/accessible/tests/mochitest/relations/test_general.html
@@ -36,16 +36,20 @@
       testRelation("control1_6", RELATION_LABELLED_BY, "label1_6");
       testRelation("control1_7", RELATION_LABELLED_BY, "label1_7");
       testRelation("control1_8", RELATION_LABELLED_BY, "label1_8");
       testRelation("control1_9", RELATION_LABELLED_BY, "label1_9");
       testRelation("control1_10", RELATION_LABELLED_BY, "label1_10");
       testRelation("control1_11", RELATION_LABELLED_BY, "label1_11");
       testRelation("control1_12", RELATION_LABELLED_BY, "label1_12");
 
+      testRelation("label1_13", RELATION_LABEL_FOR, null);
+      testRelation("control1_13", RELATION_LABELLED_BY, null);
+      testRelation("control1_14", RELATION_LABELLED_BY, "label1_14");
+
       // aria-labelledby
       testRelation("label2", RELATION_LABEL_FOR, "checkbox2");
       testRelation("checkbox2", RELATION_LABELLED_BY, "label2");
 
       // aria-labelledby, multiple relations
       testRelation("label3", RELATION_LABEL_FOR, "checkbox3");
       testRelation("label4", RELATION_LABEL_FOR, "checkbox3");
       testRelation("checkbox3", RELATION_LABELLED_BY, ["label3", "label4"]);
@@ -153,16 +157,21 @@
     Mozilla Bug 475298
   </a>
   <a target="_blank"
      href="https://bugzilla.mozilla.org/show_bug.cgi?id=558036"
      title="make HTML <output> accessible">
     Mozilla Bug 558036
   </a>
   <a target="_blank"
+     href="https://bugzilla.mozilla.org/show_bug.cgi?id=682790"
+     title="Ignore implicit label association when it's associated explicitly">
+    Mozilla Bug 682790
+  </a>
+  <a target="_blank"
      href="https://bugzilla.mozilla.org/show_bug.cgi?id=687393"
      title="HTML select options gets relation from containing label">
     Mozilla Bug 687393
   </a>
   <p id="display"></p>
   <div id="content" style="display: none"></div>
   <pre id="test">
   </pre>
@@ -199,16 +208,23 @@
   </label>
   <label id="label1_11">Label
     <input id="control1_11" type="image">
   </label>
   <label id="label1_12">Label
     <progress id="control1_12"></progress>
   </label>
 
+  <label id="label1_13" for="">Label
+    <input id="control1_13">
+  </label>
+  <label id="label1_14" for="control1_14">Label
+    <input id="control1_14">
+  </label>
+
   <span id="label2">label</span>
   <span role="checkbox" id="checkbox2" aria-labelledby="label2"></span>
 
   <span id="label3">label1</span>
   <span id="label4">label2</span>
   <span role="checkbox" id="checkbox3" aria-labelledby="label3 label4"></span>
 
   <span id="descr1">description</span>
--- a/content/html/content/public/nsHTMLMediaElement.h
+++ b/content/html/content/public/nsHTMLMediaElement.h
@@ -318,17 +318,17 @@ public:
 
   /**
    * Returns the load group for this media element's owner document.
    * XXX XBL2 issue.
    */
   already_AddRefed<nsILoadGroup> GetDocumentLoadGroup();
 
   /**
-   * Returns PR_TRUE if the media has played or completed a seek.
+   * Returns true if the media has played or completed a seek.
    * Used by video frame to determine whether to paint the poster.
    */
   bool GetPlayedOrSeeked() const { return mHasPlayedOrSeeked; }
 
   nsresult CopyInnerTo(nsGenericElement* aDest) const;
 
   /**
    * Sets the Accept header on the HTTP channel to the required
@@ -338,17 +338,17 @@ public:
 
   /**
    * Sets the required request headers on the HTTP channel for
    * video or audio requests.
    */
   void SetRequestHeaders(nsIHttpChannel* aChannel);
 
   /**
-   * Fires a timeupdate event. If aPeriodic is PR_TRUE, the event will only
+   * Fires a timeupdate event. If aPeriodic is true, the event will only
    * be fired if we've not fired a timeupdate event (for any reason) in the
    * last 250ms, as required by the spec when the current time is periodically
    * increasing during playback.
    */
   void FireTimeUpdate(bool aPeriodic);
 
 protected:
   class MediaLoadListener;
@@ -634,17 +634,17 @@ protected:
 
   // Reference to the source element last returned by GetNextSource().
   // This is the child source element which we're trying to load from.
   nsCOMPtr<nsIContent> mSourceLoadCandidate;
 
   // An audio stream for writing audio directly from JS.
   nsRefPtr<nsAudioStream> mAudioStream;
 
-  // PR_TRUE if MozAudioAvailable events can be safely dispatched, based on
+  // True if MozAudioAvailable events can be safely dispatched, based on
   // a media and element same-origin check.
   bool mAllowAudioData;
 
   // If true then we have begun downloading the media content.
   // Set to false when completed, or not yet started.
   bool mBegun;
 
   // True when the decoder has loaded enough data to display the
@@ -674,63 +674,63 @@ protected:
   bool mMuted;
 
   // If TRUE then the media element was actively playing before the currently
   // in progress seeking. If FALSE then the media element is either not seeking
   // or was not actively playing before the current seek. Used to decide whether
   // to raise the 'waiting' event as per 4.7.1.8 in HTML 5 specification.
   bool mPlayingBeforeSeek;
 
-  // PR_TRUE iff this element is paused because the document is inactive
+  // True iff this element is paused because the document is inactive
   bool mPausedForInactiveDocument;
 
-  // PR_TRUE if we've reported a "waiting" event since the last
+  // True if we've reported a "waiting" event since the last
   // readyState change to HAVE_CURRENT_DATA.
   bool mWaitingFired;
 
-  // PR_TRUE if we're running the "load()" method.
+  // True if we're running the "load()" method.
   bool mIsRunningLoadMethod;
 
-  // PR_TRUE if we're loading the resource from the child source elements.
+  // True if we're loading the resource from the child source elements.
   bool mIsLoadingFromSourceChildren;
 
-  // PR_TRUE if we're delaying the "load" event. They are delayed until either
+  // True if we're delaying the "load" event. They are delayed until either
   // an error occurs, or the first frame is loaded.
   bool mDelayingLoadEvent;
 
-  // PR_TRUE when we've got a task queued to call SelectResource(),
+  // True when we've got a task queued to call SelectResource(),
   // or while we're running SelectResource().
   bool mIsRunningSelectResource;
 
-  // PR_TRUE if we suspended the decoder because we were paused,
+  // True if we suspended the decoder because we were paused,
   // preloading metadata is enabled, autoplay was not enabled, and we loaded
   // the first frame.
   bool mSuspendedAfterFirstFrame;
 
-  // PR_TRUE if we are allowed to suspend the decoder because we were paused,
+  // True if we are allowed to suspend the decoder because we were paused,
   // preloading metadata was enabled, autoplay was not enabled, and we loaded
   // the first frame.
   bool mAllowSuspendAfterFirstFrame;
 
-  // PR_TRUE if we've played or completed a seek. We use this to determine
+  // True if we've played or completed a seek. We use this to determine
   // when the poster frame should be shown.
   bool mHasPlayedOrSeeked;
 
-  // PR_TRUE if we've added a reference to ourselves to keep the element
+  // True if we've added a reference to ourselves to keep the element
   // alive while no-one is referencing it but the element may still fire
   // events of its own accord.
   bool mHasSelfReference;
 
-  // PR_TRUE if we've received a notification that the engine is shutting
+  // True if we've received a notification that the engine is shutting
   // down.
   bool mShuttingDown;
 
-  // PR_TRUE if we've suspended a load in the resource selection algorithm
-  // due to loading a preload:none media. When PR_TRUE, the resource we'll
+  // True if we've suspended a load in the resource selection algorithm
+  // due to loading a preload:none media. When true, the resource we'll
   // load when the user initiates either playback or an explicit load is
   // stored in mPreloadURI.
   bool mLoadIsSuspended;
 
-  // PR_TRUE if a same-origin check has been done for the media element and resource.
+  // True if a same-origin check has been done for the media element and resource.
   bool mMediaSecurityVerified;
 };
 
 #endif
--- a/content/html/content/src/nsHTMLAudioElement.cpp
+++ b/content/html/content/src/nsHTMLAudioElement.cpp
@@ -120,17 +120,17 @@ nsHTMLAudioElement::~nsHTMLAudioElement(
 NS_IMETHODIMP
 nsHTMLAudioElement::Initialize(nsISupports* aOwner, JSContext* aContext,
                                JSObject *aObj, PRUint32 argc, jsval *argv)
 {
   // Audio elements created using "new Audio(...)" should have
   // 'preload' set to 'auto' (since the script must intend to
   // play the audio)
   nsresult rv = SetAttr(kNameSpaceID_None, nsGkAtoms::preload,
-                        NS_LITERAL_STRING("auto"), PR_TRUE);
+                        NS_LITERAL_STRING("auto"), true);
   if (NS_FAILED(rv))
     return rv;
 
   if (argc <= 0) {
     // Nothing more to do here if we don't get any arguments.
     return NS_OK;
   }
 
@@ -138,17 +138,17 @@ nsHTMLAudioElement::Initialize(nsISuppor
   JSString* jsstr = JS_ValueToString(aContext, argv[0]);
   if (!jsstr)
     return NS_ERROR_FAILURE;
 
   nsDependentJSString str;
   if (!str.init(aContext, jsstr))
     return NS_ERROR_FAILURE;
 
-  rv = SetAttr(kNameSpaceID_None, nsGkAtoms::src, str, PR_TRUE);
+  rv = SetAttr(kNameSpaceID_None, nsGkAtoms::src, str, true);
   if (NS_FAILED(rv))
     return rv;
 
   // We have been specified with a src URL. Begin a load.
   QueueSelectResourceTask();
 
   return NS_OK;
 }
@@ -262,10 +262,10 @@ nsresult nsHTMLAudioElement::SetAcceptHe
       "audio/*;q=0.9,"
 #ifdef MOZ_OGG
       "application/ogg;q=0.7,"
 #endif
       "video/*;q=0.6,*/*;q=0.5");
 
     return aChannel->SetRequestHeader(NS_LITERAL_CSTRING("Accept"),
                                       value,
-                                      PR_FALSE);
+                                      false);
 }
--- a/content/html/content/src/nsHTMLMediaElement.cpp
+++ b/content/html/content/src/nsHTMLMediaElement.cpp
@@ -222,18 +222,18 @@ public:
   NS_IMETHOD Run() {
     // Silently cancel if our load has been cancelled.
     if (IsCancelled())
       return NS_OK;
     LOG_EVENT(PR_LOG_DEBUG, ("%p Dispatching simple event source error", mElement.get()));
     return nsContentUtils::DispatchTrustedEvent(mElement->GetOwnerDoc(),
                                                 mSource,
                                                 NS_LITERAL_STRING("error"),
-                                                PR_FALSE,
-                                                PR_TRUE);
+                                                false,
+                                                true);
   }
 };
 
 /**
  * There is a reference cycle involving this class: MediaLoadListener
  * holds a reference to the nsHTMLMediaElement, which holds a reference
  * to an nsIChannel, which holds a reference to this listener.
  * We break the reference cycle in OnStartRequest by clearing mElement.
@@ -411,17 +411,17 @@ NS_IMETHODIMP nsHTMLMediaElement::GetErr
   NS_IF_ADDREF(*aError = mError);
 
   return NS_OK;
 }
 
 /* readonly attribute boolean ended; */
 NS_IMETHODIMP nsHTMLMediaElement::GetEnded(bool *aEnded)
 {
-  *aEnded = mDecoder ? mDecoder->IsEnded() : PR_FALSE;
+  *aEnded = mDecoder ? mDecoder->IsEnded() : false;
 
   return NS_OK;
 }
 
 /* readonly attribute DOMString currentSrc; */
 NS_IMETHODIMP nsHTMLMediaElement::GetCurrentSrc(nsAString & aCurrentSrc)
 {
   nsCAutoString src;
@@ -455,17 +455,17 @@ nsHTMLMediaElement::OnChannelRedirect(ns
  
   nsCAutoString rangeVal;
   if (NS_SUCCEEDED(http->GetRequestHeader(rangeHdr, rangeVal))) {
     NS_ENSURE_STATE(!rangeVal.IsEmpty());
 
     http = do_QueryInterface(aNewChannel);
     NS_ENSURE_STATE(http);
  
-    nsresult rv = http->SetRequestHeader(rangeHdr, rangeVal, PR_FALSE);
+    nsresult rv = http->SetRequestHeader(rangeHdr, rangeVal, false);
     NS_ENSURE_SUCCESS(rv, rv);
   }
  
   return NS_OK;
 }
 
 void nsHTMLMediaElement::AbortExistingLoads()
 {
@@ -485,56 +485,56 @@ void nsHTMLMediaElement::AbortExistingLo
 
   if (mNetworkState == nsIDOMHTMLMediaElement::NETWORK_LOADING ||
       mNetworkState == nsIDOMHTMLMediaElement::NETWORK_IDLE)
   {
     DispatchEvent(NS_LITERAL_STRING("abort"));
   }
 
   mError = nsnull;
-  mLoadedFirstFrame = PR_FALSE;
-  mAutoplaying = PR_TRUE;
-  mIsLoadingFromSourceChildren = PR_FALSE;
-  mSuspendedAfterFirstFrame = PR_FALSE;
-  mAllowSuspendAfterFirstFrame = PR_TRUE;
+  mLoadedFirstFrame = false;
+  mAutoplaying = true;
+  mIsLoadingFromSourceChildren = false;
+  mSuspendedAfterFirstFrame = false;
+  mAllowSuspendAfterFirstFrame = true;
   mSourcePointer = nsnull;
 
   // TODO: The playback rate must be set to the default playback rate.
 
   if (mNetworkState != nsIDOMHTMLMediaElement::NETWORK_EMPTY) {
     mNetworkState = nsIDOMHTMLMediaElement::NETWORK_EMPTY;
     ChangeReadyState(nsIDOMHTMLMediaElement::HAVE_NOTHING);
-    mPaused = PR_TRUE;
+    mPaused = true;
 
     if (fireTimeUpdate) {
       // Since we destroyed the decoder above, the current playback position
       // will now be reported as 0. The playback position was non-zero when
       // we destroyed the decoder, so fire a timeupdate event so that the
       // change will be reflected in the controls.
-      FireTimeUpdate(PR_FALSE);
+      FireTimeUpdate(false);
     }
     DispatchEvent(NS_LITERAL_STRING("emptied"));
   }
 
   // We may have changed mPaused, mAutoplaying, mNetworkState and other
   // things which can affect AddRemoveSelfReference
   AddRemoveSelfReference();
 
-  mIsRunningSelectResource = PR_FALSE;
+  mIsRunningSelectResource = false;
 }
 
 void nsHTMLMediaElement::NoSupportedMediaSourceError()
 {
   NS_ASSERTION(mDelayingLoadEvent, "Load event not delayed during source selection?");
 
   mError = new nsMediaError(nsIDOMMediaError::MEDIA_ERR_SRC_NOT_SUPPORTED);
   mNetworkState = nsIDOMHTMLMediaElement::NETWORK_NO_SOURCE;
   DispatchAsyncEvent(NS_LITERAL_STRING("error"));
   // This clears mDelayingLoadEvent, so AddRemoveSelfReference will be called
-  ChangeDelayLoadStatus(PR_FALSE);
+  ChangeDelayLoadStatus(false);
 }
 
 typedef void (nsHTMLMediaElement::*SyncSectionFn)();
 
 // Runs a "synchronous section", a function that must run once the event loop
 // has reached a "stable state". See:
 // http://www.whatwg.org/specs/web-apps/current-work/multipage/webappapis.html#synchronous-section
 class nsSyncSection : public nsMediaEvent
@@ -568,70 +568,70 @@ void AsyncAwaitStableState(nsHTMLMediaEl
 {
   nsCOMPtr<nsIRunnable> event = new nsSyncSection(aElement, aClosure);
   nsCOMPtr<nsIAppShell> appShell = do_GetService(kAppShellCID);
   appShell->RunInStableState(event);
 }
 
 void nsHTMLMediaElement::QueueLoadFromSourceTask()
 {
-  ChangeDelayLoadStatus(PR_TRUE);
+  ChangeDelayLoadStatus(true);
   mNetworkState = nsIDOMHTMLMediaElement::NETWORK_LOADING;
   AsyncAwaitStableState(this, &nsHTMLMediaElement::LoadFromSourceChildren);
 }
 
 void nsHTMLMediaElement::QueueSelectResourceTask()
 {
   // Don't allow multiple async select resource calls to be queued.
   if (mIsRunningSelectResource)
     return;
-  mIsRunningSelectResource = PR_TRUE;
+  mIsRunningSelectResource = true;
   mNetworkState = nsIDOMHTMLMediaElement::NETWORK_NO_SOURCE;
   AsyncAwaitStableState(this, &nsHTMLMediaElement::SelectResource);
 }
 
 /* void load (); */
 NS_IMETHODIMP nsHTMLMediaElement::Load()
 {
   if (mIsRunningLoadMethod)
     return NS_OK;
-  SetPlayedOrSeeked(PR_FALSE);
-  mIsRunningLoadMethod = PR_TRUE;
+  SetPlayedOrSeeked(false);
+  mIsRunningLoadMethod = true;
   AbortExistingLoads();
   QueueSelectResourceTask();
-  mIsRunningLoadMethod = PR_FALSE;
+  mIsRunningLoadMethod = false;
   return NS_OK;
 }
 
 static bool HasSourceChildren(nsIContent *aElement)
 {
   for (nsIContent* child = aElement->GetFirstChild();
        child;
        child = child->GetNextSibling()) {
     if (child->IsHTML(nsGkAtoms::source))
     {
-      return PR_TRUE;
+      return true;
     }
   }
-  return PR_FALSE;
+  return false;
 }
 
 void nsHTMLMediaElement::SelectResource()
 {
   if (!HasAttr(kNameSpaceID_None, nsGkAtoms::src) && !HasSourceChildren(this)) {
     // The media element has neither a src attribute nor any source
     // element children, abort the load.
     mNetworkState = nsIDOMHTMLMediaElement::NETWORK_EMPTY;
     // This clears mDelayingLoadEvent, so AddRemoveSelfReference will be called
-    ChangeDelayLoadStatus(PR_FALSE);
-    mIsRunningSelectResource = PR_FALSE;
+    ChangeDelayLoadStatus(false);
+    mIsRunningSelectResource = false;
     return;
   }
 
-  ChangeDelayLoadStatus(PR_TRUE);
+  ChangeDelayLoadStatus(true);
 
   mNetworkState = nsIDOMHTMLMediaElement::NETWORK_LOADING;
   // Load event was delayed, and still is, so no need to call
   // AddRemoveSelfReference, since it must still be held
   DispatchAsyncEvent(NS_LITERAL_STRING("loadstart"));
 
   nsAutoString src;
   nsCOMPtr<nsIURI> uri;
@@ -643,33 +643,33 @@ void nsHTMLMediaElement::SelectResource(
       LOG(PR_LOG_DEBUG, ("%p Trying load from src=%s", this, NS_ConvertUTF16toUTF8(src).get()));
       NS_ASSERTION(!mIsLoadingFromSourceChildren,
         "Should think we're not loading from source children by default");
       mLoadingSrc = uri;
       if (mPreloadAction == nsHTMLMediaElement::PRELOAD_NONE) {
         // preload:none media, suspend the load here before we make any
         // network requests.
         SuspendLoad(uri);
-        mIsRunningSelectResource = PR_FALSE;
+        mIsRunningSelectResource = false;
         return;
       }
 
       rv = LoadResource(uri);
       if (NS_SUCCEEDED(rv)) {
-        mIsRunningSelectResource = PR_FALSE;
+        mIsRunningSelectResource = false;
         return;
       }
     }
     NoSupportedMediaSourceError();
   } else {
     // Otherwise, the source elements will be used.
-    mIsLoadingFromSourceChildren = PR_TRUE;
+    mIsLoadingFromSourceChildren = true;
     LoadFromSourceChildren();
   }
-  mIsRunningSelectResource = PR_FALSE;
+  mIsRunningSelectResource = false;
 }
 
 void nsHTMLMediaElement::NotifyLoadError()
 {
   if (!mIsLoadingFromSourceChildren) {
     LOG(PR_LOG_DEBUG, ("NotifyLoadError(), no supported media error"));
     NoSupportedMediaSourceError();
   } else if (mSourceLoadCandidate) {
@@ -689,56 +689,56 @@ void nsHTMLMediaElement::NotifyAudioAvai
   // Otherwise we hand ownership of the memory over to the event created by 
   // DispatchAudioAvailableEvent().
   nsAutoArrayPtr<float> frameBuffer(aFrameBuffer);
   // Do same-origin check on element and media before allowing MozAudioAvailable events.
   if (!mMediaSecurityVerified) {
     nsCOMPtr<nsIPrincipal> principal = GetCurrentPrincipal();
     nsresult rv = NodePrincipal()->Subsumes(principal, &mAllowAudioData);
     if (NS_FAILED(rv)) {
-      mAllowAudioData = PR_FALSE;
+      mAllowAudioData = false;
     }
   }
 
   DispatchAudioAvailableEvent(frameBuffer.forget(), aFrameBufferLength, aTime);
 }
 
 bool nsHTMLMediaElement::MayHaveAudioAvailableEventListener()
 {
   // Determine if the current element is focused, if it is not focused
   // then we should not try to blur.  Note: we allow for the case of
   // |var a = new Audio()| with no parent document.
   nsIDocument *document = GetDocument();
   if (!document) {
-    return PR_TRUE;
+    return true;
   }
 
   nsPIDOMWindow *window = document->GetInnerWindow();
   if (!window) {
-    return PR_TRUE;
+    return true;
   }
 
   return window->HasAudioAvailableEventListeners();
 }
 
 void nsHTMLMediaElement::LoadFromSourceChildren()
 {
   NS_ASSERTION(mDelayingLoadEvent,
                "Should delay load event (if in document) during load");
   NS_ASSERTION(mIsLoadingFromSourceChildren,
                "Must remember we're loading from source children");
-  while (PR_TRUE) {
+  while (true) {
     nsresult rv;
     nsIContent* child = GetNextSource();
     if (!child) {
       // Exhausted candidates, wait for more candidates to be appended to
       // the media element.
       mLoadWaitStatus = WAITING_FOR_SOURCE;
       mNetworkState = nsIDOMHTMLMediaElement::NETWORK_NO_SOURCE;
-      ChangeDelayLoadStatus(PR_FALSE);
+      ChangeDelayLoadStatus(false);
       return;
     }
 
     nsCOMPtr<nsIURI> uri;
     nsAutoString src,type;
 
     // Must have src attribute.
     if (!child->GetAttr(kNameSpaceID_None, nsGkAtoms::src, src)) {
@@ -780,29 +780,29 @@ void nsHTMLMediaElement::LoadFromSourceC
     // If we fail to load, loop back and try loading the next resource.
     DispatchAsyncSourceError(child);
   }
   NS_NOTREACHED("Execution should not reach here!");
 }
 
 void nsHTMLMediaElement::SuspendLoad(nsIURI* aURI)
 {
-  mLoadIsSuspended = PR_TRUE;
+  mLoadIsSuspended = true;
   mNetworkState = nsIDOMHTMLMediaElement::NETWORK_IDLE;
   DispatchAsyncEvent(NS_LITERAL_STRING("suspend"));
-  ChangeDelayLoadStatus(PR_FALSE);
+  ChangeDelayLoadStatus(false);
 }
 
 void nsHTMLMediaElement::ResumeLoad(PreloadAction aAction)
 {
   NS_ASSERTION(mLoadIsSuspended, "Can only resume preload if halted for one");
   nsCOMPtr<nsIURI> uri = mLoadingSrc;
-  mLoadIsSuspended = PR_FALSE;
+  mLoadIsSuspended = false;
   mPreloadAction = aAction;
-  ChangeDelayLoadStatus(PR_TRUE);
+  ChangeDelayLoadStatus(true);
   mNetworkState = nsIDOMHTMLMediaElement::NETWORK_LOADING;
   if (!mIsLoadingFromSourceChildren) {
     // We were loading from the element's src attribute.
     if (NS_FAILED(LoadResource(uri))) {
       NoSupportedMediaSourceError();
     }
   } else {
     // We were loading from a child <source> element. Try to resume the
@@ -876,17 +876,17 @@ void nsHTMLMediaElement::UpdatePreloadAc
     } else {
       // Preload as much of the video as we can, i.e. don't suspend after
       // the first frame.
       StopSuspendingAfterFirstFrame();
     }
 
   } else if (nextAction == nsHTMLMediaElement::PRELOAD_METADATA) {
     // Ensure that the video can be suspended after first frame.
-    mAllowSuspendAfterFirstFrame = PR_TRUE;
+    mAllowSuspendAfterFirstFrame = true;
     if (mLoadIsSuspended) {
       // Our load was previously suspended due to the media having preload
       // value "none". The preload value has changed to preload:metadata, so
       // resume the load. We'll pause the load again after we've read the
       // metadata.
       ResumeLoad(PRELOAD_METADATA);
     }
   }
@@ -959,17 +959,17 @@ nsresult nsHTMLMediaElement::LoadResourc
   channel->SetNotificationCallbacks(loadListener);
 
   nsCOMPtr<nsIStreamListener> listener;
   if (ShouldCheckAllowOrigin()) {
     listener =
       new nsCORSListenerProxy(loadListener,
                               NodePrincipal(),
                               channel,
-                              PR_FALSE,
+                              false,
                               &rv);
   } else {
     rv = nsContentUtils::GetSecurityManager()->
            CheckLoadURIWithPrincipal(NodePrincipal(),
                                      aURI,
                                      nsIScriptSecurityManager::STANDARD);
     listener = loadListener;
   }
@@ -977,17 +977,17 @@ nsresult nsHTMLMediaElement::LoadResourc
 
   nsCOMPtr<nsIHttpChannel> hc = do_QueryInterface(channel);
   if (hc) {
     // Use a byte range request from the start of the resource.
     // This enables us to detect if the stream supports byte range
     // requests, and therefore seeking, early.
     hc->SetRequestHeader(NS_LITERAL_CSTRING("Range"),
                          NS_LITERAL_CSTRING("bytes=0-"),
-                         PR_FALSE);
+                         false);
 
     SetRequestHeaders(hc);
   }
 
   rv = channel->AsyncOpen(listener, nsnull);
   NS_ENSURE_SUCCESS(rv, rv);
 
   // Else the channel must be open and starting to download. If it encounters
@@ -1006,21 +1006,21 @@ nsresult nsHTMLMediaElement::LoadWithCha
 {
   NS_ENSURE_ARG_POINTER(aChannel);
   NS_ENSURE_ARG_POINTER(aListener);
 
   *aListener = nsnull;
 
   AbortExistingLoads();
 
-  ChangeDelayLoadStatus(PR_TRUE);
+  ChangeDelayLoadStatus(true);
 
   nsresult rv = InitializeDecoderForChannel(aChannel, aListener);
   if (NS_FAILED(rv)) {
-    ChangeDelayLoadStatus(PR_FALSE);
+    ChangeDelayLoadStatus(false);
     return rv;
   }
 
   DispatchAsyncEvent(NS_LITERAL_STRING("loadstart"));
 
   return NS_OK;
 }
 
@@ -1030,21 +1030,21 @@ NS_IMETHODIMP nsHTMLMediaElement::MozLoa
 
   AbortExistingLoads();
 
   nsCOMPtr<nsIContent> content = do_QueryInterface(aOther);
   nsHTMLMediaElement* other = static_cast<nsHTMLMediaElement*>(content.get());
   if (!other || !other->mDecoder)
     return NS_OK;
 
-  ChangeDelayLoadStatus(PR_TRUE);
+  ChangeDelayLoadStatus(true);
 
   nsresult rv = InitializeDecoderAsClone(other->mDecoder);
   if (NS_FAILED(rv)) {
-    ChangeDelayLoadStatus(PR_FALSE);
+    ChangeDelayLoadStatus(false);
     return rv;
   }
 
   DispatchAsyncEvent(NS_LITERAL_STRING("loadstart"));
 
   return NS_OK;
 }
 
@@ -1145,23 +1145,23 @@ NS_IMETHODIMP nsHTMLMediaElement::Pause(
     LOG(PR_LOG_DEBUG, ("Loading due to Pause()"));
     nsresult rv = Load();
     NS_ENSURE_SUCCESS(rv, rv);
   } else if (mDecoder) {
     mDecoder->Pause();
   }
 
   bool oldPaused = mPaused;
-  mPaused = PR_TRUE;
-  mAutoplaying = PR_FALSE;
+  mPaused = true;
+  mAutoplaying = false;
   // We changed mPaused and mAutoplaying which can affect AddRemoveSelfReference
   AddRemoveSelfReference();
 
   if (!oldPaused) {
-    FireTimeUpdate(PR_FALSE);
+    FireTimeUpdate(false);
     DispatchAsyncEvent(NS_LITERAL_STRING("pause"));
   }
 
   return NS_OK;
 }
 
 /* attribute double volume; */
 NS_IMETHODIMP nsHTMLMediaElement::GetVolume(double *aVolume)
@@ -1271,37 +1271,37 @@ nsHTMLMediaElement::nsHTMLMediaElement(a
     mVolume(1.0),
     mChannels(0),
     mRate(0),
     mPreloadAction(PRELOAD_UNDEFINED),
     mMediaSize(-1,-1),
     mLastCurrentTime(0.0),
     mFragmentStart(-1.0),
     mFragmentEnd(-1.0),
-    mAllowAudioData(PR_FALSE),
-    mBegun(PR_FALSE),
-    mLoadedFirstFrame(PR_FALSE),
-    mAutoplaying(PR_TRUE),
-    mAutoplayEnabled(PR_TRUE),
-    mPaused(PR_TRUE),
-    mMuted(PR_FALSE),
-    mPlayingBeforeSeek(PR_FALSE),
-    mPausedForInactiveDocument(PR_FALSE),
-    mWaitingFired(PR_FALSE),
-    mIsRunningLoadMethod(PR_FALSE),
-    mIsLoadingFromSourceChildren(PR_FALSE),
-    mDelayingLoadEvent(PR_FALSE),
-    mIsRunningSelectResource(PR_FALSE),
-    mSuspendedAfterFirstFrame(PR_FALSE),
-    mAllowSuspendAfterFirstFrame(PR_TRUE),
-    mHasPlayedOrSeeked(PR_FALSE),
-    mHasSelfReference(PR_FALSE),
-    mShuttingDown(PR_FALSE),
-    mLoadIsSuspended(PR_FALSE),
-    mMediaSecurityVerified(PR_FALSE)
+    mAllowAudioData(false),
+    mBegun(false),
+    mLoadedFirstFrame(false),
+    mAutoplaying(true),
+    mAutoplayEnabled(true),
+    mPaused(true),
+    mMuted(false),
+    mPlayingBeforeSeek(false),
+    mPausedForInactiveDocument(false),
+    mWaitingFired(false),
+    mIsRunningLoadMethod(false),
+    mIsLoadingFromSourceChildren(false),
+    mDelayingLoadEvent(false),
+    mIsRunningSelectResource(false),
+    mSuspendedAfterFirstFrame(false),
+    mAllowSuspendAfterFirstFrame(true),
+    mHasPlayedOrSeeked(false),
+    mHasSelfReference(false),
+    mShuttingDown(false),
+    mLoadIsSuspended(false),
+    mMediaSecurityVerified(false)
 {
 #ifdef PR_LOGGING
   if (!gMediaElementLog) {
     gMediaElementLog = PR_NewLogModule("nsMediaElement");
   }
   if (!gMediaElementEventsLog) {
     gMediaElementEventsLog = PR_NewLogModule("nsMediaElementEvents");
   }
@@ -1328,22 +1328,22 @@ nsHTMLMediaElement::~nsHTMLMediaElement(
   if (mAudioStream) {
     mAudioStream->Shutdown();
     mAudioStream = nsnull;
   }
 }
 
 void nsHTMLMediaElement::StopSuspendingAfterFirstFrame()
 {
-  mAllowSuspendAfterFirstFrame = PR_FALSE;
+  mAllowSuspendAfterFirstFrame = false;
   if (!mSuspendedAfterFirstFrame)
     return;
-  mSuspendedAfterFirstFrame = PR_FALSE;
+  mSuspendedAfterFirstFrame = false;
   if (mDecoder) {
-    mDecoder->Resume(PR_TRUE);
+    mDecoder->Resume(true);
   }
 }
 
 void nsHTMLMediaElement::SetPlayedOrSeeked(bool aValue)
 {
   if (aValue == mHasPlayedOrSeeked)
     return;
 
@@ -1355,17 +1355,17 @@ void nsHTMLMediaElement::SetPlayedOrSeek
   frame->PresContext()->PresShell()->FrameNeedsReflow(frame,
                                                       nsIPresShell::eTreeChange,
                                                       NS_FRAME_IS_DIRTY);
 }
 
 NS_IMETHODIMP nsHTMLMediaElement::Play()
 {
   StopSuspendingAfterFirstFrame();
-  SetPlayedOrSeeked(PR_TRUE);
+  SetPlayedOrSeeked(true);
 
   if (mNetworkState == nsIDOMHTMLMediaElement::NETWORK_EMPTY) {
     nsresult rv = Load();
     NS_ENSURE_SUCCESS(rv, rv);
   }  else if (mLoadIsSuspended) {
     ResumeLoad(PRELOAD_ENOUGH);
   } else if (mDecoder) {
     if (mDecoder->IsEnded()) {
@@ -1383,28 +1383,28 @@ NS_IMETHODIMP nsHTMLMediaElement::Play()
   if (mPaused) {
     DispatchAsyncEvent(NS_LITERAL_STRING("play"));
     switch (mReadyState) {
     case nsIDOMHTMLMediaElement::HAVE_NOTHING:
       DispatchAsyncEvent(NS_LITERAL_STRING("waiting"));
       break;
     case nsIDOMHTMLMediaElement::HAVE_METADATA:
     case nsIDOMHTMLMediaElement::HAVE_CURRENT_DATA:
-      FireTimeUpdate(PR_FALSE);
+      FireTimeUpdate(false);
       DispatchAsyncEvent(NS_LITERAL_STRING("waiting"));
       break;
     case nsIDOMHTMLMediaElement::HAVE_FUTURE_DATA:
     case nsIDOMHTMLMediaElement::HAVE_ENOUGH_DATA:
       DispatchAsyncEvent(NS_LITERAL_STRING("playing"));
       break;
     }
   }
 
-  mPaused = PR_FALSE;
-  mAutoplaying = PR_FALSE;
+  mPaused = false;
+  mAutoplaying = false;
   // We changed mPaused and mAutoplaying which can affect AddRemoveSelfReference
   // and our preload status.
   AddRemoveSelfReference();
   UpdatePreloadAction();
 
   return NS_OK;
 }
 
@@ -1425,20 +1425,20 @@ bool nsHTMLMediaElement::ParseAttribute(
   if (aNamespaceID == kNameSpaceID_None) {
     if (aAttribute == nsGkAtoms::loopstart
        || aAttribute == nsGkAtoms::loopend
        || aAttribute == nsGkAtoms::start
        || aAttribute == nsGkAtoms::end) {
       return aResult.ParseDoubleValue(aValue);
     }
     else if (ParseImageAttribute(aAttribute, aValue, aResult)) {
-      return PR_TRUE;
+      return true;
     }
     else if (aAttribute == nsGkAtoms::preload) {
-      return aResult.ParseEnumValue(aValue, kPreloadTable, PR_FALSE);
+      return aResult.ParseEnumValue(aValue, kPreloadTable, false);
     }
   }
 
   return nsGenericHTMLElement::ParseAttribute(aNamespaceID, aAttribute, aValue,
                                               aResult);
 }
 
 nsresult nsHTMLMediaElement::SetAttr(PRInt32 aNameSpaceID, nsIAtom* aName,
@@ -1530,22 +1530,22 @@ static const char* gRawCodecs[] = {
 static bool IsRawEnabled()
 {
   return Preferences::GetBool("media.raw.enabled");
 }
 
 static bool IsRawType(const nsACString& aType)
 {
   if (!IsRawEnabled())
-    return PR_FALSE;
+    return false;
   for (PRUint32 i = 0; i < NS_ARRAY_LENGTH(gRawTypes); ++i) {
     if (aType.EqualsASCII(gRawTypes[i]))
-      return PR_TRUE;
+      return true;
   }
-  return PR_FALSE;
+  return false;
 }
 #endif
 #ifdef MOZ_OGG
 // See http://www.rfc-editor.org/rfc/rfc5334.txt for the definitions
 // of Ogg media types and codec types
 const char nsHTMLMediaElement::gOggTypes[3][16] = {
   "video/ogg",
   "audio/ogg",
@@ -1563,22 +1563,22 @@ nsHTMLMediaElement::IsOggEnabled()
 {
   return Preferences::GetBool("media.ogg.enabled");
 }
 
 bool
 nsHTMLMediaElement::IsOggType(const nsACString& aType)
 {
   if (!IsOggEnabled())
-    return PR_FALSE;
+    return false;
   for (PRUint32 i = 0; i < NS_ARRAY_LENGTH(gOggTypes); ++i) {
     if (aType.EqualsASCII(gOggTypes[i]))
-      return PR_TRUE;
+      return true;
   }
-  return PR_FALSE;
+  return false;
 }
 #endif
 
 #ifdef MOZ_WAVE
 // See http://www.rfc-editor.org/rfc/rfc2361.txt for the definitions
 // of WAVE media types and codec types. However, the audio/vnd.wave
 // MIME type described there is not used.
 const char nsHTMLMediaElement::gWaveTypes[4][16] = {
@@ -1598,22 +1598,22 @@ nsHTMLMediaElement::IsWaveEnabled()
 {
   return Preferences::GetBool("media.wave.enabled");
 }
 
 bool
 nsHTMLMediaElement::IsWaveType(const nsACString& aType)
 {
   if (!IsWaveEnabled())
-    return PR_FALSE;
+    return false;
   for (PRUint32 i = 0; i < NS_ARRAY_LENGTH(gWaveTypes); ++i) {
     if (aType.EqualsASCII(gWaveTypes[i]))
-      return PR_TRUE;
+      return true;
   }
-  return PR_FALSE;
+  return false;
 }
 #endif
 
 #ifdef MOZ_WEBM
 const char nsHTMLMediaElement::gWebMTypes[2][17] = {
   "video/webm",
   "audio/webm"
 };
@@ -1630,22 +1630,22 @@ nsHTMLMediaElement::IsWebMEnabled()
 {
   return Preferences::GetBool("media.webm.enabled");
 }
 
 bool
 nsHTMLMediaElement::IsWebMType(const nsACString& aType)
 {
   if (!IsWebMEnabled())
-    return PR_FALSE;
+    return false;
   for (PRUint32 i = 0; i < NS_ARRAY_LENGTH(gWebMTypes); ++i) {
     if (aType.EqualsASCII(gWebMTypes[i]))
-      return PR_TRUE;
+      return true;
   }
-  return PR_FALSE;
+  return false;
 }
 #endif
 
 /* static */
 nsHTMLMediaElement::CanPlayStatus 
 nsHTMLMediaElement::CanHandleMediaType(const char* aMIMEType,
                                        char const *const ** aCodecList)
 {
@@ -1676,42 +1676,42 @@ nsHTMLMediaElement::CanHandleMediaType(c
   return CANPLAY_NO;
 }
 
 /* static */
 bool nsHTMLMediaElement::ShouldHandleMediaType(const char* aMIMEType)
 {
 #ifdef MOZ_RAW
   if (IsRawType(nsDependentCString(aMIMEType)))
-    return PR_TRUE;
+    return true;
 #endif
 #ifdef MOZ_OGG
   if (IsOggType(nsDependentCString(aMIMEType)))
-    return PR_TRUE;
+    return true;
 #endif
 #ifdef MOZ_WEBM
   if (IsWebMType(nsDependentCString(aMIMEType)))
-    return PR_TRUE;
+    return true;
 #endif
   // We should not return true for Wave types, since there are some
   // Wave codecs actually in use in the wild that we don't support, and
   // we should allow those to be handled by plugins or helper apps.
   // Furthermore people can play Wave files on most platforms by other
   // means.
-  return PR_FALSE;
+  return false;
 }
 
 static bool
 CodecListContains(char const *const * aCodecs, const nsAString& aCodec)
 {
   for (PRInt32 i = 0; aCodecs[i]; ++i) {
     if (aCodec.EqualsASCII(aCodecs[i]))
-      return PR_TRUE;
+      return true;
   }
-  return PR_FALSE;
+  return false;
 }
 
 /* static */
 nsHTMLMediaElement::CanPlayStatus
 nsHTMLMediaElement::GetCanPlay(const nsAString& aType)
 {
   nsContentTypeParser parser(aType);
   nsAutoString mimeType;
@@ -1874,36 +1874,36 @@ nsresult nsHTMLMediaElement::InitializeD
 nsresult nsHTMLMediaElement::FinishDecoderSetup(nsMediaDecoder* aDecoder)
 {
   mDecoder = aDecoder;
 
   // Decoder has assumed ownership responsibility for remembering the URI.
   mLoadingSrc = nsnull;
 
   // Force a same-origin check before allowing events for this media resource.
-  mMediaSecurityVerified = PR_FALSE;
+  mMediaSecurityVerified = false;
 
   // The new stream has not been suspended by us.
-  mPausedForInactiveDocument = PR_FALSE;
+  mPausedForInactiveDocument = false;
   // But we may want to suspend it now.
   // This will also do an AddRemoveSelfReference.
   NotifyOwnerDocumentActivityChanged();
 
   nsresult rv = NS_OK;
 
   mDecoder->SetVolume(mMuted ? 0.0 : mVolume);
 
   if (!mPaused) {
-    SetPlayedOrSeeked(PR_TRUE);
+    SetPlayedOrSeeked(true);
     if (!mPausedForInactiveDocument) {
       rv = mDecoder->Play();
     }
   }
 
-  mBegun = PR_TRUE;
+  mBegun = true;
   return rv;
 }
 
 nsresult nsHTMLMediaElement::NewURIFromString(const nsAutoString& aURISpec, nsIURI** aURI)
 {
   NS_ENSURE_ARG_POINTER(aURI);
 
   *aURI = nsnull;
@@ -1972,32 +1972,32 @@ void nsHTMLMediaElement::MetadataLoaded(
   }
 }
 
 void nsHTMLMediaElement::FirstFrameLoaded(bool aResourceFullyLoaded)
 {
   ChangeReadyState(aResourceFullyLoaded ?
     nsIDOMHTMLMediaElement::HAVE_ENOUGH_DATA :
     nsIDOMHTMLMediaElement::HAVE_CURRENT_DATA);
-  ChangeDelayLoadStatus(PR_FALSE);
+  ChangeDelayLoadStatus(false);
 
   NS_ASSERTION(!mSuspendedAfterFirstFrame, "Should not have already suspended");
 
   if (mDecoder && mAllowSuspendAfterFirstFrame && mPaused &&
       !aResourceFullyLoaded &&
       !HasAttr(kNameSpaceID_None, nsGkAtoms::autoplay) &&
       mPreloadAction == nsHTMLMediaElement::PRELOAD_METADATA) {
-    mSuspendedAfterFirstFrame = PR_TRUE;
+    mSuspendedAfterFirstFrame = true;
     mDecoder->Suspend();
   }
 }
 
 void nsHTMLMediaElement::ResourceLoaded()
 {
-  mBegun = PR_FALSE;
+  mBegun = false;
   mNetworkState = nsIDOMHTMLMediaElement::NETWORK_IDLE;
   AddRemoveSelfReference();
   if (mReadyState >= nsIDOMHTMLMediaElement::HAVE_METADATA) {
     ChangeReadyState(nsIDOMHTMLMediaElement::HAVE_ENOUGH_DATA);
   }
   // Ensure a progress event is dispatched at the end of download.
   DispatchAsyncEvent(NS_LITERAL_STRING("progress"));
   // The download has stopped.
@@ -2035,53 +2035,53 @@ void nsHTMLMediaElement::LoadAborted()
 
 void nsHTMLMediaElement::Error(PRUint16 aErrorCode)
 {
   NS_ASSERTION(aErrorCode == nsIDOMMediaError::MEDIA_ERR_DECODE ||
                aErrorCode == nsIDOMMediaError::MEDIA_ERR_NETWORK ||
                aErrorCode == nsIDOMMediaError::MEDIA_ERR_ABORTED,
                "Only use nsIDOMMediaError codes!");
   mError = new nsMediaError(aErrorCode);
-  mBegun = PR_FALSE;
+  mBegun = false;
   DispatchAsyncEvent(NS_LITERAL_STRING("error"));
   if (mReadyState == nsIDOMHTMLMediaElement::HAVE_NOTHING) {
     mNetworkState = nsIDOMHTMLMediaElement::NETWORK_EMPTY;
     DispatchAsyncEvent(NS_LITERAL_STRING("emptied"));
   } else {
     mNetworkState = nsIDOMHTMLMediaElement::NETWORK_IDLE;
   }
   AddRemoveSelfReference();
-  ChangeDelayLoadStatus(PR_FALSE);
+  ChangeDelayLoadStatus(false);
 }
 
 void nsHTMLMediaElement::PlaybackEnded()
 {
   NS_ASSERTION(mDecoder->IsEnded(), "Decoder fired ended, but not in ended state");
   // We changed the state of IsPlaybackEnded which can affect AddRemoveSelfReference
   AddRemoveSelfReference();
 
   if (mDecoder && mDecoder->IsInfinite()) {
     LOG(PR_LOG_DEBUG, ("%p, got duration by reaching the end of the stream", this));
     DispatchAsyncEvent(NS_LITERAL_STRING("durationchange"));
   }
 
-  FireTimeUpdate(PR_FALSE);
+  FireTimeUpdate(false);
   DispatchAsyncEvent(NS_LITERAL_STRING("ended"));
 }
 
 void nsHTMLMediaElement::SeekStarted()
 {
   DispatchAsyncEvent(NS_LITERAL_STRING("seeking"));
-  FireTimeUpdate(PR_FALSE);
+  FireTimeUpdate(false);
 }
 
 void nsHTMLMediaElement::SeekCompleted()
 {
-  mPlayingBeforeSeek = PR_FALSE;
-  SetPlayedOrSeeked(PR_TRUE);
+  mPlayingBeforeSeek = false;
+  SetPlayedOrSeeked(true);
   DispatchAsyncEvent(NS_LITERAL_STRING("seeked"));
   // We changed whether we're seeking so we need to AddRemoveSelfReference
   AddRemoveSelfReference();
 }
 
 void nsHTMLMediaElement::DownloadSuspended()
 {
   DispatchAsyncEvent(NS_LITERAL_STRING("progress"));
@@ -2120,19 +2120,19 @@ void nsHTMLMediaElement::UpdateReadyStat
     // a chance to run.
     // The arrival of more data can't change us out of this readyState.
     return;
   }
 
   if (aNextFrame != NEXT_FRAME_AVAILABLE) {
     ChangeReadyState(nsIDOMHTMLMediaElement::HAVE_CURRENT_DATA);
     if (!mWaitingFired && aNextFrame == NEXT_FRAME_UNAVAILABLE_BUFFERING) {
-      FireTimeUpdate(PR_FALSE);
+      FireTimeUpdate(false);
       DispatchAsyncEvent(NS_LITERAL_STRING("waiting"));
-      mWaitingFired = PR_TRUE;
+      mWaitingFired = true;
     }
     return;
   }
 
   // Now see if we should set HAVE_ENOUGH_DATA.
   // If it's something we don't know the size of, then we can't
   // make a real estimate, so we go straight to HAVE_ENOUGH_DATA once
   // we've downloaded enough data that our download rate is considered
@@ -2179,21 +2179,21 @@ void nsHTMLMediaElement::ChangeReadyStat
     DispatchAsyncEvent(NS_LITERAL_STRING("waiting"));
   }
 
   if (oldState < nsIDOMHTMLMediaElement::HAVE_CURRENT_DATA &&
       mReadyState >= nsIDOMHTMLMediaElement::HAVE_CURRENT_DATA &&
       !mLoadedFirstFrame)
   {
     DispatchAsyncEvent(NS_LITERAL_STRING("loadeddata"));
-    mLoadedFirstFrame = PR_TRUE;
+    mLoadedFirstFrame = true;
   }
 
   if (mReadyState == nsIDOMHTMLMediaElement::HAVE_CURRENT_DATA) {
-    mWaitingFired = PR_FALSE;
+    mWaitingFired = false;
   }
 
   if (oldState < nsIDOMHTMLMediaElement::HAVE_FUTURE_DATA &&
       mReadyState >= nsIDOMHTMLMediaElement::HAVE_FUTURE_DATA) {
     DispatchAsyncEvent(NS_LITERAL_STRING("canplay"));
   }
 
   if (mReadyState == nsIDOMHTMLMediaElement::HAVE_ENOUGH_DATA) {
@@ -2219,22 +2219,22 @@ bool nsHTMLMediaElement::CanActivateAuto
          HasAttr(kNameSpaceID_None, nsGkAtoms::autoplay) &&
          mAutoplayEnabled &&
          !IsEditable();
 }
 
 void nsHTMLMediaElement::NotifyAutoplayDataReady()
 {
   if (CanActivateAutoplay()) {
-    mPaused = PR_FALSE;
+    mPaused = false;
     // We changed mPaused which can affect AddRemoveSelfReference
     AddRemoveSelfReference();
 
     if (mDecoder) {
-      SetPlayedOrSeeked(PR_TRUE);
+      SetPlayedOrSeeked(true);
       mDecoder->Play();
     }
     DispatchAsyncEvent(NS_LITERAL_STRING("play"));
   }
 }
 
 ImageContainer* nsHTMLMediaElement::GetImageContainer()
 {
@@ -2253,17 +2253,17 @@ ImageContainer* nsHTMLMediaElement::GetI
 
   nsRefPtr<LayerManager> manager =
     nsContentUtils::PersistentLayerManagerForDocument(GetOwnerDoc());
   if (!manager)
     return nsnull;
 
   mImageContainer = manager->CreateImageContainer();
   if (manager->IsCompositingCheap()) {
-    mImageContainer->SetDelayedConversion(PR_TRUE);
+    mImageContainer->SetDelayedConversion(true);
   }
   return mImageContainer;
 }
 
 nsresult nsHTMLMediaElement::DispatchAudioAvailableEvent(float* aFrameBuffer,
                                                          PRUint32 aFrameBufferLength,
                                                          float aTime)
 {
@@ -2279,17 +2279,17 @@ nsresult nsHTMLMediaElement::DispatchAud
 
   nsCOMPtr<nsIDOMEvent> event;
   nsresult rv = domDoc->CreateEvent(NS_LITERAL_STRING("MozAudioAvailableEvent"),
                                     getter_AddRefs(event));
   nsCOMPtr<nsIDOMNotifyAudioAvailableEvent> audioavailableEvent(do_QueryInterface(event));
   NS_ENSURE_SUCCESS(rv, rv);
 
   rv = audioavailableEvent->InitAudioAvailableEvent(NS_LITERAL_STRING("MozAudioAvailable"),
-                                                    PR_TRUE, PR_TRUE, frameBuffer.forget(), aFrameBufferLength,
+                                                    true, true, frameBuffer.forget(), aFrameBufferLength,
                                                     aTime, mAllowAudioData);
   NS_ENSURE_SUCCESS(rv, rv);
 
   bool dummy;
   return target->DispatchEvent(event, &dummy);
 }
 
 nsresult nsHTMLMediaElement::DispatchEvent(const nsAString& aName)
@@ -2302,18 +2302,18 @@ nsresult nsHTMLMediaElement::DispatchEve
   if (mPausedForInactiveDocument) {
     mPendingEvents.AppendElement(aName);
     return NS_OK;
   }
 
   return nsContentUtils::DispatchTrustedEvent(GetOwnerDoc(),
                                               static_cast<nsIContent*>(this),
                                               aName,
-                                              PR_FALSE,
-                                              PR_TRUE);
+                                              false,
+                                              true);
 }
 
 nsresult nsHTMLMediaElement::DispatchAsyncEvent(const nsAString& aName)
 {
   LOG_EVENT(PR_LOG_DEBUG, ("%p Queuing event %s", this,
             NS_ConvertUTF16toUTF8(aName).get()));
 
   nsCOMPtr<nsIRunnable> event = new nsAsyncEventRunner(aName, this);
@@ -2348,17 +2348,17 @@ bool nsHTMLMediaElement::IsPotentiallyPl
 }
 
 bool nsHTMLMediaElement::IsPlaybackEnded() const
 {
   // TODO:
   //   the current playback position is equal to the effective end of the media resource.
   //   See bug 449157.
   return mNetworkState >= nsIDOMHTMLMediaElement::HAVE_METADATA &&
-    mDecoder ? mDecoder->IsEnded() : PR_FALSE;
+    mDecoder ? mDecoder->IsEnded() : false;
 }
 
 already_AddRefed<nsIPrincipal> nsHTMLMediaElement::GetCurrentPrincipal()
 {
   if (!mDecoder)
     return nsnull;
 
   return mDecoder->GetCurrentPrincipal();
@@ -2379,17 +2379,17 @@ void nsHTMLMediaElement::NotifyOwnerDocu
 
   if (pauseForInactiveDocument != mPausedForInactiveDocument) {
     mPausedForInactiveDocument = pauseForInactiveDocument;
     if (mDecoder) {
       if (pauseForInactiveDocument) {
         mDecoder->Pause();
         mDecoder->Suspend();
       } else {
-        mDecoder->Resume(PR_FALSE);
+        mDecoder->Resume(false);
         DispatchPendingMediaEvents();
         if (!mPaused && !mDecoder->IsEnded()) {
           mDecoder->Play();
         }
       }
     }
   }
 
@@ -2440,17 +2440,17 @@ void nsHTMLMediaElement::DoRemoveSelfRef
 }
 
 nsresult nsHTMLMediaElement::Observe(nsISupports* aSubject,
                                      const char* aTopic, const PRUnichar* aData)
 {
   NS_ENSURE_TRUE(nsContentUtils::IsCallerChrome(), NS_ERROR_NOT_AVAILABLE);
   
   if (strcmp(aTopic, NS_XPCOM_SHUTDOWN_OBSERVER_ID) == 0) {
-    mShuttingDown = PR_TRUE;
+    mShuttingDown = true;
     AddRemoveSelfReference();
   }
   return NS_OK;
 }
 
 bool
 nsHTMLMediaElement::IsNodeOfType(PRUint32 aFlags) const
 {
@@ -2493,21 +2493,21 @@ nsIContent* nsHTMLMediaElement::GetNextS
 
   if (!mSourcePointer) {
     // First time this has been run, create a selection to cover children.
     mSourcePointer = do_CreateInstance("@mozilla.org/content/range;1");
 
     rv = mSourcePointer->SelectNodeContents(thisDomNode);
     if (NS_FAILED(rv)) return nsnull;
 
-    rv = mSourcePointer->Collapse(PR_TRUE);
+    rv = mSourcePointer->Collapse(true);
     if (NS_FAILED(rv)) return nsnull;
   }
 
-  while (PR_TRUE) {
+  while (true) {
 #ifdef DEBUG
     nsCOMPtr<nsIDOMNode> startContainer;
     rv = mSourcePointer->GetStartContainer(getter_AddRefs(startContainer));
     if (NS_FAILED(rv)) return nsnull;
     NS_ASSERTION(startContainer == thisDomNode,
                 "Should only iterate over direct children");
 #endif
 
@@ -2547,17 +2547,17 @@ void nsHTMLMediaElement::ChangeDelayLoad
     LOG(PR_LOG_DEBUG, ("%p ChangeDelayLoadStatus(%d) doc=0x%p", this, aDelay, mLoadBlockedDoc.get()));
   } else {
     if (mDecoder) {
       mDecoder->MoveLoadsToBackground();
     }
     LOG(PR_LOG_DEBUG, ("%p ChangeDelayLoadStatus(%d) doc=0x%p", this, aDelay, mLoadBlockedDoc.get()));
     // mLoadBlockedDoc might be null due to GC unlinking
     if (mLoadBlockedDoc) {
-      mLoadBlockedDoc->UnblockOnload(PR_FALSE);
+      mLoadBlockedDoc->UnblockOnload(false);
       mLoadBlockedDoc = nsnull;
     }
   }
 
   // We changed mDelayingLoadEvent which can affect AddRemoveSelfReference
   AddRemoveSelfReference();
 }
 
@@ -2617,17 +2617,17 @@ void nsHTMLMediaElement::SetRequestHeade
   SetAcceptHeader(aChannel);
 
   // Apache doesn't send Content-Length when gzip transfer encoding is used,
   // which prevents us from estimating the video length (if explicit Content-Duration
   // and a length spec in the container are not present either) and from seeking.
   // So, disable the standard "Accept-Encoding: gzip,deflate" that we usually send.
   // See bug 614760.
   aChannel->SetRequestHeader(NS_LITERAL_CSTRING("Accept-Encoding"),
-                             NS_LITERAL_CSTRING(""), PR_FALSE);
+                             NS_LITERAL_CSTRING(""), false);
 
   // Set the Referer header
   nsIDocument* doc = GetOwnerDoc();
   if (doc) {
     aChannel->SetReferrer(doc->GetDocumentURI());
   }
 }
 
--- a/content/html/content/src/nsHTMLVideoElement.cpp
+++ b/content/html/content/src/nsHTMLVideoElement.cpp
@@ -171,17 +171,17 @@ nsresult nsHTMLVideoElement::SetAcceptHe
         "video/*;q=0.9,"
 #ifdef MOZ_OGG
         "application/ogg;q=0.7,"
 #endif
         "audio/*;q=0.6,*/*;q=0.5");
 
     return aChannel->SetRequestHeader(NS_LITERAL_CSTRING("Accept"),
                                       value,
-                                      PR_FALSE);
+                                      false);
 }
 
 NS_IMPL_URI_ATTR(nsHTMLVideoElement, Poster, poster)
 
 NS_IMETHODIMP nsHTMLVideoElement::GetMozParsedFrames(PRUint32 *aMozParsedFrames)
 {
   NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
   *aMozParsedFrames = mDecoder ? mDecoder->GetFrameStatistics().GetParsedFrames() : 0;
--- a/content/media/VideoUtils.cpp
+++ b/content/media/VideoUtils.cpp
@@ -34,58 +34,58 @@
  * the terms of any one of the MPL, the GPL or the LGPL.
  *
  * ***** END LICENSE BLOCK ***** */
 
 #include "VideoUtils.h"
 #include "nsMathUtils.h"
 #include "prtypes.h"
 
-// Adds two 32bit unsigned numbers, retuns PR_TRUE if addition succeeded,
-// or PR_FALSE the if addition would result in an overflow.
+// Adds two 32bit unsigned numbers, returns true if the addition succeeded,
+// or false if the addition would result in an overflow.
 bool AddOverflow32(PRUint32 a, PRUint32 b, PRUint32& aResult) {
   PRUint64 rl = static_cast<PRUint64>(a) + static_cast<PRUint64>(b);
   if (rl > PR_UINT32_MAX) {
-    return PR_FALSE;
+    return false;
   }
   aResult = static_cast<PRUint32>(rl);
   return true;
 }
 
 bool MulOverflow32(PRUint32 a, PRUint32 b, PRUint32& aResult)
 {
-  // 32 bit integer multiplication with overflow checking. Returns PR_TRUE
-  // if the multiplication was successful, or PR_FALSE if the operation resulted
+  // 32 bit integer multiplication with overflow checking. Returns true
+  // if the multiplication was successful, or false if the operation resulted
   // in an integer overflow.
   PRUint64 a64 = a;
   PRUint64 b64 = b;
   PRUint64 r64 = a64 * b64;
   if (r64 > PR_UINT32_MAX)
-     return PR_FALSE;
+     return false;
   aResult = static_cast<PRUint32>(r64);
-  return PR_TRUE;
+  return true;
 }
 
-// Adds two 64bit numbers, retuns PR_TRUE if addition succeeded, or PR_FALSE
+// Adds two 64bit numbers, returns true if the addition succeeded, or false
 // if addition would result in an overflow.
 bool AddOverflow(PRInt64 a, PRInt64 b, PRInt64& aResult) {
   if (b < 1) {
     if (PR_INT64_MIN - b <= a) {
       aResult = a + b;
-      return PR_TRUE;
+      return true;
     }
   } else if (PR_INT64_MAX - b >= a) {
     aResult = a + b;
-    return PR_TRUE;
+    return true;
   }
-  return PR_FALSE;
+  return false;
 }
 
-// 64 bit integer multiplication with overflow checking. Returns PR_TRUE
-// if the multiplication was successful, or PR_FALSE if the operation resulted
+// 64 bit integer multiplication with overflow checking. Returns true
+// if the multiplication was successful, or false if the operation resulted
 // in an integer overflow.
 bool MulOverflow(PRInt64 a, PRInt64 b, PRInt64& aResult) {
   // We break a multiplication a * b into of sign_a * sign_b * abs(a) * abs(b)
   //
   // This is equivalent to:
   //
   // (sign_a * sign_b) * ((a_hi * 2^32) + a_lo) * ((b_hi * 2^32) + b_lo)
   //
@@ -108,29 +108,29 @@ bool MulOverflow(PRInt64 a, PRInt64 b, P
 
   PRInt64 abs_a = (a < 0) ? -a : a;
   PRInt64 abs_b = (b < 0) ? -b : b;
 
   if (abs_a < 0) {
     NS_ASSERTION(a == PR_INT64_MIN, "How else can this happen?");
     if (b == 0 || b == 1) {
       aResult = a * b;
-      return PR_TRUE;
+      return true;
     } else {
-      return PR_FALSE;
+      return false;
     }
   }
 
   if (abs_b < 0) {
     NS_ASSERTION(b == PR_INT64_MIN, "How else can this happen?");
     if (a == 0 || a == 1) {
       aResult = a * b;
-      return PR_TRUE;
+      return true;
     } else {
-      return PR_FALSE;
+      return false;
     }
   }
 
   NS_ASSERTION(abs_a >= 0 && abs_b >= 0, "abs values must be non-negative");
 
   PRInt64 a_hi = abs_a >> 32;
   PRInt64 a_lo = abs_a & 0xFFFFFFFF;
   PRInt64 b_hi = abs_b >> 32;
@@ -138,69 +138,69 @@ bool MulOverflow(PRInt64 a, PRInt64 b, P
 
   NS_ASSERTION((a_hi<<32) + a_lo == abs_a, "Partition must be correct");
   NS_ASSERTION((b_hi<<32) + b_lo == abs_b, "Partition must be correct");
 
   // In the sub-equation (a_hi * b_hi << 64), if a_hi or b_hi
   // are non-zero, this will overflow as it's shifted by 64.
   // Abort if this overflows.
   if (a_hi != 0 && b_hi != 0) {
-    return PR_FALSE;
+    return false;
   }
 
   // We can now assume that either a_hi or b_hi is 0.
   NS_ASSERTION(a_hi == 0 || b_hi == 0, "One of these must be 0");
 
   // Next we calculate:
   // (a_hi * b_lo << 32) + (a_lo * b_hi << 32)
   // We can factor this as:
   // (a_hi * b_lo + a_lo * b_hi) << 32
   PRInt64 q = a_hi * b_lo + a_lo * b_hi;
   if (q > PR_INT32_MAX) {
     // q will overflow when we shift by 32; abort.
-    return PR_FALSE;
+    return false;
   }
   q <<= 32;
 
   // Both a_lo and b_lo are less than INT32_MAX, so can't overflow.
   PRUint64 lo = a_lo * b_lo;
   if (lo > PR_INT64_MAX) {
-    return PR_FALSE;
+    return false;
   }
 
   // Add the final result. We must check for overflow during addition.
   if (!AddOverflow(q, static_cast<PRInt64>(lo), aResult)) {
-    return PR_FALSE;
+    return false;
   }
 
   aResult *= sign;
   NS_ASSERTION(a * b == aResult, "We didn't overflow, but result is wrong!");
-  return PR_TRUE;
+  return true;
 }
 
 // Converts from number of audio frames to microseconds, given the specified
 // audio rate.
 bool FramesToUsecs(PRInt64 aFrames, PRUint32 aRate, PRInt64& aOutUsecs)
 {
   PRInt64 x;
   if (!MulOverflow(aFrames, USECS_PER_S, x))
-    return PR_FALSE;
+    return false;
   aOutUsecs = x / aRate;
-  return PR_TRUE;
+  return true;
 }
 
 // Converts from microseconds to number of audio frames, given the specified
 // audio rate.
 bool UsecsToFrames(PRInt64 aUsecs, PRUint32 aRate, PRInt64& aOutFrames)
 {
   PRInt64 x;
   if (!MulOverflow(aUsecs, aRate, x))
-    return PR_FALSE;
+    return false;
   aOutFrames = x / USECS_PER_S;
-  return PR_TRUE;
+  return true;
 }
 
 static PRInt32 ConditionDimension(float aValue)
 {
   // This will exclude NaNs and too-big values.
   if (aValue > 1.0 && aValue <= PR_INT32_MAX)
     return PRInt32(NS_round(aValue));
   return 0;
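
For reference, a minimal standalone sketch of the widening trick the 32-bit helpers above rely on: promote both operands to 64 bits, do the arithmetic there, and report overflow when the result no longer fits in 32 bits. Standard <cstdint> types stand in for PRUint32/PRUint64; this is illustrative only, not the patched code.

#include <cassert>
#include <cstdint>

// Returns true and stores the sum if a + b fits in 32 bits, false on overflow.
static bool AddOverflow32Sketch(uint32_t a, uint32_t b, uint32_t& aResult) {
  uint64_t r = static_cast<uint64_t>(a) + static_cast<uint64_t>(b);
  if (r > UINT32_MAX)
    return false;
  aResult = static_cast<uint32_t>(r);
  return true;
}

// Returns true and stores the product if a * b fits in 32 bits, false on
// overflow. A 32x32-bit product always fits in 64 bits, so widening suffices.
static bool MulOverflow32Sketch(uint32_t a, uint32_t b, uint32_t& aResult) {
  uint64_t r = static_cast<uint64_t>(a) * static_cast<uint64_t>(b);
  if (r > UINT32_MAX)
    return false;
  aResult = static_cast<uint32_t>(r);
  return true;
}

int main() {
  uint32_t v;
  assert(AddOverflow32Sketch(1, 2, v) && v == 3);
  assert(!AddOverflow32Sketch(UINT32_MAX, 1, v));   // would wrap around
  assert(MulOverflow32Sketch(65536, 65535, v));     // 0xFFFF0000 still fits
  assert(!MulOverflow32Sketch(70000, 70000, v));    // ~4.9e9 does not
  return 0;
}

The 64-bit MulOverflow above cannot widen the same way, which is why it decomposes both operands into 32-bit halves and checks each partial product.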
--- a/content/media/VideoUtils.h
+++ b/content/media/VideoUtils.h
@@ -105,43 +105,43 @@ private:
     static void* operator new(size_t) CPP_THROW_NEW;
     static void operator delete(void*);
 
     ReentrantMonitor* mReentrantMonitor;
 };
 
 } // namespace mozilla
 
-// Adds two 32bit unsigned numbers, retuns PR_TRUE if addition succeeded,
-// or PR_FALSE the if addition would result in an overflow.
+// Adds two 32bit unsigned numbers, returns true if the addition succeeded,
+// or false if the addition would result in an overflow.
 bool AddOverflow32(PRUint32 a, PRUint32 b, PRUint32& aResult);
  
-// 32 bit integer multiplication with overflow checking. Returns PR_TRUE
-// if the multiplication was successful, or PR_FALSE if the operation resulted
+// 32 bit integer multiplication with overflow checking. Returns true
+// if the multiplication was successful, or false if the operation resulted
 // in an integer overflow.
 bool MulOverflow32(PRUint32 a, PRUint32 b, PRUint32& aResult);
 
-// Adds two 64bit numbers, retuns PR_TRUE if addition succeeded, or PR_FALSE
+// Adds two 64bit numbers, returns true if the addition succeeded, or false
 // if addition would result in an overflow.
 bool AddOverflow(PRInt64 a, PRInt64 b, PRInt64& aResult);
 
-// 64 bit integer multiplication with overflow checking. Returns PR_TRUE
-// if the multiplication was successful, or PR_FALSE if the operation resulted
+// 64 bit integer multiplication with overflow checking. Returns true
+// if the multiplication was successful, or false if the operation resulted
 // in an integer overflow.
 bool MulOverflow(PRInt64 a, PRInt64 b, PRInt64& aResult);
 
 // Converts from number of audio frames (aFrames) to microseconds, given
-// the specified audio rate (aRate). Stores result in aOutUsecs. Returns PR_TRUE
-// if the operation succeeded, or PR_FALSE if there was an integer overflow
+// the specified audio rate (aRate). Stores result in aOutUsecs. Returns true
+// if the operation succeeded, or false if there was an integer overflow
 // while calculating the conversion.
 bool FramesToUsecs(PRInt64 aFrames, PRUint32 aRate, PRInt64& aOutUsecs);
 
 // Converts from microseconds (aUsecs) to number of audio frames, given the
 // specified audio rate (aRate). Stores the result in aOutFrames. Returns
-// PR_TRUE if the operation succeeded, or PR_FALSE if there was an integer
+// true if the operation succeeded, or false if there was an integer
 // overflow while calculating the conversion.
 bool UsecsToFrames(PRInt64 aUsecs, PRUint32 aRate, PRInt64& aOutFrames);
 
 // Number of microseconds per second. 1e6.
 static const PRInt64 USECS_PER_S = 1000000;
 
 // Number of microseconds per millisecond.
 static const PRInt64 USECS_PER_MS = 1000;
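
As a worked illustration of the conversion the FramesToUsecs declaration above describes (microseconds = frames * USECS_PER_S / rate, with the multiplication guarded because frames * 1,000,000 can exceed 63 bits for very long streams), here is a standalone sketch with int64_t standing in for PRInt64 and kUsecsPerS mirroring USECS_PER_S; it is not the real helper.

#include <cstdint>
#include <cstdio>

static const int64_t kUsecsPerS = 1000000;

// Sketch of FramesToUsecs(): guard the multiplication, then divide by the rate.
static bool FramesToUsecsSketch(int64_t aFrames, uint32_t aRate, int64_t& aOutUsecs) {
  if (aRate == 0 || aFrames < 0 || aFrames > INT64_MAX / kUsecsPerS)
    return false;                           // would overflow or is invalid input
  aOutUsecs = aFrames * kUsecsPerS / aRate;
  return true;
}

int main() {
  int64_t usecs = 0;
  // One minute of 44.1 kHz audio is 2,646,000 frames -> 60,000,000 microseconds.
  if (FramesToUsecsSketch(44100LL * 60, 44100, usecs))
    std::printf("%lld\n", static_cast<long long>(usecs));
  return 0;
}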
--- a/content/media/nsAudioStream.cpp
+++ b/content/media/nsAudioStream.cpp
@@ -105,20 +105,20 @@ class nsNativeAudioStream : public nsAud
 
   double mVolume;
   void* mAudioHandle;
   int mRate;
   int mChannels;
 
   SampleFormat mFormat;
 
-  // PR_TRUE if this audio stream is paused.
+  // True if this audio stream is paused.
   bool mPaused;
 
-  // PR_TRUE if this stream has encountered an error.
+  // True if this stream has encountered an error.
   bool mInError;
 
 };
 
 class nsRemotedAudioStream : public nsAudioStream
 {
  public:
   NS_DECL_ISUPPORTS
@@ -143,17 +143,17 @@ private:
   nsRefPtr<AudioChild> mAudioChild;
 
   SampleFormat mFormat;
   int mRate;
   int mChannels;
 
   PRInt32 mBytesPerFrame;
 
-  // PR_TRUE if this audio stream is paused.
+  // True if this audio stream is paused.
   bool mPaused;
 
   friend class AudioInitEvent;
 };
 
 class AudioInitEvent : public nsRunnable
 {
  public:
@@ -388,18 +388,18 @@ nsAudioStream::~nsAudioStream()
 }
 
 nsNativeAudioStream::nsNativeAudioStream() :
   mVolume(1.0),
   mAudioHandle(0),
   mRate(0),
   mChannels(0),
   mFormat(FORMAT_S16_LE),
-  mPaused(PR_FALSE),
-  mInError(PR_FALSE)
+  mPaused(false),
+  mInError(false)
 {
 }
 
 nsNativeAudioStream::~nsNativeAudioStream()
 {
   Shutdown();
 }
 
@@ -413,41 +413,41 @@ nsresult nsNativeAudioStream::Init(PRInt
 
   if (sa_stream_create_pcm(reinterpret_cast<sa_stream_t**>(&mAudioHandle),
                            NULL,
                            SA_MODE_WRONLY,
                            SA_PCM_FORMAT_S16_NE,
                            aRate,
                            aNumChannels) != SA_SUCCESS) {
     mAudioHandle = nsnull;
-    mInError = PR_TRUE;
+    mInError = true;
     PR_LOG(gAudioStreamLog, PR_LOG_ERROR, ("nsNativeAudioStream: sa_stream_create_pcm error"));
     return NS_ERROR_FAILURE;
   }
 
   if (sa_stream_open(static_cast<sa_stream_t*>(mAudioHandle)) != SA_SUCCESS) {
     sa_stream_destroy(static_cast<sa_stream_t*>(mAudioHandle));
     mAudioHandle = nsnull;
-    mInError = PR_TRUE;
+    mInError = true;
     PR_LOG(gAudioStreamLog, PR_LOG_ERROR, ("nsNativeAudioStream: sa_stream_open error"));
     return NS_ERROR_FAILURE;
   }
-  mInError = PR_FALSE;
+  mInError = false;
 
   return NS_OK;
 }
 
 void nsNativeAudioStream::Shutdown()
 {
   if (!mAudioHandle)
     return;
 
   sa_stream_destroy(static_cast<sa_stream_t*>(mAudioHandle));
   mAudioHandle = nsnull;
-  mInError = PR_TRUE;
+  mInError = true;
 }
 
 nsresult nsNativeAudioStream::Write(const void* aBuf, PRUint32 aFrames)
 {
   NS_ASSERTION(!mPaused, "Don't write audio when paused, you'll block");
 
   if (mInError)
     return NS_ERROR_FAILURE;
@@ -496,17 +496,17 @@ nsresult nsNativeAudioStream::Write(cons
       }
     }
 
     if (sa_stream_write(static_cast<sa_stream_t*>(mAudioHandle),
                         s_data.get(),
                         samples * sizeof(short)) != SA_SUCCESS)
     {
       PR_LOG(gAudioStreamLog, PR_LOG_ERROR, ("nsNativeAudioStream: sa_stream_write error"));
-      mInError = PR_TRUE;
+      mInError = true;
       return NS_ERROR_FAILURE;
     }
   }
   return NS_OK;
 }
 
 PRUint32 nsNativeAudioStream::Available()
 {
@@ -523,50 +523,50 @@ PRUint32 nsNativeAudioStream::Available(
 }
 
 void nsNativeAudioStream::SetVolume(double aVolume)
 {
   NS_ASSERTION(aVolume >= 0.0 && aVolume <= 1.0, "Invalid volume");
 #if defined(SA_PER_STREAM_VOLUME)
   if (sa_stream_set_volume_abs(static_cast<sa_stream_t*>(mAudioHandle), aVolume) != SA_SUCCESS) {
     PR_LOG(gAudioStreamLog, PR_LOG_ERROR, ("nsNativeAudioStream: sa_stream_set_volume_abs error"));
-    mInError = PR_TRUE;
+    mInError = true;
   }
 #else
   mVolume = aVolume;
 #endif
 }
 
 void nsNativeAudioStream::Drain()
 {
   NS_ASSERTION(!mPaused, "Don't drain audio when paused, it won't finish!");
 
   if (mInError)
     return;
 
   int r = sa_stream_drain(static_cast<sa_stream_t*>(mAudioHandle));
   if (r != SA_SUCCESS && r != SA_ERROR_INVALID) {
     PR_LOG(gAudioStreamLog, PR_LOG_ERROR, ("nsNativeAudioStream: sa_stream_drain error"));
-    mInError = PR_TRUE;
+    mInError = true;
   }
 }
 
 void nsNativeAudioStream::Pause()
 {
   if (mInError)
     return;
-  mPaused = PR_TRUE;
+  mPaused = true;
   sa_stream_pause(static_cast<sa_stream_t*>(mAudioHandle));
 }
 
 void nsNativeAudioStream::Resume()
 {
   if (mInError)
     return;
-  mPaused = PR_FALSE;
+  mPaused = false;
   sa_stream_resume(static_cast<sa_stream_t*>(mAudioHandle));
 }
 
 PRInt64 nsNativeAudioStream::GetPosition()
 {
   PRInt64 position = GetPositionInFrames();
   if (position >= 0) {
     return ((USECS_PER_S * position) / mRate);
@@ -612,17 +612,17 @@ PRInt32 nsNativeAudioStream::GetMinWrite
 }
 
 nsRemotedAudioStream::nsRemotedAudioStream()
  : mAudioChild(nsnull),
    mFormat(FORMAT_S16_LE),
    mRate(0),
    mChannels(0),
    mBytesPerFrame(0),
-   mPaused(PR_FALSE)
+   mPaused(false)
 {}
 
 nsRemotedAudioStream::~nsRemotedAudioStream()
 {
   Shutdown();
 }
 
 NS_IMPL_THREADSAFE_ISUPPORTS0(nsRemotedAudioStream)
@@ -710,30 +710,30 @@ nsRemotedAudioStream::Drain()
   nsCOMPtr<nsIRunnable> event = new AudioDrainEvent(mAudioChild);
   NS_DispatchToMainThread(event);
   mAudioChild->WaitForDrain();
 }
 
 void
 nsRemotedAudioStream::Pause()
 {
-  mPaused = PR_TRUE;
+  mPaused = true;
   if (!mAudioChild)
     return;
-  nsCOMPtr<nsIRunnable> event = new AudioPauseEvent(mAudioChild, PR_TRUE);
+  nsCOMPtr<nsIRunnable> event = new AudioPauseEvent(mAudioChild, true);
   NS_DispatchToMainThread(event);
 }
 
 void
 nsRemotedAudioStream::Resume()
 {
-  mPaused = PR_FALSE;
+  mPaused = false;
   if (!mAudioChild)
     return;
-  nsCOMPtr<nsIRunnable> event = new AudioPauseEvent(mAudioChild, PR_FALSE);
+  nsCOMPtr<nsIRunnable> event = new AudioPauseEvent(mAudioChild, false);
   NS_DispatchToMainThread(event);
 }
 
 PRInt64 nsRemotedAudioStream::GetPosition()
 {
   PRInt64 position = GetPositionInFrames();
   if (position >= 0) {
     return ((USECS_PER_S * position) / mRate);
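
The mPaused/mInError booleans converted above implement a simple sticky-error pattern: the first backend failure latches mInError, and every later call bails out early. A minimal standalone sketch of that pattern follows; the Backend type is a hypothetical stand-in for the sydneyaudio sa_stream_* calls, not a real API.

// Hypothetical backend standing in for sa_stream_*; always succeeds here.
struct Backend {
  bool Write(const short*, unsigned) { return true; }
  void Pause() {}
  void Resume() {}
};

class AudioStreamSketch {
public:
  AudioStreamSketch() : mPaused(false), mInError(false) {}

  bool Write(const short* aBuf, unsigned aFrames) {
    if (mInError)
      return false;                 // sticky: once in error, stay a no-op
    if (!mBackend.Write(aBuf, aFrames)) {
      mInError = true;              // latch the first failure
      return false;
    }
    return true;
  }

  void Pause()  { if (!mInError) { mPaused = true;  mBackend.Pause();  } }
  void Resume() { if (!mInError) { mPaused = false; mBackend.Resume(); } }
  bool IsPaused() const { return mPaused; }

private:
  Backend mBackend;
  bool mPaused;   // true while the stream is paused
  bool mInError;  // true once the stream has hit an unrecoverable error
};

int main() {
  AudioStreamSketch stream;
  short silence[64] = {0};
  stream.Write(silence, 64);
  stream.Pause();
  stream.Resume();
  return 0;
}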
--- a/content/media/nsAudioStream.h
+++ b/content/media/nsAudioStream.h
@@ -109,17 +109,17 @@ public:
   // Return the position in microseconds of the audio frame being played by
   // the audio hardware.
   virtual PRInt64 GetPosition() = 0;
 
   // Return the position, measured in audio frames played since the stream
   // was opened, of the audio hardware.
   virtual PRInt64 GetPositionInFrames() = 0;
 
-  // Returns PR_TRUE when the audio stream is paused.
+  // Returns true when the audio stream is paused.
   virtual bool IsPaused() = 0;
 
   // Returns the minimum number of audio frames which must be written before
   // you can be sure that something will be played.
   // Unsafe to call with the decoder monitor held.
   virtual PRInt32 GetMinWriteSize() = 0;
 
 protected:
--- a/content/media/nsBuiltinDecoder.cpp
+++ b/content/media/nsBuiltinDecoder.cpp
@@ -108,52 +108,52 @@ bool nsBuiltinDecoder::IsInfinite()
 
 nsBuiltinDecoder::nsBuiltinDecoder() :
   mDecoderPosition(0),
   mPlaybackPosition(0),
   mCurrentTime(0.0),
   mInitialVolume(0.0),
   mRequestedSeekTime(-1.0),
   mDuration(-1),
-  mSeekable(PR_TRUE),
+  mSeekable(true),
   mReentrantMonitor("media.decoder"),
   mPlayState(PLAY_STATE_PAUSED),
   mNextState(PLAY_STATE_PAUSED),
-  mResourceLoaded(PR_FALSE),
-  mIgnoreProgressData(PR_FALSE),
-  mInfiniteStream(PR_FALSE)
+  mResourceLoaded(false),
+  mIgnoreProgressData(false),
+  mInfiniteStream(false)
 {
   MOZ_COUNT_CTOR(nsBuiltinDecoder);
   NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
 #ifdef PR_LOGGING
   if (!gBuiltinDecoderLog) {
     gBuiltinDecoderLog = PR_NewLogModule("nsBuiltinDecoder");
   }
 #endif
 }
 
 bool nsBuiltinDecoder::Init(nsHTMLMediaElement* aElement)
 {
   NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
   if (!nsMediaDecoder::Init(aElement))
-    return PR_FALSE;
+    return false;
 
   nsContentUtils::RegisterShutdownObserver(this);
   mImageContainer = aElement->GetImageContainer();
-  return PR_TRUE;
+  return true;
 }
 
 void nsBuiltinDecoder::Shutdown()
 {
   NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
   
   if (mShuttingDown)
     return;
 
-  mShuttingDown = PR_TRUE;
+  mShuttingDown = true;
 
   // This changes the decoder state to SHUTDOWN and does other things
   // necessary to unblock the state machine thread if it's blocked, so
   // the asynchronous shutdown in nsDestroyStateMachine won't deadlock.
   if (mDecoderStateMachine) {
     mDecoderStateMachine->Shutdown();
   }
 
@@ -265,40 +265,40 @@ nsresult nsBuiltinDecoder::Play()
   if (mPlayState == PLAY_STATE_ENDED)
     return Seek(0);
 
   ChangeState(PLAY_STATE_PLAYING);
   return NS_OK;
 }
 
 /**
- * Returns PR_TRUE if aValue is inside a range of aRanges, and put the range
+ * Returns true if aValue is inside a range of aRanges, and puts the range
  * index in aIntervalIndex if it is not null.
- * If aValue is not inside a range, PR_FALSE is returned, and aIntervalIndex, if
+ * If aValue is not inside a range, false is returned, and aIntervalIndex, if
  * not null, is set to the index of the range which ends immediately before aValue
  * (and can be -1 if aValue is before aRanges.Start(0)).
  */
 static bool IsInRanges(nsTimeRanges& aRanges, double aValue, PRInt32& aIntervalIndex) {
   PRUint32 length;
   aRanges.GetLength(&length);
   for (PRUint32 i = 0; i < length; i++) {
     double start, end;
     aRanges.Start(i, &start);
     if (start > aValue) {
       aIntervalIndex = i - 1;
-      return PR_FALSE;
+      return false;
     }
     aRanges.End(i, &end);
     if (aValue <= end) {
       aIntervalIndex = i;
-      return PR_TRUE;
+      return true;
     }
   }
   aIntervalIndex = length - 1;
-  return PR_FALSE;
+  return false;
 }
 
 nsresult nsBuiltinDecoder::Seek(double aTime)
 {
   NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
   ReentrantMonitorAutoEnter mon(mReentrantMonitor);
 
   NS_ABORT_IF_FALSE(aTime >= 0.0, "Cannot seek to a negative value.");
@@ -424,17 +424,17 @@ void nsBuiltinDecoder::MetadataLoaded(PR
     mDuration = mDecoderStateMachine ? mDecoderStateMachine->GetDuration() : -1;
     // Duration has changed so we should recompute playback rate
     UpdatePlaybackRate();
 
     notifyElement = mNextState != PLAY_STATE_SEEKING;
   }
 
   if (mDuration == -1) {
-    SetInfinite(PR_TRUE);
+    SetInfinite(true);
   }
 
   if (mElement && notifyElement) {
     // Make sure the element and the frame (if any) are told about
     // our new size.
     Invalidate();
     mElement->MetadataLoaded(aChannels, aRate);
   }
@@ -487,19 +487,19 @@ void nsBuiltinDecoder::ResourceLoaded()
 
   {
     // If we are seeking or loading then the resource loaded notification we get
     // should be ignored, since it represents the end of the seek request.
     ReentrantMonitorAutoEnter mon(mReentrantMonitor);
     if (mIgnoreProgressData || mResourceLoaded || mPlayState == PLAY_STATE_LOADING)
       return;
 
-    Progress(PR_FALSE);
+    Progress(false);
 
-    mResourceLoaded = PR_TRUE;
+    mResourceLoaded = true;
     StopProgress();
   }
 
   // Ensure the final progress event gets fired
   if (mElement) {
     mElement->ResourceLoaded();
   }
 }
@@ -549,17 +549,17 @@ void nsBuiltinDecoder::PlaybackEnded()
   if (mElement)  {
     UpdateReadyStateForData();
     mElement->PlaybackEnded();
   }
 
   // This must be called after |mElement->PlaybackEnded()| call above, in order
   // to fire the required durationchange.
   if (IsInfinite()) {
-    SetInfinite(PR_FALSE);
+    SetInfinite(false);
   }
 }
 
 NS_IMETHODIMP nsBuiltinDecoder::Observe(nsISupports *aSubjet,
                                         const char *aTopic,
                                         const PRUnichar *someData)
 {
   NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
@@ -585,19 +585,19 @@ nsBuiltinDecoder::GetStatistics()
       mStream->GetCachedDataEnd(mDecoderPosition);
     result.mTotalBytes = mStream->GetLength();
     result.mPlaybackRate = ComputePlaybackRate(&result.mPlaybackRateReliable);
     result.mDecoderPosition = mDecoderPosition;
     result.mPlaybackPosition = mPlaybackPosition;
   }
   else {
     result.mDownloadRate = 0;
-    result.mDownloadRateReliable = PR_TRUE;
+    result.mDownloadRateReliable = true;
     result.mPlaybackRate = 0;
-    result.mPlaybackRateReliable = PR_TRUE;
+    result.mPlaybackRateReliable = true;
     result.mDecoderPosition = 0;
     result.mPlaybackPosition = 0;
     result.mDownloadPosition = 0;
     result.mTotalBytes = 0;
   }
 
   return result;
 }
@@ -605,17 +605,17 @@ nsBuiltinDecoder::GetStatistics()
 double nsBuiltinDecoder::ComputePlaybackRate(bool* aReliable)
 {
   GetReentrantMonitor().AssertCurrentThreadIn();
   NS_ASSERTION(NS_IsMainThread() || OnStateMachineThread(),
                "Should be on main or state machine thread.");
 
   PRInt64 length = mStream ? mStream->GetLength() : -1;
   if (mDuration >= 0 && length >= 0) {
-    *aReliable = PR_TRUE;
+    *aReliable = true;
     return length * static_cast<double>(USECS_PER_S) / mDuration;
   }
   return mPlaybackStatistics.GetRateAtLastStop(aReliable);
 }
 
 void nsBuiltinDecoder::UpdatePlaybackRate()
 {
   NS_ASSERTION(NS_IsMainThread() || OnStateMachineThread(),
@@ -648,17 +648,17 @@ void nsBuiltinDecoder::NotifySuspendedSt
     mElement->NotifyAutoplayDataReady();
   }
 }
 
 void nsBuiltinDecoder::NotifyBytesDownloaded()
 {
   NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
   UpdateReadyStateForData();
-  Progress(PR_FALSE);
+  Progress(false);
 }
 
 void nsBuiltinDecoder::NotifyDownloadEnded(nsresult aStatus)
 {
   NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
 
   if (aStatus == NS_BINDING_ABORTED) {
     // Download has been cancelled by user.
@@ -739,17 +739,17 @@ void nsBuiltinDecoder::SeekingStopped()
   bool seekWasAborted = false;
   {
     ReentrantMonitorAutoEnter mon(mReentrantMonitor);
 
     // An additional seek was requested while the current seek was
     // in operation.
     if (mRequestedSeekTime >= 0.0) {
       ChangeState(PLAY_STATE_SEEKING);
-      seekWasAborted = PR_TRUE;
+      seekWasAborted = true;
     } else {
       UnpinForSeek();
       ChangeState(mNextState);
     }
   }
 
   if (mElement) {
     UpdateReadyStateForData();
@@ -772,20 +772,20 @@ void nsBuiltinDecoder::SeekingStoppedAtE
   bool seekWasAborted = false;
   {
     ReentrantMonitorAutoEnter mon(mReentrantMonitor);
 
     // An additional seek was requested while the current seek was
     // in operation.
     if (mRequestedSeekTime >= 0.0) {
       ChangeState(PLAY_STATE_SEEKING);
-      seekWasAborted = PR_TRUE;
+      seekWasAborted = true;
     } else {
       UnpinForSeek();
-      fireEnded = PR_TRUE;
+      fireEnded = true;
       ChangeState(PLAY_STATE_ENDED);
     }
   }
 
   if (mElement) {
     UpdateReadyStateForData();
     if (!seekWasAborted) {
       mElement->SeekCompleted();
@@ -947,17 +947,17 @@ void nsBuiltinDecoder::SetEndTime(double
     mDecoderStateMachine->SetFragmentEndTime(static_cast<PRInt64>(aTime * USECS_PER_S));
   }
 }
 
 void nsBuiltinDecoder::Suspend()
 {
   NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
   if (mStream) {
-    mStream->Suspend(PR_TRUE);
+    mStream->Suspend(true);
   }
 }
 
 void nsBuiltinDecoder::Resume(bool aForceBuffering)
 {
   NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
   if (mStream) {
     mStream->Resume();
@@ -968,28 +968,28 @@ void nsBuiltinDecoder::Resume(bool aForc
   }
 }
 
 void nsBuiltinDecoder::StopProgressUpdates()
 {
   NS_ASSERTION(OnStateMachineThread() || OnDecodeThread(),
                "Should be on state machine or decode thread.");
   GetReentrantMonitor().AssertCurrentThreadIn();
-  mIgnoreProgressData = PR_TRUE;
+  mIgnoreProgressData = true;
   if (mStream) {
     mStream->SetReadMode(nsMediaCacheStream::MODE_METADATA);
   }
 }
 
 void nsBuiltinDecoder::StartProgressUpdates()
 {
   NS_ASSERTION(OnStateMachineThread() || OnDecodeThread(),
                "Should be on state machine or decode thread.");
   GetReentrantMonitor().AssertCurrentThreadIn();
-  mIgnoreProgressData = PR_FALSE;
+  mIgnoreProgressData = false;
   if (mStream) {
     mStream->SetReadMode(nsMediaCacheStream::MODE_PLAYBACK);
     mDecoderPosition = mPlaybackPosition = mStream->Tell();
   }
 }
 
 void nsBuiltinDecoder::MoveLoadsToBackground()
 {
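
Since IsInRanges() above carries most of the interval logic used by Seek(), here is a self-contained sketch of the same linear scan, with a sorted std::vector of (start, end) pairs standing in for nsTimeRanges; purely illustrative.

#include <cstdio>
#include <utility>
#include <vector>

// Returns true and sets aIntervalIndex to the containing range if aValue lies
// inside one of aRanges; otherwise returns false and sets aIntervalIndex to
// the index of the range ending immediately before aValue (-1 if none).
static bool IsInRangesSketch(const std::vector<std::pair<double,double>>& aRanges,
                             double aValue, int& aIntervalIndex) {
  for (size_t i = 0; i < aRanges.size(); i++) {
    if (aRanges[i].first > aValue) {
      aIntervalIndex = static_cast<int>(i) - 1;
      return false;
    }
    if (aValue <= aRanges[i].second) {
      aIntervalIndex = static_cast<int>(i);
      return true;
    }
  }
  aIntervalIndex = static_cast<int>(aRanges.size()) - 1;
  return false;
}

int main() {
  std::vector<std::pair<double,double>> buffered = {{0.0, 5.0}, {10.0, 15.0}};
  int index;
  std::printf("%d\n", IsInRangesSketch(buffered, 12.0, index)); // 1, index == 1
  std::printf("%d\n", IsInRangesSketch(buffered, 7.0, index));  // 0, index == 0
  return 0;
}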
--- a/content/media/nsBuiltinDecoder.h
+++ b/content/media/nsBuiltinDecoder.h
@@ -283,17 +283,17 @@ public:
 
   // Set the media fragment end time. aEndTime is in microseconds.
   virtual void SetFragmentEndTime(PRInt64 aEndTime) = 0;
 
   // Functions used by assertions to ensure we're calling things
   // on the appropriate threads.
   virtual bool OnDecodeThread() const = 0;
 
-  // Returns PR_TRUE if the current thread is the state machine thread.
+  // Returns true if the current thread is the state machine thread.
   virtual bool OnStateMachineThread() const = 0;
 
   virtual nsHTMLMediaElement::NextFrameStatus GetNextFrameStatus() = 0;
 
   // Cause state transitions. These methods obtain the decoder monitor
   // to synchronise the change of state, and to notify other threads
   // that the state has changed.
   virtual void Play() = 0;
@@ -311,17 +311,17 @@ public:
   // be called with the decode monitor held.
   virtual void ClearPositionChangeFlag() = 0;
 
   // Called from the main thread to set whether the media resource can
   // seek into unbuffered ranges. The decoder monitor must be obtained
   // before calling this.
   virtual void SetSeekable(bool aSeekable) = 0;
 
-  // Returns PR_TRUE if the media resource can seek into unbuffered ranges,
+  // Returns true if the media resource can seek into unbuffered ranges,
   // as set by SetSeekable(). The decoder monitor must be obtained before
   // calling this.
   virtual bool IsSeekable() = 0;
 
   // Update the playback position. This can result in a timeupdate event
   // and an invalidate of the frame being dispatched asynchronously if
   // there is no such event currently queued.
   // Only called on the decoder thread. Must be called with
@@ -414,33 +414,33 @@ class nsBuiltinDecoder : public nsMediaD
   // Called when the video file has completed downloading.
   // Call on the main thread only.
   void ResourceLoaded();
 
   // Called if the media file encounters a network error.
   // Call on the main thread only.
   virtual void NetworkError();
 
-  // Call from any thread safely. Return PR_TRUE if we are currently
+  // Call from any thread safely. Return true if we are currently
   // seeking in the media resource.
   virtual bool IsSeeking() const;
 
-  // Return PR_TRUE if the decoder has reached the end of playback.
+  // Return true if the decoder has reached the end of playback.
   // Call on the main thread only.
   virtual bool IsEnded() const;
 
   // Set the duration of the media resource in units of seconds.
   // This is called via a channel listener if it can pick up the duration
   // from a content header. Must be called from the main thread only.
   virtual void SetDuration(double aDuration);
 
   // Set a flag indicating whether seeking is supported
   virtual void SetSeekable(bool aSeekable);
 
-  // Return PR_TRUE if seeking is supported.
+  // Return true if seeking is supported.
   virtual bool IsSeekable();
 
   virtual nsresult GetSeekable(nsTimeRanges* aSeekable);
 
   // Set the end time of the media resource. When playback reaches
   // this point the media pauses. aTime is in seconds.
   virtual void SetEndTime(double aTime);
 
@@ -699,13 +699,13 @@ public:
 
   // True when seeking or otherwise moving the play position around in
   // such a manner that progress event data is inaccurate. This is set
   // during seek and duration operations to prevent the progress indicator
   // from jumping around. Read/Write from any thread. Must have decode monitor
   // locked before accessing.
   bool mIgnoreProgressData;
 
-  // PR_TRUE if the stream is infinite (e.g. a webradio).
+  // True if the stream is infinite (e.g. a webradio).
   bool mInfiniteStream;
 };
 
 #endif
--- a/content/media/nsBuiltinDecoderReader.h
+++ b/content/media/nsBuiltinDecoderReader.h
@@ -53,21 +53,21 @@ class nsBuiltinDecoderStateMachine;
 // Stores info relevant to presenting media frames.
 class nsVideoInfo {
 public:
   nsVideoInfo()
     : mAudioRate(0),
       mAudioChannels(0),
       mDisplay(0,0),
       mStereoMode(mozilla::layers::STEREO_MODE_MONO),
-      mHasAudio(PR_FALSE),
-      mHasVideo(PR_FALSE)
+      mHasAudio(false),
+      mHasVideo(false)
   {}
 
-  // Returns PR_TRUE if it's safe to use aPicture as the picture to be
+  // Returns true if it's safe to use aPicture as the picture to be
   // extracted inside a frame of size aFrame, and scaled up to and displayed
   // at a size of aDisplay. You should validate the frame, picture, and
   // display regions before using them to display video frames.
   static bool ValidateVideoRegion(const nsIntSize& aFrame,
                                     const nsIntRect& aPicture,
                                     const nsIntSize& aDisplay);
 
   // Sample rate.
@@ -78,20 +78,20 @@ public:
 
   // Size in pixels at which the video is rendered. This is after it has
   // been scaled by its aspect ratio.
   nsIntSize mDisplay;
 
   // Indicates the frame layout for single track stereo videos.
   mozilla::layers::StereoMode mStereoMode;
 
-  // PR_TRUE if we have an active audio bitstream.
+  // True if we have an active audio bitstream.
   bool mHasAudio;
 
-  // PR_TRUE if we have an active video bitstream.
+  // True if we have an active video bitstream.
   bool mHasVideo;
 };
 
 #ifdef MOZ_TREMOR
 #include <ogg/os_types.h>
 typedef ogg_int32_t VorbisPCMValue;
 typedef short AudioDataValue;
 
@@ -220,46 +220,46 @@ public:
 
   // Codec specific internal time code. For Ogg based codecs this is the
   // granulepos.
   PRInt64 mTimecode;
 
   // This frame's image.
   nsRefPtr<Image> mImage;
 
-  // When PR_TRUE, denotes that this frame is identical to the frame that
+  // When true, denotes that this frame is identical to the frame that
   // came before; it's a duplicate. mBuffer will be empty.
   bool mDuplicate;
   bool mKeyframe;
 
 public:
   VideoData(PRInt64 aOffset, PRInt64 aTime, PRInt64 aEndTime, PRInt64 aTimecode)
     : mOffset(aOffset),
       mTime(aTime),
       mEndTime(aEndTime),
       mTimecode(aTimecode),
-      mDuplicate(PR_TRUE),
-      mKeyframe(PR_FALSE)
+      mDuplicate(true),
+      mKeyframe(false)
   {
     MOZ_COUNT_CTOR(VideoData);
     NS_ASSERTION(aEndTime >= aTime, "Frame must start before it ends.");
   }
 
   VideoData(PRInt64 aOffset,
             PRInt64 aTime,
             PRInt64 aEndTime,
             bool aKeyframe,
             PRInt64 aTimecode,
             nsIntSize aDisplay)
     : mDisplay(aDisplay),
       mOffset(aOffset),
       mTime(aTime),
       mEndTime(aEndTime),
       mTimecode(aTimecode),
-      mDuplicate(PR_FALSE),
+      mDuplicate(false),
       mKeyframe(aKeyframe)
   {
     MOZ_COUNT_CTOR(VideoData);
     NS_ASSERTION(aEndTime >= aTime, "Frame must start before it ends.");
   }
 
 };
 
@@ -333,36 +333,36 @@ template <class T> class MediaQueue : pr
   }
 
   void Reset() {
     ReentrantMonitorAutoEnter mon(mReentrantMonitor);
     while (GetSize() > 0) {
       T* x = PopFront();
       delete x;
     }
-    mEndOfStream = PR_FALSE;
+    mEndOfStream = false;
   }
 
   bool AtEndOfStream() {
     ReentrantMonitorAutoEnter mon(mReentrantMonitor);
     return GetSize() == 0 && mEndOfStream;
   }
 
-  // Returns PR_TRUE if the media queue has had it last item added to it.
+  // Returns true if the media queue has had its last item added to it.
   // This happens when the media stream has been completely decoded. Note this
   // does not mean that the corresponding stream has finished playback.
   bool IsFinished() {
     ReentrantMonitorAutoEnter mon(mReentrantMonitor);
     return mEndOfStream;
   }
 
   // Informs the media queue that it won't be receiving any more items.
   void Finish() {
     ReentrantMonitorAutoEnter mon(mReentrantMonitor);
-    mEndOfStream = PR_TRUE;
+    mEndOfStream = true;
   }
 
   // Returns the approximate number of microseconds of items in the queue.
   PRInt64 Duration() {
     ReentrantMonitorAutoEnter mon(mReentrantMonitor);
     if (GetSize() < 2) {
       return 0;
     }
@@ -374,17 +374,17 @@ template <class T> class MediaQueue : pr
   void LockedForEach(nsDequeFunctor& aFunctor) const {
     ReentrantMonitorAutoEnter mon(mReentrantMonitor);
     ForEach(aFunctor);
   }
 
 private:
   mutable ReentrantMonitor mReentrantMonitor;
 
-  // PR_TRUE when we've decoded the last frame of data in the
+  // True when we've decoded the last frame of data in the
   // bitstream for which we're queueing frame data.
   bool mEndOfStream;
 };
 
 // Encapsulates the decoding and reading of media data. Reading can only be
 // done on the decode thread. Never hold the decoder monitor when
 // calling into this class. Unless otherwise specified, methods and fields of
 // this class can only be accessed on the decode thread.
@@ -399,24 +399,24 @@ public:
   // Initializes the reader, returns NS_OK on success, or NS_ERROR_FAILURE
   // on failure.
   virtual nsresult Init(nsBuiltinDecoderReader* aCloneDonor) = 0;
 
   // Resets all state related to decoding, emptying all buffers etc.
   virtual nsresult ResetDecode();
 
   // Decodes an unspecified amount of audio data, enqueuing the audio data
-  // in mAudioQueue. Returns PR_TRUE when there's more audio to decode,
-  // PR_FALSE if the audio is finished, end of file has been reached,
+  // in mAudioQueue. Returns true when there's more audio to decode,
+  // false if the audio is finished, end of file has been reached,
   // or an unrecoverable read error has occurred.
   virtual bool DecodeAudioData() = 0;
 
   // Reads and decodes one video frame. Packets with a timestamp less
   // than aTimeThreshold will be decoded (unless they're not keyframes
-  // and aKeyframeSkip is PR_TRUE), but will not be added to the queue.
+  // and aKeyframeSkip is true), but will not be added to the queue.
   virtual bool DecodeVideoFrame(bool &aKeyframeSkip,
                                   PRInt64 aTimeThreshold) = 0;
 
   virtual bool HasAudio() = 0;
   virtual bool HasVideo() = 0;
 
   // Read header data for all bitstreams in the file. Fills mInfo with
   // the data required to present the media. Returns NS_OK on success,
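
The MediaQueue contract documented above (Finish() versus AtEndOfStream()) can be restated in a small standalone sketch, with std::mutex and std::deque standing in for ReentrantMonitor and the underlying deque; this illustrates the pattern only and is not the Mozilla class.

#include <deque>
#include <mutex>

template <class T>
class MediaQueueSketch {
public:
  MediaQueueSketch() : mEndOfStream(false) {}

  void Push(T aItem) {
    std::lock_guard<std::mutex> lock(mMutex);
    mItems.push_back(aItem);
  }

  bool Pop(T& aItem) {
    std::lock_guard<std::mutex> lock(mMutex);
    if (mItems.empty())
      return false;
    aItem = mItems.front();
    mItems.pop_front();
    return true;
  }

  // The producer calls this once it will enqueue nothing further.
  void Finish() {
    std::lock_guard<std::mutex> lock(mMutex);
    mEndOfStream = true;
  }

  // "Finished" means decoding is done; "at end of stream" additionally means
  // every decoded item has also been consumed.
  bool IsFinished() {
    std::lock_guard<std::mutex> lock(mMutex);
    return mEndOfStream;
  }

  bool AtEndOfStream() {
    std::lock_guard<std::mutex> lock(mMutex);
    return mItems.empty() && mEndOfStream;
  }

private:
  std::mutex mMutex;
  std::deque<T> mItems;
  bool mEndOfStream;  // set once the last decoded item has been pushed
};

int main() {
  MediaQueueSketch<int> queue;
  queue.Push(1);
  queue.Finish();
  int frame;
  while (queue.Pop(frame)) { /* consume */ }
  return queue.AtEndOfStream() ? 0 : 1;
}

A decode thread pushes frames and calls Finish() after the last one; the playback side treats an empty queue as starvation until AtEndOfStream() also reports true.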
--- a/content/media/nsBuiltinDecoderStateMachine.cpp
+++ b/content/media/nsBuiltinDecoderStateMachine.cpp
@@ -199,45 +199,45 @@ nsBuiltinDecoderStateMachine::nsBuiltinD
   mSeekTime(0),
   mFragmentEndTime(-1),
   mReader(aReader),
   mCurrentFrameTime(0),
   mAudioStartTime(-1),
   mAudioEndTime(-1),
   mVideoFrameEndTime(-1),
   mVolume(1.0),
-  mSeekable(PR_TRUE),
-  mPositionChangeQueued(PR_FALSE),
-  mAudioCompleted(PR_FALSE),
-  mGotDurationFromMetaData(PR_FALSE),
-  mStopDecodeThread(PR_TRUE),
-  mDecodeThreadIdle(PR_FALSE),
-  mStopAudioThread(PR_TRUE),
-  mQuickBuffering(PR_FALSE),
-  mIsRunning(PR_FALSE),
-  mRunAgain(PR_FALSE),
-  mDispatchedRunEvent(PR_FALSE),
-  mDecodeThreadWaiting(PR_FALSE),
-  mEventManager(aDecoder),
-  mRealTime(aRealTime)
+  mSeekable(true),
+  mPositionChangeQueued(false),
+  mAudioCompleted(false),
+  mGotDurationFromMetaData(false),
+  mStopDecodeThread(true),
+  mDecodeThreadIdle(false),
+  mStopAudioThread(true),
+  mQuickBuffering(false),
+  mIsRunning(false),
+  mRunAgain(false),
+  mDispatchedRunEvent(false),
+  mDecodeThreadWaiting(false),
+  mRealTime(aRealTime),
+  mEventManager(aDecoder)
 {
   MOZ_COUNT_CTOR(nsBuiltinDecoderStateMachine);
   NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
   if (gStateMachineCount == 0) {
     NS_ASSERTION(!gStateMachineThread, "Should have null state machine thread!");
     nsresult res = NS_NewThread(&gStateMachineThread,
                                 nsnull,
                                 MEDIA_THREAD_STACK_SIZE);
     NS_ABORT_IF_FALSE(NS_SUCCEEDED(res), "Can't create media state machine thread");
   }
   gStateMachineCount++;
 
   // only enable realtime mode when "media.realtime_decoder.enabled" is true.
   if (Preferences::GetBool("media.realtime_decoder.enabled", false) == false)
-    mRealTime = PR_FALSE;
+    mRealTime = false;
 
   mBufferingWait = mRealTime ? 0 : BUFFERING_WAIT;
   mLowDataThresholdUsecs = mRealTime ? 0 : LOW_DATA_THRESHOLD_USECS;
 }
 
 nsBuiltinDecoderStateMachine::~nsBuiltinDecoderStateMachine()
 {
   NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
@@ -306,17 +306,17 @@ void nsBuiltinDecoderStateMachine::Decod
   {
     if (mState == DECODER_STATE_DECODING || mState == DECODER_STATE_BUFFERING) {
       DecodeLoop();
     } else if (mState == DECODER_STATE_SEEKING) {
       DecodeSeek();
     }
   }
 
-  mDecodeThreadIdle = PR_TRUE;
+  mDecodeThreadIdle = true;
   LOG(PR_LOG_DEBUG, ("%p Decode thread finished", mDecoder.get()));
 }
 
 void nsBuiltinDecoderStateMachine::DecodeLoop()
 {
   LOG(PR_LOG_DEBUG, ("%p Start DecodeLoop()", mDecoder.get()));
 
   mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
@@ -324,17 +324,17 @@ void nsBuiltinDecoderStateMachine::Decod
 
   // We want to "pump" the decode until we've got a few frames decoded
   // before we consider whether decode is falling behind.
   bool audioPump = true;
   bool videoPump = true;
 
   // If the video decode is falling behind the audio, we'll start dropping the
   // inter-frames up until the next keyframe which is at or before the current
-  // playback position. skipToNextKeyframe is PR_TRUE if we're currently
+  // playback position. skipToNextKeyframe is true if we're currently
   // skipping up to the next keyframe.
   bool skipToNextKeyframe = false;
 
   // Once we've decoded more than videoPumpThreshold video frames, we'll
   // no longer be considered to be "pumping video".
   const unsigned videoPumpThreshold = mRealTime ? 0 : AMPLE_VIDEO_FRAMES / 2;
 
   // After the audio decode fills with more than audioPumpThreshold usecs
@@ -362,24 +362,24 @@ void nsBuiltinDecoderStateMachine::Decod
          (videoPlaying || audioPlaying))
   {
     // We don't want to consider skipping to the next keyframe if we've
     // only just started up the decode loop, so wait until we've decoded
     // some frames before enabling the keyframe skip logic on video.
     if (videoPump &&
         static_cast<PRUint32>(videoQueue.GetSize()) >= videoPumpThreshold)
     {
-      videoPump = PR_FALSE;
+      videoPump = false;
     }
 
     // We don't want to consider skipping to the next keyframe if we've
     // only just started up the decode loop, so wait until we've decoded
     // some audio data before enabling the keyframe skip logic on audio.
     if (audioPump && GetDecodedAudioDuration() >= audioPumpThreshold) {
-      audioPump = PR_FALSE;
+      audioPump = false;
     }
 
     // We'll skip the video decode to the nearest keyframe if we're low on
     // audio, or if we're low on video, provided we're not running low on
     // data to decode. If we're running low on downloaded data to decode,
     // we won't start keyframe skipping, as we'll be pausing playback to buffer
     // soon anyway and we'll want to be able to display frames immediately
     // after buffering finishes.
@@ -388,17 +388,17 @@ void nsBuiltinDecoderStateMachine::Decod
         videoPlaying &&
         ((!audioPump && audioPlaying && GetDecodedAudioDuration() < lowAudioThreshold) ||
          (!videoPump &&
            videoPlaying &&
            static_cast<PRUint32>(videoQueue.GetSize()) < LOW_VIDEO_FRAMES)) &&
         !HasLowUndecodedData())
 
     {
-      skipToNextKeyframe = PR_TRUE;
+      skipToNextKeyframe = true;
       LOG(PR_LOG_DEBUG, ("%p Skipping video decode to the next keyframe", mDecoder.get()));
     }
 
     // Video decode.
     if (videoPlaying &&
         static_cast<PRUint32>(videoQueue.GetSize()) < AMPLE_VIDEO_FRAMES)
     {
       // Time the video decode, so that if it's slow, we can increase our low
@@ -454,26 +454,26 @@ void nsBuiltinDecoderStateMachine::Decod
       // position, we may as well wait for the playback to catch up. Note the
       // audio push thread acquires and notifies the decoder monitor every time
       // it pops AudioData off the audio queue. So if the audio push thread pops
       // the last AudioData off the audio queue right after that queue reported
       // it was non-empty here, we'll receive a notification on the decoder
       // monitor which will wake us up shortly after we sleep, thus preventing
       // both the decode and audio push threads waiting at the same time.
       // See bug 620326.
-      mDecodeThreadWaiting = PR_TRUE;
+      mDecodeThreadWaiting = true;
       if (mDecoder->GetState() != nsBuiltinDecoder::PLAY_STATE_PLAYING) {
         // We're not playing, and the decode is about to wait. This means
         // the decode thread may not be needed in future. Signal the state
         // machine thread to run, so it can decide whether to shutdown the
         // decode thread.
         ScheduleStateMachine();
       }
       mDecoder->GetReentrantMonitor().Wait();
-      mDecodeThreadWaiting = PR_FALSE;
+      mDecodeThreadWaiting = false;
     }
 
   } // End decode loop.
 
   if (!mStopDecodeThread &&
       mState != DECODER_STATE_SHUTDOWN &&
       mState != DECODER_STATE_SEEKING)
   {
@@ -499,17 +499,17 @@ void nsBuiltinDecoderStateMachine::Audio
   PRInt64 audioStartTime = -1;
   PRInt64 framesWritten = 0;
   PRUint32 channels, rate;
   double volume = -1;
   bool setVolume;
   PRInt32 minWriteFrames = -1;
   {
     ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
-    mAudioCompleted = PR_FALSE;
+    mAudioCompleted = false;
     audioStartTime = mAudioStartTime;
     channels = mInfo.mAudioChannels;
     rate = mInfo.mAudioRate;
     NS_ASSERTION(audioStartTime != -1, "Should have audio start time by now");
   }
 
   // It is unsafe to call some methods of nsAudioStream with the decoder
   // monitor held, as on Android those methods do a synchronous dispatch to
@@ -702,17 +702,17 @@ void nsBuiltinDecoderStateMachine::Audio
   }
   LOG(PR_LOG_DEBUG, ("%p Reached audio stream end.", mDecoder.get()));
   {
     // Must hold lock while nulling the audio stream to prevent
     // state machine thread trying to use it while we're destroying it.
     ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
     mAudioStream = nsnull;
     mEventManager.Clear();
-    mAudioCompleted = PR_TRUE;
+    mAudioCompleted = true;
     UpdateReadyState();
     // Kick the decode thread; it may be sleeping waiting for this to finish.
     mDecoder->GetReentrantMonitor().NotifyAll();
   }
 
   // Must not hold the decoder monitor while we shutdown the audio stream, as
   // it makes a synchronous dispatch on Android.
   audioStream->Shutdown();
@@ -854,17 +854,17 @@ void nsBuiltinDecoderStateMachine::Updat
 }
 
 void nsBuiltinDecoderStateMachine::UpdatePlaybackPosition(PRInt64 aTime)
 {
   UpdatePlaybackPositionInternal(aTime);
 
   bool fragmentEnded = mFragmentEndTime >= 0 && GetMediaTime() >= mFragmentEndTime;
   if (!mPositionChangeQueued || fragmentEnded) {
-    mPositionChangeQueued = PR_TRUE;
+    mPositionChangeQueued = true;
     nsCOMPtr<nsIRunnable> event =
       NS_NewRunnableMethod(mDecoder, &nsBuiltinDecoder::PlaybackPositionChanged);
     NS_DispatchToMainThread(event, NS_DISPATCH_NORMAL);
   }
 
   // Notify DOM of any queued up audioavailable events
   mEventManager.DispatchPendingEvents(GetMediaTime());
 
@@ -873,17 +873,17 @@ void nsBuiltinDecoderStateMachine::Updat
   }
 }
 
 void nsBuiltinDecoderStateMachine::ClearPositionChangeFlag()
 {
   NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
   mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
 
-  mPositionChangeQueued = PR_FALSE;
+  mPositionChangeQueued = false;
 }
 
 nsHTMLMediaElement::NextFrameStatus nsBuiltinDecoderStateMachine::GetNextFrameStatus()
 {
   ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
   if (IsBuffering() || IsSeeking()) {
     return nsHTMLMediaElement::NEXT_FRAME_UNAVAILABLE_BUFFERING;
   } else if (HaveNextFrameData()) {
@@ -1002,17 +1002,17 @@ void nsBuiltinDecoderStateMachine::Play(
 }
 
 void nsBuiltinDecoderStateMachine::ResetPlayback()
 {
   NS_ASSERTION(OnDecodeThread(), "Should be on decode thread.");
   mVideoFrameEndTime = -1;
   mAudioStartTime = -1;
   mAudioEndTime = -1;
-  mAudioCompleted = PR_FALSE;
+  mAudioCompleted = false;
 }
 
 void nsBuiltinDecoderStateMachine::Seek(double aTime)
 {
   NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
   ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
   // nsBuiltinDecoder::mPlayState should be SEEKING while we seek, and
   // in that case nsBuiltinDecoder shouldn't be calling us.
@@ -1039,76 +1039,76 @@ void nsBuiltinDecoderStateMachine::Seek(
   mState = DECODER_STATE_SEEKING;
   ScheduleStateMachine();
 }
 
 void nsBuiltinDecoderStateMachine::StopDecodeThread()
 {
   NS_ASSERTION(OnStateMachineThread(), "Should be on state machine thread.");
   mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
-  mStopDecodeThread = PR_TRUE;
+  mStopDecodeThread = true;
   mDecoder->GetReentrantMonitor().NotifyAll();
   if (mDecodeThread) {
     LOG(PR_LOG_DEBUG, ("%p Shutdown decode thread", mDecoder.get()));
     {
       ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
       mDecodeThread->Shutdown();
     }
     mDecodeThread = nsnull;
-    mDecodeThreadIdle = PR_FALSE;
+    mDecodeThreadIdle = false;
   }
 }
 
 void nsBuiltinDecoderStateMachine::StopAudioThread()
 {
   mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
-  mStopAudioThread = PR_TRUE;
+  mStopAudioThread = true;
   mDecoder->GetReentrantMonitor().NotifyAll();
   if (mAudioThread) {
     LOG(PR_LOG_DEBUG, ("%p Shutdown audio thread", mDecoder.get()));
     {
       ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
       mAudioThread->Shutdown();
     }
     mAudioThread = nsnull;
   }
 }
 
 nsresult
 nsBuiltinDecoderStateMachine::StartDecodeThread()
 {
   NS_ASSERTION(OnStateMachineThread(), "Should be on state machine thread.");
   mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
-  mStopDecodeThread = PR_FALSE;
+  mStopDecodeThread = false;
   if ((mDecodeThread && !mDecodeThreadIdle) || mState >= DECODER_STATE_COMPLETED)
     return NS_OK;
 
   if (!mDecodeThread) {
     nsresult rv = NS_NewThread(getter_AddRefs(mDecodeThread),
                                nsnull,
                                MEDIA_THREAD_STACK_SIZE);
     if (NS_FAILED(rv)) {
       mState = DECODER_STATE_SHUTDOWN;
       return rv;
     }
   }
   nsCOMPtr<nsIRunnable> event =
     NS_NewRunnableMethod(this, &nsBuiltinDecoderStateMachine::DecodeThreadRun);
   mDecodeThread->Dispatch(event, NS_DISPATCH_NORMAL);
-  mDecodeThreadIdle = PR_FALSE;
+  mDecodeThreadIdle = false;
   return NS_OK;
 }
 
 nsresult
 nsBuiltinDecoderStateMachine::StartAudioThread()
 {
   NS_ASSERTION(OnStateMachineThread() || OnDecodeThread(),
                "Should be on state machine or decode thread.");
   mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
-  mStopAudioThread = PR_FALSE;
+  mStopAudioThread = false;
   if (HasAudio() && !mAudioThread) {
     nsresult rv = NS_NewThread(getter_AddRefs(mAudioThread),
                                nsnull,
                                MEDIA_THREAD_STACK_SIZE);
     if (NS_FAILED(rv)) {
       mState = DECODER_STATE_SHUTDOWN;
       return rv;
     }
@@ -1379,17 +1379,17 @@ void nsBuiltinDecoderStateMachine::Decod
   {
     ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
     NS_DispatchToMainThread(stopEvent, NS_DISPATCH_SYNC);
   }
 
   // Reset quick buffering status. This ensures that if we began the
   // seek while quick-buffering, we won't bypass quick buffering mode
   // if we need to buffer after the seek.
-  mQuickBuffering = PR_FALSE;
+  mQuickBuffering = false;
 
   ScheduleStateMachine();
 }
 
 // Runnable to dispose of the decoder and state machine on the main thread.
 class nsDecoderDisposeEvent : public nsRunnable {
 public:
   nsDecoderDisposeEvent(already_AddRefed<nsBuiltinDecoder> aDecoder,
@@ -1913,32 +1913,32 @@ nsresult nsBuiltinDecoderStateMachine::R
 
   return CallRunStateMachine();
 }
 
 nsresult nsBuiltinDecoderStateMachine::CallRunStateMachine()
 {
   mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
   NS_ASSERTION(OnStateMachineThread(), "Should be on state machine thread.");
-  // This will be set to PR_TRUE by ScheduleStateMachine() if it's called
+  // This will be set to true by ScheduleStateMachine() if it's called
   // while we're in RunStateMachine().
-  mRunAgain = PR_FALSE;
+  mRunAgain = false;
 
-  // Set to PR_TRUE whenever we dispatch an event to run this state machine.
+  // Set to true whenever we dispatch an event to run this state machine.
   // This flag prevents us from dispatching
-  mDispatchedRunEvent = PR_FALSE;
+  mDispatchedRunEvent = false;
 
   mTimeout = TimeStamp();
 
-  mIsRunning = PR_TRUE;
+  mIsRunning = true;
   nsresult res = RunStateMachine();
-  mIsRunning = PR_FALSE;
+  mIsRunning = false;
 
   if (mRunAgain && !mDispatchedRunEvent) {
-    mDispatchedRunEvent = PR_TRUE;
+    mDispatchedRunEvent = true;
     return NS_DispatchToCurrentThread(this);
   }
 
   return res;
 }
 
 static void TimeoutExpired(nsITimer *aTimer, void *aClosure) {
   nsBuiltinDecoderStateMachine *machine =
@@ -1947,17 +1947,17 @@ static void TimeoutExpired(nsITimer *aTi
   machine->TimeoutExpired();
 }
 
 void nsBuiltinDecoderStateMachine::TimeoutExpired()
 {
   ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
   NS_ASSERTION(OnStateMachineThread(), "Must be on state machine thread");
   if (mIsRunning) {
-    mRunAgain = PR_TRUE;
+    mRunAgain = true;
   } else if (!mDispatchedRunEvent) {
     // We don't have an event dispatched to run the state machine, so we
     // can just run it from here.
     CallRunStateMachine();
   }
   // Otherwise, an event has already been dispatched to run the state machine
   // as soon as possible. Nothing else needed to do, the state machine is
   // going to run anyway.
@@ -1993,22 +1993,22 @@ nsresult nsBuiltinDecoderStateMachine::S
 
   PRUint32 ms = static_cast<PRUint32>((aUsecs / USECS_PER_MS) & 0xFFFFFFFF);
   if (mRealTime && ms > 40)
     ms = 40;
   if (ms == 0) {
     if (mIsRunning) {
       // We're currently running this state machine on the state machine
       // thread. Signal it to run again once it finishes its current cycle.
-      mRunAgain = PR_TRUE;
+      mRunAgain = true;
       return NS_OK;
     } else if (!mDispatchedRunEvent) {
       // We're not currently running this state machine on the state machine
       // thread. Dispatch an event to run one cycle of the state machine.
-      mDispatchedRunEvent = PR_TRUE;
+      mDispatchedRunEvent = true;
       return gStateMachineThread->Dispatch(this, NS_DISPATCH_NORMAL);
     }
     // We're not currently running this state machine on the state machine
     // thread, but something has already dispatched an event to run it again,
     // so just exit; it's going to run real soon.
     return NS_OK;
   }
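
The trio of flags converted above (mIsRunning, mRunAgain, mDispatchedRunEvent) implements event coalescing for the state machine: a schedule request that arrives while a cycle is executing only sets mRunAgain, and at most one run event is ever in flight. A single-threaded toy version follows, with a std::queue of callables standing in for the state machine thread's event loop; it is a sketch of the pattern, not the patched code.

#include <cstdio>
#include <functional>
#include <queue>

class StateMachineSketch {
public:
  StateMachineSketch() : mIsRunning(false), mRunAgain(false), mDispatchedRunEvent(false) {}

  // Ask for another state machine cycle as soon as possible.
  void Schedule(std::queue<std::function<void()>>& aEventLoop) {
    if (mIsRunning) {
      mRunAgain = true;               // coalesce: rerun after the current cycle
      return;
    }
    if (!mDispatchedRunEvent) {
      mDispatchedRunEvent = true;     // at most one run event in flight
      aEventLoop.push([this, &aEventLoop] { Run(aEventLoop); });
    }
  }

  void Run(std::queue<std::function<void()>>& aEventLoop) {
    mRunAgain = false;
    mDispatchedRunEvent = false;
    mIsRunning = true;
    std::puts("one state machine cycle");  // stands in for RunStateMachine()
    mIsRunning = false;
    if (mRunAgain && !mDispatchedRunEvent) {
      mDispatchedRunEvent = true;
      aEventLoop.push([this, &aEventLoop] { Run(aEventLoop); });
    }
  }

private:
  bool mIsRunning;           // a cycle is executing right now
  bool mRunAgain;            // a cycle was requested while one was executing
  bool mDispatchedRunEvent;  // a run event is already queued
};

int main() {
  std::queue<std::function<void()>> loop;
  StateMachineSketch machine;
  machine.Schedule(loop);
  machine.Schedule(loop);    // coalesced: no second event is queued
  while (!loop.empty()) {
    std::function<void()> task = loop.front();
    loop.pop();
    task();
  }
  return 0;
}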
 
--- a/content/media/nsBuiltinDecoderStateMachine.h
+++ b/content/media/nsBuiltinDecoderStateMachine.h
@@ -264,39 +264,39 @@ public:
   // Set the media fragment end time. aEndTime is in microseconds.
   void SetFragmentEndTime(PRInt64 aEndTime);
 
   // Drop reference to decoder.  Only called during shutdown dance.
   void ReleaseDecoder() { mDecoder = nsnull; }
 
 protected:
 
-  // Returns PR_TRUE if we've got less than aAudioUsecs microseconds of decoded
+  // Returns true if we've got less than aAudioUsecs microseconds of decoded
   // and playable data. The decoder monitor must be held.
   bool HasLowDecodedData(PRInt64 aAudioUsecs) const;
 
-  // Returns PR_TRUE if we're running low on data which is not yet decoded.
+  // Returns true if we're running low on data which is not yet decoded.
   // The decoder monitor must be held.
   bool HasLowUndecodedData() const;
 
   // Returns the number of microseconds of undecoded data available for
   // decoding. The decoder monitor must be held.
   PRInt64 GetUndecodedData() const;
 
   // Returns the number of unplayed usecs of audio we've got decoded and/or
   // pushed to the hardware waiting to play. This is how much audio we can
   // play without having to run the audio decoder. The decoder monitor
   // must be held.
   PRInt64 AudioDecodedUsecs() const;
 
-  // Returns PR_TRUE when there's decoded audio waiting to play.
+  // Returns true when there's decoded audio waiting to play.
   // The decoder monitor must be held.
   bool HasFutureAudio() const;
 
-  // Returns PR_TRUE if we recently exited "quick buffering" mode.
+  // Returns true if we recently exited "quick buffering" mode.
   bool JustExitedQuickBuffering();
 
   // Waits on the decoder ReentrantMonitor for aUsecs microseconds. If the decoder
   // monitor is awoken by a Notify() call, we'll continue waiting, unless
   // we've moved into shutdown state. This enables us to ensure that we
   // wait for a specified time, and that the myriad of Notify()s we do on
   // the decoder monitor don't cause the audio thread to be starved. aUsecs
   // values of less than 1 millisecond are rounded up to 1 millisecond
@@ -383,17 +383,17 @@ protected:
   // Must be called with the decode monitor held. Called on the state machine
   // and decode threads.
   void StartPlayback();
 
   // Moves the decoder into decoding state. Called on the state machine
   // thread. The decoder monitor must be held.
   void StartDecoding();
 
-  // Returns PR_TRUE if we're currently playing. The decoder monitor must
+  // Returns true if we're currently playing. The decoder monitor must
   // be held.
   bool IsPlaying();
 
   // Returns the "media time". This is the absolute time which the media
   // playback has reached. i.e. this returns values in the range
   // [mStartTime, mEndTime], and mStartTime will not be 0 if the media does
   // not start at 0. Note this is different to the value returned
   // by GetCurrentTime(), which is in the range [0,duration].
@@ -435,17 +435,17 @@ protected:
   // periodically via timer to ensure the video stays in sync.
   nsresult RunStateMachine();
 
   bool IsStateMachineScheduled() const {
     mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
     return !mTimeout.IsNull() || mRunAgain;
   }
 
-  // Returns PR_TRUE if we're not playing and the decode thread has filled its
+  // Returns true if we're not playing and the decode thread has filled its
   // decode buffers and is waiting. We can shut the decode thread down in this
   // case as it may not be needed again.
   bool IsPausedAndDecoderWaiting();
 
   // The decoder object that created this state machine. The state machine
   // holds a strong reference to the decoder to ensure that the decoder stays
   // alive once media element has started the decoder shutdown process, and has
   // dropped its reference to the decoder. This enables the state machine to
@@ -555,81 +555,81 @@ protected:
   // Volume of playback. 0.0 = muted. 1.0 = full volume. Read/Written
   // from the state machine and main threads. Synchronised via decoder
   // monitor.
   double mVolume;
 
   // Time at which we started decoding. Synchronised via decoder monitor.
   TimeStamp mDecodeStartTime;
 
-  // PR_TRUE if the media resource can be seeked. Accessed from the state
+  // True if the media resource can be seeked. Accessed from the state
   // machine and main threads. Synchronised via decoder monitor.
   bool mSeekable;
 
-  // PR_TRUE if an event to notify about a change in the playback
-  // position has been queued, but not yet run. It is set to PR_FALSE when
+  // True if an event to notify about a change in the playback
+  // position has been queued, but not yet run. It is set to false when
   // the event is run. This allows coalescing of these events as they can be
   // produced many times per second. Synchronised via decoder monitor.
   // Accessed on main and state machine threads.
   bool mPositionChangeQueued;
 
-  // PR_TRUE if the audio playback thread has finished. It is finished
+  // True if the audio playback thread has finished. It is finished
   // when either all the audio frames in the Vorbis bitstream have completed
   // playing, or we've moved into shutdown state, and the threads are to be
   // destroyed. Written by the audio playback thread and read and written by
   // the state machine thread. Synchronised via decoder monitor.
   bool mAudioCompleted;
 
-  // PR_TRUE if mDuration has a value obtained from an HTTP header, or from
+  // True if mDuration has a value obtained from an HTTP header, or from
   // the media index/metadata. Accessed on the state machine thread.
   bool mGotDurationFromMetaData;
     
-  // PR_FALSE while decode thread should be running. Accessed state machine
+  // False while decode thread should be running. Accessed state machine
   // and decode threads. Synchronised by decoder monitor.
   bool mStopDecodeThread;
 
-  // PR_TRUE when the decode thread run function has finished, but the thread
+  // True when the decode thread run function has finished, but the thread
   // has not necessarily been shut down yet. This can happen if we switch
   // from COMPLETED state to SEEKING before the state machine has a chance
   // to run in the COMPLETED state and shutdown the decode thread.
   // Synchronised by the decoder monitor.
   bool mDecodeThreadIdle;
 
-  // PR_FALSE while audio thread should be running. Accessed state machine
+  // False while audio thread should be running. Accessed state machine
   // and audio threads. Synchronised by decoder monitor.
   bool mStopAudioThread;
 
-  // If this is PR_TRUE while we're in buffering mode, we can exit early,
+  // If this is true while we're in buffering mode, we can exit early,
   // as it's likely we may be able to playback. This happens when we enter
   // buffering mode soon after the decode starts, because the decode-ahead
   // ran fast enough to exhaust all data while the download is starting up.
   // Synchronised via decoder monitor.
   bool mQuickBuffering;
 
-  // PR_TRUE if the shared state machine thread is currently running this
+  // True if the shared state machine thread is currently running this
   // state machine.
   bool mIsRunning;
 
-  // PR_TRUE if we should run the state machine again once the current
+  // True if we should run the state machine again once the current
   // state machine run has finished.
   bool mRunAgain;
 
-  // PR_TRUE if we've dispatched an event to run the state machine. It's
+  // True if we've dispatched an event to run the state machine. It's
   // imperative that we don't dispatch multiple events to run the state
   // machine at the same time, as our code assumes all events are synchronous.
   // If we dispatch multiple events, the second event can run while the
   // first is shutting down a thread, causing inconsistent state.
   bool mDispatchedRunEvent;
 
-  // PR_TRUE if the decode thread has gone filled its buffers and is now
+  // True if the decode thread has filled its buffers and is now
   // waiting to be awakened before it continues decoding. Synchronized
   // by the decoder monitor.
   bool mDecodeThreadWaiting;
 
-  // true is we are decoding a realtime stream, like a camera stream
+  // True if we are decoding a realtime stream, like a camera stream
   bool mRealTime;
   
   PRUint32 mBufferingWait;
   PRInt64  mLowDataThresholdUsecs;
 
 private:
   // Manager for queuing and dispatching MozAudioAvailable events.  The
   // event manager is accessed from the state machine and audio threads,
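
The member comments above describe mPositionChangeQueued as a coalescing flag: the playback position can change many times per second, but only one notification event may be in flight at a time. A tiny sketch of that pattern (hypothetical names; an atomic stands in for the decoder monitor used by the real code):

#include <atomic>

// Sketch of the mPositionChangeQueued coalescing pattern (illustrative only).
class PositionChangeNotifier {
public:
  // Called whenever the playback position moves.
  void NotePositionChanged() {
    // exchange() returns the previous value, so only the first caller
    // since the last event actually dispatches a new one.
    if (!mQueued.exchange(true)) {
      DispatchPositionChangedEvent();
    }
  }

  // Called by the event itself, on the main thread.
  void OnEventRun() {
    mQueued.store(false);   // allow the next notification to be queued
    // ... notify listeners of the new position here ...
  }

private:
  void DispatchPositionChangedEvent() { /* post OnEventRun() to main thread */ }
  std::atomic<bool> mQueued{false};
};
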
--- a/content/media/nsMediaCache.cpp
+++ b/content/media/nsMediaCache.cpp
@@ -116,33 +116,33 @@ void nsMediaCacheFlusher::Init()
   }
 
   gMediaCacheFlusher = new nsMediaCacheFlusher();
   NS_ADDREF(gMediaCacheFlusher);
 
   nsCOMPtr<nsIObserverService> observerService =
     mozilla::services::GetObserverService();
   if (observerService) {
-    observerService->AddObserver(gMediaCacheFlusher, NS_PRIVATE_BROWSING_SWITCH_TOPIC, PR_TRUE);
+    observerService->AddObserver(gMediaCacheFlusher, NS_PRIVATE_BROWSING_SWITCH_TOPIC, true);
   }
 }
 
 class nsMediaCache {
 public:
   friend class nsMediaCacheStream::BlockList;
   typedef nsMediaCacheStream::BlockList BlockList;
   enum {
     BLOCK_SIZE = nsMediaCacheStream::BLOCK_SIZE
   };
 
   nsMediaCache() : mNextResourceID(1),
     mReentrantMonitor("nsMediaCache.mReentrantMonitor"),
-    mFD(nsnull), mFDCurrentPos(0), mUpdateQueued(PR_FALSE)
+    mFD(nsnull), mFDCurrentPos(0), mUpdateQueued(false)
 #ifdef DEBUG
-    , mInUpdate(PR_FALSE)
+    , mInUpdate(false)
 #endif
   {
     MOZ_COUNT_CTOR(nsMediaCache);
   }
   ~nsMediaCache() {
     NS_ASSERTION(mStreams.IsEmpty(), "Stream(s) still open!");
     Truncate();
     NS_ASSERTION(mIndex.Length() == 0, "Blocks leaked?");
@@ -776,20 +776,20 @@ nsMediaCache::FindBlockForIncomingData(T
 bool
 nsMediaCache::BlockIsReusable(PRInt32 aBlockIndex)
 {
   Block* block = &mIndex[aBlockIndex];
   for (PRUint32 i = 0; i < block->mOwners.Length(); ++i) {
     nsMediaCacheStream* stream = block->mOwners[i].mStream;
     if (stream->mPinCount > 0 ||
         stream->mStreamOffset/BLOCK_SIZE == block->mOwners[i].mStreamBlock) {
-      return PR_FALSE;
+      return false;
     }
   }
-  return PR_TRUE;
+  return true;
 }
 
 void
 nsMediaCache::AppendMostReusableBlock(BlockList* aBlockList,
                                       nsTArray<PRUint32>* aResult,
                                       PRInt32 aBlockIndexLimit)
 {
   mReentrantMonitor.AssertCurrentThreadIn();
@@ -1087,19 +1087,19 @@ nsMediaCache::Update()
   // The action to use for each stream. We store these so we can make
   // decisions while holding the cache lock but implement those decisions
   // without holding the cache lock, since we need to call out to
   // stream, decoder and element code.
   nsAutoTArray<StreamAction,10> actions;
 
   {
     ReentrantMonitorAutoEnter mon(mReentrantMonitor);
-    mUpdateQueued = PR_FALSE;
+    mUpdateQueued = false;
 #ifdef DEBUG
-    mInUpdate = PR_TRUE;
+    mInUpdate = true;
 #endif
 
     PRInt32 maxBlocks = GetMaxBlocks();
     TimeStamp now = TimeStamp::Now();
 
     PRInt32 freeBlockCount = mFreeBlocks.GetCount();
     // Try to trim back the cache to its desired maximum size. The cache may
     // have overflowed simply due to data being received when we have
@@ -1259,41 +1259,41 @@ nsMediaCache::Update()
         // already there.
         LOG(PR_LOG_DEBUG, ("Stream %p at end of stream", stream));
         enableReading = !stream->mCacheSuspended &&
           stream->mStreamLength == stream->mChannelOffset;
       } else if (desiredOffset < stream->mStreamOffset) {
         // We're reading to try to catch up to where the current stream
         // reader wants to be. Better not stop.
         LOG(PR_LOG_DEBUG, ("Stream %p catching up", stream));
-        enableReading = PR_TRUE;
+        enableReading = true;
       } else if (desiredOffset < stream->mStreamOffset + BLOCK_SIZE) {
         // The stream reader is waiting for us, or nearly so. Better feed it.
         LOG(PR_LOG_DEBUG, ("Stream %p feeding reader", stream));
-        enableReading = PR_TRUE;
+        enableReading = true;
       } else if (!stream->mIsSeekable &&
                  nonSeekableReadaheadBlockCount >= maxBlocks*NONSEEKABLE_READAHEAD_MAX) {
         // This stream is not seekable and there are already too many blocks
         // being cached for readahead for nonseekable streams (which we can't
         // free). So stop reading ahead now.
         LOG(PR_LOG_DEBUG, ("Stream %p throttling non-seekable readahead", stream));
-        enableReading = PR_FALSE;
+        enableReading = false;
       } else if (mIndex.Length() > PRUint32(maxBlocks)) {
         // We're in the process of bringing the cache size back to the
         // desired limit, so don't bring in more data yet
         LOG(PR_LOG_DEBUG, ("Stream %p throttling to reduce cache size", stream));
-        enableReading = PR_FALSE;
+        enableReading = false;
       } else if (freeBlockCount > 0 || mIndex.Length() < PRUint32(maxBlocks)) {
         // Free blocks in the cache, so keep reading
         LOG(PR_LOG_DEBUG, ("Stream %p reading since there are free blocks", stream));
-        enableReading = PR_TRUE;
+        enableReading = true;
       } else if (latestNextUse <= TimeDuration(0)) {
         // No reusable blocks, so can't read anything
         LOG(PR_LOG_DEBUG, ("Stream %p throttling due to no reusable blocks", stream));
-        enableReading = PR_FALSE;
+        enableReading = false;
       } else {
         // Read ahead if the data we expect to read is more valuable than
         // the least valuable block in the main part of the cache
         TimeDuration predictedNewDataUse = PredictNextUseForIncomingData(stream);
         LOG(PR_LOG_DEBUG, ("Stream %p predict next data in %f, current worst block is %f",
             stream, predictedNewDataUse.ToSeconds(), latestNextUse.ToSeconds()));
         enableReading = predictedNewDataUse < latestNextUse;
       }
@@ -1301,17 +1301,17 @@ nsMediaCache::Update()
       if (enableReading) {
         for (PRUint32 j = 0; j < i; ++j) {
           nsMediaCacheStream* other = mStreams[j];
           if (other->mResourceID == stream->mResourceID &&
               !other->mClient->IsSuspended() &&
               other->mChannelOffset/BLOCK_SIZE == desiredOffset/BLOCK_SIZE) {
             // This block is already going to be read by the other stream.
             // So don't try to read it from this stream as well.
-            enableReading = PR_FALSE;
+            enableReading = false;
             LOG(PR_LOG_DEBUG, ("Stream %p waiting on same block (%lld) from stream %p",
                                stream, desiredOffset/BLOCK_SIZE, other));
             break;
           }
         }
       }
 
       if (stream->mChannelOffset != desiredOffset && enableReading) {
@@ -1325,17 +1325,17 @@ nsMediaCache::Update()
         actions[i] = SEEK;
       } else if (enableReading && stream->mCacheSuspended) {
         actions[i] = RESUME;
       } else if (!enableReading && !stream->mCacheSuspended) {
         actions[i] = SUSPEND;
       }
     }
 #ifdef DEBUG
-    mInUpdate = PR_FALSE;
+    mInUpdate = false;
 #endif
   }
 
   // Update the channel state without holding our cache lock. While we're
   // doing this, decoder threads may be running and seeking, reading or changing
   // other cache state. That's OK, they'll trigger new Update events and we'll
   // get back here and revise our decisions. The important thing here is that
   // performing these actions only depends on mChannelOffset and
@@ -1345,29 +1345,29 @@ nsMediaCache::Update()
     nsMediaCacheStream* stream = mStreams[i];
     nsresult rv = NS_OK;
     switch (actions[i]) {
     case SEEK:
       LOG(PR_LOG_DEBUG, ("Stream %p CacheSeek to %lld (resume=%d)", stream,
            (long long)stream->mChannelOffset, stream->mCacheSuspended));
       rv = stream->mClient->CacheClientSeek(stream->mChannelOffset,
                                             stream->mCacheSuspended);
-      stream->mCacheSuspended = PR_FALSE;
+      stream->mCacheSuspended = false;
       break;
 
     case RESUME:
       LOG(PR_LOG_DEBUG, ("Stream %p Resumed", stream));
       rv = stream->mClient->CacheClientResume();
-      stream->mCacheSuspended = PR_FALSE;
+      stream->mCacheSuspended = false;
       break;
 
     case SUSPEND:
       LOG(PR_LOG_DEBUG, ("Stream %p Suspended", stream));
       rv = stream->mClient->CacheClientSuspend();
-      stream->mCacheSuspended = PR_TRUE;
+      stream->mCacheSuspended = true;
       break;
 
     default:
       break;
     }
 
     if (NS_FAILED(rv)) {
       // Close the streams that failed due to error. This will cause all
@@ -1397,17 +1397,17 @@ nsMediaCache::QueueUpdate()
   mReentrantMonitor.AssertCurrentThreadIn();
 
   // Queuing an update while we're in an update raises a high risk of
   // triggering endless events
   NS_ASSERTION(!mInUpdate,
                "Queuing an update while we're in an update");
   if (mUpdateQueued)
     return;
-  mUpdateQueued = PR_TRUE;
+  mUpdateQueued = true;
   nsCOMPtr<nsIRunnable> event = new UpdateEvent();
   NS_DispatchToMainThread(event);
 }
 
 #ifdef DEBUG_VERIFY_CACHE
 void
 nsMediaCache::Verify()
 {
@@ -1743,17 +1743,17 @@ nsMediaCacheStream::UpdatePrincipal(nsIP
 
   bool equal;
   nsresult rv = mPrincipal->Equals(aPrincipal, &equal);
   if (NS_SUCCEEDED(rv) && equal)
     return;
 
   // Principals are not equal, so set mPrincipal to a null principal.
   mPrincipal = do_CreateInstance("@mozilla.org/nullprincipal;1");
-  mUsingNullPrincipal = PR_TRUE;
+  mUsingNullPrincipal = true;
 }
 
 void
 nsMediaCacheStream::NotifyDataReceived(PRInt64 aSize, const char* aData,
     nsIPrincipal* aPrincipal)
 {
   NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
 
@@ -1777,17 +1777,17 @@ nsMediaCacheStream::NotifyDataReceived(P
     if (blockOffset == 0 && chunkSize == BLOCK_SIZE) {
       // We received a whole block, so avoid a useless copy through
       // mPartialBlockBuffer
       blockDataToStore = data;
     } else {
       if (blockOffset == 0) {
         // We've just started filling this buffer so now is a good time
         // to clear this flag.
-        mMetadataInPartialBlockBuffer = PR_FALSE;
+        mMetadataInPartialBlockBuffer = false;
       }
       memcpy(reinterpret_cast<char*>(mPartialBlockBuffer) + blockOffset,
              data, chunkSize);
 
       if (blockOffset + chunkSize == BLOCK_SIZE) {
         // We completed a block, so lets write it out.
         blockDataToStore = reinterpret_cast<char*>(mPartialBlockBuffer);
         if (mMetadataInPartialBlockBuffer) {
@@ -1895,17 +1895,17 @@ nsMediaCacheStream::Close()
 
 void
 nsMediaCacheStream::CloseInternal(ReentrantMonitorAutoEnter& aReentrantMonitor)
 {
   NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
 
   if (mClosed)
     return;
-  mClosed = PR_TRUE;
+  mClosed = true;
   gMediaCache->ReleaseStreamBlocks(this);
   // Wake up any blocked readers
   aReentrantMonitor.NotifyAll();
 }
 
 void
 nsMediaCacheStream::Pin()
 {
@@ -1948,17 +1948,17 @@ nsMediaCacheStream::GetCachedDataEnd(PRI
   return GetCachedDataEndInternal(aOffset);
 }
 
 bool
 nsMediaCacheStream::IsDataCachedToEndOfStream(PRInt64 aOffset)
 {
   ReentrantMonitorAutoEnter mon(gMediaCache->GetReentrantMonitor());
   if (mStreamLength < 0)
-    return PR_FALSE;
+    return false;
   return GetCachedDataEndInternal(aOffset) >= mStreamLength;
 }
 
 PRInt64
 nsMediaCacheStream::GetCachedDataEndInternal(PRInt64 aOffset)
 {
   gMediaCache->GetReentrantMonitor().AssertCurrentThreadIn();
   PRUint32 startBlockIndex = aOffset/BLOCK_SIZE;
@@ -2003,17 +2003,17 @@ nsMediaCacheStream::GetNextCachedDataInt
 
   // Is the current block cached?
   if (mBlocks[startBlockIndex] != -1)
     return aOffset;
 
   // Count the number of uncached blocks
   bool hasPartialBlock = (mChannelOffset % BLOCK_SIZE) != 0;
   PRUint32 blockIndex = startBlockIndex + 1;
-  while (PR_TRUE) {
+  while (true) {
     if ((hasPartialBlock && blockIndex == channelBlockIndex) ||
         (blockIndex < mBlocks.Length() && mBlocks[blockIndex] != -1)) {
       // We're at the incoming channel block, which has data in it,
       // or at a cached block. Return the index of the block start.
       return blockIndex * BLOCK_SIZE;
     }
 
     // No more cached blocks?
@@ -2124,17 +2124,17 @@ nsMediaCacheStream::Read(char* aBuffer, 
     if (channelBlock == streamBlock && mStreamOffset < mChannelOffset) {
       // We can just use the data in mPartialBlockBuffer. In fact we should
       // use it rather than waiting for the block to fill and land in
       // the cache.
       bytes = NS_MIN<PRInt64>(size, mChannelOffset - mStreamOffset);
       memcpy(aBuffer + count,
         reinterpret_cast<char*>(mPartialBlockBuffer) + offsetInStreamBlock, bytes);
       if (mCurrentMode == MODE_METADATA) {
-        mMetadataInPartialBlockBuffer = PR_TRUE;
+        mMetadataInPartialBlockBuffer = true;
       }
       gMediaCache->NoteBlockUsage(this, cacheBlock, mCurrentMode, TimeStamp::Now());
     } else {
       if (cacheBlock < 0) {
         if (count > 0) {
           // Some data has been read, so return what we've got instead of
           // blocking
           break;
@@ -2238,17 +2238,17 @@ nsMediaCacheStream::Init()
 
   if (mInitialized)
     return NS_OK;
 
   InitMediaCache();
   if (!gMediaCache)
     return NS_ERROR_FAILURE;
   gMediaCache->OpenStream(this);
-  mInitialized = PR_TRUE;
+  mInitialized = true;
   return NS_OK;
 }
 
 nsresult
 nsMediaCacheStream::InitAsClone(nsMediaCacheStream* aOriginal)
 {
   if (mInitialized)
     return NS_OK;
@@ -2262,17 +2262,17 @@ nsMediaCacheStream::InitAsClone(nsMediaC
   ReentrantMonitorAutoEnter mon(gMediaCache->GetReentrantMonitor());
 
   mPrincipal = aOriginal->mPrincipal;
   mStreamLength = aOriginal->mStreamLength;
   mIsSeekable = aOriginal->mIsSeekable;
 
   // Cloned streams are initially suspended, since there is no channel open
   // initially for a clone.
-  mCacheSuspended = PR_TRUE;
+  mCacheSuspended = true;
 
   for (PRUint32 i = 0; i < aOriginal->mBlocks.Length(); ++i) {
     PRInt32 cacheBlockIndex = aOriginal->mBlocks[i];
     if (cacheBlockIndex < 0)
       continue;
 
     while (i >= mBlocks.Length()) {
       mBlocks.AppendElement(-1);
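
The Update() hunk above is a mechanical PR_TRUE/PR_FALSE conversion, but the readahead decision it runs through is worth summarising. A condensed sketch of that decision chain (hypothetical input struct; the real code reads this state off the stream and cache under the cache monitor, and also handles the end-of-stream case first):

// Condensed sketch of the enableReading decision in nsMediaCache::Update()
// (illustrative only, not part of the patch).
struct ReadaheadInput {
  bool   catchingUp;            // desired offset is behind the reader
  bool   feedingReader;         // reader is within one block of the data
  bool   nonSeekableOverLimit;  // non-seekable stream past readahead cap
  bool   cacheOverLimit;        // cache size above maxBlocks
  bool   haveFreeBlocks;        // free blocks, or room below maxBlocks
  bool   haveReusableBlocks;    // latestNextUse > 0
  double predictedNewDataUse;   // seconds until incoming data is needed
  double latestNextUse;         // seconds until worst cached block is needed
};

static bool ShouldEnableReading(const ReadaheadInput& in) {
  if (in.catchingUp)           return true;   // reader is waiting on us
  if (in.feedingReader)        return true;   // reader is (nearly) waiting
  if (in.nonSeekableOverLimit) return false;  // can't free non-seekable blocks
  if (in.cacheOverLimit)       return false;  // shrink the cache first
  if (in.haveFreeBlocks)       return true;   // room to store more data
  if (!in.haveReusableBlocks)  return false;  // nowhere to put new data
  // Read ahead only if the incoming data is needed sooner than the least
  // valuable block already in the cache.
  return in.predictedNewDataUse < in.latestNextUse;
}
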
--- a/content/media/nsMediaCache.h
+++ b/content/media/nsMediaCache.h
@@ -221,24 +221,24 @@ public:
   enum ReadMode {
     MODE_METADATA,
     MODE_PLAYBACK
   };
 
   // aClient provides the underlying transport that cache will use to read
   // data for this stream.
   nsMediaCacheStream(nsMediaChannelStream* aClient)
-    : mClient(aClient), mResourceID(0), mInitialized(PR_FALSE),
-      mIsSeekable(PR_FALSE), mCacheSuspended(PR_FALSE),
-      mUsingNullPrincipal(PR_FALSE),
+    : mClient(aClient), mResourceID(0), mInitialized(false),
+      mIsSeekable(false), mCacheSuspended(false),
+      mUsingNullPrincipal(false),
       mChannelOffset(0), mStreamLength(-1),  
       mStreamOffset(0), mPlaybackBytesPerSecond(10000),
       mPinCount(0), mCurrentMode(MODE_PLAYBACK),
-      mMetadataInPartialBlockBuffer(PR_FALSE),
-      mClosed(PR_FALSE) {}
+      mMetadataInPartialBlockBuffer(false),
+      mClosed(false) {}
   ~nsMediaCacheStream();
 
   // Set up this stream with the cache. Can fail on OOM. One
   // of InitAsClone or Init must be called before any other method on
   // this class. Does nothing if already initialized.
   nsresult Init();
 
   // Set up this stream with the cache, assuming it's for the same data
@@ -447,21 +447,21 @@ private:
   // Set to true when Init or InitAsClone has been called
   bool                   mInitialized;
 
   // The following fields are protected by the cache's monitor but are
   // only written on the main thread. 
 
   // The last reported seekability state for the underlying channel
   bool mIsSeekable;
-  // true if the cache has suspended our channel because the cache is
+  // True if the cache has suspended our channel because the cache is
   // full and the priority of the data that would be received is lower
   // than the priority of the data already in the cache
   bool mCacheSuspended;
-  // true if mPrincipal is a null principal because we saw data from
+  // True if mPrincipal is a null principal because we saw data from
   // multiple origins
   bool mUsingNullPrincipal;
   // The offset where the next data from the channel will arrive
   PRInt64      mChannelOffset;
   // The reported or discovered length of the data, or -1 if nothing is
   // known
   PRInt64      mStreamLength;
 
@@ -483,17 +483,17 @@ private:
   BlockList         mPlayedBlocks;
   // The last reported estimate of the decoder's playback rate
   PRUint32          mPlaybackBytesPerSecond;
   // The number of times this stream has been Pinned without a
   // corresponding Unpin
   PRUint32          mPinCount;
   // The last reported read mode
   ReadMode          mCurrentMode;
-  // true if some data in mPartialBlockBuffer has been read as metadata
+  // True if some data in mPartialBlockBuffer has been read as metadata
   bool              mMetadataInPartialBlockBuffer;
   // Set to true when the stream has been closed either explicitly or
   // due to an internal cache error
   bool              mClosed;
 
   // The following field is protected by the cache's monitor but are
   // only written on the main thread.
 
--- a/content/media/nsMediaDecoder.cpp
+++ b/content/media/nsMediaDecoder.cpp
@@ -71,35 +71,35 @@ static const PRUint32 STALL_MS = 3000;
 static const PRInt64 CAN_PLAY_THROUGH_MARGIN = 10;
 
 nsMediaDecoder::nsMediaDecoder() :
   mElement(nsnull),
   mRGBWidth(-1),
   mRGBHeight(-1),
   mVideoUpdateLock("nsMediaDecoder.mVideoUpdateLock"),
   mFrameBufferLength(0),
-  mPinnedForSeek(PR_FALSE),
-  mSizeChanged(PR_FALSE),
-  mImageContainerSizeChanged(PR_FALSE),
-  mShuttingDown(PR_FALSE)
+  mPinnedForSeek(false),
+  mSizeChanged(false),
+  mImageContainerSizeChanged(false),
+  mShuttingDown(false)
 {
   MOZ_COUNT_CTOR(nsMediaDecoder);
   MediaMemoryReporter::AddMediaDecoder(this);
 }
 
 nsMediaDecoder::~nsMediaDecoder()
 {
   MOZ_COUNT_DTOR(nsMediaDecoder);
   MediaMemoryReporter::RemoveMediaDecoder(this);
 }
 
 bool nsMediaDecoder::Init(nsHTMLMediaElement* aElement)
 {
   mElement = aElement;
-  return PR_TRUE;
+  return true;
 }
 
 void nsMediaDecoder::Shutdown()
 {
   StopProgress();
   mElement = nsnull;
 }
 
@@ -126,21 +126,21 @@ void nsMediaDecoder::Invalidate()
   nsIFrame* frame = mElement->GetPrimaryFrame();
   bool invalidateFrame = false;
 
   {
     MutexAutoLock lock(mVideoUpdateLock);
 
     // Get mImageContainerSizeChanged while holding the lock.
     invalidateFrame = mImageContainerSizeChanged;
-    mImageContainerSizeChanged = PR_FALSE;
+    mImageContainerSizeChanged = false;
 
     if (mSizeChanged) {
       mElement->UpdateMediaSize(nsIntSize(mRGBWidth, mRGBHeight));
-      mSizeChanged = PR_FALSE;
+      mSizeChanged = false;
 
       if (frame) {
         nsPresContext* presContext = frame->PresContext();
         nsIPresShell *presShell = presContext->PresShell();
         presShell->FrameNeedsReflow(frame,
                                     nsIPresShell::eStyleChange,
                                     NS_FRAME_IS_DIRTY);
       }
@@ -157,17 +157,17 @@ void nsMediaDecoder::Invalidate()
   }
 
   nsSVGEffects::InvalidateDirectRenderingObservers(mElement);
 }
 
 static void ProgressCallback(nsITimer* aTimer, void* aClosure)
 {
   nsMediaDecoder* decoder = static_cast<nsMediaDecoder*>(aClosure);
-  decoder->Progress(PR_TRUE);
+  decoder->Progress(true);
 }
 
 void nsMediaDecoder::Progress(bool aTimer)
 {
   if (!mElement)
     return;
 
   TimeStamp now = TimeStamp::Now();
@@ -216,42 +216,42 @@ nsresult nsMediaDecoder::StopProgress()
 
   return rv;
 }
 
 void nsMediaDecoder::FireTimeUpdate()
 {
   if (!mElement)
     return;
-  mElement->FireTimeUpdate(PR_TRUE);
+  mElement->FireTimeUpdate(true);
 }
 
 void nsMediaDecoder::SetVideoData(const gfxIntSize& aSize,
                                   Image* aImage,
                                   TimeStamp aTarget)
 {
   MutexAutoLock lock(mVideoUpdateLock);
 
   if (mRGBWidth != aSize.width || mRGBHeight != aSize.height) {
     mRGBWidth = aSize.width;
     mRGBHeight = aSize.height;
-    mSizeChanged = PR_TRUE;
+    mSizeChanged = true;
   }
   if (mImageContainer && aImage) {
     gfxIntSize oldFrameSize = mImageContainer->GetCurrentSize();
 
     TimeStamp paintTime = mImageContainer->GetPaintTime();
     if (!paintTime.IsNull() && !mPaintTarget.IsNull()) {
       mPaintDelay = paintTime - mPaintTarget;
     }
 
     mImageContainer->SetCurrentImage(aImage);
     gfxIntSize newFrameSize = mImageContainer->GetCurrentSize();
     if (oldFrameSize != newFrameSize) {
-      mImageContainerSizeChanged = PR_TRUE;
+      mImageContainerSizeChanged = true;
     }
   }
 
   mPaintTarget = aTarget;
 }
 
 double nsMediaDecoder::GetFrameDelay()
 {
@@ -260,45 +260,45 @@ double nsMediaDecoder::GetFrameDelay()
 }
 
 void nsMediaDecoder::PinForSeek()
 {
   nsMediaStream* stream = GetCurrentStream();
   if (!stream || mPinnedForSeek) {
     return;
   }
-  mPinnedForSeek = PR_TRUE;
+  mPinnedForSeek = true;
   stream->Pin();
 }
 
 void nsMediaDecoder::UnpinForSeek()
 {
   nsMediaStream* stream = GetCurrentStream();
   if (!stream || !mPinnedForSeek) {
     return;
   }
-  mPinnedForSeek = PR_FALSE;
+  mPinnedForSeek = false;
   stream->Unpin();
 }
 
 bool nsMediaDecoder::CanPlayThrough()
 {
   Statistics stats = GetStatistics();
   if (!stats.mDownloadRateReliable || !stats.mPlaybackRateReliable) {
-    return PR_FALSE;
+    return false;
   }
   PRInt64 bytesToDownload = stats.mTotalBytes - stats.mDownloadPosition;
   PRInt64 bytesToPlayback = stats.mTotalBytes - stats.mPlaybackPosition;
   double timeToDownload = bytesToDownload / stats.mDownloadRate;
   double timeToPlay = bytesToPlayback / stats.mPlaybackRate;
 
   if (timeToDownload > timeToPlay) {
     // Estimated time to download is greater than the estimated time to play.
     // We probably can't play through without having to stop to buffer.
-    return PR_FALSE;
+    return false;
   }
 
   // Estimated time to download is less than the estimated time to play.
   // We can probably play through without having to buffer, but ensure that
   // we've got a reasonable amount of data buffered after the current
   // playback position, so that if the bitrate of the media fluctuates, or if
   // our download rate or decode rate estimation is otherwise inaccurate,
   // we don't suddenly discover that we need to buffer. This is particularly
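
CanPlayThrough() above reduces to comparing two estimates: the time needed to download the rest of the resource against the time needed to play the rest of it. A self-contained sketch of that comparison with made-up numbers (the real code additionally requires a readahead margin past the playback position before answering true):

#include <cstdint>
#include <cstdio>

// Sketch of the core CanPlayThrough() comparison (illustrative numbers only).
struct Stats {
  int64_t totalBytes;
  int64_t downloadPosition;   // bytes downloaded so far
  int64_t playbackPosition;   // bytes consumed by playback so far
  double  downloadRate;       // bytes/second
  double  playbackRate;       // bytes/second
};

static bool RoughCanPlayThrough(const Stats& s) {
  double timeToDownload = (s.totalBytes - s.downloadPosition) / s.downloadRate;
  double timeToPlay     = (s.totalBytes - s.playbackPosition) / s.playbackRate;
  // If downloading the remainder takes longer than playing it, we will
  // almost certainly have to stop and buffer at some point.
  return timeToDownload <= timeToPlay;
}

int main() {
  // 10 MB file, 2 MB downloaded, playback just started,
  // downloading at 250 KB/s, playing at 200 KB/s.
  Stats s{10 * 1024 * 1024, 2 * 1024 * 1024, 0, 250 * 1024.0, 200 * 1024.0};
  std::printf("can play through: %s\n", RoughCanPlayThrough(s) ? "yes" : "no");
  return 0;
}
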
--- a/content/media/nsMediaDecoder.h
+++ b/content/media/nsMediaDecoder.h
@@ -81,17 +81,17 @@ public:
 
   nsMediaDecoder();
   virtual ~nsMediaDecoder();
 
   // Create a new decoder of the same type as this one.
   virtual nsMediaDecoder* Clone() = 0;
 
   // Perform any initialization required for the decoder.
-  // Return PR_TRUE on successful initialisation, PR_FALSE
+  // Return true on successful initialisation, false
   // on failure.
   virtual bool Init(nsHTMLMediaElement* aElement);
 
   // Get the current nsMediaStream being used. Its URI will be returned
   // by currentSrc.
   virtual nsMediaStream* GetCurrentStream() = 0;
 
   // Return the principal of the current URI being played or downloaded.
@@ -145,21 +145,21 @@ public:
                         nsMediaDecoder* aCloneDonor) = 0;
 
   // Called when the video file has completed downloading.
   virtual void ResourceLoaded() = 0;
 
   // Called if the media file encounters a network error.
   virtual void NetworkError() = 0;
 
-  // Call from any thread safely. Return PR_TRUE if we are currently
+  // Call from any thread safely. Return true if we are currently
   // seeking in the media resource.
   virtual bool IsSeeking() const = 0;
 
-  // Return PR_TRUE if the decoder has reached the end of playback.
+  // Return true if the decoder has reached the end of playback.
   // Call in the main thread only.
   virtual bool IsEnded() const = 0;
 
   struct Statistics {
     // Estimate of the current playback rate (bytes/second).
     double mPlaybackRate;
     // Estimate of the current download rate (bytes/second). This
     // ignores time that the channel was paused by Gecko.
@@ -285,31 +285,31 @@ public:
   // Set the duration of the media resource in units of seconds.
   // This is called via a channel listener if it can pick up the duration
   // from a content header. Must be called from the main thread only.
   virtual void SetDuration(double aDuration) = 0;
 
   // Set a flag indicating whether seeking is supported
   virtual void SetSeekable(bool aSeekable) = 0;
 
-  // Return PR_TRUE if seeking is supported.
+  // Return true if seeking is supported.
   virtual bool IsSeekable() = 0;
 
   // Return the time ranges that can be seeked into.
   virtual nsresult GetSeekable(nsTimeRanges* aSeekable) = 0;
 
   // Set the end time of the media resource. When playback reaches
   // this point the media pauses. aTime is in seconds.
   virtual void SetEndTime(double aTime) = 0;
 
   // Invalidate the frame.
   virtual void Invalidate();
 
   // Fire progress events if needed according to the time and byte
-  // constraints outlined in the specification. aTimer is PR_TRUE
+  // constraints outlined in the specification. aTimer is true
   // if the method is called as a result of the progress timer rather
   // than the result of downloaded data.
   virtual void Progress(bool aTimer);
 
   // Fire timeupdate events if needed according to the time constraints
   // outlined in the specification.
   virtual void FireTimeUpdate();
 
@@ -341,17 +341,17 @@ public:
   // to throttle the download. Call on the main thread only. This can
   // be called multiple times, there's an internal "suspend count".
   virtual void Suspend() = 0;
 
   // Resume any media downloads that have been suspended. Called by the
   // media element when it is restored from the bfcache, or when we need
   // to stop throttling the download. Call on the main thread only.
   // The download will only actually resume once as many Resume calls
-  // have been made as Suspend calls. When aForceBuffering is PR_TRUE,
+  // have been made as Suspend calls. When aForceBuffering is true,
   // we force the decoder to go into buffering state before resuming
   // playback.
   virtual void Resume(bool aForceBuffering) = 0;
 
   // Returns a weak reference to the media element we're decoding for,
   // if it's available.
   nsHTMLMediaElement* GetMediaElement();
 
@@ -381,17 +381,17 @@ public:
   void SetVideoData(const gfxIntSize& aSize,
                     Image* aImage,
                     TimeStamp aTarget);
 
   // Constructs the time ranges representing what segments of the media
   // are buffered and playable.
   virtual nsresult GetBuffered(nsTimeRanges* aBuffered) = 0;
 
-  // Returns PR_TRUE if we can play the entire media through without stopping
+  // Returns true if we can play the entire media through without stopping
   // to buffer, given the current download and playback rates.
   bool CanPlayThrough();
 
   // Returns the size, in bytes, of the heap memory used by the currently
   // queued decoded video and audio data.
   virtual PRInt64 VideoQueueMemoryInUse() = 0;
   virtual PRInt64 AudioQueueMemoryInUse() = 0;
 
@@ -454,27 +454,27 @@ protected:
   // to the RGB buffer must obtain this lock first to ensure that
   // the video element does not use video data or sizes that are
   // in the midst of being changed.
   Mutex mVideoUpdateLock;
 
   // The framebuffer size to use for audioavailable events.
   PRUint32 mFrameBufferLength;
 
-  // PR_TRUE when our media stream has been pinned. We pin the stream
+  // True when our media stream has been pinned. We pin the stream
   // while seeking.
   bool mPinnedForSeek;
 
-  // Set to PR_TRUE when the video width, height or pixel aspect ratio is
+  // Set to true when the video width, height or pixel aspect ratio is
   // changed by SetVideoData().  The next call to Invalidate() will recalculate
   // and update the intrinsic size on the element, request a frame reflow and
   // then reset this flag.
   bool mSizeChanged;
 
-  // Set to PR_TRUE in SetVideoData() if the new image has a different size
+  // Set to true in SetVideoData() if the new image has a different size
   // than the current image.  The image size is also affected by transforms
   // so this can be true even if mSizeChanged is false, for example when
   // zooming.  The next call to Invalidate() will call nsIFrame::Invalidate
   // when this flag is set, rather than just InvalidateLayer, and then reset
   // this flag.
   bool mImageContainerSizeChanged;
 
   // True if the decoder is being shutdown. At this point all events that
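
The Suspend()/Resume() comments above describe an internal suspend count: the download only actually resumes once as many Resume calls have been made as Suspend calls. A small sketch of that contract (hypothetical class; the real count lives on the decoder/stream and these calls are main-thread only):

#include <cassert>

// Sketch of the "internal suspend count" contract (illustrative only).
class SuspendableDownload {
public:
  void Suspend() {
    if (mSuspendCount++ == 0) {
      ReallySuspendChannel();   // first Suspend actually pauses the load
    }
  }

  void Resume() {
    assert(mSuspendCount > 0 && "Resume without matching Suspend");
    if (--mSuspendCount == 0) {
      ReallyResumeChannel();    // last Resume actually restarts the load
    }
  }

private:
  void ReallySuspendChannel() { /* pause the underlying channel */ }
  void ReallyResumeChannel()  { /* resume or reopen the underlying channel */ }
  int mSuspendCount = 0;
};
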
--- a/content/media/nsMediaStream.cpp
+++ b/content/media/nsMediaStream.cpp
@@ -66,21 +66,21 @@ static const PRUint32 HTTP_OK_CODE = 200
 static const PRUint32 HTTP_PARTIAL_RESPONSE_CODE = 206;
 
 using namespace mozilla;
 
 nsMediaChannelStream::nsMediaChannelStream(nsMediaDecoder* aDecoder,
     nsIChannel* aChannel, nsIURI* aURI)
   : nsMediaStream(aDecoder, aChannel, aURI),
     mOffset(0), mSuspendCount(0),
-    mReopenOnError(PR_FALSE), mIgnoreClose(PR_FALSE),
+    mReopenOnError(false), mIgnoreClose(false),
     mCacheStream(this),
     mLock("nsMediaChannelStream.mLock"),
     mCacheSuspendCount(0),
-    mIgnoreResume(PR_FALSE)
+    mIgnoreResume(false)
 {
 }
 
 nsMediaChannelStream::~nsMediaChannelStream()
 {
   if (mListener) {
     // Kill its reference to us since we're going away
     mListener->Revoke();
@@ -224,29 +224,29 @@ nsMediaChannelStream::OnStartRequest(nsI
       }
 
       if (NS_SUCCEEDED(rv)) {
         double duration = durationText.ToDouble(&ec);
         if (ec == NS_OK && duration >= 0) {
           mDecoder->SetDuration(duration);
         }
       } else {
-        mDecoder->SetInfinite(PR_TRUE);
+        mDecoder->SetInfinite(true);
       }
     }
 
     if (mOffset > 0 && responseStatus == HTTP_OK_CODE) {
       // If we get an OK response but we were seeking, we have to assume
       // that seeking doesn't work. We also need to tell the cache that
       // it's getting data for the start of the stream.
       mCacheStream.NotifyDataStarted(0);
       mOffset = 0;
 
       // The server claimed it supported range requests.  It lied.
-      acceptsRanges = PR_FALSE;
+      acceptsRanges = false;
     } else if (mOffset == 0 &&
                (responseStatus == HTTP_OK_CODE ||
                 responseStatus == HTTP_PARTIAL_RESPONSE_CODE)) {
       // We weren't seeking and got a valid response status,
       // set the length of the content.
       PRInt32 cl = -1;
       hc->GetContentLength(&cl);
       if (cl >= 0) {
@@ -258,49 +258,49 @@ nsMediaChannelStream::OnStartRequest(nsI
 
     // If we get an HTTP_OK_CODE response to our byte range request,
     // and the server isn't sending Accept-Ranges:bytes then we don't
     // support seeking.
     seekable =
       responseStatus == HTTP_PARTIAL_RESPONSE_CODE || acceptsRanges;
 
     if (seekable) {
-      mDecoder->SetInfinite(PR_FALSE);
+      mDecoder->SetInfinite(false);
     }
   }
   mDecoder->SetSeekable(seekable);
   mCacheStream.SetSeekable(seekable);
 
   nsCOMPtr<nsICachingChannel> cc = do_QueryInterface(aRequest);
   if (cc) {
     bool fromCache = false;
     rv = cc->IsFromCache(&fromCache);
     if (NS_SUCCEEDED(rv) && !fromCache) {
-      cc->SetCacheAsFile(PR_TRUE);
+      cc->SetCacheAsFile(true);
     }
   }
 
   {
     MutexAutoLock lock(mLock);
     mChannelStatistics.Start(TimeStamp::Now());
   }
 
-  mReopenOnError = PR_FALSE;
-  mIgnoreClose = PR_FALSE;
+  mReopenOnError = false;
+  mIgnoreClose = false;
   if (mSuspendCount > 0) {
     // Re-suspend the channel if it needs to be suspended
     // No need to call PossiblySuspend here since the channel is
     // definitely in the right state for us in OnStartRequest.
     mChannel->Suspend();
-    mIgnoreResume = PR_FALSE;
+    mIgnoreResume = false;
   }
 
   // Fires an initial progress event and sets up the stall counter so stall events
   // fire if no download occurs within the required time frame.
-  mDecoder->Progress(PR_FALSE);
+  mDecoder->Progress(false);
 
   return NS_OK;
 }
 
 nsresult
 nsMediaChannelStream::OnStopRequest(nsIRequest* aRequest, nsresult aStatus)
 {
   NS_ASSERTION(mChannel.get() == aRequest, "Wrong channel!");
@@ -319,31 +319,31 @@ nsMediaChannelStream::OnStopRequest(nsIR
   // But don't reopen if we need to seek and we don't think we can... that would
   // cause us to just re-read the stream, which would be really bad.
   if (mReopenOnError &&
       aStatus != NS_ERROR_PARSED_DATA_CACHED && aStatus != NS_BINDING_ABORTED &&
       (mOffset == 0 || mCacheStream.IsSeekable())) {
     // If the stream did close normally, then if the server is seekable we'll
     // just seek to the end of the resource and get an HTTP 416 error because
     // there's nothing there, so this isn't bad.
-    nsresult rv = CacheClientSeek(mOffset, PR_FALSE);
+    nsresult rv = CacheClientSeek(mOffset, false);
     if (NS_SUCCEEDED(rv))
       return rv;
     // If the reopen/reseek fails, just fall through and treat this
     // error as fatal.
   }
 
   if (!mIgnoreClose) {
     mCacheStream.NotifyDataEnded(aStatus);
 
     // Move this request back into the foreground.  This is necessary for
     // requests owned by video documents to ensure the load group fires
     // OnStopRequest when restoring from session history.
     if (mLoadInBackground) {
-      mLoadInBackground = PR_FALSE;
+      mLoadInBackground = false;
 
       nsLoadFlags loadFlags;
       DebugOnly<nsresult> rv = mChannel->GetLoadFlags(&loadFlags);
       NS_ASSERTION(NS_SUCCEEDED(rv), "GetLoadFlags() failed!");
 
       loadFlags &= ~nsIRequest::LOAD_BACKGROUND;
       ModifyLoadFlags(loadFlags);
     }
@@ -465,17 +465,17 @@ nsresult nsMediaChannelStream::OpenChann
     nsHTMLMediaElement* element = mDecoder->GetMediaElement();
     NS_ENSURE_TRUE(element, NS_ERROR_FAILURE);
     if (element->ShouldCheckAllowOrigin()) {
       nsresult rv;
       nsCORSListenerProxy* crossSiteListener =
         new nsCORSListenerProxy(mListener,
                                 element->NodePrincipal(),
                                 mChannel,
-                                PR_FALSE,
+                                false,
                                 &rv);
       listener = crossSiteListener;
       NS_ENSURE_TRUE(crossSiteListener, NS_ERROR_OUT_OF_MEMORY);
       NS_ENSURE_SUCCESS(rv, rv);
     } else {
       nsresult rv = nsContentUtils::GetSecurityManager()->
         CheckLoadURIWithPrincipal(element->NodePrincipal(),
                                   mURI,
@@ -498,17 +498,17 @@ void nsMediaChannelStream::SetupChannelH
   // of the resource.
   // This enables us to detect if the stream supports byte range
   // requests, and therefore seeking, early.
   nsCOMPtr<nsIHttpChannel> hc = do_QueryInterface(mChannel);
   if (hc) {
     nsCAutoString rangeString("bytes=");
     rangeString.AppendInt(mOffset);
     rangeString.Append("-");
-    hc->SetRequestHeader(NS_LITERAL_CSTRING("Range"), rangeString, PR_FALSE);
+    hc->SetRequestHeader(NS_LITERAL_CSTRING("Range"), rangeString, false);
 
     // Send Accept header for video and audio types only (Bug 489071)
     NS_ASSERTION(NS_IsMainThread(), "Don't call on non-main thread");
     nsHTMLMediaElement* element = mDecoder->GetMediaElement();
     if (!element) {
       return;
     }
     element->SetRequestHeaders(hc);
@@ -627,17 +627,17 @@ void nsMediaChannelStream::Suspend(bool 
   if (!element) {
     // Shutting down; do nothing.
     return;
   }
 
   if (mChannel) {
     if (aCloseImmediately && mCacheStream.IsSeekable()) {
       // Kill off our channel right now, but don't tell anyone about it.
-      mIgnoreClose = PR_TRUE;
+      mIgnoreClose = true;
       CloseChannel();
       element->DownloadSuspended();
     } else if (mSuspendCount == 0) {
       {
         MutexAutoLock lock(mLock);
         mChannelStatistics.Stop(TimeStamp::Now());
       }
       PossiblySuspend();
@@ -665,30 +665,30 @@ void nsMediaChannelStream::Resume()
     if (mChannel) {
       // Just wake up our existing channel
       {
         MutexAutoLock lock(mLock);
         mChannelStatistics.Start(TimeStamp::Now());
       }
       // if an error occurs after Resume, assume it's because the server
       // timed out the connection and we should reopen it.
-      mReopenOnError = PR_TRUE;
+      mReopenOnError = true;
       PossiblyResume();
       element->DownloadResumed();
     } else {
       PRInt64 totalLength = mCacheStream.GetLength();
       // If mOffset is at the end of the stream, then we shouldn't try to
       // seek to it. The seek will fail and be wasted anyway. We can leave
       // the channel dead; if the media cache wants to read some other data
       // in the future, it will call CacheClientSeek itself which will reopen the
       // channel.
       if (totalLength < 0 || mOffset < totalLength) {
         // There is (or may be) data to read at mOffset, so start reading it.
         // Need to recreate the channel.
-        CacheClientSeek(mOffset, PR_FALSE);
+        CacheClientSeek(mOffset, false);
       }
       element->DownloadResumed();
     }
   }
 }
 
 nsresult
 nsMediaChannelStream::RecreateChannel()
@@ -787,17 +787,17 @@ nsMediaChannelStream::CacheClientSeek(PR
 
 nsresult
 nsMediaChannelStream::CacheClientSuspend()
 {
   {
     MutexAutoLock lock(mLock);
     ++mCacheSuspendCount;
   }
-  Suspend(PR_FALSE);
+  Suspend(false);
 
   mDecoder->NotifySuspendedStatusChanged();
   return NS_OK;
 }
 
 nsresult
 nsMediaChannelStream::CacheClientResume()
 {
@@ -883,29 +883,29 @@ nsMediaChannelStream::GetLength()
 
 void
 nsMediaChannelStream::PossiblySuspend()
 {
   bool isPending = false;
   nsresult rv = mChannel->IsPending(&isPending);
   if (NS_SUCCEEDED(rv) && isPending) {
     mChannel->Suspend();
-    mIgnoreResume = PR_FALSE;
+    mIgnoreResume = false;
   } else {
-    mIgnoreResume = PR_TRUE;
+    mIgnoreResume = true;
   }
 }
 
 void
 nsMediaChannelStream::PossiblyResume()
 {
   if (!mIgnoreResume) {
     mChannel->Resume();
   } else {
-    mIgnoreResume = PR_FALSE;
+    mIgnoreResume = false;
   }
 }
 
 class nsMediaFileStream : public nsMediaStream
 {
 public:
   nsMediaFileStream(nsMediaDecoder* aDecoder, nsIChannel* aChannel, nsIURI* aURI) :
     nsMediaStream(aDecoder, aChannel, aURI), mSize(-1),
@@ -935,17 +935,17 @@ public:
   virtual PRInt64  Tell();
 
   // Any thread
   virtual void    Pin() {}
   virtual void    Unpin() {}
   virtual double  GetDownloadRate(bool* aIsReliable)
   {
     // The data's all already here
-    *aIsReliable = PR_TRUE;
+    *aIsReliable = true;
     return 100*1024*1024; // arbitrary, use 100MB/s
   }
   virtual PRInt64 GetLength() { return mSize; }
   virtual PRInt64 GetNextCachedData(PRInt64 aOffset)
   {
     return (aOffset < mSize) ? aOffset : -1;
   }
   virtual PRInt64 GetCachedDataEnd(PRInt64 aOffset) { return NS_MAX(aOffset, mSize); }
@@ -1191,17 +1191,17 @@ nsMediaStream::Create(nsMediaDecoder* aD
   if (fc) {
     return new nsMediaFileStream(aDecoder, aChannel, uri);
   }
   return new nsMediaChannelStream(aDecoder, aChannel, uri);
 }
 
 void nsMediaStream::MoveLoadsToBackground() {
   NS_ASSERTION(!mLoadInBackground, "Why are you calling this more than once?");
-  mLoadInBackground = PR_TRUE;
+  mLoadInBackground = true;
   if (!mChannel) {
     // No channel, resource is probably already loaded.
     return;
   }
 
   nsresult rv;
   nsHTMLMediaElement* element = mDecoder->GetMediaElement();
   if (!element) {
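
OnStartRequest() and SetupChannelHeaders() above determine seekability from the HTTP response: every open sends "Range: bytes=<offset>-", a 200 answer to a non-zero-offset range request means the server lied about supporting ranges, and the stream is treated as seekable only on a 206 or an Accept-Ranges: bytes header. A sketch of just that decision (hypothetical free function; the real code also resets the cache offset on a surprise 200):

#include <cstdint>

// Sketch of the seekability decision made in OnStartRequest (illustrative).
static const uint32_t HTTP_OK_CODE = 200;
static const uint32_t HTTP_PARTIAL_RESPONSE_CODE = 206;

static bool IsStreamSeekable(uint32_t responseStatus,
                             int64_t requestedOffset,
                             bool acceptsRangesHeader) {
  bool acceptsRanges = acceptsRangesHeader;
  if (requestedOffset > 0 && responseStatus == HTTP_OK_CODE) {
    // We asked for "Range: bytes=<offset>-" but got the whole resource back:
    // the server claimed it supported range requests, but it lied.
    acceptsRanges = false;
  }
  // A 206 means byte ranges work; otherwise trust Accept-Ranges: bytes.
  return responseStatus == HTTP_PARTIAL_RESPONSE_CODE || acceptsRanges;
}
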
--- a/content/media/nsMediaStream.h
+++ b/content/media/nsMediaStream.h
@@ -75,29 +75,29 @@ public:
   typedef mozilla::TimeStamp TimeStamp;
   typedef mozilla::TimeDuration TimeDuration;
 
   nsChannelStatistics() { Reset(); }
   void Reset() {
     mLastStartTime = TimeStamp();
     mAccumulatedTime = TimeDuration(0);
     mAccumulatedBytes = 0;
-    mIsStarted = PR_FALSE;
+    mIsStarted = false;
   }
   void Start(TimeStamp aNow) {
     if (mIsStarted)
       return;
     mLastStartTime = aNow;
-    mIsStarted = PR_TRUE;
+    mIsStarted = true;
   }
   void Stop(TimeStamp aNow) {
     if (!mIsStarted)
       return;
     mAccumulatedTime += aNow - mLastStartTime;
-    mIsStarted = PR_FALSE;
+    mIsStarted = false;
   }
   void AddBytes(PRInt64 aBytes) {
     if (!mIsStarted) {
       // ignore this data, it may be related to seeking or some other
       // operation we don't care about
       return;
     }
     mAccumulatedBytes += aBytes;
@@ -299,17 +299,17 @@ public:
    */
   virtual nsresult GetCachedRanges(nsTArray<nsByteRange>& aRanges) = 0;
 
 protected:
   nsMediaStream(nsMediaDecoder* aDecoder, nsIChannel* aChannel, nsIURI* aURI) :
     mDecoder(aDecoder),
     mChannel(aChannel),
     mURI(aURI),
-    mLoadInBackground(PR_FALSE)
+    mLoadInBackground(false)
   {
     MOZ_COUNT_CTOR(nsMediaStream);
   }
 
   // Set the request's load flags to aFlags.  If the request is part of a
   // load group, the request is removed from the group, the flags are set, and
   // then the request is added back to the load group.
   void ModifyLoadFlags(nsLoadFlags aFlags);
@@ -322,17 +322,17 @@ protected:
   // Channel used to download the media data. Must be accessed
   // from the main thread only.
   nsCOMPtr<nsIChannel> mChannel;
 
   // URI in case the stream needs to be re-opened. Access from
   // main thread only.
   nsCOMPtr<nsIURI> mURI;
 
-  // PR_TRUE if MoveLoadsToBackground() has been called, i.e. the load event
+  // True if MoveLoadsToBackground() has been called, i.e. the load event
   // has been fired, and all channel loads will be in the background.
   bool mLoadInBackground;
 };
 
 /**
  * This is the nsMediaStream implementation that wraps Necko channels.
  * Much of its functionality is actually delegated to nsMediaCache via
  * an underlying nsMediaCacheStream.
@@ -373,17 +373,17 @@ public:
   nsresult CacheClientResume();
 
   // Main thread
   virtual nsresult Open(nsIStreamListener** aStreamListener);
   virtual nsresult Close();
   virtual void     Suspend(bool aCloseImmediately);
   virtual void     Resume();
   virtual already_AddRefed<nsIPrincipal> GetCurrentPrincipal();
-  // Return PR_TRUE if the stream has been closed.
+  // Return true if the stream has been closed.
   bool IsClosed() const { return mCacheStream.IsClosed(); }
   virtual nsMediaStream* CloneData(nsMediaDecoder* aDecoder);
   virtual nsresult ReadFromCache(char* aBuffer, PRInt64 aOffset, PRUint32 aCount);
 
   // Other thread
   virtual void     SetReadMode(nsMediaCacheStream::ReadMode aMode);
   virtual void     SetPlaybackRate(PRUint32 aBytesPerSecond);
   virtual nsresult Read(char* aBuffer, PRUint32 aCount, PRUint32* aBytes);
@@ -475,15 +475,15 @@ protected:
   // Any thread access
   nsMediaCacheStream mCacheStream;
 
   // This lock protects mChannelStatistics and mCacheSuspendCount
   Mutex               mLock;
   nsChannelStatistics mChannelStatistics;
   PRUint32            mCacheSuspendCount;
 
-  // PR_TRUE if we couldn't suspend the stream and we therefore don't want
+  // True if we couldn't suspend the stream and we therefore don't want
   // to resume later. This is usually due to the channel not being in the
   // isPending state at the time of the suspend request.
   bool mIgnoreResume;
 };
 
 #endif
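
nsChannelStatistics above accumulates bytes and active time so the download rate only reflects periods when the channel was actually transferring. A rough sketch of the same bookkeeping (GetRateBytesPerSecond is hypothetical; no rate accessor appears in this hunk):

#include <chrono>
#include <cstdint>

// Sketch of the nsChannelStatistics bookkeeping (illustrative only).
class ChannelStatsSketch {
  using Clock = std::chrono::steady_clock;
public:
  void Start(Clock::time_point now) {
    if (mStarted) return;
    mLastStart = now;
    mStarted = true;
  }
  void Stop(Clock::time_point now) {
    if (!mStarted) return;
    mAccumulated += now - mLastStart;
    mStarted = false;
  }
  void AddBytes(int64_t bytes) {
    if (!mStarted) return;      // ignore bytes from seeks and other noise
    mBytes += bytes;
  }
  double GetRateBytesPerSecond(Clock::time_point now) const {
    auto total = mAccumulated + (mStarted ? now - mLastStart : Clock::duration{0});
    double seconds = std::chrono::duration<double>(total).count();
    return seconds > 0.0 ? mBytes / seconds : 0.0;
  }
private:
  Clock::time_point mLastStart{};
  Clock::duration mAccumulated{};
  int64_t mBytes = 0;
  bool mStarted = false;
};
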
--- a/content/media/ogg/nsOggCodecState.cpp
+++ b/content/media/ogg/nsOggCodecState.cpp
@@ -58,17 +58,17 @@ nsOggCodecState::Create(ogg_page* aPage)
   nsAutoPtr<nsOggCodecState> codecState;
   if (aPage->body_len > 6 && memcmp(aPage->body+1, "theora", 6) == 0) {
     codecState = new nsTheoraState(aPage);
   } else if (aPage->body_len > 6 && memcmp(aPage->body+1, "vorbis", 6) == 0) {
     codecState = new nsVorbisState(aPage);
   } else if (aPage->body_len > 8 && memcmp(aPage->body, "fishead\0", 8) == 0) {
     codecState = new nsSkeletonState(aPage);
   } else {
-    codecState = new nsOggCodecState(aPage, PR_FALSE);
+    codecState = new nsOggCodecState(aPage, false);
   }
   return codecState->nsOggCodecState::Init() ? codecState.forget() : nsnull;
 }
 
 nsOggCodecState::nsOggCodecState(ogg_page* aBosPage, bool aActive) :
   mPacketCount(0),
   mSerial(ogg_page_serialno(aBosPage)),
   mActive(aActive),
@@ -159,17 +159,18 @@ ogg_packet* nsOggCodecState::PacketOut()
     return nsnull;
   }
   return mPackets.PopFront();
 }
 
 nsresult nsOggCodecState::PageIn(ogg_page* aPage) {
   if (!mActive)
     return NS_OK;
-  NS_ASSERTION(ogg_page_serialno(aPage) == mSerial, "Page must be for this stream!");
+  NS_ASSERTION(static_cast<PRUint32>(ogg_page_serialno(aPage)) == mSerial,
+               "Page must be for this stream!");
   if (ogg_stream_pagein(&mState, aPage) == -1)
     return NS_ERROR_FAILURE;
   int r;
   do {
     ogg_packet packet;
     r = ogg_stream_packetout(&mState, &packet);
     if (r == 1) {
       mPackets.Append(Clone(&packet));
@@ -179,17 +180,17 @@ nsresult nsOggCodecState::PageIn(ogg_pag
     NS_WARNING("Unrecoverable error in ogg_stream_packetout");
     return NS_ERROR_FAILURE;
   }
   return NS_OK;
 }
 
 nsresult nsOggCodecState::PacketOutUntilGranulepos(bool& aFoundGranulepos) {
   int r;
-  aFoundGranulepos = PR_FALSE;
+  aFoundGranulepos = false;
   // Extract packets from the sync state until either no more packets
   // come out, or we get a data packet with non -1 granulepos.
   do {
     ogg_packet packet;
     r = ogg_stream_packetout(&mState, &packet);
     if (r == 1) {
       ogg_packet* clone = Clone(&packet);
       if (IsHeader(&packet)) {
@@ -207,17 +208,17 @@ nsresult nsOggCodecState::PacketOutUntil
   if (ogg_stream_check(&mState)) {
     NS_WARNING("Unrecoverable error in ogg_stream_packetout");
     return NS_ERROR_FAILURE;
   }
   return NS_OK;
 }
 
 nsTheoraState::nsTheoraState(ogg_page* aBosPage) :
-  nsOggCodecState(aBosPage, PR_TRUE),
+  nsOggCodecState(aBosPage, true),
   mSetup(0),
   mCtx(0),
   mPixelAspectRatio(0)
 {
   MOZ_COUNT_CTOR(nsTheoraState);
   th_info_init(&mInfo);
   th_comment_init(&mComment);
 }
@@ -227,38 +228,38 @@ nsTheoraState::~nsTheoraState() {
   th_setup_free(mSetup);
   th_decode_free(mCtx);
   th_comment_clear(&mComment);
   th_info_clear(&mInfo);
 }
 
 bool nsTheoraState::Init() {
   if (!mActive)
-    return PR_FALSE;
+    return false;
 
   PRInt64 n = mInfo.aspect_numerator;
   PRInt64 d = mInfo.aspect_denominator;
 
   mPixelAspectRatio = (n == 0 || d == 0) ?
     1.0f : static_cast<float>(n) / static_cast<float>(d);
 
   // Ensure the frame and picture regions aren't larger than our prescribed
   // maximum, or zero sized.
   nsIntSize frame(mInfo.frame_width, mInfo.frame_height);
   nsIntRect picture(mInfo.pic_x, mInfo.pic_y, mInfo.pic_width, mInfo.pic_height);
   if (!nsVideoInfo::ValidateVideoRegion(frame, picture, frame)) {
-    return mActive = PR_FALSE;
+    return mActive = false;
   }
 
   mCtx = th_decode_alloc(&mInfo, mSetup);
   if (mCtx == NULL) {
-    return mActive = PR_FALSE;
+    return mActive = false;
   }
 
-  return PR_TRUE;
+  return true;
 }
 
 bool
 nsTheoraState::DecodeHeader(ogg_packet* aPacket)
 {
   mPacketCount++;
   int ret = th_decode_headerin(&mInfo,
                                &mComment,
@@ -278,21 +279,21 @@ nsTheoraState::DecodeHeader(ogg_packet* 
   //    0x81 -> Comment header
   //    0x82 -> Setup header
   // See http://www.theora.org/doc/Theora.pdf Chapter 6, "Bitstream Headers",
   // for more details of the Ogg/Theora containment scheme.
   bool isSetupHeader = aPacket->bytes > 0 && aPacket->packet[0] == 0x82;
   if (ret < 0 || mPacketCount > 3) {
     // We've received an error, or the first three packets weren't valid
     // header packets, assume bad input, and don't activate the bitstream.
-    mDoneReadingHeaders = PR_TRUE;
+    mDoneReadingHeaders = true;
   } else if (ret > 0 && isSetupHeader && mPacketCount == 3) {
     // Successfully read the three header packets.
-    mDoneReadingHeaders = PR_TRUE;
-    mActive = PR_TRUE;
+    mDoneReadingHeaders = true;
+    mActive = true;
   }
   return mDoneReadingHeaders;
 }
 
 PRInt64
 nsTheoraState::Time(PRInt64 granulepos) {
   if (!mActive) {
     return -1;
@@ -301,18 +302,18 @@ nsTheoraState::Time(PRInt64 granulepos) 
 }
 
 bool
 nsTheoraState::IsHeader(ogg_packet* aPacket) {
   return th_packet_isheader(aPacket);
 }
 
 # define TH_VERSION_CHECK(_info,_maj,_min,_sub) \
- ((_info)->version_major>(_maj)||(_info)->version_major==(_maj)&& \
- ((_info)->version_minor>(_min)||(_info)->version_minor==(_min)&& \
+ (((_info)->version_major>(_maj)||(_info)->version_major==(_maj))&& \
+ (((_info)->version_minor>(_min)||(_info)->version_minor==(_min))&& \
  (_info)->version_subminor>=(_sub)))
 
 PRInt64 nsTheoraState::Time(th_info* aInfo, PRInt64 aGranulepos)
 {
   if (aGranulepos < 0 || aInfo->fps_numerator == 0) {
     return -1;
   }
   PRInt64 t = 0;
@@ -502,17 +503,17 @@ nsresult nsVorbisState::Reset()
 
   mGranulepos = 0;
   mPrevVorbisBlockSize = 0;
 
   return res;
 }
 
 nsVorbisState::nsVorbisState(ogg_page* aBosPage) :
-  nsOggCodecState(aBosPage, PR_TRUE),
+  nsOggCodecState(aBosPage, true),
   mPrevVorbisBlockSize(0),
   mGranulepos(0)
 {
   MOZ_COUNT_CTOR(nsVorbisState);
   vorbis_info_init(&mInfo);
   vorbis_comment_init(&mComment);
   memset(&mDsp, 0, sizeof(vorbis_dsp_state));
   memset(&mBlock, 0, sizeof(vorbis_block));
@@ -548,45 +549,45 @@ bool nsVorbisState::DecodeHeader(ogg_pac
   // Specification, Chapter 4, Codec Setup and Packet Decode:
   // http://www.xiph.org/vorbis/doc/Vorbis_I_spec.html#x1-580004
 
   bool isSetupHeader = aPacket->bytes > 0 && aPacket->packet[0] == 0x5;
 
   if (ret < 0 || mPacketCount > 3) {
     // We've received an error, or the first three packets weren't valid
     // header packets, assume bad input, and deactivate the bitstream.
-    mDoneReadingHeaders = PR_TRUE;
-    mActive = PR_FALSE;
+    mDoneReadingHeaders = true;
+    mActive = false;
   } else if (ret == 0 && isSetupHeader && mPacketCount == 3) {
     // Successfully read the three header packets.
     // The bitstream remains active.
-    mDoneReadingHeaders = PR_TRUE;
+    mDoneReadingHeaders = true;
   }
   return mDoneReadingHeaders;
 }
 
 bool nsVorbisState::Init()
 {
   if (!mActive)
-    return PR_FALSE;
+    return false;
 
   int ret = vorbis_synthesis_init(&mDsp, &mInfo);
   if (ret != 0) {
     NS_WARNING("vorbis_synthesis_init() failed initializing vorbis bitstream");
-    return mActive = PR_FALSE;
+    return mActive = false;
   }
   ret = vorbis_block_init(&mDsp, &mBlock);
   if (ret != 0) {
     NS_WARNING("vorbis_block_init() failed initializing vorbis bitstream");
     if (mActive) {
       vorbis_dsp_clear(&mDsp);
     }
-    return mActive = PR_FALSE;
+    return mActive = false;
   }
-  return PR_TRUE;
+  return true;
 }
 
 PRInt64 nsVorbisState::Time(PRInt64 granulepos)
 {
   if (!mActive) {
     return -1;
   }
 
@@ -606,17 +607,17 @@ PRInt64 nsVorbisState::Time(vorbis_info*
 bool
 nsVorbisState::IsHeader(ogg_packet* aPacket)
 {
   // The first byte in each Vorbis header packet is either 0x01, 0x03, or 0x05,
   // i.e. the first bit is odd. Audio data packets have their first bit as 0x0.
   // Any packet with its first bit set cannot be a data packet, it's a
   // (possibly invalid) header packet.
   // See: http://xiph.org/vorbis/doc/Vorbis_I_spec.html#x1-610004.2.1
-  return aPacket->bytes > 0 ? (aPacket->packet[0] & 0x1) : PR_FALSE;
+  return aPacket->bytes > 0 ? (aPacket->packet[0] & 0x1) : false;
 }
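
The comment above describes the Vorbis convention this check relies on: header packets start with 0x01 (identification), 0x03 (comment), or 0x05 (setup), all odd, while audio packets start with an even byte. A small illustrative sketch (not part of the tree) that classifies a packet by that convention:

// Hypothetical classifier following the convention described above.
enum VorbisPacketKind {
  VORBIS_PACKET_AUDIO,
  VORBIS_PACKET_IDENT,
  VORBIS_PACKET_COMMENT,
  VORBIS_PACKET_SETUP,
  VORBIS_PACKET_UNKNOWN_HEADER
};

static VorbisPacketKind
ClassifyVorbisPacket(const unsigned char* aData, long aBytes)
{
  if (aBytes <= 0 || !(aData[0] & 0x1))
    return VORBIS_PACKET_AUDIO;            // even first byte: data packet
  switch (aData[0]) {
    case 0x01: return VORBIS_PACKET_IDENT;
    case 0x03: return VORBIS_PACKET_COMMENT;
    case 0x05: return VORBIS_PACKET_SETUP;
    default:   return VORBIS_PACKET_UNKNOWN_HEADER; // odd, but not a known type
  }
}
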
 
 nsresult
 nsVorbisState::PageIn(ogg_page* aPage)
 {
   if (!mActive)
     return NS_OK;
   NS_ASSERTION(static_cast<PRUint32>(ogg_page_serialno(aPage)) == mSerial,
@@ -751,17 +752,17 @@ nsresult nsVorbisState::ReconstructVorbi
   mPrevVorbisBlockSize = NS_MAX(static_cast<long>(0), mPrevVorbisBlockSize);
   mGranulepos = last->granulepos;
 
   return NS_OK;
 }
 
 
 nsSkeletonState::nsSkeletonState(ogg_page* aBosPage)
-  : nsOggCodecState(aBosPage, PR_TRUE),
+  : nsOggCodecState(aBosPage, true),
     mVersion(0),
     mPresentationTime(0),
     mLength(0)
 {
   MOZ_COUNT_CTOR(nsSkeletonState);
 }
  
 nsSkeletonState::~nsSkeletonState()
@@ -858,57 +859,57 @@ static const unsigned char* ReadVariable
   return p;
 }
 
 bool nsSkeletonState::DecodeIndex(ogg_packet* aPacket)
 {
   NS_ASSERTION(aPacket->bytes >= SKELETON_4_0_MIN_INDEX_LEN,
                "Index must be at least minimum size");
   if (!mActive) {
-    return PR_FALSE;
+    return false;
   }
 
   PRUint32 serialno = LEUint32(aPacket->packet + INDEX_SERIALNO_OFFSET);
   PRInt64 numKeyPoints = LEInt64(aPacket->packet + INDEX_NUM_KEYPOINTS_OFFSET);
 
   PRInt64 n = 0;
   PRInt64 endTime = 0, startTime = 0;
   const unsigned char* p = aPacket->packet;
 
   PRInt64 timeDenom = LEInt64(aPacket->packet + INDEX_TIME_DENOM_OFFSET);
   if (timeDenom == 0) {
     LOG(PR_LOG_DEBUG, ("Ogg Skeleton Index packet for stream %u has 0 "
                        "timestamp denominator.", serialno));
-    return (mActive = PR_FALSE);
+    return (mActive = false);
   }
 
   // Extract the start time.
   n = LEInt64(p + INDEX_FIRST_NUMER_OFFSET);
   PRInt64 t;
   if (!MulOverflow(n, USECS_PER_S, t)) {
-    return (mActive = PR_FALSE);
+    return (mActive = false);
   } else {
     startTime = t / timeDenom;
   }
 
   // Extract the end time.
   n = LEInt64(p + INDEX_LAST_NUMER_OFFSET);
   if (!MulOverflow(n, USECS_PER_S, t)) {
-    return (mActive = PR_FALSE);
+    return (mActive = false);
   } else {
     endTime = t / timeDenom;
   }
 
   // Check the numKeyPoints value read, ensure we're not going to run out of
   // memory while trying to decode the index packet.
   PRInt64 minPacketSize;
   if (!MulOverflow(numKeyPoints, MIN_KEY_POINT_SIZE, minPacketSize) ||
       !AddOverflow(INDEX_KEYPOINT_OFFSET, minPacketSize, minPacketSize))
   {
-    return (mActive = PR_FALSE);
+    return (mActive = false);
   }
   
   PRInt64 sizeofIndex = aPacket->bytes - INDEX_KEYPOINT_OFFSET;
   PRInt64 maxNumKeyPoints = sizeofIndex / MIN_KEY_POINT_SIZE;
   if (aPacket->bytes < minPacketSize ||
       numKeyPoints > maxNumKeyPoints || 
       numKeyPoints < 0)
   {
@@ -917,17 +918,17 @@ bool nsSkeletonState::DecodeIndex(ogg_pa
     // that the numKeyPoints field is too large or small for the packet to
     // possibly contain as many packets as it claims to, so the numKeyPoints
     // field is possibly malicious. Don't try decoding this index, we may run
     // out of memory.
     LOG(PR_LOG_DEBUG, ("Possibly malicious number of key points reported "
                        "(%lld) in index packet for stream %u.",
                        numKeyPoints,
                        serialno));
-    return (mActive = PR_FALSE);
+    return (mActive = false);
   }
 
   nsAutoPtr<nsKeyFrameIndex> keyPoints(new nsKeyFrameIndex(startTime, endTime));
   
   p = aPacket->packet + INDEX_KEYPOINT_OFFSET;
   const unsigned char* limit = aPacket->packet + aPacket->bytes;
   PRInt64 numKeyPointsRead = 0;
   PRInt64 offset = 0;
@@ -937,41 +938,41 @@ bool nsSkeletonState::DecodeIndex(ogg_pa
   {
     PRInt64 delta = 0;
     p = ReadVariableLengthInt(p, limit, delta);
     if (p == limit ||
         !AddOverflow(offset, delta, offset) ||
         offset > mLength ||
         offset < 0)
     {
-      return (mActive = PR_FALSE);
+      return (mActive = false);
     }
     p = ReadVariableLengthInt(p, limit, delta);
     if (!AddOverflow(time, delta, time) ||
         time > endTime ||
         time < startTime)
     {
-      return (mActive = PR_FALSE);
+      return (mActive = false);
     }
     PRInt64 timeUsecs = 0;
     if (!MulOverflow(time, USECS_PER_S, timeUsecs))
-      return mActive = PR_FALSE;
+      return mActive = false;
     timeUsecs /= timeDenom;
     keyPoints->Add(offset, timeUsecs);
     numKeyPointsRead++;
   }
 
   PRInt32 keyPointsRead = keyPoints->Length();
   if (keyPointsRead > 0) {
     mIndex.Put(serialno, keyPoints.forget());
   }
 
   LOG(PR_LOG_DEBUG, ("Loaded %d keypoints for Skeleton on stream %u",
                      keyPointsRead, serialno));
-  return PR_TRUE;
+  return true;
 }
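
DecodeIndex guards every size and time computation with overflow-checked arithmetic so that a hostile numKeyPoints or timestamp can't force an oversized allocation or a bogus duration. A minimal sketch of the checked-multiply pattern, restricted to non-negative operands; CheckedMulNonNegative is a hypothetical name, the tree's own MulOverflow/AddOverflow helpers are what the code above actually uses:

#include <stdint.h>

// Fail instead of overflowing: returns false if a * b would exceed INT64_MAX.
static bool CheckedMulNonNegative(int64_t a, int64_t b, int64_t* aResult)
{
  if (a < 0 || b < 0)
    return false;                  // negative operands are out of scope here
  if (b != 0 && a > INT64_MAX / b)
    return false;                  // product would not fit in 64 bits
  *aResult = a * b;
  return true;
}

Used the same way as above: compute the minimum packet size as numKeyPoints times the per-keypoint minimum, and treat an overflow as an invalid (possibly malicious) index.
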
 
 nsresult nsSkeletonState::IndexedSeekTargetForTrack(PRUint32 aSerialno,
                                                     PRInt64 aTarget,
                                                     nsKeyPoint& aResult)
 {
   nsKeyFrameIndex* index = nsnull;
   mIndex.Get(aSerialno, &index);
@@ -1077,37 +1078,37 @@ bool nsSkeletonState::DecodeHeader(ogg_p
     mPresentationTime = d == 0 ? 0 : (static_cast<float>(n) / static_cast<float>(d)) * USECS_PER_S;
 
     mVersion = SKELETON_VERSION(verMajor, verMinor);
     if (mVersion < SKELETON_VERSION(4,0) ||
         mVersion >= SKELETON_VERSION(5,0) ||
         aPacket->bytes < SKELETON_4_0_MIN_HEADER_LEN)
     {
      // We only care about parsing Skeleton version 4.0+.
-      mActive = PR_FALSE;
-      return mDoneReadingHeaders = PR_TRUE;
+      mActive = false;
+      return mDoneReadingHeaders = true;
     }
 
     // Extract the segment length.
     mLength = LEInt64(aPacket->packet + SKELETON_FILE_LENGTH_OFFSET);
 
     LOG(PR_LOG_DEBUG, ("Skeleton segment length: %lld", mLength));
 
    // Initialize the serialno-to-index map.
     bool init = mIndex.Init();
     if (!init) {
       NS_WARNING("Failed to initialize Ogg skeleton serialno-to-index map");
-      mActive = PR_FALSE;
-      return mDoneReadingHeaders = PR_TRUE;
+      mActive = false;
+      return mDoneReadingHeaders = true;
     }
-    mActive = PR_TRUE;
+    mActive = true;
   } else if (IsSkeletonIndex(aPacket) && mVersion >= SKELETON_VERSION(4,0)) {
     if (!DecodeIndex(aPacket)) {
       // Failed to parse index, or invalid/hostile index. DecodeIndex() will
       // have deactivated the track.
-      return mDoneReadingHeaders = PR_TRUE;
+      return mDoneReadingHeaders = true;
     }
 
   } else if (aPacket->e_o_s) {
-    mDoneReadingHeaders = PR_TRUE;
+    mDoneReadingHeaders = true;
   }
   return mDoneReadingHeaders;
 }
--- a/content/media/ogg/nsOggCodecState.h
+++ b/content/media/ogg/nsOggCodecState.h
@@ -106,46 +106,46 @@ public:
   virtual ~nsOggCodecState();
   
   // Factory for creating nsCodecStates. Use instead of constructor.
   // aPage should be a beginning-of-stream page.
   static nsOggCodecState* Create(ogg_page* aPage);
   
   virtual CodecType GetType() { return TYPE_UNKNOWN; }
   
-  // Reads a header packet. Returns PR_TRUE when last header has been read.
+  // Reads a header packet. Returns true when last header has been read.
   virtual bool DecodeHeader(ogg_packet* aPacket) {
-    return (mDoneReadingHeaders = PR_TRUE);
+    return (mDoneReadingHeaders = true);
   }
 
   // Returns the end time that a granulepos represents.
   virtual PRInt64 Time(PRInt64 granulepos) { return -1; }
 
   // Returns the start time that a granulepos represents.
   virtual PRInt64 StartTime(PRInt64 granulepos) { return -1; }
 
   // Initializes the codec state.
   virtual bool Init();
 
-  // Returns PR_TRUE when this bitstream has finished reading all its
+  // Returns true when this bitstream has finished reading all its
   // header packets.
   bool DoneReadingHeaders() { return mDoneReadingHeaders; }
 
   // Deactivates the bitstream. Only the primary video and audio bitstreams
   // should be active.
   void Deactivate() {
-    mActive = PR_FALSE;
-    mDoneReadingHeaders = PR_TRUE;
+    mActive = false;
+    mDoneReadingHeaders = true;
     Reset();
   }
 
   // Resets decoding state.
   virtual nsresult Reset();
 
-  // Returns PR_TRUE if the nsOggCodecState thinks this packet is a header
+  // Returns true if the nsOggCodecState thinks this packet is a header
   // packet. Note this does not verify the validity of the header packet,
   // it just guarantees that the packet is marked as a header packet (i.e.
  // it is definitely not a data packet). Do not use this to identify
   // streams, use it to filter header packets from data packets while
   // decoding.
   virtual bool IsHeader(ogg_packet* aPacket) { return false; }
 
   // Returns the next packet in the stream, or nsnull if there are no more
@@ -178,23 +178,23 @@ public:
 
   // Queue of as yet undecoded packets. Packets are guaranteed to have
   // a valid granulepos.
   nsPacketQueue mPackets;
 
   // Is the bitstream active; whether we're decoding and playing this bitstream.
   bool mActive;
   
-  // PR_TRUE when all headers packets have been read.
+  // True when all header packets have been read.
   bool mDoneReadingHeaders;
 
 protected:
   // Constructs a new nsOggCodecState. aActive denotes whether the stream is
   // active. For streams of unsupported or unknown types, aActive should be
-  // PR_FALSE.
+  // false.
   nsOggCodecState(ogg_page* aBosPage, bool aActive);
 
   // Deallocates all packets stored in mUnstamped, and clears the array.
   void ClearUnstamped();
 
   // Extracts packets out of mState until a data packet with a non -1
   // granulepos is encountered, or no more packets are readable. Header
   // packets are pushed into the packet queue immediately, and data packets
@@ -326,17 +326,17 @@ public:
   nsSkeletonState(ogg_page* aBosPage);
   ~nsSkeletonState();
   CodecType GetType() { return TYPE_SKELETON; }
   bool DecodeHeader(ogg_packet* aPacket);
   PRInt64 Time(PRInt64 granulepos) { return -1; }
   bool Init() { return true; }
   bool IsHeader(ogg_packet* aPacket) { return true; }
 
-  // Return PR_TRUE if the given time (in milliseconds) is within
+  // Return true if the given time (in milliseconds) is within
   // the presentation time defined in the skeleton track.
   bool IsPresentable(PRInt64 aTime) { return aTime >= mPresentationTime; }
 
   // Stores the offset of the page on which a keyframe starts,
   // and its presentation time.
   class nsKeyPoint {
   public:
     nsKeyPoint()
@@ -386,17 +386,17 @@ public:
   // Returns the duration of the active tracks in the media, if we have
   // an index. aTracks must be filled with the serialnos of the active tracks.
   // The duration is calculated as the greatest end time of all active tracks,
  // minus the smallest start time of all the active tracks.
   nsresult GetDuration(const nsTArray<PRUint32>& aTracks, PRInt64& aDuration);
 
 private:
 
-  // Decodes an index packet. Returns PR_FALSE on failure.
+  // Decodes an index packet. Returns false on failure.
   bool DecodeIndex(ogg_packet* aPacket);
 
   // Gets the keypoint you must seek to in order to get the keyframe required
   // to render the stream at time aTarget on stream with serial aSerialno.
   nsresult IndexedSeekTargetForTrack(PRUint32 aSerialno,
                                      PRInt64 aTarget,
                                      nsKeyPoint& aResult);
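
The GetDuration comment above states the rule: the duration of the active tracks is the greatest end time minus the smallest start time. A hedged, self-contained sketch of that computation; TrackRange and DurationOfTracks are hypothetical names, and the real code looks the times up in the Skeleton index by serialno:

#include <algorithm>
#include <vector>
#include <stdint.h>

struct TrackRange {
  int64_t startTime;  // usecs
  int64_t endTime;    // usecs
};

// Returns false if there are no active tracks to compute a duration from.
static bool DurationOfTracks(const std::vector<TrackRange>& aTracks,
                             int64_t* aDuration)
{
  if (aTracks.empty())
    return false;
  int64_t start = aTracks[0].startTime;
  int64_t end = aTracks[0].endTime;
  for (size_t i = 1; i < aTracks.size(); ++i) {
    start = std::min(start, aTracks[i].startTime);
    end = std::max(end, aTracks[i].endTime);
  }
  *aDuration = end - start;
  return true;
}
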
 
--- a/content/media/ogg/nsOggReader.cpp
+++ b/content/media/ogg/nsOggReader.cpp
@@ -186,17 +186,17 @@ nsresult nsOggReader::ReadMetadata(nsVid
 
     int serial = ogg_page_serialno(&page);
     nsOggCodecState* codecState = 0;
 
     if (!ogg_page_bos(&page)) {
      // We've encountered a non-Beginning Of Stream page. No more BOS pages
       // can follow in this Ogg segment, so there will be no other bitstreams
       // in the Ogg (unless it's invalid).
-      readAllBOS = PR_TRUE;
+      readAllBOS = true;
     } else if (!mCodecStates.Get(serial, nsnull)) {
       // We've not encountered a stream with this serial number before. Create
       // an nsOggCodecState to demux it, and map that to the nsOggCodecState
       // in mCodecStates.
       codecState = nsOggCodecState::Create(&page);
       DebugOnly<bool> r = mCodecStates.Put(serial, codecState);
       NS_ASSERTION(r, "Failed to insert into mCodecStates");
       bitstreams.AppendElement(codecState);
@@ -257,32 +257,32 @@ nsresult nsOggReader::ReadMetadata(nsVid
     // Apply the aspect ratio to produce the intrinsic display size we report
     // to the element.
     ScaleDisplayByAspectRatio(displaySize, mTheoraState->mPixelAspectRatio);
 
     nsIntSize frameSize(mTheoraState->mInfo.frame_width,
                         mTheoraState->mInfo.frame_height);
     if (nsVideoInfo::ValidateVideoRegion(frameSize, picture, displaySize)) {
       // Video track's frame sizes will not overflow. Activate the video track.
-      mInfo.mHasVideo = PR_TRUE;
+      mInfo.mHasVideo = true;
       mInfo.mDisplay = displaySize;
       mPicture = picture;
 
       mDecoder->SetVideoData(gfxIntSize(displaySize.width, displaySize.height),
                              nsnull,
                              TimeStamp::Now());
 
       // Copy Theora info data for time computations on other threads.
       memcpy(&mTheoraInfo, &mTheoraState->mInfo, sizeof(mTheoraInfo));
       mTheoraSerial = mTheoraState->mSerial;
     }
   }
 
   if (mVorbisState && ReadHeaders(mVorbisState)) {
-    mInfo.mHasAudio = PR_TRUE;
+    mInfo.mHasAudio = true;
     mInfo.mAudioRate = mVorbisState->mInfo.rate;
     mInfo.mAudioChannels = mVorbisState->mInfo.channels;
     // Copy Vorbis info data for time computations on other threads.
     memcpy(&mVorbisInfo, &mVorbisState->mInfo, sizeof(mVorbisInfo));
     mVorbisInfo.codec_setup = NULL;
     mVorbisSerial = mVorbisState->mSerial;
   } else {
     memset(&mVorbisInfo, 0, sizeof(mVorbisInfo));
@@ -394,32 +394,32 @@ bool nsOggReader::DecodeAudioData()
   do {
     if (packet) {
       nsOggCodecState::ReleasePacket(packet);
     }
     packet = NextOggPacket(mVorbisState);
   } while (packet && mVorbisState->IsHeader(packet));
   if (!packet) {
     mAudioQueue.Finish();
-    return PR_FALSE;
+    return false;
   }
 
   NS_ASSERTION(packet && packet->granulepos != -1,
     "Must have packet with known granulepos");
   nsAutoReleasePacket autoRelease(packet);
   DecodeVorbis(packet);
   if (packet->e_o_s) {
     // We've encountered an end of bitstream packet, or we've hit the end of
     // file while trying to decode, so inform the audio queue that there'll
     // be no more samples.
     mAudioQueue.Finish();
-    return PR_FALSE;
+    return false;
   }
 
-  return PR_TRUE;
+  return true;
 }
 
 nsresult nsOggReader::DecodeTheora(ogg_packet* aPacket, PRInt64 aTimeThreshold)
 {
   NS_ASSERTION(aPacket->granulepos >= TheoraVersion(&mTheoraState->mInfo,3,2,1),
     "Packets must have valid granulepos and packetno");
 
   int ret = th_decode_packetin(mTheoraState->mCtx, aPacket, 0);
@@ -497,44 +497,44 @@ bool nsOggReader::DecodeVideoFrame(bool 
   do {
     if (packet) {
       nsOggCodecState::ReleasePacket(packet);
     }
     packet = NextOggPacket(mTheoraState);
   } while (packet && mTheoraState->IsHeader(packet));
   if (!packet) {
     mVideoQueue.Finish();
-    return PR_FALSE;
+    return false;
   }
   nsAutoReleasePacket autoRelease(packet);
 
   parsed++;
   NS_ASSERTION(packet && packet->granulepos != -1,
                 "Must know first packet's granulepos");
   bool eos = packet->e_o_s;
   PRInt64 frameEndTime = mTheoraState->Time(packet->granulepos);
   if (!aKeyframeSkip ||
      (th_packet_iskeyframe(packet) && frameEndTime >= aTimeThreshold))
   {
-    aKeyframeSkip = PR_FALSE;
+    aKeyframeSkip = false;
     nsresult res = DecodeTheora(packet, aTimeThreshold);
     decoded++;
     if (NS_FAILED(res)) {
-      return PR_FALSE;
+      return false;
     }
   }
 
   if (eos) {
     // We've encountered an end of bitstream packet. Inform the queue that
     // there will be no more frames.
     mVideoQueue.Finish();
-    return PR_FALSE;
+    return false;
   }
 
-  return PR_TRUE;
+  return true;
 }
 
 PRInt64 nsOggReader::ReadOggPage(ogg_page* aPage)
 {
   NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
 
   int ret = 0;
   while((ret = ogg_sync_pageseek(&mOggState, aPage)) <= 0) {
@@ -638,17 +638,17 @@ struct nsAutoOggSyncState {
 PRInt64 nsOggReader::RangeEndTime(PRInt64 aEndOffset)
 {
   NS_ASSERTION(mDecoder->OnStateMachineThread() || mDecoder->OnDecodeThread(),
                "Should be on state machine or decode thread.");
 
   nsMediaStream* stream = mDecoder->GetCurrentStream();
   NS_ENSURE_TRUE(stream != nsnull, -1);
   PRInt64 position = stream->Tell();
-  PRInt64 endTime = RangeEndTime(0, aEndOffset, PR_FALSE);
+  PRInt64 endTime = RangeEndTime(0, aEndOffset, false);
   nsresult res = stream->Seek(nsISeekableStream::NS_SEEK_SET, position);
   NS_ENSURE_SUCCESS(res, -1);
   return endTime;
 }
 
 PRInt64 nsOggReader::RangeEndTime(PRInt64 aStartOffset,
                                   PRInt64 aEndOffset,
                                   bool aCachedDataOnly)
@@ -664,28 +664,28 @@ PRInt64 nsOggReader::RangeEndTime(PRInt6
   // haven't found an end time yet, or return the last end time found.
   const int step = 5000;
   PRInt64 readStartOffset = aEndOffset;
   PRInt64 readHead = aEndOffset;
   PRInt64 endTime = -1;
   PRUint32 checksumAfterSeek = 0;
   PRUint32 prevChecksumAfterSeek = 0;
   bool mustBackOff = false;
-  while (PR_TRUE) {
+  while (true) {
     ogg_page page;    
     int ret = ogg_sync_pageseek(&sync.mState, &page);
     if (ret == 0) {
       // We need more data if we've not encountered a page we've seen before,
       // or we've read to the end of file.
       if (mustBackOff || readHead == aEndOffset || readHead == aStartOffset) {
         if (endTime != -1 || readStartOffset == 0) {
           // We have encountered a page before, or we're at the end of file.
           break;
         }
-        mustBackOff = PR_FALSE;
+        mustBackOff = false;
         prevChecksumAfterSeek = checksumAfterSeek;
         checksumAfterSeek = 0;
         ogg_sync_reset(&sync.mState);
         readStartOffset = NS_MAX(static_cast<PRInt64>(0), readStartOffset - step);
         readHead = NS_MAX(aStartOffset, readStartOffset);
       }
 
       PRInt64 limit = NS_MIN(static_cast<PRInt64>(PR_UINT32_MAX),
@@ -734,17 +734,17 @@ PRInt64 nsOggReader::RangeEndTime(PRInt6
       // this one, so we'll know to back off again.
       checksumAfterSeek = checksum;
     }
     if (checksum == prevChecksumAfterSeek) {
       // This page has the same checksum as the first page we encountered
       // after the last backoff/seek. Since we've already scanned after this
       // page and failed to find an end time, we may as well backoff again and
       // try to find an end time from an earlier page.
-      mustBackOff = PR_TRUE;
+      mustBackOff = true;
       continue;
     }
 
     PRInt64 granulepos = ogg_page_granulepos(&page);
     int serial = ogg_page_serialno(&page);
 
     nsOggCodecState* codecState = nsnull;
     mCodecStates.Get(serial, &codecState);
@@ -890,17 +890,17 @@ nsOggReader::IndexedSeekResult nsOggRead
   NS_ENSURE_SUCCESS(res, SEEK_FATAL_ERROR);
 
   // Check that the page the index thinks is exactly here is actually exactly
   // here. If not, the index is invalid.
   ogg_page page;
   int skippedBytes = 0;
   PageSyncResult syncres = PageSync(stream,
                                     &mOggState,
-                                    PR_FALSE,
+                                    false,
                                     mPageOffset,
                                     stream->GetLength(),
                                     &page,
                                     skippedBytes);
   NS_ENSURE_TRUE(syncres != PAGE_SYNC_ERROR, SEEK_FATAL_ERROR);
   if (syncres != PAGE_SYNC_OK || skippedBytes != 0) {
     LOG(PR_LOG_DEBUG, ("Indexed-seek failure: Ogg Skeleton Index is invalid "
                        "or sync error after seek"));
@@ -965,17 +965,17 @@ nsresult nsOggReader::SeekInBufferedRang
     PRInt64 keyframeGranulepos = (video->mTimecode >> shift) << shift;
     PRInt64 keyframeTime = mTheoraState->StartTime(keyframeGranulepos);
     SEEK_LOG(PR_LOG_DEBUG, ("Keyframe for %lld is at %lld, seeking back to it",
                             video->mTime, keyframeTime));
     SeekRange k = SelectSeekRange(aRanges,
                                   keyframeTime,
                                   aStartTime,
                                   aEndTime,
-                                  PR_FALSE);
+                                  false);
     res = SeekBisection(keyframeTime, k, SEEK_FUZZ_USECS);
   }
   return res;
 }
 
 nsresult nsOggReader::SeekInUnbuffered(PRInt64 aTarget,
                                        PRInt64 aStartTime,
                                        PRInt64 aEndTime,
@@ -997,17 +997,17 @@ nsresult nsOggReader::SeekInUnbuffered(P
   // keyframe).
   PRInt64 keyframeOffsetMs = 0;
   if (HasVideo() && mTheoraState) {
     keyframeOffsetMs = mTheoraState->MaxKeyframeOffset();
   }
   PRInt64 seekTarget = NS_MAX(aStartTime, aTarget - keyframeOffsetMs);
   // Minimize the bisection search space using the known timestamps from the
   // buffered ranges.
-  SeekRange k = SelectSeekRange(aRanges, seekTarget, aStartTime, aEndTime, PR_FALSE);
+  SeekRange k = SelectSeekRange(aRanges, seekTarget, aStartTime, aEndTime, false);
   return SeekBisection(seekTarget, k, SEEK_FUZZ_USECS);
 }
 
 nsresult nsOggReader::Seek(PRInt64 aTarget,
                            PRInt64 aStartTime,
                            PRInt64 aEndTime,
                            PRInt64 aCurrentTime)
 {
@@ -1039,17 +1039,17 @@ nsresult nsOggReader::Seek(PRInt64 aTarg
       // No index or other non-fatal index-related failure. Try to seek
       // using a bisection search. Determine the already downloaded data
       // in the media cache, so we can try to seek in the cached data first.
       nsAutoTArray<SeekRange, 16> ranges;
       res = GetSeekRanges(ranges);
       NS_ENSURE_SUCCESS(res,res);
 
       // Figure out if the seek target lies in a buffered range.
-      SeekRange r = SelectSeekRange(ranges, aTarget, aStartTime, aEndTime, PR_TRUE);
+      SeekRange r = SelectSeekRange(ranges, aTarget, aStartTime, aEndTime, true);
 
       if (!r.IsNull()) {
         // We know the buffered range in which the seek target lies, do a
         // bisection search in that buffered range.
         res = SeekInBufferedRange(aTarget, aStartTime, aEndTime, ranges, r);
         NS_ENSURE_SUCCESS(res,res);
       } else {
         // The target doesn't lie in a buffered range. Perform a bisection
@@ -1166,32 +1166,32 @@ nsresult nsOggReader::SeekBisection(PRIn
   int backsteps = 0;
   const int maxBackStep = 10;
   NS_ASSERTION(static_cast<PRUint64>(PAGE_STEP) * pow(2.0, maxBackStep) < PR_INT32_MAX,
                "Backstep calculation must not overflow");
 
   // Seek via bisection search. Loop until we find the offset where the page
   // before the offset is before the seek target, and the page after the offset
   // is after the seek target.
-  while (PR_TRUE) {
+  while (true) {
     ogg_int64_t duration = 0;
     double target = 0;
     ogg_int64_t interval = 0;
     ogg_int64_t guess = 0;
     ogg_page page;
     int skippedBytes = 0;
     ogg_int64_t pageOffset = 0;
     ogg_int64_t pageLength = 0;
     ogg_int64_t granuleTime = -1;
     bool mustBackoff = false;
 
     // Guess where we should bisect to, based on the bit rate and the time
     // remaining in the interval. Loop until we can determine the time at
     // the guess offset.
-    while (PR_TRUE) {
+    while (true) {
   
       // Discard any previously buffered packets/pages.
       if (NS_FAILED(ResetDecode())) {
         return NS_ERROR_FAILURE;
       }
 
       interval = endOffset - startOffset - startLength;
       if (interval == 0) {
@@ -1224,18 +1224,18 @@ nsresult nsOggReader::SeekBisection(PRIn
           // condition, and break out of the bisection loop. We'll begin
           // decoding from the start of the seek range.
           interval = 0;
           break;
         }
 
         backsteps = NS_MIN(backsteps + 1, maxBackStep);
         // We reset mustBackoff. If we still need to backoff further, it will
-        // be set to PR_TRUE again.
-        mustBackoff = PR_FALSE;
+        // be set to true again.
+        mustBackoff = false;
       } else {
         backsteps = 0;
       }
       guess = NS_MAX(guess, startOffset + startLength);
 
       SEEK_LOG(PR_LOG_DEBUG, ("Seek loop start[o=%lld..%lld t=%lld] "
                               "end[o=%lld t=%lld] "
                               "interval=%lld target=%lf guess=%lld",
@@ -1249,34 +1249,34 @@ nsresult nsOggReader::SeekBisection(PRIn
 
       hops++;
     
       // Locate the next page after our seek guess, and then figure out the
       // granule time of the audio and video bitstreams there. We can then
       // make a bisection decision based on our location in the media.
       PageSyncResult res = PageSync(stream,
                                     &mOggState,
-                                    PR_FALSE,
+                                    false,
                                     guess,
                                     endOffset,
                                     &page,
                                     skippedBytes);
       NS_ENSURE_TRUE(res != PAGE_SYNC_ERROR, NS_ERROR_FAILURE);
 
       // We've located a page of length |ret| at |guess + skippedBytes|.
       // Remember where the page is located.
       pageOffset = guess + skippedBytes;
       pageLength = page.header_len + page.body_len;
       mPageOffset = pageOffset + pageLength;
 
       if (res == PAGE_SYNC_END_OF_RANGE) {
         // Our guess was too close to the end, we've ended up reading the end
         // page. Backoff exponentially from the end point, in case the last
         // page/frame/sample is huge.
-        mustBackoff = PR_TRUE;
+        mustBackoff = true;
         SEEK_LOG(PR_LOG_DEBUG, ("Hit the end of range, backing off"));
         continue;
       }
 
       // Read pages until we can determine the granule time of the audio and 
       // video bitstream.
       ogg_int64_t audioTime = -1;
       ogg_int64_t videoTime = -1;
@@ -1331,17 +1331,17 @@ nsresult nsOggReader::SeekBisection(PRIn
           // terminate inside the interval, so we terminate the seek at the
           // start of the interval.
           interval = 0;
           break;
         }
 
         // We should backoff; cause the guess to back off from the end, so
         // that we've got more room to capture.
-        mustBackoff = PR_TRUE;
+        mustBackoff = true;
         continue;
       }
 
       // We've found appropriate time stamps here. Proceed to bisect
       // the search space.
       granuleTime = NS_MAX(audioTime, videoTime);
       NS_ASSERTION(granuleTime > 0, "Must get a granuletime");
       break;
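
The comments above describe the guess step of the bisection: estimate where the seek target falls inside the current byte interval from the known times at its endpoints (effectively a bitrate estimate), then clamp, and back off when the guess lands too close to the end of the interval. The exact interpolation formula is not shown in this hunk; the following is only a plausible sketch of such a guess, with hypothetical names:

#include <stdint.h>
#include <algorithm>

// Linearly interpolate the target time between the interval's known
// (offset, time) endpoints, then clamp the result into the interval.
static int64_t GuessOffset(int64_t aStartOffset, int64_t aStartTime,
                           int64_t aEndOffset, int64_t aEndTime,
                           int64_t aTargetTime)
{
  double fraction = 0.5;   // degenerate time range: fall back to plain bisection
  if (aEndTime > aStartTime) {
    fraction = static_cast<double>(aTargetTime - aStartTime) /
               static_cast<double>(aEndTime - aStartTime);
  }
  int64_t guess = aStartOffset + static_cast<int64_t>(
      fraction * static_cast<double>(aEndOffset - aStartOffset));
  int64_t hi = std::max(aStartOffset, aEndOffset - 1);
  return std::min(std::max(guess, aStartOffset), hi);
}

The real SeekBisection additionally backs off exponentially near the interval end and re-guesses when it cannot read a timestamp at the guessed offset.
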
@@ -1433,17 +1433,17 @@ nsresult nsOggReader::GetBuffered(nsTime
     // granulepos which we can convert into a timestamp to use as the time of
     // the start of the buffered range.
     ogg_sync_reset(&sync.mState);
     while (startTime == -1) {
       ogg_page page;
       PRInt32 discard;
       PageSyncResult res = PageSync(stream,
                                     &sync.mState,
-                                    PR_TRUE,
+                                    true,
                                     startOffset,
                                     endOffset,
                                     &page,
                                     discard);
       if (res == PAGE_SYNC_ERROR) {
         return NS_ERROR_FAILURE;
       } else if (res == PAGE_SYNC_END_OF_RANGE) {
         // Hit the end of range without reading a page, give up trying to
@@ -1479,30 +1479,30 @@ nsresult nsOggReader::GetBuffered(nsTime
         // ogg), return an error.
         return PAGE_SYNC_ERROR;
       }
     }
 
     if (startTime != -1) {
       // We were able to find a start time for that range, see if we can
       // find an end time.
-      PRInt64 endTime = RangeEndTime(startOffset, endOffset, PR_TRUE);
+      PRInt64 endTime = RangeEndTime(startOffset, endOffset, true);
       if (endTime != -1) {
         aBuffered->Add((startTime - aStartTime) / static_cast<double>(USECS_PER_S),
                        (endTime - aStartTime) / static_cast<double>(USECS_PER_S));
       }
     }
   }
 
   return NS_OK;
 }
 
 bool nsOggReader::IsKnownStream(PRUint32 aSerial)
 {
   for (PRUint32 i = 0; i < mKnownStreams.Length(); i++) {
     PRUint32 serial = mKnownStreams[i];
     if (serial == aSerial) {
-      return PR_TRUE;
+      return true;
     }
   }
 
-  return PR_FALSE;
+  return false;
 }
--- a/content/media/ogg/nsOggReader.h
+++ b/content/media/ogg/nsOggReader.h
@@ -161,17 +161,17 @@ private:
   // after playback finished at aEndOffset.
   PRInt64 RangeEndTime(PRInt64 aEndOffset);
 
   // Get the end time of aEndOffset, without reading before aStartOffset.
   // This is the playback position we'd reach after playback finished at
   // aEndOffset. If bool aCachedDataOnly is true, then we'll only read
  // from data which is cached in the media cache, otherwise we'll do
   // regular blocking reads from the media stream. If bool aCachedDataOnly
-  // is PR_TRUE, this can safely be called on the main thread, otherwise it
+  // is true, this can safely be called on the main thread, otherwise it
   // must be called on the state machine thread.
   PRInt64 RangeEndTime(PRInt64 aStartOffset,
                        PRInt64 aEndOffset,
                        bool aCachedDataOnly);
 
   // Get the start time of the range beginning at aOffset. This is the start
  // time of the first frame and/or audio sample we'd be able to play if we
   // started playback at aOffset.
@@ -194,19 +194,19 @@ private:
   // Fills aRanges with SeekRanges denoting the sections of the media which
   // have been downloaded and are stored in the media cache. The reader
  // monitor must be held with exactly one lock count. The nsMediaStream
   // must be pinned while calling this.
   nsresult GetSeekRanges(nsTArray<SeekRange>& aRanges);
 
   // Returns the range in which you should perform a seek bisection if
   // you wish to seek to aTarget usecs, given the known (buffered) byte ranges
-  // in aRanges. If aExact is PR_TRUE, we only return an exact copy of a
+  // in aRanges. If aExact is true, we only return an exact copy of a
   // range in which aTarget lies, or a null range if aTarget isn't contained
-  // in any of the (buffered) ranges. Otherwise, when aExact is PR_FALSE,
+  // in any of the (buffered) ranges. Otherwise, when aExact is false,
   // we'll construct the smallest possible range we can, based on the times
   // and byte offsets known in aRanges. We can then use this to minimize our
   // bisection's search space when the target isn't in a known buffered range.
   SeekRange SelectSeekRange(const nsTArray<SeekRange>& aRanges,
                             PRInt64 aTarget,
                             PRInt64 aStartTime,
                             PRInt64 aEndTime,
                             bool aExact);
@@ -224,17 +224,17 @@ private:
   nsresult DecodeTheora(ogg_packet* aPacket, PRInt64 aTimeThreshold);
 
   // Read a page of data from the Ogg file. Returns the offset of the start
   // of the page, or -1 if the page read failed.
   PRInt64 ReadOggPage(ogg_page* aPage);
 
   // Reads and decodes header packets for aState, until either header decode
   // fails, or is complete. Initializes the codec state before returning.
-  // Returns PR_TRUE if reading headers and initializtion of the stream
+  // Returns true if reading headers and initialization of the stream
   // succeeds.
   bool ReadHeaders(nsOggCodecState* aState);
 
  // Returns the next Ogg packet for a bitstream/codec state. Returns a
   // pointer to an ogg_packet on success, or nsnull if the read failed.
   // The caller is responsible for deleting the packet and its |packet| field.
   ogg_packet* NextOggPacket(nsOggCodecState* aCodecState);
 
--- a/content/media/raw/nsRawDecoder.cpp
+++ b/content/media/raw/nsRawDecoder.cpp
@@ -35,10 +35,10 @@
  * ***** END LICENSE BLOCK ***** */
 
 #include "nsBuiltinDecoderStateMachine.h"
 #include "nsRawReader.h"
 #include "nsRawDecoder.h"
 
 nsDecoderStateMachine* nsRawDecoder::CreateStateMachine()
 {
-  return new nsBuiltinDecoderStateMachine(this, new nsRawReader(this), PR_TRUE);
+  return new nsBuiltinDecoderStateMachine(this, new nsRawReader(this), true);
 }
--- a/content/media/raw/nsRawReader.cpp
+++ b/content/media/raw/nsRawReader.cpp
@@ -101,18 +101,18 @@ nsresult nsRawReader::ReadMetadata(nsVid
   ScaleDisplayByAspectRatio(display, pixelAspectRatio);
   mPicture = nsIntRect(0, 0, mMetadata.frameWidth, mMetadata.frameHeight);
   nsIntSize frameSize(mMetadata.frameWidth, mMetadata.frameHeight);
   if (!nsVideoInfo::ValidateVideoRegion(frameSize, mPicture, display)) {
     // Video track's frame sizes will overflow. Fail.
     return NS_ERROR_FAILURE;
   }
 
-  mInfo.mHasVideo = PR_TRUE;
-  mInfo.mHasAudio = PR_FALSE;
+  mInfo.mHasVideo = true;
+  mInfo.mHasAudio = false;
   mInfo.mDisplay = display;
 
   mFrameRate = static_cast<float>(mMetadata.framerateNumerator) /
                mMetadata.framerateDenominator;
 
   // Make some sanity checks
   if (mFrameRate > 45 ||
       mFrameRate == 0 ||
@@ -140,76 +140,76 @@ nsresult nsRawReader::ReadMetadata(nsVid
 
   return NS_OK;
 }
 
  bool nsRawReader::DecodeAudioData()
 {
   NS_ASSERTION(mDecoder->OnStateMachineThread() || mDecoder->OnDecodeThread(),
                "Should be on state machine thread or decode thread.");
-  return PR_FALSE;
+  return false;
 }
 
 // Helper method that either reads until it gets aLength bytes 
-// or returns PR_FALSE
+// or returns false
 bool nsRawReader::ReadFromStream(nsMediaStream *aStream, PRUint8* aBuf,
                                    PRUint32 aLength)
 {
   while (aLength > 0) {
     PRUint32 bytesRead = 0;
     nsresult rv;
 
     rv = aStream->Read(reinterpret_cast<char*>(aBuf), aLength, &bytesRead);
-    NS_ENSURE_SUCCESS(rv, PR_FALSE);
+    NS_ENSURE_SUCCESS(rv, false);
 
     if (bytesRead == 0) {
-      return PR_FALSE;
+      return false;
     }
 
     aLength -= bytesRead;
     aBuf += bytesRead;
   }
 
-  return PR_TRUE;
+  return true;
 }
 
 bool nsRawReader::DecodeVideoFrame(bool &aKeyframeSkip,
                                      PRInt64 aTimeThreshold)
 {
   NS_ASSERTION(mDecoder->OnDecodeThread(),
                "Should be on decode thread.");
 
   // Record number of frames decoded and parsed. Automatically update the
   // stats counters using the AutoNotifyDecoded stack-based class.
   PRUint32 parsed = 0, decoded = 0;
   nsMediaDecoder::AutoNotifyDecoded autoNotify(mDecoder, parsed, decoded);
 
   if (!mFrameSize)
-    return PR_FALSE; // Metadata read failed.  We should refuse to play.
+    return false; // Metadata read failed.  We should refuse to play.
 
   PRInt64 currentFrameTime = USECS_PER_S * mCurrentFrame / mFrameRate;
   PRUint32 length = mFrameSize - sizeof(nsRawPacketHeader);
 
   nsAutoArrayPtr<PRUint8> buffer(new PRUint8[length]);
   nsMediaStream* stream = mDecoder->GetCurrentStream();
   NS_ASSERTION(stream, "Decoder has no media stream");
 
   // We're always decoding one frame when called
   while(true) {
     nsRawPacketHeader header;
 
     // Read in a packet header and validate
     if (!(ReadFromStream(stream, reinterpret_cast<PRUint8*>(&header),
                          sizeof(header))) ||
         !(header.packetID == 0xFF && header.codecID == RAW_ID /* "YUV" */)) {
-      return PR_FALSE;
+      return false;
     }
 
     if (!ReadFromStream(stream, buffer, length)) {
-      return PR_FALSE;
+      return false;
     }
 
     parsed++;
 
     if (currentFrameTime >= aTimeThreshold)
       break;
 
     mCurrentFrame++;
@@ -239,24 +239,24 @@ bool nsRawReader::DecodeVideoFrame(bool 
                                    -1,
                                    currentFrameTime,
                                    currentFrameTime + (USECS_PER_S / mFrameRate),
                                    b,
                                    1, // In raw video every frame is a keyframe
                                    -1,
                                    mPicture);
   if (!v)
-    return PR_FALSE;
+    return false;
 
   mVideoQueue.Push(v);
   mCurrentFrame++;
   decoded++;
   currentFrameTime += USECS_PER_S / mFrameRate;
 
-  return PR_TRUE;
+  return true;
 }
 
 nsresult nsRawReader::Seek(PRInt64 aTime, PRInt64 aStartTime, PRInt64 aEndTime, PRInt64 aCurrentTime)
 {
   NS_ASSERTION(mDecoder->OnDecodeThread(),
                "Should be on decode thread.");
 
   nsMediaStream *stream = mDecoder->GetCurrentStream();
--- a/content/media/raw/nsRawReader.h
+++ b/content/media/raw/nsRawReader.h
@@ -53,22 +53,22 @@ public:
   virtual nsresult ResetDecode();
   virtual bool DecodeAudioData();
 
   virtual bool DecodeVideoFrame(bool &aKeyframeSkip,
                                   PRInt64 aTimeThreshold);
 
   virtual bool HasAudio()
   {
-    return PR_FALSE;
+    return false;
   }
 
   virtual bool HasVideo()
   {
-    return PR_TRUE;
+    return true;
   }
 
   virtual nsresult ReadMetadata(nsVideoInfo* aInfo);
   virtual nsresult Seek(PRInt64 aTime, PRInt64 aStartTime, PRInt64 aEndTime, PRInt64 aCurrentTime);
   virtual nsresult GetBuffered(nsTimeRanges* aBuffered, PRInt64 aStartTime);
 
 private:
   bool ReadFromStream(nsMediaStream *aStream, PRUint8 *aBuf,
--- a/content/media/wave/nsWaveReader.cpp
+++ b/content/media/wave/nsWaveReader.cpp
@@ -154,18 +154,18 @@ nsresult nsWaveReader::ReadMetadata(nsVi
 {
   NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
 
   bool loaded = LoadRIFFChunk() && LoadFormatChunk() && FindDataOffset();
   if (!loaded) {
     return NS_ERROR_FAILURE;
   }
 
-  mInfo.mHasAudio = PR_TRUE;
-  mInfo.mHasVideo = PR_FALSE;
+  mInfo.mHasAudio = true;
+  mInfo.mHasVideo = false;
   mInfo.mAudioRate = mSampleRate;
   mInfo.mAudioChannels = mChannels;
 
   *aInfo = mInfo;
 
   ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
 
   mDecoder->GetStateMachine()->SetDuration(
@@ -191,17 +191,17 @@ bool nsWaveReader::DecodeAudioData()
   const size_t bufferSize = static_cast<size_t>(frames * mChannels);
   nsAutoArrayPtr<AudioDataValue> sampleBuffer(new AudioDataValue[bufferSize]);
 
   PR_STATIC_ASSERT(PRUint64(BLOCK_SIZE) < UINT_MAX / sizeof(char));
   nsAutoArrayPtr<char> dataBuffer(new char[static_cast<size_t>(readSize)]);
 
   if (!ReadAll(dataBuffer, readSize)) {
     mAudioQueue.Finish();
-    return PR_FALSE;
+    return false;
   }
 
   // convert data to samples
   const char* d = dataBuffer.get();
   AudioDataValue* s = sampleBuffer.get();
   for (int i = 0; i < frames; ++i) {
     for (unsigned int j = 0; j < mChannels; ++j) {
       if (mSampleFormat == nsAudioStream::FORMAT_U8) {
@@ -231,25 +231,25 @@ bool nsWaveReader::DecodeAudioData()
 
   mAudioQueue.Push(new AudioData(pos,
                                  static_cast<PRInt64>(posTime * USECS_PER_S),
                                  static_cast<PRInt64>(readSizeTime * USECS_PER_S),
                                  static_cast<PRInt32>(frames),
                                  sampleBuffer.forget(),
                                  mChannels));
 
-  return PR_TRUE;
+  return true;
 }
 
 bool nsWaveReader::DecodeVideoFrame(bool &aKeyframeSkip,
                                       PRInt64 aTimeThreshold)
 {
   NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
 
-  return PR_FALSE;
+  return false;
 }
 
 nsresult nsWaveReader::Seek(PRInt64 aTarget, PRInt64 aStartTime, PRInt64 aEndTime, PRInt64 aCurrentTime)
 {
   NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
   LOG(PR_LOG_DEBUG, ("%p About to seek to %lld", mDecoder, aTarget));
   if (NS_FAILED(ResetDecode())) {
     return NS_ERROR_FAILURE;
@@ -293,94 +293,94 @@ nsWaveReader::ReadAll(char* aBuf, PRInt6
   PRUint32 got = 0;
   if (aBytesRead) {
     *aBytesRead = 0;
   }
   do {
     PRUint32 read = 0;
     if (NS_FAILED(mDecoder->GetCurrentStream()->Read(aBuf + got, PRUint32(aSize - got), &read))) {
       NS_WARNING("Stream read failed");
-      return PR_FALSE;
+      return false;
     }
     if (read == 0) {
-      return PR_FALSE;
+      return false;
     }
     mDecoder->NotifyBytesConsumed(read);
     got += read;
     if (aBytesRead) {
       *aBytesRead = got;
     }
   } while (got != aSize);
-  return PR_TRUE;
+  return true;
 }
 
 bool
 nsWaveReader::LoadRIFFChunk()
 {
   char riffHeader[RIFF_INITIAL_SIZE];
   const char* p = riffHeader;
 
   NS_ABORT_IF_FALSE(mDecoder->GetCurrentStream()->Tell() == 0,
                     "LoadRIFFChunk called when stream in invalid state");
 
   if (!ReadAll(riffHeader, sizeof(riffHeader))) {
-    return PR_FALSE;
+    return false;
   }
 
   PR_STATIC_ASSERT(sizeof(PRUint32) * 2 <= RIFF_INITIAL_SIZE);
   if (ReadUint32BE(&p) != RIFF_CHUNK_MAGIC) {
     NS_WARNING("Stream data not in RIFF format");
-    return PR_FALSE;
+    return false;
   }
 
   // Skip over RIFF size field.
   p += 4;
 
   if (ReadUint32BE(&p) != WAVE_CHUNK_MAGIC) {
     NS_WARNING("Expected WAVE chunk");
-    return PR_FALSE;
+    return false;
   }
 
-  return PR_TRUE;
+  return true;
 }
 
 bool
 nsWaveReader::ScanForwardUntil(PRUint32 aWantedChunk, PRUint32* aChunkSize)
 {
   NS_ABORT_IF_FALSE(aChunkSize, "Require aChunkSize argument");
   *aChunkSize = 0;
 
   for (;;) {
     static const unsigned int CHUNK_HEADER_SIZE = 8;
     char chunkHeader[CHUNK_HEADER_SIZE];
     const char* p = chunkHeader;
 
     if (!ReadAll(chunkHeader, sizeof(chunkHeader))) {
-      return PR_FALSE;
+      return false;
     }
 
     PR_STATIC_ASSERT(sizeof(PRUint32) * 2 <= CHUNK_HEADER_SIZE);
     PRUint32 magic = ReadUint32BE(&p);
     PRUint32 chunkSize = ReadUint32LE(&p);
 
     if (magic == aWantedChunk) {
       *aChunkSize = chunkSize;
-      return PR_TRUE;
+      return true;
     }
 
     // RIFF chunks are two-byte aligned, so round up if necessary.
     chunkSize += chunkSize % 2;
 
     static const unsigned int MAX_CHUNK_SIZE = 1 << 16;
     PR_STATIC_ASSERT(MAX_CHUNK_SIZE < UINT_MAX / sizeof(char));
     nsAutoArrayPtr<char> chunk(new char[MAX_CHUNK_SIZE]);
     while (chunkSize > 0) {
       PRUint32 size = NS_MIN(chunkSize, MAX_CHUNK_SIZE);
       if (!ReadAll(chunk.get(), size)) {
-        return PR_FALSE;
+        return false;
       }
       chunkSize -= size;
     }
   }
 }
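
ScanForwardUntil walks the RIFF chunk list: each chunk begins with an 8-byte header, a four-character code (read here with ReadUint32BE and compared as a big-endian 32-bit magic) followed by a little-endian 32-bit payload size, and payloads are padded to two-byte alignment. A hedged sketch of decoding one such header from a raw buffer; RiffChunkHeader and ParseRiffChunkHeader are illustrative names only:

#include <stdint.h>
#include <stddef.h>

struct RiffChunkHeader {
  uint32_t magic;   // FourCC such as 'fmt ' or 'data', as a big-endian value
  uint32_t size;    // payload size in bytes, excluding this 8-byte header
};

static bool ParseRiffChunkHeader(const uint8_t* p, size_t aLen,
                                 RiffChunkHeader* aOut)
{
  if (aLen < 8)
    return false;
  aOut->magic = (uint32_t(p[0]) << 24) | (uint32_t(p[1]) << 16) |
                (uint32_t(p[2]) << 8)  |  uint32_t(p[3]);
  aOut->size  =  uint32_t(p[4])        | (uint32_t(p[5]) << 8) |
                (uint32_t(p[6]) << 16) | (uint32_t(p[7]) << 24);
  return true;
}

Because payloads are word aligned, a reader skips size + (size % 2) bytes to land on the next chunk header, exactly as the loop above does.
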
 
 bool
 nsWaveReader::LoadFormatChunk()
@@ -391,32 +391,32 @@ nsWaveReader::LoadFormatChunk()
 
   // RIFF chunks are always word (two byte) aligned.
   NS_ABORT_IF_FALSE(mDecoder->GetCurrentStream()->Tell() % 2 == 0,
                     "LoadFormatChunk called with unaligned stream");
 
   // The "format" chunk may not directly follow the "riff" chunk, so skip
   // over any intermediate chunks.
   if (!ScanForwardUntil(FRMT_CHUNK_MAGIC, &fmtSize)) {
-    return PR_FALSE;
+    return false;
   }
 
   if (!ReadAll(waveFormat, sizeof(waveFormat))) {
-    return PR_FALSE;
+    return false;
   }
 
   PR_STATIC_ASSERT(sizeof(PRUint16) +
                    sizeof(PRUint16) +
                    sizeof(PRUint32) +
                    4 +
                    sizeof(PRUint16) +
                    sizeof(PRUint16) <= sizeof(waveFormat));
   if (ReadUint16LE(&p) != WAVE_FORMAT_ENCODING_PCM) {
     NS_WARNING("WAVE is not uncompressed PCM, compressed encodings are not supported");
-    return PR_FALSE;
+    return false;
   }
 
   channels = ReadUint16LE(&p);
   rate = ReadUint32LE(&p);
 
   // Skip over average bytes per second field.
   p += 4;
 
@@ -429,87 +429,87 @@ nsWaveReader::LoadFormatChunk()
   // extension size of 0 bytes.  Be polite and handle this rather than
   // considering the file invalid.  This code skips any extension of the
   // "format" chunk.
   if (fmtSize > WAVE_FORMAT_CHUNK_SIZE) {
     char extLength[2];
     const char* p = extLength;
 
     if (!ReadAll(extLength, sizeof(extLength))) {
-      return PR_FALSE;
+      return false;
     }
 
     PR_STATIC_ASSERT(sizeof(PRUint16) <= sizeof(extLength));
     PRUint16 extra = ReadUint16LE(&p);
     if (fmtSize - (WAVE_FORMAT_CHUNK_SIZE + 2) != extra) {
       NS_WARNING("Invalid extended format chunk size");
-      return PR_FALSE;
+      return false;
     }
     extra += extra % 2;
 
     if (extra > 0) {
       PR_STATIC_ASSERT(PR_UINT16_MAX + (PR_UINT16_MAX % 2) < UINT_MAX / sizeof(char));
       nsAutoArrayPtr<char> chunkExtension(new char[extra]);
       if (!ReadAll(chunkExtension.get(), extra)) {
-        return PR_FALSE;
+        return false;
       }
     }
   }
 
   // RIFF chunks are always word (two byte) aligned.
   NS_ABORT_IF_FALSE(mDecoder->GetCurrentStream()->Tell() % 2 == 0,
                     "LoadFormatChunk left stream unaligned");
 
   // Make sure metadata is fairly sane.  The rate check is fairly arbitrary,
   // but the channels check is intentionally limited to mono or stereo
   // because that's what the audio backend currently supports.
   if (rate < 100 || rate > 96000 ||
       channels < 1 || channels > MAX_CHANNELS ||
       (frameSize != 1 && frameSize != 2 && frameSize != 4) ||
       (sampleFormat != 8 && sampleFormat != 16)) {
     NS_WARNING("Invalid WAVE metadata");
-    return PR_FALSE;
+    return false;
   }
 
   ReentrantMonitorAutoEnter monitor(mDecoder->GetReentrantMonitor());
   mSampleRate = rate;
   mChannels = channels;
   mFrameSize = frameSize;
   if (sampleFormat == 8) {
     mSampleFormat = nsAudioStream::FORMAT_U8;
   } else {
     mSampleFormat = nsAudioStream::FORMAT_S16_LE;
   }
-  return PR_TRUE;
+  return true;
 }
 
 bool
 nsWaveReader::FindDataOffset()
 {
   // RIFF chunks are always word (two byte) aligned.
   NS_ABORT_IF_FALSE(mDecoder->GetCurrentStream()->Tell() % 2 == 0,
                     "FindDataOffset called with unaligned stream");
 
   // The "data" chunk may not directly follow the "format" chunk, so skip
   // over any intermediate chunks.
   PRUint32 length;
   if (!ScanForwardUntil(DATA_CHUNK_MAGIC, &length)) {
-    return PR_FALSE;
+    return false;
   }
 
   PRInt64 offset = mDecoder->GetCurrentStream()->Tell();
   if (offset <= 0 || offset > PR_UINT32_MAX) {
     NS_WARNING("PCM data offset out of range");
-    return PR_FALSE;
+    return false;
   }
 
   ReentrantMonitorAutoEnter monitor(mDecoder->GetReentrantMonitor());
   mWaveLength = length;
   mWavePCMOffset = PRUint32(offset);
-  return PR_TRUE;
+  return true;
 }
 
 double
 nsWaveReader::BytesToTime(PRInt64 aBytes) const
 {
   NS_ABORT_IF_FALSE(aBytes >= 0, "Must be >= 0");
   return float(aBytes) / mSampleRate / mFrameSize;
 }
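
BytesToTime maps a PCM byte count to seconds as bytes / (rate * frame size). A quick worked example, assuming 44100 Hz 16-bit stereo, i.e. mFrameSize of 4 bytes per sample frame (one of the frame sizes LoadFormatChunk accepts):

// One second of 44100 Hz, 16-bit stereo PCM is 44100 * 4 = 176400 bytes,
// so 352800 bytes correspond to two seconds of audio.
double seconds = double(352800) / 44100 / 4;   // == 2.0
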
--- a/content/media/wave/nsWaveReader.h
+++ b/content/media/wave/nsWaveReader.h
@@ -50,22 +50,22 @@ public:
 
   virtual nsresult Init(nsBuiltinDecoderReader* aCloneDonor);
   virtual bool DecodeAudioData();
   virtual bool DecodeVideoFrame(bool &aKeyframeSkip,
                                   PRInt64 aTimeThreshold);
 
   virtual bool HasAudio()
   {
-    return PR_TRUE;
+    return true;
   }
 
   virtual bool HasVideo()
   {
-    return PR_FALSE;
+    return false;
   }
 
   virtual nsresult ReadMetadata(nsVideoInfo* aInfo);
   virtual nsresult Seek(PRInt64 aTime, PRInt64 aStartTime, PRInt64 aEndTime, PRInt64 aCurrentTime);
   virtual nsresult GetBuffered(nsTimeRanges* aBuffered, PRInt64 aStartTime);
 
 private:
   bool ReadAll(char* aBuf, PRInt64 aSize, PRInt64* aBytesRead = nsnull);
--- a/content/media/webm/nsWebMReader.cpp
+++ b/content/media/webm/nsWebMReader.cpp
@@ -92,17 +92,17 @@ static int webm_read(void *aBuffer, size
   nsresult rv = NS_OK;
   bool eof = false;
 
   char *p = static_cast<char *>(aBuffer);
   while (NS_SUCCEEDED(rv) && aLength > 0) {
     PRUint32 bytes = 0;
     rv = stream->Read(p, aLength, &bytes);
     if (bytes == 0) {
-      eof = PR_TRUE;
+      eof = true;
       break;
     }
     decoder->NotifyBytesConsumed(bytes);
     aLength -= bytes;
     p += bytes;
   }
 
   return NS_FAILED(rv) ? -1 : eof ? 0 : 1;
@@ -131,18 +131,18 @@ nsWebMReader::nsWebMReader(nsBuiltinDeco
   : nsBuiltinDecoderReader(aDecoder),
   mContext(nsnull),
   mPacketCount(0),
   mChannels(0),
   mVideoTrack(0),
   mAudioTrack(0),
   mAudioStartUsec(-1),
   mAudioFrames(0),
-  mHasVideo(PR_FALSE),
-  mHasAudio(PR_FALSE)
+  mHasVideo(false),
+  mHasAudio(false)
 {
   MOZ_COUNT_CTOR(nsWebMReader);
 }
 
 nsWebMReader::~nsWebMReader()
 {
   Cleanup();
 
@@ -230,18 +230,18 @@ nsresult nsWebMReader::ReadMetadata(nsVi
 
   unsigned int ntracks = 0;
   r = nestegg_track_count(mContext, &ntracks);
   if (r == -1) {
     Cleanup();
     return NS_ERROR_FAILURE;
   }
 
-  mInfo.mHasAudio = PR_FALSE;
-  mInfo.mHasVideo = PR_FALSE;
+  mInfo.mHasAudio = false;
+  mInfo.mHasVideo = false;
   for (PRUint32 track = 0; track < ntracks; ++track) {
     int id = nestegg_track_codec_id(mContext, track);
     if (id == -1) {
       Cleanup();
       return NS_ERROR_FAILURE;
     }
     int type = nestegg_track_type(mContext, track);
     if (!mHasVideo && type == NESTEGG_TRACK_VIDEO) {
@@ -276,18 +276,18 @@ nsresult nsWebMReader::ReadMetadata(nsVi
       nsIntSize displaySize(params.display_width, params.display_height);
       nsIntSize frameSize(params.width, params.height);
       if (!nsVideoInfo::ValidateVideoRegion(frameSize, pictureRect, displaySize)) {
         // Video track's frame sizes will overflow. Ignore the video track.
         continue;
       }
 
       mVideoTrack = track;
-      mHasVideo = PR_TRUE;
-      mInfo.mHasVideo = PR_TRUE;
+      mHasVideo = true;
+      mInfo.mHasVideo = true;
 
       mInfo.mDisplay = displaySize;
       mPicture = pictureRect;
       mInitialFrame = frameSize;
 
       switch (params.stereo_mode) {
       case NESTEGG_VIDEO_MONO:
         mInfo.mStereoMode = STEREO_MODE_MONO;
@@ -331,18 +331,18 @@ nsresult nsWebMReader::ReadMetadata(nsVi
       nestegg_audio_params params;
       r = nestegg_track_audio_params(mContext, track, &params);
       if (r == -1) {
         Cleanup();
         return NS_ERROR_FAILURE;
       }
 
       mAudioTrack = track;
-      mHasAudio = PR_TRUE;
-      mInfo.mHasAudio = PR_TRUE;
+      mHasAudio = true;
+      mInfo.mHasAudio = true;
 
       // Get the Vorbis header data
       unsigned int nheaders = 0;
       r = nestegg_track_codec_data_count(mContext, track, &nheaders);
       if (r == -1 || nheaders != 3) {
         Cleanup();
         return NS_ERROR_FAILURE;
       }
@@ -352,17 +352,17 @@ nsresult nsWebMReader::ReadMetadata(nsVi
         size_t length = 0;
 
         r = nestegg_track_codec_data(mContext, track, header, &data, &length);
         if (r == -1) {
           Cleanup();
           return NS_ERROR_FAILURE;
         }
 
-        ogg_packet opacket = InitOggPacket(data, length, header == 0, PR_FALSE, 0);
+        ogg_packet opacket = InitOggPacket(data, length, header == 0, false, 0);
 
         r = vorbis_synthesis_headerin(&mVorbisInfo,
                                       &mVorbisComment,
                                       &opacket);
         if (r != 0) {
           Cleanup();
           return NS_ERROR_FAILURE;
         }
@@ -410,49 +410,49 @@ ogg_packet nsWebMReader::InitOggPacket(u
 bool nsWebMReader::DecodeAudioPacket(nestegg_packet* aPacket, PRInt64 aOffset)
 {
   NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
 
   int r = 0;
   unsigned int count = 0;
   r = nestegg_packet_count(aPacket, &count);
   if (r == -1) {
-    return PR_FALSE;
+    return false;
   }
 
   uint64_t tstamp = 0;
   r = nestegg_packet_tstamp(aPacket, &tstamp);
   if (r == -1) {
-    return PR_FALSE;
+    return false;
   }
 
   const PRUint32 rate = mVorbisDsp.vi->rate;
   PRUint64 tstamp_usecs = tstamp / NS_PER_USEC;
   if (mAudioStartUsec == -1) {
     // This is the first audio chunk. Assume the start time of our decode
     // is the start of this chunk.
     mAudioStartUsec = tstamp_usecs;
   }
   // If there's a gap between the start of this audio chunk and the end of
   // the previous audio chunk, we need to increment the packet count so that
   // the vorbis decode doesn't use data from before the gap to help decode
   // from after the gap.
   PRInt64 tstamp_frames = 0;
   if (!UsecsToFrames(tstamp_usecs, rate, tstamp_frames)) {
     NS_WARNING("Int overflow converting WebM timestamp to frames");
-    return PR_FALSE;
+    return false;
   }
   PRInt64 decoded_frames = 0;
   if (!UsecsToFrames(mAudioStartUsec, rate, decoded_frames)) {
     NS_WARNING("Int overflow converting WebM start time to frames");
-    return PR_FALSE;
+    return false;
   }
   if (!AddOverflow(decoded_frames, mAudioFrames, decoded_frames)) {
     NS_WARNING("Int overflow adding decoded_frames");
-    return PR_FALSE;
+    return false;
   }
   if (tstamp_frames > decoded_frames) {
 #ifdef DEBUG
     PRInt64 usecs = 0;
     LOG(PR_LOG_DEBUG, ("WebMReader detected gap of %lld, %lld frames, in audio stream\n",
       FramesToUsecs(tstamp_frames - decoded_frames, rate, usecs) ? usecs: -1,
       tstamp_frames - decoded_frames));
 #endif
@@ -462,68 +462,68 @@ bool nsWebMReader::DecodeAudioPacket(nes
   }
 
   PRInt32 total_frames = 0;
   for (PRUint32 i = 0; i < count; ++i) {
     unsigned char* data;
     size_t length;
     r = nestegg_packet_data(aPacket, i, &data, &length);
     if (r == -1) {
-      return PR_FALSE;
+      return false;
     }
 
-    ogg_packet opacket = InitOggPacket(data, length, PR_FALSE, PR_FALSE, -1);
+    ogg_packet opacket = InitOggPacket(data, length, false, false, -1);
 
     if (vorbis_synthesis(&mVorbisBlock, &opacket) != 0) {
-      return PR_FALSE;
+      return false;
     }
 
     if (vorbis_synthesis_blockin(&mVorbisDsp,
                                  &mVorbisBlock) != 0) {
-      return PR_FALSE;
+      return false;
     }
 
     VorbisPCMValue** pcm = 0;
     PRInt32 frames = 0;
     while ((frames = vorbis_synthesis_pcmout(&mVorbisDsp, &pcm)) > 0) {
       nsAutoArrayPtr<AudioDataValue> buffer(new AudioDataValue[frames * mChannels]);
       for (PRUint32 j = 0; j < mChannels; ++j) {
         VorbisPCMValue* channel = pcm[j];
         for (PRUint32 i = 0; i < PRUint32(frames); ++i) {
           buffer[i*mChannels + j] = MOZ_CONVERT_VORBIS_SAMPLE(channel[i]);
         }
       }
 
       PRInt64 duration = 0;
       if (!FramesToUsecs(frames, rate, duration)) {
         NS_WARNING("Int overflow converting WebM audio duration");
-        return PR_FALSE;
+        return false;
       }
       PRInt64 total_duration = 0;
       if (!FramesToUsecs(total_frames, rate, total_duration)) {
         NS_WARNING("Int overflow converting WebM audio total_duration");
-        return PR_FALSE;
+        return false;
       }
       
       PRInt64 time = tstamp_usecs + total_duration;
       total_frames += frames;
       mAudioQueue.Push(new AudioData(aOffset,
                                      time,
                                      duration,
                                      frames,
                                      buffer.forget(),
                                      mChannels));
       mAudioFrames += frames;
       if (vorbis_synthesis_read(&mVorbisDsp, frames) != 0) {
-        return PR_FALSE;
+        return false;
       }
     }
   }
 
-  return PR_TRUE;
+  return true;
 }
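
For context on the inner loop above: libvorbis hands back planar PCM, one array per channel, and the reader copies it into a single frame-major buffer before queueing it. A minimal sketch of that interleave step, with the sample-format conversion (MOZ_CONVERT_VORBIS_SAMPLE) elided and float used throughout; names are illustrative:

#include <cstdint>

// Illustrative only: copy planar PCM (pcm[channel][frame]) into one
// interleaved, frame-major buffer, as the loop above does before pushing
// an AudioData onto the audio queue.
static void InterleaveSketch(const float* const* aPlanarPcm,
                             uint32_t aChannels, uint32_t aFrames,
                             float* aInterleaved)
{
  for (uint32_t channel = 0; channel < aChannels; ++channel) {
    const float* src = aPlanarPcm[channel];
    for (uint32_t frame = 0; frame < aFrames; ++frame) {
      aInterleaved[frame * aChannels + channel] = src[frame];
    }
  }
}
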
 
 nsReturnRef<NesteggPacketHolder> nsWebMReader::NextPacket(TrackType aTrackType)
 {
   // The packet queue that packets will be pushed on if they
   // are not the type we are interested in.
   PacketQueue& otherPackets = 
     aTrackType == VIDEO ? mAudioPackets : mVideoPackets;
@@ -573,30 +573,30 @@ nsReturnRef<NesteggPacketHolder> nsWebMR
         otherPackets.Push(holder.disown());
         continue;
       }
 
       // The packet is for the track we want to play
       if (hasType && ourTrack == track) {
         break;
       }
-    } while (PR_TRUE);
+    } while (true);
   }
 
   return holder.out();
 }
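
NextPacket's comment describes a simple demuxing pattern: packets pulled for the wrong track are parked on that track's queue so the other decode path can consume them later without re-reading the stream. A minimal sketch of the pattern, assuming single-threaded access; the types and std::queue stand-ins are illustrative, not the tree's PacketQueue:

#include <queue>

struct PacketSketch { bool isVideo; /* payload elided */ };

// Illustrative only: pull packets until one matches the requested track,
// parking mismatched packets on the other track's queue.
static PacketSketch* NextPacketSketch(bool aWantVideo,
                                      std::queue<PacketSketch*>& aDemuxed, // stand-in for nestegg reads
                                      std::queue<PacketSketch*>& aAudioQ,
                                      std::queue<PacketSketch*>& aVideoQ)
{
  std::queue<PacketSketch*>& ours   = aWantVideo ? aVideoQ : aAudioQ;
  std::queue<PacketSketch*>& others = aWantVideo ? aAudioQ : aVideoQ;

  if (!ours.empty()) {            // a previous call already parked one for us
    PacketSketch* p = ours.front();
    ours.pop();
    return p;
  }
  while (!aDemuxed.empty()) {
    PacketSketch* p = aDemuxed.front();
    aDemuxed.pop();
    if (p->isVideo == aWantVideo) {
      return p;                   // the track we want
    }
    others.push(p);               // park it for the other decoder
  }
  return nullptr;                 // end of stream
}
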
 
 bool nsWebMReader::DecodeAudioData()
 {
   NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
 
   nsAutoRef<NesteggPacketHolder> holder(NextPacket(AUDIO));
   if (!holder) {
     mAudioQueue.Finish();
-    return PR_FALSE;
+    return false;
   }
 
   return DecodeAudioPacket(holder->mPacket, holder->mOffset);
 }
 
 bool nsWebMReader::DecodeVideoFrame(bool &aKeyframeSkip,
                                       PRInt64 aTimeThreshold)
 {
@@ -605,88 +605,88 @@ bool nsWebMReader::DecodeVideoFrame(bool
   // Record number of frames decoded and parsed. Automatically update the
   // stats counters using the AutoNotifyDecoded stack-based class.
   PRUint32 parsed = 0, decoded = 0;
   nsMediaDecoder::AutoNotifyDecoded autoNotify(mDecoder, parsed, decoded);
 
   nsAutoRef<NesteggPacketHolder> holder(NextPacket(VIDEO));
   if (!holder) {
     mVideoQueue.Finish();
-    return PR_FALSE;
+    return false;
   }
 
   nestegg_packet* packet = holder->mPacket;
   unsigned int track = 0;
   int r = nestegg_packet_track(packet, &track);
   if (r == -1) {
-    return PR_FALSE;
+    return false;
   }
 
   unsigned int count = 0;
   r = nestegg_packet_count(packet, &count);
   if (r == -1) {
-    return PR_FALSE;
+    return false;
   }
 
   uint64_t tstamp = 0;
   r = nestegg_packet_tstamp(packet, &tstamp);
   if (r == -1) {
-    return PR_FALSE;
+    return false;
   }
 
   // The end time of this frame is the start time of the next frame.  Fetch
   // the timestamp of the next packet for this track.  If we've reached the
   // end of the stream, use the file's duration as the end time of this
   // video frame.
   uint64_t next_tstamp = 0;
   {
     nsAutoRef<NesteggPacketHolder> next_holder(NextPacket(VIDEO));
     if (next_holder) {
       r = nestegg_packet_tstamp(next_holder->mPacket, &next_tstamp);
       if (r == -1) {
-        return PR_FALSE;
+        return false;
       }
       mVideoPackets.PushFront(next_holder.disown());
     } else {
       ReentrantMonitorAutoEnter decoderMon(mDecoder->GetReentrantMonitor());
       nsBuiltinDecoderStateMachine* s =
         static_cast<nsBuiltinDecoderStateMachine*>(mDecoder->GetStateMachine());
       PRInt64 endTime = s->GetEndMediaTime();
       if (endTime == -1) {
-        return PR_FALSE;
+        return false;
       }
       next_tstamp = endTime * NS_PER_USEC;
     }
   }
 
   PRInt64 tstamp_usecs = tstamp / NS_PER_USEC;
   for (PRUint32 i = 0; i < count; ++i) {
     unsigned char* data;
     size_t length;
     r = nestegg_packet_data(packet, i, &data, &length);
     if (r == -1) {
-      return PR_FALSE;
+      return false;
     }
 
     vpx_codec_stream_info_t si;
     memset(&si, 0, sizeof(si));
     si.sz = sizeof(si);
     vpx_codec_peek_stream_info(&vpx_codec_vp8_dx_algo, data, length, &si);
     if (aKeyframeSkip && (!si.is_kf || tstamp_usecs < aTimeThreshold)) {
       // Skipping to next keyframe...
       parsed++; // Assume 1 frame per chunk.
       continue;
     }
 
     if (aKeyframeSkip && si.is_kf) {
-      aKeyframeSkip = PR_FALSE;
+      aKeyframeSkip = false;
     }
 
     if (vpx_codec_decode(&mVP8, data, length, NULL, 0)) {
-      return PR_FALSE;
+      return false;
     }
 
     // If the timestamp of the video frame is less than
     // the time threshold required then it is not added
     // to the video queue and won't be displayed.
     if (tstamp_usecs < aTimeThreshold) {
       parsed++; // Assume 1 frame per chunk.
       continue;
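
The end-time rule in the comment above (a frame ends where the next one starts, or at the media end time on end of stream) can be summarised as a small helper; a hedged sketch with illustrative names, not the tree's API:

#include <cstdint>

// Illustrative only: pick the end timestamp (in ns) for the current video
// packet, using the next packet's timestamp when available and falling back
// to the media end time when the stream is exhausted.
static bool PickEndTimestampSketch(const uint64_t* aNextPacketTstampNs, // null at end of stream
                                   int64_t aMediaEndTimeUsecs,          // -1 if unknown
                                   uint64_t& aOutEndNs)
{
  const uint64_t NS_PER_USEC = 1000;
  if (aNextPacketTstampNs) {
    aOutEndNs = *aNextPacketTstampNs;
    return true;
  }
  if (aMediaEndTimeUsecs == -1) {
    return false; // cannot bound the frame's duration; the caller bails out
  }
  aOutEndNs = static_cast<uint64_t>(aMediaEndTimeUsecs) * NS_PER_USEC;
  return true;
}
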
@@ -711,17 +711,18 @@ bool nsWebMReader::DecodeVideoFrame(bool
       b.mPlanes[1].mWidth = img->d_w >> img->x_chroma_shift;
  
       b.mPlanes[2].mData = img->planes[2];
       b.mPlanes[2].mStride = img->stride[2];
       b.mPlanes[2].mHeight = img->d_h >> img->y_chroma_shift;
       b.mPlanes[2].mWidth = img->d_w >> img->x_chroma_shift;
   
       nsIntRect picture = mPicture;
-      if (img->d_w != mInitialFrame.width || img->d_h != mInitialFrame.height) {
+      if (img->d_w != static_cast<PRUint32>(mInitialFrame.width) ||
+          img->d_h != static_cast<PRUint32>(mInitialFrame.height)) {
         // Frame size is different from what the container reports. This is legal
         // in WebM, and we will preserve the ratio of the crop rectangle as it
         // was reported relative to the picture size reported by the container.
         picture.x = (mPicture.x * img->d_w) / mInitialFrame.width;
         picture.y = (mPicture.y * img->d_h) / mInitialFrame.height;
         picture.width = (img->d_w * mPicture.width) / mInitialFrame.width;
         picture.height = (img->d_h * mPicture.height) / mInitialFrame.height;
       }
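
The proportional remap above keeps the crop rectangle's ratios when the decoded frame size differs from the container's initial frame; a minimal sketch of the same arithmetic (overflow and zero-size handling elided, names illustrative):

struct IntRectSketch { int x, y, width, height; };

// Illustrative only: rescale the container-reported crop rect when the
// decoded frame's dimensions differ from the container's initial frame.
static IntRectSketch ScaleCropRect(const IntRectSketch& aPicture,
                                   int aInitialW, int aInitialH,
                                   unsigned aFrameW, unsigned aFrameH)
{
  IntRectSketch out;
  out.x      = (aPicture.x * int(aFrameW)) / aInitialW;
  out.y      = (aPicture.y * int(aFrameH)) / aInitialH;
  out.width  = (int(aFrameW) * aPicture.width)  / aInitialW;
  out.height = (int(aFrameH) * aPicture.height) / aInitialH;
  return out;
}
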
@@ -731,27 +732,27 @@ bool nsWebMReader::DecodeVideoFrame(bool
                                        holder->mOffset,
                                        tstamp_usecs,
                                        next_tstamp / NS_PER_USEC,
                                        b,
                                        si.is_kf,
                                        -1,
                                        picture);
       if (!v) {
-        return PR_FALSE;
+        return false;
       }
       parsed++;
       decoded++;
       NS_ASSERTION(decoded <= parsed,
         "Expect only 1 frame per chunk per packet in WebM...");
       mVideoQueue.Push(v);
     }
   }
 
-  return PR_TRUE;
+  return true;
 }
 
 nsresult nsWebMReader::Seek(PRInt64 aTarget, PRInt64 aStartTime, PRInt64 aEndTime,
                             PRInt64 aCurrentTime)
 {
   NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
 
   LOG(PR_LOG_DEBUG, ("%p About to seek to %lldms", mDecoder, aTarget));
--- a/content/media/webm/nsWebMReader.h
+++ b/content/media/webm/nsWebMReader.h
@@ -174,18 +174,18 @@ private:
   // Returns an initialized ogg packet with data obtained from the WebM container.
   ogg_packet InitOggPacket(unsigned char* aData,
                            size_t aLength,
                            bool aBOS,
                            bool aEOS,
                            PRInt64 aGranulepos);
 
   // Decode a nestegg packet of audio data. Push the audio data on the
-  // audio queue. Returns PR_TRUE when there's more audio to decode,
-  // PR_FALSE if the audio is finished, end of file has been reached,
+  // audio queue. Returns true when there's more audio to decode,
+  // false if the audio is finished, end of file has been reached,
   // or an unrecoverable read error has occurred. The reader's monitor
   // must be held during this call. This function will free the packet
   // so the caller must not use the packet after calling.
   bool DecodeAudioPacket(nestegg_packet* aPacket, PRInt64 aOffset);
 
   // Release context and set to null. Called when an error occurs during
   // reading metadata or destruction of the reader itself.
   void Cleanup();
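
As a hedged illustration of the boolean contract documented above, a caller simply loops until the decode reports it is done; this is a sketch, not the tree's state machine:

#include <functional>

// Illustrative only: consume the "true = more audio may follow,
// false = finished / end of file / unrecoverable error" contract above.
static void DrainAudioSketch(const std::function<bool()>& aDecodeAudioData)
{
  while (aDecodeAudioData()) {
    // Each successful call pushed more decoded audio onto the queue.
  }
  // false: audio finished, end of file reached, or an unrecoverable error.
}
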
--- a/dom/interfaces/css/nsIDOMCSS2Properties.idl
+++ b/dom/interfaces/css/nsIDOMCSS2Properties.idl
@@ -46,17 +46,17 @@
  * The nsIDOMCSS2Properties interface is a datatype for additional
  * reflection of data already provided in nsIDOMCSSStyleDeclaration in
  * the Document Object Model.
  *
  * For more information on this interface please see
  * http://www.w3.org/TR/DOM-Level-2-Style
  */
 
-[builtinclass, scriptable, uuid(79b66107-f9d2-42ac-bc68-b558d79037ec)]
+[builtinclass, scriptable, uuid(519ae4fa-0fee-4aaa-bcb9-34b503236801)]
 interface nsIDOMCSS2Properties : nsISupports
 {
            attribute DOMString        background;
                                         // raises(DOMException) on setting
 
            attribute DOMString        backgroundAttachment;
                                         // raises(DOMException) on setting
 
@@ -679,16 +679,28 @@ interface nsIDOMCSS2Properties : nsISupp
                                         // raises(DOMException) on setting
 
            attribute DOMString        MozTransform;
                                         // raises(DOMException) on setting
 
            attribute DOMString        MozTransformOrigin;
                                         // raises(DOMException) on setting 
 
+           attribute DOMString        MozPerspective;
+                                        // raises(DOMException) on setting
+
+           attribute DOMString        MozPerspectiveOrigin;
+                                        // raises(DOMException) on setting
+
+           attribute DOMString        MozBackfaceVisibility;
+                                        // raises(DOMException) on setting
+
+           attribute DOMString        MozTransformStyle;
+                                        // raises(DOMException) on setting
+
            attribute DOMString        MozWindowShadow;
                                         // raises(DOMException) on setting
 
            attribute DOMString        backgroundSize;
                                         // raises(DOMException) on setting
 
            attribute DOMString        MozTextBlink;
                                         // raises(DOMException) on setting
--- a/gfx/layers/basic/BasicLayers.cpp
+++ b/gfx/layers/basic/BasicLayers.cpp
@@ -1361,28 +1361,16 @@ BasicLayerManager::PopGroupToSourceWithC
     aTarget->IdentityMatrix();
     aTarget->SetSource(current);
     mCachedSurfaceInUse = PR_FALSE;
   } else {
     aTarget->PopGroupToSource();
   }
 }
 
-already_AddRefed<gfxASurface>
-BasicLayerManager::PopGroupToSurface(gfxContext *aTarget, gfxContext *aPushed)
-{
-  if (!aTarget)
-    return nsnull;
-  nsRefPtr<gfxASurface> current = aPushed->CurrentSurface();
-  NS_ASSERTION(!mCachedSurface.IsSurface(current), "Should never be popping cached surface here!");
-  nsRefPtr<gfxPattern> pat = aTarget->PopGroup();
-  current = pat->GetSurface();
-  return current.forget();
-}
-
 void
 BasicLayerManager::BeginTransactionWithTarget(gfxContext* aTarget)
 {
 #ifdef MOZ_LAYERS_HAVE_LOG
   MOZ_LAYERS_LOG(("[----- BeginTransaction"));
   Log();
 #endif
 
@@ -1840,45 +1828,50 @@ BasicLayerManager::PaintLayer(gfxContext
   gfxMatrix transform;
   // Will return an identity matrix for 3d transforms, which are handled separately below.
   bool is2D = effectiveTransform.CanDraw2D(&transform);
   NS_ABORT_IF_FALSE(is2D || needsGroup || !aLayer->GetFirstChild(), "Must PushGroup for 3d transforms!");
   if (is2D) {
     aTarget->SetMatrix(transform);
   } else {
     aTarget->SetMatrix(gfxMatrix());
-    // Save so we can restore clipping after PushGroupForLayer changes it.
-    aTarget->Save();
   }
 
   const nsIntRegion& visibleRegion = aLayer->GetEffectiveVisibleRegion();
   // If needsGroup is true, we'll clip to the visible region after we've popped the group
   if (needsClipToVisibleRegion && !needsGroup) {
     gfxUtils::ClipToRegion(aTarget, visibleRegion);
     // Don't need to clip to visible region again
     needsClipToVisibleRegion = PR_FALSE;
   }
 
   bool pushedTargetOpaqueRect = false;
   nsRefPtr<gfxASurface> currentSurface = aTarget->CurrentSurface();
   const gfxRect& targetOpaqueRect = currentSurface->GetOpaqueRect();
 
   // Try to annotate currentSurface with a region of pixels that have been
   // (or will be) painted opaque, if no such region is currently set.
+  const nsIntRect& bounds = visibleRegion.GetBounds();
   if (targetOpaqueRect.IsEmpty() && visibleRegion.GetNumRects() == 1 &&
       (aLayer->GetContentFlags() & Layer::CONTENT_OPAQUE) &&
       !transform.HasNonAxisAlignedTransform()) {
-    const nsIntRect& bounds = visibleRegion.GetBounds();
     currentSurface->SetOpaqueRect(
         aTarget->UserToDevice(gfxRect(bounds.x, bounds.y, bounds.width, bounds.height)));
     pushedTargetOpaqueRect = PR_TRUE;
   }
 
   nsRefPtr<gfxContext> groupTarget;
-  if (needsGroup) {
+  nsRefPtr<gfxASurface> untransformedSurface;
+  if (!is2D) {
+    untransformedSurface = 
+      gfxPlatform::GetPlatform()->CreateOffscreenSurface(gfxIntSize(bounds.width, bounds.height), 
+                                                         gfxASurface::CONTENT_COLOR_ALPHA);
+    untransformedSurface->SetDeviceOffset(gfxPoint(-bounds.x, -bounds.y));
+    groupTarget = new gfxContext(untransformedSurface);
+  } else if (needsGroup) {
     groupTarget = PushGroupForLayer(aTarget, aLayer, aLayer->GetEffectiveVisibleRegion(),
                                     &needsClipToVisibleRegion);
   } else {
     groupTarget = aTarget;
   }
 
   /* Only paint ourself, or our children - This optimization relies on this! */
   Layer* child = aLayer->GetFirstChild();
@@ -1906,26 +1899,24 @@ BasicLayerManager::PaintLayer(gfxContext
     }
   }
 
   if (needsGroup) {
     bool blitComplete = false;
     if (is2D) {
       PopGroupToSourceWithCachedSurface(aTarget, groupTarget);
     } else {
-      nsRefPtr<gfxASurface> sourceSurface = PopGroupToSurface(aTarget, groupTarget);
-      aTarget->Restore();
-      NS_ABORT_IF_FALSE(sourceSurface, "PopGroup should always return a surface pattern");
-      gfxRect bounds = visibleRegion.GetBounds();
+      NS_ABORT_IF_FALSE(untransformedSurface, 
+                        "We should always allocate an untransformed surface with 3d transforms!");
 
       gfxPoint offset;
       bool dontBlit = needsClipToVisibleRegion || mTransactionIncomplete || 
                         aLayer->GetEffectiveOpacity() != 1.0f;
       nsRefPtr<gfxASurface> result = 
-        Transform3D(sourceSurface, aTarget, bounds,
+        Transform3D(untransformedSurface, aTarget, bounds,
                     effectiveTransform, offset, dontBlit);
 
       blitComplete = !result;
       if (result) {
         aTarget->SetSource(result, offset);
       }
     }
     // If we're doing our own double-buffering, we need to avoid drawing
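
The 3d branch above paints the layer into an untransformed offscreen surface and then hands it, together with the effective transform, to Transform3D, which either blits the result itself or returns a transformed surface for aTarget to use as its source. The only decision new to this path is whether blitting must be deferred; a minimal sketch of that condition as the patch states it (the function name is illustrative):

// Illustrative only: Transform3D must hand back a surface (rather than blit)
// whenever clipping to the visible region, an incomplete transaction, or a
// non-unit opacity still needs to be applied by the normal paint path.
static bool MustReturnSurfaceSketch(bool aNeedsClipToVisibleRegion,
                                    bool aTransactionIncomplete,
                                    float aEffectiveOpacity)
{
  return aNeedsClipToVisibleRegion || aTransactionIncomplete ||
         aEffectiveOpacity != 1.0f;
}
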
--- a/gfx/layers/basic/BasicLayers.h
+++ b/gfx/layers/basic/BasicLayers.h
@@ -167,17 +167,16 @@ public:
   void SetTransactionIncomplete() { mTransactionIncomplete = true; }
 
   already_AddRefed<gfxContext> PushGroupForLayer(gfxContext* aContext, Layer* aLayer,
                                                  const nsIntRegion& aRegion,
                                                  bool* aNeedsClipToVisibleRegion);
   already_AddRefed<gfxContext> PushGroupWithCachedSurface(gfxContext *aTarget,
                                                           gfxASurface::gfxContentType aContent);
   void PopGroupToSourceWithCachedSurface(gfxContext *aTarget, gfxContext *aPushed);
-  already_AddRefed<gfxASurface> PopGroupToSurface(gfxContext *aTarget, gfxContext *aPushed);
 
   virtual bool IsCompositingCheap() { return false; }
   virtual bool HasShadowManagerInternal() const { return false; }
   bool HasShadowManager() const { return HasShadowManagerInternal(); }
 
 protected:
 #ifdef DEBUG
   enum TransactionPhase {
--- a/layout/generic/nsFrame.cpp
+++ b/layout/generic/nsFrame.cpp
@@ -1523,19 +1523,16 @@ WrapPreserve3DList(nsIFrame *aFrame, nsD
   nsresult rv = NS_OK;
   nsDisplayList newList;
   while (nsDisplayItem *item = aList->RemoveBottom()) {
     nsIFrame *childFrame = item->GetUnderlyingFrame();
     NS_ASSERTION(childFrame, "All display items to be wrapped must have a frame!");
     if (childFrame->GetParent()->Preserves3DChildren()) {
       switch (item->GetType()) {
         case nsDisplayItem::TYPE_TRANSFORM: {
-          // The child transform frame should always preserve 3d. In the cases where preserve-3d is disabled
-          // such as clipping, this would be wrapped in a clip display object, and we wouldn't reach this point.
-          NS_ASSERTION(childFrame->Preserves3D(), "Child transform frame must preserve 3d!");
           break;
         }
         case nsDisplayItem::TYPE_WRAP_LIST: {
           nsDisplayWrapList *list = static_cast<nsDisplayWrapList*>(item);
           rv = WrapPreserve3DList(aFrame, aBuilder, list->GetList());
           break;
         }
         case nsDisplayItem::TYPE_OPACITY: {
--- a/layout/reftests/reftest.list
+++ b/layout/reftests/reftest.list
@@ -268,18 +268,18 @@ skip-if(Android) include text-shadow/ref
 include ../../toolkit/themes/pinstripe/reftests/reftest.list
 
 # text-transform/
 include text-transform/reftest.list
 
 # -moz-transform/
 include transform/reftest.list
 
-# 3d transforms - disabled currently
-#include transform-3d/reftest.list
+# 3d transforms
+include transform-3d/reftest.list
 
 # unicode/ (verify that we don't expend effort doing unicode-aware case checks)
 include unicode/reftest.list
 
 # widget/
 include ../../widget/reftests/reftest.list
 
 # xml-stylesheet/
--- a/layout/style/nsCSSProps.cpp
+++ b/layout/style/nsCSSProps.cpp
@@ -347,22 +347,16 @@ nsCSSProps::LookupProperty(const nsACStr
                             *alias_end = gAliases + NS_ARRAY_LENGTH(gAliases);
          alias < alias_end; ++alias) {
       if (aProperty.LowerCaseEqualsASCII(alias->name)) {
         res = alias->id;
         break;
       }
     }
   }
-  
-  if (res == eCSSProperty_perspective || res == eCSSProperty_perspective_origin || 
-      res == eCSSProperty_backface_visibility || res == eCSSProperty_transform_style) {
-    return eCSSProperty_UNKNOWN;
-  }
-
   return res;
 }
 
 nsCSSProperty
 nsCSSProps::LookupProperty(const nsAString& aProperty)
 {
   // This is faster than converting and calling
   // LookupProperty(nsACString&).  The table will do its own
@@ -374,22 +368,16 @@ nsCSSProps::LookupProperty(const nsAStri
                             *alias_end = gAliases + NS_ARRAY_LENGTH(gAliases);
          alias < alias_end; ++alias) {
       if (aProperty.LowerCaseEqualsASCII(alias->name)) {
         res = alias->id;
         break;
       }
     }
   }
-  
-  if (res == eCSSProperty_perspective || res == eCSSProperty_perspective_origin || 
-      res == eCSSProperty_backface_visibility || res == eCSSProperty_transform_style) {
-    return eCSSProperty_UNKNOWN;
-  }
-
   return res;
 }
 
 nsCSSFontDesc
 nsCSSProps::LookupFontDesc(const nsACString& aFontDesc)
 {
   NS_ABORT_IF_FALSE(gFontDescTable, "no lookup table, needs addref");
   return nsCSSFontDesc(gFontDescTable->Lookup(aFontDesc));
--- a/layout/style/nsDOMCSSDeclaration.h
+++ b/layout/style/nsDOMCSSDeclaration.h
@@ -62,25 +62,16 @@ class nsDOMCSSDeclaration : public nsICS
 {
 public:
   // Only implement QueryInterface; subclasses have the responsibility
   // of implementing AddRef/Release.
   NS_IMETHOD QueryInterface(REFNSIID aIID, void** aInstancePtr);
 
   NS_DECL_NSICSSDECLARATION
 
-  NS_IMETHOD GetMozPerspective(nsAString_internal&);
-  NS_IMETHOD SetMozPerspective(const nsAString_internal&);
-  NS_IMETHOD GetMozPerspectiveOrigin(nsAString_internal&);
-  NS_IMETHOD SetMozPerspectiveOrigin(const nsAString_internal&);
-  NS_IMETHOD GetMozBackfaceVisibility(nsAString_internal&);
-  NS_IMETHOD SetMozBackfaceVisibility(const nsAString_internal&);
-  NS_IMETHOD GetMozTransformStyle(nsAString_internal&);
-  NS_IMETHOD SetMozTransformStyle(const nsAString_internal&);
-
   // Require subclasses to implement |GetParentRule|.
   //NS_DECL_NSIDOMCSSSTYLEDECLARATION
   NS_IMETHOD GetCssText(nsAString & aCssText);
   NS_IMETHOD SetCssText(const nsAString & aCssText);
   NS_IMETHOD GetPropertyValue(const nsAString & propertyName,
                               nsAString & _retval);
   NS_IMETHOD GetPropertyCSSValue(const nsAString & propertyName,
                                  nsIDOMCSSValue **_retval);
--- a/layout/style/test/ListCSSProperties.cpp
+++ b/layout/style/test/ListCSSProperties.cpp
@@ -188,24 +188,16 @@ print_array(const char *aName,
         const PropertyInfo *p = aProps + i;
 
         if (is_inaccessible(p->propName))
             // inaccessible properties never have DOM props, so don't
             // worry about incrementing j.  The assertion below will
             // catch if they do.
             continue;
 
-        if (strcmp(p->propName, "-moz-perspective") == 0 ||
-            strcmp(p->propName, "-moz-perspective-origin") == 0 ||
-            strcmp(p->propName, "-moz-backface-visibility") == 0 ||
-            strcmp(p->propName, "-moz-transform-style") == 0) {
-            ++j;
-            continue;
-        }
-
         if (first)
             first = 0;
         else
             printf(",\n");
 
         printf("\t{ name: \"%s\", prop: ", p->propName);
         if (j >= aDOMPropsLength || strcmp(p->propName, aDOMProps[j]) != 0)
             printf("null");
--- a/layout/style/test/property_database.js
+++ b/layout/style/test/property_database.js
@@ -976,16 +976,67 @@ var gCSSProperties = {
 			"-moz-calc(20px + 50%) -moz-calc(50% - 10px)",
 			"-moz-calc(-20px) -moz-calc(-50%)",
 			"-moz-calc(-20%) -moz-calc(-50%)"
 		],
 		invalid_values: ["red", "auto", "none", "0.5 0.5", "40px #0000ff",
 						 "border", "center red", "right diagonal",
 						 "#00ffff bottom"]
 	},
+    "-moz-perspective-origin": {
+        domProp: "MozPerspectiveOrigin",
+        inherited: false,
+        type: CSS_TYPE_LONGHAND,
+        /* no subproperties */
+        prerequisites: { "width": "10px", "height": "10px", "display": "block"},
+        initial_values: [ "50% 50%", "center", "center center" ],
+        other_values: [ "25% 25%", "5px 5px", "20% 3em", "0 0", "0in 1in",
+                        "top", "bottom","top left", "top right",
+                        "top center", "center left", "center right",
+                        "bottom left", "bottom right", "bottom center",
+                        "20% center", "5px center", "13in bottom",
+                        "left 50px", "right 13%", "center 40px",
+                        "-moz-calc(20px)",
+                        "-moz-calc(20px) 10px",
+                        "10px -moz-calc(20px)",
+                        "-moz-calc(20px) 25%",
+                        "25% -moz-calc(20px)",
+                        "-moz-calc(20px) -moz-calc(20px)",
+                        "-moz-calc(20px + 1em) -moz-calc(20px / 2)",
+                        "-moz-calc(20px + 50%) -moz-calc(50% - 10px)",
+                        "-moz-calc(-20px) -moz-calc(-50%)",
+                        "-moz-calc(-20%) -moz-calc(-50%)" ],
+        invalid_values: [ "red", "auto", "none", "0.5 0.5", "40px #0000ff",
+                          "border", "center red", "right diagonal",
+                          "#00ffff bottom"]
+    },
+    "-moz-perspective": {
+		domProp: "MozPerspective",
+		inherited: false,
+		type: CSS_TYPE_LONGHAND,
+		initial_values: [ "none", "0" ],
+		other_values: [ "1000px", "500.2px", "-100px", "-27.2em" ],
+		invalid_values: [ "pants", "200" ]
+	},
+    "-moz-backface-visibility": {
+        domProp: "MozBackfaceVisibility",
+        inherited: false,
+        type: CSS_TYPE_LONGHAND,
+        initial_values: [ "visible" ],
+        other_values: [ "hidden" ],
+        invalid_values: [ "collapse" ]
+    },
+	"-moz-transform-style": {
+		domProp: "MozTransformStyle",
+		inherited: false,
+		type: CSS_TYPE_LONGHAND,
+		initial_values: [ "flat" ],
+		other_values: [ "preserve-3d" ],
+		invalid_values: []
+	},
 	"-moz-user-focus": {
 		domProp: "MozUserFocus",
 		inherited: true,
 		type: CSS_TYPE_LONGHAND,
 		initial_values: [ "none" ],
 		other_values: [ "normal", "ignore", "select-all", "select-before", "select-after", "select-same", "select-menu" ],
 		invalid_values: []
 	},
--- a/layout/style/test/test_transitions_per_property.html
+++ b/layout/style/test/test_transitions_per_property.html
@@ -75,16 +75,19 @@ var supported_properties = {
     "-moz-outline-radius-topleft": [ test_radius_transition ],
     "-moz-outline-radius-topright": [ test_radius_transition ],
     "-moz-text-decoration-color": [ test_color_transition,
                                     test_border_color_transition ],
     "-moz-transform": [ test_transform_transition ],
     "-moz-transform-origin": [ test_length_pair_transition,
                                test_length_percent_pair_transition,
                                test_length_percent_pair_unclamped ],
+    "-moz-perspective-origin": [ test_length_pair_transition,
+                                 test_length_percent_pair_transition,
+                                 test_length_percent_pair_unclamped ],
     "background-color": [ test_color_transition ],
     "background-position": [ test_background_position_transition,
                              // FIXME: We don't currently test clamping,
                              // since background-position uses calc() as
                              // an intermediate form.
                              /* test_length_percent_pair_unclamped */ ],
     "background-size": [ test_background_size_transition,
                          // FIXME: We don't currently test clamping,
@@ -185,16 +188,17 @@ var supported_properties = {
                       test_length_percent_calc_transition,
                       test_length_clamped, test_percent_clamped ],
     "padding-right": [ test_length_transition, test_percent_transition,
                        test_length_percent_calc_transition,
                        test_length_clamped, test_percent_clamped ],
     "padding-top": [ test_length_transition, test_percent_transition,
                      test_length_percent_calc_transition,
                      test_length_clamped, test_percent_clamped ],
+    "-moz-perspective": [ test_length_transition ],
     "right": [ test_length_transition, test_percent_transition,
                test_length_percent_calc_transition,
                test_length_unclamped, test_percent_unclamped ],
     "stop-color": [ test_color_transition ],
     "stop-opacity" : [ test_float_zeroToOne_transition,
                        // opacity is clamped in computed style
                        // (not parsing/interpolation)
                        test_float_zeroToOne_clamped ],
@@ -1318,21 +1322,21 @@ function test_transform_transition(prop)
     { start: 'translateX(-20px)', end: 'none',
       expected_uncomputed: 'translateX(-15px)',
       expected: 'matrix(1, 0, 0, 1, -15px, 0px)' },
     { start: 'translateY(-40px)', end: 'none',
       expected_uncomputed: 'translateY(-30px)',
       expected: 'matrix(1, 0, 0, 1, 0px, -30px)' },
     { start: 'translateZ(40px)', end: 'none',
       expected_uncomputed: 'translateZ(30px)',
-      expected: 'matrix3d(1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 30, 1)',
+      expected: 'matrix3d(1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0px, 0px, 30px, 1)',
       requires_3d: true },
     { start: 'none', end: 'translate3D(40px, 60px, -40px)',
       expected_uncomputed: 'translate3D(10px, 15px, -10px)',
-      expected: 'matrix3d(1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 10, 15, -10, 1)',
+      expected: 'matrix3d(1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 10px, 15px, -10px, 1)',
       requires_3d: true },
     // percentages are relative to 300px (width) and 50px (height)
     // per the prerequisites in property_database.js
     { start: 'translate(20%)', end: 'none',
       expected_uncomputed: 'translate(15%)',
       expected: 'matrix(1, 0, 0, 1, 45px, 0px)',
       round_error_ok: true },
     { start: 'translate(20%, 12%)', end: 'none',
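
As a worked instance of the percentage comment above (assuming the 300px width / 50px height prerequisites it references): the expected_uncomputed value 'translate(15%)' is three quarters of the 20% start value, and 15% of the 300px width is 0.15 x 300px = 45px, which is why the expected computed value is 'matrix(1, 0, 0, 1, 45px, 0px)', with round_error_ok allowing for rounding.
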
@@ -1395,21 +1399,21 @@ function test_transform_transition(prop)
     { start: 'scaleX(3)', end: 'none',
       expected_uncomputed: 'scaleX(2.5)',
       expected: 'matrix(2.5, 0, 0, 1, 0px, 0px)' },
     { start: 'scaleY(5)', end: 'none',
       expected_uncomputed: 'scaleY(4)',
       expected: 'matrix(1, 0, 0, 4, 0px, 0px)' },
     { start: 'scaleZ(5)', end: 'none',
       expected_uncomputed: 'scaleZ(4)',
-      expected: 'matrix3d(1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 4, 0, 0, 0, 0, 1)',
+      expected: 'matrix3d(1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 4, 0, 0px, 0px, 0px, 1)',
       requires_3d: true },
     { start: 'none', end: 'scale3D(5, 5, 5)',
       expected_uncomputed: 'scale3D(2, 2, 2)',
-      expected: 'matrix3d(2, 0, 0, 0, 0, 2, 0, 0, 0, 0, 2, 0, 0, 0, 0, 1)',
+      expected: 'matrix3d(2, 0, 0, 0, 0, 2, 0, 0, 0, 0, 2, 0, 0px, 0px, 0px, 1)',
       requires_3d: true },
 
     // skew
     { start: 'skewX(45deg)', end: 'none',
       expected_uncomputed: 'skewX(33.75deg)' },
     { start: 'skewY(45deg)', end: 'none',
       expected_uncomputed: 'skewY(33.75deg)' },
     { start: 'skew(45deg)', end: 'none',
--- a/modules/libpref/src/init/all.js
+++ b/modules/libpref/src/init/all.js
@@ -3357,9 +3357,9 @@ pref("full-screen-api.enabled", false);
 pref("full-screen-api.allow-trusted-requests-only", true);
 pref("full-screen-api.key-input-restricted", true);
 
 // Time limit, in milliseconds, for nsEventStateManager::IsHandlingUserInput().
 // Used to detect long running handlers of user-generated events.
 pref("dom.event.handling-user-input-time-limit", 1000);
  
 // 3D Transforms
-pref("layout.3d-transforms.enabled", false);
+pref("layout.3d-transforms.enabled", true);
--- a/toolkit/components/telemetry/TelemetryHistograms.h
+++ b/toolkit/components/telemetry/TelemetryHistograms.h
@@ -48,16 +48,18 @@
  * Please note that only BOOLEAN histograms are allowed to have a minimum value
  * of zero, and that bucket counts should be >= 3.
  */
 
 /**
  * a11y telemetry
  */
 HISTOGRAM(A11Y_INSTANTIATED, 0, 1, 2, BOOLEAN, "has accessibility support been instantiated")
+HISTOGRAM(ISIMPLE_DOM_USAGE, 0, 1, 2, BOOLEAN, "have the ISimpleDOM* accessibility interfaces been used")
+HISTOGRAM(IACCESSIBLE_TABLE_USAGE, 0, 1, 2, BOOLEAN, "has the IAccessibleTable accessibility interface been used")
 
 HISTOGRAM(CYCLE_COLLECTOR, 1, 10000, 50, EXPONENTIAL, "Time spent on one cycle collection (ms)")
 HISTOGRAM(CYCLE_COLLECTOR_VISITED_REF_COUNTED, 1, 300000, 50, EXPONENTIAL, "Number of ref counted objects visited by the cycle collector")
 HISTOGRAM(CYCLE_COLLECTOR_VISITED_GCED, 1, 300000, 50, EXPONENTIAL, "Number of JS objects visited by the cycle collector")
 HISTOGRAM(CYCLE_COLLECTOR_COLLECTED, 1, 100000, 50, EXPONENTIAL, "Number of objects collected by the cycle collector")
 HISTOGRAM(TELEMETRY_PING, 1, 3000, 10, EXPONENTIAL, "Time taken to submit telemetry info (ms)")
 HISTOGRAM(TELEMETRY_SUCCESS, 0, 1, 2, BOOLEAN,  "Successful telemetry submission")
 HISTOGRAM(MEMORY_JS_COMPARTMENTS_SYSTEM, 1, 1000, 10, EXPONENTIAL, "Total JavaScript compartments used for add-ons and internals.")