Merge latest green b2g-inbound changeset and mozilla-central; a=merge
author: Ed Morley <emorley@mozilla.com>
Mon, 09 Jun 2014 15:28:38 +0100
changeset 206809 8bbb61b80df4579958b38b74a9487f559466c740
parent 206808 34fdf4d7fa375508883178a4ccfd495bcb14564b (current diff)
parent 206764 43a2f570cea26b5ee50699a321462b23f6a98ce8 (diff)
child 206812 68ac46c1b1f744bc4f13083a4031b7c20dc82c6f
push id: 3741
push user: asasaki@mozilla.com
push date: Mon, 21 Jul 2014 20:25:18 +0000
treeherder: mozilla-beta@4d6f46f5af68 [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: merge
milestone: 32.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Merge latest green b2g-inbound changeset and mozilla-central; a=merge
--- a/build/gyp.mozbuild
+++ b/build/gyp.mozbuild
@@ -26,17 +26,17 @@ gyp_vars = {
     'build_libvpx': 0,
     'build_libyuv': 0,
     'libyuv_dir': '/media/libyuv',
     'yuv_disable_avx2': 0 if CONFIG['HAVE_X86_AVX2'] else 1,
     # don't use openssl
     'use_openssl': 0,
 
     # saves 4MB when webrtc_trace is off
-    'enable_lazy_trace_alloc': 1,
+    'enable_lazy_trace_alloc': 0,
 
      # turn off mandatory use of NEON and instead use NEON detection
     'arm_neon': 0,
     'arm_neon_optional': 1,
 
     'moz_widget_toolkit_gonk': 0,
     'moz_webrtc_omx': 0,
 
--- a/content/media/AudioSegment.h
+++ b/content/media/AudioSegment.h
@@ -173,48 +173,57 @@ public:
 
   AudioSegment() : MediaSegmentBase<AudioSegment, AudioChunk>(AUDIO) {}
 
   // Resample the whole segment in place.
   template<typename T>
   void Resample(SpeexResamplerState* aResampler, uint32_t aInRate, uint32_t aOutRate)
   {
     mDuration = 0;
+#ifdef DEBUG
+    uint32_t segmentChannelCount = ChannelCount();
+#endif
 
     for (ChunkIterator ci(*this); !ci.IsEnded(); ci.Next()) {
       nsAutoTArray<nsTArray<T>, GUESS_AUDIO_CHANNELS> output;
       nsAutoTArray<const T*, GUESS_AUDIO_CHANNELS> bufferPtrs;
       AudioChunk& c = *ci;
       // If this chunk is null, don't bother resampling, just alter its duration
       if (c.IsNull()) {
         c.mDuration *= aOutRate / aInRate;
         mDuration += c.mDuration;
         continue;
       }
       uint32_t channels = c.mChannelData.Length();
+      MOZ_ASSERT(channels == segmentChannelCount);
       output.SetLength(channels);
       bufferPtrs.SetLength(channels);
-      uint32_t inFrames = c.mDuration,
-      outFrames = c.mDuration * aOutRate / aInRate;
+      uint32_t inFrames = c.mDuration;
+      // Round up to allocate; the last frame may not be used.
+      NS_ASSERTION((UINT32_MAX - aInRate + 1) / c.mDuration >= aOutRate,
+                   "Dropping samples");
+      uint32_t outSize = (c.mDuration * aOutRate + aInRate - 1) / aInRate;
       for (uint32_t i = 0; i < channels; i++) {
         const T* in = static_cast<const T*>(c.mChannelData[i]);
-        T* out = output[i].AppendElements(outFrames);
+        T* out = output[i].AppendElements(outSize);
+        uint32_t outFrames = outSize;
 
         dom::WebAudioUtils::SpeexResamplerProcess(aResampler, i,
                                                   in, &inFrames,
                                                   out, &outFrames);
-
+        MOZ_ASSERT(inFrames == c.mDuration);
         bufferPtrs[i] = out;
         output[i].SetLength(outFrames);
       }
+      MOZ_ASSERT(channels > 0);
+      c.mDuration = output[0].Length();
       c.mBuffer = new mozilla::SharedChannelArrayBuffer<T>(&output);
       for (uint32_t i = 0; i < channels; i++) {
         c.mChannelData[i] = bufferPtrs[i];
       }
-      c.mDuration = outFrames;
       mDuration += c.mDuration;
     }
   }
 
   void ResampleChunks(SpeexResamplerState* aResampler);
 
   void AppendFrames(already_AddRefed<ThreadSharedObject> aBuffer,
                     const nsTArray<const float*>& aChannelData,
--- a/content/media/MediaStreamGraph.cpp
+++ b/content/media/MediaStreamGraph.cpp
@@ -2284,30 +2284,35 @@ SourceMediaStream::AddTrack(TrackID aID,
 void
 SourceMediaStream::ResampleAudioToGraphSampleRate(TrackData* aTrackData, MediaSegment* aSegment)
 {
   if (aSegment->GetType() != MediaSegment::AUDIO ||
       aTrackData->mInputRate == GraphImpl()->AudioSampleRate()) {
     return;
   }
   AudioSegment* segment = static_cast<AudioSegment*>(aSegment);
-  if (!aTrackData->mResampler) {
-    int channels = segment->ChannelCount();
+  int channels = segment->ChannelCount();
 
-    // If this segment is just silence, we delay instanciating the resampler.
-    if (channels) {
+  // If this segment is just silence, we delay instantiating the resampler.
+  if (channels) {
+    if (aTrackData->mResampler) {
+      MOZ_ASSERT(aTrackData->mResamplerChannelCount == segment->ChannelCount());
+    } else {
       SpeexResamplerState* state = speex_resampler_init(channels,
                                                         aTrackData->mInputRate,
                                                         GraphImpl()->AudioSampleRate(),
                                                         SPEEX_RESAMPLER_QUALITY_DEFAULT,
                                                         nullptr);
       if (!state) {
         return;
       }
       aTrackData->mResampler.own(state);
+#ifdef DEBUG
+      aTrackData->mResamplerChannelCount = channels;
+#endif
     }
   }
   segment->ResampleChunks(aTrackData->mResampler);
 }
 
 bool
 SourceMediaStream::AppendToTrack(TrackID aID, MediaSegment* aSegment, MediaSegment *aRawSegment)
 {
--- a/content/media/MediaStreamGraph.h
+++ b/content/media/MediaStreamGraph.h
@@ -795,16 +795,19 @@ public:
     // Sample rate of the input data.
     TrackRate mInputRate;
     // Sample rate of the output data, always equal to the sample rate of the
     // graph.
     TrackRate mOutputRate;
     // Resampler if the rate of the input track does not match the
     // MediaStreamGraph's.
     nsAutoRef<SpeexResamplerState> mResampler;
+#ifdef DEBUG
+    int mResamplerChannelCount;
+#endif
     TrackTicks mStart;
     // Each time the track updates are flushed to the media graph thread,
     // this is cleared.
     uint32_t mCommands;
     // Each time the track updates are flushed to the media graph thread,
     // the segment buffer is emptied.
     nsAutoPtr<MediaSegment> mData;
     nsTArray<ThreadAndRunnable> mDispatchWhenNotEnough;
--- a/content/media/fmp4/ffmpeg/FFmpegH264Decoder.cpp
+++ b/content/media/fmp4/ffmpeg/FFmpegH264Decoder.cpp
@@ -66,42 +66,40 @@ FFmpegH264Decoder::DecodeFrame(mp4_demux
     avcodec_decode_video2(&mCodecContext, frame, &decoded, &packet);
 
   if (bytesConsumed < 0) {
     NS_WARNING("FFmpeg video decoder error.");
     mCallback->Error();
     return;
   }
 
-  if (!decoded) {
-    // The decoder doesn't have enough data to decode a frame yet.
-    return;
-  }
+  // If we've decoded a frame then we need to output it
+  if (decoded) {
+    nsAutoPtr<VideoData> data;
 
-  nsAutoPtr<VideoData> data;
-
-  VideoInfo info;
-  info.mDisplay = nsIntSize(mCodecContext.width, mCodecContext.height);
-  info.mStereoMode = StereoMode::MONO;
-  info.mHasVideo = true;
+    VideoInfo info;
+    info.mDisplay = nsIntSize(mCodecContext.width, mCodecContext.height);
+    info.mStereoMode = StereoMode::MONO;
+    info.mHasVideo = true;
 
-  data = VideoData::CreateFromImage(
-    info, mImageContainer, aSample->byte_offset, aSample->composition_timestamp,
-    aSample->duration, mCurrentImage, aSample->is_sync_point, -1,
-    gfx::IntRect(0, 0, mCodecContext.width, mCodecContext.height));
+    data = VideoData::CreateFromImage(
+      info, mImageContainer, aSample->byte_offset, aSample->composition_timestamp,
+      aSample->duration, mCurrentImage, aSample->is_sync_point, -1,
+      gfx::IntRect(0, 0, mCodecContext.width, mCodecContext.height));
+
+    // Insert the frame into the heap for reordering.
+    mDelayedFrames.Push(data.forget());
 
-  // Insert the frame into the heap for reordering.
-  mDelayedFrames.Push(data.forget());
-
-  // Reorder video frames from decode order to presentation order. The minimum
-  // size of the heap comes from one P frame + |max_b_frames| B frames, which
-  // is the maximum number of frames in a row which will be out-of-order.
-  if (mDelayedFrames.Length() > (uint32_t)mCodecContext.max_b_frames + 1) {
-    VideoData* d = mDelayedFrames.Pop();
-    mCallback->Output(d);
+    // Reorder video frames from decode order to presentation order. The minimum
+    // size of the heap comes from one P frame + |max_b_frames| B frames, which
+    // is the maximum number of frames in a row which will be out-of-order.
+    if (mDelayedFrames.Length() > (uint32_t)mCodecContext.max_b_frames + 1) {
+      VideoData* d = mDelayedFrames.Pop();
+      mCallback->Output(d);
+    }
   }
 
   if (mTaskQueue->IsEmpty()) {
     mCallback->InputExhausted();
   }
 }
 
 static void
--- a/content/media/webrtc/MediaEngineWebRTC.cpp
+++ b/content/media/webrtc/MediaEngineWebRTC.cpp
@@ -22,25 +22,16 @@ GetUserMediaLog()
 {
   static PRLogModuleInfo *sLog;
   if (!sLog)
     sLog = PR_NewLogModule("GetUserMedia");
   return sLog;
 }
 #endif
 
-static PRLogModuleInfo*
-GetWebrtcTraceLog()
-{
-  static PRLogModuleInfo *sLog;
-  if (!sLog)
-    sLog = PR_NewLogModule("webrtc_trace");
-  return sLog;
-}
-
 #include "MediaEngineWebRTC.h"
 #include "ImageContainer.h"
 #include "nsIComponentRegistrar.h"
 #include "MediaEngineTabVideoSource.h"
 #include "nsITabSource.h"
 #include "MediaTrackConstraints.h"
 
 #ifdef MOZ_WIDGET_ANDROID
@@ -70,24 +61,16 @@ MediaEngineWebRTC::MediaEngineWebRTC(Med
 #else
   AsyncLatencyLogger::Get()->AddRef();
 #endif
   // XXX
   gFarendObserver = new AudioOutputObserver();
 }
 
 void
-MediaEngineWebRTC::Print(webrtc::TraceLevel level, const char* message, int length)
-{
-  PRLogModuleInfo *log = GetWebrtcTraceLog();
-  // XXX look at log level?
-  PR_LOG(log, PR_LOG_DEBUG, ("%s", message));
-}
-
-void
 MediaEngineWebRTC::EnumerateVideoDevices(nsTArray<nsRefPtr<MediaEngineVideoSource> >* aVSources)
 {
 #ifdef MOZ_B2G_CAMERA
   MutexAutoLock lock(mMutex);
 
   /**
    * We still enumerate every time, in case a new device was plugged in since
    * the last call. TODO: Verify that WebRTC actually does deal with hotplugging
@@ -140,36 +123,16 @@ MediaEngineWebRTC::EnumerateVideoDevices
   }
 #endif
   if (!mVideoEngine) {
     if (!(mVideoEngine = webrtc::VideoEngine::Create())) {
       return;
     }
   }
 
-  PRLogModuleInfo *logs = GetWebrtcTraceLog();
-  if (!gWebrtcTraceLoggingOn && logs && logs->level > 0) {
-    // no need to a critical section or lock here
-    gWebrtcTraceLoggingOn = 1;
-
-    const char *file = PR_GetEnv("WEBRTC_TRACE_FILE");
-    if (!file) {
-      file = "WebRTC.log";
-    }
-
-    LOG(("%s Logging webrtc to %s level %d", __FUNCTION__, file, logs->level));
-
-    mVideoEngine->SetTraceFilter(logs->level);
-    if (strcmp(file, "nspr") == 0) {
-      mVideoEngine->SetTraceCallback(this);
-    } else {
-      mVideoEngine->SetTraceFile(file);
-    }
-  }
-
   ptrViEBase = webrtc::ViEBase::GetInterface(mVideoEngine);
   if (!ptrViEBase) {
     return;
   }
 
   if (!mVideoEngineInit) {
     if (ptrViEBase->Init() < 0) {
       return;
@@ -277,36 +240,16 @@ MediaEngineWebRTC::EnumerateAudioDevices
 
   if (!mVoiceEngine) {
     mVoiceEngine = webrtc::VoiceEngine::Create();
     if (!mVoiceEngine) {
       return;
     }
   }
 
-  PRLogModuleInfo *logs = GetWebrtcTraceLog();
-  if (!gWebrtcTraceLoggingOn && logs && logs->level > 0) {
-    // no need to a critical section or lock here
-    gWebrtcTraceLoggingOn = 1;
-
-    const char *file = PR_GetEnv("WEBRTC_TRACE_FILE");
-    if (!file) {
-      file = "WebRTC.log";
-    }
-
-    LOG(("Logging webrtc to %s level %d", __FUNCTION__, file, logs->level));
-
-    mVoiceEngine->SetTraceFilter(logs->level);
-    if (strcmp(file, "nspr") == 0) {
-      mVoiceEngine->SetTraceCallback(this);
-    } else {
-      mVoiceEngine->SetTraceFile(file);
-    }
-  }
-
   ptrVoEBase = webrtc::VoEBase::GetInterface(mVoiceEngine);
   if (!ptrVoEBase) {
     return;
   }
 
   if (!mAudioEngineInit) {
     if (ptrVoEBase->Init() < 0) {
       return;
--- a/content/media/webrtc/MediaEngineWebRTC.h
+++ b/content/media/webrtc/MediaEngineWebRTC.h
@@ -353,32 +353,28 @@ private:
   webrtc::EcModes  mEchoCancel;
   webrtc::AgcModes mAGC;
   webrtc::NsModes  mNoiseSuppress;
   int32_t mPlayoutDelay;
 
   NullTransport *mNullTransport;
 };
 
-class MediaEngineWebRTC : public MediaEngine,
-                          public webrtc::TraceCallback
+class MediaEngineWebRTC : public MediaEngine
 {
 public:
   MediaEngineWebRTC(MediaEnginePrefs &aPrefs);
 
   // Clients should ensure to clean-up sources video/audio sources
   // before invoking Shutdown on this class.
   void Shutdown();
 
   virtual void EnumerateVideoDevices(nsTArray<nsRefPtr<MediaEngineVideoSource> >*);
   virtual void EnumerateAudioDevices(nsTArray<nsRefPtr<MediaEngineAudioSource> >*);
 
-  // Webrtc trace callbacks for proxying to NSPR
-  virtual void Print(webrtc::TraceLevel level, const char* message, int length);
-
 private:
   ~MediaEngineWebRTC() {
     Shutdown();
 #ifdef MOZ_B2G_CAMERA
     AsyncLatencyLogger::Get()->Release();
 #endif
     // XXX
     gFarendObserver = nullptr;
--- a/dom/media/MediaManager.cpp
+++ b/dom/media/MediaManager.cpp
@@ -36,28 +36,28 @@
 
 // For PR_snprintf
 #include "prprf.h"
 
 #include "nsJSUtils.h"
 #include "nsDOMFile.h"
 #include "nsGlobalWindow.h"
 
-#include "mozilla/Preferences.h"
-
 /* Using WebRTC backend on Desktops (Mac, Windows, Linux), otherwise default */
 #include "MediaEngineDefault.h"
 #if defined(MOZ_WEBRTC)
 #include "MediaEngineWebRTC.h"
 #endif
 
 #ifdef MOZ_B2G
 #include "MediaPermissionGonk.h"
 #endif
 
+#include "browser_logging/WebRtcLog.h"
+
 // GetCurrentTime is defined in winbase.h as zero argument macro forwarding to
 // GetTickCount() and conflicts with MediaStream::GetCurrentTime.
 #ifdef GetCurrentTime
 #undef GetCurrentTime
 #endif
 
 // XXX Workaround for bug 986974 to maintain the existing broken semantics
 template<>
@@ -1572,16 +1572,18 @@ MediaManager::GetUserMedia(bool aPrivile
       array->AppendElement(callID);
     }
     nsCOMPtr<nsIObserverService> obs = services::GetObserverService();
     nsRefPtr<GetUserMediaRequest> req = new GetUserMediaRequest(aWindow,
                                                                 callID, c, isHTTPS);
     obs->NotifyObservers(req, "getUserMedia:request", nullptr);
   }
 
+  EnableWebRtcLog();
+
   return NS_OK;
 }
 
 nsresult
 MediaManager::GetUserMediaDevices(nsPIDOMWindow* aWindow,
   const MediaStreamConstraints& aConstraints,
   nsIGetUserMediaDevicesSuccessCallback* aOnSuccess,
   nsIDOMGetUserMediaErrorCallback* aOnError,
--- a/dom/webidl/LegacyQueryInterface.webidl
+++ b/dom/webidl/LegacyQueryInterface.webidl
@@ -31,23 +31,16 @@ DocumentFragment implements LegacyQueryI
 DocumentType implements LegacyQueryInterface;
 Element implements LegacyQueryInterface;
 Event implements LegacyQueryInterface;
 EventSource implements LegacyQueryInterface;
 FileList implements LegacyQueryInterface;
 FormData implements LegacyQueryInterface;
 HTMLCollection implements LegacyQueryInterface;
 History implements LegacyQueryInterface;
-IDBCursor implements LegacyQueryInterface;
-IDBDatabase implements LegacyQueryInterface;
-IDBFactory implements LegacyQueryInterface;
-IDBIndex implements LegacyQueryInterface;
-IDBObjectStore implements LegacyQueryInterface;
-IDBRequest implements LegacyQueryInterface;
-IDBTransaction implements LegacyQueryInterface;
 MimeTypeArray implements LegacyQueryInterface;
 MozNamedAttrMap implements LegacyQueryInterface;
 MutationObserver implements LegacyQueryInterface;
 MutationRecord implements LegacyQueryInterface;
 Navigator implements LegacyQueryInterface;
 NodeIterator implements LegacyQueryInterface;
 NodeList implements LegacyQueryInterface;
 Notification implements LegacyQueryInterface;
--- a/dom/webidl/WebrtcGlobalInformation.webidl
+++ b/dom/webidl/WebrtcGlobalInformation.webidl
@@ -16,11 +16,19 @@ interface WebrtcGlobalInformation {
 
   [Throws]
   static void getAllStats(WebrtcGlobalStatisticsCallback callback,
                           optional DOMString pcIdFilter);
 
   [Throws]
   static void getLogging(DOMString pattern,
                          WebrtcGlobalLoggingCallback callback);
+
+  // NSPR WebRTC Trace debug level (0 - 65535)
+  //
+  // Notes:
+  // - Setting a non-zero debug level turns on gathering of log for file output.
+  // - Subsequently setting a zero debug level writes that log to disk.
+
+  static attribute long debugLevel;
 };
 
 
--- a/layout/inspector/inDOMUtils.cpp
+++ b/layout/inspector/inDOMUtils.cpp
@@ -873,8 +873,27 @@ NS_IMETHODIMP
 inDOMUtils::ParseStyleSheet(nsIDOMCSSStyleSheet *aSheet,
                             const nsAString& aInput)
 {
   nsRefPtr<nsCSSStyleSheet> sheet = do_QueryObject(aSheet);
   NS_ENSURE_ARG_POINTER(sheet);
 
   return sheet->ParseSheet(aInput);
 }
+
+NS_IMETHODIMP
+inDOMUtils::ScrollElementIntoView(nsIDOMElement *aElement)
+{
+  nsCOMPtr<nsIContent> content = do_QueryInterface(aElement);
+  NS_ENSURE_ARG_POINTER(content);
+
+  nsIPresShell* presShell = content->OwnerDoc()->GetShell();
+  if (!presShell) {
+    return NS_OK;
+  }
+
+  presShell->ScrollContentIntoView(content,
+                                   nsIPresShell::ScrollAxis(),
+                                   nsIPresShell::ScrollAxis(),
+                                   nsIPresShell::SCROLL_OVERFLOW_HIDDEN);
+
+  return NS_OK;
+}
--- a/layout/inspector/inIDOMUtils.idl
+++ b/layout/inspector/inIDOMUtils.idl
@@ -11,17 +11,17 @@ interface nsIDOMElement;
 interface nsIDOMDocument;
 interface nsIDOMCSSStyleRule;
 interface nsIDOMNode;
 interface nsIDOMNodeList;
 interface nsIDOMFontFaceList;
 interface nsIDOMRange;
 interface nsIDOMCSSStyleSheet;
 
-[scriptable, uuid(ceae6c68-f5d4-4597-a3d9-ca5646c25f1a)]
+[scriptable, uuid(948792a7-b786-4a2c-910d-f55b0773c2ca)]
 interface inIDOMUtils : nsISupports
 {
   // CSS utilities
   void getAllStyleSheets (in nsIDOMDocument aDoc,
                           [optional] out unsigned long aLength,
                           [array, size_is (aLength), retval] out nsISupports aSheets);
   nsISupportsArray getCSSStyleRules(in nsIDOMElement aElement, [optional] in DOMString aPseudo);
   unsigned long getRuleLine(in nsIDOMCSSStyleRule aRule);
@@ -98,13 +98,20 @@ interface inIDOMUtils : nsISupports
   /**
    * Parse CSS and update the style sheet in place.
    *
    * @param DOMCSSStyleSheet aSheet
    * @param DOMString aInput
    *        The new source string for the style sheet.
    */
   void parseStyleSheet(in nsIDOMCSSStyleSheet aSheet, in DOMString aInput);
+  /**
+   * Scroll an element completely into view, if possible.
+   * This is similar to ensureElementIsVisible but for all ancestors.
+   *
+   * @param DOMElement aElement
+   */
+  void scrollElementIntoView(in nsIDOMElement aElement);
 };
 
 %{ C++
 #define IN_DOMUTILS_CONTRACTID "@mozilla.org/inspector/dom-utils;1"
 %}
--- a/media/libstagefright/frameworks/av/media/libstagefright/MPEG4Extractor.cpp
+++ b/media/libstagefright/frameworks/av/media/libstagefright/MPEG4Extractor.cpp
@@ -722,16 +722,21 @@ static bool underMetaDataPath(const Vect
         && path[2] == FOURCC('m', 'e', 't', 'a')
         && path[3] == FOURCC('i', 'l', 's', 't');
 }
 
 // Given a time in seconds since Jan 1 1904, produce a human-readable string.
 static void convertTimeToDate(int64_t time_1904, String8 *s) {
     time_t time_1970 = time_1904 - (((66 * 365 + 17) * 24) * 3600);
 
+    if (time_1970 < 0) {
+        s->clear();
+        return;
+    }
+
     char tmp[32];
     strftime(tmp, sizeof(tmp), "%Y%m%dT%H%M%S.000Z", gmtime(&time_1970));
 
     s->setTo(tmp);
 }
 
 status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
     ALOGV("entering parseChunk %lld/%d", *offset, depth);
@@ -1649,18 +1654,19 @@ status_t MPEG4Extractor::parseChunk(off6
                 return ERROR_MALFORMED;
             } else {
                 creationTime = U32_AT(&header[4]);
                 mHeaderTimescale = U32_AT(&header[12]);
             }
 
             String8 s;
             convertTimeToDate(creationTime, &s);
-
-            mFileMetaData->setCString(kKeyDate, s.string());
+            if (s.length()) {
+                mFileMetaData->setCString(kKeyDate, s.string());
+            }
 
             *offset += chunk_size;
             break;
         }
 
         case FOURCC('m', 'd', 'a', 't'):
         {
             ALOGV("mdat chunk, drm: %d", mIsDrm);
--- a/media/libstagefright/patches/frameworks/av.patch
+++ b/media/libstagefright/patches/frameworks/av.patch
@@ -573,17 +573,17 @@ index 4a0c35c..ccf60e3 100644
      return OK;
  }
  
 -}  // namespace android
 +}  // namespace stagefright
  
 +#undef LOG_TAG
 diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp
-index ad985ee..f0165e5 100644
+index ad985ee..71a0613 100644
 --- a/media/libstagefright/MPEG4Extractor.cpp
 +++ b/media/libstagefright/MPEG4Extractor.cpp
 @@ -15,6 +15,7 @@
   */
  
  //#define LOG_NDEBUG 0
 +#undef LOG_TAG
  #define LOG_TAG "MPEG4Extractor"
@@ -593,52 +593,76 @@ index ad985ee..f0165e5 100644
  #include <media/stagefright/MetaData.h>
  #include <utils/String8.h>
  
 -namespace android {
 +namespace stagefright {
  
  class MPEG4Source : public MediaSource {
  public:
-@@ -1248,6 +1249,7 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
+@@ -726,6 +727,11 @@ static bool underMetaDataPath(const Vector<uint32_t> &path) {
+ static void convertTimeToDate(int64_t time_1904, String8 *s) {
+     time_t time_1970 = time_1904 - (((66 * 365 + 17) * 24) * 3600);
+ 
++    if (time_1970 < 0) {
++      s->clear();
++      return;
++    }
++
+     char tmp[32];
+     strftime(tmp, sizeof(tmp), "%Y%m%dT%H%M%S.000Z", gmtime(&time_1970));
+ 
+@@ -1248,6 +1254,7 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
              ALOGV("*** coding='%s' %d channels, size %d, rate %d\n",
                     chunk, num_channels, sample_size, sample_rate);
              mLastTrack->meta->setInt32(kKeyChannelCount, num_channels);
 +            mLastTrack->meta->setInt32(kKeySampleSize, sample_size);
              mLastTrack->meta->setInt32(kKeySampleRate, sample_rate);
  
              off64_t stop_offset = *offset + chunk_size;
-@@ -2278,6 +2280,10 @@ status_t MPEG4Extractor::updateAudioTrackInfoFromESDS_MPEG4Audio(
+@@ -1652,8 +1659,9 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
+ 
+             String8 s;
+             convertTimeToDate(creationTime, &s);
+-
+-            mFileMetaData->setCString(kKeyDate, s.string());
++            if (s.length()) {
++                mFileMetaData->setCString(kKeyDate, s.string());
++            }
+ 
+             *offset += chunk_size;
+             break;
+@@ -2278,6 +2286,10 @@ status_t MPEG4Extractor::updateAudioTrackInfoFromESDS_MPEG4Audio(
          objectType = 32 + br.getBits(6);
      }
  
 +    if (objectType >= 1 && objectType <= 4) {
 +      mLastTrack->meta->setInt32(kKeyAACProfile, objectType);
 +    }
 +
      uint32_t freqIndex = br.getBits(4);
  
      int32_t sampleRate = 0;
-@@ -3154,6 +3160,7 @@ status_t MPEG4Source::read(
+@@ -3154,6 +3166,7 @@ status_t MPEG4Source::read(
              CHECK(mBuffer != NULL);
              mBuffer->set_range(0, size);
              mBuffer->meta_data()->clear();
 +            mBuffer->meta_data()->setInt64(kKey64BitFileOffset, offset);
              mBuffer->meta_data()->setInt64(
                      kKeyTime, ((int64_t)cts * 1000000) / mTimescale);
  
-@@ -3276,6 +3283,7 @@ status_t MPEG4Source::read(
+@@ -3276,6 +3289,7 @@ status_t MPEG4Source::read(
          }
  
          mBuffer->meta_data()->clear();
 +        mBuffer->meta_data()->setInt64(kKey64BitFileOffset, offset);
          mBuffer->meta_data()->setInt64(
                  kKeyTime, ((int64_t)cts * 1000000) / mTimescale);
  
-@@ -3360,6 +3368,18 @@ status_t MPEG4Source::fragmentedRead(
+@@ -3360,6 +3374,18 @@ status_t MPEG4Source::fragmentedRead(
              // move to next fragment
              Sample lastSample = mCurrentSamples[mCurrentSamples.size() - 1];
              off64_t nextMoof = mNextMoofOffset; // lastSample.offset + lastSample.size;
 +
 +            // If we're pointing to a sidx box then we skip it.
 +            uint32_t hdr[2];
 +            if (mDataSource->readAt(nextMoof, hdr, 8) < 8) {
 +                return ERROR_END_OF_STREAM;
@@ -647,25 +671,25 @@ index ad985ee..f0165e5 100644
 +            uint32_t chunk_type = ntohl(hdr[1]);
 +            if (chunk_type == FOURCC('s', 'i', 'd', 'x')) {
 +                nextMoof += chunk_size;
 +            }
 +
              mCurrentMoofOffset = nextMoof;
              mCurrentSamples.clear();
              mCurrentSampleIndex = 0;
-@@ -3626,6 +3646,7 @@ static bool isCompatibleBrand(uint32_t fourcc) {
+@@ -3626,6 +3652,7 @@ static bool isCompatibleBrand(uint32_t fourcc) {
      return false;
  }
  
 +#if 0
  // Attempt to actually parse the 'ftyp' atom and determine if a suitable
  // compatible brand is present.
  // Also try to identify where this file's metadata ends
-@@ -3756,5 +3777,8 @@ bool SniffMPEG4(
+@@ -3756,5 +3783,8 @@ bool SniffMPEG4(
  
      return false;
  }
 +#endif
 +
 +}  // namespace stagefright
  
 -}  // namespace android
--- a/media/webrtc/signaling/signaling.gyp
+++ b/media/webrtc/signaling/signaling.gyp
@@ -94,16 +94,18 @@
         './src/common/csf_common.h',
         './src/common/NullDeleter.h',
         './src/common/Wrapper.h',
         './src/common/NullTransport.h',
         './src/common/YuvStamper.cpp',
         # Browser Logging
         './src/common/browser_logging/CSFLog.cpp',
         './src/common/browser_logging/CSFLog.h',
+        './src/common/browser_logging/WebRtcLog.cpp',
+        './src/common/browser_logging/WebRtcLog.h',
         # Browser Logging
         './src/common/time_profiling/timecard.c',
         './src/common/time_profiling/timecard.h',
         # Call Control
         './src/callcontrol/CC_CallTypes.cpp',
         './src/callcontrol/CallControlManager.cpp',
         './src/callcontrol/CallControlManagerImpl.cpp',
         './src/callcontrol/ECC_Types.cpp',
--- a/media/webrtc/signaling/src/common/browser_logging/CSFLog.cpp
+++ b/media/webrtc/signaling/src/common/browser_logging/CSFLog.cpp
@@ -30,17 +30,16 @@ PRLogModuleInfo *GetSignalingLogInfo()
 {
   if (gLogModuleInfo == nullptr)
     gLogModuleInfo = PR_NewLogModule("signaling");
 
   return gLogModuleInfo;
 }
 
 static PRLogModuleInfo *gWebRTCLogModuleInfo = nullptr;
-int gWebrtcTraceLoggingOn = 0;
 
 PRLogModuleInfo *GetWebRTCLogInfo()
 {
   if (gWebRTCLogModuleInfo == nullptr)
     gWebRTCLogModuleInfo = PR_NewLogModule("webrtc_trace");
 
   return gWebRTCLogModuleInfo;
 }
--- a/media/webrtc/signaling/src/common/browser_logging/CSFLog.h
+++ b/media/webrtc/signaling/src/common/browser_logging/CSFLog.h
@@ -23,18 +23,16 @@ typedef enum{
     CSF_LOG_ERROR,
     CSF_LOG_WARNING,
     CSF_LOG_NOTICE,
     CSF_LOG_INFO,
     CSF_LOG_DEBUG,
     CSF_LOG_OBNOXIOUS
 } CSFLogLevel;
 
-
-
 #define CSFLogError(tag , format, ...) CSFLog( CSF_LOG_ERROR, __FILE__ , __LINE__ , tag , format , ## __VA_ARGS__ )
 #define CSFLogErrorV(tag , format, va_list_arg) CSFLogV(CSF_LOG_ERROR, __FILE__ , __LINE__ , tag , format , va_list_arg )
 #define CSFLogWarn(tag , format, ...) CSFLog( CSF_LOG_WARNING, __FILE__ , __LINE__ , tag , format , ## __VA_ARGS__ )
 #define CSFLogWarnV(tag , format, va_list_arg) CSFLogV(CSF_LOG_WARNING, __FILE__ , __LINE__ , tag , format , va_list_arg )
 #define CSFLogNotice(tag , format, ...) CSFLog( CSF_LOG_NOTICE, __FILE__ , __LINE__ , tag , format , ## __VA_ARGS__ )
 #define CSFLogNoticeV(tag , format, va_list_arg) CSFLogV(CSF_LOG_NOTICE, __FILE__ , __LINE__ , tag , format , va_list_arg )
 #define CSFLogInfo(tag , format, ...) CSFLog( CSF_LOG_INFO, __FILE__ , __LINE__ , tag , format , ## __VA_ARGS__ )
 #define CSFLogInfoV(tag , format, va_list_arg) CSFLogV(CSF_LOG_INFO, __FILE__ , __LINE__ , tag , format , va_list_arg )
@@ -47,19 +45,19 @@ typedef enum{
 extern "C"
 {
 #endif
 void CSFLog( CSFLogLevel priority, const char* sourceFile, int sourceLine, const char* tag , const char* format, ...)
 #ifdef __GNUC__
   __attribute__ ((format (printf, 5, 6)))
 #endif
 ;
+
 void CSFLogV( CSFLogLevel priority, const char* sourceFile, int sourceLine, const char* tag , const char* format, va_list args);
 
 PRLogModuleInfo *GetSignalingLogInfo();
 PRLogModuleInfo *GetWebRTCLogInfo();
-extern int gWebrtcTraceLoggingOn;
 
 #ifdef __cplusplus
 }
 #endif
 
 #endif
new file mode 100644
--- /dev/null
+++ b/media/webrtc/signaling/src/common/browser_logging/WebRtcLog.cpp
@@ -0,0 +1,164 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "WebRtcLog.h"
+
+#include "prlog.h"
+#include "prenv.h"
+#include "webrtc/system_wrappers/interface/trace.h"
+
+#include "nscore.h"
+#ifdef MOZILLA_INTERNAL_API
+#include "nsString.h"
+#include "mozilla/Preferences.h"
+#else
+#include "nsStringAPI.h"
+#endif
+
+static int gWebRtcTraceLoggingOn = 0;
+static const char *default_log = "WebRTC.log";
+
+static PRLogModuleInfo* GetWebRtcTraceLog()
+{
+  static PRLogModuleInfo *sLog;
+  if (!sLog) {
+    sLog = PR_NewLogModule("webrtc_trace");
+  }
+  return sLog;
+}
+
+class WebRtcTraceCallback: public webrtc::TraceCallback
+{
+public:
+  void Print(webrtc::TraceLevel level, const char* message, int length)
+  {
+    PRLogModuleInfo *log = GetWebRtcTraceLog();
+    PR_LOG(log, PR_LOG_DEBUG, ("%s", message));
+    return;
+  }
+};
+
+static WebRtcTraceCallback gWebRtcCallback;
+
+#ifdef MOZILLA_INTERNAL_API
+void GetWebRtcLogPrefs(uint32_t *aTraceMask, nsACString* aLogFile, bool *aMultiLog)
+{
+  *aMultiLog = mozilla::Preferences::GetBool("media.webrtc.debug.multi_log");
+  *aTraceMask = mozilla::Preferences::GetUint("media.webrtc.debug.trace_mask");
+  mozilla::Preferences::GetCString("media.webrtc.debug.log_file", aLogFile);
+}
+#endif
+
+void CheckOverrides(uint32_t *aTraceMask, nsACString *aLogFile, bool *aMultiLog)
+{
+  if (!aTraceMask || !aLogFile || !aMultiLog) {
+    return;
+  }
+
+  // Override or fill in attributes from the environment if possible.
+
+  PRLogModuleInfo *log_info = GetWebRtcTraceLog();
+  /* When webrtc_trace:x is not part of the NSPR_LOG_MODULES var the structure returned from
+     the GetWebRTCLogInfo call will be non-null and show a level of 0. This cannot
+     be reliably used to turn off the trace and override a log level from about:config as
+     there is no way to differentiate between NSPR_LOG_MODULES=webrtc_trace:0 and the complete
+     absence of the webrtc_trace in the environment string at all.
+  */
+  if (log_info && (log_info->level != 0)) {
+    *aTraceMask = log_info->level;
+  }
+
+  const char *file_name = PR_GetEnv("WEBRTC_TRACE_FILE");
+  if (file_name) {
+    aLogFile->Assign(file_name);
+  }
+}
+
+void ConfigWebRtcLog(uint32_t trace_mask, nsCString &aLogFile, bool multi_log)
+{
+  if (gWebRtcTraceLoggingOn || trace_mask == 0) {
+    return;
+  }
+
+  if (aLogFile.IsEmpty()) {
+#if defined(XP_WIN)
+    // Use the Windows TEMP environment variable as part of the default location.
+    const char *temp_dir = PR_GetEnv("TEMP");
+    if (!temp_dir) {
+      aLogFile.Assign(default_log);
+    } else {
+      aLogFile.Assign(temp_dir);
+      aLogFile.Append('/');
+      aLogFile.Append(default_log);
+    }
+#elif defined(ANDROID)
+    // Special case: use callback to pipe to NSPR logging.
+    aLogFile.Assign("nspr");
+#else
+    // UNIX-like place for the others
+    aLogFile.Assign("/tmp/");
+    aLogFile.Append(default_log);
+#endif
+  }
+
+  webrtc::Trace::set_level_filter(trace_mask);
+  if (aLogFile.EqualsLiteral("nspr")) {
+    webrtc::Trace::SetTraceCallback(&gWebRtcCallback);
+  } else {
+    webrtc::Trace::SetTraceFile(aLogFile.get(), multi_log);
+  }
+
+  return;
+}
+
+void StartWebRtcLog(uint32_t log_level)
+{
+  if (gWebRtcTraceLoggingOn && log_level != 0) {
+    return;
+  }
+
+  if (log_level == 0) { 
+    if (gWebRtcTraceLoggingOn) {
+      gWebRtcTraceLoggingOn = false;
+      webrtc::Trace::set_level_filter(webrtc::kTraceNone);
+    }
+    return;
+  }
+
+  uint32_t trace_mask = 0;
+  bool multi_log = false;
+  nsAutoCString log_file;
+
+#ifdef MOZILLA_INTERNAL_API
+  GetWebRtcLogPrefs(&trace_mask, &log_file, &multi_log);
+#endif
+  CheckOverrides(&trace_mask, &log_file, &multi_log);
+
+  if (trace_mask == 0) {
+    trace_mask = log_level;
+  }
+
+  ConfigWebRtcLog(trace_mask, log_file, multi_log);
+  return;
+
+}
+
+void EnableWebRtcLog()
+{
+  if (gWebRtcTraceLoggingOn) {
+    return;
+  }
+
+  uint32_t trace_mask = 0;
+  bool multi_log = false;
+  nsAutoCString log_file;
+
+#ifdef MOZILLA_INTERNAL_API
+  GetWebRtcLogPrefs(&trace_mask, &log_file, &multi_log);
+#endif
+  CheckOverrides(&trace_mask, &log_file, &multi_log);
+  ConfigWebRtcLog(trace_mask, log_file, multi_log);
+  return;
+}
+
new file mode 100644
--- /dev/null
+++ b/media/webrtc/signaling/src/common/browser_logging/WebRtcLog.h
@@ -0,0 +1,13 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef WEBRTCLOG_H_
+#define WEBRTCLOG_H_
+
+#include "webrtc/common_types.h"
+
+void StartWebRtcLog(uint32_t log_level = webrtc::kTraceDefault);
+void EnableWebRtcLog();
+
+#endif
--- a/media/webrtc/signaling/src/media-conduit/AudioConduit.cpp
+++ b/media/webrtc/signaling/src/media-conduit/AudioConduit.cpp
@@ -20,16 +20,17 @@
 #include "nsThreadUtils.h"
 #ifdef MOZILLA_INTERNAL_API
 #include "Latency.h"
 #include "mozilla/Telemetry.h"
 #endif
 
 #include "webrtc/voice_engine/include/voe_errors.h"
 #include "webrtc/system_wrappers/interface/clock.h"
+#include "browser_logging/WebRtcLog.h"
 
 #ifdef MOZ_WIDGET_ANDROID
 #include "AndroidJNIWrapper.h"
 #endif
 
 namespace mozilla {
 
 static const char* logTag ="WebrtcAudioSessionConduit";
@@ -224,30 +225,17 @@ MediaConduitErrorCode WebrtcAudioConduit
 
     // Per WebRTC APIs below function calls return nullptr on failure
     if(!(mVoiceEngine = webrtc::VoiceEngine::Create()))
     {
       CSFLogError(logTag, "%s Unable to create voice engine", __FUNCTION__);
       return kMediaConduitSessionNotInited;
     }
 
-    PRLogModuleInfo *logs = GetWebRTCLogInfo();
-    if (!gWebrtcTraceLoggingOn && logs && logs->level > 0) {
-      // no need to a critical section or lock here
-      gWebrtcTraceLoggingOn = 1;
-
-      const char *file = PR_GetEnv("WEBRTC_TRACE_FILE");
-      if (!file) {
-        file = "WebRTC.log";
-      }
-      CSFLogDebug(logTag,  "%s Logging webrtc to %s level %d", __FUNCTION__,
-                  file, logs->level);
-      mVoiceEngine->SetTraceFilter(logs->level);
-      mVoiceEngine->SetTraceFile(file);
-    }
+    EnableWebRtcLog();
   }
 
   if(!(mPtrVoEBase = VoEBase::GetInterface(mVoiceEngine)))
   {
     CSFLogError(logTag, "%s Unable to initialize VoEBase", __FUNCTION__);
     return kMediaConduitSessionNotInited;
   }
 
--- a/media/webrtc/signaling/src/media-conduit/VideoConduit.cpp
+++ b/media/webrtc/signaling/src/media-conduit/VideoConduit.cpp
@@ -16,16 +16,17 @@
 #include "YuvStamper.h"
 #include "nsServiceManagerUtils.h"
 #include "nsIPrefService.h"
 #include "nsIPrefBranch.h"
 
 #include "webrtc/common_types.h"
 #include "webrtc/common_video/interface/native_handle.h"
 #include "webrtc/video_engine/include/vie_errors.h"
+#include "browser_logging/WebRtcLog.h"
 
 #ifdef MOZ_WIDGET_ANDROID
 #include "AndroidJNIWrapper.h"
 #endif
 
 #include <algorithm>
 #include <math.h>
 
@@ -300,30 +301,17 @@ MediaConduitErrorCode WebrtcVideoConduit
 
     // Per WebRTC APIs below function calls return nullptr on failure
     if( !(mVideoEngine = webrtc::VideoEngine::Create()) )
     {
       CSFLogError(logTag, "%s Unable to create video engine ", __FUNCTION__);
       return kMediaConduitSessionNotInited;
     }
 
-    PRLogModuleInfo *logs = GetWebRTCLogInfo();
-    if (!gWebrtcTraceLoggingOn && logs && logs->level > 0) {
-      // no need to a critical section or lock here
-      gWebrtcTraceLoggingOn = 1;
-
-      const char *file = PR_GetEnv("WEBRTC_TRACE_FILE");
-      if (!file) {
-        file = "WebRTC.log";
-      }
-      CSFLogDebug(logTag,  "%s Logging webrtc to %s level %d", __FUNCTION__,
-                  file, logs->level);
-      mVideoEngine->SetTraceFilter(logs->level);
-      mVideoEngine->SetTraceFile(file);
-    }
+    EnableWebRtcLog();
   }
 
   if( !(mPtrViEBase = ViEBase::GetInterface(mVideoEngine)))
   {
     CSFLogError(logTag, "%s Unable to get video base interface ", __FUNCTION__);
     return kMediaConduitSessionNotInited;
   }
 
--- a/media/webrtc/signaling/src/peerconnection/WebrtcGlobalInformation.cpp
+++ b/media/webrtc/signaling/src/peerconnection/WebrtcGlobalInformation.cpp
@@ -3,17 +3,17 @@
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "WebrtcGlobalInformation.h"
 
 #include <deque>
 #include <string>
 
 #include "CSFLog.h"
-
+#include "WebRtcLog.h"
 #include "mozilla/dom/WebrtcGlobalInformationBinding.h"
 
 #include "nsAutoPtr.h"
 #include "nsNetCID.h" // NS_SOCKETTRANSPORTSERVICE_CONTRACTID
 #include "nsServiceManagerUtils.h" // do_GetService
 #include "mozilla/ErrorResult.h"
 #include "mozilla/Vector.h"
 #include "nsProxyRelease.h"
@@ -227,16 +227,31 @@ WebrtcGlobalInformation::GetLogging(
 
   if (NS_FAILED(rv)) {
     aLoggingCallback.Release();
   }
 
   aRv = rv;
 }
 
+static int32_t sLastSetLevel = 0;
+
+void
+WebrtcGlobalInformation::SetDebugLevel(const GlobalObject& aGlobal, int32_t aLevel)
+{
+  StartWebRtcLog(webrtc::TraceLevel(aLevel));
+  sLastSetLevel = aLevel;
+}
+
+int32_t
+WebrtcGlobalInformation::DebugLevel(const GlobalObject& aGlobal)
+{
+  return sLastSetLevel;
+}
+
 struct StreamResult {
   StreamResult() : candidateTypeBitpattern(0), streamSucceeded(false) {}
   uint8_t candidateTypeBitpattern;
   bool streamSucceeded;
 };
 
 static void StoreLongTermICEStatisticsImpl_m(
     nsresult result,
--- a/media/webrtc/signaling/src/peerconnection/WebrtcGlobalInformation.h
+++ b/media/webrtc/signaling/src/peerconnection/WebrtcGlobalInformation.h
@@ -28,16 +28,19 @@ public:
                           const Optional<nsAString>& pcIdFilter,
                           ErrorResult& aRv);
 
   static void GetLogging(const GlobalObject& aGlobal,
                          const nsAString& aPattern,
                          WebrtcGlobalLoggingCallback& aLoggingCallback,
                          ErrorResult& aRv);
 
+  static void SetDebugLevel(const GlobalObject& aGlobal, int32_t aLevel);
+  static int32_t DebugLevel(const GlobalObject& aGlobal);
+
   static void StoreLongTermICEStatistics(sipcc::PeerConnectionImpl& aPc);
 
 private:
   WebrtcGlobalInformation() MOZ_DELETE;
   WebrtcGlobalInformation(const WebrtcGlobalInformation& aOrig) MOZ_DELETE;
   WebrtcGlobalInformation& operator=(
     const WebrtcGlobalInformation& aRhs) MOZ_DELETE;
 };
--- a/media/webrtc/trunk/webrtc/system_wrappers/source/trace_impl.cc
+++ b/media/webrtc/trunk/webrtc/system_wrappers/source/trace_impl.cc
@@ -468,27 +468,27 @@ void TraceImpl::AddMessageToList(
   // cache misses/etc
   if (!message_queue_[active_queue_][idx]) {
   return;
 }
 #endif
 
   if (idx >= WEBRTC_TRACE_MAX_QUEUE) {
     if (!trace_file_.Open() && !callback_) {
-      // Keep at least the last 1/4 of old messages when not logging.
+      // Drop the first 1/4 of old messages when not logging.
       // TODO(hellner): isn't this redundant. The user will make it known
       //                when to start logging. Why keep messages before
       //                that?
-      for (int n = 0; n < WEBRTC_TRACE_MAX_QUEUE / 4; ++n) {
-        const int last_quarter_offset = (3 * WEBRTC_TRACE_MAX_QUEUE / 4);
+      for (int n = 0; n < WEBRTC_TRACE_MAX_QUEUE * 3 / 4; ++n) {
+        const int last_quarter_offset = (1 * WEBRTC_TRACE_MAX_QUEUE / 4);
         memcpy(message_queue_[active_queue_][n],
                message_queue_[active_queue_][n + last_quarter_offset],
                WEBRTC_TRACE_MAX_MESSAGE_SIZE);
       }
-      idx = next_free_idx_[active_queue_] = WEBRTC_TRACE_MAX_QUEUE / 4;
+      idx = next_free_idx_[active_queue_] = WEBRTC_TRACE_MAX_QUEUE * 3 / 4;
     } else {
       // More messages are being written than there is room for in the
       // buffer. Drop any new messages.
       // TODO(hellner): its probably better to drop old messages instead
       //                of new ones. One step further: if this happens
       //                it's due to writing faster than what can be
       //                processed. Maybe modify the filter at this point.
       //                E.g. turn of STREAM.
--- a/media/webrtc/trunk/webrtc/system_wrappers/source/trace_impl.h
+++ b/media/webrtc/trunk/webrtc/system_wrappers/source/trace_impl.h
@@ -22,17 +22,17 @@ namespace webrtc {
 
 // TODO(pwestin) WEBRTC_TRACE_MAX_QUEUE needs to be tweaked
 // TODO(hellner) the buffer should be close to how much the system can write to
 //               file. Increasing the buffer will not solve anything. Sooner or
 //               later the buffer is going to fill up anyways.
 #if defined(WEBRTC_IOS)
 #define WEBRTC_TRACE_MAX_QUEUE  2000
 #else
-#define WEBRTC_TRACE_MAX_QUEUE  8000
+#define WEBRTC_TRACE_MAX_QUEUE  16000
 #endif
 #define WEBRTC_TRACE_NUM_ARRAY 2
 #define WEBRTC_TRACE_MAX_MESSAGE_SIZE 256
 // Total buffer size is WEBRTC_TRACE_NUM_ARRAY (number of buffer partitions) *
 // WEBRTC_TRACE_MAX_QUEUE (number of lines per buffer partition) *
 // WEBRTC_TRACE_MAX_MESSAGE_SIZE (number of 1 byte charachters per line) =
 // 1 or 4 Mbyte.
 
--- a/modules/libpref/src/init/all.js
+++ b/modules/libpref/src/init/all.js
@@ -252,16 +252,25 @@ pref("media.navigator.enabled", true);
 pref("media.navigator.video.enabled", true);
 pref("media.navigator.load_adapt", true);
 pref("media.navigator.load_adapt.measure_interval",1000);
 pref("media.navigator.load_adapt.avg_seconds",3);
 pref("media.navigator.load_adapt.high_load","0.90");
 pref("media.navigator.load_adapt.low_load","0.40");
 pref("media.navigator.video.default_fps",30);
 pref("media.navigator.video.default_minfps",10);
+
+pref("media.webrtc.debug.trace_mask", 0);
+pref("media.webrtc.debug.multi_log", false);
+#if defined(ANDROID) || defined(XP_WIN)
+pref("media.webrtc.debug.log_file", "");
+#else
+pref("media.webrtc.debug.log_file", "/tmp/WebRTC.log");
+#endif
+
 #ifdef MOZ_WIDGET_GONK
 pref("media.navigator.video.default_width",320);
 pref("media.navigator.video.default_height",240);
 pref("media.peerconnection.enabled", true);
 pref("media.peerconnection.video.enabled", true);
 pref("media.navigator.video.max_fs", 1200); // 640x480 == 1200mb
 pref("media.navigator.video.max_fr", 30);
 pref("media.peerconnection.video.h264_enabled", false);
--- a/toolkit/content/aboutWebrtc.xhtml
+++ b/toolkit/content/aboutWebrtc.xhtml
@@ -379,22 +379,50 @@ function displayStats(globalReport) {
     if (pcDiv) {
       document.getElementById('stats').replaceChild(newPcDiv, pcDiv);
     } else {
       document.getElementById('stats').appendChild(newPcDiv);
     }
   });
 }
 
+function onLoad() {
+  WebrtcGlobalInformation.getAllStats(displayStats);
+  if (WebrtcGlobalInformation.debugLevel) {
+    setDebugButton(true);
+  } else {
+    setDebugButton(false);
+  }
+}
+
+function startDebugMode() {
+  WebrtcGlobalInformation.debugLevel = 65535;
+  setDebugButton(true);
+}
+
+function stopDebugMode() {
+  WebrtcGlobalInformation.debugLevel = 0;
+  setDebugButton(false);
+}
+
+function setDebugButton(on) {
+  var button = document.getElementById("debug-toggle-button");
+  button.innerHTML = on ? "Stop debug mode" : "Start debug mode";
+  button.onclick = on ? stopDebugMode : startDebugMode;
+}
+
   </script>
 
-  <body id="body" onload="WebrtcGlobalInformation.getAllStats(displayStats)">
+  <body id="body" onload="onLoad()">
     <div id="stats">
     </div>
     <button onclick="WebrtcGlobalInformation.getLogging('', displayLogs)">
-      Show/refresh logging
+      Connection log
+    </button>
+    <button id="debug-toggle-button" onclick="startDebugMode()">
+      Start debug mode
     </button>
     <div id="logs">
     </div>
   </body>
 </html>
 <!-- vim: softtabstop=2:shiftwidth=2:expandtab
 -->