Bug 868405. Support 'enabled' attribute on MediaStreamTrack. r=jesup a=akeybl
authorRobert O'Callahan <robert@ocallahan.org>
Thu, 30 May 2013 16:44:43 +1200
changeset 142791 487b28716f467e580ba4e01a2c68381cb7638243
parent 142790 c9ffcebc6abc0851efe8a41980233e475e725ee8
child 142792 71a5638a8fdf926ac7d75603831e2b184deb5b8c
push id2579
push userakeybl@mozilla.com
push dateMon, 24 Jun 2013 18:52:47 +0000
treeherdermozilla-beta@b69b7de8a05a [default view] [failures only]
perfherder[talos] [build metrics] [platform microbench] (compared to previous push)
reviewersjesup, akeybl
bugs868405
milestone23.0a2
Bug 868405. Support 'enabled' attribute on MediaStreamTrack. r=jesup a=akeybl
content/media/AudioNodeStream.cpp
content/media/MediaStreamGraph.cpp
content/media/MediaStreamGraph.h
content/media/MediaStreamTrack.cpp
content/media/MediaStreamTrack.h
content/media/TrackUnionStream.h
content/media/VideoSegment.cpp
content/media/VideoSegment.h
dom/webidl/MediaStreamTrack.webidl
gfx/layers/ImageContainer.h
media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
--- a/content/media/AudioNodeStream.cpp
+++ b/content/media/AudioNodeStream.cpp
@@ -390,16 +390,22 @@ AudioNodeStream::ProduceOutput(GraphTime
     } else {
       mEngine->ProduceAudioBlocksOnPorts(this, inputChunks, mLastChunks, &finished);
     }
     if (finished) {
       mMarkAsFinishedAfterThisBlock = true;
     }
   }
 
+  if (mDisabledTrackIDs.Contains(AUDIO_NODE_STREAM_TRACK_ID)) {
+    for (uint32_t i = 0; i < mLastChunks.Length(); ++i) {
+      mLastChunks[i].SetNull(WEBAUDIO_BLOCK_SIZE);
+    }
+  }
+
   if (mKind == MediaStreamGraph::EXTERNAL_STREAM) {
     segment->AppendAndConsumeChunk(&mLastChunks[0]);
   } else {
     segment->AppendNullData(mLastChunks[0].GetDuration());
   }
 
   for (uint32_t j = 0; j < mListeners.Length(); ++j) {
     MediaStreamListener* l = mListeners[j];
--- a/content/media/MediaStreamGraph.cpp
+++ b/content/media/MediaStreamGraph.cpp
@@ -139,16 +139,17 @@ MediaStreamGraphImpl::ExtractPendingInpu
             l->NotifyPull(this, t);
           }
         }
       }
     }
     finished = aStream->mUpdateFinished;
     for (int32_t i = aStream->mUpdateTracks.Length() - 1; i >= 0; --i) {
       SourceMediaStream::TrackData* data = &aStream->mUpdateTracks[i];
+      aStream->ApplyTrackDisabling(data->mID, data->mData);
       for (uint32_t j = 0; j < aStream->mListeners.Length(); ++j) {
         MediaStreamListener* l = aStream->mListeners[j];
         TrackTicks offset = (data->mCommands & SourceMediaStream::TRACK_CREATE)
             ? data->mStart : aStream->mBuffer.FindTrack(data->mID)->GetSegment()->GetDuration();
         l->NotifyQueuedTrackChanges(this, data->mID, data->mRate,
                                     offset, data->mCommands, *data->mData);
       }
       if (data->mCommands & SourceMediaStream::TRACK_CREATE) {
@@ -801,16 +802,30 @@ MediaStreamGraphImpl::PlayAudio(MediaStr
                              startTicks, endTicks));
       }
       output.WriteTo(audioOutput.mStream);
       t = end;
     }
   }
 }
 
+static void
+SetImageToBlackPixel(PlanarYCbCrImage* aImage)
+{
+  uint8_t blackPixel[] = { 0x10, 0x80, 0x80 };
+
+  PlanarYCbCrImage::Data data;
+  data.mYChannel = blackPixel;
+  data.mCbChannel = blackPixel + 1;
+  data.mCrChannel = blackPixel + 2;
+  data.mYStride = data.mCbCrStride = 1;
+  data.mPicSize = data.mYSize = data.mCbCrSize = gfxIntSize(1, 1);
+  aImage->SetData(data);
+}
+
 void
 MediaStreamGraphImpl::PlayVideo(MediaStream* aStream)
 {
   MOZ_ASSERT(mRealtime, "Should only attempt to play video in realtime mode");
 
   if (aStream->mVideoOutputs.IsEmpty())
     return;
 
@@ -842,18 +857,33 @@ MediaStreamGraphImpl::PlayVideo(MediaStr
                        aStream, frame->GetImage(), frame->GetIntrinsicSize().width,
                        frame->GetIntrinsicSize().height));
   GraphTime startTime = StreamTimeToGraphTime(aStream,
       track->TicksToTimeRoundDown(start), INCLUDE_TRAILING_BLOCKED_INTERVAL);
   TimeStamp targetTime = mCurrentTimeStamp +
       TimeDuration::FromMilliseconds(double(startTime - mCurrentTime));
   for (uint32_t i = 0; i < aStream->mVideoOutputs.Length(); ++i) {
     VideoFrameContainer* output = aStream->mVideoOutputs[i];
-    output->SetCurrentFrame(frame->GetIntrinsicSize(), frame->GetImage(),
-                            targetTime);
+
+    if (frame->GetForceBlack()) {
+      static const ImageFormat formats[1] = { PLANAR_YCBCR };
+      nsRefPtr<Image> image =
+        output->GetImageContainer()->CreateImage(formats, 1);
+      if (image) {
+        // Sets the image to a single black pixel, which will be scaled to fill
+        // the rendered size.
+        SetImageToBlackPixel(static_cast<PlanarYCbCrImage*>(image.get()));
+      }
+      output->SetCurrentFrame(frame->GetIntrinsicSize(), image,
+                              targetTime);
+    } else {
+      output->SetCurrentFrame(frame->GetIntrinsicSize(), frame->GetImage(),
+                              targetTime);
+    }
+
     nsCOMPtr<nsIRunnable> event =
       NS_NewRunnableMethod(output, &VideoFrameContainer::Invalidate);
     NS_DispatchToMainThread(event, NS_DISPATCH_NORMAL);
   }
   if (!aStream->mNotifiedFinished) {
     aStream->mLastPlayedVideoFrame = *frame;
   }
 }
@@ -1650,16 +1680,73 @@ MediaStream::RemoveListener(MediaStreamL
   // If the stream is destroyed the Listeners have or will be
   // removed.
   if (!IsDestroyed()) {
     GraphImpl()->AppendMessage(new Message(this, aListener));
   }
 }
 
 void
+MediaStream::SetTrackEnabledImpl(TrackID aTrackID, bool aEnabled)
+{
+  if (aEnabled) {
+    mDisabledTrackIDs.RemoveElement(aTrackID);
+  } else {
+    if (!mDisabledTrackIDs.Contains(aTrackID)) {
+      mDisabledTrackIDs.AppendElement(aTrackID);
+    }
+  }
+}
+
+void
+MediaStream::SetTrackEnabled(TrackID aTrackID, bool aEnabled)
+{
+  class Message : public ControlMessage {
+  public:
+    Message(MediaStream* aStream, TrackID aTrackID, bool aEnabled) :
+      ControlMessage(aStream), mTrackID(aTrackID), mEnabled(aEnabled) {}
+    virtual void Run()
+    {
+      mStream->SetTrackEnabledImpl(mTrackID, mEnabled);
+    }
+    TrackID mTrackID;
+    bool mEnabled;
+  };
+  GraphImpl()->AppendMessage(new Message(this, aTrackID, aEnabled));
+}
+
+void
+MediaStream::ApplyTrackDisabling(TrackID aTrackID, MediaSegment* aSegment)
+{
+  if (!mDisabledTrackIDs.Contains(aTrackID)) {
+    return;
+  }
+
+  switch (aSegment->GetType()) {
+  case MediaSegment::AUDIO: {
+    TrackTicks duration = aSegment->GetDuration();
+    aSegment->Clear();
+    aSegment->AppendNullData(duration);
+    break;
+  }
+  case MediaSegment::VIDEO: {
+    for (VideoSegment::ChunkIterator i(*static_cast<VideoSegment*>(aSegment));
+         !i.IsEnded(); i.Next()) {
+      VideoChunk& chunk = *i;
+      chunk.SetForceBlack(true);
+    }
+    break;
+  }
+  default:
+    MOZ_NOT_REACHED("Unknown track type");
+    break;
+  }
+}
+
+void
 SourceMediaStream::DestroyImpl()
 {
   {
     MutexAutoLock lock(mMutex);
     mDestroyed = true;
   }
   MediaStream::DestroyImpl();
 }
--- a/content/media/MediaStreamGraph.h
+++ b/content/media/MediaStreamGraph.h
@@ -155,17 +155,16 @@ public:
     TRACK_EVENT_CREATED = 0x01,
     TRACK_EVENT_ENDED = 0x02
   };
   /**
    * Notify that changes to one of the stream tracks have been queued.
    * aTrackEvents can be any combination of TRACK_EVENT_CREATED and
    * TRACK_EVENT_ENDED. aQueuedMedia is the data being added to the track
    * at aTrackOffset (relative to the start of the stream).
-   * aQueuedMedia can be null if there is no output.
    */
   virtual void NotifyQueuedTrackChanges(MediaStreamGraph* aGraph, TrackID aID,
                                         TrackRate aTrackRate,
                                         TrackTicks aTrackOffset,
                                         uint32_t aTrackEvents,
                                         const MediaSegment& aQueuedMedia) {}
 };
 
@@ -298,30 +297,33 @@ public:
 
   // Control API.
   // Since a stream can be played multiple ways, we need to combine independent
   // volume settings. The aKey parameter is used to keep volume settings
   // separate. Since the stream is always playing the same contents, only
   // a single audio output stream is used; the volumes are combined.
   // Currently only the first enabled audio track is played.
   // XXX change this so all enabled audio tracks are mixed and played.
-  virtual void AddAudioOutput(void* aKey);
-  virtual void SetAudioOutputVolume(void* aKey, float aVolume);
-  virtual void RemoveAudioOutput(void* aKey);
+  void AddAudioOutput(void* aKey);
+  void SetAudioOutputVolume(void* aKey, float aVolume);
+  void RemoveAudioOutput(void* aKey);
   // Since a stream can be played multiple ways, we need to be able to
   // play to multiple VideoFrameContainers.
   // Only the first enabled video track is played.
-  virtual void AddVideoOutput(VideoFrameContainer* aContainer);
-  virtual void RemoveVideoOutput(VideoFrameContainer* aContainer);
+  void AddVideoOutput(VideoFrameContainer* aContainer);
+  void RemoveVideoOutput(VideoFrameContainer* aContainer);
   // Explicitly block. Useful for example if a media element is pausing
   // and we need to stop its stream emitting its buffered data.
-  virtual void ChangeExplicitBlockerCount(int32_t aDelta);
+  void ChangeExplicitBlockerCount(int32_t aDelta);
   // Events will be dispatched by calling methods of aListener.
-  virtual void AddListener(MediaStreamListener* aListener);
-  virtual void RemoveListener(MediaStreamListener* aListener);
+  void AddListener(MediaStreamListener* aListener);
+  void RemoveListener(MediaStreamListener* aListener);
+  // A disabled track has video replaced by black, and audio replaced by
+  // silence.
+  void SetTrackEnabled(TrackID aTrackID, bool aEnabled);
   // Events will be dispatched by calling methods of aListener. It is the
   // responsibility of the caller to remove aListener before it is destroyed.
   void AddMainThreadListener(MainThreadMediaStreamListener* aListener)
   {
     NS_ASSERTION(NS_IsMainThread(), "Call only on main thread");
     mMainThreadListeners.AppendElement(aListener);
   }
   // It's safe to call this even if aListener is not currently a listener;
@@ -388,16 +390,17 @@ public:
   }
   void ChangeExplicitBlockerCountImpl(GraphTime aTime, int32_t aDelta)
   {
     mExplicitBlockerCount.SetAtAndAfter(aTime, mExplicitBlockerCount.GetAt(aTime) + aDelta);
   }
   void AddListenerImpl(already_AddRefed<MediaStreamListener> aListener);
   void RemoveListenerImpl(MediaStreamListener* aListener);
   void RemoveAllListenersImpl();
+  void SetTrackEnabledImpl(TrackID aTrackID, bool aEnabled);
 
   void AddConsumer(MediaInputPort* aPort)
   {
     mConsumers.AppendElement(aPort);
   }
   void RemoveConsumer(MediaInputPort* aPort)
   {
     mConsumers.RemoveElement(aPort);
@@ -422,16 +425,18 @@ public:
    * will not be blocked after mStateComputedTime.
    */
   GraphTime StreamTimeToGraphTime(StreamTime aTime);
   bool IsFinishedOnGraphThread() { return mFinished; }
   void FinishOnGraphThread();
 
   bool HasCurrentData() { return mHasCurrentData; }
 
+  void ApplyTrackDisabling(TrackID aTrackID, MediaSegment* aSegment);
+
   DOMMediaStream* GetWrapper()
   {
     NS_ASSERTION(NS_IsMainThread(), "Only use DOMMediaStream on main thread");
     return mWrapper;
   }
 
 protected:
   virtual void AdvanceTimeVaryingValuesToCurrentTime(GraphTime aCurrentTime, GraphTime aBlockedTime)
@@ -467,16 +472,17 @@ protected:
   // We record the last played video frame to avoid redundant setting
   // of the current video frame.
   VideoFrame mLastPlayedVideoFrame;
   // The number of times this stream has been explicitly blocked by the control
   // API, minus the number of times it has been explicitly unblocked.
   TimeVarying<GraphTime,uint32_t,0> mExplicitBlockerCount;
   nsTArray<nsRefPtr<MediaStreamListener> > mListeners;
   nsTArray<MainThreadMediaStreamListener*> mMainThreadListeners;
+  nsTArray<TrackID> mDisabledTrackIDs;
 
   // Precomputed blocking status (over GraphTime).
   // This is only valid between the graph's mCurrentTime and
   // mStateComputedTime. The stream is considered to have
   // not been blocked before mCurrentTime (its mBufferStartTime is increased
   // as necessary to account for that time instead) --- this avoids us having to
   // record the entire history of the stream's blocking-ness in mBlocked.
   TimeVarying<GraphTime,bool,5> mBlocked;
--- a/content/media/MediaStreamTrack.cpp
+++ b/content/media/MediaStreamTrack.cpp
@@ -3,22 +3,23 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "MediaStreamTrack.h"
 
 #include "DOMMediaStream.h"
 #include "nsIUUIDGenerator.h"
 #include "nsServiceManagerUtils.h"
+#include "MediaStreamGraph.h"
 
 namespace mozilla {
 namespace dom {
 
 MediaStreamTrack::MediaStreamTrack(DOMMediaStream* aStream, TrackID aTrackID)
-  : mStream(aStream), mTrackID(aTrackID), mEnded(false)
+  : mStream(aStream), mTrackID(aTrackID), mEnded(false), mEnabled(true)
 {
   SetIsDOMBinding();
 
   memset(&mID, 0, sizeof(mID));
 
   nsresult rv;
   nsCOMPtr<nsIUUIDGenerator> uuidgen =
     do_GetService("@mozilla.org/uuid-generator;1", &rv);
@@ -42,10 +43,20 @@ NS_INTERFACE_MAP_END_INHERITING(nsDOMEve
 void
 MediaStreamTrack::GetId(nsAString& aID)
 {
   char chars[NSID_LENGTH];
   mID.ToProvidedString(chars);
   aID = NS_ConvertASCIItoUTF16(chars);
 }
 
+void
+MediaStreamTrack::SetEnabled(bool aEnabled)
+{
+  mEnabled = aEnabled;
+  MediaStream* stream = mStream->GetStream();
+  if (stream) {
+    stream->SetTrackEnabled(mTrackID, aEnabled);
+  }
+}
+
 }
 }
--- a/content/media/MediaStreamTrack.h
+++ b/content/media/MediaStreamTrack.h
@@ -42,23 +42,26 @@ public:
   TrackID GetTrackID() const { return mTrackID; }
   virtual AudioStreamTrack* AsAudioStreamTrack() { return nullptr; }
   virtual VideoStreamTrack* AsVideoStreamTrack() { return nullptr; }
 
   // WebIDL
   virtual void GetKind(nsAString& aKind) = 0;
   void GetId(nsAString& aID);
   void GetLabel(nsAString& aLabel) { aLabel.Truncate(); }
+  bool Enabled() { return mEnabled; }
+  void SetEnabled(bool aEnabled);
 
   // Notifications from the MediaStreamGraph
   void NotifyEnded() { mEnded = true; }
 
 protected:
   nsRefPtr<DOMMediaStream> mStream;
   TrackID mTrackID;
   nsID mID;
   bool mEnded;
+  bool mEnabled;
 };
 
 }
 }
 
 #endif /* MEDIASTREAMTRACK_H_ */
--- a/content/media/TrackUnionStream.h
+++ b/content/media/TrackUnionStream.h
@@ -234,16 +234,17 @@ protected:
         TrackTicks inputStartTicks = inputEndTicks - ticks;
         segment->AppendSlice(*aInputTrack->GetSegment(),
                              std::min(inputTrackEndPoint, inputStartTicks),
                              std::min(inputTrackEndPoint, inputEndTicks));
         LOG(PR_LOG_DEBUG+1, ("TrackUnionStream %p appending %lld ticks of input data to track %d",
             this, (long long)(std::min(inputTrackEndPoint, inputEndTicks) - std::min(inputTrackEndPoint, inputStartTicks)),
             outputTrack->GetID()));
       }
+      ApplyTrackDisabling(outputTrack->GetID(), segment);
       for (uint32_t j = 0; j < mListeners.Length(); ++j) {
         MediaStreamListener* l = mListeners[j];
         l->NotifyQueuedTrackChanges(Graph(), outputTrack->GetID(),
                                     outputTrack->GetRate(), startTicks, 0,
                                     *segment);
       }
       outputTrack->GetSegment()->AppendFrom(segment);
     }
--- a/content/media/VideoSegment.cpp
+++ b/content/media/VideoSegment.cpp
@@ -6,21 +6,21 @@
 #include "VideoSegment.h"
 #include "ImageContainer.h"
 
 namespace mozilla {
 
 using namespace layers;
 
 VideoFrame::VideoFrame(already_AddRefed<Image> aImage, const gfxIntSize& aIntrinsicSize)
-  : mImage(aImage), mIntrinsicSize(aIntrinsicSize)
+  : mImage(aImage), mIntrinsicSize(aIntrinsicSize), mForceBlack(false)
 {}
 
 VideoFrame::VideoFrame()
-  : mIntrinsicSize(0, 0)
+  : mIntrinsicSize(0, 0), mForceBlack(false)
 {}
 
 VideoFrame::~VideoFrame()
 {}
 
 void
 VideoFrame::SetNull() {
   mImage = nullptr;
--- a/content/media/VideoSegment.h
+++ b/content/media/VideoSegment.h
@@ -23,37 +23,41 @@ public:
   typedef mozilla::layers::Image Image;
 
   VideoFrame(already_AddRefed<Image> aImage, const gfxIntSize& aIntrinsicSize);
   VideoFrame();
   ~VideoFrame();
 
   bool operator==(const VideoFrame& aFrame) const
   {
-    return mImage == aFrame.mImage && mIntrinsicSize == aFrame.mIntrinsicSize;
+    return mIntrinsicSize == aFrame.mIntrinsicSize &&
+           mForceBlack == aFrame.mForceBlack &&
+           ((mForceBlack && aFrame.mForceBlack) || mImage == aFrame.mImage);
   }
   bool operator!=(const VideoFrame& aFrame) const
   {
     return !operator==(aFrame);
   }
 
   Image* GetImage() const { return mImage; }
+  void SetForceBlack(bool aForceBlack) { mForceBlack = aForceBlack; }
+  bool GetForceBlack() const { return mForceBlack; }
   const gfxIntSize& GetIntrinsicSize() const { return mIntrinsicSize; }
   void SetNull();
   void TakeFrom(VideoFrame* aFrame);
 
 protected:
   // mImage can be null to indicate "no video" (aka "empty frame"). It can
   // still have an intrinsic size in this case.
   nsRefPtr<Image> mImage;
   // The desired size to render the video frame at.
   gfxIntSize mIntrinsicSize;
+  bool mForceBlack;
 };
 
-
 struct VideoChunk {
   VideoChunk();
   ~VideoChunk();
   void SliceTo(TrackTicks aStart, TrackTicks aEnd)
   {
     NS_ASSERTION(aStart >= 0 && aStart < aEnd && aEnd <= mDuration,
                  "Slice out of bounds");
     mDuration = aEnd - aStart;
@@ -64,16 +68,17 @@ struct VideoChunk {
     return aOther.mFrame == mFrame;
   }
   bool IsNull() const { return !mFrame.GetImage(); }
   void SetNull(TrackTicks aDuration)
   {
     mDuration = aDuration;
     mFrame.SetNull();
   }
+  void SetForceBlack(bool aForceBlack) { mFrame.SetForceBlack(aForceBlack); }
 
   TrackTicks mDuration;
   VideoFrame mFrame;
 };
 
 class VideoSegment : public MediaSegmentBase<VideoSegment, VideoChunk> {
 public:
   typedef mozilla::layers::Image Image;
--- a/dom/webidl/MediaStreamTrack.webidl
+++ b/dom/webidl/MediaStreamTrack.webidl
@@ -9,17 +9,17 @@
  * Copyright © 2012 W3C® (MIT, ERCIM, Keio), All Rights Reserved. W3C
  * liability, trademark and document use rules apply.
  */
 
 interface MediaStreamTrack {
     readonly    attribute DOMString             kind;
     readonly    attribute DOMString             id;
     readonly    attribute DOMString             label;
-//                attribute boolean               enabled;
+                attribute boolean               enabled;
 //    readonly    attribute MediaStreamTrackState readyState;
 //    readonly    attribute SourceTypeEnum        sourceType;
 //    readonly    attribute DOMString             sourceId;
 //                attribute EventHandler          onstarted;
 //                attribute EventHandler          onmute;
 //                attribute EventHandler          onunmute;
 //                attribute EventHandler          onended;
 //    any                    getConstraint (DOMString constraintName, optional boolean mandatory = false);
--- a/gfx/layers/ImageContainer.h
+++ b/gfx/layers/ImageContainer.h
@@ -148,17 +148,17 @@ class CompositionNotifySink
 {
 public:
   virtual void DidComposite() = 0;
   virtual ~CompositionNotifySink() {}
 };
 
 /**
  * A class that manages Image creation for a LayerManager. The only reason
- * we need a separate class here is that LayerMananers aren't threadsafe
+ * we need a separate class here is that LayerManagers aren't threadsafe
  * (because layers can only be used on the main thread) and we want to
  * be able to create images from any thread, to facilitate video playback
  * without involving the main thread, for example.
  * Different layer managers can implement child classes of this making it
  * possible to create layer manager specific images.
  * This class is not meant to be used directly but rather can be set on an
  * image container. This is usually done by the layer system internally and
  * not explicitly by users. For PlanarYCbCr or Cairo images the default
--- a/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
+++ b/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
@@ -777,22 +777,53 @@ void MediaPipelineTransmit::PipelineList
   if (chunk_remaining) {
     memcpy(samples_10ms_buffer_, samples_tmp, chunk_remaining * sizeof(int16_t));
     buffer_current_ = chunk_remaining;
   }
 
 }
 
 #ifdef MOZILLA_INTERNAL_API
+static void FillBlackYCbCr420PixelData(uint8_t* aBuffer, const gfxIntSize& aSize)
+{
+  // NOTE(review): empty stub, never called — black fill is done inline in ProcessVideoChunk below; confirm and remove.
+}
+
 void MediaPipelineTransmit::PipelineListener::ProcessVideoChunk(
     VideoSessionConduit* conduit,
     TrackRate rate,
     VideoChunk& chunk) {
+  layers::Image *img = chunk.mFrame.GetImage();
+  gfxIntSize size = img ? img->GetSize() : chunk.mFrame.GetIntrinsicSize();
+  if ((size.width & 1) != 0 || (size.height & 1) != 0) {
+    MOZ_ASSERT(false, "Can't handle odd-sized images");
+    return;
+  }
+
+  if (chunk.mFrame.GetForceBlack()) {
+    uint32_t yPlaneLen = size.width*size.height;
+    uint32_t cbcrPlaneLen = yPlaneLen/2;
+    uint32_t length = yPlaneLen + cbcrPlaneLen;
+
+    // Send a black image.
+    nsAutoArrayPtr<uint8_t> pixelData;
+    pixelData = new (fallible_t()) uint8_t[length];
+    if (pixelData) {
+      memset(pixelData, 0x10, yPlaneLen);
+      // Fill Cb/Cr planes
+      memset(pixelData + yPlaneLen, 0x80, cbcrPlaneLen);
+
+      MOZ_MTLOG(PR_LOG_DEBUG, "Sending a black video frame");
+      conduit->SendVideoFrame(pixelData, length, size.width, size.height,
+                              mozilla::kVideoI420, 0);
+    }
+    return;
+  }
+
   // We now need to send the video frame to the other side
-  layers::Image *img = chunk.mFrame.GetImage();
   if (!img) {
     // segment.AppendFrame() allows null images, which show up here as null
     return;
   }
 
   ImageFormat format = img->GetFormat();
 #ifdef MOZ_WIDGET_GONK
   if (format == GONK_IO_SURFACE) {