Bug 1325707: P2. Handle OOM conditions when creating MediaRawData object. r?gerald draft
authorJean-Yves Avenard <jyavenard@mozilla.com>
Mon, 06 Feb 2017 16:01:54 +0100
changeset 479412 5f97405f13c9a48c76065cc0d8e0b1a90086bac3
parent 479411 7acead4b2311ef7a47d56f5aa5c0b49b9af4d53f
child 479413 4b58a1f36491c856af84cbee83221729976bcd31
push id: 44244
push user: bmo:jyavenard@mozilla.com
push date: Mon, 06 Feb 2017 16:32:25 +0000
reviewers: gerald
bugs: 1325707
milestone: 54.0a1
Bug 1325707: P2. Handle OOM conditions when creating MediaRawData object. r?gerald MozReview-Commit-ID: HtkhrT36Kf4
dom/media/gmp/widevine-adapter/WidevineVideoDecoder.cpp
dom/media/ipc/VideoDecoderParent.cpp
dom/media/ogg/OggCodecState.cpp
dom/media/webm/WebMDemuxer.cpp
--- a/dom/media/gmp/widevine-adapter/WidevineVideoDecoder.cpp
+++ b/dom/media/gmp/widevine-adapter/WidevineVideoDecoder.cpp
@@ -105,17 +105,23 @@ WidevineVideoDecoder::Decode(GMPVideoEnc
   // may be some latency, i.e. we may need to input (say) 30 frames before
   // we receive output. So we need to store the durations of the frames input,
   // and retrieve them on output.
   mFrameDurations[aInputFrame->TimeStamp()] = aInputFrame->Duration();
 
   mSentInput = true;
   InputBuffer sample;
 
-  RefPtr<MediaRawData> raw(new MediaRawData(aInputFrame->Buffer(), aInputFrame->Size()));
+  RefPtr<MediaRawData> raw(
+      new MediaRawData(aInputFrame->Buffer(), aInputFrame->Size()));
+  if (!raw->Data()) {
+    // OOM.
+    mCallback->Error(GMPAllocErr);
+    return;
+  }
   raw->mExtraData = mExtraData;
   raw->mKeyframe = (aInputFrame->FrameType() == kGMPKeyFrame);
   if (mCodecType == kGMPVideoCodecH264) {
     // Convert input from AVCC, which GMPAPI passes in, to AnnexB, which
     // Chromium uses internally.
     mp4_demuxer::AnnexB::ConvertSampleToAnnexB(raw);
   }
 
--- a/dom/media/ipc/VideoDecoderParent.cpp
+++ b/dom/media/ipc/VideoDecoderParent.cpp
@@ -119,17 +119,23 @@ VideoDecoderParent::RecvInit()
 }
 
 mozilla::ipc::IPCResult
 VideoDecoderParent::RecvInput(const MediaRawDataIPDL& aData)
 {
   MOZ_ASSERT(OnManagerThread());
   // XXX: This copies the data into a buffer owned by the MediaRawData. Ideally we'd just take ownership
   // of the shmem.
-  RefPtr<MediaRawData> data = new MediaRawData(aData.buffer().get<uint8_t>(), aData.buffer().Size<uint8_t>());
+  RefPtr<MediaRawData> data = new MediaRawData(aData.buffer().get<uint8_t>(),
+                                               aData.buffer().Size<uint8_t>());
+  if (!data->Data()) {
+    // OOM
+    Error(NS_ERROR_OUT_OF_MEMORY);
+    return IPC_OK();
+  }
   data->mOffset = aData.base().offset();
   data->mTime = aData.base().time();
   data->mTimecode = aData.base().timecode();
   data->mDuration = aData.base().duration();
   data->mKeyframe = aData.base().keyframe();
 
   DeallocShmem(aData.buffer());
 
--- a/dom/media/ogg/OggCodecState.cpp
+++ b/dom/media/ogg/OggCodecState.cpp
@@ -251,18 +251,24 @@ OggCodecState::PushFront(OggPacketQueue 
 already_AddRefed<MediaRawData>
 OggCodecState::PacketOutAsMediaRawData()
 {
   ogg_packet* packet = PacketOut();
   if (!packet) {
     return nullptr;
   }
 
-  NS_ASSERTION(!IsHeader(packet), "PacketOutAsMediaRawData can only be called on non-header packets");
+  NS_ASSERTION(
+    !IsHeader(packet),
+    "PacketOutAsMediaRawData can only be called on non-header packets");
   RefPtr<MediaRawData> sample = new MediaRawData(packet->packet, packet->bytes);
+  if (!sample->Data()) {
+    // OOM.
+    return nullptr;
+  }
 
   int64_t end_tstamp = Time(packet->granulepos);
   NS_ASSERTION(end_tstamp >= 0, "timestamp invalid");
 
   int64_t duration = PacketDuration(packet);
   NS_ASSERTION(duration >= 0, "duration invalid");
 
   sample->mTimecode = packet->granulepos;
--- a/dom/media/webm/WebMDemuxer.cpp
+++ b/dom/media/webm/WebMDemuxer.cpp
@@ -679,18 +679,26 @@ WebMDemuxer::GetNextPacket(TrackInfo::Tr
       }
     }
 
     WEBM_DEBUG("push sample tstamp: %ld next_tstamp: %ld length: %ld kf: %d",
                tstamp, next_tstamp, length, isKeyframe);
     RefPtr<MediaRawData> sample;
     if (mInfo.mVideo.HasAlpha() && alphaLength != 0) {
       sample = new MediaRawData(data, length, alphaData, alphaLength);
+      if (!sample->Data() || !sample->AlphaData()) {
+        // OOM.
+        return false;
+      }
     } else {
       sample = new MediaRawData(data, length);
+      if (!sample->Data()) {
+        // OOM.
+        return false;
+      }
     }
     sample->mTimecode = tstamp;
     sample->mTime = tstamp;
     sample->mDuration = next_tstamp - tstamp;
     sample->mOffset = holder->Offset();
     sample->mKeyframe = isKeyframe;
     if (discardPadding && i == count - 1) {
       CheckedInt64 discardFrames;