Bug 1114910 - 1. Fix a crash in the promise handling. 2. Rewrite the promise-related code. r=cpearce
author Benjamin Chen <bechen@mozilla.com>
Tue, 13 Jan 2015 15:42:53 +0800
changeset 250695 f50cce157844301567c7235c446d1481b6b95398
parent 250694 bee5bb286d47274cbfa0b3b11ed5d410e3252507
child 250696 b5300132d105323848af8323e21a1314c885b44f
push id 4610
push user jlund@mozilla.com
push date Mon, 30 Mar 2015 18:32:55 +0000
treeherder mozilla-beta@4df54044d9ef
reviewers cpearce
bugs 1114910
milestone 38.0a1
Bug 1114910 - 1. Fix a crash in the promise handling. 2. Rewrite the promise-related code. r=cpearce
dom/media/omx/MediaCodecReader.cpp
dom/media/omx/MediaCodecReader.h
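
In outline, the patch moves each MediaPromiseHolder off MediaCodecReader and into its Track, and creates/settles it only while holding a new per-track Monitor (mTrackMonitor), which also replaces the old mDurationLock. Below is a minimal, self-contained sketch of that pattern: std::mutex/std::lock_guard and the toy PromiseHolder are stand-ins for Gecko's Monitor/MonitorAutoLock and MediaPromiseHolder, the member names mirror the patch, and everything else is illustrative only, not the actual Gecko code.

// Sketch only: std::mutex stands in for mozilla::Monitor, the toy
// PromiseHolder stands in for MediaPromiseHolder<T>; names such as
// mTrackMonitor mirror the patch, the rest is hypothetical.
#include <cassert>
#include <cstdint>
#include <mutex>

struct PromiseHolder {                  // stand-in for MediaPromiseHolder<T>
  bool IsEmpty() const { return !mPending; }
  void Ensure()  { assert(!mPending); mPending = true;  }  // cf. Ensure(__func__)
  void Resolve() { assert(mPending);  mPending = false; }  // got a decoded sample
  void Reject()  { assert(mPending);  mPending = false; }  // e.g. END_OF_STREAM
  bool mPending = false;
};

struct Track {                          // cf. MediaCodecReader::Track
  std::mutex mTrackMonitor;             // one monitor per track
  PromiseHolder mDataPromise;           // lives inside the track (mAudioPromise /
  int64_t mDurationUs = 0;              // mVideoPromise) and shares the monitor
};                                      // with mDurationUs

// Caller side (cf. RequestAudioData / RequestVideoData): take the monitor,
// then create the promise; at most one request may be outstanding.
void RequestData(Track& aTrack) {
  std::lock_guard<std::mutex> lock(aTrack.mTrackMonitor);
  aTrack.mDataPromise.Ensure();
}

// Task-queue side (cf. DecodeAudioDataTask / DecodeVideoFrameTask): settle the
// promise under the same monitor; if neither branch fires, the promise stays
// pending and another decode task is dispatched later to retry.
void DecodeDataTask(Track& aTrack, bool aGotSample, bool aAtEndOfStream) {
  std::lock_guard<std::mutex> lock(aTrack.mTrackMonitor);
  if (aGotSample) {
    aTrack.mDataPromise.Resolve();
  } else if (aAtEndOfStream) {
    aTrack.mDataPromise.Reject();
  }
}

int main() {
  Track track;
  RequestData(track);
  DecodeDataTask(track, /*aGotSample=*/true, /*aAtEndOfStream=*/false);
  assert(track.mDataPromise.IsEmpty());
  return 0;
}

In the patch itself, Ensure() runs on the reader's task queue while Resolve()/Reject() run on the per-track decode task queue; serializing both through mTrackMonitor is presumably what addresses the crash mentioned in the commit message.
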
--- a/dom/media/omx/MediaCodecReader.cpp
+++ b/dom/media/omx/MediaCodecReader.cpp
@@ -136,25 +136,25 @@ MediaCodecReader::TrackInputCopier::Copy
          aSourceBuffer->range_length());
 
   return true;
 }
 
 MediaCodecReader::Track::Track(Type type)
   : mType(type)
   , mSourceIsStopped(true)
-  , mDurationLock("MediaCodecReader::Track::mDurationLock")
   , mDurationUs(INT64_C(0))
   , mInputIndex(sInvalidInputIndex)
   , mInputEndOfStream(false)
   , mOutputEndOfStream(false)
   , mSeekTimeUs(sInvalidTimestampUs)
   , mFlushed(false)
   , mDiscontinuity(false)
   , mTaskQueue(nullptr)
+  , mTrackMonitor("MediaCodecReader::mTrackMonitor")
 {
   MOZ_ASSERT(mType != kUnknown, "Should have a valid Track::Type");
 }
 
 // Append the value of |kKeyValidSamples| to the end of each vorbis buffer.
 // https://github.com/mozilla-b2g/platform_frameworks_av/blob/master/media/libstagefright/OMXCodec.cpp#L3128
 // https://github.com/mozilla-b2g/platform_frameworks_av/blob/master/media/libstagefright/NuMediaExtractor.cpp#L472
 bool
@@ -180,27 +180,29 @@ MediaCodecReader::VorbisInputCopier::Cop
          &numPageSamples, sizeof(numPageSamples));
 
   return true;
 }
 
 MediaCodecReader::AudioTrack::AudioTrack()
   : Track(kAudio)
 {
+  mAudioPromise.SetMonitor(&mTrackMonitor);
 }
 
 MediaCodecReader::VideoTrack::VideoTrack()
   : Track(kVideo)
   , mWidth(0)
   , mHeight(0)
   , mStride(0)
   , mSliceHeight(0)
   , mColorFormat(0)
   , mRotation(0)
 {
+  mVideoPromise.SetMonitor(&mTrackMonitor);
 }
 
 MediaCodecReader::CodecBufferInfo::CodecBufferInfo()
   : mIndex(0)
   , mOffset(0)
   , mSize(0)
   , mTimeUs(0)
   , mFlags(0)
@@ -343,85 +345,85 @@ MediaCodecReader::ReleaseMediaResources(
     mAudioTrack.mSourceIsStopped = true;
   }
   ReleaseCriticalResources();
 }
 
 nsRefPtr<ShutdownPromise>
 MediaCodecReader::Shutdown()
 {
-  MOZ_ASSERT(mAudioPromise.IsEmpty());
-  MOZ_ASSERT(mVideoPromise.IsEmpty());
+  MOZ_ASSERT(mAudioTrack.mAudioPromise.IsEmpty());
+  MOZ_ASSERT(mVideoTrack.mVideoPromise.IsEmpty());
   ReleaseResources();
   return MediaDecoderReader::Shutdown();
 }
 
 void
 MediaCodecReader::DispatchAudioTask()
 {
-  if (mAudioTrack.mTaskQueue && mAudioTrack.mTaskQueue->IsEmpty()) {
+  if (mAudioTrack.mTaskQueue) {
     RefPtr<nsIRunnable> task =
       NS_NewRunnableMethod(this,
                            &MediaCodecReader::DecodeAudioDataTask);
     mAudioTrack.mTaskQueue->Dispatch(task);
   }
 }
 
 void
 MediaCodecReader::DispatchVideoTask(int64_t aTimeThreshold)
 {
-  if (mVideoTrack.mTaskQueue && mVideoTrack.mTaskQueue->IsEmpty()) {
+  if (mVideoTrack.mTaskQueue) {
     RefPtr<nsIRunnable> task =
       NS_NewRunnableMethodWithArg<int64_t>(this,
                                            &MediaCodecReader::DecodeVideoFrameTask,
                                            aTimeThreshold);
     mVideoTrack.mTaskQueue->Dispatch(task);
   }
 }
 
 nsRefPtr<MediaDecoderReader::AudioDataPromise>
 MediaCodecReader::RequestAudioData()
 {
   MOZ_ASSERT(GetTaskQueue()->IsCurrentThreadIn());
   MOZ_ASSERT(HasAudio());
 
-  nsRefPtr<AudioDataPromise> p = mAudioPromise.Ensure(__func__);
+  MonitorAutoLock al(mAudioTrack.mTrackMonitor);
   if (CheckAudioResources()) {
     DispatchAudioTask();
   }
-
-  return p;
+  MOZ_ASSERT(mAudioTrack.mAudioPromise.IsEmpty());
+  return mAudioTrack.mAudioPromise.Ensure(__func__);
 }
 
 nsRefPtr<MediaDecoderReader::VideoDataPromise>
 MediaCodecReader::RequestVideoData(bool aSkipToNextKeyframe,
                                    int64_t aTimeThreshold)
 {
   MOZ_ASSERT(GetTaskQueue()->IsCurrentThreadIn());
   MOZ_ASSERT(HasVideo());
 
-  nsRefPtr<VideoDataPromise> p = mVideoPromise.Ensure(__func__);
   int64_t threshold = sInvalidTimestampUs;
   if (aSkipToNextKeyframe && IsValidTimestampUs(aTimeThreshold)) {
-    mVideoTrack.mTaskQueue->Flush();
     threshold = aTimeThreshold;
   }
+
+  MonitorAutoLock al(mVideoTrack.mTrackMonitor);
   if (CheckVideoResources()) {
     DispatchVideoTask(threshold);
   }
-
-  return p;
+  MOZ_ASSERT(mVideoTrack.mVideoPromise.IsEmpty());
+  return mVideoTrack.mVideoPromise.Ensure(__func__);
 }
 
-bool
+void
 MediaCodecReader::DecodeAudioDataSync()
 {
   if (mAudioTrack.mCodec == nullptr || !mAudioTrack.mCodec->allocated() ||
       mAudioTrack.mOutputEndOfStream) {
-    return false;
+    return;
   }
 
   // Get one audio output data from MediaCodec
   CodecBufferInfo bufferInfo;
   status_t status;
   TimeStamp timeout = TimeStamp::Now() +
                       TimeDuration::FromSeconds(sMaxAudioDecodeDurationS);
   while (true) {
@@ -434,40 +436,39 @@ MediaCodecReader::DecodeAudioDataSync()
     if (status == OK || status == ERROR_END_OF_STREAM) {
       break;
     } else if (status == -EAGAIN) {
       if (TimeStamp::Now() > timeout) {
         // Don't let this loop run for too long. Try it again later.
         if (CheckAudioResources()) {
           DispatchAudioTask();
         }
-        return true;
+        return;
       }
       continue; // Try it again now.
     } else if (status == INFO_FORMAT_CHANGED) {
       if (UpdateAudioInfo()) {
         continue; // Try it again now.
       } else {
-        return false;
+        return;
       }
     } else {
-      return false;
+      return;
     }
   }
 
-  bool result = false;
   if (bufferInfo.mBuffer != nullptr && bufferInfo.mSize > 0 &&
       bufferInfo.mBuffer->data() != nullptr) {
     // This is the approximate byte position in the stream.
     int64_t pos = mDecoder->GetResource()->Tell();
 
     uint32_t frames = bufferInfo.mSize /
                       (mInfo.mAudio.mChannels * sizeof(AudioDataValue));
 
-    result = mAudioCompactor.Push(
+    mAudioCompactor.Push(
       pos,
       bufferInfo.mTimeUs,
       mInfo.mAudio.mRate,
       frames,
       mInfo.mAudio.mChannels,
       AudioCompactor::NativeCopy(
         bufferInfo.mBuffer->data() + bufferInfo.mOffset,
         bufferInfo.mSize,
@@ -475,57 +476,54 @@ MediaCodecReader::DecodeAudioDataSync()
   }
 
   if ((bufferInfo.mFlags & MediaCodec::BUFFER_FLAG_EOS) ||
       (status == ERROR_END_OF_STREAM)) {
     AudioQueue().Finish();
   }
   mAudioTrack.mCodec->releaseOutputBuffer(bufferInfo.mIndex);
 
-  return result;
 }
 
-bool
+void
 MediaCodecReader::DecodeAudioDataTask()
 {
-  bool result = DecodeAudioDataSync();
+  DecodeAudioDataSync();
+  MonitorAutoLock al(mAudioTrack.mTrackMonitor);
   if (AudioQueue().GetSize() > 0) {
     nsRefPtr<AudioData> a = AudioQueue().PopFront();
     if (a) {
       if (mAudioTrack.mDiscontinuity) {
         a->mDiscontinuity = true;
         mAudioTrack.mDiscontinuity = false;
       }
-      mAudioPromise.Resolve(a, __func__);
+      mAudioTrack.mAudioPromise.Resolve(a, __func__);
     }
+  } else if (AudioQueue().AtEndOfStream()) {
+    mAudioTrack.mAudioPromise.Reject(END_OF_STREAM, __func__);
   }
-  else if (AudioQueue().AtEndOfStream()) {
-    mAudioPromise.Reject(END_OF_STREAM, __func__);
-  }
-  return result;
 }
 
-bool
+void
 MediaCodecReader::DecodeVideoFrameTask(int64_t aTimeThreshold)
 {
-  bool result = DecodeVideoFrameSync(aTimeThreshold);
+  DecodeVideoFrameSync(aTimeThreshold);
+  MonitorAutoLock al(mVideoTrack.mTrackMonitor);
   if (VideoQueue().GetSize() > 0) {
     nsRefPtr<VideoData> v = VideoQueue().PopFront();
     if (v) {
       if (mVideoTrack.mDiscontinuity) {
         v->mDiscontinuity = true;
         mVideoTrack.mDiscontinuity = false;
       }
-      mVideoPromise.Resolve(v, __func__);
+      mVideoTrack.mVideoPromise.Resolve(v, __func__);
     }
+  } else if (VideoQueue().AtEndOfStream()) {
+    mVideoTrack.mVideoPromise.Reject(END_OF_STREAM, __func__);
   }
-  else if (VideoQueue().AtEndOfStream()) {
-    mVideoPromise.Reject(END_OF_STREAM, __func__);
-  }
-  return result;
 }
 
 bool
 MediaCodecReader::HasAudio()
 {
   return mInfo.mAudio.mHasAudio;
 }
 
@@ -647,25 +645,25 @@ MediaCodecReader::ParseDataSegment(const
     mMP3FrameParser->Parse(aBuffer, aLength, aOffset);
 
     duration = mMP3FrameParser->GetDuration();
   }
 
   bool durationUpdateRequired = false;
 
   {
-    MutexAutoLock al(mAudioTrack.mDurationLock);
+    MonitorAutoLock al(mAudioTrack.mTrackMonitor);
     if (duration > mAudioTrack.mDurationUs) {
       mAudioTrack.mDurationUs = duration;
       durationUpdateRequired = true;
     }
   }
 
   if (durationUpdateRequired && HasVideo()) {
-    MutexAutoLock al(mVideoTrack.mDurationLock);
+    MonitorAutoLock al(mVideoTrack.mTrackMonitor);
     durationUpdateRequired = duration > mVideoTrack.mDurationUs;
   }
 
   if (durationUpdateRequired) {
     MOZ_ASSERT(mDecoder);
     ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
     mDecoder->UpdateEstimatedMediaDuration(duration);
   }
@@ -715,22 +713,22 @@ MediaCodecReader::ReadMetadata(MediaInfo
 
   if (!UpdateVideoInfo()) {
     return NS_ERROR_FAILURE;
   }
 
   // Set the total duration (the max of the audio and video track).
   int64_t audioDuration = INT64_C(-1);
   {
-    MutexAutoLock al(mAudioTrack.mDurationLock);
+    MonitorAutoLock al(mAudioTrack.mTrackMonitor);
     audioDuration = mAudioTrack.mDurationUs;
   }
   int64_t videoDuration = INT64_C(-1);
   {
-    MutexAutoLock al(mVideoTrack.mDurationLock);
+    MonitorAutoLock al(mVideoTrack.mTrackMonitor);
     videoDuration = mVideoTrack.mDurationUs;
   }
   int64_t duration = audioDuration > videoDuration ? audioDuration : videoDuration;
   if (duration >= INT64_C(0)) {
     ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
     mDecoder->SetMediaDuration(duration);
   }
 
@@ -752,22 +750,22 @@ MediaCodecReader::ReadMetadata(MediaInfo
 
   return NS_OK;
 }
 
 nsresult
 MediaCodecReader::ResetDecode()
 {
   if (CheckAudioResources()) {
-    mAudioTrack.mTaskQueue->Flush();
+    mAudioTrack.mTaskQueue->AwaitIdle();
     FlushCodecData(mAudioTrack);
     mAudioTrack.mDiscontinuity = true;
   }
   if (CheckVideoResources()) {
-    mVideoTrack.mTaskQueue->Flush();
+    mVideoTrack.mTaskQueue->AwaitIdle();
     FlushCodecData(mVideoTrack);
     mVideoTrack.mDiscontinuity = true;
   }
 
   return MediaDecoderReader::ResetDecode();
 }
 
 void
@@ -841,22 +839,22 @@ MediaCodecReader::ReleaseAllTextureClien
     return;
   }
   printf_stderr("All TextureClients should be released already");
 
   mTextureClientIndexes.Enumerate(MediaCodecReader::ReleaseTextureClient, this);
   mTextureClientIndexes.Clear();
 }
 
-bool
+void
 MediaCodecReader::DecodeVideoFrameSync(int64_t aTimeThreshold)
 {
   if (mVideoTrack.mCodec == nullptr || !mVideoTrack.mCodec->allocated() ||
       mVideoTrack.mOutputEndOfStream) {
-    return false;
+    return;
   }
 
   // Get one video output data from MediaCodec
   CodecBufferInfo bufferInfo;
   status_t status;
   TimeStamp timeout = TimeStamp::Now() +
                       TimeDuration::FromSeconds(sMaxVideoDecodeDurationS);
   while (true) {
@@ -869,31 +867,30 @@ MediaCodecReader::DecodeVideoFrameSync(i
     if (status == OK || status == ERROR_END_OF_STREAM) {
       break;
     } else if (status == -EAGAIN) {
       if (TimeStamp::Now() > timeout) {
         // Don't let this loop run for too long. Try it again later.
         if (CheckVideoResources()) {
           DispatchVideoTask(aTimeThreshold);
         }
-        return true;
+        return;
       }
       continue; // Try it again now.
     } else if (status == INFO_FORMAT_CHANGED) {
       if (UpdateVideoInfo()) {
         continue; // Try it again now.
       } else {
-        return false;
+        return;
       }
     } else {
-      return false;
+      return;
     }
   }
 
-  bool result = false;
   nsRefPtr<VideoData> v;
   RefPtr<TextureClient> textureClient;
   sp<GraphicBuffer> graphicBuffer;
   if (bufferInfo.mBuffer != nullptr) {
     // This is the approximate byte position in the stream.
     int64_t pos = mDecoder->GetResource()->Tell();
 
     if (mVideoTrack.mNativeWindow != nullptr &&
@@ -925,17 +922,17 @@ MediaCodecReader::DecodeVideoFrameSync(i
 
         yuv420p_buffer = GetColorConverterBuffer(mVideoTrack.mWidth,
                                                  mVideoTrack.mHeight);
         if (mColorConverter.convertDecoderOutputToI420(
               bufferInfo.mBuffer->data(), mVideoTrack.mWidth, mVideoTrack.mHeight,
               crop, yuv420p_buffer) != OK) {
           mVideoTrack.mCodec->releaseOutputBuffer(bufferInfo.mIndex);
           NS_WARNING("Unable to convert color format");
-          return false;
+          return;
         }
 
         stride = mVideoTrack.mWidth;
         slice_height = mVideoTrack.mHeight;
       }
 
       size_t yuv420p_y_size = stride * slice_height;
       size_t yuv420p_u_size = ((stride + 1) / 2) * ((slice_height + 1) / 2);
@@ -972,37 +969,34 @@ MediaCodecReader::DecodeVideoFrameSync(i
                             1, // We don't know the duration.
                             b,
                             bufferInfo.mFlags & MediaCodec::BUFFER_FLAG_SYNCFRAME,
                             -1,
                             mVideoTrack.mRelativePictureRect);
     }
 
     if (v) {
-      result = true;
       VideoQueue().Push(v);
     } else {
       NS_WARNING("Unable to create VideoData");
     }
   }
 
   if ((bufferInfo.mFlags & MediaCodec::BUFFER_FLAG_EOS) ||
       (status == ERROR_END_OF_STREAM)) {
     VideoQueue().Finish();
   }
 
-  if (v != nullptr && textureClient != nullptr && graphicBuffer != nullptr && result) {
+  if (v != nullptr && textureClient != nullptr && graphicBuffer != nullptr) {
     MutexAutoLock al(mTextureClientIndexesLock);
     mTextureClientIndexes.Put(textureClient.get(), bufferInfo.mIndex);
     textureClient->SetRecycleCallback(MediaCodecReader::TextureClientRecycleCallback, this);
   } else {
     mVideoTrack.mCodec->releaseOutputBuffer(bufferInfo.mIndex);
   }
-
-  return result;
 }
 
 nsRefPtr<MediaDecoderReader::SeekPromise>
 MediaCodecReader::Seek(int64_t aTime,
                        int64_t aStartTime,
                        int64_t aEndTime,
                        int64_t aCurrentTime)
 {
@@ -1039,27 +1033,16 @@ MediaCodecReader::Seek(int64_t aTime,
       if (format->findInt64(kKeyTime, &timestamp) &&
           IsValidTimestampUs(timestamp)) {
         mVideoTrack.mSeekTimeUs = timestamp;
         mAudioTrack.mSeekTimeUs = timestamp;
       }
       format = nullptr;
     }
     source_buffer->release();
-
-    MOZ_ASSERT(mVideoTrack.mTaskQueue->IsEmpty());
-    DispatchVideoTask(mVideoTrack.mSeekTimeUs);
-
-    if (CheckAudioResources()) {
-      MOZ_ASSERT(mAudioTrack.mTaskQueue->IsEmpty());
-      DispatchAudioTask();
-    }
-  } else if (CheckAudioResources()) {// Audio only
-    MOZ_ASSERT(mAudioTrack.mTaskQueue->IsEmpty());
-    DispatchAudioTask();
   }
   return SeekPromise::CreateAndResolve(aTime, __func__);
 }
 
 bool
 MediaCodecReader::IsMediaSeekable()
 {
   // Check the MediaExtract flag if the source is seekable.
@@ -1451,17 +1434,17 @@ MediaCodecReader::TriggerIncrementalPars
           signalObject->Wait();
         }
       }
       duration = mMP3FrameParser->GetDuration();
     }
   }
 
   {
-    MutexAutoLock al(mAudioTrack.mDurationLock);
+    MonitorAutoLock al(mAudioTrack.mTrackMonitor);
     if (duration > mAudioTrack.mDurationUs) {
       mAudioTrack.mDurationUs = duration;
     }
   }
 
   return true;
 }
 
@@ -1469,31 +1452,31 @@ bool
 MediaCodecReader::UpdateDuration()
 {
   // read audio duration
   if (mAudioTrack.mSource != nullptr) {
     sp<MetaData> audioFormat = mAudioTrack.mSource->getFormat();
     if (audioFormat != nullptr) {
       int64_t duration = INT64_C(0);
       if (audioFormat->findInt64(kKeyDuration, &duration)) {
-        MutexAutoLock al(mAudioTrack.mDurationLock);
+        MonitorAutoLock al(mAudioTrack.mTrackMonitor);
         if (duration > mAudioTrack.mDurationUs) {
           mAudioTrack.mDurationUs = duration;
         }
       }
     }
   }
 
   // read video duration
   if (mVideoTrack.mSource != nullptr) {
     sp<MetaData> videoFormat = mVideoTrack.mSource->getFormat();
     if (videoFormat != nullptr) {
       int64_t duration = INT64_C(0);
       if (videoFormat->findInt64(kKeyDuration, &duration)) {
-        MutexAutoLock al(mVideoTrack.mDurationLock);
+        MonitorAutoLock al(mVideoTrack.mTrackMonitor);
         if (duration > mVideoTrack.mDurationUs) {
           mVideoTrack.mDurationUs = duration;
         }
       }
     }
   }
 
   return true;
--- a/dom/media/omx/MediaCodecReader.h
+++ b/dom/media/omx/MediaCodecReader.h
@@ -143,32 +143,32 @@ protected:
     android::sp<android::GonkNativeWindow> mNativeWindow;
 #if ANDROID_VERSION >= 21
     android::sp<android::IGraphicBufferProducer> mGraphicBufferProducer;
 #endif
 
     // pipeline copier
     nsAutoPtr<TrackInputCopier> mInputCopier;
 
-    // media parameters
-    Mutex mDurationLock; // mDurationUs might be read or updated from multiple
-                         // threads.
+    // Protected by mTrackMonitor.
+    // mDurationUs might be read or updated from multiple threads.
     int64_t mDurationUs;
 
     // playback parameters
     CheckedUint32 mInputIndex;
     // mDiscontinuity, mFlushed, mInputEndOfStream, mOutputEndOfStream and
     // mSeekTimeUs are not protected by a lock because
     // mTaskQueue->Flush() will flush all tasks.
     bool mInputEndOfStream;
     bool mOutputEndOfStream;
     int64_t mSeekTimeUs;
     bool mFlushed; // meaningless when mSeekTimeUs is invalid.
     bool mDiscontinuity;
     nsRefPtr<MediaTaskQueue> mTaskQueue;
+    Monitor mTrackMonitor;
 
   private:
     // Forbidden
     Track(const Track &rhs) = delete;
     const Track &operator=(const Track&) = delete;
   };
 
   // Receive a message from MessageHandler.
@@ -237,16 +237,18 @@ private:
   {
     virtual bool Copy(android::MediaBuffer* aSourceBuffer,
                       android::sp<android::ABuffer> aCodecBuffer);
   };
 
   struct AudioTrack : public Track
   {
     AudioTrack();
+    // Protected by mTrackMonitor.
+    MediaPromiseHolder<AudioDataPromise> mAudioPromise;
 
   private:
     // Forbidden
     AudioTrack(const AudioTrack &rhs) = delete;
     const AudioTrack &operator=(const AudioTrack &rhs) = delete;
   };
 
   struct VideoTrack : public Track
@@ -257,16 +259,18 @@ private:
     int32_t mHeight;
     int32_t mStride;
     int32_t mSliceHeight;
     int32_t mColorFormat;
     int32_t mRotation;
     nsIntSize mFrameSize;
     nsIntRect mPictureRect;
     gfx::IntRect mRelativePictureRect;
+    // Protected by mTrackMonitor.
+    MediaPromiseHolder<VideoDataPromise> mVideoPromise;
 
   private:
     // Forbidden
     VideoTrack(const VideoTrack &rhs) = delete;
     const VideoTrack &operator=(const VideoTrack &rhs) = delete;
   };
 
   struct CodecBufferInfo
@@ -367,20 +371,20 @@ private:
                                bool aAsync,
                                android::wp<android::MediaCodecProxy::CodecResourceListener> aListener);
   static bool ConfigureMediaCodec(Track& aTrack);
   void DestroyMediaCodecs();
   static void DestroyMediaCodec(Track& aTrack);
 
   bool CreateTaskQueues();
   void ShutdownTaskQueues();
-  bool DecodeVideoFrameTask(int64_t aTimeThreshold);
-  bool DecodeVideoFrameSync(int64_t aTimeThreshold);
-  bool DecodeAudioDataTask();
-  bool DecodeAudioDataSync();
+  void DecodeVideoFrameTask(int64_t aTimeThreshold);
+  void DecodeVideoFrameSync(int64_t aTimeThreshold);
+  void DecodeAudioDataTask();
+  void DecodeAudioDataSync();
   void DispatchVideoTask(int64_t aTimeThreshold);
   void DispatchAudioTask();
   inline bool CheckVideoResources() {
     return (HasVideo() && mVideoTrack.mSource != nullptr &&
             mVideoTrack.mTaskQueue);
   }
 
   inline bool CheckAudioResources() {
@@ -433,19 +437,16 @@ private:
   Mutex mTextureClientIndexesLock;
   nsDataHashtable<nsPtrHashKey<TextureClient>, size_t> mTextureClientIndexes;
 
   // media tracks
   AudioTrack mAudioTrack;
   VideoTrack mVideoTrack;
   AudioTrack mAudioOffloadTrack; // only Track::mSource is valid
 
-  MediaPromiseHolder<AudioDataPromise> mAudioPromise;
-  MediaPromiseHolder<VideoDataPromise> mVideoPromise;
-
   // color converter
   android::I420ColorConverterHelper mColorConverter;
   nsAutoArrayPtr<uint8_t> mColorConverterBuffer;
   size_t mColorConverterBufferSize;
 
   // incremental parser
   Monitor mParserMonitor;
   bool mParseDataFromCache;