Bug 1350241 - Part 2: Create a customized player based on ExoPlayer and inject customized {Audio,Video}Renderers as the source of HLS demuxed samples. draft
authorKilik Kuo <kikuo@mozilla.com>
Thu, 11 May 2017 19:33:17 +0800
changeset 576187 5e7d1b564e7087dfb74769e105eb658d0ecf895b
parent 576186 8b5f4c28dc27c6c55d1112dae541c51224f07972
child 628118 f431080cc0028e90b9190fe07a6bb84578545b26
push id58272
push userbmo:kikuo@mozilla.com
push dateThu, 11 May 2017 11:41:13 +0000
bugs1350241
milestone55.0a1
Bug 1350241 - Part 2: Create a customized player based on ExoPlayer and inject customized {Audio,Video}Renderers as the source of HLS demuxed samples. MozReview-Commit-ID: 9Vs1M2znnY
mobile/android/base/moz.build
mobile/android/geckoview/src/main/java/org/mozilla/gecko/media/EventLogger.java
mobile/android/geckoview/src/main/java/org/mozilla/gecko/media/GeckoHlsAudioRenderer.java
mobile/android/geckoview/src/main/java/org/mozilla/gecko/media/GeckoHlsPlayer.java
mobile/android/geckoview/src/main/java/org/mozilla/gecko/media/GeckoHlsRendererBase.java
mobile/android/geckoview/src/main/java/org/mozilla/gecko/media/GeckoHlsVideoRenderer.java
mobile/android/geckoview/src/main/java/org/mozilla/gecko/media/Utils.java
--- a/mobile/android/base/moz.build
+++ b/mobile/android/base/moz.build
@@ -435,19 +435,25 @@ gvjar.sources += [geckoview_source_dir +
 gvjar.sources += [geckoview_thirdparty_source_dir + f for f in [
     'java/com/googlecode/eyesfree/braille/selfbraille/ISelfBrailleService.java',
     'java/com/googlecode/eyesfree/braille/selfbraille/SelfBrailleClient.java',
     'java/com/googlecode/eyesfree/braille/selfbraille/WriteData.java',
 ]]
 
 if CONFIG['MOZ_ANDROID_HLS_SUPPORT']:
     gvjar.sources += [geckoview_source_dir + 'java/org/mozilla/gecko/' + x for x in [
+        'media/EventLogger.java',
         'media/GeckoAudioInfo.java',
+        'media/GeckoHlsAudioRenderer.java',
+        'media/GeckoHlsPlayer.java',
+        'media/GeckoHlsRendererBase.java',
         'media/GeckoHlsSample.java',
+        'media/GeckoHlsVideoRenderer.java',
         'media/GeckoVideoInfo.java',
+        'media/Utils.java',
     ]]
 
 
 gvjar.extra_jars += [
     CONFIG['ANDROID_SUPPORT_ANNOTATIONS_JAR_LIB'],
     CONFIG['ANDROID_SUPPORT_V4_AAR_LIB'],
     CONFIG['ANDROID_SUPPORT_V4_AAR_INTERNAL_LIB'],
     'constants.jar',
new file mode 100644
--- /dev/null
+++ b/mobile/android/geckoview/src/main/java/org/mozilla/gecko/media/EventLogger.java
@@ -0,0 +1,461 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.mozilla.gecko.media;
+
+import android.os.SystemClock;
+import android.util.Log;
+import android.view.Surface;
+import com.google.android.exoplayer2.C;
+import com.google.android.exoplayer2.ExoPlaybackException;
+import com.google.android.exoplayer2.ExoPlayer;
+import com.google.android.exoplayer2.Format;
+import com.google.android.exoplayer2.RendererCapabilities;
+import com.google.android.exoplayer2.Timeline;
+import com.google.android.exoplayer2.audio.AudioRendererEventListener;
+import com.google.android.exoplayer2.decoder.DecoderCounters;
+import com.google.android.exoplayer2.drm.DefaultDrmSessionManager;
+import com.google.android.exoplayer2.metadata.Metadata;
+import com.google.android.exoplayer2.metadata.MetadataRenderer;
+import com.google.android.exoplayer2.metadata.emsg.EventMessage;
+import com.google.android.exoplayer2.metadata.id3.ApicFrame;
+import com.google.android.exoplayer2.metadata.id3.CommentFrame;
+import com.google.android.exoplayer2.metadata.id3.GeobFrame;
+import com.google.android.exoplayer2.metadata.id3.Id3Frame;
+import com.google.android.exoplayer2.metadata.id3.PrivFrame;
+import com.google.android.exoplayer2.metadata.id3.TextInformationFrame;
+import com.google.android.exoplayer2.metadata.id3.UrlLinkFrame;
+import com.google.android.exoplayer2.source.AdaptiveMediaSourceEventListener;
+import com.google.android.exoplayer2.source.ExtractorMediaSource;
+import com.google.android.exoplayer2.source.TrackGroup;
+import com.google.android.exoplayer2.source.TrackGroupArray;
+import com.google.android.exoplayer2.trackselection.MappingTrackSelector;
+import com.google.android.exoplayer2.trackselection.MappingTrackSelector.MappedTrackInfo;
+import com.google.android.exoplayer2.trackselection.TrackSelection;
+import com.google.android.exoplayer2.trackselection.TrackSelectionArray;
+import com.google.android.exoplayer2.upstream.DataSpec;
+import com.google.android.exoplayer2.video.VideoRendererEventListener;
+import java.io.IOException;
+import java.text.NumberFormat;
+import java.util.Locale;
+
+/**
+ * Logs player events using {@link Log}.
+ */
+/* package */ final class EventLogger implements ExoPlayer.EventListener,
+        AudioRendererEventListener, VideoRendererEventListener, AdaptiveMediaSourceEventListener,
+        ExtractorMediaSource.EventListener, DefaultDrmSessionManager.EventListener,
+        MetadataRenderer.Output {
+
+  private static final String TAG = "EventLogger";
+  // Cap on how many timeline periods/windows are dumped, to keep logs readable.
+  private static final int MAX_TIMELINE_ITEM_LINES = 3;
+  // Renders elapsed times as seconds with exactly two decimals and no digit grouping.
+  private static final NumberFormat TIME_FORMAT;
+  static {
+    TIME_FORMAT = NumberFormat.getInstance(Locale.US);
+    TIME_FORMAT.setMinimumFractionDigits(2);
+    TIME_FORMAT.setMaximumFractionDigits(2);
+    TIME_FORMAT.setGroupingUsed(false);
+  }
+
+  private final MappingTrackSelector trackSelector;
+  private final Timeline.Window window;
+  private final Timeline.Period period;
+  // Session start time (elapsedRealtime) used to timestamp every log line.
+  private final long startTimeMs;
+
+  public EventLogger(MappingTrackSelector trackSelector) {
+    this.trackSelector = trackSelector;
+    window = new Timeline.Window();
+    period = new Timeline.Period();
+    startTimeMs = SystemClock.elapsedRealtime();
+  }
+
+  // ExoPlayer.EventListener
+
+  @Override
+  public void onLoadingChanged(boolean isLoading) {
+    Log.d(TAG, "loading [" + isLoading + "]");
+  }
+
+  @Override
+  public void onPlayerStateChanged(boolean playWhenReady, int state) {
+    Log.d(TAG, "state [" + getSessionTimeString() + ", " + playWhenReady + ", "
+            + getStateString(state) + "]");
+  }
+
+  @Override
+  public void onPositionDiscontinuity() {
+    Log.d(TAG, "positionDiscontinuity");
+  }
+
+  @Override
+  public void onTimelineChanged(Timeline timeline, Object manifest) {
+    if (timeline == null) {
+      return;
+    }
+    int periodCount = timeline.getPeriodCount();
+    int windowCount = timeline.getWindowCount();
+    Log.d(TAG, "sourceInfo [periodCount=" + periodCount + ", windowCount=" + windowCount);
+    // Only the first MAX_TIMELINE_ITEM_LINES periods/windows are printed; the
+    // remainder is elided with "...".
+    for (int i = 0; i < Math.min(periodCount, MAX_TIMELINE_ITEM_LINES); i++) {
+      timeline.getPeriod(i, period);
+      Log.d(TAG, "  " +  "period [" + getTimeString(period.getDurationMs()) + "]");
+    }
+    if (periodCount > MAX_TIMELINE_ITEM_LINES) {
+      Log.d(TAG, "  ...");
+    }
+    for (int i = 0; i < Math.min(windowCount, MAX_TIMELINE_ITEM_LINES); i++) {
+      timeline.getWindow(i, window);
+      Log.d(TAG, "  " +  "window [" + getTimeString(window.getDurationMs()) + ", "
+              + window.isSeekable + ", " + window.isDynamic + "]");
+    }
+    if (windowCount > MAX_TIMELINE_ITEM_LINES) {
+      Log.d(TAG, "  ...");
+    }
+    Log.d(TAG, "]");
+  }
+
+  @Override
+  public void onPlayerError(ExoPlaybackException e) {
+    Log.e(TAG, "playerFailed [" + getSessionTimeString() + "]", e);
+  }
+
+  @Override
+  public void onTracksChanged(TrackGroupArray ignored, TrackSelectionArray trackSelections) {
+    MappedTrackInfo mappedTrackInfo = trackSelector.getCurrentMappedTrackInfo();
+    if (mappedTrackInfo == null) {
+      Log.d(TAG, "Tracks []");
+      return;
+    }
+    Log.d(TAG, "Tracks [");
+    // Log tracks associated to renderers.
+    for (int rendererIndex = 0; rendererIndex < mappedTrackInfo.length; rendererIndex++) {
+      TrackGroupArray rendererTrackGroups = mappedTrackInfo.getTrackGroups(rendererIndex);
+      TrackSelection trackSelection = trackSelections.get(rendererIndex);
+      if (rendererTrackGroups.length > 0) {
+        Log.d(TAG, "  Renderer:" + rendererIndex + " [");
+        for (int groupIndex = 0; groupIndex < rendererTrackGroups.length; groupIndex++) {
+          TrackGroup trackGroup = rendererTrackGroups.get(groupIndex);
+          String adaptiveSupport = getAdaptiveSupportString(trackGroup.length,
+                  mappedTrackInfo.getAdaptiveSupport(rendererIndex, groupIndex, false));
+          Log.d(TAG, "    Group:" + groupIndex + ", adaptive_supported=" + adaptiveSupport + " [");
+          for (int trackIndex = 0; trackIndex < trackGroup.length; trackIndex++) {
+            String status = getTrackStatusString(trackSelection, trackGroup, trackIndex);
+            String formatSupport = getFormatSupportString(
+                    mappedTrackInfo.getTrackFormatSupport(rendererIndex, groupIndex, trackIndex));
+            Log.d(TAG, "      " + status + " Track:" + trackIndex + ", "
+                    + Format.toLogString(trackGroup.getFormat(trackIndex))
+                    + ", supported=" + formatSupport);
+          }
+          Log.d(TAG, "    ]");
+        }
+        // Log metadata for at most one of the tracks selected for the renderer.
+        if (trackSelection != null) {
+          for (int selectionIndex = 0; selectionIndex < trackSelection.length(); selectionIndex++) {
+            Metadata metadata = trackSelection.getFormat(selectionIndex).metadata;
+            if (metadata != null) {
+              Log.d(TAG, "    Metadata [");
+              printMetadata(metadata, "      ");
+              Log.d(TAG, "    ]");
+              break;
+            }
+          }
+        }
+        Log.d(TAG, "  ]");
+      }
+    }
+    // Log tracks not associated with a renderer.
+    TrackGroupArray unassociatedTrackGroups = mappedTrackInfo.getUnassociatedTrackGroups();
+    if (unassociatedTrackGroups.length > 0) {
+      Log.d(TAG, "  Renderer:None [");
+      for (int groupIndex = 0; groupIndex < unassociatedTrackGroups.length; groupIndex++) {
+        Log.d(TAG, "    Group:" + groupIndex + " [");
+        TrackGroup trackGroup = unassociatedTrackGroups.get(groupIndex);
+        for (int trackIndex = 0; trackIndex < trackGroup.length; trackIndex++) {
+          // Unassociated tracks are by definition unselected and unsupported.
+          String status = getTrackStatusString(false);
+          String formatSupport = getFormatSupportString(
+                  RendererCapabilities.FORMAT_UNSUPPORTED_TYPE);
+          Log.d(TAG, "      " + status + " Track:" + trackIndex + ", "
+                  + Format.toLogString(trackGroup.getFormat(trackIndex))
+                  + ", supported=" + formatSupport);
+        }
+        Log.d(TAG, "    ]");
+      }
+      Log.d(TAG, "  ]");
+    }
+    Log.d(TAG, "]");
+  }
+
+  // MetadataRenderer.Output
+
+  @Override
+  public void onMetadata(Metadata metadata) {
+    Log.d(TAG, "onMetadata [");
+    printMetadata(metadata, "  ");
+    Log.d(TAG, "]");
+  }
+
+  // AudioRendererEventListener
+
+  @Override
+  public void onAudioEnabled(DecoderCounters counters) {
+    Log.d(TAG, "audioEnabled [" + getSessionTimeString() + "]");
+  }
+
+  @Override
+  public void onAudioSessionId(int audioSessionId) {
+    Log.d(TAG, "audioSessionId [" + audioSessionId + "]");
+  }
+
+  @Override
+  public void onAudioDecoderInitialized(String decoderName, long elapsedRealtimeMs,
+                                        long initializationDurationMs) {
+    Log.d(TAG, "audioDecoderInitialized [" + getSessionTimeString() + ", " + decoderName + "]");
+  }
+
+  @Override
+  public void onAudioInputFormatChanged(Format format) {
+    Log.d(TAG, "audioFormatChanged [" + getSessionTimeString() + ", " + Format.toLogString(format)
+            + "]");
+  }
+
+  @Override
+  public void onAudioDisabled(DecoderCounters counters) {
+    Log.d(TAG, "audioDisabled [" + getSessionTimeString() + "]");
+  }
+
+  @Override
+  public void onAudioTrackUnderrun(int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs) {
+    printInternalError("audioTrackUnderrun [" + bufferSize + ", " + bufferSizeMs + ", "
+            + elapsedSinceLastFeedMs + "]", null);
+  }
+
+  // VideoRendererEventListener
+
+  @Override
+  public void onVideoEnabled(DecoderCounters counters) {
+    Log.d(TAG, "videoEnabled [" + getSessionTimeString() + "]");
+  }
+
+  @Override
+  public void onVideoDecoderInitialized(String decoderName, long elapsedRealtimeMs,
+                                        long initializationDurationMs) {
+    Log.d(TAG, "videoDecoderInitialized [" + getSessionTimeString() + ", " + decoderName + "]");
+  }
+
+  @Override
+  public void onVideoInputFormatChanged(Format format) {
+    Log.d(TAG, "videoFormatChanged [" + getSessionTimeString() + ", " + Format.toLogString(format)
+            + "]");
+  }
+
+  @Override
+  public void onVideoDisabled(DecoderCounters counters) {
+    Log.d(TAG, "videoDisabled [" + getSessionTimeString() + "]");
+  }
+
+  @Override
+  public void onDroppedFrames(int count, long elapsed) {
+    Log.d(TAG, "droppedFrames [" + getSessionTimeString() + ", " + count + "]");
+  }
+
+  @Override
+  public void onVideoSizeChanged(int width, int height, int unappliedRotationDegrees,
+                                 float pixelWidthHeightRatio) {
+    // Do nothing.
+  }
+
+  @Override
+  public void onRenderedFirstFrame(Surface surface) {
+    // Do nothing.
+  }
+
+  // DefaultDrmSessionManager.EventListener
+
+  @Override
+  public void onDrmSessionManagerError(Exception e) {
+    printInternalError("drmSessionManagerError", e);
+  }
+
+  @Override
+  public void onDrmKeysRestored() {
+    Log.d(TAG, "drmKeysRestored [" + getSessionTimeString() + "]");
+  }
+
+  @Override
+  public void onDrmKeysRemoved() {
+    Log.d(TAG, "drmKeysRemoved [" + getSessionTimeString() + "]");
+  }
+
+  @Override
+  public void onDrmKeysLoaded() {
+    Log.d(TAG, "drmKeysLoaded [" + getSessionTimeString() + "]");
+  }
+
+  // ExtractorMediaSource.EventListener
+
+  @Override
+  public void onLoadError(IOException error) {
+    printInternalError("loadError", error);
+  }
+
+  // AdaptiveMediaSourceEventListener
+
+  @Override
+  public void onLoadStarted(DataSpec dataSpec, int dataType, int trackType, Format trackFormat,
+                            int trackSelectionReason, Object trackSelectionData, long mediaStartTimeMs,
+                            long mediaEndTimeMs, long elapsedRealtimeMs) {
+    // Do nothing.
+  }
+
+  @Override
+  public void onLoadError(DataSpec dataSpec, int dataType, int trackType, Format trackFormat,
+                          int trackSelectionReason, Object trackSelectionData, long mediaStartTimeMs,
+                          long mediaEndTimeMs, long elapsedRealtimeMs, long loadDurationMs, long bytesLoaded,
+                          IOException error, boolean wasCanceled) {
+    printInternalError("loadError", error);
+  }
+
+  @Override
+  public void onLoadCanceled(DataSpec dataSpec, int dataType, int trackType, Format trackFormat,
+                             int trackSelectionReason, Object trackSelectionData, long mediaStartTimeMs,
+                             long mediaEndTimeMs, long elapsedRealtimeMs, long loadDurationMs, long bytesLoaded) {
+    // Do nothing.
+  }
+
+  @Override
+  public void onLoadCompleted(DataSpec dataSpec, int dataType, int trackType, Format trackFormat,
+                              int trackSelectionReason, Object trackSelectionData, long mediaStartTimeMs,
+                              long mediaEndTimeMs, long elapsedRealtimeMs, long loadDurationMs, long bytesLoaded) {
+    // Do nothing.
+  }
+
+  @Override
+  public void onUpstreamDiscarded(int trackType, long mediaStartTimeMs, long mediaEndTimeMs) {
+    // Do nothing.
+  }
+
+  @Override
+  public void onDownstreamFormatChanged(int trackType, Format trackFormat, int trackSelectionReason,
+                                        Object trackSelectionData, long mediaTimeMs) {
+    // Do nothing.
+  }
+
+  // Internal methods
+
+  private void printInternalError(String type, Exception e) {
+    Log.e(TAG, "internalError [" + getSessionTimeString() + ", " + type + "]", e);
+  }
+
+  // Pretty-prints each supported Metadata.Entry subtype; unknown entry types
+  // are silently skipped.
+  private void printMetadata(Metadata metadata, String prefix) {
+    for (int i = 0; i < metadata.length(); i++) {
+      Metadata.Entry entry = metadata.get(i);
+      if (entry instanceof TextInformationFrame) {
+        TextInformationFrame textInformationFrame = (TextInformationFrame) entry;
+        Log.d(TAG, prefix + String.format("%s: value=%s", textInformationFrame.id,
+                textInformationFrame.value));
+      } else if (entry instanceof UrlLinkFrame) {
+        UrlLinkFrame urlLinkFrame = (UrlLinkFrame) entry;
+        Log.d(TAG, prefix + String.format("%s: url=%s", urlLinkFrame.id, urlLinkFrame.url));
+      } else if (entry instanceof PrivFrame) {
+        PrivFrame privFrame = (PrivFrame) entry;
+        Log.d(TAG, prefix + String.format("%s: owner=%s", privFrame.id, privFrame.owner));
+      } else if (entry instanceof GeobFrame) {
+        GeobFrame geobFrame = (GeobFrame) entry;
+        Log.d(TAG, prefix + String.format("%s: mimeType=%s, filename=%s, description=%s",
+                geobFrame.id, geobFrame.mimeType, geobFrame.filename, geobFrame.description));
+      } else if (entry instanceof ApicFrame) {
+        ApicFrame apicFrame = (ApicFrame) entry;
+        Log.d(TAG, prefix + String.format("%s: mimeType=%s, description=%s",
+                apicFrame.id, apicFrame.mimeType, apicFrame.description));
+      } else if (entry instanceof CommentFrame) {
+        CommentFrame commentFrame = (CommentFrame) entry;
+        Log.d(TAG, prefix + String.format("%s: language=%s, description=%s", commentFrame.id,
+                commentFrame.language, commentFrame.description));
+      } else if (entry instanceof Id3Frame) {
+        // Generic Id3Frame check must come after the specific frame subtypes.
+        Id3Frame id3Frame = (Id3Frame) entry;
+        Log.d(TAG, prefix + String.format("%s", id3Frame.id));
+      } else if (entry instanceof EventMessage) {
+        EventMessage eventMessage = (EventMessage) entry;
+        Log.d(TAG, prefix + String.format("EMSG: scheme=%s, id=%d, value=%s",
+                eventMessage.schemeIdUri, eventMessage.id, eventMessage.value));
+      }
+    }
+  }
+
+  private String getSessionTimeString() {
+    return getTimeString(SystemClock.elapsedRealtime() - startTimeMs);
+  }
+
+  private static String getTimeString(long timeMs) {
+    // C.TIME_UNSET means the duration/time is unknown; render it as "?".
+    return timeMs == C.TIME_UNSET ? "?" : TIME_FORMAT.format((timeMs) / 1000f);
+  }
+
+  private static String getStateString(int state) {
+    switch (state) {
+      case ExoPlayer.STATE_BUFFERING:
+        return "B";
+      case ExoPlayer.STATE_ENDED:
+        return "E";
+      case ExoPlayer.STATE_IDLE:
+        return "I";
+      case ExoPlayer.STATE_READY:
+        return "R";
+      default:
+        return "?";
+    }
+  }
+
+  private static String getFormatSupportString(int formatSupport) {
+    switch (formatSupport) {
+      case RendererCapabilities.FORMAT_HANDLED:
+        return "YES";
+      case RendererCapabilities.FORMAT_EXCEEDS_CAPABILITIES:
+        return "NO_EXCEEDS_CAPABILITIES";
+      case RendererCapabilities.FORMAT_UNSUPPORTED_SUBTYPE:
+        return "NO_UNSUPPORTED_TYPE";
+      case RendererCapabilities.FORMAT_UNSUPPORTED_TYPE:
+        return "NO";
+      default:
+        return "?";
+    }
+  }
+
+  private static String getAdaptiveSupportString(int trackCount, int adaptiveSupport) {
+    // Adaptive support is meaningless for a group with fewer than two tracks.
+    if (trackCount < 2) {
+      return "N/A";
+    }
+    switch (adaptiveSupport) {
+      case RendererCapabilities.ADAPTIVE_SEAMLESS:
+        return "YES";
+      case RendererCapabilities.ADAPTIVE_NOT_SEAMLESS:
+        return "YES_NOT_SEAMLESS";
+      case RendererCapabilities.ADAPTIVE_NOT_SUPPORTED:
+        return "NO";
+      default:
+        return "?";
+    }
+  }
+
+  private static String getTrackStatusString(TrackSelection selection, TrackGroup group,
+                                             int trackIndex) {
+    return getTrackStatusString(selection != null && selection.getTrackGroup() == group
+            && selection.indexOf(trackIndex) != C.INDEX_UNSET);
+  }
+
+  private static String getTrackStatusString(boolean enabled) {
+    return enabled ? "[X]" : "[ ]";
+  }
+
+}
new file mode 100644
--- /dev/null
+++ b/mobile/android/geckoview/src/main/java/org/mozilla/gecko/media/GeckoHlsAudioRenderer.java
@@ -0,0 +1,223 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+package org.mozilla.gecko.media;
+
+import android.media.MediaCodec;
+import android.media.MediaCodec.BufferInfo;
+import android.media.MediaCodec.CryptoInfo;
+import android.os.Handler;
+import android.util.Log;
+
+import com.google.android.exoplayer2.C;
+import com.google.android.exoplayer2.Format;
+import com.google.android.exoplayer2.RendererCapabilities;
+import com.google.android.exoplayer2.audio.AudioRendererEventListener;
+import com.google.android.exoplayer2.decoder.DecoderInputBuffer;
+import com.google.android.exoplayer2.mediacodec.MediaCodecInfo;
+import com.google.android.exoplayer2.mediacodec.MediaCodecSelector;
+import com.google.android.exoplayer2.mediacodec.MediaCodecUtil;
+import com.google.android.exoplayer2.util.MimeTypes;
+
+import java.nio.ByteBuffer;
+
+import org.mozilla.gecko.AppConstants.Versions;
+
+/**
+ * Audio renderer that, instead of decoding, captures demuxed audio samples
+ * from ExoPlayer's HlsMediaSource and queues them (as GeckoHlsSample) for
+ * Gecko's MediaFormatReader to consume.
+ */
+public class GeckoHlsAudioRenderer extends GeckoHlsRendererBase {
+    private final AudioRendererEventListener.EventDispatcher mEventDispatcher;
+
+    public GeckoHlsAudioRenderer(Handler eventHandler,
+                                 AudioRendererEventListener eventListener) {
+        super(C.TRACK_TYPE_AUDIO, (GeckoHlsPlayer.ComponentListener) eventListener);
+        assertTrue(Versions.feature16Plus);
+        LOGTAG = getClass().getSimpleName();
+        DEBUG = false;
+
+        mEventDispatcher = new AudioRendererEventListener.EventDispatcher(eventHandler, eventListener);
+    }
+
+    @Override
+    public final int supportsFormat(Format format) {
+        /*
+         * FORMAT_EXCEEDS_CAPABILITIES : The Renderer is capable of rendering
+         *                               formats with the same mime type, but
+         *                               the properties of the format exceed
+         *                               the renderer's capability.
+         * FORMAT_UNSUPPORTED_SUBTYPE : The Renderer is a general purpose
+         *                              renderer for formats of the same
+         *                              top-level type, but is not capable of
+         *                              rendering the format or any other format
+         *                              with the same mime type because the
+         *                              sub-type is not supported.
+         * FORMAT_UNSUPPORTED_TYPE : The Renderer is not capable of rendering
+         *                           the format, either because it does not support
+         *                           the format's top-level type, or because it's
+         *                           a specialized renderer for a different mime type.
+         * ADAPTIVE_NOT_SEAMLESS : The Renderer can adapt between formats,
+         *                         but may suffer a brief discontinuity (~50-100ms)
+         *                         when adaptation occurs.
+         */
+        // Evaluate the candidate format passed in, not the member mFormat:
+        // mFormat is null until the first format has been read, so querying it
+        // here would NPE (and would answer for the wrong format regardless).
+        String mimeType = format.sampleMimeType;
+        if (!MimeTypes.isAudio(mimeType)) {
+            return RendererCapabilities.FORMAT_UNSUPPORTED_TYPE;
+        }
+        MediaCodecInfo decoderInfo = null;
+        try {
+            MediaCodecSelector mediaCodecSelector = MediaCodecSelector.DEFAULT;
+            decoderInfo = mediaCodecSelector.getDecoderInfo(mimeType, false);
+        } catch (MediaCodecUtil.DecoderQueryException e) {
+            // Log the exception itself: getMessage() may be null and the stack
+            // trace is needed to diagnose codec-query failures.
+            Log.e(LOGTAG, "Failed to query decoder for mimeType(" + mimeType + ")", e);
+        }
+        if (decoderInfo == null) {
+            return RendererCapabilities.FORMAT_UNSUPPORTED_SUBTYPE;
+        }
+        /*
+         *  Note : If the code can make it to this place, ExoPlayer assumes
+         *         support for unknown sampleRate and channelCount when
+         *         SDK version is less than 21, otherwise, further check is needed
+         *         if there's no sampleRate/channelCount in format.
+         */
+        boolean decoderCapable = Versions.preLollipop ||
+                                 ((format.sampleRate == Format.NO_VALUE ||
+                                  decoderInfo.isAudioSampleRateSupportedV21(format.sampleRate)) &&
+                                 (format.channelCount == Format.NO_VALUE ||
+                                  decoderInfo.isAudioChannelCountSupportedV21(format.channelCount)));
+        int formatSupport = decoderCapable ?
+            RendererCapabilities.FORMAT_HANDLED :
+            RendererCapabilities.FORMAT_EXCEEDS_CAPABILITIES;
+        return RendererCapabilities.ADAPTIVE_NOT_SEAMLESS | formatSupport;
+    }
+
+    // Detects encrypted-content transitions by comparing the drmInitData of
+    // the previous and the new format.
+    protected void handleDrmInitChanged(Format oldFormat, Format newFormat) {
+        Object oldDrmInit = oldFormat == null ? null : oldFormat.drmInitData;
+        Object newDrmInit = newFormat.drmInitData;
+
+        // TODO: Notify MFR if the content is encrypted or not.
+        if (newDrmInit != oldDrmInit) {
+            if (newDrmInit != null) {
+            } else {
+            }
+        }
+    }
+
+    @Override
+    protected final void maybeInitRenderer() {
+        // No-op until the first format is known; also idempotent once initialized.
+        if (mInitialized || mFormat == null) {
+            return;
+        }
+        if (DEBUG) { Log.d(LOGTAG, "Initializing ... "); }
+        // Scratch buffer readSource() demuxes into before the data is copied out.
+        // NOTE(review): 22048 bytes looks arbitrary — confirm it covers the
+        // largest demuxed audio access unit.
+        mInputBuffer = ByteBuffer.wrap(new byte[22048]);
+        mInitialized = true;
+    }
+
+    @Override
+    protected void resetRenderer() {
+        mInputBuffer = null;
+        mInitialized = false;
+    }
+
+    /*
+     * The place we get demuxed data from HlsMediaSource(ExoPlayer).
+     * The data will then be converted to GeckoHlsSample and deliver to
+     * GeckoHlsDemuxerWrapper for further use.
+     * Returns true if a sample or a format was consumed and the caller may
+     * immediately try again; false if there is currently nothing to feed.
+     */
+    @Override
+    protected synchronized boolean feedInputBuffersQueue() {
+        if (!mInitialized || mInputStreamEnded || isQueuedEnoughData()) {
+            // Need to reinitialize the renderer or the input stream has ended
+            // or we just reached the maximum queue size.
+            return false;
+        }
+        DecoderInputBuffer bufferForRead =
+            new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED);
+        bufferForRead.data = mInputBuffer;
+        bufferForRead.clear();
+
+        // Read data from HlsMediaSource
+        int result = C.RESULT_NOTHING_READ;
+        try {
+            result = readSource(mFormatHolder, bufferForRead);
+        } catch (Exception e) {
+            Log.e(LOGTAG, "Exception when readSource :", e);
+            return false;
+        }
+
+        if (result == C.RESULT_NOTHING_READ) {
+            return false;
+        }
+        if (result == C.RESULT_FORMAT_READ) {
+            onInputFormatChanged(mFormatHolder.format);
+            return true;
+        }
+
+        // We've read a buffer.
+        if (bufferForRead.isEndOfStream()) {
+            if (DEBUG) { Log.d(LOGTAG, "Now we're at the End Of Stream."); }
+            mInputStreamEnded = true;
+            mDemuxedInputSamples.offer(GeckoHlsSample.EOS);
+            return false;
+        }
+
+        bufferForRead.flip();
+
+        // Copy the demuxed payload out of the shared scratch buffer so the
+        // queued sample owns its bytes independently of mInputBuffer reuse.
+        int size = bufferForRead.data.limit();
+        byte[] realData = new byte[size];
+        bufferForRead.data.get(realData, 0, size);
+        ByteBuffer buffer = ByteBuffer.wrap(realData);
+        mInputBuffer.clear();
+
+        CryptoInfo cryptoInfo = bufferForRead.isEncrypted() ? bufferForRead.cryptoInfo.getFrameworkCryptoInfoV16() : null;
+        BufferInfo bufferInfo = new BufferInfo();
+        // Flags in DecoderInputBuffer are synced with MediaCodec Buffer flags.
+        int flags = 0;
+        flags |= bufferForRead.isKeyFrame() ? MediaCodec.BUFFER_FLAG_KEY_FRAME : 0;
+        flags |= bufferForRead.isEndOfStream() ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0;
+        bufferInfo.set(0, size, bufferForRead.timeUs, flags);
+
+        // NOTE(review): size() >= 0 is vacuously true; a sample read before any
+        // format would yield formatIndex -1 — confirm whether this should
+        // assert size() > 0 instead.
+        assertTrue(mFormats.size() >= 0);
+        // We add a new format in the list once format changes, so the formatIndex
+        // should indicate to the last(latest) format.
+        GeckoHlsSample sample = GeckoHlsSample.create(buffer,
+                                                      bufferInfo,
+                                                      cryptoInfo,
+                                                      mFormats.size() - 1);
+
+        mDemuxedInputSamples.offer(sample);
+
+        if (DEBUG) {
+            Log.d(LOGTAG, "Demuxed sample PTS : " +
+                          sample.info.presentationTimeUs + ", duration :" +
+                          sample.duration + ", formatIndex(" +
+                          sample.formatIndex + "), queue size : " +
+                          mDemuxedInputSamples.size());
+        }
+
+        // Wake the waiting consumer once enough samples have been buffered.
+        if (mWaitingForData && isQueuedEnoughData()) {
+            if (DEBUG) { Log.d(LOGTAG, "onDataArrived"); }
+            mPlayerListener.onDataArrived();
+            mWaitingForData = false;
+        }
+        return true;
+    }
+
+    @Override
+    protected boolean clearInputSamplesQueue() {
+        if (DEBUG) { Log.d(LOGTAG, "clearInputSamplesQueue"); }
+        mDemuxedInputSamples.clear();
+        return true;
+    }
+
+    // Records the new input format, re-initializes the scratch buffer, and
+    // notifies listeners via the event dispatcher.
+    @Override
+    protected void onInputFormatChanged(Format newFormat) {
+        Format oldFormat = mFormat;
+        mFormat = newFormat;
+
+        handleDrmInitChanged(oldFormat, newFormat);
+
+        resetRenderer();
+        maybeInitRenderer();
+        mFormats.add(mFormat);
+        mEventDispatcher.inputFormatChanged(newFormat);
+    }
+}
new file mode 100644
--- /dev/null
+++ b/mobile/android/geckoview/src/main/java/org/mozilla/gecko/media/GeckoHlsPlayer.java
@@ -0,0 +1,573 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+package org.mozilla.gecko.media;
+
+import android.content.Context;
+import android.net.Uri;
+import android.os.Handler;
+import android.text.TextUtils;
+import android.util.Log;
+import android.view.Surface;
+
+import com.google.android.exoplayer2.C;
+import com.google.android.exoplayer2.ExoPlaybackException;
+import com.google.android.exoplayer2.ExoPlayer;
+import com.google.android.exoplayer2.ExoPlayerFactory;
+import com.google.android.exoplayer2.Format;
+import com.google.android.exoplayer2.Timeline;
+import com.google.android.exoplayer2.audio.AudioRendererEventListener;
+import com.google.android.exoplayer2.decoder.DecoderCounters;
+import com.google.android.exoplayer2.mediacodec.MediaCodecSelector;
+import com.google.android.exoplayer2.metadata.Metadata;
+import com.google.android.exoplayer2.metadata.MetadataRenderer;
+import com.google.android.exoplayer2.source.MediaSource;
+import com.google.android.exoplayer2.source.TrackGroup;
+import com.google.android.exoplayer2.source.TrackGroupArray;
+import com.google.android.exoplayer2.source.hls.HlsMediaSource;
+import com.google.android.exoplayer2.trackselection.AdaptiveVideoTrackSelection;
+import com.google.android.exoplayer2.trackselection.DefaultTrackSelector;
+import com.google.android.exoplayer2.trackselection.TrackSelection;
+import com.google.android.exoplayer2.trackselection.TrackSelectionArray;
+import com.google.android.exoplayer2.upstream.DataSource;
+import com.google.android.exoplayer2.upstream.DefaultBandwidthMeter;
+import com.google.android.exoplayer2.upstream.DefaultDataSourceFactory;
+import com.google.android.exoplayer2.upstream.DefaultHttpDataSourceFactory;
+import com.google.android.exoplayer2.upstream.HttpDataSource;
+import com.google.android.exoplayer2.util.Util;
+import com.google.android.exoplayer2.video.VideoRendererEventListener;
+
+import org.mozilla.gecko.GeckoAppShell;
+
+import java.util.ArrayList;
+import java.util.concurrent.ConcurrentLinkedQueue;
+
+public class GeckoHlsPlayer implements ExoPlayer.EventListener {
+    private static final String LOGTAG = "GeckoHlsPlayer";
+    private static final DefaultBandwidthMeter BANDWIDTH_METER = new DefaultBandwidthMeter();
+    private static final int MAX_TIMELINE_ITEM_LINES = 3;
+    private static boolean DEBUG = false;
+
+    private DataSource.Factory mMediaDataSourceFactory;
+    private Timeline.Period mPeriod;
+    private Timeline.Window mWindow;
+    private Handler mMainHandler;
+    private EventLogger mEventLogger;
+    private ExoPlayer mPlayer;
+    private GeckoHlsRendererBase[] mRenderers;
+    private DefaultTrackSelector mTrackSelector;
+    private MediaSource mMediaSource;
+    private ComponentListener mComponentListener;
+
+    private boolean mIsTimelineStatic = false;
+    private long mDurationUs;
+
+    private GeckoHlsVideoRenderer mVRenderer = null;
+    private GeckoHlsAudioRenderer mARenderer = null;
+
+    // Able to control if we only want V/A/V+A tracks from bitstream.
+    private class RendererController {
+        private final boolean mEnableV;
+        private final boolean mEnableA;
+        RendererController(boolean enableVideoRenderer, boolean enableAudioRenderer) {
+            this.mEnableV = enableVideoRenderer;
+            this.mEnableA = enableAudioRenderer;
+        }
+        boolean VideoRendererEnabled() { return mEnableV; }
+        boolean AudioRendererEnabled() { return mEnableA; }
+    }
+    private RendererController mRendererController = new RendererController(true, true);
+
+    // Provide statistical information of tracks.
+    private class HlsMediaTracksInfo {
+        private int mNumVideoTracks = 0;
+        private int mNumAudioTracks = 0;
+        private boolean mVideoInfoUpdated = false;
+        private boolean mAudioInfoUpdated = false;
+        HlsMediaTracksInfo(int numVideoTracks, int numAudioTracks) {
+            this.mNumVideoTracks = numVideoTracks;
+            this.mNumAudioTracks = numAudioTracks;
+        }
+        public boolean hasVideo() { return mNumVideoTracks > 0; }
+        public boolean hasAudio() { return mNumAudioTracks > 0; }
+        public int getNumOfVideoTracks() { return mNumVideoTracks; }
+        public int getNumOfAudioTracks() { return mNumAudioTracks; }
+        public void onVideoInfoUpdated() { mVideoInfoUpdated = true; }
+        public void onAudioInfoUpdated() { mAudioInfoUpdated = true; }
+        public boolean videoReady() {
+            return hasVideo() ? mVideoInfoUpdated : true;
+        }
+        public boolean audioReady() {
+            return hasAudio() ? mAudioInfoUpdated : true;
+        }
+    }
+    private HlsMediaTracksInfo mTracksInfo = null;
+
+    private boolean mIsPlayerInitDone = false;
+    private boolean mIsDemuxerInitDone = false;
+    private DemuxerCallbacks mDemuxerCallbacks;
+    private ResourceCallbacks mResourceCallbacks;
+
+    protected String mUserAgent;
+
+    public enum Track_Type {
+        TRACK_UNDEFINED,
+        TRACK_AUDIO,
+        TRACK_VIDEO,
+        TRACK_TEXT,
+    }
+
+    public enum RESOURCE_ERROR {
+        BASE(-100),
+        UNKNOWN(-101),
+        PLAYER(-102),
+        UNSUPPORTED(-103);
+
+        private int mNumVal;
+        RESOURCE_ERROR(int numVal) {
+            this.mNumVal = numVal;
+        }
+        public int code() {
+            return mNumVal;
+        }
+    }
+
+    public enum DEMUXER_ERROR {
+        BASE(-200),
+        UNKNOWN(-201),
+        PLAYER(-202),
+        UNSUPPORTED(-203);
+
+        private int mNumVal;
+        DEMUXER_ERROR(int numVal) {
+            this.mNumVal = numVal;
+        }
+        public int code() {
+            return mNumVal;
+        }
+    }
+
+    public interface DemuxerCallbacks {
+        void onInitialized(boolean hasAudio, boolean hasVideo);
+        void onDemuxerError(int errorCode);
+    }
+
+    public interface ResourceCallbacks {
+        void onDataArrived();
+        void onResourceError(int errorCode);
+    }
+
+    private static void assertTrue(boolean condition) {
+        if (DEBUG && !condition) {
+            throw new AssertionError("Expected condition to be true");
+        }
+    }
+
+    public void checkInitDone() {
+        assertTrue(mDemuxerCallbacks != null);
+        assertTrue(mTracksInfo != null);
+        if (mIsDemuxerInitDone) {
+            return;
+        }
+
+        if (mTracksInfo.videoReady() && mTracksInfo.audioReady()) {
+            mDemuxerCallbacks.onInitialized(mTracksInfo.hasAudio(), mTracksInfo.hasVideo());
+            mIsDemuxerInitDone = true;
+        }
+    }
+
+    public final class ComponentListener implements VideoRendererEventListener,
+            AudioRendererEventListener, MetadataRenderer.Output {
+
+        // General purpose implementation
+        public void onDataArrived() {
+            assertTrue(mResourceCallbacks != null);
+            mResourceCallbacks.onDataArrived();
+        }
+
+        // VideoRendererEventListener implementation
+        @Override
+        public void onVideoEnabled(DecoderCounters counters) {
+            // do nothing
+        }
+
+        @Override
+        public void onVideoDecoderInitialized(String decoderName, long initializedTimestampMs,
+                                              long initializationDurationMs) {
+            // do nothing
+        }
+
+        @Override
+        public void onVideoInputFormatChanged(Format format) {
+            assertTrue(mDemuxerCallbacks != null);
+            assertTrue(mTracksInfo != null);
+            if (DEBUG) {
+                Log.d(LOGTAG, "[CB] onVideoInputFormatChanged [" + format + "]");
+                Log.d(LOGTAG, "[CB] SampleMIMEType [" +
+                              format.sampleMimeType + "], ContainerMIMEType [" +
+                              format.containerMimeType + "]");
+            }
+            mTracksInfo.onVideoInfoUpdated();
+            checkInitDone();
+        }
+
+        @Override
+        public void onDroppedFrames(int count, long elapsed) {
+            // do nothing
+        }
+
+        @Override
+        public void onVideoSizeChanged(int width, int height, int unappliedRotationDegrees,
+                                       float pixelWidthHeightRatio) {
+            // do nothing
+        }
+
+        @Override
+        public void onRenderedFirstFrame(Surface surface) {
+            // do nothing
+        }
+
+        @Override
+        public void onVideoDisabled(DecoderCounters counters) {}
+
+        // AudioRendererEventListener implementation
+        @Override
+        public void onAudioEnabled(DecoderCounters counters) {
+            // do nothing
+        }
+
+        @Override
+        public void onAudioSessionId(int sessionId) {
+            // do nothing
+        }
+
+        @Override
+        public void onAudioDecoderInitialized(String decoderName, long initializedTimestampMs,
+                                              long initializationDurationMs) {
+            // do nothing
+        }
+
+        @Override
+        public void onAudioInputFormatChanged(Format format) {
+            assertTrue(mDemuxerCallbacks != null);
+            assertTrue(mTracksInfo != null);
+            if (DEBUG) { Log.d(LOGTAG, "[CB] onAudioInputFormatChanged [" + format + "]"); }
+            mTracksInfo.onAudioInfoUpdated();
+            checkInitDone();
+        }
+
+        @Override
+        public void onAudioTrackUnderrun(int bufferSize, long bufferSizeMs,
+                                         long elapsedSinceLastFeedMs) {
+            // do nothing
+        }
+
+        @Override
+        public void onAudioDisabled(DecoderCounters counters) {}
+
+        // MetadataRenderer.Output implementation
+        @Override
+        public void onMetadata(Metadata metadata) {
+            // do nothing
+        }
+    }
+
+    public DataSource.Factory buildDataSourceFactory(Context va, DefaultBandwidthMeter bandwidthMeter) {
+        return new DefaultDataSourceFactory(va, bandwidthMeter,
+                buildHttpDataSourceFactory(bandwidthMeter));
+    }
+
+    public HttpDataSource.Factory buildHttpDataSourceFactory(DefaultBandwidthMeter bandwidthMeter) {
+        return new DefaultHttpDataSourceFactory(mUserAgent, bandwidthMeter);
+    }
+
+    private MediaSource buildMediaSource(Uri uri, String overrideExtension) {
+        if (DEBUG) { Log.d(LOGTAG, "buildMediaSource uri[" + uri + "]" + ", overrideExt[" + overrideExtension + "]"); }
+        int type = Util.inferContentType(!TextUtils.isEmpty(overrideExtension) ? "." + overrideExtension
+                : uri.getLastPathSegment());
+        switch (type) {
+            case C.TYPE_HLS:
+                return new HlsMediaSource(uri, mMediaDataSourceFactory, mMainHandler, null);
+            default: {
+                mResourceCallbacks.onResourceError(RESOURCE_ERROR.UNSUPPORTED.code());
+                throw new IllegalStateException("Unsupported type: " + type);
+            }
+        }
+    }
+
+    GeckoHlsPlayer() {
+        if (DEBUG) { Log.d(LOGTAG, " construct"); }
+    }
+
+    void addResourceWrapperCallbackListener(ResourceCallbacks callback) {
+        if (DEBUG) { Log.d(LOGTAG, " addResourceWrapperCallbackListener ..."); }
+        mResourceCallbacks = callback;
+    }
+
+    void addDemuxerWrapperCallbackListener(DemuxerCallbacks callback) {
+        if (DEBUG) { Log.d(LOGTAG, " addDemuxerWrapperCallbackListener ..."); }
+        mDemuxerCallbacks = callback;
+    }
+
+    synchronized void init(String url) {
+        if (DEBUG) { Log.d(LOGTAG, " init"); }
+        assertTrue(mResourceCallbacks != null);
+        if (mIsPlayerInitDone) {
+            return;
+        }
+        Context ctx = GeckoAppShell.getApplicationContext();
+        mComponentListener = new ComponentListener();
+        mMainHandler = new Handler();
+
+        mDurationUs = 0;
+        mWindow = new Timeline.Window();
+        mPeriod = new Timeline.Period();
+
+        // Prepare trackSelector
+        TrackSelection.Factory videoTrackSelectionFactory =
+                new AdaptiveVideoTrackSelection.Factory(BANDWIDTH_METER);
+        mTrackSelector = new DefaultTrackSelector(videoTrackSelectionFactory);
+
+        // Prepare customized renderer
+        ArrayList<GeckoHlsRendererBase> renderersList = new ArrayList<>();
+        mVRenderer = new GeckoHlsVideoRenderer(mMainHandler, mComponentListener);
+        renderersList.add(mVRenderer);
+        mARenderer = new GeckoHlsAudioRenderer(mMainHandler, mComponentListener);
+        renderersList.add(mARenderer);
+        mRenderers = renderersList.toArray(new GeckoHlsRendererBase[renderersList.size()]);
+
+        // Create ExoPlayer instance with specific components.
+        mPlayer = ExoPlayerFactory.newInstance(mRenderers, mTrackSelector);
+        mPlayer.addListener(this);
+
+        if (DEBUG) {
+            mEventLogger = new EventLogger(mTrackSelector);
+            mPlayer.addListener(mEventLogger);
+        }
+
+        Uri[] uris = new Uri[]{Uri.parse(url)};
+        mUserAgent = Util.getUserAgent(ctx, "RemoteDecoder");
+        mMediaDataSourceFactory = buildDataSourceFactory(ctx, BANDWIDTH_METER);
+
+        MediaSource[] mediaSources = new MediaSource[1];
+        mediaSources[0] = buildMediaSource(uris[0], null);
+        mMediaSource = mediaSources[0];
+
+        mPlayer.prepare(mMediaSource);
+        mIsPlayerInitDone = true;
+    }
+
+    @Override
+    public void onLoadingChanged(boolean isLoading) {
+        if (DEBUG) { Log.d(LOGTAG, "loading [" + isLoading + "]"); }
+        if (!isLoading) {
+            // To update buffered position.
+            mComponentListener.onDataArrived();
+        }
+    }
+
+    @Override
+    public void onPlayerStateChanged(boolean playWhenReady, int state) {
+        if (DEBUG) { Log.d(LOGTAG, "state [" + playWhenReady + ", " + getStateString(state) + "]"); }
+        if (state == ExoPlayer.STATE_READY) {
+            mPlayer.setPlayWhenReady(true);
+        }
+    }
+
+    @Override
+    public void onPositionDiscontinuity() {
+        if (DEBUG) { Log.d(LOGTAG, "positionDiscontinuity"); }
+    }
+
+    @Override
+    public void onPlayerError(ExoPlaybackException e) {
+        if (DEBUG) { Log.e(LOGTAG, "playerFailed" , e); }
+        if (mResourceCallbacks != null) {
+            mResourceCallbacks.onResourceError(RESOURCE_ERROR.PLAYER.code());
+        }
+        if (mDemuxerCallbacks != null) {
+            mDemuxerCallbacks.onDemuxerError(DEMUXER_ERROR.PLAYER.code());
+        }
+    }
+
+    @Override
+    public synchronized void onTracksChanged(TrackGroupArray ignored, TrackSelectionArray trackSelections) {
+        if (DEBUG) {
+            Log.d(LOGTAG, "onTracksChanged : TGA[" + ignored +
+                          "], TSA[" + trackSelections + "]");
+        }
+        mTracksInfo = null;
+        int numVideoTracks = 0;
+        int numAudioTracks = 0;
+        for (int j = 0; j < ignored.length; j++) {
+            TrackGroup tg = ignored.get(j);
+            for (int i = 0; i < tg.length; i++) {
+                Format fmt = tg.getFormat(i);
+                if (fmt.sampleMimeType != null) {
+                    if (mRendererController.VideoRendererEnabled() &&
+                        fmt.sampleMimeType.startsWith("video")) {
+                        numVideoTracks++;
+                    } else if (mRendererController.AudioRendererEnabled() &&
+                               fmt.sampleMimeType.startsWith("audio")) {
+                        numAudioTracks++;
+                    }
+                }
+            }
+        }
+        mTracksInfo = new HlsMediaTracksInfo(numVideoTracks, numAudioTracks);
+    }
+
+    @Override
+    public void onTimelineChanged(Timeline timeline, Object manifest) {
+        mIsTimelineStatic = !timeline.isEmpty()
+                && !timeline.getWindow(timeline.getWindowCount() - 1, mWindow).isDynamic;
+
+        int periodCount = timeline.getPeriodCount();
+        int windowCount = timeline.getWindowCount();
+        if (DEBUG) { Log.d(LOGTAG, "sourceInfo [periodCount=" + periodCount + ", windowCount=" + windowCount); }
+        for (int i = 0; i < Math.min(periodCount, MAX_TIMELINE_ITEM_LINES); i++) {
+          timeline.getPeriod(i, mPeriod);
+          if (mDurationUs < mPeriod.getDurationUs()) {
+              mDurationUs = mPeriod.getDurationUs();
+          }
+        }
+        for (int i = 0; i < Math.min(windowCount, MAX_TIMELINE_ITEM_LINES); i++) {
+          timeline.getWindow(i, mWindow);
+          if (mDurationUs < mWindow.getDurationUs()) {
+              mDurationUs = mWindow.getDurationUs();
+          }
+        }
+        // TODO : Need to check if the duration from play.getDuration is different
+        // with the one calculated from multi-timelines/windows.
+        if (DEBUG) {
+            Log.d(LOGTAG, "Media duration (from Timeline) = " + mDurationUs +
+                          "(us)" + " player.getDuration() = " + mPlayer.getDuration() +
+                          "(ms)");
+        }
+    }
+
+    private static String getStateString(int state) {
+        switch (state) {
+            case ExoPlayer.STATE_BUFFERING:
+                return "B";
+            case ExoPlayer.STATE_ENDED:
+                return "E";
+            case ExoPlayer.STATE_IDLE:
+                return "I";
+            case ExoPlayer.STATE_READY:
+                return "R";
+            default:
+                return "?";
+        }
+    }
+
+    // =======================================================================
+    // API for GeckoHlsDemuxerWrapper
+    // =======================================================================
+    public ConcurrentLinkedQueue<GeckoHlsSample> getVideoSamples(int number) {
+        return mVRenderer != null ? mVRenderer.getQueuedSamples(number) :
+                                    new ConcurrentLinkedQueue<GeckoHlsSample>();
+    }
+
+    public ConcurrentLinkedQueue<GeckoHlsSample> getAudioSamples(int number) {
+        return mARenderer != null ? mARenderer.getQueuedSamples(number) :
+                                    new ConcurrentLinkedQueue<GeckoHlsSample>();
+    }
+
+    public long getDuration() {
+        assertTrue(mPlayer != null);
+        // Value returned by getDuration() is in milliseconds.
+        long duration = mPlayer.getDuration() * 1000;
+        if (DEBUG) { Log.d(LOGTAG, "getDuration : " + duration + "(Us)"); }
+        return duration;
+    }
+
+    public long getBufferedPosition() {
+        assertTrue(mPlayer != null);
+        // Value returned by getBufferedPosition() is in milliseconds.
+        long bufferedPos = mPlayer.getBufferedPosition() * 1000;
+        if (DEBUG) { Log.d(LOGTAG, "getBufferedPosition : " + bufferedPos + "(Us)"); }
+        return bufferedPos;
+    }
+
+    public synchronized int getNumberTracks(Track_Type trackType) {
+        if (DEBUG) { Log.d(LOGTAG, "getNumberTracks"); }
+        assertTrue(mTracksInfo != null);
+
+        if (trackType == Track_Type.TRACK_VIDEO) {
+            return mTracksInfo.getNumOfVideoTracks();
+        } else if (trackType == Track_Type.TRACK_AUDIO) {
+            return mTracksInfo.getNumOfAudioTracks();
+        }
+        return 0;
+    }
+
+    public Format getVideoTrackFormat(int index) {
+        if (DEBUG) { Log.d(LOGTAG, "getVideoTrackFormat"); }
+        assertTrue(mVRenderer != null);
+        assertTrue(mTracksInfo != null);
+        return mTracksInfo.hasVideo() ? mVRenderer.getFormat(index) : null;
+    }
+
+    public Format getAudioTrackFormat(int index) {
+        if (DEBUG) { Log.d(LOGTAG, "getAudioTrackFormat"); }
+        assertTrue(mARenderer != null);
+        assertTrue(mTracksInfo != null);
+        return mTracksInfo.hasAudio() ? mARenderer.getFormat(index) : null;
+    }
+
+    public boolean seek(long positionUs) {
+        // positionUs : microseconds.
+        // NOTE : 1) It's not possible to seek media by tracktype via ExoPlayer Interface.
+        //        2) positionUs is samples PTS from MFR, we need to re-adjust it
+        //           for ExoPlayer by subtracting sample start time.
+        //        3) Time unit for ExoPlayer.seek() is milliseconds.
+        try {
+            // TODO : Gather Timeline Period / Window information to develop
+            //        complete timeline, and seekTime should be inside the duration.
+            Long startTime = Long.MAX_VALUE;
+            for (GeckoHlsRendererBase r : mRenderers) {
+                if (r == mVRenderer  && mRendererController.VideoRendererEnabled() ||
+                    r == mARenderer  && mRendererController.AudioRendererEnabled()) {
+                // Find the min value of the start time
+                    startTime = Math.min(startTime, r.getFirstSamplePTS());
+                }
+            }
+            if (DEBUG) {
+                Log.d(LOGTAG, "seeking  : " + positionUs / 1000 +
+                              " (ms); startTime : " + startTime / 1000 + " (ms)");
+            }
+            assertTrue(startTime != Long.MAX_VALUE);
+            mPlayer.seekTo(positionUs / 1000 - startTime / 1000);
+        } catch (Exception e) {
+            mDemuxerCallbacks.onDemuxerError(DEMUXER_ERROR.UNKNOWN.code());
+            return false;
+        }
+        return true;
+    }
+
+    public long getNextKeyFrameTime() {
+        long nextKeyFrameTime = mVRenderer != null ? mVRenderer.getNextKeyFrameTime() : 0;
+        return nextKeyFrameTime;
+    }
+
+    public void release() {
+        if (DEBUG) { Log.d(LOGTAG, "releasing  ..."); }
+        if (mPlayer != null) {
+            if (mEventLogger != null) {
+                mPlayer.removeListener(mEventLogger);
+            }
+            mPlayer.removeListener(this);
+            mPlayer.stop();
+            mPlayer.release();
+            mVRenderer = null;
+            mARenderer = null;
+            mPlayer = null;
+        }
+        mDemuxerCallbacks = null;
+        mResourceCallbacks = null;
+        mIsPlayerInitDone = false;
+        mIsDemuxerInitDone = false;
+    }
+}
\ No newline at end of file
new file mode 100644
--- /dev/null
+++ b/mobile/android/geckoview/src/main/java/org/mozilla/gecko/media/GeckoHlsRendererBase.java
@@ -0,0 +1,158 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+package org.mozilla.gecko.media;
+
+import android.util.Log;
+
+import com.google.android.exoplayer2.BaseRenderer;
+import com.google.android.exoplayer2.C;
+import com.google.android.exoplayer2.Format;
+import com.google.android.exoplayer2.FormatHolder;
+
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.concurrent.ConcurrentLinkedQueue;
+
+public abstract class GeckoHlsRendererBase extends BaseRenderer {
+    protected static final int QUEUED_INPUT_SAMPLE_DURATION_THRESHOLD = 1000000; //1sec
+    protected final FormatHolder mFormatHolder = new FormatHolder();
+    protected boolean DEBUG;
+    protected String LOGTAG;
+    // Notify GeckoHlsPlayer about renderer's status, i.e. data has arrived.
+    protected GeckoHlsPlayer.ComponentListener mPlayerListener;
+
+    protected ConcurrentLinkedQueue<GeckoHlsSample> mDemuxedInputSamples = new ConcurrentLinkedQueue<>();
+
+    protected ByteBuffer mInputBuffer = null;
+    protected Format mFormat = null;
+    protected ArrayList<Format> mFormats = new ArrayList<Format>();
+    protected boolean mInitialized = false;
+    protected boolean mWaitingForData = true;
+    protected boolean mInputStreamEnded = false;
+    protected long mFirstSampleStartTime = 0;
+
+    protected abstract void maybeInitRenderer();
+    protected abstract void resetRenderer();
+    protected abstract boolean feedInputBuffersQueue();
+    protected abstract boolean clearInputSamplesQueue();
+    protected abstract void onInputFormatChanged(Format newFormat);
+
+    protected void assertTrue(boolean condition) {
+        if (DEBUG && !condition) {
+            throw new AssertionError("Expected condition to be true");
+        }
+    }
+
+    public GeckoHlsRendererBase(int trackType, GeckoHlsPlayer.ComponentListener eventListener) {
+        super(trackType);
+        mPlayerListener = eventListener;
+    }
+
+    protected boolean isQueuedEnoughData() {
+        if (mDemuxedInputSamples.isEmpty()) {
+            return false;
+        }
+        int size = mDemuxedInputSamples.size();
+        GeckoHlsSample[] queuedSamples =
+                mDemuxedInputSamples.toArray(new GeckoHlsSample[size]);
+        return Math.abs(queuedSamples[size - 1].info.presentationTimeUs -
+                        queuedSamples[0].info.presentationTimeUs) >
+               QUEUED_INPUT_SAMPLE_DURATION_THRESHOLD;
+    }
+
+    public Format getFormat(int index) {
+        assertTrue(index >= 0);
+        Format fmt = index < mFormats.size() ? mFormats.get(index) : null;
+        if (DEBUG) { Log.d(LOGTAG, "getFormat : index = " + index + ", format : " + fmt); }
+        return fmt;
+    }
+
+    public long getFirstSamplePTS() { return mFirstSampleStartTime; }
+
+    public synchronized ConcurrentLinkedQueue<GeckoHlsSample> getQueuedSamples(int number) {
+        ConcurrentLinkedQueue<GeckoHlsSample> samples =
+            new ConcurrentLinkedQueue<GeckoHlsSample>();
+
+        int queuedSize = mDemuxedInputSamples.size();
+        for (int i = 0; i < queuedSize; i++) {
+            if (i >= number) {
+                break;
+            }
+            GeckoHlsSample sample = mDemuxedInputSamples.poll();
+            samples.offer(sample);
+        }
+        if (samples.isEmpty()) {
+            if (DEBUG) { Log.d(LOGTAG, "getQueuedSamples isEmpty, mWaitingForData = true !"); }
+            mWaitingForData = true;
+        } else if (mFirstSampleStartTime == 0) {
+            mFirstSampleStartTime = samples.peek().info.presentationTimeUs;
+            if (DEBUG) { Log.d(LOGTAG, "mFirstSampleStartTime = " + mFirstSampleStartTime); }
+        }
+        return samples;
+    }
+
+    private void readFormat() {
+        int result = readSource(mFormatHolder, null);
+        if (result == C.RESULT_FORMAT_READ) {
+            onInputFormatChanged(mFormatHolder.format);
+        }
+    }
+
+    @Override
+    protected void onEnabled(boolean joining) {
+        // Do nothing.
+    }
+
+    @Override
+    protected void onDisabled() {
+        mFormat = null;
+        mFormats.clear();
+        resetRenderer();
+    }
+
+    @Override
+    public boolean isReady() {
+        return mFormat != null;
+    }
+
+    @Override
+    public boolean isEnded() {
+        return mInputStreamEnded;
+    }
+
+    @Override
+    protected void onPositionReset(long positionUs, boolean joining) {
+        if (DEBUG) { Log.d(LOGTAG, "onPositionReset : positionUs = " + positionUs); }
+        mInputStreamEnded = false;
+        if (mInitialized) {
+            clearInputSamplesQueue();
+        }
+    }
+
+    /*
+     * This is called by ExoPlayerImplInternal.java.
+     * ExoPlayer checks the status of renderer, i.e. isReady() / isEnded(), and
+     * calls renderer.render by passing its wall clock time.
+     */
+    @Override
+    public void render(long positionUs, long elapsedRealtimeUs) {
+        if (DEBUG) {
+            Log.d(LOGTAG, "positionUs = " + positionUs +
+                          ", mInputStreamEnded = " + mInputStreamEnded);
+        }
+        if (mInputStreamEnded) {
+            return;
+        }
+        if (mFormat == null) {
+            readFormat();
+        }
+
+        maybeInitRenderer();
+        if (mInitialized) {
+            while (feedInputBuffersQueue()) {
+            }
+        }
+    }
+}
new file mode 100644
--- /dev/null
+++ b/mobile/android/geckoview/src/main/java/org/mozilla/gecko/media/GeckoHlsVideoRenderer.java
@@ -0,0 +1,522 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+package org.mozilla.gecko.media;
+
+import android.media.MediaCodec;
+import android.media.MediaCodec.BufferInfo;
+import android.media.MediaCodec.CryptoInfo;
+import android.os.Handler;
+import android.util.Log;
+
+import com.google.android.exoplayer2.C;
+import com.google.android.exoplayer2.Format;
+import com.google.android.exoplayer2.decoder.DecoderInputBuffer;
+import com.google.android.exoplayer2.mediacodec.MediaCodecInfo;
+import com.google.android.exoplayer2.mediacodec.MediaCodecSelector;
+import com.google.android.exoplayer2.mediacodec.MediaCodecUtil;
+import com.google.android.exoplayer2.RendererCapabilities;
+import com.google.android.exoplayer2.util.MimeTypes;
+import com.google.android.exoplayer2.video.VideoRendererEventListener;
+
+import java.util.Arrays;
+import java.util.ArrayList;
+import java.nio.ByteBuffer;
+import java.util.concurrent.ConcurrentLinkedQueue;
+
+import org.mozilla.gecko.AppConstants.Versions;
+
+public class GeckoHlsVideoRenderer extends GeckoHlsRendererBase {
+    private final VideoRendererEventListener.EventDispatcher mEventDispatcher;
+
+    /*
+     * By configuring these states, initialization data is provided for
+     * ExoPlayer's HlsMediaSource to parse HLS bitstream and then provide samples
+     * starting with an Access Unit Delimiter including SPS/PPS for TS,
+     * and provide samples starting with an AUD without SPS/PPS for FMP4.
+     */
+    private enum RECONFIGURATION_STATE {
+        NONE,
+        WRITE_PENDING,
+        QUEUE_PENDING
+    }
+    private boolean mRendererReconfigured;
+    private RECONFIGURATION_STATE mRendererReconfigurationState = RECONFIGURATION_STATE.NONE;
+
+    // A list of the formats which may be included in the bitstream.
+    private Format[] mStreamFormats;
+    // The max width/height/inputBufferSize for specific codec format.
+    private CodecMaxValues mCodecMaxValues;
+    // A temporary queue for samples whose duration is not calculated yet.
+    private ConcurrentLinkedQueue<GeckoHlsSample> mDemuxedNoDurationSamples =
+        new ConcurrentLinkedQueue<>();
+
+    // Contain CSD-0(SPS)/CSD-1(PPS) information (in AnnexB format) for
+    // prepending each keyframe. When video format changes, this information
+    // changes accordingly.
+    private byte[] mCSDInfo = null;
+
+    public GeckoHlsVideoRenderer(Handler eventHandler,
+                                 VideoRendererEventListener eventListener) {
+        // NOTE(review): the listener is unconditionally cast to
+        // GeckoHlsPlayer.ComponentListener, so callers must pass that concrete
+        // type or this throws ClassCastException — confirm at call sites.
+        super(C.TRACK_TYPE_VIDEO, (GeckoHlsPlayer.ComponentListener) eventListener);
+        assertTrue(Versions.feature16Plus);
+        LOGTAG = getClass().getSimpleName();
+        DEBUG = false;
+        mEventDispatcher = new VideoRendererEventListener.EventDispatcher(eventHandler, eventListener);
+    }
+
+    @Override
+    public final int supportsMixedMimeTypeAdaptation() {
+        // Adapting across different mime types is supported but may produce a
+        // brief discontinuity rather than a seamless switch.
+        return ADAPTIVE_NOT_SEAMLESS;
+    }
+
+    /*
+     * Report this renderer's capability for the queried |format|.
+     * BUG FIX: this method previously read the member |mFormat| instead of the
+     * |format| argument. |mFormat| is null until the first format is received
+     * (NPE risk) and in any case is not the format ExoPlayer is asking about.
+     */
+    @Override
+    public final int supportsFormat(Format format) {
+        /*
+         * FORMAT_EXCEEDS_CAPABILITIES : The Renderer is capable of rendering
+         *                               formats with the same mime type, but
+         *                               the properties of the format exceed
+         *                               the renderer's capability.
+         * FORMAT_UNSUPPORTED_SUBTYPE : The Renderer is a general purpose
+         *                              renderer for formats of the same
+         *                              top-level type, but is not capable of
+         *                              rendering the format or any other format
+         *                              with the same mime type because the
+         *                              sub-type is not supported.
+         * FORMAT_UNSUPPORTED_TYPE : The Renderer is not capable of rendering
+         *                           the format, either because it does not support
+         *                           the format's top-level type, or because it's
+         *                           a specialized renderer for a different mime type.
+         * ADAPTIVE_NOT_SEAMLESS : The Renderer can adapt between formats,
+         *                         but may suffer a brief discontinuity (~50-100ms)
+         *                         when adaptation occurs.
+         * ADAPTIVE_SEAMLESS : The Renderer can seamlessly adapt between formats.
+         */
+        final String mimeType = format.sampleMimeType;
+        if (!MimeTypes.isVideo(mimeType)) {
+            return RendererCapabilities.FORMAT_UNSUPPORTED_TYPE;
+        }
+
+        MediaCodecInfo decoderInfo = null;
+        try {
+            MediaCodecSelector mediaCodecSelector = MediaCodecSelector.DEFAULT;
+            decoderInfo = mediaCodecSelector.getDecoderInfo(mimeType, false);
+        } catch (MediaCodecUtil.DecoderQueryException e) {
+            Log.e(LOGTAG, e.getMessage());
+        }
+        if (decoderInfo == null) {
+            return RendererCapabilities.FORMAT_UNSUPPORTED_SUBTYPE;
+        }
+
+        boolean decoderCapable = decoderInfo.isCodecSupported(format.codecs);
+        if (decoderCapable && format.width > 0 && format.height > 0) {
+            if (Versions.preLollipop) {
+                // Pre-L devices expose no size/rate query; fall back to the
+                // maximum H.264 decodable frame size heuristic.
+                try {
+                    decoderCapable = format.width * format.height <= MediaCodecUtil.maxH264DecodableFrameSize();
+                } catch (MediaCodecUtil.DecoderQueryException e) {
+                    Log.e(LOGTAG, e.getMessage());
+                }
+                if (!decoderCapable) {
+                    if (DEBUG) {
+                        Log.d(LOGTAG, "Check [legacyFrameSize, " +
+                                      format.width + "x" + format.height + "]");
+                    }
+                }
+            } else {
+                decoderCapable =
+                    decoderInfo.isVideoSizeAndRateSupportedV21(format.width,
+                                                               format.height,
+                                                               format.frameRate);
+            }
+        }
+
+        int adaptiveSupport = decoderInfo.adaptive ?
+            RendererCapabilities.ADAPTIVE_SEAMLESS :
+            RendererCapabilities.ADAPTIVE_NOT_SEAMLESS;
+        int formatSupport = decoderCapable ?
+            RendererCapabilities.FORMAT_HANDLED :
+            RendererCapabilities.FORMAT_EXCEEDS_CAPABILITIES;
+        return adaptiveSupport | formatSupport;
+    }
+
+    // Lazily allocate the input buffer once the first format (and the stream
+    // format list) is known. Safe to call repeatedly; a no-op when already
+    // initialized or no format has arrived yet.
+    @Override
+    protected final void maybeInitRenderer() {
+        if (mInitialized || mFormat == null) {
+            return;
+        }
+        if (DEBUG) { Log.d(LOGTAG, "Initializing ... "); }
+        // Calculate maximum size which might be used for target format.
+        mCodecMaxValues = getCodecMaxValues(mFormat, mStreamFormats);
+        // Create a buffer with maximal size for reading source.
+        // NOTE(review): getMaxInputSize can return Format.NO_VALUE (-1), which
+        // would throw NegativeArraySizeException here — confirm upstream
+        // always provides a usable size for video formats.
+        mInputBuffer = ByteBuffer.wrap(new byte[mCodecMaxValues.inputSize]);
+        mInitialized = true;
+    }
+
+    // Release the input buffer / CSD and cancel any pending reconfiguration,
+    // returning the renderer to its uninitialized state.
+    @Override
+    protected void resetRenderer() {
+        if (DEBUG) { Log.d(LOGTAG, "[resetRenderer] mInitialized = " + mInitialized); }
+        if (!mInitialized) {
+            return;
+        }
+        mRendererReconfigured = false;
+        mRendererReconfigurationState = RECONFIGURATION_STATE.NONE;
+        mInputBuffer = null;
+        mCSDInfo = null;
+        mInitialized = false;
+    }
+
+    /*
+     * The place we get demuxed data from HlsMediaSource(ExoPlayer).
+     * The data will then be converted to GeckoHlsSample and delivered to
+     * GeckoHlsDemuxerWrapper for further use.
+     * Returns true when a buffer or a format was consumed and another read
+     * should be attempted; false when there is nothing (more) to do.
+     */
+    @Override
+    protected synchronized boolean feedInputBuffersQueue() {
+        if (!mInitialized || mInputStreamEnded || isQueuedEnoughData()) {
+            // Need to reinitialize the renderer or the input stream has ended
+            // or we just reached the maximum queue size.
+            return false;
+        }
+
+        // Wrap the shared mInputBuffer so readSource can fill it in place.
+        DecoderInputBuffer bufferForRead =
+            new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED);
+        bufferForRead.data = mInputBuffer;
+        bufferForRead.clear();
+
+        // For adaptive reconfiguration OMX decoders expect all reconfiguration
+        // data to be supplied at the start of the buffer that also contains
+        // the first frame in the new format.
+        if (mRendererReconfigurationState == RECONFIGURATION_STATE.WRITE_PENDING) {
+            if (DEBUG) { Log.d(LOGTAG, "[feedInput][WRITE_PENDING] put initialization data"); }
+            for (int i = 0; i < mFormat.initializationData.size(); i++) {
+                byte[] data = mFormat.initializationData.get(i);
+                bufferForRead.data.put(data);
+            }
+            mRendererReconfigurationState = RECONFIGURATION_STATE.QUEUE_PENDING;
+        }
+
+        // Read data from HlsMediaSource
+        int result = C.RESULT_NOTHING_READ;
+        try {
+            result = readSource(mFormatHolder, bufferForRead);
+        } catch (Exception e) {
+            Log.e(LOGTAG, "[feedInput] Exception when readSource :", e);
+            return false;
+        }
+
+        if (result == C.RESULT_NOTHING_READ) {
+            return false;
+        }
+        if (result == C.RESULT_FORMAT_READ) {
+            if (mRendererReconfigurationState == RECONFIGURATION_STATE.QUEUE_PENDING) {
+                if (DEBUG) { Log.d(LOGTAG, "[feedInput][QUEUE_PENDING] 2 formats in a row."); }
+                // We received two formats in a row. Clear the current buffer of any reconfiguration data
+                // associated with the first format.
+                bufferForRead.clear();
+                mRendererReconfigurationState = RECONFIGURATION_STATE.WRITE_PENDING;
+            }
+            onInputFormatChanged(mFormatHolder.format);
+            return true;
+        }
+
+        // We've read a buffer.
+        if (bufferForRead.isEndOfStream()) {
+            if (DEBUG) { Log.d(LOGTAG, "Now we're at the End Of Stream."); }
+            if (mRendererReconfigurationState == RECONFIGURATION_STATE.QUEUE_PENDING) {
+                if (DEBUG) { Log.d(LOGTAG, "[feedInput][QUEUE_PENDING] isEndOfStream."); }
+                // We received a new format immediately before the end of the stream. We need to clear
+                // the corresponding reconfiguration data from the current buffer, but re-write it into
+                // a subsequent buffer if there are any (e.g. if the user seeks backwards).
+                bufferForRead.clear();
+                mRendererReconfigurationState = RECONFIGURATION_STATE.WRITE_PENDING;
+            }
+            mInputStreamEnded = true;
+            // Push EOS through the duration calculator so it flushes every
+            // sample still waiting in the no-duration queue.
+            GeckoHlsSample sample = GeckoHlsSample.EOS;
+            calculatDuration(sample);
+            return false;
+        }
+
+        bufferForRead.flip();
+
+        // Copy the buffer out, prepending the SPS/PPS (CSD) bytes on keyframes
+        // so downstream consumers get self-contained access units.
+        // NOTE(review): assumes mCSDInfo is non-null by the time the first
+        // keyframe arrives (it is set in onInputFormatChanged) — confirm.
+        int csdInfoSize = mCSDInfo != null ? mCSDInfo.length : 0;
+        int dataSize = bufferForRead.data.limit();
+        int size = bufferForRead.isKeyFrame() ? csdInfoSize + dataSize : dataSize;
+        byte[] realData = new byte[size];
+        if (bufferForRead.isKeyFrame()) {
+            // Prepend the CSD information to the sample if it's a key frame.
+            System.arraycopy(mCSDInfo, 0, realData, 0, csdInfoSize);
+            bufferForRead.data.get(realData, csdInfoSize, dataSize);
+        } else {
+            bufferForRead.data.get(realData, 0, dataSize);
+        }
+        ByteBuffer buffer = ByteBuffer.wrap(realData);
+        mInputBuffer.clear();
+
+        CryptoInfo cryptoInfo = bufferForRead.isEncrypted() ? bufferForRead.cryptoInfo.getFrameworkCryptoInfoV16() : null;
+        BufferInfo bufferInfo = new BufferInfo();
+        // Flags in DecoderInputBuffer are synced with MediaCodec Buffer flags.
+        int flags = 0;
+        flags |= bufferForRead.isKeyFrame() ? MediaCodec.BUFFER_FLAG_KEY_FRAME : 0;
+        flags |= bufferForRead.isEndOfStream() ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0;
+        bufferInfo.set(0, size, bufferForRead.timeUs, flags);
+
+        assertTrue(mFormats.size() >= 0);
+        // We add a new format in the list once format changes, so the formatIndex
+        // should indicate to the last(latest) format.
+        GeckoHlsSample sample = GeckoHlsSample.create(buffer,
+                                                      bufferInfo,
+                                                      cryptoInfo,
+                                                      mFormats.size() - 1);
+
+        // There's no duration information from the ExoPlayer's sample, we need
+        // to calculate it.
+        calculatDuration(sample);
+        mRendererReconfigurationState = RECONFIGURATION_STATE.NONE;
+
+        // Tell the player data has arrived if it was waiting and the queue is
+        // now full enough.
+        if (mWaitingForData && isQueuedEnoughData()) {
+            if (DEBUG) { Log.d(LOGTAG, "onDataArrived"); }
+            mPlayerListener.onDataArrived();
+            mWaitingForData = false;
+        }
+        return true;
+    }
+
+    // On a position reset, additionally force reconfiguration data to be
+    // rewritten into the next buffer (on top of the base class's queue flush).
+    @Override
+    protected void onPositionReset(long positionUs, boolean joining) {
+        super.onPositionReset(positionUs, joining);
+        if (mInitialized && mRendererReconfigured && mFormat != null) {
+            if (DEBUG) { Log.d(LOGTAG, "[onPositionReset] WRITE_PENDING"); }
+            // Any reconfiguration data that we put shortly before the reset
+            // may be invalid. We avoid this issue by sending reconfiguration
+            // data following every position reset.
+            mRendererReconfigurationState = RECONFIGURATION_STATE.WRITE_PENDING;
+        }
+    }
+
+    // Drop every queued sample: both the finished (duration-assigned) queue
+    // and the temporary duration-pending queue.
+    @Override
+    protected boolean clearInputSamplesQueue() {
+        if (DEBUG) { Log.d(LOGTAG, "clearInputSamplesQueue"); }
+        mDemuxedNoDurationSamples.clear();
+        mDemuxedInputSamples.clear();
+        return true;
+    }
+
+    // Handle a format change reported by readSource: either reconfigure the
+    // running renderer in place (adaptive switch) or rebuild it, then record
+    // the new format and refresh the CSD bytes used for keyframe prepending.
+    @Override
+    protected void onInputFormatChanged(Format newFormat) {
+        Format oldFormat = mFormat;
+        mFormat = newFormat;
+        if (DEBUG) { Log.d(LOGTAG, "[onInputFormatChanged] old : " + oldFormat + " => new : " + mFormat); }
+
+        handleDrmInitChanged(oldFormat, newFormat);
+
+        if (mInitialized && canReconfigure(oldFormat, mFormat)) {
+            if (DEBUG) { Log.d(LOGTAG, "[onInputFormatChanged] starting reconfiguration !"); }
+            mRendererReconfigured = true;
+            mRendererReconfigurationState = RECONFIGURATION_STATE.WRITE_PENDING;
+        } else {
+            // Either the beginning of demuxing (first format) or the new
+            // format cannot be handled by reconfiguration: start over.
+            resetRenderer();
+            maybeInitRenderer();
+        }
+        mFormats.add(mFormat);
+        updateCSDInfo(mFormat);
+        mEventDispatcher.inputFormatChanged(newFormat);
+    }
+
+    protected boolean canReconfigure(Format oldFormat, Format newFormat) {
+        boolean canReconfig = areAdaptationCompatible(oldFormat, newFormat)
+          && newFormat.width <= mCodecMaxValues.width && newFormat.height <= mCodecMaxValues.height
+          && newFormat.maxInputSize <= mCodecMaxValues.inputSize;
+        if (DEBUG) { Log.d(LOGTAG, "[canReconfigure] : " + canReconfig); }
+        return canReconfig;
+    }
+
+    // NOTE(review): the method name has a typo (calculatDuration) but is kept
+    // because it is called from feedInputBuffersQueue elsewhere in this file.
+    private void calculatDuration(GeckoHlsSample inputSample) {
+        /*
+         * NOTE :
+         * Since we customized renderer as a demuxer. Here we're not able to
+         * obtain duration from the DecoderInputBuffer as there's no duration inside.
+         * So we calculate it by referring to nearby samples' timestamps.
+         * A temporary queue |mDemuxedNoDurationSamples| is used to queue demuxed
+         * samples from HlsMediaSource which have no duration information at first.
+         * Considering there're 9 demuxed samples in the _no duration_ queue already,
+         * e.g. |-2|-1|0|1|2|3|4|5|6|...
+         * Once a new demuxed(No duration) sample A (10th) is put into the
+         * temporary queue,
+         * e.g. |-2|-1|0|1|2|3|4|5|6|A|...
+         * we are able to calculate the correct duration for sample 0 by finding
+         * the closest but greater pts than sample 0 among these 9 samples,
+         * here, let's say sample -2 to 6.
+         */
+        if (inputSample != null) {
+            mDemuxedNoDurationSamples.offer(inputSample);
+        }
+        int sizeOfNoDura = mDemuxedNoDurationSamples.size();
+        // A calculation window we've ever found suitable for both HLS TS & FMP4.
+        int range = sizeOfNoDura >= 10 ? 10 : sizeOfNoDura;
+        GeckoHlsSample[] inputArray =
+            mDemuxedNoDurationSamples.toArray(new GeckoHlsSample[sizeOfNoDura]);
+        if (range >= 10 && !mInputStreamEnded) {
+            // Calculate the first 'range' elements.
+            for (int i = 0; i < range; i++) {
+                // Comparing among samples in the window.
+                for (int j = -2; j < 7; j++) {
+                    if (i + j >= 0 &&
+                        i + j < range &&
+                        inputArray[i + j].info.presentationTimeUs > inputArray[i].info.presentationTimeUs) {
+                        inputArray[i].duration =
+                            Math.min(inputArray[i].duration,
+                                     inputArray[i + j].info.presentationTimeUs - inputArray[i].info.presentationTimeUs);
+                    }
+                }
+            }
+            // The head of the window now has its final duration; promote it to
+            // the output queue.
+            GeckoHlsSample toQueue = mDemuxedNoDurationSamples.poll();
+            mDemuxedInputSamples.offer(toQueue);
+            if (DEBUG) {
+                Log.d(LOGTAG, "Demuxed sample PTS : " +
+                              toQueue.info.presentationTimeUs + ", duration :" +
+                              toQueue.duration + ", isKeyFrame(" +
+                              toQueue.isKeyFrame() + ", formatIndex(" +
+                              toQueue.formatIndex + "), queue size : " +
+                              mDemuxedInputSamples.size() + ", NoDuQueue size : " +
+                              mDemuxedNoDurationSamples.size());
+            }
+        } else if (mInputStreamEnded) {
+            // Stream ended: assign durations to everything left in the queue.
+            for (int i = 0; i < sizeOfNoDura; i++) {
+                for (int j = -2; j < 7; j++) {
+                    if (i + j >= 0 && i + j < sizeOfNoDura &&
+                        inputArray[i + j].info.presentationTimeUs > inputArray[i].info.presentationTimeUs) {
+                        inputArray[i].duration =
+                            Math.min(inputArray[i].duration,
+                                     inputArray[i + j].info.presentationTimeUs - inputArray[i].info.presentationTimeUs);
+                    }
+                }
+            }
+            // NOTE : We're not able to calculate the duration for the last sample.
+            //        A workaround here is to assign a close duration to it.
+            long prevDuration = 33333;
+            GeckoHlsSample sample = null;
+            for (sample = mDemuxedNoDurationSamples.poll(); sample != null; sample = mDemuxedNoDurationSamples.poll()) {
+                if (sample.duration == Long.MAX_VALUE) {
+                    sample.duration = prevDuration;
+                    // BUG FIX: the log previously claimed the PTS was adjusted;
+                    // it is the duration that gets the fallback value.
+                    if (DEBUG) { Log.d(LOGTAG, "Adjust the duration of the last sample to " + sample.duration + " (us)"); }
+                }
+                prevDuration = sample.duration;
+                if (DEBUG) {
+                    Log.d(LOGTAG, "last loop to offer samples - PTS : " +
+                                  sample.info.presentationTimeUs + ", Duration : " +
+                                  sample.duration + ", isEOS : " + sample.isEOS());
+                }
+                mDemuxedInputSamples.offer(sample);
+            }
+        }
+    }
+
+    // Return the time of first keyframe sample in the queue.
+    // If there's no key frame in the queue, return the MAX_VALUE so
+    // MFR won't mistake for that which the decode is getting slow.
+    public long getNextKeyFrameTime() {
+        long nextKeyFrameTime = Long.MAX_VALUE;
+        // Scan the output queue in order; the first keyframe wins.
+        for (GeckoHlsSample sample : mDemuxedInputSamples) {
+            if ((sample.info.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0) {
+                nextKeyFrameTime = sample.info.presentationTimeUs;
+                break;
+            }
+        }
+        return nextKeyFrameTime;
+    }
+
+    @Override
+    protected void onStreamChanged(Format[] formats) {
+        // Remember every format the stream may adapt between; used by
+        // maybeInitRenderer() via getCodecMaxValues() to size mInputBuffer.
+        mStreamFormats = formats;
+    }
+
+    // Detect a change of DRM initialization data between formats. Currently a
+    // stub: both branches are intentionally empty pending the TODO below.
+    protected void handleDrmInitChanged(Format oldFormat, Format newFormat) {
+        Object oldDrmInit = oldFormat == null ? null : oldFormat.drmInitData;
+        Object newDrnInit = newFormat.drmInitData;
+
+//      TODO: Notify MFR if the content is encrypted or not.
+        // NOTE(review): reference (!=) comparison — two equal-but-distinct
+        // DrmInitData objects would count as "changed"; confirm that's intended.
+        if (newDrnInit != oldDrmInit) {
+            if (newDrnInit != null) {
+            } else {
+            }
+        }
+    }
+
+    // Compute the largest width/height/input-buffer size this renderer may
+    // need, considering every stream format that is adaptation-compatible
+    // with |format|.
+    private static CodecMaxValues getCodecMaxValues(Format format, Format[] streamFormats) {
+        int maxWidth = format.width;
+        int maxHeight = format.height;
+        int maxInputSize = getMaxInputSize(format);
+        for (Format candidate : streamFormats) {
+            if (!areAdaptationCompatible(format, candidate)) {
+                continue;
+            }
+            maxWidth = Math.max(maxWidth, candidate.width);
+            maxHeight = Math.max(maxHeight, candidate.height);
+            maxInputSize = Math.max(maxInputSize, getMaxInputSize(candidate));
+        }
+        return new CodecMaxValues(maxWidth, maxHeight, maxInputSize);
+    }
+
+    private static int getMaxInputSize(Format format) {
+        if (format.maxInputSize != Format.NO_VALUE) {
+            // The format defines an explicit maximum input size.
+            return format.maxInputSize;
+        }
+
+        if (format.width == Format.NO_VALUE || format.height == Format.NO_VALUE) {
+            // We can't infer a maximum input size without video dimensions.
+            return Format.NO_VALUE;
+        }
+
+        // Attempt to infer a maximum input size from the format.
+        int maxPixels;
+        int minCompressionRatio;
+        switch (format.sampleMimeType) {
+            case MimeTypes.VIDEO_H264:
+                // Round up width/height to an integer number of macroblocks.
+                maxPixels = ((format.width + 15) / 16) * ((format.height + 15) / 16) * 16 * 16;
+                minCompressionRatio = 2;
+                break;
+            default:
+                // Leave the default max input size.
+                return Format.NO_VALUE;
+        }
+        // Estimate the maximum input size assuming three channel 4:2:0 subsampled input frames.
+        return (maxPixels * 3) / (2 * minCompressionRatio);
+    }
+
+    // Two formats can be adapted between only when they share the same mime
+    // type and the same (normalized) rotation.
+    private static boolean areAdaptationCompatible(Format first, Format second) {
+        if (!first.sampleMimeType.equals(second.sampleMimeType)) {
+            return false;
+        }
+        return getRotationDegrees(first) == getRotationDegrees(second);
+    }
+
+    // Normalize an unset rotation (Format.NO_VALUE) to 0 degrees.
+    private static int getRotationDegrees(Format format) {
+        if (format.rotationDegrees == Format.NO_VALUE) {
+            return 0;
+        }
+        return format.rotationDegrees;
+    }
+
+    // Immutable holder for the maximum width/height/input-buffer size the
+    // renderer must be prepared to handle across adaptive format switches.
+    private static final class CodecMaxValues {
+        public final int width;
+        public final int height;
+        public final int inputSize;
+        public CodecMaxValues(int width, int height, int inputSize) {
+            this.width = width;
+            this.height = height;
+            this.inputSize = inputSize;
+        }
+    }
+
+    // Concatenate all of the format's initialization data (e.g. SPS/PPS for
+    // H.264) into mCSDInfo, which is prepended to every keyframe sample in
+    // feedInputBuffersQueue.
+    private void updateCSDInfo(Format format) {
+        int size = 0;
+        for (int i = 0; i < format.initializationData.size(); i++) {
+            size += format.initializationData.get(i).length;
+        }
+        int startPos = 0;
+        mCSDInfo = new byte[size];
+        for (int i = 0; i < format.initializationData.size(); i++) {
+            byte[] data = format.initializationData.get(i);
+            System.arraycopy(data, 0, mCSDInfo, startPos, data.length);
+            // BUG FIX: the copy offset must accumulate. The previous
+            // assignment (startPos = data.length) misplaces the third and
+            // subsequent entries and can overwrite earlier CSD bytes.
+            startPos += data.length;
+        }
+        if (DEBUG) { Log.d(LOGTAG, "mCSDInfo [" + Utils.bytesToHex(mCSDInfo) + "]"); }
+    }
+}
new file mode 100644
--- /dev/null
+++ b/mobile/android/geckoview/src/main/java/org/mozilla/gecko/media/Utils.java
@@ -0,0 +1,41 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+package org.mozilla.gecko.media;
+
+import android.util.Log;
+
+/**
+ * Small static helpers for media logging/debugging: thread identification and
+ * byte-to-hex conversion.
+ */
+public class Utils {
+    private final static char[] hexArray = "0123456789ABCDEF".toCharArray();
+
+    // Utility class: not instantiable.
+    private Utils() {}
+
+    /** Returns the id of the calling thread. */
+    public static long getThreadId() {
+        Thread t = Thread.currentThread();
+        return t.getId();
+    }
+
+    /**
+     * Returns "name:(id)N:(priority)P:(group)G" for the calling thread;
+     * handy for tagging log lines with their originating thread.
+     */
+    public static String getThreadSignature() {
+        Thread t = Thread.currentThread();
+        long l = t.getId();
+        String name = t.getName();
+        long p = t.getPriority();
+        String gname = t.getThreadGroup().getName();
+        return (name
+                + ":(id)" + l
+                + ":(priority)" + p
+                + ":(group)" + gname);
+    }
+
+    /** Logs the calling thread's signature at debug level. */
+    public static void logThreadSignature() {
+        Log.d("ThreadUtils", getThreadSignature());
+    }
+
+    /** Converts {@code bytes} to an upper-case hex string, two chars per byte. */
+    public static String bytesToHex(byte[] bytes) {
+        char[] hexChars = new char[bytes.length * 2];
+        for (int j = 0; j < bytes.length; j++) {
+            int v = bytes[j] & 0xFF;
+            hexChars[j * 2] = hexArray[v >>> 4];
+            hexChars[j * 2 + 1] = hexArray[v & 0x0F];
+        }
+        return new String(hexChars);
+    }
+}
\ No newline at end of file