Backed out 6 changesets (bug 1337111) for build bustages at builds/worker/workspace/build/src/image/SurfaceFilters.h on a CLOSED TREE
author Coroiu Cristina <ccoroiu@mozilla.com>
Mon, 17 Sep 2018 20:42:30 +0300
changeset 492573 b2ac47c5ebf333b38629a2ef590628b3a69c4a24
parent 492572 2cdf22687fa74d1f01f4888837e46c8ae2ad7b5e
child 492574 0273829e46ea49c8f4c2213c2c7282be99b170c5
push id 9984
push user ffxbld-merge
push date Mon, 15 Oct 2018 21:07:35 +0000
treeherder mozilla-beta@183d27ea8570
bugs 1337111
milestone 64.0a1
backs out ca0caa556dc9c92c57f86ea998aae7a3634bf0fd
d7d7fa868d0dbc09f294ec9e0cb08566dc43828a
93e956e89a2119553a19c04c2147fbddf5f7253a
f36337c1309b1de5f7205deb5ea42f2a97690314
1b1e25b0b345cf3774ae7d186dc082430420e056
3785cdebe6a3ff30cdbd5291559ac7bb3d15c4d0
Backed out 6 changesets (bug 1337111) for build bustages at builds/worker/workspace/build/src/image/SurfaceFilters.h on a CLOSED TREE

Backed out changeset ca0caa556dc9 (bug 1337111)
Backed out changeset d7d7fa868d0d (bug 1337111)
Backed out changeset 93e956e89a21 (bug 1337111)
Backed out changeset f36337c1309b (bug 1337111)
Backed out changeset 1b1e25b0b345 (bug 1337111)
Backed out changeset 3785cdebe6a3 (bug 1337111)
gfx/thebes/gfxPrefs.h
image/AnimationSurfaceProvider.cpp
image/Decoder.cpp
image/Decoder.h
image/DecoderFactory.cpp
image/DecoderFactory.h
image/DecoderFlags.h
image/FrameAnimator.cpp
image/ImageOps.cpp
image/RasterImage.cpp
image/SurfaceFilters.h
image/SurfacePipeFactory.h
image/decoders/GIF2.h
image/decoders/nsGIFDecoder2.cpp
image/decoders/nsPNGDecoder.cpp
image/imgFrame.cpp
image/imgFrame.h
image/test/gtest/Common.cpp
image/test/gtest/Common.h
image/test/gtest/TestBlendAnimationFilter.cpp
image/test/gtest/TestDecoders.cpp
image/test/gtest/TestMetadata.cpp
image/test/gtest/moz.build
modules/libpref/init/all.js
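
With this backout applied, DecoderFactory::CreateAnonymousDecoder() once again takes no DecoderFlags argument, and anonymous decoders are forced to FIRST_FRAME_ONLY internally (see the DecoderFactory.cpp hunk below). A minimal call-site sketch mirroring the ImageOps.cpp change in this patch; the decoder type, flag value, and surrounding variables are illustrative assumptions rather than code taken verbatim from the tree:

    // Sketch of a post-backout call site; the chosen type and flags are assumptions.
    RefPtr<Decoder> decoder =
      DecoderFactory::CreateAnonymousDecoder(DecoderType::GIF,
                                             WrapNotNull(sourceBuffer),
                                             /* aOutputSize */ Nothing(),
                                             ToSurfaceFlags(imgIContainer::DECODE_FLAGS_DEFAULT));
    if (!decoder) {
      return nullptr;  // Unknown decoder type or initialization failure.
    }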
--- a/gfx/thebes/gfxPrefs.h
+++ b/gfx/thebes/gfxPrefs.h
@@ -538,17 +538,16 @@ private:
 #if defined(XP_MACOSX)
   DECL_GFX_PREF(Live, "gl.multithreaded",                      GLMultithreaded, bool, false);
 #endif
   DECL_GFX_PREF(Live, "gl.require-hardware",                   RequireHardwareGL, bool, false);
   DECL_GFX_PREF(Live, "gl.use-tls-is-current",                 UseTLSIsCurrent, int32_t, 0);
 
   DECL_GFX_PREF(Live, "image.animated.decode-on-demand.threshold-kb", ImageAnimatedDecodeOnDemandThresholdKB, uint32_t, 20480);
   DECL_GFX_PREF(Live, "image.animated.decode-on-demand.batch-size", ImageAnimatedDecodeOnDemandBatchSize, uint32_t, 6);
-  DECL_GFX_PREF(Live, "image.animated.generate-full-frames",   ImageAnimatedGenerateFullFrames, bool, false);
   DECL_GFX_PREF(Live, "image.animated.resume-from-last-displayed", ImageAnimatedResumeFromLastDisplayed, bool, false);
   DECL_GFX_PREF(Live, "image.cache.factor2.threshold-surfaces", ImageCacheFactor2ThresholdSurfaces, int32_t, -1);
   DECL_GFX_PREF(Once, "image.cache.size",                      ImageCacheSize, int32_t, 5*1024*1024);
   DECL_GFX_PREF(Once, "image.cache.timeweight",                ImageCacheTimeWeight, int32_t, 500);
   DECL_GFX_PREF(Live, "image.decode-immediately.enabled",      ImageDecodeImmediatelyEnabled, bool, false);
   DECL_GFX_PREF(Live, "image.downscale-during-decode.enabled", ImageDownscaleDuringDecodeEnabled, bool, true);
   DECL_GFX_PREF(Live, "image.infer-src-animation.threshold-ms", ImageInferSrcAnimationThresholdMS, uint32_t, 2000);
   DECL_GFX_PREF(Live, "image.layout_network_priority",         ImageLayoutNetworkPriority, bool, true);
--- a/image/AnimationSurfaceProvider.cpp
+++ b/image/AnimationSurfaceProvider.cpp
@@ -27,20 +27,20 @@ AnimationSurfaceProvider::AnimationSurfa
   , mDecoder(aDecoder.get())
   , mFramesMutex("AnimationSurfaceProvider::mFrames")
 {
   MOZ_ASSERT(!mDecoder->IsMetadataDecode(),
              "Use MetadataDecodingTask for metadata decodes");
   MOZ_ASSERT(!mDecoder->IsFirstFrameDecode(),
              "Use DecodedSurfaceProvider for single-frame image decodes");
 
-  // We may produce paletted surfaces for GIF which means the frames are smaller
-  // than one would expect.
-  size_t pixelSize = !aDecoder->ShouldBlendAnimation() &&
-                     aDecoder->GetType() == DecoderType::GIF
+  // We still produce paletted surfaces for GIF which means the frames are
+  // smaller than one would expect for APNG. This may be removed if/when
+  // bug 1337111 lands and it is enabled by default.
+  size_t pixelSize = aDecoder->GetType() == DecoderType::GIF
                      ? sizeof(uint8_t) : sizeof(uint32_t);
 
   // Calculate how many frames we need to decode in this animation before we
   // enter decode-on-demand mode.
   IntSize frameSize = aSurfaceKey.Size();
   size_t threshold =
     (size_t(gfxPrefs::ImageAnimatedDecodeOnDemandThresholdKB()) * 1024) /
     (pixelSize * frameSize.width * frameSize.height);
--- a/image/Decoder.cpp
+++ b/image/Decoder.cpp
@@ -286,17 +286,17 @@ nsresult
 Decoder::AllocateFrame(const gfx::IntSize& aOutputSize,
                        const gfx::IntRect& aFrameRect,
                        gfx::SurfaceFormat aFormat,
                        uint8_t aPaletteDepth,
                        const Maybe<AnimationParams>& aAnimParams)
 {
   mCurrentFrame = AllocateFrameInternal(aOutputSize, aFrameRect, aFormat,
                                         aPaletteDepth, aAnimParams,
-                                        std::move(mCurrentFrame));
+                                        mCurrentFrame.get());
 
   if (mCurrentFrame) {
     mHasFrameToTake = true;
 
     // Gather the raw pointers the decoders will use.
     mCurrentFrame->GetImageData(&mImageData, &mImageDataLength);
     mCurrentFrame->GetPaletteData(&mColormap, &mColormapSize);
 
@@ -316,17 +316,17 @@ Decoder::AllocateFrame(const gfx::IntSiz
 }
 
 RawAccessFrameRef
 Decoder::AllocateFrameInternal(const gfx::IntSize& aOutputSize,
                                const gfx::IntRect& aFrameRect,
                                SurfaceFormat aFormat,
                                uint8_t aPaletteDepth,
                                const Maybe<AnimationParams>& aAnimParams,
-                               RawAccessFrameRef&& aPreviousFrame)
+                               imgFrame* aPreviousFrame)
 {
   if (HasError()) {
     return RawAccessFrameRef();
   }
 
   uint32_t frameNum = aAnimParams ? aAnimParams->mFrameNum : 0;
   if (frameNum != mFrameCount) {
     MOZ_ASSERT_UNREACHABLE("Allocating frames out of order");
@@ -338,17 +338,17 @@ Decoder::AllocateFrameInternal(const gfx
     NS_WARNING("Trying to add frame with zero or negative size");
     return RawAccessFrameRef();
   }
 
   auto frame = MakeNotNull<RefPtr<imgFrame>>();
   bool nonPremult = bool(mSurfaceFlags & SurfaceFlags::NO_PREMULTIPLY_ALPHA);
   if (NS_FAILED(frame->InitForDecoder(aOutputSize, aFrameRect, aFormat,
                                       aPaletteDepth, nonPremult,
-                                      aAnimParams, ShouldBlendAnimation()))) {
+                                      aAnimParams))) {
     NS_WARNING("imgFrame::Init should succeed");
     return RawAccessFrameRef();
   }
 
   RawAccessFrameRef ref = frame->RawAccessRef();
   if (!ref) {
     frame->Abort();
     return RawAccessFrameRef();
@@ -369,37 +369,17 @@ Decoder::AllocateFrameInternal(const gfx
     }
   }
 
   if (frameNum > 0) {
     ref->SetRawAccessOnly();
 
     // Some GIFs are huge but only have a small area that they animate. We only
     // need to refresh that small area when frame 0 comes around again.
-    mFirstFrameRefreshArea.UnionRect(mFirstFrameRefreshArea,
-                                     ref->GetBoundedBlendRect());
-
-    if (ShouldBlendAnimation()) {
-      if (aPreviousFrame->GetDisposalMethod() !=
-          DisposalMethod::RESTORE_PREVIOUS) {
-        // If the new restore frame is the direct previous frame, then we know
-        // the dirty rect is composed only of the current frame's blend rect and
-        // the restore frame's clear rect (if applicable) which are handled in
-        // filters.
-        mRestoreFrame = std::move(aPreviousFrame);
-        mRestoreDirtyRect.SetBox(0, 0, 0, 0);
-      } else {
-        // We only need the previous frame's dirty rect, because while there may
-        // have been several frames between us and mRestoreFrame, the only areas
-        // that changed are the restore frame's clear rect, the current frame
-        // blending rect, and the previous frame's blending rect. All else is
-        // forgotten due to us restoring the same frame again.
-        mRestoreDirtyRect = aPreviousFrame->GetBoundedBlendRect();
-      }
-    }
+    mFirstFrameRefreshArea.UnionRect(mFirstFrameRefreshArea, frame->GetRect());
   }
 
   mFrameCount++;
 
   return ref;
 }
 
 /*
--- a/image/Decoder.h
+++ b/image/Decoder.h
@@ -264,25 +264,16 @@ public:
    * Should we stop decoding after the first frame?
    */
   bool IsFirstFrameDecode() const
   {
     return bool(mDecoderFlags & DecoderFlags::FIRST_FRAME_ONLY);
   }
 
   /**
-   * Should blend the current frame with the previous frames to produce a
-   * complete frame instead of a partial frame for animated images.
-   */
-  bool ShouldBlendAnimation() const
-  {
-    return bool(mDecoderFlags & DecoderFlags::BLEND_ANIMATION);
-  }
-
-  /**
    * @return the number of complete animation frames which have been decoded so
    * far, if it has changed since the last call to TakeCompleteFrameCount();
    * otherwise, returns Nothing().
    */
   Maybe<uint32_t> TakeCompleteFrameCount();
 
   // The number of frames we have, including anything in-progress. Thus, this
   // is only 0 if we haven't begun any frames.
@@ -414,51 +405,24 @@ public:
   RasterImage* GetImageMaybeNull() const { return mImage.get(); }
 
   RawAccessFrameRef GetCurrentFrameRef()
   {
     return mCurrentFrame ? mCurrentFrame->RawAccessRef()
                          : RawAccessFrameRef();
   }
 
-  /**
-   * For use during decoding only. Allows the BlendAnimationFilter to get the
-   * current frame we are producing for its animation parameters.
-   */
-  imgFrame* GetCurrentFrame()
-  {
-    MOZ_ASSERT(ShouldBlendAnimation());
-    return mCurrentFrame.get();
-  }
-
-  /**
-   * For use during decoding only. Allows the BlendAnimationFilter to get the
-   * frame it should be pulling the previous frame data from.
-   */
-  const RawAccessFrameRef& GetRestoreFrameRef() const
-  {
-    MOZ_ASSERT(ShouldBlendAnimation());
-    return mRestoreFrame;
-  }
-
-  const gfx::IntRect& GetRestoreDirtyRect() const
-  {
-    MOZ_ASSERT(ShouldBlendAnimation());
-    return mRestoreDirtyRect;
-  }
-
   bool HasFrameToTake() const { return mHasFrameToTake; }
   void ClearHasFrameToTake() {
     MOZ_ASSERT(mHasFrameToTake);
     mHasFrameToTake = false;
   }
 
 protected:
   friend class AutoRecordDecoderTelemetry;
-  friend class DecoderTestHelper;
   friend class nsICODecoder;
   friend class PalettedSurfaceSink;
   friend class SurfaceSink;
 
   virtual ~Decoder();
 
   /*
    * Internal hooks. Decoder implementations may override these and
@@ -575,42 +539,32 @@ private:
     return mInFrame ? mFrameCount - 1 : mFrameCount;
   }
 
   RawAccessFrameRef AllocateFrameInternal(const gfx::IntSize& aOutputSize,
                                           const gfx::IntRect& aFrameRect,
                                           gfx::SurfaceFormat aFormat,
                                           uint8_t aPaletteDepth,
                                           const Maybe<AnimationParams>& aAnimParams,
-                                          RawAccessFrameRef&& aPreviousFrame);
+                                          imgFrame* aPreviousFrame);
 
 protected:
   Maybe<Downscaler> mDownscaler;
 
   uint8_t* mImageData;  // Pointer to image data in either Cairo or 8bit format
   uint32_t mImageDataLength;
   uint32_t* mColormap;  // Current colormap to be used in Cairo format
   uint32_t mColormapSize;
 
 private:
   RefPtr<RasterImage> mImage;
   Maybe<SourceBufferIterator> mIterator;
-
-  // The current frame the decoder is producing.
   RawAccessFrameRef mCurrentFrame;
-
-  // The complete frame to combine with the current partial frame to produce
-  // a complete current frame.
-  RawAccessFrameRef mRestoreFrame;
-
   ImageMetadata mImageMetadata;
-
-  gfx::IntRect mInvalidRect; // Tracks new rows as the current frame is decoded.
-  gfx::IntRect mRestoreDirtyRect; // Tracks an invalidation region between the
-                                  // restore frame and the previous frame.
+  gfx::IntRect mInvalidRect; // Tracks an invalidation region in the current frame.
   Maybe<gfx::IntSize> mOutputSize;  // The size of our output surface.
   Maybe<gfx::IntSize> mExpectedSize; // The expected size of the image.
   Progress mProgress;
 
   uint32_t mFrameCount; // Number of frames, including anything in-progress
   FrameTimeout mLoopLength;  // Length of a single loop of this image.
   gfx::IntRect mFirstFrameRefreshArea;  // The area of the image that needs to
                                         // be invalidated when the animation loops.
--- a/image/DecoderFactory.cpp
+++ b/image/DecoderFactory.cpp
@@ -327,17 +327,16 @@ DecoderFactory::CreateDecoderForICOResou
 
   return decoder.forget();
 }
 
 /* static */ already_AddRefed<Decoder>
 DecoderFactory::CreateAnonymousDecoder(DecoderType aType,
                                        NotNull<SourceBuffer*> aSourceBuffer,
                                        const Maybe<IntSize>& aOutputSize,
-                                       DecoderFlags aDecoderFlags,
                                        SurfaceFlags aSurfaceFlags)
 {
   if (aType == DecoderType::UNKNOWN) {
     return nullptr;
   }
 
   RefPtr<Decoder> decoder =
     GetDecoder(aType, /* aImage = */ nullptr, /* aIsRedecode = */ false);
@@ -346,17 +345,24 @@ DecoderFactory::CreateAnonymousDecoder(D
   // Initialize the decoder.
   decoder->SetMetadataDecode(false);
   decoder->SetIterator(aSourceBuffer->Iterator());
 
   // Anonymous decoders are always transient; we don't want to optimize surfaces
   // or do any other expensive work that might be wasted.
   DecoderFlags decoderFlags = DecoderFlags::IMAGE_IS_TRANSIENT;
 
-  decoder->SetDecoderFlags(aDecoderFlags | decoderFlags);
+  // Without an image, the decoder can't store anything in the SurfaceCache, so
+  // callers will only be able to retrieve the most recent frame via
+  // Decoder::GetCurrentFrame(). That means that anonymous decoders should
+  // always be first-frame-only decoders, because nobody ever wants the *last*
+  // frame.
+  decoderFlags |= DecoderFlags::FIRST_FRAME_ONLY;
+
+  decoder->SetDecoderFlags(decoderFlags);
   decoder->SetSurfaceFlags(aSurfaceFlags);
 
   // Set an output size for downscale-during-decode if requested.
   if (aOutputSize) {
     decoder->SetOutputSize(*aOutputSize);
   }
 
   if (NS_FAILED(decoder->Init())) {
--- a/image/DecoderFactory.h
+++ b/image/DecoderFactory.h
@@ -172,25 +172,23 @@ public:
    *
    * @param aType Which type of decoder to create - JPEG, PNG, etc.
    * @param aSourceBuffer The SourceBuffer which the decoder will read its data
    *                      from.
    * @param aOutputSize If Some(), the output size for the decoder. If this is
    *                    smaller than the intrinsic size, the decoder will
    *                    downscale the image. If Nothing(), the output size will
    *                    be the intrinsic size.
-   * @param aDecoderFlags Flags specifying the behavior of this decoder.
    * @param aSurfaceFlags Flags specifying the type of output this decoder
    *                      should produce.
    */
   static already_AddRefed<Decoder>
   CreateAnonymousDecoder(DecoderType aType,
                          NotNull<SourceBuffer*> aSourceBuffer,
                          const Maybe<gfx::IntSize>& aOutputSize,
-                         DecoderFlags aDecoderFlags,
                          SurfaceFlags aSurfaceFlags);
 
   /**
    * Creates and initializes an anonymous metadata decoder (one which isn't
    * associated with an Image object). This decoder will only decode the image's
    * header, extracting metadata like the size of the image. No actual image
    * data will be decoded and no surfaces will be allocated.
    *
--- a/image/DecoderFlags.h
+++ b/image/DecoderFlags.h
@@ -26,25 +26,17 @@ enum class DecoderFlags : uint8_t
   ASYNC_NOTIFY                   = 1 << 3,
 
   /**
    * By default, a surface is considered substitutable. That means callers are
    * willing to accept a less than ideal match to display. If a caller requires
    * a specific size and won't accept alternatives, then this flag should be
    * set.
    */
-  CANNOT_SUBSTITUTE              = 1 << 4,
-
-  /**
-   * By default, an animation decoder will produce partial frames that need to
-   * be combined with the previously displayed/composited frame by FrameAnimator
-   * to produce a complete frame. If this flag is set, the decoder will perform
-   * this blending at decode time, and the frames produced are complete.
-   */
-  BLEND_ANIMATION                = 1 << 5
+  CANNOT_SUBSTITUTE              = 1 << 4
 };
 MOZ_MAKE_ENUM_CLASS_BITWISE_OPERATORS(DecoderFlags)
 
 /**
  * @return the default set of decode flags.
  */
 inline DecoderFlags
 DefaultDecoderFlags()
--- a/image/FrameAnimator.cpp
+++ b/image/FrameAnimator.cpp
@@ -311,38 +311,36 @@ FrameAnimator::AdvanceFrame(AnimationSta
     return ret;
   }
 
   if (nextFrame->GetTimeout() == FrameTimeout::Forever()) {
     ret.mAnimationFinished = true;
   }
 
   if (nextFrameIndex == 0) {
-    MOZ_ASSERT(nextFrame->IsFullFrame());
     ret.mDirtyRect = aState.FirstFrameRefreshArea();
-  } else if (!nextFrame->IsFullFrame()) {
+  } else {
     MOZ_ASSERT(nextFrameIndex == currentFrameIndex + 1);
+
     // Change frame
     if (!DoBlend(aCurrentFrame, nextFrame, nextFrameIndex, &ret.mDirtyRect)) {
       // something went wrong, move on to next
       NS_WARNING("FrameAnimator::AdvanceFrame(): Compositing of frame failed");
       nextFrame->SetCompositingFailed(true);
       aState.mCurrentAnimationFrameTime =
         GetCurrentImgFrameEndTime(aState, aCurrentFrame->GetTimeout());
       aState.mCurrentAnimationFrameIndex = nextFrameIndex;
       aState.mCompositedFrameRequested = false;
       aCurrentFrame = std::move(nextFrame);
       aFrames.Advance(nextFrameIndex);
 
       return ret;
     }
 
     nextFrame->SetCompositingFailed(false);
-  } else {
-    ret.mDirtyRect = nextFrame->GetDirtyRect();
   }
 
   aState.mCurrentAnimationFrameTime =
     GetCurrentImgFrameEndTime(aState, aCurrentFrame->GetTimeout());
 
   // If we can get closer to the current time by a multiple of the image's loop
   // time, we should. We can only do this if we're done decoding; otherwise, we
   // don't know the full loop length, and LoopLength() will have to return
@@ -480,23 +478,18 @@ FrameAnimator::RequestRefresh(AnimationS
     // If we didn't advance a frame, and our frame end time didn't change,
     // then we need to break out of this loop & wait for the frame(s)
     // to finish downloading.
     if (!frameRes.mFrameAdvanced && currentFrameEndTime == oldFrameEndTime) {
       break;
     }
   }
 
-  // We should only mark the composited frame as valid and reset the dirty rect
-  // if we advanced (meaning the next frame was actually produced somehow), the
-  // composited frame was previously invalid (so we may need to repaint
-  // everything) and the frame index is valid (to know we were doing blending
-  // on the main thread, instead of on the decoder threads in advance).
-  if (currentFrameEndTime > aTime && aState.mCompositedFrameInvalid &&
-      mLastCompositedFrameIndex >= 0) {
+  // Advanced to the correct frame, the composited frame is now valid to be drawn.
+  if (currentFrameEndTime > aTime) {
     aState.mCompositedFrameInvalid = false;
     ret.mDirtyRect = IntRect(IntPoint(0,0), mSize);
   }
 
   MOZ_ASSERT(!aState.mIsCurrentlyDecoded || !aState.mCompositedFrameInvalid);
 
   return ret;
 }
--- a/image/ImageOps.cpp
+++ b/image/ImageOps.cpp
@@ -220,19 +220,17 @@ ImageOps::DecodeToSurface(ImageBuffer* a
   }
 
   // Create a decoder.
   DecoderType decoderType =
     DecoderFactory::GetDecoderType(PromiseFlatCString(aMimeType).get());
   RefPtr<Decoder> decoder =
     DecoderFactory::CreateAnonymousDecoder(decoderType,
                                            WrapNotNull(sourceBuffer),
-                                           aSize,
-                                           DecoderFlags::FIRST_FRAME_ONLY,
-                                           ToSurfaceFlags(aFlags));
+                                           aSize, ToSurfaceFlags(aFlags));
   if (!decoder) {
     return nullptr;
   }
 
   // Run the decoder synchronously.
   RefPtr<IDecodingTask> task = new AnonymousDecodingTask(WrapNotNull(decoder));
   task->Run();
   if (!decoder->GetDecodeDone() || decoder->HasError()) {
--- a/image/RasterImage.cpp
+++ b/image/RasterImage.cpp
@@ -1281,20 +1281,16 @@ RasterImage::Decode(const IntSize& aSize
     surfaceFlags &= ~SurfaceFlags::NO_PREMULTIPLY_ALPHA;
   }
 
   // Create a decoder.
   RefPtr<IDecodingTask> task;
   nsresult rv;
   bool animated = mAnimationState && aPlaybackType == PlaybackType::eAnimated;
   if (animated) {
-    if (gfxPrefs::ImageAnimatedGenerateFullFrames()) {
-      decoderFlags |= DecoderFlags::BLEND_ANIMATION;
-    }
-
     size_t currentFrame = mAnimationState->GetCurrentAnimationFrameIndex();
     rv = DecoderFactory::CreateAnimationDecoder(mDecoderType, WrapNotNull(this),
                                                 mSourceBuffer, mSize,
                                                 decoderFlags, surfaceFlags,
                                                 currentFrame,
                                                 getter_AddRefs(task));
   } else {
     rv = DecoderFactory::CreateDecoder(mDecoderType, WrapNotNull(this),
--- a/image/SurfaceFilters.h
+++ b/image/SurfaceFilters.h
@@ -15,17 +15,16 @@
 #include <algorithm>
 #include <stdint.h>
 #include <string.h>
 
 #include "mozilla/Likely.h"
 #include "mozilla/Maybe.h"
 #include "mozilla/UniquePtr.h"
 #include "mozilla/gfx/2D.h"
-#include "skia/src/core/SkBlitRow.h"
 
 #include "DownscalingFilter.h"
 #include "SurfaceCache.h"
 #include "SurfacePipe.h"
 
 namespace mozilla {
 namespace image {
 
@@ -324,389 +323,16 @@ private:
   UniquePtr<uint8_t[]> mBuffer;  /// The buffer used to store reordered rows.
   int32_t mInputRow;             /// The current row we're reading. (0-indexed)
   int32_t mOutputRow;            /// The current row we're writing. (0-indexed)
   uint8_t mPass;                 /// Which pass we're on. (0-indexed)
   bool mProgressiveDisplay;      /// If true, duplicate rows to optimize for
                                  /// progressive display.
 };
 
-//////////////////////////////////////////////////////////////////////////////
-// BlendAnimationFilter
-//////////////////////////////////////////////////////////////////////////////
-
-template <typename Next> class BlendAnimationFilter;
-
-/**
- * A configuration struct for BlendAnimationFilter.
- */
-struct BlendAnimationConfig
-{
-  template <typename Next> using Filter = BlendAnimationFilter<Next>;
-  Decoder* mDecoder;           /// The decoder producing the animation.
-};
-
-/**
- * BlendAnimationFilter turns a partial image as part of an animation into a
- * complete frame given its frame rect, blend method, and the base frame's
- * data buffer, frame rect and disposal method. Any excess data caused by a
- * frame rect not being contained by the output size will be discarded.
- *
- * The base frame is an already produced complete frame from the animation.
- * It may be any previous frame depending on the disposal method, although
- * most often it will be the immediate previous frame to the current we are
- * generating.
- *
- * The 'Next' template parameter specifies the next filter in the chain.
- */
-template <typename Next>
-class BlendAnimationFilter final : public SurfaceFilter
-{
-public:
-  BlendAnimationFilter()
-    : mRow(0)
-    , mRowLength(0)
-    , mOverProc(nullptr)
-    , mBaseFrameStartPtr(nullptr)
-    , mBaseFrameRowPtr(nullptr)
-  { }
-
-  template <typename... Rest>
-  nsresult Configure(const BlendAnimationConfig& aConfig, Rest... aRest)
-  {
-    nsresult rv = mNext.Configure(aRest...);
-    if (NS_FAILED(rv)) {
-      return rv;
-    }
-
-    if (!aConfig.mDecoder || !aConfig.mDecoder->ShouldBlendAnimation()) {
-      MOZ_ASSERT_UNREACHABLE("Expected image decoder that is blending!");
-      return NS_ERROR_INVALID_ARG;
-    }
-
-    imgFrame* currentFrame = aConfig.mDecoder->GetCurrentFrame();
-    if (!currentFrame) {
-      MOZ_ASSERT_UNREACHABLE("Decoder must have current frame!");
-      return NS_ERROR_FAILURE;
-    }
-
-    mFrameRect = mUnclampedFrameRect = currentFrame->GetBlendRect();
-    gfx::IntSize outputSize = mNext.InputSize();
-    mRowLength = outputSize.width * sizeof(uint32_t);
-
-    // Forbid frame rects with negative size.
-    if (mUnclampedFrameRect.width < 0 || mUnclampedFrameRect.height < 0) {
-      return NS_ERROR_FAILURE;
-    }
-
-    // Clamp mFrameRect to the output size.
-    gfx::IntRect outputRect(0, 0, outputSize.width, outputSize.height);
-    mFrameRect = mFrameRect.Intersect(outputRect);
-    bool fullFrame = outputRect.IsEqualEdges(mFrameRect);
-
-    // If there's no intersection, |mFrameRect| will be an empty rect positioned
-    // at the maximum of |inputRect|'s and |aFrameRect|'s coordinates, which is
-    // not what we want. Force it to (0, 0) in that case.
-    if (mFrameRect.IsEmpty()) {
-      mFrameRect.MoveTo(0, 0);
-    }
-
-    BlendMethod blendMethod = currentFrame->GetBlendMethod();
-    switch (blendMethod) {
-      default:
-        blendMethod = BlendMethod::SOURCE;
-        MOZ_FALLTHROUGH_ASSERT("Unexpected blend method!");
-      case BlendMethod::SOURCE:
-        // Default, overwrites base frame data (if any) with new.
-        break;
-      case BlendMethod::OVER:
-        // OVER only has an impact on the output if we have new data to blend
-        // with.
-        if (mFrameRect.IsEmpty()) {
-          blendMethod = BlendMethod::SOURCE;
-        }
-        break;
-    }
-
-    // Determine what we need to clear and what we need to copy. If this frame
-    // is a full frame and uses source blending, there is no need to consider
-    // the disposal method of the previous frame.
-    gfx::IntRect dirtyRect(outputRect);
-    if (!fullFrame || blendMethod != BlendMethod::SOURCE) {
-      const RawAccessFrameRef& restoreFrame =
-        aConfig.mDecoder->GetRestoreFrameRef();
-      if (restoreFrame) {
-        MOZ_ASSERT(restoreFrame->GetImageSize() == outputSize);
-        MOZ_ASSERT(restoreFrame->IsFinished());
-
-        // We can safely use this pointer without holding a RawAccessFrameRef
-        // because the decoder will keep it alive for us.
-        mBaseFrameStartPtr = restoreFrame.Data();
-        MOZ_ASSERT(mBaseFrameStartPtr);
-
-        gfx::IntRect restoreBlendRect = restoreFrame->GetBoundedBlendRect();
-        gfx::IntRect restoreDirtyRect = aConfig.mDecoder->GetRestoreDirtyRect();
-        switch (restoreFrame->GetDisposalMethod()) {
-          default:
-          case DisposalMethod::RESTORE_PREVIOUS:
-            MOZ_FALLTHROUGH_ASSERT("Unexpected DisposalMethod");
-          case DisposalMethod::NOT_SPECIFIED:
-          case DisposalMethod::KEEP:
-            dirtyRect = mFrameRect.Union(restoreDirtyRect);
-            break;
-          case DisposalMethod::CLEAR:
-            // We only need to clear if the rect is outside the frame rect (i.e.
-            // overwrites a non-overlapping area) or the blend method may cause
-            // us to combine old data and new.
-            if (!mFrameRect.Contains(restoreBlendRect) ||
-                blendMethod == BlendMethod::OVER) {
-              mClearRect = restoreBlendRect;
-            }
-
-            // If we are clearing the whole frame, we do not need to retain a
-            // reference to the base frame buffer.
-            if (outputRect.IsEqualEdges(mClearRect)) {
-              mBaseFrameStartPtr = nullptr;
-            } else {
-              dirtyRect = mFrameRect.Union(restoreDirtyRect).Union(mClearRect);
-            }
-            break;
-        }
-      } else if (!fullFrame) {
-        // This must be the first frame, clear everything.
-        mClearRect = outputRect;
-      }
-    }
-
-    // The dirty rect, or delta between the current frame and the previous frame
-    // (chronologically, not necessarily the restore frame) is the last
-    // animation parameter we need to initialize the new frame with.
-    currentFrame->SetDirtyRect(dirtyRect);
-
-    if (!mBaseFrameStartPtr) {
-      // Switch to SOURCE if no base frame to ensure we don't allocate an
-      // intermediate buffer below. OVER does nothing without the base frame
-      // data.
-      blendMethod = BlendMethod::SOURCE;
-    }
-
-    // Skia provides arch-specific accelerated methods to perform blending.
-    // Note that this is an internal Skia API and may be prone to change,
-    // but we avoid the overhead of setting up Skia objects.
-    if (blendMethod == BlendMethod::OVER) {
-      mOverProc = SkBlitRow::Factory32(SkBlitRow::kSrcPixelAlpha_Flag32);
-      MOZ_ASSERT(mOverProc);
-    }
-
-    // We don't need an intermediate buffer unless the unclamped frame rect
-    // width is larger than the clamped frame rect width. In that case, the
-    // caller will end up writing data that won't end up in the final image at
-    // all, and we'll need a buffer to give that data a place to go.
-    if (mFrameRect.width < mUnclampedFrameRect.width || mOverProc) {
-      mBuffer.reset(new (fallible) uint8_t[mUnclampedFrameRect.width *
-                                           sizeof(uint32_t)]);
-      if (MOZ_UNLIKELY(!mBuffer)) {
-        return NS_ERROR_OUT_OF_MEMORY;
-      }
-
-      memset(mBuffer.get(), 0, mUnclampedFrameRect.width * sizeof(uint32_t));
-    }
-
-    ConfigureFilter(mUnclampedFrameRect.Size(), sizeof(uint32_t));
-    return NS_OK;
-  }
-
-  Maybe<SurfaceInvalidRect> TakeInvalidRect() override
-  {
-    return mNext.TakeInvalidRect();
-  }
-
-protected:
-  uint8_t* DoResetToFirstRow() override
-  {
-    uint8_t* rowPtr = mNext.ResetToFirstRow();
-    if (rowPtr == nullptr) {
-      mRow = mFrameRect.YMost();
-      return nullptr;
-    }
-
-    mRow = 0;
-    mBaseFrameRowPtr = mBaseFrameStartPtr;
-
-    while (mRow < mFrameRect.y) {
-      WriteBaseFrameRow();
-      AdvanceRowOutsideFrameRect();
-    }
-
-    // We're at the beginning of the frame rect now, so return if we're either
-    // ready for input or we're already done.
-    rowPtr = mBuffer ? mBuffer.get() : mNext.CurrentRowPointer();
-    if (!mFrameRect.IsEmpty() || rowPtr == nullptr) {
-      // Note that the pointer we're returning is for the next row we're
-      // actually going to write to, but we may discard writes before that point
-      // if mRow < mFrameRect.y.
-      mRow = mUnclampedFrameRect.y;
-      WriteBaseFrameRow();
-      return AdjustRowPointer(rowPtr);
-    }
-
-    // We've finished the region specified by the frame rect, but the frame rect
-    // is empty, so we need to output the rest of the image immediately. Advance
-    // to the end of the next pipeline stage's buffer, outputting rows that are
-    // copied from the base frame and/or cleared.
-    WriteBaseFrameRowsUntilComplete();
-
-    mRow = mFrameRect.YMost();
-    return nullptr;  // We're done.
-  }
-
-  uint8_t* DoAdvanceRow() override
-  {
-    uint8_t* rowPtr = nullptr;
-
-    const int32_t currentRow = mRow;
-    mRow++;
-
-    // The unclamped frame rect has a negative offset which means -y rows from
-    // the decoder need to be discarded before we advance properly.
-    if (currentRow >= 0 && mBaseFrameRowPtr) {
-      mBaseFrameRowPtr += mRowLength;
-    }
-
-    if (currentRow < mFrameRect.y) {
-      // This row is outside of the frame rect, so just drop it on the floor.
-      rowPtr = mBuffer ? mBuffer.get() : mNext.CurrentRowPointer();
-      return AdjustRowPointer(rowPtr);
-    } else if (NS_WARN_IF(currentRow >= mFrameRect.YMost())) {
-      return nullptr;
-    }
-
-    // If we had to buffer, merge the data into the row. Otherwise we had the
-    // decoder write directly to the next stage's buffer.
-    if (mBuffer) {
-      int32_t width = mFrameRect.width;
-      uint32_t* dst = reinterpret_cast<uint32_t*>(mNext.CurrentRowPointer());
-      uint32_t* src = reinterpret_cast<uint32_t*>(mBuffer.get()) -
-                      std::min(mUnclampedFrameRect.x, 0);
-      dst += mFrameRect.x;
-      if (mOverProc) {
-        mOverProc(dst, src, width, 0xFF);
-      } else {
-        memcpy(dst, src, width * sizeof(uint32_t));
-      }
-      rowPtr = mNext.AdvanceRow() ? mBuffer.get() : nullptr;
-    } else {
-      MOZ_ASSERT(!mOverProc);
-      rowPtr = mNext.AdvanceRow();
-    }
-
-    // If there's still more data coming or we're already done, just adjust the
-    // pointer and return.
-    if (mRow < mFrameRect.YMost() || rowPtr == nullptr) {
-      WriteBaseFrameRow();
-      return AdjustRowPointer(rowPtr);
-    }
-
-    // We've finished the region specified by the frame rect. Advance to the end
-    // of the next pipeline stage's buffer, outputting rows that are copied from
-    // the base frame and/or cleared.
-    WriteBaseFrameRowsUntilComplete();
-
-    return nullptr;  // We're done.
-  }
-
-private:
-  void WriteBaseFrameRowsUntilComplete()
-  {
-    do {
-      WriteBaseFrameRow();
-    } while (AdvanceRowOutsideFrameRect());
-  }
-
-  void WriteBaseFrameRow()
-  {
-    uint8_t* dest = mNext.CurrentRowPointer();
-    if (!dest) {
-      return;
-    }
-
-    if (!mBaseFrameRowPtr) {
-      // No base frame, so we are clearing everything.
-      memset(dest, 0, mRowLength);
-    } else if (mClearRect.height > 0 &&
-               mClearRect.y <= mRow &&
-               mClearRect.YMost() > mRow) {
-      // We have a base frame, but we are inside the area to be cleared.
-      // Only copy the data we need from the source.
-      size_t prefixLength = mClearRect.x * sizeof(uint32_t);
-      size_t clearLength = mClearRect.width * sizeof(uint32_t);
-      size_t postfixOffset = prefixLength + clearLength;
-      size_t postfixLength = mRowLength - postfixOffset;
-      MOZ_ASSERT(prefixLength + clearLength + postfixLength == mRowLength);
-      memcpy(dest, mBaseFrameRowPtr, prefixLength);
-      memset(dest + prefixLength, 0, clearLength);
-      memcpy(dest + postfixOffset, mBaseFrameRowPtr + postfixOffset, postfixLength);
-    } else {
-      memcpy(dest, mBaseFrameRowPtr, mRowLength);
-    }
-  }
-
-  bool AdvanceRowOutsideFrameRect()
-  {
-    // The unclamped frame rect may have a negative offset however we should
-    // never be advancing the row via this path (otherwise mBaseFrameRowPtr
-    // will be wrong.
-    MOZ_ASSERT(mRow >= 0);
-    MOZ_ASSERT(mRow < mFrameRect.y || mRow >= mFrameRect.YMost());
-
-    mRow++;
-    if (mBaseFrameRowPtr) {
-      mBaseFrameRowPtr += mRowLength;
-    }
-
-    return mNext.AdvanceRow() != nullptr;
-  }
-
-  uint8_t* AdjustRowPointer(uint8_t* aNextRowPointer) const
-  {
-    if (mBuffer) {
-      MOZ_ASSERT(aNextRowPointer == mBuffer.get() || aNextRowPointer == nullptr);
-      return aNextRowPointer;  // No adjustment needed for an intermediate buffer.
-    }
-
-    if (mFrameRect.IsEmpty() ||
-        mRow >= mFrameRect.YMost() ||
-        aNextRowPointer == nullptr) {
-      return nullptr;  // Nothing left to write.
-    }
-
-    MOZ_ASSERT(!mOverProc);
-    return aNextRowPointer + mFrameRect.x * sizeof(uint32_t);
-  }
-
-  Next mNext;                          /// The next SurfaceFilter in the chain.
-
-  gfx::IntRect mFrameRect;             /// The surface subrect which contains data,
-                                       /// clamped to the image size.
-  gfx::IntRect mUnclampedFrameRect;    /// The frame rect before clamping.
-  UniquePtr<uint8_t[]> mBuffer;        /// The intermediate buffer, if one is
-                                       /// necessary because the frame rect width
-                                       /// is larger than the image's logical width.
-  int32_t  mRow;                       /// The row in unclamped frame rect space
-                                       /// that we're currently writing.
-  size_t mRowLength;                   /// Length in bytes of a row that is the input
-                                       /// for the next filter.
-  SkBlitRow::Proc32 mOverProc;         /// Function pointer to perform over blending.
-  const uint8_t* mBaseFrameStartPtr;   /// Starting row pointer to the base frame
-                                       /// data from which we copy pixel data from.
-  const uint8_t* mBaseFrameRowPtr;     /// Current row pointer to the base frame
-                                       /// data.
-  gfx::IntRect mClearRect;             /// The frame area to clear before blending
-                                       /// the current frame.
-};
 
 //////////////////////////////////////////////////////////////////////////////
 // RemoveFrameRectFilter
 //////////////////////////////////////////////////////////////////////////////
 
 template <typename Next> class RemoveFrameRectFilter;
 
 /**
--- a/image/SurfacePipeFactory.h
+++ b/image/SurfacePipeFactory.h
@@ -49,25 +49,21 @@ enum class SurfacePipeFlags
   DEINTERLACE         = 1 << 0,  // If set, deinterlace the image.
 
   ADAM7_INTERPOLATE   = 1 << 1,  // If set, the caller is deinterlacing the
                                  // image using ADAM7, and we may want to
                                  // interpolate it for better intermediate results.
 
   FLIP_VERTICALLY     = 1 << 2,  // If set, flip the image vertically.
 
-  PROGRESSIVE_DISPLAY = 1 << 3,  // If set, we expect the image to be displayed
+  PROGRESSIVE_DISPLAY = 1 << 3   // If set, we expect the image to be displayed
                                  // progressively. This enables features that
                                  // result in a better user experience for
                                  // progressive display but which may be more
                                  // computationally expensive.
-
-  BLEND_ANIMATION     = 1 << 4   // If set, produce the next full frame of an
-                                 // animation instead of a partial frame to be
-                                 // blended later.
 };
 MOZ_MAKE_ENUM_CLASS_BITWISE_OPERATORS(SurfacePipeFlags)
 
 class SurfacePipeFactory
 {
 public:
   /**
    * Creates and initializes a normal (i.e., non-paletted) SurfacePipe.
@@ -100,50 +96,45 @@ public:
                     SurfacePipeFlags aFlags)
   {
     const bool deinterlace = bool(aFlags & SurfacePipeFlags::DEINTERLACE);
     const bool flipVertically = bool(aFlags & SurfacePipeFlags::FLIP_VERTICALLY);
     const bool progressiveDisplay = bool(aFlags & SurfacePipeFlags::PROGRESSIVE_DISPLAY);
     const bool downscale = aInputSize != aOutputSize;
     const bool removeFrameRect =
       !aFrameRect.IsEqualEdges(nsIntRect(0, 0, aInputSize.width, aInputSize.height));
-    const bool blendAnimation = bool(aFlags & SurfacePipeFlags::BLEND_ANIMATION);
 
     // Don't interpolate if we're sure we won't show this surface to the user
     // until it's completely decoded. The final pass of an ADAM7 image doesn't
     // need interpolation, so we only need to interpolate if we'll be displaying
     // the image while it's still being decoded.
     const bool adam7Interpolate = bool(aFlags & SurfacePipeFlags::ADAM7_INTERPOLATE) &&
                                   progressiveDisplay;
 
     if (deinterlace && adam7Interpolate) {
       MOZ_ASSERT_UNREACHABLE("ADAM7 deinterlacing is handled by libpng");
       return Nothing();
     }
 
-    MOZ_ASSERT_IF(blendAnimation, aAnimParams);
-
     // Construct configurations for the SurfaceFilters. Note that the order of
     // these filters is significant. We want to deinterlace or interpolate raw
     // input rows, before any other transformations, and we want to remove the
     // frame rect (which may involve adding blank rows or columns to the image)
     // before any downscaling, so that the new rows and columns are taken into
     // account.
     DeinterlacingConfig<uint32_t> deinterlacingConfig { progressiveDisplay };
     ADAM7InterpolatingConfig interpolatingConfig;
     RemoveFrameRectConfig removeFrameRectConfig { aFrameRect };
-    BlendAnimationConfig blendAnimationConfig { aDecoder };
     DownscalingConfig downscalingConfig { aInputSize, aFormat };
     SurfaceConfig surfaceConfig { aDecoder, aOutputSize, aFormat,
                                   flipVertically, aAnimParams };
 
     Maybe<SurfacePipe> pipe;
 
     if (downscale) {
-      MOZ_ASSERT(!blendAnimation);
       if (removeFrameRect) {
         if (deinterlace) {
           pipe = MakePipe(deinterlacingConfig, removeFrameRectConfig,
                           downscalingConfig, surfaceConfig);
         } else if (adam7Interpolate) {
           pipe = MakePipe(interpolatingConfig, removeFrameRectConfig,
                           downscalingConfig, surfaceConfig);
         } else {  // (deinterlace and adam7Interpolate are false)
@@ -154,33 +145,25 @@ public:
           pipe = MakePipe(deinterlacingConfig, downscalingConfig, surfaceConfig);
         } else if (adam7Interpolate) {
           pipe = MakePipe(interpolatingConfig, downscalingConfig, surfaceConfig);
         } else {  // (deinterlace and adam7Interpolate are false)
           pipe = MakePipe(downscalingConfig, surfaceConfig);
         }
       }
     } else {  // (downscale is false)
-      if (blendAnimation) {
-        if (deinterlace) {
-          pipe = MakePipe(deinterlacingConfig, blendAnimationConfig, surfaceConfig);
-        } else if (adam7Interpolate) {
-          pipe = MakePipe(interpolatingConfig, blendAnimationConfig, surfaceConfig);
-        } else {  // (deinterlace and adam7Interpolate are false)
-          pipe = MakePipe(blendAnimationConfig, surfaceConfig);
-        }
-      } else if (removeFrameRect) {
+      if (removeFrameRect) {
         if (deinterlace) {
           pipe = MakePipe(deinterlacingConfig, removeFrameRectConfig, surfaceConfig);
         } else if (adam7Interpolate) {
           pipe = MakePipe(interpolatingConfig, removeFrameRectConfig, surfaceConfig);
         } else {  // (deinterlace and adam7Interpolate are false)
           pipe = MakePipe(removeFrameRectConfig, surfaceConfig);
         }
-      } else {  // (blendAnimation and removeFrameRect is false)
+      } else {  // (removeFrameRect is false)
         if (deinterlace) {
           pipe = MakePipe(deinterlacingConfig, surfaceConfig);
         } else if (adam7Interpolate) {
           pipe = MakePipe(interpolatingConfig, surfaceConfig);
         } else {  // (deinterlace and adam7Interpolate are false)
           pipe = MakePipe(surfaceConfig);
         }
       }
--- a/image/decoders/GIF2.h
+++ b/image/decoders/GIF2.h
@@ -32,18 +32,16 @@ typedef struct gif_struct {
 
     // Output state machine
     int64_t pixels_remaining;  // Pixels remaining to be output.
 
     // Parameters for image frame currently being decoded
     int tpixel;                 // Index of transparent pixel
     int32_t disposal_method;    // Restore to background, leave in place, etc.
     uint32_t* local_colormap;   // Per-image colormap
-    uint32_t local_colormap_buffer_size; // Size of the buffer containing the
-                                         // local colormap.
     int local_colormap_size;    // Size of local colormap array.
     uint32_t delay_time;        // Display time, in milliseconds,
                                 // for this image in a multi-image GIF
 
     // Global (multi-image) state
     int version;                // Either 89 for GIF89 or 87 for GIF87
     int32_t screen_width;       // Logical screen width & height
     int32_t screen_height;
--- a/image/decoders/nsGIFDecoder2.cpp
+++ b/image/decoders/nsGIFDecoder2.cpp
@@ -174,50 +174,40 @@ nsGIFDecoder2::CheckForTransparency(cons
 nsresult
 nsGIFDecoder2::BeginImageFrame(const IntRect& aFrameRect,
                                uint16_t aDepth,
                                bool aIsInterlaced)
 {
   MOZ_ASSERT(HasSize());
 
   bool hasTransparency = CheckForTransparency(aFrameRect);
-  bool blendAnimation = ShouldBlendAnimation();
 
   // Make sure there's no animation if we're downscaling.
   MOZ_ASSERT_IF(Size() != OutputSize(), !GetImageMetadata().HasAnimation());
 
   AnimationParams animParams {
     aFrameRect,
     FrameTimeout::FromRawMilliseconds(mGIFStruct.delay_time),
     uint32_t(mGIFStruct.images_decoded),
     BlendMethod::OVER,
     DisposalMethod(mGIFStruct.disposal_method)
   };
 
   SurfacePipeFlags pipeFlags = aIsInterlaced
                              ? SurfacePipeFlags::DEINTERLACE
                              : SurfacePipeFlags();
 
-  gfx::SurfaceFormat format;
+  Maybe<SurfacePipe> pipe;
   if (mGIFStruct.images_decoded == 0) {
+    gfx::SurfaceFormat format = hasTransparency ? SurfaceFormat::B8G8R8A8
+                                                : SurfaceFormat::B8G8R8X8;
+
     // The first frame may be displayed progressively.
     pipeFlags |= SurfacePipeFlags::PROGRESSIVE_DISPLAY;
 
-    format = hasTransparency ? SurfaceFormat::B8G8R8A8
-                             : SurfaceFormat::B8G8R8X8;
-  } else {
-    format = SurfaceFormat::B8G8R8A8;
-  }
-
-  if (blendAnimation) {
-    pipeFlags |= SurfacePipeFlags::BLEND_ANIMATION;
-  }
-
-  Maybe<SurfacePipe> pipe;
-  if (mGIFStruct.images_decoded == 0 || blendAnimation) {
     // The first frame is always decoded into an RGB surface.
     pipe =
       SurfacePipeFactory::CreateSurfacePipe(this, Size(), OutputSize(),
                                             aFrameRect, format,
                                             Some(animParams), pipeFlags);
   } else {
     // This is an animation frame (and not the first). To minimize the memory
     // usage of animations, the image data is stored in paletted form.
@@ -225,18 +215,18 @@ nsGIFDecoder2::BeginImageFrame(const Int
     // We should never use paletted surfaces with a draw target directly, so
     // the only practical difference between B8G8R8A8 and B8G8R8X8 is the
     // cleared pixel value if we get truncated. We want 0 in that case to
     // ensure it is an acceptable value for the color map as was the case
     // historically.
     MOZ_ASSERT(Size() == OutputSize());
     pipe =
       SurfacePipeFactory::CreatePalettedSurfacePipe(this, Size(), aFrameRect,
-                                                    format, aDepth,
-                                                    Some(animParams),
+                                                    SurfaceFormat::B8G8R8A8,
+                                                    aDepth, Some(animParams),
                                                     pipeFlags);
   }
 
   mCurrentFrameIndex = mGIFStruct.images_decoded;
 
   if (!pipe) {
     mPipe = SurfacePipe();
     return NS_ERROR_FAILURE;
@@ -913,40 +903,28 @@ nsGIFDecoder2::FinishImageDescriptor(con
   mGIFStruct.pixels_remaining =
     int64_t(frameRect.Width()) * int64_t(frameRect.Height());
 
   if (haveLocalColorTable) {
     // We have a local color table, so prepare to read it into the palette of
     // the current frame.
     mGIFStruct.local_colormap_size = 1 << depth;
 
-    if (!mColormap) {
-      // Allocate a buffer to store the local color tables. This could be if the
-      // first frame has a local color table, or for subsequent frames when
-      // blending the animation during decoding.
-      MOZ_ASSERT(mGIFStruct.images_decoded == 0 || ShouldBlendAnimation());
-
-      // Ensure our current colormap buffer is large enough to hold the new one.
+    if (mGIFStruct.images_decoded == 0) {
+      // The first frame has a local color table. Allocate space for it as we
+      // use a BGRA or BGRX surface for the first frame; such surfaces don't
+      // have their own palettes internally.
       mColormapSize = sizeof(uint32_t) << realDepth;
-      if (mGIFStruct.local_colormap_buffer_size < mColormapSize) {
-        if (mGIFStruct.local_colormap) {
-          free(mGIFStruct.local_colormap);
-        }
-        mGIFStruct.local_colormap_buffer_size = mColormapSize;
+      if (!mGIFStruct.local_colormap) {
         mGIFStruct.local_colormap =
           static_cast<uint32_t*>(moz_xmalloc(mColormapSize));
-      } else {
-        mColormapSize = mGIFStruct.local_colormap_buffer_size;
       }
-
       mColormap = mGIFStruct.local_colormap;
     }
 
-    MOZ_ASSERT(mColormap);
-
     const size_t size = 3 << depth;
     if (mColormapSize > size) {
       // Clear the part of the colormap which will be unused with this palette.
       // If a GIF references an invalid palette entry, ensure the entry is opaque white.
       // This is needed for Skia as if it isn't, RGBX surfaces will cause blending issues
       // with Skia.
       memset(reinterpret_cast<uint8_t*>(mColormap) + size, 0xFF,
              mColormapSize - size);
@@ -958,17 +936,17 @@ nsGIFDecoder2::FinishImageDescriptor(con
     // large and it'd be preferable to avoid unnecessary copies.
     return Transition::ToUnbuffered(State::FINISHED_LOCAL_COLOR_TABLE,
                                     State::LOCAL_COLOR_TABLE,
                                     size);
   }
 
   // There's no local color table; copy the global color table into the palette
   // of the current frame.
-  if (mColormap) {
+  if (mGIFStruct.images_decoded > 0) {
     memcpy(mColormap, mGIFStruct.global_colormap, mColormapSize);
   } else {
     mColormap = mGIFStruct.global_colormap;
   }
 
   return Transition::To(State::IMAGE_DATA_BLOCK, BLOCK_HEADER_LEN);
 }
 
@@ -1068,17 +1046,17 @@ nsGIFDecoder2::ReadLZWData(const char* a
 {
   const uint8_t* data = reinterpret_cast<const uint8_t*>(aData);
   size_t length = aLength;
 
   while (mGIFStruct.pixels_remaining > 0 &&
          (length > 0 || mGIFStruct.bits >= mGIFStruct.codesize)) {
     size_t bytesRead = 0;
 
-    auto result = mGIFStruct.images_decoded == 0 || ShouldBlendAnimation()
+    auto result = mGIFStruct.images_decoded == 0
       ? mPipe.WritePixelBlocks<uint32_t>([&](uint32_t* aPixelBlock, int32_t aBlockSize) {
           return YieldPixels<uint32_t>(data, length, &bytesRead, aPixelBlock, aBlockSize);
         })
       : mPipe.WritePixelBlocks<uint8_t>([&](uint8_t* aPixelBlock, int32_t aBlockSize) {
           return YieldPixels<uint8_t>(data, length, &bytesRead, aPixelBlock, aBlockSize);
         });
 
     if (MOZ_UNLIKELY(bytesRead > length)) {
--- a/image/decoders/nsPNGDecoder.cpp
+++ b/image/decoders/nsPNGDecoder.cpp
@@ -228,20 +228,16 @@ nsPNGDecoder::CreateFrame(const FrameInf
                              ? SurfacePipeFlags::ADAM7_INTERPOLATE
                              : SurfacePipeFlags();
 
   if (mNumFrames == 0) {
     // The first frame may be displayed progressively.
     pipeFlags |= SurfacePipeFlags::PROGRESSIVE_DISPLAY;
   }
 
-  if (ShouldBlendAnimation()) {
-    pipeFlags |= SurfacePipeFlags::BLEND_ANIMATION;
-  }
-
   Maybe<SurfacePipe> pipe =
     SurfacePipeFactory::CreateSurfacePipe(this, Size(), OutputSize(),
                                           aFrameInfo.mFrameRect, mFormat,
                                           animParams, pipeFlags);
 
   if (!pipe) {
     mPipe = SurfacePipe();
     return NS_ERROR_FAILURE;
--- a/image/imgFrame.cpp
+++ b/image/imgFrame.cpp
@@ -103,33 +103,34 @@ ShouldUseHeap(const IntSize& aSize,
   }
 
   return false;
 }
 
 static already_AddRefed<DataSourceSurface>
 AllocateBufferForImage(const IntSize& size,
                        SurfaceFormat format,
-                       bool aIsAnimated = false,
-                       bool aIsFullFrame = true)
+                       bool aIsAnimated = false)
 {
   int32_t stride = VolatileSurfaceStride(size, format);
 
-  if (gfxVars::GetUseWebRenderOrDefault() &&
-      gfxPrefs::ImageMemShared() && aIsFullFrame) {
-    RefPtr<SourceSurfaceSharedData> newSurf = new SourceSurfaceSharedData();
-    if (newSurf->Init(size, stride, format)) {
-      return newSurf.forget();
-    }
-  } else if (ShouldUseHeap(size, stride, aIsAnimated)) {
+  if (ShouldUseHeap(size, stride, aIsAnimated)) {
     RefPtr<SourceSurfaceAlignedRawData> newSurf =
       new SourceSurfaceAlignedRawData();
     if (newSurf->Init(size, format, false, 0, stride)) {
       return newSurf.forget();
     }
+  }
+
+  if (!aIsAnimated && gfxVars::GetUseWebRenderOrDefault()
+                   && gfxPrefs::ImageMemShared()) {
+    RefPtr<SourceSurfaceSharedData> newSurf = new SourceSurfaceSharedData();
+    if (newSurf->Init(size, stride, format)) {
+      return newSurf.forget();
+    }
   } else {
     RefPtr<SourceSurfaceVolatileData> newSurf= new SourceSurfaceVolatileData();
     if (newSurf->Init(size, stride, format)) {
       return newSurf.forget();
     }
   }
   return nullptr;
 }
@@ -207,17 +208,16 @@ imgFrame::imgFrame()
   , mOptimizable(false)
   , mTimeout(FrameTimeout::FromRawMilliseconds(100))
   , mDisposalMethod(DisposalMethod::NOT_SPECIFIED)
   , mBlendMethod(BlendMethod::OVER)
   , mFormat(SurfaceFormat::UNKNOWN)
   , mPalettedImageData(nullptr)
   , mPaletteDepth(0)
   , mNonPremult(false)
-  , mIsFullFrame(false)
   , mCompositingFailed(false)
 {
 }
 
 imgFrame::~imgFrame()
 {
 #ifdef DEBUG
   MonitorAutoLock lock(mMonitor);
@@ -230,44 +230,36 @@ imgFrame::~imgFrame()
 }
 
 nsresult
 imgFrame::InitForDecoder(const nsIntSize& aImageSize,
                          const nsIntRect& aRect,
                          SurfaceFormat aFormat,
                          uint8_t aPaletteDepth /* = 0 */,
                          bool aNonPremult /* = false */,
-                         const Maybe<AnimationParams>& aAnimParams /* = Nothing() */,
-                         bool aIsFullFrame /* = false */)
+                         const Maybe<AnimationParams>& aAnimParams /* = Nothing() */)
 {
   // Assert for properties that should be verified by decoders,
   // warn for properties related to bad content.
   if (!AllowedImageAndFrameDimensions(aImageSize, aRect)) {
     NS_WARNING("Should have legal image size");
     mAborted = true;
     return NS_ERROR_FAILURE;
   }
 
   mImageSize = aImageSize;
   mFrameRect = aRect;
 
-  // May be updated shortly after InitForDecoder by BlendAnimationFilter
-  // because it needs to take into consideration the previous frames to
-  // properly calculate. We start with the whole frame as dirty.
-  mDirtyRect = aRect;
-
   if (aAnimParams) {
     mBlendRect = aAnimParams->mBlendRect;
     mTimeout = aAnimParams->mTimeout;
     mBlendMethod = aAnimParams->mBlendMethod;
     mDisposalMethod = aAnimParams->mDisposalMethod;
-    mIsFullFrame = aAnimParams->mFrameNum == 0 || aIsFullFrame;
   } else {
     mBlendRect = aRect;
-    mIsFullFrame = true;
   }
 
   // We only allow a non-trivial frame rect (i.e., a frame rect that doesn't
   // cover the entire image) for paletted animation frames. We never draw those
   // frames directly; we just use FrameAnimator to composite them and produce a
   // BGRA surface that we actually draw. We enforce this here to make sure that
   // imgFrame::Draw(), which is responsible for drawing all other kinds of
   // frames, never has to deal with a non-trivial frame rect.
@@ -298,18 +290,17 @@ imgFrame::InitForDecoder(const nsIntSize
     if (!mPalettedImageData) {
       NS_WARNING("Call to calloc for paletted image data should succeed");
     }
     NS_ENSURE_TRUE(mPalettedImageData, NS_ERROR_OUT_OF_MEMORY);
   } else {
     MOZ_ASSERT(!mLockedSurface, "Called imgFrame::InitForDecoder() twice?");
 
     bool postFirstFrame = aAnimParams && aAnimParams->mFrameNum > 0;
-    mRawSurface = AllocateBufferForImage(mFrameRect.Size(), mFormat,
-                                         postFirstFrame, mIsFullFrame);
+    mRawSurface = AllocateBufferForImage(mFrameRect.Size(), mFormat, postFirstFrame);
     if (!mRawSurface) {
       mAborted = true;
       return NS_ERROR_OUT_OF_MEMORY;
     }
 
     mLockedSurface = CreateLockedSurface(mRawSurface, mFrameRect.Size(), mFormat);
     if (!mLockedSurface) {
       NS_WARNING("Failed to create LockedSurface");
--- a/image/imgFrame.h
+++ b/image/imgFrame.h
@@ -54,32 +54,27 @@ public:
    * when drawing content into an imgFrame, as it may use a different graphics
    * backend than normal content drawing.
    */
   nsresult InitForDecoder(const nsIntSize& aImageSize,
                           const nsIntRect& aRect,
                           SurfaceFormat aFormat,
                           uint8_t aPaletteDepth = 0,
                           bool aNonPremult = false,
-                          const Maybe<AnimationParams>& aAnimParams = Nothing(),
-                          bool aIsFullFrame = false);
+                          const Maybe<AnimationParams>& aAnimParams = Nothing());
 
   nsresult InitForAnimator(const nsIntSize& aSize,
                            SurfaceFormat aFormat)
   {
     nsIntRect frameRect(0, 0, aSize.width, aSize.height);
     AnimationParams animParams { frameRect, FrameTimeout::Forever(),
                                  /* aFrameNum */ 1, BlendMethod::OVER,
                                  DisposalMethod::NOT_SPECIFIED };
-    // We set aIsFullFrame to false because we don't want the compositing frame
-    // to be allocated into shared memory for WebRender. mIsFullFrame is only
-    // otherwise used for frames produced by Decoder, so it isn't relevant.
-    return InitForDecoder(aSize, frameRect, aFormat, /* aPaletteDepth */ 0,
-                          /* aNonPremult */ false, Some(animParams),
-                          /* aIsFullFrame */ false);
+    return InitForDecoder(aSize, frameRect,
+                          aFormat, 0, false, Some(animParams));
   }
 
 
   /**
    * Initialize this imgFrame with a new surface and draw the provided
    * gfxDrawable into it.
    *
    * This is appropriate to use when drawing content into an imgFrame, as it
@@ -188,21 +183,16 @@ public:
   void GetImageData(uint8_t** aData, uint32_t* length) const;
   uint8_t* GetImageData() const;
 
   bool GetIsPaletted() const;
   void GetPaletteData(uint32_t** aPalette, uint32_t* length) const;
   uint32_t* GetPaletteData() const;
   uint8_t GetPaletteDepth() const { return mPaletteDepth; }
 
-  const IntRect& GetDirtyRect() const { return mDirtyRect; }
-  void SetDirtyRect(const IntRect& aDirtyRect) { mDirtyRect = aDirtyRect; }
-
-  bool IsFullFrame() const { return mIsFullFrame; }
-
   bool GetCompositingFailed() const;
   void SetCompositingFailed(bool val);
 
   void SetOptimizable();
 
   void FinalizeSurface();
   already_AddRefed<SourceSurface> GetSourceSurface();
 
@@ -300,61 +290,36 @@ private: // data
   bool mFinished;
   bool mOptimizable;
 
 
   //////////////////////////////////////////////////////////////////////////////
   // Effectively const data, only mutated in the Init methods.
   //////////////////////////////////////////////////////////////////////////////
 
-  //! The size of the buffer we are decoding to.
   IntSize      mImageSize;
-
-  //! XXX(aosmond): This means something different depending on the context. We
-  //!               should correct this.
-  //!
-  //! There are several different contexts for mFrameRect:
-  //! - If for a non-animated image, it will originate at (0, 0) and match
-  //!   the dimensions of mImageSize.
-  //! - If for an APNG, it also matches the above.
-  //! - If for a GIF which is producing full frames, it matches the above.
-  //! - If for a GIF which is producing partial frames, it matches mBlendRect.
   IntRect      mFrameRect;
-
-  //! The contents for the frame, as represented in the encoded image. This may
-  //! differ from mImageSize because it may be a partial frame. For the first
-  //! frame, this means we need to shift the data in place, and for animated
-  //! frames, it likely needs to be combined with a previous frame to get the full
-  //! contents.
   IntRect      mBlendRect;
 
-  //! This is the region that has changed between this frame and the previous
-  //! frame of an animation. For the first frame, this will be the same as
-  //! mFrameRect.
-  IntRect      mDirtyRect;
-
   //! The timeout for this frame.
   FrameTimeout mTimeout;
 
   DisposalMethod mDisposalMethod;
   BlendMethod    mBlendMethod;
   SurfaceFormat  mFormat;
 
   // The palette and image data for images that are paletted, since Cairo
   // doesn't support these images.
   // The paletted data comes first, then the image data itself.
   // Total length is PaletteDataLength() + GetImageDataLength().
   uint8_t*     mPalettedImageData;
   uint8_t      mPaletteDepth;
 
   bool mNonPremult;
 
-  //! True if the frame has all of the data stored in it, false if it needs to
-  //! be combined with another frame (e.g. the previous frame) to be complete.
-  bool mIsFullFrame;
 
   //////////////////////////////////////////////////////////////////////////////
   // Main-thread-only mutable data.
   //////////////////////////////////////////////////////////////////////////////
 
   bool mCompositingFailed;
 };
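
For a concrete sense of the restored six-parameter InitForDecoder() above, here
is a minimal usage sketch for an animated frame. It is illustrative only:
"frame", "imageSize", and "frameRect" are hypothetical locals, and the timeout,
blend, and disposal values are arbitrary examples.

    AnimationParams animParams { frameRect,
                                 FrameTimeout::FromRawMilliseconds(100),
                                 /* aFrameNum */ 1, BlendMethod::OVER,
                                 DisposalMethod::KEEP };
    nsresult rv = frame->InitForDecoder(imageSize, frameRect,
                                        SurfaceFormat::B8G8R8A8,
                                        /* aPaletteDepth */ 0,
                                        /* aNonPremult */ false,
                                        Some(animParams));
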
 
--- a/image/test/gtest/Common.cpp
+++ b/image/test/gtest/Common.cpp
@@ -190,31 +190,30 @@ RectIsSolidColor(SourceSurface* aSurface
   DataSourceSurface::ScopedMap mapping(dataSurface,
                                        DataSourceSurface::MapType::READ);
   ASSERT_TRUE_OR_RETURN(mapping.IsMapped(), false);
   ASSERT_EQ_OR_RETURN(mapping.GetStride(), surfaceSize.width * 4, false);
 
   uint8_t* data = mapping.GetData();
   ASSERT_TRUE_OR_RETURN(data != nullptr, false);
 
-  BGRAColor pmColor = aColor.Premultiply();
   int32_t rowLength = mapping.GetStride();
   for (int32_t row = rect.Y(); row < rect.YMost(); ++row) {
     for (int32_t col = rect.X(); col < rect.XMost(); ++col) {
       int32_t i = row * rowLength + col * 4;
       if (aFuzz != 0) {
-        ASSERT_LE_OR_RETURN(abs(pmColor.mBlue - data[i + 0]), aFuzz, false);
-        ASSERT_LE_OR_RETURN(abs(pmColor.mGreen - data[i + 1]), aFuzz, false);
-        ASSERT_LE_OR_RETURN(abs(pmColor.mRed - data[i + 2]), aFuzz, false);
-        ASSERT_LE_OR_RETURN(abs(pmColor.mAlpha - data[i + 3]), aFuzz, false);
+        ASSERT_LE_OR_RETURN(abs(aColor.mBlue - data[i + 0]), aFuzz, false);
+        ASSERT_LE_OR_RETURN(abs(aColor.mGreen - data[i + 1]), aFuzz, false);
+        ASSERT_LE_OR_RETURN(abs(aColor.mRed - data[i + 2]), aFuzz, false);
+        ASSERT_LE_OR_RETURN(abs(aColor.mAlpha - data[i + 3]), aFuzz, false);
       } else {
-        ASSERT_EQ_OR_RETURN(pmColor.mBlue,  data[i + 0], false);
-        ASSERT_EQ_OR_RETURN(pmColor.mGreen, data[i + 1], false);
-        ASSERT_EQ_OR_RETURN(pmColor.mRed,   data[i + 2], false);
-        ASSERT_EQ_OR_RETURN(pmColor.mAlpha, data[i + 3], false);
+        ASSERT_EQ_OR_RETURN(aColor.mBlue,  data[i + 0], false);
+        ASSERT_EQ_OR_RETURN(aColor.mGreen, data[i + 1], false);
+        ASSERT_EQ_OR_RETURN(aColor.mRed,   data[i + 2], false);
+        ASSERT_EQ_OR_RETURN(aColor.mAlpha, data[i + 3], false);
       }
     }
   }
 
   return true;
 }
 
 bool
@@ -296,17 +295,16 @@ RowHasPixels(SourceSurface* aSurface,
 already_AddRefed<Decoder>
 CreateTrivialDecoder()
 {
   gfxPrefs::GetSingleton();
   DecoderType decoderType = DecoderFactory::GetDecoderType("image/gif");
   auto sourceBuffer = MakeNotNull<RefPtr<SourceBuffer>>();
   RefPtr<Decoder> decoder =
     DecoderFactory::CreateAnonymousDecoder(decoderType, sourceBuffer, Nothing(),
-                                           DefaultDecoderFlags(),
                                            DefaultSurfaceFlags());
   return decoder.forget();
 }
 
 void
 AssertCorrectPipelineFinalState(SurfaceFilter* aFilter,
                                 const gfx::IntRect& aInputSpaceRect,
                                 const gfx::IntRect& aOutputSpaceRect)
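
The RectIsSolidColor() change above drops the Premultiply() step that was only
needed for the translucent reference colors used by the deleted
TestBlendAnimationFilter.cpp; the colors the remaining tests compare are fully
opaque or fully transparent, for which premultiplication is a no-op. As a rough
sketch of what 8-bit premultiplication does (the exact rounding in
gfxPreMultiply may differ by one):

    // Sketch only; not the real gfxPreMultiply implementation.
    static uint8_t PremultiplySketch(uint8_t aChannel, uint8_t aAlpha)
    {
      return static_cast<uint8_t>((aChannel * aAlpha + 127) / 255);
    }
    // e.g. PremultiplySketch(0xFF, 0x40) == 0x40, so the 25%-opaque green
    // BGRAColor(0, 0xFF, 0, 0x40) used by the deleted tests stores its green
    // channel as roughly 0x40 once premultiplied.
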
--- a/image/test/gtest/Common.h
+++ b/image/test/gtest/Common.h
@@ -5,17 +5,16 @@
 
 #ifndef mozilla_image_test_gtest_Common_h
 #define mozilla_image_test_gtest_Common_h
 
 #include <vector>
 
 #include "gtest/gtest.h"
 
-#include "mozilla/Attributes.h"
 #include "mozilla/Maybe.h"
 #include "mozilla/UniquePtr.h"
 #include "mozilla/gfx/2D.h"
 #include "Decoder.h"
 #include "gfxColor.h"
 #include "imgITools.h"
 #include "nsCOMPtr.h"
 #include "SurfacePipe.h"
@@ -71,53 +70,34 @@ struct ImageTestCase
   gfx::IntSize mOutputSize;
   uint32_t mFlags;
 };
 
 struct BGRAColor
 {
   BGRAColor() : BGRAColor(0, 0, 0, 0) { }
 
-  BGRAColor(uint8_t aBlue, uint8_t aGreen, uint8_t aRed, uint8_t aAlpha, bool aPremultiplied = false)
+  BGRAColor(uint8_t aBlue, uint8_t aGreen, uint8_t aRed, uint8_t aAlpha)
     : mBlue(aBlue)
     , mGreen(aGreen)
     , mRed(aRed)
     , mAlpha(aAlpha)
-    , mPremultiplied(aPremultiplied)
   { }
 
   static BGRAColor Green() { return BGRAColor(0x00, 0xFF, 0x00, 0xFF); }
   static BGRAColor Red()   { return BGRAColor(0x00, 0x00, 0xFF, 0xFF); }
   static BGRAColor Blue()   { return BGRAColor(0xFF, 0x00, 0x00, 0xFF); }
   static BGRAColor Transparent() { return BGRAColor(0x00, 0x00, 0x00, 0x00); }
 
-  BGRAColor Premultiply() const
-  {
-    if (!mPremultiplied) {
-      return BGRAColor(gfxPreMultiply(mBlue, mAlpha),
-                       gfxPreMultiply(mGreen, mAlpha),
-                       gfxPreMultiply(mRed, mAlpha),
-                       mAlpha,
-                       true);
-    }
-    return *this;
-  }
-
-  uint32_t AsPixel() const {
-    if (!mPremultiplied) {
-      return gfxPackedPixel(mAlpha, mRed, mGreen, mBlue);
-    }
-    return gfxPackedPixelNoPreMultiply(mAlpha, mRed, mGreen, mBlue);
-  }
+  uint32_t AsPixel() const { return gfxPackedPixel(mAlpha, mRed, mGreen, mBlue); }
 
   uint8_t mBlue;
   uint8_t mGreen;
   uint8_t mRed;
   uint8_t mAlpha;
-  bool mPremultiplied;
 };
 
 
 ///////////////////////////////////////////////////////////////////////////////
 // General Helpers
 ///////////////////////////////////////////////////////////////////////////////
 
 /**
@@ -256,38 +236,30 @@ already_AddRefed<Decoder> CreateTrivialD
 * it to the provided lambda @aFunc. Assertions that the pipeline is constructed
 * correctly and cleanup of any allocated surfaces are handled automatically.
  *
  * @param aDecoder The decoder to use for allocating surfaces.
  * @param aFunc The lambda function to pass the filter pipeline to.
  * @param aConfigs The configuration for the pipeline.
  */
 template <typename Func, typename... Configs>
-void WithFilterPipeline(Decoder* aDecoder, Func aFunc, bool aFinish, const Configs&... aConfigs)
+void WithFilterPipeline(Decoder* aDecoder, Func aFunc, const Configs&... aConfigs)
 {
   auto pipe = MakeUnique<typename detail::FilterPipeline<Configs...>::Type>();
   nsresult rv = pipe->Configure(aConfigs...);
   ASSERT_TRUE(NS_SUCCEEDED(rv));
 
   aFunc(aDecoder, pipe.get());
 
-  if (aFinish) {
-    RawAccessFrameRef currentFrame = aDecoder->GetCurrentFrameRef();
-    if (currentFrame) {
-      currentFrame->Finish();
-    }
+  RawAccessFrameRef currentFrame = aDecoder->GetCurrentFrameRef();
+  if (currentFrame) {
+    currentFrame->Finish();
   }
 }
 
-template <typename Func, typename... Configs>
-void WithFilterPipeline(Decoder* aDecoder, Func aFunc, const Configs&... aConfigs)
-{
-  WithFilterPipeline(aDecoder, aFunc, true, aConfigs...);
-}
-
 /**
  * Creates a pipeline of SurfaceFilters from a list of Config structs and
  * asserts that configuring it fails. Cleanup of any allocated surfaces is
  * handled automatically.
  *
  * @param aDecoder The decoder to use for allocating surfaces.
  * @param aConfigs The configuration for the pipeline.
  */
@@ -392,41 +364,16 @@ void CheckWritePixels(Decoder* aDecoder,
 void CheckPalettedWritePixels(Decoder* aDecoder,
                               SurfaceFilter* aFilter,
                               const Maybe<gfx::IntRect>& aOutputRect = Nothing(),
                               const Maybe<gfx::IntRect>& aInputRect = Nothing(),
                               const Maybe<gfx::IntRect>& aInputWriteRect = Nothing(),
                               const Maybe<gfx::IntRect>& aOutputWriteRect = Nothing(),
                               uint8_t aFuzz = 0);
 
-///////////////////////////////////////////////////////////////////////////////
-// Decoder Helpers
-///////////////////////////////////////////////////////////////////////////////
-
-// Friend class of Decoder to access internals for tests.
-class MOZ_STACK_CLASS DecoderTestHelper final
-{
-public:
-  explicit DecoderTestHelper(Decoder* aDecoder)
-    : mDecoder(aDecoder)
-  { }
-
-  void PostIsAnimated(FrameTimeout aTimeout)
-  {
-    mDecoder->PostIsAnimated(aTimeout);
-  }
-
-  void PostFrameStop(Opacity aOpacity)
-  {
-    mDecoder->PostFrameStop(aOpacity);
-  }
-
-private:
-  Decoder* mDecoder;
-};
 
 ///////////////////////////////////////////////////////////////////////////////
 // Test Data
 ///////////////////////////////////////////////////////////////////////////////
 
 ImageTestCase GreenPNGTestCase();
 ImageTestCase GreenGIFTestCase();
 ImageTestCase GreenJPGTestCase();
deleted file mode 100644
--- a/image/test/gtest/TestBlendAnimationFilter.cpp
+++ /dev/null
@@ -1,504 +0,0 @@
-/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* vim: set ts=8 sts=2 et sw=2 tw=80: */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#include "gtest/gtest.h"
-
-#include "mozilla/gfx/2D.h"
-#include "skia/include/core/SkColorPriv.h" // for SkPMSrcOver
-#include "Common.h"
-#include "Decoder.h"
-#include "DecoderFactory.h"
-#include "SourceBuffer.h"
-#include "SurfaceFilters.h"
-#include "SurfacePipe.h"
-
-using namespace mozilla;
-using namespace mozilla::gfx;
-using namespace mozilla::image;
-
-static already_AddRefed<Decoder>
-CreateTrivialBlendingDecoder()
-{
-  gfxPrefs::GetSingleton();
-  DecoderType decoderType = DecoderFactory::GetDecoderType("image/gif");
-  DecoderFlags decoderFlags = DecoderFlags::BLEND_ANIMATION;
-  SurfaceFlags surfaceFlags = DefaultSurfaceFlags();
-  auto sourceBuffer = MakeNotNull<RefPtr<SourceBuffer>>();
-  return DecoderFactory::CreateAnonymousDecoder(decoderType, sourceBuffer,
-                                                Nothing(), decoderFlags,
-                                                surfaceFlags);
-}
-
-template <typename Func> RawAccessFrameRef
-WithBlendAnimationFilter(Decoder* aDecoder,
-                         const AnimationParams& aAnimParams,
-                         const IntSize& aOutputSize,
-                         Func aFunc)
-{
-  DecoderTestHelper decoderHelper(aDecoder);
-
-  if (!aDecoder->HasAnimation()) {
-    decoderHelper.PostIsAnimated(aAnimParams.mTimeout);
-  }
-
-  BlendAnimationConfig blendAnim { aDecoder };
-  SurfaceConfig surfaceSink { aDecoder, aOutputSize, SurfaceFormat::B8G8R8A8,
-                              false, Some(aAnimParams) };
-
-  auto func = [&](Decoder* aDecoder, SurfaceFilter* aFilter) {
-    aFunc(aDecoder, aFilter);
-  };
-
-  WithFilterPipeline(aDecoder, func, false, blendAnim, surfaceSink);
-
-  RawAccessFrameRef current = aDecoder->GetCurrentFrameRef();
-  if (current) {
-    decoderHelper.PostFrameStop(Opacity::SOME_TRANSPARENCY);
-  }
-
-  return current;
-}
-
-void
-AssertConfiguringBlendAnimationFilterFails(const IntRect& aFrameRect,
-                                           const IntSize& aOutputSize)
-{
-  RefPtr<Decoder> decoder = CreateTrivialBlendingDecoder();
-  ASSERT_TRUE(decoder != nullptr);
-
-  AnimationParams animParams { aFrameRect, FrameTimeout::FromRawMilliseconds(0),
-                               0, BlendMethod::SOURCE, DisposalMethod::KEEP };
-  BlendAnimationConfig blendAnim { decoder };
-  SurfaceConfig surfaceSink { decoder, aOutputSize,
-                              SurfaceFormat::B8G8R8A8, false,
-                              Some(animParams) };
-  AssertConfiguringPipelineFails(decoder, blendAnim, surfaceSink);
-}
-
-TEST(ImageBlendAnimationFilter, BlendFailsForNegativeFrameRect)
-{
-  // A negative frame rect size is disallowed.
-  AssertConfiguringBlendAnimationFilterFails(IntRect(IntPoint(0, 0), IntSize(-1, -1)),
-                                             IntSize(100, 100));
-}
-
-TEST(ImageBlendAnimationFilter, WriteFullFirstFrame)
-{
-  RefPtr<Decoder> decoder = CreateTrivialBlendingDecoder();
-  ASSERT_TRUE(decoder != nullptr);
-
-  AnimationParams params { IntRect(0, 0, 100, 100),
-                           FrameTimeout::FromRawMilliseconds(0),
-                           /* aFrameNum */ 0, BlendMethod::SOURCE,
-                           DisposalMethod::KEEP };
-  RawAccessFrameRef frame0 =
-    WithBlendAnimationFilter(decoder, params, IntSize(100, 100),
-                             [](Decoder* aDecoder, SurfaceFilter* aFilter) {
-      CheckWritePixels(aDecoder, aFilter, Some(IntRect(0, 0, 100, 100)));
-    });
-  EXPECT_EQ(IntRect(0, 0, 100, 100), frame0->GetDirtyRect());
-}
-
-TEST(ImageBlendAnimationFilter, WritePartialFirstFrame)
-{
-  RefPtr<Decoder> decoder = CreateTrivialBlendingDecoder();
-  ASSERT_TRUE(decoder != nullptr);
-
-  AnimationParams params { IntRect(25, 50, 50, 25),
-                           FrameTimeout::FromRawMilliseconds(0),
-                           /* aFrameNum */ 0, BlendMethod::SOURCE,
-                           DisposalMethod::KEEP };
-  RawAccessFrameRef frame0 =
-    WithBlendAnimationFilter(decoder, params, IntSize(100, 100),
-                             [](Decoder* aDecoder, SurfaceFilter* aFilter) {
-      CheckWritePixels(aDecoder, aFilter, Some(IntRect(0, 0, 100, 100)),
-                                          Nothing(),
-                                          Some(IntRect(25, 50, 50, 25)),
-                                          Some(IntRect(25, 50, 50, 25)));
-    });
-  EXPECT_EQ(IntRect(0, 0, 100, 100), frame0->GetDirtyRect());
-}
-
-static void
-TestWithBlendAnimationFilterClear(BlendMethod aBlendMethod)
-{
-  RefPtr<Decoder> decoder = CreateTrivialBlendingDecoder();
-  ASSERT_TRUE(decoder != nullptr);
-
-  AnimationParams params0 { IntRect(0, 0, 100, 100),
-                            FrameTimeout::FromRawMilliseconds(0),
-                            /* aFrameNum */ 0, BlendMethod::SOURCE,
-                            DisposalMethod::KEEP };
-  RawAccessFrameRef frame0 =
-    WithBlendAnimationFilter(decoder, params0, IntSize(100, 100),
-                             [](Decoder* aDecoder, SurfaceFilter* aFilter) {
-      auto result = aFilter->WritePixels<uint32_t>([&] {
-        return AsVariant(BGRAColor::Green().AsPixel());
-      });
-      EXPECT_EQ(WriteState::FINISHED, result);
-    });
-  EXPECT_EQ(IntRect(0, 0, 100, 100), frame0->GetDirtyRect());
-
-  AnimationParams params1 { IntRect(0, 40, 100, 20),
-                            FrameTimeout::FromRawMilliseconds(0),
-                            /* aFrameNum */ 1, BlendMethod::SOURCE,
-                            DisposalMethod::CLEAR };
-  RawAccessFrameRef frame1 =
-    WithBlendAnimationFilter(decoder, params1, IntSize(100, 100),
-                             [](Decoder* aDecoder, SurfaceFilter* aFilter) {
-      auto result = aFilter->WritePixels<uint32_t>([&] {
-        return AsVariant(BGRAColor::Red().AsPixel());
-      });
-      EXPECT_EQ(WriteState::FINISHED, result);
-    });
-  EXPECT_EQ(IntRect(0, 40, 100, 20), frame1->GetDirtyRect());
-
-  ASSERT_TRUE(frame1.get() != nullptr);
-
-  RefPtr<SourceSurface> surface = frame1->GetSourceSurface();
-  EXPECT_TRUE(RowsAreSolidColor(surface, 0, 40, BGRAColor::Green()));
-  EXPECT_TRUE(RowsAreSolidColor(surface, 40, 20, BGRAColor::Red()));
-  EXPECT_TRUE(RowsAreSolidColor(surface, 60, 40, BGRAColor::Green()));
-
-  AnimationParams params2 { IntRect(0, 50, 100, 20),
-                            FrameTimeout::FromRawMilliseconds(0),
-                            /* aFrameNum */ 2, aBlendMethod,
-                            DisposalMethod::KEEP };
-  RawAccessFrameRef frame2 =
-    WithBlendAnimationFilter(decoder, params2, IntSize(100, 100),
-                             [](Decoder* aDecoder, SurfaceFilter* aFilter) {
-      auto result = aFilter->WritePixels<uint32_t>([&] {
-        return AsVariant(BGRAColor::Blue().AsPixel());
-      });
-      EXPECT_EQ(WriteState::FINISHED, result);
-    });
-
-  ASSERT_TRUE(frame2.get() != nullptr);
-
-  surface = frame2->GetSourceSurface();
-  EXPECT_TRUE(RowsAreSolidColor(surface, 0, 40, BGRAColor::Green()));
-  EXPECT_TRUE(RowsAreSolidColor(surface, 40, 10, BGRAColor::Transparent()));
-  EXPECT_TRUE(RowsAreSolidColor(surface, 50, 20, BGRAColor::Blue()));
-  EXPECT_TRUE(RowsAreSolidColor(surface, 70, 30, BGRAColor::Green()));
-}
-
-TEST(ImageBlendAnimationFilter, ClearWithOver)
-{
-  TestWithBlendAnimationFilterClear(BlendMethod::OVER);
-}
-
-TEST(ImageBlendAnimationFilter, ClearWithSource)
-{
-  TestWithBlendAnimationFilterClear(BlendMethod::SOURCE);
-}
-
-TEST(ImageBlendAnimationFilter, KeepWithSource)
-{
-  RefPtr<Decoder> decoder = CreateTrivialBlendingDecoder();
-  ASSERT_TRUE(decoder != nullptr);
-
-  AnimationParams params0 { IntRect(0, 0, 100, 100),
-                            FrameTimeout::FromRawMilliseconds(0),
-                            /* aFrameNum */ 0, BlendMethod::SOURCE,
-                            DisposalMethod::KEEP };
-  RawAccessFrameRef frame0 =
-    WithBlendAnimationFilter(decoder, params0, IntSize(100, 100),
-                             [](Decoder* aDecoder, SurfaceFilter* aFilter) {
-      auto result = aFilter->WritePixels<uint32_t>([&] {
-        return AsVariant(BGRAColor::Green().AsPixel());
-      });
-      EXPECT_EQ(WriteState::FINISHED, result);
-    });
-  EXPECT_EQ(IntRect(0, 0, 100, 100), frame0->GetDirtyRect());
-
-  AnimationParams params1 { IntRect(0, 40, 100, 20),
-                            FrameTimeout::FromRawMilliseconds(0),
-                            /* aFrameNum */ 1, BlendMethod::SOURCE,
-                            DisposalMethod::KEEP };
-  RawAccessFrameRef frame1 =
-    WithBlendAnimationFilter(decoder, params1, IntSize(100, 100),
-                             [](Decoder* aDecoder, SurfaceFilter* aFilter) {
-      auto result = aFilter->WritePixels<uint32_t>([&] {
-        return AsVariant(BGRAColor::Red().AsPixel());
-      });
-      EXPECT_EQ(WriteState::FINISHED, result);
-    });
-  EXPECT_EQ(IntRect(0, 40, 100, 20), frame1->GetDirtyRect());
-
-  ASSERT_TRUE(frame1.get() != nullptr);
-
-  RefPtr<SourceSurface> surface = frame1->GetSourceSurface();
-  EXPECT_TRUE(RowsAreSolidColor(surface, 0, 40, BGRAColor::Green()));
-  EXPECT_TRUE(RowsAreSolidColor(surface, 40, 20, BGRAColor::Red()));
-  EXPECT_TRUE(RowsAreSolidColor(surface, 60, 40, BGRAColor::Green()));
-}
-
-TEST(ImageBlendAnimationFilter, KeepWithOver)
-{
-  RefPtr<Decoder> decoder = CreateTrivialBlendingDecoder();
-  ASSERT_TRUE(decoder != nullptr);
-
-  AnimationParams params0 { IntRect(0, 0, 100, 100),
-                            FrameTimeout::FromRawMilliseconds(0),
-                            /* aFrameNum */ 0, BlendMethod::SOURCE,
-                            DisposalMethod::KEEP };
-  BGRAColor frameColor0(0, 0xFF, 0, 0x40);
-  RawAccessFrameRef frame0 =
-    WithBlendAnimationFilter(decoder, params0, IntSize(100, 100),
-                             [&](Decoder* aDecoder, SurfaceFilter* aFilter) {
-      auto result = aFilter->WritePixels<uint32_t>([&] {
-        return AsVariant(frameColor0.AsPixel());
-      });
-      EXPECT_EQ(WriteState::FINISHED, result);
-    });
-  EXPECT_EQ(IntRect(0, 0, 100, 100), frame0->GetDirtyRect());
-
-  AnimationParams params1 { IntRect(0, 40, 100, 20),
-                            FrameTimeout::FromRawMilliseconds(0),
-                            /* aFrameNum */ 1, BlendMethod::OVER,
-                            DisposalMethod::KEEP };
-  BGRAColor frameColor1(0, 0, 0xFF, 0x80);
-  RawAccessFrameRef frame1 =
-    WithBlendAnimationFilter(decoder, params1, IntSize(100, 100),
-                             [&](Decoder* aDecoder, SurfaceFilter* aFilter) {
-      auto result = aFilter->WritePixels<uint32_t>([&] {
-        return AsVariant(frameColor1.AsPixel());
-      });
-      EXPECT_EQ(WriteState::FINISHED, result);
-    });
-  EXPECT_EQ(IntRect(0, 40, 100, 20), frame1->GetDirtyRect());
-
-  ASSERT_TRUE(frame1.get() != nullptr);
-
-  BGRAColor blendedColor(0, 0x20, 0x80, 0xA0, true); // already premultiplied
-  EXPECT_EQ(SkPMSrcOver(frameColor1.AsPixel(), frameColor0.AsPixel()),
-            blendedColor.AsPixel());
-
-  RefPtr<SourceSurface> surface = frame1->GetSourceSurface();
-  EXPECT_TRUE(RowsAreSolidColor(surface, 0, 40, frameColor0));
-  EXPECT_TRUE(RowsAreSolidColor(surface, 40, 20, blendedColor));
-  EXPECT_TRUE(RowsAreSolidColor(surface, 60, 40, frameColor0));
-}
-
-TEST(ImageBlendAnimationFilter, RestorePreviousWithOver)
-{
-  RefPtr<Decoder> decoder = CreateTrivialBlendingDecoder();
-  ASSERT_TRUE(decoder != nullptr);
-
-  AnimationParams params0 { IntRect(0, 0, 100, 100),
-                            FrameTimeout::FromRawMilliseconds(0),
-                            /* aFrameNum */ 0, BlendMethod::SOURCE,
-                            DisposalMethod::KEEP };
-  BGRAColor frameColor0(0, 0xFF, 0, 0x40);
-  RawAccessFrameRef frame0 =
-    WithBlendAnimationFilter(decoder, params0, IntSize(100, 100),
-                             [&](Decoder* aDecoder, SurfaceFilter* aFilter) {
-      auto result = aFilter->WritePixels<uint32_t>([&] {
-        return AsVariant(frameColor0.AsPixel());
-      });
-      EXPECT_EQ(WriteState::FINISHED, result);
-    });
-  EXPECT_EQ(IntRect(0, 0, 100, 100), frame0->GetDirtyRect());
-
-  AnimationParams params1 { IntRect(0, 10, 100, 80),
-                            FrameTimeout::FromRawMilliseconds(0),
-                            /* aFrameNum */ 1, BlendMethod::SOURCE,
-                            DisposalMethod::RESTORE_PREVIOUS };
-  BGRAColor frameColor1 = BGRAColor::Green();
-  RawAccessFrameRef frame1 =
-    WithBlendAnimationFilter(decoder, params1, IntSize(100, 100),
-                             [&](Decoder* aDecoder, SurfaceFilter* aFilter) {
-      auto result = aFilter->WritePixels<uint32_t>([&] {
-        return AsVariant(frameColor1.AsPixel());
-      });
-      EXPECT_EQ(WriteState::FINISHED, result);
-    });
-  EXPECT_EQ(IntRect(0, 10, 100, 80), frame1->GetDirtyRect());
-
-  AnimationParams params2 { IntRect(0, 40, 100, 20),
-                            FrameTimeout::FromRawMilliseconds(0),
-                            /* aFrameNum */ 2, BlendMethod::OVER,
-                            DisposalMethod::KEEP };
-  BGRAColor frameColor2(0, 0, 0xFF, 0x80);
-  RawAccessFrameRef frame2 =
-    WithBlendAnimationFilter(decoder, params2, IntSize(100, 100),
-                             [&](Decoder* aDecoder, SurfaceFilter* aFilter) {
-      auto result = aFilter->WritePixels<uint32_t>([&] {
-        return AsVariant(frameColor2.AsPixel());
-      });
-      EXPECT_EQ(WriteState::FINISHED, result);
-    });
-  EXPECT_EQ(IntRect(0, 10, 100, 80), frame2->GetDirtyRect());
-
-  ASSERT_TRUE(frame2.get() != nullptr);
-
-  BGRAColor blendedColor(0, 0x20, 0x80, 0xA0, true); // already premultiplied
-  EXPECT_EQ(SkPMSrcOver(frameColor2.AsPixel(), frameColor0.AsPixel()),
-            blendedColor.AsPixel());
-
-  RefPtr<SourceSurface> surface = frame2->GetSourceSurface();
-  EXPECT_TRUE(RowsAreSolidColor(surface, 0, 40, frameColor0));
-  EXPECT_TRUE(RowsAreSolidColor(surface, 40, 20, blendedColor));
-  EXPECT_TRUE(RowsAreSolidColor(surface, 60, 40, frameColor0));
-}
-
-TEST(ImageBlendAnimationFilter, RestorePreviousWithSource)
-{
-  RefPtr<Decoder> decoder = CreateTrivialBlendingDecoder();
-  ASSERT_TRUE(decoder != nullptr);
-
-  AnimationParams params0 { IntRect(0, 0, 100, 100),
-                            FrameTimeout::FromRawMilliseconds(0),
-                            /* aFrameNum */ 0, BlendMethod::SOURCE,
-                            DisposalMethod::KEEP };
-  BGRAColor frameColor0(0, 0xFF, 0, 0x40);
-  RawAccessFrameRef frame0 =
-    WithBlendAnimationFilter(decoder, params0, IntSize(100, 100),
-                             [&](Decoder* aDecoder, SurfaceFilter* aFilter) {
-      auto result = aFilter->WritePixels<uint32_t>([&] {
-        return AsVariant(frameColor0.AsPixel());
-      });
-      EXPECT_EQ(WriteState::FINISHED, result);
-    });
-  EXPECT_EQ(IntRect(0, 0, 100, 100), frame0->GetDirtyRect());
-
-  AnimationParams params1 { IntRect(0, 10, 100, 80),
-                            FrameTimeout::FromRawMilliseconds(0),
-                            /* aFrameNum */ 1, BlendMethod::SOURCE,
-                            DisposalMethod::RESTORE_PREVIOUS };
-  BGRAColor frameColor1 = BGRAColor::Green();
-  RawAccessFrameRef frame1 =
-    WithBlendAnimationFilter(decoder, params1, IntSize(100, 100),
-                             [&](Decoder* aDecoder, SurfaceFilter* aFilter) {
-      auto result = aFilter->WritePixels<uint32_t>([&] {
-        return AsVariant(frameColor1.AsPixel());
-      });
-      EXPECT_EQ(WriteState::FINISHED, result);
-    });
-  EXPECT_EQ(IntRect(0, 10, 100, 80), frame1->GetDirtyRect());
-
-  AnimationParams params2 { IntRect(0, 40, 100, 20),
-                            FrameTimeout::FromRawMilliseconds(0),
-                            /* aFrameNum */ 2, BlendMethod::SOURCE,
-                            DisposalMethod::KEEP };
-  BGRAColor frameColor2(0, 0, 0xFF, 0x80);
-  RawAccessFrameRef frame2 =
-    WithBlendAnimationFilter(decoder, params2, IntSize(100, 100),
-                             [&](Decoder* aDecoder, SurfaceFilter* aFilter) {
-      auto result = aFilter->WritePixels<uint32_t>([&] {
-        return AsVariant(frameColor2.AsPixel());
-      });
-      EXPECT_EQ(WriteState::FINISHED, result);
-    });
-  EXPECT_EQ(IntRect(0, 10, 100, 80), frame2->GetDirtyRect());
-
-  ASSERT_TRUE(frame2.get() != nullptr);
-
-  RefPtr<SourceSurface> surface = frame2->GetSourceSurface();
-  EXPECT_TRUE(RowsAreSolidColor(surface, 0, 40, frameColor0));
-  EXPECT_TRUE(RowsAreSolidColor(surface, 40, 20, frameColor2));
-  EXPECT_TRUE(RowsAreSolidColor(surface, 60, 40, frameColor0));
-}
-
-TEST(ImageBlendAnimationFilter, RestorePreviousClearWithSource)
-{
-  RefPtr<Decoder> decoder = CreateTrivialBlendingDecoder();
-  ASSERT_TRUE(decoder != nullptr);
-
-  AnimationParams params0 { IntRect(0, 0, 100, 100),
-                            FrameTimeout::FromRawMilliseconds(0),
-                            /* aFrameNum */ 0, BlendMethod::SOURCE,
-                            DisposalMethod::KEEP };
-  BGRAColor frameColor0 = BGRAColor::Red();
-  RawAccessFrameRef frame0 =
-    WithBlendAnimationFilter(decoder, params0, IntSize(100, 100),
-                             [&](Decoder* aDecoder, SurfaceFilter* aFilter) {
-      auto result = aFilter->WritePixels<uint32_t>([&] {
-        return AsVariant(frameColor0.AsPixel());
-      });
-      EXPECT_EQ(WriteState::FINISHED, result);
-    });
-  EXPECT_EQ(IntRect(0, 0, 100, 100), frame0->GetDirtyRect());
-
-  AnimationParams params1 { IntRect(0, 0, 100, 20),
-                            FrameTimeout::FromRawMilliseconds(0),
-                            /* aFrameNum */ 1, BlendMethod::SOURCE,
-                            DisposalMethod::CLEAR };
-  BGRAColor frameColor1 = BGRAColor::Blue();
-  RawAccessFrameRef frame1 =
-    WithBlendAnimationFilter(decoder, params1, IntSize(100, 100),
-                             [&](Decoder* aDecoder, SurfaceFilter* aFilter) {
-      auto result = aFilter->WritePixels<uint32_t>([&] {
-        return AsVariant(frameColor1.AsPixel());
-      });
-      EXPECT_EQ(WriteState::FINISHED, result);
-    });
-  EXPECT_EQ(IntRect(0, 0, 100, 20), frame1->GetDirtyRect());
-
-  AnimationParams params2 { IntRect(0, 10, 100, 80),
-                            FrameTimeout::FromRawMilliseconds(0),
-                            /* aFrameNum */ 2, BlendMethod::SOURCE,
-                            DisposalMethod::RESTORE_PREVIOUS };
-  BGRAColor frameColor2 = BGRAColor::Green();
-  RawAccessFrameRef frame2 =
-    WithBlendAnimationFilter(decoder, params2, IntSize(100, 100),
-                             [&](Decoder* aDecoder, SurfaceFilter* aFilter) {
-      auto result = aFilter->WritePixels<uint32_t>([&] {
-        return AsVariant(frameColor2.AsPixel());
-      });
-      EXPECT_EQ(WriteState::FINISHED, result);
-    });
-  EXPECT_EQ(IntRect(0, 0, 100, 90), frame2->GetDirtyRect());
-
-  AnimationParams params3 { IntRect(0, 40, 100, 20),
-                            FrameTimeout::FromRawMilliseconds(0),
-                            /* aFrameNum */ 3, BlendMethod::SOURCE,
-                            DisposalMethod::KEEP };
-  BGRAColor frameColor3 = BGRAColor::Blue();
-  RawAccessFrameRef frame3 =
-    WithBlendAnimationFilter(decoder, params3, IntSize(100, 100),
-                             [&](Decoder* aDecoder, SurfaceFilter* aFilter) {
-      auto result = aFilter->WritePixels<uint32_t>([&] {
-        return AsVariant(frameColor3.AsPixel());
-      });
-      EXPECT_EQ(WriteState::FINISHED, result);
-    });
-  EXPECT_EQ(IntRect(0, 0, 100, 90), frame3->GetDirtyRect());
-
-  ASSERT_TRUE(frame3.get() != nullptr);
-
-  RefPtr<SourceSurface> surface = frame3->GetSourceSurface();
-  EXPECT_TRUE(RowsAreSolidColor(surface, 0, 20, BGRAColor::Transparent()));
-  EXPECT_TRUE(RowsAreSolidColor(surface, 20, 20, frameColor0));
-  EXPECT_TRUE(RowsAreSolidColor(surface, 40, 20, frameColor3));
-  EXPECT_TRUE(RowsAreSolidColor(surface, 60, 40, frameColor0));
-}
-
-TEST(ImageBlendAnimationFilter, PartialOverlapFrameRect)
-{
-  RefPtr<Decoder> decoder = CreateTrivialBlendingDecoder();
-  ASSERT_TRUE(decoder != nullptr);
-
-  AnimationParams params0 { IntRect(-10, -20, 110, 100),
-                            FrameTimeout::FromRawMilliseconds(0),
-                            /* aFrameNum */ 0, BlendMethod::SOURCE,
-                            DisposalMethod::KEEP };
-  BGRAColor frameColor0 = BGRAColor::Red();
-  RawAccessFrameRef frame0 =
-    WithBlendAnimationFilter(decoder, params0, IntSize(100, 100),
-                             [&](Decoder* aDecoder, SurfaceFilter* aFilter) {
-      auto result = aFilter->WritePixels<uint32_t>([&] {
-        return AsVariant(frameColor0.AsPixel());
-      });
-      EXPECT_EQ(WriteState::FINISHED, result);
-    });
-  EXPECT_EQ(IntRect(0, 0, 100, 100), frame0->GetDirtyRect());
-
-  RefPtr<SourceSurface> surface = frame0->GetSourceSurface();
-  EXPECT_TRUE(RowsAreSolidColor(surface, 0, 80, frameColor0));
-  EXPECT_TRUE(RowsAreSolidColor(surface, 80, 20, BGRAColor::Transparent()));
-}
-
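
For reference, the expectations in the deleted KeepWithOver and
RestorePreviousWithOver tests follow the usual premultiplied OVER operator,
out = src + dst * (255 - src.alpha) / 255 per channel (SkPMSrcOver uses a fast
approximation, so the arithmetic below is approximate):

    frameColor0 = BGRAColor(0, 0xFF, 0, 0x40)  ->  premultiplied ~ (B 0x00, G 0x40, R 0x00, A 0x40)
    frameColor1 = BGRAColor(0, 0, 0xFF, 0x80)  ->  premultiplied ~ (B 0x00, G 0x00, R 0x80, A 0x80)

    out = frameColor1 + frameColor0 * (255 - 0x80) / 255:
      A: 0x80 + 0x40 * 127 / 255  ~ 0xA0
      R: 0x80 + 0x00              = 0x80
      G: 0x00 + 0x40 * 127 / 255  ~ 0x20
      B: 0x00

which matches the asserted blendedColor(0, 0x20, 0x80, 0xA0, /* premultiplied */ true).
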
--- a/image/test/gtest/TestDecoders.cpp
+++ b/image/test/gtest/TestDecoders.cpp
@@ -1,16 +1,15 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "gtest/gtest.h"
 
 #include "Common.h"
-#include "AnimationSurfaceProvider.h"
 #include "Decoder.h"
 #include "DecoderFactory.h"
 #include "decoders/nsBMPDecoder.h"
 #include "IDecodingTask.h"
 #include "ImageOps.h"
 #include "imgIContainer.h"
 #include "imgITools.h"
 #include "ImageFactory.h"
@@ -111,17 +110,16 @@ void WithSingleChunkDecode(const ImageTe
   ASSERT_TRUE(NS_SUCCEEDED(rv));
   sourceBuffer->Complete(NS_OK);
 
   // Create a decoder.
   DecoderType decoderType =
     DecoderFactory::GetDecoderType(aTestCase.mMimeType);
   RefPtr<Decoder> decoder =
     DecoderFactory::CreateAnonymousDecoder(decoderType, sourceBuffer, aOutputSize,
-                                           DecoderFlags::FIRST_FRAME_ONLY,
                                            DefaultSurfaceFlags());
   ASSERT_TRUE(decoder != nullptr);
   RefPtr<IDecodingTask> task = new AnonymousDecodingTask(WrapNotNull(decoder));
 
   // Run the full decoder synchronously.
   task->Run();
 
   // Call the lambda to verify the expected results.
@@ -149,17 +147,16 @@ CheckDecoderMultiChunk(const ImageTestCa
 
   // Create a SourceBuffer and a decoder.
   auto sourceBuffer = MakeNotNull<RefPtr<SourceBuffer>>();
   sourceBuffer->ExpectLength(length);
   DecoderType decoderType =
     DecoderFactory::GetDecoderType(aTestCase.mMimeType);
   RefPtr<Decoder> decoder =
     DecoderFactory::CreateAnonymousDecoder(decoderType, sourceBuffer, Nothing(),
-                                           DecoderFlags::FIRST_FRAME_ONLY,
                                            DefaultSurfaceFlags());
   ASSERT_TRUE(decoder != nullptr);
   RefPtr<IDecodingTask> task = new AnonymousDecodingTask(WrapNotNull(decoder));
 
   for (uint64_t read = 0; read < length ; ++read) {
     uint64_t available = 0;
     rv = inputStream->Available(&available);
     ASSERT_TRUE(available > 0);
@@ -202,138 +199,16 @@ CheckDownscaleDuringDecode(const ImageTe
     // small amount of fuzz; this is just the nature of Lanczos downscaling.
     EXPECT_TRUE(RowsAreSolidColor(surface, 0, 4, BGRAColor::Green(), /* aFuzz = */ 47));
     EXPECT_TRUE(RowsAreSolidColor(surface, 6, 3, BGRAColor::Red(), /* aFuzz = */ 27));
     EXPECT_TRUE(RowsAreSolidColor(surface, 11, 3, BGRAColor::Green(), /* aFuzz = */ 47));
     EXPECT_TRUE(RowsAreSolidColor(surface, 16, 4, BGRAColor::Red(), /* aFuzz = */ 27));
   });
 }
 
-static void
-CheckAnimationDecoderResults(const ImageTestCase& aTestCase,
-                             AnimationSurfaceProvider* aProvider,
-                             Decoder* aDecoder)
-{
-  EXPECT_TRUE(aDecoder->GetDecodeDone());
-  EXPECT_EQ(bool(aTestCase.mFlags & TEST_CASE_HAS_ERROR),
-            aDecoder->HasError());
-
-  if (aTestCase.mFlags & TEST_CASE_HAS_ERROR) {
-    return;  // That's all we can check for bad images.
-  }
-
-  // The decoder should get the correct size.
-  IntSize size = aDecoder->Size();
-  EXPECT_EQ(aTestCase.mSize.width, size.width);
-  EXPECT_EQ(aTestCase.mSize.height, size.height);
-
-  if (aTestCase.mFlags & TEST_CASE_IGNORE_OUTPUT) {
-    return;
-  }
-
-  // Check the output.
-  AutoTArray<BGRAColor, 2> framePixels;
-  framePixels.AppendElement(BGRAColor::Green());
-  framePixels.AppendElement(BGRAColor(0x7F, 0x7F, 0x7F, 0xFF));
-
-  DrawableSurface drawableSurface(WrapNotNull(aProvider));
-  for (size_t i = 0; i < framePixels.Length(); ++i) {
-    nsresult rv = drawableSurface.Seek(i);
-    EXPECT_TRUE(NS_SUCCEEDED(rv));
-
-    // Check the first frame, all green.
-    RawAccessFrameRef rawFrame = drawableSurface->RawAccessRef();
-    RefPtr<SourceSurface> surface = rawFrame->GetSourceSurface();
-
-    // Verify that the resulting surfaces matches our expectations.
-    EXPECT_TRUE(surface->IsDataSourceSurface());
-    EXPECT_TRUE(surface->GetFormat() == SurfaceFormat::B8G8R8X8 ||
-                surface->GetFormat() == SurfaceFormat::B8G8R8A8);
-    EXPECT_EQ(aTestCase.mOutputSize, surface->GetSize());
-    EXPECT_TRUE(IsSolidColor(surface, framePixels[i],
-                             aTestCase.mFlags & TEST_CASE_IS_FUZZY ? 1 : 0));
-  }
-
-  // Should be no more frames.
-  nsresult rv = drawableSurface.Seek(framePixels.Length());
-  EXPECT_TRUE(NS_FAILED(rv));
-}
-
-template <typename Func>
-static void
-WithSingleChunkAnimationDecode(const ImageTestCase& aTestCase,
-                               Func aResultChecker)
-{
-  // Create an image.
-  RefPtr<Image> image =
-    ImageFactory::CreateAnonymousImage(nsDependentCString(aTestCase.mMimeType));
-  ASSERT_TRUE(!image->HasError());
-
-  NotNull<RefPtr<RasterImage>> rasterImage =
-    WrapNotNull(static_cast<RasterImage*>(image.get()));
-
-  nsCOMPtr<nsIInputStream> inputStream = LoadFile(aTestCase.mPath);
-  ASSERT_TRUE(inputStream != nullptr);
-
-  // Figure out how much data we have.
-  uint64_t length;
-  nsresult rv = inputStream->Available(&length);
-  ASSERT_TRUE(NS_SUCCEEDED(rv));
-
-  // Write the data into a SourceBuffer.
-  NotNull<RefPtr<SourceBuffer>> sourceBuffer = WrapNotNull(new SourceBuffer());
-  sourceBuffer->ExpectLength(length);
-  rv = sourceBuffer->AppendFromInputStream(inputStream, length);
-  ASSERT_TRUE(NS_SUCCEEDED(rv));
-  sourceBuffer->Complete(NS_OK);
-
-  // Create a metadata decoder first, because otherwise RasterImage will get
-  // unhappy about finding out the image is animated during a full decode.
-  DecoderType decoderType =
-    DecoderFactory::GetDecoderType(aTestCase.mMimeType);
-  RefPtr<IDecodingTask> task =
-    DecoderFactory::CreateMetadataDecoder(decoderType, rasterImage, sourceBuffer);
-  ASSERT_TRUE(task != nullptr);
-
-  // Run the metadata decoder synchronously.
-  task->Run();
-
-  // Create a decoder.
-  DecoderFlags decoderFlags = DecoderFlags::BLEND_ANIMATION;
-  SurfaceFlags surfaceFlags = DefaultSurfaceFlags();
-  RefPtr<Decoder> decoder =
-    DecoderFactory::CreateAnonymousDecoder(decoderType, sourceBuffer, Nothing(),
-                                           decoderFlags, surfaceFlags);
-  ASSERT_TRUE(decoder != nullptr);
-
-  // Create an AnimationSurfaceProvider which will manage the decoding process
-  // and make this decoder's output available in the surface cache.
-  SurfaceKey surfaceKey =
-    RasterSurfaceKey(aTestCase.mOutputSize, surfaceFlags, PlaybackType::eAnimated);
-  RefPtr<AnimationSurfaceProvider> provider =
-    new AnimationSurfaceProvider(rasterImage,
-                                 surfaceKey,
-                                 WrapNotNull(decoder),
-                                 /* aCurrentFrame */ 0);
-
-  // Run the full decoder synchronously.
-  provider->Run();
-
-  // Call the lambda to verify the expected results.
-  aResultChecker(provider, decoder);
-}
-
-static void
-CheckAnimationDecoderSingleChunk(const ImageTestCase& aTestCase)
-{
-  WithSingleChunkAnimationDecode(aTestCase, [&](AnimationSurfaceProvider* aProvider, Decoder* aDecoder) {
-    CheckAnimationDecoderResults(aTestCase, aProvider, aDecoder);
-  });
-}
-
 class ImageDecoders : public ::testing::Test
 {
 protected:
   AutoInitializeImageLib mInit;
 };
 
 TEST_F(ImageDecoders, PNGSingleChunk)
 {
@@ -435,36 +310,26 @@ TEST_F(ImageDecoders, AnimatedGIFSingleC
   CheckDecoderSingleChunk(GreenFirstFrameAnimatedGIFTestCase());
 }
 
 TEST_F(ImageDecoders, AnimatedGIFMultiChunk)
 {
   CheckDecoderMultiChunk(GreenFirstFrameAnimatedGIFTestCase());
 }
 
-TEST_F(ImageDecoders, AnimatedGIFWithBlendedFrames)
-{
-  CheckAnimationDecoderSingleChunk(GreenFirstFrameAnimatedGIFTestCase());
-}
-
 TEST_F(ImageDecoders, AnimatedPNGSingleChunk)
 {
   CheckDecoderSingleChunk(GreenFirstFrameAnimatedPNGTestCase());
 }
 
 TEST_F(ImageDecoders, AnimatedPNGMultiChunk)
 {
   CheckDecoderMultiChunk(GreenFirstFrameAnimatedPNGTestCase());
 }
 
-TEST_F(ImageDecoders, AnimatedPNGWithBlendedFrames)
-{
-  CheckAnimationDecoderSingleChunk(GreenFirstFrameAnimatedPNGTestCase());
-}
-
 TEST_F(ImageDecoders, CorruptSingleChunk)
 {
   CheckDecoderSingleChunk(CorruptTestCase());
 }
 
 TEST_F(ImageDecoders, CorruptMultiChunk)
 {
   CheckDecoderMultiChunk(CorruptTestCase());
--- a/image/test/gtest/TestMetadata.cpp
+++ b/image/test/gtest/TestMetadata.cpp
@@ -103,17 +103,16 @@ CheckMetadata(const ImageTestCase& aTest
   EXPECT_EQ(expectTransparency, bool(metadataProgress & FLAG_HAS_TRANSPARENCY));
 
   EXPECT_EQ(bool(aTestCase.mFlags & TEST_CASE_IS_ANIMATED),
             bool(metadataProgress & FLAG_IS_ANIMATED));
 
   // Create a full decoder, so we can compare the result.
   decoder =
     DecoderFactory::CreateAnonymousDecoder(decoderType, sourceBuffer, Nothing(),
-                                           DecoderFlags::FIRST_FRAME_ONLY,
                                            DefaultSurfaceFlags());
   ASSERT_TRUE(decoder != nullptr);
   task = new AnonymousDecodingTask(WrapNotNull(decoder));
 
   if (aBMPWithinICO == BMPWithinICO::YES) {
     static_cast<nsBMPDecoder*>(decoder.get())->SetIsWithinICO();
   }
 
--- a/image/test/gtest/moz.build
+++ b/image/test/gtest/moz.build
@@ -5,17 +5,16 @@
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 Library('imagetest')
 
 UNIFIED_SOURCES = [
     'Common.cpp',
     'TestADAM7InterpolatingFilter.cpp',
     'TestAnimationFrameBuffer.cpp',
-    'TestBlendAnimationFilter.cpp',
     'TestContainers.cpp',
     'TestCopyOnWrite.cpp',
     'TestDecoders.cpp',
     'TestDecodeToSurface.cpp',
     'TestDeinterlacingFilter.cpp',
     'TestLoader.cpp',
     'TestMetadata.cpp',
     'TestRemoveFrameRectFilter.cpp',
--- a/modules/libpref/init/all.js
+++ b/modules/libpref/init/all.js
@@ -4603,20 +4603,16 @@ pref("toolkit.zoomManager.zoomValues", "
 // before it starts to discard already displayed frames and redecode them as
 // necessary.
 pref("image.animated.decode-on-demand.threshold-kb", 20480);
 
 // The minimum number of frames we want to have buffered ahead of an
 // animation's currently displayed frame.
 pref("image.animated.decode-on-demand.batch-size", 6);
 
-// Whether we should generate full frames at decode time or partial frames which
-// are combined at display time (historical behavior and default).
-pref("image.animated.generate-full-frames", false);
-
 // Resume an animated image from the last displayed frame rather than
 // advancing when out of view.
 pref("image.animated.resume-from-last-displayed", true);
 
 // Maximum number of surfaces for an image before entering "factor of 2" mode.
 // This is in addition to the number of "native" sizes of an image. A native size
 // is a size for which we can decode a frame without up or downscaling. Most
 // images only have 1, but some (e.g. ICOs) may have multiple frames for the